/* eslint-env mocha */
'use strict'
const chai = require('chai')
chai.use(require('dirty-chai'))
const expect = chai.expect
const path = require('path')
const promisify = require('util').promisify
const noop = () => {}
const mkdirp = require('mkdirp')
const rimraf = promisify(require('rimraf'))
const fs = require('fs')
const fsReadFile = promisify(require('fs').readFile || noop)
const Key = require('interface-datastore').Key
const utils = require('interface-datastore').utils
const ShardingStore = require('datastore-core').ShardingDatastore
const sh = require('datastore-core').shard
const isNode = require('detect-node')
const FsStore = require('../src')
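
// Tests for the filesystem-backed datastore: construction against a missing
// folder, the sharded on-disk layout, and interop with a repo written by go-ipfs.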
describe('FsDatastore', () => {
  if (!isNode) {
    it('only supports node.js', () => {
    })
    return
  }

  describe('construction', () => {
    it('defaults - folder missing', () => {
      const dir = utils.tmpdir()
      // constructing the store against a directory that does not exist yet should not throw
      expect(
        () => new FsStore(dir)
      ).to.not.throw()
    })
  })
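
  // A minimal usage sketch, not part of the original suite: a put/get
  // round-trip assuming the promise-based open/put/get/close methods of the
  // interface-datastore API exercised elsewhere in this file. Marked skip so
  // it stays illustrative only.
  it.skip('put and get round-trip (sketch)', async () => {
    const dir = utils.tmpdir()
    const fstore = new FsStore(dir)
    await fstore.open()

    const key = new Key('/hello')
    const value = Buffer.from('world')
    await fstore.put(key, value)
    const res = await fstore.get(key)
    expect(res).to.be.eql(value)

    await fstore.close()
    await rimraf(dir)
  })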

  it('sharding files', async () => {
    const dir = utils.tmpdir()
    const fstore = new FsStore(dir)
    const shard = new sh.NextToLast(2)
    await ShardingStore.create(fstore, shard)

    // creating the sharded store writes the shard spec and a README alongside the data
    const file = await fsReadFile(path.join(dir, sh.SHARDING_FN))
    expect(file.toString()).to.be.eql('/repo/flatfs/shard/v1/next-to-last/2\n')

    const readme = await fsReadFile(path.join(dir, sh.README_FN))
    expect(readme.toString()).to.be.eql(sh.readme)
    await rimraf(dir)
  })
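
  // A sketch, not in the original suite: reopening the sharded store created
  // by the test above, assuming ShardingDatastore.open (used in the go interop
  // test below) reads the shard spec back from disk. Marked skip so it stays
  // illustrative only.
  it.skip('reopen a sharded store (sketch)', async () => {
    const dir = utils.tmpdir()
    await ShardingStore.create(new FsStore(dir), new sh.NextToLast(2))

    const reopened = await ShardingStore.open(new FsStore(dir))
    expect(reopened).to.exist()
    await rimraf(dir)
  })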

  it('interop with go', async () => {
    const repodir = path.join(__dirname, '/test-repo/blocks')
    const fstore = new FsStore(repodir)
    const key = new Key('CIQGFTQ7FSI2COUXWWLOQ45VUM2GUZCGAXLWCTOKKPGTUWPXHBNIVOY')
    // the go-ipfs repo shards blocks into folders named after the next-to-last two characters of the key
    const expected = fs.readFileSync(path.join(repodir, 'VO', key.toString() + '.data'))

    const flatfs = await ShardingStore.open(fstore)
    const res = await flatfs.get(key)
    const queryResult = flatfs.query({})
    const results = []

    for await (const result of queryResult) results.push(result)
    expect(results).to.have.length(23)
    expect(res).to.be.eql(expected)
  })
})