diff --git a/package.json b/package.json index 0300c5b8d1..d29278f6de 100644 --- a/package.json +++ b/package.json @@ -15,7 +15,7 @@ ], "main": "src/core/index.js", "browser": { - "./src/core/components/init-assets.js": false, + "./src/core/runtime/init-assets-nodejs.js": "./src/core/runtime/init-assets-browser.js", "./src/core/runtime/add-from-fs-nodejs.js": "./src/core/runtime/add-from-fs-browser.js", "./src/core/runtime/config-nodejs.js": "./src/core/runtime/config-browser.js", "./src/core/runtime/dns-nodejs.js": "./src/core/runtime/dns-browser.js", @@ -25,7 +25,8 @@ "./src/core/runtime/repo-nodejs.js": "./src/core/runtime/repo-browser.js", "./src/core/runtime/ipld-nodejs.js": "./src/core/runtime/ipld-browser.js", "./test/utils/create-repo-nodejs.js": "./test/utils/create-repo-browser.js", - "stream": "readable-stream" + "stream": "readable-stream", + "ipfs-utils/src/files/glob-source": false }, "browser-all-ipld-formats": { "./src/core/runtime/ipld-browser.js": "./src/core/runtime/ipld-browser-all.js" @@ -100,7 +101,7 @@ "ipfs-bitswap": "^0.26.0", "ipfs-block": "~0.8.1", "ipfs-block-service": "~0.16.0", - "ipfs-http-client": "^40.0.1", + "ipfs-http-client": "github:ipfs/js-ipfs-http-client#refactor/async-iterables2", "ipfs-http-response": "~0.4.0", "ipfs-mfs": "^0.13.2", "ipfs-multipart": "^0.2.0", @@ -108,7 +109,7 @@ "ipfs-unixfs": "~0.1.16", "ipfs-unixfs-exporter": "^0.38.0", "ipfs-unixfs-importer": "^0.40.0", - "ipfs-utils": "~0.4.0", + "ipfs-utils": "^0.5.0", "ipld": "~0.25.0", "ipld-bitcoin": "~0.3.0", "ipld-dag-cbor": "~0.15.0", @@ -205,9 +206,10 @@ "execa": "^3.0.0", "form-data": "^3.0.0", "hat": "0.0.3", - "interface-ipfs-core": "^0.124.1", + "interface-ipfs-core": "github:ipfs/interface-js-ipfs-core#refactor/async-iterables", "ipfs-interop": "^0.1.1", - "ipfsd-ctl": "^0.47.2", + "ipfsd-ctl": "github:ipfs/js-ipfsd-ctl#fix/do-not-call-shutdown-twice", + "it-all": "^1.0.1", "libp2p-websocket-star": "~0.10.2", "lodash": "^4.17.15", "ncp": "^2.0.0", diff --git a/src/cli/commands/add.js b/src/cli/commands/add.js index 3bfe6f9866..b482522e02 100644 --- a/src/cli/commands/add.js +++ b/src/cli/commands/add.js @@ -156,20 +156,20 @@ module.exports = { ? 
globSource(argv.file, { recursive: argv.recursive }) : process.stdin // Pipe directly to ipfs.add - let finalHash + let finalCid try { - for await (const file of ipfs._addAsyncIterator(source, options)) { + for await (const file of ipfs.add(source, options)) { if (argv.silent) { continue } if (argv.quieter) { - finalHash = file.hash + finalCid = file.cid continue } - const cid = cidToString(file.hash, { base: argv.cidBase }) + const cid = cidToString(file.cid, { base: argv.cidBase }) let message = cid if (!argv.quiet) { @@ -184,7 +184,7 @@ module.exports = { bar.terminate() } - // Tweak the error message and add more relevant infor for the CLI + // Tweak the error message and add more relevant info for the CLI if (err.code === 'ERR_DIR_NON_RECURSIVE') { err.message = `'${err.path}' is a directory, use the '-r' flag to specify directories` } @@ -197,7 +197,7 @@ module.exports = { } if (argv.quieter) { - log(cidToString(finalHash, { base: argv.cidBase })) + log(cidToString(finalCid, { base: argv.cidBase })) } })()) } diff --git a/src/cli/commands/init.js b/src/cli/commands/init.js index a8c4f01b5f..899d2883ac 100644 --- a/src/cli/commands/init.js +++ b/src/cli/commands/init.js @@ -65,7 +65,7 @@ module.exports = { const IPFS = require('../../core') const Repo = require('ipfs-repo') - const node = new IPFS({ + const node = await IPFS.create({ repo: new Repo(path), init: false, start: false, diff --git a/src/cli/daemon.js b/src/cli/daemon.js index c2dc556a03..9c320b8098 100644 --- a/src/cli/daemon.js +++ b/src/cli/daemon.js @@ -55,9 +55,7 @@ class Daemon { // start the daemon const ipfsOpts = Object.assign({}, { init: true, start: true, libp2p }, this._options) - const ipfs = await IPFS.create(ipfsOpts) - - this._ipfs = ipfs + const ipfs = this._ipfs = await IPFS.create(ipfsOpts) // start HTTP servers (if API or Gateway is enabled in options) const httpApi = new HttpApi(ipfs, ipfsOpts) diff --git a/src/core/api-manager.js b/src/core/api-manager.js new file mode 100644 index 0000000000..1000a28b22 --- /dev/null +++ b/src/core/api-manager.js @@ -0,0 +1,21 @@ +module.exports = class ApiManager { + constructor () { + this._api = {} + this._onUndef = () => undefined + this.api = new Proxy({}, { + get: (_, prop) => { + if (prop === 'then') return undefined // Not a promise! + return this._api[prop] === undefined ? 
this._onUndef(prop) : this._api[prop] + }, + has: (_, prop) => prop in this._api + }) + } + + update (nextApi, onUndef) { + const prevApi = this._api + const prevUndef = this._onUndef + this._api = nextApi + if (onUndef) this._onUndef = onUndef + return { cancel: () => this.update(prevApi, prevUndef), api: this.api } + } +} diff --git a/src/core/boot.js b/src/core/boot.js deleted file mode 100644 index aa47be16af..0000000000 --- a/src/core/boot.js +++ /dev/null @@ -1,90 +0,0 @@ -'use strict' - -const RepoErrors = require('ipfs-repo').errors - -// Boot an IPFS node depending on the options set -module.exports = async (self) => { - self.log('booting') - const options = self._options - const doInit = options.init - const doStart = options.start - - // Checks if a repo exists, and if so opens it - // Will return callback with a bool indicating the existence - // of the repo - async function repoOpened () { - // nothing to do - if (!self._repo.closed) { - return true - } - - try { - await self._repo.open() - } catch (err) { - if (isRepoUninitializedError(err)) { - return false - } - - if (err) { - throw err - } - } - - return true - } - - // Do the actual boot sequence - try { - // Init with existing initialized, opened, repo - if (await repoOpened()) { - try { - await self.init({ repo: self._repo }) - } catch (err) { - throw Object.assign(err, { emitted: true }) - } - } else if (doInit) { - const defaultInitOptions = { - bits: 2048, - pass: self._options.pass - } - - const initOptions = Object.assign(defaultInitOptions, typeof options.init === 'object' ? options.init : {}) - - await self.init(initOptions) - } - - if (doStart) { - await self.start() - } - - self.log('booted') - self.emit('ready') - } catch (err) { - if (!err.emitted) { - self.emit('error', err) - } - } -} - -function isRepoUninitializedError (err) { - // If the error is that no repo exists, - // which happens when the version file is not found - // we just want to signal that no repo exist, not - // fail the whole process. - - // Use standardized errors as much as possible - if (err.code === RepoErrors.ERR_REPO_NOT_INITIALIZED) { - return true - } - - // TODO: As error codes continue to be standardized, this logic can be phase out; - // it is here to maintain compatibility - if (err.message.match(/not found/) || // indexeddb - err.message.match(/ENOENT/) || // fs - err.message.match(/No value/) // memory - ) { - return true - } - - return false -} diff --git a/src/core/components/add/index.js b/src/core/components/add/index.js new file mode 100644 index 0000000000..ee655817e1 --- /dev/null +++ b/src/core/components/add/index.js @@ -0,0 +1,121 @@ +'use strict' + +const importer = require('ipfs-unixfs-importer') +const normaliseAddInput = require('ipfs-utils/src/files/normalise-input') +const { parseChunkerString } = require('./utils') +const pipe = require('it-pipe') + +module.exports = ({ ipld, dag, gcLock, preload, pin, constructorOptions }) => { + const isShardingEnabled = constructorOptions.EXPERIMENTAL && constructorOptions.EXPERIMENTAL.sharding + return async function * add (source, options) { + options = options || {} + + const opts = { + shardSplitThreshold: isShardingEnabled ? 
1000 : Infinity, + ...options, + strategy: 'balanced', + ...parseChunkerString(options.chunker) + } + + // CID v0 is for multihashes encoded with sha2-256 + if (opts.hashAlg && opts.cidVersion !== 1) { + opts.cidVersion = 1 + } + + if (opts.trickle) { + opts.strategy = 'trickle' + } + + delete opts.trickle + + if (opts.progress) { + let total = 0 + const prog = opts.progress + + opts.progress = (bytes) => { + total += bytes + prog(total) + } + } + + const iterator = pipe( + normaliseAddInput(source), + source => importer(source, ipld, opts), + transformFile(dag, opts), + preloadFile(preload, opts), + pinFile(pin, opts) + ) + + const releaseLock = await gcLock.readLock() + + try { + yield * iterator + } finally { + releaseLock() + } + } +} + +function transformFile (dag, opts) { + return async function * (source) { + for await (const { cid, path, unixfs } of source) { + if (opts.onlyHash) { + yield { + cid, + path: path || cid.toString(), + size: unixfs.fileSize() + } + + continue + } + + const { value: node } = await dag.get(cid, { ...opts, preload: false }) + + yield { + cid, + path: path || cid.toString(), + size: Buffer.isBuffer(node) ? node.length : node.size + } + } + } +} + +function preloadFile (preload, opts) { + return async function * (source) { + for await (const file of source) { + const isRootFile = !file.path || opts.wrapWithDirectory + ? file.path === '' + : !file.path.includes('/') + + const shouldPreload = isRootFile && !opts.onlyHash && opts.preload !== false + + if (shouldPreload) { + preload(file.cid) + } + + yield file + } + } +} + +function pinFile (pin, opts) { + return async function * (source) { + for await (const file of source) { + // Pin a file if it is the root dir of a recursive add or the single file + // of a direct add. + const isRootDir = !file.path.includes('/') + const shouldPin = (opts.pin == null ? 
true : opts.pin) && isRootDir && !opts.onlyHash + + if (shouldPin) { + // Note: add() has already taken a GC lock, so tell + // pin.add() not to take a (second) GC lock + await pin.add(file.cid, { + preload: false, + lock: false + }) + } + + yield file + } + } +} diff --git a/src/core/components/add/utils.js b/src/core/components/add/utils.js new file mode 100644 index 0000000000..5c3ee6cc2a --- /dev/null +++ b/src/core/components/add/utils.js @@ -0,0 +1,87 @@ +'use strict' + +/** + * Parses chunker string into options used by DAGBuilder in ipfs-unixfs-importer + * + * + * @param {String} chunker Chunker algorithm supported formats: + * "size-{size}" + * "rabin" + * "rabin-{avg}" + * "rabin-{min}-{avg}-{max}" + * + * @return {Object} Chunker options for DAGBuilder + */ +const parseChunkerString = (chunker) => { + if (!chunker) { + return { + chunker: 'fixed' + } + } else if (chunker.startsWith('size-')) { + const sizeStr = chunker.split('-')[1] + const size = parseInt(sizeStr) + if (isNaN(size)) { + throw new Error('Chunker parameter size must be an integer') + } + return { + chunker: 'fixed', + chunkerOptions: { + maxChunkSize: size + } + } + } else if (chunker.startsWith('rabin')) { + return { + chunker: 'rabin', + chunkerOptions: parseRabinString(chunker) + } + } else { + throw new Error(`Unrecognized chunker option: ${chunker}`) + } +} + +/** + * Parses rabin chunker string + * + * @param {String} chunker Chunker algorithm supported formats: + * "rabin" + * "rabin-{avg}" + * "rabin-{min}-{avg}-{max}" + * + * @return {Object} rabin chunker options + */ +const parseRabinString = (chunker) => { + const options = {} + const parts = chunker.split('-') + switch (parts.length) { + case 1: + options.avgChunkSize = 262144 + break + case 2: + options.avgChunkSize = parseChunkSize(parts[1], 'avg') + break + case 4: + options.minChunkSize = parseChunkSize(parts[1], 'min') + options.avgChunkSize = parseChunkSize(parts[2], 'avg') + options.maxChunkSize = parseChunkSize(parts[3], 'max') + break + default: + throw new Error('Incorrect chunker format (expected "rabin", "rabin-[avg]" or "rabin-[min]-[avg]-[max]")') + } + + return options +} + +const parseChunkSize = (str, name) => { + const size = parseInt(str) + if (isNaN(size)) { + throw new Error(`Chunker parameter ${name} must be an integer`) + } + + return size +} + +module.exports = { + parseChunkSize, + parseRabinString, + parseChunkerString +} diff --git a/src/core/components/bitswap.js b/src/core/components/bitswap.js deleted file mode 100644 index 654f9f045b..0000000000 --- a/src/core/components/bitswap.js +++ /dev/null @@ -1,72 +0,0 @@ -'use strict' - -const OFFLINE_ERROR = require('../utils').OFFLINE_ERROR -const callbackify = require('callbackify') -const Big = require('bignumber.js') -const CID = require('cids') -const PeerId = require('peer-id') -const errCode = require('err-code') - -function formatWantlist (list, cidBase) { - return Array.from(list).map((e) => ({ '/': e[1].cid.toBaseEncodedString(cidBase) })) -} - -module.exports = function bitswap (self) { - return { - wantlist: callbackify.variadic(async (peerId) => { // eslint-disable-line require-await - if (!self.isOnline()) { - throw new Error(OFFLINE_ERROR) - } - - let list - - if (peerId) { - peerId = PeerId.createFromB58String(peerId) - - list = self._bitswap.wantlistForPeer(peerId) - } else { - list = self._bitswap.getWantlist() - } - - return { Keys: formatWantlist(list) } - }), - - stat: callbackify(async () => { // eslint-disable-line require-await - if 
(!self.isOnline()) { - throw new Error(OFFLINE_ERROR) - } - - const snapshot = self._bitswap.stat().snapshot - - return { - provideBufLen: parseInt(snapshot.providesBufferLength.toString()), - blocksReceived: new Big(snapshot.blocksReceived), - wantlist: formatWantlist(self._bitswap.getWantlist()), - peers: self._bitswap.peers().map((id) => id.toB58String()), - dupBlksReceived: new Big(snapshot.dupBlksReceived), - dupDataReceived: new Big(snapshot.dupDataReceived), - dataReceived: new Big(snapshot.dataReceived), - blocksSent: new Big(snapshot.blocksSent), - dataSent: new Big(snapshot.dataSent) - } - }), - - unwant: callbackify(async (keys) => { // eslint-disable-line require-await - if (!self.isOnline()) { - throw new Error(OFFLINE_ERROR) - } - - if (!Array.isArray(keys)) { - keys = [keys] - } - - try { - keys = keys.map((key) => new CID(key)) - } catch (err) { - throw errCode(err, 'ERR_INVALID_CID') - } - - return self._bitswap.unwant(keys) - }) - } -} diff --git a/src/core/components/bitswap/stat.js b/src/core/components/bitswap/stat.js new file mode 100644 index 0000000000..b2b609d44d --- /dev/null +++ b/src/core/components/bitswap/stat.js @@ -0,0 +1,25 @@ +'use strict' + +const Big = require('bignumber.js') + +function formatWantlist (list) { + return Array.from(list).map((e) => ({ '/': e[1].cid.toString() })) +} + +module.exports = ({ bitswap }) => { + return async function stat () { // eslint-disable-line require-await + const snapshot = bitswap.stat().snapshot + + return { + provideBufLen: parseInt(snapshot.providesBufferLength.toString()), + blocksReceived: new Big(snapshot.blocksReceived), + wantlist: formatWantlist(bitswap.getWantlist()), + peers: bitswap.peers().map((id) => id.toB58String()), + dupBlksReceived: new Big(snapshot.dupBlksReceived), + dupDataReceived: new Big(snapshot.dupDataReceived), + dataReceived: new Big(snapshot.dataReceived), + blocksSent: new Big(snapshot.blocksSent), + dataSent: new Big(snapshot.dataSent) + } + } +} diff --git a/src/core/components/bitswap/unwant.js b/src/core/components/bitswap/unwant.js new file mode 100644 index 0000000000..9f71172ef4 --- /dev/null +++ b/src/core/components/bitswap/unwant.js @@ -0,0 +1,20 @@ +'use strict' + +const CID = require('cids') +const errCode = require('err-code') + +module.exports = ({ bitswap }) => { + return async function unwant (keys) { // eslint-disable-line require-await + if (!Array.isArray(keys)) { + keys = [keys] + } + + try { + keys = keys.map((key) => new CID(key)) + } catch (err) { + throw errCode(err, 'ERR_INVALID_CID') + } + + return bitswap.unwant(keys) + } +} diff --git a/src/core/components/bitswap/wantlist.js b/src/core/components/bitswap/wantlist.js new file mode 100644 index 0000000000..02c882abc6 --- /dev/null +++ b/src/core/components/bitswap/wantlist.js @@ -0,0 +1,17 @@ +'use strict' + +const PeerId = require('peer-id') + +function formatWantlist (list) { + return Array.from(list).map((e) => ({ '/': e[1].cid.toString() })) +} + +module.exports = ({ bitswap }) => { + return async function wantlist (peerId) { // eslint-disable-line require-await + const list = peerId + ? 
bitswap.wantlistForPeer(PeerId.createFromCID(peerId)) + : bitswap.getWantlist() + + return { Keys: formatWantlist(list) } + } +} diff --git a/src/core/components/config.js b/src/core/components/config.js index 381a36ce61..c747387c73 100644 --- a/src/core/components/config.js +++ b/src/core/components/config.js @@ -1,17 +1,16 @@ 'use strict' -const callbackify = require('callbackify') const getDefaultConfig = require('../runtime/config-nodejs.js') const log = require('debug')('ipfs:core:config') -module.exports = function config (self) { +module.exports = ({ repo }) => { return { - get: callbackify.variadic(self._repo.config.get), - set: callbackify(self._repo.config.set), - replace: callbackify.variadic(self._repo.config.set), + get: repo.config.get, + set: repo.config.set, + replace: repo.config.set, profiles: { - apply: callbackify.variadic(applyProfile), - list: callbackify.variadic(listProfiles) + apply: applyProfile, + list: listProfiles } } @@ -26,12 +25,12 @@ module.exports = function config (self) { } try { - const oldCfg = await self.config.get() + const oldCfg = await repo.config.get() let newCfg = JSON.parse(JSON.stringify(oldCfg)) // clone newCfg = profile.transform(newCfg) if (!dryRun) { - await self.config.replace(newCfg) + await repo.config.set(newCfg) } // Scrub private key from output diff --git a/src/core/components/files-regular/add-async-iterator.js b/src/core/components/files-regular/add-async-iterator.js index e138a1cd66..f69d7268f6 100644 --- a/src/core/components/files-regular/add-async-iterator.js +++ b/src/core/components/files-regular/add-async-iterator.js @@ -1,155 +1,24 @@ 'use strict' -const importer = require('ipfs-unixfs-importer') -const normaliseAddInput = require('ipfs-utils/src/files/normalise-input') -const { parseChunkerString } = require('./utils') -const pipe = require('it-pipe') -const log = require('debug')('ipfs:add') -log.error = require('debug')('ipfs:add:error') - -function noop () {} +const createAdd = require('../add')
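+// Delegates to the new add component; the wrapper below re-exposes the legacy string `hash` property alongside `cid` while callers migrate to the new API.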
module.exports = function (self) { - // Internal add func that gets used by all add funcs - return async function * addAsyncIterator (source, options) { - options = options || {} - - const chunkerOptions = parseChunkerString(options.chunker) - - const opts = Object.assign({}, { - shardSplitThreshold: self._options.EXPERIMENTAL.sharding - ? 1000 - : Infinity - }, options, { - strategy: 'balanced', - chunker: chunkerOptions.chunker, - chunkerOptions: chunkerOptions.chunkerOptions - }) - - // CID v0 is for multihashes encoded with sha2-256 - if (opts.hashAlg && opts.cidVersion !== 1) { - opts.cidVersion = 1 - } - - if (opts.trickle) { - opts.strategy = 'trickle' - } - - delete opts.trickle - - let total = 0 - - const prog = opts.progress || noop - const progress = (bytes) => { - total += bytes - prog(total) - } - - opts.progress = progress - - const iterator = pipe( - normaliseAddInput(source), - doImport(self, opts), - transformFile(self, opts), - preloadFile(self, opts), - pinFile(self, opts) - ) - - const releaseLock = await self._gcLock.readLock() - - try { - yield * iterator - } finally { - releaseLock() - } - } -} - -function doImport (ipfs, opts) { - return async function * (source) { // eslint-disable-line require-await - yield * importer(source, ipfs._ipld, opts) - } -} - -function transformFile (ipfs, opts) { - return async function * (source) { - for await (const file of source) { - let cid = file.cid - const hash = cid.toBaseEncodedString() - let path = file.path ? file.path : hash - - if (opts.wrapWithDirectory && !file.path) { - path = '' - } - - if (opts.onlyHash) { - yield { - path, - hash, - size: file.unixfs.fileSize() - } - - return - } - - const node = await ipfs.object.get(file.cid, Object.assign({}, opts, { preload: false })) - - if (opts.cidVersion === 1) { - cid = cid.toV1() - } - - let size = node.size - - if (Buffer.isBuffer(node)) { - size = node.length - } - - yield { - path, - hash, - size - } - } - } -} - -function preloadFile (ipfs, opts) { - return async function * (source) { - for await (const file of source) { - const isRootFile = !file.path || opts.wrapWithDirectory - ? file.path === '' - : !file.path.includes('/') + const { + _ipld: ipld, + dag, + _gcLock: gcLock, + _preload: preload, + pin, + _options: constructorOptions + } = self - const shouldPreload = isRootFile && !opts.onlyHash && opts.preload !== false + const add = createAdd({ ipld, dag, gcLock, preload, pin, constructorOptions }) - if (shouldPreload) { - ipfs._preload(file.hash) - } - - yield file - } - } -} - -function pinFile (ipfs, opts) { - return async function * (source) { - for await (const file of source) { - // Pin a file if it is the root dir of a recursive add or the single file - // of a direct add. - const pin = 'pin' in opts ? opts.pin : true - const isRootDir = !file.path.includes('/') - const shouldPin = pin && isRootDir && !opts.onlyHash && !opts.hashAlg - - if (shouldPin) { - // Note: addAsyncIterator() has already taken a GC lock, so tell - // pin.add() not to take a (second) GC lock - await ipfs.pin.add(file.hash, { - preload: false, - lock: false - }) - } + return async function * addAsyncIterator (source, options) { + options = options || {} - yield file + for await (const file of add(source, options)) { + yield { hash: file.cid.toString(), ...file } } } } diff --git a/src/core/components/files-regular/utils.js b/src/core/components/files-regular/utils.js index 876d0b0d48..0a5c8a57ba 100644 --- a/src/core/components/files-regular/utils.js +++ b/src/core/components/files-regular/utils.js @@ -20,86 +20,6 @@ const normalizePath = (path) => { return path } -/** - * Parses chunker string into options used by DAGBuilder in ipfs-unixfs-engine - * - * - * @param {String} chunker Chunker algorithm supported formats: - * "size-{size}" - * "rabin" - * "rabin-{avg}" - * "rabin-{min}-{avg}-{max}" - * - * @return {Object} Chunker options for DAGBuilder - */ -const parseChunkerString = (chunker) => { - if (!chunker) { - return { - chunker: 'fixed' - } - } else if (chunker.startsWith('size-')) { - const sizeStr = chunker.split('-')[1] - const size = parseInt(sizeStr) - if (isNaN(size)) { - throw new Error('Chunker parameter size must be an integer') - } - return { - chunker: 'fixed', - chunkerOptions: { - maxChunkSize: size - } - } - } else if (chunker.startsWith('rabin')) { - return { - chunker: 'rabin', - chunkerOptions: parseRabinString(chunker) - } - } else { - throw new Error(`Unrecognized chunker option: ${chunker}`) - } -} - -/** - * Parses rabin chunker string - * - * @param {String} chunker Chunker algorithm supported formats: - * "rabin" - * "rabin-{avg}" - * "rabin-{min}-{avg}-{max}" - * - * @return {Object} rabin chunker options - */ -const parseRabinString = (chunker) => { - const options = {} - const parts = chunker.split('-') - switch (parts.length) { - case 1: - options.avgChunkSize = 262144 - break - case 2: - options.avgChunkSize = parseChunkSize(parts[1], 'avg') - break - case 4: - options.minChunkSize = parseChunkSize(parts[1], 'min') - options.avgChunkSize = 
parseChunkSize(parts[2], 'avg') - options.maxChunkSize = parseChunkSize(parts[3], 'max') - break - default: - throw new Error('Incorrect chunker format (expected "rabin" "rabin-[avg]" or "rabin-[min]-[avg]-[max]"') - } - - return options -} - -const parseChunkSize = (str, name) => { - const size = parseInt(str) - if (isNaN(size)) { - throw new Error(`Chunker parameter ${name} must be an integer`) - } - - return size -} - const mapFile = (file, options) => { options = options || {} @@ -129,8 +49,5 @@ const mapFile = (file, options) => { module.exports = { normalizePath, - parseChunkSize, - parseRabinString, - parseChunkerString, mapFile } diff --git a/src/core/components/id.js b/src/core/components/id.js index a8fd75f92d..f848d96e8f 100644 --- a/src/core/components/id.js +++ b/src/core/components/id.js @@ -1,14 +1,13 @@ 'use strict' -const callbackify = require('callbackify') const pkgversion = require('../../../package.json').version -module.exports = function id (self) { - return callbackify(async () => { // eslint-disable-line require-await +module.exports = ({ peerInfo }) => { + return async function id () { // eslint-disable-line require-await return { - id: self._peerInfo.id.toB58String(), - publicKey: self._peerInfo.id.pubKey.bytes.toString('base64'), - addresses: self._peerInfo.multiaddrs + id: peerInfo.id.toB58String(), + publicKey: peerInfo.id.pubKey.bytes.toString('base64'), + addresses: peerInfo.multiaddrs .toArray() .map((ma) => ma.toString()) .filter((ma) => ma.indexOf('ipfs') >= 0) @@ -16,5 +15,5 @@ module.exports = function id (self) { agentVersion: `js-ipfs/${pkgversion}`, protocolVersion: '9000' } - }) + } } diff --git a/src/core/components/index.js b/src/core/components/index.js index ac893efbdd..4a9bcab492 100644 --- a/src/core/components/index.js +++ b/src/core/components/index.js @@ -1,31 +1,42 @@ 'use strict' -exports.preStart = require('./pre-start') -exports.start = require('./start') -exports.stop = require('./stop') -exports.isOnline = require('./is-online') -exports.version = require('./version') +exports.add = require('./add') +exports.bitswap = { + stat: require('./bitswap/stat'), + unwant: require('./bitswap/unwant'), + wantlist: require('./bitswap/wantlist') +} +exports.config = require('./config') exports.id = require('./id') -exports.repo = require('./repo') exports.init = require('./init') -exports.bootstrap = require('./bootstrap') -exports.config = require('./config') -exports.block = require('./block') -exports.object = require('./object') -exports.dag = require('./dag') -exports.libp2p = require('./libp2p') -exports.swarm = require('./swarm') +exports.object = { + data: require('./object/data'), + get: require('./object/get'), + links: require('./object/links'), + new: require('./object/new'), + patch: { + addLink: require('./object/patch/add-link'), + appendData: require('./object/patch/append-data'), + rmLink: require('./object/patch/rm-link'), + setData: require('./object/patch/set-data') + }, + put: require('./object/put'), + stat: require('./object/stat') +} exports.ping = require('./ping') -exports.pingPullStream = require('./ping-pull-stream') -exports.pingReadableStream = require('./ping-readable-stream') -exports.pin = require('./pin') -exports.filesRegular = require('./files-regular') -exports.filesMFS = require('./files-mfs') -exports.bitswap = require('./bitswap') -exports.pubsub = require('./pubsub') -exports.dht = require('./dht') -exports.dns = require('./dns') -exports.key = require('./key') -exports.stats = require('./stats') 
-exports.resolve = require('./resolve') -exports.name = require('./name') +exports.start = require('./start') +exports.stop = require('./stop') +exports.swarm = { + addrs: require('./swarm/addrs'), + connect: require('./swarm/connect'), + disconnect: require('./swarm/disconnect'), + localAddrs: require('./swarm/localAddrs'), + peers: require('./swarm/peers') +} +exports.version = require('./version') + +exports.legacy = { // TODO: these will be removed as the new API is completed + dag: require('./dag'), + libp2p: require('./libp2p'), + pin: require('./pin') +} diff --git a/src/core/components/init-assets.js b/src/core/components/init-assets.js deleted file mode 100644 index 6f0e4799d3..0000000000 --- a/src/core/components/init-assets.js +++ /dev/null @@ -1,20 +0,0 @@ -'use strict' - -const path = require('path') -const CID = require('cids') - -// Add the default assets to the repo. -module.exports = async function addDefaultAssets (self, log) { - const initDocsPath = path.join(__dirname, '../../init-files/init-docs') - - const results = await self.addFromFs(initDocsPath, { - recursive: true, - preload: false - }) - - const dir = results.filter(file => file.path === 'init-docs').pop() - const cid = new CID(dir.hash) - - log('to get started, enter:\n') - log(`\tjsipfs cat /ipfs/${cid.toBaseEncodedString()}/readme\n`) -} diff --git a/src/core/components/init.js b/src/core/components/init.js index 5b2a1ec2df..91d457d7db 100644 --- a/src/core/components/init.js +++ b/src/core/components/init.js @@ -1,164 +1,333 @@ 'use strict' -const peerId = require('peer-id') +const log = require('debug')('ipfs:components:init') +const PeerId = require('peer-id') +const PeerInfo = require('peer-info') const mergeOptions = require('merge-options') -const callbackify = require('callbackify') const promisify = require('promisify-es6') -const defaultConfig = require('../runtime/config-nodejs.js') +const getDefaultConfig = require('../runtime/config-nodejs.js') +const createRepo = require('../runtime/repo-nodejs') const Keychain = require('libp2p-keychain') -const { - DAGNode -} = require('ipld-dag-pb') +const NoKeychain = require('./no-keychain') +const GCLock = require('./pin/gc-lock') +const { DAGNode } = require('ipld-dag-pb') const UnixFs = require('ipfs-unixfs') const multicodec = require('multicodec') - +const multiaddr = require('multiaddr') +const { + AlreadyInitializingError, + AlreadyInitializedError, + NotStartedError +} = require('../errors') +const BlockService = require('ipfs-block-service') +const Ipld = require('ipld') +const getDefaultIpldOptions = require('../runtime/ipld-nodejs') +const createPreloader = require('../preload') +const { ERR_REPO_NOT_INITIALIZED } = require('ipfs-repo').errors const IPNS = require('../ipns') const OfflineDatastore = require('../ipns/routing/offline-datastore') +const initAssets = require('../runtime/init-assets-nodejs') +const PinManager = require('./pin/pin-manager') +const Commands = require('./') + +module.exports = ({ + apiManager, + print, + constructorOptions +}) => async function init (options) { + const { cancel } = apiManager.update({ init: () => { throw new AlreadyInitializingError() } }) + + try { + options = options || {} + + if (typeof constructorOptions.init === 'object') { + options = mergeOptions(options, constructorOptions.init) + } -const addDefaultAssets = require('./init-assets') -const { profiles } = require('./config') + if (constructorOptions.pass) { + options.pass = constructorOptions.pass + } -function createPeerId (self, opts) { - if 
(opts.privateKey) { - self.log('using user-supplied private-key') - if (typeof opts.privateKey === 'object') { - return opts.privateKey - } else { - return promisify(peerId.createFromPrivKey)(Buffer.from(opts.privateKey, 'base64')) + if (constructorOptions.config) { + options.config = constructorOptions.config } - } else { - // Generate peer identity keypair + transform to desired format + add to config. - opts.log(`generating ${opts.bits}-bit RSA keypair...`, false) - self.log('generating peer id: %s bits', opts.bits) - return promisify(peerId.create)({ bits: opts.bits }) - } -} + const repo = typeof options.repo === 'string' || options.repo == null + ? createRepo({ path: options.repo, autoMigrate: options.repoAutoMigrate }) + : options.repo + + let isInitialized = true + + if (repo.closed) { + try { + await repo.open() + } catch (err) { + if (err.code === ERR_REPO_NOT_INITIALIZED) { + isInitialized = false + } else { + throw err + } + } + } -async function createRepo (self, opts) { - if (self.state.state() !== 'uninitialized') { - throw new Error('Not able to init from state: ' + self.state.state()) - } + const { peerId, config, keychain } = isInitialized + ? await initExistingRepo(repo, options) + : await initNewRepo(repo, options) + + log('peer created') + const peerInfo = new PeerInfo(peerId) - self.state.init() - self.log('init') + if (config.Addresses && config.Addresses.Swarm) { + config.Addresses.Swarm.forEach(addr => { + let ma = multiaddr(addr) - // An initialized, open repo was passed, use this one! - if (opts.repo) { - self._repo = opts.repo + if (ma.getPeerId()) { + ma = ma.encapsulate(`/p2p/${peerInfo.id.toB58String()}`) + } - return + peerInfo.multiaddrs.add(ma) + }) + } + + const blockService = new BlockService(repo) + const ipld = new Ipld(getDefaultIpldOptions(blockService, constructorOptions.ipld, log)) + + const preload = createPreloader(constructorOptions.preload) + await preload.start() + + const gcLock = new GCLock(constructorOptions.repoOwner, { + // Make sure GCLock is specific to repo, for tests where there are + // multiple instances of IPFS + morticeId: repo.path + }) + + const dag = Commands.legacy.dag({ _ipld: ipld, _preload: preload }) + const object = { + data: Commands.object.data({ ipld, preload }), + get: Commands.object.get({ ipld, preload }), + links: Commands.object.links({ dag }), + new: Commands.object.new({ ipld, preload }), + patch: { + addLink: Commands.object.patch.addLink({ ipld, gcLock, preload }), + appendData: Commands.object.patch.appendData({ ipld, gcLock, preload }), + rmLink: Commands.object.patch.rmLink({ ipld, gcLock, preload }), + setData: Commands.object.patch.setData({ ipld, gcLock, preload }) + }, + put: Commands.object.put({ ipld, gcLock, preload }), + stat: Commands.object.stat({ ipld, preload }) + } + + const pinManager = new PinManager(repo, dag) + await pinManager.load() + + const pin = Commands.legacy.pin({ _ipld: ipld, _preload: preload, dag, object, _repo: repo, _pinManager: pinManager }) + const add = Commands.add({ ipld, dag, preload, pin, gcLock, constructorOptions }) + + if (!isInitialized && !options.emptyRepo) { + // add empty unixfs dir object (go-ipfs assumes this exists) + const emptyDirCid = await addEmptyDir({ dag }) + + log('adding default assets') + await initAssets({ add, print }) + + log('initializing IPNS keyspace') + // Setup the offline routing for IPNS. + // This is primarily used for offline ipns modifications, such as the initializeKeyspace feature. 
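+ // The offline datastore writes IPNS records to the repo's local datastore rather than the network, so the keyspace can be seeded before libp2p has started.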
+ const offlineDatastore = new OfflineDatastore(repo) + const ipns = new IPNS(offlineDatastore, repo.datastore, peerInfo, keychain, { pass: options.pass }) + await ipns.initializeKeyspace(peerId.privKey, emptyDirCid.toString()) + } + + const api = createApi({ + add, + apiManager, + constructorOptions, + blockService, + gcLock, + initOptions: options, + ipld, + keychain, + object, + peerInfo, + pinManager, + preload, + print, + repo + }) + + apiManager.update(api, () => { throw new NotStartedError() }) + } catch (err) { + cancel() + throw err } - opts.emptyRepo = opts.emptyRepo || false - opts.bits = Number(opts.bits) || 2048 - opts.log = opts.log || function () {} + return apiManager.api +} - const config = mergeOptions(defaultConfig(), self._options.config) +async function initNewRepo (repo, { privateKey, emptyRepo, bits, profiles, config, pass, print }) { + emptyRepo = emptyRepo || false + bits = bits == null ? 2048 : Number(bits) - applyProfile(self, config, opts) + config = mergeOptions(getDefaultConfig(), config) + config = applyProfiles(profiles, config) // Verify repo does not exist yet - const exists = await self._repo.exists() - self.log('repo exists?', exists) + const exists = await repo.exists() + log('repo exists?', exists) + if (exists === true) { - throw Error('repo already exists') + throw new Error('repo already exists') } - const peerId = await createPeerId(self, opts) + const peerId = await createPeerId({ privateKey, bits, print }) + let keychain = new NoKeychain() - self.log('identity generated') + log('identity generated') config.Identity = { PeerID: peerId.toB58String(), PrivKey: peerId.privKey.bytes.toString('base64') } - const privateKey = peerId.privKey - if (opts.pass) { - config.Keychain = Keychain.generateOptions() + privateKey = peerId.privKey + + config.Keychain = Keychain.generateOptions() + + log('peer identity: %s', config.Identity.PeerID) + + await repo.init(config) + await repo.open() + + log('repo opened') + + if (pass) { + log('creating keychain') + const keychainOptions = { passPhrase: pass, ...config.Keychain } + keychain = new Keychain(repo.keys, keychainOptions) + await keychain.importPeer('self', { privKey: privateKey }) } - opts.log('done') - opts.log('peer identity: ' + config.Identity.PeerID) + return { peerId, keychain, config } +} - await self._repo.init(config) - await self._repo.open() +async function initExistingRepo (repo, { config: newConfig, profiles, pass }) { + let config = await repo.config.get() - self.log('repo opened') + if (newConfig || profiles) { + if (newConfig) { + config = mergeOptions(config, newConfig) + } + if (profiles) { + config = applyProfiles(profiles, config) + } + await repo.config.set(config) + } - if (opts.pass) { - self.log('creating keychain') - const keychainOptions = Object.assign({ passPhrase: opts.pass }, config.Keychain) - self._keychain = new Keychain(self._repo.keys, keychainOptions) + let keychain = new NoKeychain() - await self._keychain.importPeer('self', { privKey: privateKey }) + if (pass) { + const keychainOptions = { passPhrase: pass, ...config.Keychain } + keychain = new Keychain(repo.keys, keychainOptions) + log('keychain constructed') } - // Setup the offline routing for IPNS. - // This is primarily used for offline ipns modifications, such as the initializeKeyspace feature. 
- const offlineDatastore = new OfflineDatastore(self._repo) + const peerId = await promisify(PeerId.createFromPrivKey)(config.Identity.PrivKey) - self._ipns = new IPNS(offlineDatastore, self._repo.datastore, self._peerInfo, self._keychain, self._options) + // Import the private key as 'self', if needed. + if (pass) { + try { + await keychain.findKeyByName('self') + } catch (err) { + log('Creating "self" key') + await keychain.importPeer('self', peerId) + } + } - // add empty unixfs dir object (go-ipfs assumes this exists) - return addRepoAssets(self, privateKey, opts) + return { peerId, keychain, config } } -async function addRepoAssets (self, privateKey, opts) { - if (opts.emptyRepo) { - return +function createPeerId ({ privateKey, bits, print }) { + if (privateKey) { + log('using user-supplied private-key') + return typeof privateKey === 'object' + ? privateKey + : promisify(PeerId.createFromPrivKey)(Buffer.from(privateKey, 'base64')) + } else { + // Generate peer identity keypair + transform to desired format + add to config. + print('generating %s-bit RSA keypair...', bits) + return promisify(PeerId.create)({ bits }) } +} - self.log('adding assets') - +async function addEmptyDir ({ dag }) { const node = new DAGNode(new UnixFs('directory').marshal()) - const cid = await self.dag.put(node, { + return dag.put(node, { version: 0, format: multicodec.DAG_PB, hashAlg: multicodec.SHA2_256, preload: false }) - - await self._ipns.initializeKeyspace(privateKey, cid.toBaseEncodedString()) - - self.log('Initialised keyspace') - - if (typeof addDefaultAssets === 'function') { - self.log('Adding default assets') - // addDefaultAssets is undefined on browsers. - // See package.json browser config - return addDefaultAssets(self, opts.log) - } } -// Apply profiles (eg "server,lowpower") to config -function applyProfile (self, config, opts) { - if (opts.profiles) { - for (const name of opts.profiles) { - const profile = profiles[name] - - if (!profile) { - throw new Error(`Could not find profile with name '${name}'`) - } - - self.log(`applying profile ${name}`) - profile.transform(config) +// Apply profiles (e.g. 
['server', 'lowpower']) to config +function applyProfiles (profiles, config) { + return (profiles || []).reduce((config, name) => { + const profile = require('./config').profiles[name] + if (!profile) { + throw new Error(`No profile with name '${name}'`) - } - } + } + log('applying profile %s', name) + return profile.transform(config) + }, config) } -module.exports = function init (self) { - return callbackify.variadic(async (opts) => { - opts = opts || {} - - await createRepo(self, opts) - self.log('Created repo') +function createApi ({ + add, + apiManager, + constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + keychain, + object, + peerInfo, + pinManager, + preload, + print, + repo +}) { + const start = Commands.start({ + apiManager, + constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + keychain, + peerInfo, + pinManager, + preload, + print, + repo + }) - await self.preStart() - self.log('Done pre-start') + const api = { + add, + config: Commands.config({ repo }), + id: Commands.id({ peerInfo }), + init: () => { throw new AlreadyInitializedError() }, + object, + start, + swarm: { + addrs: () => { throw new NotStartedError() }, + connect: () => { throw new NotStartedError() }, + disconnect: () => { throw new NotStartedError() }, + localAddrs: Commands.swarm.localAddrs({ peerInfo }), + peers: () => { throw new NotStartedError() } + }, + version: Commands.version({ repo }) + } - self.state.initialized() - self.emit('init') - }) + return api } diff --git a/src/core/components/object.js b/src/core/components/object.js deleted file mode 100644 index 1f7e3f7cbe..0000000000 --- a/src/core/components/object.js +++ /dev/null @@ -1,302 +0,0 @@ -'use strict' - -const callbackify = require('callbackify') -const dagPB = require('ipld-dag-pb') -const DAGNode = dagPB.DAGNode -const DAGLink = dagPB.DAGLink -const CID = require('cids') -const mh = require('multihashes') -const multicodec = require('multicodec') -const Unixfs = require('ipfs-unixfs') -const errCode = require('err-code') - -function normalizeMultihash (multihash, enc) { - if (typeof multihash === 'string') { - if (enc === 'base58' || !enc) { - return multihash - } - - return Buffer.from(multihash, enc) - } else if (Buffer.isBuffer(multihash)) { - return multihash - } else if (CID.isCID(multihash)) { - return multihash.buffer - } else { - throw new Error('unsupported multihash') - } -} - -function parseBuffer (buf, encoding) { - switch (encoding) { - case 'json': - return parseJSONBuffer(buf) - case 'protobuf': - return parseProtoBuffer(buf) - default: - throw new Error(`unkown encoding: ${encoding}`) - } -} - -function parseJSONBuffer (buf) { - let data - let links - - try { - const parsed = JSON.parse(buf.toString()) - - links = (parsed.Links || []).map((link) => { - return new DAGLink( - link.Name || link.name, - link.Size || link.size, - mh.fromB58String(link.Hash || link.hash || link.multihash) - ) - }) - data = Buffer.from(parsed.Data) - } catch (err) { - throw new Error('failed to parse JSON: ' + err) - } - - return new DAGNode(data, links) -} - -function parseProtoBuffer (buf) { - return dagPB.util.deserialize(buf) -} - -function findLinks (node, links = []) { - for (const key in node) { - const val = node[key] - - if (key === '/' && Object.keys(node).length === 1) { - try { - links.push(new DAGLink('', 0, new CID(val))) - continue - } catch (_) { - // not a CID - } - } - - if (CID.isCID(val)) { - links.push(new DAGLink('', 0, val)) - - continue - } - - if (Array.isArray(val)) { - findLinks(val, links) - } - - 
if (typeof val === 'object' && !(val instanceof String)) { - findLinks(val, links) - } - } - - return links -} - -module.exports = function object (self) { - async function editAndSave (multihash, edit, options) { - options = options || {} - - const node = await self.object.get(multihash, options) - - // edit applies the edit func passed to - // editAndSave - const cid = await self._ipld.put(edit(node), multicodec.DAG_PB, { - cidVersion: 0, - hashAlg: multicodec.SHA2_256 - }) - - if (options.preload !== false) { - self._preload(cid) - } - - return cid - } - - return { - new: callbackify.variadic(async (template, options) => { - options = options || {} - - // allow options in the template position - if (template && typeof template !== 'string') { - options = template - template = null - } - - let data - - if (template) { - if (template === 'unixfs-dir') { - data = (new Unixfs('directory')).marshal() - } else { - throw new Error('unknown template') - } - } else { - data = Buffer.alloc(0) - } - - const node = new DAGNode(data) - - const cid = await self._ipld.put(node, multicodec.DAG_PB, { - cidVersion: 0, - hashAlg: multicodec.SHA2_256 - }) - - if (options.preload !== false) { - self._preload(cid) - } - - return cid - }), - put: callbackify.variadic(async (obj, options) => { - options = options || {} - - const encoding = options.enc - let node - - if (Buffer.isBuffer(obj)) { - if (encoding) { - node = await parseBuffer(obj, encoding) - } else { - node = new DAGNode(obj) - } - } else if (DAGNode.isDAGNode(obj)) { - // already a dag node - node = obj - } else if (typeof obj === 'object') { - node = new DAGNode(obj.Data, obj.Links) - } else { - throw new Error('obj not recognized') - } - - const release = await self._gcLock.readLock() - - try { - const cid = await self._ipld.put(node, multicodec.DAG_PB, { - cidVersion: 0, - hashAlg: multicodec.SHA2_256 - }) - - if (options.preload !== false) { - self._preload(cid) - } - - return cid - } finally { - release() - } - }), - - get: callbackify.variadic(async (multihash, options) => { // eslint-disable-line require-await - options = options || {} - - let mh, cid - - try { - mh = normalizeMultihash(multihash, options.enc) - } catch (err) { - throw errCode(err, 'ERR_INVALID_MULTIHASH') - } - - try { - cid = new CID(mh) - } catch (err) { - throw errCode(err, 'ERR_INVALID_CID') - } - - if (options.cidVersion === 1) { - cid = cid.toV1() - } - - if (options.preload !== false) { - self._preload(cid) - } - - return self._ipld.get(cid) - }), - - data: callbackify.variadic(async (multihash, options) => { - options = options || {} - - const node = await self.object.get(multihash, options) - - return node.Data - }), - - links: callbackify.variadic(async (multihash, options) => { - options = options || {} - - const cid = new CID(multihash) - const result = await self.dag.get(cid, options) - - if (cid.codec === 'raw') { - return [] - } - - if (cid.codec === 'dag-pb') { - return result.value.Links - } - - if (cid.codec === 'dag-cbor') { - return findLinks(result) - } - - throw new Error(`Cannot resolve links from codec ${cid.codec}`) - }), - - stat: callbackify.variadic(async (multihash, options) => { - options = options || {} - - const node = await self.object.get(multihash, options) - const serialized = dagPB.util.serialize(node) - const cid = await dagPB.util.cid(serialized, { - cidVersion: 0 - }) - - const blockSize = serialized.length - const linkLength = node.Links.reduce((a, l) => a + l.Tsize, 0) - - return { - Hash: cid.toBaseEncodedString(), - NumLinks: 
node.Links.length, - BlockSize: blockSize, - LinksSize: blockSize - node.Data.length, - DataSize: node.Data.length, - CumulativeSize: blockSize + linkLength - } - }), - - patch: { - addLink: callbackify.variadic(async (multihash, link, options) => { // eslint-disable-line require-await - return editAndSave(multihash, (node) => { - node.addLink(link) - - return node - }, options) - }), - - rmLink: callbackify.variadic(async (multihash, linkRef, options) => { // eslint-disable-line require-await - return editAndSave(multihash, (node) => { - node.rmLink(linkRef.Name || linkRef.name) - - return node - }, options) - }), - - appendData: callbackify.variadic(async (multihash, data, options) => { // eslint-disable-line require-await - return editAndSave(multihash, (node) => { - const newData = Buffer.concat([node.Data, data]) - - return new DAGNode(newData, node.Links) - }, options) - }), - - setData: callbackify.variadic(async (multihash, data, options) => { // eslint-disable-line require-await - return editAndSave(multihash, (node) => { - return new DAGNode(data, node.Links) - }, options) - }) - } - } -} diff --git a/src/core/components/object/data.js b/src/core/components/object/data.js new file mode 100644 index 0000000000..e7066f3d74 --- /dev/null +++ b/src/core/components/object/data.js @@ -0,0 +1,9 @@ +'use strict' + +module.exports = ({ ipld, preload }) => { + const get = require('./get')({ ipld, preload }) + return async function data (multihash, options) { + const node = await get(multihash, options) + return node.Data + } +} diff --git a/src/core/components/object/get.js b/src/core/components/object/get.js new file mode 100644 index 0000000000..394235cc6c --- /dev/null +++ b/src/core/components/object/get.js @@ -0,0 +1,48 @@ +'use strict' + +const CID = require('cids') +const errCode = require('err-code') + +function normalizeMultihash (multihash, enc) { + if (typeof multihash === 'string') { + if (enc === 'base58' || !enc) { + return multihash + } + return Buffer.from(multihash, enc) + } else if (Buffer.isBuffer(multihash)) { + return multihash + } else if (CID.isCID(multihash)) { + return multihash.buffer + } + throw new Error('unsupported multihash') +} + +module.exports = ({ ipld, preload }) => { + return async function get (multihash, options) { // eslint-disable-line require-await + options = options || {} + + let mh, cid + + try { + mh = normalizeMultihash(multihash, options.enc) + } catch (err) { + throw errCode(err, 'ERR_INVALID_MULTIHASH') + } + + try { + cid = new CID(mh) + } catch (err) { + throw errCode(err, 'ERR_INVALID_CID') + } + + if (options.cidVersion === 1) { + cid = cid.toV1() + } + + if (options.preload !== false) { + preload(cid) + } + + return ipld.get(cid) + } +} diff --git a/src/core/components/object/links.js b/src/core/components/object/links.js new file mode 100644 index 0000000000..8e6a58f177 --- /dev/null +++ b/src/core/components/object/links.js @@ -0,0 +1,58 @@ +'use strict' + +const dagPB = require('ipld-dag-pb') +const DAGLink = dagPB.DAGLink +const CID = require('cids') + +function findLinks (node, links = []) { + for (const key in node) { + const val = node[key] + + if (key === '/' && Object.keys(node).length === 1) { + try { + links.push(new DAGLink('', 0, new CID(val))) + continue + } catch (_) { + // not a CID + } + } + + if (CID.isCID(val)) { + links.push(new DAGLink('', 0, val)) + continue + } + + if (Array.isArray(val)) { + findLinks(val, links) + continue + } + + if (val && typeof val === 'object') { + findLinks(val, links) + } + } + + return links +} + 
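+// For example, a dag-cbor value like { author: cidA, refs: [cidB] } (hypothetical names) yields one unnamed, zero-size DAGLink per CID found while walking the object.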
+module.exports = ({ dag }) => { + return async function links (multihash, options) { + options = options || {} + + const cid = new CID(multihash) + const result = await dag.get(cid, options) + + if (cid.codec === 'raw') { + return [] + } + + if (cid.codec === 'dag-pb') { + return result.value.Links + } + + if (cid.codec === 'dag-cbor') { + return findLinks(result) + } + + throw new Error(`Cannot resolve links from codec ${cid.codec}`) + } +} diff --git a/src/core/components/object/new.js b/src/core/components/object/new.js new file mode 100644 index 0000000000..4d6e6291b0 --- /dev/null +++ b/src/core/components/object/new.js @@ -0,0 +1,43 @@ +'use strict' + +const dagPB = require('ipld-dag-pb') +const DAGNode = dagPB.DAGNode +const multicodec = require('multicodec') +const Unixfs = require('ipfs-unixfs') + +module.exports = ({ ipld, preload }) => { + return async function _new (template, options) { + options = options || {} + + // allow options in the template position + if (template && typeof template !== 'string') { + options = template + template = null + } + + let data + + if (template) { + if (template === 'unixfs-dir') { + data = (new Unixfs('directory')).marshal() + } else { + throw new Error('unknown template') + } + } else { + data = Buffer.alloc(0) + } + + const node = new DAGNode(data) + + const cid = await ipld.put(node, multicodec.DAG_PB, { + cidVersion: 0, + hashAlg: multicodec.SHA2_256 + }) + + if (options.preload !== false) { + preload(cid) + } + + return cid + } +} diff --git a/src/core/components/object/patch/add-link.js b/src/core/components/object/patch/add-link.js new file mode 100644 index 0000000000..2cdd990749 --- /dev/null +++ b/src/core/components/object/patch/add-link.js @@ -0,0 +1,12 @@ +'use strict' + +module.exports = ({ ipld, gcLock, preload }) => { + const get = require('../get')({ ipld, preload }) + const put = require('../put')({ ipld, gcLock, preload }) + + return async function addLink (multihash, link, options) { + const node = await get(multihash, options) + node.addLink(link) + return put(node, options) + } +} diff --git a/src/core/components/object/patch/append-data.js b/src/core/components/object/patch/append-data.js new file mode 100644 index 0000000000..511d79feb3 --- /dev/null +++ b/src/core/components/object/patch/append-data.js @@ -0,0 +1,14 @@ +'use strict' + +const { DAGNode } = require('ipld-dag-pb') + +module.exports = ({ ipld, gcLock, preload }) => { + const get = require('../get')({ ipld, preload }) + const put = require('../put')({ ipld, gcLock, preload }) + + return async function appendData (multihash, data, options) { + const node = await get(multihash, options) + const newData = Buffer.concat([node.Data, data]) + return put(new DAGNode(newData, node.Links), options) + } +} diff --git a/src/core/components/object/patch/rm-link.js b/src/core/components/object/patch/rm-link.js new file mode 100644 index 0000000000..bd3033a06b --- /dev/null +++ b/src/core/components/object/patch/rm-link.js @@ -0,0 +1,12 @@ +'use strict' + +module.exports = ({ ipld, gcLock, preload }) => { + const get = require('../get')({ ipld, preload }) + const put = require('../put')({ ipld, gcLock, preload }) + + return async function rmLink (multihash, linkRef, options) { + const node = await get(multihash, options) + node.rmLink(linkRef.Name || linkRef.name) + return put(node, options) + } +} diff --git a/src/core/components/object/patch/set-data.js b/src/core/components/object/patch/set-data.js new file mode 100644 index 0000000000..7693a5b5ba --- /dev/null +++ 
b/src/core/components/object/patch/set-data.js @@ -0,0 +1,13 @@ +'use strict' + +const { DAGNode } = require('ipld-dag-pb') + +module.exports = ({ ipld, gcLock, preload }) => { + const get = require('../get')({ ipld, preload }) + const put = require('../put')({ ipld, gcLock, preload }) + + return async function setData (multihash, data, options) { + const node = await get(multihash, options) + return put(new DAGNode(data, node.Links), options) + } +} diff --git a/src/core/components/object/put.js b/src/core/components/object/put.js new file mode 100644 index 0000000000..2a8a195f53 --- /dev/null +++ b/src/core/components/object/put.js @@ -0,0 +1,85 @@ +'use strict' + +const dagPB = require('ipld-dag-pb') +const DAGNode = dagPB.DAGNode +const DAGLink = dagPB.DAGLink +const mh = require('multihashes') +const multicodec = require('multicodec') + +function parseBuffer (buf, encoding) { + switch (encoding) { + case 'json': + return parseJSONBuffer(buf) + case 'protobuf': + return parseProtoBuffer(buf) + default: + throw new Error(`unknown encoding: ${encoding}`) + } +} + +function parseJSONBuffer (buf) { + let data + let links + + try { + const parsed = JSON.parse(buf.toString()) + + links = (parsed.Links || []).map((link) => { + return new DAGLink( + link.Name || link.name, + link.Size || link.size, + mh.fromB58String(link.Hash || link.hash || link.multihash) + ) + }) + data = Buffer.from(parsed.Data) + } catch (err) { + throw new Error('failed to parse JSON: ' + err) + } + + return new DAGNode(data, links) +} + +function parseProtoBuffer (buf) { + return dagPB.util.deserialize(buf) +} + +module.exports = ({ ipld, gcLock, preload }) => { + return async function put (obj, options) { + options = options || {} + + const encoding = options.enc + let node + + if (Buffer.isBuffer(obj)) { + if (encoding) { + node = await parseBuffer(obj, encoding) + } else { + node = new DAGNode(obj) + } + } else if (DAGNode.isDAGNode(obj)) { + // already a dag node + node = obj + } else if (typeof obj === 'object') { + node = new DAGNode(obj.Data, obj.Links) + } else { + throw new Error('obj not recognized') + } + + const release = await gcLock.readLock() + + try { + const cid = await ipld.put(node, multicodec.DAG_PB, { + cidVersion: 0, + hashAlg: multicodec.SHA2_256 + }) + + if (options.preload !== false) { + preload(cid) + } + + return cid + } finally { + release() + } + } +} diff --git a/src/core/components/object/stat.js b/src/core/components/object/stat.js new file mode 100644 index 0000000000..ea2f06c72c --- /dev/null +++ b/src/core/components/object/stat.js @@ -0,0 +1,28 @@ +'use strict' + +const dagPB = require('ipld-dag-pb') + +module.exports = ({ ipld, preload }) => { + const get = require('./get')({ ipld, preload }) + return async function stat (multihash, options) { + options = options || {} + + const node = await get(multihash, options) + const serialized = dagPB.util.serialize(node) + const cid = await dagPB.util.cid(serialized, { + cidVersion: 0 + }) + + const blockSize = serialized.length + const linkLength = node.Links.reduce((a, l) => a + l.Tsize, 0) + + return { + Hash: cid.toBaseEncodedString(), + NumLinks: node.Links.length, + BlockSize: blockSize, + LinksSize: blockSize - node.Data.length, + DataSize: node.Data.length, + CumulativeSize: blockSize + linkLength + } + } +} diff --git a/src/core/components/pin.js b/src/core/components/pin.js index cbe0c8a250..176e5f5cc8 100644 --- a/src/core/components/pin.js +++ b/src/core/components/pin.js @@ -10,7 +10,7 @@ const PinTypes = PinManager.PinTypes 
module.exports = (self) => { const dag = self.dag - const pinManager = new PinManager(self._repo, dag) + const pinManager = self._pinManager || new PinManager(self._repo, dag) const pin = { add: callbackify.variadic(async (paths, options) => { diff --git a/src/core/components/ping-pull-stream.js b/src/core/components/ping-pull-stream.js deleted file mode 100644 index 838378bace..0000000000 --- a/src/core/components/ping-pull-stream.js +++ /dev/null @@ -1,100 +0,0 @@ -'use strict' - -const debug = require('debug') -const OFFLINE_ERROR = require('../utils').OFFLINE_ERROR -const PeerId = require('peer-id') -const pull = require('pull-stream') -const Pushable = require('pull-pushable') - -const log = debug('ipfs:pingPullStream') -log.error = debug('ipfs:pingPullStream:error') - -module.exports = function pingPullStream (self) { - return (peerId, opts) => { - if (!self.isOnline()) { - return pull.error(new Error(OFFLINE_ERROR)) - } - - opts = Object.assign({ count: 10 }, opts) - - const source = Pushable() - - getPeer(self.libp2p, source, peerId, (err, peer) => { - if (err) { - log.error(err) - source.end(err) - return - } - - runPing(self.libp2p, source, opts.count, peer, (err) => { - if (err) { - log.error(err) - source.push(getPacket({ success: false, text: err.toString() })) - source.end() - } - }) - }) - - return source - } -} - -function getPacket (msg) { - // Default msg - const basePacket = { success: true, time: 0, text: '' } - return Object.assign(basePacket, msg) -} - -function getPeer (libp2pNode, statusStream, peerIdStr, cb) { - let peerId - - try { - peerId = PeerId.createFromB58String(peerIdStr) - } catch (err) { - return cb(err) - } - - let peerInfo - - try { - peerInfo = libp2pNode.peerBook.get(peerId) - } catch (err) { - log('Peer not found in peer book, trying peer routing') - - // Share lookup status just as in the go implemmentation - statusStream.push(getPacket({ text: `Looking up peer ${peerIdStr}` })) - return libp2pNode.peerRouting.findPeer(peerId, cb) - } - - cb(null, peerInfo) -} - -function runPing (libp2pNode, statusStream, count, peer, cb) { - libp2pNode.ping(peer, (err, p) => { - if (err) { return cb(err) } - - let packetCount = 0 - let totalTime = 0 - statusStream.push(getPacket({ text: `PING ${peer.id.toB58String()}` })) - - p.on('ping', (time) => { - statusStream.push(getPacket({ time })) - totalTime += time - packetCount++ - if (packetCount >= count) { - const average = totalTime / count - p.stop() - statusStream.push(getPacket({ text: `Average latency: ${average}ms` })) - statusStream.end() - } - }) - - p.on('error', (err) => { - log.error(err) - p.stop() - cb(err) - }) - - p.start() - }) -} diff --git a/src/core/components/ping-readable-stream.js b/src/core/components/ping-readable-stream.js deleted file mode 100644 index b6809ffb48..0000000000 --- a/src/core/components/ping-readable-stream.js +++ /dev/null @@ -1,7 +0,0 @@ -'use strict' - -const toStream = require('pull-stream-to-stream') - -module.exports = function pingReadableStream (self) { - return (peerId, opts) => toStream.source(self.pingPullStream(peerId, opts)) -} diff --git a/src/core/components/ping.js b/src/core/components/ping.js index 5f0aa61be3..efc0e1bc33 100644 --- a/src/core/components/ping.js +++ b/src/core/components/ping.js @@ -1,18 +1,44 @@ 'use strict' -const promisify = require('promisify-es6') -const pull = require('pull-stream/pull') - -module.exports = function ping (self) { - return promisify((peerId, opts, callback) => { - if (typeof opts === 'function') { - callback = opts - 
opts = {} +const PeerId = require('peer-id') +const basePacket = { success: true, time: 0, text: '' } + +module.exports = ({ libp2p }) => { + return async function * (peerId, options) { + options = options || {} + options.count = options.count || 10 + + if (!PeerId.isPeerId(peerId)) { + peerId = PeerId.createFromCID(peerId) } - pull( - self.pingPullStream(peerId, opts), - pull.collect(callback) - ) - }) + let peerInfo + if (libp2p.peerStore.has(peerId)) { + peerInfo = libp2p.peerStore.get(peerId) + } else { + yield { ...basePacket, text: `Looking up peer ${peerId}` } + peerInfo = await libp2p.peerRouting.findPeer(peerId) + } + + yield { ...basePacket, text: `PING ${peerInfo.id.toB58String()}` } + + let packetCount = 0 + let totalTime = 0 + + for (let i = 0; i < options.count; i++) { + try { + const time = await libp2p.ping(peerInfo) + totalTime += time + packetCount++ + yield { ...basePacket, time } + } catch (err) { + yield { ...basePacket, success: false, text: err.toString() } + } + } + + if (packetCount) { + const average = totalTime / packetCount + yield { ...basePacket, text: `Average latency: ${average}ms` } + } + } } diff --git a/src/core/components/pre-start.js b/src/core/components/pre-start.js deleted file mode 100644 index 639b94a61f..0000000000 --- a/src/core/components/pre-start.js +++ /dev/null @@ -1,75 +0,0 @@ -'use strict' - -const peerId = require('peer-id') -const PeerInfo = require('peer-info') -const multiaddr = require('multiaddr') -const Keychain = require('libp2p-keychain') -const mergeOptions = require('merge-options') -const NoKeychain = require('./no-keychain') -const callbackify = require('callbackify') -const promisify = require('promisify-es6') - -/* - * Load stuff from Repo into memory - */ -module.exports = function preStart (self) { - return callbackify(async () => { - self.log('pre-start') - - const pass = self._options.pass - let config = await self._repo.config.get() - - if (self._options.config) { - config = mergeOptions(config, self._options.config) - await self.config.replace(config) - } - - // Create keychain configuration, if needed. - if (!config.Keychain) { - config.Keychain = Keychain.generateOptions() - await self.config.set('Keychain', config.Keychain) - self.log('using default keychain options') - } - - // Construct the keychain - if (self._keychain) { - // most likely an init or upgrade has happened - } else if (pass) { - const keychainOptions = Object.assign({ passPhrase: pass }, config.Keychain) - self._keychain = new Keychain(self._repo.keys, keychainOptions) - self.log('keychain constructed') - } else { - self._keychain = new NoKeychain() - self.log('no keychain, use --pass') - } - - const privKey = config.Identity.PrivKey - const id = await promisify(peerId.createFromPrivKey)(privKey) - - // Import the private key as 'self', if needed.
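Note that ping is now an async generator rather than a pull-stream: one packet object per probe, matching `basePacket` above. A consumption sketch (a sketch only, assuming a started node `ipfs` exposing the API built in start.js below, and a valid base58 peer ID string):

'use strict'

// Sketch: iterating the async-iterable ping API.
async function pingPeer (ipfs, peerIdStr) {
  for await (const pong of ipfs.ping(peerIdStr, { count: 3 })) {
    if (!pong.success) {
      console.error(pong.text) // probe failed
    } else if (pong.time) {
      console.log(`${pong.time}ms`) // latency for one probe
    } else {
      console.log(pong.text) // status lines, e.g. `PING Qm...`
    }
  }
}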
- if (pass) { - try { - await self._keychain.findKeyByName('self') - } catch (err) { - self.log('Creating "self" key') - await self._keychain.importPeer('self', id) - } - } - - self.log('peer created') - self._peerInfo = new PeerInfo(id) - if (config.Addresses && config.Addresses.Swarm) { - config.Addresses.Swarm.forEach((addr) => { - let ma = multiaddr(addr) - - if (ma.getPeerId()) { - ma = ma.encapsulate('/ipfs/' + self._peerInfo.id.toB58String()) - } - - self._peerInfo.multiaddrs.add(ma) - }) - } - - await self.pin.pinManager.load() - }) -} diff --git a/src/core/components/pubsub.js b/src/core/components/pubsub.js index 8c5916b906..0954304f46 100644 --- a/src/core/components/pubsub.js +++ b/src/core/components/pubsub.js @@ -1,90 +1,12 @@ 'use strict' -const callbackify = require('callbackify') -const OFFLINE_ERROR = require('../utils').OFFLINE_ERROR -const errcode = require('err-code') - -module.exports = function pubsub (self) { - function checkOnlineAndEnabled () { - if (!self.isOnline()) { - throw errcode(new Error(OFFLINE_ERROR), 'ERR_OFFLINE') - } - - if (!self.libp2p.pubsub) { - throw errcode(new Error('pubsub is not enabled'), 'ERR_PUBSUB_DISABLED') - } - } - +module.exports = ({ libp2p }) => { return { - subscribe: (topic, handler, options, callback) => { - if (typeof options === 'function') { - callback = options - options = {} - } - - if (typeof callback === 'function') { - try { - checkOnlineAndEnabled() - } catch (err) { - return callback(err) - } - - self.libp2p.pubsub.subscribe(topic, handler, options, callback) - return - } - - try { - checkOnlineAndEnabled() - } catch (err) { - return Promise.reject(err) - } - - return self.libp2p.pubsub.subscribe(topic, handler, options) - }, - - unsubscribe: (topic, handler, callback) => { - if (typeof callback === 'function') { - try { - checkOnlineAndEnabled() - } catch (err) { - return callback(err) - } - - self.libp2p.pubsub.unsubscribe(topic, handler, callback) - return - } - - try { - checkOnlineAndEnabled() - } catch (err) { - return Promise.reject(err) - } - - return self.libp2p.pubsub.unsubscribe(topic, handler) - }, - - publish: callbackify(async (topic, data) => { // eslint-disable-line require-await - checkOnlineAndEnabled() - - await self.libp2p.pubsub.publish(topic, data) - }), - - ls: callbackify(async () => { // eslint-disable-line require-await - checkOnlineAndEnabled() - - return self.libp2p.pubsub.ls() - }), - - peers: callbackify(async (topic) => { // eslint-disable-line require-await - checkOnlineAndEnabled() - - return self.libp2p.pubsub.peers(topic) - }), - - setMaxListeners (n) { - checkOnlineAndEnabled() - - self.libp2p.pubsub.setMaxListeners(n) - } + subscribe: (...args) => libp2p.pubsub.subscribe(...args), + unsubscribe: (...args) => libp2p.pubsub.unsubscribe(...args), + publish: (...args) => libp2p.pubsub.publish(...args), + ls: (...args) => libp2p.pubsub.getTopics(...args), + peers: (...args) => libp2p.pubsub.getSubscribers(...args), + setMaxListeners: (n) => libp2p.pubsub.setMaxListeners(n) } } diff --git a/src/core/components/start.js b/src/core/components/start.js index b3ea02bfa3..2465be61f7 100644 --- a/src/core/components/start.js +++ b/src/core/components/start.js @@ -1,51 +1,173 @@ 'use strict' const Bitswap = require('ipfs-bitswap') -const callbackify = require('callbackify') - +const PeerBook = require('peer-book') const IPNS = require('../ipns') const routingConfig = require('../ipns/routing/config') -const createLibp2pBundle = require('./libp2p') - -module.exports = (self) => { - return 
callbackify(async () => { - if (self.state.state() !== 'stopped') { - throw new Error(`Not able to start from state: ${self.state.state()}`) - } +const defer = require('p-defer') +const { AlreadyInitializedError, NotEnabledError } = require('../errors') +const Commands = require('./') - self.log('starting') - self.state.start() +module.exports = ({ + apiManager, + constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + keychain, + peerInfo, + pinManager, + preload, + print, + repo +}) => async function start () { + const startPromise = defer() + const { cancel } = apiManager.update({ start: () => startPromise.promise }) + try { // The repo may be closed if previously stopped - if (self._repo.closed) { - await self._repo.open() + if (repo.closed) { + await repo.open() } - const config = await self._repo.config.get() - const libp2p = createLibp2pBundle(self, config) + const config = await repo.config.get() + + const peerBook = new PeerBook() + const libp2p = Commands.legacy.libp2p({ + _options: constructorOptions, + _repo: repo, + _peerInfo: peerInfo, + _peerInfoBook: peerBook, + _print: print + }, config) await libp2p.start() - self.libp2p = libp2p - const ipnsRouting = routingConfig(self) - self._ipns = new IPNS(ipnsRouting, self._repo.datastore, self._peerInfo, self._keychain, self._options) + const ipnsRouting = routingConfig({ + _options: constructorOptions, + libp2p, + _repo: repo, + _peerInfo: peerInfo + }) + const ipns = new IPNS(ipnsRouting, repo.datastore, peerInfo, keychain, { pass: initOptions.pass }) + const bitswap = new Bitswap(libp2p, repo.blocks, { statsEnabled: true }) + + await bitswap.start() - self._bitswap = new Bitswap( - self.libp2p, - self._repo.blocks, { - statsEnabled: true - } - ) + blockService.setExchange(bitswap) - await self._bitswap.start() + await preload.start() + await ipns.republisher.start() + // TODO: start mfs preload here - self._blockService.setExchange(self._bitswap) + const api = createApi({ + apiManager, + bitswap, + constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + ipns, + keychain, + libp2p, + peerInfo, + pinManager, + preload, + print, + repo + }) - await self._preload.start() - await self._ipns.republisher.start() - await self._mfsPreload.start() + apiManager.update(api, () => undefined) + } catch (err) { + cancel() + startPromise.reject(err) + throw err + } - self.state.started() - self.emit('start') + startPromise.resolve(apiManager.api) + return apiManager.api +} + +function createApi ({ + apiManager, + bitswap, + constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + ipns, + keychain, + libp2p, + peerInfo, + pinManager, + preload, + print, + repo +}) { + const dag = Commands.legacy.dag({ _ipld: ipld, _preload: preload }) + const object = { + data: Commands.object.data({ ipld, preload }), + get: Commands.object.get({ ipld, preload }), + links: Commands.object.links({ dag }), + new: Commands.object.new({ ipld, preload }), + patch: { + addLink: Commands.object.patch.addLink({ ipld, gcLock, preload }), + appendData: Commands.object.patch.appendData({ ipld, gcLock, preload }), + rmLink: Commands.object.patch.rmLink({ ipld, gcLock, preload }), + setData: Commands.object.patch.setData({ ipld, gcLock, preload }) + }, + put: Commands.object.put({ ipld, gcLock, preload }), + stat: Commands.object.stat({ ipld, preload }) + } + const pin = Commands.legacy.pin({ _ipld: ipld, _preload: preload, object, _repo: repo, _pinManager: pinManager }) + const add = Commands.add({ ipld, dag, preload, pin, gcLock, 
constructorOptions }) + + const stop = Commands.stop({ + apiManager, + bitswap, + constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + ipns, + keychain, + libp2p, + object, + peerInfo, + pinManager, + preload, + print, + repo }) + + const api = { + add, + bitswap: { + stat: Commands.bitswap.stat({ bitswap }), + unwant: Commands.bitswap.unwant({ bitswap }), + wantlist: Commands.bitswap.wantlist({ bitswap }) + }, + config: Commands.config({ repo }), + id: Commands.id({ peerInfo }), + init: () => { throw new AlreadyInitializedError() }, + ping: Commands.ping({ libp2p }), + pubsub: libp2p.pubsub + ? Commands.pubsub({ libp2p }) + : () => { throw new NotEnabledError('pubsub not enabled') }, + start: () => apiManager.api, + stop, + swarm: { + addrs: Commands.swarm.addrs({ libp2p }), + connect: Commands.swarm.connect({ libp2p }), + disconnect: Commands.swarm.disconnect({ libp2p }), + localAddrs: Commands.swarm.localAddrs({ peerInfo }), + peers: Commands.swarm.peers({ libp2p }) + }, + version: Commands.version({ repo }) + } + + return api } diff --git a/src/core/components/stop.js b/src/core/components/stop.js index 1ee7bb9518..3bd422f297 100644 --- a/src/core/components/stop.js +++ b/src/core/components/stop.js @@ -1,40 +1,131 @@ 'use strict' -const callbackify = require('callbackify') - -module.exports = (self) => { - return callbackify(async () => { - self.log('stop') - - if (self.state.state() === 'stopped') { - throw new Error('Already stopped') - } - - if (self.state.state() !== 'running') { - throw new Error('Not able to stop from state: ' + self.state.state()) - } - - self.state.stop() - self._blockService.unsetExchange() - self._bitswap.stop() - self._preload.stop() - - const libp2p = self.libp2p - self.libp2p = null - - try { - await Promise.all([ - self._ipns.republisher.stop(), - self._mfsPreload.stop(), - libp2p.stop(), - self._repo.close() - ]) - - self.state.stopped() - self.emit('stop') - } catch (err) { - self.emit('error', err) - throw err - } +const defer = require('p-defer') +const { NotStartedError, AlreadyInitializedError } = require('../errors') +const Commands = require('./') + +module.exports = ({ + apiManager, + constructorOptions, + bitswap, + blockService, + gcLock, + initOptions, + ipld, + ipns, + keychain, + libp2p, + peerInfo, + pinManager, + preload, + print, + repo +}) => async function stop () { + const stopPromise = defer() + const { cancel } = apiManager.update({ stop: () => stopPromise.promise }) + + try { + blockService.unsetExchange() + bitswap.stop() + preload.stop() + + await Promise.all([ + ipns.republisher.stop(), + // mfsPreload.stop(), + libp2p.stop(), + repo.close() + ]) + + const api = createApi({ + apiManager, + constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + keychain, + peerInfo, + pinManager, + preload, + print, + repo + }) + + apiManager.update(api, () => { throw new NotStartedError() }) + } catch (err) { + cancel() + stopPromise.reject(err) + throw err + } + + stopPromise.resolve(apiManager.api) + return apiManager.api +} + +function createApi ({ + apiManager, + constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + keychain, + peerInfo, + pinManager, + preload, + print, + repo +}) { + const dag = Commands.legacy.dag({ _ipld: ipld, _preload: preload }) + const object = { + data: Commands.object.data({ ipld, preload }), + get: Commands.object.get({ ipld, preload }), + links: Commands.object.links({ dag }), + new: Commands.object.new({ ipld, preload }), + patch: { + addLink:
Commands.object.patch.addLink({ ipld, gcLock, preload }), + appendData: Commands.object.patch.appendData({ ipld, gcLock, preload }), + rmLink: Commands.object.patch.rmLink({ ipld, gcLock, preload }), + setData: Commands.object.patch.setData({ ipld, gcLock, preload }) + }, + put: Commands.object.put({ ipld, gcLock, preload }), + stat: Commands.object.stat({ ipld, preload }) + } + const pin = Commands.legacy.pin({ _ipld: ipld, _preload: preload, object, _repo: repo, _pinManager: pinManager }) + const add = Commands.add({ ipld, dag, preload, pin, gcLock, constructorOptions }) + + const start = Commands.start({ + apiManager, + constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + keychain, + object, + peerInfo, + pinManager, + preload, + print, + repo }) + + const api = { + add, + config: Commands.config({ repo }), + id: Commands.id({ peerInfo }), + init: () => { throw new AlreadyInitializedError() }, + start, + stop: () => apiManager.api, + swarm: { + addrs: () => { throw new NotStartedError() }, + connect: () => { throw new NotStartedError() }, + disconnect: () => { throw new NotStartedError() }, + localAddrs: Commands.swarm.localAddrs({ peerInfo }), + peers: () => { throw new NotStartedError() } + }, + version: Commands.version({ repo }) + } + + return api } diff --git a/src/core/components/swarm.js b/src/core/components/swarm.js deleted file mode 100644 index 45d1b8ebe5..0000000000 --- a/src/core/components/swarm.js +++ /dev/null @@ -1,79 +0,0 @@ -'use strict' - -const callbackify = require('callbackify') -const OFFLINE_ERROR = require('../utils').OFFLINE_ERROR - -module.exports = function swarm (self) { - return { - peers: callbackify.variadic(async (opts) => { // eslint-disable-line require-await - opts = opts || {} - - if (!self.isOnline()) { - throw new Error(OFFLINE_ERROR) - } - - const verbose = opts.v || opts.verbose - // TODO: return latency and streams when verbose is set - // we currently don't have this information - - const peers = [] - - Object.values(self._peerInfoBook.getAll()).forEach((peer) => { - const connectedAddr = peer.isConnected() - - if (!connectedAddr) { return } - - const tupple = { - addr: connectedAddr, - peer: peer.id - } - if (verbose) { - tupple.latency = 'n/a' - } - - peers.push(tupple) - }) - - return peers - }), - - // all the addrs we know - addrs: callbackify(async () => { // eslint-disable-line require-await - if (!self.isOnline()) { - throw new Error(OFFLINE_ERROR) - } - - const peers = Object.values(self._peerInfoBook.getAll()) - - return peers - }), - - localAddrs: callbackify(async () => { // eslint-disable-line require-await - if (!self.isOnline()) { - throw new Error(OFFLINE_ERROR) - } - - return self.libp2p.peerInfo.multiaddrs.toArray() - }), - - connect: callbackify(async (maddr) => { // eslint-disable-line require-await - if (!self.isOnline()) { - throw new Error(OFFLINE_ERROR) - } - - return self.libp2p.dial(maddr) - }), - - disconnect: callbackify(async (maddr) => { // eslint-disable-line require-await - if (!self.isOnline()) { - throw new Error(OFFLINE_ERROR) - } - - return self.libp2p.hangUp(maddr) - }), - - filters: callbackify(async () => { // eslint-disable-line require-await - throw new Error('Not implemented') - }) - } -} diff --git a/src/core/components/swarm/addrs.js b/src/core/components/swarm/addrs.js new file mode 100644 index 0000000000..ee095be07a --- /dev/null +++ b/src/core/components/swarm/addrs.js @@ -0,0 +1,13 @@ +'use strict' + +const CID = require('cids') + +module.exports = ({ libp2p }) => { + return 
async function addrs () { // eslint-disable-line require-await + const peers = [] + for (const [peerId, peerInfo] of libp2p.peerStore.entries()) { + peers.push({ id: new CID(peerId), addrs: peerInfo.multiaddrs.toArray() }) + } + return peers + } +} diff --git a/src/core/components/swarm/connect.js b/src/core/components/swarm/connect.js new file mode 100644 index 0000000000..98f7217f71 --- /dev/null +++ b/src/core/components/swarm/connect.js @@ -0,0 +1,7 @@ +'use strict' + +module.exports = ({ libp2p }) => { + return function connect (addr) { + return libp2p.dial(addr) + } +} diff --git a/src/core/components/swarm/disconnect.js b/src/core/components/swarm/disconnect.js new file mode 100644 index 0000000000..3e9aadae52 --- /dev/null +++ b/src/core/components/swarm/disconnect.js @@ -0,0 +1,7 @@ +'use strict' + +module.exports = ({ libp2p }) => { + return function disconnect (addr) { + return libp2p.hangUp(addr) + } +} diff --git a/src/core/components/swarm/local-addrs.js b/src/core/components/swarm/local-addrs.js new file mode 100644 index 0000000000..bc2ee7df71 --- /dev/null +++ b/src/core/components/swarm/local-addrs.js @@ -0,0 +1,7 @@ +'use strict' + +module.exports = ({ peerInfo }) => { + return async function localAddrs () { // eslint-disable-line require-await + return peerInfo.multiaddrs.toArray() + } +} diff --git a/src/core/components/swarm/peers.js b/src/core/components/swarm/peers.js new file mode 100644 index 0000000000..3fbc45c9c8 --- /dev/null +++ b/src/core/components/swarm/peers.js @@ -0,0 +1,34 @@ +'use strict' + +const CID = require('cids') + +module.exports = ({ libp2p }) => { + return async function peers (options) { // eslint-disable-line require-await + options = options || {} + + const verbose = options.v || options.verbose + const peers = [] + + for (const [peerId, connections] of libp2p.connections) { + for (const connection of connections) { + const tuple = { + addr: connection.remoteAddr, + peer: new CID(peerId) + } + + if (verbose || options.direction) { + tuple.direction = connection.stat.direction + } + + if (verbose) { + tuple.muxer = connection.stat.multiplexer + tuple.latency = 'n/a' + } + + peers.push(tuple) + } + } + + return peers + } +} diff --git a/src/core/components/version.js b/src/core/components/version.js index cc850c465d..7b9d963b14 100644 --- a/src/core/components/version.js +++ b/src/core/components/version.js @@ -1,17 +1,16 @@ 'use strict' const pkg = require('../../../package.json') -const callbackify = require('callbackify') // TODO add the commit hash of the current ipfs version to the response.
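For reference, each entry returned by the new swarm.peers above is a plain object per connection. A small consumption sketch (a sketch only; `libp2p` is assumed to come from a started node, as wired in start.js, and `latency` is hard-coded to 'n/a' as noted in the code above):

'use strict'

// Sketch: printing the connection tuples produced by swarm/peers.js.
async function printPeers (libp2p) {
  const peers = require('./src/core/components/swarm/peers')({ libp2p })
  for (const { peer, addr, direction, muxer, latency } of await peers({ verbose: true })) {
    console.log(`${peer} ${addr} ${direction} ${muxer} ${latency}`)
  }
}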
-module.exports = function version (self) { - return callbackify(async () => { - const repoVersion = await self.repo.version() +module.exports = ({ repo }) => { + return async function version () { + const repoVersion = await repo.version() return { version: pkg.version, repo: repoVersion, commit: '' } - }) + } } diff --git a/src/core/config.js b/src/core/config.js deleted file mode 100644 index 6f2353efb1..0000000000 --- a/src/core/config.js +++ /dev/null @@ -1,101 +0,0 @@ -'use strict' - -const Multiaddr = require('multiaddr') -const mafmt = require('mafmt') -const { struct, superstruct } = require('superstruct') -const { isTest } = require('ipfs-utils/src/env') - -const { optional, union } = struct -const s = superstruct({ - types: { - multiaddr: v => { - if (v === null) { - return `multiaddr invalid, value must be a string, Buffer, or another Multiaddr got ${v}` - } - - try { - Multiaddr(v) - } catch (err) { - return `multiaddr invalid, ${err.message}` - } - - return true - }, - 'multiaddr-ipfs': v => mafmt.IPFS.matches(v) ? true : 'multiaddr IPFS invalid' - } -}) - -const configSchema = s({ - repo: optional(s('object|string')), - repoOwner: 'boolean?', - repoAutoMigrate: 'boolean?', - preload: s({ - enabled: 'boolean?', - addresses: optional(s(['multiaddr'])), - interval: 'number?' - }, { enabled: !isTest, interval: 30 * 1000 }), - init: optional(union(['boolean', s({ - bits: 'number?', - emptyRepo: 'boolean?', - privateKey: optional(s('object|string')), // object should be a custom type for PeerId using 'kind-of' - pass: 'string?', - profiles: 'array?' - })])), - start: 'boolean?', - offline: 'boolean?', - pass: 'string?', - silent: 'boolean?', - relay: 'object?', // relay validates in libp2p - EXPERIMENTAL: optional(s({ - pubsub: 'boolean?', - ipnsPubsub: 'boolean?', - sharding: 'boolean?', - dht: 'boolean?' - })), - connectionManager: 'object?', - config: optional(s({ - API: 'object?', - Addresses: optional(s({ - Delegates: optional(s(['multiaddr'])), - Swarm: optional(s(['multiaddr'])), - API: optional(union([s('multiaddr'), s(['multiaddr'])])), - Gateway: optional(union([s('multiaddr'), s(['multiaddr'])])) - })), - Discovery: optional(s({ - MDNS: optional(s({ - Enabled: 'boolean?', - Interval: 'number?' - })), - webRTCStar: optional(s({ - Enabled: 'boolean?' - })) - })), - Bootstrap: optional(s(['multiaddr-ipfs'])), - Pubsub: optional(s({ - Router: 'string?', - Enabled: 'boolean?' - })), - Swarm: optional(s({ - ConnMgr: optional(s({ - LowWater: 'number?', - HighWater: 'number?' 
- })) - })) - })), - ipld: 'object?', - libp2p: optional(union(['function', 'object'])) // libp2p validates this -}, { - repoOwner: true -}) - -const validate = (opts) => { - const [err, options] = configSchema.validate(opts) - - if (err) { - throw err - } - - return options -} - -module.exports = { validate } diff --git a/src/core/errors.js b/src/core/errors.js new file mode 100644 index 0000000000..c7cf7ac938 --- /dev/null +++ b/src/core/errors.js @@ -0,0 +1,54 @@ +class NotInitializedError extends Error { + constructor (message = 'not initialized') { + super(message) + this.name = 'NotInitializedError' + this.code = NotInitializedError.code + } +} + +NotInitializedError.code = 'ERR_NOT_INITIALIZED' +exports.NotInitializedError = NotInitializedError + +class AlreadyInitializingError extends Error { + constructor (message = 'cannot initialize an initializing node') { + super(message) + this.name = 'AlreadyInitializingError' + this.code = AlreadyInitializingError.code + } +} + +AlreadyInitializingError.code = 'ERR_ALREADY_INITIALIZING' +exports.AlreadyInitializingError = AlreadyInitializingError + +class AlreadyInitializedError extends Error { + constructor (message = 'cannot re-initialize an initialized node') { + super(message) + this.name = 'AlreadyInitializedError' + this.code = AlreadyInitializedError.code + } +} + +AlreadyInitializedError.code = 'ERR_ALREADY_INITIALIZED' +exports.AlreadyInitializedError = AlreadyInitializedError + +class NotStartedError extends Error { + constructor (message = 'not started') { + super(message) + this.name = 'NotStartedError' + this.code = NotStartedError.code + } +} + +NotStartedError.code = 'ERR_NOT_STARTED' +exports.NotStartedError = NotStartedError + +class NotEnabledError extends Error { + constructor (message = 'not enabled') { + super(message) + this.name = 'NotEnabledError' + this.code = NotEnabledError.code + } +} + +NotEnabledError.code = 'ERR_NOT_ENABLED' +exports.NotEnabledError = NotEnabledError diff --git a/src/core/index.js b/src/core/index.js index a5ad33edf9..878e1c4957 100644 --- a/src/core/index.js +++ b/src/core/index.js @@ -1,181 +1,74 @@ 'use strict' -const BlockService = require('ipfs-block-service') -const Ipld = require('ipld') +const log = require('debug')('ipfs') +const mergeOptions = require('merge-options') +const { isTest } = require('ipfs-utils/src/env') +const globSource = require('ipfs-utils/src/files/glob-source') +const urlSource = require('ipfs-utils/src/files/url-source') +const { Buffer } = require('buffer') const PeerId = require('peer-id') const PeerInfo = require('peer-info') const crypto = require('libp2p-crypto') const isIPFS = require('is-ipfs') const multiaddr = require('multiaddr') const multihash = require('multihashes') -const PeerBook = require('peer-book') const multibase = require('multibase') const multicodec = require('multicodec') const multihashing = require('multihashing-async') const CID = require('cids') -const debug = require('debug') -const mergeOptions = require('merge-options') -const EventEmitter = require('events') - -const config = require('./config') -const boot = require('./boot') -const components = require('./components') -const GCLock = require('./components/pin/gc-lock') - -// replaced by repo-browser when running in the browser -const defaultRepo = require('./runtime/repo-nodejs') -const preload = require('./preload') -const mfsPreload = require('./mfs-preload') -const ipldOptions = require('./runtime/ipld-nodejs') -const { isTest } = require('ipfs-utils/src/env') - -/** - * 
@typedef { import("./ipns/index") } IPNS - */ - -/** - * - * - * @class IPFS - * @extends {EventEmitter} - */ -class IPFS extends EventEmitter { - constructor (options) { - super() - - const defaults = { - init: true, - start: true, - EXPERIMENTAL: {}, - preload: { - enabled: !isTest, // preload by default, unless in test env - addresses: [ - '/dnsaddr/node0.preload.ipfs.io/https', - '/dnsaddr/node1.preload.ipfs.io/https' - ] - } - } - - options = config.validate(options || {}) - - this._options = mergeOptions(defaults, options) - - if (options.init === false) { - this._options.init = false - } - - if (!(options.start === false)) { - this._options.start = true - } - - if (typeof options.repo === 'string' || - options.repo === undefined) { - this._repo = defaultRepo(options) - } else { - this._repo = options.repo - } - - // IPFS utils - this.log = debug('ipfs') - this.log.err = debug('ipfs:err') - - // IPFS Core Internals - // this._repo - assigned above - this._peerInfoBook = new PeerBook() - this._peerInfo = undefined - this._bitswap = undefined - this._blockService = new BlockService(this._repo) - this._ipld = new Ipld(ipldOptions(this._blockService, this._options.ipld, this.log)) - this._preload = preload(this) - this._mfsPreload = mfsPreload(this) - /** @type {IPNS} */ - this._ipns = undefined - // eslint-disable-next-line no-console - this._print = this._options.silent ? this.log : console.log - this._gcLock = new GCLock(this._options.repoOwner, { - // Make sure GCLock is specific to repo, for tests where there are - // multiple instances of IPFS - morticeId: this._repo.path - }) - - // IPFS Core exposed components - // - for booting up a node - this.init = components.init(this) - this.preStart = components.preStart(this) - this.start = components.start(this) - this.stop = components.stop(this) - this.shutdown = this.stop - this.isOnline = components.isOnline(this) - // - interface-ipfs-core defined API - Object.assign(this, components.filesRegular(this)) - this.version = components.version(this) - this.id = components.id(this) - this.repo = components.repo(this) - this.bootstrap = components.bootstrap(this) - this.config = components.config(this) - this.block = components.block(this) - this.object = components.object(this) - this.dag = components.dag(this) - this.files = components.filesMFS(this) - this.libp2p = null // assigned on start - this.swarm = components.swarm(this) - this.name = components.name(this) - this.bitswap = components.bitswap(this) - this.pin = components.pin(this) - this.ping = components.ping(this) - this.pingPullStream = components.pingPullStream(this) - this.pingReadableStream = components.pingReadableStream(this) - this.pubsub = components.pubsub(this) - this.dht = components.dht(this) - this.dns = components.dns(this) - this.key = components.key(this) - this.stats = components.stats(this) - this.resolve = components.resolve(this) - - if (this._options.EXPERIMENTAL.ipnsPubsub) { - this.log('EXPERIMENTAL IPNS pubsub is enabled') - } - if (this._options.EXPERIMENTAL.sharding) { - this.log('EXPERIMENTAL sharding is enabled') - } - - this.state = require('./state')(this) - - const onReady = () => { - this.removeListener('error', onError) - this._ready = true - } +const { NotInitializedError } = require('./errors') +const createInitApi = require('./components/init') +const ApiManager = require('./api-manager') + +const getDefaultOptions = () => ({ + init: true, + start: true, + EXPERIMENTAL: {}, + preload: { + enabled: !isTest, // preload by default, unless in 
test env + addresses: [ + '/dns4/node0.preload.ipfs.io/https', + '/dns4/node1.preload.ipfs.io/https' + ] + } +}) - const onError = err => { - this.removeListener('ready', onReady) - this._readyError = err - } +async function create (options) { + options = mergeOptions(getDefaultOptions(), options) - this.once('ready', onReady).once('error', onError) + // eslint-disable-next-line no-console + const print = options.silent ? log : console.log - boot(this) - } + const apiManager = new ApiManager() + const init = createInitApi({ apiManager, print, constructorOptions: options }) + const { api } = apiManager.update({ init }, () => { throw new NotInitializedError() }) - get ready () { - return new Promise((resolve, reject) => { - if (this._ready) return resolve(this) - if (this._readyError) return reject(this._readyError) - this.once('ready', () => resolve(this)) - this.once('error', reject) - }) + if (!options.init) { + return api } -} -module.exports = IPFS + await api.init() -// Note: We need to do this to force browserify to load the Buffer module -const BufferImpl = Buffer -Object.assign(module.exports, { crypto, isIPFS, Buffer: BufferImpl, CID, multiaddr, multibase, multihash, multihashing, multicodec, PeerId, PeerInfo }) + if (!options.start) { + return api + } -module.exports.createNode = (options) => { - return new IPFS(options) + return api.start() } -module.exports.create = (options) => { - return new IPFS(options).ready +module.exports = { + create, + crypto, + isIPFS, + Buffer, + CID, + multiaddr, + multibase, + multihash, + multihashing, + multicodec, + PeerId, + PeerInfo, + globSource, + urlSource } diff --git a/src/core/preload.js b/src/core/preload.js index 5427a2ecd0..053103c648 100644 --- a/src/core/preload.js +++ b/src/core/preload.js @@ -10,8 +10,8 @@ const preload = require('./runtime/preload-nodejs') const log = debug('ipfs:preload') log.error = debug('ipfs:preload:error') -module.exports = self => { - const options = self._options.preload || {} +module.exports = options => { + options = options || {} options.enabled = Boolean(options.enabled) options.addresses = options.addresses || [] diff --git a/src/core/runtime/init-assets-browser.js b/src/core/runtime/init-assets-browser.js new file mode 100644 index 0000000000..0c0c42d5b5 --- /dev/null +++ b/src/core/runtime/init-assets-browser.js @@ -0,0 +1 @@ +module.exports = () => {} diff --git a/src/core/runtime/init-assets-nodejs.js b/src/core/runtime/init-assets-nodejs.js new file mode 100644 index 0000000000..81c0a34832 --- /dev/null +++ b/src/core/runtime/init-assets-nodejs.js @@ -0,0 +1,15 @@ +'use strict' + +const path = require('path') +const globSource = require('ipfs-utils/src/files/glob-source') +const all = require('async-iterator-all') + +// Add the default assets to the repo. 
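Taken together, create() above replaces the old `new IPFS()` constructor plus 'ready' event: init, start and stop now swap the exposed API surface through ApiManager instead of mutating a single instance. A minimal lifecycle sketch (a sketch only, assuming the WIP API surface built in start.js/stop.js above, where version() is available in both states):

'use strict'

// Sketch: the new entry-point lifecycle.
const IPFS = require('./src/core')

async function main () {
  const node = await IPFS.create({ start: false }) // init only
  const started = await node.start() // resolves to the started API
  console.log((await started.version()).version)
  await started.stop() // swaps the stopped API back in
}

main().catch(console.error)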
+module.exports = async function initAssets ({ add, print }) { + const initDocsPath = path.join(__dirname, '..', '..', 'init-files', 'init-docs') + const results = await all(add(globSource(initDocsPath, { recursive: true }), { preload: false })) + const dir = results.filter(file => file.path === 'init-docs').pop() + + print('to get started, enter:\n') + print(`\tjsipfs cat /ipfs/${dir.cid}/readme\n`) +} diff --git a/src/core/runtime/repo-browser.js b/src/core/runtime/repo-browser.js index 8bd0f330e2..de4c9f59bf 100644 --- a/src/core/runtime/repo-browser.js +++ b/src/core/runtime/repo-browser.js @@ -3,6 +3,7 @@ const IPFSRepo = require('ipfs-repo') module.exports = (options) => { - const repoPath = options.repo || 'ipfs' - return new IPFSRepo(repoPath, { autoMigrate: options.repoAutoMigrate }) + options = options || {} + const repoPath = options.path || 'ipfs' + return new IPFSRepo(repoPath, { autoMigrate: options.autoMigrate }) } diff --git a/src/core/runtime/repo-nodejs.js b/src/core/runtime/repo-nodejs.js index 431d59b377..d8581b7e32 100644 --- a/src/core/runtime/repo-nodejs.js +++ b/src/core/runtime/repo-nodejs.js @@ -4,8 +4,8 @@ const os = require('os') const IPFSRepo = require('ipfs-repo') const path = require('path') -module.exports = (options) => { - const repoPath = options.repo || path.join(os.homedir(), '.jsipfs') - - return new IPFSRepo(repoPath, { autoMigrate: options.repoAutoMigrate }) +module.exports = options => { + options = options || {} + const repoPath = options.path || path.join(os.homedir(), '.jsipfs') + return new IPFSRepo(repoPath, { autoMigrate: options.autoMigrate }) } diff --git a/src/http/api/resources/files-regular.js b/src/http/api/resources/files-regular.js index bc963ce3d1..1f949fb7a0 100644 --- a/src/http/api/resources/files-regular.js +++ b/src/http/api/resources/files-regular.js @@ -208,7 +208,7 @@ exports.add = { } }, function (source) { - return ipfs._addAsyncIterator(source, { + return ipfs.add(source, { cidVersion: request.query['cid-version'], rawLeaves: request.query['raw-leaves'], progress: request.query.progress ? 
progressHandler : null, @@ -225,7 +225,7 @@ exports.add = { for await (const file of source) { output.write(JSON.stringify({ Name: file.path, - Hash: cidToString(file.hash, { base: request.query['cid-base'] }), + Hash: cidToString(file.cid, { base: request.query['cid-base'] }), Size: file.size }) + '\n') } diff --git a/test/cli/files.js b/test/cli/files.js index 4858cc0cb0..ec7e0263a6 100644 --- a/test/cli/files.js +++ b/test/cli/files.js @@ -26,7 +26,7 @@ const HASH_ALGS = [ 'keccak-512' ] -describe('files', () => runOnAndOff((thing) => { +describe.only('files', () => runOnAndOff((thing) => { let ipfs const readme = fs.readFileSync(path.join(process.cwd(), '/src/init-files/init-docs/readme')) .toString('utf-8') diff --git a/test/core/config.spec.js b/test/core/config.spec.js deleted file mode 100644 index ee1fa4a00e..0000000000 --- a/test/core/config.spec.js +++ /dev/null @@ -1,223 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const { expect } = require('interface-ipfs-core/src/utils/mocha') -const config = require('../../src/core/config') - -describe('config', () => { - it('should allow empty config', () => { - const cfg = {} - expect(() => config.validate(cfg)).to.not.throw() - }) - - it('should allow undefined config', () => { - const cfg = undefined - expect(() => config.validate(cfg)).to.not.throw() - }) - - it('should validate valid repo', () => { - const cfgs = [ - { repo: { unknown: 'value' } }, - { repo: '/path/to-repo' }, - { repo: undefined } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.not.throw()) - }) - - it('should validate invalid repo', () => { - const cfgs = [ - { repo: 138 } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.throw()) - }) - - it('should validate valid init', () => { - const cfgs = [ - { init: { bits: 138 } }, - { init: true }, - { init: false }, - { init: undefined } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.not.throw()) - }) - - it('should validate invalid init', () => { - const cfgs = [ - { init: 138 }, - { init: { bits: 'not an int' } } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.throw()) - }) - - it('should validate valid start', () => { - const cfgs = [ - { start: true }, - { start: false }, - { start: undefined } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.not.throw()) - }) - - it('should validate invalid start', () => { - const cfgs = [ - { start: 138 }, - { start: 'make it so number 1' }, - { start: null } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.throw()) - }) - - it('should validate valid pass', () => { - const cfgs = [ - { pass: 'correctbatteryhorsestaple' }, - { pass: '' }, - { pass: undefined } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.not.throw()) - }) - - it('should validate invalid pass', () => { - const cfgs = [ - { pass: 138 }, - { pass: null } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.throw()) - }) - - it('should validate valid EXPERIMENTAL', () => { - const cfgs = [ - { EXPERIMENTAL: { dht: true, sharding: true } }, - { EXPERIMENTAL: { dht: false, sharding: false } }, - { EXPERIMENTAL: undefined } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.not.throw()) - }) - - it('should validate invalid EXPERIMENTAL', () => { - const cfgs = [ - { EXPERIMENTAL: { dht: 138 } }, - { EXPERIMENTAL: { sharding: 138 } } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.throw()) - }) - - it('should validate valid config', () => { - const cfgs 
= [ - { config: { Addresses: { Swarm: ['/ip4/0.0.0.0/tcp/4002'] } } }, - { config: { Addresses: { Swarm: [] } } }, - { config: { Addresses: { Swarm: undefined } } }, - - { config: { Addresses: { API: '/ip4/127.0.0.1/tcp/5002' } } }, - { config: { Addresses: { API: ['/ip4/127.0.0.1/tcp/5002', '/ip4/127.0.0.1/tcp/5003'] } } }, - { config: { Addresses: { API: undefined } } }, - - { config: { Addresses: { Gateway: '/ip4/127.0.0.1/tcp/9090' } } }, - { config: { Addresses: { Gateway: ['/ip4/127.0.0.1/tcp/9090', '/ip4/127.0.0.1/tcp/9091'] } } }, - { config: { Addresses: { Gateway: undefined } } }, - - { config: { Addresses: { Delegates: ['/dns4/node0.preload.ipfs.io/tcp/443/https'] } } }, - { config: { Addresses: { Delegates: [] } } }, - { config: { Addresses: { Delegates: undefined } } }, - - { config: { Addresses: undefined } }, - - { config: { Discovery: { MDNS: { Enabled: true } } } }, - { config: { Discovery: { MDNS: { Enabled: false } } } }, - { config: { Discovery: { MDNS: { Interval: 138 } } } }, - { config: { Discovery: { MDNS: undefined } } }, - - { config: { Discovery: { webRTCStar: { Enabled: true } } } }, - { config: { Discovery: { webRTCStar: { Enabled: false } } } }, - { config: { Discovery: { webRTCStar: undefined } } }, - - { config: { Discovery: undefined } }, - - { config: { Bootstrap: ['/ip4/104.236.176.52/tcp/4001/ipfs/QmSoLnSGccFuZQJzRadHn95W2CrSFmZuTdDWP8HXaHca9z'] } }, - { config: { Bootstrap: [] } }, - - { config: { Swarm: { ConnMgr: { LowWater: 200, HighWater: 500 } } } }, - { config: { Swarm: { ConnMgr: { LowWater: undefined, HighWater: undefined } } } }, - { config: { Swarm: { ConnMgr: undefined } } }, - { config: { Swarm: undefined } }, - - { config: { Pubsub: { Enabled: true, Router: 'gossipsub' } } }, - { config: { Pubsub: { Enabled: false } } }, - - { config: undefined } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.not.throw()) - }) - - it('should validate invalid config', () => { - const cfgs = [ - { config: { Addresses: { Swarm: 138 } } }, - { config: { Addresses: { Swarm: null } } }, - - { config: { Addresses: { API: 138 } } }, - { config: { Addresses: { API: null } } }, - - { config: { Addresses: { Gateway: 138 } } }, - { config: { Addresses: { Gateway: null } } }, - - { config: { Discovery: { MDNS: { Enabled: 138 } } } }, - { config: { Discovery: { MDNS: { Interval: true } } } }, - - { config: { Discovery: { webRTCStar: { Enabled: 138 } } } }, - - { config: { Bootstrap: ['/ip4/0.0.0.0/tcp/4002'] } }, - { config: { Bootstrap: 138 } }, - - { config: { Swarm: { ConnMgr: { LowWater: 200, HighWater: {} } } } }, - { config: { Swarm: { ConnMgr: { LowWater: {}, HighWater: 500 } } } }, - { config: { Swarm: { ConnMgr: 138 } } }, - { config: { Swarm: 138 } }, - - { config: { Pubsub: { Enabled: 1 } } }, - - { config: 138 } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.throw()) - }) - - it('should validate valid libp2p', () => { - const cfgs = [ - { libp2p: { modules: {} } }, - { libp2p: () => {} }, - { libp2p: undefined } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.not.throw()) - }) - - it('should validate invalid libp2p', () => { - const cfgs = [ - { libp2p: 'error' }, - { libp2p: 138 } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.throw()) - }) - - it('should validate valid profiles', () => { - expect( - () => config.validate({ init: { profiles: ['test'] } }) - ).to.not.throw() - }) - it('should validate invalid profiles', () => { - expect( - () => config.validate({ init: { profiles: 
'test' } }) - ).to.throw() - }) -}) diff --git a/test/core/files.spec.js b/test/core/files.spec.js index ea8ca2380a..484c06ba43 100644 --- a/test/core/files.spec.js +++ b/test/core/files.spec.js @@ -6,9 +6,10 @@ const { expect } = require('interface-ipfs-core/src/utils/mocha') const hat = require('hat') const pull = require('pull-stream') const IPFSFactory = require('ipfsd-ctl') +const all = require('it-all') const IPFS = require('../../src/core') -describe('files', function () { +describe.only('files', function () { this.timeout(10 * 1000) let ipfsd, ipfs @@ -74,13 +75,13 @@ describe('files', function () { describe('add', () => { it('should not error when passed null options', async () => { - await ipfs.add(Buffer.from(hat()), null) + await all(ipfs.add(Buffer.from(hat()), null)) }) it('should add a file with a v1 CID', async () => { - const files = await ipfs.add(Buffer.from([0, 1, 2]), { + const files = await all(ipfs.add(Buffer.from([0, 1, 2]), { cidVersion: 1 - }) + })) expect(files.length).to.equal(1) expect(files[0].hash).to.equal('bafkreifojmzibzlof6xyh5auu3r5vpu5l67brf3fitaf73isdlglqw2t7q') @@ -88,10 +89,10 @@ describe('files', function () { }) it('should add a file with a v1 CID and not raw leaves', async () => { - const files = await ipfs.add(Buffer.from([0, 1, 2]), { + const files = await all(ipfs.add(Buffer.from([0, 1, 2]), { cidVersion: 1, rawLeaves: false - }) + })) expect(files.length).to.equal(1) expect(files[0].hash).to.equal('bafybeide2caf5we5a7izifzwzz5ds2gla67vsfgrzvbzpnyyirnfzgwf5e') diff --git a/test/core/interface.spec.js b/test/core/interface.spec.js index bfc0cb6508..ff5cc32391 100644 --- a/test/core/interface.spec.js +++ b/test/core/interface.spec.js @@ -41,17 +41,7 @@ describe('interface-ipfs-core tests', function () { } }) - tests.filesRegular(defaultCommonFactory, { - skip: isNode ? null : [{ - name: 'addFromStream', - reason: 'Not designed to run in the browser' - }, { - name: 'addFromFs', - reason: 'Not designed to run in the browser' - }] - }) - - tests.filesMFS(defaultCommonFactory) + tests.files(defaultCommonFactory) tests.key(CommonFactory.createAsync({ spawnOptions: {