This repository was archived by the owner on Mar 10, 2020. It is now read-only.

feat: support for reconstructed CIDs of refs-local #540

Open · wants to merge 2 commits into master
17 changes: 13 additions & 4 deletions SPEC/REFS.md
@@ -102,9 +102,12 @@ pull(

#### `refs.local`

> Output all local references (CIDs of all blocks in the blockstore)
> Output all local references (CIDs of all blocks in the blockstore. CIDs are reconstructed, hence they might not match the CIDs under which the blocks were originally stored)

##### `ipfs.refs.local([callback])`
##### `ipfs.refs.local([options], [callback])`

`options` is an optional object that may contain the following keys:
- `multihash (false)`: instead of reconstructed CIDs, the original multihashes are returned as base32 encoded strings

`callback` must follow the `function (err, refs) {}` signature, where `err` is an error if the operation was not successful and `refs` is an array of `{ ref: "myref", err: "error msg" }`
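
With the new `multihash` option the same call returns base32 encoded multihashes instead of reconstructed CIDs. A minimal sketch (the output values are illustrative, not taken from this PR):

```js
ipfs.refs.local({ multihash: true }, function (err, refs) {
  if (err) {
    throw err
  }
  refs.forEach(ref => {
    // with `multihash: true` each `ref.ref` is a base32 encoded multihash
    // string rather than a reconstructed CID
    console.log(ref.ref)
  })
})
```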

@@ -129,10 +132,13 @@ ipfs.refs.local(function (err, refs) {
})
```

#### `refs.localReadableStream`
#### `refs.localReadableStream([options])`

> Output all local references using a [Readable Stream][rs]

`options` is an optional object that may contain the following keys:
- `multihash (false)`: instead of reconstructed CIDs, the original multihashes are returned as base32 encoded strings

##### `ipfs.refs.localReadableStream([options])` -> [Readable Stream][rs]

**Example:**
@@ -148,10 +154,13 @@ stream.on('data', function (ref) {
})
```
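
The `multihash` option applies to the stream variant as well. A hedged sketch (output illustrative):

```js
const stream = ipfs.refs.localReadableStream({ multihash: true })

stream.on('data', function (ref) {
  // each `ref.ref` should be a base32 encoded multihash string
  console.log(ref.ref)
})
```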

#### `refs.localPullStream`
#### `refs.localPullStream([options])`

> Output all local references using a [Pull Stream][ps].

`options` is an optional object that may contain the following keys:
- `multihash (false)`: instead of reconstructed CIDs, the original multihashes are returned as base32 encoded strings

##### `ipfs.refs.localPullStream([options])` -> [Pull Stream][ps]

**Example:**
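As with the other variants, the `multihash` option can be passed to the pull stream. A sketch assuming the `pull-stream` module (the original example is elided from this diff):

```js
const pull = require('pull-stream')

pull(
  ipfs.refs.localPullStream({ multihash: true }),
  pull.collect((err, refs) => {
    if (err) {
      throw err
    }
    // each `ref.ref` should be a base32 encoded multihash string
    refs.forEach(ref => console.log(ref.ref))
  })
)
```
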
4 changes: 2 additions & 2 deletions SPEC/REPO.md
@@ -20,7 +20,7 @@ Where:
- `err` is an Error if the whole GC operation was not successful.
- `res` is an array of objects that contains the following properties
- `err` is an Error if it was not possible to GC a particular block.
- `cid` is the [CID][cid] of the block that was Garbage Collected.
- `multihash` is the [multihash][multihashes] of the block that was Garbage Collected.

If no `callback` is passed, a promise is returned.
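
A minimal sketch of consuming the changed result shape (hedged; the full `gc` example is not shown in this diff):

```js
ipfs.repo.gc((err, res) => {
  if (err) {
    throw err
  }
  res.forEach(r => {
    // each result now carries `multihash` instead of `cid`
    console.log(r.multihash)
  })
})
```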

@@ -84,4 +84,4 @@ ipfs.repo.version((err, version) => console.log(version))
```

[1]: https://github.com/MikeMcl/bignumber.js/
[cid]: https://www.npmjs.com/package/cids
[multihashes]: https://www.npmjs.com/package/multihashes
4 changes: 2 additions & 2 deletions src/files-regular/refs-local-tests.js
@@ -49,8 +49,8 @@ module.exports = (createCommon, suiteName, ipfsRefsLocal, options) => {
expect(err).to.not.exist()

const cids = refs.map(r => r.ref)
expect(cids).to.include('QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn')
expect(cids).to.include('QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr')
expect(cids).to.include('bafkreicuinkdxczmxol5edpb2jumkbkvtoehj6qixz6yvvxgstp3cr5hey')
expect(cids).to.include('bafkreigm5vpfwjayhkmp7d3gc6hwj4c536ns6ajxi3cyi3uulta45rpyzy')

done()
})
50 changes: 30 additions & 20 deletions src/repo/gc.js
@@ -3,6 +3,12 @@

const { getDescribe, getIt, expect } = require('../utils/mocha')
const { DAGNode } = require('ipld-dag-pb')
const CID = require('cids')

// refs.local now returns reconstructed CIDs (v1, raw codec), so convert a
// CIDv0 hash string to the equivalent CIDv1 raw string for comparisons
function cidV0ToV1Raw (hash) {
  const multihash = new CID(hash).multihash
  return new CID(1, 'raw', multihash).toString()
}

module.exports = (createCommon, options) => {
const describe = getDescribe(options)
@@ -52,20 +58,21 @@ module.exports = (createCommon, options) => {
// information that refers to the blocks
const addRes = await ipfs.add(Buffer.from('apples'))
const hash = addRes[0].hash
const cidV1 = cidV0ToV1Raw(hash)

// Get the list of local blocks after the add, should be bigger than
// the initial list and contain hash
const refsAfterAdd = await ipfs.refs.local()
expect(refsAfterAdd.length).to.be.gt(refsBeforeAdd.length)
expect(refsAfterAdd.map(r => r.ref)).includes(hash)
expect(refsAfterAdd.map(r => r.ref)).includes(cidV1)

// Run garbage collection
await ipfs.repo.gc()

// Get the list of local blocks after GC, should still contain the hash,
// because the file is still pinned
const refsAfterGc = await ipfs.refs.local()
expect(refsAfterGc.map(r => r.ref)).includes(hash)
expect(refsAfterGc.map(r => r.ref)).includes(cidV1)

// Unpin the data
await ipfs.pin.rm(hash)
@@ -75,7 +82,7 @@ module.exports = (createCommon, options) => {

// The list of local blocks should no longer contain the hash
const refsAfterUnpinAndGc = await ipfs.refs.local()
expect(refsAfterUnpinAndGc.map(r => r.ref)).not.includes(hash)
expect(refsAfterUnpinAndGc.map(r => r.ref)).not.includes(cidV1)
})

it('should clean up removed MFS files', async () => {
@@ -86,21 +93,21 @@ module.exports = (createCommon, options) => {
await ipfs.files.write('/test', Buffer.from('oranges'), { create: true })
const stats = await ipfs.files.stat('/test')
expect(stats.type).to.equal('file')
const hash = stats.hash
const cidV1 = cidV0ToV1Raw(stats.hash)

// Get the list of local blocks after the add, should be bigger than
// the initial list and contain hash
const refsAfterAdd = await ipfs.refs.local()
expect(refsAfterAdd.length).to.be.gt(refsBeforeAdd.length)
expect(refsAfterAdd.map(r => r.ref)).includes(hash)
expect(refsAfterAdd.map(r => r.ref)).includes(cidV1)

// Run garbage collection
await ipfs.repo.gc()

// Get the list of local blocks after GC, should still contain the hash,
// because the file is in MFS
const refsAfterGc = await ipfs.refs.local()
expect(refsAfterGc.map(r => r.ref)).includes(hash)
expect(refsAfterGc.map(r => r.ref)).includes(cidV1)

// Remove the file
await ipfs.files.rm('/test')
@@ -110,7 +117,7 @@ module.exports = (createCommon, options) => {

// The list of local blocks should no longer contain the hash
const refsAfterUnpinAndGc = await ipfs.refs.local()
expect(refsAfterUnpinAndGc.map(r => r.ref)).not.includes(hash)
expect(refsAfterUnpinAndGc.map(r => r.ref)).not.includes(cidV1)
})

it('should clean up block only after unpinned and removed from MFS', async () => {
@@ -121,21 +128,22 @@ module.exports = (createCommon, options) => {
await ipfs.files.write('/test', Buffer.from('peaches'), { create: true })
const stats = await ipfs.files.stat('/test')
expect(stats.type).to.equal('file')
const mfsFileHash = stats.hash
const mfsFileCidV1 = cidV0ToV1Raw(stats.hash)

// Get the CID of the data in the file
const block = await ipfs.block.get(mfsFileHash)
const block = await ipfs.block.get(mfsFileCidV1)

// Add the data to IPFS (which implicitly pins the data)
const addRes = await ipfs.add(block.data)
const dataHash = addRes[0].hash
const dataCidV1 = cidV0ToV1Raw(dataHash)

// Get the list of local blocks after the add, should be bigger than
// the initial list and contain the data hash
const refsAfterAdd = await ipfs.refs.local()
expect(refsAfterAdd.length).to.be.gt(refsBeforeAdd.length)
const hashesAfterAdd = refsAfterAdd.map(r => r.ref)
expect(hashesAfterAdd).includes(dataHash)
expect(hashesAfterAdd).includes(dataCidV1)

// Run garbage collection
await ipfs.repo.gc()
@@ -144,7 +152,7 @@ module.exports = (createCommon, options) => {
// because the file is pinned and in MFS
const refsAfterGc = await ipfs.refs.local()
const hashesAfterGc = refsAfterGc.map(r => r.ref)
expect(hashesAfterGc).includes(dataHash)
expect(hashesAfterGc).includes(dataCidV1)

// Remove the file
await ipfs.files.rm('/test')
@@ -156,8 +164,8 @@ module.exports = (createCommon, options) => {
// because the file is still pinned
const refsAfterRmAndGc = await ipfs.refs.local()
const hashesAfterRmAndGc = refsAfterRmAndGc.map(r => r.ref)
expect(hashesAfterRmAndGc).not.includes(mfsFileHash)
expect(hashesAfterRmAndGc).includes(dataHash)
expect(hashesAfterRmAndGc).not.includes(mfsFileCidV1)
expect(hashesAfterRmAndGc).includes(dataCidV1)

// Unpin the data
await ipfs.pin.rm(dataHash)
@@ -168,8 +176,8 @@ module.exports = (createCommon, options) => {
// The list of local blocks should no longer contain the hashes
const refsAfterUnpinAndGc = await ipfs.refs.local()
const hashesAfterUnpinAndGc = refsAfterUnpinAndGc.map(r => r.ref)
expect(hashesAfterUnpinAndGc).not.includes(mfsFileHash)
expect(hashesAfterUnpinAndGc).not.includes(dataHash)
expect(hashesAfterUnpinAndGc).not.includes(mfsFileCidV1)
expect(hashesAfterUnpinAndGc).not.includes(dataCidV1)
})

it('should clean up indirectly pinned data after recursive pin removal', async () => {
@@ -179,6 +187,7 @@ module.exports = (createCommon, options) => {
// Add some data
const addRes = await ipfs.add(Buffer.from('pears'))
const dataHash = addRes[0].hash
const dataHashCidV1 = cidV0ToV1Raw(dataHash)

// Unpin the data
await ipfs.pin.rm(dataHash)
@@ -192,6 +201,7 @@ module.exports = (createCommon, options) => {

// Put the object into IPFS
const objHash = (await ipfs.object.put(obj)).toString()
const objCidV1 = cidV0ToV1Raw(objHash)

// Putting an object doesn't pin it
expect((await ipfs.pin.ls()).map(p => p.hash)).not.includes(objHash)
@@ -201,8 +211,8 @@ module.exports = (createCommon, options) => {
const refsAfterAdd = await ipfs.refs.local()
expect(refsAfterAdd.length).to.be.gt(refsBeforeAdd.length)
const hashesAfterAdd = refsAfterAdd.map(r => r.ref)
expect(hashesAfterAdd).includes(objHash)
expect(hashesAfterAdd).includes(dataHash)
expect(hashesAfterAdd).includes(objCidV1)
expect(hashesAfterAdd).includes(dataHashCidV1)

// Recursively pin the object
await ipfs.pin.add(objHash, { recursive: true })
@@ -217,7 +227,7 @@ module.exports = (createCommon, options) => {
// Get the list of local blocks after GC, should still contain the data
// hash, because the data is still (indirectly) pinned
const refsAfterGc = await ipfs.refs.local()
expect(refsAfterGc.map(r => r.ref)).includes(dataHash)
expect(refsAfterGc.map(r => r.ref)).includes(dataHashCidV1)

// Recursively unpin the object
await ipfs.pin.rm(objHash)
@@ -228,8 +238,8 @@ module.exports = (createCommon, options) => {
// The list of local blocks should no longer contain the hashes
const refsAfterUnpinAndGc = await ipfs.refs.local()
const hashesAfterUnpinAndGc = refsAfterUnpinAndGc.map(r => r.ref)
expect(hashesAfterUnpinAndGc).not.includes(objHash)
expect(hashesAfterUnpinAndGc).not.includes(dataHash)
expect(hashesAfterUnpinAndGc).not.includes(objCidV1)
expect(hashesAfterUnpinAndGc).not.includes(dataHashCidV1)
})
})
}