
Commit e2dfe3b

committed Dec 4, 2018
docs: add mfs stream ls methods
The only mfs ls method at the moment buffers the output into an array before returning it to the user. This PR adds two new methods, lsPullStream and lsReadableStream, to allow the user to either buffer the output themselves (in case they need sorting, etc.) or stream it straight on to an output of some sort. N.B. the HTTP API will not actually do any streaming until ipfs/kubo#5611 is released.
1 parent 9959d02 commit e2dfe3b
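To make the rationale above concrete, here is a hedged, non-normative sketch (not part of this commit) of the "buffer it yourself" style: it collects entries from the new Readable Stream variant and sorts them by name once the stream ends. It assumes an `ipfs` instance exposing the new `files.lsReadableStream` method and a hypothetical `/some-dir` MFS path; option names follow the spec changes further down.

```JavaScript
// Hedged sketch, not part of this commit: buffer entries from the
// Readable Stream variant and sort them by name once the stream ends.
const entries = []

ipfs.files.lsReadableStream('/some-dir', { long: true })
  .on('data', (entry) => entries.push(entry)) // buffer each entry as it arrives
  .on('error', (err) => console.error(err))
  .on('end', () => {
    entries.sort((a, b) => a.name.localeCompare(b.name))
    entries.forEach((entry) => console.log(entry.name, entry.size))
  })
```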

File tree

5 files changed, +298 -10 lines changed


‎SPEC/FILES.md

+72
@@ -22,6 +22,8 @@
 - [files.cp](#filescp)
 - [files.flush](#filesflush)
 - [files.ls](#filesls)
+- [files.lsReadableStream](#fileslsreadablestream)
+- [files.lsPullStream](#fileslspullstream)
 - [files.mkdir](#filesmkdir)
 - [files.mv](#filesmv)
 - [files.read](#filesread)
@@ -1090,6 +1092,7 @@ Where:
 - `options` is an optional Object that might contain the following keys:
   - `long` is a Boolean value to decide whether or not to populate `type`, `size` and `hash` (default: false)
   - `cidBase` is which number base to use to format hashes - e.g. `base32`, `base64` etc (default: `base58btc`)
+  - `sort` is a Boolean value; if true, entries will be sorted by filename (default: false)
 - `callback` is an optional function with the signature `function (error, files) {}`, where `error` may be an Error that occurred if the operation was not successful and `files` is an array containing Objects that contain the following keys:

 - `name` which is the file's name
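To illustrate the `sort` option added in the hunk above, here is a minimal, hypothetical sketch (not part of the diff) that assumes an `ipfs` instance and a `/screenshots` directory in MFS:

```JavaScript
// Hedged sketch: let files.ls sort the listing by filename
ipfs.files.ls('/screenshots', { sort: true, long: true }, (err, files) => {
  if (err) {
    return console.error(err)
  }

  // files arrive already sorted by name when `sort` is true
  files.forEach((file) => console.log(file.name))
})
```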
@@ -1112,6 +1115,75 @@ ipfs.files.ls('/screenshots', function (err, files) {
 // 2018-01-22T18:08:49.184Z.png
 ```

+#### `files.lsReadableStream`
+
+> Lists a directory from the local mutable namespace that is addressed by a valid IPFS Path. The entries are yielded via a Readable Stream.
+
+##### `Go` **WIP**
+
+##### `JavaScript` - ipfs.files.lsReadableStream([path], [options]) -> [Readable Stream][rs]
+
+Where:
+
+- `path` is an optional string to show listing for (default: `/`)
+- `options` is an optional Object that might contain the following keys:
+  - `long` is a Boolean value to decide whether or not to populate `type`, `size` and `hash` (default: false)
+  - `cidBase` is which number base to use to format hashes - e.g. `base32`, `base64` etc (default: `base58btc`)
+
+It returns a [Readable Stream][rs] in [Object mode](https://nodejs.org/api/stream.html#stream_object_mode) that will yield objects containing the following keys:
+
+- `name` which is the file's name
+- `type` which is the object's type (`directory` or `file`)
+- `size` the size of the file in bytes
+- `hash` the hash of the file
+
+**Example:**
+
+```JavaScript
+const stream = ipfs.files.lsReadableStream('/some-dir')
+
+stream.on('data', (file) => {
+  // write the file's name to standard out
+  console.log(file.name)
+})
+```
+
+#### `files.lsPullStream`
+
+> Lists a directory from the local mutable namespace that is addressed by a valid IPFS Path. The entries are yielded via a Pull Stream.
+
+##### `Go` **WIP**
+
+##### `JavaScript` - ipfs.files.lsPullStream([path], [options]) -> [Pull Stream][ps]
+
+Where:
+
+- `path` is an optional string to show listing for (default: `/`)
+- `options` is an optional Object that might contain the following keys:
+  - `long` is a Boolean value to decide whether or not to populate `type`, `size` and `hash` (default: false)
+  - `cidBase` is which number base to use to format hashes - e.g. `base32`, `base64` etc (default: `base58btc`)
+
+It returns a [Pull Stream][ps] that will yield objects containing the following keys:
+
+- `name` which is the file's name
+- `type` which is the object's type (`directory` or `file`)
+- `size` the size of the file in bytes
+- `hash` the hash of the file
+
+**Example:**
+
+```JavaScript
+pull(
+  ipfs.files.lsPullStream('/some-dir'),
+  pull.through(file => {
+    console.log(file.name)
+  }),
+  pull.onEnd(...)
+)
+```
+
+A great source of [examples][] can be found in the tests for this API.
+
 [examples]: https://github.com/ipfs/interface-ipfs-core/blob/master/js/src/files
 [b]: https://www.npmjs.com/package/buffer
 [rs]: https://www.npmjs.com/package/readable-stream
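As a complement to the `pull.through` example in the spec above, the following hedged sketch (not part of the diff) buffers every entry with pull-stream's `collect` sink, mirroring the approach taken in the new tests below. It assumes an `ipfs` instance and a hypothetical `/some-dir` path.

```JavaScript
const pull = require('pull-stream/pull')
const collect = require('pull-stream/sinks/collect')

pull(
  ipfs.files.lsPullStream('/some-dir', { long: true }),
  collect((err, entries) => {
    if (err) {
      return console.error(err)
    }

    // all entries are buffered here, so they can be sorted before use
    entries.sort((a, b) => a.name.localeCompare(b.name))
    entries.forEach((entry) => console.log(`${entry.name} ${entry.size}`))
  })
)
```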

‎js/src/files-mfs/index.js

+2
@@ -13,6 +13,8 @@ const tests = {
   readReadableStream: require('./read-readable-stream'),
   readPullStream: require('./read-pull-stream'),
   ls: require('./ls'),
+  lsReadableStream: require('./ls-readable-stream'),
+  lsPullStream: require('./ls-pull-stream'),
   flush: require('./flush')
 }

‎js/src/files-mfs/ls-pull-stream.js

+107
@@ -0,0 +1,107 @@
+/* eslint-env mocha */
+'use strict'
+
+const series = require('async/series')
+const hat = require('hat')
+const { getDescribe, getIt, expect } = require('../utils/mocha')
+const pull = require('pull-stream/pull')
+const onEnd = require('pull-stream/sinks/on-end')
+const collect = require('pull-stream/sinks/collect')
+
+module.exports = (createCommon, options) => {
+  const describe = getDescribe(options)
+  const it = getIt(options)
+  const common = createCommon()
+
+  describe('.files.lsPullStream', function () {
+    this.timeout(40 * 1000)
+
+    let ipfs
+
+    before(function (done) {
+      // CI takes longer to instantiate the daemon, so we need to increase the
+      // timeout for the before step
+      this.timeout(60 * 1000)
+
+      common.setup((err, factory) => {
+        expect(err).to.not.exist()
+        factory.spawnNode((err, node) => {
+          expect(err).to.not.exist()
+          ipfs = node
+          done()
+        })
+      })
+    })
+
+    after((done) => common.teardown(done))
+
+    it('should not ls not found file/dir, expect error', (done) => {
+      const testDir = `/test-${hat()}`
+
+      pull(
+        ipfs.files.lsPullStream(`${testDir}/404`),
+        onEnd((err) => {
+          expect(err).to.exist()
+          expect(err.message).to.include('does not exist')
+          done()
+        })
+      )
+    })
+
+    it('should ls directory', (done) => {
+      const testDir = `/test-${hat()}`
+
+      series([
+        (cb) => ipfs.files.mkdir(`${testDir}/lv1`, { p: true }, cb),
+        (cb) => ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }, cb)
+      ], (err) => {
+        expect(err).to.not.exist()
+
+        pull(
+          ipfs.files.lsPullStream(testDir),
+          collect((err, entries) => {
+            expect(err).to.not.exist()
+            expect(entries.sort((a, b) => a.name.localeCompare(b.name))).to.eql([
+              { name: 'b', type: 0, size: 0, hash: '' },
+              { name: 'lv1', type: 0, size: 0, hash: '' }
+            ])
+            done()
+          })
+        )
+      })
+    })
+
+    it('should ls -l directory', (done) => {
+      const testDir = `/test-${hat()}`
+
+      series([
+        (cb) => ipfs.files.mkdir(`${testDir}/lv1`, { p: true }, cb),
+        (cb) => ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }, cb)
+      ], (err) => {
+        expect(err).to.not.exist()
+
+        pull(
+          ipfs.files.lsPullStream(testDir, { l: true }),
+          collect((err, entries) => {
+            expect(err).to.not.exist()
+            expect(entries.sort((a, b) => a.name.localeCompare(b.name))).to.eql([
+              {
+                name: 'b',
+                type: 0,
+                size: 13,
+                hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T'
+              },
+              {
+                name: 'lv1',
+                type: 1,
+                size: 0,
+                hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'
+              }
+            ])
+            done()
+          })
+        )
+      })
+    })
+  })
+}
js/src/files-mfs/ls-readable-stream.js

+107
@@ -0,0 +1,107 @@
+/* eslint-env mocha */
+'use strict'
+
+const series = require('async/series')
+const hat = require('hat')
+const { getDescribe, getIt, expect } = require('../utils/mocha')
+
+module.exports = (createCommon, options) => {
+  const describe = getDescribe(options)
+  const it = getIt(options)
+  const common = createCommon()
+
+  describe('.files.lsReadableStream', function () {
+    this.timeout(40 * 1000)
+
+    let ipfs
+
+    before(function (done) {
+      // CI takes longer to instantiate the daemon, so we need to increase the
+      // timeout for the before step
+      this.timeout(60 * 1000)
+
+      common.setup((err, factory) => {
+        expect(err).to.not.exist()
+        factory.spawnNode((err, node) => {
+          expect(err).to.not.exist()
+          ipfs = node
+          done()
+        })
+      })
+    })
+
+    after((done) => common.teardown(done))
+
+    it('should not ls not found file/dir, expect error', (done) => {
+      const testDir = `/test-${hat()}`
+
+      const stream = ipfs.files.lsReadableStream(`${testDir}/404`)
+
+      stream.once('error', (err) => {
+        expect(err).to.exist()
+        expect(err.message).to.include('does not exist')
+        done()
+      })
+    })
+
+    it('should ls directory', (done) => {
+      const testDir = `/test-${hat()}`
+
+      series([
+        (cb) => ipfs.files.mkdir(`${testDir}/lv1`, { p: true }, cb),
+        (cb) => ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }, cb)
+      ], (err) => {
+        expect(err).to.not.exist()
+
+        const stream = ipfs.files.lsReadableStream(testDir)
+
+        let entries = []
+
+        stream.on('data', entry => entries.push(entry))
+
+        stream.once('end', () => {
+          expect(entries.sort((a, b) => a.name.localeCompare(b.name))).to.eql([
+            { name: 'b', type: 0, size: 0, hash: '' },
+            { name: 'lv1', type: 0, size: 0, hash: '' }
+          ])
+          done()
+        })
+      })
+    })
+
+    it('should ls -l directory', (done) => {
+      const testDir = `/test-${hat()}`
+
+      series([
+        (cb) => ipfs.files.mkdir(`${testDir}/lv1`, { p: true }, cb),
+        (cb) => ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }, cb)
+      ], (err) => {
+        expect(err).to.not.exist()
+
+        const stream = ipfs.files.lsReadableStream(testDir, { l: true })
+
+        let entries = []
+
+        stream.on('data', entry => entries.push(entry))
+
+        stream.once('end', () => {
+          expect(entries.sort((a, b) => a.name.localeCompare(b.name))).to.eql([
+            {
+              name: 'b',
+              type: 0,
+              size: 13,
+              hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T'
+            },
+            {
+              name: 'lv1',
+              type: 1,
+              size: 0,
+              hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'
+            }
+          ])
+          done()
+        })
+      })
+    })
+  })
+}

‎js/src/files-mfs/ls.js

+10 -10
@@ -53,9 +53,9 @@ module.exports = (createCommon, options) => {

         ipfs.files.ls(testDir, (err, info) => {
           expect(err).to.not.exist()
-          expect(info).to.eql([
-            { name: 'lv1', type: 0, size: 0, hash: '' },
-            { name: 'b', type: 0, size: 0, hash: '' }
+          expect(info.sort((a, b) => a.name.localeCompare(b.name))).to.eql([
+            { name: 'b', type: 0, size: 0, hash: '' },
+            { name: 'lv1', type: 0, size: 0, hash: '' }
           ])
           done()
         })
@@ -73,18 +73,18 @@ module.exports = (createCommon, options) => {

         ipfs.files.ls(testDir, { l: true }, (err, info) => {
           expect(err).to.not.exist()
-          expect(info).to.eql([
-            {
-              name: 'lv1',
-              type: 1,
-              size: 0,
-              hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'
-            },
+          expect(info.sort((a, b) => a.name.localeCompare(b.name))).to.eql([
             {
               name: 'b',
               type: 0,
               size: 13,
               hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T'
+            },
+            {
+              name: 'lv1',
+              type: 1,
+              size: 0,
+              hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'
             }
           ])
           done()
