const fs = require('fs-extra')
const fetch = require('node-fetch')
const tar = require('tar')
const { createGunzip } = require('zlib')
const { exec, execFile } = require('child_process')
const Progress = require('node-fetch-progress')
const AbortController = require('abort-controller')
const ora = require('ora')
const prettyBytes = require('pretty-bytes')
9
10
10
/**
 * Fetch `cid` through the local IPFS daemon via the `ipfs` CLI.
 *
 * @param {object} opts
 * @param {string} opts.cid - content identifier to fetch
 * @param {string} opts.path - output path handed to `ipfs get -o`
 * @param {boolean} [opts.archive] - pass `-a` so output is a tar archive
 * @param {boolean} [opts.compress] - pass `-C` so output is gzip-compressed
 * @returns {Promise<void>} resolves when `ipfs get` exits with status 0,
 *   rejects with the spawn/exit error otherwise
 */
function fetchIPFS ({ cid, path, archive, compress }) {
  return new Promise((resolve, reject) => {
    // Build an argument vector and use execFile (no shell) instead of
    // interpolating cid/path into an exec() command string — the template
    // string form was a command-injection hole and broke on paths with
    // spaces. Also avoids mutating the destructured parameters.
    const args = ['get', cid, '-o', path]
    if (archive) args.push('-a')
    if (compress) args.push('-C')
    execFile('ipfs', args, err => {
      if (err) return reject(err)
      resolve()
    })
  })
}
18
21
19
- async function fetchHTTP ( { api, cid, timeout : timeoutMs , path, spinner } ) {
22
+ async function fetchHTTP ( { api, cid, timeout : timeoutMs , path, archive , compress , spinner } ) {
20
23
const url = `${ api } /v0/get?arg=${ cid } &archive=true&compress=true`
21
24
const controller = new AbortController ( )
22
25
const fetchPromise = fetch ( url , { signal : controller . signal } )
@@ -40,14 +43,18 @@ async function fetchHTTP ({ api, cid, timeout: timeoutMs, path, spinner }) {
40
43
}
41
44
} )
42
45
43
- const extractor = tar . extract ( {
44
- strip : 1 ,
45
- C : path ,
46
- strict : true
47
- } )
46
+ const writer = archive
47
+ ? fs . createWriteStream ( path )
48
+ : tar . extract ( {
49
+ strip : 1 ,
50
+ C : path ,
51
+ strict : true
52
+ } )
48
53
49
54
await new Promise ( ( resolve , reject ) => {
50
- res . body . pipe ( extractor )
55
+ ( compress
56
+ ? res . body . pipe ( writer )
57
+ : res . body . pipe ( createGunzip ( ) ) . pipe ( writer ) )
51
58
. on ( 'error' , reject )
52
59
. on ( 'finish' , ( ) => {
53
60
if ( progress ) progress . removeAllListeners ( 'progress' )
@@ -59,31 +66,35 @@ async function fetchHTTP ({ api, cid, timeout: timeoutMs, path, spinner }) {
59
66
}
60
67
}
61
68
62
- module . exports = async ( opts ) => {
63
- opts . timeout = opts . timeout || 60000
64
- opts . retries = opts . retries || 3
65
- opts . api = opts . api || 'https://ipfs.io/api'
66
-
67
- const { cid, path, clean, verbose, timeout, api, retries } = opts
68
-
69
+ module . exports = async ( { cid, path, clean, archive, compress, verbose, timeout, api, retries } ) => {
69
70
if ( ! cid || ! path ) {
70
71
throw new Error ( 'cid and path must be defined' )
71
72
}
73
+ if ( compress && ! archive ) {
74
+ throw new Error ( 'compress requires archive mode' )
75
+ }
76
+
77
+ // match go-ipfs behaviour: 'ipfs get' adds .tar and .tar.gz if missing
78
+ if ( compress && ! path . endsWith ( '.tar.gz' ) ) { path += '.tar.gz' }
79
+ if ( archive && ! path . includes ( '.tar' ) ) { path += '.tar' }
72
80
73
81
if ( await fs . pathExists ( path ) ) {
74
82
if ( clean ) {
75
- await fs . emptyDir ( path )
83
+ fs . lstatSync ( path ) . isDirectory ( )
84
+ ? fs . emptyDirSync ( path )
85
+ : fs . unlinkSync ( path ) // --archive produces a file
76
86
} else {
87
+ // no-op if destination already exists
77
88
return
78
89
}
79
90
}
80
91
81
- await fs . ensureDir ( path )
92
+ if ( ! archive ) await fs . ensureDir ( path )
82
93
let spinner = ora ( )
83
94
84
95
try {
85
96
spinner . start ( 'Fetching via IPFS…' )
86
- await fetchIPFS ( { cid, path } )
97
+ await fetchIPFS ( { cid, path, archive , compress } )
87
98
spinner . succeed ( `Fetched ${ cid } to ${ path } !` )
88
99
return
89
100
} catch ( _error ) {
@@ -97,7 +108,7 @@ module.exports = async (opts) => {
97
108
spinner . start ( `Fetching via IPFS HTTP gateway (attempt ${ i } )…` )
98
109
99
110
try {
100
- await fetchHTTP ( { cid, path, timeout, api, verbose, spinner } )
111
+ await fetchHTTP ( { cid, path, archive , compress , timeout, api, verbose, spinner } )
101
112
spinner . succeed ( `Fetched ${ cid } to ${ path } !` )
102
113
return
103
114
} catch ( e ) {
0 commit comments