index.js
const pull = require('pull-stream')
const { onEnd } = pull
const pushable = require('pull-pushable')
const paramap = require('pull-paramap')
const { readdir, stat, readFile, writeFile } = require('fs')
const { join, resolve, relative: relative_path, dirname, normalize, basename } = require('path')
const mkdirp = require('mkdirp')
const glob_parse = require('glob-base')
const mm = require('micromatch')
const absolute = require('is-absolute')
// `pull.filter()` with no predicate drops falsy values, which removes the
// `null` placeholders the accumulator emits for directories and non-matches
const filter = pull.filter()
exports.read = read
exports.write = write
const ALLOW = { dot: true }
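// Example: copy every .js file under src/ into out/, preserving directory
// structure (a minimal sketch; the paths are placeholders):
//
//   const pull = require('pull-stream')
//   const { read, write } = require('./index.js')
//
//   pull(
//     read('src/**/*.js'),
//     write('out', err => {
//       if (err) throw err
//     })
//   )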
function read (globs, options) {
  if (!Array.isArray(globs)) globs = [globs]
  if (!options) options = {}
  const cwd = options.cwd || process.cwd()
  const stream_mode = Boolean(options.stream)
  // Source stream used to collect entry paths recursively (files and dirs).
  // We also track the number of pending reads, so we know when to end the stream
  const files = pushable()
  let pending = 0
  // Parse each glob to { glob, negated, base, pattern } to check whether the
  // pattern is negated, and push the non-negated base directory paths to start
  // off the pipeline
  for (let i = globs.length; i--;) {
    const glob = resolve(cwd, globs[i])
    const ast = glob_parse(glob)
    const pattern = ast.glob
    const negated = pattern[0] === '!'
    const base = ast.base
    const is_glob = ast.isGlob
    globs[i] = { glob, negated, base, pattern }
    // If the glob isn't negated, kickstart the stream with its base directory
    // or file path
    if (!negated) {
      if (is_glob) {
        files.push({ base, path: null, data: null })
      } else {
        files.push({ base: absolute(glob) ? null : cwd, path: glob, data: null })
      }
    }
  }
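  // For reference, `glob-base` splits a pattern into its static prefix and the
  // glob remainder, roughly like this (shape per its README; the values here
  // are illustrative only):
  //
  //   glob_parse('src/**/*.js')
  //   // => { base: 'src', isGlob: true, glob: '**/*.js' }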
  // Handle directory and file paths, recursively adding more into the stream
  const accumulator = paramap((file, done) => {
    const base = file.base
    const path = file.path
    const entry = base ? (path ? resolve(base, path) : base) : path
    // If the entry is a directory, read its children and push them back onto
    // `files`. This is the recursive step that feeds more paths into the pipeline
    function directory_path () {
      pending++
      readdir(entry, (err, children) => {
        if (err) return done(err)
        // Push children into the pipeline; order is not important
        for (let i = children.length; i--;) {
          if (path !== null && base !== null) {
            files.push({ base, path: join(path, children[i]), data: null })
          } else if (path !== null) {
            files.push({ base: null, path: join(path, children[i]), data: null })
          } else {
            files.push({ base, path: children[i], data: null })
          }
        }
        // Filter the directory itself out of the pipeline after adding its children
        pending--
        done(null, null)
      })
    }
    // If the entry is a file, verify it against the globs
    function file_path () {
      pending++
      for (let i = globs.length; i--;) {
        const glob = globs[i]
        if (entry === glob.glob || mm.isMatch(entry, glob.pattern, ALLOW)) {
          // Leave `pending` incremented: the reader decrements it once the
          // file's contents have been handled
          return done(null, file)
        }
      }
      // Did not match any glob
      pending--
      done(null, null)
    }
    // New read
    if (path) {
      stat(entry, (err, stats) => {
        // Handle the path by type
        if (err) return done(err)
        else if (stats.isDirectory()) directory_path()
        else if (stats.isFile()) file_path()
        else done(null, null)
      })
    } else {
      // No relative path implies the entry is a glob base directory, so we
      // can save some time by skipping the fs.stat call
      directory_path()
    }
  }, 5)
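  // At this point each value flowing downstream is either `null` (directories
  // and non-matching paths, dropped by `filter` below) or an entry shaped like
  // (illustrative values):
  //
  //   { base: '/cwd/src', path: 'lib/util.js', data: null }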
  // Attach contents to the { base, path } entries coming out of the
  // accumulator. In stream mode the eager read is skipped and the data is a
  // pull-stream source instead of a buffer
  const reader = paramap((file, done) => {
    const base = file.base
    const path = file.path
    if (stream_mode) {
      // Lazy single-value source: the file is read on the first request, and
      // the stream ends on the request after that
      let consumed = false
      file.data = function data_stream (end, cb) {
        if (end) return cb(end)
        if (consumed) return cb(true)
        readFile(base ? join(base, path) : path, (err, buf) => {
          if (err) return cb(err)
          consumed = true
          cb(null, buf)
        })
      }
      done(null, file)
      pending--
      if (!pending) files.end()
    } else {
      readFile(base ? join(base, path) : path, (err, buf) => {
        if (err) return done(err)
        file.data = buf
        done(null, file)
        pending--
        if (!pending) files.end()
      })
    }
  })
  // Pull all the pieces together
  return pull(files, accumulator, filter, reader)
}
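// In stream mode (`options.stream`), `file.data` is a pull-stream source
// rather than a buffer. A minimal sketch of draining it (the glob is a
// placeholder):
//
//   pull(
//     read('src/**/*.js', { stream: true }),
//     pull.drain(file => {
//       pull(file.data, pull.collect((err, bufs) => {
//         if (err) throw err
//         console.log(file.path, Buffer.concat(bufs).length)
//       }))
//     })
//   )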
function write (new_base, done) {
  if (typeof new_base === 'function') {
    done = new_base
    new_base = null
  }
  // Cache directories we have already created, so mkdirp runs once per dir
  const written_dirs = []
  return pull(
    paramap((file, next) => {
      const path = file.path
      const base = file.base
      const data = file.data
      // Resolve the destination: rebase onto `new_base` when given, otherwise
      // write the file back relative to its original base
      let dest = path
      if (new_base && !base) {
        dest = join(new_base, basename(path))
      } else if (base) {
        dest = join(new_base || base, path)
      }
      const dir = dirname(dest)
      if (written_dirs.indexOf(dir) === -1) {
        mkdirp(dir, err => {
          if (err) return next(err)
          written_dirs.push(dir)
          writeFile(dest, data, next)
        })
      } else {
        writeFile(dest, data, next)
      }
    }, 5),
    onEnd(done)
  )
}
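// `write` may also be called with just a callback, in which case each file is
// written back relative to its own base. Note that `file.data` is handed
// straight to fs.writeFile, so `write` expects buffered contents, i.e. `read`
// without `options.stream`. A small sketch (paths are placeholders):
//
//   pull(
//     read('build/**/*', { cwd: '/tmp/project' }),
//     write(err => {
//       if (err) throw err
//     })
//   )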