lib/walker.js @ efcf9fd06b02fa9bcd28681b4c777224e19702bb (22444 bytes)

/* eslint-disable require-atomic-updates */

import { ALIAS_AS_RELATIVE, ALIAS_AS_RESOLVABLE,
  STORE_BLOB, STORE_CONTENT, STORE_LINKS, STORE_STAT,
  isDotJS, isDotJSON, isDotNODE, isPackageJson, normalizePath
} from '../prelude/common.js';

import { follow, natives } from './follow.js';
import { log, wasReported } from './log.js';
import assert from 'assert';
import detector from './detector.js';
import fs from 'fs-extra';
import globby from 'globby';
import path from 'path';
import { system } from 'ssb-pkg-fetch';

const { abiToNodeRange } = system;

const win32 = process.platform === 'win32';

function unlikelyJavascript (file) {
  return [ '.css', '.html', '.json' ].includes(path.extname(file));
}

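// Heuristic: decide from the license/licenses field whether a package can be
// treated as "public". Sources of public packages are also kept as plain
// content alongside their bytecode (see step_STORE_ANY).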
function isPublic (config) {
  if (config.private) return false;
  let { license, licenses } = config;
  if (licenses) {
    license = licenses;
  }
  if (license) {
    license = license.type || license;
  }
  if (Array.isArray(license)) {
    license = license.map((c) => String(c.type || c)).join(',');
  }
  if (!license) return false;
  if (/^\(/.test(license)) license = license.slice(1);
  if (/\)$/.test(license)) license = license.slice(0, -1);
  license = license.toLowerCase();
  licenses = Array.prototype.concat(
    license.split(' or '), license.split(' and '),
    license.split('/'), license.split(',')
  );
  let result = false;
  const foss = [ 'isc', 'mit', 'apache-2.0', 'apache 2.0',
    'public domain', 'bsd', 'bsd-2-clause', 'bsd-3-clause', 'wtfpl',
    'cc-by-3.0', 'x11', 'artistic-2.0', 'gplv3', 'mpl', 'mplv2.0',
    'unlicense', 'apache license 2.0', 'zlib', 'mpl-2.0', 'nasa-1.3',
    'apache license, version 2.0', 'lgpl-2.1+', 'cc0-1.0' ];
  for (const c of licenses) {
    result = foss.indexOf(c) >= 0;
    if (result) break;
  }
  return result;
}

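// Resolve one config pattern against the package base directory, preserving
// a leading '!' (negation) and using forward slashes so globby handles the
// pattern on Windows.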
function upon (p, base) {
  if (typeof p !== 'string') {
    throw wasReported(
      'Config items must be strings. See examples'
    );
  }
  let negate = false;
  if (p[0] === '!') {
    p = p.slice(1);
    negate = true;
  }
  p = path.join(base, p);
  if (win32) {
    p = p.replace(/\\/g, '/');
  }
  if (negate) {
    p = '!' + p;
  }
  return p;
}

function collect (ps) {
  return globby.sync(
    ps, { dot: true }
  );
}

function expandFiles (efs, base) {
  if (!Array.isArray(efs)) {
    efs = [ efs ];
  }
  efs = collect(
    efs.map((p) => upon(p, base))
  );
  return efs;
}

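// Walks the dependency graph starting from the entrypoint, scheduling one
// task per file (bytecode blob, raw content, directory links or stat info)
// for the packer. Tasks are processed strictly in the order they are added.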
class Walker {
  appendRecord (task) {
    const { file } = task;
    if (this.records[file]) return;
    this.records[file] = { file };
  }

  append (task) {
    task.file = normalizePath(task.file);
    this.appendRecord(task);
    this.tasks.push(task);

    const what = {
      [STORE_BLOB]: 'Bytecode of',
      [STORE_CONTENT]: 'Content of',
      [STORE_LINKS]: 'Directory',
      [STORE_STAT]: 'Stat info of'
    }[task.store];
    if (task.reason) {
      log.debug(what + ' %1 is added to queue. It was required from %2',
        [ '%1: ' + task.file, '%2: ' + task.reason ]);
    } else {
      log.debug(what + ' %1 is added to queue', [ '%1: ' + task.file ]);
    }
  }

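  // Queue the files referenced by package.json: the 'pkg' section
  // (scripts, assets and grouped blobAssets) or, when there is no
  // 'pkg' section, the top-level 'files' list.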
  async appendFilesFromConfig (marker) {
    const { config, configPath, base } = marker;
    const pkgConfig = config.pkg;

    if (pkgConfig) {
      let { scripts } = pkgConfig;

      if (scripts) {
        scripts = expandFiles(scripts, base);
        for (const script of scripts) {
          const stat = await fs.stat(script);
          if (stat.isFile()) {
            if (!isDotJS(script) && !isDotJSON(script) && !isDotNODE(script)) {
              log.warn('Non-javascript file is specified in \'scripts\'.', [
                'Pkg will probably fail to parse. Specify *.js in glob.',
                script ]);
            }

            this.append({
              file: script,
              marker,
              store: STORE_BLOB,
              reason: configPath
            });
          }
        }
      }

      let { assets, blobAssets } = pkgConfig;

      if (assets) {
        assets = expandFiles(assets, base);
        for (const asset of assets) {
          const stat = await fs.stat(asset);
          if (stat.isFile()) {
            this.append({
              file: asset,
              marker,
              store: STORE_CONTENT,
              reason: configPath
            });
          }
        }
      }

      if (blobAssets) {
        if (Array.isArray(blobAssets)) blobAssets = { '*': blobAssets };
        for (const group in blobAssets) {
          const theseBlobAssets = expandFiles(blobAssets[group], base);
          for (const asset of theseBlobAssets) {
            const stat = await fs.stat(asset);
            if (stat.isFile()) {
              this.append({
                file: asset,
                marker,
                store: STORE_CONTENT,
                reason: configPath,
                blobGroup: group
              });
            }
          }
        }
      }

    } else {
      let { files } = config;

      if (files) {
        files = expandFiles(files, base);
        for (const file of files) {
          const stat = await fs.stat(file);
          if (stat.isFile()) {
            // 1) remove sources of top-level(!) package 'files' i.e. ship as BLOB
            // 2) non-source (non-js) files of top-level package are shipped as CONTENT
            // 3) parsing some js 'files' of non-top-level packages fails, hence all CONTENT
            if (marker.toplevel) {
              this.append({
                file,
                marker,
                store: isDotJS(file) ? STORE_BLOB : STORE_CONTENT,
                reason: configPath
              });
            } else {
              this.append({
                file,
                marker,
                store: STORE_CONTENT,
                reason: configPath
              });
            }
          }
        }
      }
    }
  }

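  // One-time activation of a package marker: merge the bundled dictionary
  // entry, queue its dependencies for resolution, register patches, warn
  // about deployFiles and decide whether the package counts as public.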
  async stepActivate (marker, derivatives) {
    if (!marker) assert(false);
    if (marker.activated) return;
    const { config, base } = marker;
    if (!config) assert(false);

    const { name } = config;
    if (name) {
      const d = this.dictionary[name];
      if (d) {
        if (typeof config.dependencies === 'object' &&
            typeof d.dependencies === 'object') {
          Object.assign(config.dependencies, d.dependencies);
          delete d.dependencies;
        }
        Object.assign(config, d);
        marker.hasDictionary = true;
      }
    }

    const { dependencies } = config;
    if (typeof dependencies === 'object') {
      for (const dependency in dependencies) {
        // it may be `undefined` - overridden
        // in dictionary (see publicsuffixlist)
        if (dependencies[dependency]) {
          derivatives.push({
            alias: dependency,
            aliasType: ALIAS_AS_RESOLVABLE,
            fromDependencies: true
          });

          derivatives.push({
            alias: dependency + '/package.json',
            aliasType: ALIAS_AS_RESOLVABLE,
            fromDependencies: true
          });
        }
      }
    }

    const pkgConfig = config.pkg;
    if (pkgConfig) {
      const { patches } = pkgConfig;
      if (patches) {
        for (const key in patches) {
          const p = path.join(base, key);
          this.patches[p] = patches[key];
        }
      }

      const { deployFiles } = pkgConfig;
      if (deployFiles) {
        marker.hasDeployFiles = true;
        for (const deployFile of deployFiles) {
          const type = deployFile[2] || 'file';
          log.warn(`Cannot include ${type} %1 into executable.`, [
            `The ${type} must be distributed with executable as %2.`,
            '%1: ' + path.relative(process.cwd(), path.join(base, deployFile[0])),
            '%2: path-to-executable/' + deployFile[1] ]);
        }
      }

      if (pkgConfig.log) {
        pkgConfig.log(log, { packagePath: base });
      }
    }

    await this.appendFilesFromConfig(marker);
    marker.public = isPublic(config);
    if (!marker.public && marker.toplevel) {
      marker.public = this.params.publicToplevel;
    }
    if (!marker.public && !marker.toplevel && this.params.publicPackages) {
      marker.public = (this.params.publicPackages[0] === '*') ||
        (this.params.publicPackages.indexOf(name) !== -1);
    }

    marker.activated = true;
    // assert no further work with config
    delete marker.config;
  }

  async stepRead (record) {
    let body;

    try {
      body = await fs.readFile(record.file);
    } catch (error) {
      log.error('Cannot read file, ' + error.code, record.file);
      throw wasReported(error);
    }

    record.body = body;
  }

  hasPatch (record) {
    const patch = this.patches[record.file];
    if (!patch) return;
    return true;
  }

  stepPatch (record) {
    const patch = this.patches[record.file];
    if (!patch) return;

    let body = record.body.toString('utf8');

    for (let i = 0; i < patch.length; i += 2) {
      if (typeof patch[i] === 'object') {
        if (patch[i].do === 'erase') {
          body = patch[i + 1];
        } else
        if (patch[i].do === 'prepend') {
          body = patch[i + 1] + body;
        } else
        if (patch[i].do === 'append') {
          body += patch[i + 1];
        }
      } else
      if (typeof patch[i] === 'string') {
        // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions
        // function escapeRegExp
        const esc = patch[i].replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
        const regexp = new RegExp(esc, 'g');
        body = body.replace(regexp, patch[i + 1]);
      }
    }

    record.body = body;
  }

  stepStrip (record) {
    let body = record.body.toString('utf8');

    if (/^\ufeff/.test(body)) {
      body = body.replace(/^\ufeff/, '');
    }
    if (/^#!/.test(body)) {
      body = body.replace(/^#![^\n]*\n/, '\n');
    }

    record.body = body;
  }

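  // Parse a javascript body and collect require()-like references
  // ("derivatives") via the detector visitors; non-literal or malformed
  // requires are only reported.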
  stepDetect (record, marker, derivatives) {
    const body = record.body;

    try {
      detector.detect(body, (node, trying) => {
        const { toplevel } = marker;
        let d = detector.visitor_SUCCESSFUL(node);
        if (d) {
          if (d.mustExclude) return false;
          d.mayExclude = d.mayExclude || trying;
          derivatives.push(d);
          return false;
        }
        d = detector.visitor_NONLITERAL(node);
        if (d) {
          if (d.mustExclude) return false;
          const debug = !toplevel || d.mayExclude || trying;
          const level = debug ? 'debug' : 'warn';
          log[level](`Cannot resolve '${d.alias}'`, [ record.file,
            'Dynamic require may fail at run time, because the requested file',
            'is unknown at compilation time and not included into executable.',
            'Use a string literal as an argument for \'require\', or leave it',
            'as is and specify the resolved file name in \'scripts\' option.' ]);
          return false;
        }
        d = detector.visitor_MALFORMED(node);
        if (d) {
          // there is no 'mustExclude'
          const debug = !toplevel || trying;
          const level = debug ? 'debug' : 'warn'; // there is no 'mayExclude'
          log[level](`Malformed requirement for '${d.alias}'`, [ record.file ]);
          return false;
        }
        d = detector.visitor_USESCWD(node);
        if (d) {
          // there is no 'mustExclude'
          const level = 'debug'; // there is no 'mayExclude'
          log[level](`Path.resolve(${d.alias}) is ambiguous`, [ record.file,
            'It resolves relatively to \'process.cwd\' by default, however',
            'you may want to use \'path.dirname(require.main.filename)\'' ]);
          return false;
        }
        return true; // can i go inside?
      });
    } catch (error) {
      log.error(error.message, record.file);
      throw wasReported(error);
    }
  }

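  // A relative alias is resolved against the directory of the requiring
  // file and, if it points to an existing file, stored as content.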
  async stepDerivatives_ALIAS_AS_RELATIVE (record, marker, derivative) { // eslint-disable-line camelcase
    const file = path.join(
      path.dirname(record.file),
      derivative.alias
    );

    let stat;

    try {
      stat = await fs.stat(file);
    } catch (error) {
      const { toplevel } = marker;
      const debug = !toplevel && error.code === 'ENOENT';
      const level = debug ? 'debug' : 'warn';
      log[level]('Cannot stat, ' + error.code, [ file,
        'The file was required from \'' + record.file + '\'' ]);
    }

    if (stat && stat.isFile()) {
      this.append({
        file,
        marker,
        store: STORE_CONTENT,
        reason: record.file
      });
    }
  }

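  // A resolvable alias (a require() target) is resolved the way Node's
  // resolver would, recording every package.json consulted along the way so
  // the relevant one can be shipped next to the resolved file.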
  async stepDerivatives_ALIAS_AS_RESOLVABLE (record, marker, derivative) { // eslint-disable-line camelcase
    const newPackages = [];

    const catchReadFile = (file) => {
      assert(isPackageJson(file), 'walker: ' +
        file + ' must be package.json');
      newPackages.push({ packageJson: file });
    };

    const catchPackageFilter = (config, base) => {
      const newPackage = newPackages[newPackages.length - 1];
      newPackage.marker = { config, configPath: newPackage.packageJson, base };
    };

    let newFile, failure;

    try {
      newFile = await follow(derivative.alias, {
        basedir: path.dirname(record.file),
        // default is extensions: ['.js'], but
        // it is not enough because 'typos.json'
        // is not taken in require('./typos')
        // in 'normalize-package-data/lib/fixer.js'
        extensions: [ '.js', '.json', '.node' ],
        readFile: catchReadFile,
        packageFilter: catchPackageFilter
      });
    } catch (error) {
      failure = error;
    }

    if (failure) {
      const { toplevel } = marker;
      const mainNotFound = newPackages.length > 0 && !newPackages[0].marker.config.main;
      const debug = !toplevel || derivative.mayExclude ||
        (mainNotFound && derivative.fromDependencies);
      const level = debug ? 'debug' : 'warn';
      if (mainNotFound) {
        const message = 'Entry \'main\' not found in %1';
        log[level](message, [ '%1: ' + newPackages[0].packageJson, '%2: ' + record.file ]);
      } else {
        log[level](failure.message, [ '%1: ' + record.file ]);
      }
      return;
    }

    let newPackageForNewRecords;

    for (const newPackage of newPackages) {
      let newFile2;

      try {
        newFile2 = await follow(derivative.alias, {
          basedir: path.dirname(record.file),
          extensions: [ '.js', '.json', '.node' ],
          ignoreFile: newPackage.packageJson
        });
      } catch (_) {
        // not setting is enough
      }

      if (newFile2 !== newFile) {
        newPackageForNewRecords = newPackage;
        break;
      }
    }

    if (newPackageForNewRecords) {
      this.append({
        file: newPackageForNewRecords.packageJson,
        marker: newPackageForNewRecords.marker,
        store: STORE_CONTENT,
        reason: record.file
      });
    }

    this.append({
      file: newFile,
      marker: newPackageForNewRecords ? newPackageForNewRecords.marker : marker,
      store: STORE_BLOB,
      reason: record.file
    });
  }

  async stepDerivatives (record, marker, derivatives) {
    for (const derivative of derivatives) {
      if (natives[derivative.alias]) continue;

      if (derivative.aliasType === ALIAS_AS_RELATIVE) {
        await this.stepDerivatives_ALIAS_AS_RELATIVE(record, marker, derivative);
      } else
      if (derivative.aliasType === ALIAS_AS_RESOLVABLE) {
        await this.stepDerivatives_ALIAS_AS_RESOLVABLE(record, marker, derivative);
      } else {
        assert(false, 'walker: unknown aliasType ' + derivative.aliasType);
      }
    }
  }

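  // sodium-native support: for a given target, pick the matching
  // prebuilds/<platform>-<arch> directory and queue its native module
  // (node.napi.node or node.abiNN.node) plus the libsodium shared library
  // as content, grouped by platform-arch.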
  async stepAddPrebuildsForTarget (record, marker, target) {
    const majorRange = target.nodeRange.replace(/\..*/, '');
    const prebuildPlatform =
      target.platform === 'linux' ? 'linux' :
      target.platform === 'macos' ? 'darwin' :
      null;
    if (!prebuildPlatform) {
      throw new Error('Unknown platform for prebuilds: ' + target.platform);
    }
    const prebuildArch =
      target.arch === 'x64' ? 'x64' :
      target.arch === 'x86' ? 'ia32' :
      target.arch === 'armv7' ? 'arm' :
      target.arch === 'arm64' ? 'arm64' :
      null;
    if (!prebuildArch) {
      throw new Error('Unknown architecture for prebuilds: ' + target.arch);
    }
    const platformArch = prebuildPlatform + '-' + prebuildArch;
    const dir = path.resolve(record.file, '../prebuilds', platformArch);
    let files;
    try { files = fs.readdirSync(dir); }
    catch (e) { return; }

    // sodium-native needs *.node and libsodium.so*/dylib/dll
    var foundNativeModule, foundSharedLib;
    var filesToAdd = files.filter(function (file) {
      if (file === 'libsodium.dylib'
        || /^libsodium\.so\.\d+$/.test(file)) {
        return foundSharedLib = true;
      }
      var m;
      if (file === 'node.napi.node'
        || ((m = /^node\.abi(\d+)\.node$/.exec(file))
          && abiToNodeRange(m[1]) === majorRange)) {
        return foundNativeModule = true;
      }
    });
    if (!foundNativeModule) {
      throw new Error('Missing native module for '
        + platformArch + ' (' + majorRange + ')');
    }
    if (!foundSharedLib) {
      throw new Error('Missing shared library for '
        + platformArch + ' (' + majorRange + ')');
    }
    filesToAdd.map(function (name) {
      return path.join(dir, name);
    }).filter(function (file) {
      return fs.existsSync(file);
    }).map((file) => {
      this.append({
        file: file,
        marker,
        store: STORE_CONTENT,
        reason: 'prebuilds',
        blobGroup: platformArch
      });
    });
  }

  async stepAddPrebuilds (record, marker) {
    for (const target of this.targets) {
      await this.stepAddPrebuildsForTarget(record, marker, target);
    }
  }

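  // Main handler for BLOB/CONTENT tasks: stats the file, activates its
  // package, resolves whatever it requires, and reads/patches/strips the
  // body when bytecode is going to be generated.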
  async step_STORE_ANY (record, marker, store, blobGroup) { // eslint-disable-line camelcase
    if (record[store] !== undefined) return;
    record[store] = false; // default is discard
    record.blobGroup = blobGroup;

    this.append({
      file: record.file,
      store: STORE_STAT
    });

    // detect sodium-native
    if (store === STORE_BLOB
      && path.basename(record.file) === 'index.js'
      && path.basename(path.dirname(record.file)) === 'sodium-native'
      && await fs.exists(path.resolve(record.file, '../prebuilds'))) {
      if (this.sodiumNative) {
        log.error('sodium-native found multiple times:',
          [ '%1: ' + this.sodiumNative, '%2: ' + record.file ]);
      }
      this.sodiumNative = record.file;
      await this.stepAddPrebuilds(record, marker);
    }

    const derivatives1 = [];
    await this.stepActivate(marker, derivatives1);
    await this.stepDerivatives(record, marker, derivatives1);
    if (store === STORE_BLOB) {
      if (unlikelyJavascript(record.file) || isDotNODE(record.file)) {
        this.append({
          file: record.file,
          marker,
          store: STORE_CONTENT
        });
        return; // discard
      }

      if (marker.public ||
          marker.hasDictionary) {
        this.append({
          file: record.file,
          marker,
          store: STORE_CONTENT
        });
      }
    }

    if (store === STORE_BLOB ||
        this.hasPatch(record)) {
      if (!record.body) {
        await this.stepRead(record);
        this.stepPatch(record);
        if (store === STORE_BLOB) {
          this.stepStrip(record);
        }
      }

      if (store === STORE_BLOB) {
        const derivatives2 = [];
        this.stepDetect(record, marker, derivatives2);
        await this.stepDerivatives(record, marker, derivatives2);
      }
    }

    record[store] = true;
  }

  step_STORE_LINKS (record, data) { // eslint-disable-line camelcase
    if (record[STORE_LINKS]) {
      record[STORE_LINKS].push(data);
      return;
    }

    record[STORE_LINKS] = [ data ];

    this.append({
      file: record.file,
      store: STORE_STAT
    });
  }

  async step_STORE_STAT (record) { // eslint-disable-line camelcase
    if (record[STORE_STAT]) return;

    try {
      record[STORE_STAT] = await fs.stat(record.file);
    } catch (error) {
      log.error('Cannot stat, ' + error.code, record.file);
      throw wasReported(error);
    }

    if (path.dirname(record.file) !== record.file) { // root directory
      this.append({
        file: path.dirname(record.file),
        store: STORE_LINKS,
        data: path.basename(record.file)
      });
    }
  }

  async step (task) {
    const { file, store, data } = task;
    const record = this.records[file];
    if (store === STORE_BLOB ||
        store === STORE_CONTENT) {
      await this.step_STORE_ANY(record, task.marker, store, task.blobGroup);
    } else
    if (store === STORE_LINKS) {
      this.step_STORE_LINKS(record, data);
    } else
    if (store === STORE_STAT) {
      await this.step_STORE_STAT(record);
    } else {
      assert(false, 'walker: unknown store ' + store);
    }
  }

  async readDictionary () {
    const dd = path.join(__dirname, '../dictionary');
    const files = await fs.readdir(dd);

    for (const file of files) {
      if (/\.js$/.test(file)) {
        const name = file.slice(0, -3);
        const config = require(path.join(dd, file));
        this.dictionary[name] = config;
      }
    }
  }

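  // Entry point: seed the queue with the entrypoint and any extra
  // entrypoints (bytecode) plus the optional addition (content), then drain
  // the queue one task at a time in FIFO order.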
  async start (marker, entrypoint, addition, params, targets, otherEntrypoints) {
    this.tasks = [];
    this.records = {};
    this.dictionary = {};
    this.patches = {};
    this.params = params;
    this.targets = targets;
    this.otherEntrypoints = otherEntrypoints;

    await this.readDictionary();

    this.append({
      file: entrypoint,
      marker,
      store: STORE_BLOB
    });

    if (addition) {
      this.append({
        file: addition,
        marker,
        store: STORE_CONTENT
      });
    }

    if (otherEntrypoints) {
      for (const name in otherEntrypoints) {
        const file = otherEntrypoints[name];
        this.append({
          file,
          marker,
          store: STORE_BLOB
        });
      }
    }

    const tasks = this.tasks;
    for (let i = 0; i < tasks.length; i += 1) {
      // NO MULTIPLE WORKERS! THIS WILL LEAD TO NON-DETERMINISTIC
      // ORDER. one-by-one fifo is the only way to iterate tasks
      await this.step(tasks[i]);
    }

    return {
      records: this.records,
      entrypoint: normalizePath(entrypoint)
    };
  }
}

export default async function (...args) {
  const w = new Walker();
  return await w.start(...args);
}