Files: 65191f9a9664f013ebe726bb95a1a54b086e1bd5 / lib / walker.js
18752 bytesRaw
1 | /* eslint-disable require-atomic-updates */ |
2 | |
3 | import { ALIAS_AS_RELATIVE, ALIAS_AS_RESOLVABLE, |
4 | STORE_BLOB, STORE_CONTENT, STORE_LINKS, STORE_STAT, |
5 | isDotJS, isDotJSON, isDotNODE, isPackageJson, normalizePath |
6 | } from '../prelude/common.js'; |
7 | |
8 | import { follow, natives } from './follow.js'; |
9 | import { log, wasReported } from './log.js'; |
10 | import assert from 'assert'; |
11 | import detector from './detector.js'; |
12 | import fs from 'fs-extra'; |
13 | import globby from 'globby'; |
14 | import path from 'path'; |
15 | |
// Cached platform flag; upon() uses it to convert backslashes in
// joined paths to forward slashes so globby patterns match on Windows.
const win32 = process.platform === 'win32';
17 | |
// True when the file's extension marks content that is almost
// certainly not JavaScript source (stylesheet, markup, JSON data).
function unlikelyJavascript (file) {
  const nonJsExtensions = [ '.css', '.html', '.json' ];
  return nonJsExtensions.indexOf(path.extname(file)) !== -1;
}
21 | |
// Decides whether a package's license makes it "public" (FOSS), in
// which case its sources may be shipped as readable content. Handles
// the many historical shapes of the `license`/`licenses` fields:
// plain string, `{ type }` object, array of either, SPDX expressions
// in parentheses, and ad-hoc separators (or/and, slash, comma).
function isPublic (config) {
  if (config.private) return false;

  let { license, licenses } = config;
  if (licenses) license = licenses;
  if (license) license = license.type || license;
  if (Array.isArray(license)) {
    license = license.map((item) => String(item.type || item)).join(',');
  }
  if (!license) return false;

  // Unwrap a parenthesized SPDX expression like "(MIT OR X11)".
  if (/^\(/.test(license)) license = license.slice(1);
  if (/\)$/.test(license)) license = license.slice(0, -1);
  license = license.toLowerCase();

  // Split on every connective seen in the wild and test each token.
  const candidates = [].concat(
    license.split(' or '),
    license.split(' and '),
    license.split('/'),
    license.split(',')
  );

  const foss = [ 'isc', 'mit', 'apache-2.0', 'apache 2.0',
    'public domain', 'bsd', 'bsd-2-clause', 'bsd-3-clause', 'wtfpl',
    'cc-by-3.0', 'x11', 'artistic-2.0', 'gplv3', 'mpl', 'mplv2.0',
    'unlicense', 'apache license 2.0', 'zlib', 'mpl-2.0', 'nasa-1.3',
    'apache license, version 2.0', 'lgpl-2.1+', 'cc0-1.0' ];

  return candidates.some((candidate) => foss.indexOf(candidate) >= 0);
}
54 | |
// Joins one config glob entry onto `base`, preserving a leading '!'
// negation marker and normalizing separators to '/' on Windows so
// globby receives POSIX-style patterns. Throws (via wasReported) on
// non-string entries.
function upon (p, base) {
  if (typeof p !== 'string') {
    throw wasReported(
      'Config items must be strings. See examples'
    );
  }

  const negated = p.charAt(0) === '!';
  const pattern = negated ? p.slice(1) : p;

  let joined = path.join(base, pattern);
  if (win32) {
    joined = joined.replace(/\\/g, '/');
  }

  return negated ? '!' + joined : joined;
}
75 | |
// Expands a list of glob patterns into concrete file paths.
// `dot: true` makes dotfiles (e.g. '.babelrc') match as well.
function collect (ps) {
  const options = { dot: true };
  return globby.sync(ps, options);
}
81 | |
// Normalizes a config 'files'/'scripts'/'assets' value (string or
// array of glob patterns) into a flat list of matched file paths,
// resolved relative to `base`.
function expandFiles (efs, base) {
  const patterns = Array.isArray(efs) ? efs : [ efs ];
  return collect(patterns.map((p) => upon(p, base)));
}
91 | |
// Walks the dependency graph starting from the entrypoint, producing
// a `records` map that tells the packer what to store for each file:
// bytecode (STORE_BLOB), raw content (STORE_CONTENT), directory links
// (STORE_LINKS) and stat info (STORE_STAT). Tasks are processed
// strictly one-by-one (see start()) to keep the output deterministic.
class Walker {
  // Registers the task's (normalized) file in `this.records` once;
  // later appends for the same path are no-ops here.
  appendRecord (task) {
    const { file } = task;
    if (this.records[file]) return;
    this.records[file] = { file };
  }

  // Normalizes the path, ensures a record exists, and queues the task
  // for the sequential step loop, logging why it was added.
  append (task) {
    task.file = normalizePath(task.file);
    this.appendRecord(task);
    this.tasks.push(task);

    const what = {
      [STORE_BLOB]: 'Bytecode of',
      [STORE_CONTENT]: 'Content of',
      [STORE_LINKS]: 'Directory',
      [STORE_STAT]: 'Stat info of'
    }[task.store];
    if (task.reason) {
      log.debug(what + ' %1 is added to queue. It was required from %2',
        [ '%1: ' + task.file, '%2: ' + task.reason ]);
    } else {
      log.debug(what + ' %1 is added to queue', [ '%1: ' + task.file ]);
    }
  }

  // Queues files declared in the package's config: the `pkg` section
  // ('scripts' as bytecode, 'assets' as content) or, absent that, the
  // npm 'files' list.
  async appendFilesFromConfig (marker) {
    const { config, configPath, base } = marker;
    const pkgConfig = config.pkg;

    if (pkgConfig) {
      let { scripts } = pkgConfig;

      if (scripts) {
        scripts = expandFiles(scripts, base);
        for (const script of scripts) {
          const stat = await fs.stat(script);
          if (stat.isFile()) {
            // Fixed: was `... & !isDotNODE(script)` — an accidental
            // bitwise AND; use logical && so the check short-circuits.
            if (!isDotJS(script) && !isDotJSON(script) && !isDotNODE(script)) {
              log.warn('Non-javascript file is specified in \'scripts\'.', [
                'Pkg will probably fail to parse. Specify *.js in glob.',
                script ]);
            }

            this.append({
              file: script,
              marker,
              store: STORE_BLOB,
              reason: configPath
            });
          }
        }
      }

      let { assets } = pkgConfig;

      if (assets) {
        assets = expandFiles(assets, base);
        for (const asset of assets) {
          const stat = await fs.stat(asset);
          if (stat.isFile()) {
            this.append({
              file: asset,
              marker,
              store: STORE_CONTENT,
              reason: configPath
            });
          }
        }
      }
    } else {
      let { files } = config;

      if (files) {
        files = expandFiles(files, base);
        for (const file of files) {
          const stat = await fs.stat(file);
          if (stat.isFile()) {
            // 1) remove sources of top-level(!) package 'files' i.e. ship as BLOB
            // 2) non-source (non-js) files of top-level package are shipped as CONTENT
            // 3) parsing some js 'files' of non-top-level packages fails, hence all CONTENT
            if (marker.toplevel) {
              this.append({
                file,
                marker,
                store: isDotJS(file) ? STORE_BLOB : STORE_CONTENT,
                reason: configPath
              });
            } else {
              this.append({
                file,
                marker,
                store: STORE_CONTENT,
                reason: configPath
              });
            }
          }
        }
      }
    }
  }

  // One-time activation of a package marker: merges dictionary
  // overrides into its config, queues dependency aliases as
  // derivatives, installs patches/deployFiles from the `pkg` section,
  // queues config-listed files, and computes `marker.public`.
  async stepActivate (marker, derivatives) {
    if (!marker) assert(false);
    if (marker.activated) return;
    const { config, base } = marker;
    if (!config) assert(false);

    const { name } = config;
    if (name) {
      // Apply built-in dictionary overrides for known packages.
      const d = this.dictionary[name];
      if (d) {
        if (typeof config.dependencies === 'object' &&
            typeof d.dependencies === 'object') {
          Object.assign(config.dependencies, d.dependencies);
          delete d.dependencies;
        }
        Object.assign(config, d);
        marker.hasDictionary = true;
      }
    }

    const { dependencies } = config;
    if (typeof dependencies === 'object') {
      for (const dependency in dependencies) {
        // it may be `undefined` - overridden
        // in dictionary (see publicsuffixlist)
        if (dependencies[dependency]) {
          derivatives.push({
            alias: dependency,
            aliasType: ALIAS_AS_RESOLVABLE,
            fromDependencies: true
          });

          derivatives.push({
            alias: dependency + '/package.json',
            aliasType: ALIAS_AS_RESOLVABLE,
            fromDependencies: true
          });
        }
      }
    }

    const pkgConfig = config.pkg;
    if (pkgConfig) {
      const { patches } = pkgConfig;
      if (patches) {
        // Patch keys are paths relative to the package base.
        for (const key in patches) {
          const p = path.join(base, key);
          this.patches[p] = patches[key];
        }
      }

      const { deployFiles } = pkgConfig;
      if (deployFiles) {
        marker.hasDeployFiles = true;
        for (const deployFile of deployFiles) {
          const type = deployFile[2] || 'file';
          log.warn(`Cannot include ${type} %1 into executable.`, [
            `The ${type} must be distributed with executable as %2.`,
            '%1: ' + path.relative(process.cwd(), path.join(base, deployFile[0])),
            '%2: path-to-executable/' + deployFile[1] ]);
        }
      }

      if (pkgConfig.log) {
        pkgConfig.log(log, { packagePath: base });
      }
    }

    await this.appendFilesFromConfig(marker);

    // 'public' controls whether sources may be shipped readable
    // (CONTENT) in addition to bytecode; see step_STORE_ANY.
    marker.public = isPublic(config);
    if (!marker.public && marker.toplevel) {
      marker.public = this.params.publicToplevel;
    }
    if (!marker.public && !marker.toplevel && this.params.publicPackages) {
      marker.public = (this.params.publicPackages[0] === '*') ||
        (this.params.publicPackages.indexOf(name) !== -1);
    }

    marker.activated = true;
    // assert no further work with config
    delete marker.config;
  }

  // Reads the file into `record.body`, reporting a readable error.
  async stepRead (record) {
    let body;

    try {
      body = await fs.readFile(record.file);
    } catch (error) {
      log.error('Cannot read file, ' + error.code, record.file);
      throw wasReported(error);
    }

    record.body = body;
  }

  // True when a config-supplied patch exists for this file.
  hasPatch (record) {
    return Boolean(this.patches[record.file]);
  }

  // Applies the config-supplied patch to `record.body`. Patches are
  // flat [what, replacement, ...] pairs; `what` is either a literal
  // string (replaced globally) or a directive object
  // ({ do: 'erase' | 'prepend' | 'append' }).
  stepPatch (record) {
    const patch = this.patches[record.file];
    if (!patch) return;

    let body = record.body.toString('utf8');

    for (let i = 0; i < patch.length; i += 2) {
      if (typeof patch[i] === 'object') {
        if (patch[i].do === 'erase') {
          body = patch[i + 1];
        } else
        if (patch[i].do === 'prepend') {
          body = patch[i + 1] + body;
        } else
        if (patch[i].do === 'append') {
          body += patch[i + 1];
        }
      } else
      if (typeof patch[i] === 'string') {
        // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions
        // function escapeRegExp
        const esc = patch[i].replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
        const regexp = new RegExp(esc, 'g');
        body = body.replace(regexp, patch[i + 1]);
      }
    }

    record.body = body;
  }

  // Removes a BOM and a shebang line so the body can be compiled.
  stepStrip (record) {
    let body = record.body.toString('utf8');

    if (/^\ufeff/.test(body)) {
      body = body.replace(/^\ufeff/, '');
    }
    if (/^#!/.test(body)) {
      body = body.replace(/^#![^\n]*\n/, '\n');
    }

    record.body = body;
  }

  // Parses the body with the detector, collecting require()-like
  // references into `derivatives` and warning about the dynamic or
  // malformed requirements that cannot be resolved at compile time.
  stepDetect (record, marker, derivatives) {
    const body = record.body;

    try {
      detector.detect(body, (node, trying) => {
        const { toplevel } = marker;
        let d = detector.visitor_SUCCESSFUL(node);
        if (d) {
          if (d.mustExclude) return false;
          d.mayExclude = d.mayExclude || trying;
          derivatives.push(d);
          return false;
        }
        d = detector.visitor_NONLITERAL(node);
        if (d) {
          if (d.mustExclude) return false;
          const debug = !toplevel || d.mayExclude || trying;
          const level = debug ? 'debug' : 'warn';
          log[level](`Cannot resolve '${d.alias}'`, [ record.file,
            'Dynamic require may fail at run time, because the requested file',
            'is unknown at compilation time and not included into executable.',
            'Use a string literal as an argument for \'require\', or leave it',
            'as is and specify the resolved file name in \'scripts\' option.' ]);
          return false;
        }
        d = detector.visitor_MALFORMED(node);
        if (d) {
          // there is no 'mustExclude'
          const debug = !toplevel || trying;
          const level = debug ? 'debug' : 'warn'; // there is no 'mayExclude'
          log[level](`Malformed requirement for '${d.alias}'`, [ record.file ]);
          return false;
        }
        d = detector.visitor_USESCWD(node);
        if (d) {
          // there is no 'mustExclude'
          const level = 'debug'; // there is no 'mayExclude'
          log[level](`Path.resolve(${d.alias}) is ambiguous`, [ record.file,
            'It resolves relatively to \'process.cwd\' by default, however',
            'you may want to use \'path.dirname(require.main.filename)\'' ]);
          return false;
        }
        return true; // can i go inside?
      });
    } catch (error) {
      log.error(error.message, record.file);
      throw wasReported(error);
    }
  }

  // Resolves a './relative' alias against the requiring file's
  // directory and, when it exists as a file, queues its content.
  async stepDerivatives_ALIAS_AS_RELATIVE (record, marker, derivative) { // eslint-disable-line camelcase
    const file = path.join(
      path.dirname(record.file),
      derivative.alias
    );

    let stat;

    try {
      stat = await fs.stat(file);
    } catch (error) {
      const { toplevel } = marker;
      const debug = !toplevel && error.code === 'ENOENT';
      const level = debug ? 'debug' : 'warn';
      log[level]('Cannot stat, ' + error.code, [ file,
        'The file was required from \'' + record.file + '\'' ]);
    }

    if (stat && stat.isFile()) {
      this.append({
        file,
        marker,
        store: STORE_CONTENT,
        reason: record.file
      });
    }
  }

  // Resolves a bare-specifier alias via node resolution (follow),
  // detecting the package.json that drove the resolution so the new
  // records get the right marker. Queues the resolved file as BLOB
  // and the governing package.json (if any) as CONTENT.
  async stepDerivatives_ALIAS_AS_RESOLVABLE (record, marker, derivative) { // eslint-disable-line camelcase
    const newPackages = [];

    const catchReadFile = (file) => {
      assert(isPackageJson(file), 'walker: ' +
        file + ' must be package.json');
      newPackages.push({ packageJson: file });
    };

    const catchPackageFilter = (config, base) => {
      const newPackage = newPackages[newPackages.length - 1];
      newPackage.marker = { config, configPath: newPackage.packageJson, base };
    };

    let newFile, failure;

    try {
      newFile = await follow(derivative.alias, {
        basedir: path.dirname(record.file),
        // default is extensions: ['.js'], but
        // it is not enough because 'typos.json'
        // is not taken in require('./typos')
        // in 'normalize-package-data/lib/fixer.js'
        extensions: [ '.js', '.json', '.node' ],
        readFile: catchReadFile,
        packageFilter: catchPackageFilter
      });
    } catch (error) {
      failure = error;
    }

    if (failure) {
      const { toplevel } = marker;
      const mainNotFound = newPackages.length > 0 && !newPackages[0].marker.config.main;
      const debug = !toplevel || derivative.mayExclude ||
        (mainNotFound && derivative.fromDependencies);
      const level = debug ? 'debug' : 'warn';
      if (mainNotFound) {
        const message = 'Entry \'main\' not found in %1';
        log[level](message, [ '%1: ' + newPackages[0].packageJson, '%2: ' + record.file ]);
      } else {
        log[level](failure.message, [ '%1: ' + record.file ]);
      }
      return;
    }

    // Find which package.json actually changed the resolution: re-run
    // resolution ignoring each candidate; the first one whose absence
    // yields a different file governs the new records.
    let newPackageForNewRecords;

    for (const newPackage of newPackages) {
      let newFile2;

      try {
        newFile2 = await follow(derivative.alias, {
          basedir: path.dirname(record.file),
          extensions: [ '.js', '.json', '.node' ],
          ignoreFile: newPackage.packageJson
        });
      } catch (_) {
        // not setting is enough
      }

      if (newFile2 !== newFile) {
        newPackageForNewRecords = newPackage;
        break;
      }
    }

    if (newPackageForNewRecords) {
      this.append({
        file: newPackageForNewRecords.packageJson,
        marker: newPackageForNewRecords.marker,
        store: STORE_CONTENT,
        reason: record.file
      });
    }

    this.append({
      file: newFile,
      marker: newPackageForNewRecords ? newPackageForNewRecords.marker : marker,
      store: STORE_BLOB,
      reason: record.file
    });
  }

  // Dispatches each collected derivative to the matching resolver,
  // skipping Node built-in modules.
  async stepDerivatives (record, marker, derivatives) {
    for (const derivative of derivatives) {
      if (natives[derivative.alias]) continue;

      if (derivative.aliasType === ALIAS_AS_RELATIVE) {
        await this.stepDerivatives_ALIAS_AS_RELATIVE(record, marker, derivative);
      } else
      if (derivative.aliasType === ALIAS_AS_RESOLVABLE) {
        await this.stepDerivatives_ALIAS_AS_RESOLVABLE(record, marker, derivative);
      } else {
        assert(false, 'walker: unknown aliasType ' + derivative.aliasType);
      }
    }
  }

  // Handles STORE_BLOB / STORE_CONTENT tasks: activates the marker,
  // resolves config derivatives and (for BLOB) reads, patches, strips
  // and parses the file to queue everything it requires. Marks the
  // store done on the record; re-entry for the same store is a no-op.
  async step_STORE_ANY (record, marker, store) { // eslint-disable-line camelcase
    if (record[store] !== undefined) return;
    record[store] = false; // default is discard

    this.append({
      file: record.file,
      store: STORE_STAT
    });

    const derivatives1 = [];
    await this.stepActivate(marker, derivatives1);
    await this.stepDerivatives(record, marker, derivatives1);
    if (store === STORE_BLOB) {
      // Non-compilable files fall back to content-only storage.
      if (unlikelyJavascript(record.file) || isDotNODE(record.file)) {
        this.append({
          file: record.file,
          marker,
          store: STORE_CONTENT
        });
        return; // discard
      }

      // Public or dictionary-patched packages also ship sources.
      if (marker.public ||
          marker.hasDictionary) {
        this.append({
          file: record.file,
          marker,
          store: STORE_CONTENT
        });
      }
    }

    if (store === STORE_BLOB ||
        this.hasPatch(record)) {
      if (!record.body) {
        await this.stepRead(record);
        this.stepPatch(record);
        if (store === STORE_BLOB) {
          this.stepStrip(record);
        }
      }

      if (store === STORE_BLOB) {
        const derivatives2 = [];
        this.stepDetect(record, marker, derivatives2);
        await this.stepDerivatives(record, marker, derivatives2);
      }
    }

    record[store] = true;
  }

  // Handles STORE_LINKS tasks: accumulates directory entries and, on
  // first sight of a directory, queues its stat info.
  step_STORE_LINKS (record, data) { // eslint-disable-line camelcase
    if (record[STORE_LINKS]) {
      record[STORE_LINKS].push(data);
      return;
    }

    record[STORE_LINKS] = [ data ];

    this.append({
      file: record.file,
      store: STORE_STAT
    });
  }

  // Handles STORE_STAT tasks: stats the file once and walks up,
  // registering this entry as a link of its parent directory.
  async step_STORE_STAT (record) { // eslint-disable-line camelcase
    if (record[STORE_STAT]) return;

    try {
      record[STORE_STAT] = await fs.stat(record.file);
    } catch (error) {
      log.error('Cannot stat, ' + error.code, record.file);
      throw wasReported(error);
    }

    if (path.dirname(record.file) !== record.file) { // root directory
      this.append({
        file: path.dirname(record.file),
        store: STORE_LINKS,
        data: path.basename(record.file)
      });
    }
  }

  // Dispatches one queued task to its store-specific handler.
  async step (task) {
    const { file, store, data } = task;
    const record = this.records[file];
    if (store === STORE_BLOB ||
        store === STORE_CONTENT) {
      await this.step_STORE_ANY(record, task.marker, store);
    } else
    if (store === STORE_LINKS) {
      this.step_STORE_LINKS(record, data);
    } else
    if (store === STORE_STAT) {
      await this.step_STORE_STAT(record);
    } else {
      assert(false, 'walker: unknown store ' + store);
    }
  }

  // Loads per-package config overrides from '../dictionary/*.js'
  // into `this.dictionary`, keyed by package name.
  async readDictionary () {
    const dd = path.join(__dirname, '../dictionary');
    const files = await fs.readdir(dd);

    for (const file of files) {
      if (/\.js$/.test(file)) {
        const name = file.slice(0, -3);
        const config = require(path.join(dd, file));
        this.dictionary[name] = config;
      }
    }
  }

  // Entry point: seeds the queue with the entrypoint (and optional
  // addition), then drains it strictly in FIFO order. Returns the
  // accumulated records plus the normalized entrypoint path.
  async start (marker, entrypoint, addition, params) {
    this.tasks = [];
    this.records = {};
    this.dictionary = {};
    this.patches = {};
    this.params = params;

    await this.readDictionary();

    this.append({
      file: entrypoint,
      marker,
      store: STORE_BLOB
    });

    if (addition) {
      this.append({
        file: addition,
        marker,
        store: STORE_CONTENT
      });
    }

    const tasks = this.tasks;
    for (let i = 0; i < tasks.length; i += 1) {
      // NO MULTIPLE WORKERS! THIS WILL LEAD TO NON-DETERMINISTIC
      // ORDER. one-by-one fifo is the only way to iterate tasks
      await this.step(tasks[i]);
    }

    return {
      records: this.records,
      entrypoint: normalizePath(entrypoint)
    };
  }
}
667 | |
// Public entry: run a fresh Walker over (marker, entrypoint,
// addition, params) and resolve with its records.
export default async function (...args) {
  const walker = new Walker();
  return await walker.start(...args);
}
672 |
Built with git-ssb-web