git ssb

cel / ssb-npm-registry

Commit 4f27567900a7b5b821bccc74e95bfc96c8233159

Check in dependencies

cel committed on 5/17/2020, 11:40:58 PM
Parent: aaa1df0018178db95a6bc4d24a4db3ec9f8975bc

Files changed

README.md (changed)
node_modules/.bin/semver (added)
node_modules/asyncmemo/README.md (added)
node_modules/asyncmemo/index.js (added)
node_modules/asyncmemo/package.json (added)
node_modules/asyncmemo/test.js (added)
node_modules/bl/.jshintrc (added)
node_modules/bl/.travis.yml (added)
node_modules/bl/LICENSE.md (added)
node_modules/bl/README.md (added)
node_modules/bl/bl.js (added)
node_modules/bl/package.json (added)
node_modules/bl/test/indexOf.js (added)
node_modules/bl/test/test.js (added)
node_modules/end-of-stream/LICENSE (added)
node_modules/end-of-stream/README.md (added)
node_modules/end-of-stream/index.js (added)
node_modules/end-of-stream/package.json (added)
node_modules/fs-constants/LICENSE (added)
node_modules/fs-constants/README.md (added)
node_modules/fs-constants/browser.js (added)
node_modules/fs-constants/index.js (added)
node_modules/fs-constants/package.json (added)
node_modules/hashlru/.travis.yml (added)
node_modules/hashlru/LICENSE (added)
node_modules/hashlru/README.md (added)
node_modules/hashlru/bench.js (added)
node_modules/hashlru/index.d.ts (added)
node_modules/hashlru/index.js (added)
node_modules/hashlru/package.json (added)
node_modules/hashlru/test/test.js (added)
node_modules/inherits/LICENSE (added)
node_modules/inherits/README.md (added)
node_modules/inherits/inherits.js (added)
node_modules/inherits/inherits_browser.js (added)
node_modules/inherits/package.json (added)
node_modules/looper/.travis.yml (added)
node_modules/looper/LICENSE (added)
node_modules/looper/README.md (added)
node_modules/looper/index.js (added)
node_modules/looper/package.json (added)
node_modules/looper/test/test.js (added)
node_modules/multicb/README.md (added)
node_modules/multicb/index.js (added)
node_modules/multicb/package.json (added)
node_modules/multicb/test/errors-multi.js (added)
node_modules/multicb/test/errors.js (added)
node_modules/multicb/test/multicb.js (added)
node_modules/multicb/test/pluck.js (added)
node_modules/multicb/test/spread.js (added)
node_modules/once/LICENSE (added)
node_modules/once/README.md (added)
node_modules/once/once.js (added)
node_modules/once/package.json (added)
node_modules/pull-cat/.npmignore (added)
node_modules/pull-cat/.travis.yml (added)
node_modules/pull-cat/LICENSE (added)
node_modules/pull-cat/README.md (added)
node_modules/pull-cat/index.js (added)
node_modules/pull-cat/package.json (added)
node_modules/pull-cat/test/index.js (added)
node_modules/pull-file/.npmignore (added)
node_modules/pull-file/.travis.yml (added)
node_modules/pull-file/README.md (added)
node_modules/pull-file/examples/bench.js (added)
node_modules/pull-file/examples/ipsum-chunks.js (added)
node_modules/pull-file/examples/node-rate.js (added)
node_modules/pull-file/examples/rate.js (added)
node_modules/pull-file/index.js (added)
node_modules/pull-file/package.json (added)
node_modules/pull-file/test/append.js (added)
node_modules/pull-file/test/assets/ipsum.txt (added)
node_modules/pull-file/test/assets/test.txt (added)
node_modules/pull-file/test/explicit-buffer.js (added)
node_modules/pull-file/test/fd.js (added)
node_modules/pull-file/test/largefile.js (added)
node_modules/pull-file/test/partial.js (added)
node_modules/pull-file/test/small.js (added)
node_modules/pull-file/test/terminate-read.js (added)
node_modules/pull-hash/README.md (added)
node_modules/pull-hash/ext/git.js (added)
node_modules/pull-hash/ext/ssb.js (added)
node_modules/pull-hash/index.js (added)
node_modules/pull-hash/package.json (added)
node_modules/pull-hash/test.js (added)
node_modules/pull-stream/.travis.yml (added)
node_modules/pull-stream/LICENSE (added)
node_modules/pull-stream/README.md (added)
node_modules/pull-stream/benchmarks/node.js (added)
node_modules/pull-stream/benchmarks/pull.js (added)
node_modules/pull-stream/docs/examples.md (added)
node_modules/pull-stream/docs/glossary.md (added)
node_modules/pull-stream/docs/pull.md (added)
node_modules/pull-stream/docs/sinks/collect.md (added)
node_modules/pull-stream/docs/sinks/concat.md (added)
node_modules/pull-stream/docs/sinks/drain.md (added)
node_modules/pull-stream/docs/sinks/index.md (added)
node_modules/pull-stream/docs/sinks/log.md (added)
node_modules/pull-stream/docs/sinks/on-end.md (added)
node_modules/pull-stream/docs/sinks/reduce.md (added)
node_modules/pull-stream/docs/sources/count.md (added)
node_modules/pull-stream/docs/sources/empty.md (added)
node_modules/pull-stream/docs/sources/error.md (added)
node_modules/pull-stream/docs/sources/index.md (added)
node_modules/pull-stream/docs/sources/infinite.md (added)
node_modules/pull-stream/docs/sources/keys.md (added)
node_modules/pull-stream/docs/sources/once.md (added)
node_modules/pull-stream/docs/sources/values.md (added)
node_modules/pull-stream/docs/spec.md (added)
node_modules/pull-stream/docs/throughs/async-map.md (added)
node_modules/pull-stream/docs/throughs/filter-not.md (added)
node_modules/pull-stream/docs/throughs/filter.md (added)
node_modules/pull-stream/docs/throughs/flatten.md (added)
node_modules/pull-stream/docs/throughs/index.md (added)
node_modules/pull-stream/docs/throughs/map.md (added)
node_modules/pull-stream/docs/throughs/non-unique.md (added)
node_modules/pull-stream/docs/throughs/take.md (added)
node_modules/pull-stream/docs/throughs/through.md (added)
node_modules/pull-stream/docs/throughs/unique.md (added)
node_modules/pull-stream/index.js (added)
node_modules/pull-stream/package.json (added)
node_modules/pull-stream/pull.js (added)
node_modules/pull-stream/sinks/collect.js (added)
node_modules/pull-stream/sinks/concat.js (added)
node_modules/pull-stream/sinks/drain.js (added)
node_modules/pull-stream/sinks/find.js (added)
node_modules/pull-stream/sinks/index.js (added)
node_modules/pull-stream/sinks/log.js (added)
node_modules/pull-stream/sinks/on-end.js (added)
node_modules/pull-stream/sinks/reduce.js (added)
node_modules/pull-stream/sources/count.js (added)
node_modules/pull-stream/sources/empty.js (added)
node_modules/pull-stream/sources/error.js (added)
node_modules/pull-stream/sources/index.js (added)
node_modules/pull-stream/sources/infinite.js (added)
node_modules/pull-stream/sources/keys.js (added)
node_modules/pull-stream/sources/once.js (added)
node_modules/pull-stream/sources/values.js (added)
node_modules/pull-stream/test/abort-stalled.js (added)
node_modules/pull-stream/test/async-map.js (added)
node_modules/pull-stream/test/collect.js (added)
node_modules/pull-stream/test/compose.js (added)
node_modules/pull-stream/test/concat.js (added)
node_modules/pull-stream/test/continuable.js (added)
node_modules/pull-stream/test/drain-abort.js (added)
node_modules/pull-stream/test/drain-if.js (added)
node_modules/pull-stream/test/filter.js (added)
node_modules/pull-stream/test/find.js (added)
node_modules/pull-stream/test/flatten.js (added)
node_modules/pull-stream/test/map.js (added)
node_modules/pull-stream/test/pull.js (added)
node_modules/pull-stream/test/take.js (added)
node_modules/pull-stream/test/through.js (added)
node_modules/pull-stream/test/unique.js (added)
node_modules/pull-stream/test/values.js (added)
node_modules/pull-stream/throughs/async-map.js (added)
node_modules/pull-stream/throughs/filter-not.js (added)
node_modules/pull-stream/throughs/filter.js (added)
node_modules/pull-stream/throughs/flatten.js (added)
node_modules/pull-stream/throughs/index.js (added)
node_modules/pull-stream/throughs/map.js (added)
node_modules/pull-stream/throughs/non-unique.js (added)
node_modules/pull-stream/throughs/take.js (added)
node_modules/pull-stream/throughs/through.js (added)
node_modules/pull-stream/throughs/unique.js (added)
node_modules/pull-stream/util/abort-cb.js (added)
node_modules/pull-stream/util/prop.js (added)
node_modules/pull-stream/util/tester.js (added)
node_modules/pull-utf8-decoder/.travis.yml (added)
node_modules/pull-utf8-decoder/LICENSE (added)
node_modules/pull-utf8-decoder/README.md (added)
node_modules/pull-utf8-decoder/index.js (added)
node_modules/pull-utf8-decoder/package.json (added)
node_modules/pull-utf8-decoder/test/index.js (added)
node_modules/readable-stream/CONTRIBUTING.md (added)
node_modules/readable-stream/GOVERNANCE.md (added)
node_modules/readable-stream/LICENSE (added)
node_modules/readable-stream/README.md (added)
node_modules/readable-stream/errors-browser.js (added)
node_modules/readable-stream/errors.js (added)
node_modules/readable-stream/experimentalWarning.js (added)
node_modules/readable-stream/lib/_stream_duplex.js (added)
node_modules/readable-stream/lib/_stream_passthrough.js (added)
node_modules/readable-stream/lib/_stream_readable.js (added)
node_modules/readable-stream/lib/_stream_transform.js (added)
node_modules/readable-stream/lib/_stream_writable.js (added)
node_modules/readable-stream/lib/internal/streams/async_iterator.js (added)
node_modules/readable-stream/lib/internal/streams/buffer_list.js (added)
node_modules/readable-stream/lib/internal/streams/destroy.js (added)
node_modules/readable-stream/lib/internal/streams/end-of-stream.js (added)
node_modules/readable-stream/lib/internal/streams/pipeline.js (added)
node_modules/readable-stream/lib/internal/streams/state.js (added)
node_modules/readable-stream/lib/internal/streams/stream-browser.js (added)
node_modules/readable-stream/lib/internal/streams/stream.js (added)
node_modules/readable-stream/package.json (added)
node_modules/readable-stream/readable-browser.js (added)
node_modules/readable-stream/readable.js (added)
node_modules/safe-buffer/LICENSE (added)
node_modules/safe-buffer/README.md (added)
node_modules/safe-buffer/index.d.ts (added)
node_modules/safe-buffer/index.js (added)
node_modules/safe-buffer/package.json (added)
node_modules/semver/CHANGELOG.md (added)
node_modules/semver/LICENSE (added)
node_modules/semver/README.md (added)
node_modules/semver/bin/semver.js (added)
node_modules/semver/package.json (added)
node_modules/semver/range.bnf (added)
node_modules/semver/semver.js (added)
node_modules/stream-to-pull-stream/.travis.yml (added)
node_modules/stream-to-pull-stream/LICENSE (added)
node_modules/stream-to-pull-stream/README.md (added)
node_modules/stream-to-pull-stream/index.js (added)
node_modules/stream-to-pull-stream/package.json (added)
node_modules/stream-to-pull-stream/test/abort.js (added)
node_modules/stream-to-pull-stream/test/close.js (added)
node_modules/stream-to-pull-stream/test/collect.js (added)
node_modules/stream-to-pull-stream/test/http.js (added)
node_modules/stream-to-pull-stream/test/index.js (added)
node_modules/stream-to-pull-stream/test/sink.js (added)
node_modules/stream-to-pull-stream/test/stack.js (added)
node_modules/stream-to-pull-stream/test/stdout.js (added)
node_modules/stream-to-pull-stream/test/streams2.js (added)
node_modules/stream-to-pull-stream/test/test-stdout.js (added)
node_modules/string_decoder/LICENSE (added)
node_modules/string_decoder/README.md (added)
node_modules/string_decoder/lib/string_decoder.js (added)
node_modules/string_decoder/package.json (added)
node_modules/tar-stream/LICENSE (added)
node_modules/tar-stream/README.md (added)
node_modules/tar-stream/extract.js (added)
node_modules/tar-stream/headers.js (added)
node_modules/tar-stream/index.js (added)
node_modules/tar-stream/pack.js (added)
node_modules/tar-stream/package.json (added)
node_modules/util-deprecate/History.md (added)
node_modules/util-deprecate/LICENSE (added)
node_modules/util-deprecate/README.md (added)
node_modules/util-deprecate/browser.js (added)
node_modules/util-deprecate/node.js (added)
node_modules/util-deprecate/package.json (added)
node_modules/wrappy/LICENSE (added)
node_modules/wrappy/README.md (added)
node_modules/wrappy/package.json (added)
node_modules/wrappy/wrappy.js (added)
README.md
@@ -24,11 +24,8 @@
24 24 As a scuttlebot plugin:
25 25 ```
26 26 git clone ssb://%pFqjcdVKHqsrtOjVEAVZeCF0iY4s+3Hr0vA3EFCq5UM=.sha256 ~/.ssb/node_modules/ssb-npm-registry
27 27 cd ~/.ssb/node_modules/ssb-npm-registry
28 -wget -qO- 'http://localhost:8989/blobs/get/&E+tZfD6eodncvEddM3QAfsmzTJ003jlPGsqFN5TO7sQ=.sha256' | tar xz package/node_modules
29 -mv package/node_modules node_modules
30 -rmdir package
31 28 ./enable.js
32 29 # restart sbot
33 30 ```
34 31
node_modules/.bin/semver
@@ -1,0 +1,1 @@
1 +../semver/bin/semver.js
node_modules/asyncmemo/README.md
@@ -1,0 +1,60 @@
1 +# asyncmemo
2 +
3 +Memoize asynchronous function calls. Combine multiple calls with the same
4 +argument so that the underlying function is only called once for that argument.
5 +Optionally cache the result for that argument.
6 +
7 +## Install
8 +
9 +```
10 +npm install --save asyncmemo
11 +```
12 +
13 +## Example
14 +
15 +```js
16 +var asyncMemo = require('asyncmemo')
17 +var xhr = require('xhr')
18 +var get = asyncMemo(function (base, path, cb) {
19 + xhr.get(base + path, cb)
20 +}, 'http://localhost')
21 +get('/foo', function (err, resp) {
22 + console.log(err, resp)
23 +})
24 +```
25 +
26 +## API
27 +
28 +### `asyncMemo([opts,] fn, [args...]): memo`
29 +
30 +- `opts.cache`: cache object, or false to disable cache
31 +- `opts.asString`: function to convert an argument to a string
32 +- `fn([args...], arg, cb(err, result))`: the asynchronous function to memoize
33 +- `args`: arguments to prepend to each call to `fn`
34 +- `memo(arg, cb(err, result))`: memoized asynchronous function
35 +- `memo.cache`: cache object, either `opts.cache` or an auto-created one
36 +
37 +A custom cache object can be passed using the `cache` option. This should have
38 +`has`, `get`, and `set` methods.
39 +
40 +If you want to memoize a key that is not a string or number, you
41 +can specify an `asString` function to convert the key argument to a string.
42 +
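A minimal sketch of these options (illustrative only, not part of the package's own docs): it keys the memo by an object using `asString`, and swaps in a bounded cache such as `hashlru` (also checked in by this commit), which exposes the required `has`/`get`/`set` methods.

```js
var asyncMemo = require('asyncmemo')
var HLRU = require('hashlru') // any object with has/get/set works as opts.cache

// Hypothetical lookup: the underlying function runs at most once per distinct key
var getProfile = asyncMemo({
  cache: HLRU(100),
  asString: function (id) { return id.name + '@' + id.host }
}, function (id, cb) {
  setTimeout(function () { cb(null, {id: id, fetched: Date.now()}) }, 10)
})

getProfile({name: 'cel', host: 'example.org'}, function (err, profile) {
  if (err) throw err
  console.log(profile)
})
```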
43 +## Related
44 +
45 +- [async-memo](https://www.npmjs.com/package/async-memo)
46 +- [async-cache](https://www.npmjs.com/package/async-cache)
47 +- [memo-cache](https://www.npmjs.com/package/memo-cache)
48 +- [memoizeasync](https://www.npmjs.com/package/memoizeasync)
49 +- [thunky](https://github.com/mafintosh/thunky) - does the same except memoizes
50 + only one value
51 +
52 +## License
53 +
54 +Copyright (c) 2016 Charles Lehner
55 +
56 +Usage of the works is permitted provided that this instrument is
57 +retained with the works, so that any entity that uses the works is
58 +notified of this instrument.
59 +
60 +DISCLAIMER: THE WORKS ARE WITHOUT WARRANTY.
node_modules/asyncmemo/index.js
@@ -1,0 +1,72 @@
1 +var has = Object.prototype.hasOwnProperty
2 +
3 +function toArgString() {
4 + return [].join.call(arguments)
5 +}
6 +
7 +module.exports = function (opts, fn /*, preArgs... */) {
8 + var preArgs = [].slice.call(arguments, 2)
9 + if (typeof opts === 'function') {
10 + if (arguments.length >= 2) preArgs.unshift(fn)
11 + fn = opts
12 + opts = {}
13 + }
14 + var cache =
15 + opts.cache === false ? null :
16 + opts.cache === true || opts.cache == null ? new Storage() :
17 + opts.cache
18 + var callbacks = {/* arg: [callback] */}
19 + var toString = opts.asString || toArgString
20 +
21 + var memoized = function (/* args..., cb */) {
22 + var args = [].slice.call(arguments)
23 + var cb = args.pop()
24 + var memo = toString.apply(this, args)
25 + if (cache && cache.has(memo)) {
26 + var self = this
27 + return process.nextTick(function () {
28 + if (cache.has(memo))
29 + cb.call(self, null, cache.get(memo))
30 + else
31 + run.call(self, args, memo, cb)
32 + })
33 + }
34 + run.call(this, args, memo, cb)
35 + }
36 + memoized.cache = cache
37 + return memoized
38 +
39 + function run(args, memo, cb) {
40 + if (has.call(callbacks, memo))
41 + return callbacks[memo].push([this, cb])
42 + var cbs = callbacks[memo] = [[this, cb]]
43 + fn.apply(this, preArgs.concat(args, function (err, result) {
44 + if (!err && cache)
45 + cache.set(memo, result)
46 + while (cbs.length) {
47 + cb = cbs.shift()
48 + cb[1].call(cb[0], err, result)
49 + }
50 + delete callbacks[memo]
51 + }))
52 + }
53 +}
54 +
55 +function Storage() {
56 + this.data = {}
57 +}
58 +Storage.prototype.has = function (key) {
59 + return has.call(this.data, key)
60 +}
61 +Storage.prototype.get = function (key) {
62 + return this.data[key]
63 +}
64 +Storage.prototype.set = function (key, value) {
65 + this.data[key] = value
66 +}
67 +Storage.prototype.remove = function (key) {
68 + delete this.data[key]
69 +}
70 +Storage.prototype.clear = function (key) {
71 + this.data = {}
72 +}
node_modules/asyncmemo/package.json
@@ -1,0 +1,59 @@
1 +{
2 + "_args": [
3 + [
4 + "asyncmemo@1.3.0",
5 + "/home/cel/src/ssb-npm-registry"
6 + ]
7 + ],
8 + "_from": "asyncmemo@1.3.0",
9 + "_id": "asyncmemo@1.3.0",
10 + "_inBundle": false,
11 + "_integrity": "sha256-U96yJbGCHK+6wk8CK/IAj5Mypb1AXtyxvxl1E0zMqRI=",
12 + "_location": "/asyncmemo",
13 + "_phantomChildren": {},
14 + "_requested": {
15 + "type": "version",
16 + "registry": true,
17 + "raw": "asyncmemo@1.3.0",
18 + "name": "asyncmemo",
19 + "escapedName": "asyncmemo",
20 + "rawSpec": "1.3.0",
21 + "saveSpec": null,
22 + "fetchSpec": "1.3.0"
23 + },
24 + "_requiredBy": [
25 + "/"
26 + ],
27 + "_resolved": "http://localhost:8989/blobs/get/&U96yJbGCHK+6wk8CK/IAj5Mypb1AXtyxvxl1E0zMqRI=.sha256",
28 + "_spec": "1.3.0",
29 + "_where": "/home/cel/src/ssb-npm-registry",
30 + "author": {
31 + "name": "Charles Lehner",
32 + "url": "http://celehner.com/"
33 + },
34 + "bugs": {
35 + "url": "https://github.com/clehner/asyncmemo/issues"
36 + },
37 + "description": "memoize asynchronous function calls",
38 + "devDependencies": {
39 + "tape": "^4.6.2"
40 + },
41 + "homepage": "https://github.com/clehner/asyncmemo#readme",
42 + "keywords": [
43 + "async",
44 + "cache",
45 + "memo",
46 + "memoize"
47 + ],
48 + "license": "Fair",
49 + "main": "index.js",
50 + "name": "asyncmemo",
51 + "repository": {
52 + "type": "git",
53 + "url": "git+https://github.com/clehner/asyncmemo.git"
54 + },
55 + "scripts": {
56 + "test": "node test"
57 + },
58 + "version": "1.3.0"
59 +}
node_modules/asyncmemo/test.js
@@ -1,0 +1,139 @@
1 +var asyncMemo = require('.')
2 +var test = require('tape')
3 +
4 +var i = 0
5 +var get = asyncMemo(function (foo, bar, arg, cb) {
6 + setTimeout(function () {
7 + cb(i == 3, [foo, bar, arg, i++].join('-'))
8 + }, 10)
9 +}, 'foo', 'bar')
10 +
11 +test('memoize values', function (t) {
12 + get('thing', function (err, result) {
13 + t.error(err, 'thing')
14 + t.equal(result, 'foo-bar-thing-0')
15 +
16 + get('thing', function (err, result) {
17 + t.error(err, 'thing 2')
18 + t.equal(result, 'foo-bar-thing-0')
19 +
20 + get('pasta', function (err, result) {
21 + t.error(err, 'pasta')
22 + t.equal(result, 'foo-bar-pasta-1')
23 +
24 + get('pasta', function (err, result) {
25 + t.error(err, 'pasta 2')
26 + t.equal(result, 'foo-bar-pasta-1')
27 +
28 + get('more', function (err, result) {
29 + t.error(err, 'more')
30 + t.equal(result, 'foo-bar-more-2')
31 + t.end()
32 + })
33 + })
34 + })
35 + })
36 + })
37 +})
38 +
39 +test('don\'t cache results asynchronously with errors', function (t) {
40 + get('blah', function (err, result) {
41 + t.ok(err, 'error')
42 + setImmediate(function () {
43 + get('blah', function (err, result) {
44 + t.error(err, 'blah')
45 + t.equal(result, 'foo-bar-blah-4')
46 + t.end()
47 + })
48 + })
49 + })
50 +})
51 +
52 +test('handle concurrent requests', function (t) {
53 + t.plan(4)
54 + get('one', function (err, result) {
55 + t.error(err, 'one')
56 + t.equal(result, 'foo-bar-one-5')
57 + })
58 + get('one', function (err, result) {
59 + t.error(err, 'one 2')
60 + t.equal(result, 'foo-bar-one-5')
61 + })
62 +})
63 +
64 +test('preserve this', function (t) {
65 + var obj = {get: get}
66 + obj.get('thing', function (err, result) {
67 + t.error(err, 'thing')
68 + t.equal(this, obj)
69 + t.equal(result, 'foo-bar-thing-0')
70 + t.end()
71 + })
72 +})
73 +
74 +test('memoize undefined', function (t) {
75 + var i = 0
76 + var get = asyncMemo(function (arg, cb) {
77 + i++
78 + cb()
79 + })
80 +
81 + get('one', function (err, result) {
82 + t.error(err, 'one')
83 + t.equal(i, 1)
84 + t.equal(result, undefined)
85 +
86 + get('one', function (err, result) {
87 + t.error(err, 'two')
88 + t.equal(i, 1)
89 + t.equal(result, undefined)
90 + t.end()
91 + })
92 + })
93 +})
94 +
95 +test('without cache', function (t) {
96 + t.plan(4)
97 +
98 + var run = asyncMemo({cache: false}, function (arg, cb) {
99 + setImmediate(function () {
100 + t.pass('called')
101 + cb(null, arg)
102 + })
103 + })
104 +
105 + run('a', function () {
106 + t.pass('one')
107 +
108 + setImmediate(function () {
109 + run('a', function () {
110 + t.pass('two')
111 + })
112 + })
113 + })
114 +})
115 +
116 +test('toString', function (t) {
117 + var get = asyncMemo({
118 + asString: function (arg) {
119 + return arg.first + '-' + arg.last
120 + }
121 + }, function (arg, cb) {
122 + cb(null, {})
123 + })
124 +
125 + var abcdef
126 + get({first: 'abc', last: 'def'}, function (err, obj) {
127 + t.error(err)
128 + abcdef = obj
129 + })
130 + get({first: 'abc', last: 'def'}, function (err, obj) {
131 + t.error(err)
132 + t.equals(obj, abcdef)
133 + })
134 + get({first: 'zzz', last: 'def'}, function (err, obj) {
135 + t.error(err)
136 + t.notEquals(obj, abcdef)
137 + })
138 + t.end()
139 +})
node_modules/bl/.jshintrc
@@ -1,0 +1,60 @@
1 +{
2 + "predef": [ ]
3 + , "bitwise": false
4 + , "camelcase": false
5 + , "curly": false
6 + , "eqeqeq": false
7 + , "forin": false
8 + , "immed": false
9 + , "latedef": false
10 + , "noarg": true
11 + , "noempty": true
12 + , "nonew": true
13 + , "plusplus": false
14 + , "quotmark": true
15 + , "regexp": false
16 + , "undef": true
17 + , "unused": true
18 + , "strict": false
19 + , "trailing": true
20 + , "maxlen": 120
21 + , "asi": true
22 + , "boss": true
23 + , "debug": true
24 + , "eqnull": true
25 + , "esnext": false
26 + , "evil": true
27 + , "expr": true
28 + , "funcscope": false
29 + , "globalstrict": false
30 + , "iterator": false
31 + , "lastsemic": true
32 + , "laxbreak": true
33 + , "laxcomma": true
34 + , "loopfunc": true
35 + , "multistr": false
36 + , "onecase": false
37 + , "proto": false
38 + , "regexdash": false
39 + , "scripturl": true
40 + , "smarttabs": false
41 + , "shadow": false
42 + , "sub": true
43 + , "supernew": false
44 + , "validthis": true
45 + , "browser": true
46 + , "couch": false
47 + , "devel": false
48 + , "dojo": false
49 + , "mootools": false
50 + , "node": true
51 + , "nonstandard": true
52 + , "prototypejs": false
53 + , "rhino": false
54 + , "worker": true
55 + , "wsh": false
56 + , "nomen": false
57 + , "onevar": false
58 + , "passfail": false
59 + , "esversion": 3
60 +}
node_modules/bl/.travis.yml
@@ -1,0 +1,13 @@
1 +sudo: false
2 +language: node_js
3 +node_js:
4 + - '6'
5 + - '8'
6 + - '10'
7 +branches:
8 + only:
9 + - master
10 +notifications:
11 + email:
12 + - rod@vagg.org
13 + - matteo.collina@gmail.com
node_modules/bl/LICENSE.md
@@ -1,0 +1,13 @@
1 +The MIT License (MIT)
2 +=====================
3 +
4 +Copyright (c) 2013-2018 bl contributors
5 +----------------------------------
6 +
7 +*bl contributors listed at <https://github.com/rvagg/bl#contributors>*
8 +
9 +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
10 +
11 +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
12 +
13 +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
node_modules/bl/README.md
@@ -1,0 +1,218 @@
1 +# bl *(BufferList)*
2 +
3 +[![Build Status](https://travis-ci.org/rvagg/bl.svg?branch=master)](https://travis-ci.org/rvagg/bl)
4 +
5 +**A Node.js Buffer list collector, reader and streamer thingy.**
6 +
7 +[![NPM](https://nodei.co/npm/bl.png?downloads=true&downloadRank=true)](https://nodei.co/npm/bl/)
8 +[![NPM](https://nodei.co/npm-dl/bl.png?months=6&height=3)](https://nodei.co/npm/bl/)
9 +
10 +**bl** is a storage object for collections of Node Buffers, exposing them with the main Buffer readable API. Also works as a duplex stream so you can collect buffers from a stream that emits them and emit buffers to a stream that consumes them!
11 +
12 +The original buffers are kept intact and copies are only done as necessary. Any reads that require the use of a single original buffer will return a slice of that buffer only (which references the same memory as the original buffer). Reads that span buffers perform concatenation as required and return the results transparently.
13 +
14 +```js
15 +const BufferList = require('bl')
16 +
17 +var bl = new BufferList()
18 +bl.append(Buffer.from('abcd'))
19 +bl.append(Buffer.from('efg'))
20 +bl.append('hi') // bl will also accept & convert Strings
21 +bl.append(Buffer.from('j'))
22 +bl.append(Buffer.from([ 0x3, 0x4 ]))
23 +
24 +console.log(bl.length) // 12
25 +
26 +console.log(bl.slice(0, 10).toString('ascii')) // 'abcdefghij'
27 +console.log(bl.slice(3, 10).toString('ascii')) // 'defghij'
28 +console.log(bl.slice(3, 6).toString('ascii')) // 'def'
29 +console.log(bl.slice(3, 8).toString('ascii')) // 'defgh'
30 +console.log(bl.slice(5, 10).toString('ascii')) // 'fghij'
31 +
32 +console.log(bl.indexOf('def')) // 3
33 +console.log(bl.indexOf('asdf')) // -1
34 +
35 +// or just use toString!
36 +console.log(bl.toString()) // 'abcdefghij\u0003\u0004'
37 +console.log(bl.toString('ascii', 3, 8)) // 'defgh'
38 +console.log(bl.toString('ascii', 5, 10)) // 'fghij'
39 +
40 +// other standard Buffer readables
41 +console.log(bl.readUInt16BE(10)) // 0x0304
42 +console.log(bl.readUInt16LE(10)) // 0x0403
43 +```
44 +
45 +Give it a callback in the constructor and use it just like **[concat-stream](https://github.com/maxogden/node-concat-stream)**:
46 +
47 +```js
48 +const bl = require('bl')
49 + , fs = require('fs')
50 +
51 +fs.createReadStream('README.md')
52 + .pipe(bl(function (err, data) { // note 'new' isn't strictly required
53 + // `data` is a complete Buffer object containing the full data
54 + console.log(data.toString())
55 + }))
56 +```
57 +
58 +Note that when you use the *callback* method like this, the resulting `data` parameter is a concatenation of all `Buffer` objects in the list. If you want to avoid the overhead of this concatenation (in cases of extreme performance consciousness), then avoid the *callback* method and just listen to `'end'` instead, like a standard Stream.
59 +
60 +Or to fetch a URL using [hyperquest](https://github.com/substack/hyperquest) (should work with [request](http://github.com/mikeal/request) and even plain Node http too!):
61 +```js
62 +const hyperquest = require('hyperquest')
63 + , bl = require('bl')
64 + , url = 'https://raw.github.com/rvagg/bl/master/README.md'
65 +
66 +hyperquest(url).pipe(bl(function (err, data) {
67 + console.log(data.toString())
68 +}))
69 +```
70 +
71 +Or, use it as a readable stream to recompose a list of Buffers to an output source:
72 +
73 +```js
74 +const BufferList = require('bl')
75 + , fs = require('fs')
76 +
77 +var bl = new BufferList()
78 +bl.append(Buffer.from('abcd'))
79 +bl.append(Buffer.from('efg'))
80 +bl.append(Buffer.from('hi'))
81 +bl.append(Buffer.from('j'))
82 +
83 +bl.pipe(fs.createWriteStream('gibberish.txt'))
84 +```
85 +
86 +## API
87 +
88 + * <a href="#ctor"><code><b>new BufferList([ callback ])</b></code></a>
89 + * <a href="#length"><code>bl.<b>length</b></code></a>
90 + * <a href="#append"><code>bl.<b>append(buffer)</b></code></a>
91 + * <a href="#get"><code>bl.<b>get(index)</b></code></a>
92 + * <a href="#indexOf"><code>bl.<b>indexOf(value[, byteOffset][, encoding])</b></code></a>
93 + * <a href="#slice"><code>bl.<b>slice([ start[, end ] ])</b></code></a>
94 + * <a href="#shallowSlice"><code>bl.<b>shallowSlice([ start[, end ] ])</b></code></a>
95 + * <a href="#copy"><code>bl.<b>copy(dest, [ destStart, [ srcStart [, srcEnd ] ] ])</b></code></a>
96 + * <a href="#duplicate"><code>bl.<b>duplicate()</b></code></a>
97 + * <a href="#consume"><code>bl.<b>consume(bytes)</b></code></a>
98 + * <a href="#toString"><code>bl.<b>toString([encoding, [ start, [ end ]]])</b></code></a>
99 + * <a href="#readXX"><code>bl.<b>readDoubleBE()</b></code>, <code>bl.<b>readDoubleLE()</b></code>, <code>bl.<b>readFloatBE()</b></code>, <code>bl.<b>readFloatLE()</b></code>, <code>bl.<b>readInt32BE()</b></code>, <code>bl.<b>readInt32LE()</b></code>, <code>bl.<b>readUInt32BE()</b></code>, <code>bl.<b>readUInt32LE()</b></code>, <code>bl.<b>readInt16BE()</b></code>, <code>bl.<b>readInt16LE()</b></code>, <code>bl.<b>readUInt16BE()</b></code>, <code>bl.<b>readUInt16LE()</b></code>, <code>bl.<b>readInt8()</b></code>, <code>bl.<b>readUInt8()</b></code></a>
100 + * <a href="#streams">Streams</a>
101 +
102 +--------------------------------------------------------
103 +<a name="ctor"></a>
104 +### new BufferList([ callback | Buffer | Buffer array | BufferList | BufferList array | String ])
105 +The constructor takes an optional callback; if supplied, the callback will be called with an error argument followed by a reference to the **bl** instance when `bl.end()` is called (e.g. from a piped stream). This is a convenient method of collecting the entire contents of a stream, particularly when the stream is *chunky*, such as a network stream.
106 +
107 +Normally, no arguments are required for the constructor, but you can initialise the list by passing in a single `Buffer` object or an array of `Buffer` objects.
108 +
109 +`new` is not strictly required; if you don't instantiate a new object, it will be done automatically for you, so you can create a new instance simply with:
110 +
111 +```js
112 +var bl = require('bl')
113 +var myinstance = bl()
114 +
115 +// equivalent to:
116 +
117 +var BufferList = require('bl')
118 +var myinstance = new BufferList()
119 +```
120 +
121 +--------------------------------------------------------
122 +<a name="length"></a>
123 +### bl.length
124 +Get the length of the list in bytes. This is the sum of the lengths of all of the buffers contained in the list, minus any initial offset for a semi-consumed buffer at the beginning. Should accurately represent the total number of bytes that can be read from the list.
125 +
126 +--------------------------------------------------------
127 +<a name="append"></a>
128 +### bl.append(Buffer | Buffer array | BufferList | BufferList array | String)
129 +`append(buffer)` adds an additional buffer or BufferList to the internal list. `this` is returned so it can be chained.
130 +
131 +--------------------------------------------------------
132 +<a name="get"></a>
133 +### bl.get(index)
134 +`get()` will return the byte at the specified index.
135 +
136 +--------------------------------------------------------
137 +<a name="indexOf"></a>
138 +### bl.indexOf(value[, byteOffset][, encoding])
139 +`indexOf()` returns the first index at which a given value can be found in the BufferList, or -1 if it is not present.
140 +The optional `byteOffset` is the position to start searching from, and `encoding` applies when `value` is a string.
141 +
142 +--------------------------------------------------------
143 +<a name="slice"></a>
144 +### bl.slice([ start, [ end ] ])
145 +`slice()` returns a new `Buffer` object containing the bytes within the range specified. Both `start` and `end` are optional and will default to the beginning and end of the list respectively.
146 +
147 +If the requested range spans a single internal buffer then a slice of that buffer will be returned which shares the original memory range of that Buffer. If the range spans multiple buffers then copy operations will likely occur to give you a uniform Buffer.
148 +
149 +--------------------------------------------------------
150 +<a name="shallowSlice"></a>
151 +### bl.shallowSlice([ start, [ end ] ])
152 +`shallowSlice()` returns a new `BufferList` object containing the bytes within the range specified. Both `start` and `end` are optional and will default to the beginning and end of the list respectively.
153 +
154 +No copies will be performed. All buffers in the result share memory with the original list.
155 +
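A short illustrative comparison with `slice()` (a sketch, not from the upstream README): a range that spans internal buffers is concatenated by `slice()`, while `shallowSlice()` returns a `BufferList` over the same memory.

```js
const BufferList = require('bl')

const bl = new BufferList([ Buffer.from('abcd'), Buffer.from('efgh') ])

const flat = bl.slice(2, 6)           // Buffer 'cdef' — spans two internal buffers, so bytes are copied
const shallow = bl.shallowSlice(2, 6) // BufferList 'cdef' — no copying, shares the original memory

console.log(flat.toString(), shallow.toString()) // 'cdef cdef'
console.log(shallow instanceof BufferList)       // true
```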
156 +--------------------------------------------------------
157 +<a name="copy"></a>
158 +### bl.copy(dest, [ destStart, [ srcStart [, srcEnd ] ] ])
159 +`copy()` copies the content of the list into the `dest` buffer, starting from `destStart` and containing the bytes within the range specified with `srcStart` to `srcEnd`. `destStart`, `srcStart` and `srcEnd` are optional and will default to the beginning of the `dest` buffer, and the beginning and end of the list respectively.
160 +
161 +--------------------------------------------------------
162 +<a name="duplicate"></a>
163 +### bl.duplicate()
164 +`duplicate()` performs a **shallow-copy** of the list. The internal Buffers remain the same, so if you change the underlying Buffers, the change will be reflected in both the original and the duplicate. This method is needed if you want to call `consume()` or `pipe()` and still keep the original list. Example:
165 +
166 +```js
167 +var bl = new BufferList()
168 +
169 +bl.append('hello')
170 +bl.append(' world')
171 +bl.append('\n')
172 +
173 +bl.duplicate().pipe(process.stdout, { end: false })
174 +
175 +console.log(bl.toString())
176 +```
177 +
178 +--------------------------------------------------------
179 +<a name="consume"></a>
180 +### bl.consume(bytes)
181 +`consume()` will shift bytes *off the start of the list*. The number of bytes consumed doesn't need to line up with the sizes of the internal Buffers&mdash;initial offsets will be calculated accordingly in order to give you a consistent view of the data.
182 +
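A small sketch (illustrative): consuming 4 bytes here drops the whole first buffer plus one byte of the next, and `length` tracks what remains.

```js
const BufferList = require('bl')

const bl = new BufferList([ Buffer.from('abc'), Buffer.from('defgh') ])
bl.consume(4) // drops 'abc' and the leading 'd' of the second buffer

console.log(bl.length)     // 4
console.log(bl.toString()) // 'efgh'
```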
183 +--------------------------------------------------------
184 +<a name="toString"></a>
185 +### bl.toString([encoding, [ start, [ end ]]])
186 +`toString()` will return a string representation of the buffer. The optional `start` and `end` arguments are passed on to `slice()`, while the `encoding` is passed on to `toString()` of the resulting Buffer. See the [Buffer#toString()](http://nodejs.org/docs/latest/api/buffer.html#buffer_buf_tostring_encoding_start_end) documentation for more information.
187 +
188 +--------------------------------------------------------
189 +<a name="readXX"></a>
190 +### bl.readDoubleBE(), bl.readDoubleLE(), bl.readFloatBE(), bl.readFloatLE(), bl.readInt32BE(), bl.readInt32LE(), bl.readUInt32BE(), bl.readUInt32LE(), bl.readInt16BE(), bl.readInt16LE(), bl.readUInt16BE(), bl.readUInt16LE(), bl.readInt8(), bl.readUInt8()
191 +
192 +All of the standard byte-reading methods of the `Buffer` interface are implemented and will operate across internal Buffer boundaries transparently.
193 +
194 +See the <b><code>[Buffer](http://nodejs.org/docs/latest/api/buffer.html)</code></b> documentation for how these work.
195 +
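For example (an illustrative sketch, not from the upstream README), a multi-byte read that straddles two appended buffers still works:

```js
const BufferList = require('bl')

const bl = new BufferList()
bl.append(Buffer.from([ 0x01, 0x02, 0x03 ]))
bl.append(Buffer.from([ 0x04 ]))

console.log(bl.readUInt32BE(0).toString(16)) // '1020304' — the read spans both internal buffers
console.log(bl.readUInt16LE(2).toString(16)) // '403'
```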
196 +--------------------------------------------------------
197 +<a name="streams"></a>
198 +### Streams
199 +**bl** is a Node **[Duplex Stream](http://nodejs.org/docs/latest/api/stream.html#stream_class_stream_duplex)**, so it can be read from and written to like a standard Node stream. You can also `pipe()` to and from a **bl** instance.
200 +
201 +--------------------------------------------------------
202 +
203 +## Contributors
204 +
205 +**bl** is brought to you by the following hackers:
206 +
207 + * [Rod Vagg](https://github.com/rvagg)
208 + * [Matteo Collina](https://github.com/mcollina)
209 + * [Jarett Cruger](https://github.com/jcrugzz)
210 +
211 +
212 +
213 +<a name="license"></a>
214 +## License &amp; copyright
215 +
216 +Copyright (c) 2013-2018 bl contributors (listed above).
217 +
218 +bl is licensed under the MIT license. All rights not explicitly granted in the MIT license are reserved. See the included LICENSE.md file for more details.
node_modules/bl/bl.js
@@ -1,0 +1,382 @@
1 +'use strict'
2 +var DuplexStream = require('readable-stream').Duplex
3 + , util = require('util')
4 +
5 +function BufferList (callback) {
6 + if (!(this instanceof BufferList))
7 + return new BufferList(callback)
8 +
9 + this._bufs = []
10 + this.length = 0
11 +
12 + if (typeof callback == 'function') {
13 + this._callback = callback
14 +
15 + var piper = function piper (err) {
16 + if (this._callback) {
17 + this._callback(err)
18 + this._callback = null
19 + }
20 + }.bind(this)
21 +
22 + this.on('pipe', function onPipe (src) {
23 + src.on('error', piper)
24 + })
25 + this.on('unpipe', function onUnpipe (src) {
26 + src.removeListener('error', piper)
27 + })
28 + } else {
29 + this.append(callback)
30 + }
31 +
32 + DuplexStream.call(this)
33 +}
34 +
35 +
36 +util.inherits(BufferList, DuplexStream)
37 +
38 +
39 +BufferList.prototype._offset = function _offset (offset) {
40 + var tot = 0, i = 0, _t
41 + if (offset === 0) return [ 0, 0 ]
42 + for (; i < this._bufs.length; i++) {
43 + _t = tot + this._bufs[i].length
44 + if (offset < _t || i == this._bufs.length - 1) {
45 + return [ i, offset - tot ]
46 + }
47 + tot = _t
48 + }
49 +}
50 +
51 +BufferList.prototype._reverseOffset = function (blOffset) {
52 + var bufferId = blOffset[0]
53 + var offset = blOffset[1]
54 + for (var i = 0; i < bufferId; i++) {
55 + offset += this._bufs[i].length
56 + }
57 + return offset
58 +}
59 +
60 +BufferList.prototype.append = function append (buf) {
61 + var i = 0
62 +
63 + if (Buffer.isBuffer(buf)) {
64 + this._appendBuffer(buf)
65 + } else if (Array.isArray(buf)) {
66 + for (; i < buf.length; i++)
67 + this.append(buf[i])
68 + } else if (buf instanceof BufferList) {
69 + // unwrap argument into individual BufferLists
70 + for (; i < buf._bufs.length; i++)
71 + this.append(buf._bufs[i])
72 + } else if (buf != null) {
73 + // coerce number arguments to strings, since Buffer(number) does
74 + // uninitialized memory allocation
75 + if (typeof buf == 'number')
76 + buf = buf.toString()
77 +
78 + this._appendBuffer(Buffer.from(buf))
79 + }
80 +
81 + return this
82 +}
83 +
84 +
85 +BufferList.prototype._appendBuffer = function appendBuffer (buf) {
86 + this._bufs.push(buf)
87 + this.length += buf.length
88 +}
89 +
90 +
91 +BufferList.prototype._write = function _write (buf, encoding, callback) {
92 + this._appendBuffer(buf)
93 +
94 + if (typeof callback == 'function')
95 + callback()
96 +}
97 +
98 +
99 +BufferList.prototype._read = function _read (size) {
100 + if (!this.length)
101 + return this.push(null)
102 +
103 + size = Math.min(size, this.length)
104 + this.push(this.slice(0, size))
105 + this.consume(size)
106 +}
107 +
108 +
109 +BufferList.prototype.end = function end (chunk) {
110 + DuplexStream.prototype.end.call(this, chunk)
111 +
112 + if (this._callback) {
113 + this._callback(null, this.slice())
114 + this._callback = null
115 + }
116 +}
117 +
118 +
119 +BufferList.prototype.get = function get (index) {
120 + if (index > this.length || index < 0) {
121 + return undefined
122 + }
123 + var offset = this._offset(index)
124 + return this._bufs[offset[0]][offset[1]]
125 +}
126 +
127 +
128 +BufferList.prototype.slice = function slice (start, end) {
129 + if (typeof start == 'number' && start < 0)
130 + start += this.length
131 + if (typeof end == 'number' && end < 0)
132 + end += this.length
133 + return this.copy(null, 0, start, end)
134 +}
135 +
136 +
137 +BufferList.prototype.copy = function copy (dst, dstStart, srcStart, srcEnd) {
138 + if (typeof srcStart != 'number' || srcStart < 0)
139 + srcStart = 0
140 + if (typeof srcEnd != 'number' || srcEnd > this.length)
141 + srcEnd = this.length
142 + if (srcStart >= this.length)
143 + return dst || Buffer.alloc(0)
144 + if (srcEnd <= 0)
145 + return dst || Buffer.alloc(0)
146 +
147 + var copy = !!dst
148 + , off = this._offset(srcStart)
149 + , len = srcEnd - srcStart
150 + , bytes = len
151 + , bufoff = (copy && dstStart) || 0
152 + , start = off[1]
153 + , l
154 + , i
155 +
156 + // copy/slice everything
157 + if (srcStart === 0 && srcEnd == this.length) {
158 + if (!copy) { // slice, but full concat if multiple buffers
159 + return this._bufs.length === 1
160 + ? this._bufs[0]
161 + : Buffer.concat(this._bufs, this.length)
162 + }
163 +
164 + // copy, need to copy individual buffers
165 + for (i = 0; i < this._bufs.length; i++) {
166 + this._bufs[i].copy(dst, bufoff)
167 + bufoff += this._bufs[i].length
168 + }
169 +
170 + return dst
171 + }
172 +
173 + // easy, cheap case where it's a subset of one of the buffers
174 + if (bytes <= this._bufs[off[0]].length - start) {
175 + return copy
176 + ? this._bufs[off[0]].copy(dst, dstStart, start, start + bytes)
177 + : this._bufs[off[0]].slice(start, start + bytes)
178 + }
179 +
180 + if (!copy) // a slice, we need something to copy in to
181 + dst = Buffer.allocUnsafe(len)
182 +
183 + for (i = off[0]; i < this._bufs.length; i++) {
184 + l = this._bufs[i].length - start
185 +
186 + if (bytes > l) {
187 + this._bufs[i].copy(dst, bufoff, start)
188 + } else {
189 + this._bufs[i].copy(dst, bufoff, start, start + bytes)
190 + break
191 + }
192 +
193 + bufoff += l
194 + bytes -= l
195 +
196 + if (start)
197 + start = 0
198 + }
199 +
200 + return dst
201 +}
202 +
203 +BufferList.prototype.shallowSlice = function shallowSlice (start, end) {
204 + start = start || 0
205 + end = typeof end !== 'number' ? this.length : end
206 +
207 + if (start < 0)
208 + start += this.length
209 + if (end < 0)
210 + end += this.length
211 +
212 + if (start === end) {
213 + return new BufferList()
214 + }
215 + var startOffset = this._offset(start)
216 + , endOffset = this._offset(end)
217 + , buffers = this._bufs.slice(startOffset[0], endOffset[0] + 1)
218 +
219 + if (endOffset[1] == 0)
220 + buffers.pop()
221 + else
222 + buffers[buffers.length-1] = buffers[buffers.length-1].slice(0, endOffset[1])
223 +
224 + if (startOffset[1] != 0)
225 + buffers[0] = buffers[0].slice(startOffset[1])
226 +
227 + return new BufferList(buffers)
228 +}
229 +
230 +BufferList.prototype.toString = function toString (encoding, start, end) {
231 + return this.slice(start, end).toString(encoding)
232 +}
233 +
234 +BufferList.prototype.consume = function consume (bytes) {
235 + while (this._bufs.length) {
236 + if (bytes >= this._bufs[0].length) {
237 + bytes -= this._bufs[0].length
238 + this.length -= this._bufs[0].length
239 + this._bufs.shift()
240 + } else {
241 + this._bufs[0] = this._bufs[0].slice(bytes)
242 + this.length -= bytes
243 + break
244 + }
245 + }
246 + return this
247 +}
248 +
249 +
250 +BufferList.prototype.duplicate = function duplicate () {
251 + var i = 0
252 + , copy = new BufferList()
253 +
254 + for (; i < this._bufs.length; i++)
255 + copy.append(this._bufs[i])
256 +
257 + return copy
258 +}
259 +
260 +
261 +BufferList.prototype._destroy = function _destroy (err, cb) {
262 + this._bufs.length = 0
263 + this.length = 0
264 + cb(err)
265 +}
266 +
267 +
268 +BufferList.prototype.indexOf = function (search, offset, encoding) {
269 + if (encoding === undefined && typeof offset === 'string') {
270 + encoding = offset
271 + offset = undefined
272 + }
273 + if (typeof search === 'function' || Array.isArray(search)) {
274 + throw new TypeError('The "value" argument must be one of type string, Buffer, BufferList, or Uint8Array.')
275 + } else if (typeof search === 'number') {
276 + search = Buffer.from([search])
277 + } else if (typeof search === 'string') {
278 + search = Buffer.from(search, encoding)
279 + } else if (search instanceof BufferList) {
280 + search = search.slice()
281 + } else if (!Buffer.isBuffer(search)) {
282 + search = Buffer.from(search)
283 + }
284 +
285 + offset = Number(offset || 0)
286 + if (isNaN(offset)) {
287 + offset = 0
288 + }
289 +
290 + if (offset < 0) {
291 + offset = this.length + offset
292 + }
293 +
294 + if (offset < 0) {
295 + offset = 0
296 + }
297 +
298 + if (search.length === 0) {
299 + return offset > this.length ? this.length : offset
300 + }
301 +
302 + var blOffset = this._offset(offset)
303 + var blIndex = blOffset[0] // index of which internal buffer we're working on
304 + var buffOffset = blOffset[1] // offset of the internal buffer we're working on
305 +
306 + // scan over each buffer
307 + for (blIndex; blIndex < this._bufs.length; blIndex++) {
308 + var buff = this._bufs[blIndex]
309 + while(buffOffset < buff.length) {
310 + var availableWindow = buff.length - buffOffset
311 + if (availableWindow >= search.length) {
312 + var nativeSearchResult = buff.indexOf(search, buffOffset)
313 + if (nativeSearchResult !== -1) {
314 + return this._reverseOffset([blIndex, nativeSearchResult])
315 + }
316 + buffOffset = buff.length - search.length + 1 // end of native search window
317 + } else {
318 + var revOffset = this._reverseOffset([blIndex, buffOffset])
319 + if (this._match(revOffset, search)) {
320 + return revOffset
321 + }
322 + buffOffset++
323 + }
324 + }
325 + buffOffset = 0
326 + }
327 + return -1
328 +}
329 +
330 +BufferList.prototype._match = function(offset, search) {
331 + if (this.length - offset < search.length) {
332 + return false
333 + }
334 + for (var searchOffset = 0; searchOffset < search.length ; searchOffset++) {
335 + if(this.get(offset + searchOffset) !== search[searchOffset]){
336 + return false
337 + }
338 + }
339 + return true
340 +}
341 +
342 +
343 +;(function () {
344 + var methods = {
345 + 'readDoubleBE' : 8
346 + , 'readDoubleLE' : 8
347 + , 'readFloatBE' : 4
348 + , 'readFloatLE' : 4
349 + , 'readInt32BE' : 4
350 + , 'readInt32LE' : 4
351 + , 'readUInt32BE' : 4
352 + , 'readUInt32LE' : 4
353 + , 'readInt16BE' : 2
354 + , 'readInt16LE' : 2
355 + , 'readUInt16BE' : 2
356 + , 'readUInt16LE' : 2
357 + , 'readInt8' : 1
358 + , 'readUInt8' : 1
359 + , 'readIntBE' : null
360 + , 'readIntLE' : null
361 + , 'readUIntBE' : null
362 + , 'readUIntLE' : null
363 + }
364 +
365 + for (var m in methods) {
366 + (function (m) {
367 + if (methods[m] === null) {
368 + BufferList.prototype[m] = function (offset, byteLength) {
369 + return this.slice(offset, offset + byteLength)[m](0, byteLength)
370 + }
371 + }
372 + else {
373 + BufferList.prototype[m] = function (offset) {
374 + return this.slice(offset, offset + methods[m])[m](0)
375 + }
376 + }
377 + }(m))
378 + }
379 +}())
380 +
381 +
382 +module.exports = BufferList
node_modules/bl/package.json
@@ -1,0 +1,65 @@
1 +{
2 + "_args": [
3 + [
4 + "bl@3.0.0",
5 + "/home/cel/src/ssb-npm-registry"
6 + ]
7 + ],
8 + "_from": "bl@3.0.0",
9 + "_id": "bl@3.0.0",
10 + "_inBundle": false,
11 + "_integrity": "sha256-wezXsoQPBGN/Lpfgr+kaU26zuLDrtfLbG5MRvJJuh7w=",
12 + "_location": "/bl",
13 + "_phantomChildren": {},
14 + "_requested": {
15 + "type": "version",
16 + "registry": true,
17 + "raw": "bl@3.0.0",
18 + "name": "bl",
19 + "escapedName": "bl",
20 + "rawSpec": "3.0.0",
21 + "saveSpec": null,
22 + "fetchSpec": "3.0.0"
23 + },
24 + "_requiredBy": [
25 + "/tar-stream"
26 + ],
27 + "_resolved": "http://localhost:8989/blobs/get/&wezXsoQPBGN/Lpfgr+kaU26zuLDrtfLbG5MRvJJuh7w=.sha256",
28 + "_spec": "3.0.0",
29 + "_where": "/home/cel/src/ssb-npm-registry",
30 + "authors": [
31 + "Rod Vagg <rod@vagg.org> (https://github.com/rvagg)",
32 + "Matteo Collina <matteo.collina@gmail.com> (https://github.com/mcollina)",
33 + "Jarett Cruger <jcrugzz@gmail.com> (https://github.com/jcrugzz)"
34 + ],
35 + "bugs": {
36 + "url": "https://github.com/rvagg/bl/issues"
37 + },
38 + "dependencies": {
39 + "readable-stream": "^3.0.1"
40 + },
41 + "description": "Buffer List: collect buffers and access with a standard readable Buffer interface, streamable too!",
42 + "devDependencies": {
43 + "faucet": "0.0.1",
44 + "hash_file": "~0.1.1",
45 + "tape": "~4.9.1"
46 + },
47 + "homepage": "https://github.com/rvagg/bl",
48 + "keywords": [
49 + "buffer",
50 + "buffers",
51 + "stream",
52 + "awesomesauce"
53 + ],
54 + "license": "MIT",
55 + "main": "bl.js",
56 + "name": "bl",
57 + "repository": {
58 + "type": "git",
59 + "url": "git+https://github.com/rvagg/bl.git"
60 + },
61 + "scripts": {
62 + "test": "node test/test.js | faucet"
63 + },
64 + "version": "3.0.0"
65 +}
node_modules/bl/test/indexOf.js
@@ -1,0 +1,463 @@
1 +'use strict'
2 +
3 +var tape = require('tape')
4 + , BufferList = require('../')
5 + , Buffer = require('safe-buffer').Buffer
6 +
7 +tape('indexOf single byte needle', t => {
8 + const bl = new BufferList(['abcdefg', 'abcdefg', '12345'])
9 + t.equal(bl.indexOf('e'), 4)
10 + t.equal(bl.indexOf('e', 5), 11)
11 + t.equal(bl.indexOf('e', 12), -1)
12 + t.equal(bl.indexOf('5'), 18)
13 + t.end()
14 +})
15 +
16 +tape('indexOf multiple byte needle', t => {
17 + const bl = new BufferList(['abcdefg', 'abcdefg'])
18 + t.equal(bl.indexOf('ef'), 4)
19 + t.equal(bl.indexOf('ef', 5), 11)
20 + t.end()
21 +})
22 +
23 +tape('indexOf multiple byte needles across buffer boundaries', t => {
24 + const bl = new BufferList(['abcdefg', 'abcdefg'])
25 + t.equal(bl.indexOf('fgabc'), 5)
26 + t.end()
27 +})
28 +
29 +tape('indexOf takes a buffer list search', t => {
30 + const bl = new BufferList(['abcdefg', 'abcdefg'])
31 + const search = new BufferList('fgabc')
32 + t.equal(bl.indexOf(search), 5)
33 + t.end()
34 +})
35 +
36 +tape('indexOf a zero byte needle', t => {
37 + const b = new BufferList('abcdef')
38 + const buf_empty = Buffer.from('')
39 + t.equal(b.indexOf(''), 0)
40 + t.equal(b.indexOf('', 1), 1)
41 + t.equal(b.indexOf('', b.length + 1), b.length)
42 + t.equal(b.indexOf('', Infinity), b.length)
43 + t.equal(b.indexOf(buf_empty), 0)
44 + t.equal(b.indexOf(buf_empty, 1), 1)
45 + t.equal(b.indexOf(buf_empty, b.length + 1), b.length)
46 + t.equal(b.indexOf(buf_empty, Infinity), b.length)
47 + t.end()
48 +})
49 +
50 +tape('indexOf buffers smaller and larger than the needle', t => {
51 + const bl = new BufferList(['abcdefg', 'a', 'bcdefg', 'a', 'bcfgab'])
52 + t.equal(bl.indexOf('fgabc'), 5)
53 + t.equal(bl.indexOf('fgabc', 6), 12)
54 + t.equal(bl.indexOf('fgabc', 13), -1)
55 + t.end()
56 +})
57 +
58 +// only present in node 6+
59 +;(process.version.substr(1).split('.')[0] >= 6) && tape('indexOf latin1 and binary encoding', t => {
60 + const b = new BufferList('abcdef')
61 +
62 + // test latin1 encoding
63 + t.equal(
64 + new BufferList(Buffer.from(b.toString('latin1'), 'latin1'))
65 + .indexOf('d', 0, 'latin1'),
66 + 3
67 + )
68 + t.equal(
69 + new BufferList(Buffer.from(b.toString('latin1'), 'latin1'))
70 + .indexOf(Buffer.from('d', 'latin1'), 0, 'latin1'),
71 + 3
72 + )
73 + t.equal(
74 + new BufferList(Buffer.from('aa\u00e8aa', 'latin1'))
75 + .indexOf('\u00e8', 'latin1'),
76 + 2
77 + )
78 + t.equal(
79 + new BufferList(Buffer.from('\u00e8', 'latin1'))
80 + .indexOf('\u00e8', 'latin1'),
81 + 0
82 + )
83 + t.equal(
84 + new BufferList(Buffer.from('\u00e8', 'latin1'))
85 + .indexOf(Buffer.from('\u00e8', 'latin1'), 'latin1'),
86 + 0
87 + )
88 +
89 + // test binary encoding
90 + t.equal(
91 + new BufferList(Buffer.from(b.toString('binary'), 'binary'))
92 + .indexOf('d', 0, 'binary'),
93 + 3
94 + )
95 + t.equal(
96 + new BufferList(Buffer.from(b.toString('binary'), 'binary'))
97 + .indexOf(Buffer.from('d', 'binary'), 0, 'binary'),
98 + 3
99 + )
100 + t.equal(
101 + new BufferList(Buffer.from('aa\u00e8aa', 'binary'))
102 + .indexOf('\u00e8', 'binary'),
103 + 2
104 + )
105 + t.equal(
106 + new BufferList(Buffer.from('\u00e8', 'binary'))
107 + .indexOf('\u00e8', 'binary'),
108 + 0
109 + )
110 + t.equal(
111 + new BufferList(Buffer.from('\u00e8', 'binary'))
112 + .indexOf(Buffer.from('\u00e8', 'binary'), 'binary'),
113 + 0
114 + )
115 + t.end()
116 +})
117 +
118 +tape('indexOf the entire nodejs10 buffer test suite', t => {
119 + const b = new BufferList('abcdef')
120 + const buf_a = Buffer.from('a')
121 + const buf_bc = Buffer.from('bc')
122 + const buf_f = Buffer.from('f')
123 + const buf_z = Buffer.from('z')
124 +
125 + const stringComparison = 'abcdef'
126 +
127 + t.equal(b.indexOf('a'), 0)
128 + t.equal(b.indexOf('a', 1), -1)
129 + t.equal(b.indexOf('a', -1), -1)
130 + t.equal(b.indexOf('a', -4), -1)
131 + t.equal(b.indexOf('a', -b.length), 0)
132 + t.equal(b.indexOf('a', NaN), 0)
133 + t.equal(b.indexOf('a', -Infinity), 0)
134 + t.equal(b.indexOf('a', Infinity), -1)
135 + t.equal(b.indexOf('bc'), 1)
136 + t.equal(b.indexOf('bc', 2), -1)
137 + t.equal(b.indexOf('bc', -1), -1)
138 + t.equal(b.indexOf('bc', -3), -1)
139 + t.equal(b.indexOf('bc', -5), 1)
140 + t.equal(b.indexOf('bc', NaN), 1)
141 + t.equal(b.indexOf('bc', -Infinity), 1)
142 + t.equal(b.indexOf('bc', Infinity), -1)
143 + t.equal(b.indexOf('f'), b.length - 1)
144 + t.equal(b.indexOf('z'), -1)
145 + // empty search tests
146 + t.equal(b.indexOf(buf_a), 0)
147 + t.equal(b.indexOf(buf_a, 1), -1)
148 + t.equal(b.indexOf(buf_a, -1), -1)
149 + t.equal(b.indexOf(buf_a, -4), -1)
150 + t.equal(b.indexOf(buf_a, -b.length), 0)
151 + t.equal(b.indexOf(buf_a, NaN), 0)
152 + t.equal(b.indexOf(buf_a, -Infinity), 0)
153 + t.equal(b.indexOf(buf_a, Infinity), -1)
154 + t.equal(b.indexOf(buf_bc), 1)
155 + t.equal(b.indexOf(buf_bc, 2), -1)
156 + t.equal(b.indexOf(buf_bc, -1), -1)
157 + t.equal(b.indexOf(buf_bc, -3), -1)
158 + t.equal(b.indexOf(buf_bc, -5), 1)
159 + t.equal(b.indexOf(buf_bc, NaN), 1)
160 + t.equal(b.indexOf(buf_bc, -Infinity), 1)
161 + t.equal(b.indexOf(buf_bc, Infinity), -1)
162 + t.equal(b.indexOf(buf_f), b.length - 1)
163 + t.equal(b.indexOf(buf_z), -1)
164 + t.equal(b.indexOf(0x61), 0)
165 + t.equal(b.indexOf(0x61, 1), -1)
166 + t.equal(b.indexOf(0x61, -1), -1)
167 + t.equal(b.indexOf(0x61, -4), -1)
168 + t.equal(b.indexOf(0x61, -b.length), 0)
169 + t.equal(b.indexOf(0x61, NaN), 0)
170 + t.equal(b.indexOf(0x61, -Infinity), 0)
171 + t.equal(b.indexOf(0x61, Infinity), -1)
172 + t.equal(b.indexOf(0x0), -1)
173 +
174 + // test offsets
175 + t.equal(b.indexOf('d', 2), 3)
176 + t.equal(b.indexOf('f', 5), 5)
177 + t.equal(b.indexOf('f', -1), 5)
178 + t.equal(b.indexOf('f', 6), -1)
179 +
180 + t.equal(b.indexOf(Buffer.from('d'), 2), 3)
181 + t.equal(b.indexOf(Buffer.from('f'), 5), 5)
182 + t.equal(b.indexOf(Buffer.from('f'), -1), 5)
183 + t.equal(b.indexOf(Buffer.from('f'), 6), -1)
184 +
185 + t.equal(Buffer.from('ff').indexOf(Buffer.from('f'), 1, 'ucs2'), -1)
186 +
187 + // test invalid and uppercase encoding
188 + t.equal(b.indexOf('b', 'utf8'), 1)
189 + t.equal(b.indexOf('b', 'UTF8'), 1)
190 + t.equal(b.indexOf('62', 'HEX'), 1)
191 + t.throws(() => b.indexOf('bad', 'enc'), TypeError)
192 +
193 + // test hex encoding
194 + t.equal(
195 + Buffer.from(b.toString('hex'), 'hex')
196 + .indexOf('64', 0, 'hex'),
197 + 3
198 + )
199 + t.equal(
200 + Buffer.from(b.toString('hex'), 'hex')
201 + .indexOf(Buffer.from('64', 'hex'), 0, 'hex'),
202 + 3
203 + )
204 +
205 + // test base64 encoding
206 + t.equal(
207 + Buffer.from(b.toString('base64'), 'base64')
208 + .indexOf('ZA==', 0, 'base64'),
209 + 3
210 + )
211 + t.equal(
212 + Buffer.from(b.toString('base64'), 'base64')
213 + .indexOf(Buffer.from('ZA==', 'base64'), 0, 'base64'),
214 + 3
215 + )
216 +
217 + // test ascii encoding
218 + t.equal(
219 + Buffer.from(b.toString('ascii'), 'ascii')
220 + .indexOf('d', 0, 'ascii'),
221 + 3
222 + )
223 + t.equal(
224 + Buffer.from(b.toString('ascii'), 'ascii')
225 + .indexOf(Buffer.from('d', 'ascii'), 0, 'ascii'),
226 + 3
227 + )
228 +
229 + // test optional offset with passed encoding
230 + t.equal(Buffer.from('aaaa0').indexOf('30', 'hex'), 4)
231 + t.equal(Buffer.from('aaaa00a').indexOf('3030', 'hex'), 4)
232 +
233 + {
234 + // test usc2 encoding
235 + const twoByteString = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'ucs2')
236 +
237 + t.equal(8, twoByteString.indexOf('\u0395', 4, 'ucs2'))
238 + t.equal(6, twoByteString.indexOf('\u03a3', -4, 'ucs2'))
239 + t.equal(4, twoByteString.indexOf('\u03a3', -6, 'ucs2'))
240 + t.equal(4, twoByteString.indexOf(
241 + Buffer.from('\u03a3', 'ucs2'), -6, 'ucs2'))
242 + t.equal(-1, twoByteString.indexOf('\u03a3', -2, 'ucs2'))
243 + }
244 +
245 + const mixedByteStringUcs2 =
246 + Buffer.from('\u039a\u0391abc\u03a3\u03a3\u0395', 'ucs2')
247 + t.equal(6, mixedByteStringUcs2.indexOf('bc', 0, 'ucs2'))
248 + t.equal(10, mixedByteStringUcs2.indexOf('\u03a3', 0, 'ucs2'))
249 + t.equal(-1, mixedByteStringUcs2.indexOf('\u0396', 0, 'ucs2'))
250 +
251 + t.equal(
252 + 6, mixedByteStringUcs2.indexOf(Buffer.from('bc', 'ucs2'), 0, 'ucs2'))
253 + t.equal(
254 + 10, mixedByteStringUcs2.indexOf(Buffer.from('\u03a3', 'ucs2'), 0, 'ucs2'))
255 + t.equal(
256 + -1, mixedByteStringUcs2.indexOf(Buffer.from('\u0396', 'ucs2'), 0, 'ucs2'))
257 +
258 + {
259 + const twoByteString = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'ucs2')
260 +
261 + // Test single char pattern
262 + t.equal(0, twoByteString.indexOf('\u039a', 0, 'ucs2'))
263 + let index = twoByteString.indexOf('\u0391', 0, 'ucs2')
264 + t.equal(2, index, `Alpha - at index ${index}`)
265 + index = twoByteString.indexOf('\u03a3', 0, 'ucs2')
266 + t.equal(4, index, `First Sigma - at index ${index}`)
267 + index = twoByteString.indexOf('\u03a3', 6, 'ucs2')
268 + t.equal(6, index, `Second Sigma - at index ${index}`)
269 + index = twoByteString.indexOf('\u0395', 0, 'ucs2')
270 + t.equal(8, index, `Epsilon - at index ${index}`)
271 + index = twoByteString.indexOf('\u0392', 0, 'ucs2')
272 + t.equal(-1, index, `Not beta - at index ${index}`)
273 +
274 + // Test multi-char pattern
275 + index = twoByteString.indexOf('\u039a\u0391', 0, 'ucs2')
276 + t.equal(0, index, `Lambda Alpha - at index ${index}`)
277 + index = twoByteString.indexOf('\u0391\u03a3', 0, 'ucs2')
278 + t.equal(2, index, `Alpha Sigma - at index ${index}`)
279 + index = twoByteString.indexOf('\u03a3\u03a3', 0, 'ucs2')
280 + t.equal(4, index, `Sigma Sigma - at index ${index}`)
281 + index = twoByteString.indexOf('\u03a3\u0395', 0, 'ucs2')
282 + t.equal(6, index, `Sigma Epsilon - at index ${index}`)
283 + }
284 +
285 + const mixedByteStringUtf8 = Buffer.from('\u039a\u0391abc\u03a3\u03a3\u0395')
286 + t.equal(5, mixedByteStringUtf8.indexOf('bc'))
287 + t.equal(5, mixedByteStringUtf8.indexOf('bc', 5))
288 + t.equal(5, mixedByteStringUtf8.indexOf('bc', -8))
289 + t.equal(7, mixedByteStringUtf8.indexOf('\u03a3'))
290 + t.equal(-1, mixedByteStringUtf8.indexOf('\u0396'))
291 +
292 +
293 + // Test complex string indexOf algorithms. Only trigger for long strings.
294 + // Long string that isn't a simple repeat of a shorter string.
295 + let longString = 'A'
296 + for (let i = 66; i < 76; i++) { // from 'B' to 'K'
297 + longString = longString + String.fromCharCode(i) + longString
298 + }
299 +
300 + const longBufferString = Buffer.from(longString)
301 +
302 + // pattern of 15 chars, repeated every 16 chars in long
303 + let pattern = 'ABACABADABACABA'
304 + for (let i = 0; i < longBufferString.length - pattern.length; i += 7) {
305 + const index = longBufferString.indexOf(pattern, i)
306 + t.equal((i + 15) & ~0xf, index,
307 + `Long ABACABA...-string at index ${i}`)
308 + }
309 +
310 + let index = longBufferString.indexOf('AJABACA')
311 + t.equal(510, index, `Long AJABACA, First J - at index ${index}`)
312 + index = longBufferString.indexOf('AJABACA', 511)
313 + t.equal(1534, index, `Long AJABACA, Second J - at index ${index}`)
314 +
315 + pattern = 'JABACABADABACABA'
316 + index = longBufferString.indexOf(pattern)
317 + t.equal(511, index, `Long JABACABA..., First J - at index ${index}`)
318 + index = longBufferString.indexOf(pattern, 512)
319 + t.equal(
320 + 1535, index, `Long JABACABA..., Second J - at index ${index}`)
321 +
322 + // Search for a non-ASCII string in a pure ASCII string.
323 + const asciiString = Buffer.from(
324 + 'arglebargleglopglyfarglebargleglopglyfarglebargleglopglyf')
325 + t.equal(-1, asciiString.indexOf('\x2061'))
326 + t.equal(3, asciiString.indexOf('leb', 0))
327 +
328 + // Search in string containing many non-ASCII chars.
329 + const allCodePoints = []
330 + for (let i = 0; i < 65536; i++) allCodePoints[i] = i
331 + const allCharsString = String.fromCharCode.apply(String, allCodePoints)
332 + const allCharsBufferUtf8 = Buffer.from(allCharsString)
333 + const allCharsBufferUcs2 = Buffer.from(allCharsString, 'ucs2')
334 +
335 + // Search for string long enough to trigger complex search with ASCII pattern
336 + // and UC16 subject.
337 + t.equal(-1, allCharsBufferUtf8.indexOf('notfound'))
338 + t.equal(-1, allCharsBufferUcs2.indexOf('notfound'))
339 +
340 + // Needle is longer than haystack, but only because it's encoded as UTF-16
341 + t.equal(Buffer.from('aaaa').indexOf('a'.repeat(4), 'ucs2'), -1)
342 +
343 + t.equal(Buffer.from('aaaa').indexOf('a'.repeat(4), 'utf8'), 0)
344 + t.equal(Buffer.from('aaaa').indexOf('你好', 'ucs2'), -1)
345 +
346 + // Haystack has odd length, but the needle is UCS2.
347 + t.equal(Buffer.from('aaaaa').indexOf('b', 'ucs2'), -1)
348 +
349 + {
350 + // Find substrings in Utf8.
351 +    const lengths = [1, 3, 15] // Single char, simple and complex.
352 + const indices = [0x5, 0x60, 0x400, 0x680, 0x7ee, 0xFF02, 0x16610, 0x2f77b]
353 + for (let lengthIndex = 0; lengthIndex < lengths.length; lengthIndex++) {
354 + for (let i = 0; i < indices.length; i++) {
355 + const index = indices[i]
356 + let length = lengths[lengthIndex]
357 +
358 + if (index + length > 0x7F) {
359 + length = 2 * length
360 + }
361 +
362 + if (index + length > 0x7FF) {
363 + length = 3 * length
364 + }
365 +
366 + if (index + length > 0xFFFF) {
367 + length = 4 * length
368 + }
369 +
370 + const patternBufferUtf8 = allCharsBufferUtf8.slice(index, index + length)
371 + t.equal(index, allCharsBufferUtf8.indexOf(patternBufferUtf8))
372 +
373 + const patternStringUtf8 = patternBufferUtf8.toString()
374 + t.equal(index, allCharsBufferUtf8.indexOf(patternStringUtf8))
375 + }
376 + }
377 + }
378 +
379 + {
380 +    // Find substrings in Ucs2.
381 +    const lengths = [2, 4, 16] // Single char, simple and complex.
382 + const indices = [0x5, 0x65, 0x105, 0x205, 0x285, 0x2005, 0x2085, 0xfff0]
383 + for (let lengthIndex = 0; lengthIndex < lengths.length; lengthIndex++) {
384 + for (let i = 0; i < indices.length; i++) {
385 + const index = indices[i] * 2
386 + const length = lengths[lengthIndex]
387 +
388 + const patternBufferUcs2 =
389 + allCharsBufferUcs2.slice(index, index + length)
390 + t.equal(
391 + index, allCharsBufferUcs2.indexOf(patternBufferUcs2, 0, 'ucs2'))
392 +
393 + const patternStringUcs2 = patternBufferUcs2.toString('ucs2')
394 + t.equal(
395 + index, allCharsBufferUcs2.indexOf(patternStringUcs2, 0, 'ucs2'))
396 + }
397 + }
398 + }
399 +
400 + [
401 + () => {},
402 + {},
403 + []
404 + ].forEach(val => {
405 +    // each of these non-string, non-Buffer, non-number values should throw
406 + t.throws(() => b.indexOf(val), TypeError, `"${JSON.stringify(val)}" should throw`)
407 + })
408 +
409 + // Test weird offset arguments.
410 + // The following offsets coerce to NaN or 0, searching the whole Buffer
411 + t.equal(b.indexOf('b', undefined), 1)
412 + t.equal(b.indexOf('b', {}), 1)
413 + t.equal(b.indexOf('b', 0), 1)
414 + t.equal(b.indexOf('b', null), 1)
415 + t.equal(b.indexOf('b', []), 1)
416 +
417 + // The following offset coerces to 2, in other words +[2] === 2
418 + t.equal(b.indexOf('b', [2]), -1)
419 +
420 + // Behavior should match String.indexOf()
421 + t.equal(
422 + b.indexOf('b', undefined),
423 + stringComparison.indexOf('b', undefined))
424 + t.equal(
425 + b.indexOf('b', {}),
426 + stringComparison.indexOf('b', {}))
427 + t.equal(
428 + b.indexOf('b', 0),
429 + stringComparison.indexOf('b', 0))
430 + t.equal(
431 + b.indexOf('b', null),
432 + stringComparison.indexOf('b', null))
433 + t.equal(
434 + b.indexOf('b', []),
435 + stringComparison.indexOf('b', []))
436 + t.equal(
437 + b.indexOf('b', [2]),
438 + stringComparison.indexOf('b', [2]))
439 +
440 + // test truncation of Number arguments to uint8
441 + {
442 + const buf = Buffer.from('this is a test')
443 + t.equal(buf.indexOf(0x6973), 3)
444 + t.equal(buf.indexOf(0x697320), 4)
445 + t.equal(buf.indexOf(0x69732069), 2)
446 + t.equal(buf.indexOf(0x697374657374), 0)
447 + t.equal(buf.indexOf(0x69737374), 0)
448 + t.equal(buf.indexOf(0x69737465), 11)
449 + t.equal(buf.indexOf(0x69737465), 11)
450 + t.equal(buf.indexOf(-140), 0)
451 + t.equal(buf.indexOf(-152), 1)
452 + t.equal(buf.indexOf(0xff), -1)
453 + t.equal(buf.indexOf(0xffff), -1)
454 + }
455 +
456 + // Test that Uint8Array arguments are okay.
457 + {
458 + const needle = new Uint8Array([ 0x66, 0x6f, 0x6f ])
459 + const haystack = new BufferList(Buffer.from('a foo b foo'))
460 + t.equal(haystack.indexOf(needle), 2)
461 + }
462 + t.end()
463 +})
node_modules/bl/test/test.jsView
@@ -1,0 +1,780 @@
1 +'use strict'
2 +
3 +var tape = require('tape')
4 + , crypto = require('crypto')
5 + , fs = require('fs')
6 + , hash = require('hash_file')
7 + , BufferList = require('../')
8 + , Buffer = require('safe-buffer').Buffer
9 +
10 + , encodings =
11 + ('hex utf8 utf-8 ascii binary base64'
12 + + (process.browser ? '' : ' ucs2 ucs-2 utf16le utf-16le')).split(' ')
13 +
14 +// run the indexOf tests
15 +require('./indexOf')
16 +
17 +tape('single bytes from single buffer', function (t) {
18 + var bl = new BufferList()
19 + bl.append(Buffer.from('abcd'))
20 +
21 + t.equal(bl.length, 4)
22 + t.equal(bl.get(-1), undefined)
23 + t.equal(bl.get(0), 97)
24 + t.equal(bl.get(1), 98)
25 + t.equal(bl.get(2), 99)
26 + t.equal(bl.get(3), 100)
27 + t.equal(bl.get(4), undefined)
28 +
29 + t.end()
30 +})
31 +
32 +tape('single bytes from multiple buffers', function (t) {
33 + var bl = new BufferList()
34 + bl.append(Buffer.from('abcd'))
35 + bl.append(Buffer.from('efg'))
36 + bl.append(Buffer.from('hi'))
37 + bl.append(Buffer.from('j'))
38 +
39 + t.equal(bl.length, 10)
40 +
41 + t.equal(bl.get(0), 97)
42 + t.equal(bl.get(1), 98)
43 + t.equal(bl.get(2), 99)
44 + t.equal(bl.get(3), 100)
45 + t.equal(bl.get(4), 101)
46 + t.equal(bl.get(5), 102)
47 + t.equal(bl.get(6), 103)
48 + t.equal(bl.get(7), 104)
49 + t.equal(bl.get(8), 105)
50 + t.equal(bl.get(9), 106)
51 + t.end()
52 +})
53 +
54 +tape('multi bytes from single buffer', function (t) {
55 + var bl = new BufferList()
56 + bl.append(Buffer.from('abcd'))
57 +
58 + t.equal(bl.length, 4)
59 +
60 + t.equal(bl.slice(0, 4).toString('ascii'), 'abcd')
61 + t.equal(bl.slice(0, 3).toString('ascii'), 'abc')
62 + t.equal(bl.slice(1, 4).toString('ascii'), 'bcd')
63 + t.equal(bl.slice(-4, -1).toString('ascii'), 'abc')
64 +
65 + t.end()
66 +})
67 +
68 +tape('multi bytes from single buffer (negative indexes)', function (t) {
69 + var bl = new BufferList()
70 + bl.append(Buffer.from('buffer'))
71 +
72 + t.equal(bl.length, 6)
73 +
74 + t.equal(bl.slice(-6, -1).toString('ascii'), 'buffe')
75 + t.equal(bl.slice(-6, -2).toString('ascii'), 'buff')
76 + t.equal(bl.slice(-5, -2).toString('ascii'), 'uff')
77 +
78 + t.end()
79 +})
80 +
81 +tape('multiple bytes from multiple buffers', function (t) {
82 + var bl = new BufferList()
83 +
84 + bl.append(Buffer.from('abcd'))
85 + bl.append(Buffer.from('efg'))
86 + bl.append(Buffer.from('hi'))
87 + bl.append(Buffer.from('j'))
88 +
89 + t.equal(bl.length, 10)
90 +
91 + t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij')
92 + t.equal(bl.slice(3, 10).toString('ascii'), 'defghij')
93 + t.equal(bl.slice(3, 6).toString('ascii'), 'def')
94 + t.equal(bl.slice(3, 8).toString('ascii'), 'defgh')
95 + t.equal(bl.slice(5, 10).toString('ascii'), 'fghij')
96 + t.equal(bl.slice(-7, -4).toString('ascii'), 'def')
97 +
98 + t.end()
99 +})
100 +
101 +tape('multiple bytes from multiple buffer lists', function (t) {
102 + var bl = new BufferList()
103 +
104 + bl.append(new BufferList([ Buffer.from('abcd'), Buffer.from('efg') ]))
105 + bl.append(new BufferList([ Buffer.from('hi'), Buffer.from('j') ]))
106 +
107 + t.equal(bl.length, 10)
108 +
109 + t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij')
110 +
111 + t.equal(bl.slice(3, 10).toString('ascii'), 'defghij')
112 + t.equal(bl.slice(3, 6).toString('ascii'), 'def')
113 + t.equal(bl.slice(3, 8).toString('ascii'), 'defgh')
114 + t.equal(bl.slice(5, 10).toString('ascii'), 'fghij')
115 +
116 + t.end()
117 +})
118 +
119 +// same data as previous test, just using nested constructors
120 +tape('multiple bytes from crazy nested buffer lists', function (t) {
121 + var bl = new BufferList()
122 +
123 + bl.append(new BufferList([
124 + new BufferList([
125 + new BufferList(Buffer.from('abc'))
126 + , Buffer.from('d')
127 + , new BufferList(Buffer.from('efg'))
128 + ])
129 + , new BufferList([ Buffer.from('hi') ])
130 + , new BufferList(Buffer.from('j'))
131 + ]))
132 +
133 + t.equal(bl.length, 10)
134 +
135 + t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij')
136 +
137 + t.equal(bl.slice(3, 10).toString('ascii'), 'defghij')
138 + t.equal(bl.slice(3, 6).toString('ascii'), 'def')
139 + t.equal(bl.slice(3, 8).toString('ascii'), 'defgh')
140 + t.equal(bl.slice(5, 10).toString('ascii'), 'fghij')
141 +
142 + t.end()
143 +})
144 +
145 +tape('append accepts arrays of Buffers', function (t) {
146 + var bl = new BufferList()
147 + bl.append(Buffer.from('abc'))
148 + bl.append([ Buffer.from('def') ])
149 + bl.append([ Buffer.from('ghi'), Buffer.from('jkl') ])
150 + bl.append([ Buffer.from('mnop'), Buffer.from('qrstu'), Buffer.from('vwxyz') ])
151 + t.equal(bl.length, 26)
152 + t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz')
153 + t.end()
154 +})
155 +
156 +tape('append accepts arrays of BufferLists', function (t) {
157 + var bl = new BufferList()
158 + bl.append(Buffer.from('abc'))
159 + bl.append([ new BufferList('def') ])
160 + bl.append(new BufferList([ Buffer.from('ghi'), new BufferList('jkl') ]))
161 + bl.append([ Buffer.from('mnop'), new BufferList([ Buffer.from('qrstu'), Buffer.from('vwxyz') ]) ])
162 + t.equal(bl.length, 26)
163 + t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz')
164 + t.end()
165 +})
166 +
167 +tape('append chainable', function (t) {
168 + var bl = new BufferList()
169 + t.ok(bl.append(Buffer.from('abcd')) === bl)
170 + t.ok(bl.append([ Buffer.from('abcd') ]) === bl)
171 + t.ok(bl.append(new BufferList(Buffer.from('abcd'))) === bl)
172 + t.ok(bl.append([ new BufferList(Buffer.from('abcd')) ]) === bl)
173 + t.end()
174 +})
175 +
176 +tape('append chainable (test results)', function (t) {
177 + var bl = new BufferList('abc')
178 + .append([ new BufferList('def') ])
179 + .append(new BufferList([ Buffer.from('ghi'), new BufferList('jkl') ]))
180 + .append([ Buffer.from('mnop'), new BufferList([ Buffer.from('qrstu'), Buffer.from('vwxyz') ]) ])
181 +
182 + t.equal(bl.length, 26)
183 + t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz')
184 + t.end()
185 +})
186 +
187 +tape('consuming from multiple buffers', function (t) {
188 + var bl = new BufferList()
189 +
190 + bl.append(Buffer.from('abcd'))
191 + bl.append(Buffer.from('efg'))
192 + bl.append(Buffer.from('hi'))
193 + bl.append(Buffer.from('j'))
194 +
195 + t.equal(bl.length, 10)
196 +
197 + t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij')
198 +
199 + bl.consume(3)
200 + t.equal(bl.length, 7)
201 + t.equal(bl.slice(0, 7).toString('ascii'), 'defghij')
202 +
203 + bl.consume(2)
204 + t.equal(bl.length, 5)
205 + t.equal(bl.slice(0, 5).toString('ascii'), 'fghij')
206 +
207 + bl.consume(1)
208 + t.equal(bl.length, 4)
209 + t.equal(bl.slice(0, 4).toString('ascii'), 'ghij')
210 +
211 + bl.consume(1)
212 + t.equal(bl.length, 3)
213 + t.equal(bl.slice(0, 3).toString('ascii'), 'hij')
214 +
215 + bl.consume(2)
216 + t.equal(bl.length, 1)
217 + t.equal(bl.slice(0, 1).toString('ascii'), 'j')
218 +
219 + t.end()
220 +})
221 +
222 +tape('complete consumption', function (t) {
223 + var bl = new BufferList()
224 +
225 + bl.append(Buffer.from('a'))
226 + bl.append(Buffer.from('b'))
227 +
228 + bl.consume(2)
229 +
230 + t.equal(bl.length, 0)
231 + t.equal(bl._bufs.length, 0)
232 +
233 + t.end()
234 +})
235 +
236 +tape('test readUInt8 / readInt8', function (t) {
237 + var buf1 = Buffer.alloc(1)
238 + , buf2 = Buffer.alloc(3)
239 + , buf3 = Buffer.alloc(3)
240 + , bl = new BufferList()
241 +
242 + buf2[1] = 0x3
243 + buf2[2] = 0x4
244 + buf3[0] = 0x23
245 + buf3[1] = 0x42
246 +
247 + bl.append(buf1)
248 + bl.append(buf2)
249 + bl.append(buf3)
250 +
251 + t.equal(bl.readUInt8(2), 0x3)
252 + t.equal(bl.readInt8(2), 0x3)
253 + t.equal(bl.readUInt8(3), 0x4)
254 + t.equal(bl.readInt8(3), 0x4)
255 + t.equal(bl.readUInt8(4), 0x23)
256 + t.equal(bl.readInt8(4), 0x23)
257 + t.equal(bl.readUInt8(5), 0x42)
258 + t.equal(bl.readInt8(5), 0x42)
259 + t.end()
260 +})
261 +
262 +tape('test readUInt16LE / readUInt16BE / readInt16LE / readInt16BE', function (t) {
263 + var buf1 = Buffer.alloc(1)
264 + , buf2 = Buffer.alloc(3)
265 + , buf3 = Buffer.alloc(3)
266 + , bl = new BufferList()
267 +
268 + buf2[1] = 0x3
269 + buf2[2] = 0x4
270 + buf3[0] = 0x23
271 + buf3[1] = 0x42
272 +
273 + bl.append(buf1)
274 + bl.append(buf2)
275 + bl.append(buf3)
276 +
277 + t.equal(bl.readUInt16BE(2), 0x0304)
278 + t.equal(bl.readUInt16LE(2), 0x0403)
279 + t.equal(bl.readInt16BE(2), 0x0304)
280 + t.equal(bl.readInt16LE(2), 0x0403)
281 + t.equal(bl.readUInt16BE(3), 0x0423)
282 + t.equal(bl.readUInt16LE(3), 0x2304)
283 + t.equal(bl.readInt16BE(3), 0x0423)
284 + t.equal(bl.readInt16LE(3), 0x2304)
285 + t.equal(bl.readUInt16BE(4), 0x2342)
286 + t.equal(bl.readUInt16LE(4), 0x4223)
287 + t.equal(bl.readInt16BE(4), 0x2342)
288 + t.equal(bl.readInt16LE(4), 0x4223)
289 + t.end()
290 +})
291 +
292 +tape('test readUInt32LE / readUInt32BE / readInt32LE / readInt32BE', function (t) {
293 + var buf1 = Buffer.alloc(1)
294 + , buf2 = Buffer.alloc(3)
295 + , buf3 = Buffer.alloc(3)
296 + , bl = new BufferList()
297 +
298 + buf2[1] = 0x3
299 + buf2[2] = 0x4
300 + buf3[0] = 0x23
301 + buf3[1] = 0x42
302 +
303 + bl.append(buf1)
304 + bl.append(buf2)
305 + bl.append(buf3)
306 +
307 + t.equal(bl.readUInt32BE(2), 0x03042342)
308 + t.equal(bl.readUInt32LE(2), 0x42230403)
309 + t.equal(bl.readInt32BE(2), 0x03042342)
310 + t.equal(bl.readInt32LE(2), 0x42230403)
311 + t.end()
312 +})
313 +
314 +tape('test readUIntLE / readUIntBE / readIntLE / readIntBE', function (t) {
315 + var buf1 = Buffer.alloc(1)
316 + , buf2 = Buffer.alloc(3)
317 + , buf3 = Buffer.alloc(3)
318 + , bl = new BufferList()
319 +
320 + buf2[0] = 0x2
321 + buf2[1] = 0x3
322 + buf2[2] = 0x4
323 + buf3[0] = 0x23
324 + buf3[1] = 0x42
325 + buf3[2] = 0x61
326 +
327 + bl.append(buf1)
328 + bl.append(buf2)
329 + bl.append(buf3)
330 +
331 + t.equal(bl.readUIntBE(1, 1), 0x02)
332 + t.equal(bl.readUIntBE(1, 2), 0x0203)
333 + t.equal(bl.readUIntBE(1, 3), 0x020304)
334 + t.equal(bl.readUIntBE(1, 4), 0x02030423)
335 + t.equal(bl.readUIntBE(1, 5), 0x0203042342)
336 + t.equal(bl.readUIntBE(1, 6), 0x020304234261)
337 + t.equal(bl.readUIntLE(1, 1), 0x02)
338 + t.equal(bl.readUIntLE(1, 2), 0x0302)
339 + t.equal(bl.readUIntLE(1, 3), 0x040302)
340 + t.equal(bl.readUIntLE(1, 4), 0x23040302)
341 + t.equal(bl.readUIntLE(1, 5), 0x4223040302)
342 + t.equal(bl.readUIntLE(1, 6), 0x614223040302)
343 + t.equal(bl.readIntBE(1, 1), 0x02)
344 + t.equal(bl.readIntBE(1, 2), 0x0203)
345 + t.equal(bl.readIntBE(1, 3), 0x020304)
346 + t.equal(bl.readIntBE(1, 4), 0x02030423)
347 + t.equal(bl.readIntBE(1, 5), 0x0203042342)
348 + t.equal(bl.readIntBE(1, 6), 0x020304234261)
349 + t.equal(bl.readIntLE(1, 1), 0x02)
350 + t.equal(bl.readIntLE(1, 2), 0x0302)
351 + t.equal(bl.readIntLE(1, 3), 0x040302)
352 + t.equal(bl.readIntLE(1, 4), 0x23040302)
353 + t.equal(bl.readIntLE(1, 5), 0x4223040302)
354 + t.equal(bl.readIntLE(1, 6), 0x614223040302)
355 + t.end()
356 +})
357 +
358 +tape('test readFloatLE / readFloatBE', function (t) {
359 + var buf1 = Buffer.alloc(1)
360 + , buf2 = Buffer.alloc(3)
361 + , buf3 = Buffer.alloc(3)
362 + , bl = new BufferList()
363 +
364 + buf2[1] = 0x00
365 + buf2[2] = 0x00
366 + buf3[0] = 0x80
367 + buf3[1] = 0x3f
368 +
369 + bl.append(buf1)
370 + bl.append(buf2)
371 + bl.append(buf3)
372 +
373 + t.equal(bl.readFloatLE(2), 0x01)
374 + t.end()
375 +})
376 +
377 +tape('test readDoubleLE / readDoubleBE', function (t) {
378 + var buf1 = Buffer.alloc(1)
379 + , buf2 = Buffer.alloc(3)
380 + , buf3 = Buffer.alloc(10)
381 + , bl = new BufferList()
382 +
383 + buf2[1] = 0x55
384 + buf2[2] = 0x55
385 + buf3[0] = 0x55
386 + buf3[1] = 0x55
387 + buf3[2] = 0x55
388 + buf3[3] = 0x55
389 + buf3[4] = 0xd5
390 + buf3[5] = 0x3f
391 +
392 + bl.append(buf1)
393 + bl.append(buf2)
394 + bl.append(buf3)
395 +
396 + t.equal(bl.readDoubleLE(2), 0.3333333333333333)
397 + t.end()
398 +})
399 +
400 +tape('test toString', function (t) {
401 + var bl = new BufferList()
402 +
403 + bl.append(Buffer.from('abcd'))
404 + bl.append(Buffer.from('efg'))
405 + bl.append(Buffer.from('hi'))
406 + bl.append(Buffer.from('j'))
407 +
408 + t.equal(bl.toString('ascii', 0, 10), 'abcdefghij')
409 + t.equal(bl.toString('ascii', 3, 10), 'defghij')
410 + t.equal(bl.toString('ascii', 3, 6), 'def')
411 + t.equal(bl.toString('ascii', 3, 8), 'defgh')
412 + t.equal(bl.toString('ascii', 5, 10), 'fghij')
413 +
414 + t.end()
415 +})
416 +
417 +tape('test toString encoding', function (t) {
418 + var bl = new BufferList()
419 + , b = Buffer.from('abcdefghij\xff\x00')
420 +
421 + bl.append(Buffer.from('abcd'))
422 + bl.append(Buffer.from('efg'))
423 + bl.append(Buffer.from('hi'))
424 + bl.append(Buffer.from('j'))
425 + bl.append(Buffer.from('\xff\x00'))
426 +
427 + encodings.forEach(function (enc) {
428 + t.equal(bl.toString(enc), b.toString(enc), enc)
429 + })
430 +
431 + t.end()
432 +})
433 +
434 +!process.browser && tape('test stream', function (t) {
435 + var random = crypto.randomBytes(65534)
436 + , rndhash = hash(random, 'md5')
437 + , md5sum = crypto.createHash('md5')
438 + , bl = new BufferList(function (err, buf) {
439 + t.ok(Buffer.isBuffer(buf))
440 + t.ok(err === null)
441 + t.equal(rndhash, hash(bl.slice(), 'md5'))
442 + t.equal(rndhash, hash(buf, 'md5'))
443 +
444 + bl.pipe(fs.createWriteStream('/tmp/bl_test_rnd_out.dat'))
445 + .on('close', function () {
446 + var s = fs.createReadStream('/tmp/bl_test_rnd_out.dat')
447 + s.on('data', md5sum.update.bind(md5sum))
448 + s.on('end', function() {
449 + t.equal(rndhash, md5sum.digest('hex'), 'woohoo! correct hash!')
450 + t.end()
451 + })
452 + })
453 +
454 + })
455 +
456 + fs.writeFileSync('/tmp/bl_test_rnd.dat', random)
457 + fs.createReadStream('/tmp/bl_test_rnd.dat').pipe(bl)
458 +})
459 +
460 +tape('instantiation with Buffer', function (t) {
461 + var buf = crypto.randomBytes(1024)
462 + , buf2 = crypto.randomBytes(1024)
463 + , b = BufferList(buf)
464 +
465 + t.equal(buf.toString('hex'), b.slice().toString('hex'), 'same buffer')
466 + b = BufferList([ buf, buf2 ])
467 + t.equal(b.slice().toString('hex'), Buffer.concat([ buf, buf2 ]).toString('hex'), 'same buffer')
468 + t.end()
469 +})
470 +
471 +tape('test String appendage', function (t) {
472 + var bl = new BufferList()
473 + , b = Buffer.from('abcdefghij\xff\x00')
474 +
475 + bl.append('abcd')
476 + bl.append('efg')
477 + bl.append('hi')
478 + bl.append('j')
479 + bl.append('\xff\x00')
480 +
481 + encodings.forEach(function (enc) {
482 + t.equal(bl.toString(enc), b.toString(enc))
483 + })
484 +
485 + t.end()
486 +})
487 +
488 +tape('test Number appendage', function (t) {
489 + var bl = new BufferList()
490 + , b = Buffer.from('1234567890')
491 +
492 + bl.append(1234)
493 + bl.append(567)
494 + bl.append(89)
495 + bl.append(0)
496 +
497 + encodings.forEach(function (enc) {
498 + t.equal(bl.toString(enc), b.toString(enc))
499 + })
500 +
501 + t.end()
502 +})
503 +
504 +tape('write nothing, should get empty buffer', function (t) {
505 + t.plan(3)
506 + BufferList(function (err, data) {
507 + t.notOk(err, 'no error')
508 + t.ok(Buffer.isBuffer(data), 'got a buffer')
509 + t.equal(0, data.length, 'got a zero-length buffer')
510 + t.end()
511 + }).end()
512 +})
513 +
514 +tape('unicode string', function (t) {
515 + t.plan(2)
516 + var inp1 = '\u2600'
517 + , inp2 = '\u2603'
518 + , exp = inp1 + ' and ' + inp2
519 + , bl = BufferList()
520 + bl.write(inp1)
521 + bl.write(' and ')
522 + bl.write(inp2)
523 + t.equal(exp, bl.toString())
524 + t.equal(Buffer.from(exp).toString('hex'), bl.toString('hex'))
525 +})
526 +
527 +tape('should emit finish', function (t) {
528 + var source = BufferList()
529 + , dest = BufferList()
530 +
531 + source.write('hello')
532 + source.pipe(dest)
533 +
534 + dest.on('finish', function () {
535 + t.equal(dest.toString('utf8'), 'hello')
536 + t.end()
537 + })
538 +})
539 +
540 +tape('basic copy', function (t) {
541 + var buf = crypto.randomBytes(1024)
542 + , buf2 = Buffer.alloc(1024)
543 + , b = BufferList(buf)
544 +
545 + b.copy(buf2)
546 + t.equal(b.slice().toString('hex'), buf2.toString('hex'), 'same buffer')
547 + t.end()
548 +})
549 +
550 +tape('copy after many appends', function (t) {
551 + var buf = crypto.randomBytes(512)
552 + , buf2 = Buffer.alloc(1024)
553 + , b = BufferList(buf)
554 +
555 + b.append(buf)
556 + b.copy(buf2)
557 + t.equal(b.slice().toString('hex'), buf2.toString('hex'), 'same buffer')
558 + t.end()
559 +})
560 +
561 +tape('copy at a precise position', function (t) {
562 + var buf = crypto.randomBytes(1004)
563 + , buf2 = Buffer.alloc(1024)
564 + , b = BufferList(buf)
565 +
566 + b.copy(buf2, 20)
567 + t.equal(b.slice().toString('hex'), buf2.slice(20).toString('hex'), 'same buffer')
568 + t.end()
569 +})
570 +
571 +tape('copy starting from a precise location', function (t) {
572 + var buf = crypto.randomBytes(10)
573 + , buf2 = Buffer.alloc(5)
574 + , b = BufferList(buf)
575 +
576 + b.copy(buf2, 0, 5)
577 + t.equal(b.slice(5).toString('hex'), buf2.toString('hex'), 'same buffer')
578 + t.end()
579 +})
580 +
581 +tape('copy in an interval', function (t) {
582 + var rnd = crypto.randomBytes(10)
583 + , b = BufferList(rnd) // put the random bytes there
584 + , actual = Buffer.alloc(3)
585 + , expected = Buffer.alloc(3)
586 +
587 + rnd.copy(expected, 0, 5, 8)
588 + b.copy(actual, 0, 5, 8)
589 +
590 + t.equal(actual.toString('hex'), expected.toString('hex'), 'same buffer')
591 + t.end()
592 +})
593 +
594 +tape('copy an interval between two buffers', function (t) {
595 + var buf = crypto.randomBytes(10)
596 + , buf2 = Buffer.alloc(10)
597 + , b = BufferList(buf)
598 +
599 + b.append(buf)
600 + b.copy(buf2, 0, 5, 15)
601 +
602 + t.equal(b.slice(5, 15).toString('hex'), buf2.toString('hex'), 'same buffer')
603 + t.end()
604 +})
605 +
606 +tape('shallow slice across buffer boundaries', function (t) {
607 + var bl = new BufferList(['First', 'Second', 'Third'])
608 +
609 + t.equal(bl.shallowSlice(3, 13).toString(), 'stSecondTh')
610 + t.end()
611 +})
612 +
613 +tape('shallow slice within single buffer', function (t) {
614 + t.plan(2)
615 + var bl = new BufferList(['First', 'Second', 'Third'])
616 +
617 + t.equal(bl.shallowSlice(5, 10).toString(), 'Secon')
618 + t.equal(bl.shallowSlice(7, 10).toString(), 'con')
619 + t.end()
620 +})
621 +
622 +tape('shallow slice single buffer', function (t) {
623 + t.plan(3)
624 + var bl = new BufferList(['First', 'Second', 'Third'])
625 +
626 + t.equal(bl.shallowSlice(0, 5).toString(), 'First')
627 + t.equal(bl.shallowSlice(5, 11).toString(), 'Second')
628 + t.equal(bl.shallowSlice(11, 16).toString(), 'Third')
629 +})
630 +
631 +tape('shallow slice with negative or omitted indices', function (t) {
632 + t.plan(4)
633 + var bl = new BufferList(['First', 'Second', 'Third'])
634 +
635 + t.equal(bl.shallowSlice().toString(), 'FirstSecondThird')
636 + t.equal(bl.shallowSlice(5).toString(), 'SecondThird')
637 + t.equal(bl.shallowSlice(5, -3).toString(), 'SecondTh')
638 + t.equal(bl.shallowSlice(-8).toString(), 'ondThird')
639 +})
640 +
641 +tape('shallow slice does not make a copy', function (t) {
642 + t.plan(1)
643 + var buffers = [Buffer.from('First'), Buffer.from('Second'), Buffer.from('Third')]
644 + var bl = (new BufferList(buffers)).shallowSlice(5, -3)
645 +
646 + buffers[1].fill('h')
647 + buffers[2].fill('h')
648 +
649 + t.equal(bl.toString(), 'hhhhhhhh')
650 +})
651 +
652 +tape('shallow slice with 0 length', function (t) {
653 + t.plan(1)
654 + var buffers = [Buffer.from('First'), Buffer.from('Second'), Buffer.from('Third')]
655 + var bl = (new BufferList(buffers)).shallowSlice(0, 0)
656 + t.equal(bl.length, 0)
657 +})
658 +
659 +tape('shallow slice with 0 length from middle', function (t) {
660 + t.plan(1)
661 + var buffers = [Buffer.from('First'), Buffer.from('Second'), Buffer.from('Third')]
662 + var bl = (new BufferList(buffers)).shallowSlice(10, 10)
663 + t.equal(bl.length, 0)
664 +})
665 +
666 +tape('duplicate', function (t) {
667 + t.plan(2)
668 +
669 + var bl = new BufferList('abcdefghij\xff\x00')
670 + , dup = bl.duplicate()
671 +
672 + t.equal(bl.prototype, dup.prototype)
673 + t.equal(bl.toString('hex'), dup.toString('hex'))
674 +})
675 +
676 +tape('destroy no pipe', function (t) {
677 + t.plan(2)
678 +
679 + var bl = new BufferList('alsdkfja;lsdkfja;lsdk')
680 + bl.destroy()
681 +
682 + t.equal(bl._bufs.length, 0)
683 + t.equal(bl.length, 0)
684 +})
685 +
686 +tape('destroy with error', function (t) {
687 + t.plan(3)
688 +
689 + var bl = new BufferList('alsdkfja;lsdkfja;lsdk')
690 + var err = new Error('kaboom')
691 + bl.destroy(err)
692 + bl.on('error', function (_err) {
693 + t.equal(_err, err)
694 + })
695 +
696 + t.equal(bl._bufs.length, 0)
697 + t.equal(bl.length, 0)
698 +})
699 +
700 +!process.browser && tape('destroy with pipe before read end', function (t) {
701 + t.plan(2)
702 +
703 + var bl = new BufferList()
704 + fs.createReadStream(__dirname + '/test.js')
705 + .pipe(bl)
706 +
707 + bl.destroy()
708 +
709 + t.equal(bl._bufs.length, 0)
710 + t.equal(bl.length, 0)
711 +
712 +})
713 +
714 +!process.browser && tape('destroy with pipe before read end with race', function (t) {
715 + t.plan(2)
716 +
717 + var bl = new BufferList()
718 + fs.createReadStream(__dirname + '/test.js')
719 + .pipe(bl)
720 +
721 + setTimeout(function () {
722 + bl.destroy()
723 + setTimeout(function () {
724 + t.equal(bl._bufs.length, 0)
725 + t.equal(bl.length, 0)
726 + }, 500)
727 + }, 500)
728 +})
729 +
730 +!process.browser && tape('destroy with pipe after read end', function (t) {
731 + t.plan(2)
732 +
733 + var bl = new BufferList()
734 + fs.createReadStream(__dirname + '/test.js')
735 + .on('end', onEnd)
736 + .pipe(bl)
737 +
738 + function onEnd () {
739 + bl.destroy()
740 +
741 + t.equal(bl._bufs.length, 0)
742 + t.equal(bl.length, 0)
743 + }
744 +})
745 +
746 +!process.browser && tape('destroy with pipe while writing to a destination', function (t) {
747 + t.plan(4)
748 +
749 + var bl = new BufferList()
750 + , ds = new BufferList()
751 +
752 + fs.createReadStream(__dirname + '/test.js')
753 + .on('end', onEnd)
754 + .pipe(bl)
755 +
756 + function onEnd () {
757 + bl.pipe(ds)
758 +
759 + setTimeout(function () {
760 + bl.destroy()
761 +
762 + t.equals(bl._bufs.length, 0)
763 + t.equals(bl.length, 0)
764 +
765 + ds.destroy()
766 +
767 + t.equals(bl._bufs.length, 0)
768 + t.equals(bl.length, 0)
769 +
770 + }, 100)
771 + }
772 +})
773 +
774 +!process.browser && tape('handle error', function (t) {
775 + t.plan(2)
776 + fs.createReadStream('/does/not/exist').pipe(BufferList(function (err, data) {
777 + t.ok(err instanceof Error, 'has error')
778 + t.notOk(data, 'no data')
779 + }))
780 +})
node_modules/end-of-stream/LICENSEView
@@ -1,0 +1,21 @@
1 +The MIT License (MIT)
2 +
3 +Copyright (c) 2014 Mathias Buus
4 +
5 +Permission is hereby granted, free of charge, to any person obtaining a copy
6 +of this software and associated documentation files (the "Software"), to deal
7 +in the Software without restriction, including without limitation the rights
8 +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 +copies of the Software, and to permit persons to whom the Software is
10 +furnished to do so, subject to the following conditions:
11 +
12 +The above copyright notice and this permission notice shall be included in
13 +all copies or substantial portions of the Software.
14 +
15 +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
21 +THE SOFTWARE.
node_modules/end-of-stream/README.mdView
@@ -1,0 +1,52 @@
1 +# end-of-stream
2 +
3 +A node module that calls a callback when a readable/writable/duplex stream has completed or failed.
4 +
5 + npm install end-of-stream
6 +
7 +## Usage
8 +
9 +Simply pass a stream and a callback to `eos`.
10 +Legacy streams, streams2, and streams3 are all supported.
11 +
12 +``` js
13 +var eos = require('end-of-stream');
14 +
15 +eos(readableStream, function(err) {
16 + // this will be set to the stream instance
17 + if (err) return console.log('stream had an error or closed early');
18 + console.log('stream has ended', this === readableStream);
19 +});
20 +
21 +eos(writableStream, function(err) {
22 + if (err) return console.log('stream had an error or closed early');
23 + console.log('stream has finished', this === writableStream);
24 +});
25 +
26 +eos(duplexStream, function(err) {
27 + if (err) return console.log('stream had an error or closed early');
28 + console.log('stream has ended and finished', this === duplexStream);
29 +});
30 +
31 +eos(duplexStream, {readable:false}, function(err) {
32 + if (err) return console.log('stream had an error or closed early');
33 + console.log('stream has finished but might still be readable');
34 +});
35 +
36 +eos(duplexStream, {writable:false}, function(err) {
37 + if (err) return console.log('stream had an error or closed early');
38 + console.log('stream has ended but might still be writable');
39 +});
40 +
41 +eos(readableStream, {error:false}, function(err) {
42 + // do not treat emit('error', err) as an end-of-stream
43 +});
44 +```
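As a side note, `eos` also returns a function that removes the listeners it attached (see `index.js`). A minimal sketch, where `someStream` is a placeholder for any stream you may stop caring about:

``` js
var eos = require('end-of-stream');

// `someStream` stands in for any readable/writable/duplex stream
var detach = eos(someStream, function(err) {
  if (err) return console.log('stream had an error or closed early');
  console.log('stream finished');
});

// later, if the outcome no longer matters, remove the listeners
detach();
```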
45 +
46 +## License
47 +
48 +MIT
49 +
50 +## Related
51 +
52 +`end-of-stream` is part of the [mississippi stream utility collection](https://github.com/maxogden/mississippi) which includes more useful stream modules similar to this one.
node_modules/end-of-stream/index.jsView
@@ -1,0 +1,87 @@
1 +var once = require('once');
2 +
3 +var noop = function() {};
4 +
5 +var isRequest = function(stream) {
6 + return stream.setHeader && typeof stream.abort === 'function';
7 +};
8 +
9 +var isChildProcess = function(stream) {
10 + return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3
11 +};
12 +
13 +var eos = function(stream, opts, callback) {
14 + if (typeof opts === 'function') return eos(stream, null, opts);
15 + if (!opts) opts = {};
16 +
17 + callback = once(callback || noop);
18 +
19 + var ws = stream._writableState;
20 + var rs = stream._readableState;
21 + var readable = opts.readable || (opts.readable !== false && stream.readable);
22 + var writable = opts.writable || (opts.writable !== false && stream.writable);
23 +
24 + var onlegacyfinish = function() {
25 + if (!stream.writable) onfinish();
26 + };
27 +
28 + var onfinish = function() {
29 + writable = false;
30 + if (!readable) callback.call(stream);
31 + };
32 +
33 + var onend = function() {
34 + readable = false;
35 + if (!writable) callback.call(stream);
36 + };
37 +
38 + var onexit = function(exitCode) {
39 + callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null);
40 + };
41 +
42 + var onerror = function(err) {
43 + callback.call(stream, err);
44 + };
45 +
46 + var onclose = function() {
47 + if (readable && !(rs && rs.ended)) return callback.call(stream, new Error('premature close'));
48 + if (writable && !(ws && ws.ended)) return callback.call(stream, new Error('premature close'));
49 + };
50 +
51 + var onrequest = function() {
52 + stream.req.on('finish', onfinish);
53 + };
54 +
55 + if (isRequest(stream)) {
56 + stream.on('complete', onfinish);
57 + stream.on('abort', onclose);
58 + if (stream.req) onrequest();
59 + else stream.on('request', onrequest);
60 + } else if (writable && !ws) { // legacy streams
61 + stream.on('end', onlegacyfinish);
62 + stream.on('close', onlegacyfinish);
63 + }
64 +
65 + if (isChildProcess(stream)) stream.on('exit', onexit);
66 +
67 + stream.on('end', onend);
68 + stream.on('finish', onfinish);
69 + if (opts.error !== false) stream.on('error', onerror);
70 + stream.on('close', onclose);
71 +
72 + return function() {
73 + stream.removeListener('complete', onfinish);
74 + stream.removeListener('abort', onclose);
75 + stream.removeListener('request', onrequest);
76 + if (stream.req) stream.req.removeListener('finish', onfinish);
77 + stream.removeListener('end', onlegacyfinish);
78 + stream.removeListener('close', onlegacyfinish);
79 + stream.removeListener('finish', onfinish);
80 + stream.removeListener('exit', onexit);
81 + stream.removeListener('end', onend);
82 + stream.removeListener('error', onerror);
83 + stream.removeListener('close', onclose);
84 + };
85 +};
86 +
87 +module.exports = eos;
node_modules/end-of-stream/package.jsonView
@@ -1,0 +1,65 @@
1 +{
2 + "_args": [
3 + [
4 + "end-of-stream@1.4.1",
5 + "/home/cel/src/ssb-npm-registry"
6 + ]
7 + ],
8 + "_from": "end-of-stream@1.4.1",
9 + "_id": "end-of-stream@1.4.1",
10 + "_inBundle": false,
11 + "_integrity": "sha256-P8z5hny+y3o1XkPZydXCy+IWvXs1Uy1SttnMErC6OF4=",
12 + "_location": "/end-of-stream",
13 + "_phantomChildren": {},
14 + "_requested": {
15 + "type": "version",
16 + "registry": true,
17 + "raw": "end-of-stream@1.4.1",
18 + "name": "end-of-stream",
19 + "escapedName": "end-of-stream",
20 + "rawSpec": "1.4.1",
21 + "saveSpec": null,
22 + "fetchSpec": "1.4.1"
23 + },
24 + "_requiredBy": [
25 + "/tar-stream"
26 + ],
27 + "_resolved": "http://localhost:8989/blobs/get/&P8z5hny+y3o1XkPZydXCy+IWvXs1Uy1SttnMErC6OF4=.sha256",
28 + "_spec": "1.4.1",
29 + "_where": "/home/cel/src/ssb-npm-registry",
30 + "author": {
31 + "name": "Mathias Buus",
32 + "email": "mathiasbuus@gmail.com"
33 + },
34 + "bugs": {
35 + "url": "https://github.com/mafintosh/end-of-stream/issues"
36 + },
37 + "dependencies": {
38 + "once": "^1.4.0"
39 + },
40 + "description": "Call a callback when a readable/writable/duplex stream has completed or failed.",
41 + "files": [
42 + "index.js"
43 + ],
44 + "homepage": "https://github.com/mafintosh/end-of-stream",
45 + "keywords": [
46 + "stream",
47 + "streams",
48 + "callback",
49 + "finish",
50 + "close",
51 + "end",
52 + "wait"
53 + ],
54 + "license": "MIT",
55 + "main": "index.js",
56 + "name": "end-of-stream",
57 + "repository": {
58 + "type": "git",
59 + "url": "git://github.com/mafintosh/end-of-stream.git"
60 + },
61 + "scripts": {
62 + "test": "node test.js"
63 + },
64 + "version": "1.4.1"
65 +}
node_modules/fs-constants/LICENSEView
@@ -1,0 +1,21 @@
1 +The MIT License (MIT)
2 +
3 +Copyright (c) 2018 Mathias Buus
4 +
5 +Permission is hereby granted, free of charge, to any person obtaining a copy
6 +of this software and associated documentation files (the "Software"), to deal
7 +in the Software without restriction, including without limitation the rights
8 +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 +copies of the Software, and to permit persons to whom the Software is
10 +furnished to do so, subject to the following conditions:
11 +
12 +The above copyright notice and this permission notice shall be included in
13 +all copies or substantial portions of the Software.
14 +
15 +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
21 +THE SOFTWARE.
node_modules/fs-constants/README.mdView
@@ -1,0 +1,26 @@
1 +# fs-constants
2 +
3 +Small module that allows you to get the fs constants across
4 +Node and the browser.
5 +
6 +```
7 +npm install fs-constants
8 +```
9 +
10 +Previously you would use `require('constants')` for this in Node, but that has been
11 +deprecated and changed to `require('fs').constants`, which does not browserify.
12 +
13 +This module uses `require('constants')` in the browser and `require('fs').constants` in Node to work around this.
14 +
15 +
16 +## Usage
17 +
18 +``` js
19 +var constants = require('fs-constants')
20 +
21 +console.log('constants:', constants)
22 +```
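For example, the flag constants can be passed straight to `fs` calls. This is only an illustrative sketch (the path is a placeholder, and which constants exist depends on the platform):

``` js
var fs = require('fs')
var constants = require('fs-constants')

// open a file read-only using the numeric flag constant
// ('/tmp/example.txt' is just a placeholder path)
fs.open('/tmp/example.txt', constants.O_RDONLY, function (err, fd) {
  if (err) throw err
  fs.close(fd, function () {})
})
```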
23 +
24 +## License
25 +
26 +MIT
node_modules/fs-constants/browser.jsView
@@ -1,0 +1,1 @@
1 +module.exports = require('constants')
node_modules/fs-constants/index.jsView
@@ -1,0 +1,1 @@
1 +module.exports = require('fs').constants || require('constants')
node_modules/fs-constants/package.jsonView
@@ -1,0 +1,50 @@
1 +{
2 + "_args": [
3 + [
4 + "fs-constants@1.0.0",
5 + "/home/cel/src/ssb-npm-registry"
6 + ]
7 + ],
8 + "_from": "fs-constants@1.0.0",
9 + "_id": "fs-constants@1.0.0",
10 + "_inBundle": false,
11 + "_integrity": "sha256-55NtfhskR1vVkxLEAF8w1CU5Yn38HE9d/iMm17fHxfo=",
12 + "_location": "/fs-constants",
13 + "_phantomChildren": {},
14 + "_requested": {
15 + "type": "version",
16 + "registry": true,
17 + "raw": "fs-constants@1.0.0",
18 + "name": "fs-constants",
19 + "escapedName": "fs-constants",
20 + "rawSpec": "1.0.0",
21 + "saveSpec": null,
22 + "fetchSpec": "1.0.0"
23 + },
24 + "_requiredBy": [
25 + "/tar-stream"
26 + ],
27 + "_resolved": "http://localhost:8989/blobs/get/&55NtfhskR1vVkxLEAF8w1CU5Yn38HE9d/iMm17fHxfo=.sha256",
28 + "_spec": "1.0.0",
29 + "_where": "/home/cel/src/ssb-npm-registry",
30 + "author": {
31 + "name": "Mathias Buus",
32 + "url": "@mafintosh"
33 + },
34 + "browser": "browser.js",
35 + "bugs": {
36 + "url": "https://github.com/mafintosh/fs-constants/issues"
37 + },
38 + "dependencies": {},
39 + "description": "Require constants across node and the browser",
40 + "devDependencies": {},
41 + "homepage": "https://github.com/mafintosh/fs-constants",
42 + "license": "MIT",
43 + "main": "index.js",
44 + "name": "fs-constants",
45 + "repository": {
46 + "type": "git",
47 + "url": "git+https://github.com/mafintosh/fs-constants.git"
48 + },
49 + "version": "1.0.0"
50 +}
node_modules/hashlru/.travis.ymlView
@@ -1,0 +1,8 @@
1 +language: node_js
2 +node_js:
3 +- '0.10'
4 +- '0.12'
5 +- '4'
6 +- '5'
7 +- '6'
8 +- '7'
node_modules/hashlru/LICENSEView
@@ -1,0 +1,22 @@
1 +Copyright (c) 2016 'Dominic Tarr'
2 +
3 +Permission is hereby granted, free of charge,
4 +to any person obtaining a copy of this software and
5 +associated documentation files (the "Software"), to
6 +deal in the Software without restriction, including
7 +without limitation the rights to use, copy, modify,
8 +merge, publish, distribute, sublicense, and/or sell
9 +copies of the Software, and to permit persons to whom
10 +the Software is furnished to do so,
11 +subject to the following conditions:
12 +
13 +The above copyright notice and this permission notice
14 +shall be included in all copies or substantial portions of the Software.
15 +
16 +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
17 +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
18 +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
19 +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
20 +ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
node_modules/hashlru/README.mdView
@@ -1,0 +1,101 @@
1 +# hashlru
2 +
3 +Simpler, faster LRU cache algorithm
4 +
5 +A Least Recently Used cache is used to speed up requests to a key-value oriented resource,
6 +while making a bounded memory commitment.
7 +
8 +I've recently [benchmarked the various lru implementations available on npm](https://github.com/dominictarr/bench-lru)
9 +and found wildly varying performance. There were some that performed well overall,
10 +and others that performed extremely well in some cases, but poorly in others, due to
11 +compromises made to maintain correctness.
12 +
13 +After writing the benchmark, of course I had to try my hand at my own LRU implementation.
14 +I soon found a few things: LRUs are quite difficult to implement; first of all, they contain a linked
15 +list. LRUs use a linked list to maintain the order
16 +that keys have been accessed, so that when the cache fills, the old values
17 +(which presumably are the least likely to be needed again) can be removed from the cache.
18 +Linked Lists are not easy to implement correctly!
19 +
20 +Then I discovered why some of the fast algorithms were so slow - they used `delete cache[key]`,
21 +which is much slower than `cache[key] = value`, much much slower.
22 +
23 +So, while looking for a way to avoid `delete`, I had an idea - have two cache objects,
24 +and when one fills, create a new one and start putting items in that; then, when that one is sufficiently
25 +full, throw the old one away. It avoids `delete` and, at max, commits us to only N values and between N and 2N keys.
26 +
27 +Then I realized that with this pattern, you _don't actually need_ the linked list anymore!
28 +This makes an N-2N least recently used cache very, very simple. This both has performance benefits,
29 +and it's also very easy to verify its correctness.
30 +
31 +This algorithm does not give you an ordered list of the N most recently used items,
32 +but you do not really need that! The property of dropping the least recent items is still preserved.
33 +
34 +See a [benchmark](https://github.com/dominictarr/bench-lru) of this against
35 +the other LRU implementations on npm.
36 +
37 +## example
38 +
39 +``` js
40 +var HLRU = require('hashlru')
41 +var lru = HLRU(100)
42 +lru.set(key, value)
43 +lru.get(key)
44 +```
45 +
46 +## algorithm
47 +
48 +Create two caches - `old_cache` and `new_cache` - and a counter, `size`.
49 +
50 +When a `key, value` pair is added: if `key` is already in `new_cache`, update the value;
51 +if it is not currently in `new_cache`, set `new_cache[key] = value`.
52 +If the key was _not_ already in `new_cache` then `size` is incremented.
53 +If `size > max`, set `old_cache = new_cache`, reset `size = 0`, and initialize a new `new_cache = {}`.
54 +
55 +To get a `key`, check if `new_cache` contains the key, and if so, return it.
56 +If not, check if it is in `old_cache` and, if so, move that value to `new_cache` and increment `size`.
57 +If `size > max`, set `old_cache = new_cache`, reset `size = 0`, and initialize a new `new_cache = {}`.
58 +
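For illustration only, here is a minimal JavaScript sketch of the two-cache rotation described above (the name `makeLRU` is hypothetical; the shipped `index.js` is the real implementation):

``` js
// minimal sketch of the algorithm above, not the shipped code
function makeLRU (max) {
  var size = 0
  var old_cache = {}
  var new_cache = {}

  function rotate () {
    old_cache = new_cache
    new_cache = {}
    size = 0
  }

  return {
    set: function (key, value) {
      if (new_cache[key] === undefined) size++ // only new keys grow the count
      new_cache[key] = value
      if (size > max) rotate()
    },
    get: function (key) {
      if (new_cache[key] !== undefined) return new_cache[key]
      var v = old_cache[key]
      if (v !== undefined) { // promote a hit from the old cache
        new_cache[key] = v
        size++
        if (size > max) rotate()
      }
      return v
    }
  }
}

var lru = makeLRU(2)
lru.set('a', 1)
lru.set('b', 2)
lru.set('c', 3) // size exceeds max, so the caches rotate
console.log(lru.get('a'), lru.get('c')) // 1 3
```
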
59 +## complexity
60 +
61 +Writes are O(1) on average, like a hash table.
62 +
63 +When implemented in a garbage collected language, the old cache is thrown away when the new cache is
64 +full. To better manage memory usage, it could also be implemented as two fixed-size hash tables.
65 +In this case, instead of discarding the old cache, it would be zeroed. This means that at most every N
66 +writes, when the caches are rotated, that write will require N operations (to clear the old cache).
67 +
68 +This still averages out to O(1): it does cost O(N), but only every N writes (except for updates),
69 +so N/N is still 1.
70 +
71 +## HashLRU (max) => lru
72 +
73 +Initialize an lru object.
74 +
75 +### lru.get (key) => value | undefined
76 +
77 +Returns the value in the cache, or `undefined` if the value is not in the cache.
78 +
79 +### lru.set(key, value)
80 +
81 +Update the value for `key`.
82 +
83 +### lru.has(key) => boolean
84 +
85 +Checks if the `key` is in the cache.
86 +
87 +### lru.remove(key)
88 +
89 +Removes the `key` from the cache.
90 +
91 +### lru.clear()
92 +
93 +Empties the entire cache.
94 +
95 +## License
96 +
97 +MIT
98 +
99 +
100 +
101 +
node_modules/hashlru/bench.jsView
@@ -1,0 +1,47 @@
1 +var Stats = require('statistics/mutate')
2 +var LRU = require('./')
3 +
4 +//simple benchmarks, and measure standard deviation
5 +
6 +function run (N, op, init) {
7 + var stats = null, value
8 + for(var j = 0; j < 100; j++) {
9 + if(init) value = init(j)
10 + var start = Date.now()
11 + for(var i = 0; i < N; i++) op(value, i)
12 + stats = Stats(stats, N/((Date.now() - start)))
13 + }
14 + return stats
15 +}
16 +
17 +//set 1000 random items, then read 10000 items.
18 +//since they are random, there will be misses as well as hits
19 +console.log('GET', run(100000, function (lru, n) {
20 + lru.get(~~(Math.random()*1000))
21 +// lru.set(n, Math.random())
22 +}, function () {
23 + var lru = LRU(1000)
24 +  for(var i = 0; i < 1000; i++)
25 + lru.set(~~(Math.random()*1000), Math.random())
26 + return lru
27 +}))
28 +
29 +//set 100000 random values into LRU for 1000 values.
30 +//this means 99/100 should be evictions
31 +console.log('SET', run(100000, function (lru, n) {
32 + lru.set(~~(Math.random()*100000), Math.random())
33 +}, function () {
34 + return LRU(1000)
35 +}))
36 +
37 +
38 +
39 +
40 +
41 +
42 +
43 +
44 +
45 +
46 +
47 +
node_modules/hashlru/index.d.tsView
@@ -1,0 +1,7 @@
1 +export default function HLRU(max: number): {
2 + has: (key: string | number) => boolean;
3 + remove: (key: string | number) => void;
4 + get: (key: string | number) => any;
5 + set: (key: string | number, value: any) => void;
6 + clear: () => void;
7 +};
node_modules/hashlru/index.jsView
@@ -1,0 +1,51 @@
1 +module.exports = function (max) {
2 +
3 + if (!max) throw Error('hashlru must have a max value, of type number, greater than 0')
4 +
5 + var size = 0, cache = Object.create(null), _cache = Object.create(null)
6 +
7 + function update (key, value) {
8 + cache[key] = value
9 + size ++
10 + if(size >= max) {
11 + size = 0
12 + _cache = cache
13 + cache = Object.create(null)
14 + }
15 + }
16 +
17 + return {
18 + has: function (key) {
19 + return cache[key] !== undefined || _cache[key] !== undefined
20 + },
21 + remove: function (key) {
22 + if(cache[key] !== undefined)
23 + cache[key] = undefined
24 + if(_cache[key] !== undefined)
25 + _cache[key] = undefined
26 + },
27 + get: function (key) {
28 + var v = cache[key]
29 + if(v !== undefined) return v
30 + if((v = _cache[key]) !== undefined) {
31 + update(key, v)
32 + return v
33 + }
34 + },
35 + set: function (key, value) {
36 + if(cache[key] !== undefined) cache[key] = value
37 + else update(key, value)
38 + },
39 + clear: function () {
40 + cache = Object.create(null)
41 + _cache = Object.create(null)
42 + }
43 + }
44 +}
45 +
46 +
47 +
48 +
49 +
50 +
51 +
node_modules/hashlru/package.jsonView
@@ -1,0 +1,56 @@
1 +{
2 + "_args": [
3 + [
4 + "hashlru@2.3.0",
5 + "/home/cel/src/ssb-npm-registry"
6 + ]
7 + ],
8 + "_from": "hashlru@2.3.0",
9 + "_id": "hashlru@2.3.0",
10 + "_inBundle": false,
11 + "_integrity": "sha256-A3SehgJzx5gtrxWIf4ckf/LoJWAzYsT7hFUiOVbt0hE=",
12 + "_location": "/hashlru",
13 + "_phantomChildren": {},
14 + "_requested": {
15 + "type": "version",
16 + "registry": true,
17 + "raw": "hashlru@2.3.0",
18 + "name": "hashlru",
19 + "escapedName": "hashlru",
20 + "rawSpec": "2.3.0",
21 + "saveSpec": null,
22 + "fetchSpec": "2.3.0"
23 + },
24 + "_requiredBy": [
25 + "/"
26 + ],
27 + "_resolved": "http://localhost:8989/blobs/get/&A3SehgJzx5gtrxWIf4ckf/LoJWAzYsT7hFUiOVbt0hE=.sha256",
28 + "_spec": "2.3.0",
29 + "_where": "/home/cel/src/ssb-npm-registry",
30 + "author": {
31 + "name": "'Dominic Tarr'",
32 + "email": "dominic.tarr@gmail.com",
33 + "url": "dominictarr.com"
34 + },
35 + "bugs": {
36 + "url": "https://github.com/dominictarr/hashlru/issues"
37 + },
38 + "dependencies": {},
39 + "description": "simpler faster substitute for LRU",
40 + "devDependencies": {
41 + "istanbul": "^0.4.5"
42 + },
43 + "homepage": "https://github.com/dominictarr/hashlru",
44 + "license": "MIT",
45 + "name": "hashlru",
46 + "repository": {
47 + "type": "git",
48 + "url": "git://github.com/dominictarr/hashlru.git"
49 + },
50 + "scripts": {
51 + "cov": "istanbul cover test/*.js",
52 + "test": "set -e; for t in test/*.js; do node $t; done"
53 + },
54 + "types": "index.d.ts",
55 + "version": "2.3.0"
56 +}
node_modules/hashlru/test/test.jsView
@@ -1,0 +1,46 @@
1 +var assert = require('assert')
2 +var HLRU = require('../')
3 +var lru = HLRU(2)
4 +
5 +// set-get:
6 +lru.set('test', 'test')
7 +
8 +assert.equal(lru.get('test'), 'test')
9 +
10 +// has:
11 +assert.equal(lru.has('test'), true)
12 +assert.equal(lru.has('blah'), false)
13 +
14 +// update:
15 +lru.set('test', 'test2')
16 +
17 +assert.equal(lru.get('test'), 'test2')
18 +
19 +// cache cycle:
20 +lru.set('test2', 'test')
21 +
22 +assert.equal(lru.get('test2'), 'test')
23 +
24 +// get previous after cache cycle:
25 +assert.equal(lru.get('test'), 'test2')
26 +
27 +// update new cache:
28 +lru.set('test2', 'test2')
29 +
30 +assert.equal(lru.get('test2'), 'test2')
31 +
32 +// object purity:
33 +assert.equal(lru.get('constructor'), undefined)
34 +
35 +// max validation:
36 +assert.throws(HLRU)
37 +
38 +// remove:
39 +assert.equal(lru.has('test2'), true)
40 +lru.remove('test2')
41 +assert.equal(lru.has('test2'), false)
42 +
43 +// clear
44 +assert.equal(lru.has('test'), true)
45 +lru.clear()
46 +assert.equal(lru.has('test'), false)
node_modules/inherits/LICENSEView
@@ -1,0 +1,16 @@
1 +The ISC License
2 +
3 +Copyright (c) Isaac Z. Schlueter
4 +
5 +Permission to use, copy, modify, and/or distribute this software for any
6 +purpose with or without fee is hereby granted, provided that the above
7 +copyright notice and this permission notice appear in all copies.
8 +
9 +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
10 +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
11 +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
12 +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
13 +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
14 +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
15 +PERFORMANCE OF THIS SOFTWARE.
16 +
node_modules/inherits/README.mdView
@@ -1,0 +1,42 @@
1 +Browser-friendly inheritance fully compatible with standard node.js
2 +[inherits](http://nodejs.org/api/util.html#util_util_inherits_constructor_superconstructor).
3 +
4 +This package exports the standard `inherits` from the node.js `util` module in a
5 +node environment, but also provides an alternative browser-friendly
6 +implementation through the [browser
7 +field](https://gist.github.com/shtylman/4339901). The alternative
8 +implementation is a literal copy of the standard one, located in a standalone
9 +module to avoid requiring `util`. It also has a shim for old
10 +browsers with no `Object.create` support.
11 +
12 +While ensuring you are using the standard `inherits`
13 +implementation in a node.js environment, it allows bundlers such as
14 +[browserify](https://github.com/substack/node-browserify) to not
15 +include the full `util` package in your client code if all you need is
16 +just the `inherits` function. This is worthwhile, because the browser shim for the `util`
17 +package is large and `inherits` is often the single function you need
18 +from it.
19 +
20 +It's recommended to use this package instead of
21 +`require('util').inherits` for any code that has a chance of being used
22 +not only in node.js but in the browser too.
23 +
24 +## usage
25 +
26 +```js
27 +var inherits = require('inherits');
28 +// then use exactly as the standard one
29 +```
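For example, a typical constructor-style use (the `Dog`/`EventEmitter` pairing here is only an illustration):

```js
var inherits = require('inherits');
var EventEmitter = require('events').EventEmitter;

function Dog (name) {
  EventEmitter.call(this);
  this.name = name;
}
// call inherits before adding prototype methods, since it replaces the prototype
inherits(Dog, EventEmitter);

Dog.prototype.bark = function () {
  this.emit('bark', this.name);
};

var d = new Dog('Rex');
d.on('bark', function (name) { console.log(name + ' says woof'); });
d.bark();
```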
30 +
31 +## note on version ~1.0
32 +
33 +Version ~1.0 had completely different motivation and is not compatible
34 +neither with 2.0 nor with standard node.js `inherits`.
35 +
36 +If you are using version ~1.0 and planning to switch to ~2.0, be
37 +careful:
38 +
39 +* new version uses `super_` instead of `super` for referencing
40 + superclass
41 +* new version overwrites current prototype while old one preserves any
42 + existing fields on it
node_modules/inherits/inherits.jsView
@@ -1,0 +1,7 @@
1 +try {
2 + var util = require('util');
3 + if (typeof util.inherits !== 'function') throw '';
4 + module.exports = util.inherits;
5 +} catch (e) {
6 + module.exports = require('./inherits_browser.js');
7 +}
node_modules/inherits/inherits_browser.jsView
@@ -1,0 +1,23 @@
1 +if (typeof Object.create === 'function') {
2 + // implementation from standard node.js 'util' module
3 + module.exports = function inherits(ctor, superCtor) {
4 + ctor.super_ = superCtor
5 + ctor.prototype = Object.create(superCtor.prototype, {
6 + constructor: {
7 + value: ctor,
8 + enumerable: false,
9 + writable: true,
10 + configurable: true
11 + }
12 + });
13 + };
14 +} else {
15 + // old school shim for old browsers
16 + module.exports = function inherits(ctor, superCtor) {
17 + ctor.super_ = superCtor
18 + var TempCtor = function () {}
19 + TempCtor.prototype = superCtor.prototype
20 + ctor.prototype = new TempCtor()
21 + ctor.prototype.constructor = ctor
22 + }
23 +}
node_modules/inherits/package.jsonView
@@ -1,0 +1,65 @@
1 +{
2 + "_args": [
3 + [
4 + "inherits@2.0.3",
5 + "/home/cel/src/ssb-npm-registry"
6 + ]
7 + ],
8 + "_from": "inherits@2.0.3",
9 + "_id": "inherits@2.0.3",
10 + "_inBundle": false,
11 + "_integrity": "sha256-f19Y6bVOh+JkeG5+hNngeKr2jBAD3p+miUUQHgI1bN8=",
12 + "_location": "/inherits",
13 + "_phantomChildren": {},
14 + "_requested": {
15 + "type": "version",
16 + "registry": true,
17 + "raw": "inherits@2.0.3",
18 + "name": "inherits",
19 + "escapedName": "inherits",
20 + "rawSpec": "2.0.3",
21 + "saveSpec": null,
22 + "fetchSpec": "2.0.3"
23 + },
24 + "_requiredBy": [
25 + "/readable-stream",
26 + "/tar-stream"
27 + ],
28 + "_resolved": "http://localhost:8989/blobs/get/&f19Y6bVOh+JkeG5+hNngeKr2jBAD3p+miUUQHgI1bN8=.sha256",
29 + "_spec": "2.0.3",
30 + "_where": "/home/cel/src/ssb-npm-registry",
31 + "browser": "./inherits_browser.js",
32 + "bugs": {
33 + "url": "https://github.com/isaacs/inherits/issues"
34 + },
35 + "description": "Browser-friendly inheritance fully compatible with standard node.js inherits()",
36 + "devDependencies": {
37 + "tap": "^7.1.0"
38 + },
39 + "files": [
40 + "inherits.js",
41 + "inherits_browser.js"
42 + ],
43 + "homepage": "https://github.com/isaacs/inherits#readme",
44 + "keywords": [
45 + "inheritance",
46 + "class",
47 + "klass",
48 + "oop",
49 + "object-oriented",
50 + "inherits",
51 + "browser",
52 + "browserify"
53 + ],
54 + "license": "ISC",
55 + "main": "./inherits.js",
56 + "name": "inherits",
57 + "repository": {
58 + "type": "git",
59 + "url": "git://github.com/isaacs/inherits.git"
60 + },
61 + "scripts": {
62 + "test": "node test"
63 + },
64 + "version": "2.0.3"
65 +}
node_modules/looper/.travis.ymlView
@@ -1,0 +1,4 @@
1 +language: node_js
2 +node_js:
3 + - 0.8
4 + - '0.10'
node_modules/looper/LICENSEView
@@ -1,0 +1,22 @@
1 +Copyright (c) 2013 Dominic Tarr
2 +
3 +Permission is hereby granted, free of charge,
4 +to any person obtaining a copy of this software and
5 +associated documentation files (the "Software"), to
6 +deal in the Software without restriction, including
7 +without limitation the rights to use, copy, modify,
8 +merge, publish, distribute, sublicense, and/or sell
9 +copies of the Software, and to permit persons to whom
10 +the Software is furnished to do so,
11 +subject to the following conditions:
12 +
13 +The above copyright notice and this permission notice
14 +shall be included in all copies or substantial portions of the Software.
15 +
16 +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
17 +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
18 +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
19 +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
20 +ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
node_modules/looper/README.mdView
@@ -1,0 +1,43 @@
1 +# looper
2 +
3 +Loop with callbacks but don't RangeError
4 +
5 +[![travis](https://travis-ci.org/dominictarr/looper.png?branch=master)
6 +](https://travis-ci.org/dominictarr/looper)
7 +
8 +[![testling](http://ci.testling.com/dominictarr/looper.png)
9 +](http://ci.testling.com/dominictarr/looper)
10 +
11 +## Synopsis
12 +
13 +Normally, if `mightBeAsync` calls its cb immediately,
14 +this would `RangeError`:
15 +
16 +``` js
17 +var l = 100000
18 +;(function next () {
19 + if(--l) mightBeAsync(next)
20 +})()
21 +```
22 +
23 +`looper` detects that case and falls back to a `while` loop.
24 +
25 +## Example
26 +
27 +``` js
28 +var loop = require('looper')
29 +
30 +var l = 100000
31 +loop(function (next) {
32 +  // looper passes the continuation in as `next`
33 +  if(--l) probablySync(next)
34 +})
35 +```
36 +
37 +When you want to stop looping, don't call `next`.
38 +`looper` checks whether each callback fires synchronously or not,
39 +so you can even mix sync and async calls!
40 +
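As a rough sketch (the `maybeAsync` helper is hypothetical, modelled on this package's own tests), mixing synchronous and asynchronous continuations without risking a stack overflow:

``` js
var loop = require('looper')

// Hypothetical work function: sometimes calls back synchronously,
// sometimes on a timer.
function maybeAsync (cb) {
  if (Math.random() < 0.5) cb()
  else setTimeout(cb)
}

var n = 100000
loop(function (next) {
  if (--n) maybeAsync(next) // not calling next ends the loop
})
```
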
41 +## License
42 +
43 +MIT
node_modules/looper/index.jsView
@@ -1,0 +1,14 @@
1 +
2 +var looper = module.exports = function (fun) {
3 + (function next () {
4 + var loop = true, returned = false, sync = false
5 + do {
6 + sync = true; loop = false
7 + fun.call(this, function () {
8 + if(sync) loop = true
9 + else next()
10 + })
11 + sync = false
12 + } while(loop)
13 + })()
14 +}
node_modules/looper/package.jsonView
@@ -1,0 +1,70 @@
1 +{
2 + "_args": [
3 + [
4 + "looper@3.0.0",
5 + "/home/cel/src/ssb-npm-registry"
6 + ]
7 + ],
8 + "_from": "looper@3.0.0",
9 + "_id": "looper@3.0.0",
10 + "_inBundle": false,
11 + "_integrity": "sha256-UKhTvWst16euOATlhckl4mr/zcz+aByyUJO11lMf+Jc=",
12 + "_location": "/looper",
13 + "_phantomChildren": {},
14 + "_requested": {
15 + "type": "version",
16 + "registry": true,
17 + "raw": "looper@3.0.0",
18 + "name": "looper",
19 + "escapedName": "looper",
20 + "rawSpec": "3.0.0",
21 + "saveSpec": null,
22 + "fetchSpec": "3.0.0"
23 + },
24 + "_requiredBy": [
25 + "/stream-to-pull-stream"
26 + ],
27 + "_resolved": "http://localhost:8989/blobs/get/&UKhTvWst16euOATlhckl4mr/zcz+aByyUJO11lMf+Jc=.sha256",
28 + "_spec": "3.0.0",
29 + "_where": "/home/cel/src/ssb-npm-registry",
30 + "author": {
31 + "name": "Dominic Tarr",
32 + "email": "dominic.tarr@gmail.com",
33 + "url": "http://dominictarr.com"
34 + },
35 + "bugs": {
36 + "url": "https://github.com/dominictarr/looper/issues"
37 + },
38 + "dependencies": {},
39 + "description": "async loops that never RangeError",
40 + "devDependencies": {
41 + "tape": "1.0.4"
42 + },
43 + "homepage": "https://github.com/dominictarr/looper",
44 + "license": "MIT",
45 + "name": "looper",
46 + "repository": {
47 + "type": "git",
48 + "url": "git://github.com/dominictarr/looper.git"
49 + },
50 + "scripts": {
51 + "test": "set -e; for t in test/*.js; do node $t; done"
52 + },
53 + "testling": {
54 + "files": "test/*.js",
55 + "browsers": [
56 + "ie/8..latest",
57 + "firefox/17..latest",
58 + "firefox/nightly",
59 + "chrome/22..latest",
60 + "chrome/canary",
61 + "opera/12..latest",
62 + "opera/next",
63 + "safari/5.1..latest",
64 + "ipad/6.0..latest",
65 + "iphone/6.0..latest",
66 + "android-browser/4.2..latest"
67 + ]
68 + },
69 + "version": "3.0.0"
70 +}
node_modules/looper/test/test.jsView
@@ -1,0 +1,37 @@
1 +
2 +var tape = require('tape')
3 +var looper = require('../')
4 +
5 +tape('n=1000000, with no RangeError', function (t) {
6 + var n = 1000000, c = 0
7 + looper(function (next) {
8 + c ++
9 + if(--n) return next()
10 + t.equal(c, 1000000)
11 + t.end()
12 + })
13 +})
14 +
15 +tape('async is okay', function (t) {
16 +
17 + var n = 100, c = 0
18 + looper(function (next) {
19 + c ++
20 + if(--n) return setTimeout(next)
21 + t.equal(c, 100)
22 + t.end()
23 + })
24 +
25 +})
26 +
27 +tape('sometimes async is okay', function (t) {
28 +  var i = 1000, c = 0
29 + looper(function (next) {
30 + c++
31 + if(--i) return Math.random() < 0.1 ? setTimeout(next) : next()
32 + t.equal(c, 1000)
33 + t.end()
34 + })
35 +
36 +})
37 +
node_modules/multicb/README.mdView
@@ -1,0 +1,54 @@
1 +# MultiCB
2 +
3 +Simple way to aggregate multiple node-style callbacks
4 +
5 +```js
6 +var multicb = require('multicb')
7 +
8 +// default usage
9 +
10 +var done = multicb()
11 +doAsync(done())
12 +doAsync(done())
13 +doAsync(done())
14 +done(function(err, results) {
15 +  console.log(err) // => null
16 + console.log(results) /* =>
17 + [
18 + [undefined, 'foo'],
19 + [undefined, 'bar'],
20 + [undefined, 'baz']
21 + ]
22 + */
23 +})
24 +
25 +// pluck argument
26 +
27 +var done = multicb({ pluck: 1 })
28 +doAsync(done())
29 +doAsync(done())
30 +doAsync(done())
31 +done(function(err, results) {
32 +  console.log(err) // => null
33 + console.log(results) /* =>
34 + [
35 + 'foo',
36 + 'bar',
37 + 'baz'
38 + ]
39 + */
40 +})
41 +
42 +// spread argument
43 +
44 +var done = multicb({ pluck: 1, spread: true })
45 +doAsync(done())
46 +doAsync(done())
47 +doAsync(done())
48 +done(function(err, a, b, c) {
49 +  console.log(err) // => null
50 + console.log(a) // => 'foo'
51 + console.log(b) // => 'bar'
52 + console.log(c) // => 'baz'
53 +})
54 +```
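
The error behaviour (covered by this package's tests) is that the first error wins, the aggregate callback fires exactly once, and `results` is left `undefined`. A small sketch, using hypothetical `fs.readFile` calls and file names:

```js
var multicb = require('multicb')
var fs = require('fs')

var done = multicb({ pluck: 1 })
fs.readFile('a.txt', 'utf8', done())
fs.readFile('probably-missing.txt', 'utf8', done()) // assume this one errors
done(function (err, results) {
  // called once; err is the first error reported, results is undefined
  console.log(err && err.code) // e.g. 'ENOENT'
})
```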
node_modules/multicb/index.jsView
@@ -1,0 +1,46 @@
1 +module.exports = function(allopts) {
2 + var n = 0, m = 0, _cb, results = [], _err;
3 + function o (k, d) { return allopts && allopts[k] !== void 0 ? allopts[k] : d }
4 +
5 + return function(cb) {
6 + if (cb) {
7 + results.length = m
8 +
9 + if(_err) {
10 + var err = _err; _err = null
11 + return cb(err)
12 + }
13 + if(n == m) {
14 + if (o('spread'))
15 + return cb.apply(null, [null].concat(results))
16 + else
17 + return cb(null, results)
18 + }
19 +
20 + _cb = cb
21 + return
22 + }
23 +
24 + var i = m++
25 + return function (err) {
26 + if (err) {
27 + if (_err) return
28 + _err = err
29 + n = -1 // stop
30 + if (_cb) _cb(err)
31 + } else {
32 + n++
33 + if (o('pluck'))
34 + results[i] = arguments[o('pluck')]
35 + else
36 + results[i] = Array.prototype.slice.call(arguments)
37 + if (n === m && _cb) {
38 + if (o('spread'))
39 + _cb.apply(null, [null].concat(results))
40 + else
41 + _cb(null, results)
42 + }
43 + }
44 + }
45 + }
46 +}
node_modules/multicb/package.jsonView
@@ -1,0 +1,48 @@
1 +{
2 + "_args": [
3 + [
4 + "multicb@1.2.2",
5 + "/home/cel/src/ssb-npm-registry"
6 + ]
7 + ],
8 + "_from": "multicb@1.2.2",
9 + "_id": "multicb@1.2.2",
10 + "_inBundle": false,
11 + "_integrity": "sha256-Z6lAzBXF2f5DqwKHdL9yaArwseg8iYuljWDmEs6VcT8=",
12 + "_location": "/multicb",
13 + "_phantomChildren": {},
14 + "_requested": {
15 + "type": "version",
16 + "registry": true,
17 + "raw": "multicb@1.2.2",
18 + "name": "multicb",
19 + "escapedName": "multicb",
20 + "rawSpec": "1.2.2",
21 + "saveSpec": null,
22 + "fetchSpec": "1.2.2"
23 + },
24 + "_requiredBy": [
25 + "/"
26 + ],
27 + "_resolved": "http://localhost:8989/blobs/get/&Z6lAzBXF2f5DqwKHdL9yaArwseg8iYuljWDmEs6VcT8=.sha256",
28 + "_spec": "1.2.2",
29 + "_where": "/home/cel/src/ssb-npm-registry",
30 + "bugs": {
31 + "url": "https://github.com/pfrazee/multicb/issues"
32 + },
33 + "dependencies": {},
34 + "description": "Simple way to aggregate multiple node-style callbacks",
35 + "devDependencies": {
36 + "interleavings": "~0.3.0"
37 + },
38 + "homepage": "https://github.com/pfrazee/multicb#readme",
39 + "name": "multicb",
40 + "repository": {
41 + "type": "git",
42 + "url": "git+https://github.com/pfrazee/multicb.git"
43 + },
44 + "scripts": {
45 + "test": "set -e; for t in test/*.js; do node $t; done"
46 + },
47 + "version": "1.2.2"
48 +}
node_modules/multicb/test/errors-multi.jsView
@@ -1,0 +1,14 @@
1 +'use strict'
2 +var multicb = require('../')
3 +var t = require('assert')
4 +
5 +var done = multicb()
6 +var cbs = [done(), done()]
7 +var called = 0
8 +done(function(err, results) {
9 + called++
10 + t.equal(results, void 0)
11 +})
12 +cbs[0]('fail')
13 +cbs[1]('fail')
14 +t.equal(called, 1)
node_modules/multicb/test/errors.jsView
@@ -1,0 +1,21 @@
1 +'use strict'
2 +var multicb = require('../')
3 +var t = require('assert')
4 +
5 +require('interleavings').test(function (isAsync) {
6 +
7 + function async(cb, delay, args) {
8 + isAsync(function() { cb.apply(null, args) })()
9 + }
10 +
11 + var done = multicb()
12 + async(done(), 5, [null, 1])
13 + async(done(), 15, [null, 2])
14 + async(done(), 10, ['fail'])
15 + done(function(err, results) {
16 + t.equal(err, 'fail')
17 + t.equal(results, void 0)
18 + isAsync.done()
19 + })
20 +
21 +})
node_modules/multicb/test/multicb.jsView
@@ -1,0 +1,24 @@
1 +'use strict'
2 +var multicb = require('../')
3 +var t = require('assert')
4 +
5 +require('interleavings').test(function (isAsync) {
6 +
7 + function async(cb, delay, args) {
8 + isAsync(function() { cb.apply(null, args) })()
9 + }
10 +
11 + var done = multicb()
12 + async(done(), 5, [null, 1])
13 + async(done(), 15, [null, 2])
14 + async(done(), 10, [null, 3])
15 + done(function(err, results) {
16 + console.log('done')
17 + t.equal(err, null)
18 + t.equal(results[0][1], 1)
19 + t.equal(results[1][1], 2)
20 + t.equal(results[2][1], 3)
21 + isAsync.done()
22 + })
23 +
24 +})
node_modules/multicb/test/pluck.jsView
@@ -1,0 +1,24 @@
1 +'use strict'
2 +var multicb = require('../')
3 +var t = require('assert')
4 +
5 +require('interleavings').test(function (isAsync) {
6 +
7 + function async(cb, delay, args) {
8 + isAsync(function() { cb.apply(null, args) })()
9 + }
10 +
11 + var done = multicb({ pluck: 1 })
12 + async(done(), 5, [null, 1])
13 + async(done(), 15, [null, 2])
14 + async(done(), 10, [null, 3])
15 + done(function(err, results) {
16 + console.log('done')
17 + t.equal(err, null)
18 + t.equal(results[0], 1)
19 + t.equal(results[1], 2)
20 + t.equal(results[2], 3)
21 + isAsync.done()
22 + })
23 +
24 +})
node_modules/multicb/test/spread.jsView
@@ -1,0 +1,24 @@
1 +'use strict'
2 +var multicb = require('../')
3 +var t = require('assert')
4 +
5 +require('interleavings').test(function (isAsync) {
6 +
7 + function async(cb, delay, args) {
8 + isAsync(function() { cb.apply(null, args) })()
9 + }
10 +
11 + var done = multicb({ pluck: 1, spread: true })
12 + async(done(), 5, [null, 1])
13 + async(done(), 15, [null, 2])
14 + async(done(), 10, [null, 3])
15 + done(function(err, first, second, third) {
16 + console.log('done')
17 + t.equal(err, null)
18 + t.equal(first, 1)
19 + t.equal(second, 2)
20 + t.equal(third, 3)
21 + isAsync.done()
22 + })
23 +
24 +})
node_modules/once/LICENSEView
@@ -1,0 +1,15 @@
1 +The ISC License
2 +
3 +Copyright (c) Isaac Z. Schlueter and Contributors
4 +
5 +Permission to use, copy, modify, and/or distribute this software for any
6 +purpose with or without fee is hereby granted, provided that the above
7 +copyright notice and this permission notice appear in all copies.
8 +
9 +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
10 +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
11 +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
12 +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
13 +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
14 +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
15 +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
node_modules/once/README.mdView
@@ -1,0 +1,79 @@
1 +# once
2 +
3 +Only call a function once.
4 +
5 +## usage
6 +
7 +```javascript
8 +var once = require('once')
9 +
10 +function load (file, cb) {
11 + cb = once(cb)
12 +  loader.load(file)
13 + loader.once('load', cb)
14 + loader.once('error', cb)
15 +}
16 +```
17 +
18 +Or add to the Function.prototype in a responsible way:
19 +
20 +```javascript
21 +// only has to be done once
22 +require('once').proto()
23 +
24 +function load (file, cb) {
25 + cb = cb.once()
26 +  loader.load(file)
27 + loader.once('load', cb)
28 + loader.once('error', cb)
29 +}
30 +```
31 +
32 +Ironically, the prototype feature makes this module twice as
33 +complicated as necessary.
34 +
35 +To check whether your function has been called, use `fn.called`. Once the
36 +function is called for the first time the return value of the original
37 +function is saved in `fn.value` and subsequent calls will continue to
38 +return this value.
39 +
40 +```javascript
41 +var once = require('once')
42 +
43 +function load (cb) {
44 + cb = once(cb)
45 + var stream = createStream()
46 + stream.once('data', cb)
47 + stream.once('end', function () {
48 + if (!cb.called) cb(new Error('not found'))
49 + })
50 +}
51 +```
52 +
53 +## `once.strict(func)`
54 +
55 +Throw an error if the function is called twice.
56 +
57 +Some functions are expected to be called only once. Using `once` for them would
58 +potentially hide logical errors.
59 +
60 +In the example below, the `greet` function has to call the callback only once:
61 +
62 +```javascript
63 +function greet (name, cb) {
64 + // return is missing from the if statement
65 + // when no name is passed, the callback is called twice
66 + if (!name) cb('Hello anonymous')
67 + cb('Hello ' + name)
68 +}
69 +
70 +function log (msg) {
71 + console.log(msg)
72 +}
73 +
74 +// this will print 'Hello anonymous' but the logical error will be missed
75 +greet(null, once(log))
76 +
77 +// once.strict will print 'Hello anonymous' and throw an error when the callback is called the second time
78 +greet(null, once.strict(log))
79 +```
node_modules/once/once.jsView
@@ -1,0 +1,42 @@
1 +var wrappy = require('wrappy')
2 +module.exports = wrappy(once)
3 +module.exports.strict = wrappy(onceStrict)
4 +
5 +once.proto = once(function () {
6 + Object.defineProperty(Function.prototype, 'once', {
7 + value: function () {
8 + return once(this)
9 + },
10 + configurable: true
11 + })
12 +
13 + Object.defineProperty(Function.prototype, 'onceStrict', {
14 + value: function () {
15 + return onceStrict(this)
16 + },
17 + configurable: true
18 + })
19 +})
20 +
21 +function once (fn) {
22 + var f = function () {
23 + if (f.called) return f.value
24 + f.called = true
25 + return f.value = fn.apply(this, arguments)
26 + }
27 + f.called = false
28 + return f
29 +}
30 +
31 +function onceStrict (fn) {
32 + var f = function () {
33 + if (f.called)
34 + throw new Error(f.onceError)
35 + f.called = true
36 + return f.value = fn.apply(this, arguments)
37 + }
38 + var name = fn.name || 'Function wrapped with `once`'
39 + f.onceError = name + " shouldn't be called more than once"
40 + f.called = false
41 + return f
42 +}
node_modules/once/package.jsonView
@@ -1,0 +1,69 @@
1 +{
2 + "_args": [
3 + [
4 + "once@1.4.0",
5 + "/home/cel/src/ssb-npm-registry"
6 + ]
7 + ],
8 + "_from": "once@1.4.0",
9 + "_id": "once@1.4.0",
10 + "_inBundle": false,
11 + "_integrity": "sha256-z1FGC6Nwxpj2i5duUU0RNJczm6AYtgA+jo61acb8z88=",
12 + "_location": "/once",
13 + "_phantomChildren": {},
14 + "_requested": {
15 + "type": "version",
16 + "registry": true,
17 + "raw": "once@1.4.0",
18 + "name": "once",
19 + "escapedName": "once",
20 + "rawSpec": "1.4.0",
21 + "saveSpec": null,
22 + "fetchSpec": "1.4.0"
23 + },
24 + "_requiredBy": [
25 + "/end-of-stream"
26 + ],
27 + "_resolved": "http://localhost:8989/blobs/get/&z1FGC6Nwxpj2i5duUU0RNJczm6AYtgA+jo61acb8z88=.sha256",
28 + "_spec": "1.4.0",
29 + "_where": "/home/cel/src/ssb-npm-registry",
30 + "author": {
31 + "name": "Isaac Z. Schlueter",
32 + "email": "i@izs.me",
33 + "url": "http://blog.izs.me/"
34 + },
35 + "bugs": {
36 + "url": "https://github.com/isaacs/once/issues"
37 + },
38 + "dependencies": {
39 + "wrappy": "1"
40 + },
41 + "description": "Run a function exactly one time",
42 + "devDependencies": {
43 + "tap": "^7.0.1"
44 + },
45 + "directories": {
46 + "test": "test"
47 + },
48 + "files": [
49 + "once.js"
50 + ],
51 + "homepage": "https://github.com/isaacs/once#readme",
52 + "keywords": [
53 + "once",
54 + "function",
55 + "one",
56 + "single"
57 + ],
58 + "license": "ISC",
59 + "main": "once.js",
60 + "name": "once",
61 + "repository": {
62 + "type": "git",
63 + "url": "git://github.com/isaacs/once.git"
64 + },
65 + "scripts": {
66 + "test": "tap test/*.js"
67 + },
68 + "version": "1.4.0"
69 +}
node_modules/pull-cat/.npmignoreView
@@ -1,0 +1,3 @@
1 +node_modules
2 +node_modules/*
3 +npm_debug.log
node_modules/pull-cat/.travis.ymlView
@@ -1,0 +1,5 @@
1 +sudo: false
2 +language: node_js
3 +node_js:
4 + - stable
5 + - 0.10
node_modules/pull-cat/LICENSEView
@@ -1,0 +1,22 @@
1 +Copyright (c) 2013 Dominic Tarr
2 +
3 +Permission is hereby granted, free of charge,
4 +to any person obtaining a copy of this software and
5 +associated documentation files (the "Software"), to
6 +deal in the Software without restriction, including
7 +without limitation the rights to use, copy, modify,
8 +merge, publish, distribute, sublicense, and/or sell
9 +copies of the Software, and to permit persons to whom
10 +the Software is furnished to do so,
11 +subject to the following conditions:
12 +
13 +The above copyright notice and this permission notice
14 +shall be included in all copies or substantial portions of the Software.
15 +
16 +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
17 +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
18 +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
19 +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
20 +ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
node_modules/pull-cat/README.mdView
@@ -1,0 +1,56 @@
1 +# pull-cat
2 +
3 +> Concatenate pull-streams
4 +
5 +## Install
6 +
7 +```shell
8 +npm install --save pull-cat
9 +```
10 +
11 +## Example
12 +
13 +Construct a new source stream from a sequential list of source streams,
14 +reading from each one in turn until it ends, then the next, etc.
15 +If one stream errors, then the rest of the streams are aborted immediately.
16 +If the cat stream is aborted (i.e. if its sink errors) then all the streams
17 +are aborted.
18 +
19 +A cat stream is a moderately challenging stream to implement,
20 +especially in the context of error states.
21 +
22 +```js
23 +var cat = require('pull-cat')
24 +var pull = require('pull-stream')
25 +
26 +pull(
27 + cat([
28 + pull.values([1,2,3]),
29 + pull.values([4,5,6])
30 + ]),
31 + pull.log()
32 +)
33 +// 1
34 +// 2
35 +// 3
36 +// 4
37 +// 5
38 +// 6
39 +```
40 +
41 +
42 +## Api
43 +
44 +### `cat = require('pull-cat')`
45 +
46 +### `stream = cat(streams)`
47 +
48 +Reads from each stream in `streams` until finished.
49 +
50 +If a stream errors, stop all the streams.
51 +If the concatenated stream is aborted, abort all the streams,
52 +then call back to the aborter.
53 +
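For instance (a minimal sketch; the header and body values are made up), prepending a fixed header to another source and collecting the result:

```js
var cat = require('pull-cat')
var pull = require('pull-stream')

var stream = cat([
  pull.values(['# header\n']),
  pull.values(['line 1\n', 'line 2\n'])
])

pull(stream, pull.collect(function (err, chunks) {
  if (err) throw err
  console.log(chunks.join('')) // '# header\nline 1\nline 2\n'
}))
```
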
54 +## License
55 +
56 +MIT
node_modules/pull-cat/index.jsView
@@ -1,0 +1,41 @@
1 +var noop = function () {}
2 +
3 +function abortAll(ary, abort, cb) {
4 + var n = ary.length
5 + if(!n) return cb(abort)
6 + ary.forEach(function (f) {
7 + if(f) f(abort, next)
8 + else next()
9 + })
10 +
11 + function next() {
12 + if(--n) return
13 + cb(abort)
14 + }
15 + if(!n) next()
16 +}
17 +
18 +module.exports = function (streams) {
19 + return function (abort, cb) {
20 + ;(function next () {
21 + if(abort)
22 + abortAll(streams, abort, cb)
23 + else if(!streams.length)
24 + cb(true)
25 + else if(!streams[0])
26 + streams.shift(), next()
27 + else
28 + streams[0](null, function (err, data) {
29 + if(err) {
30 + streams.shift() //drop the first, has already ended.
31 + if(err === true) next()
32 + else abortAll(streams, err, cb)
33 + }
34 + else
35 + cb(null, data)
36 + })
37 + })()
38 + }
39 +}
40 +
41 +
node_modules/pull-cat/package.jsonView
@@ -1,0 +1,56 @@
1 +{
2 + "_args": [
3 + [
4 + "pull-cat@1.1.11",
5 + "/home/cel/src/ssb-npm-registry"
6 + ]
7 + ],
8 + "_from": "pull-cat@1.1.11",
9 + "_id": "pull-cat@1.1.11",
10 + "_inBundle": false,
11 + "_integrity": "sha256-+uVE8RHNwJIJa68sQGICGbxnCTCOdLLhSt/Pb4NmgL0=",
12 + "_location": "/pull-cat",
13 + "_phantomChildren": {},
14 + "_requested": {
15 + "type": "version",
16 + "registry": true,
17 + "raw": "pull-cat@1.1.11",
18 + "name": "pull-cat",
19 + "escapedName": "pull-cat",
20 + "rawSpec": "1.1.11",
21 + "saveSpec": null,
22 + "fetchSpec": "1.1.11"
23 + },
24 + "_requiredBy": [
25 + "/"
26 + ],
27 + "_resolved": "http://localhost:8989/blobs/get/&+uVE8RHNwJIJa68sQGICGbxnCTCOdLLhSt/Pb4NmgL0=.sha256",
28 + "_spec": "1.1.11",
29 + "_where": "/home/cel/src/ssb-npm-registry",
30 + "author": {
31 + "name": "Dominic Tarr",
32 + "email": "dominic.tarr@gmail.com",
33 + "url": "http://dominictarr.com"
34 + },
35 + "bugs": {
36 + "url": "https://github.com/pull-stream/pull-cat/issues"
37 + },
38 + "description": "concatenate pull-streams",
39 + "devDependencies": {
40 + "pull-abortable": "~4.1.0",
41 + "pull-pushable": "~2.0.1",
42 + "pull-stream": "^3.4.2",
43 + "tape": "~4.6"
44 + },
45 + "homepage": "https://github.com/pull-stream/pull-cat",
46 + "license": "MIT",
47 + "name": "pull-cat",
48 + "repository": {
49 + "type": "git",
50 + "url": "git://github.com/pull-stream/pull-cat.git"
51 + },
52 + "scripts": {
53 + "test": "set -e; for t in test/*.js; do node $t; done"
54 + },
55 + "version": "1.1.11"
56 +}
node_modules/pull-cat/test/index.jsView
@@ -1,0 +1,159 @@
1 +var pull = require('pull-stream')
2 +var cat = require('../')
3 +var test = require('tape')
4 +var Pushable = require('pull-pushable')
5 +var Abortable = require('pull-abortable')
6 +
7 +test('cat', function (t) {
8 + pull(
9 + cat([pull.values([1,2,3]), pull.values([4,5,6])]),
10 + pull.collect(function (err, ary) {
11 + console.log(err, ary)
12 + t.notOk(err)
13 + t.deepEqual(ary, [1,2,3,4,5,6])
14 + t.end()
15 + })
16 + )
17 +})
18 +
19 +test('cat - with empty', function (t) {
20 + pull(
21 + cat([pull.values([1,2,3]), null, pull.values([4,5,6])]),
22 + pull.collect(function (err, ary) {
23 + console.log(err, ary)
24 + t.notOk(err)
25 + t.deepEqual(ary, [1,2,3,4,5,6])
26 + t.end()
27 + })
28 + )
29 +})
30 +
31 +test('cat - with empty stream', function (t) {
32 + var ended = false
33 + var justEnd = function (err, cb) { ended = true; cb(true) }
34 +
35 + pull(
36 + cat([pull.values([1,2,3]), justEnd, pull.values([4,5,6])]),
37 + pull.collect(function (err, ary) {
38 + console.log(err, ary)
39 + t.ok(ended)
40 + t.notOk(err)
41 + t.deepEqual(ary, [1,2,3,4,5,6])
42 + t.end()
43 + })
44 + )
45 +})
46 +
47 +
48 +
49 +test('abort - with empty', function (t) {
50 + pull(
51 + cat([pull.values([1,2,3]), null, pull.values([4,5,6])]),
52 + function (read) {
53 + read(true, function (err) {
54 + t.equal(err, true)
55 + t.end()
56 + })
57 + }
58 + )
59 +})
60 +
61 +test('error', function (t) {
62 + var err = new Error('test error')
63 + pull(
64 + cat([pull.values([1,2,3]), function (_, cb) {
65 + cb(err)
66 + }]),
67 + pull.collect(function (_err) {
68 + console.log('COLLECT END', _err)
69 + t.equal(_err, err)
70 + t.end()
71 + })
72 + )
73 +})
74 +
75 +test('abort stalled', function (t) {
76 + var err = new Error('intentional'), n = 2
77 + var abortable = Abortable()
78 + var pushable = Pushable(function (_err) {
79 + t.equal(_err, err)
80 + next()
81 + })
82 +
83 + pushable.push(4)
84 +
85 + pull(
86 + cat([pull.values([1,2,3]), undefined, pushable]),
87 + abortable,
88 + pull.drain(function (item) {
89 + if(item == 4)
90 + process.nextTick(function () {
91 + abortable.abort(err)
92 + })
93 + }, function (err) {
94 + next()
95 + })
96 + )
97 +
98 + function next () {
99 + if(--n) return
100 + t.end()
101 + }
102 +})
103 +
104 +test('abort empty', function (t) {
105 + cat([])(true, function (end) {
106 + t.equal(end, true)
107 + t.end()
108 + })
109 +})
110 +
111 +test('error + undefined', function (t) {
112 + var err = new Error('test error')
113 + pull(
114 + cat([pull.values([1,2,3]), function (_, cb) {
115 + cb(err)
116 + }, undefined]),
117 + pull.collect(function (_err) {
118 + t.equal(_err, err)
119 + t.end()
120 + })
121 + )
122 +})
123 +
124 +test('take cat', function (t) {
125 + pull(
126 + cat([
127 + pull(pull.values([1,2,3]), pull.take(2)),
128 + pull(pull.values([8,7,6,5]), pull.take(3)),
129 + ]),
130 + pull.collect(function (err, data) {
131 + t.error(err)
132 + t.deepEqual(data, [1,2,8,7,6])
133 + t.end()
134 + })
135 + )
136 +})
137 +
138 +test('abort streams after error', function (t) {
139 + var err = new Error('test error')
140 + var aborted = false
141 + pull(
142 + cat([pull.values([1,2,3]), function (_, cb) {
143 + cb(err)
144 + }, function (_err, cb) {
145 + //this stream should be aborted.
146 + aborted = true
147 + t.strictEqual(_err, err)
148 + cb()
149 + }]),
150 + pull.collect(function (_err) {
151 + t.equal(aborted, true)
152 + t.equal(_err, err)
153 + t.end()
154 + })
155 + )
156 +})
157 +
158 +
159 +
node_modules/pull-file/.npmignoreView
@@ -1,0 +1,2 @@
1 +node_modules
2 +.DS_Store
node_modules/pull-file/.travis.ymlView
@@ -1,0 +1,9 @@
1 +sudo: false
2 +language: node_js
3 +node_js:
4 + - 4.2
5 + - 5.4
6 +
7 +notifications:
8 + email:
9 + - damon.oehlman@gmail.com
node_modules/pull-file/README.mdView
@@ -1,0 +1,58 @@
1 +# pull-file
2 +
3 +a pull-streaming file reader, built directly on the low-level `fs` read functions,
4 +bypassing node's fs streams.
5 +
6 +[![NPM](https://nodei.co/npm/pull-file.png)](https://nodei.co/npm/pull-file/)
7 +
8 +[![Build Status](https://img.shields.io/travis/pull-stream/pull-file.svg?branch=master)](https://travis-ci.org/pull-stream/pull-file)
9 +
10 +## Example Usage
11 +
12 +```js
13 +var file = require('pull-file');
14 +var pull = require('pull-stream');
15 +var path = require('path');
16 +var inputFile = path.resolve(__dirname, '../test/assets/ipsum.txt');
17 +
18 +pull(
19 + file(inputFile, { bufferSize: 40 }),
20 + pull.take(4),
21 + pull.drain(function(buffer) {
22 + console.log(buffer.toString());
23 + })
24 +);
25 +```
26 +## options
27 +
28 +this supports all the options that node's [fs.createReadStream](https://nodejs.org/dist/latest-v6.x/docs/api/fs.html#fs_fs_createreadstream_path_options) supports,
29 +and it _also_ supports a `live: true` option, which keeps the stream open and waits for appends
30 +once it reaches the end, and an explicit `buffer` option naming the buffer your chunks will be read into.
31 +Note that because that buffer is reused, if your downstream operations are async you may run into
32 +concurrency issues with the `buffer` option. Use at your own risk!
33 +
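For example, a rough sketch of the `live` option (the log path is hypothetical; see also `test/append.js` in this package):

```js
var file = require('pull-file');
var pull = require('pull-stream');

// Keep the stream open at end-of-file and emit chunks as data is appended.
pull(
  file('/tmp/app.log', { live: true }),
  pull.drain(function (chunk) {
    process.stdout.write(chunk);
  })
);
```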
34 +
35 +## License(s)
36 +
37 +### MIT
38 +
39 +Copyright (c) 2014 Damon Oehlman <damon.oehlman@gmail.com>
40 +
41 +Permission is hereby granted, free of charge, to any person obtaining
42 +a copy of this software and associated documentation files (the
43 +'Software'), to deal in the Software without restriction, including
44 +without limitation the rights to use, copy, modify, merge, publish,
45 +distribute, sublicense, and/or sell copies of the Software, and to
46 +permit persons to whom the Software is furnished to do so, subject to
47 +the following conditions:
48 +
49 +The above copyright notice and this permission notice shall be
50 +included in all copies or substantial portions of the Software.
51 +
52 +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
53 +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
54 +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
55 +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
56 +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
57 +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
58 +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
node_modules/pull-file/examples/bench.jsView
@@ -1,0 +1,74 @@
1 +
2 +var Stats = require('statistics')
3 +var cont = require('cont')
4 +
5 +function bench (a, b, N) {
6 +
7 +  N = N || 20; var n = N
8 + var sA = Stats(), sB = Stats()
9 + var results_a = [], results_b = []
10 + var A, B
11 +
12 + var wins = 0
13 +
14 + ;(function next () {
15 +
16 + cont.series([
17 + function (cb) {
18 + a(function (err, data) {
19 + var time = data.time/1000
20 + var size = data.total/(1024*1024)
21 + sA.value(size/time) //bytes per ms
22 + results_a.push(A = data)
23 + cb()
24 +
25 + })
26 + },
27 + function (cb) {
28 + b(function (err, data) {
29 + var time = data.time/1000
30 + var size = data.total/(1024*1024)
31 + sB.value(size/time) //bytes per ms
32 + results_b.push(B = data)
33 + cb()
34 + })
35 + }
36 + ].sort(function () {
37 + return Math.random() - 0.5
38 + }))(function (err) {
39 + if(A.time < B.time)
40 + wins ++
41 +
42 + console.log('winner:', A.time < B.time ? 'A' : 'B', A, B)
43 +
44 + if(0<--n) next()
45 + else {
46 + console.log('A: pull-stream')
47 + console.log(sA.toJSON())
48 + console.log('B: node stream')
49 + console.log(sB.toJSON())
50 + console.log('chance A wins:', wins/N, wins, N - wins)
51 + }
52 + })
53 + })()
54 +
55 +}
56 +
57 +if(!module.parent) {
58 + var file = process.argv[2]
59 + var pull = require('./rate')
60 + var node = require('./node-rate')
61 + bench(function (cb) {
62 + pull(file, cb)
63 + }, function (cb) {
64 + node(file, cb)
65 + })
66 +
67 +}
68 +
69 +
70 +
71 +
72 +
73 +
74 +
node_modules/pull-file/examples/ipsum-chunks.jsView
@@ -1,0 +1,12 @@
1 +var file = require('..');
2 +var pull = require('pull-stream');
3 +var path = require('path');
4 +var inputFile = path.resolve(__dirname, '../test/assets/ipsum.txt');
5 +
6 +pull(
7 + file(inputFile, { bufferSize: 40 }),
8 + pull.take(4),
9 + pull.drain(function(buffer) {
10 + console.log(buffer.toString());
11 + })
12 +);
node_modules/pull-file/examples/node-rate.jsView
@@ -1,0 +1,22 @@
1 +
2 +
3 +var fs = require('fs')
4 +
5 +module.exports = function (file, cb) {
6 + var start = Date.now(), total = 0
7 + fs.createReadStream(file)
8 + .on('data', function (b) {
9 + total += b.length
10 + })
11 + .on('end', function () {
12 + cb(null, {time: Date.now() - start, total: total})
13 + })
14 +}
15 +
16 +if(!module.parent)
17 + module.exports (process.argv[2], function (err, stats) {
18 + var seconds = stats.time/1000, mb = stats.total/(1024*1024)
19 + console.log(seconds, mb, mb/seconds)
20 + })
21 +
22 +
node_modules/pull-file/examples/rate.jsView
@@ -1,0 +1,26 @@
1 +var pull = require('pull-stream')
2 +var File = require('../')
3 +
4 +
5 +module.exports = function (file, cb) {
6 + var start = Date.now(), total = 0
7 + pull(
8 + File(file),
9 + pull.drain(function (b) {
10 + total += b.length
11 + }, function (err) {
12 + cb(null, {time: Date.now() - start, total: total})
13 + })
14 + )
15 +}
16 +
17 +
18 +
19 +if(!module.parent)
20 + module.exports (process.argv[2], function (err, stats) {
21 + var seconds = stats.time/1000, mb = stats.total/(1024*1024)
22 + console.log(seconds, mb, mb/seconds)
23 + })
24 +
25 +
26 +
node_modules/pull-file/index.jsView
@@ -1,0 +1,170 @@
1 +
2 +/* jshint node: true */
3 +'use strict';
4 +
5 +var fs = require('fs');
6 +var Decoder = require('pull-utf8-decoder')
7 +/**
8 + # pull-file
9 +
10 + This is a simple module which uses raw file reading methods available in
11 + the node `fs` module to read files on-demand. It's a work in progress
12 + and feedback is welcome :)
13 +
14 + ## Example Usage
15 +
16 + <<< examples/ipsum-chunks.js
17 +
18 +**/
19 +module.exports = function(filename, opts) {
20 + var mode = opts && opts.mode || 0x1B6; // 0666
21 + var bufferSize = opts && (opts.bufferSize || (opts.buffer && opts.buffer.length)) || 1024*64;
22 + var start = opts && opts.start || 0
23 + var end = opts && opts.end || Number.MAX_SAFE_INTEGER
24 + var fd = opts && opts.fd
25 +
26 + var ended, closeNext, busy;
27 + var _buffer = opts && opts.buffer || new Buffer(bufferSize)
28 + var live = opts && !!opts.live
29 + var liveCb, closeCb
30 + var watcher
31 + if(live) {
32 + watcher = fs.watch(filename, {
33 + persistent: opts.persistent !== false,
34 + },
35 + function (event) {
36 + if(liveCb && event === 'change') {
37 + var cb = liveCb
38 + liveCb = null
39 + closeNext = false
40 + readNext(cb)
41 + }
42 + })
43 +
44 + }
45 +
46 + var flags = opts && opts.flags || 'r'
47 +
48 + function readNext(cb) {
49 + if(closeNext) {
50 + if(!live) close(cb);
51 + else liveCb = cb;
52 + return
53 + }
54 + var toRead = Math.min(end - start, bufferSize);
55 + busy = true;
56 +
57 + fs.read(
58 + fd,
59 + _buffer,
60 + 0,
61 + toRead,
62 + start,
63 + function(err, count, buffer) {
64 + busy = false;
65 + start += count;
66 +      // if we have received an end notification, just discard this data
67 + if(closeNext && !live) {
68 + close(closeCb);
69 + return cb(closeNext);
70 + }
71 +
72 + if (ended) {
73 + return cb(err || ended);
74 + }
75 +
76 + // if we encountered a read error pass it on
77 + if (err) {
78 + return cb(err);
79 + }
80 +
81 + if(count === buffer.length) {
82 + cb(null, buffer);
83 + } else if(count === 0 && live) {
84 + liveCb = cb; closeNext = true
85 + } else {
86 + closeNext = true;
87 + cb(null, buffer.slice(0, count));
88 + }
89 + }
90 + );
91 + _buffer = opts && opts.buffer || new Buffer(Math.min(end - start, bufferSize))
92 + }
93 +
94 + function open(cb) {
95 + busy = true;
96 + fs.open(filename, flags, mode, function(err, descriptor) {
97 + // save the file descriptor
98 + fd = descriptor;
99 +
100 + busy = false
101 + if(closeNext) {
102 + close(closeCb);
103 + return cb(closeNext);
104 + }
105 +
106 + if (err) {
107 + return cb(err);
108 + }
109 +
110 + // read the next bytes
111 + return readNext(cb);
112 + });
113 + }
114 +
115 + function close (cb) {
116 + if(!cb) throw new Error('close must have cb')
117 + if(watcher) watcher.close()
118 + //if auto close is disabled, then user manages fd.
119 + if(opts && opts.autoClose === false) return cb(true)
120 +
121 + //wait until we have got out of bed, then go back to bed.
122 + //or if we are reading, wait till we read, then go back to bed.
123 + else if(busy) {
124 + closeCb = cb
125 + return closeNext = true
126 + }
127 +
128 + //first read was close, don't even get out of bed.
129 + else if(!fd) {
130 + return cb(true)
131 + }
132 +
133 + //go back to bed
134 + else {
135 + fs.close(fd, function(err) {
136 + fd = null;
137 + cb(err || true);
138 + });
139 + }
140 + }
141 +
142 + function source (end, cb) {
143 + if (end) {
144 + ended = end;
145 + live = false;
146 + if(liveCb) {
147 + liveCb(end || true);
148 + }
149 + close(cb);
150 + }
151 + // if we have already received the end notification, abort further
152 + else if (ended) {
153 + cb(ended);
154 + }
155 +
156 + else if (! fd) {
157 + open(cb);
158 + }
159 +
160 + else
161 + readNext(cb);
162 + };
163 +
164 + //read directly to text
165 + if(opts && opts.encoding)
166 + return Decoder(opts.encoding)(source)
167 +
168 + return source
169 +
170 +};
node_modules/pull-file/package.jsonView
@@ -1,0 +1,70 @@
1 +{
2 + "_args": [
3 + [
4 + "pull-file@1.1.0",
5 + "/home/cel/src/ssb-npm-registry"
6 + ]
7 + ],
8 + "_from": "pull-file@1.1.0",
9 + "_id": "pull-file@1.1.0",
10 + "_inBundle": false,
11 + "_integrity": "sha256-Ky5N01jAWyETQvZOzKq7wL6DT8cyJxrQ8XaEwmP6/ts=",
12 + "_location": "/pull-file",
13 + "_phantomChildren": {},
14 + "_requested": {
15 + "type": "version",
16 + "registry": true,
17 + "raw": "pull-file@1.1.0",
18 + "name": "pull-file",
19 + "escapedName": "pull-file",
20 + "rawSpec": "1.1.0",
21 + "saveSpec": null,
22 + "fetchSpec": "1.1.0"
23 + },
24 + "_requiredBy": [
25 + "/"
26 + ],
27 + "_resolved": "http://localhost:8989/blobs/get/&Ky5N01jAWyETQvZOzKq7wL6DT8cyJxrQ8XaEwmP6/ts=.sha256",
28 + "_spec": "1.1.0",
29 + "_where": "/home/cel/src/ssb-npm-registry",
30 + "author": {
31 + "name": "Damon Oehlman",
32 + "email": "damon.oehlman@gmail.com"
33 + },
34 + "bugs": {
35 + "url": "https://github.com/DamonOehlman/pull-file/issues"
36 + },
37 + "dependencies": {
38 + "pull-utf8-decoder": "^1.0.2"
39 + },
40 + "description": "Pull streams implementation of a file reader",
41 + "devDependencies": {
42 + "cont": "^1.0.3",
43 + "osenv": "^0.1.3",
44 + "pull-stream": "^3.1.0",
45 + "statistics": "^2.0.1",
46 + "tape": "^4.4.0"
47 + },
48 + "directories": {
49 + "example": "examples",
50 + "test": "test"
51 + },
52 + "homepage": "https://github.com/DamonOehlman/pull-file",
53 + "keywords": [
54 + "pull-stream",
55 + "fs",
56 + "read"
57 + ],
58 + "license": "MIT",
59 + "main": "index.js",
60 + "name": "pull-file",
61 + "repository": {
62 + "type": "git",
63 + "url": "git+https://github.com/DamonOehlman/pull-file.git"
64 + },
65 + "scripts": {
66 + "gendocs": "gendocs > README.md",
67 + "test": "set -e; for t in test/*.js; do node $t; done"
68 + },
69 + "version": "1.1.0"
70 +}
node_modules/pull-file/test/append.jsView
@@ -1,0 +1,49 @@
1 +
2 +var pull = require('pull-stream')
3 +var fs = require('fs')
4 +var File = require('../')
5 +
6 +var tape = require('tape')
7 +
8 +tape('append to a file', function (t) {
9 +
10 + var filename = '/tmp/test_pull-file_append'+Date.now()
11 +
12 + var n = 10, r = 0, ended = false
13 + ;(function next () {
14 + --n
15 + fs.appendFile(filename, Date.now() +'\n', function (err) {
16 + if(err) throw err
17 +
18 + if(n) setTimeout(next, 20)
19 + else { ended = true; }
20 + })
21 + })()
22 +
23 + pull(
24 + File(filename, {live: true}),
25 + pull.through(function (chunk) {
26 + r ++
27 + t.notEqual(chunk.length, 0)
28 + }),
29 + pull.take(10),
30 + pull.drain(null, function (err) {
31 + if(err) throw err
32 + t.equal(n, 0, 'writes')
33 + t.equal(r, 10, 'reads')
34 + t.end()
35 + })
36 + )
37 +})
38 +
39 +
40 +
41 +
42 +
43 +
44 +
45 +
46 +
47 +
48 +
49 +
node_modules/pull-file/test/assets/ipsum.txtView
@@ -1,0 +1,9 @@
1 +Lorem ipsum dolor sit amet, consectetur adipiscing elit. Quisque quis tortor elit. Donec vulputate lacus at posuere sodales. Suspendisse cursus, turpis eget dapibus pulvinar, quam nunc scelerisque purus, eu sollicitudin ipsum purus vitae nibh. Phasellus aliquet, magna id convallis faucibus, justo turpis auctor massa, et iaculis nibh orci lobortis purus. Quisque venenatis dolor justo, ac fringilla nunc lobortis in. Maecenas sagittis accumsan sagittis. Quisque vel egestas nisl, eget commodo dolor. Fusce feugiat condimentum iaculis. Suspendisse potenti. Etiam mauris sem, fringilla elementum ultricies eu, consectetur sit amet lectus. Maecenas sit amet sagittis nibh. Donec lobortis, ipsum at malesuada congue, tellus libero ornare ipsum, sit amet pulvinar risus est pellentesque diam. Nunc lorem metus, blandit vitae tristique at, scelerisque vel metus.
2 +
3 +Nam non sagittis lorem, quis egestas enim. Maecenas cursus sagittis metus, ut condimentum purus sodales eget. Vestibulum et imperdiet turpis. Praesent ullamcorper sem non condimentum porta. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Aliquam et nunc at enim vulputate consectetur vitae id leo. Duis rutrum mattis velit, a luctus eros imperdiet id. Phasellus faucibus nulla quis condimentum congue. Fusce mi metus, pharetra eget malesuada et, scelerisque ac justo. Etiam fermentum lectus sit amet posuere gravida. Ut facilisis massa sed erat commodo, sed ultricies dolor porttitor. Donec ac ipsum laoreet, lobortis augue in, ullamcorper risus. Sed sagittis sapien ipsum, sit amet sollicitudin quam tempor quis. Mauris lacus augue, porta eget consequat posuere, eleifend id turpis.
4 +
5 +Pellentesque vitae augue eget nisi sollicitudin ullamcorper placerat in nisl. Nunc malesuada ipsum vel justo luctus suscipit. Nullam elementum id odio vel commodo. Nam sed sem at est sollicitudin suscipit quis in nisi. Aliquam erat volutpat. Nullam tincidunt, nunc eget fermentum tincidunt, lectus mi tempor mauris, vel suscipit elit felis ut arcu. Vestibulum ut malesuada neque, sit amet porttitor magna. Pellentesque adipiscing velit mauris, ut pharetra lorem vestibulum eu. Cras quis lacus tellus. Quisque eu facilisis odio. Morbi ac est porta, molestie nisi vel, euismod augue. Aenean tincidunt justo odio, at gravida felis consequat non. Vestibulum congue auctor arcu, quis placerat mauris posuere vitae. Integer rutrum nisl at consectetur aliquet.
6 +
7 +Donec enim lacus, feugiat nec urna nec, pulvinar venenatis massa. Aenean sed ante urna. Nullam dictum nulla nec lacus tincidunt venenatis. Morbi sed massa et odio rhoncus facilisis. Nullam interdum aliquet iaculis. Quisque vel risus et nunc malesuada tincidunt a sit amet dolor. Ut congue nibh at nulla sodales blandit. In sed massa cursus, dictum orci et, vestibulum neque. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Duis rutrum a purus at tempor.
8 +
9 +Integer posuere orci vel vehicula varius. Maecenas sed vehicula velit. Duis adipiscing lacus ac euismod pellentesque. Suspendisse lacinia enim ligula, nec dapibus eros faucibus sit amet. Aliquam malesuada diam sed nunc hendrerit interdum. Nam scelerisque, velit at lobortis blandit, ligula lacus fringilla metus, vitae pretium lectus ante vel sem. Morbi dapibus ante ut diam euismod vehicula. Phasellus accumsan scelerisque augue gravida luctus. Aenean et hendrerit erat. Sed placerat lacinia risus, vitae tincidunt metus eleifend eu. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Curabitur metus augue, sollicitudin id sagittis eu, posuere eget eros. Vestibulum convallis nunc sed nisi dictum, non tempus mi fringilla.
node_modules/pull-file/test/assets/test.txtView
@@ -1,0 +1,1 @@
1 +hello
node_modules/pull-file/test/explicit-buffer.jsView
@@ -1,0 +1,33 @@
1 +var test = require('tape');
2 +var pull = require('pull-stream');
3 +var file = require('..');
4 +
5 +var path = require('path');
6 +var crypto = require('crypto')
7 +var osenv = require('osenv')
8 +var fs = require('fs')
9 +
10 +var tmpfile = path.join(osenv.tmpdir(), 'test_pull-file_big')
11 +
12 +var big = crypto.pseudoRandomBytes(10*1024*1024)
13 +fs.writeFileSync(tmpfile, big)
14 +
15 +function hash (data) {
16 + return crypto.createHash('sha256').update(data).digest('hex')
17 +}
18 +
19 +test('large file in explicit buffer', function(t) {
20 + var buf = new Buffer(65551) // prime close to 1024 * 64
21 + var h = crypto.createHash('sha256')
22 +
23 + pull(
24 + file(tmpfile, {buffer: buf}),
25 + pull.through(function (chunk) {
26 + h.update(chunk)
27 + }),
28 + pull.onEnd(function(err) {
29 + t.equal(hash(big), h.digest('hex'))
30 + t.end()
31 + })
32 + );
33 +});
node_modules/pull-file/test/fd.jsView
@@ -1,0 +1,57 @@
1 +
2 +var tape = require('tape')
3 +var File = require('../')
4 +var pull = require('pull-stream')
5 +var fs = require('fs')
6 +
7 +var path = require('path')
8 +
9 +function asset(file) {
10 + return path.join(__dirname, 'assets', file)
11 +}
12 +
13 +function all(stream, cb) {
14 + pull(stream, pull.collect(function (err, ary) {
15 + cb(err, Buffer.concat(ary))
16 + }))
17 +}
18 +
19 +tape('can read a file with a provided fd', function (t) {
20 +
21 + var fd = fs.openSync(asset('ipsum.txt'), 'r')
22 +
23 + all(File(null, {fd: fd}), function (err, buf) {
24 + if(err) throw err
25 + t.ok(buf)
26 + t.end()
27 + })
28 +
29 +})
30 +
31 +
32 +tape('two files can read from one fd if autoClose is disabled', function (t) {
33 + var fd = fs.openSync(asset('ipsum.txt'), 'r')
34 +
35 + all(File(null, {fd: fd, autoClose: false}), function (err, buf1) {
36 + if(err) throw err
37 + t.ok(buf1)
38 + all(File(null, {fd: fd, autoClose: false}), function (err, buf2) {
39 + if(err) throw err
40 + t.ok(buf2)
41 + t.equal(buf1.toString(), buf2.toString())
42 + fs.close(fd, function (err) {
43 + if(err) throw err
44 + t.end()
45 + })
46 + })
47 + })
48 +
49 +})
50 +
51 +
52 +
53 +
54 +
55 +
56 +
57 +
node_modules/pull-file/test/largefile.jsView
@@ -1,0 +1,50 @@
1 +var test = require('tape');
2 +var pull = require('pull-stream');
3 +var file = require('..');
4 +
5 +var path = require('path');
6 +var crypto = require('crypto')
7 +var osenv = require('osenv')
8 +var fs = require('fs')
9 +
10 +var tmpfile = path.join(osenv.tmpdir(), 'test_pull-file_big')
11 +
12 +function hash (data) {
13 + return crypto.createHash('sha256').update(data).digest('hex')
14 +}
15 +
16 +test('large file', function(t) {
17 + var big = crypto.pseudoRandomBytes(10*1024*1024)
18 + fs.writeFileSync(tmpfile, big)
19 +
20 + pull(
21 + file(tmpfile),
22 + pull.collect(function(err, items) {
23 + t.equal(hash(big), hash(Buffer.concat(items)))
24 + t.end()
25 + })
26 + );
27 +});
28 +
29 +
30 +test('large file as ascii strings', function(t) {
31 + var big = crypto.pseudoRandomBytes(10*1024*1024).toString('base64')
32 + fs.writeFileSync(tmpfile, big, 'ascii');
33 +
34 + pull(
35 + file(tmpfile, {encoding: 'ascii'}),
36 + pull.through(function (str) {
37 + t.equal(typeof str, 'string');
38 + }),
39 + pull.collect(function(err, items) {
40 + t.equal(hash(big), hash(items.join('')))
41 + t.end()
42 + })
43 + );
44 +});
45 +
46 +
47 +
48 +
49 +
50 +
node_modules/pull-file/test/partial.jsView
@@ -1,0 +1,74 @@
1 +
2 +var tape = require('tape')
3 +var path = require('path')
4 +var pull = require('pull-stream')
5 +var File = require('../')
6 +var cont = require('cont')
7 +var fs = require('fs')
8 +
9 +var crypto = require('crypto')
10 +var osenv = require('osenv')
11 +
12 +var tmpfile = path.join(osenv.tmpdir(), 'test_pull-file_big')
13 +var crypto = require('crypto')
14 +
15 +var big = crypto.pseudoRandomBytes(10*1024*1024)
16 +fs.writeFileSync(tmpfile, big)
17 +
18 +function hash (data) {
19 + return crypto.createHash('sha256').update(data).digest('hex')
20 +}
21 +
22 +function asset(file) {
23 + return path.join(__dirname, 'assets', file)
24 +}
25 +
26 +var MB = 1024*1024
27 +
28 +tape('read files partially', function (t) {
29 +
30 + function test (file, start, end) {
31 + return function (cb) {
32 + var opts = {start: start, end: end}
33 + var expected
34 + var _expected = fs.readFileSync(file, opts)
35 +
36 + expected = _expected
37 + .slice(
38 + start || 0,
39 + end || _expected.length
40 + )
41 +
42 + pull(
43 + File(file, opts),
44 + pull.collect(function (err, ary) {
45 + var actual = Buffer.concat(ary)
46 + t.equal(actual.length, expected.length)
47 + t.equal(hash(actual), hash(expected))
48 + cb()
49 + })
50 + )
51 + }
52 +
53 + }
54 +
55 + cont.para([
56 + test(tmpfile, 0, 9*MB),
57 + test(tmpfile, 5*MB, 10*MB),
58 + test(tmpfile, 5*MB, 6*MB),
59 + test(asset('ipsum.txt')),
60 + test(asset('test.txt'), 1, 4)
61 + ])(function (err) {
62 + t.end()
63 + })
64 +
65 +})
66 +
67 +
68 +
69 +
70 +
71 +
72 +
73 +
74 +
node_modules/pull-file/test/small.jsView
@@ -1,0 +1,31 @@
1 +var path = require('path');
2 +var test = require('tape');
3 +var pull = require('pull-stream');
4 +var file = require('..');
5 +
6 +test('small text', function(t) {
7 + t.plan(1);
8 +
9 + pull(
10 + file(path.resolve(__dirname, 'assets', 'test.txt')),
11 + pull.map(function(data) {
12 + return data.toString();
13 + }),
14 + pull.collect(function(err, items) {
15 + t.equal(items.join(''), 'hello');
16 + })
17 + );
18 +});
19 +
20 +test('buffer size respected', function(t) {
21 + var expected = ['he', 'll', 'o'];
22 +
23 + t.plan(3);
24 +
25 + pull(
26 + file(path.resolve(__dirname, 'assets', 'test.txt'), { bufferSize: 2 }),
27 + pull.drain(function(data) {
28 + t.equal(data.toString(), expected.shift());
29 + })
30 + );
31 +});
node_modules/pull-file/test/terminate-read.jsView
@@ -1,0 +1,140 @@
1 +var path = require('path');
2 +var test = require('tape');
3 +var pull = require('pull-stream');
4 +var file = require('..');
5 +var fs = require('fs')
6 +
7 +var ipsum = path.resolve(__dirname, 'assets', 'ipsum.txt')
8 +var au = path.resolve(__dirname, 'assets', 'AU.txt')
9 +
10 +test('can terminate read process', function(t) {
11 +
12 + var expected = [
13 + 'Lorem ipsum dolor sit amet, consectetur ',
14 + 'adipiscing elit. Quisque quis tortor eli',
15 + 't. Donec vulputate lacus at posuere soda',
16 + 'les. Suspendisse cursus, turpis eget dap'
17 + ];
18 +
19 + pull(
20 + file(ipsum, { bufferSize: 40 }),
21 + pull.take(expected.length),
22 + pull.drain(function(data) {
23 + t.equal(data.toString(), expected.shift(), 'line ok in drain');
24 + }, function (err) {
25 + if(err) throw err
26 + t.end()
27 + })
28 + );
29 +});
30 +
31 +test('can terminate file immediately (before open)', function (t) {
32 +
33 + var source = file(ipsum)
34 + var sync = false
35 + source(true, function (end) {
36 + sync = true
37 + t.equal(end, true)
38 + })
39 + t.ok(sync)
40 + t.end()
41 +
42 +})
43 +
44 +test('can terminate file immediately (after open)', function (t) {
45 +
46 + var source = file(ipsum)
47 + var sync1 = false, sync2 = false
48 + t.plan(6)
49 + source(null, function (end, data) {
50 + if(sync1) throw new Error('read1 called twice')
51 + sync1 = true
52 + t.equal(end, true, 'read aborted, end=true')
53 + t.notOk(data, 'read aborted, data = null')
54 + })
55 + source(true, function (end) {
56 + if(sync2) throw new Error('read2 called twice')
57 + sync2 = true
58 + t.ok(sync1, 'read cb was first')
59 + t.equal(end, true)
60 + t.end()
61 + })
62 + t.notOk(sync1)
63 + t.notOk(sync2)
64 +
65 +})
66 +
67 +test('can terminate file during a read', function (t) {
68 +
69 + var source = file(ipsum, {bufferSize: 1024})
70 + var sync1 = false, sync2 = false
71 + source(null, function (end, data) {
72 + t.equal(end, null)
73 + t.ok(data)
74 + source(null, function (end, data) {
75 + sync1 = true
76 + t.equal(end, true)
77 + t.notOk(data, "data can't have been read")
78 + })
79 + source(true, function (end) {
80 + sync2 = true
81 + t.equal(end, true, 'valid abort end')
82 + t.ok(sync1, 'read called back first')
83 + t.end()
84 + })
85 + t.notOk(sync1)
86 + t.notOk(sync2)
87 + })
88 +
89 +})
90 +
91 +//usually the read succeeds before the close does,
92 +//but not always
93 +
94 +test('after 10k times, cb order is always correct', function (t) {
95 +
96 + var C = 0, R = 0, T = 0
97 + ;(function next () {
98 + T++
99 +
100 + if(T > 10000) {
101 + t.equal(R, 10000)
102 + t.equal(C, 0)
103 + t.equal(R+C, 10000)
104 + console.log(C, R, T)
105 + return t.end()
106 + }
107 +
108 + var fd = fs.openSync(__filename, 'r+', 0666)
109 + var data, closed
110 +
111 + //create a file stream with a fixed fd,
112 + //configured to automatically close (as by default)
113 + var source = file(null, {fd: fd})
114 +
115 + //read.
116 + source(null, function (err, _data) {
117 + data = true
118 + if(!closed) R++
119 + if(data && closed) next()
120 + })
121 +
122 + //abort.
123 + source(true, function (err) {
124 + closed = true
125 + if(!data) C ++
126 + if(data && closed) next()
127 + })
128 + })()
129 +
130 +})
131 +
132 +
133 +
134 +
135 +
136 +
137 +
138 +
139 +
140 +
node_modules/pull-hash/README.mdView
@@ -1,0 +1,62 @@
1 +# pull-hash
2 +
3 +crypto hash API using [pull-stream][]s
4 +
5 +```js
6 +var hash = require('pull-hash')
7 +var pull = require('pull-stream')
8 +
9 +pull(
10 + source,
11 + hash('sha256', 'hex', function (err, sum) {
12 + console.log('shasum:', sum)
13 + }),
14 + pull.drain()
15 +)
16 +```
17 +
18 +## API
19 +
20 +```js
21 +var hash = require('pull-hash')
22 +```
23 +#### `hash(type[, encoding], onEnd): through`
24 +
25 +Create a through stream that hashes the data and calls `onEnd` when it is done.
26 +
27 +- `type`: `crypto.Hash` object, or string to pass to `crypto.createHash`, e.g.
28 + `"sha256"`
29 +- `encoding`: encoding for the digest to pass to `Hash.digest()`
30 +- `onEnd(err, digest)`: callback with digest when stream is ended or errored
31 +
32 +### Extras
33 +
34 +```js
35 +var gitHash = require('pull-hash/ext/git')
36 +```
37 +#### `gitHash(object[, encoding], onEnd): through`
38 +
39 +Get a git object id.
40 +
41 +- `object.type`: one of `["commit", "tree", "blob", "tag"]`
42 +- `object.length || object.size`: size in bytes of the git object's data
43 +
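For example (mirroring this package's own test), hashing a five-byte blob the way git would:

```js
var pull = require('pull-stream')
var gitHash = require('pull-hash/ext/git')

pull(
  pull.once('asdf\n'),
  gitHash({ type: 'blob', size: 5 }, function (err, id) {
    console.log(id) // '8bd6648ed130ac9ece0f89cd9a8fbbfd2608427a'
  }),
  pull.drain()
)
```
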
44 +```js
45 +var ssbHash = require('pull-hash/ext/ssb')
46 +```
47 +#### `ssbHash(onEnd): through`
48 +
49 +Get a hash id of a secure-scuttlebutt blob.
50 +
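For example (again taken from this package's test), the id of an empty blob:

```js
var pull = require('pull-stream')
var ssbHash = require('pull-hash/ext/ssb')

pull(
  pull.empty(),
  ssbHash(function (err, id) {
    console.log(id) // '&47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=.sha256'
  }),
  pull.drain()
)
```
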
51 +[secure-scuttlebutt]: %iljFzUwTYposC7vs2V6AZgObPqwRVNAXjxYVVUoG4tU=.sha256
52 +[pull-stream]: %xAFKL6PBr1CIzL5xGHZC5DFVWiXmc7R0MMnZnBwq/yk=.sha256
53 +
54 +## License
55 +
56 +Copyright (c) 2016-2018 cel
57 +
58 +Usage of the works is permitted provided that this instrument is
59 +retained with the works, so that any entity that uses the works is
60 +notified of this instrument.
61 +
62 +DISCLAIMER: THE WORKS ARE WITHOUT WARRANTY.
node_modules/pull-hash/ext/git.jsView
@@ -1,0 +1,9 @@
1 +var hash = require('../')
2 +
3 +module.exports = function (object, encoding, onEnd) {
4 + if (onEnd == null) onEnd = encoding, encoding = null
5 + var hasher = hash('sha1', encoding || 'hex', onEnd)
6 + var size = Number(object.length || object.size || 0)
7 + hasher.hash.update(object.type + ' ' + size + '\0')
8 + return hasher
9 +}
node_modules/pull-hash/ext/ssb.jsView
@@ -1,0 +1,7 @@
1 +var hash = require('../')
2 +
3 +module.exports = function (onEnd) {
4 + return hash('sha256', 'base64', function (err, digest) {
5 + onEnd(err, digest && ('&' + digest + '.sha256'))
6 + })
7 +}
node_modules/pull-hash/index.jsView
@@ -1,0 +1,22 @@
1 +var crypto = require('crypto')
2 +
3 +module.exports = function (type, encoding, onEnd) {
4 + if (onEnd == null) onEnd = encoding, encoding = null
5 + var hash = (typeof type == 'string') ? crypto.createHash(type) : type
6 + var ended
7 + function hasher(read) {
8 + return function (abort, cb) {
9 + read(abort, function (end, data) {
10 + if (end === true && !hasher.digest) hasher.digest = hash.digest(encoding)
11 + else if (!end) hash.update(data)
12 + if (end && onEnd && !ended) {
13 + onEnd(end === true ? null : end, hasher.digest)
14 + ended = true
15 + }
16 + cb(end, data)
17 + })
18 + }
19 + }
20 + hasher.hash = hash
21 + return hasher
22 +}
node_modules/pull-hash/package.jsonView
@@ -1,0 +1,52 @@
1 +{
2 + "_args": [
3 + [
4 + "pull-hash@1.0.1",
5 + "/home/cel/src/ssb-npm-registry"
6 + ]
7 + ],
8 + "_from": "pull-hash@1.0.1",
9 + "_id": "pull-hash@1.0.1",
10 + "_inBundle": false,
11 + "_integrity": "sha256-GI8bZT7TX4QLtqbZw9VvYHrK1lNyMqg8xfF5ai7TJ48=",
12 + "_location": "/pull-hash",
13 + "_phantomChildren": {},
14 + "_requested": {
15 + "type": "version",
16 + "registry": true,
17 + "raw": "pull-hash@1.0.1",
18 + "name": "pull-hash",
19 + "escapedName": "pull-hash",
20 + "rawSpec": "1.0.1",
21 + "saveSpec": null,
22 + "fetchSpec": "1.0.1"
23 + },
24 + "_requiredBy": [
25 + "/"
26 + ],
27 + "_resolved": "http://localhost:8989/blobs/get/&GI8bZT7TX4QLtqbZw9VvYHrK1lNyMqg8xfF5ai7TJ48=.sha256",
28 + "_spec": "1.0.1",
29 + "_where": "/home/cel/src/ssb-npm-registry",
30 + "author": {
31 + "name": "cel"
32 + },
33 + "description": "pull-stream API for hashing data",
34 + "devDependencies": {
35 + "multicb": "^1.2.1",
36 + "pull-stream": "^3.2.0",
37 + "stream-to-pull-stream": "^1.6.6",
38 + "tape": "^4.5.0"
39 + },
40 + "homepage": "https://git.scuttlebot.io/%25fQT3h6iV6tTONormNQ6eRN0yqsdsv2%2Fv1KudeDiXwZs%3D.sha256",
41 + "license": "Fair",
42 + "main": "index.js",
43 + "name": "pull-hash",
44 + "repository": {
45 + "type": "git",
46 + "url": "ssb://%fQT3h6iV6tTONormNQ6eRN0yqsdsv2/v1KudeDiXwZs=.sha256"
47 + },
48 + "scripts": {
49 + "test": "node test"
50 + },
51 + "version": "1.0.1"
52 +}
node_modules/pull-hash/test.jsView
@@ -1,0 +1,78 @@
1 +var test = require('tape')
2 +var hash = require('.')
3 +var crypto = require('crypto')
4 +var fs = require('fs')
5 +var pull = require('pull-stream')
6 +var toPull = require('stream-to-pull-stream')
7 +var multicb = require('multicb')
8 +var path = require('path')
9 +var gitHash = require('./ext/git')
10 +var ssbHash = require('./ext/ssb')
11 +
12 +function hashFile(filename, type, encoding, cb) {
13 + var shasum = crypto.createHash(type)
14 + fs.createReadStream(filename).on('data', function (d) {
15 + shasum.update(d)
16 + })
17 + .on('error', cb)
18 + .on('end', function () {
19 + cb(null, shasum.digest(encoding))
20 + })
21 +}
22 +
23 +function hashFilePull(filename, type, encoding, cb) {
24 + pull(
25 + toPull(fs.createReadStream(filename)),
26 + hash(type, encoding, cb),
27 + pull.drain()
28 + )
29 +}
30 +
31 +test('hash a file', function (t) {
32 + var done = multicb({ pluck: 1, spread: true })
33 + hashFile(__filename, 'md5', 'hex', done())
34 + hashFilePull(__filename, 'md5', 'hex', done())
35 + done(function (err, digestNodejs, digestPull) {
36 + t.error(err, 'hashes')
37 + t.equals(digestPull, digestNodejs, 'hash')
38 + t.end()
39 + })
40 +})
41 +
42 +test('git object hash', function (t) {
43 + pull(
44 + pull.once('asdf\n'),
45 + gitHash({type: 'blob', size: 5}, function (err, digest) {
46 + t.error(err, 'git hash')
47 + t.equals(digest, '8bd6648ed130ac9ece0f89cd9a8fbbfd2608427a', 'hash')
48 + t.end()
49 + }),
50 + pull.drain()
51 + )
52 +})
53 +
54 +test('empty git blob', function (t) {
55 + var emptyId = 'e69de29bb2d1d6434b8b29ae775ad8c2e48c5391'
56 + pull(
57 + pull.empty(),
58 + gitHash({type: 'blob', length: 0}, function (err, digest) {
59 + t.error(err, 'git hash')
60 + t.equals(digest, emptyId, 'blob id')
61 + t.end()
62 + }),
63 + pull.drain()
64 + )
65 +})
66 +
67 +test('ssb blob id', function (t) {
68 + var emptyId = '&47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=.sha256'
69 + pull(
70 + pull.empty(),
71 + ssbHash(function (err, digest) {
72 + t.error(err, 'ssb hash')
73 + t.equals(digest, emptyId, 'blob id')
74 + t.end()
75 + }),
76 + pull.drain()
77 + )
78 +})
node_modules/pull-stream/.travis.ymlView
@@ -1,0 +1,5 @@
1 +language: node_js
2 +node_js:
3 + - 0.12
4 + - 4
5 + - 5
node_modules/pull-stream/LICENSEView
@@ -1,0 +1,22 @@
1 +Copyright (c) 2013 Dominic Tarr
2 +
3 +Permission is hereby granted, free of charge,
4 +to any person obtaining a copy of this software and
5 +associated documentation files (the "Software"), to
6 +deal in the Software without restriction, including
7 +without limitation the rights to use, copy, modify,
8 +merge, publish, distribute, sublicense, and/or sell
9 +copies of the Software, and to permit persons to whom
10 +the Software is furnished to do so,
11 +subject to the following conditions:
12 +
13 +The above copyright notice and this permission notice
14 +shall be included in all copies or substantial portions of the Software.
15 +
16 +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
17 +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
18 +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
19 +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
20 +ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
node_modules/pull-stream/README.mdView
@@ -1,0 +1,357 @@
1 +# pull-stream
2 +
3 +Minimal Pipeable Pull-stream
4 +
5 +In [classic-streams](https://github.com/nodejs/node-v0.x-archive/blob/v0.8/doc/api/stream.markdown),
6 +streams _push_ data to the next stream in the pipeline.
7 +In [new-streams](https://github.com/nodejs/node-v0.x-archive/blob/v0.10/doc/api/stream.markdown),
8 +data is pulled out of the source stream, into the destination.
9 +`pull-stream` is a minimal take on streams.
10 +Pull streams work great for "object" streams as well as streams of raw text or binary data.
11 +
12 +[![build status](https://secure.travis-ci.org/pull-stream/pull-stream.png)](https://travis-ci.org/pull-stream/pull-stream)
13 +
14 +
15 +## Quick Example
16 +
17 +Stat some files:
18 +
19 +```js
20 +pull(
21 + pull.values(['file1', 'file2', 'file3']),
22 + pull.asyncMap(fs.stat),
23 + pull.collect(function (err, array) {
24 + console.log(array)
25 + })
26 +)
27 +```
28 +Note that `pull(a, b, c)` is basically the same as `a.pipe(b).pipe(c)`.
29 +
30 +To grok how pull-streams work, read through [pull-streams workshop](https://github.com/pull-stream/pull-stream-workshop)
31 +
32 +## How do I do X with pull-streams?
33 +
34 +There is a module for that!
35 +
36 +Check the [pull-stream FAQ](https://github.com/pull-stream/pull-stream-faq)
37 +and post an issue if you have a question that is not covered.
38 +
39 +## Compatibility with node streams
40 +
41 +pull-streams are not _directly_ compatible with node streams,
42 +but pull-streams can be converted into node streams with
43 +[pull-stream-to-stream](https://github.com/pull-stream/pull-stream-to-stream),
44 +and node streams can be converted into pull-streams with [stream-to-pull-stream](https://github.com/pull-stream/stream-to-pull-stream).
45 +Correct back pressure is preserved in both directions.
46 +
47 +### Readable & Reader vs. Readable & Writable
48 +
49 +Instead of a readable stream and a writable stream, there is a `readable` stream
50 +(aka "Source") and a `reader` stream (aka "Sink"). A Through stream
51 +is a Sink that returns a Source.
52 +
53 +See also:
54 +* [Sources](./docs/sources/index.md)
55 +* [Throughs](./docs/throughs/index.md)
56 +* [Sinks](./docs/sinks/index.md)
57 +
58 +### Source (readable stream that produces values)
59 +
60 +A Source is a function `read(end, cb)`,
61 +that may be called many times,
62 +and will (asynchronously) call `cb(null, data)` once for each call.
63 +
64 +To signify an end state, the stream eventually returns `cb(err)` or `cb(true)`.
65 +When signifying an end state, `data` *must* be ignored.
66 +
67 +The `read` function *must not* be called until the previous call has called back,
68 +unless it is a call to abort the stream (`read(Error || true, cb)`).
69 +
70 +```js
71 +var n = 5;
72 +
73 +// random is a source of 5 random numbers.
74 +function random (end, cb) {
75 + if(end) return cb(end)
76 + // only read n times, then stop.
77 + if(0 > --n) return cb(true)
78 + cb(null, Math.random())
79 +}
80 +
81 +```
82 +
83 +### Sink (reader or writable stream that consumes values)
84 +
85 +A Sink is a function `reader(read)` that calls a Source (`read(null, cb)`)
86 +until it decides to stop (by calling `read(true, cb)`), or until the readable ends (`read` calls
87 +`cb(Error || true)`).
88 +
89 +All [Throughs](./docs/throughs/index.md)
90 +and [Sinks](./docs/sinks/index.md)
91 +are reader streams.
92 +
93 +```js
94 +// logger reads a source and logs it.
95 +function logger (read) {
96 + read(null, function next(end, data) {
97 + if(end === true) return
98 + if(end) throw end
99 +
100 + console.log(data)
101 + read(null, next)
102 + })
103 +}
104 +```
105 +
106 +Since Sources and Sinks are functions, you can pass them to each other!
107 +
108 +```js
109 +logger(random) //"pipe" the streams.
110 +
111 +```
112 +
113 +But it's easier to read if you use pull-stream's `pull` method:
114 +
115 +```js
116 +var pull = require('pull-stream')
117 +
118 +pull(random, logger)
119 +```
120 +
121 +### Creating reusable streams
122 +
123 +When working with pull streams it is common to create functions that return a stream.
124 +This is because streams contain mutable state and so can only be used once.
125 +In the above example, once `random` has been connected to a sink and has produced 5 random numbers it will not produce any more random numbers if connected to another sink.
126 +
127 +Therefore, use a function like this to create a random number generating stream that can be reused:
128 +
129 +```js
130 +
131 +// create a stream of n random numbers
132 +function createRandomStream (n) {
133 + return function randomReadable (end, cb) {
134 + if(end) return cb(end)
135 + if(0 > --n) return cb(true)
136 + cb(null, Math.random())
137 + }
138 +}
139 +
140 +pull(createRandomStream(5), logger)
141 +```
142 +
143 +
144 +### Through
145 +
146 +A through stream is both a reader (consumes values) and a readable (produces values).
147 +It's a function that takes a `read` function (a Sink),
148 +and returns another `read` function (a Source).
149 +
150 +```js
151 +// double is a through stream that doubles values.
152 +function double (read) {
153 + return function readable (end, cb) {
154 + read(end, function (end, data) {
155 + cb(end, data != null ? data * 2 : null)
156 + })
157 + }
158 +}
159 +
160 +pull(createRandomStream(5), double, logger)
161 +```
162 +
163 +### Pipeability
164 +
165 +Every pipeline must go from a `source` to a `sink`.
166 +Data will not start moving until the whole thing is connected.
167 +
168 +```js
169 +pull(source, through, sink)
170 +```
171 +
172 +Sometimes it's simplest to describe a stream in terms of other streams.
173 +pull can detect what sort of stream it starts with (by counting arguments),
174 +and if you pull together through streams, it gives you a new through stream.
175 +
176 +```js
177 +var tripleThrough =
178 + pull(through1(), through2(), through3())
179 +// The three through streams become one.
180 +
181 +pull(source(), tripleThrough, sink())
182 +```
183 +
184 +pull detects if it's missing a Source by checking function arity:
185 +if the function takes only one argument, it's either a sink or a through;
186 +otherwise it's a Source.
187 +
188 +## Duplex Streams
189 +
190 +Duplex streams, which are used to communicate between two things
191 +(e.g. over a network), are a little different. In a duplex stream,
192 +messages go both ways, so instead of a single function that represents the stream,
193 +you need a pair of streams. `{source: sourceStream, sink: sinkStream}`
194 +
195 +pipe duplex streams like this:
196 +
197 +``` js
198 +var a = duplex()
199 +var b = duplex()
200 +
201 +pull(a.source, b.sink)
202 +pull(b.source, a.sink)
203 +
204 +//which is the same as
205 +
206 +b.sink(a.source); a.sink(b.source)
207 +
208 +//but the easiest way is to allow pull to handle this
209 +
210 +pull(a, b, a)
211 +
212 +//"pull from a to b and then back to a"
213 +
214 +```
215 +
216 +## Design Goals & Rationale
217 +
218 +There is a deeper,
219 +[platonic abstraction](http://en.wikipedia.org/wiki/Platonic_idealism),
220 +where a stream is just an array in time, instead of in space.
221 +And all the various streaming "abstractions" are just crude implementations
222 +of this abstract idea.
223 +
224 +[classic-streams](https://github.com/joyent/node/blob/v0.8.16/doc/api/stream.markdown),
225 +[new-streams](https://github.com/joyent/node/blob/v0.10/doc/api/stream.markdown),
226 +[reducers](https://github.com/Gozala/reducers)
227 +
228 +The objective here is to find a simple realization of the best features of the above.
229 +
230 +### Type Agnostic
231 +
232 +A stream abstraction should be able to handle both streams of text and streams
233 +of objects.
234 +
235 +### A pipeline is also a stream.
236 +
237 +Something like this should work: `a.pipe(x.pipe(y).pipe(z)).pipe(b)`
238 +this makes it possible to write a custom stream simply by
239 +combining a few available streams.
240 +
241 +### Propagate End/Error conditions.
242 +
243 +If a stream ends in an unexpected way (error),
244 +then other streams in the pipeline should be notified.
245 +(this is a problem in node streams - when an error occurs,
246 +the stream is disconnected, and the user must handle that specially)
247 +
248 +Also, the stream should be able to be ended from either end.
249 +
250 +### Transparent Backpressure & Laziness
251 +
252 +Very simple transform streams must be able to transfer back pressure
253 +instantly.
254 +
255 +This is a problem in node streams: pause is only transferred on write, so
256 +on a long chain (`a.pipe(b).pipe(c)`), if `c` pauses, `b` will have to write to it
257 +to pause, and then `a` will have to write to `b` to pause.
258 +If `b` only transforms `a`'s output, then `a` will have to write to `b` twice to
259 +find out that `c` is paused.
260 +
261 +[reducers](https://github.com/Gozala/reducers) has an interesting method,
262 +where synchronous transformations propagate back pressure instantly!
263 +
264 +This means you can have two "smart" streams doing io at the ends, and lots of dumb
265 +streams in the middle, and back pressure will work perfectly, as if the dumb streams
266 +are not there.
267 +
268 +This makes laziness work right.
269 +
270 +### handling end, error, and abort.
271 +
272 +In pull streams, any part of the stream (source, sink, or through)
273 +may terminate the stream. (This is the case with node streams too,
274 +but it's not handled well.)
275 +
276 +#### source: end, error
277 +
278 +A source may end (`cb(true)` after read) or error (`cb(error)` after read).
279 +After ending, the source *must* never call `cb(null, data)`.
280 +
281 +#### sink: abort
282 +
283 +Sinks do not normally end the stream, but if they decide they do
284 +not need any more data they may "abort" the source by calling `read(true, cb)`.
285 +An abort (`read(true, cb)`) may be called before a preceding read call
286 +has called back.
287 +
288 +### handling end/abort/error in through streams
289 +
290 +Rules for implementing `read` in a through stream:
291 +1) Sink wants to stop. (the sink aborts the through)
292 +
293 + just forward the exact read() call to your source,
294 + any future read calls should cb(true).
295 +
296 +2) We want to stop. (abort from the middle of the stream)
297 +
298 + abort your source, and then cb(true) to tell the sink we have ended.
299 + If the source errored during abort, end the sink by calling back with `cb(err)`.
300 + (this will be an ordinary end/error for the sink)
301 +
302 +3) Source wants to stop. (`read(null, cb) -> cb(err||true)`)
303 +
304 + forward that exact callback towards the sink chain,
305 + we must respond to any future read calls with `cb(err||true)`.
306 +
307 +In none of the above cases is data flowing!
308 +4) If data is flowing (normal operation: `read(null, cb) -> cb(null, data)`)
309 +
310 + forward data downstream (towards the Sink)
311 + do none of the above!
312 +
313 +There either is data flowing (4) OR you have the error/abort cases (1-3), never both.
314 +
315 +
316 +## 1:1 read-callback ratio
317 +
318 +A pull stream source (and thus transform) returns *exactly one value* per read.
319 +
320 +This differs from node streams, which can use `this.push(value)` and an internal
321 +buffer to create transforms that write many values from a single read value.
322 +
323 +Pull streams don't come with their own buffering mechanism, but [there are ways
324 +to get around this](https://github.com/dominictarr/pull-stream-examples/blob/master/buffering.js).
325 +
326 +
327 +## Minimal bundle
328 +
329 +If you need only the `pull` function from this package you can reduce the size
330 +of the imported code (for instance to reduce a Browserify bundle) by requiring
331 +it directly:
332 +
333 +
334 +```js
335 +var pull = require('pull-stream/pull')
336 +
337 +pull(createRandomStream(5), logger())
338 +```
339 +
340 +
341 +## Further Examples
342 +
343 +- [dominictarr/pull-stream-examples](https://github.com/dominictarr/pull-stream-examples)
344 +- [./docs/examples](./docs/examples.md)
345 +
346 +Explore this repo further for more information about
347 +[sources](./docs/sources/index.md),
348 +[throughs](./docs/throughs/index.md),
349 +[sinks](./docs/sinks/index.md), and
350 +[glossary](./docs/glossary.md).
351 +
352 +
353 +## License
354 +
355 +MIT
356 +
357 +
node_modules/pull-stream/benchmarks/node.jsView
@@ -1,0 +1,97 @@
1 +var stream = require('stream')
2 +var inherits = require('util').inherits
3 +
4 +inherits(Values, stream.Readable)
5 +
6 +function Values (v) {
7 + this.i = 0
8 + this.values = v
9 + stream.Readable.call(this, {objectMode: true})
10 +}
11 +
12 +Values.prototype._read = function () {
13 + if(this.i >= this.values.length)
14 + this.push(null)
15 + else
16 + this.push(this.values[this.i++])
17 +}
18 +
19 +
20 +inherits(Async, stream.Transform)
21 +
22 +function Async (fn) {
23 + this._map = fn
24 + stream.Transform.call(this, {objectMode: true})
25 +}
26 +
27 +Async.prototype._transform = function (chunk, _, callback) {
28 + var self = this
29 + this._map(chunk, function (err, data) {
30 + self.push(JSON.parse(data))
31 + //it seems that this HAS to be async, which slows this down a lot.
32 + setImmediate(callback)
33 + })
34 +}
35 +Async.prototype._flush = function (callback) {
36 + this.push(null)
37 + setImmediate(callback)
38 +}
39 +
40 +inherits(Collect, stream.Writable)
41 +
42 +function Collect (cb) {
43 + this._ary = []
44 + this._cb = cb
45 + stream.Writable.call(this, {objectMode: true})
46 +}
47 +
48 +Collect.prototype._write = function (chunk, _, callback) {
49 + this._ary.push(chunk)
50 + setImmediate(callback)
51 +}
52 +
53 +//I couldn't figure out which method you are meant to override to implement a writable
54 +//stream so I ended up just using .end and that worked.
55 +
56 +//Collect.prototype._destroy = Collect.prototype._final = function (callback) {
57 +// this._cb(this._ary)
58 +// callback()
59 +//}
60 +//
61 +//Collect.prototype._flush = function (callback) {
62 +// this._cb(this._ary)
63 +// callback()
64 +//}
65 +//
66 +Collect.prototype.end = function () {
67 + this._cb(null, this._ary)
68 +}
69 +
70 +var bench = require('fastbench')
71 +const values = [
72 + JSON.stringify({ hello: 'world' }),
73 + JSON.stringify({ foo: 'bar' }),
74 + JSON.stringify({ bin: 'baz' })
75 +]
76 +
77 +const run = bench([
78 + function pull3 (done) {
79 + var c = new Collect(function (err, array) {
80 + if (err) return console.error(err)
81 + if(array.length < 3) throw new Error('wrong array')
82 + setImmediate(done)
83 + })
84 +
85 + new Values(values)
86 + .pipe(new Async(function (val, done) {
87 + done(null, val)
88 + }))
89 + .pipe(c)
90 + }]
91 +, N=100000)
92 +
93 +var heap = process.memoryUsage().heapUsed
94 +run(function () {
95 + console.log((process.memoryUsage().heapUsed - heap)/N)
96 +})
97 +
node_modules/pull-stream/benchmarks/pull.jsView
@@ -1,0 +1,57 @@
1 +const bench = require('fastbench')
2 +const pull = require('../')
3 +
4 +const values = [
5 + JSON.stringify({ hello: 'world' }),
6 + JSON.stringify({ foo: 'bar' }),
7 + JSON.stringify({ bin: 'baz' })
8 +]
9 +
10 +const run = bench([
11 + function pull3 (done) {
12 + const source = pull.values(values)
13 + const through = pull.asyncMap(function (val, done) {
14 + const json = JSON.parse(val)
15 + done(null, json)
16 + })
17 +
18 + const sink = pull.collect(function (err, array) {
19 + if (err) return console.error(err)
20 + setImmediate(done)
21 + })
22 + pull(source, through, sink)
23 + }/*,
24 + function pull_compose (done) {
25 + const source = pull.values(values)
26 + const through = pull.asyncMap(function (val, done) {
27 + const json = JSON.parse(val)
28 + done(null, json)
29 + })
30 +
31 + const sink = pull.collect(function (err, array) {
32 + if (err) return console.error(err)
33 + setImmediate(done)
34 + })
35 + pull(source, pull(through, sink))
36 + },
37 + function pull_chain (done) {
38 + const source = pull.values(values)
39 + const through = pull.asyncMap(function (val, done) {
40 + const json = JSON.parse(val)
41 + done(null, json)
42 + })
43 +
44 + const sink = pull.collect(function (err, array) {
45 + if (err) return console.error(err)
46 + setImmediate(done)
47 + })
48 + pull(pull(source, through), sink)
49 + }*/
50 +], N=100000)
51 +
52 +var heap = process.memoryUsage().heapUsed
53 +run(function () {
54 + console.log((process.memoryUsage().heapUsed - heap)/N)
55 +})
56 +
57 +
node_modules/pull-stream/docs/examples.mdView
@@ -1,0 +1,92 @@
1 +
2 +This document describes some examples of where various features
3 +of pull streams are used in simple real-world examples.
4 +
5 +Much of the focus here is handling the error cases. Indeed,
6 +distributed systems are _all about_ handling the error cases.
7 +
8 +# A simple source that ends correctly. (read, end)
9 +
10 +A normal file (source) is read, and sent to a sink stream
11 +that computes some aggregation upon that input such as
12 +the number of bytes, or number of occurrences of the `\n`
13 +character (i.e. the number of lines).
14 +
15 +The source reads a chunk of the file each time it's called;
16 +there is some optimum size depending on your operating system,
17 +file system, physical hardware,
18 +and how many other files are being read concurrently.
19 +
20 +When the sink gets a chunk, it iterates over the characters in it
21 +counting the `\n` characters. When the source returns `end` to the
22 +sink, the sink calls a user provided callback.
23 +
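A minimal sketch of such a sink, counting newline bytes with `pull.reduce` (the file name here is hypothetical):

```js
var fs = require('fs')
var pull = require('pull-stream')
var toPull = require('stream-to-pull-stream')

pull(
  toPull(fs.createReadStream('app.log')), // 'app.log' is a made-up path
  pull.reduce(function (lines, chunk) {
    // count the '\n' bytes in each chunk
    for (var i = 0; i < chunk.length; i++)
      if (chunk[i] === 0x0a) lines++
    return lines
  }, 0, function (err, lines) {
    if (err) throw err
    console.log(lines + ' lines')
  })
)
```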
24 +# A source that may fail. (read, err, end)
25 +
26 +A file is downloaded over http and written to a file.
27 +The network should always be considered to be unreliable,
28 +and you must design your system to recover if the download
29 +fails. (For example if the wifi were to cut out).
30 +
31 +The read stream is just the http download, and the sink
32 +writes it to a temporary file. If the source ends normally,
33 +the temporary file is moved to the correct location.
34 +If the source errors, the temporary file is deleted.
35 +
36 +(You could also write the file to the correct location,
37 +and delete it if it errors, but the temporary file method has the advantage
38 +that if the computer or process crashes it leaves only a temporary file
39 +and not a file that appears valid. Stray temporary files can be cleaned up
40 +or resumed when the process restarts.)
41 +
42 +# A sink that may fail
43 +
44 +If we read a file from disk, and upload it, then the upload is the sink that may error.
45 +The file system is probably faster than the upload and
46 +so it will mostly be waiting for the sink to ask for more data.
47 +Usually the sink calls `read(null, cb)` and the source retrieves chunks of the file
48 +until the file ends. If the sink errors, it then calls `read(true, cb)`
49 +and the source closes the file descriptor and stops reading.
50 +In this case the whole file is never loaded into memory.
51 +
52 +# A sink that may fail out of turn.
53 +
54 +A http client connects to a log server and tails a log in realtime.
55 +(Another process will write to the log file,
56 +but we don't need to worry about that.)
57 +
58 +The source is the server's log stream, and the sink is the client.
59 +First the source outputs the old data, this will always be a fast
60 +response, because that data is already at hand. When the old data is all
61 +written then the output rate may drop significantly because the server (the source) will
62 +wait for new data to be added to the file. Therefore,
63 +it becomes much more likely that the sink will error (for example if the network connection
64 +drops) while the source is waiting for new data. Because of this,
65 +it's necessary to be able to abort the stream reading (after you called
66 +read, but before it called back). If it was not possible to abort
67 +out of turn, you'd have to wait for the next read before you can abort
68 +but, depending on the source of the stream, the next read may never come.
69 +
70 +# A through stream that needs to abort.
71 +
72 +Say we wish to read from a file (source), parse each line as JSON (through),
73 +and then output to another file (sink).
74 +If the parser encounters illegal JSON then it will error and,
75 +if this parsing is a fatal error, then the parser needs to abort the pipeline
76 +from the middle. Here the source reads normally, but then the through fails.
77 +When the through finds an invalid line, it should first abort the source,
78 +and then call back to the sink with an error. This way,
79 +by the time the sink receives the error, the entire stream has been cleaned up.
80 +
81 +(You could abort the source and error back to the sink in parallel.
82 +However, if something happened to the source while aborting, then for the user
83 +to discover this error they would have to call the source again with another callback. As this
84 +situation would occur only rarely, users would be inclined not to handle it, leading to
85 +the possibility of undetected errors.
86 +Therefore, since it is better to have one callback at the sink, wait until the source
87 +has finished cleaning up before calling back to the sink with an error.)
88 +
89 +In some cases you may want the stream to continue, and the through stream can just ignore
90 +any lines that do not parse. An example where you definitely
91 +want a through stream to abort on invalid input would be an encrypted stream, which
92 +should be broken into chunks that are encrypted separately.
node_modules/pull-stream/docs/glossary.mdView
@@ -1,0 +1,51 @@
1 +# Glossary
2 +
3 +## read (end, cb)
4 +
5 +A function that retrieves the next chunk.
6 +All readable streams (sources, and throughs)
7 +must return a `read` function.
8 +
9 +## reader (read,...)
10 +
11 +A function to create a reader. It takes a `read` function
12 +as the first argument, and any other options after that.
13 +
14 +When passed to `pipeable` or `pipeableSource`,
15 +a new function is created that adds `.pipe(dest)`
16 +
17 +## Lazy vs Eager
18 +
19 +Lazy means to avoid doing something until you know you have
20 +to do it.
21 +
22 +Eager means to do something early, so you have it ready
23 +immediately when you need it.
24 +
25 +## [Source](sources/index.md)
26 +
27 +The first stream in the pipeline. The Source is not a reader (not writable).
28 +
29 +## [Sink](sinks/index.md)
30 +
31 +The last stream in the pipeline. The Sink is not readable.
32 +
33 +## [Through](throughs/index.md)
34 +
35 +The stream (or streams) in the middle of the pipeline, between your source and sink. A through is a reader and readable.
36 +
37 +## Push vs Pull
38 +
39 +A pull-stream is a stream where the movement of data
40 +is initiated by the sink, and a push-stream
41 +is a stream where the movement of data is initiated
42 +by the source.
43 +
44 +## Reader vs Writable
45 +
46 +In push streams, destination streams (Through and Sink),
47 +are _writable_. They are written to by the source streams.
48 +
49 +In pull streams, destination streams _read_ from the source
50 +streams. They are the active participant, so they are called
51 +_readers_ rather than _writables_.
node_modules/pull-stream/docs/pull.mdView
@@ -1,0 +1,143 @@
1 +# pull-stream/pull
2 +
3 +> pipe many pull streams into a pipeline
4 +
5 +## Background
6 +
7 +In pull-streams, you need a complete pipeline before data will flow.
8 +
9 +That means: a source, zero or more throughs, and a sink.
10 +
11 +But you can still create a _partial_ pipeline, which is great for tiny pull-stream modules.
12 +
13 +## Usage
14 +
15 +```js
16 +var pull = require('pull-stream/pull')
17 +```
18 +
19 +Create a simple complete pipeline:
20 +
21 +```js
22 +pull(source, sink) => undefined
23 +```
24 +
25 +Create a source modified by a through:
26 +
27 +```js
28 +pull(source, through) => source
29 +```
30 +
31 +Create a sink, but modify its input before it goes in:
32 +
33 +```js
34 +pull(through, sink) => sink
35 +```
36 +
37 +Create a through by chaining several throughs:
38 +
39 +```js
40 +pull(through1, through2) => through
41 +```
42 +
43 +These streams combine just like normal streams.
44 +
45 +```js
46 +pull(
47 + pull(source, through),
48 + pull(through1, through2),
49 + pull(through, sink)
50 +) => undefined
51 +```
52 +
53 +The complete pipeline returns undefined, because it cannot be piped to anything else.
54 +
55 +Pipe duplex streams like this:
56 +
57 +```js
58 +var a = duplex()
59 +var b = duplex()
60 +
61 +pull(a.source, b.sink)
62 +pull(b.source, a.sink)
63 +
64 +//which is the same as
65 +
66 +b.sink(a.source); a.sink(b.source)
67 +
68 +//but the easiest way is to allow pull to handle this
69 +
70 +pull(a, b, a)
71 +
72 +//"pull from a to b and then back to a"
73 +```
74 +
75 +## Continuable
76 +
77 +[Continuables](https://github.com/Raynos/continuable) let you defer a stream and handle the completion of the sink stream. For example:
78 +
79 +```js
80 +var cont = pull(...streams, sink)
81 +
82 +// ...
83 +
84 +cont(function (err) {
85 + // stream finished
86 +})
87 +```
88 +
89 +Or call beside it if you are not deferring:
90 +
91 +```js
92 +pull(...streams, sink)(function (err) {
93 + // stream finished
94 +})
95 +```
96 +
97 +They are created by making a sink stream return a continuable, which uses its callback and reads:
98 +
99 +```js
100 +function sink (read) {
101 + return function continuable (done) {
102 + // Do reads and eventually call `done`
103 + read(null, function (end, data) {
104 + if (end === true) return done(null)
105 + if (end) return done(end)
106 + // ... otherwise use `data`
107 + })
108 + }
109 +}
110 +```
111 +
112 +## API
113 +
114 +```js
115 +var pull = require('pull-stream/pull')
116 +```
117 +
118 +### `pull(...streams)`
119 +
120 +`pull` is a function that receives n-arity stream arguments and connects them into a pipeline.
121 +
122 +`pull` detects the type of stream by checking function arity, if the function takes only one argument it's either a sink or a through. Otherwise it's a source. A duplex stream is an object with the shape `{ source, sink }`.
123 +
124 +If the pipeline is complete (reduces into a source being passed into a sink), then `pull` returns `undefined`, as the data is flowing.
125 +
126 +If the pipeline is partial (reduces into either a source, a through, or a sink), then `pull` returns the partial pipeline, as it must be composed with other streams before the data will flow.
127 +
128 +## Install
129 +
130 +With [npm](https://npmjs.org/) installed, run
131 +
132 +```sh
133 +$ npm install pull-stream
134 +```
135 +
136 +## See Also
137 +
138 +- [`mafintosh/pump`](https://github.com/mafintosh/pump)
139 +- [`mafintosh/pumpify`](https://github.com/mafintosh/pumpify)
140 +
141 +## License
142 +
143 +[MIT](https://tldrlegal.com/license/mit-license)
node_modules/pull-stream/docs/sinks/collect.mdView
@@ -1,0 +1,10 @@
1 +# pull-stream/sinks/collect
2 +
3 +## usage
4 +
5 +### `collect = require('pull-stream/sinks/collect')`
6 +
7 +### `collect(cb)`
8 +
9 +Read the stream into an array, then callback.
10 +
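For example:

```js
var pull = require('pull-stream')

pull(
  pull.values(['a', 'b', 'c']),
  pull.collect(function (err, ary) {
    console.log(ary) // => ['a', 'b', 'c']
  })
)
```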
node_modules/pull-stream/docs/sinks/concat.mdView
@@ -1,0 +1,9 @@
1 +# pull-stream/sinks/concat
2 +
3 +## usage
4 +
5 +### `concat = require('pull-stream/sinks/concat')`
6 +
7 +### `concat(cb)`
8 +
9 +concat stream of strings into single string, then callback.
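For example:

```js
var pull = require('pull-stream')

pull(
  pull.values(['pull', '-', 'stream']),
  pull.concat(function (err, str) {
    console.log(str) // => 'pull-stream'
  })
)
```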
node_modules/pull-stream/docs/sinks/drain.mdView
@@ -1,0 +1,11 @@
1 +# pull-stream/sinks/drain
2 +
3 +## usage
4 +
5 +### `drain = require('pull-stream/sinks/drain')`
6 +
7 +### `drain(op?, done?)`
8 +
9 +Drain the stream, calling `op` on each `data`.
10 +Call `done` when the stream is finished.
11 +If `op` returns `=== false`, abort the stream.
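For example, log values and abort the stream early by returning `false` from `op`:

```js
var pull = require('pull-stream')

pull(
  pull.values([1, 2, 3, 4, 5]),
  pull.drain(function (n) {
    console.log(n)
    if (n === 3) return false // abort after 3; 4 and 5 are never read
  }, function (err) {
    if (err) throw err
  })
)
```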
node_modules/pull-stream/docs/sinks/index.mdView
@@ -1,0 +1,22 @@
1 +# Sinks
2 +
3 +A Sink is a stream that is not readable.
4 +You *must* have a sink at the end of a pipeline
5 +for data to move towards.
6 +
7 +You can only use _one_ sink per pipeline.
8 +
9 +``` js
10 +pull(source, through, sink)
11 +```
12 +
13 +See also:
14 +* [Sources](../sources/index.md)
15 +* [Throughs](../throughs/index.md)
16 +
17 +## [drain](./drain.md)
18 +## [reduce](./reduce.md)
19 +## [concat](./concat.md)
20 +## [collect](./collect.md)
21 +## [onEnd](./on-end.md)
22 +## [log](./log.md)
node_modules/pull-stream/docs/sinks/log.mdView
@@ -1,0 +1,9 @@
1 +# pull-stream/sinks/log
2 +
3 +## usage
4 +
5 +### `log = require('pull-stream/sinks/log')`
6 +
7 +### `log()`
8 +
9 +output the stream to `console.log`
node_modules/pull-stream/docs/sinks/on-end.mdView
@@ -1,0 +1,9 @@
1 +# pull-stream/sinks/on-end
2 +
3 +## usage
4 +
5 +### `onEnd = require('pull-stream/sinks/on-end')`
6 +
7 +### `onEnd(cb)`
8 +
9 +Drain the stream and then callback when done.
node_modules/pull-stream/docs/sinks/reduce.mdView
@@ -1,0 +1,9 @@
1 +# pull-stream/sinks/reduce
2 +
3 +## usage
4 +
5 +### `reduce = require('pull-stream/sinks/reduce')`
6 +
7 +### `reduce (reduce, initial, cb)`
8 +
9 +reduce stream into single value, then callback.
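For example, sum a stream of numbers:

```js
var pull = require('pull-stream')

pull(
  pull.values([1, 2, 3, 4]),
  pull.reduce(function (sum, n) { return sum + n }, 0, function (err, sum) {
    console.log(sum) // => 10
  })
)
```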
node_modules/pull-stream/docs/sources/count.mdView
@@ -1,0 +1,12 @@
1 +# pull-stream/sources/count
2 +
3 +## usage
4 +
5 +### `count = require('pull-stream/sources/count')`
6 +
7 +### `count(max, onAbort)`
8 +
9 +create a stream that outputs `0 ... max`.
10 +by default, `max = Infinity`, see
11 +[take](../throughs/take.md)
12 +
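For example:

```js
var pull = require('pull-stream')

pull(
  pull.count(3),
  pull.collect(function (err, ary) {
    console.log(ary) // => [0, 1, 2, 3]
  })
)
```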
node_modules/pull-stream/docs/sources/empty.mdView
@@ -1,0 +1,20 @@
1 +# pull-stream/sources/empty
2 +
3 +## usage
4 +
5 +### `empty = require('pull-stream/sources/empty')`
6 +
7 +### `empty()`
8 +
9 +A stream with no contents (it just ends immediately)
10 +
11 +``` js
12 +pull(
13 + pull.empty(),
14 + pull.collect(function (err, ary) {
15 +    console.log(ary)
16 +    // ==> []
17 +  })
18 +)
19 +```
20 +
node_modules/pull-stream/docs/sources/error.mdView
@@ -1,0 +1,9 @@
1 +# pull-stream/sources/error
2 +
3 +## usage
4 +
5 +### `error = require('pull-stream/sources/error')`
6 +
7 +### `error(err)`
8 +
9 +a stream that errors immediately
node_modules/pull-stream/docs/sources/index.mdView
@@ -1,0 +1,23 @@
1 +# Sources
2 +
3 +A source is a stream that is not writable.
4 +You *must* have a source at the start of a pipeline
5 +for data to move through.
6 +
7 +in general:
8 +
9 +``` js
10 +pull(source, through, sink)
11 +```
12 +
13 +See also:
14 +* [Throughs](../throughs/index.md)
15 +* [Sinks](../sinks/index.md)
16 +
17 +## [values](./values.md)
18 +## [keys](./keys.md)
19 +## [count](./count.md)
20 +## [infinite](./infinite.md)
21 +## [empty](./empty.md)
22 +## [once](./once.md)
23 +## [error](./error.md)
node_modules/pull-stream/docs/sources/infinite.mdView
@@ -1,0 +1,11 @@
1 +# pull-stream/sources/infinite
2 +
3 +## usage
4 +
5 +### `infinite = require('pull-stream/sources/infinite')`
6 +
7 +### `infinite(generator, onAbort)`
8 +
9 +create an unending stream by repeatedly calling a generator
10 +function (by default, `Math.random`)
11 +see [take](../throughs/take.md)
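For example, take three random numbers from an otherwise endless stream:

```js
var pull = require('pull-stream')

pull(
  pull.infinite(),  // defaults to Math.random
  pull.take(3),
  pull.collect(function (err, ary) {
    console.log(ary) // => three random numbers
  })
)
```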
node_modules/pull-stream/docs/sources/keys.mdView
@@ -1,0 +1,10 @@
1 +# pull-stream/sources/keys
2 +
3 +## usage
4 +
5 +### `keys = require('pull-stream/sources/keys')`
6 +
7 +### `keys(array | object, onAbort)`
8 +
9 +stream the key names from an object (or array)
10 +
node_modules/pull-stream/docs/sources/once.mdView
@@ -1,0 +1,9 @@
1 +# pull-stream/sources/once
2 +
3 +## usage
4 +
5 +### `once = require('pull-stream/sources/once')`
6 +
7 +### `once(value, onAbort)`
8 +
9 +a stream with a single value
node_modules/pull-stream/docs/sources/values.mdView
@@ -1,0 +1,9 @@
1 +# pull-stream/sources/values
2 +
3 +## usage
4 +
5 +### `values = require('pull-stream/sources/values')`
6 +
7 +### `values(array | object, onAbort)`
8 +
9 +create a SourceStream that reads the values from an array or object and then stops.
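For example, streaming the values of an object:

```js
var pull = require('pull-stream')

pull(
  pull.values({ a: 1, b: 2 }),
  pull.collect(function (err, ary) {
    console.log(ary) // => [1, 2]
  })
)
```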
node_modules/pull-stream/docs/spec.mdView
@@ -1,0 +1,67 @@
1 +# Synopsis
2 +
3 +In Pull-Streams, there are two fundamental types of streams: `Source`s and `Sink`s. There are two composite types of streams: `Through` (aka transform) and `Duplex`. A Through Stream is a sink stream that reads from a Source Stream and can in turn be read from. A Duplex stream is a pair of streams (`{Source, Sink}`).
4 +
5 +# Pull-Streams
6 +## Source Streams
7 +
8 +A Source Stream (aka readable stream) is an asynchronous function that may be called repeatedly until it returns a terminal state. Pull-streams have back pressure, but it is implicit instead of sending an explicit back pressure signal. If a source
9 +needs the sink to slow down, it may delay returning a read. If a sink needs the source to slow down, it just waits until it reads the source again.
10 +
11 +For example, the Source Stream `fn(abort, cb)` may have an internal implementation that will read data from a disk or network. If `fn` is called with the first argument (`abort`) being truthy, the callback will be passed `abort` as its first argument. The callback has three different argument configurations...
12 +
13 + 1. `cb(null, data)`, indicates that there is data.
14 + 2. `cb(true)`, indicates the stream has ended normally.
15 + 3. `cb(error)`, indicates that there was an error.
16 +
17 +The read method *must not* be called until the previous call has returned, except for a call to abort the stream.
18 +
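For instance, a minimal sketch of a Source Stream that emits three values and then ends:

```js
var values = [1, 2, 3]

// a minimal sketch: emits 1, 2, 3, then signals a normal end
function source (abort, cb) {
  if (abort) return cb(abort)           // echo the abort value back
  if (!values.length) return cb(true)   // configuration 2: normal end
  cb(null, values.shift())              // configuration 1: data
}
```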
19 +### End
20 +The stream may be terminated, for example `cb(err|end)`. The read method *must not* be called after it has terminated. As a normal stream end is propagated up the pipeline, an error should be propagated also, because it also means the end of the stream. If `cb(end=true)`, that is an "end", which means it's a valid termination; if `cb(err)`, that is an error.
21 +`error` and `end` are mostly the same. If you are buffering inputs and see an `end`, process those inputs and then the end.
22 +If you are buffering inputs and get an `error`, then you _may_ throw away that buffer and return the end.
23 +
24 +### Abort
25 +Sometimes it's the sink that errors, and if it can't read anymore then we _must_ abort the source. (Example: the source is a file stream from the local fs, and the sink is an http upload. Perhaps the network drops or the remote server crashes; in this case we should abort the source, so that its resources can be released.)
26 +
27 +To abort the source, call read with a truthy first argument. You may abort a source _before_ it has returned from a regular read. (If you wait for the previous read to complete, it's possible you'd get a deadlock if you are reading a stream that takes a long time; for example, `tail -f` is reading a file, but nothing has appended to that file yet.)
28 +
29 +When a stream is aborted during a read, the callback provided to the read function *must* be called first, with an error, and then the abort callback.
30 +
31 +## Sink Streams
32 +
33 +A Sink Stream (aka writable stream) is a function that a Source Stream is passed to. The Sink Stream calls the `read` function of the Source Stream, abiding by the rules about when it may not call.
34 +
35 +### Abort
36 +The Sink Stream may also abort the source if it can no longer read from it.
37 +
38 +## Through Streams
39 +
40 +A through stream is a sink stream that returns another source when it is passed a source.
41 +A through stream may be thought of as wrapping a source.
42 +
43 +## Duplex Streams
44 +
45 +A pair of independent streams, one Source and one Sink. The purpose of a duplex stream is not transformation of the data that passes through it. It's meant for communication only.
46 +
47 +# Composing Streams
48 +
49 +Since a Sink is a function that takes a Source, a Source may be fed into a Sink by simply passing the Source to the Sink.
50 +For example, `sink(source)`. Since a transform is a Sink that returns a Source, you can just add to that pattern by wrapping the source. For example, `sink(transform(source))`. This works, but it reads from right-to-left, and we are used to left-to-right.
51 +
52 +What is needed is a method for creating a left-to-right reading pipeline of pull-streams. For example, a method could implement the following interface...
53 +
54 +```
55 +pull([source] [,transform ...] [,sink ...])
56 +```
57 +
58 +The interface could allow for the following scenarios...
59 +
60 +1. Connect a complete pipeline: `pull(source, transform,* sink)` this connects a source to a sink via zero or more transforms.
61 +
62 +2. If a sink is not provided: `pull(source, transform+)` then pull should return the last `source`,
63 +this way streams can be easily combined in a functional way.
64 +
65 +3. If a source is not provided: `pull(transform,* sink)` then pull should return a sink that will complete the pipeline when
66 +it's passed a source: `function (source) { return pull(source, pipeline) }`.
67 +If neither a source nor a sink is provided, this will return a sink that will return another source (via 2), i.e. a through stream.
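For example, a sketch of the three scenarios using pull-stream's own helpers:

```js
var pull = require('pull-stream')

function double (n) { return n * 2 }

// 1. complete pipeline: data flows, pull returns undefined
pull(pull.values([1, 2, 3]), pull.map(double), pull.log())

// 2. no sink: pull returns a new source
var doubled = pull(pull.values([1, 2, 3]), pull.map(double))

// 3. no source: pull returns a sink that completes the pipeline later
var logDoubled = pull(pull.map(double), pull.log())
logDoubled(doubled)
```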
node_modules/pull-stream/docs/throughs/async-map.mdView
@@ -1,0 +1,10 @@
1 +# pull-stream/throughs/async-map
2 +
3 +## usage
4 +
5 +### `asyncMap = require('pull-stream/throughs/async-map')`
6 +
7 +### `asyncMap(fn)`
8 +
9 +Like [`map`](./map.md) but the signature of `fn` must be
10 +`function (data, cb) { cb(null, data) }`
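For example, stat a list of files (the file names here are only illustrative):

```js
var fs = require('fs')
var pull = require('pull-stream')

pull(
  pull.values(['package.json', 'index.js']), // hypothetical paths
  pull.asyncMap(fs.stat),
  pull.collect(function (err, stats) {
    if (err) throw err
    console.log(stats.map(function (s) { return s.size }))
  })
)
```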
node_modules/pull-stream/docs/throughs/filter-not.mdView
@@ -1,0 +1,9 @@
1 +# pull-stream/throughs/filter-not
2 +
3 +## usage
4 +
5 +### `filterNot = require('pull-stream/throughs/filter-not')`
6 +
7 +### `filterNot(test)`
8 +
9 +Like [`filter`](./filter.md), but remove items where the filter returns true.
node_modules/pull-stream/docs/throughs/filter.mdView
@@ -1,0 +1,14 @@
1 +# pull-stream/throughs/filter
2 +
3 +## usage
4 +
5 +### `filter = require('pull-stream/throughs/filter')`
6 +
7 +### `filter(test)`
8 +
9 +Like `[].filter(function (data) {return true || false})`:
10 +only `data` where `test(data) == true` are let through
11 +to the next stream.
12 +
13 +`test` defaults to `function id (e) { return e }`; this means
14 +any truthy javascript value is allowed through.
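For example, keep only the even numbers:

```js
var pull = require('pull-stream')

pull(
  pull.values([1, 2, 3, 4, 5]),
  pull.filter(function (n) { return n % 2 === 0 }),
  pull.collect(function (err, evens) {
    console.log(evens) // => [2, 4]
  })
)
```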
node_modules/pull-stream/docs/throughs/flatten.mdView
@@ -1,0 +1,42 @@
1 +# pull-stream/throughs/flatten
2 +
3 +## usage
4 +### `flatten = require('pull-stream/throughs/flatten')`
5 +### `flatten(streams)`
6 +Turn a stream of streams or a stream of arrays into a stream of their items (undoes group).
7 +
8 +
9 +## example
10 +```js
11 +test('flatten arrays', function (t) {
12 + pull(
13 + pull.values([
14 + [1, 2, 3],
15 + [4, 5, 6],
16 + [7, 8, 9]
17 + ]),
18 + pull.flatten(),
19 + pull.collect(function (err, numbers) {
20 + t.deepEqual([1, 2, 3, 4, 5, 6, 7, 8, 9], numbers)
21 + t.end()
22 + })
23 + )
24 +})
25 +
26 +test('flatten stream of streams', function (t) {
27 +
28 + pull(
29 + pull.values([
30 + pull.values([1, 2, 3]),
31 + pull.values([4, 5, 6]),
32 + pull.values([7, 8, 9])
33 + ]),
34 + pull.flatten(),
35 + pull.collect(function (err, numbers) {
36 + t.deepEqual([1, 2, 3, 4, 5, 6, 7, 8, 9], numbers)
37 + t.end()
38 + })
39 + )
40 +
41 +})
42 +```
node_modules/pull-stream/docs/throughs/index.mdView
@@ -1,0 +1,46 @@
1 +# Throughs
2 +
3 +A Through is a stream that both reads and is read by
4 +another stream.
5 +
6 +Through streams are optional.
7 +
8 +Put through streams in-between [sources](../sources/index.md) and [sinks](../sinks/index.md),
9 +like this:
10 +
11 +```js
12 +pull(source, through, sink)
13 +```
14 +
15 +Also, if you don't have the source/sink yet,
16 +you can pipe multiple through streams together
17 +to get one through stream!
18 +
19 +```js
20 +var throughABC = function () {
21 + return pull(
22 + throughA(),
23 + throughB(),
24 + throughC()
25 + )
26 +}
27 +```
28 +
29 +Which can then be treated like a normal through stream!
30 +
31 +```js
32 +pull(source(), throughABC(), sink())
33 +```
34 +
35 +See also:
36 +* [Sources](../sources/index.md)
37 +* [Sinks](../sinks/index.md)
38 +
39 +## [map](./map.md)
40 +## [asyncMap](./async-map.md)
41 +## [filter](./filter.md)
42 +## [filterNot](./filter-not.md)
43 +## [unique](./unique.md)
44 +## [nonUnique](./non-unique.md)
45 +## [take](./take.md)
46 +## [flatten](./flatten.md)
node_modules/pull-stream/docs/throughs/map.mdView
@@ -1,0 +1,54 @@
1 +# pull-stream/throughs/map
2 +
3 +> [].map for pull-streams
4 +
5 +## Background
6 +
7 +Pull-streams are arrays of data in time rather than space.
8 +
9 +As with a [`[].map`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/map), we may want to map a function over a stream.
10 +
11 +## Example
12 +
13 +```js
14 +var map = require('pull-stream/throughs/map')
15 +```
16 +
17 +```js
18 +pull(
19 + values([0, 1, 2, 3]),
20 + map(function (x) {
21 + return x * x
22 + }),
23 + log()
24 +)
25 +// 0
26 +// 1
27 +// 4
28 +// 9
29 +```
30 +
31 +## Usage
32 +
33 +### `map = require('pull-stream/throughs/map')`
34 +
35 +### `map((data) => data)`
36 +
37 +`map(fn)` returns a through stream that calls the given `fn` for each chunk of incoming data and outputs the return value, in the same order as before.
38 +
39 +## Install
40 +
41 +With [npm](https://npmjs.org/) installed, run
42 +
43 +```
44 +$ npm install pull-stream
45 +```
46 +
47 +## See Also
48 +
49 +- [`brycebaril/through2-map`](https://github.com/brycebaril/through2-map)
50 +- [`Rx.Obsevable#map`](http://xgrommx.github.io/rx-book/content/observable/observable_instance_methods/map.html)
51 +
52 +## License
53 +
54 +[MIT](https://tldrlegal.com/license/mit-license)
node_modules/pull-stream/docs/throughs/non-unique.mdView
@@ -1,0 +1,10 @@
1 +# pull-stream/throughs/non-unique
2 +
3 +## usage
4 +
5 +### `nonUnique = require('pull-stream/throughs/non-unique')`
6 +
7 +### `nonUnique(prop)`
8 +
9 +Filter unique items -- get the duplicates.
10 +The inverse of [`unique`](./unique.md)
node_modules/pull-stream/docs/throughs/take.mdView
@@ -1,0 +1,55 @@
1 +# pull-stream/throughs/take
2 +
3 +## Example usage
4 +
5 +```js
6 +var pull = require('pull-stream')
7 +// var take = require('pull-stream/throughs/take') // if you just need take
8 +
9 +pull(
10 + pull.values(['a', 'b', 'c', 'd', 'e']),
11 + pull.take(3),
12 + pull.collect((err, data) => {
13 + console.log(data)
14 + // => ['a', 'b', 'c']
15 + })
16 +)
17 +```
18 +
19 +## API
20 +
21 +take has 2 valid signatures:
22 +
23 +### `take(n) => through`
24 +
25 +Where `n` is a positive integer.
26 +`take` pulls n values from the source and then closes the stream.
27 +This is really useful for limiting how much you pull.
28 +
29 +### `take(testFn [, opts]) => through`
30 +
31 +If `testFn` is a function, read data from the source stream and forward it downstream until `testFn(data)` returns false, then close the stream.
32 +
33 +`opts` is an optional Object of form `{ last: Boolean }`, where `opts.last` determines whether the last value tested (before closing the stream) is included or excluded (default). e.g.
34 +
35 +```js
36 +pull(
37 + pull.values([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]),
38 +  pull.take(n => n < 4.6, { last: true }), // include the last value tested (5)
39 + pull.collect(function (err, results) {
40 + console.log(results)
41 + // => [1, 2, 3, 4, 5]
42 + })
43 +)
44 +```
45 +
46 +```js
47 +pull(
48 + pull.values([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]),
49 +  pull.take(n => n < 4.6, { last: false }), // exclude the last value tested (5)
50 + pull.collect(function (err, results) {
51 + console.log(results)
52 + // => [1, 2, 3, 4]
53 + })
54 +)
55 +```
node_modules/pull-stream/docs/throughs/through.mdView
@@ -1,0 +1,5 @@
1 +# pull-stream/throughs/through
2 +
3 +## usage
4 +
5 +### `through = require('pull-stream/throughs/through')`
node_modules/pull-stream/docs/throughs/unique.mdView
@@ -1,0 +1,11 @@
1 +# pull-stream/throughs/unique
2 +
3 +## usage
4 +
5 +### `unique = require('pull-stream/throughs/unique')`
6 +
7 +### `unique(prop)`
8 +
9 +Filter out items that have a repeated value for `prop()`.
10 +By default, `prop = function (it) { return it }`. If `prop` is a string,
11 +it will filter items which have repeated values for that property.
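For example, drop repeated values:

```js
var pull = require('pull-stream')

pull(
  pull.values([1, 2, 2, 3, 1]),
  pull.unique(),
  pull.collect(function (err, ary) {
    console.log(ary) // => [1, 2, 3]
  })
)
```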
node_modules/pull-stream/index.jsView
@@ -1,0 +1,19 @@
1 +'use strict'
2 +
3 +var sources = require('./sources')
4 +var sinks = require('./sinks')
5 +var throughs = require('./throughs')
6 +
7 +exports = module.exports = require('./pull')
8 +
9 +exports.pull = exports
10 +
11 +for(var k in sources)
12 + exports[k] = sources[k]
13 +
14 +for(var k in throughs)
15 + exports[k] = throughs[k]
16 +
17 +for(var k in sinks)
18 + exports[k] = sinks[k]
19 +
node_modules/pull-stream/package.jsonView
@@ -1,0 +1,56 @@
1 +{
2 + "_args": [
3 + [
4 + "pull-stream@3.6.14",
5 + "/home/cel/src/ssb-npm-registry"
6 + ]
7 + ],
8 + "_from": "pull-stream@3.6.14",
9 + "_id": "pull-stream@3.6.14",
10 + "_inBundle": false,
11 + "_integrity": "sha256-WUect/OXBboO6Xft04OCQ06Vf39ZSNag27SC1kxtrFw=",
12 + "_location": "/pull-stream",
13 + "_phantomChildren": {},
14 + "_requested": {
15 + "type": "version",
16 + "registry": true,
17 + "raw": "pull-stream@3.6.14",
18 + "name": "pull-stream",
19 + "escapedName": "pull-stream",
20 + "rawSpec": "3.6.14",
21 + "saveSpec": null,
22 + "fetchSpec": "3.6.14"
23 + },
24 + "_requiredBy": [
25 + "/",
26 + "/stream-to-pull-stream"
27 + ],
28 + "_resolved": "http://localhost:46465/-/blobs/get/&WUect/OXBboO6Xft04OCQ06Vf39ZSNag27SC1kxtrFw=.sha256",
29 + "_spec": "3.6.14",
30 + "_where": "/home/cel/src/ssb-npm-registry",
31 + "author": {
32 + "name": "Dominic Tarr",
33 + "email": "dominic.tarr@gmail.com",
34 + "url": "http://dominictarr.com"
35 + },
36 + "bugs": {
37 + "url": "https://github.com/pull-stream/pull-stream/issues"
38 + },
39 + "description": "minimal pull stream",
40 + "devDependencies": {
41 + "fastbench": "^1.0.1",
42 + "pull-abortable": "~2.0.0",
43 + "tape": "~2.12.3"
44 + },
45 + "homepage": "https://pull-stream.github.io",
46 + "license": "MIT",
47 + "name": "pull-stream",
48 + "repository": {
49 + "type": "git",
50 + "url": "git://github.com/pull-stream/pull-stream.git"
51 + },
52 + "scripts": {
53 + "test": "set -e; for t in test/*.js; do node $t; done"
54 + },
55 + "version": "3.6.14"
56 +}
node_modules/pull-stream/pull.jsView
@@ -1,0 +1,49 @@
1 +'use strict'
2 +
3 +module.exports = function pull (a) {
4 + var length = arguments.length
5 + if (typeof a === 'function' && a.length === 1) {
6 + var args = new Array(length)
7 + for(var i = 0; i < length; i++)
8 + args[i] = arguments[i]
9 + return function (read) {
10 + if (args == null) {
11 + throw new TypeError("partial sink should only be called once!")
12 + }
13 +
14 + // Grab the reference after the check, because it's always an array now
15 + // (engines like that kind of consistency).
16 + var ref = args
17 + args = null
18 +
19 + // Prioritize common case of small number of pulls.
20 + switch (length) {
21 + case 1: return pull(read, ref[0])
22 + case 2: return pull(read, ref[0], ref[1])
23 + case 3: return pull(read, ref[0], ref[1], ref[2])
24 + case 4: return pull(read, ref[0], ref[1], ref[2], ref[3])
25 + default:
26 + ref.unshift(read)
27 + return pull.apply(null, ref)
28 + }
29 + }
30 + }
31 +
32 + var read = a
33 +
34 + if (read && typeof read.source === 'function') {
35 + read = read.source
36 + }
37 +
38 + for (var i = 1; i < length; i++) {
39 + var s = arguments[i]
40 + if (typeof s === 'function') {
41 + read = s(read)
42 + } else if (s && typeof s === 'object') {
43 + s.sink(read)
44 + read = s.source
45 + }
46 + }
47 +
48 + return read
49 +}
node_modules/pull-stream/sinks/collect.jsView
@@ -1,0 +1,10 @@
1 +'use strict'
2 +
3 +var reduce = require('./reduce')
4 +
5 +module.exports = function collect (cb) {
6 + return reduce(function (arr, item) {
7 + arr.push(item)
8 + return arr
9 + }, [], cb)
10 +}
node_modules/pull-stream/sinks/concat.jsView
@@ -1,0 +1,9 @@
1 +'use strict'
2 +
3 +var reduce = require('./reduce')
4 +
5 +module.exports = function concat (cb) {
6 + return reduce(function (a, b) {
7 + return a + b
8 + }, '', cb)
9 +}
node_modules/pull-stream/sinks/drain.jsView
@@ -1,0 +1,48 @@
1 +'use strict'
2 +
3 +module.exports = function drain (op, done) {
4 + var read, abort
5 +
6 + function sink (_read) {
7 + read = _read
8 + if(abort) return sink.abort()
9 + //this function is much simpler to write if you
10 + //just use recursion, but by using a while loop
11 + //we do not blow the stack if the stream happens to be sync.
12 + ;(function next() {
13 + var loop = true, cbed = false
14 + while(loop) {
15 + cbed = false
16 + read(null, function (end, data) {
17 + cbed = true
18 + if(end = end || abort) {
19 + loop = false
20 + if(done) done(end === true ? null : end)
21 + else if(end && end !== true)
22 + throw end
23 + }
24 + else if(op && false === op(data) || abort) {
25 + loop = false
26 + read(abort || true, done || function () {})
27 + }
28 + else if(!loop){
29 + next()
30 + }
31 + })
32 + if(!cbed) {
33 + loop = false
34 + return
35 + }
36 + }
37 + })()
38 + }
39 +
40 + sink.abort = function (err, cb) {
41 + if('function' == typeof err)
42 + cb = err, err = true
43 + abort = err || true
44 + if(read) return read(abort, cb || function () {})
45 + }
46 +
47 + return sink
48 +}
node_modules/pull-stream/sinks/find.jsView
@@ -1,0 +1,28 @@
1 +'use strict'
2 +
3 +function id (e) { return e }
4 +var prop = require('../util/prop')
5 +var drain = require('./drain')
6 +
7 +module.exports = function find (test, cb) {
8 + var ended = false
9 + if(!cb)
10 + cb = test, test = id
11 + else
12 + test = prop(test) || id
13 +
14 + return drain(function (data) {
15 + if(test(data)) {
16 + ended = true
17 + cb(null, data)
18 + return false
19 + }
20 + }, function (err) {
21 + if(ended) return //already called back
22 + cb(err === true ? null : err, null)
23 + })
24 +}
25 +
26 +
27 +
28 +
node_modules/pull-stream/sinks/index.jsView
@@ -1,0 +1,12 @@
1 +'use strict'
2 +
3 +module.exports = {
4 + drain: require('./drain'),
5 + onEnd: require('./on-end'),
6 + log: require('./log'),
7 + find: require('./find'),
8 + reduce: require('./reduce'),
9 + collect: require('./collect'),
10 + concat: require('./concat')
11 +}
12 +
node_modules/pull-stream/sinks/log.jsView
@@ -1,0 +1,9 @@
1 +'use strict'
2 +
3 +var drain = require('./drain')
4 +
5 +module.exports = function log (done) {
6 + return drain(function (data) {
7 + console.log(data)
8 + }, done)
9 +}
node_modules/pull-stream/sinks/on-end.jsView
@@ -1,0 +1,7 @@
1 +'use strict'
2 +
3 +var drain = require('./drain')
4 +
5 +module.exports = function onEnd (done) {
6 + return drain(null, done)
7 +}
node_modules/pull-stream/sinks/reduce.jsView
@@ -1,0 +1,22 @@
1 +'use strict'
2 +
3 +var drain = require('./drain')
4 +
5 +module.exports = function reduce (reducer, acc, cb ) {
6 + if(!cb) cb = acc, acc = null
7 + var sink = drain(function (data) {
8 + acc = reducer(acc, data)
9 + }, function (err) {
10 + cb(err, acc)
11 + })
12 + if (arguments.length === 2)
13 + return function (source) {
14 + source(null, function (end, data) {
15 + //if ended immediately, and no initial...
16 + if(end) return cb(end === true ? null : end)
17 + acc = data; sink(source)
18 + })
19 + }
20 + else
21 + return sink
22 +}
node_modules/pull-stream/sources/count.jsView
@@ -1,0 +1,13 @@
1 +'use strict'
2 +
3 +module.exports = function count (max) {
4 + var i = 0; max = max || Infinity
5 + return function (end, cb) {
6 + if(end) return cb && cb(end)
7 + if(i > max)
8 + return cb(true)
9 + cb(null, i++)
10 + }
11 +}
12 +
13 +
node_modules/pull-stream/sources/empty.jsView
@@ -1,0 +1,7 @@
1 +'use strict'
2 +//a stream that ends immediately.
3 +module.exports = function empty () {
4 + return function (abort, cb) {
5 + cb(true)
6 + }
7 +}
node_modules/pull-stream/sources/error.jsView
@@ -1,0 +1,8 @@
1 +'use strict'
2 +//a stream that errors immediately.
3 +module.exports = function error (err) {
4 + return function (abort, cb) {
5 + cb(err)
6 + }
7 +}
8 +
node_modules/pull-stream/sources/index.jsView
@@ -1,0 +1,10 @@
1 +'use strict'
2 +module.exports = {
3 + keys: require('./keys'),
4 + once: require('./once'),
5 + values: require('./values'),
6 + count: require('./count'),
7 + infinite: require('./infinite'),
8 + empty: require('./empty'),
9 + error: require('./error')
10 +}
node_modules/pull-stream/sources/infinite.jsView
@@ -1,0 +1,10 @@
1 +'use strict'
2 +module.exports = function infinite (generate) {
3 + generate = generate || Math.random
4 + return function (end, cb) {
5 + if(end) return cb && cb(end)
6 + return cb(null, generate())
7 + }
8 +}
9 +
10 +
node_modules/pull-stream/sources/keys.jsView
@@ -1,0 +1,7 @@
1 +'use strict'
2 +var values = require('./values')
3 +module.exports = function (object) {
4 + return values(Object.keys(object))
5 +}
6 +
7 +
node_modules/pull-stream/sources/once.jsView
@@ -1,0 +1,16 @@
1 +'use strict'
2 +var abortCb = require('../util/abort-cb')
3 +
4 +module.exports = function once (value, onAbort) {
5 + return function (abort, cb) {
6 + if(abort)
7 + return abortCb(cb, abort, onAbort)
8 + if(value != null) {
9 + var _value = value; value = null
10 + cb(null, _value)
11 + } else
12 + cb(true)
13 + }
14 +}
15 +
16 +
node_modules/pull-stream/sources/values.jsView
@@ -1,0 +1,23 @@
1 +'use strict'
2 +var abortCb = require('../util/abort-cb')
3 +
4 +module.exports = function values (array, onAbort) {
5 + if(!array)
6 + return function (abort, cb) {
7 + if(abort) return abortCb(cb, abort, onAbort)
8 + return cb(true)
9 + }
10 + if(!Array.isArray(array))
11 + array = Object.keys(array).map(function (k) {
12 + return array[k]
13 + })
14 + var i = 0
15 + return function (abort, cb) {
16 + if(abort)
17 + return abortCb(cb, abort, onAbort)
18 + if(i >= array.length)
19 + cb(true)
20 + else
21 + cb(null, array[i++])
22 + }
23 +}
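As the code above shows, `values` also accepts a plain object (its values are streamed) and an optional `onAbort` callback. A small sketch, assuming the published `pull-stream` package:

```js
var pull = require('pull-stream')

pull(
  // object values are streamed in key order; the second argument
  // is called if the stream is aborted before all values are read
  pull.values({ a: 1, b: 2, c: 3 }, function (err) {
    // err is null for a clean abort, or the abort error otherwise
  }),
  pull.take(2), // aborts the source after two items
  pull.collect(function (err, ary) {
    console.log(ary) // [1, 2]
  })
)
```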
node_modules/pull-stream/test/abort-stalled.jsView
@@ -1,0 +1,68 @@
1 +
2 +
3 +var tape = require('tape')
4 +var pull = require('../')
5 +//var abortable = require('pull-abortable')
6 +
7 +function hang (values, onAbort) {
8 +  var i = 0, _cb // declare _cb so hang() does not leak a global
9 + return function (abort, cb) {
10 + if(i < values.length)
11 + cb(null, values[i++])
12 + else if(!abort)
13 + _cb = cb
14 + else {
15 + _cb(abort)
16 + cb(abort) //??
17 + onAbort && onAbort()
18 + }
19 + }
20 +}
21 +
22 +function abortable () {
23 + var _read, aborted
24 + function reader (read) {
25 + _read = read
26 + return function (abort, cb) {
27 + if(abort) aborted = abort
28 + read(abort, cb)
29 + }
30 + }
31 +
32 + reader.abort = function (cb) {
33 + cb = cb || function (err) {
34 + if(err && err !== true) throw err
35 + }
36 + if(aborted)
37 + cb(aborted)
38 + else _read(true, cb)
39 + }
40 +
41 + return reader
42 +}
43 +
44 +function test (name, trx) {
45 + tape('test abort:'+name, function (t) {
46 + var a = abortable()
47 +
48 + pull(
49 + hang([1,2,3], function () {
50 + t.end()
51 + }),
52 + trx,
53 + a,
54 + pull.drain(function (e) {
55 + if(e === 3)
56 + setImmediate(function () {
57 + a.abort()
58 + })
59 + }, function (err) {
60 + })
61 + )
62 + })
63 +}
64 +
65 +test('through', pull.through())
66 +test('map', pull.map(function (e) { return e }))
67 +test('take', pull.take(Boolean))
68 +
node_modules/pull-stream/test/async-map.jsView
@@ -1,0 +1,152 @@
1 +var pull = require('../')
2 +var tape = require('tape')
3 +tape('async-map', function (t) {
4 +
5 + pull(
6 + pull.count(),
7 + pull.take(21),
8 + pull.asyncMap(function (data, cb) {
9 + return cb(null, data + 1)
10 + }),
11 + pull.collect(function (err, ary) {
12 + console.log(ary)
13 + t.equal(ary.length, 21)
14 + t.end()
15 + })
16 + )
17 +})
18 +
19 +tape('abort async map', function (t) {
20 + var err = new Error('abort')
21 + t.plan(2)
22 +
23 + var read = pull(
24 + pull.infinite(),
25 + pull.asyncMap(function (data, cb) {
26 + setImmediate(function () {
27 + cb(null, data)
28 + })
29 + })
30 + )
31 +
32 + read(null, function (end) {
33 + if(!end) throw new Error('expected read to end')
34 + t.ok(end, "read's callback")
35 + })
36 +
37 + read(err, function (end) {
38 + if(!end) throw new Error('expected abort to end')
39 + t.ok(end, "Abort's callback")
40 + t.end()
41 + })
42 +
43 +})
44 +
45 +tape('abort async map (source is slow to ack abort)', function (t) {
46 + var err = new Error('abort')
47 + t.plan(3)
48 +
49 + function source(end, cb) {
50 + if (end) setTimeout(function () { cb(end) }, 20)
51 + else cb(null, 10)
52 + }
53 +
54 + var read = pull(
55 + source,
56 + pull.asyncMap(function (data, cb) {
57 + setImmediate(function () {
58 + cb(null, data)
59 + })
60 + })
61 + )
62 +
63 + var ended = false
64 +
65 + read(null, function (end) {
66 + if(!end) throw new Error('expected read to end')
67 + ended = true
68 + t.ok(end, "read's callback")
69 + })
70 +
71 + read(err, function (end) {
72 + if(!end) throw new Error('expected abort to end')
73 + t.ok(end, "Abort's callback")
74 + t.ok(ended, 'read called back first')
75 + t.end()
76 + })
77 +
78 +})
79 +
80 +tape('abort async map (async source)', function (t) {
81 + var err = new Error('abort')
82 + t.plan(2)
83 +
84 + var read = pull(
85 + function(err, cb) {
86 + setImmediate(function() {
87 + if (err) return cb(err)
88 + cb(null, 'x')
89 + })
90 + },
91 + pull.asyncMap(function (data, cb) {
92 + setImmediate(function () {
93 + cb(null, data)
94 + })
95 + })
96 + )
97 +
98 + read(null, function (end) {
99 + if(!end) throw new Error('expected read to end')
100 + t.ok(end, "read's callback")
101 + })
102 +
103 + read(err, function (end) {
104 + if(!end) throw new Error('expected abort to end')
105 + t.ok(end, "Abort's callback")
106 + t.end()
107 + })
108 +
109 +})
110 +tape('asyncMap aborts when map errors', function (t) {
111 + t.plan(2)
112 + var ERR = new Error('abort')
113 + pull(
114 + pull.values([1,2,3], function (err) {
115 + console.log('on abort')
116 + t.equal(err, ERR, 'abort gets error')
117 + t.end()
118 + }),
119 + pull.asyncMap(function (data, cb) {
120 + cb(ERR)
121 + }),
122 + pull.collect(function (err) {
123 + t.equal(err, ERR, 'collect gets error')
124 + })
125 + )
126 +})
127 +
128 +tape("async map should pass its own error", function (t) {
129 + var i = 0
130 + var error = new Error('error on last call')
131 +
132 + pull(
133 + function (end, cb) {
134 + end ? cb(true) : cb(null, i+1)
135 + },
136 + pull.asyncMap(function (data, cb) {
137 + setTimeout(function () {
138 + if(++i < 5) cb(null, data)
139 + else {
140 + cb(error)
141 + }
142 + }, 100)
143 + }),
144 + pull.collect(function (err, five) {
145 + t.equal(err, error, 'should return err')
146 + t.deepEqual(five, [1,2,3,4], 'should skip failed item')
147 + t.end()
148 + })
149 + )
150 +})
151 +
152 +
node_modules/pull-stream/test/collect.jsView
@@ -1,0 +1,13 @@
1 +var pull = require('../')
2 +var test = require('tape')
3 +
4 +test('collect empty', function (t) {
5 + pull(
6 + pull.empty(),
7 + pull.collect(function (err, ary) {
8 + t.notOk(err)
9 + t.deepEqual(ary, [])
10 + t.end()
11 + })
12 + )
13 +})
node_modules/pull-stream/test/compose.jsView
@@ -1,0 +1,57 @@
1 +var pull = require('../')
2 +var test = require('tape')
3 +//test through streams compose on pipe!
4 +
5 +test('join through streams with pipe', function (t) {
6 +
7 + var map = pull.map
8 +
9 + var pipeline =
10 + pull(
11 + map(function (d) {
12 + //make exciting!
13 + return d + '!'
14 + }),
15 + map(function (d) {
16 + //make loud
17 + return d.toUpperCase()
18 + }),
19 + map(function (d) {
20 + //add sparkles
21 + return '*** ' + d + ' ***'
22 + })
23 + )
24 + //the pipe line does not have a source stream.
25 + //so it should be a reader (function that accepts
26 + //a read function)
27 +
28 + t.equal('function', typeof pipeline)
29 + t.equal(1, pipeline.length)
30 +
31 + //if we pipe a read function to the pipeline,
32 + //the pipeline will become readable!
33 +
34 + var read =
35 + pull(
36 + pull.values(['billy', 'joe', 'zeke']),
37 + pipeline
38 + )
39 +
40 + t.equal('function', typeof read)
41 + //we will know it's a read function,
42 + //because read takes two args.
43 + t.equal(2, read.length)
44 +
45 + pull(
46 + read,
47 + pull.collect(function (err, array) {
48 + console.log(array)
49 + t.deepEqual(
50 + array,
51 + [ '*** BILLY! ***', '*** JOE! ***', '*** ZEKE! ***' ]
52 + )
53 + t.end()
54 + })
55 + )
56 +
57 +})
node_modules/pull-stream/test/concat.jsView
@@ -1,0 +1,18 @@
1 +var test = require('tape')
2 +var pull = require('../')
3 +
4 +test('concat', function (t) {
5 + var n = 0
6 + pull(
7 + pull.values('hello there this is a test'.split(/([aeiou])/)),
8 + pull.through(function () {
9 + n++
10 + }),
11 + pull.concat(function (err, mess) {
12 + t.equal(mess, 'hello there this is a test')
13 + t.equal(n, 17)
14 + t.end()
15 + })
16 + )
17 +
18 +})
node_modules/pull-stream/test/continuable.jsView
@@ -1,0 +1,38 @@
1 +var pull = require('../pull')
2 +var count = require('../sources/count')
3 +var error = require('../sources/error')
4 +var map = require('../throughs/map')
5 +var test = require('tape')
6 +
7 +test('continuable stream', function (t) {
8 + t.plan(2)
9 +
10 + var continuable = function (read) {
11 + return function (cb) {
12 + read(null, function next (end, data) {
13 + if (end === true) return cb(null)
14 + if (end) return cb(end)
15 + read(end, next)
16 + })
17 + }
18 + }
19 +
20 + // With values:
21 + pull(
22 + count(5),
23 + map(function (item) {
24 + return item * 2
25 + }),
26 + continuable
27 + )(function (err) {
28 + t.false(err, 'no error')
29 + })
30 +
31 + // With error:
32 + pull(
33 + error(new Error('test error')),
34 + continuable
35 + )(function (err) {
36 + t.is(err.message, 'test error', 'error')
37 + })
38 +})
node_modules/pull-stream/test/drain-abort.jsView
@@ -1,0 +1,77 @@
1 +var pull = require('..')
2 +
3 +var tape = require('tape')
4 +
5 +tape('abort on drain', function (t) {
6 +
7 + var c = 100
8 + var drain = pull.drain(function () {
9 + if(c < 0) throw new Error('stream should have aborted')
10 + if(!--c) return false //drain.abort()
11 + }, function () {
12 + t.end()
13 + })
14 +
15 + pull(pull.infinite(), drain)
16 +
17 +})
18 +
19 +
20 +function delay () {
21 + return pull.asyncMap(function (e, cb) {
22 + setTimeout(function () { cb(null, e) })
23 + })
24 +}
25 +
26 +tape('abort on drain - async', function (t) {
27 +
28 + var c = 100
29 + var drain = pull.drain(function () {
30 + if(c < 0) throw new Error('stream should have aborted')
31 + if(!--c) return drain.abort()
32 + }, function () {
33 + t.end()
34 + })
35 +
36 + pull(pull.infinite(), delay(), drain)
37 +
38 +})
39 +
40 +tape('abort on drain - sync', function (t) {
41 +
42 + var c = 100
43 + var drain = pull.drain(function () {
44 + if(c < 0) throw new Error('stream should have aborted')
45 + if(!--c) return drain.abort()
46 + }, function () {
47 + t.end()
48 + })
49 +
50 + pull(pull.infinite(), drain)
51 +
52 +})
53 +
54 +
55 +tape('abort on drain - async, out of cb', function (t) {
56 +
57 + var c = 0, ERR = new Error('test ABORT')
58 + var drain = pull.drain(function () {
59 + --c
60 + }, function (err) {
61 + t.ok(c < 0)
62 + t.equal(err, ERR)
63 + t.end()
64 + })
65 +
66 + pull(pull.infinite(), delay(), drain)
67 +
68 + setTimeout(function () {
69 + drain.abort(ERR)
70 + }, 100)
71 +
72 +})
73 +
74 +
75 +
76 +
77 +
node_modules/pull-stream/test/drain-if.jsView
@@ -1,0 +1,40 @@
1 +
2 +var pull = require('../')
3 +var test = require('tape')
4 +
5 +test('reduce becomes through', function (t) {
6 + pull(
7 + pull.values([1,2,3]),
8 + pull.reduce(function (a, b) {return a + b}, 0, function (err, val) {
9 + t.equal(val, 6)
10 + t.end()
11 + })
12 + )
13 +})
14 +
15 +test('reduce without initial value', function (t) {
16 + pull(
17 + pull.values([1,2,3]),
18 + pull.reduce(function (a, b) {return a + b}, function (err, val) {
19 + t.equal(val, 6)
20 + t.end()
21 + })
22 + )
23 +})
24 +
25 +
26 +test('reduce becomes drain', function (t) {
27 + pull(
28 + pull.values([1,2,3]),
29 + pull.reduce(
30 + function (a, b) {return a + b},
31 + 0,
32 + function (err, acc) {
33 + t.equal(acc, 6)
34 + t.end()
35 + }
36 + )
37 + )
38 +})
39 +
40 +
node_modules/pull-stream/test/filter.jsView
@@ -1,0 +1,61 @@
1 +
2 +var test = require('tape')
3 +var pull = require('../')
4 +
5 +test('filtered randomness', function (t) {
6 + pull(
7 + pull.infinite(),
8 + pull.filter(function (d) {
9 + console.log('f', d)
10 + return d > 0.5
11 + }),
12 + pull.take(100),
13 + pull.collect(function (err, array) {
14 + t.equal(array.length, 100)
15 + array.forEach(function (d) {
16 + t.ok(d > 0.5)
17 + t.ok(d <= 1)
18 + })
19 + console.log(array)
20 + t.end()
21 + })
22 + )
23 +})
24 +
25 +test('filter with regexp', function (t) {
26 + pull(
27 + pull.infinite(),
28 + pull.map(function (d) {
29 + return Math.round(d * 1000).toString(16)
30 + }),
31 + pull.filter(/^[^e]+$/i), //no E
32 + pull.take(37),
33 + pull.collect(function (err, array) {
34 + t.equal(array.length, 37)
35 + console.log(array)
36 + array.forEach(function (d) {
37 + t.equal(d.indexOf('e'), -1)
38 + })
39 + t.end()
40 + })
41 + )
42 +})
43 +
44 +test('inverse filter with regexp', function (t) {
45 + pull(
46 + pull.infinite(),
47 + pull.map(function (d) {
48 + return Math.round(d * 1000).toString(16)
49 + }),
50 + pull.filterNot(/^[^e]+$/i), //no E
51 + pull.take(37),
52 + pull.collect(function (err, array) {
53 + t.equal(array.length, 37)
54 + array.forEach(function (d) {
55 + t.notEqual(d.indexOf('e'), -1)
56 + })
57 + t.end()
58 + })
59 + )
60 +})
61 +
node_modules/pull-stream/test/find.jsView
@@ -1,0 +1,80 @@
1 +
2 +var pull = require('../')
3 +var test = require('tape')
4 +
5 +test('find 7', function (t) {
6 + pull(
7 + pull.values([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]),
8 + pull.find(function (d) {
9 + return d == 7
10 + }, function (err, seven) {
11 + t.equal(seven, 7)
12 + t.notOk(err)
13 + t.end()
14 + })
15 + )
16 +})
17 +
18 +var target = Math.random()
19 +test('find ' + target, function (t) {
20 + var f = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10].map(Math.random)
21 +
22 + f.push(target)
23 + pull(
24 + pull.values(f.sort()),
25 + pull.find(function (d) {
26 + return d == target
27 + }, function (err, found) {
28 + t.equal(found, target)
29 + t.notOk(err)
30 + t.end()
31 + })
32 + )
33 +})
34 +
35 +test('find missing', function (t) {
36 + var f = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
37 +
38 + pull(
39 + pull.values(f.sort()),
40 + pull.find(function (d) {
41 + return d == target
42 + }, function (err, found) {
43 + t.equal(found, null)
44 + t.notOk(err)
45 + t.end()
46 + })
47 + )
48 +})
49 +
50 +
51 +test('there can only be one', function (t) {
52 +
53 + pull(
54 + pull.values([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]),
55 + pull.asyncMap(function (e, cb) {
56 + process.nextTick(function () {
57 + cb(null, e)
58 + })
59 + }),
60 + pull.find(function (d) {
61 + return d >= 7
62 + }, function (err, seven) {
63 + t.equal(seven, 7)
64 + t.notOk(err)
65 + t.end()
66 + })
67 + )
68 +
69 +})
70 +
71 +test('find null', function (t) {
72 + pull(
73 + pull.values([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]),
74 + pull.find(null, function (err, first) {
75 + t.equal(first, 1)
76 + t.notOk(err)
77 + t.end()
78 + })
79 + )
80 +})
node_modules/pull-stream/test/flatten.jsView
@@ -1,0 +1,135 @@
1 +var pull = require('../')
2 +var test = require('tape')
3 +
4 +test('flatten arrays', function (t) {
5 + pull(
6 + pull.values([
7 + [1, 2, 3],
8 + [4, 5, 6],
9 + [7, 8, 9]
10 + ]),
11 + pull.flatten(),
12 + pull.collect(function (err, numbers) {
13 + t.deepEqual([1, 2, 3, 4, 5, 6, 7, 8, 9], numbers)
14 + t.end()
15 + })
16 + )
17 +})
18 +
19 +test('flatten - number of reads', function (t) {
20 + var reads = 0
21 + pull(
22 + pull.values([
23 + pull.values([1, 2, 3]),
24 + ]),
25 + pull.flatten(),
26 + pull.through(function() {
27 + reads++
28 + console.log('READ', reads)
29 + }),
30 + pull.take(2),
31 + pull.collect(function (err, numbers) {
32 + t.deepEqual([1, 2], numbers)
33 + t.equal(reads, 2)
34 + t.end()
35 + })
36 + )
37 +
38 +})
39 +test('flatten stream of streams', function (t) {
40 +
41 + pull(
42 + pull.values([
43 + pull.values([1, 2, 3]),
44 + pull.values([4, 5, 6]),
45 + pull.values([7, 8, 9])
46 + ]),
47 + pull.flatten(),
48 + pull.collect(function (err, numbers) {
49 + t.deepEqual([1, 2, 3, 4, 5, 6, 7, 8, 9], numbers)
50 + t.end()
51 + })
52 + )
53 +
54 +})
55 +
56 +test('flatten stream of broken streams', function (t) {
57 + var _err = new Error('I am broken'), sosEnded
58 + pull(
59 + pull.values([
60 + pull.error(_err)
61 + ], function(err) {
62 + sosEnded = err;
63 + }),
64 + pull.flatten(),
65 + pull.onEnd(function (err) {
66 + t.equal(err, _err)
67 + process.nextTick(function() {
68 + t.equal(sosEnded, null, 'should abort stream of streams')
69 + t.end()
70 + })
71 + })
72 + )
73 +})
74 +
75 +test('abort flatten', function (t) {
76 + var sosEnded, s1Ended, s2Ended
77 + var read = pull(
78 + pull.values([
79 + pull.values([1,2], function(err) {s1Ended = err}),
80 + pull.values([3,4], function(err) {s2Ended = err}),
81 + ], function(err) {
82 + sosEnded = err;
83 + }),
84 + pull.flatten()
85 + )
86 +
87 + read(null, function(err, data) {
88 + t.notOk(err)
89 + t.equal(data,1)
90 + read(true, function(err, data) {
91 + t.equal(err, true)
92 + process.nextTick(function() {
93 + t.equal(sosEnded, null, 'should abort stream of streams')
94 + t.equal(s1Ended, null, 'should abort current nested stream')
95 + t.equal(s2Ended, undefined, 'should not abort queued nested stream')
96 + t.end()
97 + })
98 + })
99 + })
100 +})
101 +
102 +test('abort flatten before 1st read', function (t) {
103 + var sosEnded, s1Ended
104 + var read = pull(
105 + pull.values([
106 + pull.values([1,2], function(err) {s1Ended = err})
107 + ], function(err) {
108 + sosEnded = err;
109 + }),
110 + pull.flatten()
111 + )
112 +
113 + read(true, function(err, data) {
114 + t.equal(err, true)
115 + t.notOk(data)
116 + process.nextTick(function() {
117 + t.equal(sosEnded, null, 'should abort stream of streams')
118 + t.equal(s1Ended, undefined, 'should abort current nested stream')
119 + t.end()
120 + })
121 + })
122 +})
123 +
124 +test('flatten handles stream with normal objects', function (t) {
125 + pull(
126 + pull.values([
127 + [1,2,3], 4, [5,6,7], 8, 9 ,10
128 + ]),
129 + pull.flatten(),
130 + pull.collect(function (err, ary) {
131 + t.deepEqual(ary, [1,2,3,4,5,6,7,8,9,10])
132 + t.end()
133 + })
134 + )
135 +})
node_modules/pull-stream/test/map.jsView
@@ -1,0 +1,20 @@
1 +
2 +var tape = require('tape')
3 +
4 +var pull = require('../')
5 +
6 +
7 +tape('map throughs ends stream', function (t) {
8 + var err = new Error('unwholesome number')
9 + pull(
10 + pull.values([1,2,3,3.4,4]),
11 + pull.map(function (e) {
12 + if(e !== ~~e)
13 + throw err
14 + }),
15 + pull.drain(null, function (_err) {
16 + t.equal(_err, err)
17 + t.end()
18 + })
19 + )
20 +})
node_modules/pull-stream/test/pull.jsView
@@ -1,0 +1,113 @@
1 +var tape = require('tape')
2 +
3 +function curry (fun) {
4 + return function () {
5 + var args = [].slice.call(arguments)
6 + return function (read) {
7 + return fun.apply(null, [read].concat(args))
8 + }
9 + }
10 +}
11 +
12 +var pull = require('../')
13 +
14 +function values (array) {
15 + var i = 0
16 + return function (abort, cb) {
17 + if(abort) i = array.length, cb(abort)
18 + else if(i >= array.length) cb(true)
19 + else cb(null, array[i++])
20 + }
21 +}
22 +
23 +var map = curry(function (read, mapper) {
24 + return function (abort, cb) {
25 + read(abort, function (end, data) {
26 + if(end) cb(end)
27 + else cb(null, mapper(data))
28 + })
29 + }
30 + })
31 +
32 +var sum = curry(function (read, done) {
33 + var total = 0
34 + read(null, function next (end, data) {
35 + if(end) return done(end === true ? null : end, total)
36 + total += data
37 + read(null, next)
38 + })
39 + })
40 +
41 +var log = curry(function (read) {
42 + return function (abort, cb) {
43 + read(abort, function (end, data) {
44 + if(end) return cb(end)
45 + console.error(data)
46 + cb(null, data)
47 + })
48 + }
49 + })
50 +
51 +tape('wrap pull streams into stream', function (t) {
52 +
53 + pull(
54 + values([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]),
55 + map(function (e) { return e*e }),
56 + log(),
57 + sum(function (err, value) {
58 + console.log(value)
59 + t.equal(value, 385)
60 + t.end()
61 + })
62 + )
63 +
64 +})
65 +
66 +tape('turn pull(through,...) -> Through', function (t) {
67 +
68 + pull(
69 + values([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]),
70 + pull(
71 + map(function (e) { return e*e }),
72 + log()
73 + ),
74 + sum(function (err, value) {
75 + console.log(value)
76 + t.equal(value, 385)
77 + t.end()
78 + })
79 + )
80 +
81 +})
82 +
83 +// pull(
84 +// values ([1 2 3 4 5 6 7 8 9 10])
85 +// pull(
86 +// map({x y;: e*e })
87 +// log()
88 +// )
89 +// sum({
90 +// err value:
91 +// t.equal(value 385)
92 +// t.end()
93 +// })
94 +// )
95 +//
96 +
97 +tape("writable pull() should throw when called twice", function (t) {
98 + t.plan(2)
99 +
100 + var stream = pull(
101 + map(function (e) { return e*e }),
102 + sum(function (err, value) {
103 + console.log(value)
104 + t.equal(value, 385)
105 + })
106 + )
107 +
108 + stream(values([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]))
109 +
110 + t.throws(function () {
111 + stream(values([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]))
112 + }, TypeError)
113 +})
node_modules/pull-stream/test/take.jsView
@@ -1,0 +1,140 @@
1 +var pull = require('../')
2 +var test = require('tape')
3 +
4 +test('through - onEnd', function (t) {
5 + t.plan(2)
6 + var values = [1,2,3,4,5,6,7,8,9,10]
7 +
8 + //read values, and then just stop!
9 + //this is a subtle edge case for take!
10 +
11 +//I did have a thing that used this edge case,
12 +//but it broke take, actually. so removing it.
13 +//TODO: fix that thing - was a test for some level-db stream thing....
14 +
15 +// pull.Source(function () {
16 +// return function (end, cb) {
17 +// if(end) cb(end)
18 +// else if(values.length)
19 +// cb(null, values.shift())
20 +// else console.log('drop')
21 +// }
22 +// })()
23 +
24 + pull(
25 + pull.values(values),
26 + pull.take(10),
27 + pull.through(null, function (err) {
28 + console.log('end')
29 + t.ok(true)
30 + process.nextTick(function () {
31 + t.end()
32 + })
33 + }),
34 + pull.collect(function (err, ary) {
35 + console.log(ary)
36 + t.ok(true)
37 + })
38 + )
39 +})
40 +
41 +
42 +test('take - exclude last (default)', function (t) {
43 + pull(
44 + pull.values([1,2,3,4,5,6,7,8,9,10]),
45 + pull.take(function(n) {return n<5}),
46 + pull.collect(function (err, four) {
47 + t.deepEqual(four, [1,2,3,4])
48 + t.end()
49 + })
50 + )
51 +})
52 +test('take - include last', function (t) {
53 + pull(
54 + pull.values([1,2,3,4,5,6,7,8,9,10]),
55 + pull.take(function(n) {return n<5}, {last: true}),
56 + pull.collect(function (err, five) {
57 + t.deepEqual(five, [1,2,3,4,5])
58 + t.end()
59 + })
60 + )
61 +})
62 +
63 +test('take 5 causes 5 reads upstream', function (t) {
64 + var reads = 0
65 + pull(
66 + pull.values([1,2,3,4,5,6,7,8,9,10]),
67 + function (read) {
68 + return function (end, cb) {
69 + if (end !== true) reads++
70 + console.log(reads, end)
71 + read(end, cb)
72 + }
73 + },
74 + pull.take(5),
75 + pull.collect(function (err, five) {
76 + t.deepEqual(five, [1,2,3,4,5])
77 + process.nextTick(function() {
78 + t.equal(reads, 5)
79 + t.end()
80 + })
81 + })
82 + )
83 +})
84 +
85 +test("take doesn't abort until the last read", function (t) {
86 +
87 + var aborted = false
88 +
89 + var ary = [1,2,3,4,5], i = 0
90 +
91 + var read = pull(
92 + function (abort, cb) {
93 + if(abort) cb(aborted = true)
94 + else if(i > ary.length) cb(true)
95 + else cb(null, ary[i++])
96 + },
97 + pull.take(function (d) {
98 + return d < 3
99 + }, {last: true})
100 + )
101 +
102 + read(null, function (_, d) {
103 + t.notOk(aborted, "hasn't aborted yet")
104 + read(null, function (_, d) {
105 + t.notOk(aborted, "hasn't aborted yet")
106 + read(null, function (_, d) {
107 + t.notOk(aborted, "hasn't aborted yet")
108 + read(null, function (end, d) {
109 + t.ok(end, 'stream ended')
110 + t.equal(d, undefined, 'data undefined')
111 + t.ok(aborted, "has aborted by now")
112 + t.end()
113 + })
114 + })
115 + })
116 + })
117 +
118 +})
119 +
120 +test('take should throw error on last read', function (t) {
121 + var i = 0
122 + var error = new Error('error on last call')
123 +
124 + pull(
125 + pull.values([1,2,3,4,5,6,7,8,9,10]),
126 + pull.take(function(n) {return n<5}, {last: true}),
127 + // pull.take(5),
128 + pull.asyncMap(function (data, cb) {
129 + setTimeout(function () {
130 + if(++i < 5) cb(null, data)
131 + else cb(error)
132 + }, 100)
133 + }),
134 + pull.collect(function (err, five) {
135 + t.equal(err, error, 'should return err')
136 + t.deepEqual(five, [1,2,3,4], 'should skip failed item')
137 + t.end()
138 + })
139 + )
140 +})
node_modules/pull-stream/test/through.jsView
@@ -1,0 +1,21 @@
1 +
2 +
3 +var pull = require('../')
4 +require('tape')('through - onEnd', function (t) {
5 + t.plan(2)
6 + pull(
7 + pull.infinite(),
8 + pull.through(null, function (err) {
9 + console.log('end')
10 + t.ok(true)
11 + process.nextTick(function () {
12 + t.end()
13 + })
14 + }),
15 + pull.take(10),
16 + pull.collect(function (err, ary) {
17 + console.log(ary)
18 + t.ok(true)
19 + })
20 + )
21 +})
node_modules/pull-stream/test/unique.jsView
@@ -1,0 +1,31 @@
1 +
2 +var pull = require('../')
3 +var test = require('tape')
4 +
5 +test('unique', function (t) {
6 + var numbers = [1, 2, 2, 3, 4, 5, 6, 4, 0, 6, 7, 8, 3, 1, 2, 9, 0]
7 +
8 + pull(
9 + pull.values(numbers),
10 + pull.unique(),
11 + pull.collect(function (err, ary) {
12 + t.deepEqual(ary.sort(), [0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
13 + t.end()
14 + })
15 + )
16 +})
17 +
18 +test('non-unique', function (t) {
19 + var numbers = [1, 2, 2, 3, 4, 5, 6, 4, 0, 6, 7, 8, 3, 1, 2, 9, 0]
20 +
21 + pull(
22 + pull.values(numbers),
23 + pull.nonUnique(),
24 + pull.collect(function (err, ary) {
25 + t.deepEqual(ary.sort(), [0, 1, 2, 2, 3, 4, 6])
26 + t.end()
27 + })
28 + )
29 +
30 +
31 +})
node_modules/pull-stream/test/values.jsView
@@ -1,0 +1,47 @@
1 +
2 +
3 +var pull = require('../')
4 +var tape = require('tape')
5 +
6 +tape('values - array', function (t) {
7 + pull(
8 + pull.values([1,2,3]),
9 + pull.collect(function (err, ary) {
10 + t.notOk(err)
11 + t.deepEqual(ary, [1, 2, 3])
12 + t.end()
13 + })
14 + )
15 +})
16 +
17 +tape('values - object', function (t) {
18 + pull(
19 + pull.values({a:1,b:2,c:3}),
20 + pull.collect(function (err, ary) {
21 + t.notOk(err)
22 + t.deepEqual(ary, [1, 2, 3])
23 + t.end()
24 + })
25 + )
26 +
27 +})
28 +
29 +tape('values, abort', function (t) {
30 +
31 + t.plan(3)
32 +
33 + var err = new Error('intentional')
34 +
35 + var read = pull.values([1,2,3], function (err) {
36 + t.end()
37 + })
38 +
39 + read(null, function (_, one) {
40 + t.notOk(_)
41 + t.equal(one, 1)
42 + read(err, function (_err) {
43 + t.equal(_err, err)
44 + })
45 + })
46 +
47 +})
node_modules/pull-stream/throughs/async-map.jsView
@@ -1,0 +1,52 @@
1 +'use strict'
2 +
3 +function id (e) { return e }
4 +var prop = require('../util/prop')
5 +
6 +module.exports = function asyncMap (map) {
7 + if(!map) return id
8 + map = prop(map)
9 + var busy = false, abortCb, aborted
10 + return function (read) {
11 + return function next (abort, cb) {
12 + if(aborted) return cb(aborted)
13 + if(abort) {
14 + aborted = abort
15 + if(!busy) read(abort, function (err) {
16 +        //in case the source has already ended normally,
17 + //we should pass our own error.
18 + cb(abort)
19 + })
20 + else read(abort, function (err) {
21 + //if we are still busy, wait for the mapper to complete.
22 + if(busy) abortCb = cb
23 + else cb(abort)
24 + })
25 + }
26 + else
27 + read(null, function (end, data) {
28 + if(end) cb(end)
29 + else if(aborted) cb(aborted)
30 + else {
31 + busy = true
32 + map(data, function (err, data) {
33 + busy = false
34 + if(aborted) {
35 + cb(aborted)
36 + abortCb && abortCb(aborted)
37 + }
38 + else if(err) next (err, cb)
39 + else cb(null, data)
40 + })
41 + }
42 + })
43 + }
44 + }
45 +}
46 +
47 +
48 +
49 +
50 +
51 +
52 +
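The bookkeeping above exists so that an abort received while the mapper is in flight is only acknowledged once the mapper completes, and so that a mapper error aborts the source. A basic usage sketch (package name assumed to be `pull-stream`):

```js
var pull = require('pull-stream')

pull(
  pull.values(['a', 'b', 'c']),
  // the mapper gets (data, cb); calling cb(err) aborts the source
  pull.asyncMap(function (item, cb) {
    setImmediate(function () { cb(null, item.toUpperCase()) })
  }),
  pull.collect(function (err, out) {
    console.log(out) // [ 'A', 'B', 'C' ]
  })
)
```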
node_modules/pull-stream/throughs/filter-not.jsView
@@ -1,0 +1,9 @@
1 +'use strict'
2 +
3 +var tester = require('../util/tester')
4 +var filter = require('./filter')
5 +
6 +module.exports = function filterNot (test) {
7 + test = tester(test)
8 + return filter(function (data) { return !test(data) })
9 +}
node_modules/pull-stream/throughs/filter.jsView
@@ -1,0 +1,24 @@
1 +'use strict'
2 +
3 +var tester = require('../util/tester')
4 +
5 +module.exports = function filter (test) {
6 + //regexp
7 + test = tester(test)
8 + return function (read) {
9 + return function next (end, cb) {
10 + var sync, loop = true
11 + while(loop) {
12 + loop = false
13 + sync = true
14 + read(end, function (end, data) {
15 + if(!end && !test(data))
16 + return sync ? loop = true : next(end, cb)
17 + cb(end, data)
18 + })
19 + sync = false
20 + }
21 + }
22 + }
23 +}
24 +
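Because the test is normalized through the `tester` util (see util/tester.js below), `filter` accepts a plain predicate, a property name, or a regular expression. A short sketch, assuming the published `pull-stream` package:

```js
var pull = require('pull-stream')

pull(
  pull.values(['ant', 'bee', 'cow', 'eel']),
  pull.filter(/e/), // a regexp is turned into a test via tester()
  pull.collect(function (err, ary) {
    console.log(ary) // [ 'bee', 'eel' ]
  })
)
```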
node_modules/pull-stream/throughs/flatten.jsView
@@ -1,0 +1,47 @@
1 +'use strict'
2 +
3 +var values = require('../sources/values')
4 +var once = require('../sources/once')
5 +
6 +//convert a stream of arrays or streams into just a stream.
7 +module.exports = function flatten () {
8 + return function (read) {
9 + var _read
10 + return function (abort, cb) {
11 + if (abort) { //abort the current stream, and then stream of streams.
12 + _read ? _read(abort, function(err) {
13 + read(err || abort, cb)
14 + }) : read(abort, cb)
15 + }
16 + else if(_read) nextChunk()
17 + else nextStream()
18 +
19 + function nextChunk () {
20 + _read(null, function (err, data) {
21 + if (err === true) nextStream()
22 + else if (err) {
23 + read(true, function(abortErr) {
24 + // TODO: what do we do with the abortErr?
25 + cb(err)
26 + })
27 + }
28 + else cb(null, data)
29 + })
30 + }
31 + function nextStream () {
32 + _read = null
33 + read(null, function (end, stream) {
34 + if(end)
35 + return cb(end)
36 + if(Array.isArray(stream) || stream && 'object' === typeof stream)
37 + stream = values(stream)
38 + else if('function' != typeof stream)
39 + stream = once(stream)
40 + _read = stream
41 + nextChunk()
42 + })
43 + }
44 + }
45 + }
46 +}
47 +
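Per the branching in `nextStream` above, `flatten` accepts arrays, objects, nested pull-streams, and plain values (which are wrapped with `once`). A quick sketch, assuming `pull-stream`:

```js
var pull = require('pull-stream')

pull(
  pull.values([
    [1, 2],               // arrays are expanded in place
    pull.values([3, 4]),  // nested streams are drained in order
    5                     // plain values are wrapped with once()
  ]),
  pull.flatten(),
  pull.collect(function (err, ary) {
    console.log(ary) // [1, 2, 3, 4, 5]
  })
)
```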
node_modules/pull-stream/throughs/index.jsView
@@ -1,0 +1,16 @@
1 +'use strict'
2 +
3 +module.exports = {
4 + map: require('./map'),
5 + asyncMap: require('./async-map'),
6 + filter: require('./filter'),
7 + filterNot: require('./filter-not'),
8 + through: require('./through'),
9 + take: require('./take'),
10 + unique: require('./unique'),
11 + nonUnique: require('./non-unique'),
12 + flatten: require('./flatten')
13 +}
14 +
15 +
16 +
node_modules/pull-stream/throughs/map.jsView
@@ -1,0 +1,23 @@
1 +'use strict'
2 +
3 +function id (e) { return e }
4 +var prop = require('../util/prop')
5 +
6 +module.exports = function map (mapper) {
7 + if(!mapper) return id
8 + mapper = prop(mapper)
9 + return function (read) {
10 + return function (abort, cb) {
11 + read(abort, function (end, data) {
12 + try {
13 + data = !end ? mapper(data) : null
14 + } catch (err) {
15 + return read(err, function () {
16 + return cb(err)
17 + })
18 + }
19 + cb(end, data)
20 + })
21 + }
22 + }
23 +}
node_modules/pull-stream/throughs/non-unique.jsView
@@ -1,0 +1,8 @@
1 +'use strict'
2 +
3 +var unique = require('./unique')
4 +
5 +//passes an item through when you see it for the second time.
6 +module.exports = function nonUnique (field) {
7 + return unique(field, true)
8 +}
node_modules/pull-stream/throughs/take.jsView
@@ -1,0 +1,41 @@
1 +'use strict'
2 +
3 +//read a number of items and then stop.
4 +module.exports = function take (test, opts) {
5 + opts = opts || {}
6 + var last = opts.last || false // whether the first item for which !test(item) should still pass
7 + var ended = false
8 + if('number' === typeof test) {
9 + last = true
10 + var n = test; test = function () {
11 + return --n
12 + }
13 + }
14 +
15 + return function (read) {
16 +
17 + function terminate (cb) {
18 + read(true, function (err) {
19 + last = false; cb(err || true)
20 + })
21 + }
22 +
23 + return function (end, cb) {
24 + if(ended && !end) last ? terminate(cb) : cb(ended)
25 + else if(ended = end) read(ended, cb)
26 + else
27 + read(null, function (end, data) {
28 + if(ended = ended || end) {
29 + //last ? terminate(cb) :
30 + cb(ended)
31 + }
32 + else if(!test(data)) {
33 + ended = true
34 + last ? cb(null, data) : terminate(cb)
35 + }
36 + else
37 + cb(null, data)
38 + })
39 + }
40 + }
41 +}
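As the code above shows, `take` accepts either a count (which implies `last: true`) or a predicate with an optional `{last: true}` to include the first failing item. A usage sketch, assuming `pull-stream`:

```js
var pull = require('pull-stream')

// numeric form: pass exactly three items, then abort upstream
pull(
  pull.values([1, 2, 3, 4, 5]),
  pull.take(3),
  pull.collect(function (err, ary) {
    console.log(ary) // [1, 2, 3]
  })
)

// predicate form: stop at the first failing item; {last: true} includes it
pull(
  pull.values([1, 2, 3, 4, 5]),
  pull.take(function (n) { return n < 3 }, { last: true }),
  pull.collect(function (err, ary) {
    console.log(ary) // [1, 2, 3]
  })
)
```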
node_modules/pull-stream/throughs/through.jsView
@@ -1,0 +1,23 @@
1 +'use strict'
2 +
3 +//a pass through stream that doesn't change the value.
4 +module.exports = function through (op, onEnd) {
5 + var a = false
6 +
7 + function once (abort) {
8 + if(a || !onEnd) return
9 + a = true
10 + onEnd(abort === true ? null : abort)
11 + }
12 +
13 + return function (read) {
14 + return function (end, cb) {
15 + if(end) once(end)
16 + return read(end, function (end, data) {
17 + if(!end) op && op(data)
18 + else once(end)
19 + cb(end, data)
20 + })
21 + }
22 + }
23 +}
node_modules/pull-stream/throughs/unique.jsView
@@ -1,0 +1,18 @@
1 +'use strict'
2 +
3 +function id (e) { return e }
4 +var prop = require('../util/prop')
5 +var filter = require('./filter')
6 +
7 +//drop items you have already seen.
8 +module.exports = function unique (field, invert) {
9 + field = prop(field) || id
10 + var seen = {}
11 + return filter(function (data) {
12 + var key = field(data)
13 + if(seen[key]) return !!invert //false, by default
14 + else seen[key] = true
15 + return !invert //true by default
16 + })
17 +}
18 +
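The `field` argument is passed through `prop`, so uniqueness can be keyed by a property name, a function, or a regexp; `nonUnique` (above) is the inverted form. A short sketch, assuming `pull-stream`:

```js
var pull = require('pull-stream')

pull(
  pull.values([{ id: 1 }, { id: 2 }, { id: 1 }]),
  pull.unique('id'), // keyed by the id property via prop()
  pull.collect(function (err, ary) {
    console.log(ary) // [ { id: 1 }, { id: 2 } ]
  })
)
```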
node_modules/pull-stream/util/abort-cb.jsView
@@ -1,0 +1,6 @@
1 +module.exports = function abortCb(cb, abort, onAbort) {
2 + cb(abort)
3 + onAbort && onAbort(abort === true ? null: abort)
4 + return
5 +}
6 +
node_modules/pull-stream/util/prop.jsView
@@ -1,0 +1,9 @@
1 +module.exports = function prop (key) {
2 + return key && (
3 + 'string' == typeof key
4 + ? function (data) { return data[key] }
5 + : 'object' === typeof key && 'function' === typeof key.exec //regexp
6 + ? function (data) { var v = key.exec(data); return v && v[0] }
7 + : key
8 + )
9 +}
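`prop` normalizes the selector accepted by `map`, `asyncMap`, `unique`, and friends: a string becomes a property accessor, a regexp becomes a first-match extractor, and a function is returned unchanged. A quick illustration (the deep require path is an assumption about the checked-in package layout):

```js
// assumed require path into the checked-in package
var prop = require('pull-stream/util/prop')

prop('name')({ name: 'cel' })        // => 'cel'   (property accessor)
prop(/\d+/)('abc123')                // => '123'   (first regexp match)
prop(function (x) { return x })(7)   // => 7       (functions pass through)
```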
node_modules/pull-stream/util/tester.jsView
@@ -1,0 +1,11 @@
1 +var prop = require('./prop')
2 +
3 +function id (e) { return e }
4 +
5 +module.exports = function tester (test) {
6 + return (
7 + 'object' === typeof test && 'function' === typeof test.test //regexp
8 + ? function (data) { return test.test(data) }
9 + : prop (test) || id
10 + )
11 +}
node_modules/pull-utf8-decoder/.travis.ymlView
@@ -1,0 +1,5 @@
1 +language: node_js
2 +node_js:
3 + - 0.6
4 + - 0.8
5 + - '0.10'
node_modules/pull-utf8-decoder/LICENSEView
@@ -1,0 +1,22 @@
1 +Copyright (c) 2013 Dominic Tarr
2 +
3 +Permission is hereby granted, free of charge,
4 +to any person obtaining a copy of this software and
5 +associated documentation files (the "Software"), to
6 +deal in the Software without restriction, including
7 +without limitation the rights to use, copy, modify,
8 +merge, publish, distribute, sublicense, and/or sell
9 +copies of the Software, and to permit persons to whom
10 +the Software is furnished to do so,
11 +subject to the following conditions:
12 +
13 +The above copyright notice and this permission notice
14 +shall be included in all copies or substantial portions of the Software.
15 +
16 +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
17 +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
18 +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
19 +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
20 +ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
node_modules/pull-utf8-decoder/README.mdView
@@ -1,0 +1,15 @@
1 +# pull-utf8-decoder
2 +
3 +Take a stream of buffers and output a stream of valid strings.
4 +
5 +If your data contains multi-byte utf-8 characters, this is absolutely necessary!
6 +
7 +[![travis](https://travis-ci.org/dominictarr/pull-utf8-decoder.png?branch=master)
8 +](https://travis-ci.org/dominictarr/pull-utf8-decoder)
9 +
10 +[![testling](http://ci.testling.com/dominictarr/pull-utf8-decoder.png)
11 +](http://ci.testling.com/dominictarr/pull-utf8-decoder)
12 +
13 +## License
14 +
15 +MIT
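A minimal usage sketch of this module composed with pull-stream and pull-file (both checked in alongside it); the file name is illustrative:

```js
var pull = require('pull-stream')
var file = require('pull-file')
var utf8 = require('pull-utf8-decoder')

pull(
  file('./notes.txt'),  // illustrative path; emits Buffers
  utf8('utf8'),         // re-chunks on character boundaries
  pull.collect(function (err, strings) {
    if (err) throw err
    console.log(strings.join(''))
  })
)
```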
node_modules/pull-utf8-decoder/index.jsView
@@ -1,0 +1,22 @@
1 +
2 +var Decode = require('string_decoder').StringDecoder
3 +
4 +module.exports = function (enc) {
5 + var decoder = new Decode(enc), ended
6 + return function (read) {
7 + return function (abort, cb) {
8 + if(ended) return cb(ended)
9 + read(abort, function (end, data) {
10 + ended = end
11 + if(true === end) {
12 + if(data = decoder.end()) cb(null, data)
13 + else cb(true)
14 + }
15 + else if(end && (true !== end))
16 + cb(end)
17 + else
18 + cb(null, decoder.write(data))
19 + })
20 + }
21 + }
22 +}
node_modules/pull-utf8-decoder/package.jsonView
@@ -1,0 +1,54 @@
1 +{
2 + "_args": [
3 + [
4 + "pull-utf8-decoder@1.0.2",
5 + "/home/cel/src/ssb-npm-registry"
6 + ]
7 + ],
8 + "_from": "pull-utf8-decoder@1.0.2",
9 + "_id": "pull-utf8-decoder@1.0.2",
10 + "_inBundle": false,
11 + "_integrity": "sha256-JHBdLm6x7Rsu08/eLVWYKq+YpGna4xnvqM44ucX2tpk=",
12 + "_location": "/pull-utf8-decoder",
13 + "_phantomChildren": {},
14 + "_requested": {
15 + "type": "version",
16 + "registry": true,
17 + "raw": "pull-utf8-decoder@1.0.2",
18 + "name": "pull-utf8-decoder",
19 + "escapedName": "pull-utf8-decoder",
20 + "rawSpec": "1.0.2",
21 + "saveSpec": null,
22 + "fetchSpec": "1.0.2"
23 + },
24 + "_requiredBy": [
25 + "/pull-file"
26 + ],
27 + "_resolved": "http://localhost:8989/blobs/get/&JHBdLm6x7Rsu08/eLVWYKq+YpGna4xnvqM44ucX2tpk=.sha256",
28 + "_spec": "1.0.2",
29 + "_where": "/home/cel/src/ssb-npm-registry",
30 + "author": {
31 + "name": "Dominic Tarr",
32 + "email": "dominic.tarr@gmail.com",
33 + "url": "http://dominictarr.com"
34 + },
35 + "bugs": {
36 + "url": "https://github.com/dominictarr/pull-utf8-decoder/issues"
37 + },
38 + "description": "pull-stream interface to node's string_decoder",
39 + "devDependencies": {
40 + "pull-stream": "~2.19.0",
41 + "tape": "~1.0.4"
42 + },
43 + "homepage": "https://github.com/dominictarr/pull-utf8-decoder",
44 + "license": "MIT",
45 + "name": "pull-utf8-decoder",
46 + "repository": {
47 + "type": "git",
48 + "url": "git://github.com/dominictarr/pull-utf8-decoder.git"
49 + },
50 + "scripts": {
51 + "test": "set -e; for t in test/*.js; do node $t; done"
52 + },
53 + "version": "1.0.2"
54 +}
node_modules/pull-utf8-decoder/test/index.jsView
@@ -1,0 +1,79 @@
1 +var pull = require('pull-stream')
2 +var fs = require('fs')
3 +var file = fs.readFileSync(__filename, 'utf-8').split(/(\n)/).map(function (e) { return new Buffer(e) })
4 +var decode = require('../')
5 +
6 +console.log(file)
7 +
8 +var test = require('tape')
9 +
10 +//handle old node and new node
11 +function A(buf) {
12 + return [].slice.call(buf)
13 +}
14 +
15 +test('lines', function (t) {
16 +
17 + pull(
18 + pull.values(file),
19 + decode('utf8'),
20 + pull.collect(function (err, ary) {
21 + if(err) throw err
22 + console.log(ary.join(''))
23 + t.equal(file.map(String).join(''), ary.join(''))
24 + t.end()
25 + })
26 + )
27 +
28 +})
29 +
30 +test('utf-8', function (t) {
31 + var expected = 'cents:¢\neuros:€'
32 +
33 + var coinage = [
34 + A(new Buffer('cents:')),
35 + [0xC2, 0xA2],
36 + A(new Buffer('\n')),
37 + A(new Buffer('euros:')),
38 + [0xE2, 0x82, 0xAC]
39 + ].reduce(function (a, b) {
40 + return a.concat(b)
41 + })
42 +
43 + function rSplit() {
44 + var s = coinage.slice()
45 + var a = []
46 + while(s.length) {
47 + var n = ~~(Math.random()*s.length) + 1
48 + a.push(s.splice(0, n))
49 + }
50 + return a.map(function (e) { return new Buffer(e) })
51 + }
52 +
53 + t.plan(100)
54 + var N = 100
55 +
56 + while(N--)
57 + pull(
58 + pull.values(rSplit()),
59 + decode(),
60 + pull.collect(function (err, ary) {
61 + t.equal(ary.join(''), expected)
62 + })
63 + )
64 +
65 + t.end()
66 +
67 +})
68 +
69 +
70 +
71 +
72 +
73 +
74 +
75 +
76 +
77 +
78 +
79 +
node_modules/readable-stream/CONTRIBUTING.mdView
@@ -1,0 +1,38 @@
1 +# Developer's Certificate of Origin 1.1
2 +
3 +By making a contribution to this project, I certify that:
4 +
5 +* (a) The contribution was created in whole or in part by me and I
6 + have the right to submit it under the open source license
7 + indicated in the file; or
8 +
9 +* (b) The contribution is based upon previous work that, to the best
10 + of my knowledge, is covered under an appropriate open source
11 + license and I have the right under that license to submit that
12 + work with modifications, whether created in whole or in part
13 + by me, under the same open source license (unless I am
14 + permitted to submit under a different license), as indicated
15 + in the file; or
16 +
17 +* (c) The contribution was provided directly to me by some other
18 + person who certified (a), (b) or (c) and I have not modified
19 + it.
20 +
21 +* (d) I understand and agree that this project and the contribution
22 + are public and that a record of the contribution (including all
23 + personal information I submit with it, including my sign-off) is
24 + maintained indefinitely and may be redistributed consistent with
25 + this project or the open source license(s) involved.
26 +
27 +## Moderation Policy
28 +
29 +The [Node.js Moderation Policy] applies to this WG.
30 +
31 +## Code of Conduct
32 +
33 +The [Node.js Code of Conduct][] applies to this WG.
34 +
35 +[Node.js Code of Conduct]:
36 +https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md
37 +[Node.js Moderation Policy]:
38 +https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md
node_modules/readable-stream/GOVERNANCE.mdView
@@ -1,0 +1,136 @@
1 +### Streams Working Group
2 +
3 +The Node.js Streams project is jointly governed by a Working Group
4 +(WG)
5 +that is responsible for high-level guidance of the project.
6 +
7 +The WG has final authority over this project including:
8 +
9 +* Technical direction
10 +* Project governance and process (including this policy)
11 +* Contribution policy
12 +* GitHub repository hosting
13 +* Conduct guidelines
14 +* Maintaining the list of additional Collaborators
15 +
16 +For the current list of WG members, see the project
17 +[README.md](./README.md#current-project-team-members).
18 +
19 +### Collaborators
20 +
21 +The readable-stream GitHub repository is
22 +maintained by the WG and additional Collaborators who are added by the
23 +WG on an ongoing basis.
24 +
25 +Individuals making significant and valuable contributions are made
26 +Collaborators and given commit-access to the project. These
27 +individuals are identified by the WG and their addition as
28 +Collaborators is discussed during the WG meeting.
29 +
30 +_Note:_ If you make a significant contribution and are not considered
31 +for commit-access log an issue or contact a WG member directly and it
32 +will be brought up in the next WG meeting.
33 +
34 +Modifications of the contents of the readable-stream repository are
35 +made on
36 +a collaborative basis. Anybody with a GitHub account may propose a
37 +modification via pull request and it will be considered by the project
38 +Collaborators. All pull requests must be reviewed and accepted by a
39 +Collaborator with sufficient expertise who is able to take full
40 +responsibility for the change. In the case of pull requests proposed
41 +by an existing Collaborator, an additional Collaborator is required
42 +for sign-off. Consensus should be sought if additional Collaborators
43 +participate and there is disagreement around a particular
44 +modification. See _Consensus Seeking Process_ below for further detail
45 +on the consensus model used for governance.
46 +
47 +Collaborators may opt to elevate significant or controversial
48 +modifications, or modifications that have not found consensus to the
49 +WG for discussion by assigning the ***WG-agenda*** tag to a pull
50 +request or issue. The WG should serve as the final arbiter where
51 +required.
52 +
53 +For the current list of Collaborators, see the project
54 +[README.md](./README.md#members).
55 +
56 +### WG Membership
57 +
58 +WG seats are not time-limited. There is no fixed size of the WG.
59 +However, the expected target is between 6 and 12, to ensure adequate
60 +coverage of important areas of expertise, balanced with the ability to
61 +make decisions efficiently.
62 +
63 +There is no specific set of requirements or qualifications for WG
64 +membership beyond these rules.
65 +
66 +The WG may add additional members to the WG by unanimous consensus.
67 +
68 +A WG member may be removed from the WG by voluntary resignation, or by
69 +unanimous consensus of all other WG members.
70 +
71 +Changes to WG membership should be posted in the agenda, and may be
72 +suggested as any other agenda item (see "WG Meetings" below).
73 +
74 +If an addition or removal is proposed during a meeting, and the full
75 +WG is not in attendance to participate, then the addition or removal
76 +is added to the agenda for the subsequent meeting. This is to ensure
77 +that all members are given the opportunity to participate in all
78 +membership decisions. If a WG member is unable to attend a meeting
79 +where a planned membership decision is being made, then their consent
80 +is assumed.
81 +
82 +No more than 1/3 of the WG members may be affiliated with the same
83 +employer. If removal or resignation of a WG member, or a change of
84 +employment by a WG member, creates a situation where more than 1/3 of
85 +the WG membership shares an employer, then the situation must be
86 +immediately remedied by the resignation or removal of one or more WG
87 +members affiliated with the over-represented employer(s).
88 +
89 +### WG Meetings
90 +
91 +The WG meets occasionally on a Google Hangout On Air. A designated moderator
92 +approved by the WG runs the meeting. Each meeting should be
93 +published to YouTube.
94 +
95 +Items are added to the WG agenda that are considered contentious or
96 +are modifications of governance, contribution policy, WG membership,
97 +or release process.
98 +
99 +The intention of the agenda is not to approve or review all patches;
100 +that should happen continuously on GitHub and be handled by the larger
101 +group of Collaborators.
102 +
103 +Any community member or contributor can ask that something be added to
104 +the next meeting's agenda by logging a GitHub Issue. Any Collaborator,
105 +WG member or the moderator can add the item to the agenda by adding
106 +the ***WG-agenda*** tag to the issue.
107 +
108 +Prior to each WG meeting the moderator will share the Agenda with
109 +members of the WG. WG members can add any items they like to the
110 +agenda at the beginning of each meeting. The moderator and the WG
111 +cannot veto or remove items.
112 +
113 +The WG may invite persons or representatives from certain projects to
114 +participate in a non-voting capacity.
115 +
116 +The moderator is responsible for summarizing the discussion of each
117 +agenda item and sends it as a pull request after the meeting.
118 +
119 +### Consensus Seeking Process
120 +
121 +The WG follows a
122 +[Consensus
123 +Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making)
124 +decision-making model.
125 +
126 +When an agenda item has appeared to reach a consensus the moderator
127 +will ask "Does anyone object?" as a final call for dissent from the
128 +consensus.
129 +
130 +If an agenda item cannot reach a consensus a WG member can call for
131 +either a closing vote or a vote to table the issue to the next
132 +meeting. The call for a vote must be seconded by a majority of the WG
133 +or else the discussion will continue. Simple majority wins.
134 +
135 +Note that changes to WG membership require a majority consensus. See
136 +"WG Membership" above.
node_modules/readable-stream/LICENSEView
@@ -1,0 +1,47 @@
1 +Node.js is licensed for use as follows:
2 +
3 +"""
4 +Copyright Node.js contributors. All rights reserved.
5 +
6 +Permission is hereby granted, free of charge, to any person obtaining a copy
7 +of this software and associated documentation files (the "Software"), to
8 +deal in the Software without restriction, including without limitation the
9 +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
10 +sell copies of the Software, and to permit persons to whom the Software is
11 +furnished to do so, subject to the following conditions:
12 +
13 +The above copyright notice and this permission notice shall be included in
14 +all copies or substantial portions of the Software.
15 +
16 +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19 +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
21 +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
22 +IN THE SOFTWARE.
23 +"""
24 +
25 +This license applies to parts of Node.js originating from the
26 +https://github.com/joyent/node repository:
27 +
28 +"""
29 +Copyright Joyent, Inc. and other Node contributors. All rights reserved.
30 +Permission is hereby granted, free of charge, to any person obtaining a copy
31 +of this software and associated documentation files (the "Software"), to
32 +deal in the Software without restriction, including without limitation the
33 +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
34 +sell copies of the Software, and to permit persons to whom the Software is
35 +furnished to do so, subject to the following conditions:
36 +
37 +The above copyright notice and this permission notice shall be included in
38 +all copies or substantial portions of the Software.
39 +
40 +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
41 +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
42 +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
43 +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
44 +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
45 +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
46 +IN THE SOFTWARE.
47 +"""
node_modules/readable-stream/README.mdView
@@ -1,0 +1,111 @@
1 +# readable-stream
2 +
3 +***Node.js core streams for userland*** [![Build Status](https://travis-ci.com/nodejs/readable-stream.svg?branch=master)](https://travis-ci.com/nodejs/readable-stream)
4 +
5 +
6 +[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/)
7 +[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/)
8 +
9 +
10 +[![Sauce Test Status](https://saucelabs.com/browser-matrix/readabe-stream.svg)](https://saucelabs.com/u/readabe-stream)
11 +
12 +```bash
13 +npm install --save readable-stream
14 +```
15 +
16 +This package is a mirror of the streams implementations in Node.js.
17 +
18 +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v10.15.2/docs/api/stream.html).
19 +
20 +If you want to guarantee a stable streams base, regardless of what version of
21 +Node you, or the users of your libraries, are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core; for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html).
22 +
23 +As of version 2.0.0 **readable-stream** uses semantic versioning.
24 +
25 +## Version 3.x.x
26 +
27 +v3.x.x of `readable-stream` supports Node 6, 8, and 10, as well as
28 +evergreen browsers, IE 11 and latest Safari. The breaking changes
29 +introduced by v3 are composed of the combined breaking changes in [Node v9](https://nodejs.org/en/blog/release/v9.0.0/)
30 +and [Node v10](https://nodejs.org/en/blog/release/v10.0.0/), as follows:
31 +
32 +1. Error codes: https://github.com/nodejs/node/pull/13310,
33 + https://github.com/nodejs/node/pull/13291,
34 + https://github.com/nodejs/node/pull/16589,
35 + https://github.com/nodejs/node/pull/15042,
36 + https://github.com/nodejs/node/pull/15665,
37 + https://github.com/nodejs/readable-stream/pull/344
38 +2. 'readable' has precedence over flowing
39 + https://github.com/nodejs/node/pull/18994
40 +3. make virtual method errors consistent
41 + https://github.com/nodejs/node/pull/18813
42 +4. updated streams error handling
43 + https://github.com/nodejs/node/pull/18438
44 +5. writable.end should return this.
45 + https://github.com/nodejs/node/pull/18780
46 +6. readable continues to read when push('')
47 + https://github.com/nodejs/node/pull/18211
48 +7. add custom inspect to BufferList
49 + https://github.com/nodejs/node/pull/17907
50 +8. always defer 'readable' with nextTick
51 + https://github.com/nodejs/node/pull/17979
52 +
53 +## Version 2.x.x
54 +
55 +v2.x.x of `readable-stream` supports all Node.js versions from 0.8, as well as
56 +evergreen browsers and IE 10 & 11.
57 +
58 +### Big Thanks
59 +
60 +Cross-browser Testing Platform and Open Source <3 Provided by [Sauce Labs][sauce]
61 +
62 +# Usage
63 +
64 +You can swap your `require('stream')` with `require('readable-stream')`
65 +without any changes, if you are just using one of the main classes and
66 +functions.
67 +
68 +```js
69 +const {
70 + Readable,
71 + Writable,
72 + Transform,
73 + Duplex,
74 + pipeline,
75 + finished
76 +} = require('readable-stream')
77 +```
78 +
79 +Note that `require('stream')` will return `Stream`, while
80 +`require('readable-stream')` will return `Readable`. We discourage using
81 +whatever is exported directly; instead, use one of the properties as
82 +shown in the example above.
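
For instance, a small sketch (not taken from the upstream docs) that builds a source and a transform from those property exports:

```js
const { Readable, Transform, pipeline } = require('readable-stream')

const source = new Readable({
  read () {
    this.push('hello world')
    this.push(null) // end of stream
  }
})

const upper = new Transform({
  transform (chunk, enc, cb) {
    cb(null, chunk.toString().toUpperCase())
  }
})

pipeline(source, upper, process.stdout, (err) => {
  if (err) console.error(err)
})
```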
83 +
84 +# Streams Working Group
85 +
86 +`readable-stream` is maintained by the Streams Working Group, which
87 +oversees the development and maintenance of the Streams API within
88 +Node.js. The responsibilities of the Streams Working Group include:
89 +
90 +* Addressing stream issues on the Node.js issue tracker.
91 +* Authoring and editing stream documentation within the Node.js project.
92 +* Reviewing changes to stream subclasses within the Node.js project.
93 +* Redirecting changes to streams from the Node.js project to this
94 + project.
95 +* Assisting in the implementation of stream providers within Node.js.
96 +* Recommending versions of `readable-stream` to be included in Node.js.
97 +* Messaging about the future of streams to give the community advance
98 + notice of changes.
99 +
100 +<a name="members"></a>
101 +## Team Members
102 +
103 +* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) &lt;calvin.metcalf@gmail.com&gt;
104 + - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242
105 +* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) &lt;mathiasbuus@gmail.com&gt;
106 +* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) &lt;matteo.collina@gmail.com&gt;
107 + - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E
108 +* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) &lt;shestak.irina@gmail.com&gt;
109 +* **Yoshua Wuyts** ([@yoshuawuyts](https://github.com/yoshuawuyts)) &lt;yoshuawuyts@gmail.com&gt;
110 +
111 +[sauce]: https://saucelabs.com
node_modules/readable-stream/errors-browser.jsView
@@ -1,0 +1,127 @@
1 +'use strict';
2 +
3 +function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
4 +
5 +var codes = {};
6 +
7 +function createErrorType(code, message, Base) {
8 + if (!Base) {
9 + Base = Error;
10 + }
11 +
12 + function getMessage(arg1, arg2, arg3) {
13 + if (typeof message === 'string') {
14 + return message;
15 + } else {
16 + return message(arg1, arg2, arg3);
17 + }
18 + }
19 +
20 + var NodeError =
21 + /*#__PURE__*/
22 + function (_Base) {
23 + _inheritsLoose(NodeError, _Base);
24 +
25 + function NodeError(arg1, arg2, arg3) {
26 + return _Base.call(this, getMessage(arg1, arg2, arg3)) || this;
27 + }
28 +
29 + return NodeError;
30 + }(Base);
31 +
32 + NodeError.prototype.name = Base.name;
33 + NodeError.prototype.code = code;
34 + codes[code] = NodeError;
35 +} // https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js
36 +
37 +
38 +function oneOf(expected, thing) {
39 + if (Array.isArray(expected)) {
40 + var len = expected.length;
41 + expected = expected.map(function (i) {
42 + return String(i);
43 + });
44 +
45 + if (len > 2) {
46 + return "one of ".concat(thing, " ").concat(expected.slice(0, len - 1).join(', '), ", or ") + expected[len - 1];
47 + } else if (len === 2) {
48 + return "one of ".concat(thing, " ").concat(expected[0], " or ").concat(expected[1]);
49 + } else {
50 + return "of ".concat(thing, " ").concat(expected[0]);
51 + }
52 + } else {
53 + return "of ".concat(thing, " ").concat(String(expected));
54 + }
55 +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith
56 +
57 +
58 +function startsWith(str, search, pos) {
59 + return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search;
60 +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
61 +
62 +
63 +function endsWith(str, search, this_len) {
64 + if (this_len === undefined || this_len > str.length) {
65 + this_len = str.length;
66 + }
67 +
68 + return str.substring(this_len - search.length, this_len) === search;
69 +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes
70 +
71 +
72 +function includes(str, search, start) {
73 + if (typeof start !== 'number') {
74 + start = 0;
75 + }
76 +
77 + if (start + search.length > str.length) {
78 + return false;
79 + } else {
80 + return str.indexOf(search, start) !== -1;
81 + }
82 +}
83 +
84 +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) {
85 + return 'The value "' + value + '" is invalid for option "' + name + '"';
86 +}, TypeError);
87 +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) {
88 + // determiner: 'must be' or 'must not be'
89 + var determiner;
90 +
91 + if (typeof expected === 'string' && startsWith(expected, 'not ')) {
92 + determiner = 'must not be';
93 + expected = expected.replace(/^not /, '');
94 + } else {
95 + determiner = 'must be';
96 + }
97 +
98 + var msg;
99 +
100 + if (endsWith(name, ' argument')) {
101 + // For cases like 'first argument'
102 + msg = "The ".concat(name, " ").concat(determiner, " ").concat(oneOf(expected, 'type'));
103 + } else {
104 + var type = includes(name, '.') ? 'property' : 'argument';
105 + msg = "The \"".concat(name, "\" ").concat(type, " ").concat(determiner, " ").concat(oneOf(expected, 'type'));
106 + }
107 +
108 + msg += ". Received type ".concat(typeof actual);
109 + return msg;
110 +}, TypeError);
111 +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF');
112 +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) {
113 + return 'The ' + name + ' method is not implemented';
114 +});
115 +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close');
116 +createErrorType('ERR_STREAM_DESTROYED', function (name) {
117 + return 'Cannot call ' + name + ' after a stream was destroyed';
118 +});
119 +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times');
120 +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable');
121 +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end');
122 +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError);
123 +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) {
124 + return 'Unknown encoding: ' + arg;
125 +}, TypeError);
126 +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event');
127 +module.exports.codes = codes;
node_modules/readable-stream/errors.js
@@ -1,0 +1,116 @@
1 +'use strict';
2 +
3 +const codes = {};
4 +
5 +function createErrorType(code, message, Base) {
6 + if (!Base) {
7 + Base = Error
8 + }
9 +
10 + function getMessage (arg1, arg2, arg3) {
11 + if (typeof message === 'string') {
12 + return message
13 + } else {
14 + return message(arg1, arg2, arg3)
15 + }
16 + }
17 +
18 + class NodeError extends Base {
19 + constructor (arg1, arg2, arg3) {
20 + super(getMessage(arg1, arg2, arg3));
21 + }
22 + }
23 +
24 + NodeError.prototype.name = Base.name;
25 + NodeError.prototype.code = code;
26 +
27 + codes[code] = NodeError;
28 +}
29 +
30 +// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js
31 +function oneOf(expected, thing) {
32 + if (Array.isArray(expected)) {
33 + const len = expected.length;
34 + expected = expected.map((i) => String(i));
35 + if (len > 2) {
36 + return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` +
37 + expected[len - 1];
38 + } else if (len === 2) {
39 + return `one of ${thing} ${expected[0]} or ${expected[1]}`;
40 + } else {
41 + return `of ${thing} ${expected[0]}`;
42 + }
43 + } else {
44 + return `of ${thing} ${String(expected)}`;
45 + }
46 +}
47 +
48 +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith
49 +function startsWith(str, search, pos) {
50 + return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search;
51 +}
52 +
53 +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
54 +function endsWith(str, search, this_len) {
55 + if (this_len === undefined || this_len > str.length) {
56 + this_len = str.length;
57 + }
58 + return str.substring(this_len - search.length, this_len) === search;
59 +}
60 +
61 +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes
62 +function includes(str, search, start) {
63 + if (typeof start !== 'number') {
64 + start = 0;
65 + }
66 +
67 + if (start + search.length > str.length) {
68 + return false;
69 + } else {
70 + return str.indexOf(search, start) !== -1;
71 + }
72 +}
73 +
74 +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) {
75 + return 'The value "' + value + '" is invalid for option "' + name + '"'
76 +}, TypeError);
77 +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) {
78 + // determiner: 'must be' or 'must not be'
79 + let determiner;
80 + if (typeof expected === 'string' && startsWith(expected, 'not ')) {
81 + determiner = 'must not be';
82 + expected = expected.replace(/^not /, '');
83 + } else {
84 + determiner = 'must be';
85 + }
86 +
87 + let msg;
88 + if (endsWith(name, ' argument')) {
89 + // For cases like 'first argument'
90 + msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`;
91 + } else {
92 + const type = includes(name, '.') ? 'property' : 'argument';
93 + msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`;
94 + }
95 +
96 + msg += `. Received type ${typeof actual}`;
97 + return msg;
98 +}, TypeError);
99 +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF');
100 +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) {
101 + return 'The ' + name + ' method is not implemented'
102 +});
103 +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close');
104 +createErrorType('ERR_STREAM_DESTROYED', function (name) {
105 + return 'Cannot call ' + name + ' after a stream was destroyed';
106 +});
107 +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times');
108 +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable');
109 +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end');
110 +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError);
111 +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) {
112 + return 'Unknown encoding: ' + arg
113 +}, TypeError);
114 +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event');
115 +
116 +module.exports.codes = codes;
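errors.js and its Babel-transpiled twin errors-browser.js export the same `codes` map of generated error constructors. A minimal sketch of how those constructors behave, assuming this vendored copy resolves from node_modules as `readable-stream` (the deep require path is an assumption based on the file layout shown here):

    // The generated constructors inherit from the Base passed in (TypeError for
    // ERR_INVALID_ARG_TYPE), carry a stable .code, and build their message from
    // the constructor arguments.
    var codes = require('readable-stream/errors').codes;

    var err = new codes.ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], 42);
    console.log(err instanceof TypeError); // true
    console.log(err.code);                 // 'ERR_INVALID_ARG_TYPE'
    console.log(err.message);              // The "chunk" argument must be one of type
                                           // string, Buffer, or Uint8Array. Received type number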
node_modules/readable-stream/experimentalWarning.js
@@ -1,0 +1,17 @@
1 +'use strict'
2 +
3 +var experimentalWarnings = new Set();
4 +
5 +function emitExperimentalWarning(feature) {
6 + if (experimentalWarnings.has(feature)) return;
7 + var msg = feature + ' is an experimental feature. This feature could ' +
8 + 'change at any time';
9 + experimentalWarnings.add(feature);
10 + process.emitWarning(msg, 'ExperimentalWarning');
11 +}
12 +
13 +function noop() {}
14 +
15 +module.exports.emitExperimentalWarning = process.emitWarning
16 + ? emitExperimentalWarning
17 + : noop;
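The helper warns only once per feature name and degrades to a no-op on platforms without `process.emitWarning`. A small usage sketch (again assuming the deep require path works against this node_modules layout):

    var emitExperimentalWarning = require('readable-stream/experimentalWarning').emitExperimentalWarning;

    process.on('warning', function (w) {
      console.log(w.name + ': ' + w.message);
    });

    emitExperimentalWarning('Readable[Symbol.asyncIterator]'); // emits one ExperimentalWarning
    emitExperimentalWarning('Readable[Symbol.asyncIterator]'); // deduplicated, no second warning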
node_modules/readable-stream/lib/_stream_duplex.js
@@ -1,0 +1,139 @@
1 +// Copyright Joyent, Inc. and other Node contributors.
2 +//
3 +// Permission is hereby granted, free of charge, to any person obtaining a
4 +// copy of this software and associated documentation files (the
5 +// "Software"), to deal in the Software without restriction, including
6 +// without limitation the rights to use, copy, modify, merge, publish,
7 +// distribute, sublicense, and/or sell copies of the Software, and to permit
8 +// persons to whom the Software is furnished to do so, subject to the
9 +// following conditions:
10 +//
11 +// The above copyright notice and this permission notice shall be included
12 +// in all copies or substantial portions of the Software.
13 +//
14 +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
15 +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
16 +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
17 +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
18 +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
19 +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
20 +// USE OR OTHER DEALINGS IN THE SOFTWARE.
21 +// a duplex stream is just a stream that is both readable and writable.
22 +// Since JS doesn't have multiple prototypal inheritance, this class
23 +// prototypally inherits from Readable, and then parasitically from
24 +// Writable.
25 +'use strict';
26 +/*<replacement>*/
27 +
28 +var objectKeys = Object.keys || function (obj) {
29 + var keys = [];
30 +
31 + for (var key in obj) {
32 + keys.push(key);
33 + }
34 +
35 + return keys;
36 +};
37 +/*</replacement>*/
38 +
39 +
40 +module.exports = Duplex;
41 +
42 +var Readable = require('./_stream_readable');
43 +
44 +var Writable = require('./_stream_writable');
45 +
46 +require('inherits')(Duplex, Readable);
47 +
48 +{
49 + // Allow the keys array to be GC'ed.
50 + var keys = objectKeys(Writable.prototype);
51 +
52 + for (var v = 0; v < keys.length; v++) {
53 + var method = keys[v];
54 + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];
55 + }
56 +}
57 +
58 +function Duplex(options) {
59 + if (!(this instanceof Duplex)) return new Duplex(options);
60 + Readable.call(this, options);
61 + Writable.call(this, options);
62 + this.allowHalfOpen = true;
63 +
64 + if (options) {
65 + if (options.readable === false) this.readable = false;
66 + if (options.writable === false) this.writable = false;
67 +
68 + if (options.allowHalfOpen === false) {
69 + this.allowHalfOpen = false;
70 + this.once('end', onend);
71 + }
72 + }
73 +}
74 +
75 +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', {
76 + // making it explicit this property is not enumerable
77 + // because otherwise some prototype manipulation in
78 + // userland will fail
79 + enumerable: false,
80 + get: function get() {
81 + return this._writableState.highWaterMark;
82 + }
83 +});
84 +Object.defineProperty(Duplex.prototype, 'writableBuffer', {
85 + // making it explicit this property is not enumerable
86 + // because otherwise some prototype manipulation in
87 + // userland will fail
88 + enumerable: false,
89 + get: function get() {
90 + return this._writableState && this._writableState.getBuffer();
91 + }
92 +});
93 +Object.defineProperty(Duplex.prototype, 'writableLength', {
94 + // making it explicit this property is not enumerable
95 + // because otherwise some prototype manipulation in
96 + // userland will fail
97 + enumerable: false,
98 + get: function get() {
99 + return this._writableState.length;
100 + }
101 +}); // the no-half-open enforcer
102 +
103 +function onend() {
104 + // If the writable side ended, then we're ok.
105 + if (this._writableState.ended) return; // no more data can be written.
106 + // But allow more writes to happen in this tick.
107 +
108 + process.nextTick(onEndNT, this);
109 +}
110 +
111 +function onEndNT(self) {
112 + self.end();
113 +}
114 +
115 +Object.defineProperty(Duplex.prototype, 'destroyed', {
116 + // making it explicit this property is not enumerable
117 + // because otherwise some prototype manipulation in
118 + // userland will fail
119 + enumerable: false,
120 + get: function get() {
121 + if (this._readableState === undefined || this._writableState === undefined) {
122 + return false;
123 + }
124 +
125 + return this._readableState.destroyed && this._writableState.destroyed;
126 + },
127 + set: function set(value) {
128 + // we ignore the value if the stream
129 + // has not been initialized yet
130 + if (this._readableState === undefined || this._writableState === undefined) {
131 + return;
132 + } // backward compatibility, the user is explicitly
133 + // managing destroyed
134 +
135 +
136 + this._readableState.destroyed = value;
137 + this._writableState.destroyed = value;
138 + }
139 +});
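Duplex mixes Writable's prototype methods into a Readable subclass, so the simplified constructor form works for both sides. A minimal sketch, assuming the package's usual `readable-stream` entry point:

    var Duplex = require('readable-stream').Duplex;

    // Echoes writes back out the readable side, uppercased.
    var d = new Duplex({
      write: function (chunk, encoding, cb) {
        this.push(String(chunk).toUpperCase());
        cb();
      },
      read: function () {} // data is pushed from write(), nothing to pull here
    });

    d.on('data', function (chunk) { console.log(String(chunk)); }); // HELLO
    d.write('hello');
    d.end();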
node_modules/readable-stream/lib/_stream_passthrough.js
@@ -1,0 +1,39 @@
1 +// Copyright Joyent, Inc. and other Node contributors.
2 +//
3 +// Permission is hereby granted, free of charge, to any person obtaining a
4 +// copy of this software and associated documentation files (the
5 +// "Software"), to deal in the Software without restriction, including
6 +// without limitation the rights to use, copy, modify, merge, publish,
7 +// distribute, sublicense, and/or sell copies of the Software, and to permit
8 +// persons to whom the Software is furnished to do so, subject to the
9 +// following conditions:
10 +//
11 +// The above copyright notice and this permission notice shall be included
12 +// in all copies or substantial portions of the Software.
13 +//
14 +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
15 +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
16 +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
17 +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
18 +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
19 +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
20 +// USE OR OTHER DEALINGS IN THE SOFTWARE.
21 +// a passthrough stream.
22 +// basically just the most minimal sort of Transform stream.
23 +// Every written chunk gets output as-is.
24 +'use strict';
25 +
26 +module.exports = PassThrough;
27 +
28 +var Transform = require('./_stream_transform');
29 +
30 +require('inherits')(PassThrough, Transform);
31 +
32 +function PassThrough(options) {
33 + if (!(this instanceof PassThrough)) return new PassThrough(options);
34 + Transform.call(this, options);
35 +}
36 +
37 +PassThrough.prototype._transform = function (chunk, encoding, cb) {
38 + cb(null, chunk);
39 +};
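PassThrough is the identity Transform, which makes it handy as an observation point in a pipeline. For example (the file names here are only illustrative):

    var PassThrough = require('readable-stream').PassThrough;
    var fs = require('fs');

    var tap = new PassThrough();
    tap.on('data', function (chunk) { console.log('saw ' + chunk.length + ' bytes'); });

    fs.createReadStream('in.txt')   // illustrative input file
      .pipe(tap)
      .pipe(fs.createWriteStream('out.txt'));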
node_modules/readable-stream/lib/_stream_readable.js
@@ -1,0 +1,1065 @@
1 +// Copyright Joyent, Inc. and other Node contributors.
2 +//
3 +// Permission is hereby granted, free of charge, to any person obtaining a
4 +// copy of this software and associated documentation files (the
5 +// "Software"), to deal in the Software without restriction, including
6 +// without limitation the rights to use, copy, modify, merge, publish,
7 +// distribute, sublicense, and/or sell copies of the Software, and to permit
8 +// persons to whom the Software is furnished to do so, subject to the
9 +// following conditions:
10 +//
11 +// The above copyright notice and this permission notice shall be included
12 +// in all copies or substantial portions of the Software.
13 +//
14 +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
15 +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
16 +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
17 +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
18 +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
19 +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
20 +// USE OR OTHER DEALINGS IN THE SOFTWARE.
21 +'use strict';
22 +
23 +module.exports = Readable;
24 +/*<replacement>*/
25 +
26 +var Duplex;
27 +/*</replacement>*/
28 +
29 +Readable.ReadableState = ReadableState;
30 +/*<replacement>*/
31 +
32 +var EE = require('events').EventEmitter;
33 +
34 +var EElistenerCount = function EElistenerCount(emitter, type) {
35 + return emitter.listeners(type).length;
36 +};
37 +/*</replacement>*/
38 +
39 +/*<replacement>*/
40 +
41 +
42 +var Stream = require('./internal/streams/stream');
43 +/*</replacement>*/
44 +
45 +
46 +var Buffer = require('buffer').Buffer;
47 +
48 +var OurUint8Array = global.Uint8Array || function () {};
49 +
50 +function _uint8ArrayToBuffer(chunk) {
51 + return Buffer.from(chunk);
52 +}
53 +
54 +function _isUint8Array(obj) {
55 + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
56 +}
57 +/*<replacement>*/
58 +
59 +
60 +var debugUtil = require('util');
61 +
62 +var debug;
63 +
64 +if (debugUtil && debugUtil.debuglog) {
65 + debug = debugUtil.debuglog('stream');
66 +} else {
67 + debug = function debug() {};
68 +}
69 +/*</replacement>*/
70 +
71 +
72 +var BufferList = require('./internal/streams/buffer_list');
73 +
74 +var destroyImpl = require('./internal/streams/destroy');
75 +
76 +var _require = require('./internal/streams/state'),
77 + getHighWaterMark = _require.getHighWaterMark;
78 +
79 +var _require$codes = require('../errors').codes,
80 + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE,
81 + ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF,
82 + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,
83 + ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT;
84 +
85 +var _require2 = require('../experimentalWarning'),
86 + emitExperimentalWarning = _require2.emitExperimentalWarning; // Lazy loaded to improve the startup performance.
87 +
88 +
89 +var StringDecoder;
90 +var createReadableStreamAsyncIterator;
91 +
92 +require('inherits')(Readable, Stream);
93 +
94 +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume'];
95 +
96 +function prependListener(emitter, event, fn) {
97 + // Sadly this is not cacheable as some libraries bundle their own
98 + // event emitter implementation with them.
99 + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); // This is a hack to make sure that our error handler is attached before any
100 + // userland ones. NEVER DO THIS. This is here only because this code needs
101 + // to continue to work with older versions of Node.js that do not include
102 + // the prependListener() method. The goal is to eventually remove this hack.
103 +
104 + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]];
105 +}
106 +
107 +function ReadableState(options, stream, isDuplex) {
108 + Duplex = Duplex || require('./_stream_duplex');
109 + options = options || {}; // Duplex streams are both readable and writable, but share
110 + // the same options object.
111 + // However, some cases require setting options to different
112 + // values for the readable and the writable sides of the duplex stream.
113 + // These options can be provided separately as readableXXX and writableXXX.
114 +
115 + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag. Used to make read(n) ignore n and to
116 + // make all the buffer merging and length checks go away
117 +
118 + this.objectMode = !!options.objectMode;
119 + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; // the point at which it stops calling _read() to fill the buffer
120 + // Note: 0 is a valid value, means "don't call _read preemptively ever"
121 +
122 + this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); // A linked list is used to store data chunks instead of an array because the
123 + // linked list can remove elements from the beginning faster than
124 + // array.shift()
125 +
126 + this.buffer = new BufferList();
127 + this.length = 0;
128 + this.pipes = null;
129 + this.pipesCount = 0;
130 + this.flowing = null;
131 + this.ended = false;
132 + this.endEmitted = false;
133 + this.reading = false; // a flag to be able to tell if the event 'readable'/'data' is emitted
134 + // immediately, or on a later tick. We set this to true at first, because
135 + // any actions that shouldn't happen until "later" should generally also
136 + // not happen before the first read call.
137 +
138 + this.sync = true; // whenever we return null, then we set a flag to say
139 + // that we're awaiting a 'readable' event emission.
140 +
141 + this.needReadable = false;
142 + this.emittedReadable = false;
143 + this.readableListening = false;
144 + this.resumeScheduled = false;
145 + this.paused = true; // Should close be emitted on destroy. Defaults to true.
146 +
147 + this.emitClose = options.emitClose !== false; // has it been destroyed
148 +
149 + this.destroyed = false; // Crypto is kind of old and crusty. Historically, its default string
150 + // encoding is 'binary' so we have to make this configurable.
151 + // Everything else in the universe uses 'utf8', though.
152 +
153 + this.defaultEncoding = options.defaultEncoding || 'utf8'; // the number of writers that are awaiting a drain event in .pipe()s
154 +
155 + this.awaitDrain = 0; // if true, a maybeReadMore has been scheduled
156 +
157 + this.readingMore = false;
158 + this.decoder = null;
159 + this.encoding = null;
160 +
161 + if (options.encoding) {
162 + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;
163 + this.decoder = new StringDecoder(options.encoding);
164 + this.encoding = options.encoding;
165 + }
166 +}
167 +
168 +function Readable(options) {
169 + Duplex = Duplex || require('./_stream_duplex');
170 + if (!(this instanceof Readable)) return new Readable(options); // Checking for a Stream.Duplex instance is faster here instead of inside
171 + // the ReadableState constructor, at least with V8 6.5
172 +
173 + var isDuplex = this instanceof Duplex;
174 + this._readableState = new ReadableState(options, this, isDuplex); // legacy
175 +
176 + this.readable = true;
177 +
178 + if (options) {
179 + if (typeof options.read === 'function') this._read = options.read;
180 + if (typeof options.destroy === 'function') this._destroy = options.destroy;
181 + }
182 +
183 + Stream.call(this);
184 +}
185 +
186 +Object.defineProperty(Readable.prototype, 'destroyed', {
187 + // making it explicit this property is not enumerable
188 + // because otherwise some prototype manipulation in
189 + // userland will fail
190 + enumerable: false,
191 + get: function get() {
192 + if (this._readableState === undefined) {
193 + return false;
194 + }
195 +
196 + return this._readableState.destroyed;
197 + },
198 + set: function set(value) {
199 + // we ignore the value if the stream
200 + // has not been initialized yet
201 + if (!this._readableState) {
202 + return;
203 + } // backward compatibility, the user is explicitly
204 + // managing destroyed
205 +
206 +
207 + this._readableState.destroyed = value;
208 + }
209 +});
210 +Readable.prototype.destroy = destroyImpl.destroy;
211 +Readable.prototype._undestroy = destroyImpl.undestroy;
212 +
213 +Readable.prototype._destroy = function (err, cb) {
214 + cb(err);
215 +}; // Manually shove something into the read() buffer.
216 +// This returns true if the highWaterMark has not been hit yet,
217 +// similar to how Writable.write() returns true if you should
218 +// write() some more.
219 +
220 +
221 +Readable.prototype.push = function (chunk, encoding) {
222 + var state = this._readableState;
223 + var skipChunkCheck;
224 +
225 + if (!state.objectMode) {
226 + if (typeof chunk === 'string') {
227 + encoding = encoding || state.defaultEncoding;
228 +
229 + if (encoding !== state.encoding) {
230 + chunk = Buffer.from(chunk, encoding);
231 + encoding = '';
232 + }
233 +
234 + skipChunkCheck = true;
235 + }
236 + } else {
237 + skipChunkCheck = true;
238 + }
239 +
240 + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck);
241 +}; // Unshift should *always* be something directly out of read()
242 +
243 +
244 +Readable.prototype.unshift = function (chunk) {
245 + return readableAddChunk(this, chunk, null, true, false);
246 +};
247 +
248 +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) {
249 + debug('readableAddChunk', chunk);
250 + var state = stream._readableState;
251 +
252 + if (chunk === null) {
253 + state.reading = false;
254 + onEofChunk(stream, state);
255 + } else {
256 + var er;
257 + if (!skipChunkCheck) er = chunkInvalid(state, chunk);
258 +
259 + if (er) {
260 + stream.emit('error', er);
261 + } else if (state.objectMode || chunk && chunk.length > 0) {
262 + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) {
263 + chunk = _uint8ArrayToBuffer(chunk);
264 + }
265 +
266 + if (addToFront) {
267 + if (state.endEmitted) stream.emit('error', new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true);
268 + } else if (state.ended) {
269 + stream.emit('error', new ERR_STREAM_PUSH_AFTER_EOF());
270 + } else if (state.destroyed) {
271 + return false;
272 + } else {
273 + state.reading = false;
274 +
275 + if (state.decoder && !encoding) {
276 + chunk = state.decoder.write(chunk);
277 + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state);
278 + } else {
279 + addChunk(stream, state, chunk, false);
280 + }
281 + }
282 + } else if (!addToFront) {
283 + state.reading = false;
284 + maybeReadMore(stream, state);
285 + }
286 + } // We can push more data if we are below the highWaterMark.
287 + // Also, if we have no data yet, we can stand some more bytes.
288 + // This is to work around cases where hwm=0, such as the repl.
289 +
290 +
291 + return !state.ended && (state.length < state.highWaterMark || state.length === 0);
292 +}
293 +
294 +function addChunk(stream, state, chunk, addToFront) {
295 + if (state.flowing && state.length === 0 && !state.sync) {
296 + state.awaitDrain = 0;
297 + stream.emit('data', chunk);
298 + } else {
299 + // update the buffer info.
300 + state.length += state.objectMode ? 1 : chunk.length;
301 + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk);
302 + if (state.needReadable) emitReadable(stream);
303 + }
304 +
305 + maybeReadMore(stream, state);
306 +}
307 +
308 +function chunkInvalid(state, chunk) {
309 + var er;
310 +
311 + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {
312 + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk);
313 + }
314 +
315 + return er;
316 +}
317 +
318 +Readable.prototype.isPaused = function () {
319 + return this._readableState.flowing === false;
320 +}; // backwards compatibility.
321 +
322 +
323 +Readable.prototype.setEncoding = function (enc) {
324 + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;
325 + this._readableState.decoder = new StringDecoder(enc); // if setEncoding(null), decoder.encoding equals utf8
326 +
327 + this._readableState.encoding = this._readableState.decoder.encoding;
328 + return this;
329 +}; // Don't raise the hwm > 8MB
330 +
331 +
332 +var MAX_HWM = 0x800000;
333 +
334 +function computeNewHighWaterMark(n) {
335 + if (n >= MAX_HWM) {
336 + n = MAX_HWM;
337 + } else {
338 + // Get the next highest power of 2 to prevent increasing hwm excessively in
339 + // tiny amounts
340 + n--;
341 + n |= n >>> 1;
342 + n |= n >>> 2;
343 + n |= n >>> 4;
344 + n |= n >>> 8;
345 + n |= n >>> 16;
346 + n++;
347 + }
348 +
349 + return n;
350 +} // This function is designed to be inlinable, so please take care when making
351 +// changes to the function body.
352 +
353 +
354 +function howMuchToRead(n, state) {
355 + if (n <= 0 || state.length === 0 && state.ended) return 0;
356 + if (state.objectMode) return 1;
357 +
358 + if (n !== n) {
359 + // Only flow one buffer at a time
360 + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length;
361 + } // If we're asking for more than the current hwm, then raise the hwm.
362 +
363 +
364 + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n);
365 + if (n <= state.length) return n; // Don't have enough
366 +
367 + if (!state.ended) {
368 + state.needReadable = true;
369 + return 0;
370 + }
371 +
372 + return state.length;
373 +} // you can override either this method, or the async _read(n) below.
374 +
375 +
376 +Readable.prototype.read = function (n) {
377 + debug('read', n);
378 + n = parseInt(n, 10);
379 + var state = this._readableState;
380 + var nOrig = n;
381 + if (n !== 0) state.emittedReadable = false; // if we're doing read(0) to trigger a readable event, but we
382 + // already have a bunch of data in the buffer, then just trigger
383 + // the 'readable' event and move on.
384 +
385 + if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) {
386 + debug('read: emitReadable', state.length, state.ended);
387 + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this);
388 + return null;
389 + }
390 +
391 + n = howMuchToRead(n, state); // if we've ended, and we're now clear, then finish it up.
392 +
393 + if (n === 0 && state.ended) {
394 + if (state.length === 0) endReadable(this);
395 + return null;
396 + } // All the actual chunk generation logic needs to be
397 + // *below* the call to _read. The reason is that in certain
398 + // synthetic stream cases, such as passthrough streams, _read
399 + // may be a completely synchronous operation which may change
400 + // the state of the read buffer, providing enough data when
401 + // before there was *not* enough.
402 + //
403 + // So, the steps are:
404 + // 1. Figure out what the state of things will be after we do
405 + // a read from the buffer.
406 + //
407 + // 2. If that resulting state will trigger a _read, then call _read.
408 + // Note that this may be asynchronous, or synchronous. Yes, it is
409 + // deeply ugly to write APIs this way, but that still doesn't mean
410 + // that the Readable class should behave improperly, as streams are
411 + // designed to be sync/async agnostic.
412 + // Take note if the _read call is sync or async (ie, if the read call
413 + // has returned yet), so that we know whether or not it's safe to emit
414 + // 'readable' etc.
415 + //
416 + // 3. Actually pull the requested chunks out of the buffer and return.
417 + // if we need a readable event, then we need to do some reading.
418 +
419 +
420 + var doRead = state.needReadable;
421 + debug('need readable', doRead); // if we currently have less than the highWaterMark, then also read some
422 +
423 + if (state.length === 0 || state.length - n < state.highWaterMark) {
424 + doRead = true;
425 + debug('length less than watermark', doRead);
426 + } // however, if we've ended, then there's no point, and if we're already
427 + // reading, then it's unnecessary.
428 +
429 +
430 + if (state.ended || state.reading) {
431 + doRead = false;
432 + debug('reading or ended', doRead);
433 + } else if (doRead) {
434 + debug('do read');
435 + state.reading = true;
436 + state.sync = true; // if the length is currently zero, then we *need* a readable event.
437 +
438 + if (state.length === 0) state.needReadable = true; // call internal read method
439 +
440 + this._read(state.highWaterMark);
441 +
442 + state.sync = false; // If _read pushed data synchronously, then `reading` will be false,
443 + // and we need to re-evaluate how much data we can return to the user.
444 +
445 + if (!state.reading) n = howMuchToRead(nOrig, state);
446 + }
447 +
448 + var ret;
449 + if (n > 0) ret = fromList(n, state);else ret = null;
450 +
451 + if (ret === null) {
452 + state.needReadable = true;
453 + n = 0;
454 + } else {
455 + state.length -= n;
456 + state.awaitDrain = 0;
457 + }
458 +
459 + if (state.length === 0) {
460 + // If we have nothing in the buffer, then we want to know
461 + // as soon as we *do* get something into the buffer.
462 + if (!state.ended) state.needReadable = true; // If we tried to read() past the EOF, then emit end on the next tick.
463 +
464 + if (nOrig !== n && state.ended) endReadable(this);
465 + }
466 +
467 + if (ret !== null) this.emit('data', ret);
468 + return ret;
469 +};
470 +
471 +function onEofChunk(stream, state) {
472 + if (state.ended) return;
473 +
474 + if (state.decoder) {
475 + var chunk = state.decoder.end();
476 +
477 + if (chunk && chunk.length) {
478 + state.buffer.push(chunk);
479 + state.length += state.objectMode ? 1 : chunk.length;
480 + }
481 + }
482 +
483 + state.ended = true;
484 +
485 + if (state.sync) {
486 + // if we are sync, wait until next tick to emit the data.
487 + // Otherwise we risk emitting data in the flow()
488 + // the readable code triggers during a read() call
489 + emitReadable(stream);
490 + } else {
491 + // emit 'readable' now to make sure it gets picked up.
492 + state.needReadable = false;
493 +
494 + if (!state.emittedReadable) {
495 + state.emittedReadable = true;
496 + emitReadable_(stream);
497 + }
498 + }
499 +} // Don't emit readable right away in sync mode, because this can trigger
500 +// another read() call => stack overflow. This way, it might trigger
501 +// a nextTick recursion warning, but that's not so bad.
502 +
503 +
504 +function emitReadable(stream) {
505 + var state = stream._readableState;
506 + state.needReadable = false;
507 +
508 + if (!state.emittedReadable) {
509 + debug('emitReadable', state.flowing);
510 + state.emittedReadable = true;
511 + process.nextTick(emitReadable_, stream);
512 + }
513 +}
514 +
515 +function emitReadable_(stream) {
516 + var state = stream._readableState;
517 + debug('emitReadable_', state.destroyed, state.length, state.ended);
518 +
519 + if (!state.destroyed && (state.length || state.ended)) {
520 + stream.emit('readable');
521 + } // The stream needs another readable event if
522 + // 1. It is not flowing, as the flow mechanism will take
523 + // care of it.
524 + // 2. It is not ended.
525 + // 3. It is below the highWaterMark, so we can schedule
526 + // another readable later.
527 +
528 +
529 + state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark;
530 + flow(stream);
531 +} // at this point, the user has presumably seen the 'readable' event,
532 +// and called read() to consume some data. that may have triggered
533 +// in turn another _read(n) call, in which case reading = true if
534 +// it's in progress.
535 +// However, if we're not ended, or reading, and the length < hwm,
536 +// then go ahead and try to read some more preemptively.
537 +
538 +
539 +function maybeReadMore(stream, state) {
540 + if (!state.readingMore) {
541 + state.readingMore = true;
542 + process.nextTick(maybeReadMore_, stream, state);
543 + }
544 +}
545 +
546 +function maybeReadMore_(stream, state) {
547 + var len = state.length;
548 +
549 + while (!state.reading && !state.ended && state.length < state.highWaterMark) {
550 + debug('maybeReadMore read 0');
551 + stream.read(0);
552 + if (len === state.length) // didn't get any data, stop spinning.
553 + break;else len = state.length;
554 + }
555 +
556 + state.readingMore = false;
557 +} // abstract method. to be overridden in specific implementation classes.
558 +// call cb(er, data) where data is <= n in length.
559 +// for virtual (non-string, non-buffer) streams, "length" is somewhat
560 +// arbitrary, and perhaps not very meaningful.
561 +
562 +
563 +Readable.prototype._read = function (n) {
564 + this.emit('error', new ERR_METHOD_NOT_IMPLEMENTED('_read()'));
565 +};
566 +
567 +Readable.prototype.pipe = function (dest, pipeOpts) {
568 + var src = this;
569 + var state = this._readableState;
570 +
571 + switch (state.pipesCount) {
572 + case 0:
573 + state.pipes = dest;
574 + break;
575 +
576 + case 1:
577 + state.pipes = [state.pipes, dest];
578 + break;
579 +
580 + default:
581 + state.pipes.push(dest);
582 + break;
583 + }
584 +
585 + state.pipesCount += 1;
586 + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);
587 + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr;
588 + var endFn = doEnd ? onend : unpipe;
589 + if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn);
590 + dest.on('unpipe', onunpipe);
591 +
592 + function onunpipe(readable, unpipeInfo) {
593 + debug('onunpipe');
594 +
595 + if (readable === src) {
596 + if (unpipeInfo && unpipeInfo.hasUnpiped === false) {
597 + unpipeInfo.hasUnpiped = true;
598 + cleanup();
599 + }
600 + }
601 + }
602 +
603 + function onend() {
604 + debug('onend');
605 + dest.end();
606 + } // when the dest drains, it reduces the awaitDrain counter
607 + // on the source. This would be more elegant with a .once()
608 + // handler in flow(), but adding and removing repeatedly is
609 + // too slow.
610 +
611 +
612 + var ondrain = pipeOnDrain(src);
613 + dest.on('drain', ondrain);
614 + var cleanedUp = false;
615 +
616 + function cleanup() {
617 + debug('cleanup'); // cleanup event handlers once the pipe is broken
618 +
619 + dest.removeListener('close', onclose);
620 + dest.removeListener('finish', onfinish);
621 + dest.removeListener('drain', ondrain);
622 + dest.removeListener('error', onerror);
623 + dest.removeListener('unpipe', onunpipe);
624 + src.removeListener('end', onend);
625 + src.removeListener('end', unpipe);
626 + src.removeListener('data', ondata);
627 + cleanedUp = true; // if the reader is waiting for a drain event from this
628 + // specific writer, then it would cause it to never start
629 + // flowing again.
630 + // So, if this is awaiting a drain, then we just call it now.
631 + // If we don't know, then assume that we are waiting for one.
632 +
633 + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain();
634 + }
635 +
636 + src.on('data', ondata);
637 +
638 + function ondata(chunk) {
639 + debug('ondata');
640 + var ret = dest.write(chunk);
641 + debug('dest.write', ret);
642 +
643 + if (ret === false) {
644 + // If the user unpiped during `dest.write()`, it is possible
645 + // to get stuck in a permanently paused state if that write
646 + // also returned false.
647 + // => Check whether `dest` is still a piping destination.
648 + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) {
649 + debug('false write response, pause', state.awaitDrain);
650 + state.awaitDrain++;
651 + }
652 +
653 + src.pause();
654 + }
655 + } // if the dest has an error, then stop piping into it.
656 + // however, don't suppress the throwing behavior for this.
657 +
658 +
659 + function onerror(er) {
660 + debug('onerror', er);
661 + unpipe();
662 + dest.removeListener('error', onerror);
663 + if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er);
664 + } // Make sure our error handler is attached before userland ones.
665 +
666 +
667 + prependListener(dest, 'error', onerror); // Both close and finish should trigger unpipe, but only once.
668 +
669 + function onclose() {
670 + dest.removeListener('finish', onfinish);
671 + unpipe();
672 + }
673 +
674 + dest.once('close', onclose);
675 +
676 + function onfinish() {
677 + debug('onfinish');
678 + dest.removeListener('close', onclose);
679 + unpipe();
680 + }
681 +
682 + dest.once('finish', onfinish);
683 +
684 + function unpipe() {
685 + debug('unpipe');
686 + src.unpipe(dest);
687 + } // tell the dest that it's being piped to
688 +
689 +
690 + dest.emit('pipe', src); // start the flow if it hasn't been started already.
691 +
692 + if (!state.flowing) {
693 + debug('pipe resume');
694 + src.resume();
695 + }
696 +
697 + return dest;
698 +};
699 +
700 +function pipeOnDrain(src) {
701 + return function pipeOnDrainFunctionResult() {
702 + var state = src._readableState;
703 + debug('pipeOnDrain', state.awaitDrain);
704 + if (state.awaitDrain) state.awaitDrain--;
705 +
706 + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) {
707 + state.flowing = true;
708 + flow(src);
709 + }
710 + };
711 +}
712 +
713 +Readable.prototype.unpipe = function (dest) {
714 + var state = this._readableState;
715 + var unpipeInfo = {
716 + hasUnpiped: false
717 + }; // if we're not piping anywhere, then do nothing.
718 +
719 + if (state.pipesCount === 0) return this; // just one destination. most common case.
720 +
721 + if (state.pipesCount === 1) {
722 + // passed in one, but it's not the right one.
723 + if (dest && dest !== state.pipes) return this;
724 + if (!dest) dest = state.pipes; // got a match.
725 +
726 + state.pipes = null;
727 + state.pipesCount = 0;
728 + state.flowing = false;
729 + if (dest) dest.emit('unpipe', this, unpipeInfo);
730 + return this;
731 + } // slow case. multiple pipe destinations.
732 +
733 +
734 + if (!dest) {
735 + // remove all.
736 + var dests = state.pipes;
737 + var len = state.pipesCount;
738 + state.pipes = null;
739 + state.pipesCount = 0;
740 + state.flowing = false;
741 +
742 + for (var i = 0; i < len; i++) {
743 + dests[i].emit('unpipe', this, {
744 + hasUnpiped: false
745 + });
746 + }
747 +
748 + return this;
749 + } // try to find the right one.
750 +
751 +
752 + var index = indexOf(state.pipes, dest);
753 + if (index === -1) return this;
754 + state.pipes.splice(index, 1);
755 + state.pipesCount -= 1;
756 + if (state.pipesCount === 1) state.pipes = state.pipes[0];
757 + dest.emit('unpipe', this, unpipeInfo);
758 + return this;
759 +}; // set up data events if they are asked for
760 +// Ensure readable listeners eventually get something
761 +
762 +
763 +Readable.prototype.on = function (ev, fn) {
764 + var res = Stream.prototype.on.call(this, ev, fn);
765 + var state = this._readableState;
766 +
767 + if (ev === 'data') {
768 + // update readableListening so that resume() may be a no-op
769 + // a few lines down. This is needed to support once('readable').
770 + state.readableListening = this.listenerCount('readable') > 0; // Try start flowing on next tick if stream isn't explicitly paused
771 +
772 + if (state.flowing !== false) this.resume();
773 + } else if (ev === 'readable') {
774 + if (!state.endEmitted && !state.readableListening) {
775 + state.readableListening = state.needReadable = true;
776 + state.flowing = false;
777 + state.emittedReadable = false;
778 + debug('on readable', state.length, state.reading);
779 +
780 + if (state.length) {
781 + emitReadable(this);
782 + } else if (!state.reading) {
783 + process.nextTick(nReadingNextTick, this);
784 + }
785 + }
786 + }
787 +
788 + return res;
789 +};
790 +
791 +Readable.prototype.addListener = Readable.prototype.on;
792 +
793 +Readable.prototype.removeListener = function (ev, fn) {
794 + var res = Stream.prototype.removeListener.call(this, ev, fn);
795 +
796 + if (ev === 'readable') {
797 + // We need to check if there is someone still listening to
798 + // readable and reset the state. However this needs to happen
799 + // after readable has been emitted but before I/O (nextTick) to
800 + // support once('readable', fn) cycles. This means that calling
801 + // resume within the same tick will have no
802 + // effect.
803 + process.nextTick(updateReadableListening, this);
804 + }
805 +
806 + return res;
807 +};
808 +
809 +Readable.prototype.removeAllListeners = function (ev) {
810 + var res = Stream.prototype.removeAllListeners.apply(this, arguments);
811 +
812 + if (ev === 'readable' || ev === undefined) {
813 + // We need to check if there is someone still listening to
814 + // readable and reset the state. However this needs to happen
815 + // after readable has been emitted but before I/O (nextTick) to
816 + // support once('readable', fn) cycles. This means that calling
817 + // resume within the same tick will have no
818 + // effect.
819 + process.nextTick(updateReadableListening, this);
820 + }
821 +
822 + return res;
823 +};
824 +
825 +function updateReadableListening(self) {
826 + var state = self._readableState;
827 + state.readableListening = self.listenerCount('readable') > 0;
828 +
829 + if (state.resumeScheduled && !state.paused) {
830 + // flowing needs to be set to true now, otherwise
831 + // the upcoming resume will not flow.
832 + state.flowing = true; // crude way to check if we should resume
833 + } else if (self.listenerCount('data') > 0) {
834 + self.resume();
835 + }
836 +}
837 +
838 +function nReadingNextTick(self) {
839 + debug('readable nexttick read 0');
840 + self.read(0);
841 +} // pause() and resume() are remnants of the legacy readable stream API
842 +// If the user uses them, then switch into old mode.
843 +
844 +
845 +Readable.prototype.resume = function () {
846 + var state = this._readableState;
847 +
848 + if (!state.flowing) {
849 + debug('resume'); // we flow only if there is no one listening
850 + // for readable, but we still have to call
851 + // resume()
852 +
853 + state.flowing = !state.readableListening;
854 + resume(this, state);
855 + }
856 +
857 + state.paused = false;
858 + return this;
859 +};
860 +
861 +function resume(stream, state) {
862 + if (!state.resumeScheduled) {
863 + state.resumeScheduled = true;
864 + process.nextTick(resume_, stream, state);
865 + }
866 +}
867 +
868 +function resume_(stream, state) {
869 + debug('resume', state.reading);
870 +
871 + if (!state.reading) {
872 + stream.read(0);
873 + }
874 +
875 + state.resumeScheduled = false;
876 + stream.emit('resume');
877 + flow(stream);
878 + if (state.flowing && !state.reading) stream.read(0);
879 +}
880 +
881 +Readable.prototype.pause = function () {
882 + debug('call pause flowing=%j', this._readableState.flowing);
883 +
884 + if (this._readableState.flowing !== false) {
885 + debug('pause');
886 + this._readableState.flowing = false;
887 + this.emit('pause');
888 + }
889 +
890 + this._readableState.paused = true;
891 + return this;
892 +};
893 +
894 +function flow(stream) {
895 + var state = stream._readableState;
896 + debug('flow', state.flowing);
897 +
898 + while (state.flowing && stream.read() !== null) {
899 + ;
900 + }
901 +} // wrap an old-style stream as the async data source.
902 +// This is *not* part of the readable stream interface.
903 +// It is an ugly unfortunate mess of history.
904 +
905 +
906 +Readable.prototype.wrap = function (stream) {
907 + var _this = this;
908 +
909 + var state = this._readableState;
910 + var paused = false;
911 + stream.on('end', function () {
912 + debug('wrapped end');
913 +
914 + if (state.decoder && !state.ended) {
915 + var chunk = state.decoder.end();
916 + if (chunk && chunk.length) _this.push(chunk);
917 + }
918 +
919 + _this.push(null);
920 + });
921 + stream.on('data', function (chunk) {
922 + debug('wrapped data');
923 + if (state.decoder) chunk = state.decoder.write(chunk); // don't skip over falsy values in objectMode
924 +
925 + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return;
926 +
927 + var ret = _this.push(chunk);
928 +
929 + if (!ret) {
930 + paused = true;
931 + stream.pause();
932 + }
933 + }); // proxy all the other methods.
934 + // important when wrapping filters and duplexes.
935 +
936 + for (var i in stream) {
937 + if (this[i] === undefined && typeof stream[i] === 'function') {
938 + this[i] = function methodWrap(method) {
939 + return function methodWrapReturnFunction() {
940 + return stream[method].apply(stream, arguments);
941 + };
942 + }(i);
943 + }
944 + } // proxy certain important events.
945 +
946 +
947 + for (var n = 0; n < kProxyEvents.length; n++) {
948 + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n]));
949 + } // when we try to consume some more bytes, simply unpause the
950 + // underlying stream.
951 +
952 +
953 + this._read = function (n) {
954 + debug('wrapped _read', n);
955 +
956 + if (paused) {
957 + paused = false;
958 + stream.resume();
959 + }
960 + };
961 +
962 + return this;
963 +};
964 +
965 +if (typeof Symbol === 'function') {
966 + Readable.prototype[Symbol.asyncIterator] = function () {
967 + emitExperimentalWarning('Readable[Symbol.asyncIterator]');
968 +
969 + if (createReadableStreamAsyncIterator === undefined) {
970 + createReadableStreamAsyncIterator = require('./internal/streams/async_iterator');
971 + }
972 +
973 + return createReadableStreamAsyncIterator(this);
974 + };
975 +}
976 +
977 +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', {
978 + // making it explicit this property is not enumerable
979 + // because otherwise some prototype manipulation in
980 + // userland will fail
981 + enumerable: false,
982 + get: function get() {
983 + return this._readableState.highWaterMark;
984 + }
985 +});
986 +Object.defineProperty(Readable.prototype, 'readableBuffer', {
987 + // making it explicit this property is not enumerable
988 + // because otherwise some prototype manipulation in
989 + // userland will fail
990 + enumerable: false,
991 + get: function get() {
992 + return this._readableState && this._readableState.buffer;
993 + }
994 +});
995 +Object.defineProperty(Readable.prototype, 'readableFlowing', {
996 + // making it explicit this property is not enumerable
997 + // because otherwise some prototype manipulation in
998 + // userland will fail
999 + enumerable: false,
1000 + get: function get() {
1001 + return this._readableState.flowing;
1002 + },
1003 + set: function set(state) {
1004 + if (this._readableState) {
1005 + this._readableState.flowing = state;
1006 + }
1007 + }
1008 +}); // exposed for testing purposes only.
1009 +
1010 +Readable._fromList = fromList;
1011 +Object.defineProperty(Readable.prototype, 'readableLength', {
1012 + // making it explicit this property is not enumerable
1013 + // because otherwise some prototype manipulation in
1014 + // userland will fail
1015 + enumerable: false,
1016 + get: function get() {
1017 + return this._readableState.length;
1018 + }
1019 +}); // Pluck off n bytes from an array of buffers.
1020 +// Length is the combined lengths of all the buffers in the list.
1021 +// This function is designed to be inlinable, so please take care when making
1022 +// changes to the function body.
1023 +
1024 +function fromList(n, state) {
1025 + // nothing buffered
1026 + if (state.length === 0) return null;
1027 + var ret;
1028 + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) {
1029 + // read it all, truncate the list
1030 + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length);
1031 + state.buffer.clear();
1032 + } else {
1033 + // read part of list
1034 + ret = state.buffer.consume(n, state.decoder);
1035 + }
1036 + return ret;
1037 +}
1038 +
1039 +function endReadable(stream) {
1040 + var state = stream._readableState;
1041 + debug('endReadable', state.endEmitted);
1042 +
1043 + if (!state.endEmitted) {
1044 + state.ended = true;
1045 + process.nextTick(endReadableNT, state, stream);
1046 + }
1047 +}
1048 +
1049 +function endReadableNT(state, stream) {
1050 + debug('endReadableNT', state.endEmitted, state.length); // Check that we didn't get one last unshift.
1051 +
1052 + if (!state.endEmitted && state.length === 0) {
1053 + state.endEmitted = true;
1054 + stream.readable = false;
1055 + stream.emit('end');
1056 + }
1057 +}
1058 +
1059 +function indexOf(xs, x) {
1060 + for (var i = 0, l = xs.length; i < l; i++) {
1061 + if (xs[i] === x) return i;
1062 + }
1063 +
1064 + return -1;
1065 +}
node_modules/readable-stream/lib/_stream_transform.js
@@ -1,0 +1,201 @@
1 +// Copyright Joyent, Inc. and other Node contributors.
2 +//
3 +// Permission is hereby granted, free of charge, to any person obtaining a
4 +// copy of this software and associated documentation files (the
5 +// "Software"), to deal in the Software without restriction, including
6 +// without limitation the rights to use, copy, modify, merge, publish,
7 +// distribute, sublicense, and/or sell copies of the Software, and to permit
8 +// persons to whom the Software is furnished to do so, subject to the
9 +// following conditions:
10 +//
11 +// The above copyright notice and this permission notice shall be included
12 +// in all copies or substantial portions of the Software.
13 +//
14 +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
15 +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
16 +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
17 +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
18 +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
19 +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
20 +// USE OR OTHER DEALINGS IN THE SOFTWARE.
21 +// a transform stream is a readable/writable stream where you do
22 +// something with the data. Sometimes it's called a "filter",
23 +// but that's not a great name for it, since that implies a thing where
24 +// some bits pass through, and others are simply ignored. (That would
25 +// be a valid example of a transform, of course.)
26 +//
27 +// While the output is causally related to the input, it's not a
28 +// necessarily symmetric or synchronous transformation. For example,
29 +// a zlib stream might take multiple plain-text writes(), and then
30 +// emit a single compressed chunk some time in the future.
31 +//
32 +// Here's how this works:
33 +//
34 +// The Transform stream has all the aspects of the readable and writable
35 +// stream classes. When you write(chunk), that calls _write(chunk,cb)
36 +// internally, and returns false if there's a lot of pending writes
37 +// buffered up. When you call read(), that calls _read(n) until
38 +// there's enough pending readable data buffered up.
39 +//
40 +// In a transform stream, the written data is placed in a buffer. When
41 +// _read(n) is called, it transforms the queued up data, calling the
42 +// buffered _write cb's as it consumes chunks. If consuming a single
43 +// written chunk would result in multiple output chunks, then the first
44 +// outputted bit calls the readcb, and subsequent chunks just go into
45 +// the read buffer, and will cause it to emit 'readable' if necessary.
46 +//
47 +// This way, back-pressure is actually determined by the reading side,
48 +// since _read has to be called to start processing a new chunk. However,
49 +// a pathological inflate type of transform can cause excessive buffering
50 +// here. For example, imagine a stream where every byte of input is
51 +// interpreted as an integer from 0-255, and then results in that many
52 +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
53 +// 1kb of data being output. In this case, you could write a very small
54 +// amount of input, and end up with a very large amount of output. In
55 +// such a pathological inflating mechanism, there'd be no way to tell
56 +// the system to stop doing the transform. A single 4MB write could
57 +// cause the system to run out of memory.
58 +//
59 +// However, even in such a pathological case, only a single written chunk
60 +// would be consumed, and then the rest would wait (un-transformed) until
61 +// the results of the previous transformed chunk were consumed.
62 +'use strict';
63 +
64 +module.exports = Transform;
65 +
66 +var _require$codes = require('../errors').codes,
67 + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,
68 + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,
69 + ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING,
70 + ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0;
71 +
72 +var Duplex = require('./_stream_duplex');
73 +
74 +require('inherits')(Transform, Duplex);
75 +
76 +function afterTransform(er, data) {
77 + var ts = this._transformState;
78 + ts.transforming = false;
79 + var cb = ts.writecb;
80 +
81 + if (cb === null) {
82 + return this.emit('error', new ERR_MULTIPLE_CALLBACK());
83 + }
84 +
85 + ts.writechunk = null;
86 + ts.writecb = null;
87 + if (data != null) // single equals check for both `null` and `undefined`
88 + this.push(data);
89 + cb(er);
90 + var rs = this._readableState;
91 + rs.reading = false;
92 +
93 + if (rs.needReadable || rs.length < rs.highWaterMark) {
94 + this._read(rs.highWaterMark);
95 + }
96 +}
97 +
98 +function Transform(options) {
99 + if (!(this instanceof Transform)) return new Transform(options);
100 + Duplex.call(this, options);
101 + this._transformState = {
102 + afterTransform: afterTransform.bind(this),
103 + needTransform: false,
104 + transforming: false,
105 + writecb: null,
106 + writechunk: null,
107 + writeencoding: null
108 + }; // start out asking for a readable event once data is transformed.
109 +
110 + this._readableState.needReadable = true; // we have implemented the _read method, and done the other things
111 + // that Readable wants before the first _read call, so unset the
112 + // sync guard flag.
113 +
114 + this._readableState.sync = false;
115 +
116 + if (options) {
117 + if (typeof options.transform === 'function') this._transform = options.transform;
118 + if (typeof options.flush === 'function') this._flush = options.flush;
119 + } // When the writable side finishes, then flush out anything remaining.
120 +
121 +
122 + this.on('prefinish', prefinish);
123 +}
124 +
125 +function prefinish() {
126 + var _this = this;
127 +
128 + if (typeof this._flush === 'function' && !this._readableState.destroyed) {
129 + this._flush(function (er, data) {
130 + done(_this, er, data);
131 + });
132 + } else {
133 + done(this, null, null);
134 + }
135 +}
136 +
137 +Transform.prototype.push = function (chunk, encoding) {
138 + this._transformState.needTransform = false;
139 + return Duplex.prototype.push.call(this, chunk, encoding);
140 +}; // This is the part where you do stuff!
141 +// override this function in implementation classes.
142 +// 'chunk' is an input chunk.
143 +//
144 +// Call `push(newChunk)` to pass along transformed output
145 +// to the readable side. You may call 'push' zero or more times.
146 +//
147 +// Call `cb(err)` when you are done with this chunk. If you pass
148 +// an error, then that'll put the hurt on the whole operation. If you
149 +// never call cb(), then you'll never get another chunk.
150 +
151 +
152 +Transform.prototype._transform = function (chunk, encoding, cb) {
153 + cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()'));
154 +};
155 +
156 +Transform.prototype._write = function (chunk, encoding, cb) {
157 + var ts = this._transformState;
158 + ts.writecb = cb;
159 + ts.writechunk = chunk;
160 + ts.writeencoding = encoding;
161 +
162 + if (!ts.transforming) {
163 + var rs = this._readableState;
164 + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);
165 + }
166 +}; // Doesn't matter what the args are here.
167 +// _transform does all the work.
168 +// The fact that we got here means that the readable side wants more data.
169 +
170 +
171 +Transform.prototype._read = function (n) {
172 + var ts = this._transformState;
173 +
174 + if (ts.writechunk !== null && !ts.transforming) {
175 + ts.transforming = true;
176 +
177 + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
178 + } else {
179 + // mark that we need a transform, so that any data that comes in
180 + // will get processed, now that we've asked for it.
181 + ts.needTransform = true;
182 + }
183 +};
184 +
185 +Transform.prototype._destroy = function (err, cb) {
186 + Duplex.prototype._destroy.call(this, err, function (err2) {
187 + cb(err2);
188 + });
189 +};
190 +
191 +function done(stream, er, data) {
192 + if (er) return stream.emit('error', er);
193 + if (data != null) // single equals check for both `null` and `undefined`
194 + stream.push(data); // TODO(BridgeAR): Write a test for these two error cases
195 + // if there's nothing in the write buffer, then that means
196 + // that nothing more will ever be provided
197 +
198 + if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0();
199 + if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING();
200 + return stream.push(null);
201 +}
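
The `_transform()` contract described in the comments above (push zero or more output chunks, then call `cb` exactly once) can also be used without subclassing, since the constructor accepts `transform` and `flush` options. A minimal usage sketch, assuming this vendored copy resolves as `readable-stream`; the uppercasing transform is purely illustrative:

```js
var Transform = require('readable-stream').Transform;

// cb(err, data) is afterTransform(): a non-null `data` is pushed to the
// readable side, and the error (if any) is forwarded to the writer.
var upper = new Transform({
  transform: function (chunk, encoding, cb) {
    cb(null, chunk.toString().toUpperCase());
  }
});

process.stdin.pipe(upper).pipe(process.stdout);
```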
node_modules/readable-stream/lib/_stream_writable.js
@@ -1,0 +1,683 @@
1 +// Copyright Joyent, Inc. and other Node contributors.
2 +//
3 +// Permission is hereby granted, free of charge, to any person obtaining a
4 +// copy of this software and associated documentation files (the
5 +// "Software"), to deal in the Software without restriction, including
6 +// without limitation the rights to use, copy, modify, merge, publish,
7 +// distribute, sublicense, and/or sell copies of the Software, and to permit
8 +// persons to whom the Software is furnished to do so, subject to the
9 +// following conditions:
10 +//
11 +// The above copyright notice and this permission notice shall be included
12 +// in all copies or substantial portions of the Software.
13 +//
14 +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
15 +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
16 +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
17 +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
18 +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
19 +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
20 +// USE OR OTHER DEALINGS IN THE SOFTWARE.
21 +// A bit simpler than readable streams.
22 +// Implement an async ._write(chunk, encoding, cb), and it'll handle all
23 +// the drain event emission and buffering.
24 +'use strict';
25 +
26 +module.exports = Writable;
27 +/* <replacement> */
28 +
29 +function WriteReq(chunk, encoding, cb) {
30 + this.chunk = chunk;
31 + this.encoding = encoding;
32 + this.callback = cb;
33 + this.next = null;
34 +} // This looks like a linked list, but it is not:
35 +// there will only ever be two of these per stream
36 +
37 +
38 +function CorkedRequest(state) {
39 + var _this = this;
40 +
41 + this.next = null;
42 + this.entry = null;
43 +
44 + this.finish = function () {
45 + onCorkedFinish(_this, state);
46 + };
47 +}
48 +/* </replacement> */
49 +
50 +/*<replacement>*/
51 +
52 +
53 +var Duplex;
54 +/*</replacement>*/
55 +
56 +Writable.WritableState = WritableState;
57 +/*<replacement>*/
58 +
59 +var internalUtil = {
60 + deprecate: require('util-deprecate')
61 +};
62 +/*</replacement>*/
63 +
64 +/*<replacement>*/
65 +
66 +var Stream = require('./internal/streams/stream');
67 +/*</replacement>*/
68 +
69 +
70 +var Buffer = require('buffer').Buffer;
71 +
72 +var OurUint8Array = global.Uint8Array || function () {};
73 +
74 +function _uint8ArrayToBuffer(chunk) {
75 + return Buffer.from(chunk);
76 +}
77 +
78 +function _isUint8Array(obj) {
79 + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
80 +}
81 +
82 +var destroyImpl = require('./internal/streams/destroy');
83 +
84 +var _require = require('./internal/streams/state'),
85 + getHighWaterMark = _require.getHighWaterMark;
86 +
87 +var _require$codes = require('../errors').codes,
88 + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE,
89 + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,
90 + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,
91 + ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE,
92 + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED,
93 + ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES,
94 + ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END,
95 + ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING;
96 +
97 +require('inherits')(Writable, Stream);
98 +
99 +function nop() {}
100 +
101 +function WritableState(options, stream, isDuplex) {
102 + Duplex = Duplex || require('./_stream_duplex');
103 + options = options || {}; // Duplex streams are both readable and writable, but share
104 + // the same options object.
105 + // However, some cases require setting options to different
106 + // values for the readable and the writable sides of the duplex stream,
107 + // e.g. options.readableObjectMode vs. options.writableObjectMode, etc.
108 +
109 + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag to indicate whether or not this stream
110 + // contains buffers or objects.
111 +
112 + this.objectMode = !!options.objectMode;
113 + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; // the point at which write() starts returning false
114 + // Note: 0 is a valid value, means that we always return false if
115 + // the entire buffer is not flushed immediately on write()
116 +
117 + this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); // if _final has been called
118 +
119 + this.finalCalled = false; // drain event flag.
120 +
121 + this.needDrain = false; // at the start of calling end()
122 +
123 + this.ending = false; // when end() has been called, and returned
124 +
125 + this.ended = false; // when 'finish' is emitted
126 +
127 + this.finished = false; // has it been destroyed
128 +
129 + this.destroyed = false; // should we decode strings into buffers before passing to _write?
130 + // this is here so that some node-core streams can optimize string
131 + // handling at a lower level.
132 +
133 + var noDecode = options.decodeStrings === false;
134 + this.decodeStrings = !noDecode; // Crypto is kind of old and crusty. Historically, its default string
135 + // encoding is 'binary' so we have to make this configurable.
136 + // Everything else in the universe uses 'utf8', though.
137 +
138 + this.defaultEncoding = options.defaultEncoding || 'utf8'; // not an actual buffer we keep track of, but a measurement
139 + // of how much we're waiting to get pushed to some underlying
140 + // socket or file.
141 +
142 + this.length = 0; // a flag to see when we're in the middle of a write.
143 +
144 + this.writing = false; // when true all writes will be buffered until .uncork() call
145 +
146 + this.corked = 0; // a flag to be able to tell if the onwrite cb is called immediately,
147 + // or on a later tick. We set this to true at first, because any
148 + // actions that shouldn't happen until "later" should generally also
149 + // not happen before the first write call.
150 +
151 + this.sync = true; // a flag to know if we're processing previously buffered items, which
152 + // may call the _write() callback in the same tick, so that we don't
153 + // end up in an overlapped onwrite situation.
154 +
155 + this.bufferProcessing = false; // the callback that's passed to _write(chunk,cb)
156 +
157 + this.onwrite = function (er) {
158 + onwrite(stream, er);
159 + }; // the callback that the user supplies to write(chunk,encoding,cb)
160 +
161 +
162 + this.writecb = null; // the amount that is being written when _write is called.
163 +
164 + this.writelen = 0;
165 + this.bufferedRequest = null;
166 + this.lastBufferedRequest = null; // number of pending user-supplied write callbacks
167 + // this must be 0 before 'finish' can be emitted
168 +
169 + this.pendingcb = 0; // emit prefinish if the only thing we're waiting for is _write cbs
170 + // This is relevant for synchronous Transform streams
171 +
172 + this.prefinished = false; // True if the error was already emitted and should not be thrown again
173 +
174 + this.errorEmitted = false; // Should close be emitted on destroy. Defaults to true.
175 +
176 + this.emitClose = options.emitClose !== false; // count buffered requests
177 +
178 + this.bufferedRequestCount = 0; // allocate the first CorkedRequest, there is always
179 + // one allocated and free to use, and we maintain at most two
180 +
181 + this.corkedRequestsFree = new CorkedRequest(this);
182 +}
183 +
184 +WritableState.prototype.getBuffer = function getBuffer() {
185 + var current = this.bufferedRequest;
186 + var out = [];
187 +
188 + while (current) {
189 + out.push(current);
190 + current = current.next;
191 + }
192 +
193 + return out;
194 +};
195 +
196 +(function () {
197 + try {
198 + Object.defineProperty(WritableState.prototype, 'buffer', {
199 + get: internalUtil.deprecate(function writableStateBufferGetter() {
200 + return this.getBuffer();
201 + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003')
202 + });
203 + } catch (_) {}
204 +})(); // Test _writableState for inheritance to account for Duplex streams,
205 +// whose prototype chain only points to Readable.
206 +
207 +
208 +var realHasInstance;
209 +
210 +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {
211 + realHasInstance = Function.prototype[Symbol.hasInstance];
212 + Object.defineProperty(Writable, Symbol.hasInstance, {
213 + value: function value(object) {
214 + if (realHasInstance.call(this, object)) return true;
215 + if (this !== Writable) return false;
216 + return object && object._writableState instanceof WritableState;
217 + }
218 + });
219 +} else {
220 + realHasInstance = function realHasInstance(object) {
221 + return object instanceof this;
222 + };
223 +}
224 +
225 +function Writable(options) {
226 + Duplex = Duplex || require('./_stream_duplex'); // Writable ctor is applied to Duplexes, too.
227 + // `realHasInstance` is necessary because using plain `instanceof`
228 + // would return false, as no `_writableState` property is attached.
229 + // Trying to use the custom `instanceof` for Writable here will also break the
230 + // Node.js LazyTransform implementation, which has a non-trivial getter for
231 + // `_writableState` that would lead to infinite recursion.
232 + // Checking for a Stream.Duplex instance is faster here instead of inside
233 + // the WritableState constructor, at least with V8 6.5
234 +
235 + var isDuplex = this instanceof Duplex;
236 + if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options);
237 + this._writableState = new WritableState(options, this, isDuplex); // legacy.
238 +
239 + this.writable = true;
240 +
241 + if (options) {
242 + if (typeof options.write === 'function') this._write = options.write;
243 + if (typeof options.writev === 'function') this._writev = options.writev;
244 + if (typeof options.destroy === 'function') this._destroy = options.destroy;
245 + if (typeof options.final === 'function') this._final = options.final;
246 + }
247 +
248 + Stream.call(this);
249 +} // Otherwise people can pipe Writable streams, which is just wrong.
250 +
251 +
252 +Writable.prototype.pipe = function () {
253 + this.emit('error', new ERR_STREAM_CANNOT_PIPE());
254 +};
255 +
256 +function writeAfterEnd(stream, cb) {
257 + var er = new ERR_STREAM_WRITE_AFTER_END(); // TODO: defer error events consistently everywhere, not just the cb
258 +
259 + stream.emit('error', er);
260 + process.nextTick(cb, er);
261 +} // Checks that a user-supplied chunk is valid, especially for the particular
262 +// mode the stream is in. Currently this means that `null` is never accepted
263 +// and undefined/non-string values are only allowed in object mode.
264 +
265 +
266 +function validChunk(stream, state, chunk, cb) {
267 + var er;
268 +
269 + if (chunk === null) {
270 + er = new ERR_STREAM_NULL_VALUES();
271 + } else if (typeof chunk !== 'string' && !state.objectMode) {
272 + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk);
273 + }
274 +
275 + if (er) {
276 + stream.emit('error', er);
277 + process.nextTick(cb, er);
278 + return false;
279 + }
280 +
281 + return true;
282 +}
283 +
284 +Writable.prototype.write = function (chunk, encoding, cb) {
285 + var state = this._writableState;
286 + var ret = false;
287 +
288 + var isBuf = !state.objectMode && _isUint8Array(chunk);
289 +
290 + if (isBuf && !Buffer.isBuffer(chunk)) {
291 + chunk = _uint8ArrayToBuffer(chunk);
292 + }
293 +
294 + if (typeof encoding === 'function') {
295 + cb = encoding;
296 + encoding = null;
297 + }
298 +
299 + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;
300 + if (typeof cb !== 'function') cb = nop;
301 + if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) {
302 + state.pendingcb++;
303 + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb);
304 + }
305 + return ret;
306 +};
307 +
308 +Writable.prototype.cork = function () {
309 + this._writableState.corked++;
310 +};
311 +
312 +Writable.prototype.uncork = function () {
313 + var state = this._writableState;
314 +
315 + if (state.corked) {
316 + state.corked--;
317 + if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);
318 + }
319 +};
320 +
321 +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
322 + // node::ParseEncoding() requires lower case.
323 + if (typeof encoding === 'string') encoding = encoding.toLowerCase();
324 + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding);
325 + this._writableState.defaultEncoding = encoding;
326 + return this;
327 +};
328 +
329 +Object.defineProperty(Writable.prototype, 'writableBuffer', {
330 + // making it explicit this property is not enumerable
331 + // because otherwise some prototype manipulation in
332 + // userland will fail
333 + enumerable: false,
334 + get: function get() {
335 + return this._writableState && this._writableState.getBuffer();
336 + }
337 +});
338 +
339 +function decodeChunk(state, chunk, encoding) {
340 + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {
341 + chunk = Buffer.from(chunk, encoding);
342 + }
343 +
344 + return chunk;
345 +}
346 +
347 +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', {
348 + // making it explicit this property is not enumerable
349 + // because otherwise some prototype manipulation in
350 + // userland will fail
351 + enumerable: false,
352 + get: function get() {
353 + return this._writableState.highWaterMark;
354 + }
355 +}); // if we're already writing something, then just put this
356 +// in the queue, and wait our turn. Otherwise, call _write
357 +// If we return false, then we need a drain event, so set that flag.
358 +
359 +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) {
360 + if (!isBuf) {
361 + var newChunk = decodeChunk(state, chunk, encoding);
362 +
363 + if (chunk !== newChunk) {
364 + isBuf = true;
365 + encoding = 'buffer';
366 + chunk = newChunk;
367 + }
368 + }
369 +
370 + var len = state.objectMode ? 1 : chunk.length;
371 + state.length += len;
372 + var ret = state.length < state.highWaterMark; // we must ensure that previous needDrain will not be reset to false.
373 +
374 + if (!ret) state.needDrain = true;
375 +
376 + if (state.writing || state.corked) {
377 + var last = state.lastBufferedRequest;
378 + state.lastBufferedRequest = {
379 + chunk: chunk,
380 + encoding: encoding,
381 + isBuf: isBuf,
382 + callback: cb,
383 + next: null
384 + };
385 +
386 + if (last) {
387 + last.next = state.lastBufferedRequest;
388 + } else {
389 + state.bufferedRequest = state.lastBufferedRequest;
390 + }
391 +
392 + state.bufferedRequestCount += 1;
393 + } else {
394 + doWrite(stream, state, false, len, chunk, encoding, cb);
395 + }
396 +
397 + return ret;
398 +}
399 +
400 +function doWrite(stream, state, writev, len, chunk, encoding, cb) {
401 + state.writelen = len;
402 + state.writecb = cb;
403 + state.writing = true;
404 + state.sync = true;
405 + if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);
406 + state.sync = false;
407 +}
408 +
409 +function onwriteError(stream, state, sync, er, cb) {
410 + --state.pendingcb;
411 +
412 + if (sync) {
413 + // defer the callback if we are being called synchronously
414 + // to avoid piling up things on the stack
415 + process.nextTick(cb, er); // this can emit finish, and it will always happen
416 + // after error
417 +
418 + process.nextTick(finishMaybe, stream, state);
419 + stream._writableState.errorEmitted = true;
420 + stream.emit('error', er);
421 + } else {
422 +    // if the write was async, the caller expects the
423 +    // callback to run before the 'error' event
424 + cb(er);
425 + stream._writableState.errorEmitted = true;
426 + stream.emit('error', er); // this can emit finish, but finish must
427 + // always follow error
428 +
429 + finishMaybe(stream, state);
430 + }
431 +}
432 +
433 +function onwriteStateUpdate(state) {
434 + state.writing = false;
435 + state.writecb = null;
436 + state.length -= state.writelen;
437 + state.writelen = 0;
438 +}
439 +
440 +function onwrite(stream, er) {
441 + var state = stream._writableState;
442 + var sync = state.sync;
443 + var cb = state.writecb;
444 + if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK();
445 + onwriteStateUpdate(state);
446 + if (er) onwriteError(stream, state, sync, er, cb);else {
447 + // Check if we're actually ready to finish, but don't emit yet
448 + var finished = needFinish(state) || stream.destroyed;
449 +
450 + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {
451 + clearBuffer(stream, state);
452 + }
453 +
454 + if (sync) {
455 + process.nextTick(afterWrite, stream, state, finished, cb);
456 + } else {
457 + afterWrite(stream, state, finished, cb);
458 + }
459 + }
460 +}
461 +
462 +function afterWrite(stream, state, finished, cb) {
463 + if (!finished) onwriteDrain(stream, state);
464 + state.pendingcb--;
465 + cb();
466 + finishMaybe(stream, state);
467 +} // Must force callback to be called on nextTick, so that we don't
468 +// emit 'drain' before the write() consumer gets the 'false' return
469 +// value, and has a chance to attach a 'drain' listener.
470 +
471 +
472 +function onwriteDrain(stream, state) {
473 + if (state.length === 0 && state.needDrain) {
474 + state.needDrain = false;
475 + stream.emit('drain');
476 + }
477 +} // if there's something in the buffer waiting, then process it
478 +
479 +
480 +function clearBuffer(stream, state) {
481 + state.bufferProcessing = true;
482 + var entry = state.bufferedRequest;
483 +
484 + if (stream._writev && entry && entry.next) {
485 + // Fast case, write everything using _writev()
486 + var l = state.bufferedRequestCount;
487 + var buffer = new Array(l);
488 + var holder = state.corkedRequestsFree;
489 + holder.entry = entry;
490 + var count = 0;
491 + var allBuffers = true;
492 +
493 + while (entry) {
494 + buffer[count] = entry;
495 + if (!entry.isBuf) allBuffers = false;
496 + entry = entry.next;
497 + count += 1;
498 + }
499 +
500 + buffer.allBuffers = allBuffers;
501 + doWrite(stream, state, true, state.length, buffer, '', holder.finish); // doWrite is almost always async, defer these to save a bit of time
502 + // as the hot path ends with doWrite
503 +
504 + state.pendingcb++;
505 + state.lastBufferedRequest = null;
506 +
507 + if (holder.next) {
508 + state.corkedRequestsFree = holder.next;
509 + holder.next = null;
510 + } else {
511 + state.corkedRequestsFree = new CorkedRequest(state);
512 + }
513 +
514 + state.bufferedRequestCount = 0;
515 + } else {
516 + // Slow case, write chunks one-by-one
517 + while (entry) {
518 + var chunk = entry.chunk;
519 + var encoding = entry.encoding;
520 + var cb = entry.callback;
521 + var len = state.objectMode ? 1 : chunk.length;
522 + doWrite(stream, state, false, len, chunk, encoding, cb);
523 + entry = entry.next;
524 + state.bufferedRequestCount--; // if we didn't call the onwrite immediately, then
525 + // it means that we need to wait until it does.
526 + // also, that means that the chunk and cb are currently
527 + // being processed, so move the buffer counter past them.
528 +
529 + if (state.writing) {
530 + break;
531 + }
532 + }
533 +
534 + if (entry === null) state.lastBufferedRequest = null;
535 + }
536 +
537 + state.bufferedRequest = entry;
538 + state.bufferProcessing = false;
539 +}
540 +
541 +Writable.prototype._write = function (chunk, encoding, cb) {
542 + cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()'));
543 +};
544 +
545 +Writable.prototype._writev = null;
546 +
547 +Writable.prototype.end = function (chunk, encoding, cb) {
548 + var state = this._writableState;
549 +
550 + if (typeof chunk === 'function') {
551 + cb = chunk;
552 + chunk = null;
553 + encoding = null;
554 + } else if (typeof encoding === 'function') {
555 + cb = encoding;
556 + encoding = null;
557 + }
558 +
559 + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); // .end() fully uncorks
560 +
561 + if (state.corked) {
562 + state.corked = 1;
563 + this.uncork();
564 + } // ignore unnecessary end() calls.
565 +
566 +
567 + if (!state.ending) endWritable(this, state, cb);
568 + return this;
569 +};
570 +
571 +Object.defineProperty(Writable.prototype, 'writableLength', {
572 + // making it explicit this property is not enumerable
573 + // because otherwise some prototype manipulation in
574 + // userland will fail
575 + enumerable: false,
576 + get: function get() {
577 + return this._writableState.length;
578 + }
579 +});
580 +
581 +function needFinish(state) {
582 + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing;
583 +}
584 +
585 +function callFinal(stream, state) {
586 + stream._final(function (err) {
587 + state.pendingcb--;
588 +
589 + if (err) {
590 + stream.emit('error', err);
591 + }
592 +
593 + state.prefinished = true;
594 + stream.emit('prefinish');
595 + finishMaybe(stream, state);
596 + });
597 +}
598 +
599 +function prefinish(stream, state) {
600 + if (!state.prefinished && !state.finalCalled) {
601 + if (typeof stream._final === 'function' && !state.destroyed) {
602 + state.pendingcb++;
603 + state.finalCalled = true;
604 + process.nextTick(callFinal, stream, state);
605 + } else {
606 + state.prefinished = true;
607 + stream.emit('prefinish');
608 + }
609 + }
610 +}
611 +
612 +function finishMaybe(stream, state) {
613 + var need = needFinish(state);
614 +
615 + if (need) {
616 + prefinish(stream, state);
617 +
618 + if (state.pendingcb === 0) {
619 + state.finished = true;
620 + stream.emit('finish');
621 + }
622 + }
623 +
624 + return need;
625 +}
626 +
627 +function endWritable(stream, state, cb) {
628 + state.ending = true;
629 + finishMaybe(stream, state);
630 +
631 + if (cb) {
632 + if (state.finished) process.nextTick(cb);else stream.once('finish', cb);
633 + }
634 +
635 + state.ended = true;
636 + stream.writable = false;
637 +}
638 +
639 +function onCorkedFinish(corkReq, state, err) {
640 + var entry = corkReq.entry;
641 + corkReq.entry = null;
642 +
643 + while (entry) {
644 + var cb = entry.callback;
645 + state.pendingcb--;
646 + cb(err);
647 + entry = entry.next;
648 + } // reuse the free corkReq.
649 +
650 +
651 + state.corkedRequestsFree.next = corkReq;
652 +}
653 +
654 +Object.defineProperty(Writable.prototype, 'destroyed', {
655 + // making it explicit this property is not enumerable
656 + // because otherwise some prototype manipulation in
657 + // userland will fail
658 + enumerable: false,
659 + get: function get() {
660 + if (this._writableState === undefined) {
661 + return false;
662 + }
663 +
664 + return this._writableState.destroyed;
665 + },
666 + set: function set(value) {
667 + // we ignore the value if the stream
668 + // has not been initialized yet
669 + if (!this._writableState) {
670 + return;
671 + } // backward compatibility, the user is explicitly
672 + // managing destroyed
673 +
674 +
675 + this._writableState.destroyed = value;
676 + }
677 +});
678 +Writable.prototype.destroy = destroyImpl.destroy;
679 +Writable.prototype._undestroy = destroyImpl.undestroy;
680 +
681 +Writable.prototype._destroy = function (err, cb) {
682 + cb(err);
683 +};
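
As the header comment of this file says, a Writable only needs an asynchronous `_write(chunk, encoding, cb)`; buffering, `highWaterMark` back-pressure and `'drain'` emission are handled by the machinery above. A small sketch under that assumption, with `setTimeout` standing in for real I/O:

```js
var Writable = require('readable-stream').Writable;

var sink = new Writable({
  write: function (chunk, encoding, cb) {
    // calling cb() signals that this chunk was consumed, so the next
    // buffered request (if any) can be processed
    setTimeout(function () {
      console.log('wrote %d bytes', chunk.length);
      cb();
    }, 10);
  }
});

sink.write('hello ');
sink.end('world', function () {
  console.log("'finish' fired: all writes flushed");
});
```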
node_modules/readable-stream/lib/internal/streams/async_iterator.js
@@ -1,0 +1,204 @@
1 +'use strict';
2 +
3 +var _Object$setPrototypeO;
4 +
5 +function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
6 +
7 +var finished = require('./end-of-stream');
8 +
9 +var kLastResolve = Symbol('lastResolve');
10 +var kLastReject = Symbol('lastReject');
11 +var kError = Symbol('error');
12 +var kEnded = Symbol('ended');
13 +var kLastPromise = Symbol('lastPromise');
14 +var kHandlePromise = Symbol('handlePromise');
15 +var kStream = Symbol('stream');
16 +
17 +function createIterResult(value, done) {
18 + return {
19 + value: value,
20 + done: done
21 + };
22 +}
23 +
24 +function readAndResolve(iter) {
25 + var resolve = iter[kLastResolve];
26 +
27 + if (resolve !== null) {
28 +    var data = iter[kStream].read(); // if data is null we defer resolving,
29 +    // because an 'end' or 'error' event
30 +    // may be about to arrive
31 +
32 + if (data !== null) {
33 + iter[kLastPromise] = null;
34 + iter[kLastResolve] = null;
35 + iter[kLastReject] = null;
36 + resolve(createIterResult(data, false));
37 + }
38 + }
39 +}
40 +
41 +function onReadable(iter) {
42 + // we wait for the next tick, because it might
43 + // emit an error with process.nextTick
44 + process.nextTick(readAndResolve, iter);
45 +}
46 +
47 +function wrapForNext(lastPromise, iter) {
48 + return function (resolve, reject) {
49 + lastPromise.then(function () {
50 + iter[kHandlePromise](resolve, reject);
51 + }, reject);
52 + };
53 +}
54 +
55 +var AsyncIteratorPrototype = Object.getPrototypeOf(function () {});
56 +var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = {
57 + get stream() {
58 + return this[kStream];
59 + },
60 +
61 + next: function next() {
62 + var _this = this;
63 +
64 + // if we have detected an error in the meanwhile
65 + // reject straight away
66 + var error = this[kError];
67 +
68 + if (error !== null) {
69 + return Promise.reject(error);
70 + }
71 +
72 + if (this[kEnded]) {
73 + return Promise.resolve(createIterResult(null, true));
74 + }
75 +
76 + if (this[kStream].destroyed) {
77 + // We need to defer via nextTick because if .destroy(err) is
78 + // called, the error will be emitted via nextTick, and
79 + // we cannot guarantee that there is no error lingering around
80 + // waiting to be emitted.
81 + return new Promise(function (resolve, reject) {
82 + process.nextTick(function () {
83 + if (_this[kError]) {
84 + reject(_this[kError]);
85 + } else {
86 + resolve(createIterResult(null, true));
87 + }
88 + });
89 + });
90 + } // if we have multiple next() calls
91 + // we will wait for the previous Promise to finish
92 + // this logic is optimized to support for await loops,
93 + // where next() is only called once at a time
94 +
95 +
96 + var lastPromise = this[kLastPromise];
97 + var promise;
98 +
99 + if (lastPromise) {
100 + promise = new Promise(wrapForNext(lastPromise, this));
101 + } else {
102 + // fast path needed to support multiple this.push()
103 + // without triggering the next() queue
104 + var data = this[kStream].read();
105 +
106 + if (data !== null) {
107 + return Promise.resolve(createIterResult(data, false));
108 + }
109 +
110 + promise = new Promise(this[kHandlePromise]);
111 + }
112 +
113 + this[kLastPromise] = promise;
114 + return promise;
115 + }
116 +}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () {
117 + return this;
118 +}), _defineProperty(_Object$setPrototypeO, "return", function _return() {
119 + var _this2 = this;
120 +
121 + // destroy(err, cb) is a private API
122 + // we can guarantee we have that here, because we control the
123 + // Readable class this is attached to
124 + return new Promise(function (resolve, reject) {
125 + _this2[kStream].destroy(null, function (err) {
126 + if (err) {
127 + reject(err);
128 + return;
129 + }
130 +
131 + resolve(createIterResult(null, true));
132 + });
133 + });
134 +}), _Object$setPrototypeO), AsyncIteratorPrototype);
135 +
136 +var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) {
137 + var _Object$create;
138 +
139 + var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, {
140 + value: stream,
141 + writable: true
142 + }), _defineProperty(_Object$create, kLastResolve, {
143 + value: null,
144 + writable: true
145 + }), _defineProperty(_Object$create, kLastReject, {
146 + value: null,
147 + writable: true
148 + }), _defineProperty(_Object$create, kError, {
149 + value: null,
150 + writable: true
151 + }), _defineProperty(_Object$create, kEnded, {
152 + value: stream._readableState.endEmitted,
153 + writable: true
154 + }), _defineProperty(_Object$create, kLastPromise, {
155 + value: null,
156 + writable: true
157 + }), _defineProperty(_Object$create, kHandlePromise, {
158 + value: function value(resolve, reject) {
159 + var data = iterator[kStream].read();
160 +
161 + if (data) {
162 + iterator[kLastPromise] = null;
163 + iterator[kLastResolve] = null;
164 + iterator[kLastReject] = null;
165 + resolve(createIterResult(data, false));
166 + } else {
167 + iterator[kLastResolve] = resolve;
168 + iterator[kLastReject] = reject;
169 + }
170 + },
171 + writable: true
172 + }), _Object$create));
173 + finished(stream, function (err) {
174 + if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') {
175 + var reject = iterator[kLastReject]; // reject if we are waiting for data in the Promise
176 + // returned by next() and store the error
177 +
178 + if (reject !== null) {
179 + iterator[kLastPromise] = null;
180 + iterator[kLastResolve] = null;
181 + iterator[kLastReject] = null;
182 + reject(err);
183 + }
184 +
185 + iterator[kError] = err;
186 + return;
187 + }
188 +
189 + var resolve = iterator[kLastResolve];
190 +
191 + if (resolve !== null) {
192 + iterator[kLastPromise] = null;
193 + iterator[kLastResolve] = null;
194 + iterator[kLastReject] = null;
195 + resolve(createIterResult(null, true));
196 + }
197 +
198 + iterator[kEnded] = true;
199 + });
200 + stream.on('readable', onReadable.bind(null, iterator));
201 + return iterator;
202 +};
203 +
204 +module.exports = createReadableStreamAsyncIterator;
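
This iterator is what backs `for await...of` over a Readable in readable-stream v3, assuming `Readable.prototype[Symbol.asyncIterator]` is wired to `createReadableStreamAsyncIterator` as in upstream. A sketch, assuming a Node version with async iteration support (≥ 10):

```js
var Readable = require('readable-stream').Readable;

var letters = ['a', 'b', 'c'];
var source = new Readable({
  read: function () {
    // push one chunk per _read() call; null signals end-of-stream
    this.push(letters.shift() || null);
  }
});

(async function () {
  for await (var chunk of source) {
    console.log('got', chunk.toString());
  }
  console.log('ended');
})().catch(console.error);
```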
node_modules/readable-stream/lib/internal/streams/buffer_list.js
@@ -1,0 +1,189 @@
1 +'use strict';
2 +
3 +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; var ownKeys = Object.keys(source); if (typeof Object.getOwnPropertySymbols === 'function') { ownKeys = ownKeys.concat(Object.getOwnPropertySymbols(source).filter(function (sym) { return Object.getOwnPropertyDescriptor(source, sym).enumerable; })); } ownKeys.forEach(function (key) { _defineProperty(target, key, source[key]); }); } return target; }
4 +
5 +function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
6 +
7 +var _require = require('buffer'),
8 + Buffer = _require.Buffer;
9 +
10 +var _require2 = require('util'),
11 + inspect = _require2.inspect;
12 +
13 +var custom = inspect && inspect.custom || 'inspect';
14 +
15 +function copyBuffer(src, target, offset) {
16 + Buffer.prototype.copy.call(src, target, offset);
17 +}
18 +
19 +module.exports =
20 +/*#__PURE__*/
21 +function () {
22 + function BufferList() {
23 + this.head = null;
24 + this.tail = null;
25 + this.length = 0;
26 + }
27 +
28 + var _proto = BufferList.prototype;
29 +
30 + _proto.push = function push(v) {
31 + var entry = {
32 + data: v,
33 + next: null
34 + };
35 + if (this.length > 0) this.tail.next = entry;else this.head = entry;
36 + this.tail = entry;
37 + ++this.length;
38 + };
39 +
40 + _proto.unshift = function unshift(v) {
41 + var entry = {
42 + data: v,
43 + next: this.head
44 + };
45 + if (this.length === 0) this.tail = entry;
46 + this.head = entry;
47 + ++this.length;
48 + };
49 +
50 + _proto.shift = function shift() {
51 + if (this.length === 0) return;
52 + var ret = this.head.data;
53 + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next;
54 + --this.length;
55 + return ret;
56 + };
57 +
58 + _proto.clear = function clear() {
59 + this.head = this.tail = null;
60 + this.length = 0;
61 + };
62 +
63 + _proto.join = function join(s) {
64 + if (this.length === 0) return '';
65 + var p = this.head;
66 + var ret = '' + p.data;
67 +
68 + while (p = p.next) {
69 + ret += s + p.data;
70 + }
71 +
72 + return ret;
73 + };
74 +
75 + _proto.concat = function concat(n) {
76 + if (this.length === 0) return Buffer.alloc(0);
77 + var ret = Buffer.allocUnsafe(n >>> 0);
78 + var p = this.head;
79 + var i = 0;
80 +
81 + while (p) {
82 + copyBuffer(p.data, ret, i);
83 + i += p.data.length;
84 + p = p.next;
85 + }
86 +
87 + return ret;
88 + } // Consumes a specified amount of bytes or characters from the buffered data.
89 + ;
90 +
91 + _proto.consume = function consume(n, hasStrings) {
92 + var ret;
93 +
94 + if (n < this.head.data.length) {
95 + // `slice` is the same for buffers and strings.
96 + ret = this.head.data.slice(0, n);
97 + this.head.data = this.head.data.slice(n);
98 + } else if (n === this.head.data.length) {
99 + // First chunk is a perfect match.
100 + ret = this.shift();
101 + } else {
102 + // Result spans more than one buffer.
103 + ret = hasStrings ? this._getString(n) : this._getBuffer(n);
104 + }
105 +
106 + return ret;
107 + };
108 +
109 + _proto.first = function first() {
110 + return this.head.data;
111 + } // Consumes a specified amount of characters from the buffered data.
112 + ;
113 +
114 + _proto._getString = function _getString(n) {
115 + var p = this.head;
116 + var c = 1;
117 + var ret = p.data;
118 + n -= ret.length;
119 +
120 + while (p = p.next) {
121 + var str = p.data;
122 + var nb = n > str.length ? str.length : n;
123 + if (nb === str.length) ret += str;else ret += str.slice(0, n);
124 + n -= nb;
125 +
126 + if (n === 0) {
127 + if (nb === str.length) {
128 + ++c;
129 + if (p.next) this.head = p.next;else this.head = this.tail = null;
130 + } else {
131 + this.head = p;
132 + p.data = str.slice(nb);
133 + }
134 +
135 + break;
136 + }
137 +
138 + ++c;
139 + }
140 +
141 + this.length -= c;
142 + return ret;
143 + } // Consumes a specified amount of bytes from the buffered data.
144 + ;
145 +
146 + _proto._getBuffer = function _getBuffer(n) {
147 + var ret = Buffer.allocUnsafe(n);
148 + var p = this.head;
149 + var c = 1;
150 + p.data.copy(ret);
151 + n -= p.data.length;
152 +
153 + while (p = p.next) {
154 + var buf = p.data;
155 + var nb = n > buf.length ? buf.length : n;
156 + buf.copy(ret, ret.length - n, 0, nb);
157 + n -= nb;
158 +
159 + if (n === 0) {
160 + if (nb === buf.length) {
161 + ++c;
162 + if (p.next) this.head = p.next;else this.head = this.tail = null;
163 + } else {
164 + this.head = p;
165 + p.data = buf.slice(nb);
166 + }
167 +
168 + break;
169 + }
170 +
171 + ++c;
172 + }
173 +
174 + this.length -= c;
175 + return ret;
176 + } // Make sure the linked list only shows the minimal necessary information.
177 + ;
178 +
179 + _proto[custom] = function (_, options) {
180 + return inspect(this, _objectSpread({}, options, {
181 + // Only inspect one level.
182 + depth: 0,
183 + // It should not recurse.
184 + customInspect: false
185 + }));
186 + };
187 +
188 + return BufferList;
189 +}();
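
`BufferList` is the internal structure `_stream_readable.js` keeps its pending chunks in; note that `length` counts entries, not bytes. It is not public API, but requiring it directly illustrates the methods above. The deep require path is only for demonstration:

```js
var BufferList = require('readable-stream/lib/internal/streams/buffer_list');

var list = new BufferList();
list.push(Buffer.from('hello '));
list.push(Buffer.from('world'));

console.log(list.length);                       // 2 entries buffered
console.log(list.concat(11).toString());        // 'hello world' (11 = total bytes)
console.log(list.consume(5, false).toString()); // 'hello' (taken from the head)
console.log(list.first().toString());           // ' ' (what is left of the first chunk)
```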
node_modules/readable-stream/lib/internal/streams/destroy.js
@@ -1,0 +1,85 @@
1 +'use strict'; // undocumented cb() API, needed for core, not for public API
2 +
3 +function destroy(err, cb) {
4 + var _this = this;
5 +
6 + var readableDestroyed = this._readableState && this._readableState.destroyed;
7 + var writableDestroyed = this._writableState && this._writableState.destroyed;
8 +
9 + if (readableDestroyed || writableDestroyed) {
10 + if (cb) {
11 + cb(err);
12 + } else if (err && (!this._writableState || !this._writableState.errorEmitted)) {
13 + process.nextTick(emitErrorNT, this, err);
14 + }
15 +
16 + return this;
17 + } // we set destroyed to true before firing error callbacks in order
18 + // to make it re-entrance safe in case destroy() is called within callbacks
19 +
20 +
21 + if (this._readableState) {
22 + this._readableState.destroyed = true;
23 + } // if this is a duplex stream mark the writable part as destroyed as well
24 +
25 +
26 + if (this._writableState) {
27 + this._writableState.destroyed = true;
28 + }
29 +
30 + this._destroy(err || null, function (err) {
31 + if (!cb && err) {
32 + process.nextTick(emitErrorAndCloseNT, _this, err);
33 +
34 + if (_this._writableState) {
35 + _this._writableState.errorEmitted = true;
36 + }
37 + } else if (cb) {
38 + process.nextTick(emitCloseNT, _this);
39 + cb(err);
40 + } else {
41 + process.nextTick(emitCloseNT, _this);
42 + }
43 + });
44 +
45 + return this;
46 +}
47 +
48 +function emitErrorAndCloseNT(self, err) {
49 + emitErrorNT(self, err);
50 + emitCloseNT(self);
51 +}
52 +
53 +function emitCloseNT(self) {
54 + if (self._writableState && !self._writableState.emitClose) return;
55 + if (self._readableState && !self._readableState.emitClose) return;
56 + self.emit('close');
57 +}
58 +
59 +function undestroy() {
60 + if (this._readableState) {
61 + this._readableState.destroyed = false;
62 + this._readableState.reading = false;
63 + this._readableState.ended = false;
64 + this._readableState.endEmitted = false;
65 + }
66 +
67 + if (this._writableState) {
68 + this._writableState.destroyed = false;
69 + this._writableState.ended = false;
70 + this._writableState.ending = false;
71 + this._writableState.finalCalled = false;
72 + this._writableState.prefinished = false;
73 + this._writableState.finished = false;
74 + this._writableState.errorEmitted = false;
75 + }
76 +}
77 +
78 +function emitErrorNT(self, err) {
79 + self.emit('error', err);
80 +}
81 +
82 +module.exports = {
83 + destroy: destroy,
84 + undestroy: undestroy
85 +};
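
These helpers implement `stream.destroy()` and the internal `undestroy()` reset. The ordering they guarantee — `_destroy(err, cb)` runs first, then `'error'` (if any) followed by `'close'` on a later tick — can be observed directly; a small sketch:

```js
var Readable = require('readable-stream').Readable;

var r = new Readable({ read: function () {} });
r.on('error', function (err) { console.log('error:', err.message); });
r.on('close', function () { console.log('close'); });

r.destroy(new Error('boom')); // logs 'error: boom' and then 'close'
```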
node_modules/readable-stream/lib/internal/streams/end-of-stream.js
@@ -1,0 +1,91 @@
1 +// Ported from https://github.com/mafintosh/end-of-stream with
2 +// permission from the author, Mathias Buus (@mafintosh).
3 +'use strict';
4 +
5 +var ERR_STREAM_PREMATURE_CLOSE = require('../../../errors').codes.ERR_STREAM_PREMATURE_CLOSE;
6 +
7 +function noop() {}
8 +
9 +function isRequest(stream) {
10 + return stream.setHeader && typeof stream.abort === 'function';
11 +}
12 +
13 +function once(callback) {
14 + var called = false;
15 + return function (err) {
16 + if (called) return;
17 + called = true;
18 + callback.call(this, err);
19 + };
20 +}
21 +
22 +function eos(stream, opts, callback) {
23 + if (typeof opts === 'function') return eos(stream, null, opts);
24 + if (!opts) opts = {};
25 + callback = once(callback || noop);
26 + var ws = stream._writableState;
27 + var rs = stream._readableState;
28 + var readable = opts.readable || opts.readable !== false && stream.readable;
29 + var writable = opts.writable || opts.writable !== false && stream.writable;
30 +
31 + var onlegacyfinish = function onlegacyfinish() {
32 + if (!stream.writable) onfinish();
33 + };
34 +
35 + var onfinish = function onfinish() {
36 + writable = false;
37 + if (!readable) callback.call(stream);
38 + };
39 +
40 + var onend = function onend() {
41 + readable = false;
42 + if (!writable) callback.call(stream);
43 + };
44 +
45 + var onerror = function onerror(err) {
46 + callback.call(stream, err);
47 + };
48 +
49 + var onclose = function onclose() {
50 + if (readable && !(rs && rs.ended)) {
51 + return callback.call(stream, new ERR_STREAM_PREMATURE_CLOSE());
52 + }
53 +
54 + if (writable && !(ws && ws.ended)) {
55 + return callback.call(stream, new ERR_STREAM_PREMATURE_CLOSE());
56 + }
57 + };
58 +
59 + var onrequest = function onrequest() {
60 + stream.req.on('finish', onfinish);
61 + };
62 +
63 + if (isRequest(stream)) {
64 + stream.on('complete', onfinish);
65 + stream.on('abort', onclose);
66 + if (stream.req) onrequest();else stream.on('request', onrequest);
67 + } else if (writable && !ws) {
68 + // legacy streams
69 + stream.on('end', onlegacyfinish);
70 + stream.on('close', onlegacyfinish);
71 + }
72 +
73 + stream.on('end', onend);
74 + stream.on('finish', onfinish);
75 + if (opts.error !== false) stream.on('error', onerror);
76 + stream.on('close', onclose);
77 + return function () {
78 + stream.removeListener('complete', onfinish);
79 + stream.removeListener('abort', onclose);
80 + stream.removeListener('request', onrequest);
81 + if (stream.req) stream.req.removeListener('finish', onfinish);
82 + stream.removeListener('end', onlegacyfinish);
83 + stream.removeListener('close', onlegacyfinish);
84 + stream.removeListener('finish', onfinish);
85 + stream.removeListener('end', onend);
86 + stream.removeListener('error', onerror);
87 + stream.removeListener('close', onclose);
88 + };
89 +}
90 +
91 +module.exports = eos;
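
The package exposes this helper as `require('readable-stream').finished` (see `readable.js` further down). It is a safer way to detect that a stream is done than listening for `'end'` or `'finish'` by hand, since it also covers errors and premature closes. A usage sketch:

```js
var fs = require('fs');
var finished = require('readable-stream').finished; // the eos() helper above

var rs = fs.createReadStream(__filename);

finished(rs, function (err) {
  if (err) return console.error('stream failed:', err);
  console.log('stream is done reading');
});

rs.resume(); // drain the stream so it can reach 'end'
```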
node_modules/readable-stream/lib/internal/streams/pipeline.js
@@ -1,0 +1,97 @@
1 +// Ported from https://github.com/mafintosh/pump with
2 +// permission from the author, Mathias Buus (@mafintosh).
3 +'use strict';
4 +
5 +var eos;
6 +
7 +function once(callback) {
8 + var called = false;
9 + return function () {
10 + if (called) return;
11 + called = true;
12 + callback.apply(void 0, arguments);
13 + };
14 +}
15 +
16 +var _require$codes = require('../../../errors').codes,
17 + ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS,
18 + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED;
19 +
20 +function noop(err) {
21 + // Rethrow the error if it exists to avoid swallowing it
22 + if (err) throw err;
23 +}
24 +
25 +function isRequest(stream) {
26 + return stream.setHeader && typeof stream.abort === 'function';
27 +}
28 +
29 +function destroyer(stream, reading, writing, callback) {
30 + callback = once(callback);
31 + var closed = false;
32 + stream.on('close', function () {
33 + closed = true;
34 + });
35 + if (eos === undefined) eos = require('./end-of-stream');
36 + eos(stream, {
37 + readable: reading,
38 + writable: writing
39 + }, function (err) {
40 + if (err) return callback(err);
41 + closed = true;
42 + callback();
43 + });
44 + var destroyed = false;
45 + return function (err) {
46 + if (closed) return;
47 + if (destroyed) return;
48 +    destroyed = true; // request.destroy just does .end - .abort is what we want
49 +
50 + if (isRequest(stream)) return stream.abort();
51 + if (typeof stream.destroy === 'function') return stream.destroy();
52 + callback(err || new ERR_STREAM_DESTROYED('pipe'));
53 + };
54 +}
55 +
56 +function call(fn) {
57 + fn();
58 +}
59 +
60 +function pipe(from, to) {
61 + return from.pipe(to);
62 +}
63 +
64 +function popCallback(streams) {
65 + if (!streams.length) return noop;
66 + if (typeof streams[streams.length - 1] !== 'function') return noop;
67 + return streams.pop();
68 +}
69 +
70 +function pipeline() {
71 + for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) {
72 + streams[_key] = arguments[_key];
73 + }
74 +
75 + var callback = popCallback(streams);
76 + if (Array.isArray(streams[0])) streams = streams[0];
77 +
78 + if (streams.length < 2) {
79 + throw new ERR_MISSING_ARGS('streams');
80 + }
81 +
82 + var error;
83 + var destroys = streams.map(function (stream, i) {
84 + var reading = i < streams.length - 1;
85 + var writing = i > 0;
86 + return destroyer(stream, reading, writing, function (err) {
87 + if (!error) error = err;
88 + if (err) destroys.forEach(call);
89 + if (reading) return;
90 + destroys.forEach(call);
91 + callback(error);
92 + });
93 + });
94 + return streams.reduce(pipe);
95 +}
96 +
97 +module.exports = pipeline;
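
`pipeline()` is exported as `require('readable-stream').pipeline` and pipes a list of streams together, destroying all of them if any one of them fails. A sketch that gzips this file; the output path is only illustrative:

```js
var fs = require('fs');
var zlib = require('zlib');
var pipeline = require('readable-stream').pipeline;

pipeline(
  fs.createReadStream(__filename),
  zlib.createGzip(),
  fs.createWriteStream('example.txt.gz'), // illustrative output path
  function (err) {
    if (err) return console.error('pipeline failed:', err);
    console.log('pipeline succeeded');
  }
);
```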
node_modules/readable-stream/lib/internal/streams/state.js
@@ -1,0 +1,27 @@
1 +'use strict';
2 +
3 +var ERR_INVALID_OPT_VALUE = require('../../../errors').codes.ERR_INVALID_OPT_VALUE;
4 +
5 +function highWaterMarkFrom(options, isDuplex, duplexKey) {
6 + return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null;
7 +}
8 +
9 +function getHighWaterMark(state, options, duplexKey, isDuplex) {
10 + var hwm = highWaterMarkFrom(options, isDuplex, duplexKey);
11 +
12 + if (hwm != null) {
13 + if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) {
14 + var name = isDuplex ? duplexKey : 'highWaterMark';
15 + throw new ERR_INVALID_OPT_VALUE(name, hwm);
16 + }
17 +
18 + return Math.floor(hwm);
19 + } // Default value
20 +
21 +
22 + return state.objectMode ? 16 : 16 * 1024;
23 +}
24 +
25 +module.exports = {
26 + getHighWaterMark: getHighWaterMark
27 +};
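
`getHighWaterMark()` turns the `highWaterMark` / `writableHighWaterMark` / `readableHighWaterMark` options into the number used for back-pressure, falling back to 16 objects or 16 KiB. The effect is visible through the public getters; for example:

```js
var Writable = require('readable-stream').Writable;

function nopWrite(chunk, encoding, cb) { cb(); }

console.log(new Writable({ write: nopWrite }).writableHighWaterMark);
// 16384 - the 16 KiB default for byte streams

console.log(new Writable({ objectMode: true, write: nopWrite }).writableHighWaterMark);
// 16 - the default for object mode

console.log(new Writable({ highWaterMark: 1024, write: nopWrite }).writableHighWaterMark);
// 1024 - an explicit option wins; a negative or non-integer value throws
```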
node_modules/readable-stream/lib/internal/streams/stream-browser.js
@@ -1,0 +1,1 @@
1 +module.exports = require('events').EventEmitter;
node_modules/readable-stream/lib/internal/streams/stream.js
@@ -1,0 +1,1 @@
1 +module.exports = require('stream');
node_modules/readable-stream/package.json
@@ -1,0 +1,99 @@
1 +{
2 + "_args": [
3 + [
4 + "readable-stream@3.2.0",
5 + "/home/cel/src/ssb-npm-registry"
6 + ]
7 + ],
8 + "_from": "readable-stream@3.2.0",
9 + "_id": "readable-stream@3.2.0",
10 + "_inBundle": false,
11 + "_integrity": "sha256-y86rlmkakY917Xb8hBcKVlSwBxoWtqVOkOWecWP5cuM=",
12 + "_location": "/readable-stream",
13 + "_phantomChildren": {},
14 + "_requested": {
15 + "type": "version",
16 + "registry": true,
17 + "raw": "readable-stream@3.2.0",
18 + "name": "readable-stream",
19 + "escapedName": "readable-stream",
20 + "rawSpec": "3.2.0",
21 + "saveSpec": null,
22 + "fetchSpec": "3.2.0"
23 + },
24 + "_requiredBy": [
25 + "/bl",
26 + "/tar-stream"
27 + ],
28 + "_resolved": "http://localhost:8989/blobs/get/&y86rlmkakY917Xb8hBcKVlSwBxoWtqVOkOWecWP5cuM=.sha256",
29 + "_spec": "3.2.0",
30 + "_where": "/home/cel/src/ssb-npm-registry",
31 + "browser": {
32 + "util": false,
33 + "worker_threads": false,
34 + "./errors": "./errors-browser.js",
35 + "./readable.js": "./readable-browser.js",
36 + "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js"
37 + },
38 + "bugs": {
39 + "url": "https://github.com/nodejs/readable-stream/issues"
40 + },
41 + "dependencies": {
42 + "inherits": "^2.0.3",
43 + "string_decoder": "^1.1.1",
44 + "util-deprecate": "^1.0.1"
45 + },
46 + "description": "Streams3, a user-land copy of the stream library from Node.js",
47 + "devDependencies": {
48 + "@babel/cli": "^7.2.0",
49 + "@babel/core": "^7.2.0",
50 + "@babel/polyfill": "^7.0.0",
51 + "@babel/preset-env": "^7.2.0",
52 + "airtap": "0.0.9",
53 + "assert": "^1.4.0",
54 + "bl": "^2.0.0",
55 + "deep-strict-equal": "^0.2.0",
56 + "glob": "^7.1.2",
57 + "gunzip-maybe": "^1.4.1",
58 + "hyperquest": "^2.1.3",
59 + "lolex": "^2.6.0",
60 + "nyc": "^11.0.0",
61 + "pump": "^3.0.0",
62 + "rimraf": "^2.6.2",
63 + "tap": "^11.0.0",
64 + "tape": "^4.9.0",
65 + "tar-fs": "^1.16.2",
66 + "util-promisify": "^2.1.0"
67 + },
68 + "engines": {
69 + "node": ">= 6"
70 + },
71 + "homepage": "https://github.com/nodejs/readable-stream#readme",
72 + "keywords": [
73 + "readable",
74 + "stream",
75 + "pipe"
76 + ],
77 + "license": "MIT",
78 + "main": "readable.js",
79 + "name": "readable-stream",
80 + "nyc": {
81 + "include": [
82 + "lib/**.js"
83 + ]
84 + },
85 + "repository": {
86 + "type": "git",
87 + "url": "git://github.com/nodejs/readable-stream.git"
88 + },
89 + "scripts": {
90 + "ci": "TAP=1 tap test/parallel/*.js test/ours/*.js | tee test.tap",
91 + "cover": "nyc npm test",
92 + "report": "nyc report --reporter=lcov",
93 + "test": "tap -j 4 test/parallel/*.js test/ours/*.js",
94 + "test-browser-local": "airtap --open --local -- test/browser.js",
95 + "test-browsers": "airtap --sauce-connect --loopback airtap.local -- test/browser.js",
96 + "update-browser-errors": "babel -o errors-browser.js errors.js"
97 + },
98 + "version": "3.2.0"
99 +}
node_modules/readable-stream/readable-browser.js
@@ -1,0 +1,7 @@
1 +exports = module.exports = require('./lib/_stream_readable.js');
2 +exports.Stream = exports;
3 +exports.Readable = exports;
4 +exports.Writable = require('./lib/_stream_writable.js');
5 +exports.Duplex = require('./lib/_stream_duplex.js');
6 +exports.Transform = require('./lib/_stream_transform.js');
7 +exports.PassThrough = require('./lib/_stream_passthrough.js');
node_modules/readable-stream/readable.js
@@ -1,0 +1,16 @@
1 +var Stream = require('stream');
2 +if (process.env.READABLE_STREAM === 'disable' && Stream) {
3 + module.exports = Stream.Readable;
4 + Object.assign(module.exports, Stream);
5 + module.exports.Stream = Stream;
6 +} else {
7 + exports = module.exports = require('./lib/_stream_readable.js');
8 + exports.Stream = Stream || exports;
9 + exports.Readable = exports;
10 + exports.Writable = require('./lib/_stream_writable.js');
11 + exports.Duplex = require('./lib/_stream_duplex.js');
12 + exports.Transform = require('./lib/_stream_transform.js');
13 + exports.PassThrough = require('./lib/_stream_passthrough.js');
14 + exports.finished = require('./lib/internal/streams/end-of-stream.js');
15 + exports.pipeline = require('./lib/internal/streams/pipeline.js');
16 +}
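
With this entry point, consumers normally get the user-land implementation bundled here, while setting the `READABLE_STREAM=disable` environment variable swaps in Node's built-in `stream` module instead. For example:

```js
var stream = require('readable-stream');

console.log(typeof stream.Readable);  // 'function'
console.log(typeof stream.pipeline);  // 'function' (lib/internal/streams/pipeline.js)
console.log(typeof stream.finished);  // 'function' (lib/internal/streams/end-of-stream.js)

// To opt out of the user-land copy and use Node core instead:
//   READABLE_STREAM=disable node app.js
```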
node_modules/safe-buffer/LICENSE
@@ -1,0 +1,21 @@
1 +The MIT License (MIT)
2 +
3 +Copyright (c) Feross Aboukhadijeh
4 +
5 +Permission is hereby granted, free of charge, to any person obtaining a copy
6 +of this software and associated documentation files (the "Software"), to deal
7 +in the Software without restriction, including without limitation the rights
8 +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 +copies of the Software, and to permit persons to whom the Software is
10 +furnished to do so, subject to the following conditions:
11 +
12 +The above copyright notice and this permission notice shall be included in
13 +all copies or substantial portions of the Software.
14 +
15 +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
21 +THE SOFTWARE.
node_modules/safe-buffer/README.md
@@ -1,0 +1,584 @@
1 +# safe-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url]
2 +
3 +[travis-image]: https://img.shields.io/travis/feross/safe-buffer/master.svg
4 +[travis-url]: https://travis-ci.org/feross/safe-buffer
5 +[npm-image]: https://img.shields.io/npm/v/safe-buffer.svg
6 +[npm-url]: https://npmjs.org/package/safe-buffer
7 +[downloads-image]: https://img.shields.io/npm/dm/safe-buffer.svg
8 +[downloads-url]: https://npmjs.org/package/safe-buffer
9 +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg
10 +[standard-url]: https://standardjs.com
11 +
12 +#### Safer Node.js Buffer API
13 +
14 +**Use the new Node.js Buffer APIs (`Buffer.from`, `Buffer.alloc`,
15 +`Buffer.allocUnsafe`, `Buffer.allocUnsafeSlow`) in all versions of Node.js.**
16 +
17 +**Uses the built-in implementation when available.**
18 +
19 +## install
20 +
21 +```
22 +npm install safe-buffer
23 +```
24 +
25 +## usage
26 +
27 +The goal of this package is to provide a safe replacement for the node.js `Buffer`.
28 +
29 +It's a drop-in replacement for `Buffer`. You can use it by adding one `require` line to
30 +the top of your node.js modules:
31 +
32 +```js
33 +var Buffer = require('safe-buffer').Buffer
34 +
35 +// Existing buffer code will continue to work without issues:
36 +
37 +new Buffer('hey', 'utf8')
38 +new Buffer([1, 2, 3], 'utf8')
39 +new Buffer(obj)
40 +new Buffer(16) // create an uninitialized buffer (potentially unsafe)
41 +
42 +// But you can use these new explicit APIs to make clear what you want:
43 +
44 +Buffer.from('hey', 'utf8') // convert from many types to a Buffer
45 +Buffer.alloc(16) // create a zero-filled buffer (safe)
46 +Buffer.allocUnsafe(16) // create an uninitialized buffer (potentially unsafe)
47 +```
48 +
49 +## api
50 +
51 +### Class Method: Buffer.from(array)
52 +<!-- YAML
53 +added: v3.0.0
54 +-->
55 +
56 +* `array` {Array}
57 +
58 +Allocates a new `Buffer` using an `array` of octets.
59 +
60 +```js
61 +const buf = Buffer.from([0x62,0x75,0x66,0x66,0x65,0x72]);
62 + // creates a new Buffer containing ASCII bytes
63 + // ['b','u','f','f','e','r']
64 +```
65 +
66 +A `TypeError` will be thrown if `array` is not an `Array`.
67 +
68 +### Class Method: Buffer.from(arrayBuffer[, byteOffset[, length]])
69 +<!-- YAML
70 +added: v5.10.0
71 +-->
72 +
73 +* `arrayBuffer` {ArrayBuffer} The `.buffer` property of a `TypedArray` or
74 + a `new ArrayBuffer()`
75 +* `byteOffset` {Number} Default: `0`
76 +* `length` {Number} Default: `arrayBuffer.length - byteOffset`
77 +
78 +When passed a reference to the `.buffer` property of a `TypedArray` instance,
79 +the newly created `Buffer` will share the same allocated memory as the
80 +TypedArray.
81 +
82 +```js
83 +const arr = new Uint16Array(2);
84 +arr[0] = 5000;
85 +arr[1] = 4000;
86 +
87 +const buf = Buffer.from(arr.buffer); // shares the memory with arr;
88 +
89 +console.log(buf);
90 + // Prints: <Buffer 88 13 a0 0f>
91 +
92 +// changing the TypedArray changes the Buffer also
93 +arr[1] = 6000;
94 +
95 +console.log(buf);
96 + // Prints: <Buffer 88 13 70 17>
97 +```
98 +
99 +The optional `byteOffset` and `length` arguments specify a memory range within
100 +the `arrayBuffer` that will be shared by the `Buffer`.
101 +
102 +```js
103 +const ab = new ArrayBuffer(10);
104 +const buf = Buffer.from(ab, 0, 2);
105 +console.log(buf.length);
106 + // Prints: 2
107 +```
108 +
109 +A `TypeError` will be thrown if `arrayBuffer` is not an `ArrayBuffer`.
110 +
111 +### Class Method: Buffer.from(buffer)
112 +<!-- YAML
113 +added: v3.0.0
114 +-->
115 +
116 +* `buffer` {Buffer}
117 +
118 +Copies the passed `buffer` data onto a new `Buffer` instance.
119 +
120 +```js
121 +const buf1 = Buffer.from('buffer');
122 +const buf2 = Buffer.from(buf1);
123 +
124 +buf1[0] = 0x61;
125 +console.log(buf1.toString());
126 + // 'auffer'
127 +console.log(buf2.toString());
128 + // 'buffer' (copy is not changed)
129 +```
130 +
131 +A `TypeError` will be thrown if `buffer` is not a `Buffer`.
132 +
133 +### Class Method: Buffer.from(str[, encoding])
134 +<!-- YAML
135 +added: v5.10.0
136 +-->
137 +
138 +* `str` {String} String to encode.
139 +* `encoding` {String} Encoding to use, Default: `'utf8'`
140 +
141 +Creates a new `Buffer` containing the given JavaScript string `str`. If
142 +provided, the `encoding` parameter identifies the character encoding.
143 +If not provided, `encoding` defaults to `'utf8'`.
144 +
145 +```js
146 +const buf1 = Buffer.from('this is a tést');
147 +console.log(buf1.toString());
148 + // prints: this is a tést
149 +console.log(buf1.toString('ascii'));
150 + // prints: this is a tC)st
151 +
152 +const buf2 = Buffer.from('7468697320697320612074c3a97374', 'hex');
153 +console.log(buf2.toString());
154 + // prints: this is a tést
155 +```
156 +
157 +A `TypeError` will be thrown if `str` is not a string.
158 +
159 +### Class Method: Buffer.alloc(size[, fill[, encoding]])
160 +<!-- YAML
161 +added: v5.10.0
162 +-->
163 +
164 +* `size` {Number}
165 +* `fill` {Value} Default: `undefined`
166 +* `encoding` {String} Default: `utf8`
167 +
168 +Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the
169 +`Buffer` will be *zero-filled*.
170 +
171 +```js
172 +const buf = Buffer.alloc(5);
173 +console.log(buf);
174 + // <Buffer 00 00 00 00 00>
175 +```
176 +
177 +The `size` must be less than or equal to the value of
178 +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is
179 +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will
180 +be created if a `size` less than or equal to 0 is specified.
181 +
182 +If `fill` is specified, the allocated `Buffer` will be initialized by calling
183 +`buf.fill(fill)`. See [`buf.fill()`][] for more information.
184 +
185 +```js
186 +const buf = Buffer.alloc(5, 'a');
187 +console.log(buf);
188 + // <Buffer 61 61 61 61 61>
189 +```
190 +
191 +If both `fill` and `encoding` are specified, the allocated `Buffer` will be
192 +initialized by calling `buf.fill(fill, encoding)`. For example:
193 +
194 +```js
195 +const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64');
196 +console.log(buf);
197 + // <Buffer 68 65 6c 6c 6f 20 77 6f 72 6c 64>
198 +```
199 +
200 +Calling `Buffer.alloc(size)` can be significantly slower than the alternative
201 +`Buffer.allocUnsafe(size)` but ensures that the newly created `Buffer` instance
202 +contents will *never contain sensitive data*.
203 +
204 +A `TypeError` will be thrown if `size` is not a number.
205 +
206 +### Class Method: Buffer.allocUnsafe(size)
207 +<!-- YAML
208 +added: v5.10.0
209 +-->
210 +
211 +* `size` {Number}
212 +
213 +Allocates a new *non-zero-filled* `Buffer` of `size` bytes. The `size` must
214 +be less than or equal to the value of `require('buffer').kMaxLength` (on 64-bit
215 +architectures, `kMaxLength` is `(2^31)-1`). Otherwise, a [`RangeError`][] is
216 +thrown. A zero-length Buffer will be created if a `size` less than or equal to
217 +0 is specified.
218 +
219 +The underlying memory for `Buffer` instances created in this way is *not
220 +initialized*. The contents of the newly created `Buffer` are unknown and
221 +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such
222 +`Buffer` instances to zeroes.
223 +
224 +```js
225 +const buf = Buffer.allocUnsafe(5);
226 +console.log(buf);
227 + // <Buffer 78 e0 82 02 01>
228 + // (octets will be different, every time)
229 +buf.fill(0);
230 +console.log(buf);
231 + // <Buffer 00 00 00 00 00>
232 +```
233 +
234 +A `TypeError` will be thrown if `size` is not a number.
235 +
236 +Note that the `Buffer` module pre-allocates an internal `Buffer` instance of
237 +size `Buffer.poolSize` that is used as a pool for the fast allocation of new
238 +`Buffer` instances created using `Buffer.allocUnsafe(size)` (and the deprecated
239 +`new Buffer(size)` constructor) only when `size` is less than or equal to
240 +`Buffer.poolSize >> 1` (floor of `Buffer.poolSize` divided by two). The default
241 +value of `Buffer.poolSize` is `8192` but can be modified.
242 +
243 +Use of this pre-allocated internal memory pool is a key difference between
244 +calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`.
245 +Specifically, `Buffer.alloc(size, fill)` will *never* use the internal Buffer
246 +pool, while `Buffer.allocUnsafe(size).fill(fill)` *will* use the internal
247 +Buffer pool if `size` is less than or equal to half `Buffer.poolSize`. The
248 +difference is subtle but can be important when an application requires the
249 +additional performance that `Buffer.allocUnsafe(size)` provides.
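
To make the distinction concrete, here is a small illustrative sketch (not part of the upstream docs) contrasting the two calls; both end up with the same contents, but only the second may draw from the internal pool:

```js
const Buffer = require('safe-buffer').Buffer

// Never uses the internal pool; memory is zero-filled, then filled with 'a'.
const a = Buffer.alloc(256, 'a')

// May be sliced from the pre-allocated pool (256 <= Buffer.poolSize >> 1),
// then explicitly overwritten with 'a'.
const b = Buffer.allocUnsafe(256).fill('a')

console.log(a.equals(b)) // true -- same contents, different allocation strategy
```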
250 +
251 +### Class Method: Buffer.allocUnsafeSlow(size)
252 +<!-- YAML
253 +added: v5.10.0
254 +-->
255 +
256 +* `size` {Number}
257 +
258 +Allocates a new *non-zero-filled* and non-pooled `Buffer` of `size` bytes. The
259 +`size` must be less than or equal to the value of
260 +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is
261 +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will
262 +be created if a `size` less than or equal to 0 is specified.
263 +
264 +The underlying memory for `Buffer` instances created in this way is *not
265 +initialized*. The contents of the newly created `Buffer` are unknown and
266 +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such
267 +`Buffer` instances to zeroes.
268 +
269 +When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances,
270 +allocations under 4KB are, by default, sliced from a single pre-allocated
271 +`Buffer`. This allows applications to avoid the garbage collection overhead of
272 +creating many individually allocated Buffers. This approach improves both
273 +performance and memory usage by eliminating the need to track and clean up as
274 +many `Persistent` objects.
275 +
276 +However, in the case where a developer may need to retain a small chunk of
277 +memory from a pool for an indeterminate amount of time, it may be appropriate
278 +to create an un-pooled Buffer instance using `Buffer.allocUnsafeSlow()` then
279 +copy out the relevant bits.
280 +
281 +```js
282 +// need to keep around a few small chunks of memory
283 +const store = [];
284 +
285 +socket.on('readable', () => {
286 + const data = socket.read();
287 + // allocate for retained data
288 + const sb = Buffer.allocUnsafeSlow(10);
289 + // copy the data into the new allocation
290 + data.copy(sb, 0, 0, 10);
291 + store.push(sb);
292 +});
293 +```
294 +
295 +`Buffer.allocUnsafeSlow()` should be used only as a last resort *after*
296 +a developer has observed undue memory retention in their applications.
297 +
298 +A `TypeError` will be thrown if `size` is not a number.
299 +
300 +### All the Rest
301 +
302 +The rest of the `Buffer` API is exactly the same as in node.js.
303 +[See the docs](https://nodejs.org/api/buffer.html).
304 +
305 +
306 +## Related links
307 +
308 +- [Node.js issue: Buffer(number) is unsafe](https://github.com/nodejs/node/issues/4660)
309 +- [Node.js Enhancement Proposal: Buffer.from/Buffer.alloc/Buffer.zalloc/Buffer() soft-deprecate](https://github.com/nodejs/node-eps/pull/4)
310 +
311 +## Why is `Buffer` unsafe?
312 +
313 +Today, the node.js `Buffer` constructor is overloaded to handle many different argument
314 +types like `String`, `Array`, `Object`, `TypedArrayView` (`Uint8Array`, etc.),
315 +`ArrayBuffer`, and also `Number`.
316 +
317 +The API is optimized for convenience: you can throw any type at it, and it will try to do
318 +what you want.
319 +
320 +Because the Buffer constructor is so powerful, you often see code like this:
321 +
322 +```js
323 +// Convert UTF-8 strings to hex
324 +function toHex (str) {
325 + return new Buffer(str).toString('hex')
326 +}
327 +```
328 +
329 +***But what happens if `toHex` is called with a `Number` argument?***
330 +
331 +### Remote Memory Disclosure
332 +
333 +If an attacker can make your program call the `Buffer` constructor with a `Number`
334 +argument, then they can make it allocate uninitialized memory from the node.js process.
335 +This could potentially disclose TLS private keys, user data, or database passwords.
336 +
337 +When the `Buffer` constructor is passed a `Number` argument, it returns an
338 +**UNINITIALIZED** block of memory of the specified `size`. When you create a `Buffer` like
339 +this, you **MUST** overwrite the contents before returning it to the user.
340 +
341 +From the [node.js docs](https://nodejs.org/api/buffer.html#buffer_new_buffer_size):
342 +
343 +> `new Buffer(size)`
344 +>
345 +> - `size` Number
346 +>
347 +> The underlying memory for `Buffer` instances created in this way is not initialized.
348 +> **The contents of a newly created `Buffer` are unknown and could contain sensitive
349 +> data.** Use `buf.fill(0)` to initialize a Buffer to zeroes.
350 +
351 +(Emphasis our own.)
352 +
353 +When the programmer intends to create an uninitialized `Buffer`, you often see code
354 +like this:
355 +
356 +```js
357 +var buf = new Buffer(16)
358 +
359 +// Immediately overwrite the uninitialized buffer with data from another buffer
360 +for (var i = 0; i < buf.length; i++) {
361 + buf[i] = otherBuf[i]
362 +}
363 +```
364 +
365 +
366 +### Would this ever be a problem in real code?
367 +
368 +Yes. It's surprisingly common to forget to check the type of your variables in a
369 +dynamically-typed language like JavaScript.
370 +
371 +Usually, the consequence of assuming the wrong type is that your program crashes with an
372 +uncaught exception. But the failure mode for forgetting to check the type of arguments to
373 +the `Buffer` constructor is more catastrophic.
374 +
375 +Here's an example of a vulnerable service that takes a JSON payload and converts it to
376 +hex:
377 +
378 +```js
379 +// Take a JSON payload {str: "some string"} and convert it to hex
380 +var server = http.createServer(function (req, res) {
381 + var data = ''
382 + req.setEncoding('utf8')
383 + req.on('data', function (chunk) {
384 + data += chunk
385 + })
386 + req.on('end', function () {
387 + var body = JSON.parse(data)
388 + res.end(new Buffer(body.str).toString('hex'))
389 + })
390 +})
391 +
392 +server.listen(8080)
393 +```
394 +
395 +In this example, an http client just has to send:
396 +
397 +```json
398 +{
399 + "str": 1000
400 +}
401 +```
402 +
403 +and it will get back 1,000 bytes of uninitialized memory from the server.
404 +
405 +This is a very serious bug. It's similar in severity to
406 +[the Heartbleed bug](http://heartbleed.com/) that allowed disclosure of OpenSSL process
407 +memory by remote attackers.
408 +
409 +
410 +### Which real-world packages were vulnerable?
411 +
412 +#### [`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht)
413 +
414 +[Mathias Buus](https://github.com/mafintosh) and I
415 +([Feross Aboukhadijeh](http://feross.org/)) found this issue in one of our own packages,
416 +[`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht). The bug would allow
417 +anyone on the internet to send a series of messages to a user of `bittorrent-dht` and get
418 +them to reveal 20 bytes at a time of uninitialized memory from the node.js process.
419 +
420 +Here's
421 +[the commit](https://github.com/feross/bittorrent-dht/commit/6c7da04025d5633699800a99ec3fbadf70ad35b8)
422 +that fixed it. We released a new fixed version, created a
423 +[Node Security Project disclosure](https://nodesecurity.io/advisories/68), and deprecated all
424 +vulnerable versions on npm so users will get a warning to upgrade to a newer version.
425 +
426 +#### [`ws`](https://www.npmjs.com/package/ws)
427 +
428 +That got us wondering if there were other vulnerable packages. Sure enough, within a short
429 +period of time, we found the same issue in [`ws`](https://www.npmjs.com/package/ws), the
430 +most popular WebSocket implementation in node.js.
431 +
432 +If certain APIs were called with `Number` parameters instead of `String` or `Buffer` as
433 +expected, then uninitialized server memory would be disclosed to the remote peer.
434 +
435 +These were the vulnerable methods:
436 +
437 +```js
438 +socket.send(number)
439 +socket.ping(number)
440 +socket.pong(number)
441 +```
442 +
443 +Here's a vulnerable socket server with some echo functionality:
444 +
445 +```js
446 +server.on('connection', function (socket) {
447 + socket.on('message', function (message) {
448 + message = JSON.parse(message)
449 + if (message.type === 'echo') {
450 + socket.send(message.data) // send back the user's message
451 + }
452 + })
453 +})
454 +```
455 +
456 +Calling `socket.send(number)` on the server will disclose server memory.
457 +
458 +Here's [the release](https://github.com/websockets/ws/releases/tag/1.0.1) where the issue
459 +was fixed, with a more detailed explanation. Props to
460 +[Arnout Kazemier](https://github.com/3rd-Eden) for the quick fix. Here's the
461 +[Node Security Project disclosure](https://nodesecurity.io/advisories/67).
462 +
463 +
464 +### What's the solution?
465 +
466 +It's important that node.js offers a fast way to get memory; otherwise, performance-critical
467 +applications would needlessly get a lot slower.
468 +
469 +But we need a better way to *signal our intent* as programmers. **When we want
470 +uninitialized memory, we should request it explicitly.**
471 +
472 +Sensitive functionality should not be packed into a developer-friendly API that loosely
473 +accepts many different types. This type of API encourages the lazy practice of passing
474 +variables in without checking the type very carefully.
475 +
476 +#### A new API: `Buffer.allocUnsafe(number)`
477 +
478 +The functionality of creating buffers with uninitialized memory should be part of another
479 +API. We propose `Buffer.allocUnsafe(number)`. This way, it's not part of an API that
480 +frequently gets user input of all sorts of different types passed into it.
481 +
482 +```js
483 +var buf = Buffer.allocUnsafe(16) // careful, uninitialized memory!
484 +
485 +// Immediately overwrite the uninitialized buffer with data from another buffer
486 +for (var i = 0; i < buf.length; i++) {
487 + buf[i] = otherBuf[i]
488 +}
489 +```
490 +
491 +
492 +### How do we fix node.js core?
493 +
494 +We sent [a PR to node.js core](https://github.com/nodejs/node/pull/4514) (merged as
495 +`semver-major`) which defends against one case:
496 +
497 +```js
498 +var str = 16
499 +new Buffer(str, 'utf8')
500 +```
501 +
502 +In this situation, it's implied that the programmer intended the first argument to be a
503 +string, since they passed an encoding as a second argument. Today, node.js will allocate
504 +uninitialized memory in the case of `new Buffer(number, encoding)`, which is probably not
505 +what the programmer intended.
506 +
507 +But this is only a partial solution, since if the programmer does `new Buffer(variable)`
508 +(without an `encoding` parameter) there's no way to know what they intended. If `variable`
509 +is sometimes a number, then uninitialized memory will sometimes be returned.
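
One way application code can protect itself in the meantime is to reject numbers before they ever reach the overloaded constructor. The `toBuffer` helper below is only an illustrative sketch, not part of the node.js patch or this package:

```js
// Illustrative only: refuse to allocate uninitialized memory from a number.
function toBuffer (variable) {
  if (typeof variable === 'number') {
    throw new TypeError('Refusing to create a Buffer from a number')
  }
  return new Buffer(variable) // string, Array, or Buffer input is safe here
}
```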
510 +
511 +### What's the real long-term fix?
512 +
513 +We could deprecate and remove `new Buffer(number)` and use `Buffer.allocUnsafe(number)` when
514 +we need uninitialized memory. But that would break 1000s of packages.
515 +
516 +~~We believe the best solution is to:~~
517 +
518 +~~1. Change `new Buffer(number)` to return safe, zeroed-out memory~~
519 +
520 +~~2. Create a new API for creating uninitialized Buffers. We propose: `Buffer.allocUnsafe(number)`~~
521 +
522 +#### Update
523 +
524 +We now support adding three new APIs:
525 +
526 +- `Buffer.from(value)` - convert from any type to a buffer
527 +- `Buffer.alloc(size)` - create a zero-filled buffer
528 +- `Buffer.allocUnsafe(size)` - create an uninitialized buffer with given size
529 +
530 +This solves the core problem that affected `ws` and `bittorrent-dht`, which is
531 +`Buffer(variable)` getting tricked into taking a number argument.
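
For example, with this package (see `index.js` below), passing a number to `Buffer.from()` now throws instead of handing back uninitialized memory:

```js
var Buffer = require('safe-buffer').Buffer

Buffer.from('1000')      // ok: a 4-byte buffer containing the string '1000'
Buffer.from(1000)        // throws TypeError: Argument must not be a number
Buffer.alloc(1000)       // ok: 1000 zero-filled bytes
Buffer.allocUnsafe(1000) // explicit opt-in to uninitialized memory
```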
532 +
533 +This way, existing code continues working and the impact on the npm ecosystem will be
534 +minimal. Over time, npm maintainers can migrate performance-critical code to use
535 +`Buffer.allocUnsafe(number)` instead of `new Buffer(number)`.
536 +
537 +
538 +### Conclusion
539 +
540 +We think there's a serious design issue with the `Buffer` API as it exists today. It
541 +promotes insecure software by putting high-risk functionality into a convenient API
542 +with friendly "developer ergonomics".
543 +
544 +This wasn't merely a theoretical exercise because we found the issue in some of the
545 +most popular npm packages.
546 +
547 +Fortunately, there's an easy fix that can be applied today. Use `safe-buffer` in place of
548 +`buffer`.
549 +
550 +```js
551 +var Buffer = require('safe-buffer').Buffer
552 +```
553 +
554 +Eventually, we hope that node.js core can switch to this new, safer behavior. We believe
555 +the impact on the ecosystem would be minimal since it's not a breaking change.
556 +Well-maintained, popular packages would be updated to use `Buffer.alloc` quickly, while
557 +older, insecure packages would magically become safe from this attack vector.
558 +
559 +
560 +## links
561 +
562 +- [Node.js PR: buffer: throw if both length and enc are passed](https://github.com/nodejs/node/pull/4514)
563 +- [Node Security Project disclosure for `ws`](https://nodesecurity.io/advisories/67)
564 +- [Node Security Project disclosure for `bittorrent-dht`](https://nodesecurity.io/advisories/68)
565 +
566 +
567 +## credit
568 +
569 +The original issues in `bittorrent-dht`
570 +([disclosure](https://nodesecurity.io/advisories/68)) and
571 +`ws` ([disclosure](https://nodesecurity.io/advisories/67)) were discovered by
572 +[Mathias Buus](https://github.com/mafintosh) and
573 +[Feross Aboukhadijeh](http://feross.org/).
574 +
575 +Thanks to [Adam Baldwin](https://github.com/evilpacket) for helping disclose these issues
576 +and for his work running the [Node Security Project](https://nodesecurity.io/).
577 +
578 +Thanks to [John Hiesey](https://github.com/jhiesey) for proofreading this README and
579 +auditing the code.
580 +
581 +
582 +## license
583 +
584 +MIT. Copyright (C) [Feross Aboukhadijeh](http://feross.org)
node_modules/safe-buffer/index.d.tsView
@@ -1,0 +1,187 @@
1 +declare module "safe-buffer" {
2 + export class Buffer {
3 + length: number
4 + write(string: string, offset?: number, length?: number, encoding?: string): number;
5 + toString(encoding?: string, start?: number, end?: number): string;
6 + toJSON(): { type: 'Buffer', data: any[] };
7 + equals(otherBuffer: Buffer): boolean;
8 + compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number;
9 + copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number;
10 + slice(start?: number, end?: number): Buffer;
11 + writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
12 + writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
13 + writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
14 + writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
15 + readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number;
16 + readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number;
17 + readIntLE(offset: number, byteLength: number, noAssert?: boolean): number;
18 + readIntBE(offset: number, byteLength: number, noAssert?: boolean): number;
19 + readUInt8(offset: number, noAssert?: boolean): number;
20 + readUInt16LE(offset: number, noAssert?: boolean): number;
21 + readUInt16BE(offset: number, noAssert?: boolean): number;
22 + readUInt32LE(offset: number, noAssert?: boolean): number;
23 + readUInt32BE(offset: number, noAssert?: boolean): number;
24 + readInt8(offset: number, noAssert?: boolean): number;
25 + readInt16LE(offset: number, noAssert?: boolean): number;
26 + readInt16BE(offset: number, noAssert?: boolean): number;
27 + readInt32LE(offset: number, noAssert?: boolean): number;
28 + readInt32BE(offset: number, noAssert?: boolean): number;
29 + readFloatLE(offset: number, noAssert?: boolean): number;
30 + readFloatBE(offset: number, noAssert?: boolean): number;
31 + readDoubleLE(offset: number, noAssert?: boolean): number;
32 + readDoubleBE(offset: number, noAssert?: boolean): number;
33 + swap16(): Buffer;
34 + swap32(): Buffer;
35 + swap64(): Buffer;
36 + writeUInt8(value: number, offset: number, noAssert?: boolean): number;
37 + writeUInt16LE(value: number, offset: number, noAssert?: boolean): number;
38 + writeUInt16BE(value: number, offset: number, noAssert?: boolean): number;
39 + writeUInt32LE(value: number, offset: number, noAssert?: boolean): number;
40 + writeUInt32BE(value: number, offset: number, noAssert?: boolean): number;
41 + writeInt8(value: number, offset: number, noAssert?: boolean): number;
42 + writeInt16LE(value: number, offset: number, noAssert?: boolean): number;
43 + writeInt16BE(value: number, offset: number, noAssert?: boolean): number;
44 + writeInt32LE(value: number, offset: number, noAssert?: boolean): number;
45 + writeInt32BE(value: number, offset: number, noAssert?: boolean): number;
46 + writeFloatLE(value: number, offset: number, noAssert?: boolean): number;
47 + writeFloatBE(value: number, offset: number, noAssert?: boolean): number;
48 + writeDoubleLE(value: number, offset: number, noAssert?: boolean): number;
49 + writeDoubleBE(value: number, offset: number, noAssert?: boolean): number;
50 + fill(value: any, offset?: number, end?: number): this;
51 + indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number;
52 + lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number;
53 + includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean;
54 +
55 + /**
56 + * Allocates a new buffer containing the given {str}.
57 + *
58 + * @param str String to store in buffer.
59 + * @param encoding encoding to use, optional. Default is 'utf8'
60 + */
61 + constructor (str: string, encoding?: string);
62 + /**
63 + * Allocates a new buffer of {size} octets.
64 + *
65 + * @param size count of octets to allocate.
66 + */
67 + constructor (size: number);
68 + /**
69 + * Allocates a new buffer containing the given {array} of octets.
70 + *
71 + * @param array The octets to store.
72 + */
73 + constructor (array: Uint8Array);
74 + /**
75 + * Produces a Buffer backed by the same allocated memory as
76 + * the given {ArrayBuffer}.
77 + *
78 + *
79 + * @param arrayBuffer The ArrayBuffer with which to share memory.
80 + */
81 + constructor (arrayBuffer: ArrayBuffer);
82 + /**
83 + * Allocates a new buffer containing the given {array} of octets.
84 + *
85 + * @param array The octets to store.
86 + */
87 + constructor (array: any[]);
88 + /**
89 + * Copies the passed {buffer} data onto a new {Buffer} instance.
90 + *
91 + * @param buffer The buffer to copy.
92 + */
93 + constructor (buffer: Buffer);
94 + prototype: Buffer;
95 + /**
96 + * Allocates a new Buffer using an {array} of octets.
97 + *
98 + * @param array
99 + */
100 + static from(array: any[]): Buffer;
101 + /**
102 + * When passed a reference to the .buffer property of a TypedArray instance,
103 + * the newly created Buffer will share the same allocated memory as the TypedArray.
104 + * The optional {byteOffset} and {length} arguments specify a memory range
105 + * within the {arrayBuffer} that will be shared by the Buffer.
106 + *
107 + * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer()
108 + * @param byteOffset
109 + * @param length
110 + */
111 + static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer;
112 + /**
113 + * Copies the passed {buffer} data onto a new Buffer instance.
114 + *
115 + * @param buffer
116 + */
117 + static from(buffer: Buffer): Buffer;
118 + /**
119 + * Creates a new Buffer containing the given JavaScript string {str}.
120 + * If provided, the {encoding} parameter identifies the character encoding.
121 + * If not provided, {encoding} defaults to 'utf8'.
122 + *
123 + * @param str
124 + */
125 + static from(str: string, encoding?: string): Buffer;
126 + /**
127 + * Returns true if {obj} is a Buffer
128 + *
129 + * @param obj object to test.
130 + */
131 + static isBuffer(obj: any): obj is Buffer;
132 + /**
133 + * Returns true if {encoding} is a valid encoding argument.
134 + * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex'
135 + *
136 + * @param encoding string to test.
137 + */
138 + static isEncoding(encoding: string): boolean;
139 + /**
140 + * Gives the actual byte length of a string. encoding defaults to 'utf8'.
141 + * This is not the same as String.prototype.length since that returns the number of characters in a string.
142 + *
143 + * @param string string to test.
144 + * @param encoding encoding used to evaluate (defaults to 'utf8')
145 + */
146 + static byteLength(string: string, encoding?: string): number;
147 + /**
148 + * Returns a buffer which is the result of concatenating all the buffers in the list together.
149 + *
150 + * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer.
151 + * If the list has exactly one item, then the first item of the list is returned.
152 + * If the list has more than one item, then a new Buffer is created.
153 + *
154 + * @param list An array of Buffer objects to concatenate
155 + * @param totalLength Total length of the buffers when concatenated.
156 + * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly.
157 + */
158 + static concat(list: Buffer[], totalLength?: number): Buffer;
159 + /**
160 + * The same as buf1.compare(buf2).
161 + */
162 + static compare(buf1: Buffer, buf2: Buffer): number;
163 + /**
164 + * Allocates a new buffer of {size} octets.
165 + *
166 + * @param size count of octets to allocate.
167 + * @param fill if specified, buffer will be initialized by calling buf.fill(fill).
168 + * If parameter is omitted, buffer will be filled with zeros.
169 +     * @param encoding encoding used for call to buf.fill while initializing
170 + */
171 + static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer;
172 + /**
173 + * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents
174 + * of the newly created Buffer are unknown and may contain sensitive data.
175 + *
176 + * @param size count of octets to allocate
177 + */
178 + static allocUnsafe(size: number): Buffer;
179 + /**
180 + * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents
181 + * of the newly created Buffer are unknown and may contain sensitive data.
182 + *
183 + * @param size count of octets to allocate
184 + */
185 + static allocUnsafeSlow(size: number): Buffer;
186 + }
187 +}
node_modules/safe-buffer/index.jsView
@@ -1,0 +1,62 @@
1 +/* eslint-disable node/no-deprecated-api */
2 +var buffer = require('buffer')
3 +var Buffer = buffer.Buffer
4 +
5 +// alternative to using Object.keys for old browsers
6 +function copyProps (src, dst) {
7 + for (var key in src) {
8 + dst[key] = src[key]
9 + }
10 +}
11 +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) {
12 + module.exports = buffer
13 +} else {
14 + // Copy properties from require('buffer')
15 + copyProps(buffer, exports)
16 + exports.Buffer = SafeBuffer
17 +}
18 +
19 +function SafeBuffer (arg, encodingOrOffset, length) {
20 + return Buffer(arg, encodingOrOffset, length)
21 +}
22 +
23 +// Copy static methods from Buffer
24 +copyProps(Buffer, SafeBuffer)
25 +
26 +SafeBuffer.from = function (arg, encodingOrOffset, length) {
27 + if (typeof arg === 'number') {
28 + throw new TypeError('Argument must not be a number')
29 + }
30 + return Buffer(arg, encodingOrOffset, length)
31 +}
32 +
33 +SafeBuffer.alloc = function (size, fill, encoding) {
34 + if (typeof size !== 'number') {
35 + throw new TypeError('Argument must be a number')
36 + }
37 + var buf = Buffer(size)
38 + if (fill !== undefined) {
39 + if (typeof encoding === 'string') {
40 + buf.fill(fill, encoding)
41 + } else {
42 + buf.fill(fill)
43 + }
44 + } else {
45 + buf.fill(0)
46 + }
47 + return buf
48 +}
49 +
50 +SafeBuffer.allocUnsafe = function (size) {
51 + if (typeof size !== 'number') {
52 + throw new TypeError('Argument must be a number')
53 + }
54 + return Buffer(size)
55 +}
56 +
57 +SafeBuffer.allocUnsafeSlow = function (size) {
58 + if (typeof size !== 'number') {
59 + throw new TypeError('Argument must be a number')
60 + }
61 + return buffer.SlowBuffer(size)
62 +}
node_modules/safe-buffer/package.jsonView
@@ -1,0 +1,65 @@
1 +{
2 + "_args": [
3 + [
4 + "safe-buffer@5.1.2",
5 + "/home/cel/src/ssb-npm-registry"
6 + ]
7 + ],
8 + "_from": "safe-buffer@5.1.2",
9 + "_id": "safe-buffer@5.1.2",
10 + "_inBundle": false,
11 + "_integrity": "sha256-4JIGxg/Mr7lSyFSvdinLsDGpjW2i4UP7OqPIpIQCqiI=",
12 + "_location": "/safe-buffer",
13 + "_phantomChildren": {},
14 + "_requested": {
15 + "type": "version",
16 + "registry": true,
17 + "raw": "safe-buffer@5.1.2",
18 + "name": "safe-buffer",
19 + "escapedName": "safe-buffer",
20 + "rawSpec": "5.1.2",
21 + "saveSpec": null,
22 + "fetchSpec": "5.1.2"
23 + },
24 + "_requiredBy": [
25 + "/string_decoder"
26 + ],
27 + "_resolved": "http://localhost:8989/blobs/get/&4JIGxg/Mr7lSyFSvdinLsDGpjW2i4UP7OqPIpIQCqiI=.sha256",
28 + "_spec": "5.1.2",
29 + "_where": "/home/cel/src/ssb-npm-registry",
30 + "author": {
31 + "name": "Feross Aboukhadijeh",
32 + "email": "feross@feross.org",
33 + "url": "http://feross.org"
34 + },
35 + "bugs": {
36 + "url": "https://github.com/feross/safe-buffer/issues"
37 + },
38 + "description": "Safer Node.js Buffer API",
39 + "devDependencies": {
40 + "standard": "*",
41 + "tape": "^4.0.0"
42 + },
43 + "homepage": "https://github.com/feross/safe-buffer",
44 + "keywords": [
45 + "buffer",
46 + "buffer allocate",
47 + "node security",
48 + "safe",
49 + "safe-buffer",
50 + "security",
51 + "uninitialized"
52 + ],
53 + "license": "MIT",
54 + "main": "index.js",
55 + "name": "safe-buffer",
56 + "repository": {
57 + "type": "git",
58 + "url": "git://github.com/feross/safe-buffer.git"
59 + },
60 + "scripts": {
61 + "test": "standard && tape test/*.js"
62 + },
63 + "types": "index.d.ts",
64 + "version": "5.1.2"
65 +}
node_modules/semver/CHANGELOG.mdView
@@ -1,0 +1,70 @@
1 +# changes log
2 +
3 +## 6.2.0
4 +
5 +* Coerce numbers to strings when passed to semver.coerce()
6 +* Add `rtl` option to coerce from right to left
7 +
8 +## 6.1.3
9 +
10 +* Handle X-ranges properly in includePrerelease mode
11 +
12 +## 6.1.2
13 +
14 +* Do not throw when testing invalid version strings
15 +
16 +## 6.1.1
17 +
18 +* Add options support for semver.coerce()
19 +* Handle undefined version passed to Range.test
20 +
21 +## 6.1.0
22 +
23 +* Add semver.compareBuild function
24 +* Support `*` in semver.intersects
25 +
26 +## 6.0
27 +
28 +* Fix `intersects` logic.
29 +
30 + This is technically a bug fix, but since it is also a change to behavior
31 + that may require users updating their code, it is marked as a major
32 + version increment.
33 +
34 +## 5.7
35 +
36 +* Add `minVersion` method
37 +
38 +## 5.6
39 +
40 +* Move boolean `loose` param to an options object, with
41 + backwards-compatibility protection.
42 +* Add ability to opt out of special prerelease version handling with
43 + the `includePrerelease` option flag.
44 +
45 +## 5.5
46 +
47 +* Add version coercion capabilities
48 +
49 +## 5.4
50 +
51 +* Add intersection checking
52 +
53 +## 5.3
54 +
55 +* Add `minSatisfying` method
56 +
57 +## 5.2
58 +
59 +* Add `prerelease(v)` that returns prerelease components
60 +
61 +## 5.1
62 +
63 +* Add Backus-Naur for ranges
64 +* Remove excessively cute inspection methods
65 +
66 +## 5.0
67 +
68 +* Remove AMD/Browserified build artifacts
69 +* Fix ltr and gtr when using the `*` range
70 +* Fix for range `*` with a prerelease identifier
node_modules/semver/LICENSEView
@@ -1,0 +1,15 @@
1 +The ISC License
2 +
3 +Copyright (c) Isaac Z. Schlueter and Contributors
4 +
5 +Permission to use, copy, modify, and/or distribute this software for any
6 +purpose with or without fee is hereby granted, provided that the above
7 +copyright notice and this permission notice appear in all copies.
8 +
9 +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
10 +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
11 +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
12 +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
13 +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
14 +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
15 +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
node_modules/semver/README.mdView
@@ -1,0 +1,443 @@
1 +semver(1) -- The semantic versioner for npm
2 +===========================================
3 +
4 +## Install
5 +
6 +```bash
7 +npm install semver
8 +```
9 +
10 +## Usage
11 +
12 +As a node module:
13 +
14 +```js
15 +const semver = require('semver')
16 +
17 +semver.valid('1.2.3') // '1.2.3'
18 +semver.valid('a.b.c') // null
19 +semver.clean(' =v1.2.3 ') // '1.2.3'
20 +semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true
21 +semver.gt('1.2.3', '9.8.7') // false
22 +semver.lt('1.2.3', '9.8.7') // true
23 +semver.minVersion('>=1.0.0') // '1.0.0'
24 +semver.valid(semver.coerce('v2')) // '2.0.0'
25 +semver.valid(semver.coerce('42.6.7.9.3-alpha')) // '42.6.7'
26 +```
27 +
28 +As a command-line utility:
29 +
30 +```
31 +$ semver -h
32 +
33 +A JavaScript implementation of the https://semver.org/ specification
34 +Copyright Isaac Z. Schlueter
35 +
36 +Usage: semver [options] <version> [<version> [...]]
37 +Prints valid versions sorted by SemVer precedence
38 +
39 +Options:
40 +-r --range <range>
41 + Print versions that match the specified range.
42 +
43 +-i --increment [<level>]
44 + Increment a version by the specified level. Level can
45 + be one of: major, minor, patch, premajor, preminor,
46 + prepatch, or prerelease. Default level is 'patch'.
47 + Only one version may be specified.
48 +
49 +--preid <identifier>
50 + Identifier to be used to prefix premajor, preminor,
51 + prepatch or prerelease version increments.
52 +
53 +-l --loose
54 + Interpret versions and ranges loosely
55 +
56 +-p --include-prerelease
57 + Always include prerelease versions in range matching
58 +
59 +-c --coerce
60 + Coerce a string into SemVer if possible
61 + (does not imply --loose)
62 +
63 +--rtl
64 + Coerce version strings right to left
65 +
66 +--ltr
67 + Coerce version strings left to right (default)
68 +
69 +Program exits successfully if any valid version satisfies
70 +all supplied ranges, and prints all satisfying versions.
71 +
72 +If no satisfying versions are found, then exits failure.
73 +
74 +Versions are printed in ascending order, so supplying
75 +multiple versions to the utility will just sort them.
76 +```
77 +
78 +## Versions
79 +
80 +A "version" is described by the `v2.0.0` specification found at
81 +<https://semver.org/>.
82 +
83 +A leading `"="` or `"v"` character is stripped off and ignored.
84 +
85 +## Ranges
86 +
87 +A `version range` is a set of `comparators` which specify versions
88 +that satisfy the range.
89 +
90 +A `comparator` is composed of an `operator` and a `version`. The set
91 +of primitive `operators` is:
92 +
93 +* `<` Less than
94 +* `<=` Less than or equal to
95 +* `>` Greater than
96 +* `>=` Greater than or equal to
97 +* `=` Equal. If no operator is specified, then equality is assumed,
98 + so this operator is optional, but MAY be included.
99 +
100 +For example, the comparator `>=1.2.7` would match the versions
101 +`1.2.7`, `1.2.8`, `2.5.3`, and `1.3.9`, but not the versions `1.2.6`
102 +or `1.1.0`.
103 +
104 +Comparators can be joined by whitespace to form a `comparator set`,
105 +which is satisfied by the **intersection** of all of the comparators
106 +it includes.
107 +
108 +A range is composed of one or more comparator sets, joined by `||`. A
109 +version matches a range if and only if every comparator in at least
110 +one of the `||`-separated comparator sets is satisfied by the version.
111 +
112 +For example, the range `>=1.2.7 <1.3.0` would match the versions
113 +`1.2.7`, `1.2.8`, and `1.2.99`, but not the versions `1.2.6`, `1.3.0`,
114 +or `1.1.0`.
115 +
116 +The range `1.2.7 || >=1.2.9 <2.0.0` would match the versions `1.2.7`,
117 +`1.2.9`, and `1.4.6`, but not the versions `1.2.8` or `2.0.0`.
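
A quick check of the examples above, using `semver.satisfies()` from the Usage section (illustrative only):

```js
const semver = require('semver')

semver.satisfies('1.2.8', '>=1.2.7 <1.3.0')          // true
semver.satisfies('1.3.0', '>=1.2.7 <1.3.0')          // false
semver.satisfies('1.4.6', '1.2.7 || >=1.2.9 <2.0.0') // true
semver.satisfies('1.2.8', '1.2.7 || >=1.2.9 <2.0.0') // false
```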
118 +
119 +### Prerelease Tags
120 +
121 +If a version has a prerelease tag (for example, `1.2.3-alpha.3`) then
122 +it will only be allowed to satisfy comparator sets if at least one
123 +comparator with the same `[major, minor, patch]` tuple also has a
124 +prerelease tag.
125 +
126 +For example, the range `>1.2.3-alpha.3` would be allowed to match the
127 +version `1.2.3-alpha.7`, but it would *not* be satisfied by
128 +`3.4.5-alpha.9`, even though `3.4.5-alpha.9` is technically "greater
129 +than" `1.2.3-alpha.3` according to the SemVer sort rules. The version
130 +range only accepts prerelease tags on the `1.2.3` version. The
131 +version `3.4.5` *would* satisfy the range, because it does not have a
132 +prerelease flag, and `3.4.5` is greater than `1.2.3-alpha.7`.
133 +
134 +The purpose for this behavior is twofold. First, prerelease versions
135 +frequently are updated very quickly, and contain many breaking changes
136 +that are (by the author's design) not yet fit for public consumption.
137 +Therefore, by default, they are excluded from range matching
138 +semantics.
139 +
140 +Second, a user who has opted into using a prerelease version has
141 +clearly indicated the intent to use *that specific* set of
142 +alpha/beta/rc versions. By including a prerelease tag in the range,
143 +the user is indicating that they are aware of the risk. However, it
144 +is still not appropriate to assume that they have opted into taking a
145 +similar risk on the *next* set of prerelease versions.
146 +
147 +Note that this behavior can be suppressed (treating all prerelease
148 +versions as if they were normal versions, for the purpose of range
149 +matching) by setting the `includePrerelease` flag on the options
150 +object passed to any
151 +[functions](https://github.com/npm/node-semver#functions) that do
152 +range matching.
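
For example (a small sketch of the behavior described above):

```js
const semver = require('semver')

// By default, a prerelease only satisfies if the range has a prerelease
// comparator on the same [major, minor, patch] tuple.
semver.satisfies('1.2.3-alpha.7', '>1.2.3-alpha.3') // true
semver.satisfies('3.4.5-alpha.9', '>1.2.3-alpha.3') // false
semver.satisfies('3.4.5', '>1.2.3-alpha.3')         // true

// Opting in with includePrerelease treats prereleases like normal versions.
semver.satisfies('3.4.5-alpha.9', '>1.2.3-alpha.3', { includePrerelease: true }) // true
```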
153 +
154 +#### Prerelease Identifiers
155 +
156 +The method `.inc` takes an additional `identifier` string argument that
157 +will append the value of the string as a prerelease identifier:
158 +
159 +```javascript
160 +semver.inc('1.2.3', 'prerelease', 'beta')
161 +// '1.2.4-beta.0'
162 +```
163 +
164 +command-line example:
165 +
166 +```bash
167 +$ semver 1.2.3 -i prerelease --preid beta
168 +1.2.4-beta.0
169 +```
170 +
171 +Which then can be used to increment further:
172 +
173 +```bash
174 +$ semver 1.2.4-beta.0 -i prerelease
175 +1.2.4-beta.1
176 +```
177 +
178 +### Advanced Range Syntax
179 +
180 +Advanced range syntax desugars to primitive comparators in
181 +deterministic ways.
182 +
183 +Advanced ranges may be combined in the same way as primitive
184 +comparators using white space or `||`.
185 +
186 +#### Hyphen Ranges `X.Y.Z - A.B.C`
187 +
188 +Specifies an inclusive set.
189 +
190 +* `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4`
191 +
192 +If a partial version is provided as the first version in the inclusive
193 +range, then the missing pieces are replaced with zeroes.
194 +
195 +* `1.2 - 2.3.4` := `>=1.2.0 <=2.3.4`
196 +
197 +If a partial version is provided as the second version in the
198 +inclusive range, then all versions that start with the supplied parts
199 +of the tuple are accepted, but nothing that would be greater than the
200 +provided tuple parts.
201 +
202 +* `1.2.3 - 2.3` := `>=1.2.3 <2.4.0`
203 +* `1.2.3 - 2` := `>=1.2.3 <3.0.0`
204 +
205 +#### X-Ranges `1.2.x` `1.X` `1.2.*` `*`
206 +
207 +Any of `X`, `x`, or `*` may be used to "stand in" for one of the
208 +numeric values in the `[major, minor, patch]` tuple.
209 +
210 +* `*` := `>=0.0.0` (Any version satisfies)
211 +* `1.x` := `>=1.0.0 <2.0.0` (Matching major version)
212 +* `1.2.x` := `>=1.2.0 <1.3.0` (Matching major and minor versions)
213 +
214 +A partial version range is treated as an X-Range, so the special
215 +character is in fact optional.
216 +
217 +* `""` (empty string) := `*` := `>=0.0.0`
218 +* `1` := `1.x.x` := `>=1.0.0 <2.0.0`
219 +* `1.2` := `1.2.x` := `>=1.2.0 <1.3.0`
220 +
221 +#### Tilde Ranges `~1.2.3` `~1.2` `~1`
222 +
223 +Allows patch-level changes if a minor version is specified on the
224 +comparator. Allows minor-level changes if not.
225 +
226 +* `~1.2.3` := `>=1.2.3 <1.(2+1).0` := `>=1.2.3 <1.3.0`
227 +* `~1.2` := `>=1.2.0 <1.(2+1).0` := `>=1.2.0 <1.3.0` (Same as `1.2.x`)
228 +* `~1` := `>=1.0.0 <(1+1).0.0` := `>=1.0.0 <2.0.0` (Same as `1.x`)
229 +* `~0.2.3` := `>=0.2.3 <0.(2+1).0` := `>=0.2.3 <0.3.0`
230 +* `~0.2` := `>=0.2.0 <0.(2+1).0` := `>=0.2.0 <0.3.0` (Same as `0.2.x`)
231 +* `~0` := `>=0.0.0 <(0+1).0.0` := `>=0.0.0 <1.0.0` (Same as `0.x`)
232 +* `~1.2.3-beta.2` := `>=1.2.3-beta.2 <1.3.0` Note that prereleases in
233 + the `1.2.3` version will be allowed, if they are greater than or
234 + equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but
235 + `1.2.4-beta.2` would not, because it is a prerelease of a
236 + different `[major, minor, patch]` tuple.
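
A couple of the tilde desugarings above, checked with `semver.satisfies()` (illustrative sketch):

```js
const semver = require('semver')

semver.satisfies('1.2.9', '~1.2.3') // true  (~1.2.3 := >=1.2.3 <1.3.0)
semver.satisfies('1.3.0', '~1.2.3') // false
semver.satisfies('1.9.9', '~1')     // true  (~1 := >=1.0.0 <2.0.0)
```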
237 +
238 +#### Caret Ranges `^1.2.3` `^0.2.5` `^0.0.4`
239 +
240 +Allows changes that do not modify the left-most non-zero element in the
241 +`[major, minor, patch]` tuple. In other words, this allows patch and
242 +minor updates for versions `1.0.0` and above, patch updates for
243 +versions `0.X >=0.1.0`, and *no* updates for versions `0.0.X`.
244 +
245 +Many authors treat a `0.x` version as if the `x` were the major
246 +"breaking-change" indicator.
247 +
248 +Caret ranges are ideal when an author may make breaking changes
249 +between `0.2.4` and `0.3.0` releases, which is a common practice.
250 +However, it presumes that there will *not* be breaking changes between
251 +`0.2.4` and `0.2.5`. It allows for changes that are presumed to be
252 +additive (but non-breaking), according to commonly observed practices.
253 +
254 +* `^1.2.3` := `>=1.2.3 <2.0.0`
255 +* `^0.2.3` := `>=0.2.3 <0.3.0`
256 +* `^0.0.3` := `>=0.0.3 <0.0.4`
257 +* `^1.2.3-beta.2` := `>=1.2.3-beta.2 <2.0.0` Note that prereleases in
258 + the `1.2.3` version will be allowed, if they are greater than or
259 + equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but
260 + `1.2.4-beta.2` would not, because it is a prerelease of a
261 + different `[major, minor, patch]` tuple.
262 +* `^0.0.3-beta` := `>=0.0.3-beta <0.0.4` Note that prereleases in the
263 + `0.0.3` version *only* will be allowed, if they are greater than or
264 + equal to `beta`. So, `0.0.3-pr.2` would be allowed.
265 +
266 +When parsing caret ranges, a missing `patch` value desugars to the
267 +number `0`, but will allow flexibility within that value, even if the
268 +major and minor versions are both `0`.
269 +
270 +* `^1.2.x` := `>=1.2.0 <2.0.0`
271 +* `^0.0.x` := `>=0.0.0 <0.1.0`
272 +* `^0.0` := `>=0.0.0 <0.1.0`
273 +
274 +Missing `minor` and `patch` values will desugar to zero, but also
275 +allow flexibility within those values, even if the major version is
276 +zero.
277 +
278 +* `^1.x` := `>=1.0.0 <2.0.0`
279 +* `^0.x` := `>=0.0.0 <1.0.0`
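
The same kind of spot check for caret ranges (illustrative sketch):

```js
const semver = require('semver')

semver.satisfies('1.9.0', '^1.2.3') // true  (^1.2.3 := >=1.2.3 <2.0.0)
semver.satisfies('0.2.9', '^0.2.3') // true  (^0.2.3 := >=0.2.3 <0.3.0)
semver.satisfies('0.3.0', '^0.2.3') // false
semver.satisfies('0.0.4', '^0.0.3') // false (^0.0.3 := >=0.0.3 <0.0.4)
```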
280 +
281 +### Range Grammar
282 +
283 +Putting all this together, here is a Backus-Naur grammar for ranges,
284 +for the benefit of parser authors:
285 +
286 +```bnf
287 +range-set ::= range ( logical-or range ) *
288 +logical-or ::= ( ' ' ) * '||' ( ' ' ) *
289 +range ::= hyphen | simple ( ' ' simple ) * | ''
290 +hyphen ::= partial ' - ' partial
291 +simple ::= primitive | partial | tilde | caret
292 +primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial
293 +partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )?
294 +xr ::= 'x' | 'X' | '*' | nr
295 +nr ::= '0' | ['1'-'9'] ( ['0'-'9'] ) *
296 +tilde ::= '~' partial
297 +caret ::= '^' partial
298 +qualifier ::= ( '-' pre )? ( '+' build )?
299 +pre ::= parts
300 +build ::= parts
301 +parts ::= part ( '.' part ) *
302 +part ::= nr | [-0-9A-Za-z]+
303 +```
304 +
305 +## Functions
306 +
307 +All methods and classes take a final `options` object argument. All
308 +options in this object are `false` by default. The options supported
309 +are:
310 +
311 +- `loose` Be more forgiving about not-quite-valid semver strings.
312 + (Any resulting output will always be 100% strict compliant, of
313 + course.) For backwards compatibility reasons, if the `options`
314 + argument is a boolean value instead of an object, it is interpreted
315 + to be the `loose` param.
316 +- `includePrerelease` Set to suppress the [default
317 + behavior](https://github.com/npm/node-semver#prerelease-tags) of
318 + excluding prerelease tagged versions from ranges unless they are
319 + explicitly opted into.
320 +
321 +Strict-mode Comparators and Ranges will be strict about the SemVer
322 +strings that they parse.
323 +
324 +* `valid(v)`: Return the parsed version, or null if it's not valid.
325 +* `inc(v, release)`: Return the version incremented by the release
326 + type (`major`, `premajor`, `minor`, `preminor`, `patch`,
327 + `prepatch`, or `prerelease`), or null if it's not valid
328 + * `premajor` in one call will bump the version up to the next major
329 + version and down to a prerelease of that major version.
330 + `preminor`, and `prepatch` work the same way.
331 + * If called from a non-prerelease version, the `prerelease` will work the
332 + same as `prepatch`. It increments the patch version, then makes a
333 + prerelease. If the input version is already a prerelease it simply
334 + increments it.
335 +* `prerelease(v)`: Returns an array of prerelease components, or null
336 + if none exist. Example: `prerelease('1.2.3-alpha.1') -> ['alpha', 1]`
337 +* `major(v)`: Return the major version number.
338 +* `minor(v)`: Return the minor version number.
339 +* `patch(v)`: Return the patch version number.
340 +* `intersects(r1, r2, loose)`: Return true if the two supplied ranges
341 + or comparators intersect.
342 +* `parse(v)`: Attempt to parse a string as a semantic version, returning either
343 + a `SemVer` object or `null`.
344 +
345 +### Comparison
346 +
347 +* `gt(v1, v2)`: `v1 > v2`
348 +* `gte(v1, v2)`: `v1 >= v2`
349 +* `lt(v1, v2)`: `v1 < v2`
350 +* `lte(v1, v2)`: `v1 <= v2`
351 +* `eq(v1, v2)`: `v1 == v2` This is true if they're logically equivalent,
352 + even if they're not the exact same string. You already know how to
353 + compare strings.
354 +* `neq(v1, v2)`: `v1 != v2` The opposite of `eq`.
355 +* `cmp(v1, comparator, v2)`: Pass in a comparison string, and it'll call
356 + the corresponding function above. `"==="` and `"!=="` do simple
357 + string comparison, but are included for completeness. Throws if an
358 + invalid comparison string is provided.
359 +* `compare(v1, v2)`: Return `0` if `v1 == v2`, or `1` if `v1` is greater, or `-1` if
360 + `v2` is greater. Sorts in ascending order if passed to `Array.sort()`.
361 +* `rcompare(v1, v2)`: The reverse of compare. Sorts an array of versions
362 + in descending order when passed to `Array.sort()`.
363 +* `compareBuild(v1, v2)`: The same as `compare` but considers `build` when two versions
364 + are equal. Sorts in ascending order if passed to `Array.sort()`.
366 +* `diff(v1, v2)`: Returns difference between two versions by the release type
367 + (`major`, `premajor`, `minor`, `preminor`, `patch`, `prepatch`, or `prerelease`),
368 + or null if the versions are the same.
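
For example, `compare` can be passed directly to `Array.sort()` (a small sketch):

```js
const semver = require('semver')

const versions = ['1.10.1', '1.2.3', '1.2.3-alpha.1']
console.log(versions.sort(semver.compare))
// [ '1.2.3-alpha.1', '1.2.3', '1.10.1' ]

console.log(semver.diff('1.2.3', '2.0.0')) // 'major'
```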
369 +
370 +### Comparators
371 +
372 +* `intersects(comparator)`: Return true if the comparators intersect
373 +
374 +### Ranges
375 +
376 +* `validRange(range)`: Return the valid range or null if it's not valid
377 +* `satisfies(version, range)`: Return true if the version satisfies the
378 + range.
379 +* `maxSatisfying(versions, range)`: Return the highest version in the list
380 + that satisfies the range, or `null` if none of them do.
381 +* `minSatisfying(versions, range)`: Return the lowest version in the list
382 + that satisfies the range, or `null` if none of them do.
383 +* `minVersion(range)`: Return the lowest version that can possibly match
384 + the given range.
385 +* `gtr(version, range)`: Return `true` if version is greater than all the
386 + versions possible in the range.
387 +* `ltr(version, range)`: Return `true` if version is less than all the
388 + versions possible in the range.
389 +* `outside(version, range, hilo)`: Return true if the version is outside
390 + the bounds of the range in either the high or low direction. The
391 + `hilo` argument must be either the string `'>'` or `'<'`. (This is
392 + the function called by `gtr` and `ltr`.)
393 +* `intersects(range)`: Return true if any of the range's comparators intersect
394 +
395 +Note that, since ranges may be non-contiguous, a version might not be
396 +greater than a range, less than a range, *or* satisfy a range! For
397 +example, the range `1.2 <1.2.9 || >2.0.0` would have a hole from `1.2.9`
398 +until `2.0.0`, so the version `1.2.10` would not be greater than the
399 +range (because `2.0.1` satisfies, which is higher), nor less than the
400 +range (since `1.2.8` satisfies, which is lower), and it also does not
401 +satisfy the range.
402 +
403 +If you want to know if a version satisfies or does not satisfy a
404 +range, use the `satisfies(version, range)` function.
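
The `1.2.10` example above, spelled out (illustrative sketch):

```js
const semver = require('semver')
const range = '1.2 <1.2.9 || >2.0.0'

semver.gtr('1.2.10', range)       // false -- e.g. 2.0.1 in the range is higher
semver.ltr('1.2.10', range)       // false -- e.g. 1.2.8 in the range is lower
semver.satisfies('1.2.10', range) // false -- it falls in the "hole"
```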
405 +
406 +### Coercion
407 +
408 +* `coerce(version, options)`: Coerces a string to semver if possible
409 +
410 +This aims to provide a very forgiving translation of a non-semver string to
411 +semver. It looks for the first digit in a string, and consumes all
412 +remaining characters which satisfy at least a partial semver (e.g., `1`,
413 +`1.2`, `1.2.3`) up to the max permitted length (256 characters). Longer
414 +versions are simply truncated (`4.6.3.9.2-alpha2` becomes `4.6.3`). All
415 +surrounding text is simply ignored (`v3.4 replaces v3.3.1` becomes
416 +`3.4.0`). Only text which lacks digits will fail coercion (`version one`
417 +is not valid). The maximum length for any semver component considered for
418 +coercion is 16 characters; longer components will be ignored
419 +(`10000000000000000.4.7.4` becomes `4.7.4`). The maximum value for any
420 +semver component is `Integer.MAX_SAFE_INTEGER || (2**53 - 1)`; higher value
421 +components are invalid (`9999999999999999.4.7.4` is likely invalid).
422 +
423 +If the `options.rtl` flag is set, then `coerce` will return the right-most
424 +coercible tuple that does not share an ending index with a longer coercible
425 +tuple. For example, `1.2.3.4` will return `2.3.4` in rtl mode, not
426 +`4.0.0`. `1.2.3/4` will return `4.0.0`, because the `4` is not a part of
427 +any other overlapping SemVer tuple.
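
A few of the coercions above, as code (illustrative sketch):

```js
const semver = require('semver')

semver.coerce('v3.4 replaces v3.3.1').version   // '3.4.0'
semver.coerce('4.6.3.9.2-alpha2').version       // '4.6.3'
semver.coerce('1.2.3.4', { rtl: true }).version // '2.3.4'
semver.coerce('version one')                    // null
```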
428 +
429 +### Clean
430 +
431 +* `clean(version)`: Clean a string to be a valid semver if possible
432 +
433 +This will return a cleaned and trimmed semver version. If the provided version is not valid, `null` will be returned. This does not work for ranges.
434 +
435 +ex.
436 +* `s.clean(' = v 2.1.5foo')`: `null`
437 +* `s.clean(' = v 2.1.5foo', { loose: true })`: `'2.1.5-foo'`
438 +* `s.clean(' = v 2.1.5-foo')`: `null`
439 +* `s.clean(' = v 2.1.5-foo', { loose: true })`: `'2.1.5-foo'`
440 +* `s.clean('=v2.1.5')`: `'2.1.5'`
441 +* `s.clean(' =v2.1.5')`: `'2.1.5'`
442 +* `s.clean(' 2.1.5 ')`: `'2.1.5'`
443 +* `s.clean('~1.0.0')`: `null`
node_modules/semver/bin/semver.jsView
@@ -1,0 +1,174 @@
1 +#!/usr/bin/env node
2 +// Standalone semver comparison program.
3 +// Exits successfully and prints matching version(s) if
4 +// any supplied version is valid and passes all tests.
5 +
6 +var argv = process.argv.slice(2)
7 +
8 +var versions = []
9 +
10 +var range = []
11 +
12 +var inc = null
13 +
14 +var version = require('../package.json').version
15 +
16 +var loose = false
17 +
18 +var includePrerelease = false
19 +
20 +var coerce = false
21 +
22 +var rtl = false
23 +
24 +var identifier
25 +
26 +var semver = require('../semver')
27 +
28 +var reverse = false
29 +
30 +var options = {}
31 +
32 +main()
33 +
34 +function main () {
35 + if (!argv.length) return help()
36 + while (argv.length) {
37 + var a = argv.shift()
38 + var indexOfEqualSign = a.indexOf('=')
39 + if (indexOfEqualSign !== -1) {
40 + a = a.slice(0, indexOfEqualSign)
41 + argv.unshift(a.slice(indexOfEqualSign + 1))
42 + }
43 + switch (a) {
44 + case '-rv': case '-rev': case '--rev': case '--reverse':
45 + reverse = true
46 + break
47 + case '-l': case '--loose':
48 + loose = true
49 + break
50 + case '-p': case '--include-prerelease':
51 + includePrerelease = true
52 + break
53 + case '-v': case '--version':
54 + versions.push(argv.shift())
55 + break
56 + case '-i': case '--inc': case '--increment':
57 + switch (argv[0]) {
58 + case 'major': case 'minor': case 'patch': case 'prerelease':
59 + case 'premajor': case 'preminor': case 'prepatch':
60 + inc = argv.shift()
61 + break
62 + default:
63 + inc = 'patch'
64 + break
65 + }
66 + break
67 + case '--preid':
68 + identifier = argv.shift()
69 + break
70 + case '-r': case '--range':
71 + range.push(argv.shift())
72 + break
73 + case '-c': case '--coerce':
74 + coerce = true
75 + break
76 + case '--rtl':
77 + rtl = true
78 + break
79 + case '--ltr':
80 + rtl = false
81 + break
82 + case '-h': case '--help': case '-?':
83 + return help()
84 + default:
85 + versions.push(a)
86 + break
87 + }
88 + }
89 +
90 + var options = { loose: loose, includePrerelease: includePrerelease, rtl: rtl }
91 +
92 + versions = versions.map(function (v) {
93 + return coerce ? (semver.coerce(v, options) || { version: v }).version : v
94 + }).filter(function (v) {
95 + return semver.valid(v)
96 + })
97 + if (!versions.length) return fail()
98 + if (inc && (versions.length !== 1 || range.length)) { return failInc() }
99 +
100 + for (var i = 0, l = range.length; i < l; i++) {
101 + versions = versions.filter(function (v) {
102 + return semver.satisfies(v, range[i], options)
103 + })
104 + if (!versions.length) return fail()
105 + }
106 + return success(versions)
107 +}
108 +
109 +function failInc () {
110 + console.error('--inc can only be used on a single version with no range')
111 + fail()
112 +}
113 +
114 +function fail () { process.exit(1) }
115 +
116 +function success () {
117 + var compare = reverse ? 'rcompare' : 'compare'
118 + versions.sort(function (a, b) {
119 + return semver[compare](a, b, options)
120 + }).map(function (v) {
121 + return semver.clean(v, options)
122 + }).map(function (v) {
123 + return inc ? semver.inc(v, inc, options, identifier) : v
124 + }).forEach(function (v, i, _) { console.log(v) })
125 +}
126 +
127 +function help () {
128 + console.log(['SemVer ' + version,
129 + '',
130 + 'A JavaScript implementation of the https://semver.org/ specification',
131 + 'Copyright Isaac Z. Schlueter',
132 + '',
133 + 'Usage: semver [options] <version> [<version> [...]]',
134 + 'Prints valid versions sorted by SemVer precedence',
135 + '',
136 + 'Options:',
137 + '-r --range <range>',
138 + ' Print versions that match the specified range.',
139 + '',
140 + '-i --increment [<level>]',
141 + ' Increment a version by the specified level. Level can',
142 + ' be one of: major, minor, patch, premajor, preminor,',
143 + " prepatch, or prerelease. Default level is 'patch'.",
144 + ' Only one version may be specified.',
145 + '',
146 + '--preid <identifier>',
147 + ' Identifier to be used to prefix premajor, preminor,',
148 + ' prepatch or prerelease version increments.',
149 + '',
150 + '-l --loose',
151 + ' Interpret versions and ranges loosely',
152 + '',
153 + '-p --include-prerelease',
154 + ' Always include prerelease versions in range matching',
155 + '',
156 + '-c --coerce',
157 + ' Coerce a string into SemVer if possible',
158 + ' (does not imply --loose)',
159 + '',
160 + '--rtl',
161 + ' Coerce version strings right to left',
162 + '',
163 + '--ltr',
164 + ' Coerce version strings left to right (default)',
165 + '',
166 + 'Program exits successfully if any valid version satisfies',
167 + 'all supplied ranges, and prints all satisfying versions.',
168 + '',
169 + 'If no satisfying versions are found, then exits failure.',
170 + '',
171 + 'Versions are printed in ascending order, so supplying',
172 + 'multiple versions to the utility will just sort them.'
173 + ].join('\n'))
174 +}
node_modules/semver/package.jsonView
@@ -1,0 +1,63 @@
1 +{
2 + "_args": [
3 + [
4 + "semver@6.3.0",
5 + "/home/cel/src/ssb-npm-registry"
6 + ]
7 + ],
8 + "_from": "semver@6.3.0",
9 + "_id": "semver@6.3.0",
10 + "_inBundle": false,
11 + "_integrity": "sha256-a/FK8zM4Q7YlJs7ZlRmRjH9r8s/hwRFVbTbPI5pMZ0U=",
12 + "_location": "/semver",
13 + "_phantomChildren": {},
14 + "_requested": {
15 + "type": "version",
16 + "registry": true,
17 + "raw": "semver@6.3.0",
18 + "name": "semver",
19 + "escapedName": "semver",
20 + "rawSpec": "6.3.0",
21 + "saveSpec": null,
22 + "fetchSpec": "6.3.0"
23 + },
24 + "_requiredBy": [
25 + "/"
26 + ],
27 + "_resolved": "http://localhost:46465/-/blobs/get/&a/FK8zM4Q7YlJs7ZlRmRjH9r8s/hwRFVbTbPI5pMZ0U=.sha256",
28 + "_spec": "6.3.0",
29 + "_where": "/home/cel/src/ssb-npm-registry",
30 + "bin": {
31 + "semver": "bin/semver.js"
32 + },
33 + "bugs": {
34 + "url": "https://github.com/npm/node-semver/issues"
35 + },
36 + "description": "The semantic version parser used by npm.",
37 + "devDependencies": {
38 + "tap": "^14.3.1"
39 + },
40 + "files": [
41 + "bin",
42 + "range.bnf",
43 + "semver.js"
44 + ],
45 + "homepage": "https://github.com/npm/node-semver#readme",
46 + "license": "ISC",
47 + "main": "semver.js",
48 + "name": "semver",
49 + "repository": {
50 + "type": "git",
51 + "url": "git+https://github.com/npm/node-semver.git"
52 + },
53 + "scripts": {
54 + "postpublish": "git push origin --follow-tags",
55 + "postversion": "npm publish",
56 + "preversion": "npm test",
57 + "test": "tap"
58 + },
59 + "tap": {
60 + "check-coverage": true
61 + },
62 + "version": "6.3.0"
63 +}
node_modules/semver/range.bnfView
@@ -1,0 +1,16 @@
1 +range-set ::= range ( logical-or range ) *
2 +logical-or ::= ( ' ' ) * '||' ( ' ' ) *
3 +range ::= hyphen | simple ( ' ' simple ) * | ''
4 +hyphen ::= partial ' - ' partial
5 +simple ::= primitive | partial | tilde | caret
6 +primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial
7 +partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )?
8 +xr ::= 'x' | 'X' | '*' | nr
9 +nr ::= '0' | [1-9] ( [0-9] ) *
10 +tilde ::= '~' partial
11 +caret ::= '^' partial
12 +qualifier ::= ( '-' pre )? ( '+' build )?
13 +pre ::= parts
14 +build ::= parts
15 +parts ::= part ( '.' part ) *
16 +part ::= nr | [-0-9A-Za-z]+
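This grammar is what the `Range` constructor in semver.js (below) accepts; hyphen, tilde, caret and x-ranges are all desugared into flat sets of primitive comparators. A small sketch of that expansion, assuming the module is required as `semver`:

``` js
var semver = require('semver')

// Each inner array is one ||-alternative from the grammar, reduced to primitives.
console.log(semver.toComparators('1.2.3 - 2.3.4 || ~3.0.1'))
// => [ [ '>=1.2.3', '<=2.3.4' ], [ '>=3.0.1', '<3.1.0' ] ]

console.log(semver.validRange('^1.2.x'))      // => '>=1.2.0 <2.0.0'
console.log(semver.validRange('not a range')) // => null
```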
node_modules/semver/semver.jsView
@@ -1,0 +1,1596 @@
1 +exports = module.exports = SemVer
2 +
3 +var debug
4 +/* istanbul ignore next */
5 +if (typeof process === 'object' &&
6 + process.env &&
7 + process.env.NODE_DEBUG &&
8 + /\bsemver\b/i.test(process.env.NODE_DEBUG)) {
9 + debug = function () {
10 + var args = Array.prototype.slice.call(arguments, 0)
11 + args.unshift('SEMVER')
12 + console.log.apply(console, args)
13 + }
14 +} else {
15 + debug = function () {}
16 +}
17 +
18 +// Note: this is the semver.org version of the spec that it implements
19 +// Not necessarily the package version of this code.
20 +exports.SEMVER_SPEC_VERSION = '2.0.0'
21 +
22 +var MAX_LENGTH = 256
23 +var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
24 + /* istanbul ignore next */ 9007199254740991
25 +
26 +// Max safe segment length for coercion.
27 +var MAX_SAFE_COMPONENT_LENGTH = 16
28 +
29 +// The actual regexps go on exports.re
30 +var re = exports.re = []
31 +var src = exports.src = []
32 +var t = exports.tokens = {}
33 +var R = 0
34 +
35 +function tok (n) {
36 + t[n] = R++
37 +}
38 +
39 +// The following Regular Expressions can be used for tokenizing,
40 +// validating, and parsing SemVer version strings.
41 +
42 +// ## Numeric Identifier
43 +// A single `0`, or a non-zero digit followed by zero or more digits.
44 +
45 +tok('NUMERICIDENTIFIER')
46 +src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*'
47 +tok('NUMERICIDENTIFIERLOOSE')
48 +src[t.NUMERICIDENTIFIERLOOSE] = '[0-9]+'
49 +
50 +// ## Non-numeric Identifier
51 +// Zero or more digits, followed by a letter or hyphen, and then zero or
52 +// more letters, digits, or hyphens.
53 +
54 +tok('NONNUMERICIDENTIFIER')
55 +src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'
56 +
57 +// ## Main Version
58 +// Three dot-separated numeric identifiers.
59 +
60 +tok('MAINVERSION')
61 +src[t.MAINVERSION] = '(' + src[t.NUMERICIDENTIFIER] + ')\\.' +
62 + '(' + src[t.NUMERICIDENTIFIER] + ')\\.' +
63 + '(' + src[t.NUMERICIDENTIFIER] + ')'
64 +
65 +tok('MAINVERSIONLOOSE')
66 +src[t.MAINVERSIONLOOSE] = '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' +
67 + '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' +
68 + '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')'
69 +
70 +// ## Pre-release Version Identifier
71 +// A numeric identifier, or a non-numeric identifier.
72 +
73 +tok('PRERELEASEIDENTIFIER')
74 +src[t.PRERELEASEIDENTIFIER] = '(?:' + src[t.NUMERICIDENTIFIER] +
75 + '|' + src[t.NONNUMERICIDENTIFIER] + ')'
76 +
77 +tok('PRERELEASEIDENTIFIERLOOSE')
78 +src[t.PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[t.NUMERICIDENTIFIERLOOSE] +
79 + '|' + src[t.NONNUMERICIDENTIFIER] + ')'
80 +
81 +// ## Pre-release Version
82 +// Hyphen, followed by one or more dot-separated pre-release version
83 +// identifiers.
84 +
85 +tok('PRERELEASE')
86 +src[t.PRERELEASE] = '(?:-(' + src[t.PRERELEASEIDENTIFIER] +
87 + '(?:\\.' + src[t.PRERELEASEIDENTIFIER] + ')*))'
88 +
89 +tok('PRERELEASELOOSE')
90 +src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] +
91 + '(?:\\.' + src[t.PRERELEASEIDENTIFIERLOOSE] + ')*))'
92 +
93 +// ## Build Metadata Identifier
94 +// Any combination of digits, letters, or hyphens.
95 +
96 +tok('BUILDIDENTIFIER')
97 +src[t.BUILDIDENTIFIER] = '[0-9A-Za-z-]+'
98 +
99 +// ## Build Metadata
100 +// Plus sign, followed by one or more period-separated build metadata
101 +// identifiers.
102 +
103 +tok('BUILD')
104 +src[t.BUILD] = '(?:\\+(' + src[t.BUILDIDENTIFIER] +
105 + '(?:\\.' + src[t.BUILDIDENTIFIER] + ')*))'
106 +
107 +// ## Full Version String
108 +// A main version, followed optionally by a pre-release version and
109 +// build metadata.
110 +
111 +// Note that only the major, minor, patch, and pre-release sections of
112 +// the version string are capturing groups. The build metadata is not a
113 +// capturing group, because it should not ever be used in version
114 +// comparison.
115 +
116 +tok('FULL')
117 +tok('FULLPLAIN')
118 +src[t.FULLPLAIN] = 'v?' + src[t.MAINVERSION] +
119 + src[t.PRERELEASE] + '?' +
120 + src[t.BUILD] + '?'
121 +
122 +src[t.FULL] = '^' + src[t.FULLPLAIN] + '$'
123 +
124 +// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.
125 +// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty
126 +// common in the npm registry.
127 +tok('LOOSEPLAIN')
128 +src[t.LOOSEPLAIN] = '[v=\\s]*' + src[t.MAINVERSIONLOOSE] +
129 + src[t.PRERELEASELOOSE] + '?' +
130 + src[t.BUILD] + '?'
131 +
132 +tok('LOOSE')
133 +src[t.LOOSE] = '^' + src[t.LOOSEPLAIN] + '$'
134 +
135 +tok('GTLT')
136 +src[t.GTLT] = '((?:<|>)?=?)'
137 +
138 +// Something like "2.*" or "1.2.x".
139 +// Note that "x.x" is a valid xRange identifer, meaning "any version"
140 +// Only the first item is strictly required.
141 +tok('XRANGEIDENTIFIERLOOSE')
142 +src[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + '|x|X|\\*'
143 +tok('XRANGEIDENTIFIER')
144 +src[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + '|x|X|\\*'
145 +
146 +tok('XRANGEPLAIN')
147 +src[t.XRANGEPLAIN] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIER] + ')' +
148 + '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' +
149 + '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' +
150 + '(?:' + src[t.PRERELEASE] + ')?' +
151 + src[t.BUILD] + '?' +
152 + ')?)?'
153 +
154 +tok('XRANGEPLAINLOOSE')
155 +src[t.XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +
156 + '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +
157 + '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +
158 + '(?:' + src[t.PRERELEASELOOSE] + ')?' +
159 + src[t.BUILD] + '?' +
160 + ')?)?'
161 +
162 +tok('XRANGE')
163 +src[t.XRANGE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAIN] + '$'
164 +tok('XRANGELOOSE')
165 +src[t.XRANGELOOSE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAINLOOSE] + '$'
166 +
167 +// Coercion.
168 +// Extract anything that could conceivably be a part of a valid semver
169 +tok('COERCE')
170 +src[t.COERCE] = '(^|[^\\d])' +
171 + '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' +
172 + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
173 + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
174 + '(?:$|[^\\d])'
175 +tok('COERCERTL')
176 +re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g')
177 +
178 +// Tilde ranges.
179 +// Meaning is "reasonably at or greater than"
180 +tok('LONETILDE')
181 +src[t.LONETILDE] = '(?:~>?)'
182 +
183 +tok('TILDETRIM')
184 +src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+'
185 +re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g')
186 +var tildeTrimReplace = '$1~'
187 +
188 +tok('TILDE')
189 +src[t.TILDE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAIN] + '$'
190 +tok('TILDELOOSE')
191 +src[t.TILDELOOSE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + '$'
192 +
193 +// Caret ranges.
194 +// Meaning is "at least and backwards compatible with"
195 +tok('LONECARET')
196 +src[t.LONECARET] = '(?:\\^)'
197 +
198 +tok('CARETTRIM')
199 +src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+'
200 +re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g')
201 +var caretTrimReplace = '$1^'
202 +
203 +tok('CARET')
204 +src[t.CARET] = '^' + src[t.LONECARET] + src[t.XRANGEPLAIN] + '$'
205 +tok('CARETLOOSE')
206 +src[t.CARETLOOSE] = '^' + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + '$'
207 +
208 +// A simple gt/lt/eq thing, or just "" to indicate "any version"
209 +tok('COMPARATORLOOSE')
210 +src[t.COMPARATORLOOSE] = '^' + src[t.GTLT] + '\\s*(' + src[t.LOOSEPLAIN] + ')$|^$'
211 +tok('COMPARATOR')
212 +src[t.COMPARATOR] = '^' + src[t.GTLT] + '\\s*(' + src[t.FULLPLAIN] + ')$|^$'
213 +
214 +// An expression to strip any whitespace between the gtlt and the thing
215 +// it modifies, so that `> 1.2.3` ==> `>1.2.3`
216 +tok('COMPARATORTRIM')
217 +src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] +
218 + '\\s*(' + src[t.LOOSEPLAIN] + '|' + src[t.XRANGEPLAIN] + ')'
219 +
220 +// this one has to use the /g flag
221 +re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g')
222 +var comparatorTrimReplace = '$1$2$3'
223 +
224 +// Something like `1.2.3 - 1.2.4`
225 +// Note that these all use the loose form, because they'll be
226 +// checked against either the strict or loose comparator form
227 +// later.
228 +tok('HYPHENRANGE')
229 +src[t.HYPHENRANGE] = '^\\s*(' + src[t.XRANGEPLAIN] + ')' +
230 + '\\s+-\\s+' +
231 + '(' + src[t.XRANGEPLAIN] + ')' +
232 + '\\s*$'
233 +
234 +tok('HYPHENRANGELOOSE')
235 +src[t.HYPHENRANGELOOSE] = '^\\s*(' + src[t.XRANGEPLAINLOOSE] + ')' +
236 + '\\s+-\\s+' +
237 + '(' + src[t.XRANGEPLAINLOOSE] + ')' +
238 + '\\s*$'
239 +
240 +// Star ranges basically just allow anything at all.
241 +tok('STAR')
242 +src[t.STAR] = '(<|>)?=?\\s*\\*'
243 +
244 +// Compile to actual regexp objects.
245 +// All are flag-free, unless they were created above with a flag.
246 +for (var i = 0; i < R; i++) {
247 + debug(i, src[i])
248 + if (!re[i]) {
249 + re[i] = new RegExp(src[i])
250 + }
251 +}
252 +
253 +exports.parse = parse
254 +function parse (version, options) {
255 + if (!options || typeof options !== 'object') {
256 + options = {
257 + loose: !!options,
258 + includePrerelease: false
259 + }
260 + }
261 +
262 + if (version instanceof SemVer) {
263 + return version
264 + }
265 +
266 + if (typeof version !== 'string') {
267 + return null
268 + }
269 +
270 + if (version.length > MAX_LENGTH) {
271 + return null
272 + }
273 +
274 + var r = options.loose ? re[t.LOOSE] : re[t.FULL]
275 + if (!r.test(version)) {
276 + return null
277 + }
278 +
279 + try {
280 + return new SemVer(version, options)
281 + } catch (er) {
282 + return null
283 + }
284 +}
285 +
286 +exports.valid = valid
287 +function valid (version, options) {
288 + var v = parse(version, options)
289 + return v ? v.version : null
290 +}
291 +
292 +exports.clean = clean
293 +function clean (version, options) {
294 + var s = parse(version.trim().replace(/^[=v]+/, ''), options)
295 + return s ? s.version : null
296 +}
297 +
298 +exports.SemVer = SemVer
299 +
300 +function SemVer (version, options) {
301 + if (!options || typeof options !== 'object') {
302 + options = {
303 + loose: !!options,
304 + includePrerelease: false
305 + }
306 + }
307 + if (version instanceof SemVer) {
308 + if (version.loose === options.loose) {
309 + return version
310 + } else {
311 + version = version.version
312 + }
313 + } else if (typeof version !== 'string') {
314 + throw new TypeError('Invalid Version: ' + version)
315 + }
316 +
317 + if (version.length > MAX_LENGTH) {
318 + throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters')
319 + }
320 +
321 + if (!(this instanceof SemVer)) {
322 + return new SemVer(version, options)
323 + }
324 +
325 + debug('SemVer', version, options)
326 + this.options = options
327 + this.loose = !!options.loose
328 +
329 + var m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL])
330 +
331 + if (!m) {
332 + throw new TypeError('Invalid Version: ' + version)
333 + }
334 +
335 + this.raw = version
336 +
337 + // these are actually numbers
338 + this.major = +m[1]
339 + this.minor = +m[2]
340 + this.patch = +m[3]
341 +
342 + if (this.major > MAX_SAFE_INTEGER || this.major < 0) {
343 + throw new TypeError('Invalid major version')
344 + }
345 +
346 + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) {
347 + throw new TypeError('Invalid minor version')
348 + }
349 +
350 + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) {
351 + throw new TypeError('Invalid patch version')
352 + }
353 +
354 + // numberify any prerelease numeric ids
355 + if (!m[4]) {
356 + this.prerelease = []
357 + } else {
358 + this.prerelease = m[4].split('.').map(function (id) {
359 + if (/^[0-9]+$/.test(id)) {
360 + var num = +id
361 + if (num >= 0 && num < MAX_SAFE_INTEGER) {
362 + return num
363 + }
364 + }
365 + return id
366 + })
367 + }
368 +
369 + this.build = m[5] ? m[5].split('.') : []
370 + this.format()
371 +}
372 +
373 +SemVer.prototype.format = function () {
374 + this.version = this.major + '.' + this.minor + '.' + this.patch
375 + if (this.prerelease.length) {
376 + this.version += '-' + this.prerelease.join('.')
377 + }
378 + return this.version
379 +}
380 +
381 +SemVer.prototype.toString = function () {
382 + return this.version
383 +}
384 +
385 +SemVer.prototype.compare = function (other) {
386 + debug('SemVer.compare', this.version, this.options, other)
387 + if (!(other instanceof SemVer)) {
388 + other = new SemVer(other, this.options)
389 + }
390 +
391 + return this.compareMain(other) || this.comparePre(other)
392 +}
393 +
394 +SemVer.prototype.compareMain = function (other) {
395 + if (!(other instanceof SemVer)) {
396 + other = new SemVer(other, this.options)
397 + }
398 +
399 + return compareIdentifiers(this.major, other.major) ||
400 + compareIdentifiers(this.minor, other.minor) ||
401 + compareIdentifiers(this.patch, other.patch)
402 +}
403 +
404 +SemVer.prototype.comparePre = function (other) {
405 + if (!(other instanceof SemVer)) {
406 + other = new SemVer(other, this.options)
407 + }
408 +
409 + // NOT having a prerelease is > having one
410 + if (this.prerelease.length && !other.prerelease.length) {
411 + return -1
412 + } else if (!this.prerelease.length && other.prerelease.length) {
413 + return 1
414 + } else if (!this.prerelease.length && !other.prerelease.length) {
415 + return 0
416 + }
417 +
418 + var i = 0
419 + do {
420 + var a = this.prerelease[i]
421 + var b = other.prerelease[i]
422 + debug('prerelease compare', i, a, b)
423 + if (a === undefined && b === undefined) {
424 + return 0
425 + } else if (b === undefined) {
426 + return 1
427 + } else if (a === undefined) {
428 + return -1
429 + } else if (a === b) {
430 + continue
431 + } else {
432 + return compareIdentifiers(a, b)
433 + }
434 + } while (++i)
435 +}
436 +
437 +SemVer.prototype.compareBuild = function (other) {
438 + if (!(other instanceof SemVer)) {
439 + other = new SemVer(other, this.options)
440 + }
441 +
442 + var i = 0
443 + do {
444 + var a = this.build[i]
445 + var b = other.build[i]
446 + debug('prerelease compare', i, a, b)
447 + if (a === undefined && b === undefined) {
448 + return 0
449 + } else if (b === undefined) {
450 + return 1
451 + } else if (a === undefined) {
452 + return -1
453 + } else if (a === b) {
454 + continue
455 + } else {
456 + return compareIdentifiers(a, b)
457 + }
458 + } while (++i)
459 +}
460 +
461 +// preminor will bump the version up to the next minor release, and immediately
462 +// down to pre-release. premajor and prepatch work the same way.
463 +SemVer.prototype.inc = function (release, identifier) {
464 + switch (release) {
465 + case 'premajor':
466 + this.prerelease.length = 0
467 + this.patch = 0
468 + this.minor = 0
469 + this.major++
470 + this.inc('pre', identifier)
471 + break
472 + case 'preminor':
473 + this.prerelease.length = 0
474 + this.patch = 0
475 + this.minor++
476 + this.inc('pre', identifier)
477 + break
478 + case 'prepatch':
479 + // If this is already a prerelease, it will bump to the next version
480 + // drop any prereleases that might already exist, since they are not
481 + // relevant at this point.
482 + this.prerelease.length = 0
483 + this.inc('patch', identifier)
484 + this.inc('pre', identifier)
485 + break
486 + // If the input is a non-prerelease version, this acts the same as
487 + // prepatch.
488 + case 'prerelease':
489 + if (this.prerelease.length === 0) {
490 + this.inc('patch', identifier)
491 + }
492 + this.inc('pre', identifier)
493 + break
494 +
495 + case 'major':
496 + // If this is a pre-major version, bump up to the same major version.
497 + // Otherwise increment major.
498 + // 1.0.0-5 bumps to 1.0.0
499 + // 1.1.0 bumps to 2.0.0
500 + if (this.minor !== 0 ||
501 + this.patch !== 0 ||
502 + this.prerelease.length === 0) {
503 + this.major++
504 + }
505 + this.minor = 0
506 + this.patch = 0
507 + this.prerelease = []
508 + break
509 + case 'minor':
510 + // If this is a pre-minor version, bump up to the same minor version.
511 + // Otherwise increment minor.
512 + // 1.2.0-5 bumps to 1.2.0
513 + // 1.2.1 bumps to 1.3.0
514 + if (this.patch !== 0 || this.prerelease.length === 0) {
515 + this.minor++
516 + }
517 + this.patch = 0
518 + this.prerelease = []
519 + break
520 + case 'patch':
521 + // If this is not a pre-release version, it will increment the patch.
522 + // If it is a pre-release it will bump up to the same patch version.
523 + // 1.2.0-5 patches to 1.2.0
524 + // 1.2.0 patches to 1.2.1
525 + if (this.prerelease.length === 0) {
526 + this.patch++
527 + }
528 + this.prerelease = []
529 + break
530 + // This probably shouldn't be used publicly.
531 + // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction.
532 + case 'pre':
533 + if (this.prerelease.length === 0) {
534 + this.prerelease = [0]
535 + } else {
536 + var i = this.prerelease.length
537 + while (--i >= 0) {
538 + if (typeof this.prerelease[i] === 'number') {
539 + this.prerelease[i]++
540 + i = -2
541 + }
542 + }
543 + if (i === -1) {
544 + // didn't increment anything
545 + this.prerelease.push(0)
546 + }
547 + }
548 + if (identifier) {
549 + // 1.2.0-beta.1 bumps to 1.2.0-beta.2,
550 + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0
551 + if (this.prerelease[0] === identifier) {
552 + if (isNaN(this.prerelease[1])) {
553 + this.prerelease = [identifier, 0]
554 + }
555 + } else {
556 + this.prerelease = [identifier, 0]
557 + }
558 + }
559 + break
560 +
561 + default:
562 + throw new Error('invalid increment argument: ' + release)
563 + }
564 + this.format()
565 + this.raw = this.version
566 + return this
567 +}
568 +
569 +exports.inc = inc
570 +function inc (version, release, loose, identifier) {
571 + if (typeof (loose) === 'string') {
572 + identifier = loose
573 + loose = undefined
574 + }
575 +
576 + try {
577 + return new SemVer(version, loose).inc(release, identifier).version
578 + } catch (er) {
579 + return null
580 + }
581 +}
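To make the increment levels above concrete, here are a few calls through the exported `inc` wrapper; the versions and the `beta` identifier are arbitrary examples:

``` js
var semver = require('semver')

console.log(semver.inc('1.2.3', 'patch'))             // => '1.2.4'
console.log(semver.inc('1.2.3', 'preminor', 'beta'))  // => '1.3.0-beta.0'
console.log(semver.inc('1.3.0-beta.0', 'prerelease')) // => '1.3.0-beta.1'
console.log(semver.inc('1.3.0-beta.1', 'minor'))      // => '1.3.0'
console.log(semver.inc('garbage', 'patch'))           // => null (invalid input)
```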
582 +
583 +exports.diff = diff
584 +function diff (version1, version2) {
585 + if (eq(version1, version2)) {
586 + return null
587 + } else {
588 + var v1 = parse(version1)
589 + var v2 = parse(version2)
590 + var prefix = ''
591 + if (v1.prerelease.length || v2.prerelease.length) {
592 + prefix = 'pre'
593 + var defaultResult = 'prerelease'
594 + }
595 + for (var key in v1) {
596 + if (key === 'major' || key === 'minor' || key === 'patch') {
597 + if (v1[key] !== v2[key]) {
598 + return prefix + key
599 + }
600 + }
601 + }
602 + return defaultResult // may be undefined
603 + }
604 +}
605 +
606 +exports.compareIdentifiers = compareIdentifiers
607 +
608 +var numeric = /^[0-9]+$/
609 +function compareIdentifiers (a, b) {
610 + var anum = numeric.test(a)
611 + var bnum = numeric.test(b)
612 +
613 + if (anum && bnum) {
614 + a = +a
615 + b = +b
616 + }
617 +
618 + return a === b ? 0
619 + : (anum && !bnum) ? -1
620 + : (bnum && !anum) ? 1
621 + : a < b ? -1
622 + : 1
623 +}
624 +
625 +exports.rcompareIdentifiers = rcompareIdentifiers
626 +function rcompareIdentifiers (a, b) {
627 + return compareIdentifiers(b, a)
628 +}
629 +
630 +exports.major = major
631 +function major (a, loose) {
632 + return new SemVer(a, loose).major
633 +}
634 +
635 +exports.minor = minor
636 +function minor (a, loose) {
637 + return new SemVer(a, loose).minor
638 +}
639 +
640 +exports.patch = patch
641 +function patch (a, loose) {
642 + return new SemVer(a, loose).patch
643 +}
644 +
645 +exports.compare = compare
646 +function compare (a, b, loose) {
647 + return new SemVer(a, loose).compare(new SemVer(b, loose))
648 +}
649 +
650 +exports.compareLoose = compareLoose
651 +function compareLoose (a, b) {
652 + return compare(a, b, true)
653 +}
654 +
655 +exports.compareBuild = compareBuild
656 +function compareBuild (a, b, loose) {
657 + var versionA = new SemVer(a, loose)
658 + var versionB = new SemVer(b, loose)
659 + return versionA.compare(versionB) || versionA.compareBuild(versionB)
660 +}
661 +
662 +exports.rcompare = rcompare
663 +function rcompare (a, b, loose) {
664 + return compare(b, a, loose)
665 +}
666 +
667 +exports.sort = sort
668 +function sort (list, loose) {
669 + return list.sort(function (a, b) {
670 + return exports.compareBuild(a, b, loose)
671 + })
672 +}
673 +
674 +exports.rsort = rsort
675 +function rsort (list, loose) {
676 + return list.sort(function (a, b) {
677 + return exports.compareBuild(b, a, loose)
678 + })
679 +}
680 +
681 +exports.gt = gt
682 +function gt (a, b, loose) {
683 + return compare(a, b, loose) > 0
684 +}
685 +
686 +exports.lt = lt
687 +function lt (a, b, loose) {
688 + return compare(a, b, loose) < 0
689 +}
690 +
691 +exports.eq = eq
692 +function eq (a, b, loose) {
693 + return compare(a, b, loose) === 0
694 +}
695 +
696 +exports.neq = neq
697 +function neq (a, b, loose) {
698 + return compare(a, b, loose) !== 0
699 +}
700 +
701 +exports.gte = gte
702 +function gte (a, b, loose) {
703 + return compare(a, b, loose) >= 0
704 +}
705 +
706 +exports.lte = lte
707 +function lte (a, b, loose) {
708 + return compare(a, b, loose) <= 0
709 +}
710 +
711 +exports.cmp = cmp
712 +function cmp (a, op, b, loose) {
713 + switch (op) {
714 + case '===':
715 + if (typeof a === 'object')
716 + a = a.version
717 + if (typeof b === 'object')
718 + b = b.version
719 + return a === b
720 +
721 + case '!==':
722 + if (typeof a === 'object')
723 + a = a.version
724 + if (typeof b === 'object')
725 + b = b.version
726 + return a !== b
727 +
728 + case '':
729 + case '=':
730 + case '==':
731 + return eq(a, b, loose)
732 +
733 + case '!=':
734 + return neq(a, b, loose)
735 +
736 + case '>':
737 + return gt(a, b, loose)
738 +
739 + case '>=':
740 + return gte(a, b, loose)
741 +
742 + case '<':
743 + return lt(a, b, loose)
744 +
745 + case '<=':
746 + return lte(a, b, loose)
747 +
748 + default:
749 + throw new TypeError('Invalid operator: ' + op)
750 + }
751 +}
752 +
753 +exports.Comparator = Comparator
754 +function Comparator (comp, options) {
755 + if (!options || typeof options !== 'object') {
756 + options = {
757 + loose: !!options,
758 + includePrerelease: false
759 + }
760 + }
761 +
762 + if (comp instanceof Comparator) {
763 + if (comp.loose === !!options.loose) {
764 + return comp
765 + } else {
766 + comp = comp.value
767 + }
768 + }
769 +
770 + if (!(this instanceof Comparator)) {
771 + return new Comparator(comp, options)
772 + }
773 +
774 + debug('comparator', comp, options)
775 + this.options = options
776 + this.loose = !!options.loose
777 + this.parse(comp)
778 +
779 + if (this.semver === ANY) {
780 + this.value = ''
781 + } else {
782 + this.value = this.operator + this.semver.version
783 + }
784 +
785 + debug('comp', this)
786 +}
787 +
788 +var ANY = {}
789 +Comparator.prototype.parse = function (comp) {
790 + var r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
791 + var m = comp.match(r)
792 +
793 + if (!m) {
794 + throw new TypeError('Invalid comparator: ' + comp)
795 + }
796 +
797 + this.operator = m[1] !== undefined ? m[1] : ''
798 + if (this.operator === '=') {
799 + this.operator = ''
800 + }
801 +
802 + // if it literally is just '>' or '' then allow anything.
803 + if (!m[2]) {
804 + this.semver = ANY
805 + } else {
806 + this.semver = new SemVer(m[2], this.options.loose)
807 + }
808 +}
809 +
810 +Comparator.prototype.toString = function () {
811 + return this.value
812 +}
813 +
814 +Comparator.prototype.test = function (version) {
815 + debug('Comparator.test', version, this.options.loose)
816 +
817 + if (this.semver === ANY || version === ANY) {
818 + return true
819 + }
820 +
821 + if (typeof version === 'string') {
822 + try {
823 + version = new SemVer(version, this.options)
824 + } catch (er) {
825 + return false
826 + }
827 + }
828 +
829 + return cmp(version, this.operator, this.semver, this.options)
830 +}
831 +
832 +Comparator.prototype.intersects = function (comp, options) {
833 + if (!(comp instanceof Comparator)) {
834 + throw new TypeError('a Comparator is required')
835 + }
836 +
837 + if (!options || typeof options !== 'object') {
838 + options = {
839 + loose: !!options,
840 + includePrerelease: false
841 + }
842 + }
843 +
844 + var rangeTmp
845 +
846 + if (this.operator === '') {
847 + if (this.value === '') {
848 + return true
849 + }
850 + rangeTmp = new Range(comp.value, options)
851 + return satisfies(this.value, rangeTmp, options)
852 + } else if (comp.operator === '') {
853 + if (comp.value === '') {
854 + return true
855 + }
856 + rangeTmp = new Range(this.value, options)
857 + return satisfies(comp.semver, rangeTmp, options)
858 + }
859 +
860 + var sameDirectionIncreasing =
861 + (this.operator === '>=' || this.operator === '>') &&
862 + (comp.operator === '>=' || comp.operator === '>')
863 + var sameDirectionDecreasing =
864 + (this.operator === '<=' || this.operator === '<') &&
865 + (comp.operator === '<=' || comp.operator === '<')
866 + var sameSemVer = this.semver.version === comp.semver.version
867 + var differentDirectionsInclusive =
868 + (this.operator === '>=' || this.operator === '<=') &&
869 + (comp.operator === '>=' || comp.operator === '<=')
870 + var oppositeDirectionsLessThan =
871 + cmp(this.semver, '<', comp.semver, options) &&
872 + ((this.operator === '>=' || this.operator === '>') &&
873 + (comp.operator === '<=' || comp.operator === '<'))
874 + var oppositeDirectionsGreaterThan =
875 + cmp(this.semver, '>', comp.semver, options) &&
876 + ((this.operator === '<=' || this.operator === '<') &&
877 + (comp.operator === '>=' || comp.operator === '>'))
878 +
879 + return sameDirectionIncreasing || sameDirectionDecreasing ||
880 + (sameSemVer && differentDirectionsInclusive) ||
881 + oppositeDirectionsLessThan || oppositeDirectionsGreaterThan
882 +}
883 +
884 +exports.Range = Range
885 +function Range (range, options) {
886 + if (!options || typeof options !== 'object') {
887 + options = {
888 + loose: !!options,
889 + includePrerelease: false
890 + }
891 + }
892 +
893 + if (range instanceof Range) {
894 + if (range.loose === !!options.loose &&
895 + range.includePrerelease === !!options.includePrerelease) {
896 + return range
897 + } else {
898 + return new Range(range.raw, options)
899 + }
900 + }
901 +
902 + if (range instanceof Comparator) {
903 + return new Range(range.value, options)
904 + }
905 +
906 + if (!(this instanceof Range)) {
907 + return new Range(range, options)
908 + }
909 +
910 + this.options = options
911 + this.loose = !!options.loose
912 + this.includePrerelease = !!options.includePrerelease
913 +
914 + // First, split based on boolean or ||
915 + this.raw = range
916 + this.set = range.split(/\s*\|\|\s*/).map(function (range) {
917 + return this.parseRange(range.trim())
918 + }, this).filter(function (c) {
919 + // throw out any that are not relevant for whatever reason
920 + return c.length
921 + })
922 +
923 + if (!this.set.length) {
924 + throw new TypeError('Invalid SemVer Range: ' + range)
925 + }
926 +
927 + this.format()
928 +}
929 +
930 +Range.prototype.format = function () {
931 + this.range = this.set.map(function (comps) {
932 + return comps.join(' ').trim()
933 + }).join('||').trim()
934 + return this.range
935 +}
936 +
937 +Range.prototype.toString = function () {
938 + return this.range
939 +}
940 +
941 +Range.prototype.parseRange = function (range) {
942 + var loose = this.options.loose
943 + range = range.trim()
944 + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
945 + var hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE]
946 + range = range.replace(hr, hyphenReplace)
947 + debug('hyphen replace', range)
948 + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
949 + range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace)
950 + debug('comparator trim', range, re[t.COMPARATORTRIM])
951 +
952 + // `~ 1.2.3` => `~1.2.3`
953 + range = range.replace(re[t.TILDETRIM], tildeTrimReplace)
954 +
955 + // `^ 1.2.3` => `^1.2.3`
956 + range = range.replace(re[t.CARETTRIM], caretTrimReplace)
957 +
958 + // normalize spaces
959 + range = range.split(/\s+/).join(' ')
960 +
961 + // At this point, the range is completely trimmed and
962 + // ready to be split into comparators.
963 +
964 + var compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
965 + var set = range.split(' ').map(function (comp) {
966 + return parseComparator(comp, this.options)
967 + }, this).join(' ').split(/\s+/)
968 + if (this.options.loose) {
969 + // in loose mode, throw out any that are not valid comparators
970 + set = set.filter(function (comp) {
971 + return !!comp.match(compRe)
972 + })
973 + }
974 + set = set.map(function (comp) {
975 + return new Comparator(comp, this.options)
976 + }, this)
977 +
978 + return set
979 +}
980 +
981 +Range.prototype.intersects = function (range, options) {
982 + if (!(range instanceof Range)) {
983 + throw new TypeError('a Range is required')
984 + }
985 +
986 + return this.set.some(function (thisComparators) {
987 + return (
988 + isSatisfiable(thisComparators, options) &&
989 + range.set.some(function (rangeComparators) {
990 + return (
991 + isSatisfiable(rangeComparators, options) &&
992 + thisComparators.every(function (thisComparator) {
993 + return rangeComparators.every(function (rangeComparator) {
994 + return thisComparator.intersects(rangeComparator, options)
995 + })
996 + })
997 + )
998 + })
999 + )
1000 + })
1001 +}
1002 +
1003 +// take a set of comparators and determine whether there
1004 +// exists a version which can satisfy it
1005 +function isSatisfiable (comparators, options) {
1006 + var result = true
1007 + var remainingComparators = comparators.slice()
1008 + var testComparator = remainingComparators.pop()
1009 +
1010 + while (result && remainingComparators.length) {
1011 + result = remainingComparators.every(function (otherComparator) {
1012 + return testComparator.intersects(otherComparator, options)
1013 + })
1014 +
1015 + testComparator = remainingComparators.pop()
1016 + }
1017 +
1018 + return result
1019 +}
1020 +
1021 +// Mostly just for testing and legacy API reasons
1022 +exports.toComparators = toComparators
1023 +function toComparators (range, options) {
1024 + return new Range(range, options).set.map(function (comp) {
1025 + return comp.map(function (c) {
1026 + return c.value
1027 + }).join(' ').trim().split(' ')
1028 + })
1029 +}
1030 +
1031 +// comprised of xranges, tildes, stars, and gtlt's at this point.
1032 +// already replaced the hyphen ranges
1033 +// turn into a set of JUST comparators.
1034 +function parseComparator (comp, options) {
1035 + debug('comp', comp, options)
1036 + comp = replaceCarets(comp, options)
1037 + debug('caret', comp)
1038 + comp = replaceTildes(comp, options)
1039 + debug('tildes', comp)
1040 + comp = replaceXRanges(comp, options)
1041 + debug('xrange', comp)
1042 + comp = replaceStars(comp, options)
1043 + debug('stars', comp)
1044 + return comp
1045 +}
1046 +
1047 +function isX (id) {
1048 + return !id || id.toLowerCase() === 'x' || id === '*'
1049 +}
1050 +
1051 +// ~, ~> --> * (any, kinda silly)
1052 +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0
1053 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0
1054 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0
1055 +// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0
1056 +// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0
1057 +function replaceTildes (comp, options) {
1058 + return comp.trim().split(/\s+/).map(function (comp) {
1059 + return replaceTilde(comp, options)
1060 + }).join(' ')
1061 +}
1062 +
1063 +function replaceTilde (comp, options) {
1064 + var r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE]
1065 + return comp.replace(r, function (_, M, m, p, pr) {
1066 + debug('tilde', comp, _, M, m, p, pr)
1067 + var ret
1068 +
1069 + if (isX(M)) {
1070 + ret = ''
1071 + } else if (isX(m)) {
1072 + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
1073 + } else if (isX(p)) {
1074 + // ~1.2 == >=1.2.0 <1.3.0
1075 + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
1076 + } else if (pr) {
1077 + debug('replaceTilde pr', pr)
1078 + ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
1079 + ' <' + M + '.' + (+m + 1) + '.0'
1080 + } else {
1081 + // ~1.2.3 == >=1.2.3 <1.3.0
1082 + ret = '>=' + M + '.' + m + '.' + p +
1083 + ' <' + M + '.' + (+m + 1) + '.0'
1084 + }
1085 +
1086 + debug('tilde return', ret)
1087 + return ret
1088 + })
1089 +}
1090 +
1091 +// ^ --> * (any, kinda silly)
1092 +// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0
1093 +// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0
1094 +// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0
1095 +// ^1.2.3 --> >=1.2.3 <2.0.0
1096 +// ^1.2.0 --> >=1.2.0 <2.0.0
1097 +function replaceCarets (comp, options) {
1098 + return comp.trim().split(/\s+/).map(function (comp) {
1099 + return replaceCaret(comp, options)
1100 + }).join(' ')
1101 +}
1102 +
1103 +function replaceCaret (comp, options) {
1104 + debug('caret', comp, options)
1105 + var r = options.loose ? re[t.CARETLOOSE] : re[t.CARET]
1106 + return comp.replace(r, function (_, M, m, p, pr) {
1107 + debug('caret', comp, _, M, m, p, pr)
1108 + var ret
1109 +
1110 + if (isX(M)) {
1111 + ret = ''
1112 + } else if (isX(m)) {
1113 + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
1114 + } else if (isX(p)) {
1115 + if (M === '0') {
1116 + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
1117 + } else {
1118 + ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0'
1119 + }
1120 + } else if (pr) {
1121 + debug('replaceCaret pr', pr)
1122 + if (M === '0') {
1123 + if (m === '0') {
1124 + ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
1125 + ' <' + M + '.' + m + '.' + (+p + 1)
1126 + } else {
1127 + ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
1128 + ' <' + M + '.' + (+m + 1) + '.0'
1129 + }
1130 + } else {
1131 + ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
1132 + ' <' + (+M + 1) + '.0.0'
1133 + }
1134 + } else {
1135 + debug('no pr')
1136 + if (M === '0') {
1137 + if (m === '0') {
1138 + ret = '>=' + M + '.' + m + '.' + p +
1139 + ' <' + M + '.' + m + '.' + (+p + 1)
1140 + } else {
1141 + ret = '>=' + M + '.' + m + '.' + p +
1142 + ' <' + M + '.' + (+m + 1) + '.0'
1143 + }
1144 + } else {
1145 + ret = '>=' + M + '.' + m + '.' + p +
1146 + ' <' + (+M + 1) + '.0.0'
1147 + }
1148 + }
1149 +
1150 + debug('caret return', ret)
1151 + return ret
1152 + })
1153 +}
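The tilde and caret tables in the comments above map directly onto comparator pairs, which `validRange` (defined later in this file) makes easy to spot-check; a few examples, assuming the module is required as `semver`:

``` js
var semver = require('semver')

console.log(semver.validRange('~1.2.3')) // => '>=1.2.3 <1.3.0'  patch-level flexibility
console.log(semver.validRange('^1.2.3')) // => '>=1.2.3 <2.0.0'  compatible within major 1
console.log(semver.validRange('^0.2.3')) // => '>=0.2.3 <0.3.0'  0.x majors pin the minor
console.log(semver.validRange('^0.0.3')) // => '>=0.0.3 <0.0.4'  0.0.x pins the patch
```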
1154 +
1155 +function replaceXRanges (comp, options) {
1156 + debug('replaceXRanges', comp, options)
1157 + return comp.split(/\s+/).map(function (comp) {
1158 + return replaceXRange(comp, options)
1159 + }).join(' ')
1160 +}
1161 +
1162 +function replaceXRange (comp, options) {
1163 + comp = comp.trim()
1164 + var r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE]
1165 + return comp.replace(r, function (ret, gtlt, M, m, p, pr) {
1166 + debug('xRange', comp, ret, gtlt, M, m, p, pr)
1167 + var xM = isX(M)
1168 + var xm = xM || isX(m)
1169 + var xp = xm || isX(p)
1170 + var anyX = xp
1171 +
1172 + if (gtlt === '=' && anyX) {
1173 + gtlt = ''
1174 + }
1175 +
1176 + // if we're including prereleases in the match, then we need
1177 + // to fix this to -0, the lowest possible prerelease value
1178 + pr = options.includePrerelease ? '-0' : ''
1179 +
1180 + if (xM) {
1181 + if (gtlt === '>' || gtlt === '<') {
1182 + // nothing is allowed
1183 + ret = '<0.0.0-0'
1184 + } else {
1185 + // nothing is forbidden
1186 + ret = '*'
1187 + }
1188 + } else if (gtlt && anyX) {
1189 + // we know patch is an x, because we have any x at all.
1190 + // replace X with 0
1191 + if (xm) {
1192 + m = 0
1193 + }
1194 + p = 0
1195 +
1196 + if (gtlt === '>') {
1197 + // >1 => >=2.0.0
1198 + // >1.2 => >=1.3.0
1199 + // >1.2.3 => >= 1.2.4
1200 + gtlt = '>='
1201 + if (xm) {
1202 + M = +M + 1
1203 + m = 0
1204 + p = 0
1205 + } else {
1206 + m = +m + 1
1207 + p = 0
1208 + }
1209 + } else if (gtlt === '<=') {
1210 + // <=0.7.x is actually <0.8.0, since any 0.7.x should
1211 + // pass. Similarly, <=7.x is actually <8.0.0, etc.
1212 + gtlt = '<'
1213 + if (xm) {
1214 + M = +M + 1
1215 + } else {
1216 + m = +m + 1
1217 + }
1218 + }
1219 +
1220 + ret = gtlt + M + '.' + m + '.' + p + pr
1221 + } else if (xm) {
1222 + ret = '>=' + M + '.0.0' + pr + ' <' + (+M + 1) + '.0.0' + pr
1223 + } else if (xp) {
1224 + ret = '>=' + M + '.' + m + '.0' + pr +
1225 + ' <' + M + '.' + (+m + 1) + '.0' + pr
1226 + }
1227 +
1228 + debug('xRange return', ret)
1229 +
1230 + return ret
1231 + })
1232 +}
1233 +
1234 +// Because * is AND-ed with everything else in the comparator,
1235 +// and '' means "any version", just remove the *s entirely.
1236 +function replaceStars (comp, options) {
1237 + debug('replaceStars', comp, options)
1238 + // Looseness is ignored here. star is always as loose as it gets!
1239 + return comp.trim().replace(re[t.STAR], '')
1240 +}
1241 +
1242 +// This function is passed to string.replace(re[t.HYPHENRANGE])
1243 +// M, m, patch, prerelease, build
1244 +// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5
1245 +// 1.2.3 - 3.4 => >=1.2.3 <3.5.0 Any 3.4.x will do
1246 +// 1.2 - 3.4 => >=1.2.0 <3.5.0
1247 +function hyphenReplace ($0,
1248 + from, fM, fm, fp, fpr, fb,
1249 + to, tM, tm, tp, tpr, tb) {
1250 + if (isX(fM)) {
1251 + from = ''
1252 + } else if (isX(fm)) {
1253 + from = '>=' + fM + '.0.0'
1254 + } else if (isX(fp)) {
1255 + from = '>=' + fM + '.' + fm + '.0'
1256 + } else {
1257 + from = '>=' + from
1258 + }
1259 +
1260 + if (isX(tM)) {
1261 + to = ''
1262 + } else if (isX(tm)) {
1263 + to = '<' + (+tM + 1) + '.0.0'
1264 + } else if (isX(tp)) {
1265 + to = '<' + tM + '.' + (+tm + 1) + '.0'
1266 + } else if (tpr) {
1267 + to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr
1268 + } else {
1269 + to = '<=' + to
1270 + }
1271 +
1272 + return (from + ' ' + to).trim()
1273 +}
1274 +
1275 +// if ANY of the sets match ALL of its comparators, then pass
1276 +Range.prototype.test = function (version) {
1277 + if (!version) {
1278 + return false
1279 + }
1280 +
1281 + if (typeof version === 'string') {
1282 + try {
1283 + version = new SemVer(version, this.options)
1284 + } catch (er) {
1285 + return false
1286 + }
1287 + }
1288 +
1289 + for (var i = 0; i < this.set.length; i++) {
1290 + if (testSet(this.set[i], version, this.options)) {
1291 + return true
1292 + }
1293 + }
1294 + return false
1295 +}
1296 +
1297 +function testSet (set, version, options) {
1298 + for (var i = 0; i < set.length; i++) {
1299 + if (!set[i].test(version)) {
1300 + return false
1301 + }
1302 + }
1303 +
1304 + if (version.prerelease.length && !options.includePrerelease) {
1305 + // Find the set of versions that are allowed to have prereleases
1306 + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0
1307 + // That should allow `1.2.3-pr.2` to pass.
1308 + // However, `1.2.4-alpha.notready` should NOT be allowed,
1309 + // even though it's within the range set by the comparators.
1310 + for (i = 0; i < set.length; i++) {
1311 + debug(set[i].semver)
1312 + if (set[i].semver === ANY) {
1313 + continue
1314 + }
1315 +
1316 + if (set[i].semver.prerelease.length > 0) {
1317 + var allowed = set[i].semver
1318 + if (allowed.major === version.major &&
1319 + allowed.minor === version.minor &&
1320 + allowed.patch === version.patch) {
1321 + return true
1322 + }
1323 + }
1324 + }
1325 +
1326 + // Version has a -pre, but it's not one of the ones we like.
1327 + return false
1328 + }
1329 +
1330 + return true
1331 +}
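The prerelease gate in `testSet` above is why a prerelease version only matches a range whose comparators include a prerelease of the very same `major.minor.patch`, unless `includePrerelease` is set. A short check of the behaviour described in the comments, using the same example versions:

``` js
var semver = require('semver')

// ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0
console.log(semver.satisfies('1.2.3-pr.2', '^1.2.3-pr.1'))           // => true
console.log(semver.satisfies('1.2.4-alpha.notready', '^1.2.3-pr.1')) // => false
console.log(semver.satisfies('1.2.4-alpha.notready', '^1.2.3-pr.1',
  { includePrerelease: true }))                                      // => true
```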
1332 +
1333 +exports.satisfies = satisfies
1334 +function satisfies (version, range, options) {
1335 + try {
1336 + range = new Range(range, options)
1337 + } catch (er) {
1338 + return false
1339 + }
1340 + return range.test(version)
1341 +}
1342 +
1343 +exports.maxSatisfying = maxSatisfying
1344 +function maxSatisfying (versions, range, options) {
1345 + var max = null
1346 + var maxSV = null
1347 + try {
1348 + var rangeObj = new Range(range, options)
1349 + } catch (er) {
1350 + return null
1351 + }
1352 + versions.forEach(function (v) {
1353 + if (rangeObj.test(v)) {
1354 + // satisfies(v, range, options)
1355 + if (!max || maxSV.compare(v) === -1) {
1356 + // compare(max, v, true)
1357 + max = v
1358 + maxSV = new SemVer(max, options)
1359 + }
1360 + }
1361 + })
1362 + return max
1363 +}
1364 +
1365 +exports.minSatisfying = minSatisfying
1366 +function minSatisfying (versions, range, options) {
1367 + var min = null
1368 + var minSV = null
1369 + try {
1370 + var rangeObj = new Range(range, options)
1371 + } catch (er) {
1372 + return null
1373 + }
1374 + versions.forEach(function (v) {
1375 + if (rangeObj.test(v)) {
1376 + // satisfies(v, range, options)
1377 + if (!min || minSV.compare(v) === 1) {
1378 + // compare(min, v, true)
1379 + min = v
1380 + minSV = new SemVer(min, options)
1381 + }
1382 + }
1383 + })
1384 + return min
1385 +}
1386 +
1387 +exports.minVersion = minVersion
1388 +function minVersion (range, loose) {
1389 + range = new Range(range, loose)
1390 +
1391 + var minver = new SemVer('0.0.0')
1392 + if (range.test(minver)) {
1393 + return minver
1394 + }
1395 +
1396 + minver = new SemVer('0.0.0-0')
1397 + if (range.test(minver)) {
1398 + return minver
1399 + }
1400 +
1401 + minver = null
1402 + for (var i = 0; i < range.set.length; ++i) {
1403 + var comparators = range.set[i]
1404 +
1405 + comparators.forEach(function (comparator) {
1406 + // Clone to avoid manipulating the comparator's semver object.
1407 + var compver = new SemVer(comparator.semver.version)
1408 + switch (comparator.operator) {
1409 + case '>':
1410 + if (compver.prerelease.length === 0) {
1411 + compver.patch++
1412 + } else {
1413 + compver.prerelease.push(0)
1414 + }
1415 + compver.raw = compver.format()
1416 + /* fallthrough */
1417 + case '':
1418 + case '>=':
1419 + if (!minver || gt(minver, compver)) {
1420 + minver = compver
1421 + }
1422 + break
1423 + case '<':
1424 + case '<=':
1425 + /* Ignore maximum versions */
1426 + break
1427 + /* istanbul ignore next */
1428 + default:
1429 + throw new Error('Unexpected operation: ' + comparator.operator)
1430 + }
1431 + })
1432 + }
1433 +
1434 + if (minver && range.test(minver)) {
1435 + return minver
1436 + }
1437 +
1438 + return null
1439 +}
1440 +
1441 +exports.validRange = validRange
1442 +function validRange (range, options) {
1443 + try {
1444 + // Return '*' instead of '' so that truthiness works.
1445 + // This will throw if it's invalid anyway
1446 + return new Range(range, options).range || '*'
1447 + } catch (er) {
1448 + return null
1449 + }
1450 +}
1451 +
1452 +// Determine if version is less than all the versions possible in the range
1453 +exports.ltr = ltr
1454 +function ltr (version, range, options) {
1455 + return outside(version, range, '<', options)
1456 +}
1457 +
1458 +// Determine if version is greater than all the versions possible in the range.
1459 +exports.gtr = gtr
1460 +function gtr (version, range, options) {
1461 + return outside(version, range, '>', options)
1462 +}
1463 +
1464 +exports.outside = outside
1465 +function outside (version, range, hilo, options) {
1466 + version = new SemVer(version, options)
1467 + range = new Range(range, options)
1468 +
1469 + var gtfn, ltefn, ltfn, comp, ecomp
1470 + switch (hilo) {
1471 + case '>':
1472 + gtfn = gt
1473 + ltefn = lte
1474 + ltfn = lt
1475 + comp = '>'
1476 + ecomp = '>='
1477 + break
1478 + case '<':
1479 + gtfn = lt
1480 + ltefn = gte
1481 + ltfn = gt
1482 + comp = '<'
1483 + ecomp = '<='
1484 + break
1485 + default:
1486 + throw new TypeError('Must provide a hilo val of "<" or ">"')
1487 + }
1488 +
1489 +  // If it satisfies the range it is not outside
1490 + if (satisfies(version, range, options)) {
1491 + return false
1492 + }
1493 +
1494 + // From now on, variable terms are as if we're in "gtr" mode.
1495 + // but note that everything is flipped for the "ltr" function.
1496 +
1497 + for (var i = 0; i < range.set.length; ++i) {
1498 + var comparators = range.set[i]
1499 +
1500 + var high = null
1501 + var low = null
1502 +
1503 + comparators.forEach(function (comparator) {
1504 + if (comparator.semver === ANY) {
1505 + comparator = new Comparator('>=0.0.0')
1506 + }
1507 + high = high || comparator
1508 + low = low || comparator
1509 + if (gtfn(comparator.semver, high.semver, options)) {
1510 + high = comparator
1511 + } else if (ltfn(comparator.semver, low.semver, options)) {
1512 + low = comparator
1513 + }
1514 + })
1515 +
1516 +    // If the edge version comparator has an operator then our version
1517 + // isn't outside it
1518 + if (high.operator === comp || high.operator === ecomp) {
1519 + return false
1520 + }
1521 +
1522 + // If the lowest version comparator has an operator and our version
1523 + // is less than it then it isn't higher than the range
1524 + if ((!low.operator || low.operator === comp) &&
1525 + ltefn(version, low.semver)) {
1526 + return false
1527 + } else if (low.operator === ecomp && ltfn(version, low.semver)) {
1528 + return false
1529 + }
1530 + }
1531 + return true
1532 +}
1533 +
1534 +exports.prerelease = prerelease
1535 +function prerelease (version, options) {
1536 + var parsed = parse(version, options)
1537 + return (parsed && parsed.prerelease.length) ? parsed.prerelease : null
1538 +}
1539 +
1540 +exports.intersects = intersects
1541 +function intersects (r1, r2, options) {
1542 + r1 = new Range(r1, options)
1543 + r2 = new Range(r2, options)
1544 + return r1.intersects(r2)
1545 +}
1546 +
1547 +exports.coerce = coerce
1548 +function coerce (version, options) {
1549 + if (version instanceof SemVer) {
1550 + return version
1551 + }
1552 +
1553 + if (typeof version === 'number') {
1554 + version = String(version)
1555 + }
1556 +
1557 + if (typeof version !== 'string') {
1558 + return null
1559 + }
1560 +
1561 + options = options || {}
1562 +
1563 + var match = null
1564 + if (!options.rtl) {
1565 + match = version.match(re[t.COERCE])
1566 + } else {
1567 + // Find the right-most coercible string that does not share
1568 + // a terminus with a more left-ward coercible string.
1569 + // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4'
1570 + //
1571 + // Walk through the string checking with a /g regexp
1572 + // Manually set the index so as to pick up overlapping matches.
1573 + // Stop when we get a match that ends at the string end, since no
1574 + // coercible string can be more right-ward without the same terminus.
1575 + var next
1576 + while ((next = re[t.COERCERTL].exec(version)) &&
1577 + (!match || match.index + match[0].length !== version.length)
1578 + ) {
1579 + if (!match ||
1580 + next.index + next[0].length !== match.index + match[0].length) {
1581 + match = next
1582 + }
1583 + re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
1584 + }
1585 + // leave it in a clean state
1586 + re[t.COERCERTL].lastIndex = -1
1587 + }
1588 +
1589 + if (match === null) {
1590 + return null
1591 + }
1592 +
1593 + return parse(match[2] +
1594 + '.' + (match[3] || '0') +
1595 + '.' + (match[4] || '0'), options)
1596 +}
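`coerce` above is what backs the CLI's `--coerce`, `--rtl` and `--ltr` flags: left-to-right keeps the left-most version-like run, right-to-left keeps the right-most one. A couple of small checks matching the comment inside `coerce`, assuming the module is required as `semver`:

``` js
var semver = require('semver')

console.log(semver.coerce('v2').version)                     // => '2.0.0'
console.log(semver.coerce('1.2.3.4').version)                // => '1.2.3' (left to right)
console.log(semver.coerce('1.2.3.4', { rtl: true }).version) // => '2.3.4' (right to left)
console.log(semver.coerce('no version here'))                // => null
```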
node_modules/stream-to-pull-stream/.travis.ymlView
@@ -1,0 +1,4 @@
1 +language: node_js
2 +node_js:
3 + - 0.6
4 + - 0.8
node_modules/stream-to-pull-stream/LICENSEView
@@ -1,0 +1,22 @@
1 +Copyright (c) 2013 Dominic Tarr
2 +
3 +Permission is hereby granted, free of charge,
4 +to any person obtaining a copy of this software and
5 +associated documentation files (the "Software"), to
6 +deal in the Software without restriction, including
7 +without limitation the rights to use, copy, modify,
8 +merge, publish, distribute, sublicense, and/or sell
9 +copies of the Software, and to permit persons to whom
10 +the Software is furnished to do so,
11 +subject to the following conditions:
12 +
13 +The above copyright notice and this permission notice
14 +shall be included in all copies or substantial portions of the Software.
15 +
16 +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
17 +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
18 +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
19 +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
20 +ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
node_modules/stream-to-pull-stream/README.mdView
@@ -1,0 +1,27 @@
1 +# stream-to-pull-stream
2 +
3 +Convert a classic-stream or a new-stream into a
4 +[pull-stream](https://github.com/dominictarr/pull-stream)
5 +
6 +## example
7 +
8 +``` js
9 +var toPull = require('stream-to-pull-stream')
10 +var pull = require('pull-stream')
11 +
12 +pull(
13 + toPull.source(fs.createReadStream(__filename)),
14 + pull.map(function (e) { return e.toString().toUpperCase() }),
15 + toPull.sink(process.stdout, function (err) {
16 + if(err) throw err
17 + console.log('done')
18 + })
19 +)
20 +```
21 +
22 +If the node stream is a duplex (e.g. net, ws) then use `toPull.duplex(stream, cb?)`.
23 +`duplex` takes an optional callback in the same way that `sink` does.
24 +
25 +## License
26 +
27 +MIT
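For the duplex case mentioned in this README, `toPull.duplex` returns a `{ source, sink }` pair, so both directions of e.g. a TCP socket can sit in one pull pipeline. A minimal sketch (the upper-casing echo server and the port number are invented for illustration):

``` js
var net = require('net')
var pull = require('pull-stream')
var toPull = require('stream-to-pull-stream')

net.createServer(function (socket) {
  var stream = toPull.duplex(socket, function (err) {
    if (err) console.error('socket error:', err)
  })
  // echo every chunk back to the client, upper-cased
  pull(
    stream.source,
    pull.map(function (buf) { return buf.toString().toUpperCase() }),
    stream.sink
  )
}).listen(9999)
```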
node_modules/stream-to-pull-stream/index.jsView
@@ -1,0 +1,222 @@
1 +var pull = require('pull-stream/pull')
2 +var looper = require('looper')
3 +
4 +function destroy (stream) {
5 + if(!stream.destroy)
6 + console.error(
7 + 'warning, stream-to-pull-stream: \n'
8 + + 'the wrapped node-stream does not implement `destroy`, \n'
9 + + 'this may cause resource leaks.'
10 + )
11 + else stream.destroy()
12 +
13 +}
14 +
15 +function write(read, stream, cb) {
16 + var ended, closed = false, did
17 + function done () {
18 + if(did) return
19 + did = true
20 + cb && cb(ended === true ? null : ended)
21 + }
22 +
23 + function onClose () {
24 + if(closed) return
25 + closed = true
26 + cleanup()
27 + if(!ended) read(ended = true, done)
28 + else done()
29 + }
30 + function onError (err) {
31 + cleanup()
32 + if(!ended) read(ended = err, done)
33 + }
34 + function cleanup() {
35 + stream.on('finish', onClose)
36 + stream.removeListener('close', onClose)
37 + stream.removeListener('error', onError)
38 + }
39 + stream.on('close', onClose)
40 + stream.on('finish', onClose)
41 + stream.on('error', onError)
42 + process.nextTick(function () {
43 + looper(function (next) {
44 + read(null, function (end, data) {
45 + ended = ended || end
46 + //you can't "end" a stdout stream, so this needs to be handled specially.
47 + if(end === true)
48 + return stream._isStdio ? done() : stream.end()
49 +
50 + if(ended = ended || end) {
51 + destroy(stream)
52 + return done(ended)
53 + }
54 +
55 + //I noticed a problem streaming to the terminal:
56 + //sometimes the end got cut off, creating invalid output.
57 + //it seems that stdout always emits "drain" when it ends.
58 +        //so this seems to work, but I have been unable to reproduce this test
59 +        //automatically, so you need to run ./test/stdout.js a few times and check that the end is valid JSON.
60 + if(stream._isStdio)
61 + stream.write(data, function () { next() })
62 + else {
63 + var pause = stream.write(data)
64 + if(pause === false)
65 + stream.once('drain', next)
66 + else next()
67 + }
68 + })
69 + })
70 + })
71 +}
72 +
73 +function first (emitter, events, handler) {
74 + function listener (val) {
75 + events.forEach(function (e) {
76 + emitter.removeListener(e, listener)
77 + })
78 + handler(val)
79 + }
80 + events.forEach(function (e) {
81 + emitter.on(e, listener)
82 + })
83 + return emitter
84 +}
85 +
86 +function read2(stream) {
87 + var ended = false, waiting = false
88 + var _cb
89 +
90 + function read () {
91 + var data = stream.read()
92 + if(data !== null && _cb) {
93 + var cb = _cb; _cb = null
94 + cb(null, data)
95 + }
96 + }
97 +
98 + stream.on('readable', function () {
99 + waiting = true
100 + _cb && read()
101 + })
102 + .on('end', function () {
103 + ended = true
104 + _cb && _cb(ended)
105 + })
106 + .on('error', function (err) {
107 + ended = err
108 + _cb && _cb(ended)
109 + })
110 +
111 + return function (end, cb) {
112 + _cb = cb
113 + if(ended)
114 + cb(ended)
115 + else if(waiting)
116 + read()
117 + }
118 +}
119 +
120 +function read1(stream) {
121 + var buffer = [], cbs = [], ended, paused = false
122 +
123 + var draining
124 + function drain() {
125 + while((buffer.length || ended) && cbs.length)
126 + cbs.shift()(buffer.length ? null : ended, buffer.shift())
127 + if(!buffer.length && (paused)) {
128 + paused = false
129 + stream.resume()
130 + }
131 + }
132 +
133 + stream.on('data', function (data) {
134 + buffer.push(data)
135 + drain()
136 + if(buffer.length && stream.pause) {
137 + paused = true
138 + stream.pause()
139 + }
140 + })
141 + stream.on('end', function () {
142 + ended = true
143 + drain()
144 + })
145 + stream.on('close', function () {
146 + ended = true
147 + drain()
148 + })
149 + stream.on('error', function (err) {
150 + ended = err
151 + drain()
152 + })
153 + return function (abort, cb) {
154 + if(!cb) throw new Error('*must* provide cb')
155 + if(abort) {
156 + function onAbort () {
157 + while(cbs.length) cbs.shift()(abort)
158 + cb(abort)
159 + }
160 + //if the stream happens to have already ended, then we don't need to abort.
161 + if(ended) return onAbort()
162 + stream.once('close', onAbort)
163 + destroy(stream)
164 + }
165 + else {
166 + cbs.push(cb)
167 + drain()
168 + }
169 + }
170 +}
171 +
172 +var read = read1
173 +
174 +var sink = function (stream, cb) {
175 + return function (read) {
176 + return write(read, stream, cb)
177 + }
178 +}
179 +
180 +var source = function (stream) {
181 + return read1(stream)
182 +}
183 +
184 +exports = module.exports = function (stream, cb) {
185 + return (
186 + (stream.writable && stream.write)
187 + ? stream.readable
188 + ? function(_read) {
189 + write(_read, stream, cb);
190 + return read1(stream)
191 + }
192 + : sink(stream, cb)
193 + : source(stream)
194 + )
195 +}
196 +
197 +exports.sink = sink
198 +exports.source = source
199 +exports.read = read
200 +exports.read1 = read1
201 +exports.read2 = read2
202 +exports.duplex = function (stream, cb) {
203 + return {
204 + source: source(stream),
205 + sink: sink(stream, cb)
206 + }
207 +}
208 +exports.transform = function (stream) {
209 + return function (read) {
210 + var _source = source(stream)
211 + sink(stream)(read); return _source
212 + }
213 +}
214 +
215 +
216 +
217 +
218 +
219 +
220 +
221 +
222 +
node_modules/stream-to-pull-stream/package.jsonView
@@ -1,0 +1,61 @@
1 +{
2 + "_args": [
3 + [
4 + "stream-to-pull-stream@1.7.3",
5 + "/home/cel/src/ssb-npm-registry"
6 + ]
7 + ],
8 + "_from": "stream-to-pull-stream@1.7.3",
9 + "_id": "stream-to-pull-stream@1.7.3",
10 + "_inBundle": false,
11 + "_integrity": "sha256-JHI1Pf3gTAKhv6zRZ4CaTkMCPh4qgj7E6b7jGvdU4UE=",
12 + "_location": "/stream-to-pull-stream",
13 + "_phantomChildren": {},
14 + "_requested": {
15 + "type": "version",
16 + "registry": true,
17 + "raw": "stream-to-pull-stream@1.7.3",
18 + "name": "stream-to-pull-stream",
19 + "escapedName": "stream-to-pull-stream",
20 + "rawSpec": "1.7.3",
21 + "saveSpec": null,
22 + "fetchSpec": "1.7.3"
23 + },
24 + "_requiredBy": [
25 + "/"
26 + ],
27 + "_resolved": "http://localhost:8989/blobs/get/&JHI1Pf3gTAKhv6zRZ4CaTkMCPh4qgj7E6b7jGvdU4UE=.sha256",
28 + "_spec": "1.7.3",
29 + "_where": "/home/cel/src/ssb-npm-registry",
30 + "author": {
31 + "name": "Dominic Tarr",
32 + "email": "dominic.tarr@gmail.com",
33 + "url": "http://dominictarr.com"
34 + },
35 + "bugs": {
36 + "url": "https://github.com/dominictarr/stream-to-pull-stream/issues"
37 + },
38 + "dependencies": {
39 + "looper": "^3.0.0",
40 + "pull-stream": "^3.2.3"
41 + },
42 + "description": "convert a stream1 or streams2 stream into a pull-stream",
43 + "devDependencies": {
44 + "pull-cat": "^1.1.11",
45 + "pull-hang": "0.0.0",
46 + "pull-split": "^0.2.0",
47 + "tape": "^4.5.1",
48 + "through": "~2.3.4"
49 + },
50 + "homepage": "https://github.com/dominictarr/stream-to-pull-stream",
51 + "license": "MIT",
52 + "name": "stream-to-pull-stream",
53 + "repository": {
54 + "type": "git",
55 + "url": "git://github.com/dominictarr/stream-to-pull-stream.git"
56 + },
57 + "scripts": {
58 + "test": "set -e; for t in test/*.js; do node $t; done"
59 + },
60 + "version": "1.7.3"
61 +}
node_modules/stream-to-pull-stream/test/abort.js
@@ -1,0 +1,122 @@
1 +var pull = require('pull-stream')
2 +var through = require('through')
3 +var toPull = require('../')
4 +var Hang = require('pull-hang')
5 +var Cat = require('pull-cat')
6 +var tape = require('tape')
7 +
8 +tape('abort', function (t) {
9 +
10 + t.plan(2)
11 +
12 + var ts = through()
13 + ts.on('close', function () {
14 + t.ok(true)
15 + })
16 + pull(
17 + pull.values([.1, .4, .6, 0.7, .94, 0.3]),
18 +// pull.infinite()
19 + toPull(ts),
20 + function (read) {
21 + console.log('reader!')
22 + read(null, function next (end, data) {
23 + console.log('>>>', end, data)
24 + if(data > 0.9) {
25 + console.log('ABORT')
26 + read(true, function (end) {
27 + t.ok(true)
28 + t.end()
29 + })
30 + } else {
31 + read(null, next)
32 + }
33 + })
34 + }
35 + )
36 +})
37 +
38 +tape('abort hang', function (t) {
39 + var ts = through(), aborted = false, c = 0, _read, ended, closed
40 + ts.on('close', function () {
41 + closed = true
42 + })
43 + pull(
44 + Cat([
45 + pull.values([.1, .4, .6, 0.7, 0.3]),
46 + Hang(function () {
47 + aborted = true
48 + })
49 + ]),
50 + toPull(ts),
51 + function (read) {
52 + _read = read
53 + read(null, function next (end, data) {
54 + if(end) {
55 + ended = true
56 + }
57 + else read(null, next)
58 + })
59 + }
60 + )
61 +
62 + setTimeout(function () {
63 + _read(true, function (end) {
64 + t.ok(aborted, 'aborted')
65 + t.ok(ended, 'ended')
66 + t.ok(closed, 'closed')
67 + t.ok(end, 'abort cb end')
68 + t.end()
69 + })
70 + }, 10)
71 +})
72 +
73 +
74 +
75 +tape('abort a stream that has already ended', function (t) {
76 +
77 + var ts = through()
78 +
79 + var n = 0
80 + pull(
81 + toPull.source(ts),
82 + //like pull.take(4), but abort async.
83 + function (read) {
84 + return function (abort, cb) {
85 + console.log(n)
86 + if(n++ < 4) read(null, cb)
87 + else {
88 + //this would be quite a badly behaved node stream
89 + //but it can be difficult to make a node stream that behaves well.
90 + ts.end()
91 + setTimeout(function () {
92 + read(true, cb)
93 + }, 10)
94 + }
95 + }
96 + },
97 + pull.collect(function (err, ary) {
98 + if(err) throw err
99 + t.deepEqual(ary, [1,2,3,4])
100 + t.end()
101 + })
102 + )
103 +
104 + ts.queue(1)
105 + ts.queue(2)
106 + ts.queue(3)
107 + ts.queue(4)
108 +
109 +})
110 +
111 +
112 +
113 +
114 +
115 +
116 +
117 +
118 +
119 +
120 +
121 +
122 +
node_modules/stream-to-pull-stream/test/close.js
@@ -1,0 +1,29 @@
1 +var pull = require('pull-stream')
2 +var through = require('through')
3 +var toPull = require('../')
4 +
5 +require('tape')('propagate close back to source', function (t) {
6 +
7 + t.plan(1)
8 +
9 + var i = 0
10 +
11 + var ts = through(function (data) {
12 + console.log(data)
13 + if(i++ > 100)
14 + ts.destroy()
15 + })
16 +
17 + pull(
18 + pull.infinite(),
19 + function (read) {
20 + return function (abort, cb) {
21 + if(abort) return t.ok(true), t.end()
22 + read(false, cb)
23 + }
24 + },
25 + toPull(ts),
26 + pull.drain()
27 + )
28 +
29 +})
node_modules/stream-to-pull-stream/test/collect.js
@@ -1,0 +1,18 @@
1 +var pull = require('pull-stream')
2 +var through = require('through')
3 +var toPull = require('../')
4 +
5 +
6 +require('tape')('collect', function (t) {
7 +
8 + var values = [.1, .4, .6, 0.7, .94]
9 + pull(
10 + pull.values(values),
11 + toPull(through()),
12 + pull.collect(function (err, _values) {
13 + t.deepEqual(_values, values)
14 + t.end()
15 + })
16 + )
17 +
18 +})
node_modules/stream-to-pull-stream/test/http.js
@@ -1,0 +1,46 @@
1 +var http = require('http')
2 +var fs = require('fs')
3 +var toPull = require('../')
4 +var pull = require('pull-stream')
5 +var port = ~~(Math.random()*40000) + 1024
6 +var test = require('tape')
7 +
8 +var thisFile = fs.readFileSync(__filename, 'utf-8')
9 +
10 +test('test http', function (t) {
11 +
12 + var server = http.createServer(function (req, res) {
13 + pull(
14 + toPull(req),
15 + pull.reduce(function (b, s) {
16 + return b + s
17 + }, '', function (err, body) {
18 + t.equal(body, thisFile)
19 + t.notOk(err)
20 + res.end('done')
21 + })
22 + )
23 + }).listen(port, function () {
24 +
25 + fs.createReadStream(__filename)
26 + .pipe(http.request({method: 'PUT', port: port}, function (res) {
27 + console.log(res.statusCode)
28 + var _res = toPull(res)
29 +
30 + setTimeout(function () {
31 +
32 + pull(
33 + _res,
34 + pull.collect(function (err, ary) {
35 + t.equal(ary.map(String).join(''), 'done')
36 + t.end()
37 + })
38 + )
39 +
40 + }, 200)
41 +
42 + server.close()
43 + }))
44 + })
45 +
46 +})
node_modules/stream-to-pull-stream/test/index.js
@@ -1,0 +1,26 @@
1 +var pull = require('pull-stream')
2 +var fs = require('fs')
3 +var tape = require('tape')
4 +
5 +var toPullStream = require('../')
6 +
7 +pull(
8 + pull.values([
9 + 'hello\n',
10 + ' there\n'
11 + ]),
12 + toPullStream(process.stdout)
13 +)
14 +
15 +tape('get end callback even with stdout', function (t) {
16 +
17 + pull(
18 + toPullStream(fs.createReadStream(__filename)),
19 + pull.map(function (e) { return e.toString().toUpperCase() }),
20 + toPullStream.sink(process.stdout, function (err) {
21 + console.log('----END!')
22 + t.end()
23 + })
24 + )
25 +
26 +})
node_modules/stream-to-pull-stream/test/sink.js
@@ -1,0 +1,52 @@
1 +var pull = require('pull-stream')
2 +var through = require('through')
3 +var toPull = require('../')
4 +
5 +var tape = require('tape')
6 +
7 +tape('propagate close back to source', function (t) {
8 +
9 +// t.plan(4)
10 +
11 + var ended = false
12 + var input = [1,2,3]
13 + var ts = through(function (data) {
14 + t.equal(data, input.shift())
15 + }, function () {
16 + ended = true
17 + this.queue(null)
18 + })
19 +
20 + pull(
21 + pull.values([1,2,3]),
22 + toPull.sink(ts, function (err) {
23 + t.notOk(err)
24 + t.ok(ended)
25 + t.end()
26 + })
27 + )
28 +})
29 +
30 +
31 +tape('error', function (t) {
32 +
33 +
34 + var ts = through()
35 + var err = new Error('wtf')
36 + pull(
37 + pull.values([1,2,3]),
38 + function (read) {
39 + return function (abort, cb) {
40 + read(abort, function (end, data) {
41 + if(data === 3) cb(err)
42 + else cb(end, data)
43 + })
44 + }
45 + },
46 + toPull.sink(ts, function (_err) {
47 + t.equal(_err, err)
48 + t.end()
49 + })
50 + )
51 +
52 +})
node_modules/stream-to-pull-stream/test/stack.js
@@ -1,0 +1,16 @@
1 +
2 +// Getting stack overflows from writing too fast.
3 +// I think it's because there is a recursive loop
4 +// which stacks up when there are many writes that don't drain.
5 +// Try to reproduce it.
6 +var pull = require('pull-stream')
7 +var through = require('through')
8 +var toPull = require('../')
9 +
10 +pull(
11 + pull.count(1000000),
12 + pull.map(function (e) {
13 + return e.toString()+'\n'
14 + }),
15 + toPull.sink(through())
16 +)
node_modules/stream-to-pull-stream/test/stdout.js
@@ -1,0 +1,29 @@
1 +
2 +// Test that stdout doesn't leave anything hanging.
3 +// Unfortunately I have not been able to reproduce this automatically,
4 +// so you have to run this and check for valid output + the text "VALID END".
5 +
6 +
7 +var toPull = require('../')
8 +var pull = require('pull-stream')
9 +
10 +pull(
11 + pull.count(150),
12 + pull.map(function () {
13 + return {
14 + okay: true,
15 + date: new Date(),
16 + array: [1,3,4,6,7498,49,837,9],
17 + nest: {foo:{bar:{baz: null}}},
18 + pkg: require('../package')
19 + }
20 + }),
21 + pull.map(function (e) {
22 + return JSON.stringify(e, null, 2) +'\n\n'
23 + }),
24 + toPull.sink(process.stdout, function (err) {
25 + if(err) throw err
26 +// console.log('VALID END')
27 + process.exit()
28 + })
29 +)
node_modules/stream-to-pull-stream/test/streams2.js
@@ -1,0 +1,38 @@
1 +var pull = require('pull-stream')
2 +var through = require('through')
3 +var toPull = require('../')
4 +
5 +var stream = require('stream')
6 +
7 +if (stream.Readable) {
8 + require('tape')('issue-3', function (t) {
9 + var util = require('util')
10 + util.inherits(Counter, stream.Readable)
11 +
12 + function Counter() {
13 + stream.Readable.call(this, {objectMode: true, highWaterMark: 1})
14 + this._max = 5
15 + this._index = 1
16 + }
17 +
18 + Counter.prototype._read = function() {
19 + var i = this._index++
20 + this.push(i)
21 + if (i >= this._max) this.push(null)
22 + };
23 +
24 + pull(
25 + toPull(new Counter()),
26 + pull.asyncMap(function (value, done) {
27 + process.nextTick(function() {
28 + done(null, value)
29 + })
30 + }),
31 + pull.collect(function (err, values) {
32 + t.deepEqual(values, [1, 2, 3, 4, 5])
33 + t.end()
34 + })
35 + )
36 +
37 + })
38 +}
node_modules/stream-to-pull-stream/test/test-stdout.js
@@ -1,0 +1,42 @@
1 +
2 +
3 +// I saw bugs with streams to stdout not ending correctly:
4 +// if there was a pause, the stream would not end.
5 +
6 +
7 +var cp = require('child_process')
8 +var toPull = require('../')
9 +var pull = require('pull-stream')
10 +var split = require('pull-split')
11 +
12 +
13 +console.log(process.execPath, [require.resolve('./stdout')])
14 +var child = cp.spawn(process.execPath, [require.resolve('./stdout')])
15 +child.on('exit', function () {
16 + console.log('ended')
17 +})
18 +pull(
19 + toPull.source(child.stdout),
20 + split('\n\n'),
21 + pull.filter(),
22 + pull.map(function (e) {
23 + try {
24 + return JSON.parse(e)
25 + } catch (err) {
26 + console.log(JSON.stringify(e))
27 + //throw err
28 + }
29 +
30 + }),
31 + pull.asyncMap(function (data, cb) {
32 + setTimeout(function () {
33 + cb(null, data)
34 + }, 10)
35 + }),
36 + pull.drain(null, function (err) {
37 + console.log('DONE')
38 + if(err) throw err
39 + console.log('done')
40 + })
41 +)
42 +
node_modules/string_decoder/LICENSE
@@ -1,0 +1,48 @@
1 +Node.js is licensed for use as follows:
2 +
3 +"""
4 +Copyright Node.js contributors. All rights reserved.
5 +
6 +Permission is hereby granted, free of charge, to any person obtaining a copy
7 +of this software and associated documentation files (the "Software"), to
8 +deal in the Software without restriction, including without limitation the
9 +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
10 +sell copies of the Software, and to permit persons to whom the Software is
11 +furnished to do so, subject to the following conditions:
12 +
13 +The above copyright notice and this permission notice shall be included in
14 +all copies or substantial portions of the Software.
15 +
16 +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19 +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
21 +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
22 +IN THE SOFTWARE.
23 +"""
24 +
25 +This license applies to parts of Node.js originating from the
26 +https://github.com/joyent/node repository:
27 +
28 +"""
29 +Copyright Joyent, Inc. and other Node contributors. All rights reserved.
30 +Permission is hereby granted, free of charge, to any person obtaining a copy
31 +of this software and associated documentation files (the "Software"), to
32 +deal in the Software without restriction, including without limitation the
33 +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
34 +sell copies of the Software, and to permit persons to whom the Software is
35 +furnished to do so, subject to the following conditions:
36 +
37 +The above copyright notice and this permission notice shall be included in
38 +all copies or substantial portions of the Software.
39 +
40 +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
41 +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
42 +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
43 +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
44 +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
45 +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
46 +IN THE SOFTWARE.
47 +"""
48 +
node_modules/string_decoder/README.md
@@ -1,0 +1,47 @@
1 +# string_decoder
2 +
3 +***Node-core v8.9.4 string_decoder for userland***
4 +
5 +
6 +[![NPM](https://nodei.co/npm/string_decoder.png?downloads=true&downloadRank=true)](https://nodei.co/npm/string_decoder/)
7 +[![NPM](https://nodei.co/npm-dl/string_decoder.png?&months=6&height=3)](https://nodei.co/npm/string_decoder/)
8 +
9 +
10 +```bash
11 +npm install --save string_decoder
12 +```
13 +
14 +***Node-core string_decoder for userland***
15 +
16 +This package is a mirror of the string_decoder implementation in Node-core.
17 +
18 +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.9.4/docs/api/).
19 +
20 +As of version 1.0.0 **string_decoder** uses semantic versioning.
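
A minimal usage sketch (editorial, not part of the upstream README; the split euro sign is just an illustration): the decoder buffers an incomplete multi-byte UTF-8 sequence across `write()` calls instead of emitting a replacement character.

```js
var StringDecoder = require('string_decoder').StringDecoder

var decoder = new StringDecoder('utf8')

// '€' is three bytes in UTF-8 (0xE2 0x82 0xAC); feed it in two chunks.
console.log(JSON.stringify(decoder.write(Buffer.from([0xe2, 0x82])))) // "" (incomplete, buffered)
console.log(JSON.stringify(decoder.write(Buffer.from([0xac]))))       // "€" (sequence completed)
console.log(JSON.stringify(decoder.end()))                            // "" (nothing left over)
```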
21 +
22 +## Previous versions
23 +
24 +Previous version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10.
25 +
26 +## Update
27 +
28 +The *build/* directory contains a build script that will scrape the source from the [nodejs/node](https://github.com/nodejs/node) repo given a specific Node version.
29 +
30 +## Streams Working Group
31 +
32 +`string_decoder` is maintained by the Streams Working Group, which
33 +oversees the development and maintenance of the Streams API within
34 +Node.js. The responsibilities of the Streams Working Group include:
35 +
36 +* Addressing stream issues on the Node.js issue tracker.
37 +* Authoring and editing stream documentation within the Node.js project.
38 +* Reviewing changes to stream subclasses within the Node.js project.
39 +* Redirecting changes to streams from the Node.js project to this
40 + project.
41 +* Assisting in the implementation of stream providers within Node.js.
42 +* Recommending versions of `readable-stream` to be included in Node.js.
43 +* Messaging about the future of streams to give the community advance
44 + notice of changes.
45 +
46 +See [readable-stream](https://github.com/nodejs/readable-stream) for
47 +more details.
node_modules/string_decoder/lib/string_decoder.js
@@ -1,0 +1,296 @@
1 +// Copyright Joyent, Inc. and other Node contributors.
2 +//
3 +// Permission is hereby granted, free of charge, to any person obtaining a
4 +// copy of this software and associated documentation files (the
5 +// "Software"), to deal in the Software without restriction, including
6 +// without limitation the rights to use, copy, modify, merge, publish,
7 +// distribute, sublicense, and/or sell copies of the Software, and to permit
8 +// persons to whom the Software is furnished to do so, subject to the
9 +// following conditions:
10 +//
11 +// The above copyright notice and this permission notice shall be included
12 +// in all copies or substantial portions of the Software.
13 +//
14 +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
15 +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
16 +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
17 +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
18 +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
19 +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
20 +// USE OR OTHER DEALINGS IN THE SOFTWARE.
21 +
22 +'use strict';
23 +
24 +/*<replacement>*/
25 +
26 +var Buffer = require('safe-buffer').Buffer;
27 +/*</replacement>*/
28 +
29 +var isEncoding = Buffer.isEncoding || function (encoding) {
30 + encoding = '' + encoding;
31 + switch (encoding && encoding.toLowerCase()) {
32 + case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw':
33 + return true;
34 + default:
35 + return false;
36 + }
37 +};
38 +
39 +function _normalizeEncoding(enc) {
40 + if (!enc) return 'utf8';
41 + var retried;
42 + while (true) {
43 + switch (enc) {
44 + case 'utf8':
45 + case 'utf-8':
46 + return 'utf8';
47 + case 'ucs2':
48 + case 'ucs-2':
49 + case 'utf16le':
50 + case 'utf-16le':
51 + return 'utf16le';
52 + case 'latin1':
53 + case 'binary':
54 + return 'latin1';
55 + case 'base64':
56 + case 'ascii':
57 + case 'hex':
58 + return enc;
59 + default:
60 + if (retried) return; // undefined
61 + enc = ('' + enc).toLowerCase();
62 + retried = true;
63 + }
64 + }
65 +};
66 +
67 +// Do not cache `Buffer.isEncoding` when checking encoding names as some
68 +// modules monkey-patch it to support additional encodings
69 +function normalizeEncoding(enc) {
70 + var nenc = _normalizeEncoding(enc);
71 + if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc);
72 + return nenc || enc;
73 +}
74 +
75 +// StringDecoder provides an interface for efficiently splitting a series of
76 +// buffers into a series of JS strings without breaking apart multi-byte
77 +// characters.
78 +exports.StringDecoder = StringDecoder;
79 +function StringDecoder(encoding) {
80 + this.encoding = normalizeEncoding(encoding);
81 + var nb;
82 + switch (this.encoding) {
83 + case 'utf16le':
84 + this.text = utf16Text;
85 + this.end = utf16End;
86 + nb = 4;
87 + break;
88 + case 'utf8':
89 + this.fillLast = utf8FillLast;
90 + nb = 4;
91 + break;
92 + case 'base64':
93 + this.text = base64Text;
94 + this.end = base64End;
95 + nb = 3;
96 + break;
97 + default:
98 + this.write = simpleWrite;
99 + this.end = simpleEnd;
100 + return;
101 + }
102 + this.lastNeed = 0;
103 + this.lastTotal = 0;
104 + this.lastChar = Buffer.allocUnsafe(nb);
105 +}
106 +
107 +StringDecoder.prototype.write = function (buf) {
108 + if (buf.length === 0) return '';
109 + var r;
110 + var i;
111 + if (this.lastNeed) {
112 + r = this.fillLast(buf);
113 + if (r === undefined) return '';
114 + i = this.lastNeed;
115 + this.lastNeed = 0;
116 + } else {
117 + i = 0;
118 + }
119 + if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i);
120 + return r || '';
121 +};
122 +
123 +StringDecoder.prototype.end = utf8End;
124 +
125 +// Returns only complete characters in a Buffer
126 +StringDecoder.prototype.text = utf8Text;
127 +
128 +// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer
129 +StringDecoder.prototype.fillLast = function (buf) {
130 + if (this.lastNeed <= buf.length) {
131 + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed);
132 + return this.lastChar.toString(this.encoding, 0, this.lastTotal);
133 + }
134 + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length);
135 + this.lastNeed -= buf.length;
136 +};
137 +
138 +// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a
139 +// continuation byte. If an invalid byte is detected, -2 is returned.
140 +function utf8CheckByte(byte) {
141 + if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4;
142 + return byte >> 6 === 0x02 ? -1 : -2;
143 +}
144 +
145 +// Checks at most 3 bytes at the end of a Buffer in order to detect an
146 +// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4)
147 +// needed to complete the UTF-8 character (if applicable) are returned.
148 +function utf8CheckIncomplete(self, buf, i) {
149 + var j = buf.length - 1;
150 + if (j < i) return 0;
151 + var nb = utf8CheckByte(buf[j]);
152 + if (nb >= 0) {
153 + if (nb > 0) self.lastNeed = nb - 1;
154 + return nb;
155 + }
156 + if (--j < i || nb === -2) return 0;
157 + nb = utf8CheckByte(buf[j]);
158 + if (nb >= 0) {
159 + if (nb > 0) self.lastNeed = nb - 2;
160 + return nb;
161 + }
162 + if (--j < i || nb === -2) return 0;
163 + nb = utf8CheckByte(buf[j]);
164 + if (nb >= 0) {
165 + if (nb > 0) {
166 + if (nb === 2) nb = 0;else self.lastNeed = nb - 3;
167 + }
168 + return nb;
169 + }
170 + return 0;
171 +}
172 +
173 +// Validates as many continuation bytes for a multi-byte UTF-8 character as
174 +// needed or are available. If we see a non-continuation byte where we expect
175 +// one, we "replace" the validated continuation bytes we've seen so far with
176 +// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding
177 +// behavior. The continuation byte check is included three times in the case
178 +// where all of the continuation bytes for a character exist in the same buffer.
179 +// It is also done this way as a slight performance increase instead of using a
180 +// loop.
181 +function utf8CheckExtraBytes(self, buf, p) {
182 + if ((buf[0] & 0xC0) !== 0x80) {
183 + self.lastNeed = 0;
184 + return '\ufffd';
185 + }
186 + if (self.lastNeed > 1 && buf.length > 1) {
187 + if ((buf[1] & 0xC0) !== 0x80) {
188 + self.lastNeed = 1;
189 + return '\ufffd';
190 + }
191 + if (self.lastNeed > 2 && buf.length > 2) {
192 + if ((buf[2] & 0xC0) !== 0x80) {
193 + self.lastNeed = 2;
194 + return '\ufffd';
195 + }
196 + }
197 + }
198 +}
199 +
200 +// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer.
201 +function utf8FillLast(buf) {
202 + var p = this.lastTotal - this.lastNeed;
203 + var r = utf8CheckExtraBytes(this, buf, p);
204 + if (r !== undefined) return r;
205 + if (this.lastNeed <= buf.length) {
206 + buf.copy(this.lastChar, p, 0, this.lastNeed);
207 + return this.lastChar.toString(this.encoding, 0, this.lastTotal);
208 + }
209 + buf.copy(this.lastChar, p, 0, buf.length);
210 + this.lastNeed -= buf.length;
211 +}
212 +
213 +// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a
214 +// partial character, the character's bytes are buffered until the required
215 +// number of bytes are available.
216 +function utf8Text(buf, i) {
217 + var total = utf8CheckIncomplete(this, buf, i);
218 + if (!this.lastNeed) return buf.toString('utf8', i);
219 + this.lastTotal = total;
220 + var end = buf.length - (total - this.lastNeed);
221 + buf.copy(this.lastChar, 0, end);
222 + return buf.toString('utf8', i, end);
223 +}
224 +
225 +// For UTF-8, a replacement character is added when ending on a partial
226 +// character.
227 +function utf8End(buf) {
228 + var r = buf && buf.length ? this.write(buf) : '';
229 + if (this.lastNeed) return r + '\ufffd';
230 + return r;
231 +}
232 +
233 +// UTF-16LE typically needs two bytes per character, but even if we have an even
234 +// number of bytes available, we need to check if we end on a leading/high
235 +// surrogate. In that case, we need to wait for the next two bytes in order to
236 +// decode the last character properly.
237 +function utf16Text(buf, i) {
238 + if ((buf.length - i) % 2 === 0) {
239 + var r = buf.toString('utf16le', i);
240 + if (r) {
241 + var c = r.charCodeAt(r.length - 1);
242 + if (c >= 0xD800 && c <= 0xDBFF) {
243 + this.lastNeed = 2;
244 + this.lastTotal = 4;
245 + this.lastChar[0] = buf[buf.length - 2];
246 + this.lastChar[1] = buf[buf.length - 1];
247 + return r.slice(0, -1);
248 + }
249 + }
250 + return r;
251 + }
252 + this.lastNeed = 1;
253 + this.lastTotal = 2;
254 + this.lastChar[0] = buf[buf.length - 1];
255 + return buf.toString('utf16le', i, buf.length - 1);
256 +}
257 +
258 +// For UTF-16LE we do not explicitly append special replacement characters if we
259 +// end on a partial character, we simply let v8 handle that.
260 +function utf16End(buf) {
261 + var r = buf && buf.length ? this.write(buf) : '';
262 + if (this.lastNeed) {
263 + var end = this.lastTotal - this.lastNeed;
264 + return r + this.lastChar.toString('utf16le', 0, end);
265 + }
266 + return r;
267 +}
268 +
269 +function base64Text(buf, i) {
270 + var n = (buf.length - i) % 3;
271 + if (n === 0) return buf.toString('base64', i);
272 + this.lastNeed = 3 - n;
273 + this.lastTotal = 3;
274 + if (n === 1) {
275 + this.lastChar[0] = buf[buf.length - 1];
276 + } else {
277 + this.lastChar[0] = buf[buf.length - 2];
278 + this.lastChar[1] = buf[buf.length - 1];
279 + }
280 + return buf.toString('base64', i, buf.length - n);
281 +}
282 +
283 +function base64End(buf) {
284 + var r = buf && buf.length ? this.write(buf) : '';
285 + if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed);
286 + return r;
287 +}
288 +
289 +// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex)
290 +function simpleWrite(buf) {
291 + return buf.toString(this.encoding);
292 +}
293 +
294 +function simpleEnd(buf) {
295 + return buf && buf.length ? this.write(buf) : '';
296 +}
node_modules/string_decoder/package.json
@@ -1,0 +1,65 @@
1 +{
2 + "_args": [
3 + [
4 + "string_decoder@1.2.0",
5 + "/home/cel/src/ssb-npm-registry"
6 + ]
7 + ],
8 + "_from": "string_decoder@1.2.0",
9 + "_id": "string_decoder@1.2.0",
10 + "_inBundle": false,
11 + "_integrity": "sha256-Wtmtnb5kwLKTAWRm8II8hKWz84tLE3+eGxUQi58o9zg=",
12 + "_location": "/string_decoder",
13 + "_phantomChildren": {},
14 + "_requested": {
15 + "type": "version",
16 + "registry": true,
17 + "raw": "string_decoder@1.2.0",
18 + "name": "string_decoder",
19 + "escapedName": "string_decoder",
20 + "rawSpec": "1.2.0",
21 + "saveSpec": null,
22 + "fetchSpec": "1.2.0"
23 + },
24 + "_requiredBy": [
25 + "/readable-stream"
26 + ],
27 + "_resolved": "http://localhost:8989/blobs/get/&Wtmtnb5kwLKTAWRm8II8hKWz84tLE3+eGxUQi58o9zg=.sha256",
28 + "_spec": "1.2.0",
29 + "_where": "/home/cel/src/ssb-npm-registry",
30 + "bugs": {
31 + "url": "https://github.com/nodejs/string_decoder/issues"
32 + },
33 + "dependencies": {
34 + "safe-buffer": "~5.1.0"
35 + },
36 + "description": "The string_decoder module from Node core",
37 + "devDependencies": {
38 + "babel-polyfill": "^6.23.0",
39 + "core-util-is": "^1.0.2",
40 + "inherits": "^2.0.3",
41 + "tap": "~0.4.8"
42 + },
43 + "files": [
44 + "lib"
45 + ],
46 + "homepage": "https://github.com/nodejs/string_decoder",
47 + "keywords": [
48 + "string",
49 + "decoder",
50 + "browser",
51 + "browserify"
52 + ],
53 + "license": "MIT",
54 + "main": "lib/string_decoder.js",
55 + "name": "string_decoder",
56 + "repository": {
57 + "type": "git",
58 + "url": "git://github.com/nodejs/string_decoder.git"
59 + },
60 + "scripts": {
61 + "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js",
62 + "test": "tap test/parallel/*.js && node test/verify-dependencies"
63 + },
64 + "version": "1.2.0"
65 +}
node_modules/tar-stream/LICENSE
@@ -1,0 +1,21 @@
1 +The MIT License (MIT)
2 +
3 +Copyright (c) 2014 Mathias Buus
4 +
5 +Permission is hereby granted, free of charge, to any person obtaining a copy
6 +of this software and associated documentation files (the "Software"), to deal
7 +in the Software without restriction, including without limitation the rights
8 +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 +copies of the Software, and to permit persons to whom the Software is
10 +furnished to do so, subject to the following conditions:
11 +
12 +The above copyright notice and this permission notice shall be included in
13 +all copies or substantial portions of the Software.
14 +
15 +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
21 +THE SOFTWARE.
node_modules/tar-stream/README.md
@@ -1,0 +1,168 @@
1 +# tar-stream
2 +
3 +tar-stream is a streaming tar parser and generator and nothing else. It is streams2 and operates purely using streams, which means you can easily extract/parse tarballs without ever hitting the file system.
4 +
5 +Note that you still need to gunzip your data if you have a `.tar.gz`. We recommend using [gunzip-maybe](https://github.com/mafintosh/gunzip-maybe) in conjunction with this.
6 +
7 +```
8 +npm install tar-stream
9 +```
10 +
11 +[![build status](https://secure.travis-ci.org/mafintosh/tar-stream.png)](http://travis-ci.org/mafintosh/tar-stream)
12 +[![License](https://img.shields.io/badge/license-MIT-blue.svg)](http://opensource.org/licenses/MIT)
13 +
14 +## Usage
15 +
16 +tar-stream exposes two streams, [pack](https://github.com/mafintosh/tar-stream#packing) which creates tarballs and [extract](https://github.com/mafintosh/tar-stream#extracting) which extracts tarballs. To [modify an existing tarball](https://github.com/mafintosh/tar-stream#modifying-existing-tarballs) use both.
17 +
18 +
19 +It implements USTAR with additional support for pax extended headers. It should be compatible with all popular tar distributions out there (gnutar, bsdtar, etc.).
20 +
21 +## Related
22 +
23 +If you want to pack/unpack directories on the file system check out [tar-fs](https://github.com/mafintosh/tar-fs) which provides file system bindings to this module.
24 +
25 +## Packing
26 +
27 +To create a pack stream use `tar.pack()` and call `pack.entry(header, [callback])` to add tar entries.
28 +
29 +``` js
30 +var tar = require('tar-stream')
31 +var pack = tar.pack() // pack is a streams2 stream
32 +
33 +// add a file called my-test.txt with the content "Hello World!"
34 +pack.entry({ name: 'my-test.txt' }, 'Hello World!')
35 +
36 +// add a file called my-stream-test.txt from a stream
37 +var entry = pack.entry({ name: 'my-stream-test.txt', size: 11 }, function(err) {
38 + // the stream was added
39 + // no more entries
40 + pack.finalize()
41 +})
42 +
43 +entry.write('hello')
44 +entry.write(' ')
45 +entry.write('world')
46 +entry.end()
47 +
48 +// pipe the pack stream somewhere
49 +pack.pipe(process.stdout)
50 +```
51 +
52 +## Extracting
53 +
54 +To extract a tarball, use `tar.extract()` and listen for `extract.on('entry', function (header, stream, next) { ... })`
55 +
56 +``` js
57 +var extract = tar.extract()
58 +
59 +extract.on('entry', function(header, stream, next) {
60 + // header is the tar header
61 + // stream is the content body (might be an empty stream)
62 + // call next when you are done with this entry
63 +
64 + stream.on('end', function() {
65 + next() // ready for next entry
66 + })
67 +
68 + stream.resume() // just auto drain the stream
69 +})
70 +
71 +extract.on('finish', function() {
72 + // all entries read
73 +})
74 +
75 +pack.pipe(extract)
76 +```
77 +
78 +The tar archive is streamed sequentially, meaning you **must** drain each entry's stream as you get them or else the main extract stream will receive backpressure and stop reading.
79 +
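Since a `.tar.gz` has to be gunzipped before extraction (see the note at the top), a common pattern is to put the recommended [gunzip-maybe](https://github.com/mafintosh/gunzip-maybe) transform in front of the extractor. The sketch below is editorial and assumes `gunzip-maybe` is installed and exposes a transform-stream factory, as its README describes; the file name is illustrative.

``` js
var fs = require('fs')
var gunzip = require('gunzip-maybe') // assumed: passes plain .tar through, gunzips .tar.gz
var tar = require('tar-stream')

var extract = tar.extract()

extract.on('entry', function (header, stream, next) {
  console.log(header.name, header.size)
  stream.on('end', next) // move on once this entry is fully drained
  stream.resume()        // drain it; we only care about the headers here
})

extract.on('finish', function () {
  console.log('done listing entries')
})

// works for both plain .tar and gzipped .tar.gz input
fs.createReadStream('archive.tar.gz').pipe(gunzip()).pipe(extract)
```
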
80 +## Headers
81 +
82 +The header object used in `entry` should contain the following properties.
83 +Most of these values can be found by stat'ing a file.
84 +
85 +``` js
86 +{
87 + name: 'path/to/this/entry.txt',
88 + size: 1314, // entry size. defaults to 0
89 + mode: 0644, // entry mode. defaults to 0755 for dirs and 0644 otherwise
90 + mtime: new Date(), // last modified date for entry. defaults to now.
91 + type: 'file', // type of entry. defaults to file. can be:
92 + // file | link | symlink | directory | block-device
93 + // character-device | fifo | contiguous-file
94 + linkname: 'path', // linked file name
95 + uid: 0, // uid of entry owner. defaults to 0
96 + gid: 0, // gid of entry owner. defaults to 0
97 + uname: 'maf', // uname of entry owner. defaults to null
98 + gname: 'staff', // gname of entry owner. defaults to null
99 + devmajor: 0, // device major version. defaults to 0
100 + devminor: 0 // device minor version. defaults to 0
101 +}
102 +```
103 +
104 +## Modifying existing tarballs
105 +
106 +Using tar-stream it is easy to rewrite paths / change modes etc in an existing tarball.
107 +
108 +``` js
109 +var extract = tar.extract()
110 +var pack = tar.pack()
111 +var path = require('path')
112 +
113 +extract.on('entry', function(header, stream, callback) {
114 + // let's prefix all names with 'tmp'
115 + header.name = path.join('tmp', header.name)
116 + // write the new entry to the pack stream
117 + stream.pipe(pack.entry(header, callback))
118 +})
119 +
120 +extract.on('finish', function() {
121 + // all entries done - let's finalize it
122 + pack.finalize()
123 +})
124 +
125 +// pipe the old tarball to the extractor
126 +oldTarballStream.pipe(extract)
127 +
128 +// pipe the new tarball to another stream
129 +pack.pipe(newTarballStream)
130 +```
131 +
132 +## Saving tarball to fs
133 +
134 +
135 +``` js
136 +var fs = require('fs')
137 +var tar = require('tar-stream')
138 +
139 +var pack = tar.pack() // pack is a streams2 stream
140 +var path = 'YourTarBall.tar'
141 +var yourTarball = fs.createWriteStream(path)
142 +
143 +// add a file called YourFile.txt with the content "Hello World!"
144 +pack.entry({name: 'YourFile.txt'}, 'Hello World!', function (err) {
145 + if (err) throw err
146 + pack.finalize()
147 +})
148 +
149 +// pipe the pack stream to your file
150 +pack.pipe(yourTarball)
151 +
152 +yourTarball.on('close', function () {
153 + console.log(path + ' has been written')
154 + fs.stat(path, function(err, stats) {
155 + if (err) throw err
156 + console.log(stats)
157 + console.log('Got file info successfully!')
158 + })
159 +})
160 +```
161 +
162 +## Performance
163 +
164 +[See tar-fs for a performance comparison with node-tar](https://github.com/mafintosh/tar-fs/blob/master/README.md#performance)
165 +
166 +# License
167 +
168 +MIT
node_modules/tar-stream/extract.js
@@ -1,0 +1,257 @@
1 +var util = require('util')
2 +var bl = require('bl')
3 +var headers = require('./headers')
4 +
5 +var Writable = require('readable-stream').Writable
6 +var PassThrough = require('readable-stream').PassThrough
7 +
8 +var noop = function () {}
9 +
10 +var overflow = function (size) {
11 + size &= 511
12 + return size && 512 - size
13 +}
14 +
15 +var emptyStream = function (self, offset) {
16 + var s = new Source(self, offset)
17 + s.end()
18 + return s
19 +}
20 +
21 +var mixinPax = function (header, pax) {
22 + if (pax.path) header.name = pax.path
23 + if (pax.linkpath) header.linkname = pax.linkpath
24 + if (pax.size) header.size = parseInt(pax.size, 10)
25 + header.pax = pax
26 + return header
27 +}
28 +
29 +var Source = function (self, offset) {
30 + this._parent = self
31 + this.offset = offset
32 + PassThrough.call(this)
33 +}
34 +
35 +util.inherits(Source, PassThrough)
36 +
37 +Source.prototype.destroy = function (err) {
38 + this._parent.destroy(err)
39 +}
40 +
41 +var Extract = function (opts) {
42 + if (!(this instanceof Extract)) return new Extract(opts)
43 + Writable.call(this, opts)
44 +
45 + opts = opts || {}
46 +
47 + this._offset = 0
48 + this._buffer = bl()
49 + this._missing = 0
50 + this._partial = false
51 + this._onparse = noop
52 + this._header = null
53 + this._stream = null
54 + this._overflow = null
55 + this._cb = null
56 + this._locked = false
57 + this._destroyed = false
58 + this._pax = null
59 + this._paxGlobal = null
60 + this._gnuLongPath = null
61 + this._gnuLongLinkPath = null
62 +
63 + var self = this
64 + var b = self._buffer
65 +
66 + var oncontinue = function () {
67 + self._continue()
68 + }
69 +
70 + var onunlock = function (err) {
71 + self._locked = false
72 + if (err) return self.destroy(err)
73 + if (!self._stream) oncontinue()
74 + }
75 +
76 + var onstreamend = function () {
77 + self._stream = null
78 + var drain = overflow(self._header.size)
79 + if (drain) self._parse(drain, ondrain)
80 + else self._parse(512, onheader)
81 + if (!self._locked) oncontinue()
82 + }
83 +
84 + var ondrain = function () {
85 + self._buffer.consume(overflow(self._header.size))
86 + self._parse(512, onheader)
87 + oncontinue()
88 + }
89 +
90 + var onpaxglobalheader = function () {
91 + var size = self._header.size
92 + self._paxGlobal = headers.decodePax(b.slice(0, size))
93 + b.consume(size)
94 + onstreamend()
95 + }
96 +
97 + var onpaxheader = function () {
98 + var size = self._header.size
99 + self._pax = headers.decodePax(b.slice(0, size))
100 + if (self._paxGlobal) self._pax = Object.assign({}, self._paxGlobal, self._pax)
101 + b.consume(size)
102 + onstreamend()
103 + }
104 +
105 + var ongnulongpath = function () {
106 + var size = self._header.size
107 + this._gnuLongPath = headers.decodeLongPath(b.slice(0, size), opts.filenameEncoding)
108 + b.consume(size)
109 + onstreamend()
110 + }
111 +
112 + var ongnulonglinkpath = function () {
113 + var size = self._header.size
114 + this._gnuLongLinkPath = headers.decodeLongPath(b.slice(0, size), opts.filenameEncoding)
115 + b.consume(size)
116 + onstreamend()
117 + }
118 +
119 + var onheader = function () {
120 + var offset = self._offset
121 + var header
122 + try {
123 + header = self._header = headers.decode(b.slice(0, 512), opts.filenameEncoding)
124 + } catch (err) {
125 + self.emit('error', err)
126 + }
127 + b.consume(512)
128 +
129 + if (!header) {
130 + self._parse(512, onheader)
131 + oncontinue()
132 + return
133 + }
134 + if (header.type === 'gnu-long-path') {
135 + self._parse(header.size, ongnulongpath)
136 + oncontinue()
137 + return
138 + }
139 + if (header.type === 'gnu-long-link-path') {
140 + self._parse(header.size, ongnulonglinkpath)
141 + oncontinue()
142 + return
143 + }
144 + if (header.type === 'pax-global-header') {
145 + self._parse(header.size, onpaxglobalheader)
146 + oncontinue()
147 + return
148 + }
149 + if (header.type === 'pax-header') {
150 + self._parse(header.size, onpaxheader)
151 + oncontinue()
152 + return
153 + }
154 +
155 + if (self._gnuLongPath) {
156 + header.name = self._gnuLongPath
157 + self._gnuLongPath = null
158 + }
159 +
160 + if (self._gnuLongLinkPath) {
161 + header.linkname = self._gnuLongLinkPath
162 + self._gnuLongLinkPath = null
163 + }
164 +
165 + if (self._pax) {
166 + self._header = header = mixinPax(header, self._pax)
167 + self._pax = null
168 + }
169 +
170 + self._locked = true
171 +
172 + if (!header.size || header.type === 'directory') {
173 + self._parse(512, onheader)
174 + self.emit('entry', header, emptyStream(self, offset), onunlock)
175 + return
176 + }
177 +
178 + self._stream = new Source(self, offset)
179 +
180 + self.emit('entry', header, self._stream, onunlock)
181 + self._parse(header.size, onstreamend)
182 + oncontinue()
183 + }
184 +
185 + this._onheader = onheader
186 + this._parse(512, onheader)
187 +}
188 +
189 +util.inherits(Extract, Writable)
190 +
191 +Extract.prototype.destroy = function (err) {
192 + if (this._destroyed) return
193 + this._destroyed = true
194 +
195 + if (err) this.emit('error', err)
196 + this.emit('close')
197 + if (this._stream) this._stream.emit('close')
198 +}
199 +
200 +Extract.prototype._parse = function (size, onparse) {
201 + if (this._destroyed) return
202 + this._offset += size
203 + this._missing = size
204 + if (onparse === this._onheader) this._partial = false
205 + this._onparse = onparse
206 +}
207 +
208 +Extract.prototype._continue = function () {
209 + if (this._destroyed) return
210 + var cb = this._cb
211 + this._cb = noop
212 + if (this._overflow) this._write(this._overflow, undefined, cb)
213 + else cb()
214 +}
215 +
216 +Extract.prototype._write = function (data, enc, cb) {
217 + if (this._destroyed) return
218 +
219 + var s = this._stream
220 + var b = this._buffer
221 + var missing = this._missing
222 + if (data.length) this._partial = true
223 +
224 + // we do not reach end-of-chunk now. just forward it
225 +
226 + if (data.length < missing) {
227 + this._missing -= data.length
228 + this._overflow = null
229 + if (s) return s.write(data, cb)
230 + b.append(data)
231 + return cb()
232 + }
233 +
234 + // end-of-chunk. the parser should call cb.
235 +
236 + this._cb = cb
237 + this._missing = 0
238 +
239 + var overflow = null
240 + if (data.length > missing) {
241 + overflow = data.slice(missing)
242 + data = data.slice(0, missing)
243 + }
244 +
245 + if (s) s.end(data)
246 + else b.append(data)
247 +
248 + this._overflow = overflow
249 + this._onparse()
250 +}
251 +
252 +Extract.prototype._final = function (cb) {
253 + if (this._partial) return this.destroy(new Error('Unexpected end of data'))
254 + cb()
255 +}
256 +
257 +module.exports = Extract
node_modules/tar-stream/headers.js
@@ -1,0 +1,282 @@
1 +var alloc = Buffer.alloc
2 +
3 +var ZEROS = '0000000000000000000'
4 +var SEVENS = '7777777777777777777'
5 +var ZERO_OFFSET = '0'.charCodeAt(0)
6 +var USTAR = 'ustar\x0000'
7 +var MASK = parseInt('7777', 8)
8 +
9 +var clamp = function (index, len, defaultValue) {
10 + if (typeof index !== 'number') return defaultValue
11 + index = ~~index // Coerce to integer.
12 + if (index >= len) return len
13 + if (index >= 0) return index
14 + index += len
15 + if (index >= 0) return index
16 + return 0
17 +}
18 +
19 +var toType = function (flag) {
20 + switch (flag) {
21 + case 0:
22 + return 'file'
23 + case 1:
24 + return 'link'
25 + case 2:
26 + return 'symlink'
27 + case 3:
28 + return 'character-device'
29 + case 4:
30 + return 'block-device'
31 + case 5:
32 + return 'directory'
33 + case 6:
34 + return 'fifo'
35 + case 7:
36 + return 'contiguous-file'
37 + case 72:
38 + return 'pax-header'
39 + case 55:
40 + return 'pax-global-header'
41 + case 27:
42 + return 'gnu-long-link-path'
43 + case 28:
44 + case 30:
45 + return 'gnu-long-path'
46 + }
47 +
48 + return null
49 +}
50 +
51 +var toTypeflag = function (flag) {
52 + switch (flag) {
53 + case 'file':
54 + return 0
55 + case 'link':
56 + return 1
57 + case 'symlink':
58 + return 2
59 + case 'character-device':
60 + return 3
61 + case 'block-device':
62 + return 4
63 + case 'directory':
64 + return 5
65 + case 'fifo':
66 + return 6
67 + case 'contiguous-file':
68 + return 7
69 + case 'pax-header':
70 + return 72
71 + }
72 +
73 + return 0
74 +}
75 +
76 +var indexOf = function (block, num, offset, end) {
77 + for (; offset < end; offset++) {
78 + if (block[offset] === num) return offset
79 + }
80 + return end
81 +}
82 +
83 +var cksum = function (block) {
84 + var sum = 8 * 32
85 + for (var i = 0; i < 148; i++) sum += block[i]
86 + for (var j = 156; j < 512; j++) sum += block[j]
87 + return sum
88 +}
89 +
90 +var encodeOct = function (val, n) {
91 + val = val.toString(8)
92 + if (val.length > n) return SEVENS.slice(0, n) + ' '
93 + else return ZEROS.slice(0, n - val.length) + val + ' '
94 +}
95 +
96 +/* Copied from the node-tar repo and modified to meet
97 + * tar-stream coding standard.
98 + *
99 + * Source: https://github.com/npm/node-tar/blob/51b6627a1f357d2eb433e7378e5f05e83b7aa6cd/lib/header.js#L349
100 + */
101 +function parse256 (buf) {
102 + // first byte MUST be either 80 or FF
103 + // 80 for positive, FF for 2's comp
104 + var positive
105 + if (buf[0] === 0x80) positive = true
106 + else if (buf[0] === 0xFF) positive = false
107 + else return null
108 +
109 + // build up a base-256 tuple from the least sig to the highest
110 + var zero = false
111 + var tuple = []
112 + for (var i = buf.length - 1; i > 0; i--) {
113 + var byte = buf[i]
114 + if (positive) tuple.push(byte)
115 + else if (zero && byte === 0) tuple.push(0)
116 + else if (zero) {
117 + zero = false
118 + tuple.push(0x100 - byte)
119 + } else tuple.push(0xFF - byte)
120 + }
121 +
122 + var sum = 0
123 + var l = tuple.length
124 + for (i = 0; i < l; i++) {
125 + sum += tuple[i] * Math.pow(256, i)
126 + }
127 +
128 + return positive ? sum : -1 * sum
129 +}
130 +
131 +var decodeOct = function (val, offset, length) {
132 + val = val.slice(offset, offset + length)
133 + offset = 0
134 +
135 + // If prefixed with 0x80 then parse as a base-256 integer
136 + if (val[offset] & 0x80) {
137 + return parse256(val)
138 + } else {
139 + // Older versions of tar can prefix with spaces
140 + while (offset < val.length && val[offset] === 32) offset++
141 + var end = clamp(indexOf(val, 32, offset, val.length), val.length, val.length)
142 + while (offset < end && val[offset] === 0) offset++
143 + if (end === offset) return 0
144 + return parseInt(val.slice(offset, end).toString(), 8)
145 + }
146 +}
147 +
148 +var decodeStr = function (val, offset, length, encoding) {
149 + return val.slice(offset, indexOf(val, 0, offset, offset + length)).toString(encoding)
150 +}
151 +
152 +var addLength = function (str) {
153 + var len = Buffer.byteLength(str)
154 + var digits = Math.floor(Math.log(len) / Math.log(10)) + 1
155 + if (len + digits >= Math.pow(10, digits)) digits++
156 +
157 + return (len + digits) + str
158 +}
159 +
160 +exports.decodeLongPath = function (buf, encoding) {
161 + return decodeStr(buf, 0, buf.length, encoding)
162 +}
163 +
164 +exports.encodePax = function (opts) { // TODO: encode more stuff in pax
165 + var result = ''
166 + if (opts.name) result += addLength(' path=' + opts.name + '\n')
167 + if (opts.linkname) result += addLength(' linkpath=' + opts.linkname + '\n')
168 + var pax = opts.pax
169 + if (pax) {
170 + for (var key in pax) {
171 + result += addLength(' ' + key + '=' + pax[key] + '\n')
172 + }
173 + }
174 + return Buffer.from(result)
175 +}
176 +
177 +exports.decodePax = function (buf) {
178 + var result = {}
179 +
180 + while (buf.length) {
181 + var i = 0
182 + while (i < buf.length && buf[i] !== 32) i++
183 + var len = parseInt(buf.slice(0, i).toString(), 10)
184 + if (!len) return result
185 +
186 + var b = buf.slice(i + 1, len - 1).toString()
187 + var keyIndex = b.indexOf('=')
188 + if (keyIndex === -1) return result
189 + result[b.slice(0, keyIndex)] = b.slice(keyIndex + 1)
190 +
191 + buf = buf.slice(len)
192 + }
193 +
194 + return result
195 +}
196 +
197 +exports.encode = function (opts) {
198 + var buf = alloc(512)
199 + var name = opts.name
200 + var prefix = ''
201 +
202 + if (opts.typeflag === 5 && name[name.length - 1] !== '/') name += '/'
203 + if (Buffer.byteLength(name) !== name.length) return null // utf-8
204 +
205 + while (Buffer.byteLength(name) > 100) {
206 + var i = name.indexOf('/')
207 + if (i === -1) return null
208 + prefix += prefix ? '/' + name.slice(0, i) : name.slice(0, i)
209 + name = name.slice(i + 1)
210 + }
211 +
212 + if (Buffer.byteLength(name) > 100 || Buffer.byteLength(prefix) > 155) return null
213 + if (opts.linkname && Buffer.byteLength(opts.linkname) > 100) return null
214 +
215 + buf.write(name)
216 + buf.write(encodeOct(opts.mode & MASK, 6), 100)
217 + buf.write(encodeOct(opts.uid, 6), 108)
218 + buf.write(encodeOct(opts.gid, 6), 116)
219 + buf.write(encodeOct(opts.size, 11), 124)
220 + buf.write(encodeOct((opts.mtime.getTime() / 1000) | 0, 11), 136)
221 +
222 + buf[156] = ZERO_OFFSET + toTypeflag(opts.type)
223 +
224 + if (opts.linkname) buf.write(opts.linkname, 157)
225 +
226 + buf.write(USTAR, 257)
227 + if (opts.uname) buf.write(opts.uname, 265)
228 + if (opts.gname) buf.write(opts.gname, 297)
229 + buf.write(encodeOct(opts.devmajor || 0, 6), 329)
230 + buf.write(encodeOct(opts.devminor || 0, 6), 337)
231 +
232 + if (prefix) buf.write(prefix, 345)
233 +
234 + buf.write(encodeOct(cksum(buf), 6), 148)
235 +
236 + return buf
237 +}
238 +
239 +exports.decode = function (buf, filenameEncoding) {
240 + var typeflag = buf[156] === 0 ? 0 : buf[156] - ZERO_OFFSET
241 +
242 + var name = decodeStr(buf, 0, 100, filenameEncoding)
243 + var mode = decodeOct(buf, 100, 8)
244 + var uid = decodeOct(buf, 108, 8)
245 + var gid = decodeOct(buf, 116, 8)
246 + var size = decodeOct(buf, 124, 12)
247 + var mtime = decodeOct(buf, 136, 12)
248 + var type = toType(typeflag)
249 + var linkname = buf[157] === 0 ? null : decodeStr(buf, 157, 100, filenameEncoding)
250 + var uname = decodeStr(buf, 265, 32)
251 + var gname = decodeStr(buf, 297, 32)
252 + var devmajor = decodeOct(buf, 329, 8)
253 + var devminor = decodeOct(buf, 337, 8)
254 +
255 + if (buf[345]) name = decodeStr(buf, 345, 155, filenameEncoding) + '/' + name
256 +
257 + // to support old tar versions that use trailing / to indicate dirs
258 + if (typeflag === 0 && name && name[name.length - 1] === '/') typeflag = 5
259 +
260 + var c = cksum(buf)
261 +
262 + // checksum is still initial value if header was null.
263 + if (c === 8 * 32) return null
264 +
265 + // valid checksum
266 + if (c !== decodeOct(buf, 148, 8)) throw new Error('Invalid tar header. Maybe the tar is corrupted or it needs to be gunzipped?')
267 +
268 + return {
269 + name: name,
270 + mode: mode,
271 + uid: uid,
272 + gid: gid,
273 + size: size,
274 + mtime: new Date(1000 * mtime),
275 + type: type,
276 + linkname: linkname,
277 + uname: uname,
278 + gname: gname,
279 + devmajor: devmajor,
280 + devminor: devminor
281 + }
282 +}
node_modules/tar-stream/index.js
@@ -1,0 +1,2 @@
1 +exports.extract = require('./extract')
2 +exports.pack = require('./pack')
node_modules/tar-stream/pack.js
@@ -1,0 +1,254 @@
1 +var constants = require('fs-constants')
2 +var eos = require('end-of-stream')
3 +var inherits = require('inherits')
4 +var alloc = Buffer.alloc
5 +
6 +var Readable = require('readable-stream').Readable
7 +var Writable = require('readable-stream').Writable
8 +var StringDecoder = require('string_decoder').StringDecoder
9 +
10 +var headers = require('./headers')
11 +
12 +var DMODE = parseInt('755', 8)
13 +var FMODE = parseInt('644', 8)
14 +
15 +var END_OF_TAR = alloc(1024)
16 +
17 +var noop = function () {}
18 +
19 +var overflow = function (self, size) {
20 + size &= 511
21 + if (size) self.push(END_OF_TAR.slice(0, 512 - size))
22 +}
23 +
24 +function modeToType (mode) {
25 + switch (mode & constants.S_IFMT) {
26 + case constants.S_IFBLK: return 'block-device'
27 + case constants.S_IFCHR: return 'character-device'
28 + case constants.S_IFDIR: return 'directory'
29 + case constants.S_IFIFO: return 'fifo'
30 + case constants.S_IFLNK: return 'symlink'
31 + }
32 +
33 + return 'file'
34 +}
35 +
36 +var Sink = function (to) {
37 + Writable.call(this)
38 + this.written = 0
39 + this._to = to
40 + this._destroyed = false
41 +}
42 +
43 +inherits(Sink, Writable)
44 +
45 +Sink.prototype._write = function (data, enc, cb) {
46 + this.written += data.length
47 + if (this._to.push(data)) return cb()
48 + this._to._drain = cb
49 +}
50 +
51 +Sink.prototype.destroy = function () {
52 + if (this._destroyed) return
53 + this._destroyed = true
54 + this.emit('close')
55 +}
56 +
57 +var LinkSink = function () {
58 + Writable.call(this)
59 + this.linkname = ''
60 + this._decoder = new StringDecoder('utf-8')
61 + this._destroyed = false
62 +}
63 +
64 +inherits(LinkSink, Writable)
65 +
66 +LinkSink.prototype._write = function (data, enc, cb) {
67 + this.linkname += this._decoder.write(data)
68 + cb()
69 +}
70 +
71 +LinkSink.prototype.destroy = function () {
72 + if (this._destroyed) return
73 + this._destroyed = true
74 + this.emit('close')
75 +}
76 +
77 +var Void = function () {
78 + Writable.call(this)
79 + this._destroyed = false
80 +}
81 +
82 +inherits(Void, Writable)
83 +
84 +Void.prototype._write = function (data, enc, cb) {
85 + cb(new Error('No body allowed for this entry'))
86 +}
87 +
88 +Void.prototype.destroy = function () {
89 + if (this._destroyed) return
90 + this._destroyed = true
91 + this.emit('close')
92 +}
93 +
94 +var Pack = function (opts) {
95 + if (!(this instanceof Pack)) return new Pack(opts)
96 + Readable.call(this, opts)
97 +
98 + this._drain = noop
99 + this._finalized = false
100 + this._finalizing = false
101 + this._destroyed = false
102 + this._stream = null
103 +}
104 +
105 +inherits(Pack, Readable)
106 +
107 +Pack.prototype.entry = function (header, buffer, callback) {
108 + if (this._stream) throw new Error('already piping an entry')
109 + if (this._finalized || this._destroyed) return
110 +
111 + if (typeof buffer === 'function') {
112 + callback = buffer
113 + buffer = null
114 + }
115 +
116 + if (!callback) callback = noop
117 +
118 + var self = this
119 +
120 + if (!header.size || header.type === 'symlink') header.size = 0
121 + if (!header.type) header.type = modeToType(header.mode)
122 + if (!header.mode) header.mode = header.type === 'directory' ? DMODE : FMODE
123 + if (!header.uid) header.uid = 0
124 + if (!header.gid) header.gid = 0
125 + if (!header.mtime) header.mtime = new Date()
126 +
127 + if (typeof buffer === 'string') buffer = Buffer.from(buffer)
128 + if (Buffer.isBuffer(buffer)) {
129 + header.size = buffer.length
130 + this._encode(header)
131 + this.push(buffer)
132 + overflow(self, header.size)
133 + process.nextTick(callback)
134 + return new Void()
135 + }
136 +
137 + if (header.type === 'symlink' && !header.linkname) {
138 + var linkSink = new LinkSink()
139 + eos(linkSink, function (err) {
140 + if (err) { // stream was closed
141 + self.destroy()
142 + return callback(err)
143 + }
144 +
145 + header.linkname = linkSink.linkname
146 + self._encode(header)
147 + callback()
148 + })
149 +
150 + return linkSink
151 + }
152 +
153 + this._encode(header)
154 +
155 + if (header.type !== 'file' && header.type !== 'contiguous-file') {
156 + process.nextTick(callback)
157 + return new Void()
158 + }
159 +
160 + var sink = new Sink(this)
161 +
162 + this._stream = sink
163 +
164 + eos(sink, function (err) {
165 + self._stream = null
166 +
167 + if (err) { // stream was closed
168 + self.destroy()
169 + return callback(err)
170 + }
171 +
172 + if (sink.written !== header.size) { // corrupting tar
173 + self.destroy()
174 + return callback(new Error('size mismatch'))
175 + }
176 +
177 + overflow(self, header.size)
178 + if (self._finalizing) self.finalize()
179 + callback()
180 + })
181 +
182 + return sink
183 +}
184 +
185 +Pack.prototype.finalize = function () {
186 + if (this._stream) {
187 + this._finalizing = true
188 + return
189 + }
190 +
191 + if (this._finalized) return
192 + this._finalized = true
193 + this.push(END_OF_TAR)
194 + this.push(null)
195 +}
196 +
197 +Pack.prototype.destroy = function (err) {
198 + if (this._destroyed) return
199 + this._destroyed = true
200 +
201 + if (err) this.emit('error', err)
202 + this.emit('close')
203 + if (this._stream && this._stream.destroy) this._stream.destroy()
204 +}
205 +
206 +Pack.prototype._encode = function (header) {
207 + if (!header.pax) {
208 + var buf = headers.encode(header)
209 + if (buf) {
210 + this.push(buf)
211 + return
212 + }
213 + }
214 + this._encodePax(header)
215 +}
216 +
217 +Pack.prototype._encodePax = function (header) {
218 + var paxHeader = headers.encodePax({
219 + name: header.name,
220 + linkname: header.linkname,
221 + pax: header.pax
222 + })
223 +
224 + var newHeader = {
225 + name: 'PaxHeader',
226 + mode: header.mode,
227 + uid: header.uid,
228 + gid: header.gid,
229 + size: paxHeader.length,
230 + mtime: header.mtime,
231 + type: 'pax-header',
232 + linkname: header.linkname && 'PaxHeader',
233 + uname: header.uname,
234 + gname: header.gname,
235 + devmajor: header.devmajor,
236 + devminor: header.devminor
237 + }
238 +
239 + this.push(headers.encode(newHeader))
240 + this.push(paxHeader)
241 + overflow(this, paxHeader.length)
242 +
243 + newHeader.size = header.size
244 + newHeader.type = header.type
245 + this.push(headers.encode(newHeader))
246 +}
247 +
248 +Pack.prototype._read = function (n) {
249 + var drain = this._drain
250 + this._drain = noop
251 + drain()
252 +}
253 +
254 +module.exports = Pack
node_modules/tar-stream/package.json
@@ -1,0 +1,86 @@
1 +{
2 + "_args": [
3 + [
4 + "tar-stream@2.0.1",
5 + "/home/cel/src/ssb-npm-registry"
6 + ]
7 + ],
8 + "_from": "tar-stream@2.0.1",
9 + "_id": "tar-stream@2.0.1",
10 + "_inBundle": false,
11 + "_integrity": "sha256-vl/gEfGo3Z4bhYiHqE8WxrAr3E53zYHTd6fiAnSni94=",
12 + "_location": "/tar-stream",
13 + "_phantomChildren": {},
14 + "_requested": {
15 + "type": "version",
16 + "registry": true,
17 + "raw": "tar-stream@2.0.1",
18 + "name": "tar-stream",
19 + "escapedName": "tar-stream",
20 + "rawSpec": "2.0.1",
21 + "saveSpec": null,
22 + "fetchSpec": "2.0.1"
23 + },
24 + "_requiredBy": [
25 + "/"
26 + ],
27 + "_resolved": "http://localhost:8989/blobs/get/&vl/gEfGo3Z4bhYiHqE8WxrAr3E53zYHTd6fiAnSni94=.sha256",
28 + "_spec": "2.0.1",
29 + "_where": "/home/cel/src/ssb-npm-registry",
30 + "author": {
31 + "name": "Mathias Buus",
32 + "email": "mathiasbuus@gmail.com"
33 + },
34 + "bugs": {
35 + "url": "https://github.com/mafintosh/tar-stream/issues"
36 + },
37 + "dependencies": {
38 + "bl": "^3.0.0",
39 + "end-of-stream": "^1.4.1",
40 + "fs-constants": "^1.0.0",
41 + "inherits": "^2.0.3",
42 + "readable-stream": "^3.1.1"
43 + },
44 + "description": "tar-stream is a streaming tar parser and generator and nothing else. It is streams2 and operates purely using streams which means you can easily extract/parse tarballs without ever hitting the file system.",
45 + "devDependencies": {
46 + "concat-stream": "^2.0.0",
47 + "standard": "^12.0.1",
48 + "tape": "^4.9.2"
49 + },
50 + "directories": {
51 + "test": "test"
52 + },
53 + "files": [
54 + "*.js",
55 + "LICENSE"
56 + ],
57 + "homepage": "https://github.com/mafintosh/tar-stream",
58 + "keywords": [
59 + "tar",
60 + "tarball",
61 + "parse",
62 + "parser",
63 + "generate",
64 + "generator",
65 + "stream",
66 + "stream2",
67 + "streams",
68 + "streams2",
69 + "streaming",
70 + "pack",
71 + "extract",
72 + "modify"
73 + ],
74 + "license": "MIT",
75 + "main": "index.js",
76 + "name": "tar-stream",
77 + "repository": {
78 + "type": "git",
79 + "url": "git+https://github.com/mafintosh/tar-stream.git"
80 + },
81 + "scripts": {
82 + "test": "standard && tape test/extract.js test/pack.js",
83 + "test-all": "standard && tape test/*.js"
84 + },
85 + "version": "2.0.1"
86 +}
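The description field above notes that tarballs can be parsed without ever hitting the file system; a minimal extraction sketch (assuming the package's extract() export, which is not shown in this diff) would be:

```javascript
// Sketch only: stream a tarball through tar.extract() and inspect entries
var tar = require('tar-stream')
var fs = require('fs')

var extract = tar.extract()

extract.on('entry', function (header, stream, next) {
  console.log('entry:', header.name, header.size, 'bytes')
  stream.on('end', next) // advance once this entry is fully consumed
  stream.resume()        // drain the entry's contents; nothing is written to disk
})

extract.on('finish', function () {
  console.log('archive fully parsed')
})

fs.createReadStream('out.tar').pipe(extract)
```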
node_modules/util-deprecate/History.md
@@ -1,0 +1,16 @@
1 +
2 +1.0.2 / 2015-10-07
3 +==================
4 +
5 + * use try/catch when checking `localStorage` (#3, @kumavis)
6 +
7 +1.0.1 / 2014-11-25
8 +==================
9 +
10 + * browser: use `console.warn()` for deprecation calls
11 + * browser: more jsdocs
12 +
13 +1.0.0 / 2014-04-30
14 +==================
15 +
16 + * initial commit
node_modules/util-deprecate/LICENSE
@@ -1,0 +1,24 @@
1 +(The MIT License)
2 +
3 +Copyright (c) 2014 Nathan Rajlich <nathan@tootallnate.net>
4 +
5 +Permission is hereby granted, free of charge, to any person
6 +obtaining a copy of this software and associated documentation
7 +files (the "Software"), to deal in the Software without
8 +restriction, including without limitation the rights to use,
9 +copy, modify, merge, publish, distribute, sublicense, and/or sell
10 +copies of the Software, and to permit persons to whom the
11 +Software is furnished to do so, subject to the following
12 +conditions:
13 +
14 +The above copyright notice and this permission notice shall be
15 +included in all copies or substantial portions of the Software.
16 +
17 +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
18 +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
19 +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
20 +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
21 +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
22 +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
23 +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
24 +OTHER DEALINGS IN THE SOFTWARE.
node_modules/util-deprecate/README.md
@@ -1,0 +1,53 @@
1 +util-deprecate
2 +==============
3 +### The Node.js `util.deprecate()` function with browser support
4 +
5 +In Node.js, this module simply re-exports the `util.deprecate()` function.
6 +
7 +In the web browser (i.e. via browserify), a browser-specific implementation
8 +of the `util.deprecate()` function is used.
9 +
10 +
11 +## API
12 +
13 +A `deprecate()` function is the only thing exposed by this module.
14 +
15 +``` javascript
16 +// setup:
17 +exports.foo = deprecate(foo, 'foo() is deprecated, use bar() instead');
18 +
19 +
20 +// users see:
21 +foo();
22 +// foo() is deprecated, use bar() instead
23 +foo();
24 +foo();
25 +```
26 +
27 +
28 +## License
29 +
30 +(The MIT License)
31 +
32 +Copyright (c) 2014 Nathan Rajlich <nathan@tootallnate.net>
33 +
34 +Permission is hereby granted, free of charge, to any person
35 +obtaining a copy of this software and associated documentation
36 +files (the "Software"), to deal in the Software without
37 +restriction, including without limitation the rights to use,
38 +copy, modify, merge, publish, distribute, sublicense, and/or sell
39 +copies of the Software, and to permit persons to whom the
40 +Software is furnished to do so, subject to the following
41 +conditions:
42 +
43 +The above copyright notice and this permission notice shall be
44 +included in all copies or substantial portions of the Software.
45 +
46 +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
47 +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
48 +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
49 +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
50 +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
51 +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
52 +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
53 +OTHER DEALINGS IN THE SOFTWARE.
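The API example in the util-deprecate README above omits the require; a self-contained version of the same example (bar() is a hypothetical replacement function) could read:

```javascript
// Complete version of the README example; names are illustrative.
var deprecate = require('util-deprecate')

function bar () { return 42 }
function foo () { return bar() }

exports.foo = deprecate(foo, 'foo() is deprecated, use bar() instead')

exports.foo() // emits the deprecation warning once
exports.foo() // subsequent calls are silent
```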
node_modules/util-deprecate/browser.js
@@ -1,0 +1,67 @@
1 +
2 +/**
3 + * Module exports.
4 + */
5 +
6 +module.exports = deprecate;
7 +
8 +/**
9 + * Mark that a method should not be used.
10 + * Returns a modified function which warns once by default.
11 + *
12 + * If `localStorage.noDeprecation = true` is set, then it is a no-op.
13 + *
14 + * If `localStorage.throwDeprecation = true` is set, then deprecated functions
15 + * will throw an Error when invoked.
16 + *
17 + * If `localStorage.traceDeprecation = true` is set, then deprecated functions
18 + * will invoke `console.trace()` instead of `console.error()`.
19 + *
20 + * @param {Function} fn - the function to deprecate
21 + * @param {String} msg - the string to print to the console when `fn` is invoked
22 + * @returns {Function} a new "deprecated" version of `fn`
23 + * @api public
24 + */
25 +
26 +function deprecate (fn, msg) {
27 + if (config('noDeprecation')) {
28 + return fn;
29 + }
30 +
31 + var warned = false;
32 + function deprecated() {
33 + if (!warned) {
34 + if (config('throwDeprecation')) {
35 + throw new Error(msg);
36 + } else if (config('traceDeprecation')) {
37 + console.trace(msg);
38 + } else {
39 + console.warn(msg);
40 + }
41 + warned = true;
42 + }
43 + return fn.apply(this, arguments);
44 + }
45 +
46 + return deprecated;
47 +}
48 +
49 +/**
50 + * Checks `localStorage` for boolean values for the given `name`.
51 + *
52 + * @param {String} name
53 + * @returns {Boolean}
54 + * @api private
55 + */
56 +
57 +function config (name) {
58 + // accessing global.localStorage can trigger a DOMException in sandboxed iframes
59 + try {
60 + if (!global.localStorage) return false;
61 + } catch (_) {
62 + return false;
63 + }
64 + var val = global.localStorage[name];
65 + if (null == val) return false;
66 + return String(val).toLowerCase() === 'true';
67 +}
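As the comments in browser.js above describe, behaviour is switched via localStorage flags, and config() compares the stored value against the string 'true'. A browser-only sketch:

```javascript
// Browser-only sketch of the localStorage switches documented above.
var deprecate = require('util-deprecate') // resolves to browser.js under browserify

localStorage.traceDeprecation = 'true' // use console.trace() instead of console.warn()

var oldApi = deprecate(function () { return 1 }, 'oldApi() is deprecated')
oldApi() // first call: console.trace('oldApi() is deprecated'), then runs
oldApi() // later calls: no further output

// Note: noDeprecation must be set before deprecate() is called, since it is
// checked once at wrap time and makes deprecate() return fn unchanged.
localStorage.noDeprecation = 'true'
var quietApi = deprecate(function () { return 2 }, 'never shown')
quietApi() // no warning at all
```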
node_modules/util-deprecate/node.js
@@ -1,0 +1,6 @@
1 +
2 +/**
3 + * For Node.js, simply re-export the core `util.deprecate` function.
4 + */
5 +
6 +module.exports = require('util').deprecate;
node_modules/util-deprecate/package.json
@@ -1,0 +1,59 @@
1 +{
2 + "_args": [
3 + [
4 + "util-deprecate@1.0.2",
5 + "/home/cel/src/ssb-npm-registry"
6 + ]
7 + ],
8 + "_from": "util-deprecate@1.0.2",
9 + "_id": "util-deprecate@1.0.2",
10 + "_inBundle": false,
11 + "_integrity": "sha256-eaHemDwbOTGAxHRW1rc8qrJ4oA6m431cZnXy3N7Co+U=",
12 + "_location": "/util-deprecate",
13 + "_phantomChildren": {},
14 + "_requested": {
15 + "type": "version",
16 + "registry": true,
17 + "raw": "util-deprecate@1.0.2",
18 + "name": "util-deprecate",
19 + "escapedName": "util-deprecate",
20 + "rawSpec": "1.0.2",
21 + "saveSpec": null,
22 + "fetchSpec": "1.0.2"
23 + },
24 + "_requiredBy": [
25 + "/readable-stream"
26 + ],
27 + "_resolved": "http://localhost:8989/blobs/get/&eaHemDwbOTGAxHRW1rc8qrJ4oA6m431cZnXy3N7Co+U=.sha256",
28 + "_spec": "1.0.2",
29 + "_where": "/home/cel/src/ssb-npm-registry",
30 + "author": {
31 + "name": "Nathan Rajlich",
32 + "email": "nathan@tootallnate.net",
33 + "url": "http://n8.io/"
34 + },
35 + "browser": "browser.js",
36 + "bugs": {
37 + "url": "https://github.com/TooTallNate/util-deprecate/issues"
38 + },
39 + "description": "The Node.js `util.deprecate()` function with browser support",
40 + "homepage": "https://github.com/TooTallNate/util-deprecate",
41 + "keywords": [
42 + "util",
43 + "deprecate",
44 + "browserify",
45 + "browser",
46 + "node"
47 + ],
48 + "license": "MIT",
49 + "main": "node.js",
50 + "name": "util-deprecate",
51 + "repository": {
52 + "type": "git",
53 + "url": "git://github.com/TooTallNate/util-deprecate.git"
54 + },
55 + "scripts": {
56 + "test": "echo \"Error: no test specified\" && exit 1"
57 + },
58 + "version": "1.0.2"
59 +}
node_modules/wrappy/LICENSE
@@ -1,0 +1,15 @@
1 +The ISC License
2 +
3 +Copyright (c) Isaac Z. Schlueter and Contributors
4 +
5 +Permission to use, copy, modify, and/or distribute this software for any
6 +purpose with or without fee is hereby granted, provided that the above
7 +copyright notice and this permission notice appear in all copies.
8 +
9 +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
10 +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
11 +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
12 +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
13 +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
14 +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
15 +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
node_modules/wrappy/README.md
@@ -1,0 +1,36 @@
1 +# wrappy
2 +
3 +Callback wrapping utility
4 +
5 +## USAGE
6 +
7 +```javascript
8 +var wrappy = require("wrappy")
9 +
10 +// var wrapper = wrappy(wrapperFunction)
11 +
12 +// make sure a cb is called only once
13 +// See also: http://npm.im/once for this specific use case
14 +var once = wrappy(function (cb) {
15 + var called = false
16 + return function () {
17 + if (called) return
18 + called = true
19 + return cb.apply(this, arguments)
20 + }
21 +})
22 +
23 +function printBoo () {
24 + console.log('boo')
25 +}
26 +// has some rando property
27 +printBoo.iAmBooPrinter = true
28 +
29 +var onlyPrintOnce = once(printBoo)
30 +
31 +onlyPrintOnce() // prints 'boo'
32 +onlyPrintOnce() // does nothing
33 +
34 +// random property is retained!
35 +assert.equal(onlyPrintOnce.iAmBooPrinter, true)
36 +```
node_modules/wrappy/package.json
@@ -1,0 +1,61 @@
1 +{
2 + "_args": [
3 + [
4 + "wrappy@1.0.2",
5 + "/home/cel/src/ssb-npm-registry"
6 + ]
7 + ],
8 + "_from": "wrappy@1.0.2",
9 + "_id": "wrappy@1.0.2",
10 + "_inBundle": false,
11 + "_integrity": "sha256-r/NzDZG3seFDgilW0UYI9WMWPPEbnQrmAt8f4eQw/fs=",
12 + "_location": "/wrappy",
13 + "_phantomChildren": {},
14 + "_requested": {
15 + "type": "version",
16 + "registry": true,
17 + "raw": "wrappy@1.0.2",
18 + "name": "wrappy",
19 + "escapedName": "wrappy",
20 + "rawSpec": "1.0.2",
21 + "saveSpec": null,
22 + "fetchSpec": "1.0.2"
23 + },
24 + "_requiredBy": [
25 + "/once"
26 + ],
27 + "_resolved": "http://localhost:8989/blobs/get/&r/NzDZG3seFDgilW0UYI9WMWPPEbnQrmAt8f4eQw/fs=.sha256",
28 + "_spec": "1.0.2",
29 + "_where": "/home/cel/src/ssb-npm-registry",
30 + "author": {
31 + "name": "Isaac Z. Schlueter",
32 + "email": "i@izs.me",
33 + "url": "http://blog.izs.me/"
34 + },
35 + "bugs": {
36 + "url": "https://github.com/npm/wrappy/issues"
37 + },
38 + "dependencies": {},
39 + "description": "Callback wrapping utility",
40 + "devDependencies": {
41 + "tap": "^2.3.1"
42 + },
43 + "directories": {
44 + "test": "test"
45 + },
46 + "files": [
47 + "wrappy.js"
48 + ],
49 + "homepage": "https://github.com/npm/wrappy",
50 + "license": "ISC",
51 + "main": "wrappy.js",
52 + "name": "wrappy",
53 + "repository": {
54 + "type": "git",
55 + "url": "git+https://github.com/npm/wrappy.git"
56 + },
57 + "scripts": {
58 + "test": "tap --coverage test/*.js"
59 + },
60 + "version": "1.0.2"
61 +}
node_modules/wrappy/wrappy.js
@@ -1,0 +1,33 @@
1 +// Returns a wrapper function that returns a wrapped callback
2 +// The wrapper function should do some stuff, and return a
3 +// presumably different callback function.
4 +// This makes sure that own properties are retained, so that
5 +// decorations and such are not lost along the way.
6 +module.exports = wrappy
7 +function wrappy (fn, cb) {
8 + if (fn && cb) return wrappy(fn)(cb)
9 +
10 + if (typeof fn !== 'function')
11 + throw new TypeError('need wrapper function')
12 +
13 + Object.keys(fn).forEach(function (k) {
14 + wrapper[k] = fn[k]
15 + })
16 +
17 + return wrapper
18 +
19 + function wrapper() {
20 + var args = new Array(arguments.length)
21 + for (var i = 0; i < args.length; i++) {
22 + args[i] = arguments[i]
23 + }
24 + var ret = fn.apply(this, args)
25 + var cb = args[args.length-1]
26 + if (typeof ret === 'function' && ret !== cb) {
27 + Object.keys(cb).forEach(function (k) {
28 + ret[k] = cb[k]
29 + })
30 + }
31 + return ret
32 + }
33 +}
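One detail the wrappy README above does not show: the first line of wrappy() makes wrappy(fn, cb) shorthand for wrappy(fn)(cb), and own properties of the callback are copied onto the returned function. A small sketch with hypothetical names:

```javascript
// Sketch only: the two-argument shorthand and callback-property retention
var wrappy = require('wrappy')

function logWrapper (cb) {
  return function () {
    console.log('about to call back')
    return cb.apply(this, arguments)
  }
}

function done () { console.log('done') }
done.isDone = true // an own property worth preserving

var wrapped = wrappy(logWrapper, done) // same as wrappy(logWrapper)(done)
wrapped()                   // logs 'about to call back', then 'done'
console.log(wrapped.isDone) // true: copied from the callback onto the result
```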
