git ssb

3+

cel / ssb-npm-registry



Commit 4f27567900a7b5b821bccc74e95bfc96c8233159

Check in dependencies

cel committed on 5/17/2020, 11:40:58 PM
Parent: aaa1df0018178db95a6bc4d24a4db3ec9f8975bc

Files changed

README.mdchanged
node_modules/.bin/semveradded
node_modules/asyncmemo/README.mdadded
node_modules/asyncmemo/index.jsadded
node_modules/asyncmemo/package.jsonadded
node_modules/asyncmemo/test.jsadded
node_modules/bl/.jshintrcadded
node_modules/bl/.travis.ymladded
node_modules/bl/LICENSE.mdadded
node_modules/bl/README.mdadded
node_modules/bl/bl.jsadded
node_modules/bl/package.jsonadded
node_modules/bl/test/indexOf.jsadded
node_modules/bl/test/test.jsadded
node_modules/end-of-stream/LICENSEadded
node_modules/end-of-stream/README.mdadded
node_modules/end-of-stream/index.jsadded
node_modules/end-of-stream/package.jsonadded
node_modules/fs-constants/LICENSEadded
node_modules/fs-constants/README.mdadded
node_modules/fs-constants/browser.jsadded
node_modules/fs-constants/index.jsadded
node_modules/fs-constants/package.jsonadded
node_modules/hashlru/.travis.ymladded
node_modules/hashlru/LICENSEadded
node_modules/hashlru/README.mdadded
node_modules/hashlru/bench.jsadded
node_modules/hashlru/index.d.tsadded
node_modules/hashlru/index.jsadded
node_modules/hashlru/package.jsonadded
node_modules/hashlru/test/test.jsadded
node_modules/inherits/LICENSEadded
node_modules/inherits/README.mdadded
node_modules/inherits/inherits.jsadded
node_modules/inherits/inherits_browser.jsadded
node_modules/inherits/package.jsonadded
node_modules/looper/.travis.ymladded
node_modules/looper/LICENSEadded
node_modules/looper/README.mdadded
node_modules/looper/index.jsadded
node_modules/looper/package.jsonadded
node_modules/looper/test/test.jsadded
node_modules/multicb/README.mdadded
node_modules/multicb/index.jsadded
node_modules/multicb/package.jsonadded
node_modules/multicb/test/errors-multi.jsadded
node_modules/multicb/test/errors.jsadded
node_modules/multicb/test/multicb.jsadded
node_modules/multicb/test/pluck.jsadded
node_modules/multicb/test/spread.jsadded
node_modules/once/LICENSEadded
node_modules/once/README.mdadded
node_modules/once/once.jsadded
node_modules/once/package.jsonadded
node_modules/pull-cat/.npmignoreadded
node_modules/pull-cat/.travis.ymladded
node_modules/pull-cat/LICENSEadded
node_modules/pull-cat/README.mdadded
node_modules/pull-cat/index.jsadded
node_modules/pull-cat/package.jsonadded
node_modules/pull-cat/test/index.jsadded
node_modules/pull-file/.npmignoreadded
node_modules/pull-file/.travis.ymladded
node_modules/pull-file/README.mdadded
node_modules/pull-file/examples/bench.jsadded
node_modules/pull-file/examples/ipsum-chunks.jsadded
node_modules/pull-file/examples/node-rate.jsadded
node_modules/pull-file/examples/rate.jsadded
node_modules/pull-file/index.jsadded
node_modules/pull-file/package.jsonadded
node_modules/pull-file/test/append.jsadded
node_modules/pull-file/test/assets/ipsum.txtadded
node_modules/pull-file/test/assets/test.txtadded
node_modules/pull-file/test/explicit-buffer.jsadded
node_modules/pull-file/test/fd.jsadded
node_modules/pull-file/test/largefile.jsadded
node_modules/pull-file/test/partial.jsadded
node_modules/pull-file/test/small.jsadded
node_modules/pull-file/test/terminate-read.jsadded
node_modules/pull-hash/README.mdadded
node_modules/pull-hash/ext/git.jsadded
node_modules/pull-hash/ext/ssb.jsadded
node_modules/pull-hash/index.jsadded
node_modules/pull-hash/package.jsonadded
node_modules/pull-hash/test.jsadded
node_modules/pull-stream/.travis.ymladded
node_modules/pull-stream/LICENSEadded
node_modules/pull-stream/README.mdadded
node_modules/pull-stream/benchmarks/node.jsadded
node_modules/pull-stream/benchmarks/pull.jsadded
node_modules/pull-stream/docs/examples.mdadded
node_modules/pull-stream/docs/glossary.mdadded
node_modules/pull-stream/docs/pull.mdadded
node_modules/pull-stream/docs/sinks/collect.mdadded
node_modules/pull-stream/docs/sinks/concat.mdadded
node_modules/pull-stream/docs/sinks/drain.mdadded
node_modules/pull-stream/docs/sinks/index.mdadded
node_modules/pull-stream/docs/sinks/log.mdadded
node_modules/pull-stream/docs/sinks/on-end.mdadded
node_modules/pull-stream/docs/sinks/reduce.mdadded
node_modules/pull-stream/docs/sources/count.mdadded
node_modules/pull-stream/docs/sources/empty.mdadded
node_modules/pull-stream/docs/sources/error.mdadded
node_modules/pull-stream/docs/sources/index.mdadded
node_modules/pull-stream/docs/sources/infinite.mdadded
node_modules/pull-stream/docs/sources/keys.mdadded
node_modules/pull-stream/docs/sources/once.mdadded
node_modules/pull-stream/docs/sources/values.mdadded
node_modules/pull-stream/docs/spec.mdadded
node_modules/pull-stream/docs/throughs/async-map.mdadded
node_modules/pull-stream/docs/throughs/filter-not.mdadded
node_modules/pull-stream/docs/throughs/filter.mdadded
node_modules/pull-stream/docs/throughs/flatten.mdadded
node_modules/pull-stream/docs/throughs/index.mdadded
node_modules/pull-stream/docs/throughs/map.mdadded
node_modules/pull-stream/docs/throughs/non-unique.mdadded
node_modules/pull-stream/docs/throughs/take.mdadded
node_modules/pull-stream/docs/throughs/through.mdadded
node_modules/pull-stream/docs/throughs/unique.mdadded
node_modules/pull-stream/index.jsadded
node_modules/pull-stream/package.jsonadded
node_modules/pull-stream/pull.jsadded
node_modules/pull-stream/sinks/collect.jsadded
node_modules/pull-stream/sinks/concat.jsadded
node_modules/pull-stream/sinks/drain.jsadded
node_modules/pull-stream/sinks/find.jsadded
node_modules/pull-stream/sinks/index.jsadded
node_modules/pull-stream/sinks/log.jsadded
node_modules/pull-stream/sinks/on-end.jsadded
node_modules/pull-stream/sinks/reduce.jsadded
node_modules/pull-stream/sources/count.jsadded
node_modules/pull-stream/sources/empty.jsadded
node_modules/pull-stream/sources/error.jsadded
node_modules/pull-stream/sources/index.jsadded
node_modules/pull-stream/sources/infinite.jsadded
node_modules/pull-stream/sources/keys.jsadded
node_modules/pull-stream/sources/once.jsadded
node_modules/pull-stream/sources/values.jsadded
node_modules/pull-stream/test/abort-stalled.jsadded
node_modules/pull-stream/test/async-map.jsadded
node_modules/pull-stream/test/collect.jsadded
node_modules/pull-stream/test/compose.jsadded
node_modules/pull-stream/test/concat.jsadded
node_modules/pull-stream/test/continuable.jsadded
node_modules/pull-stream/test/drain-abort.jsadded
node_modules/pull-stream/test/drain-if.jsadded
node_modules/pull-stream/test/filter.jsadded
node_modules/pull-stream/test/find.jsadded
node_modules/pull-stream/test/flatten.jsadded
node_modules/pull-stream/test/map.jsadded
node_modules/pull-stream/test/pull.jsadded
node_modules/pull-stream/test/take.jsadded
node_modules/pull-stream/test/through.jsadded
node_modules/pull-stream/test/unique.jsadded
node_modules/pull-stream/test/values.jsadded
node_modules/pull-stream/throughs/async-map.jsadded
node_modules/pull-stream/throughs/filter-not.jsadded
node_modules/pull-stream/throughs/filter.jsadded
node_modules/pull-stream/throughs/flatten.jsadded
node_modules/pull-stream/throughs/index.jsadded
node_modules/pull-stream/throughs/map.jsadded
node_modules/pull-stream/throughs/non-unique.jsadded
node_modules/pull-stream/throughs/take.jsadded
node_modules/pull-stream/throughs/through.jsadded
node_modules/pull-stream/throughs/unique.jsadded
node_modules/pull-stream/util/abort-cb.jsadded
node_modules/pull-stream/util/prop.jsadded
node_modules/pull-stream/util/tester.jsadded
node_modules/pull-utf8-decoder/.travis.ymladded
node_modules/pull-utf8-decoder/LICENSEadded
node_modules/pull-utf8-decoder/README.mdadded
node_modules/pull-utf8-decoder/index.jsadded
node_modules/pull-utf8-decoder/package.jsonadded
node_modules/pull-utf8-decoder/test/index.jsadded
node_modules/readable-stream/CONTRIBUTING.mdadded
node_modules/readable-stream/GOVERNANCE.mdadded
node_modules/readable-stream/LICENSEadded
node_modules/readable-stream/README.mdadded
node_modules/readable-stream/errors-browser.jsadded
node_modules/readable-stream/errors.jsadded
node_modules/readable-stream/experimentalWarning.jsadded
node_modules/readable-stream/lib/_stream_duplex.jsadded
node_modules/readable-stream/lib/_stream_passthrough.jsadded
node_modules/readable-stream/lib/_stream_readable.jsadded
node_modules/readable-stream/lib/_stream_transform.jsadded
node_modules/readable-stream/lib/_stream_writable.jsadded
node_modules/readable-stream/lib/internal/streams/async_iterator.jsadded
node_modules/readable-stream/lib/internal/streams/buffer_list.jsadded
node_modules/readable-stream/lib/internal/streams/destroy.jsadded
node_modules/readable-stream/lib/internal/streams/end-of-stream.jsadded
node_modules/readable-stream/lib/internal/streams/pipeline.jsadded
node_modules/readable-stream/lib/internal/streams/state.jsadded
node_modules/readable-stream/lib/internal/streams/stream-browser.jsadded
node_modules/readable-stream/lib/internal/streams/stream.jsadded
node_modules/readable-stream/package.jsonadded
node_modules/readable-stream/readable-browser.jsadded
node_modules/readable-stream/readable.jsadded
node_modules/safe-buffer/LICENSEadded
node_modules/safe-buffer/README.mdadded
node_modules/safe-buffer/index.d.tsadded
node_modules/safe-buffer/index.jsadded
node_modules/safe-buffer/package.jsonadded
node_modules/semver/CHANGELOG.mdadded
node_modules/semver/LICENSEadded
node_modules/semver/README.mdadded
node_modules/semver/bin/semver.jsadded
node_modules/semver/package.jsonadded
node_modules/semver/range.bnfadded
node_modules/semver/semver.jsadded
node_modules/stream-to-pull-stream/.travis.ymladded
node_modules/stream-to-pull-stream/LICENSEadded
node_modules/stream-to-pull-stream/README.mdadded
node_modules/stream-to-pull-stream/index.jsadded
node_modules/stream-to-pull-stream/package.jsonadded
node_modules/stream-to-pull-stream/test/abort.jsadded
node_modules/stream-to-pull-stream/test/close.jsadded
node_modules/stream-to-pull-stream/test/collect.jsadded
node_modules/stream-to-pull-stream/test/http.jsadded
node_modules/stream-to-pull-stream/test/index.jsadded
node_modules/stream-to-pull-stream/test/sink.jsadded
node_modules/stream-to-pull-stream/test/stack.jsadded
node_modules/stream-to-pull-stream/test/stdout.jsadded
node_modules/stream-to-pull-stream/test/streams2.jsadded
node_modules/stream-to-pull-stream/test/test-stdout.jsadded
node_modules/string_decoder/LICENSEadded
node_modules/string_decoder/README.mdadded
node_modules/string_decoder/lib/string_decoder.jsadded
node_modules/string_decoder/package.jsonadded
node_modules/tar-stream/LICENSEadded
node_modules/tar-stream/README.mdadded
node_modules/tar-stream/extract.jsadded
node_modules/tar-stream/headers.jsadded
node_modules/tar-stream/index.jsadded
node_modules/tar-stream/pack.jsadded
node_modules/tar-stream/package.jsonadded
node_modules/util-deprecate/History.mdadded
node_modules/util-deprecate/LICENSEadded
node_modules/util-deprecate/README.mdadded
node_modules/util-deprecate/browser.jsadded
node_modules/util-deprecate/node.jsadded
node_modules/util-deprecate/package.jsonadded
node_modules/wrappy/LICENSEadded
node_modules/wrappy/README.mdadded
node_modules/wrappy/package.jsonadded
node_modules/wrappy/wrappy.jsadded
README.mdView
@@ -24,11 +24,8 @@
2424 As a scuttlebot plugin:
2525 ```
2626 git clone ssb://%pFqjcdVKHqsrtOjVEAVZeCF0iY4s+3Hr0vA3EFCq5UM=.sha256 ~/.ssb/node_modules/ssb-npm-registry
2727 cd ~/.ssb/node_modules/ssb-npm-registry
28-wget -qO- 'http://localhost:8989/blobs/get/&E+tZfD6eodncvEddM3QAfsmzTJ003jlPGsqFN5TO7sQ=.sha256' | tar xz package/node_modules
29-mv package/node_modules node_modules
30-rmdir package
3128 ./enable.js
3229 # restart sbot
3330 ```
3431
node_modules/.bin/semverView
@@ -1,0 +1,1 @@
1 +../semver/bin/semver.js
node_modules/asyncmemo/README.mdView
@@ -1,0 +1,60 @@
1 +# asyncmemo
2 +
3 +Memoize asynchronous function calls. Combine multiple calls with the same
4 +argument so that the underlying function is only called once for that argument.
5 +Optionally cache the result for that argument.
6 +
7 +## Install
8 +
9 +```
10 +npm install --save asyncmemo
11 +```
12 +
13 +## Example
14 +
15 +```js
16 +var asyncMemo = require('asyncmemo')
17 +var xhr = require('xhr')
18 +var get = asyncMemo(function (base, path, cb) {
19 + xhr.get(base + path, cb)
20 +}, 'http://localhost')
21 +get('/foo', function (err, resp) {
22 + console.log(err, resp)
23 +})
24 +```
25 +
26 +## API
27 +
28 +### `asyncMemo([opts,] fn, [args...]): memo`
29 +
30 +- `opts.cache`: cache object, or false to disable cache
31 +- `opts.asString`: function to convert an argument to a string
32 +- `fn([args...], arg, cb(err, result))`: the asynchronous function to memoize
33 +- `args`: arguments to prepend to each call to `fn`
34 +- `memo(arg, cb(err, result))`: memoized asynchronous function
35 +- `memo.cache`: cache object, either `opts.cache` or an auto-created one
36 +
37 +A custom cache object can be passed using the `cache` option. This should have
38 +`has`, `get`, and `set` methods.
39 +
40 +If you want to be able to memoize a key that is not a string or number, you
41 +can specify an `asString` function to convert the key argument to a string.
42 +
43 +## Related
44 +
45 +- [async-memo](https://www.npmjs.com/package/async-memo)
46 +- [async-cache](https://www.npmjs.com/package/async-cache)
47 +- [memo-cache](https://www.npmjs.com/package/memo-cache)
48 +- [memoizeasync](https://www.npmjs.com/package/memoizeasync)
49 +- [thunky](https://github.com/mafintosh/thunky) - does the same except memoizes
50 + only one value
51 +
52 +## License
53 +
54 +Copyright (c) 2016 Charles Lehner
55 +
56 +Usage of the works is permitted provided that this instrument is
57 +retained with the works, so that any entity that uses the works is
58 +notified of this instrument.
59 +
60 +DISCLAIMER: THE WORKS ARE WITHOUT WARRANTY.
node_modules/asyncmemo/index.jsView
@@ -1,0 +1,72 @@
1 +var has = Object.prototype.hasOwnProperty
2 +
3 +function toArgString() {
4 + return [].join.call(arguments)
5 +}
6 +
7 +module.exports = function (opts, fn /*, preArgs... */) {
8 + var preArgs = [].slice.call(arguments, 2)
9 + if (typeof opts === 'function') {
10 + if (arguments.length >= 2) preArgs.unshift(fn)
11 + fn = opts
12 + opts = {}
13 + }
14 + var cache =
15 + opts.cache === false ? null :
16 + opts.cache === true || opts.cache == null ? new Storage() :
17 + opts.cache
18 + var callbacks = {/* arg: [callback] */}
19 + var toString = opts.asString || toArgString
20 +
21 + var memoized = function (/* args..., cb */) {
22 + var args = [].slice.call(arguments)
23 + var cb = args.pop()
24 + var memo = toString.apply(this, args)
25 + if (cache && cache.has(memo)) {
26 + var self = this
27 + return process.nextTick(function () {
28 + if (cache.has(memo))
29 + cb.call(self, null, cache.get(memo))
30 + else
31 + run.call(self, args, memo, cb)
32 + })
33 + }
34 + run.call(this, args, memo, cb)
35 + }
36 + memoized.cache = cache
37 + return memoized
38 +
39 + function run(args, memo, cb) {
40 + if (has.call(callbacks, memo))
41 + return callbacks[memo].push([this, cb])
42 + var cbs = callbacks[memo] = [[this, cb]]
43 + fn.apply(this, preArgs.concat(args, function (err, result) {
44 + if (!err && cache)
45 + cache.set(memo, result)
46 + while (cbs.length) {
47 + cb = cbs.shift()
48 + cb[1].call(cb[0], err, result)
49 + }
50 + delete callbacks[memo]
51 + }))
52 + }
53 +}
54 +
55 +function Storage() {
56 + this.data = {}
57 +}
58 +Storage.prototype.has = function (key) {
59 + return has.call(this.data, key)
60 +}
61 +Storage.prototype.get = function (key) {
62 + return this.data[key]
63 +}
64 +Storage.prototype.set = function (key, value) {
65 + this.data[key] = value
66 +}
67 +Storage.prototype.remove = function (key) {
68 + delete this.data[key]
69 +}
70 +Storage.prototype.clear = function (key) {
71 + this.data = {}
72 +}
node_modules/asyncmemo/package.jsonView
@@ -1,0 +1,59 @@
1 +{
2 + "_args": [
3 + [
4 + "asyncmemo@1.3.0",
5 + "/home/cel/src/ssb-npm-registry"
6 + ]
7 + ],
8 + "_from": "asyncmemo@1.3.0",
9 + "_id": "asyncmemo@1.3.0",
10 + "_inBundle": false,
11 + "_integrity": "sha256-U96yJbGCHK+6wk8CK/IAj5Mypb1AXtyxvxl1E0zMqRI=",
12 + "_location": "/asyncmemo",
13 + "_phantomChildren": {},
14 + "_requested": {
15 + "type": "version",
16 + "registry": true,
17 + "raw": "asyncmemo@1.3.0",
18 + "name": "asyncmemo",
19 + "escapedName": "asyncmemo",
20 + "rawSpec": "1.3.0",
21 + "saveSpec": null,
22 + "fetchSpec": "1.3.0"
23 + },
24 + "_requiredBy": [
25 + "/"
26 + ],
27 + "_resolved": "http://localhost:8989/blobs/get/&U96yJbGCHK+6wk8CK/IAj5Mypb1AXtyxvxl1E0zMqRI=.sha256",
28 + "_spec": "1.3.0",
29 + "_where": "/home/cel/src/ssb-npm-registry",
30 + "author": {
31 + "name": "Charles Lehner",
32 + "url": "http://celehner.com/"
33 + },
34 + "bugs": {
35 + "url": "https://github.com/clehner/asyncmemo/issues"
36 + },
37 + "description": "memoize asynchronous function calls",
38 + "devDependencies": {
39 + "tape": "^4.6.2"
40 + },
41 + "homepage": "https://github.com/clehner/asyncmemo#readme",
42 + "keywords": [
43 + "async",
44 + "cache",
45 + "memo",
46 + "memoize"
47 + ],
48 + "license": "Fair",
49 + "main": "index.js",
50 + "name": "asyncmemo",
51 + "repository": {
52 + "type": "git",
53 + "url": "git+https://github.com/clehner/asyncmemo.git"
54 + },
55 + "scripts": {
56 + "test": "node test"
57 + },
58 + "version": "1.3.0"
59 +}
node_modules/asyncmemo/test.jsView
@@ -1,0 +1,139 @@
1 +var asyncMemo = require('.')
2 +var test = require('tape')
3 +
4 +var i = 0
5 +var get = asyncMemo(function (foo, bar, arg, cb) {
6 + setTimeout(function () {
7 + cb(i == 3, [foo, bar, arg, i++].join('-'))
8 + }, 10)
9 +}, 'foo', 'bar')
10 +
11 +test('memoize values', function (t) {
12 + get('thing', function (err, result) {
13 + t.error(err, 'thing')
14 + t.equal(result, 'foo-bar-thing-0')
15 +
16 + get('thing', function (err, result) {
17 + t.error(err, 'thing 2')
18 + t.equal(result, 'foo-bar-thing-0')
19 +
20 + get('pasta', function (err, result) {
21 + t.error(err, 'pasta')
22 + t.equal(result, 'foo-bar-pasta-1')
23 +
24 + get('pasta', function (err, result) {
25 + t.error(err, 'pasta 2')
26 + t.equal(result, 'foo-bar-pasta-1')
27 +
28 + get('more', function (err, result) {
29 + t.error(err, 'more')
30 + t.equal(result, 'foo-bar-more-2')
31 + t.end()
32 + })
33 + })
34 + })
35 + })
36 + })
37 +})
38 +
39 +test('don\'t cache results asynchronously with errors', function (t) {
40 + get('blah', function (err, result) {
41 + t.ok(err, 'error')
42 + setImmediate(function () {
43 + get('blah', function (err, result) {
44 + t.error(err, 'blah')
45 + t.equal(result, 'foo-bar-blah-4')
46 + t.end()
47 + })
48 + })
49 + })
50 +})
51 +
52 +test('handle concurrent requests', function (t) {
53 + t.plan(4)
54 + get('one', function (err, result) {
55 + t.error(err, 'one')
56 + t.equal(result, 'foo-bar-one-5')
57 + })
58 + get('one', function (err, result) {
59 + t.error(err, 'one 2')
60 + t.equal(result, 'foo-bar-one-5')
61 + })
62 +})
63 +
64 +test('preserve this', function (t) {
65 + var obj = {get: get}
66 + obj.get('thing', function (err, result) {
67 + t.error(err, 'thing')
68 + t.equal(this, obj)
69 + t.equal(result, 'foo-bar-thing-0')
70 + t.end()
71 + })
72 +})
73 +
74 +test('memoize undefined', function (t) {
75 + var i = 0
76 + var get = asyncMemo(function (arg, cb) {
77 + i++
78 + cb()
79 + })
80 +
81 + get('one', function (err, result) {
82 + t.error(err, 'one')
83 + t.equal(i, 1)
84 + t.equal(result, undefined)
85 +
86 + get('one', function (err, result) {
87 + t.error(err, 'two')
88 + t.equal(i, 1)
89 + t.equal(result, undefined)
90 + t.end()
91 + })
92 + })
93 +})
94 +
95 +test('without cache', function (t) {
96 + t.plan(4)
97 +
98 + var run = asyncMemo({cache: false}, function (arg, cb) {
99 + setImmediate(function () {
100 + t.pass('called')
101 + cb(null, arg)
102 + })
103 + })
104 +
105 + run('a', function () {
106 + t.pass('one')
107 +
108 + setImmediate(function () {
109 + run('a', function () {
110 + t.pass('two')
111 + })
112 + })
113 + })
114 +})
115 +
116 +test('toString', function (t) {
117 + var get = asyncMemo({
118 + asString: function (arg) {
119 + return arg.first + '-' + arg.last
120 + }
121 + }, function (arg, cb) {
122 + cb(null, {})
123 + })
124 +
125 + var abcdef
126 + get({first: 'abc', last: 'def'}, function (err, obj) {
127 + t.error(err)
128 + abcdef = obj
129 + })
130 + get({first: 'abc', last: 'def'}, function (err, obj) {
131 + t.error(err)
132 + t.equals(obj, abcdef)
133 + })
134 + get({first: 'zzz', last: 'def'}, function (err, obj) {
135 + t.error(err)
136 + t.notEquals(obj, abcdef)
137 + })
138 + t.end()
139 +})
node_modules/bl/.jshintrcView
@@ -1,0 +1,60 @@
1 +{
2 + "predef": [ ]
3 + , "bitwise": false
4 + , "camelcase": false
5 + , "curly": false
6 + , "eqeqeq": false
7 + , "forin": false
8 + , "immed": false
9 + , "latedef": false
10 + , "noarg": true
11 + , "noempty": true
12 + , "nonew": true
13 + , "plusplus": false
14 + , "quotmark": true
15 + , "regexp": false
16 + , "undef": true
17 + , "unused": true
18 + , "strict": false
19 + , "trailing": true
20 + , "maxlen": 120
21 + , "asi": true
22 + , "boss": true
23 + , "debug": true
24 + , "eqnull": true
25 + , "esnext": false
26 + , "evil": true
27 + , "expr": true
28 + , "funcscope": false
29 + , "globalstrict": false
30 + , "iterator": false
31 + , "lastsemic": true
32 + , "laxbreak": true
33 + , "laxcomma": true
34 + , "loopfunc": true
35 + , "multistr": false
36 + , "onecase": false
37 + , "proto": false
38 + , "regexdash": false
39 + , "scripturl": true
40 + , "smarttabs": false
41 + , "shadow": false
42 + , "sub": true
43 + , "supernew": false
44 + , "validthis": true
45 + , "browser": true
46 + , "couch": false
47 + , "devel": false
48 + , "dojo": false
49 + , "mootools": false
50 + , "node": true
51 + , "nonstandard": true
52 + , "prototypejs": false
53 + , "rhino": false
54 + , "worker": true
55 + , "wsh": false
56 + , "nomen": false
57 + , "onevar": false
58 + , "passfail": false
59 + , "esversion": 3
60 +}
node_modules/bl/.travis.ymlView
@@ -1,0 +1,13 @@
1 +sudo: false
2 +language: node_js
3 +node_js:
4 + - '6'
5 + - '8'
6 + - '10'
7 +branches:
8 + only:
9 + - master
10 +notifications:
11 + email:
12 + - rod@vagg.org
13 + - matteo.collina@gmail.com
node_modules/bl/LICENSE.mdView
@@ -1,0 +1,13 @@
1 +The MIT License (MIT)
2 +=====================
3 +
4 +Copyright (c) 2013-2018 bl contributors
5 +----------------------------------
6 +
7 +*bl contributors listed at <https://github.com/rvagg/bl#contributors>*
8 +
9 +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
10 +
11 +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
12 +
13 +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
node_modules/bl/README.mdView
@@ -1,0 +1,218 @@
1 +# bl *(BufferList)*
2 +
3 +[![Build Status](https://travis-ci.org/rvagg/bl.svg?branch=master)](https://travis-ci.org/rvagg/bl)
4 +
5 +**A Node.js Buffer list collector, reader and streamer thingy.**
6 +
7 +[![NPM](https://nodei.co/npm/bl.png?downloads=true&downloadRank=true)](https://nodei.co/npm/bl/)
8 +[![NPM](https://nodei.co/npm-dl/bl.png?months=6&height=3)](https://nodei.co/npm/bl/)
9 +
10 +**bl** is a storage object for collections of Node Buffers, exposing them with the main Buffer readable API. Also works as a duplex stream so you can collect buffers from a stream that emits them and emit buffers to a stream that consumes them!
11 +
12 +The original buffers are kept intact and copies are only done as necessary. Any reads that require the use of a single original buffer will return a slice of that buffer only (which references the same memory as the original buffer). Reads that span buffers perform concatenation as required and return the results transparently.
13 +
14 +```js
15 +const BufferList = require('bl')
16 +
17 +var bl = new BufferList()
18 +bl.append(Buffer.from('abcd'))
19 +bl.append(Buffer.from('efg'))
20 +bl.append('hi') // bl will also accept & convert Strings
21 +bl.append(Buffer.from('j'))
22 +bl.append(Buffer.from([ 0x3, 0x4 ]))
23 +
24 +console.log(bl.length) // 12
25 +
26 +console.log(bl.slice(0, 10).toString('ascii')) // 'abcdefghij'
27 +console.log(bl.slice(3, 10).toString('ascii')) // 'defghij'
28 +console.log(bl.slice(3, 6).toString('ascii')) // 'def'
29 +console.log(bl.slice(3, 8).toString('ascii')) // 'defgh'
30 +console.log(bl.slice(5, 10).toString('ascii')) // 'fghij'
31 +
32 +console.log(bl.indexOf('def')) // 3
33 +console.log(bl.indexOf('asdf')) // -1
34 +
35 +// or just use toString!
36 +console.log(bl.toString()) // 'abcdefghij\u0003\u0004'
37 +console.log(bl.toString('ascii', 3, 8)) // 'defgh'
38 +console.log(bl.toString('ascii', 5, 10)) // 'fghij'
39 +
40 +// other standard Buffer readables
41 +console.log(bl.readUInt16BE(10)) // 0x0304
42 +console.log(bl.readUInt16LE(10)) // 0x0403
43 +```
44 +
45 +Give it a callback in the constructor and use it just like **[concat-stream](https://github.com/maxogden/node-concat-stream)**:
46 +
47 +```js
48 +const bl = require('bl')
49 + , fs = require('fs')
50 +
51 +fs.createReadStream('README.md')
52 + .pipe(bl(function (err, data) { // note 'new' isn't strictly required
53 + // `data` is a complete Buffer object containing the full data
54 + console.log(data.toString())
55 + }))
56 +```
57 +
58 +Note that when you use the *callback* method like this, the resulting `data` parameter is a concatenation of all `Buffer` objects in the list. If you want to avoid the overhead of this concatenation (in cases of extreme performance consciousness), then avoid the *callback* method and just listen to `'end'` instead, like a standard Stream.
59 +
60 +Or to fetch a URL using [hyperquest](https://github.com/substack/hyperquest) (should work with [request](http://github.com/mikeal/request) and even plain Node http too!):
61 +```js
62 +const hyperquest = require('hyperquest')
63 + , bl = require('bl')
64 + , url = 'https://raw.github.com/rvagg/bl/master/README.md'
65 +
66 +hyperquest(url).pipe(bl(function (err, data) {
67 + console.log(data.toString())
68 +}))
69 +```
70 +
71 +Or, use it as a readable stream to recompose a list of Buffers to an output source:
72 +
73 +```js
74 +const BufferList = require('bl')
75 + , fs = require('fs')
76 +
77 +var bl = new BufferList()
78 +bl.append(Buffer.from('abcd'))
79 +bl.append(Buffer.from('efg'))
80 +bl.append(Buffer.from('hi'))
81 +bl.append(Buffer.from('j'))
82 +
83 +bl.pipe(fs.createWriteStream('gibberish.txt'))
84 +```
85 +
86 +## API
87 +
88 + * <a href="#ctor"><code><b>new BufferList([ callback ])</b></code></a>
89 + * <a href="#length"><code>bl.<b>length</b></code></a>
90 + * <a href="#append"><code>bl.<b>append(buffer)</b></code></a>
91 + * <a href="#get"><code>bl.<b>get(index)</b></code></a>
92 + * <a href="#indexOf"><code>bl.<b>indexOf(value[, byteOffset][, encoding])</b></code></a>
93 + * <a href="#slice"><code>bl.<b>slice([ start[, end ] ])</b></code></a>
94 + * <a href="#shallowSlice"><code>bl.<b>shallowSlice([ start[, end ] ])</b></code></a>
95 + * <a href="#copy"><code>bl.<b>copy(dest, [ destStart, [ srcStart [, srcEnd ] ] ])</b></code></a>
96 + * <a href="#duplicate"><code>bl.<b>duplicate()</b></code></a>
97 + * <a href="#consume"><code>bl.<b>consume(bytes)</b></code></a>
98 + * <a href="#toString"><code>bl.<b>toString([encoding, [ start, [ end ]]])</b></code></a>
99 + * <a href="#readXX"><code>bl.<b>readDoubleBE()</b></code>, <code>bl.<b>readDoubleLE()</b></code>, <code>bl.<b>readFloatBE()</b></code>, <code>bl.<b>readFloatLE()</b></code>, <code>bl.<b>readInt32BE()</b></code>, <code>bl.<b>readInt32LE()</b></code>, <code>bl.<b>readUInt32BE()</b></code>, <code>bl.<b>readUInt32LE()</b></code>, <code>bl.<b>readInt16BE()</b></code>, <code>bl.<b>readInt16LE()</b></code>, <code>bl.<b>readUInt16BE()</b></code>, <code>bl.<b>readUInt16LE()</b></code>, <code>bl.<b>readInt8()</b></code>, <code>bl.<b>readUInt8()</b></code></a>
100 + * <a href="#streams">Streams</a>
101 +
102 +--------------------------------------------------------
103 +<a name="ctor"></a>
104 +### new BufferList([ callback | Buffer | Buffer array | BufferList | BufferList array | String ])
105 +The constructor takes an optional callback, if supplied, the callback will be called with an error argument followed by a reference to the **bl** instance, when `bl.end()` is called (i.e. from a piped stream). This is a convenient method of collecting the entire contents of a stream, particularly when the stream is *chunky*, such as a network stream.
106 +
107 +Normally, no arguments are required for the constructor, but you can initialise the list by passing in a single `Buffer` object or an array of `Buffer` objects.
108 +
109 +`new` is not strictly required, if you don't instantiate a new object, it will be done automatically for you so you can create a new instance simply with:
110 +
111 +```js
112 +var bl = require('bl')
113 +var myinstance = bl()
114 +
115 +// equivalent to:
116 +
117 +var BufferList = require('bl')
118 +var myinstance = new BufferList()
119 +```
120 +
121 +--------------------------------------------------------
122 +<a name="length"></a>
123 +### bl.length
124 +Get the length of the list in bytes. This is the sum of the lengths of all of the buffers contained in the list, minus any initial offset for a semi-consumed buffer at the beginning. Should accurately represent the total number of bytes that can be read from the list.
125 +
126 +--------------------------------------------------------
127 +<a name="append"></a>
128 +### bl.append(Buffer | Buffer array | BufferList | BufferList array | String)
129 +`append(buffer)` adds an additional buffer or BufferList to the internal list. `this` is returned so it can be chained.
130 +
131 +--------------------------------------------------------
132 +<a name="get"></a>
133 +### bl.get(index)
134 +`get()` will return the byte at the specified index.
135 +
136 +--------------------------------------------------------
137 +<a name="indexOf"></a>
138 +### bl.indexOf(value[, byteOffset][, encoding])
139 +`get()` will return the byte at the specified index.
140 +`indexOf()` method returns the first index at which a given element can be found in the BufferList, or -1 if it is not present.
141 +
142 +--------------------------------------------------------
143 +<a name="slice"></a>
144 +### bl.slice([ start, [ end ] ])
145 +`slice()` returns a new `Buffer` object containing the bytes within the range specified. Both `start` and `end` are optional and will default to the beginning and end of the list respectively.
146 +
147 +If the requested range spans a single internal buffer then a slice of that buffer will be returned which shares the original memory range of that Buffer. If the range spans multiple buffers then copy operations will likely occur to give you a uniform Buffer.
148 +
149 +--------------------------------------------------------
150 +<a name="shallowSlice"></a>
151 +### bl.shallowSlice([ start, [ end ] ])
152 +`shallowSlice()` returns a new `BufferList` object containing the bytes within the range specified. Both `start` and `end` are optional and will default to the beginning and end of the list respectively.
153 +
154 +No copies will be performed. All buffers in the result share memory with the original list.
155 +
156 +--------------------------------------------------------
157 +<a name="copy"></a>
158 +### bl.copy(dest, [ destStart, [ srcStart [, srcEnd ] ] ])
159 +`copy()` copies the content of the list in the `dest` buffer, starting from `destStart` and containing the bytes within the range specified with `srcStart` to `srcEnd`. `destStart`, `start` and `end` are optional and will default to the beginning of the `dest` buffer, and the beginning and end of the list respectively.
160 +
161 +--------------------------------------------------------
162 +<a name="duplicate"></a>
163 +### bl.duplicate()
164 +`duplicate()` performs a **shallow-copy** of the list. The internal Buffers remains the same, so if you change the underlying Buffers, the change will be reflected in both the original and the duplicate. This method is needed if you want to call `consume()` or `pipe()` and still keep the original list. Example:
165 +
166 +```js
167 +var bl = new BufferList()
168 +
169 +bl.append('hello')
170 +bl.append(' world')
171 +bl.append('\n')
172 +
173 +bl.duplicate().pipe(process.stdout, { end: false })
174 +
175 +console.log(bl.toString())
176 +```
177 +
178 +--------------------------------------------------------
179 +<a name="consume"></a>
180 +### bl.consume(bytes)
181 +`consume()` will shift bytes *off the start of the list*. The number of bytes consumed don't need to line up with the sizes of the internal Buffers&mdash;initial offsets will be calculated accordingly in order to give you a consistent view of the data.
182 +
183 +--------------------------------------------------------
184 +<a name="toString"></a>
185 +### bl.toString([encoding, [ start, [ end ]]])
186 +`toString()` will return a string representation of the buffer. The optional `start` and `end` arguments are passed on to `slice()`, while the `encoding` is passed on to `toString()` of the resulting Buffer. See the [Buffer#toString()](http://nodejs.org/docs/latest/api/buffer.html#buffer_buf_tostring_encoding_start_end) documentation for more information.
187 +
188 +--------------------------------------------------------
189 +<a name="readXX"></a>
190 +### bl.readDoubleBE(), bl.readDoubleLE(), bl.readFloatBE(), bl.readFloatLE(), bl.readInt32BE(), bl.readInt32LE(), bl.readUInt32BE(), bl.readUInt32LE(), bl.readInt16BE(), bl.readInt16LE(), bl.readUInt16BE(), bl.readUInt16LE(), bl.readInt8(), bl.readUInt8()
191 +
192 +All of the standard byte-reading methods of the `Buffer` interface are implemented and will operate across internal Buffer boundaries transparently.
193 +
194 +See the <b><code>[Buffer](http://nodejs.org/docs/latest/api/buffer.html)</code></b> documentation for how these work.
195 +
196 +--------------------------------------------------------
197 +<a name="streams"></a>
198 +### Streams
199 +**bl** is a Node **[Duplex Stream](http://nodejs.org/docs/latest/api/stream.html#stream_class_stream_duplex)**, so it can be read from and written to like a standard Node stream. You can also `pipe()` to and from a **bl** instance.
200 +
201 +--------------------------------------------------------
202 +
203 +## Contributors
204 +
205 +**bl** is brought to you by the following hackers:
206 +
207 + * [Rod Vagg](https://github.com/rvagg)
208 + * [Matteo Collina](https://github.com/mcollina)
209 + * [Jarett Cruger](https://github.com/jcrugzz)
210 +
211 +=======
212 +
213 +<a name="license"></a>
214 +## License &amp; copyright
215 +
216 +Copyright (c) 2013-2018 bl contributors (listed above).
217 +
218 +bl is licensed under the MIT license. All rights not explicitly granted in the MIT license are reserved. See the included LICENSE.md file for more details.
node_modules/bl/bl.jsView
@@ -1,0 +1,382 @@
1 +'use strict'
2 +var DuplexStream = require('readable-stream').Duplex
3 + , util = require('util')
4 +
// BufferList collects Buffers and exposes them through a single
// Buffer-like interface; it is also a Duplex stream, so it can be piped
// to and from.
//
// The `callback` argument is overloaded:
//  - a function: stored and later invoked as (err) if a piped-in source
//    errors, or as (null, <full contents>) once end() is called
//  - anything else (Buffer, string, array, BufferList, null...): treated
//    as initial data and handed to append()
function BufferList (callback) {
  if (!(this instanceof BufferList))
    return new BufferList(callback)

  this._bufs = []   // list of Buffer segments, in order
  this.length = 0   // total byte length across all segments

  if (typeof callback == 'function') {
    this._callback = callback

    // forward a piped source's error to the callback, at most once
    var piper = function piper (err) {
      if (this._callback) {
        this._callback(err)
        this._callback = null
      }
    }.bind(this)

    this.on('pipe', function onPipe (src) {
      src.on('error', piper)
    })
    this.on('unpipe', function onUnpipe (src) {
      src.removeListener('error', piper)
    })
  } else {
    this.append(callback)
  }

  // NOTE(review): the Duplex constructor intentionally(?) runs *after*
  // the listeners above are attached and initial data is appended —
  // confirm before reordering; EventEmitter methods come from the
  // prototype so on() works before the stream state exists.
  DuplexStream.call(this)
}


util.inherits(BufferList, DuplexStream)
37 +
38 +
// Map an absolute byte offset into [ segmentIndex, offsetWithinSegment ].
// Offsets at or past the end resolve into the final segment (possibly
// beyond its length); an empty list yields undefined.
BufferList.prototype._offset = function _offset (offset) {
  if (offset === 0) return [ 0, 0 ]

  var consumed = 0
  var last = this._bufs.length - 1

  for (var idx = 0; idx <= last; idx++) {
    var upTo = consumed + this._bufs[idx].length
    if (offset < upTo || idx === last)
      return [ idx, offset - consumed ]
    consumed = upTo
  }
}
50 +
// Inverse of _offset(): turn [ segmentIndex, offsetWithinSegment ] back
// into an absolute byte offset by summing the preceding segment lengths.
BufferList.prototype._reverseOffset = function (blOffset) {
  var index = blOffset[0]
  var absolute = blOffset[1]
  while (index-- > 0)
    absolute += this._bufs[index].length
  return absolute
}
59 +
// Append data to the list. Accepts a Buffer, an array of appendable
// values, another BufferList (unwrapped into its segments), or anything
// Buffer.from() understands; null/undefined is a no-op. Returns `this`
// for chaining.
BufferList.prototype.append = function append (buf) {
  if (Buffer.isBuffer(buf)) {
    this._appendBuffer(buf)
  } else if (Array.isArray(buf)) {
    for (var i = 0; i < buf.length; i++)
      this.append(buf[i])
  } else if (buf instanceof BufferList) {
    // unwrap argument into individual segments
    for (var j = 0; j < buf._bufs.length; j++)
      this.append(buf._bufs[j])
  } else if (buf != null) {
    // coerce number arguments to strings, since Buffer(number) does
    // uninitialized memory allocation
    if (typeof buf == 'number')
      buf = buf.toString()

    this._appendBuffer(Buffer.from(buf))
  }

  return this
}
83 +
84 +
// Push a raw Buffer segment, keeping the running total length in sync.
BufferList.prototype._appendBuffer = function appendBuffer (buf) {
  this.length += buf.length
  this._bufs.push(buf)
}
89 +
90 +
// Duplex writable-side implementation: stash the incoming chunk.
// NOTE(review): assumes `buf` is already a Buffer (decodeStrings
// behavior) — confirm against readable-stream defaults.
BufferList.prototype._write = function _write (buf, encoding, callback) {
  this._appendBuffer(buf)

  // writes complete synchronously; callback may be absent when called
  // directly rather than through the stream machinery
  if (typeof callback == 'function')
    callback()
}
97 +
98 +
// Duplex readable-side implementation: emit up to `size` bytes from the
// front of the list, consuming them, or signal EOF when empty.
BufferList.prototype._read = function _read (size) {
  if (!this.length)
    return this.push(null)

  var n = Math.min(size, this.length)
  this.push(this.slice(0, n))
  this.consume(n)
}
107 +
108 +
// End the writable side, then deliver the full accumulated contents to
// the constructor callback (exactly once, if one was supplied).
BufferList.prototype.end = function end (chunk) {
  DuplexStream.prototype.end.call(this, chunk)

  if (this._callback) {
    var cb = this._callback
    cb(null, this.slice())
    this._callback = null
  }
}
117 +
118 +
// Return the single byte at absolute position `index`, or undefined when
// the index is out of range.
BufferList.prototype.get = function get (index) {
  if (index > this.length || index < 0)
    return undefined

  var pos = this._offset(index)
  return this._bufs[pos[0]][pos[1]]
}
126 +
127 +
// Return a Buffer containing bytes [start, end); negative indexes count
// back from the end, mirroring Buffer#slice. Delegates to copy().
BufferList.prototype.slice = function slice (start, end) {
  var from = start
  var to = end

  if (typeof from == 'number' && from < 0)
    from += this.length
  if (typeof to == 'number' && to < 0)
    to += this.length

  return this.copy(null, 0, from, to)
}
135 +
136 +
// Copy bytes [srcStart, srcEnd) into `dst` at `dstStart`, or — when
// `dst` is null — return a Buffer containing that range (this is how
// slice() is implemented).
// NOTE(review): in the single-segment copy case below this returns
// Buffer#copy's result (a byte count), not dst — confirm callers rely
// only on the dst==null path's return value.
BufferList.prototype.copy = function copy (dst, dstStart, srcStart, srcEnd) {
  // normalize the source range into [0, this.length]
  if (typeof srcStart != 'number' || srcStart < 0)
    srcStart = 0
  if (typeof srcEnd != 'number' || srcEnd > this.length)
    srcEnd = this.length
  // empty range: nothing to transfer
  if (srcStart >= this.length)
    return dst || Buffer.alloc(0)
  if (srcEnd <= 0)
    return dst || Buffer.alloc(0)

  var copy = !!dst
    , off = this._offset(srcStart)
    , len = srcEnd - srcStart
    , bytes = len                         // bytes still to transfer
    , bufoff = (copy && dstStart) || 0    // write position in dst
    , start = off[1]                      // read position in current segment
    , l
    , i

  // copy/slice everything
  if (srcStart === 0 && srcEnd == this.length) {
    if (!copy) { // slice, but full concat if multiple buffers
      return this._bufs.length === 1
        ? this._bufs[0]
        : Buffer.concat(this._bufs, this.length)
    }

    // copy, need to copy individual buffers
    for (i = 0; i < this._bufs.length; i++) {
      this._bufs[i].copy(dst, bufoff)
      bufoff += this._bufs[i].length
    }

    return dst
  }

  // easy, cheap case where it's a subset of one of the buffers
  if (bytes <= this._bufs[off[0]].length - start) {
    return copy
      ? this._bufs[off[0]].copy(dst, dstStart, start, start + bytes)
      : this._bufs[off[0]].slice(start, start + bytes)
  }

  if (!copy) // a slice, we need something to copy in to
    dst = Buffer.allocUnsafe(len)

  // general case: walk segments from the starting one until `bytes`
  // is exhausted
  for (i = off[0]; i < this._bufs.length; i++) {
    l = this._bufs[i].length - start

    if (bytes > l) {
      this._bufs[i].copy(dst, bufoff, start)
    } else {
      this._bufs[i].copy(dst, bufoff, start, start + bytes)
      break
    }

    bufoff += l
    bytes -= l

    // only the first segment is entered part-way through
    if (start)
      start = 0
  }

  return dst
}
202 +
// Like slice(), but returns a new BufferList sharing the underlying
// Buffer segments instead of copying bytes (end segments are trimmed
// with zero-copy Buffer#slice).
BufferList.prototype.shallowSlice = function shallowSlice (start, end) {
  start = start || 0
  end = typeof end !== 'number' ? this.length : end

  // negative indexes count back from the end
  if (start < 0)
    start += this.length
  if (end < 0)
    end += this.length

  if (start === end) {
    return new BufferList()
  }
  var startOffset = this._offset(start)
    , endOffset = this._offset(end)
    , buffers = this._bufs.slice(startOffset[0], endOffset[0] + 1)

  // trim the tail first (drop the last segment entirely when the end
  // lands exactly on a segment boundary) ...
  if (endOffset[1] == 0)
    buffers.pop()
  else
    buffers[buffers.length-1] = buffers[buffers.length-1].slice(0, endOffset[1])

  // ... then the head; this order is what keeps the single-segment case
  // (startOffset[0] === endOffset[0]) correct
  if (startOffset[1] != 0)
    buffers[0] = buffers[0].slice(startOffset[1])

  return new BufferList(buffers)
}
229 +
// Stringify bytes [start, end) in the given encoding, mirroring
// Buffer#toString.
BufferList.prototype.toString = function toString (encoding, start, end) {
  var section = this.slice(start, end)
  return section.toString(encoding)
}
233 +
// Discard `bytes` bytes from the front of the list. Returns `this`.
//
// The argument is normalized the way Buffer methods normalize offsets:
// truncated to an integer, and ignored when it would remove nothing.
// Previously a missing/NaN/negative `bytes` silently corrupted state
// (`this.length -= undefined` => NaN; a negative Buffer#slice(bytes)
// took bytes from the *end* while increasing this.length).
BufferList.prototype.consume = function consume (bytes) {
  bytes = Math.trunc(bytes)
  // do nothing if not a positive number
  if (Number.isNaN(bytes) || bytes <= 0) return this

  while (this._bufs.length) {
    if (bytes >= this._bufs[0].length) {
      // whole first segment consumed: drop it
      bytes -= this._bufs[0].length
      this.length -= this._bufs[0].length
      this._bufs.shift()
    } else {
      // partial: keep a zero-copy tail of the first segment
      this._bufs[0] = this._bufs[0].slice(bytes)
      this.length -= bytes
      break
    }
  }
  return this
}
248 +
249 +
// Return a new BufferList referencing the same (unconsumed) segments.
BufferList.prototype.duplicate = function duplicate () {
  var clone = new BufferList()

  for (var i = 0; i < this._bufs.length; i++)
    clone.append(this._bufs[i])

  return clone
}
259 +
260 +
// Duplex destroy hook: drop all buffered data in place, then report.
BufferList.prototype._destroy = function _destroy (err, cb) {
  this._bufs.length = 0
  this.length = 0
  cb(err)
}
266 +
267 +
// Buffer#indexOf work-alike that searches across internal segment
// boundaries. `search` may be a string, Buffer, BufferList, Uint8Array
// or a single byte value; `offset` is the starting position (negative
// counts from the end); `encoding` applies when `search` is a string.
// Returns the absolute index of the first match, or -1.
BufferList.prototype.indexOf = function (search, offset, encoding) {
  // support the (search, encoding) call shape
  if (encoding === undefined && typeof offset === 'string') {
    encoding = offset
    offset = undefined
  }
  // normalize every accepted needle type down to a Buffer
  if (typeof search === 'function' || Array.isArray(search)) {
    throw new TypeError('The "value" argument must be one of type string, Buffer, BufferList, or Uint8Array.')
  } else if (typeof search === 'number') {
    search = Buffer.from([search])
  } else if (typeof search === 'string') {
    search = Buffer.from(search, encoding)
  } else if (search instanceof BufferList) {
    search = search.slice()
  } else if (!Buffer.isBuffer(search)) {
    search = Buffer.from(search)
  }

  // coerce offset to a usable non-negative number (NaN -> 0,
  // negative -> from the end, still-negative -> 0)
  offset = Number(offset || 0)
  if (isNaN(offset)) {
    offset = 0
  }

  if (offset < 0) {
    offset = this.length + offset
  }

  if (offset < 0) {
    offset = 0
  }

  // an empty needle matches at the (clamped) offset, like Buffer#indexOf
  if (search.length === 0) {
    return offset > this.length ? this.length : offset
  }

  var blOffset = this._offset(offset)
  var blIndex = blOffset[0] // index of which internal buffer we're working on
  var buffOffset = blOffset[1] // offset of the internal buffer we're working on

  // scan over each buffer
  for (blIndex; blIndex < this._bufs.length; blIndex++) {
    var buff = this._bufs[blIndex]
    while(buffOffset < buff.length) {
      var availableWindow = buff.length - buffOffset
      if (availableWindow >= search.length) {
        // the needle could fit wholly in this segment: use the fast
        // native Buffer#indexOf over this window
        var nativeSearchResult = buff.indexOf(search, buffOffset)
        if (nativeSearchResult !== -1) {
          return this._reverseOffset([blIndex, nativeSearchResult])
        }
        buffOffset = buff.length - search.length + 1 // end of native search window
      } else {
        // the needle may straddle the segment boundary: fall back to a
        // byte-by-byte comparison via _match
        var revOffset = this._reverseOffset([blIndex, buffOffset])
        if (this._match(revOffset, search)) {
          return revOffset
        }
        buffOffset++
      }
    }
    buffOffset = 0
  }
  return -1
}
329 +
// Byte-by-byte comparison of `search` against this list starting at the
// absolute position `offset`; used by indexOf for needles that straddle
// segment boundaries.
BufferList.prototype._match = function (offset, search) {
  // not enough bytes left to possibly match
  if (this.length - offset < search.length)
    return false

  for (var k = 0; k < search.length; k++) {
    if (this.get(offset + k) !== search[k])
      return false
  }

  return true
}
341 +
342 +
;(function () {
  // Fixed-width Buffer read methods and their byte widths; entries
  // mapped to null are the variable-width variants, which take an
  // explicit byteLength argument instead.
  var methods = {
    readDoubleBE: 8, readDoubleLE: 8,
    readFloatBE: 4, readFloatLE: 4,
    readInt32BE: 4, readInt32LE: 4,
    readUInt32BE: 4, readUInt32LE: 4,
    readInt16BE: 2, readInt16LE: 2,
    readUInt16BE: 2, readUInt16LE: 2,
    readInt8: 1, readUInt8: 1,
    readIntBE: null, readIntLE: null,
    readUIntBE: null, readUIntLE: null
  }

  // each generated method slices out the needed window (which may span
  // segments) and defers to the native Buffer method on the result
  Object.keys(methods).forEach(function (m) {
    if (methods[m] === null) {
      // variable-width: (offset, byteLength)
      BufferList.prototype[m] = function (offset, byteLength) {
        return this.slice(offset, offset + byteLength)[m](0, byteLength)
      }
    } else {
      // fixed-width: (offset)
      BufferList.prototype[m] = function (offset) {
        return this.slice(offset, offset + methods[m])[m](0)
      }
    }
  })
}())
380 +
381 +
382 +module.exports = BufferList
node_modules/bl/package.jsonView
@@ -1,0 +1,65 @@
1 +{
2 + "_args": [
3 + [
4 + "bl@3.0.0",
5 + "/home/cel/src/ssb-npm-registry"
6 + ]
7 + ],
8 + "_from": "bl@3.0.0",
9 + "_id": "bl@3.0.0",
10 + "_inBundle": false,
11 + "_integrity": "sha256-wezXsoQPBGN/Lpfgr+kaU26zuLDrtfLbG5MRvJJuh7w=",
12 + "_location": "/bl",
13 + "_phantomChildren": {},
14 + "_requested": {
15 + "type": "version",
16 + "registry": true,
17 + "raw": "bl@3.0.0",
18 + "name": "bl",
19 + "escapedName": "bl",
20 + "rawSpec": "3.0.0",
21 + "saveSpec": null,
22 + "fetchSpec": "3.0.0"
23 + },
24 + "_requiredBy": [
25 + "/tar-stream"
26 + ],
27 + "_resolved": "http://localhost:8989/blobs/get/&wezXsoQPBGN/Lpfgr+kaU26zuLDrtfLbG5MRvJJuh7w=.sha256",
28 + "_spec": "3.0.0",
29 + "_where": "/home/cel/src/ssb-npm-registry",
30 + "authors": [
31 + "Rod Vagg <rod@vagg.org> (https://github.com/rvagg)",
32 + "Matteo Collina <matteo.collina@gmail.com> (https://github.com/mcollina)",
33 + "Jarett Cruger <jcrugzz@gmail.com> (https://github.com/jcrugzz)"
34 + ],
35 + "bugs": {
36 + "url": "https://github.com/rvagg/bl/issues"
37 + },
38 + "dependencies": {
39 + "readable-stream": "^3.0.1"
40 + },
41 + "description": "Buffer List: collect buffers and access with a standard readable Buffer interface, streamable too!",
42 + "devDependencies": {
43 + "faucet": "0.0.1",
44 + "hash_file": "~0.1.1",
45 + "tape": "~4.9.1"
46 + },
47 + "homepage": "https://github.com/rvagg/bl",
48 + "keywords": [
49 + "buffer",
50 + "buffers",
51 + "stream",
52 + "awesomesauce"
53 + ],
54 + "license": "MIT",
55 + "main": "bl.js",
56 + "name": "bl",
57 + "repository": {
58 + "type": "git",
59 + "url": "git+https://github.com/rvagg/bl.git"
60 + },
61 + "scripts": {
62 + "test": "node test/test.js | faucet"
63 + },
64 + "version": "3.0.0"
65 +}
node_modules/bl/test/indexOf.jsView
@@ -1,0 +1,463 @@
1 +'use strict'
2 +
3 +var tape = require('tape')
4 + , BufferList = require('../')
5 + , Buffer = require('safe-buffer').Buffer
6 +
// basic needle search within and across segment boundaries
tape('indexOf single byte needle', t => {
  const bl = new BufferList(['abcdefg', 'abcdefg', '12345'])
  t.equal(bl.indexOf('e'), 4)
  t.equal(bl.indexOf('e', 5), 11)
  t.equal(bl.indexOf('e', 12), -1)
  t.equal(bl.indexOf('5'), 18)
  t.end()
})

tape('indexOf multiple byte needle', t => {
  const bl = new BufferList(['abcdefg', 'abcdefg'])
  t.equal(bl.indexOf('ef'), 4)
  t.equal(bl.indexOf('ef', 5), 11)
  t.end()
})

tape('indexOf multiple byte needles across buffer boundaries', t => {
  const bl = new BufferList(['abcdefg', 'abcdefg'])
  t.equal(bl.indexOf('fgabc'), 5)
  t.end()
})

tape('indexOf takes a buffer list search', t => {
  const bl = new BufferList(['abcdefg', 'abcdefg'])
  const search = new BufferList('fgabc')
  t.equal(bl.indexOf(search), 5)
  t.end()
})

// empty needles match at the (clamped) offset, matching Buffer#indexOf
tape('indexOf a zero byte needle', t => {
  const b = new BufferList('abcdef')
  const buf_empty = Buffer.from('')
  t.equal(b.indexOf(''), 0)
  t.equal(b.indexOf('', 1), 1)
  t.equal(b.indexOf('', b.length + 1), b.length)
  t.equal(b.indexOf('', Infinity), b.length)
  t.equal(b.indexOf(buf_empty), 0)
  t.equal(b.indexOf(buf_empty, 1), 1)
  t.equal(b.indexOf(buf_empty, b.length + 1), b.length)
  t.equal(b.indexOf(buf_empty, Infinity), b.length)
  t.end()
})

// segments both shorter and longer than the needle force the
// boundary-straddling (_match) code path
tape('indexOf buffers smaller and larger than the needle', t => {
  const bl = new BufferList(['abcdefg', 'a', 'bcdefg', 'a', 'bcfgab'])
  t.equal(bl.indexOf('fgabc'), 5)
  t.equal(bl.indexOf('fgabc', 6), 12)
  t.equal(bl.indexOf('fgabc', 13), -1)
  t.end()
})

// only present in node 6+
;(process.version.substr(1).split('.')[0] >= 6) && tape('indexOf latin1 and binary encoding', t => {
  const b = new BufferList('abcdef')

  // test latin1 encoding
  t.equal(
    new BufferList(Buffer.from(b.toString('latin1'), 'latin1'))
      .indexOf('d', 0, 'latin1'),
    3
  )
  t.equal(
    new BufferList(Buffer.from(b.toString('latin1'), 'latin1'))
      .indexOf(Buffer.from('d', 'latin1'), 0, 'latin1'),
    3
  )
  t.equal(
    new BufferList(Buffer.from('aa\u00e8aa', 'latin1'))
      .indexOf('\u00e8', 'latin1'),
    2
  )
  t.equal(
    new BufferList(Buffer.from('\u00e8', 'latin1'))
      .indexOf('\u00e8', 'latin1'),
    0
  )
  t.equal(
    new BufferList(Buffer.from('\u00e8', 'latin1'))
      .indexOf(Buffer.from('\u00e8', 'latin1'), 'latin1'),
    0
  )

  // test binary encoding
  t.equal(
    new BufferList(Buffer.from(b.toString('binary'), 'binary'))
      .indexOf('d', 0, 'binary'),
    3
  )
  t.equal(
    new BufferList(Buffer.from(b.toString('binary'), 'binary'))
      .indexOf(Buffer.from('d', 'binary'), 0, 'binary'),
    3
  )
  t.equal(
    new BufferList(Buffer.from('aa\u00e8aa', 'binary'))
      .indexOf('\u00e8', 'binary'),
    2
  )
  t.equal(
    new BufferList(Buffer.from('\u00e8', 'binary'))
      .indexOf('\u00e8', 'binary'),
    0
  )
  t.equal(
    new BufferList(Buffer.from('\u00e8', 'binary'))
      .indexOf(Buffer.from('\u00e8', 'binary'), 'binary'),
    0
  )
  t.end()
})
117 +
// Port of the Node.js 10 core Buffer#indexOf test suite, run against
// BufferList. (Fix: a leftover `debugger` statement has been removed
// from the type-error forEach loop below — it would halt execution when
// run under an attached inspector.)
tape('indexOf the entire nodejs10 buffer test suite', t => {
  const b = new BufferList('abcdef')
  const buf_a = Buffer.from('a')
  const buf_bc = Buffer.from('bc')
  const buf_f = Buffer.from('f')
  const buf_z = Buffer.from('z')

  const stringComparison = 'abcdef'

  t.equal(b.indexOf('a'), 0)
  t.equal(b.indexOf('a', 1), -1)
  t.equal(b.indexOf('a', -1), -1)
  t.equal(b.indexOf('a', -4), -1)
  t.equal(b.indexOf('a', -b.length), 0)
  t.equal(b.indexOf('a', NaN), 0)
  t.equal(b.indexOf('a', -Infinity), 0)
  t.equal(b.indexOf('a', Infinity), -1)
  t.equal(b.indexOf('bc'), 1)
  t.equal(b.indexOf('bc', 2), -1)
  t.equal(b.indexOf('bc', -1), -1)
  t.equal(b.indexOf('bc', -3), -1)
  t.equal(b.indexOf('bc', -5), 1)
  t.equal(b.indexOf('bc', NaN), 1)
  t.equal(b.indexOf('bc', -Infinity), 1)
  t.equal(b.indexOf('bc', Infinity), -1)
  t.equal(b.indexOf('f'), b.length - 1)
  t.equal(b.indexOf('z'), -1)
  // empty search tests
  t.equal(b.indexOf(buf_a), 0)
  t.equal(b.indexOf(buf_a, 1), -1)
  t.equal(b.indexOf(buf_a, -1), -1)
  t.equal(b.indexOf(buf_a, -4), -1)
  t.equal(b.indexOf(buf_a, -b.length), 0)
  t.equal(b.indexOf(buf_a, NaN), 0)
  t.equal(b.indexOf(buf_a, -Infinity), 0)
  t.equal(b.indexOf(buf_a, Infinity), -1)
  t.equal(b.indexOf(buf_bc), 1)
  t.equal(b.indexOf(buf_bc, 2), -1)
  t.equal(b.indexOf(buf_bc, -1), -1)
  t.equal(b.indexOf(buf_bc, -3), -1)
  t.equal(b.indexOf(buf_bc, -5), 1)
  t.equal(b.indexOf(buf_bc, NaN), 1)
  t.equal(b.indexOf(buf_bc, -Infinity), 1)
  t.equal(b.indexOf(buf_bc, Infinity), -1)
  t.equal(b.indexOf(buf_f), b.length - 1)
  t.equal(b.indexOf(buf_z), -1)
  t.equal(b.indexOf(0x61), 0)
  t.equal(b.indexOf(0x61, 1), -1)
  t.equal(b.indexOf(0x61, -1), -1)
  t.equal(b.indexOf(0x61, -4), -1)
  t.equal(b.indexOf(0x61, -b.length), 0)
  t.equal(b.indexOf(0x61, NaN), 0)
  t.equal(b.indexOf(0x61, -Infinity), 0)
  t.equal(b.indexOf(0x61, Infinity), -1)
  t.equal(b.indexOf(0x0), -1)

  // test offsets
  t.equal(b.indexOf('d', 2), 3)
  t.equal(b.indexOf('f', 5), 5)
  t.equal(b.indexOf('f', -1), 5)
  t.equal(b.indexOf('f', 6), -1)

  t.equal(b.indexOf(Buffer.from('d'), 2), 3)
  t.equal(b.indexOf(Buffer.from('f'), 5), 5)
  t.equal(b.indexOf(Buffer.from('f'), -1), 5)
  t.equal(b.indexOf(Buffer.from('f'), 6), -1)

  t.equal(Buffer.from('ff').indexOf(Buffer.from('f'), 1, 'ucs2'), -1)

  // test invalid and uppercase encoding
  t.equal(b.indexOf('b', 'utf8'), 1)
  t.equal(b.indexOf('b', 'UTF8'), 1)
  t.equal(b.indexOf('62', 'HEX'), 1)
  t.throws(() => b.indexOf('bad', 'enc'), TypeError)

  // test hex encoding
  t.equal(
    Buffer.from(b.toString('hex'), 'hex')
      .indexOf('64', 0, 'hex'),
    3
  )
  t.equal(
    Buffer.from(b.toString('hex'), 'hex')
      .indexOf(Buffer.from('64', 'hex'), 0, 'hex'),
    3
  )

  // test base64 encoding
  t.equal(
    Buffer.from(b.toString('base64'), 'base64')
      .indexOf('ZA==', 0, 'base64'),
    3
  )
  t.equal(
    Buffer.from(b.toString('base64'), 'base64')
      .indexOf(Buffer.from('ZA==', 'base64'), 0, 'base64'),
    3
  )

  // test ascii encoding
  t.equal(
    Buffer.from(b.toString('ascii'), 'ascii')
      .indexOf('d', 0, 'ascii'),
    3
  )
  t.equal(
    Buffer.from(b.toString('ascii'), 'ascii')
      .indexOf(Buffer.from('d', 'ascii'), 0, 'ascii'),
    3
  )

  // test optional offset with passed encoding
  t.equal(Buffer.from('aaaa0').indexOf('30', 'hex'), 4)
  t.equal(Buffer.from('aaaa00a').indexOf('3030', 'hex'), 4)

  {
    // test usc2 encoding
    const twoByteString = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'ucs2')

    t.equal(8, twoByteString.indexOf('\u0395', 4, 'ucs2'))
    t.equal(6, twoByteString.indexOf('\u03a3', -4, 'ucs2'))
    t.equal(4, twoByteString.indexOf('\u03a3', -6, 'ucs2'))
    t.equal(4, twoByteString.indexOf(
      Buffer.from('\u03a3', 'ucs2'), -6, 'ucs2'))
    t.equal(-1, twoByteString.indexOf('\u03a3', -2, 'ucs2'))
  }

  const mixedByteStringUcs2 =
    Buffer.from('\u039a\u0391abc\u03a3\u03a3\u0395', 'ucs2')
  t.equal(6, mixedByteStringUcs2.indexOf('bc', 0, 'ucs2'))
  t.equal(10, mixedByteStringUcs2.indexOf('\u03a3', 0, 'ucs2'))
  t.equal(-1, mixedByteStringUcs2.indexOf('\u0396', 0, 'ucs2'))

  t.equal(
    6, mixedByteStringUcs2.indexOf(Buffer.from('bc', 'ucs2'), 0, 'ucs2'))
  t.equal(
    10, mixedByteStringUcs2.indexOf(Buffer.from('\u03a3', 'ucs2'), 0, 'ucs2'))
  t.equal(
    -1, mixedByteStringUcs2.indexOf(Buffer.from('\u0396', 'ucs2'), 0, 'ucs2'))

  {
    const twoByteString = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'ucs2')

    // Test single char pattern
    t.equal(0, twoByteString.indexOf('\u039a', 0, 'ucs2'))
    let index = twoByteString.indexOf('\u0391', 0, 'ucs2')
    t.equal(2, index, `Alpha - at index ${index}`)
    index = twoByteString.indexOf('\u03a3', 0, 'ucs2')
    t.equal(4, index, `First Sigma - at index ${index}`)
    index = twoByteString.indexOf('\u03a3', 6, 'ucs2')
    t.equal(6, index, `Second Sigma - at index ${index}`)
    index = twoByteString.indexOf('\u0395', 0, 'ucs2')
    t.equal(8, index, `Epsilon - at index ${index}`)
    index = twoByteString.indexOf('\u0392', 0, 'ucs2')
    t.equal(-1, index, `Not beta - at index ${index}`)

    // Test multi-char pattern
    index = twoByteString.indexOf('\u039a\u0391', 0, 'ucs2')
    t.equal(0, index, `Lambda Alpha - at index ${index}`)
    index = twoByteString.indexOf('\u0391\u03a3', 0, 'ucs2')
    t.equal(2, index, `Alpha Sigma - at index ${index}`)
    index = twoByteString.indexOf('\u03a3\u03a3', 0, 'ucs2')
    t.equal(4, index, `Sigma Sigma - at index ${index}`)
    index = twoByteString.indexOf('\u03a3\u0395', 0, 'ucs2')
    t.equal(6, index, `Sigma Epsilon - at index ${index}`)
  }

  const mixedByteStringUtf8 = Buffer.from('\u039a\u0391abc\u03a3\u03a3\u0395')
  t.equal(5, mixedByteStringUtf8.indexOf('bc'))
  t.equal(5, mixedByteStringUtf8.indexOf('bc', 5))
  t.equal(5, mixedByteStringUtf8.indexOf('bc', -8))
  t.equal(7, mixedByteStringUtf8.indexOf('\u03a3'))
  t.equal(-1, mixedByteStringUtf8.indexOf('\u0396'))


  // Test complex string indexOf algorithms. Only trigger for long strings.
  // Long string that isn't a simple repeat of a shorter string.
  let longString = 'A'
  for (let i = 66; i < 76; i++) { // from 'B' to 'K'
    longString = longString + String.fromCharCode(i) + longString
  }

  const longBufferString = Buffer.from(longString)

  // pattern of 15 chars, repeated every 16 chars in long
  let pattern = 'ABACABADABACABA'
  for (let i = 0; i < longBufferString.length - pattern.length; i += 7) {
    const index = longBufferString.indexOf(pattern, i)
    t.equal((i + 15) & ~0xf, index,
      `Long ABACABA...-string at index ${i}`)
  }

  let index = longBufferString.indexOf('AJABACA')
  t.equal(510, index, `Long AJABACA, First J - at index ${index}`)
  index = longBufferString.indexOf('AJABACA', 511)
  t.equal(1534, index, `Long AJABACA, Second J - at index ${index}`)

  pattern = 'JABACABADABACABA'
  index = longBufferString.indexOf(pattern)
  t.equal(511, index, `Long JABACABA..., First J - at index ${index}`)
  index = longBufferString.indexOf(pattern, 512)
  t.equal(
    1535, index, `Long JABACABA..., Second J - at index ${index}`)

  // Search for a non-ASCII string in a pure ASCII string.
  const asciiString = Buffer.from(
    'arglebargleglopglyfarglebargleglopglyfarglebargleglopglyf')
  t.equal(-1, asciiString.indexOf('\x2061'))
  t.equal(3, asciiString.indexOf('leb', 0))

  // Search in string containing many non-ASCII chars.
  const allCodePoints = []
  for (let i = 0; i < 65536; i++) allCodePoints[i] = i
  const allCharsString = String.fromCharCode.apply(String, allCodePoints)
  const allCharsBufferUtf8 = Buffer.from(allCharsString)
  const allCharsBufferUcs2 = Buffer.from(allCharsString, 'ucs2')

  // Search for string long enough to trigger complex search with ASCII pattern
  // and UC16 subject.
  t.equal(-1, allCharsBufferUtf8.indexOf('notfound'))
  t.equal(-1, allCharsBufferUcs2.indexOf('notfound'))

  // Needle is longer than haystack, but only because it's encoded as UTF-16
  t.equal(Buffer.from('aaaa').indexOf('a'.repeat(4), 'ucs2'), -1)

  t.equal(Buffer.from('aaaa').indexOf('a'.repeat(4), 'utf8'), 0)
  t.equal(Buffer.from('aaaa').indexOf('你好', 'ucs2'), -1)

  // Haystack has odd length, but the needle is UCS2.
  t.equal(Buffer.from('aaaaa').indexOf('b', 'ucs2'), -1)

  {
    // Find substrings in Utf8.
    const lengths = [1, 3, 15]; // Single char, simple and complex.
    const indices = [0x5, 0x60, 0x400, 0x680, 0x7ee, 0xFF02, 0x16610, 0x2f77b]
    for (let lengthIndex = 0; lengthIndex < lengths.length; lengthIndex++) {
      for (let i = 0; i < indices.length; i++) {
        const index = indices[i]
        let length = lengths[lengthIndex]

        if (index + length > 0x7F) {
          length = 2 * length
        }

        if (index + length > 0x7FF) {
          length = 3 * length
        }

        if (index + length > 0xFFFF) {
          length = 4 * length
        }

        const patternBufferUtf8 = allCharsBufferUtf8.slice(index, index + length)
        t.equal(index, allCharsBufferUtf8.indexOf(patternBufferUtf8))

        const patternStringUtf8 = patternBufferUtf8.toString()
        t.equal(index, allCharsBufferUtf8.indexOf(patternStringUtf8))
      }
    }
  }

  {
    // Find substrings in Usc2.
    const lengths = [2, 4, 16]; // Single char, simple and complex.
    const indices = [0x5, 0x65, 0x105, 0x205, 0x285, 0x2005, 0x2085, 0xfff0]
    for (let lengthIndex = 0; lengthIndex < lengths.length; lengthIndex++) {
      for (let i = 0; i < indices.length; i++) {
        const index = indices[i] * 2
        const length = lengths[lengthIndex]

        const patternBufferUcs2 =
          allCharsBufferUcs2.slice(index, index + length)
        t.equal(
          index, allCharsBufferUcs2.indexOf(patternBufferUcs2, 0, 'ucs2'))

        const patternStringUcs2 = patternBufferUcs2.toString('ucs2')
        t.equal(
          index, allCharsBufferUcs2.indexOf(patternStringUcs2, 0, 'ucs2'))
      }
    }
  }

  [
    () => {},
    {},
    []
  ].forEach(val => {
    t.throws(() => b.indexOf(val), TypeError, `"${JSON.stringify(val)}" should throw`)
  })

  // Test weird offset arguments.
  // The following offsets coerce to NaN or 0, searching the whole Buffer
  t.equal(b.indexOf('b', undefined), 1)
  t.equal(b.indexOf('b', {}), 1)
  t.equal(b.indexOf('b', 0), 1)
  t.equal(b.indexOf('b', null), 1)
  t.equal(b.indexOf('b', []), 1)

  // The following offset coerces to 2, in other words +[2] === 2
  t.equal(b.indexOf('b', [2]), -1)

  // Behavior should match String.indexOf()
  t.equal(
    b.indexOf('b', undefined),
    stringComparison.indexOf('b', undefined))
  t.equal(
    b.indexOf('b', {}),
    stringComparison.indexOf('b', {}))
  t.equal(
    b.indexOf('b', 0),
    stringComparison.indexOf('b', 0))
  t.equal(
    b.indexOf('b', null),
    stringComparison.indexOf('b', null))
  t.equal(
    b.indexOf('b', []),
    stringComparison.indexOf('b', []))
  t.equal(
    b.indexOf('b', [2]),
    stringComparison.indexOf('b', [2]))

  // test truncation of Number arguments to uint8
  {
    const buf = Buffer.from('this is a test')
    t.equal(buf.indexOf(0x6973), 3)
    t.equal(buf.indexOf(0x697320), 4)
    t.equal(buf.indexOf(0x69732069), 2)
    t.equal(buf.indexOf(0x697374657374), 0)
    t.equal(buf.indexOf(0x69737374), 0)
    t.equal(buf.indexOf(0x69737465), 11)
    t.equal(buf.indexOf(0x69737465), 11)
    t.equal(buf.indexOf(-140), 0)
    t.equal(buf.indexOf(-152), 1)
    t.equal(buf.indexOf(0xff), -1)
    t.equal(buf.indexOf(0xffff), -1)
  }

  // Test that Uint8Array arguments are okay.
  {
    const needle = new Uint8Array([ 0x66, 0x6f, 0x6f ])
    const haystack = new BufferList(Buffer.from('a foo b foo'))
    t.equal(haystack.indexOf(needle), 2)
  }
  t.end()
})
node_modules/bl/test/test.jsView
@@ -1,0 +1,780 @@
1 +'use strict'
2 +
3 +var tape = require('tape')
4 + , crypto = require('crypto')
5 + , fs = require('fs')
6 + , hash = require('hash_file')
7 + , BufferList = require('../')
8 + , Buffer = require('safe-buffer').Buffer
9 +
10 + , encodings =
11 + ('hex utf8 utf-8 ascii binary base64'
12 + + (process.browser ? '' : ' ucs2 ucs-2 utf16le utf-16le')).split(' ')
13 +
14 +// run the indexOf tests
15 +require('./indexOf')
16 +
tape('single bytes from single buffer', function (t) {
  // get(i) must return the byte at offset i, and undefined for any
  // offset outside [0, length).
  var bl = new BufferList()
  bl.append(Buffer.from('abcd'))

  t.equal(bl.length, 4)

  t.equal(bl.get(-1), undefined)
  'abcd'.split('').forEach(function (ch, i) {
    t.equal(bl.get(i), ch.charCodeAt(0))
  })
  t.equal(bl.get(4), undefined)

  t.end()
})
31 +
32 +tape('single bytes from multiple buffers', function (t) {
33 + var bl = new BufferList()
34 + bl.append(Buffer.from('abcd'))
35 + bl.append(Buffer.from('efg'))
36 + bl.append(Buffer.from('hi'))
37 + bl.append(Buffer.from('j'))
38 +
39 + t.equal(bl.length, 10)
40 +
41 + t.equal(bl.get(0), 97)
42 + t.equal(bl.get(1), 98)
43 + t.equal(bl.get(2), 99)
44 + t.equal(bl.get(3), 100)
45 + t.equal(bl.get(4), 101)
46 + t.equal(bl.get(5), 102)
47 + t.equal(bl.get(6), 103)
48 + t.equal(bl.get(7), 104)
49 + t.equal(bl.get(8), 105)
50 + t.equal(bl.get(9), 106)
51 + t.end()
52 +})
53 +
54 +tape('multi bytes from single buffer', function (t) {
55 + var bl = new BufferList()
56 + bl.append(Buffer.from('abcd'))
57 +
58 + t.equal(bl.length, 4)
59 +
60 + t.equal(bl.slice(0, 4).toString('ascii'), 'abcd')
61 + t.equal(bl.slice(0, 3).toString('ascii'), 'abc')
62 + t.equal(bl.slice(1, 4).toString('ascii'), 'bcd')
63 + t.equal(bl.slice(-4, -1).toString('ascii'), 'abc')
64 +
65 + t.end()
66 +})
67 +
68 +tape('multi bytes from single buffer (negative indexes)', function (t) {
69 + var bl = new BufferList()
70 + bl.append(Buffer.from('buffer'))
71 +
72 + t.equal(bl.length, 6)
73 +
74 + t.equal(bl.slice(-6, -1).toString('ascii'), 'buffe')
75 + t.equal(bl.slice(-6, -2).toString('ascii'), 'buff')
76 + t.equal(bl.slice(-5, -2).toString('ascii'), 'uff')
77 +
78 + t.end()
79 +})
80 +
81 +tape('multiple bytes from multiple buffers', function (t) {
82 + var bl = new BufferList()
83 +
84 + bl.append(Buffer.from('abcd'))
85 + bl.append(Buffer.from('efg'))
86 + bl.append(Buffer.from('hi'))
87 + bl.append(Buffer.from('j'))
88 +
89 + t.equal(bl.length, 10)
90 +
91 + t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij')
92 + t.equal(bl.slice(3, 10).toString('ascii'), 'defghij')
93 + t.equal(bl.slice(3, 6).toString('ascii'), 'def')
94 + t.equal(bl.slice(3, 8).toString('ascii'), 'defgh')
95 + t.equal(bl.slice(5, 10).toString('ascii'), 'fghij')
96 + t.equal(bl.slice(-7, -4).toString('ascii'), 'def')
97 +
98 + t.end()
99 +})
100 +
101 +tape('multiple bytes from multiple buffer lists', function (t) {
102 + var bl = new BufferList()
103 +
104 + bl.append(new BufferList([ Buffer.from('abcd'), Buffer.from('efg') ]))
105 + bl.append(new BufferList([ Buffer.from('hi'), Buffer.from('j') ]))
106 +
107 + t.equal(bl.length, 10)
108 +
109 + t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij')
110 +
111 + t.equal(bl.slice(3, 10).toString('ascii'), 'defghij')
112 + t.equal(bl.slice(3, 6).toString('ascii'), 'def')
113 + t.equal(bl.slice(3, 8).toString('ascii'), 'defgh')
114 + t.equal(bl.slice(5, 10).toString('ascii'), 'fghij')
115 +
116 + t.end()
117 +})
118 +
119 +// same data as previous test, just using nested constructors
120 +tape('multiple bytes from crazy nested buffer lists', function (t) {
121 + var bl = new BufferList()
122 +
123 + bl.append(new BufferList([
124 + new BufferList([
125 + new BufferList(Buffer.from('abc'))
126 + , Buffer.from('d')
127 + , new BufferList(Buffer.from('efg'))
128 + ])
129 + , new BufferList([ Buffer.from('hi') ])
130 + , new BufferList(Buffer.from('j'))
131 + ]))
132 +
133 + t.equal(bl.length, 10)
134 +
135 + t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij')
136 +
137 + t.equal(bl.slice(3, 10).toString('ascii'), 'defghij')
138 + t.equal(bl.slice(3, 6).toString('ascii'), 'def')
139 + t.equal(bl.slice(3, 8).toString('ascii'), 'defgh')
140 + t.equal(bl.slice(5, 10).toString('ascii'), 'fghij')
141 +
142 + t.end()
143 +})
144 +
145 +tape('append accepts arrays of Buffers', function (t) {
146 + var bl = new BufferList()
147 + bl.append(Buffer.from('abc'))
148 + bl.append([ Buffer.from('def') ])
149 + bl.append([ Buffer.from('ghi'), Buffer.from('jkl') ])
150 + bl.append([ Buffer.from('mnop'), Buffer.from('qrstu'), Buffer.from('vwxyz') ])
151 + t.equal(bl.length, 26)
152 + t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz')
153 + t.end()
154 +})
155 +
156 +tape('append accepts arrays of BufferLists', function (t) {
157 + var bl = new BufferList()
158 + bl.append(Buffer.from('abc'))
159 + bl.append([ new BufferList('def') ])
160 + bl.append(new BufferList([ Buffer.from('ghi'), new BufferList('jkl') ]))
161 + bl.append([ Buffer.from('mnop'), new BufferList([ Buffer.from('qrstu'), Buffer.from('vwxyz') ]) ])
162 + t.equal(bl.length, 26)
163 + t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz')
164 + t.end()
165 +})
166 +
167 +tape('append chainable', function (t) {
168 + var bl = new BufferList()
169 + t.ok(bl.append(Buffer.from('abcd')) === bl)
170 + t.ok(bl.append([ Buffer.from('abcd') ]) === bl)
171 + t.ok(bl.append(new BufferList(Buffer.from('abcd'))) === bl)
172 + t.ok(bl.append([ new BufferList(Buffer.from('abcd')) ]) === bl)
173 + t.end()
174 +})
175 +
176 +tape('append chainable (test results)', function (t) {
177 + var bl = new BufferList('abc')
178 + .append([ new BufferList('def') ])
179 + .append(new BufferList([ Buffer.from('ghi'), new BufferList('jkl') ]))
180 + .append([ Buffer.from('mnop'), new BufferList([ Buffer.from('qrstu'), Buffer.from('vwxyz') ]) ])
181 +
182 + t.equal(bl.length, 26)
183 + t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz')
184 + t.end()
185 +})
186 +
187 +tape('consuming from multiple buffers', function (t) {
188 + var bl = new BufferList()
189 +
190 + bl.append(Buffer.from('abcd'))
191 + bl.append(Buffer.from('efg'))
192 + bl.append(Buffer.from('hi'))
193 + bl.append(Buffer.from('j'))
194 +
195 + t.equal(bl.length, 10)
196 +
197 + t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij')
198 +
199 + bl.consume(3)
200 + t.equal(bl.length, 7)
201 + t.equal(bl.slice(0, 7).toString('ascii'), 'defghij')
202 +
203 + bl.consume(2)
204 + t.equal(bl.length, 5)
205 + t.equal(bl.slice(0, 5).toString('ascii'), 'fghij')
206 +
207 + bl.consume(1)
208 + t.equal(bl.length, 4)
209 + t.equal(bl.slice(0, 4).toString('ascii'), 'ghij')
210 +
211 + bl.consume(1)
212 + t.equal(bl.length, 3)
213 + t.equal(bl.slice(0, 3).toString('ascii'), 'hij')
214 +
215 + bl.consume(2)
216 + t.equal(bl.length, 1)
217 + t.equal(bl.slice(0, 1).toString('ascii'), 'j')
218 +
219 + t.end()
220 +})
221 +
222 +tape('complete consumption', function (t) {
223 + var bl = new BufferList()
224 +
225 + bl.append(Buffer.from('a'))
226 + bl.append(Buffer.from('b'))
227 +
228 + bl.consume(2)
229 +
230 + t.equal(bl.length, 0)
231 + t.equal(bl._bufs.length, 0)
232 +
233 + t.end()
234 +})
235 +
236 +tape('test readUInt8 / readInt8', function (t) {
237 + var buf1 = Buffer.alloc(1)
238 + , buf2 = Buffer.alloc(3)
239 + , buf3 = Buffer.alloc(3)
240 + , bl = new BufferList()
241 +
242 + buf2[1] = 0x3
243 + buf2[2] = 0x4
244 + buf3[0] = 0x23
245 + buf3[1] = 0x42
246 +
247 + bl.append(buf1)
248 + bl.append(buf2)
249 + bl.append(buf3)
250 +
251 + t.equal(bl.readUInt8(2), 0x3)
252 + t.equal(bl.readInt8(2), 0x3)
253 + t.equal(bl.readUInt8(3), 0x4)
254 + t.equal(bl.readInt8(3), 0x4)
255 + t.equal(bl.readUInt8(4), 0x23)
256 + t.equal(bl.readInt8(4), 0x23)
257 + t.equal(bl.readUInt8(5), 0x42)
258 + t.equal(bl.readInt8(5), 0x42)
259 + t.end()
260 +})
261 +
262 +tape('test readUInt16LE / readUInt16BE / readInt16LE / readInt16BE', function (t) {
263 + var buf1 = Buffer.alloc(1)
264 + , buf2 = Buffer.alloc(3)
265 + , buf3 = Buffer.alloc(3)
266 + , bl = new BufferList()
267 +
268 + buf2[1] = 0x3
269 + buf2[2] = 0x4
270 + buf3[0] = 0x23
271 + buf3[1] = 0x42
272 +
273 + bl.append(buf1)
274 + bl.append(buf2)
275 + bl.append(buf3)
276 +
277 + t.equal(bl.readUInt16BE(2), 0x0304)
278 + t.equal(bl.readUInt16LE(2), 0x0403)
279 + t.equal(bl.readInt16BE(2), 0x0304)
280 + t.equal(bl.readInt16LE(2), 0x0403)
281 + t.equal(bl.readUInt16BE(3), 0x0423)
282 + t.equal(bl.readUInt16LE(3), 0x2304)
283 + t.equal(bl.readInt16BE(3), 0x0423)
284 + t.equal(bl.readInt16LE(3), 0x2304)
285 + t.equal(bl.readUInt16BE(4), 0x2342)
286 + t.equal(bl.readUInt16LE(4), 0x4223)
287 + t.equal(bl.readInt16BE(4), 0x2342)
288 + t.equal(bl.readInt16LE(4), 0x4223)
289 + t.end()
290 +})
291 +
292 +tape('test readUInt32LE / readUInt32BE / readInt32LE / readInt32BE', function (t) {
293 + var buf1 = Buffer.alloc(1)
294 + , buf2 = Buffer.alloc(3)
295 + , buf3 = Buffer.alloc(3)
296 + , bl = new BufferList()
297 +
298 + buf2[1] = 0x3
299 + buf2[2] = 0x4
300 + buf3[0] = 0x23
301 + buf3[1] = 0x42
302 +
303 + bl.append(buf1)
304 + bl.append(buf2)
305 + bl.append(buf3)
306 +
307 + t.equal(bl.readUInt32BE(2), 0x03042342)
308 + t.equal(bl.readUInt32LE(2), 0x42230403)
309 + t.equal(bl.readInt32BE(2), 0x03042342)
310 + t.equal(bl.readInt32LE(2), 0x42230403)
311 + t.end()
312 +})
313 +
314 +tape('test readUIntLE / readUIntBE / readIntLE / readIntBE', function (t) {
315 + var buf1 = Buffer.alloc(1)
316 + , buf2 = Buffer.alloc(3)
317 + , buf3 = Buffer.alloc(3)
318 + , bl = new BufferList()
319 +
320 + buf2[0] = 0x2
321 + buf2[1] = 0x3
322 + buf2[2] = 0x4
323 + buf3[0] = 0x23
324 + buf3[1] = 0x42
325 + buf3[2] = 0x61
326 +
327 + bl.append(buf1)
328 + bl.append(buf2)
329 + bl.append(buf3)
330 +
331 + t.equal(bl.readUIntBE(1, 1), 0x02)
332 + t.equal(bl.readUIntBE(1, 2), 0x0203)
333 + t.equal(bl.readUIntBE(1, 3), 0x020304)
334 + t.equal(bl.readUIntBE(1, 4), 0x02030423)
335 + t.equal(bl.readUIntBE(1, 5), 0x0203042342)
336 + t.equal(bl.readUIntBE(1, 6), 0x020304234261)
337 + t.equal(bl.readUIntLE(1, 1), 0x02)
338 + t.equal(bl.readUIntLE(1, 2), 0x0302)
339 + t.equal(bl.readUIntLE(1, 3), 0x040302)
340 + t.equal(bl.readUIntLE(1, 4), 0x23040302)
341 + t.equal(bl.readUIntLE(1, 5), 0x4223040302)
342 + t.equal(bl.readUIntLE(1, 6), 0x614223040302)
343 + t.equal(bl.readIntBE(1, 1), 0x02)
344 + t.equal(bl.readIntBE(1, 2), 0x0203)
345 + t.equal(bl.readIntBE(1, 3), 0x020304)
346 + t.equal(bl.readIntBE(1, 4), 0x02030423)
347 + t.equal(bl.readIntBE(1, 5), 0x0203042342)
348 + t.equal(bl.readIntBE(1, 6), 0x020304234261)
349 + t.equal(bl.readIntLE(1, 1), 0x02)
350 + t.equal(bl.readIntLE(1, 2), 0x0302)
351 + t.equal(bl.readIntLE(1, 3), 0x040302)
352 + t.equal(bl.readIntLE(1, 4), 0x23040302)
353 + t.equal(bl.readIntLE(1, 5), 0x4223040302)
354 + t.equal(bl.readIntLE(1, 6), 0x614223040302)
355 + t.end()
356 +})
357 +
358 +tape('test readFloatLE / readFloatBE', function (t) {
359 + var buf1 = Buffer.alloc(1)
360 + , buf2 = Buffer.alloc(3)
361 + , buf3 = Buffer.alloc(3)
362 + , bl = new BufferList()
363 +
364 + buf2[1] = 0x00
365 + buf2[2] = 0x00
366 + buf3[0] = 0x80
367 + buf3[1] = 0x3f
368 +
369 + bl.append(buf1)
370 + bl.append(buf2)
371 + bl.append(buf3)
372 +
373 + t.equal(bl.readFloatLE(2), 0x01)
374 + t.end()
375 +})
376 +
377 +tape('test readDoubleLE / readDoubleBE', function (t) {
378 + var buf1 = Buffer.alloc(1)
379 + , buf2 = Buffer.alloc(3)
380 + , buf3 = Buffer.alloc(10)
381 + , bl = new BufferList()
382 +
383 + buf2[1] = 0x55
384 + buf2[2] = 0x55
385 + buf3[0] = 0x55
386 + buf3[1] = 0x55
387 + buf3[2] = 0x55
388 + buf3[3] = 0x55
389 + buf3[4] = 0xd5
390 + buf3[5] = 0x3f
391 +
392 + bl.append(buf1)
393 + bl.append(buf2)
394 + bl.append(buf3)
395 +
396 + t.equal(bl.readDoubleLE(2), 0.3333333333333333)
397 + t.end()
398 +})
399 +
400 +tape('test toString', function (t) {
401 + var bl = new BufferList()
402 +
403 + bl.append(Buffer.from('abcd'))
404 + bl.append(Buffer.from('efg'))
405 + bl.append(Buffer.from('hi'))
406 + bl.append(Buffer.from('j'))
407 +
408 + t.equal(bl.toString('ascii', 0, 10), 'abcdefghij')
409 + t.equal(bl.toString('ascii', 3, 10), 'defghij')
410 + t.equal(bl.toString('ascii', 3, 6), 'def')
411 + t.equal(bl.toString('ascii', 3, 8), 'defgh')
412 + t.equal(bl.toString('ascii', 5, 10), 'fghij')
413 +
414 + t.end()
415 +})
416 +
417 +tape('test toString encoding', function (t) {
418 + var bl = new BufferList()
419 + , b = Buffer.from('abcdefghij\xff\x00')
420 +
421 + bl.append(Buffer.from('abcd'))
422 + bl.append(Buffer.from('efg'))
423 + bl.append(Buffer.from('hi'))
424 + bl.append(Buffer.from('j'))
425 + bl.append(Buffer.from('\xff\x00'))
426 +
427 + encodings.forEach(function (enc) {
428 + t.equal(bl.toString(enc), b.toString(enc), enc)
429 + })
430 +
431 + t.end()
432 +})
433 +
434 +!process.browser && tape('test stream', function (t) {
435 + var random = crypto.randomBytes(65534)
436 + , rndhash = hash(random, 'md5')
437 + , md5sum = crypto.createHash('md5')
438 + , bl = new BufferList(function (err, buf) {
439 + t.ok(Buffer.isBuffer(buf))
440 + t.ok(err === null)
441 + t.equal(rndhash, hash(bl.slice(), 'md5'))
442 + t.equal(rndhash, hash(buf, 'md5'))
443 +
444 + bl.pipe(fs.createWriteStream('/tmp/bl_test_rnd_out.dat'))
445 + .on('close', function () {
446 + var s = fs.createReadStream('/tmp/bl_test_rnd_out.dat')
447 + s.on('data', md5sum.update.bind(md5sum))
448 + s.on('end', function() {
449 + t.equal(rndhash, md5sum.digest('hex'), 'woohoo! correct hash!')
450 + t.end()
451 + })
452 + })
453 +
454 + })
455 +
456 + fs.writeFileSync('/tmp/bl_test_rnd.dat', random)
457 + fs.createReadStream('/tmp/bl_test_rnd.dat').pipe(bl)
458 +})
459 +
460 +tape('instantiation with Buffer', function (t) {
461 + var buf = crypto.randomBytes(1024)
462 + , buf2 = crypto.randomBytes(1024)
463 + , b = BufferList(buf)
464 +
465 + t.equal(buf.toString('hex'), b.slice().toString('hex'), 'same buffer')
466 + b = BufferList([ buf, buf2 ])
467 + t.equal(b.slice().toString('hex'), Buffer.concat([ buf, buf2 ]).toString('hex'), 'same buffer')
468 + t.end()
469 +})
470 +
471 +tape('test String appendage', function (t) {
472 + var bl = new BufferList()
473 + , b = Buffer.from('abcdefghij\xff\x00')
474 +
475 + bl.append('abcd')
476 + bl.append('efg')
477 + bl.append('hi')
478 + bl.append('j')
479 + bl.append('\xff\x00')
480 +
481 + encodings.forEach(function (enc) {
482 + t.equal(bl.toString(enc), b.toString(enc))
483 + })
484 +
485 + t.end()
486 +})
487 +
488 +tape('test Number appendage', function (t) {
489 + var bl = new BufferList()
490 + , b = Buffer.from('1234567890')
491 +
492 + bl.append(1234)
493 + bl.append(567)
494 + bl.append(89)
495 + bl.append(0)
496 +
497 + encodings.forEach(function (enc) {
498 + t.equal(bl.toString(enc), b.toString(enc))
499 + })
500 +
501 + t.end()
502 +})
503 +
504 +tape('write nothing, should get empty buffer', function (t) {
505 + t.plan(3)
506 + BufferList(function (err, data) {
507 + t.notOk(err, 'no error')
508 + t.ok(Buffer.isBuffer(data), 'got a buffer')
509 + t.equal(0, data.length, 'got a zero-length buffer')
510 + t.end()
511 + }).end()
512 +})
513 +
514 +tape('unicode string', function (t) {
515 + t.plan(2)
516 + var inp1 = '\u2600'
517 + , inp2 = '\u2603'
518 + , exp = inp1 + ' and ' + inp2
519 + , bl = BufferList()
520 + bl.write(inp1)
521 + bl.write(' and ')
522 + bl.write(inp2)
523 + t.equal(exp, bl.toString())
524 + t.equal(Buffer.from(exp).toString('hex'), bl.toString('hex'))
525 +})
526 +
527 +tape('should emit finish', function (t) {
528 + var source = BufferList()
529 + , dest = BufferList()
530 +
531 + source.write('hello')
532 + source.pipe(dest)
533 +
534 + dest.on('finish', function () {
535 + t.equal(dest.toString('utf8'), 'hello')
536 + t.end()
537 + })
538 +})
539 +
540 +tape('basic copy', function (t) {
541 + var buf = crypto.randomBytes(1024)
542 + , buf2 = Buffer.alloc(1024)
543 + , b = BufferList(buf)
544 +
545 + b.copy(buf2)
546 + t.equal(b.slice().toString('hex'), buf2.toString('hex'), 'same buffer')
547 + t.end()
548 +})
549 +
550 +tape('copy after many appends', function (t) {
551 + var buf = crypto.randomBytes(512)
552 + , buf2 = Buffer.alloc(1024)
553 + , b = BufferList(buf)
554 +
555 + b.append(buf)
556 + b.copy(buf2)
557 + t.equal(b.slice().toString('hex'), buf2.toString('hex'), 'same buffer')
558 + t.end()
559 +})
560 +
561 +tape('copy at a precise position', function (t) {
562 + var buf = crypto.randomBytes(1004)
563 + , buf2 = Buffer.alloc(1024)
564 + , b = BufferList(buf)
565 +
566 + b.copy(buf2, 20)
567 + t.equal(b.slice().toString('hex'), buf2.slice(20).toString('hex'), 'same buffer')
568 + t.end()
569 +})
570 +
571 +tape('copy starting from a precise location', function (t) {
572 + var buf = crypto.randomBytes(10)
573 + , buf2 = Buffer.alloc(5)
574 + , b = BufferList(buf)
575 +
576 + b.copy(buf2, 0, 5)
577 + t.equal(b.slice(5).toString('hex'), buf2.toString('hex'), 'same buffer')
578 + t.end()
579 +})
580 +
581 +tape('copy in an interval', function (t) {
582 + var rnd = crypto.randomBytes(10)
583 + , b = BufferList(rnd) // put the random bytes there
584 + , actual = Buffer.alloc(3)
585 + , expected = Buffer.alloc(3)
586 +
587 + rnd.copy(expected, 0, 5, 8)
588 + b.copy(actual, 0, 5, 8)
589 +
590 + t.equal(actual.toString('hex'), expected.toString('hex'), 'same buffer')
591 + t.end()
592 +})
593 +
594 +tape('copy an interval between two buffers', function (t) {
595 + var buf = crypto.randomBytes(10)
596 + , buf2 = Buffer.alloc(10)
597 + , b = BufferList(buf)
598 +
599 + b.append(buf)
600 + b.copy(buf2, 0, 5, 15)
601 +
602 + t.equal(b.slice(5, 15).toString('hex'), buf2.toString('hex'), 'same buffer')
603 + t.end()
604 +})
605 +
606 +tape('shallow slice across buffer boundaries', function (t) {
607 + var bl = new BufferList(['First', 'Second', 'Third'])
608 +
609 + t.equal(bl.shallowSlice(3, 13).toString(), 'stSecondTh')
610 + t.end()
611 +})
612 +
613 +tape('shallow slice within single buffer', function (t) {
614 + t.plan(2)
615 + var bl = new BufferList(['First', 'Second', 'Third'])
616 +
617 + t.equal(bl.shallowSlice(5, 10).toString(), 'Secon')
618 + t.equal(bl.shallowSlice(7, 10).toString(), 'con')
619 + t.end()
620 +})
621 +
622 +tape('shallow slice single buffer', function (t) {
623 + t.plan(3)
624 + var bl = new BufferList(['First', 'Second', 'Third'])
625 +
626 + t.equal(bl.shallowSlice(0, 5).toString(), 'First')
627 + t.equal(bl.shallowSlice(5, 11).toString(), 'Second')
628 + t.equal(bl.shallowSlice(11, 16).toString(), 'Third')
629 +})
630 +
631 +tape('shallow slice with negative or omitted indices', function (t) {
632 + t.plan(4)
633 + var bl = new BufferList(['First', 'Second', 'Third'])
634 +
635 + t.equal(bl.shallowSlice().toString(), 'FirstSecondThird')
636 + t.equal(bl.shallowSlice(5).toString(), 'SecondThird')
637 + t.equal(bl.shallowSlice(5, -3).toString(), 'SecondTh')
638 + t.equal(bl.shallowSlice(-8).toString(), 'ondThird')
639 +})
640 +
641 +tape('shallow slice does not make a copy', function (t) {
642 + t.plan(1)
643 + var buffers = [Buffer.from('First'), Buffer.from('Second'), Buffer.from('Third')]
644 + var bl = (new BufferList(buffers)).shallowSlice(5, -3)
645 +
646 + buffers[1].fill('h')
647 + buffers[2].fill('h')
648 +
649 + t.equal(bl.toString(), 'hhhhhhhh')
650 +})
651 +
652 +tape('shallow slice with 0 length', function (t) {
653 + t.plan(1)
654 + var buffers = [Buffer.from('First'), Buffer.from('Second'), Buffer.from('Third')]
655 + var bl = (new BufferList(buffers)).shallowSlice(0, 0)
656 + t.equal(bl.length, 0)
657 +})
658 +
659 +tape('shallow slice with 0 length from middle', function (t) {
660 + t.plan(1)
661 + var buffers = [Buffer.from('First'), Buffer.from('Second'), Buffer.from('Third')]
662 + var bl = (new BufferList(buffers)).shallowSlice(10, 10)
663 + t.equal(bl.length, 0)
664 +})
665 +
tape('duplicate', function (t) {
  t.plan(2)

  // duplicate() must return a new BufferList with the same contents and
  // the same prototype chain as the source list.
  var bl = new BufferList('abcdefghij\xff\x00')
    , dup = bl.duplicate()

  // Bug fix: instances have no `.prototype` property, so the previous
  // `t.equal(bl.prototype, dup.prototype)` compared undefined with
  // undefined and passed vacuously. Compare the actual prototypes.
  t.equal(Object.getPrototypeOf(bl), Object.getPrototypeOf(dup))
  t.equal(bl.toString('hex'), dup.toString('hex'))
})
675 +
676 +tape('destroy no pipe', function (t) {
677 + t.plan(2)
678 +
679 + var bl = new BufferList('alsdkfja;lsdkfja;lsdk')
680 + bl.destroy()
681 +
682 + t.equal(bl._bufs.length, 0)
683 + t.equal(bl.length, 0)
684 +})
685 +
686 +tape('destroy with error', function (t) {
687 + t.plan(3)
688 +
689 + var bl = new BufferList('alsdkfja;lsdkfja;lsdk')
690 + var err = new Error('kaboom')
691 + bl.destroy(err)
692 + bl.on('error', function (_err) {
693 + t.equal(_err, err)
694 + })
695 +
696 + t.equal(bl._bufs.length, 0)
697 + t.equal(bl.length, 0)
698 +})
699 +
700 +!process.browser && tape('destroy with pipe before read end', function (t) {
701 + t.plan(2)
702 +
703 + var bl = new BufferList()
704 + fs.createReadStream(__dirname + '/test.js')
705 + .pipe(bl)
706 +
707 + bl.destroy()
708 +
709 + t.equal(bl._bufs.length, 0)
710 + t.equal(bl.length, 0)
711 +
712 +})
713 +
714 +!process.browser && tape('destroy with pipe before read end with race', function (t) {
715 + t.plan(2)
716 +
717 + var bl = new BufferList()
718 + fs.createReadStream(__dirname + '/test.js')
719 + .pipe(bl)
720 +
721 + setTimeout(function () {
722 + bl.destroy()
723 + setTimeout(function () {
724 + t.equal(bl._bufs.length, 0)
725 + t.equal(bl.length, 0)
726 + }, 500)
727 + }, 500)
728 +})
729 +
730 +!process.browser && tape('destroy with pipe after read end', function (t) {
731 + t.plan(2)
732 +
733 + var bl = new BufferList()
734 + fs.createReadStream(__dirname + '/test.js')
735 + .on('end', onEnd)
736 + .pipe(bl)
737 +
738 + function onEnd () {
739 + bl.destroy()
740 +
741 + t.equal(bl._bufs.length, 0)
742 + t.equal(bl.length, 0)
743 + }
744 +})
745 +
746 +!process.browser && tape('destroy with pipe while writing to a destination', function (t) {
747 + t.plan(4)
748 +
749 + var bl = new BufferList()
750 + , ds = new BufferList()
751 +
752 + fs.createReadStream(__dirname + '/test.js')
753 + .on('end', onEnd)
754 + .pipe(bl)
755 +
756 + function onEnd () {
757 + bl.pipe(ds)
758 +
759 + setTimeout(function () {
760 + bl.destroy()
761 +
762 + t.equals(bl._bufs.length, 0)
763 + t.equals(bl.length, 0)
764 +
765 + ds.destroy()
766 +
767 + t.equals(bl._bufs.length, 0)
768 + t.equals(bl.length, 0)
769 +
770 + }, 100)
771 + }
772 +})
773 +
774 +!process.browser && tape('handle error', function (t) {
775 + t.plan(2)
776 + fs.createReadStream('/does/not/exist').pipe(BufferList(function (err, data) {
777 + t.ok(err instanceof Error, 'has error')
778 + t.notOk(data, 'no data')
779 + }))
780 +})
node_modules/end-of-stream/LICENSEView
@@ -1,0 +1,21 @@
1 +The MIT License (MIT)
2 +
3 +Copyright (c) 2014 Mathias Buus
4 +
5 +Permission is hereby granted, free of charge, to any person obtaining a copy
6 +of this software and associated documentation files (the "Software"), to deal
7 +in the Software without restriction, including without limitation the rights
8 +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 +copies of the Software, and to permit persons to whom the Software is
10 +furnished to do so, subject to the following conditions:
11 +
12 +The above copyright notice and this permission notice shall be included in
13 +all copies or substantial portions of the Software.
14 +
15 +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
21 +THE SOFTWARE.
node_modules/end-of-stream/README.mdView
@@ -1,0 +1,52 @@
1 +# end-of-stream
2 +
3 +A node module that calls a callback when a readable/writable/duplex stream has completed or failed.
4 +
5 + npm install end-of-stream
6 +
7 +## Usage
8 +
9 +Simply pass a stream and a callback to the `eos`.
10 +Legacy streams, streams2, and streams3 are all supported.
11 +
12 +``` js
13 +var eos = require('end-of-stream');
14 +
15 +eos(readableStream, function(err) {
16 + // this will be set to the stream instance
17 + if (err) return console.log('stream had an error or closed early');
18 + console.log('stream has ended', this === readableStream);
19 +});
20 +
21 +eos(writableStream, function(err) {
22 + if (err) return console.log('stream had an error or closed early');
23 + console.log('stream has finished', this === writableStream);
24 +});
25 +
26 +eos(duplexStream, function(err) {
27 + if (err) return console.log('stream had an error or closed early');
28 + console.log('stream has ended and finished', this === duplexStream);
29 +});
30 +
31 +eos(duplexStream, {readable:false}, function(err) {
32 + if (err) return console.log('stream had an error or closed early');
33 + console.log('stream has finished but might still be readable');
34 +});
35 +
36 +eos(duplexStream, {writable:false}, function(err) {
37 + if (err) return console.log('stream had an error or closed early');
38 + console.log('stream has ended but might still be writable');
39 +});
40 +
41 +eos(readableStream, {error:false}, function(err) {
42 + // do not treat emit('error', err) as a end-of-stream
43 +});
44 +```
45 +
46 +## License
47 +
48 +MIT
49 +
50 +## Related
51 +
52 +`end-of-stream` is part of the [mississippi stream utility collection](https://github.com/maxogden/mississippi) which includes more useful stream modules similar to this one.
node_modules/end-of-stream/index.jsView
@@ -1,0 +1,87 @@
1 +var once = require('once');
2 +
// Shared no-op used when eos() is called without a callback.
var noop = function () {};

// Heuristic: an http.ClientRequest-like object exposes setHeader plus an
// abort() method.
var isRequest = function (stream) {
  return stream.setHeader && typeof stream.abort === 'function';
};

// Heuristic: a child_process handle carries a three-entry stdio array
// ([stdin, stdout, stderr]).
var isChildProcess = function (stream) {
  return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3
};
12 +
// Invoke `callback` exactly once when `stream` has fully ended and/or
// finished, errored, or closed prematurely. Returns a function that
// detaches every listener this call attached.
//
// opts.readable / opts.writable override the stream's own flags;
// opts.error === false suppresses treating 'error' as end-of-stream.
var eos = function (stream, opts, callback) {
  // Support the two-argument form: eos(stream, callback).
  if (typeof opts === 'function') return eos(stream, null, opts);
  if (!opts) opts = {};

  callback = once(callback || noop);

  var writableState = stream._writableState;
  var readableState = stream._readableState;

  // Explicit opts win; otherwise fall back to the stream's own flags.
  var readable = opts.readable || (opts.readable !== false && stream.readable);
  var writable = opts.writable || (opts.writable !== false && stream.writable);

  function onfinish () {
    writable = false;
    if (!readable) callback.call(stream);
  }

  function onend () {
    readable = false;
    if (!writable) callback.call(stream);
  }

  function onlegacyfinish () {
    // Legacy writables have no 'finish' event; infer it from the
    // writable flag dropping after 'end'/'close'.
    if (!stream.writable) onfinish();
  }

  function onexit (exitCode) {
    callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null);
  }

  function onerror (err) {
    callback.call(stream, err);
  }

  function onclose () {
    // 'close' before end/finish is a premature close unless the state
    // object says the side in question already ended.
    if (readable && !(readableState && readableState.ended)) return callback.call(stream, new Error('premature close'));
    if (writable && !(writableState && writableState.ended)) return callback.call(stream, new Error('premature close'));
  }

  function onrequest () {
    stream.req.on('finish', onfinish);
  }

  if (isRequest(stream)) {
    // http.ClientRequest: completion is signalled via the request object.
    stream.on('complete', onfinish);
    stream.on('abort', onclose);
    if (stream.req) onrequest();
    else stream.on('request', onrequest);
  } else if (writable && !writableState) { // legacy streams
    stream.on('end', onlegacyfinish);
    stream.on('close', onlegacyfinish);
  }

  if (isChildProcess(stream)) stream.on('exit', onexit);

  stream.on('end', onend);
  stream.on('finish', onfinish);
  if (opts.error !== false) stream.on('error', onerror);
  stream.on('close', onclose);

  // Detach everything that might have been attached above.
  return function () {
    stream.removeListener('complete', onfinish);
    stream.removeListener('abort', onclose);
    stream.removeListener('request', onrequest);
    if (stream.req) stream.req.removeListener('finish', onfinish);
    stream.removeListener('end', onlegacyfinish);
    stream.removeListener('close', onlegacyfinish);
    stream.removeListener('finish', onfinish);
    stream.removeListener('exit', onexit);
    stream.removeListener('end', onend);
    stream.removeListener('error', onerror);
    stream.removeListener('close', onclose);
  };
};
86 +
87 +module.exports = eos;
node_modules/end-of-stream/package.jsonView
@@ -1,0 +1,65 @@
1 +{
2 + "_args": [
3 + [
4 + "end-of-stream@1.4.1",
5 + "/home/cel/src/ssb-npm-registry"
6 + ]
7 + ],
8 + "_from": "end-of-stream@1.4.1",
9 + "_id": "end-of-stream@1.4.1",
10 + "_inBundle": false,
11 + "_integrity": "sha256-P8z5hny+y3o1XkPZydXCy+IWvXs1Uy1SttnMErC6OF4=",
12 + "_location": "/end-of-stream",
13 + "_phantomChildren": {},
14 + "_requested": {
15 + "type": "version",
16 + "registry": true,
17 + "raw": "end-of-stream@1.4.1",
18 + "name": "end-of-stream",
19 + "escapedName": "end-of-stream",
20 + "rawSpec": "1.4.1",
21 + "saveSpec": null,
22 + "fetchSpec": "1.4.1"
23 + },
24 + "_requiredBy": [
25 + "/tar-stream"
26 + ],
27 + "_resolved": "http://localhost:8989/blobs/get/&P8z5hny+y3o1XkPZydXCy+IWvXs1Uy1SttnMErC6OF4=.sha256",
28 + "_spec": "1.4.1",
29 + "_where": "/home/cel/src/ssb-npm-registry",
30 + "author": {
31 + "name": "Mathias Buus",
32 + "email": "mathiasbuus@gmail.com"
33 + },
34 + "bugs": {
35 + "url": "https://github.com/mafintosh/end-of-stream/issues"
36 + },
37 + "dependencies": {
38 + "once": "^1.4.0"
39 + },
40 + "description": "Call a callback when a readable/writable/duplex stream has completed or failed.",
41 + "files": [
42 + "index.js"
43 + ],
44 + "homepage": "https://github.com/mafintosh/end-of-stream",
45 + "keywords": [
46 + "stream",
47 + "streams",
48 + "callback",
49 + "finish",
50 + "close",
51 + "end",
52 + "wait"
53 + ],
54 + "license": "MIT",
55 + "main": "index.js",
56 + "name": "end-of-stream",
57 + "repository": {
58 + "type": "git",
59 + "url": "git://github.com/mafintosh/end-of-stream.git"
60 + },
61 + "scripts": {
62 + "test": "node test.js"
63 + },
64 + "version": "1.4.1"
65 +}
node_modules/fs-constants/LICENSEView
@@ -1,0 +1,21 @@
1 +The MIT License (MIT)
2 +
3 +Copyright (c) 2018 Mathias Buus
4 +
5 +Permission is hereby granted, free of charge, to any person obtaining a copy
6 +of this software and associated documentation files (the "Software"), to deal
7 +in the Software without restriction, including without limitation the rights
8 +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 +copies of the Software, and to permit persons to whom the Software is
10 +furnished to do so, subject to the following conditions:
11 +
12 +The above copyright notice and this permission notice shall be included in
13 +all copies or substantial portions of the Software.
14 +
15 +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
21 +THE SOFTWARE.
node_modules/fs-constants/README.mdView
@@ -1,0 +1,26 @@
1 +# fs-constants
2 +
3 +Small module that allows you to get the fs constants across
4 +Node and the browser.
5 +
6 +```
7 +npm install fs-constants
8 +```
9 +
10 +Previously you would use `require('constants')` for this in node but that has been
11 +deprecated and changed to `require('fs').constants` which does not browserify.
12 +
13 +This module uses `require('constants')` in the browser and `require('fs').constants` in node to work around this.
14 +
15 +
16 +## Usage
17 +
18 +``` js
19 +var constants = require('fs-constants')
20 +
21 +console.log('constants:', constants)
22 +```
23 +
24 +## License
25 +
26 +MIT
node_modules/fs-constants/browser.jsView
@@ -1,0 +1,1 @@
1 +module.exports = require('constants')
node_modules/fs-constants/index.jsView
@@ -1,0 +1,1 @@
1 +module.exports = require('fs').constants || require('constants')
node_modules/fs-constants/package.jsonView
@@ -1,0 +1,50 @@
1 +{
2 + "_args": [
3 + [
4 + "fs-constants@1.0.0",
5 + "/home/cel/src/ssb-npm-registry"
6 + ]
7 + ],
8 + "_from": "fs-constants@1.0.0",
9 + "_id": "fs-constants@1.0.0",
10 + "_inBundle": false,
11 + "_integrity": "sha256-55NtfhskR1vVkxLEAF8w1CU5Yn38HE9d/iMm17fHxfo=",
12 + "_location": "/fs-constants",
13 + "_phantomChildren": {},
14 + "_requested": {
15 + "type": "version",
16 + "registry": true,
17 + "raw": "fs-constants@1.0.0",
18 + "name": "fs-constants",
19 + "escapedName": "fs-constants",
20 + "rawSpec": "1.0.0",
21 + "saveSpec": null,
22 + "fetchSpec": "1.0.0"
23 + },
24 + "_requiredBy": [
25 + "/tar-stream"
26 + ],
27 + "_resolved": "http://localhost:8989/blobs/get/&55NtfhskR1vVkxLEAF8w1CU5Yn38HE9d/iMm17fHxfo=.sha256",
28 + "_spec": "1.0.0",
29 + "_where": "/home/cel/src/ssb-npm-registry",
30 + "author": {
31 + "name": "Mathias Buus",
32 + "url": "@mafintosh"
33 + },
34 + "browser": "browser.js",
35 + "bugs": {
36 + "url": "https://github.com/mafintosh/fs-constants/issues"
37 + },
38 + "dependencies": {},
39 + "description": "Require constants across node and the browser",
40 + "devDependencies": {},
41 + "homepage": "https://github.com/mafintosh/fs-constants",
42 + "license": "MIT",
43 + "main": "index.js",
44 + "name": "fs-constants",
45 + "repository": {
46 + "type": "git",
47 + "url": "git+https://github.com/mafintosh/fs-constants.git"
48 + },
49 + "version": "1.0.0"
50 +}
node_modules/hashlru/.travis.ymlView
@@ -1,0 +1,8 @@
1 +language: node_js
2 +node_js:
3 +- '0.10'
4 +- '0.12'
5 +- '4'
6 +- '5'
7 +- '6'
8 +- '7'
node_modules/hashlru/LICENSEView
@@ -1,0 +1,22 @@
1 +Copyright (c) 2016 'Dominic Tarr'
2 +
3 +Permission is hereby granted, free of charge,
4 +to any person obtaining a copy of this software and
5 +associated documentation files (the "Software"), to
6 +deal in the Software without restriction, including
7 +without limitation the rights to use, copy, modify,
8 +merge, publish, distribute, sublicense, and/or sell
9 +copies of the Software, and to permit persons to whom
10 +the Software is furnished to do so,
11 +subject to the following conditions:
12 +
13 +The above copyright notice and this permission notice
14 +shall be included in all copies or substantial portions of the Software.
15 +
16 +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
17 +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
18 +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
19 +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
20 +ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
node_modules/hashlru/README.mdView
@@ -1,0 +1,101 @@
1 +# hashlru
2 +
3 +Simpler, faster LRU cache algorithm
4 +
5 +A Least Recently Used cache is used to speedup requests to a key-value oriented resource,
6 +while making a bounded memory commitment.
7 +
8 +I've recently [benchmarked the various lru implementations available on npm](https://github.com/dominictarr/bench-lru)
9 +and found wildly varying performance. There were some that performed well overall,
10 +and others that performed extremely well in some cases, but poorly in others, due to
11 +compromises made to maintain correctness.
12 +
13 +After writing the benchmark, of course I had to try my hand at my own LRU implementation.
14 +I soon found a few things: LRUs are quite difficult to implement; first of all, they contain a linked
15 +list. LRUs use a linked list to maintain the order
16 +that keys have been accessed, so that when the cache fills, the old values
17 +(which presumably are the least likely to be needed again) can be removed from the cache.
18 +Linked Lists are not easy to implement correctly!
19 +
20 +Then I discovered why some of the fast algorithms were so slow - they used `delete cache[key]`
21 +which is much slower than `cache[key] = value`, much much slower.
22 +
23 +So, while looking for a way to avoid `delete` I had an idea - have two cache objects,
24 +and when one fills - create a new one and start putting items in that, and when it's sufficiently
25 +full, throw it away. It avoids delete and, at max, commits us to only N values and between N and 2N keys.
26 +
27 +Then I realized with this pattern, you _don't actually need_ the linked list anymore!
28 +This makes a N-2N least recently used cache very very simple. This both has performance benefits,
29 +and it's also very easy to verify its correctness.
30 +
31 +This algorithm does not give you an ordered list of the N most recently used items,
32 +but you do not really need that! The property of dropping the least recent items is still preserved.
33 +
34 +see a [benchmark](https://github.com/dominictarr/bench-lru) of this against
35 +the other LRU implementations on npm.
36 +
37 +## example
38 +
39 +``` js
40 +var HLRU = require('hashlru')
41 +var lru = HLRU(100)
42 +lru.set(key, value)
43 +lru.get(key)
44 +```
45 +
46 +## algorithm
47 +
48 +create two caches - `old_cache` and `new_cache`, and a counter, `size`.
49 +
50 +When an `key, value` pair is added, if `key` is already in `new_cache` update the value,
51 +not currently in `new_cache`, set `new_cache[key] = value`.
52 +If the key was _not_ already in `new_cache` then `size` is incremented.
53 +If `size > max`, move the `old_cache = new_cache`, reset `size = 0`, and initialize a new `new_cache={}`
54 +
55 +To get a `key`, check if `new_cache` contains key, and if so, return it.
56 +If not, check if it is in `old_cache` and if so, move that value to `new_cache`, and increment `size`.
57 +If `size > max`, move the `old_cache = new_cache`, reset `size = 0`, and initialize a new `new_cache={}`
58 +
59 +## complexity
60 +
61 +Writes are O(1) on average, like a hash table.
62 +
63 +When implemented in a garbage collected language, the old cache is thrown away when the new cache is
64 +full. To better manage memory usage, it could also be implemented as two fixed-size hash tables.
65 +In this case, instead of discarding the old cache, it would be zeroed. This means at most every N
66 +writes when the caches are rotated, that write will require N operations (to clear the old cache)
67 +
68 +This still averages out to O(1) but it does cost O(N) but only every N writes (except for updates)
69 +so N/N is still 1.
70 +
71 +## HashLRU (max) => lru
72 +
73 +initialize a lru object.
74 +
75 +### lru.get (key) => value | undefined
76 +
77 +Returns the value in the cache, or `undefined` if the value is not in the cache.
78 +
79 +### lru.set(key, value)
80 +
81 +update the value for key.
82 +
83 +### lru.has(key) => boolean
84 +
85 +Checks if the `key` is in the cache.
86 +
87 +### lru.remove(key)
88 +
89 +Removes the `key` from the cache.
90 +
91 +### lru.clear()
92 +
93 +Empties the entire cache.
94 +
95 +## License
96 +
97 +MIT
98 +
99 +
100 +
101 +
node_modules/hashlru/bench.jsView
@@ -1,0 +1,47 @@
1 +var Stats = require('statistics/mutate')
2 +var LRU = require('./')
3 +
4 +//simple benchmarks, and measure standard deviation
5 +
6 +function run (N, op, init) {
7 + var stats = null, value
8 + for(var j = 0; j < 100; j++) {
9 + if(init) value = init(j)
10 + var start = Date.now()
11 + for(var i = 0; i < N; i++) op(value, i)
12 + stats = Stats(stats, N/((Date.now() - start)))
13 + }
14 + return stats
15 +}
16 +
17 +//set 1000 random items, then read 10000 items.
18 +//since they are random, there will be misses as well as hits
19 +console.log('GET', run(100000, function (lru, n) {
20 + lru.get(~~(Math.random()*1000))
21 +// lru.set(n, Math.random())
22 +}, function () {
23 + var lru = LRU(1000)
24 + for(var i = 0; i ++ ; i < 1000)
25 + lru.set(~~(Math.random()*1000), Math.random())
26 + return lru
27 +}))
28 +
29 +//set 100000 random values into LRU for 1000 values.
30 +//this means 99/100 should be evictions
31 +console.log('SET', run(100000, function (lru, n) {
32 + lru.set(~~(Math.random()*100000), Math.random())
33 +}, function () {
34 + return LRU(1000)
35 +}))
36 +
37 +
38 +
39 +
40 +
41 +
42 +
43 +
44 +
45 +
46 +
47 +
node_modules/hashlru/index.d.tsView
@@ -1,0 +1,7 @@
1 +export default function HLRU(max: number): {
2 + has: (key: string | number) => boolean;
3 + remove: (key: string | number) => void;
4 + get: (key: string | number) => any;
5 + set: (key: string | number, value: any) => void;
6 + clear: () => void;
7 +};
node_modules/hashlru/index.jsView
@@ -1,0 +1,51 @@
1 +module.exports = function (max) {
2 +
3 + if (!max) throw Error('hashlru must have a max value, of type number, greater than 0')
4 +
5 + var size = 0, cache = Object.create(null), _cache = Object.create(null)
6 +
7 + function update (key, value) {
8 + cache[key] = value
9 + size ++
10 + if(size >= max) {
11 + size = 0
12 + _cache = cache
13 + cache = Object.create(null)
14 + }
15 + }
16 +
17 + return {
18 + has: function (key) {
19 + return cache[key] !== undefined || _cache[key] !== undefined
20 + },
21 + remove: function (key) {
22 + if(cache[key] !== undefined)
23 + cache[key] = undefined
24 + if(_cache[key] !== undefined)
25 + _cache[key] = undefined
26 + },
27 + get: function (key) {
28 + var v = cache[key]
29 + if(v !== undefined) return v
30 + if((v = _cache[key]) !== undefined) {
31 + update(key, v)
32 + return v
33 + }
34 + },
35 + set: function (key, value) {
36 + if(cache[key] !== undefined) cache[key] = value
37 + else update(key, value)
38 + },
39 + clear: function () {
40 + cache = Object.create(null)
41 + _cache = Object.create(null)
42 + }
43 + }
44 +}
45 +
46 +
47 +
48 +
49 +
50 +
51 +
node_modules/hashlru/package.jsonView
@@ -1,0 +1,56 @@
1 +{
2 + "_args": [
3 + [
4 + "hashlru@2.3.0",
5 + "/home/cel/src/ssb-npm-registry"
6 + ]
7 + ],
8 + "_from": "hashlru@2.3.0",
9 + "_id": "hashlru@2.3.0",
10 + "_inBundle": false,
11 + "_integrity": "sha256-A3SehgJzx5gtrxWIf4ckf/LoJWAzYsT7hFUiOVbt0hE=",
12 + "_location": "/hashlru",
13 + "_phantomChildren": {},
14 + "_requested": {
15 + "type": "version",
16 + "registry": true,
17 + "raw": "hashlru@2.3.0",
18 + "name": "hashlru",
19 + "escapedName": "hashlru",
20 + "rawSpec": "2.3.0",
21 + "saveSpec": null,
22 + "fetchSpec": "2.3.0"
23 + },
24 + "_requiredBy": [
25 + "/"
26 + ],
27 + "_resolved": "http://localhost:8989/blobs/get/&A3SehgJzx5gtrxWIf4ckf/LoJWAzYsT7hFUiOVbt0hE=.sha256",
28 + "_spec": "2.3.0",
29 + "_where": "/home/cel/src/ssb-npm-registry",
30 + "author": {
31 + "name": "'Dominic Tarr'",
32 + "email": "dominic.tarr@gmail.com",
33 + "url": "dominictarr.com"
34 + },
35 + "bugs": {
36 + "url": "https://github.com/dominictarr/hashlru/issues"
37 + },
38 + "dependencies": {},
39 + "description": "simpler faster substitute for LRU",
40 + "devDependencies": {
41 + "istanbul": "^0.4.5"
42 + },
43 + "homepage": "https://github.com/dominictarr/hashlru",
44 + "license": "MIT",
45 + "name": "hashlru",
46 + "repository": {
47 + "type": "git",
48 + "url": "git://github.com/dominictarr/hashlru.git"
49 + },
50 + "scripts": {
51 + "cov": "istanbul cover test/*.js",
52 + "test": "set -e; for t in test/*.js; do node $t; done"
53 + },
54 + "types": "index.d.ts",
55 + "version": "2.3.0"
56 +}
node_modules/hashlru/test/test.jsView
@@ -1,0 +1,46 @@
1 +var assert = require('assert')
2 +var HLRU = require('../')
3 +var lru = HLRU(2)
4 +
5 +// set-get:
6 +lru.set('test', 'test')
7 +
8 +assert.equal(lru.get('test'), 'test')
9 +
10 +// has:
11 +assert.equal(lru.has('test'), true)
12 +assert.equal(lru.has('blah'), false)
13 +
14 +// update:
15 +lru.set('test', 'test2')
16 +
17 +assert.equal(lru.get('test'), 'test2')
18 +
19 +// cache cycle:
20 +lru.set('test2', 'test')
21 +
22 +assert.equal(lru.get('test2'), 'test')
23 +
24 +// get previous after cache cycle:
25 +assert.equal(lru.get('test'), 'test2')
26 +
27 +// update new cache:
28 +lru.set('test2', 'test2')
29 +
30 +assert.equal(lru.get('test2'), 'test2')
31 +
32 +// object purity:
33 +assert.equal(lru.get('constructor'), undefined)
34 +
35 +// max validation:
36 +assert.throws(HLRU)
37 +
38 +// remove:
39 +assert.equal(lru.has('test2'), true)
40 +lru.remove('test2')
41 +assert.equal(lru.has('test2'), false)
42 +
43 +// clear
44 +assert.equal(lru.has('test'), true)
45 +lru.clear()
46 +assert.equal(lru.has('test'), false)
node_modules/inherits/LICENSEView
@@ -1,0 +1,16 @@
1 +The ISC License
2 +
3 +Copyright (c) Isaac Z. Schlueter
4 +
5 +Permission to use, copy, modify, and/or distribute this software for any
6 +purpose with or without fee is hereby granted, provided that the above
7 +copyright notice and this permission notice appear in all copies.
8 +
9 +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
10 +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
11 +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
12 +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
13 +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
14 +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
15 +PERFORMANCE OF THIS SOFTWARE.
16 +
node_modules/inherits/README.mdView
@@ -1,0 +1,42 @@
1 +Browser-friendly inheritance fully compatible with standard node.js
2 +[inherits](http://nodejs.org/api/util.html#util_util_inherits_constructor_superconstructor).
3 +
4 +This package exports standard `inherits` from node.js `util` module in
5 +node environment, but also provides alternative browser-friendly
6 +implementation through [browser
7 +field](https://gist.github.com/shtylman/4339901). Alternative
8 +implementation is a literal copy of standard one located in standalone
9 +module to avoid requiring of `util`. It also has a shim for old
10 +browsers with no `Object.create` support.
11 +
12 +While keeping you sure you are using standard `inherits`
13 +implementation in node.js environment, it allows bundlers such as
14 +[browserify](https://github.com/substack/node-browserify) to not
15 +include full `util` package to your client code if all you need is
16 +just `inherits` function. It worth, because browser shim for `util`
17 +package is large and `inherits` is often the single function you need
18 +from it.
19 +
20 +It's recommended to use this package instead of
21 +`require('util').inherits` for any code that has chances to be used
22 +not only in node.js but in browser too.
23 +
24 +## usage
25 +
26 +```js
27 +var inherits = require('inherits');
28 +// then use exactly as the standard one
29 +```
30 +
31 +## note on version ~1.0
32 +
33 +Version ~1.0 had completely different motivation and is not compatible
34 +neither with 2.0 nor with standard node.js `inherits`.
35 +
36 +If you are using version ~1.0 and planning to switch to ~2.0, be
37 +careful:
38 +
39 +* new version uses `super_` instead of `super` for referencing
40 + superclass
41 +* new version overwrites current prototype while old one preserves any
42 + existing fields on it
node_modules/inherits/inherits.jsView
@@ -1,0 +1,7 @@
1 +try {
2 + var util = require('util');
3 + if (typeof util.inherits !== 'function') throw '';
4 + module.exports = util.inherits;
5 +} catch (e) {
6 + module.exports = require('./inherits_browser.js');
7 +}
node_modules/inherits/inherits_browser.jsView
@@ -1,0 +1,23 @@
1 +if (typeof Object.create === 'function') {
2 + // implementation from standard node.js 'util' module
3 + module.exports = function inherits(ctor, superCtor) {
4 + ctor.super_ = superCtor
5 + ctor.prototype = Object.create(superCtor.prototype, {
6 + constructor: {
7 + value: ctor,
8 + enumerable: false,
9 + writable: true,
10 + configurable: true
11 + }
12 + });
13 + };
14 +} else {
15 + // old school shim for old browsers
16 + module.exports = function inherits(ctor, superCtor) {
17 + ctor.super_ = superCtor
18 + var TempCtor = function () {}
19 + TempCtor.prototype = superCtor.prototype
20 + ctor.prototype = new TempCtor()
21 + ctor.prototype.constructor = ctor
22 + }
23 +}
node_modules/inherits/package.jsonView
@@ -1,0 +1,65 @@
1 +{
2 + "_args": [
3 + [
4 + "inherits@2.0.3",
5 + "/home/cel/src/ssb-npm-registry"
6 + ]
7 + ],
8 + "_from": "inherits@2.0.3",
9 + "_id": "inherits@2.0.3",
10 + "_inBundle": false,
11 + "_integrity": "sha256-f19Y6bVOh+JkeG5+hNngeKr2jBAD3p+miUUQHgI1bN8=",
12 + "_location": "/inherits",
13 + "_phantomChildren": {},
14 + "_requested": {
15 + "type": "version",
16 + "registry": true,
17 + "raw": "inherits@2.0.3",
18 + "name": "inherits",
19 + "escapedName": "inherits",
20 + "rawSpec": "2.0.3",
21 + "saveSpec": null,
22 + "fetchSpec": "2.0.3"
23 + },
24 + "_requiredBy": [
25 + "/readable-stream",
26 + "/tar-stream"
27 + ],
28 + "_resolved": "http://localhost:8989/blobs/get/&f19Y6bVOh+JkeG5+hNngeKr2jBAD3p+miUUQHgI1bN8=.sha256",
29 + "_spec": "2.0.3",
30 + "_where": "/home/cel/src/ssb-npm-registry",
31 + "browser": "./inherits_browser.js",
32 + "bugs": {
33 + "url": "https://github.com/isaacs/inherits/issues"
34 + },
35 + "description": "Browser-friendly inheritance fully compatible with standard node.js inherits()",
36 + "devDependencies": {
37 + "tap": "^7.1.0"
38 + },
39 + "files": [
40 + "inherits.js",
41 + "inherits_browser.js"
42 + ],
43 + "homepage": "https://github.com/isaacs/inherits#readme",
44 + "keywords": [
45 + "inheritance",
46 + "class",
47 + "klass",
48 + "oop",
49 + "object-oriented",
50 + "inherits",
51 + "browser",
52 + "browserify"
53 + ],
54 + "license": "ISC",
55 + "main": "./inherits.js",
56 + "name": "inherits",
57 + "repository": {
58 + "type": "git",
59 + "url": "git://github.com/isaacs/inherits.git"
60 + },
61 + "scripts": {
62 + "test": "node test"
63 + },
64 + "version": "2.0.3"
65 +}
node_modules/looper/.travis.ymlView
@@ -1,0 +1,4 @@
1 +language: node_js
2 +node_js:
3 + - 0.8
4 + - '0.10'
node_modules/looper/LICENSEView
@@ -1,0 +1,22 @@
1 +Copyright (c) 2013 Dominic Tarr
2 +
3 +Permission is hereby granted, free of charge,
4 +to any person obtaining a copy of this software and
5 +associated documentation files (the "Software"), to
6 +deal in the Software without restriction, including
7 +without limitation the rights to use, copy, modify,
8 +merge, publish, distribute, sublicense, and/or sell
9 +copies of the Software, and to permit persons to whom
10 +the Software is furnished to do so,
11 +subject to the following conditions:
12 +
13 +The above copyright notice and this permission notice
14 +shall be included in all copies or substantial portions of the Software.
15 +
16 +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
17 +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
18 +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
19 +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
20 +ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
node_modules/looper/README.mdView
@@ -1,0 +1,43 @@
1 +# looper
2 +
3 +Loop with callbacks but don't RangeError
4 +
5 +[![travis](https://travis-ci.org/dominictarr/looper.png?branch=master)
6 +](https://travis-ci.org/dominictarr/looper)
7 +
8 +[![testling](http://ci.testling.com/dominictarr/looper.png)
9 +](http://ci.testling.com/dominictarr/looper)
10 +
11 +## Synopsis
12 +
13 +Normally, if `mightBeAsync` calls its cb immediately
14 +this would `RangeError`:
15 +
16 +``` js
17 +var l = 100000
18 +;(function next () {
19 + if(--l) mightBeAsync(next)
20 +})
21 +```
22 +
23 +`looper` detects that case, and falls back to a `while` loop,
24 +
25 +## Example
26 +
27 +``` js
28 +var loop = require('looper')
29 +
30 +var l = 100000
31 +loop(function () {
32 + var next = this
33 + if(--l) probablySync(next)
34 +})()
35 +```
36 +
37 +when you want to stop looping, don't call `next`.
38 +`looper` checks if each callback is sync or not,
39 +so you can even mix sync and async calls!
40 +
41 +## License
42 +
43 +MIT
node_modules/looper/index.jsView
@@ -1,0 +1,14 @@
1 +
2 +var looper = module.exports = function (fun) {
3 + (function next () {
4 + var loop = true, returned = false, sync = false
5 + do {
6 + sync = true; loop = false
7 + fun.call(this, function () {
8 + if(sync) loop = true
9 + else next()
10 + })
11 + sync = false
12 + } while(loop)
13 + })()
14 +}
node_modules/looper/package.jsonView
@@ -1,0 +1,70 @@
1 +{
2 + "_args": [
3 + [
4 + "looper@3.0.0",
5 + "/home/cel/src/ssb-npm-registry"
6 + ]
7 + ],
8 + "_from": "looper@3.0.0",
9 + "_id": "looper@3.0.0",
10 + "_inBundle": false,
11 + "_integrity": "sha256-UKhTvWst16euOATlhckl4mr/zcz+aByyUJO11lMf+Jc=",
12 + "_location": "/looper",
13 + "_phantomChildren": {},
14 + "_requested": {
15 + "type": "version",
16 + "registry": true,
17 + "raw": "looper@3.0.0",
18 + "name": "looper",
19 + "escapedName": "looper",
20 + "rawSpec": "3.0.0",
21 + "saveSpec": null,
22 + "fetchSpec": "3.0.0"
23 + },
24 + "_requiredBy": [
25 + "/stream-to-pull-stream"
26 + ],
27 + "_resolved": "http://localhost:8989/blobs/get/&UKhTvWst16euOATlhckl4mr/zcz+aByyUJO11lMf+Jc=.sha256",
28 + "_spec": "3.0.0",
29 + "_where": "/home/cel/src/ssb-npm-registry",
30 + "author": {
31 + "name": "Dominic Tarr",
32 + "email": "dominic.tarr@gmail.com",
33 + "url": "http://dominictarr.com"
34 + },
35 + "bugs": {
36 + "url": "https://github.com/dominictarr/looper/issues"
37 + },
38 + "dependencies": {},
39 + "description": "async loops that never RangeError",
40 + "devDependencies": {
41 + "tape": "1.0.4"
42 + },
43 + "homepage": "https://github.com/dominictarr/looper",
44 + "license": "MIT",
45 + "name": "looper",
46 + "repository": {
47 + "type": "git",
48 + "url": "git://github.com/dominictarr/looper.git"
49 + },
50 + "scripts": {
51 + "test": "set -e; for t in test/*.js; do node $t; done"
52 + },
53 + "testling": {
54 + "files": "test/*.js",
55 + "browsers": [
56 + "ie/8..latest",
57 + "firefox/17..latest",
58 + "firefox/nightly",
59 + "chrome/22..latest",
60 + "chrome/canary",
61 + "opera/12..latest",
62 + "opera/next",
63 + "safari/5.1..latest",
64 + "ipad/6.0..latest",
65 + "iphone/6.0..latest",
66 + "android-browser/4.2..latest"
67 + ]
68 + },
69 + "version": "3.0.0"
70 +}
node_modules/looper/test/test.jsView
@@ -1,0 +1,37 @@
1 +
2 +var tape = require('tape')
3 +var looper = require('../')
4 +
5 +tape('n=1000000, with no RangeError', function (t) {
6 + var n = 1000000, c = 0
7 + looper(function (next) {
8 + c ++
9 + if(--n) return next()
10 + t.equal(c, 1000000)
11 + t.end()
12 + })
13 +})
14 +
15 +tape('async is okay', function (t) {
16 +
17 + var n = 100, c = 0
18 + looper(function (next) {
19 + c ++
20 + if(--n) return setTimeout(next)
21 + t.equal(c, 100)
22 + t.end()
23 + })
24 +
25 +})
26 +
27 +tape('sometimes async is okay', function (t) {
28 + var i = 1000; c = 0
29 + looper(function (next) {
30 + c++
31 + if(--i) return Math.random() < 0.1 ? setTimeout(next) : next()
32 + t.equal(c, 1000)
33 + t.end()
34 + })
35 +
36 +})
37 +
node_modules/multicb/README.mdView
@@ -1,0 +1,54 @@
1 +# MultiCB
2 +
3 +Simple way to aggregate multiple node-style callbacks
4 +
5 +```js
6 +var multicb = require('multicb')
7 +
8 +// default usage
9 +
10 +var done = multicb()
11 +doAsync(done())
12 +doAsync(done())
13 +doAsync(done())
14 +done(function(err, results) {
15 + console.log(err) // => undefined
16 + console.log(results) /* =>
17 + [
18 + [undefined, 'foo'],
19 + [undefined, 'bar'],
20 + [undefined, 'baz']
21 + ]
22 + */
23 +})
24 +
25 +// pluck argument
26 +
27 +var done = multicb({ pluck: 1 })
28 +doAsync(done())
29 +doAsync(done())
30 +doAsync(done())
31 +done(function(err, results) {
32 + console.log(err) // => undefined
33 + console.log(results) /* =>
34 + [
35 + 'foo',
36 + 'bar',
37 + 'baz'
38 + ]
39 + */
40 +})
41 +
42 +// spread argument
43 +
44 +var done = multicb({ pluck: 1, spread: true })
45 +doAsync(done())
46 +doAsync(done())
47 +doAsync(done())
48 +done(function(err, a, b, c) {
49 + console.log(err) // => undefined
50 + console.log(a) // => 'foo'
51 + console.log(b) // => 'bar'
52 + console.log(c) // => 'baz'
53 +})
54 +```
node_modules/multicb/index.jsView
@@ -1,0 +1,46 @@
1 +module.exports = function(allopts) {
2 + var n = 0, m = 0, _cb, results = [], _err;
3 + function o (k, d) { return allopts && allopts[k] !== void 0 ? allopts[k] : d }
4 +
5 + return function(cb) {
6 + if (cb) {
7 + results.length = m
8 +
9 + if(_err) {
10 + var err = _err; _err = null
11 + return cb(err)
12 + }
13 + if(n == m) {
14 + if (o('spread'))
15 + return cb.apply(null, [null].concat(results))
16 + else
17 + return cb(null, results)
18 + }
19 +
20 + _cb = cb
21 + return
22 + }
23 +
24 + var i = m++
25 + return function (err) {
26 + if (err) {
27 + if (_err) return
28 + _err = err
29 + n = -1 // stop
30 + if (_cb) _cb(err)
31 + } else {
32 + n++
33 + if (o('pluck'))
34 + results[i] = arguments[o('pluck')]
35 + else
36 + results[i] = Array.prototype.slice.call(arguments)
37 + if (n === m && _cb) {
38 + if (o('spread'))
39 + _cb.apply(null, [null].concat(results))
40 + else
41 + _cb(null, results)
42 + }
43 + }
44 + }
45 + }
46 +}
node_modules/multicb/package.jsonView
@@ -1,0 +1,48 @@
1 +{
2 + "_args": [
3 + [
4 + "multicb@1.2.2",
5 + "/home/cel/src/ssb-npm-registry"
6 + ]
7 + ],
8 + "_from": "multicb@1.2.2",
9 + "_id": "multicb@1.2.2",
10 + "_inBundle": false,
11 + "_integrity": "sha256-Z6lAzBXF2f5DqwKHdL9yaArwseg8iYuljWDmEs6VcT8=",
12 + "_location": "/multicb",
13 + "_phantomChildren": {},
14 + "_requested": {
15 + "type": "version",
16 + "registry": true,
17 + "raw": "multicb@1.2.2",
18 + "name": "multicb",
19 + "escapedName": "multicb",
20 + "rawSpec": "1.2.2",
21 + "saveSpec": null,
22 + "fetchSpec": "1.2.2"
23 + },
24 + "_requiredBy": [
25 + "/"
26 + ],
27 + "_resolved": "http://localhost:8989/blobs/get/&Z6lAzBXF2f5DqwKHdL9yaArwseg8iYuljWDmEs6VcT8=.sha256",
28 + "_spec": "1.2.2",
29 + "_where": "/home/cel/src/ssb-npm-registry",
30 + "bugs": {
31 + "url": "https://github.com/pfrazee/multicb/issues"
32 + },
33 + "dependencies": {},
34 + "description": "Simple way to aggregate multiple node-style callbacks",
35 + "devDependencies": {
36 + "interleavings": "~0.3.0"
37 + },
38 + "homepage": "https://github.com/pfrazee/multicb#readme",
39 + "name": "multicb",
40 + "repository": {
41 + "type": "git",
42 + "url": "git+https://github.com/pfrazee/multicb.git"
43 + },
44 + "scripts": {
45 + "test": "set -e; for t in test/*.js; do node $t; done"
46 + },
47 + "version": "1.2.2"
48 +}
node_modules/multicb/test/errors-multi.jsView
@@ -1,0 +1,14 @@
1 +'use strict'
2 +var multicb = require('../')
3 +var t = require('assert')
4 +
5 +var done = multicb()
6 +var cbs = [done(), done()]
7 +var called = 0
8 +done(function(err, results) {
9 + called++
10 + t.equal(results, void 0)
11 +})
12 +cbs[0]('fail')
13 +cbs[1]('fail')
14 +t.equal(called, 1)
node_modules/multicb/test/errors.jsView
@@ -1,0 +1,21 @@
1 +'use strict'
2 +var multicb = require('../')
3 +var t = require('assert')
4 +
5 +require('interleavings').test(function (isAsync) {
6 +
7 + function async(cb, delay, args) {
8 + isAsync(function() { cb.apply(null, args) })()
9 + }
10 +
11 + var done = multicb()
12 + async(done(), 5, [null, 1])
13 + async(done(), 15, [null, 2])
14 + async(done(), 10, ['fail'])
15 + done(function(err, results) {
16 + t.equal(err, 'fail')
17 + t.equal(results, void 0)
18 + isAsync.done()
19 + })
20 +
21 +})
node_modules/multicb/test/multicb.jsView
@@ -1,0 +1,24 @@
1 +'use strict'
2 +var multicb = require('../')
3 +var t = require('assert')
4 +
5 +require('interleavings').test(function (isAsync) {
6 +
7 + function async(cb, delay, args) {
8 + isAsync(function() { cb.apply(null, args) })()
9 + }
10 +
11 + var done = multicb()
12 + async(done(), 5, [null, 1])
13 + async(done(), 15, [null, 2])
14 + async(done(), 10, [null, 3])
15 + done(function(err, results) {
16 + console.log('done')
17 + t.equal(err, null)
18 + t.equal(results[0][1], 1)
19 + t.equal(results[1][1], 2)
20 + t.equal(results[2][1], 3)
21 + isAsync.done()
22 + })
23 +
24 +})
node_modules/multicb/test/pluck.jsView
@@ -1,0 +1,24 @@
1 +'use strict'
2 +var multicb = require('../')
3 +var t = require('assert')
4 +
5 +require('interleavings').test(function (isAsync) {
6 +
7 + function async(cb, delay, args) {
8 + isAsync(function() { cb.apply(null, args) })()
9 + }
10 +
11 + var done = multicb({ pluck: 1 })
12 + async(done(), 5, [null, 1])
13 + async(done(), 15, [null, 2])
14 + async(done(), 10, [null, 3])
15 + done(function(err, results) {
16 + console.log('done')
17 + t.equal(err, null)
18 + t.equal(results[0], 1)
19 + t.equal(results[1], 2)
20 + t.equal(results[2], 3)
21 + isAsync.done()
22 + })
23 +
24 +})
node_modules/multicb/test/spread.jsView
@@ -1,0 +1,24 @@
1 +'use strict'
2 +var multicb = require('../')
3 +var t = require('assert')
4 +
5 +require('interleavings').test(function (isAsync) {
6 +
7 + function async(cb, delay, args) {
8 + isAsync(function() { cb.apply(null, args) })()
9 + }
10 +
11 + var done = multicb({ pluck: 1, spread: true })
12 + async(done(), 5, [null, 1])
13 + async(done(), 15, [null, 2])
14 + async(done(), 10, [null, 3])
15 + done(function(err, first, second, third) {
16 + console.log('done')
17 + t.equal(err, null)
18 + t.equal(first, 1)
19 + t.equal(second, 2)
20 + t.equal(third, 3)
21 + isAsync.done()
22 + })
23 +
24 +})
node_modules/once/LICENSEView
@@ -1,0 +1,15 @@
1 +The ISC License
2 +
3 +Copyright (c) Isaac Z. Schlueter and Contributors
4 +
5 +Permission to use, copy, modify, and/or distribute this software for any
6 +purpose with or without fee is hereby granted, provided that the above
7 +copyright notice and this permission notice appear in all copies.
8 +
9 +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
10 +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
11 +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
12 +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
13 +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
14 +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
15 +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
node_modules/once/README.mdView
@@ -1,0 +1,79 @@
1 +# once
2 +
3 +Only call a function once.
4 +
5 +## usage
6 +
7 +```javascript
8 +var once = require('once')
9 +
10 +function load (file, cb) {
11 + cb = once(cb)
12 + loader.load('file')
13 + loader.once('load', cb)
14 + loader.once('error', cb)
15 +}
16 +```
17 +
18 +Or add to the Function.prototype in a responsible way:
19 +
20 +```javascript
21 +// only has to be done once
22 +require('once').proto()
23 +
24 +function load (file, cb) {
25 + cb = cb.once()
26 + loader.load('file')
27 + loader.once('load', cb)
28 + loader.once('error', cb)
29 +}
30 +```
31 +
32 +Ironically, the prototype feature makes this module twice as
33 +complicated as necessary.
34 +
35 +To check whether your function has been called, use `fn.called`. Once the
36 +function is called for the first time the return value of the original
37 +function is saved in `fn.value` and subsequent calls will continue to
38 +return this value.
39 +
40 +```javascript
41 +var once = require('once')
42 +
43 +function load (cb) {
44 + cb = once(cb)
45 + var stream = createStream()
46 + stream.once('data', cb)
47 + stream.once('end', function () {
48 + if (!cb.called) cb(new Error('not found'))
49 + })
50 +}
51 +```
52 +
53 +## `once.strict(func)`
54 +
55 +Throw an error if the function is called twice.
56 +
57 +Some functions are expected to be called only once. Using `once` for them would
58 +potentially hide logical errors.
59 +
60 +In the example below, the `greet` function has to call the callback only once:
61 +
62 +```javascript
63 +function greet (name, cb) {
64 + // return is missing from the if statement
65 + // when no name is passed, the callback is called twice
66 + if (!name) cb('Hello anonymous')
67 + cb('Hello ' + name)
68 +}
69 +
70 +function log (msg) {
71 + console.log(msg)
72 +}
73 +
74 +// this will print 'Hello anonymous' but the logical error will be missed
75 +greet(null, once(msg))
76 +
77 +// once.strict will print 'Hello anonymous' and throw an error when the callback will be called the second time
78 +greet(null, once.strict(msg))
79 +```
node_modules/once/once.jsView
@@ -1,0 +1,42 @@
1 +var wrappy = require('wrappy')
2 +module.exports = wrappy(once)
3 +module.exports.strict = wrappy(onceStrict)
4 +
5 +once.proto = once(function () {
6 + Object.defineProperty(Function.prototype, 'once', {
7 + value: function () {
8 + return once(this)
9 + },
10 + configurable: true
11 + })
12 +
13 + Object.defineProperty(Function.prototype, 'onceStrict', {
14 + value: function () {
15 + return onceStrict(this)
16 + },
17 + configurable: true
18 + })
19 +})
20 +
21 +function once (fn) {
22 + var f = function () {
23 + if (f.called) return f.value
24 + f.called = true
25 + return f.value = fn.apply(this, arguments)
26 + }
27 + f.called = false
28 + return f
29 +}
30 +
31 +function onceStrict (fn) {
32 + var f = function () {
33 + if (f.called)
34 + throw new Error(f.onceError)
35 + f.called = true
36 + return f.value = fn.apply(this, arguments)
37 + }
38 + var name = fn.name || 'Function wrapped with `once`'
39 + f.onceError = name + " shouldn't be called more than once"
40 + f.called = false
41 + return f
42 +}
node_modules/once/package.jsonView
@@ -1,0 +1,69 @@
1 +{
2 + "_args": [
3 + [
4 + "once@1.4.0",
5 + "/home/cel/src/ssb-npm-registry"
6 + ]
7 + ],
8 + "_from": "once@1.4.0",
9 + "_id": "once@1.4.0",
10 + "_inBundle": false,
11 + "_integrity": "sha256-z1FGC6Nwxpj2i5duUU0RNJczm6AYtgA+jo61acb8z88=",
12 + "_location": "/once",
13 + "_phantomChildren": {},
14 + "_requested": {
15 + "type": "version",
16 + "registry": true,
17 + "raw": "once@1.4.0",
18 + "name": "once",
19 + "escapedName": "once",
20 + "rawSpec": "1.4.0",
21 + "saveSpec": null,
22 + "fetchSpec": "1.4.0"
23 + },
24 + "_requiredBy": [
25 + "/end-of-stream"
26 + ],
27 + "_resolved": "http://localhost:8989/blobs/get/&z1FGC6Nwxpj2i5duUU0RNJczm6AYtgA+jo61acb8z88=.sha256",
28 + "_spec": "1.4.0",
29 + "_where": "/home/cel/src/ssb-npm-registry",
30 + "author": {
31 + "name": "Isaac Z. Schlueter",
32 + "email": "i@izs.me",
33 + "url": "http://blog.izs.me/"
34 + },
35 + "bugs": {
36 + "url": "https://github.com/isaacs/once/issues"
37 + },
38 + "dependencies": {
39 + "wrappy": "1"
40 + },
41 + "description": "Run a function exactly one time",
42 + "devDependencies": {
43 + "tap": "^7.0.1"
44 + },
45 + "directories": {
46 + "test": "test"
47 + },
48 + "files": [
49 + "once.js"
50 + ],
51 + "homepage": "https://github.com/isaacs/once#readme",
52 + "keywords": [
53 + "once",
54 + "function",
55 + "one",
56 + "single"
57 + ],
58 + "license": "ISC",
59 + "main": "once.js",
60 + "name": "once",
61 + "repository": {
62 + "type": "git",
63 + "url": "git://github.com/isaacs/once.git"
64 + },
65 + "scripts": {
66 + "test": "tap test/*.js"
67 + },
68 + "version": "1.4.0"
69 +}
node_modules/pull-cat/.npmignoreView
@@ -1,0 +1,3 @@
1 +node_modules
2 +node_modules/*
3 +npm_debug.log
node_modules/pull-cat/.travis.ymlView
@@ -1,0 +1,5 @@
1 +sudo: false
2 +language: node_js
3 +node_js:
4 + - stable
5 + - 0.10
node_modules/pull-cat/LICENSEView
@@ -1,0 +1,22 @@
1 +Copyright (c) 2013 Dominic Tarr
2 +
3 +Permission is hereby granted, free of charge,
4 +to any person obtaining a copy of this software and
5 +associated documentation files (the "Software"), to
6 +deal in the Software without restriction, including
7 +without limitation the rights to use, copy, modify,
8 +merge, publish, distribute, sublicense, and/or sell
9 +copies of the Software, and to permit persons to whom
10 +the Software is furnished to do so,
11 +subject to the following conditions:
12 +
13 +The above copyright notice and this permission notice
14 +shall be included in all copies or substantial portions of the Software.
15 +
16 +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
17 +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
18 +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
19 +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
20 +ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
node_modules/pull-cat/README.mdView
@@ -1,0 +1,56 @@
1 +# pull-cat
2 +
3 +> Concatenate pull-streams
4 +
5 +## Install
6 +
7 +```shell
8 +npm install --save pull-cat
9 +```
10 +
11 +## Example
12 +
13 +Construct a new source stream from a sequential list of source streams,
14 +reading from each one in turn until it ends, then the next, etc.
15 +If one stream errors, then the rest of the streams are aborted immediately.
16 +If the cat stream is aborted (i.e. if its sink errors) then all the streams
17 +are aborted.
18 +
19 +A cat stream is a moderately challenging stream to implement,
20 +especially in the context of error states.
21 +
22 +```js
23 +var cat = require('pull-cat')
24 +var pull = require('pull-stream')
25 +
26 +pull(
27 + cat([
28 + pull.values([1,2,3]),
29 + pull.values([4,5,6])
30 + ]),
31 + pull.log()
32 +)
33 +// 1
34 +// 2
35 +// 3
36 +// 4
37 +// 5
38 +// 6
39 +```
40 +
41 +
42 +## Api
43 +
44 +### `cat = require('pull-cat')`
45 +
46 +### `stream = cat(streams)`
47 +
48 +Reads from each stream in `streams` until finished.
49 +
50 +If a stream errors, stop all the streams.
51 +if the concatenated stream is aborted, abort all the streams,
52 +then callback to the aborter.
53 +
54 +## License
55 +
56 +MIT
node_modules/pull-cat/index.jsView
@@ -1,0 +1,41 @@
1 +var noop = function () {}
2 +
3 +function abortAll(ary, abort, cb) {
4 + var n = ary.length
5 + if(!n) return cb(abort)
6 + ary.forEach(function (f) {
7 + if(f) f(abort, next)
8 + else next()
9 + })
10 +
11 + function next() {
12 + if(--n) return
13 + cb(abort)
14 + }
15 + if(!n) next()
16 +}
17 +
18 +module.exports = function (streams) {
19 + return function (abort, cb) {
20 + ;(function next () {
21 + if(abort)
22 + abortAll(streams, abort, cb)
23 + else if(!streams.length)
24 + cb(true)
25 + else if(!streams[0])
26 + streams.shift(), next()
27 + else
28 + streams[0](null, function (err, data) {
29 + if(err) {
30 + streams.shift() //drop the first, has already ended.
31 + if(err === true) next()
32 + else abortAll(streams, err, cb)
33 + }
34 + else
35 + cb(null, data)
36 + })
37 + })()
38 + }
39 +}
40 +
41 +
node_modules/pull-cat/package.jsonView
@@ -1,0 +1,56 @@
1 +{
2 + "_args": [
3 + [
4 + "pull-cat@1.1.11",
5 + "/home/cel/src/ssb-npm-registry"
6 + ]
7 + ],
8 + "_from": "pull-cat@1.1.11",
9 + "_id": "pull-cat@1.1.11",
10 + "_inBundle": false,
11 + "_integrity": "sha256-+uVE8RHNwJIJa68sQGICGbxnCTCOdLLhSt/Pb4NmgL0=",
12 + "_location": "/pull-cat",
13 + "_phantomChildren": {},
14 + "_requested": {
15 + "type": "version",
16 + "registry": true,
17 + "raw": "pull-cat@1.1.11",
18 + "name": "pull-cat",
19 + "escapedName": "pull-cat",
20 + "rawSpec": "1.1.11",
21 + "saveSpec": null,
22 + "fetchSpec": "1.1.11"
23 + },
24 + "_requiredBy": [
25 + "/"
26 + ],
27 + "_resolved": "http://localhost:8989/blobs/get/&+uVE8RHNwJIJa68sQGICGbxnCTCOdLLhSt/Pb4NmgL0=.sha256",
28 + "_spec": "1.1.11",
29 + "_where": "/home/cel/src/ssb-npm-registry",
30 + "author": {
31 + "name": "Dominic Tarr",
32 + "email": "dominic.tarr@gmail.com",
33 + "url": "http://dominictarr.com"
34 + },
35 + "bugs": {
36 + "url": "https://github.com/pull-stream/pull-cat/issues"
37 + },
38 + "description": "concatenate pull-streams",
39 + "devDependencies": {
40 + "pull-abortable": "~4.1.0",
41 + "pull-pushable": "~2.0.1",
42 + "pull-stream": "^3.4.2",
43 + "tape": "~4.6"
44 + },
45 + "homepage": "https://github.com/pull-stream/pull-cat",
46 + "license": "MIT",
47 + "name": "pull-cat",
48 + "repository": {
49 + "type": "git",
50 + "url": "git://github.com/pull-stream/pull-cat.git"
51 + },
52 + "scripts": {
53 + "test": "set -e; for t in test/*.js; do node $t; done"
54 + },
55 + "version": "1.1.11"
56 +}
node_modules/pull-cat/test/index.jsView
@@ -1,0 +1,159 @@
1 +var pull = require('pull-stream')
2 +var cat = require('../')
3 +var test = require('tape')
4 +var Pushable = require('pull-pushable')
5 +var Abortable = require('pull-abortable')
6 +
7 +test('cat', function (t) {
8 + pull(
9 + cat([pull.values([1,2,3]), pull.values([4,5,6])]),
10 + pull.collect(function (err, ary) {
11 + console.log(err, ary)
12 + t.notOk(err)
13 + t.deepEqual(ary, [1,2,3,4,5,6])
14 + t.end()
15 + })
16 + )
17 +})
18 +
19 +test('cat - with empty', function (t) {
20 + pull(
21 + cat([pull.values([1,2,3]), null, pull.values([4,5,6])]),
22 + pull.collect(function (err, ary) {
23 + console.log(err, ary)
24 + t.notOk(err)
25 + t.deepEqual(ary, [1,2,3,4,5,6])
26 + t.end()
27 + })
28 + )
29 +})
30 +
31 +test('cat - with empty stream', function (t) {
32 + var ended = false
33 + var justEnd = function (err, cb) { ended = true; cb(true) }
34 +
35 + pull(
36 + cat([pull.values([1,2,3]), justEnd, pull.values([4,5,6])]),
37 + pull.collect(function (err, ary) {
38 + console.log(err, ary)
39 + t.ok(ended)
40 + t.notOk(err)
41 + t.deepEqual(ary, [1,2,3,4,5,6])
42 + t.end()
43 + })
44 + )
45 +})
46 +
47 +
48 +
49 +test('abort - with empty', function (t) {
50 + pull(
51 + cat([pull.values([1,2,3]), null, pull.values([4,5,6])]),
52 + function (read) {
53 + read(true, function (err) {
54 + t.equal(err, true)
55 + t.end()
56 + })
57 + }
58 + )
59 +})
60 +
61 +test('error', function (t) {
62 + var err = new Error('test error')
63 + pull(
64 + cat([pull.values([1,2,3]), function (_, cb) {
65 + cb(err)
66 + }]),
67 + pull.collect(function (_err) {
68 + console.log('COLLECT END', _err)
69 + t.equal(_err, err)
70 + t.end()
71 + })
72 + )
73 +})
74 +
75 +test('abort stalled', function (t) {
76 + var err = new Error('intentional'), n = 2
77 + var abortable = Abortable()
78 + var pushable = Pushable(function (_err) {
79 + t.equal(_err, err)
80 + next()
81 + })
82 +
83 + pushable.push(4)
84 +
85 + pull(
86 + cat([pull.values([1,2,3]), undefined, pushable]),
87 + abortable,
88 + pull.drain(function (item) {
89 + if(item == 4)
90 + process.nextTick(function () {
91 + abortable.abort(err)
92 + })
93 + }, function (err) {
94 + next()
95 + })
96 + )
97 +
98 + function next () {
99 + if(--n) return
100 + t.end()
101 + }
102 +})
103 +
104 +test('abort empty', function (t) {
105 + cat([])(true, function (end) {
106 + t.equal(end, true)
107 + t.end()
108 + })
109 +})
110 +
111 +test('error + undefined', function (t) {
112 + var err = new Error('test error')
113 + pull(
114 + cat([pull.values([1,2,3]), function (_, cb) {
115 + cb(err)
116 + }, undefined]),
117 + pull.collect(function (_err) {
118 + t.equal(_err, err)
119 + t.end()
120 + })
121 + )
122 +})
123 +
124 +test('take cat', function (t) {
125 + pull(
126 + cat([
127 + pull(pull.values([1,2,3]), pull.take(2)),
128 + pull(pull.values([8,7,6,5]), pull.take(3)),
129 + ]),
130 + pull.collect(function (err, data) {
131 + t.error(err)
132 + t.deepEqual(data, [1,2,8,7,6])
133 + t.end()
134 + })
135 + )
136 +})
137 +
138 +test('abort streams after error', function (t) {
139 + var err = new Error('test error')
140 + var aborted = false
141 + pull(
142 + cat([pull.values([1,2,3]), function (_, cb) {
143 + cb(err)
144 + }, function (_err, cb) {
145 + //this stream should be aborted.
146 + aborted = true
147 + t.strictEqual(_err, err)
148 + cb()
149 + }]),
150 + pull.collect(function (_err) {
151 + t.equal(aborted, true)
152 + t.equal(_err, err)
153 + t.end()
154 + })
155 + )
156 +})
157 +
158 +
159 +
node_modules/pull-file/.npmignoreView
@@ -1,0 +1,2 @@
1 +node_modules
2 +.DS_Store
node_modules/pull-file/.travis.ymlView
@@ -1,0 +1,9 @@
1 +sudo: false
2 +language: node_js
3 +node_js:
4 + - 4.2
5 + - 5.4
6 +
7 +notifications:
8 + email:
9 + - damon.oehlman@gmail.com
node_modules/pull-file/README.mdView
@@ -1,0 +1,58 @@
1 +# pull-file
2 +
3 +a pull-streaming file reader, built directly on the low-level stream functions,
4 +bypassing node's fs streams.
5 +
6 +[![NPM](https://nodei.co/npm/pull-file.png)](https://nodei.co/npm/pull-file/)
7 +
8 +[![Build Status](https://img.shields.io/travis/pull-stream/pull-file.svg?branch=master)](https://travis-ci.org/pull-stream/pull-file)
9 +
10 +## Example Usage
11 +
12 +```js
13 +var file = require('pull-file');
14 +var pull = require('pull-stream');
15 +var path = require('path');
16 +var inputFile = path.resolve(__dirname, '../test/assets/ipsum.txt');
17 +
18 +pull(
19 + file(inputFile, { bufferSize: 40 }),
20 + pull.take(4),
21 + pull.drain(function(buffer) {
22 + console.log(buffer.toString());
23 + })
24 +);
25 +```
26 +## options
27 +
28 +this supports all the options that node's [fs.createReadStream](https://nodejs.org/dist/latest-v6.x/docs/api/fs.html#fs_fs_createreadstream_path_options) supports,
29 +and _also_ this supports a `live: true` property which will keep the stream open and wait for appends
30 +when it gets to the end and an explicit `buffer` option where your chunks will be read to.
31 +Note that if your downstream operations are async you may run into concurrency
32 +issues with this option. Use at your own risk!
33 +
34 +
35 +## License(s)
36 +
37 +### MIT
38 +
39 +Copyright (c) 2014 Damon Oehlman <damon.oehlman@gmail.com>
40 +
41 +Permission is hereby granted, free of charge, to any person obtaining
42 +a copy of this software and associated documentation files (the
43 +'Software'), to deal in the Software without restriction, including
44 +without limitation the rights to use, copy, modify, merge, publish,
45 +distribute, sublicense, and/or sell copies of the Software, and to
46 +permit persons to whom the Software is furnished to do so, subject to
47 +the following conditions:
48 +
49 +The above copyright notice and this permission notice shall be
50 +included in all copies or substantial portions of the Software.
51 +
52 +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
53 +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
54 +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
55 +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
56 +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
57 +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
58 +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
node_modules/pull-file/examples/bench.jsView
@@ -1,0 +1,74 @@
1 +
2 +var Stats = require('statistics')
3 +var cont = require('cont')
4 +
5 +function bench (a, b, N) {
6 +
7 + N = N || 20,n = N
8 + var sA = Stats(), sB = Stats()
9 + var results_a = [], results_b = []
10 + var A, B
11 +
12 + var wins = 0
13 +
14 + ;(function next () {
15 +
16 + cont.series([
17 + function (cb) {
18 + a(function (err, data) {
19 + var time = data.time/1000
20 + var size = data.total/(1024*1024)
21 + sA.value(size/time) //bytes per ms
22 + results_a.push(A = data)
23 + cb()
24 +
25 + })
26 + },
27 + function (cb) {
28 + b(function (err, data) {
29 + var time = data.time/1000
30 + var size = data.total/(1024*1024)
31 + sB.value(size/time) //bytes per ms
32 + results_b.push(B = data)
33 + cb()
34 + })
35 + }
36 + ].sort(function () {
37 + return Math.random() - 0.5
38 + }))(function (err) {
39 + if(A.time < B.time)
40 + wins ++
41 +
42 + console.log('winner:', A.time < B.time ? 'A' : 'B', A, B)
43 +
44 + if(0<--n) next()
45 + else {
46 + console.log('A: pull-stream')
47 + console.log(sA.toJSON())
48 + console.log('B: node stream')
49 + console.log(sB.toJSON())
50 + console.log('chance A wins:', wins/N, wins, N - wins)
51 + }
52 + })
53 + })()
54 +
55 +}
56 +
57 +if(!module.parent) {
58 + var file = process.argv[2]
59 + var pull = require('./rate')
60 + var node = require('./node-rate')
61 + bench(function (cb) {
62 + pull(file, cb)
63 + }, function (cb) {
64 + node(file, cb)
65 + })
66 +
67 +}
68 +
69 +
70 +
71 +
72 +
73 +
74 +
node_modules/pull-file/examples/ipsum-chunks.jsView
@@ -1,0 +1,12 @@
1 +var file = require('..');
2 +var pull = require('pull-stream');
3 +var path = require('path');
4 +var inputFile = path.resolve(__dirname, '../test/assets/ipsum.txt');
5 +
6 +pull(
7 + file(inputFile, { bufferSize: 40 }),
8 + pull.take(4),
9 + pull.drain(function(buffer) {
10 + console.log(buffer.toString());
11 + })
12 +);
node_modules/pull-file/examples/node-rate.jsView
@@ -1,0 +1,22 @@
1 +
2 +
3 +var fs = require('fs')
4 +
5 +module.exports = function (file, cb) {
6 + var start = Date.now(), total = 0
7 + fs.createReadStream(file)
8 + .on('data', function (b) {
9 + total += b.length
10 + })
11 + .on('end', function () {
12 + cb(null, {time: Date.now() - start, total: total})
13 + })
14 +}
15 +
16 +if(!module.parent)
17 + module.exports (process.argv[2], function (err, stats) {
18 + var seconds = stats.time/1000, mb = stats.total/(1024*1024)
19 + console.log(seconds, mb, mb/seconds)
20 + })
21 +
22 +
node_modules/pull-file/examples/rate.jsView
@@ -1,0 +1,26 @@
1 +var pull = require('pull-stream')
2 +var File = require('../')
3 +
4 +
5 +module.exports = function (file, cb) {
6 + var start = Date.now(), total = 0
7 + pull(
8 + File(file),
9 + pull.drain(function (b) {
10 + total += b.length
11 + }, function (err) {
12 + cb(null, {time: Date.now() - start, total: total})
13 + })
14 + )
15 +}
16 +
17 +
18 +
19 +if(!module.parent)
20 + module.exports (process.argv[2], function (err, stats) {
21 + var seconds = stats.time/1000, mb = stats.total/(1024*1024)
22 + console.log(seconds, mb, mb/seconds)
23 + })
24 +
25 +
26 +
node_modules/pull-file/index.jsView
@@ -1,0 +1,170 @@
1 +
2 +/* jshint node: true */
3 +'use strict';
4 +
5 +var fs = require('fs');
6 +var Decoder = require('pull-utf8-decoder')
7 +/**
8 + # pull-file
9 +
10 + This is a simple module which uses raw file reading methods available in
11 + the node `fs` module to read files on-demand. It's a work in progress
12 + and feedback is welcome :)
13 +
14 + ## Example Usage
15 +
16 + <<< examples/ipsum-chunks.js
17 +
18 +**/
19 +module.exports = function(filename, opts) {
20 + var mode = opts && opts.mode || 0x1B6; // 0666
21 + var bufferSize = opts && (opts.bufferSize || (opts.buffer && opts.buffer.length)) || 1024*64;
22 + var start = opts && opts.start || 0
23 + var end = opts && opts.end || Number.MAX_SAFE_INTEGER
24 + var fd = opts && opts.fd
25 +
26 + var ended, closeNext, busy;
27 + var _buffer = opts && opts.buffer || new Buffer(bufferSize)
28 + var live = opts && !!opts.live
29 + var liveCb, closeCb
30 + var watcher
31 + if(live) {
32 + watcher = fs.watch(filename, {
33 + persistent: opts.persistent !== false,
34 + },
35 + function (event) {
36 + if(liveCb && event === 'change') {
37 + var cb = liveCb
38 + liveCb = null
39 + closeNext = false
40 + readNext(cb)
41 + }
42 + })
43 +
44 + }
45 +
46 + var flags = opts && opts.flags || 'r'
47 +
48 + function readNext(cb) {
49 + if(closeNext) {
50 + if(!live) close(cb);
51 + else liveCb = cb;
52 + return
53 + }
54 + var toRead = Math.min(end - start, bufferSize);
55 + busy = true;
56 +
57 + fs.read(
58 + fd,
59 + _buffer,
60 + 0,
61 + toRead,
62 + start,
63 + function(err, count, buffer) {
64 + busy = false;
65 + start += count;
66 + // if we have received an end noticiation, just discard this data
67 + if(closeNext && !live) {
68 + close(closeCb);
69 + return cb(closeNext);
70 + }
71 +
72 + if (ended) {
73 + return cb(err || ended);
74 + }
75 +
76 + // if we encountered a read error pass it on
77 + if (err) {
78 + return cb(err);
79 + }
80 +
81 + if(count === buffer.length) {
82 + cb(null, buffer);
83 + } else if(count === 0 && live) {
84 + liveCb = cb; closeNext = true
85 + } else {
86 + closeNext = true;
87 + cb(null, buffer.slice(0, count));
88 + }
89 + }
90 + );
91 + _buffer = opts && opts.buffer || new Buffer(Math.min(end - start, bufferSize))
92 + }
93 +
94 + function open(cb) {
95 + busy = true;
96 + fs.open(filename, flags, mode, function(err, descriptor) {
97 + // save the file descriptor
98 + fd = descriptor;
99 +
100 + busy = false
101 + if(closeNext) {
102 + close(closeCb);
103 + return cb(closeNext);
104 + }
105 +
106 + if (err) {
107 + return cb(err);
108 + }
109 +
110 + // read the next bytes
111 + return readNext(cb);
112 + });
113 + }
114 +
115 + function close (cb) {
116 + if(!cb) throw new Error('close must have cb')
117 + if(watcher) watcher.close()
118 + //if auto close is disabled, then user manages fd.
119 + if(opts && opts.autoClose === false) return cb(true)
120 +
121 + //wait until we have got out of bed, then go back to bed.
122 + //or if we are reading, wait till we read, then go back to bed.
123 + else if(busy) {
124 + closeCb = cb
125 + return closeNext = true
126 + }
127 +
128 + //first read was close, don't even get out of bed.
129 + else if(!fd) {
130 + return cb(true)
131 + }
132 +
133 + //go back to bed
134 + else {
135 + fs.close(fd, function(err) {
136 + fd = null;
137 + cb(err || true);
138 + });
139 + }
140 + }
141 +
142 + function source (end, cb) {
143 + if (end) {
144 + ended = end;
145 + live = false;
146 + if(liveCb) {
147 + liveCb(end || true);
148 + }
149 + close(cb);
150 + }
151 + // if we have already received the end notification, abort further
152 + else if (ended) {
153 + cb(ended);
154 + }
155 +
156 + else if (! fd) {
157 + open(cb);
158 + }
159 +
160 + else
161 + readNext(cb);
162 + };
163 +
164 + //read directly to text
165 + if(opts && opts.encoding)
166 + return Decoder(opts.encoding)(source)
167 +
168 + return source
169 +
170 +};
node_modules/pull-file/package.jsonView
@@ -1,0 +1,70 @@
1 +{
2 + "_args": [
3 + [
4 + "pull-file@1.1.0",
5 + "/home/cel/src/ssb-npm-registry"
6 + ]
7 + ],
8 + "_from": "pull-file@1.1.0",
9 + "_id": "pull-file@1.1.0",
10 + "_inBundle": false,
11 + "_integrity": "sha256-Ky5N01jAWyETQvZOzKq7wL6DT8cyJxrQ8XaEwmP6/ts=",
12 + "_location": "/pull-file",
13 + "_phantomChildren": {},
14 + "_requested": {
15 + "type": "version",
16 + "registry": true,
17 + "raw": "pull-file@1.1.0",
18 + "name": "pull-file",
19 + "escapedName": "pull-file",
20 + "rawSpec": "1.1.0",
21 + "saveSpec": null,
22 + "fetchSpec": "1.1.0"
23 + },
24 + "_requiredBy": [
25 + "/"
26 + ],
27 + "_resolved": "http://localhost:8989/blobs/get/&Ky5N01jAWyETQvZOzKq7wL6DT8cyJxrQ8XaEwmP6/ts=.sha256",
28 + "_spec": "1.1.0",
29 + "_where": "/home/cel/src/ssb-npm-registry",
30 + "author": {
31 + "name": "Damon Oehlman",
32 + "email": "damon.oehlman@gmail.com"
33 + },
34 + "bugs": {
35 + "url": "https://github.com/DamonOehlman/pull-file/issues"
36 + },
37 + "dependencies": {
38 + "pull-utf8-decoder": "^1.0.2"
39 + },
40 + "description": "Pull streams implementation of a file reader",
41 + "devDependencies": {
42 + "cont": "^1.0.3",
43 + "osenv": "^0.1.3",
44 + "pull-stream": "^3.1.0",
45 + "statistics": "^2.0.1",
46 + "tape": "^4.4.0"
47 + },
48 + "directories": {
49 + "example": "examples",
50 + "test": "test"
51 + },
52 + "homepage": "https://github.com/DamonOehlman/pull-file",
53 + "keywords": [
54 + "pull-stream",
55 + "fs",
56 + "read"
57 + ],
58 + "license": "MIT",
59 + "main": "index.js",
60 + "name": "pull-file",
61 + "repository": {
62 + "type": "git",
63 + "url": "git+https://github.com/DamonOehlman/pull-file.git"
64 + },
65 + "scripts": {
66 + "gendocs": "gendocs > README.md",
67 + "test": "set -e; for t in test/*.js; do node $t; done"
68 + },
69 + "version": "1.1.0"
70 +}
node_modules/pull-file/test/append.jsView
@@ -1,0 +1,49 @@
1 +
2 +var pull = require('pull-stream')
3 +var fs = require('fs')
4 +var File = require('../')
5 +
6 +var tape = require('tape')
7 +
8 +tape('append to a file', function (t) {
9 +
10 + var filename = '/tmp/test_pull-file_append'+Date.now()
11 +
12 + var n = 10, r = 0, ended = false
13 + ;(function next () {
14 + --n
15 + fs.appendFile(filename, Date.now() +'\n', function (err) {
16 + if(err) throw err
17 +
18 + if(n) setTimeout(next, 20)
19 + else { ended = true; }
20 + })
21 + })()
22 +
23 + pull(
24 + File(filename, {live: true}),
25 + pull.through(function (chunk) {
26 + r ++
27 + t.notEqual(chunk.length, 0)
28 + }),
29 + pull.take(10),
30 + pull.drain(null, function (err) {
31 + if(err) throw err
32 + t.equal(n, 0, 'writes')
33 + t.equal(r, 10, 'reads')
34 + t.end()
35 + })
36 + )
37 +})
38 +
39 +
40 +
41 +
42 +
43 +
44 +
45 +
46 +
47 +
48 +
49 +
node_modules/pull-file/test/assets/ipsum.txtView
@@ -1,0 +1,9 @@
1 +Lorem ipsum dolor sit amet, consectetur adipiscing elit. Quisque quis tortor elit. Donec vulputate lacus at posuere sodales. Suspendisse cursus, turpis eget dapibus pulvinar, quam nunc scelerisque purus, eu sollicitudin ipsum purus vitae nibh. Phasellus aliquet, magna id convallis faucibus, justo turpis auctor massa, et iaculis nibh orci lobortis purus. Quisque venenatis dolor justo, ac fringilla nunc lobortis in. Maecenas sagittis accumsan sagittis. Quisque vel egestas nisl, eget commodo dolor. Fusce feugiat condimentum iaculis. Suspendisse potenti. Etiam mauris sem, fringilla elementum ultricies eu, consectetur sit amet lectus. Maecenas sit amet sagittis nibh. Donec lobortis, ipsum at malesuada congue, tellus libero ornare ipsum, sit amet pulvinar risus est pellentesque diam. Nunc lorem metus, blandit vitae tristique at, scelerisque vel metus.
2 +
3 +Nam non sagittis lorem, quis egestas enim. Maecenas cursus sagittis metus, ut condimentum purus sodales eget. Vestibulum et imperdiet turpis. Praesent ullamcorper sem non condimentum porta. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Aliquam et nunc at enim vulputate consectetur vitae id leo. Duis rutrum mattis velit, a luctus eros imperdiet id. Phasellus faucibus nulla quis condimentum congue. Fusce mi metus, pharetra eget malesuada et, scelerisque ac justo. Etiam fermentum lectus sit amet posuere gravida. Ut facilisis massa sed erat commodo, sed ultricies dolor porttitor. Donec ac ipsum laoreet, lobortis augue in, ullamcorper risus. Sed sagittis sapien ipsum, sit amet sollicitudin quam tempor quis. Mauris lacus augue, porta eget consequat posuere, eleifend id turpis.
4 +
5 +Pellentesque vitae augue eget nisi sollicitudin ullamcorper placerat in nisl. Nunc malesuada ipsum vel justo luctus suscipit. Nullam elementum id odio vel commodo. Nam sed sem at est sollicitudin suscipit quis in nisi. Aliquam erat volutpat. Nullam tincidunt, nunc eget fermentum tincidunt, lectus mi tempor mauris, vel suscipit elit felis ut arcu. Vestibulum ut malesuada neque, sit amet porttitor magna. Pellentesque adipiscing velit mauris, ut pharetra lorem vestibulum eu. Cras quis lacus tellus. Quisque eu facilisis odio. Morbi ac est porta, molestie nisi vel, euismod augue. Aenean tincidunt justo odio, at gravida felis consequat non. Vestibulum congue auctor arcu, quis placerat mauris posuere vitae. Integer rutrum nisl at consectetur aliquet.
6 +
7 +Donec enim lacus, feugiat nec urna nec, pulvinar venenatis massa. Aenean sed ante urna. Nullam dictum nulla nec lacus tincidunt venenatis. Morbi sed massa et odio rhoncus facilisis. Nullam interdum aliquet iaculis. Quisque vel risus et nunc malesuada tincidunt a sit amet dolor. Ut congue nibh at nulla sodales blandit. In sed massa cursus, dictum orci et, vestibulum neque. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Duis rutrum a purus at tempor.
8 +
9 +Integer posuere orci vel vehicula varius. Maecenas sed vehicula velit. Duis adipiscing lacus ac euismod pellentesque. Suspendisse lacinia enim ligula, nec dapibus eros faucibus sit amet. Aliquam malesuada diam sed nunc hendrerit interdum. Nam scelerisque, velit at lobortis blandit, ligula lacus fringilla metus, vitae pretium lectus ante vel sem. Morbi dapibus ante ut diam euismod vehicula. Phasellus accumsan scelerisque augue gravida luctus. Aenean et hendrerit erat. Sed placerat lacinia risus, vitae tincidunt metus eleifend eu. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Curabitur metus augue, sollicitudin id sagittis eu, posuere eget eros. Vestibulum convallis nunc sed nisi dictum, non tempus mi fringilla.
node_modules/pull-file/test/assets/test.txtView
@@ -1,0 +1,1 @@
1 +hello
node_modules/pull-file/test/explicit-buffer.jsView
@@ -1,0 +1,33 @@
1 +var test = require('tape');
2 +var pull = require('pull-stream');
3 +var file = require('..');
4 +
5 +var path = require('path');
6 +var crypto = require('crypto')
7 +var osenv = require('osenv')
8 +var fs = require('fs')
9 +
10 +var tmpfile = path.join(osenv.tmpdir(), 'test_pull-file_big')
11 +
12 +var big = crypto.pseudoRandomBytes(10*1024*1024)
13 +fs.writeFileSync(tmpfile, big)
14 +
15 +function hash (data) {
16 + return crypto.createHash('sha256').update(data).digest('hex')
17 +}
18 +
19 +test('large file in explicit buffer', function(t) {
20 + var buf = new Buffer(65551) // prime close to 1024 * 64
21 + var h = crypto.createHash('sha256')
22 +
23 + pull(
24 + file(tmpfile, {buffer: buf}),
25 + pull.through(function (chunk) {
26 + h.update(chunk)
27 + }),
28 + pull.onEnd(function(err) {
29 + t.equal(hash(big), h.digest('hex'))
30 + t.end()
31 + })
32 + );
33 +});
node_modules/pull-file/test/fd.jsView
@@ -1,0 +1,57 @@
1 +
2 +var tape = require('tape')
3 +var File = require('../')
4 +var pull = require('pull-stream')
5 +var fs = require('fs')
6 +
7 +var path = require('path')
8 +
9 +function asset(file) {
10 + return path.join(__dirname, 'assets', file)
11 +}
12 +
13 +function all(stream, cb) {
14 + pull(stream, pull.collect(function (err, ary) {
15 + cb(err, Buffer.concat(ary))
16 + }))
17 +}
18 +
19 +tape('can read a file with a provided fd', function (t) {
20 +
21 + var fd = fs.openSync(asset('ipsum.txt'), 'r')
22 +
23 + all(File(null, {fd: fd}), function (err, buf) {
24 + if(err) throw err
25 + t.ok(buf)
26 + t.end()
27 + })
28 +
29 +})
30 +
31 +
32 +tape('two files can read from one fd if autoClose is disabled', function (t) {
33 + var fd = fs.openSync(asset('ipsum.txt'), 'r')
34 +
35 + all(File(null, {fd: fd, autoClose: false}), function (err, buf1) {
36 + if(err) throw err
37 + t.ok(buf1)
38 + all(File(null, {fd: fd, autoClose: false}), function (err, buf2) {
39 + if(err) throw err
40 + t.ok(buf2)
41 + t.equal(buf1.toString(), buf2.toString())
42 + fs.close(fd, function (err) {
43 + if(err) throw err
44 + t.end()
45 + })
46 + })
47 + })
48 +
49 +})
50 +
51 +
52 +
53 +
54 +
55 +
56 +
57 +
node_modules/pull-file/test/largefile.jsView
@@ -1,0 +1,50 @@
1 +var test = require('tape');
2 +var pull = require('pull-stream');
3 +var file = require('..');
4 +
5 +var path = require('path');
6 +var crypto = require('crypto')
7 +var osenv = require('osenv')
8 +var fs = require('fs')
9 +
10 +var tmpfile = path.join(osenv.tmpdir(), 'test_pull-file_big')
11 +
12 +function hash (data) {
13 + return crypto.createHash('sha256').update(data).digest('hex')
14 +}
15 +
16 +test('large file', function(t) {
17 + var big = crypto.pseudoRandomBytes(10*1024*1024)
18 + fs.writeFileSync(tmpfile, big)
19 +
20 + pull(
21 + file(tmpfile),
22 + pull.collect(function(err, items) {
23 + t.equal(hash(big), hash(Buffer.concat(items)))
24 + t.end()
25 + })
26 + );
27 +});
28 +
29 +
30 +test('large file as ascii strings', function(t) {
31 + var big = crypto.pseudoRandomBytes(10*1024*1024).toString('base64')
32 + fs.writeFileSync(tmpfile, big, 'ascii');
33 +
34 + pull(
35 + file(tmpfile, {encoding: 'ascii'}),
36 + pull.through(function (str) {
37 + t.equal(typeof str, 'string');
38 + }),
39 + pull.collect(function(err, items) {
40 + t.equal(hash(big), hash(items.join('')))
41 + t.end()
42 + })
43 + );
44 +});
45 +
46 +
47 +
48 +
49 +
50 +
node_modules/pull-file/test/partial.jsView
@@ -1,0 +1,74 @@
1 +
2 +var tape = require('tape')
3 +var path = require('path')
4 +var pull = require('pull-stream')
5 +var File = require('../')
6 +var cont = require('cont')
7 +var fs = require('fs')
8 +
9 +var crypto = require('crypto')
10 +var osenv = require('osenv')
11 +
12 +var tmpfile = path.join(osenv.tmpdir(), 'test_pull-file_big')
13 +var crypto = require('crypto')
14 +
15 +var big = crypto.pseudoRandomBytes(10*1024*1024)
16 +fs.writeFileSync(tmpfile, big)
17 +
18 +function hash (data) {
19 + return crypto.createHash('sha256').update(data).digest('hex')
20 +}
21 +
22 +function asset(file) {
23 + return path.join(__dirname, 'assets', file)
24 +}
25 +
26 +var MB = 1024*1024
27 +
28 +tape('read files partially', function (t) {
29 +
30 + function test (file, start, end) {
31 + return function (cb) {
32 + var opts = {start: start, end: end}
33 + var expected
34 + var _expected = fs.readFileSync(file, opts)
35 +
36 + expected = _expected
37 + .slice(
38 + start || 0,
39 + end || _expected.length
40 + )
41 +
42 + pull(
43 + File(file, opts),
44 + pull.collect(function (err, ary) {
45 + var actual = Buffer.concat(ary)
46 + t.equal(actual.length, expected.length)
47 + t.equal(hash(actual), hash(expected))
48 + cb()
49 + })
50 + )
51 + }
52 +
53 + }
54 +
55 + cont.para([
56 + test(tmpfile, 0, 9*MB),
57 + test(tmpfile, 5*MB, 10*MB),
58 + test(tmpfile, 5*MB, 6*MB),
59 + test(asset('ipsum.txt')),
60 + test(asset('test.txt'), 1, 4)
61 + ])(function (err) {
62 + t.end()
63 + })
64 +
65 +})
66 +
67 +
68 +
69 +
70 +
71 +
72 +
73 +
74 +
node_modules/pull-file/test/small.jsView
@@ -1,0 +1,31 @@
1 +var path = require('path');
2 +var test = require('tape');
3 +var pull = require('pull-stream');
4 +var file = require('..');
5 +
6 +test('small text', function(t) {
7 + t.plan(1);
8 +
9 + pull(
10 + file(path.resolve(__dirname, 'assets', 'test.txt')),
11 + pull.map(function(data) {
12 + return data.toString();
13 + }),
14 + pull.collect(function(err, items) {
15 + t.equal(items.join(''), 'hello');
16 + })
17 + );
18 +});
19 +
20 +test('buffer size respected', function(t) {
21 + var expected = ['he', 'll', 'o'];
22 +
23 + t.plan(3);
24 +
25 + pull(
26 + file(path.resolve(__dirname, 'assets', 'test.txt'), { bufferSize: 2 }),
27 + pull.drain(function(data) {
28 + t.equal(data.toString(), expected.shift());
29 + })
30 + );
31 +});
node_modules/pull-file/test/terminate-read.jsView
@@ -1,0 +1,140 @@
1 +var path = require('path');
2 +var test = require('tape');
3 +var pull = require('pull-stream');
4 +var file = require('..');
5 +var fs = require('fs')
6 +
7 +var ipsum = path.resolve(__dirname, 'assets', 'ipsum.txt')
8 +var au = path.resolve(__dirname, 'assets', 'AU.txt')
9 +
10 +test('can terminate read process', function(t) {
11 +
12 + var expected = [
13 + 'Lorem ipsum dolor sit amet, consectetur ',
14 + 'adipiscing elit. Quisque quis tortor eli',
15 + 't. Donec vulputate lacus at posuere soda',
16 + 'les. Suspendisse cursus, turpis eget dap'
17 + ];
18 +
19 + pull(
20 + file(ipsum, { bufferSize: 40 }),
21 + pull.take(expected.length),
22 + pull.drain(function(data) {
23 + t.equal(data.toString(), expected.shift(), 'line ok in drain');
24 + }, function (err) {
25 + if(err) throw err
26 + t.end()
27 + })
28 + );
29 +});
30 +
31 +test('can terminate file immediately (before open)', function (t) {
32 +
33 + var source = file(ipsum)
34 + var sync = false
35 + source(true, function (end) {
36 + sync = true
37 + t.equal(end, true)
38 + })
39 + t.ok(sync)
40 + t.end()
41 +
42 +})
43 +
44 +test('can terminate file immediately (after open)', function (t) {
45 +
46 + var source = file(ipsum)
47 + var sync1 = false, sync2 = false
48 + t.plan(6)
49 + source(null, function (end, data) {
50 + if(sync1) throw new Error('read1 called twice')
51 + sync1 = true
52 + t.equal(end, true, 'read aborted, end=true')
53 + t.notOk(data, 'read aborted, data = null')
54 + })
55 + source(true, function (end) {
56 + if(sync2) throw new Error('read2 called twice')
57 + sync2 = true
58 + t.ok(sync1, 'read cb was first')
59 + t.equal(end, true)
60 + t.end()
61 + })
62 + t.notOk(sync1)
63 + t.notOk(sync2)
64 +
65 +})
66 +
67 +test('can terminate file during a read', function (t) {
68 +
69 + var source = file(ipsum, {bufferSize: 1024})
70 + var sync1 = false, sync2 = false
71 + source(null, function (end, data) {
72 + t.equal(end, null)
73 + t.ok(data)
74 + source(null, function (end, data) {
75 + sync1 = true
76 + t.equal(end, true)
77 + t.notOk(data, "data can't have been read")
78 + })
79 + source(true, function (end) {
80 + sync2 = true
81 + t.equal(end, true, 'valid abort end')
82 + t.ok(sync1, 'read called back first')
83 + t.end()
84 + })
85 + t.notOk(sync1)
86 + t.notOk(sync2)
87 + })
88 +
89 +})
90 +
91 +//usually the read succeeds before the close does,
92 +//but not always
93 +
94 +test('after 10k times, cb order is always correct', function (t) {
95 +
96 + var C = 0, R = 0, T = 0
97 + ;(function next () {
98 + T++
99 +
100 + if(T > 10000) {
101 + t.equal(R, 10000)
102 + t.equal(C, 0)
103 + t.equal(R+C, 10000)
104 + console.log(C, R, T)
105 + return t.end()
106 + }
107 +
108 + var fd = fs.openSync(__filename, 'r+', 0666)
109 + var data, closed
110 +
111 + //create a file stream with a fixed fd,
112 + //configured to automatically close (as by default)
113 + var source = file(null, {fd: fd})
114 +
115 + //read.
116 + source(null, function (err, _data) {
117 + data = true
118 + if(!closed) R++
119 + if(data && closed) next()
120 + })
121 +
122 + //abort.
123 + source(true, function (err) {
124 + closed = true
125 + if(!data) C ++
126 + if(data && closed) next()
127 + })
128 + })()
129 +
130 +})
131 +
132 +
133 +
134 +
135 +
136 +
137 +
138 +
139 +
140 +
node_modules/pull-hash/README.mdView
@@ -1,0 +1,62 @@
1 +# pull-hash
2 +
3 +crypto hash API using [pull-stream][]s
4 +
5 +```js
6 +var hash = require('pull-hash')
7 +var pull = require('pull-stream')
8 +
9 +pull(
10 + source,
11 + hash('sha256', 'hex', function (err, sum) {
12 + console.log('shasum:', sum)
13 + }),
14 + pull.drain()
15 +)
16 +```
17 +
18 +## API
19 +
20 +```js
21 +var hash = require('pull-hash')
22 +```
23 +#### `hash(type[, encoding], onEnd): through`
24 +
25 +Create a through stream that hashes the data and calls `onEnd` when it is done.
26 +
27 +- `type`: `crypto.Hash` object, or string to pass to `crypto.createHash`, e.g.
28 + `"sha256"`
29 +- `encoding`: encoding for the digest to pass to `Hash.digest()`
30 +- `onEnd(err, digest)`: callback with digest when stream is ended or errored
31 +
32 +### Extras
33 +
34 +```js
35 +var gitHash = require('pull-hash/ext/git')
36 +```
37 +#### `gitHash(object[, encoding], onEnd): through`
38 +
39 +Get a git object id.
40 +
41 +- `object.type`: one of `["commit", "tree", "blob", "tag"]`
42 +- `object.length || object.size`: size in bytes of the git object's data
43 +
44 +```js
45 +var ssbHash = require('pull-hash/ext/ssb')
46 +```
47 +#### `ssbHash(onEnd): through`
48 +
49 +Get a hash id of a secure-scuttlebutt blob.
50 +
51 +[secure-scuttlebutt]: %iljFzUwTYposC7vs2V6AZgObPqwRVNAXjxYVVUoG4tU=.sha256
52 +[pull-stream]: %xAFKL6PBr1CIzL5xGHZC5DFVWiXmc7R0MMnZnBwq/yk=.sha256
53 +
54 +## License
55 +
56 +Copyright (c) 2016-2018 cel
57 +
58 +Usage of the works is permitted provided that this instrument is
59 +retained with the works, so that any entity that uses the works is
60 +notified of this instrument.
61 +
62 +DISCLAIMER: THE WORKS ARE WITHOUT WARRANTY.
node_modules/pull-hash/ext/git.jsView
@@ -1,0 +1,9 @@
1 +var hash = require('../')
2 +
3 +module.exports = function (object, encoding, onEnd) {
4 + if (onEnd == null) onEnd = encoding, encoding = null
5 + var hasher = hash('sha1', encoding || 'hex', onEnd)
6 + var size = Number(object.length || object.size || 0)
7 + hasher.hash.update(object.type + ' ' + size + '\0')
8 + return hasher
9 +}
node_modules/pull-hash/ext/ssb.jsView
@@ -1,0 +1,7 @@
1 +var hash = require('../')
2 +
3 +module.exports = function (onEnd) {
4 + return hash('sha256', 'base64', function (err, digest) {
5 + onEnd(err, digest && ('&' + digest + '.sha256'))
6 + })
7 +}
node_modules/pull-hash/index.jsView
@@ -1,0 +1,22 @@
1 +var crypto = require('crypto')
2 +
3 +module.exports = function (type, encoding, onEnd) {
4 + if (onEnd == null) onEnd = encoding, encoding = null
5 + var hash = (typeof type == 'string') ? crypto.createHash(type) : type
6 + var ended
7 + function hasher(read) {
8 + return function (abort, cb) {
9 + read(abort, function (end, data) {
10 + if (end === true && !hasher.digest) hasher.digest = hash.digest(encoding)
11 + else if (!end) hash.update(data)
12 + if (end && onEnd && !ended) {
13 + onEnd(end === true ? null : end, hasher.digest)
14 + ended = true
15 + }
16 + cb(end, data)
17 + })
18 + }
19 + }
20 + hasher.hash = hash
21 + return hasher
22 +}
node_modules/pull-hash/package.jsonView
@@ -1,0 +1,52 @@
1 +{
2 + "_args": [
3 + [
4 + "pull-hash@1.0.1",
5 + "/home/cel/src/ssb-npm-registry"
6 + ]
7 + ],
8 + "_from": "pull-hash@1.0.1",
9 + "_id": "pull-hash@1.0.1",
10 + "_inBundle": false,
11 + "_integrity": "sha256-GI8bZT7TX4QLtqbZw9VvYHrK1lNyMqg8xfF5ai7TJ48=",
12 + "_location": "/pull-hash",
13 + "_phantomChildren": {},
14 + "_requested": {
15 + "type": "version",
16 + "registry": true,
17 + "raw": "pull-hash@1.0.1",
18 + "name": "pull-hash",
19 + "escapedName": "pull-hash",
20 + "rawSpec": "1.0.1",
21 + "saveSpec": null,
22 + "fetchSpec": "1.0.1"
23 + },
24 + "_requiredBy": [
25 + "/"
26 + ],
27 + "_resolved": "http://localhost:8989/blobs/get/&GI8bZT7TX4QLtqbZw9VvYHrK1lNyMqg8xfF5ai7TJ48=.sha256",
28 + "_spec": "1.0.1",
29 + "_where": "/home/cel/src/ssb-npm-registry",
30 + "author": {
31 + "name": "cel"
32 + },
33 + "description": "pull-stream API for hashing data",
34 + "devDependencies": {
35 + "multicb": "^1.2.1",
36 + "pull-stream": "^3.2.0",
37 + "stream-to-pull-stream": "^1.6.6",
38 + "tape": "^4.5.0"
39 + },
40 + "homepage": "https://git.scuttlebot.io/%25fQT3h6iV6tTONormNQ6eRN0yqsdsv2%2Fv1KudeDiXwZs%3D.sha256",
41 + "license": "Fair",
42 + "main": "index.js",
43 + "name": "pull-hash",
44 + "repository": {
45 + "type": "git",
46 + "url": "ssb://%fQT3h6iV6tTONormNQ6eRN0yqsdsv2/v1KudeDiXwZs=.sha256"
47 + },
48 + "scripts": {
49 + "test": "node test"
50 + },
51 + "version": "1.0.1"
52 +}
node_modules/pull-hash/test.jsView
@@ -1,0 +1,78 @@
1 +var test = require('tape')
2 +var hash = require('.')
3 +var crypto = require('crypto')
4 +var fs = require('fs')
5 +var pull = require('pull-stream')
6 +var toPull = require('stream-to-pull-stream')
7 +var multicb = require('multicb')
8 +var path = require('path')
9 +var gitHash = require('./ext/git')
10 +var ssbHash = require('./ext/ssb')
11 +
12 +function hashFile(filename, type, encoding, cb) {
13 + var shasum = crypto.createHash(type)
14 + fs.createReadStream(filename).on('data', function (d) {
15 + shasum.update(d)
16 + })
17 + .on('error', cb)
18 + .on('end', function () {
19 + cb(null, shasum.digest(encoding))
20 + })
21 +}
22 +
23 +function hashFilePull(filename, type, encoding, cb) {
24 + pull(
25 + toPull(fs.createReadStream(filename)),
26 + hash(type, encoding, cb),
27 + pull.drain()
28 + )
29 +}
30 +
31 +test('hash a file', function (t) {
32 + var done = multicb({ pluck: 1, spread: true })
33 + hashFile(__filename, 'md5', 'hex', done())
34 + hashFilePull(__filename, 'md5', 'hex', done())
35 + done(function (err, digestNodejs, digestPull) {
36 + t.error(err, 'hashes')
37 + t.equals(digestPull, digestNodejs, 'hash')
38 + t.end()
39 + })
40 +})
41 +
42 +test('git object hash', function (t) {
43 + pull(
44 + pull.once('asdf\n'),
45 + gitHash({type: 'blob', size: 5}, function (err, digest) {
46 + t.error(err, 'git hash')
47 + t.equals(digest, '8bd6648ed130ac9ece0f89cd9a8fbbfd2608427a', 'hash')
48 + t.end()
49 + }),
50 + pull.drain()
51 + )
52 +})
53 +
54 +test('empty git blob', function (t) {
55 + var emptyId = 'e69de29bb2d1d6434b8b29ae775ad8c2e48c5391'
56 + pull(
57 + pull.empty(),
58 + gitHash({type: 'blob', length: 0}, function (err, digest) {
59 + t.error(err, 'git hash')
60 + t.equals(digest, emptyId, 'blob id')
61 + t.end()
62 + }),
63 + pull.drain()
64 + )
65 +})
66 +
67 +test('ssb blob id', function (t) {
68 + var emptyId = '&47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=.sha256'
69 + pull(
70 + pull.empty(),
71 + ssbHash(function (err, digest) {
72 + t.error(err, 'ssb hash')
73 + t.equals(digest, emptyId, 'blob id')
74 + t.end()
75 + }),
76 + pull.drain()
77 + )
78 +})
node_modules/pull-stream/.travis.ymlView
@@ -1,0 +1,5 @@
1 +language: node_js
2 +node_js:
3 + - 0.12
4 + - 4
5 + - 5
node_modules/pull-stream/LICENSEView
@@ -1,0 +1,22 @@
1 +Copyright (c) 2013 Dominic Tarr
2 +
3 +Permission is hereby granted, free of charge,
4 +to any person obtaining a copy of this software and
5 +associated documentation files (the "Software"), to
6 +deal in the Software without restriction, including
7 +without limitation the rights to use, copy, modify,
8 +merge, publish, distribute, sublicense, and/or sell
9 +copies of the Software, and to permit persons to whom
10 +the Software is furnished to do so,
11 +subject to the following conditions:
12 +
13 +The above copyright notice and this permission notice
14 +shall be included in all copies or substantial portions of the Software.
15 +
16 +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
17 +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
18 +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
19 +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
20 +ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
node_modules/pull-stream/README.mdView
@@ -1,0 +1,357 @@
1 +# pull-stream
2 +
3 +Minimal Pipeable Pull-stream
4 +
5 +In [classic-streams](https://github.com/nodejs/node-v0.x-archive/blob/v0.8/doc/api/stream.markdown),
6 +streams _push_ data to the next stream in the pipeline.
7 +In [new-streams](https://github.com/nodejs/node-v0.x-archive/blob/v0.10/doc/api/stream.markdown),
8 +data is pulled out of the source stream, into the destination.
9 +`pull-stream` is a minimal take on streams,
10 +pull streams work great for "object" streams as well as streams of raw text or binary data.
11 +
12 +[![build status](https://secure.travis-ci.org/pull-stream/pull-stream.png)](https://travis-ci.org/pull-stream/pull-stream)
13 +
14 +
15 +## Quick Example
16 +
17 +Stat some files:
18 +
19 +```js
20 +pull(
21 + pull.values(['file1', 'file2', 'file3']),
22 + pull.asyncMap(fs.stat),
23 + pull.collect(function (err, array) {
24 + console.log(array)
25 + })
26 +)
27 +```
28 +Note that `pull(a, b, c)` is basically the same as `a.pipe(b).pipe(c)`.
29 +
30 +To grok how pull-streams work, read through [pull-streams workshop](https://github.com/pull-stream/pull-stream-workshop)
31 +
32 +## How do I do X with pull-streams?
33 +
34 +There is a module for that!
35 +
36 +Check the [pull-stream FAQ](https://github.com/pull-stream/pull-stream-faq)
37 +and post an issue if you have a question that is not covered.
38 +
39 +## Compatibility with node streams
40 +
41 +pull-streams are not _directly_ compatible with node streams,
42 +but pull-streams can be converted into node streams with
43 +[pull-stream-to-stream](https://github.com/pull-stream/pull-stream-to-stream)
44 +and node streams can be converted into pull-stream using [stream-to-pull-stream](https://github.com/pull-stream/stream-to-pull-stream)
45 +correct back pressure is preserved.
46 +
47 +### Readable & Reader vs. Readable & Writable
48 +
49 +Instead of a readable stream, and a writable stream, there is a `readable` stream,
50 + (aka "Source") and a `reader` stream (aka "Sink"). A Through stream
51 +is a Sink that returns a Source.
52 +
53 +See also:
54 +* [Sources](./docs/sources/index.md)
55 +* [Throughs](./docs/throughs/index.md)
56 +* [Sinks](./docs/sinks/index.md)
57 +
58 +### Source (readable stream that produces values)
59 +
60 +A Source is a function `read(end, cb)`,
61 +that may be called many times,
62 +and will (asynchronously) call `cb(null, data)` once for each call.
63 +
64 +To signify an end state, the stream eventually returns `cb(err)` or `cb(true)`.
65 +When signifying an end state, `data` *must* be ignored.
66 +
67 +The `read` function *must not* be called until the previous call has called back.
68 +Unless, it is a call to abort the stream (`read(Error || true, cb)`).
69 +
70 +```js
71 +var n = 5;
72 +
73 +// random is a source of 5 random numbers.
74 +function random (end, cb) {
75 + if(end) return cb(end)
76 + // only read n times, then stop.
77 + if(0 > --n) return cb(true)
78 + cb(null, Math.random())
79 +}
80 +
81 +```
82 +
83 +### Sink (reader or writable stream that consumes values)
84 +
85 +A Sink is a function `reader(read)` that calls a Source (`read(null, cb)`),
86 +until it decides to stop (by calling `read(true, cb)`), or the readable ends (`read` calls
87 +`cb(Error || true)`)
88 +
89 +All [Throughs](./docs/throughs/index.md)
90 +and [Sinks](./docs/sinks/index.md)
91 +are reader streams.
92 +
93 +```js
94 +// logger reads a source and logs it.
95 +function logger (read) {
96 + read(null, function next(end, data) {
97 + if(end === true) return
98 + if(end) throw end
99 +
100 + console.log(data)
101 + read(null, next)
102 + })
103 +}
104 +```
105 +
106 +Since Sources and Sinks are functions, you can pass them to each other!
107 +
108 +```js
109 +logger(random) //"pipe" the streams.
110 +
111 +```
112 +
113 +but, it's easier to read if you use pull-stream's `pull` method
114 +
115 +```js
116 +var pull = require('pull-stream')
117 +
118 +pull(random, logger)
119 +```
120 +
121 +### Creating reusable streams
122 +
123 +When working with pull streams it is common to create functions that return a stream.
124 +This is because streams contain mutable state and so can only be used once.
125 +In the above example, once `random` has been connected to a sink and has produced 5 random numbers it will not produce any more random numbers if connected to another sink.
126 +
127 +Therefore, use a function like this to create a random number generating stream that can be reused:
128 +
129 +```js
130 +
131 +// create a stream of n random numbers
132 +function createRandomStream (n) {
133 + return function randomReadable (end, cb) {
134 + if(end) return cb(end)
135 + if(0 > --n) return cb(true)
136 + cb(null, Math.random())
137 + }
138 +}
139 +
140 +pull(createRandomStream(5), logger)
141 +```
142 +
143 +
144 +### Through
145 +
146 +A through stream is both a reader (consumes values) and a readable (produces values).
147 +It's a function that takes a `read` function (a Sink),
148 +and returns another `read` function (a Source).
149 +
150 +```js
151 +// double is a through stream that doubles values.
152 +function double (read) {
153 + return function readable (end, cb) {
154 + read(end, function (end, data) {
155 + cb(end, data != null ? data * 2 : null)
156 + })
157 + }
158 +}
159 +
160 +pull(createRandomStream(5), double, logger)
161 +```
162 +
163 +### Pipeability
164 +
165 +Every pipeline must go from a `source` to a `sink`.
166 +Data will not start moving until the whole thing is connected.
167 +
168 +```js
169 +pull(source, through, sink)
170 +```
171 +
172 +some times, it's simplest to describe a stream in terms of other streams.
173 +pull can detect what sort of stream it starts with (by counting arguments)
174 +and if you pull together through streams, it gives you a new through stream.
175 +
176 +```js
177 +var tripleThrough =
178 + pull(through1(), through2(), through3())
179 +// The three through streams become one.
180 +
181 +pull(source(), tripleThrough, sink())
182 +```
183 +
184 +pull detects if it's missing a Source by checking function arity,
185 +if the function takes only one argument it's either a sink or a through.
186 +Otherwise it's a Source.
187 +
188 +## Duplex Streams
189 +
190 +Duplex streams, which are used to communicate between two things,
191 +(i.e. over a network) are a little different. In a duplex stream,
192 +messages go both ways, so instead of a single function that represents the stream,
193 +you need a pair of streams. `{source: sourceStream, sink: sinkStream}`
194 +
195 +pipe duplex streams like this:
196 +
197 +``` js
198 +var a = duplex()
199 +var b = duplex()
200 +
201 +pull(a.source, b.sink)
202 +pull(b.source, a.sink)
203 +
204 +//which is the same as
205 +
206 +b.sink(a.source); a.sink(b.source)
207 +
208 +//but the easiest way is to allow pull to handle this
209 +
210 +pull(a, b, a)
211 +
212 +//"pull from a to b and then back to a"
213 +
214 +```
215 +
216 +## Design Goals & Rationale
217 +
218 +There is a deeper,
219 +[platonic abstraction](http://en.wikipedia.org/wiki/Platonic_idealism),
220 +where a stream is just an array in time, instead of in space.
221 +And all the various streaming "abstractions" are just crude implementations
222 +of this abstract idea.
223 +
224 +[classic-streams](https://github.com/joyent/node/blob/v0.8.16/doc/api/stream.markdown),
225 +[new-streams](https://github.com/joyent/node/blob/v0.10/doc/api/stream.markdown),
226 +[reducers](https://github.com/Gozala/reducers)
227 +
228 +The objective here is to find a simple realization of the best features of the above.
229 +
230 +### Type Agnostic
231 +
232 +A stream abstraction should be able to handle both streams of text and streams
233 +of objects.
234 +
235 +### A pipeline is also a stream.
236 +
237 +Something like this should work: `a.pipe(x.pipe(y).pipe(z)).pipe(b)`
238 +this makes it possible to write a custom stream simply by
239 +combining a few available streams.
240 +
241 +### Propagate End/Error conditions.
242 +
243 +If a stream ends in an unexpected way (error),
244 +then other streams in the pipeline should be notified.
245 +(this is a problem in node streams - when an error occurs,
246 +the stream is disconnected, and the user must handle that specially)
247 +
248 +Also, the stream should be able to be ended from either end.
249 +
250 +### Transparent Backpressure & Laziness
251 +
252 +Very simple transform streams must be able to transfer back pressure
253 +instantly.
254 +
255 +This is a problem in node streams, pause is only transferred on write, so
256 +on a long chain (`a.pipe(b).pipe(c)`), if `c` pauses, `b` will have to write to it
257 +to pause, and then `a` will have to write to `b` to pause.
258 +If `b` only transforms `a`'s output, then `a` will have to write to `b` twice to
259 +find out that `c` is paused.
260 +
261 +[reducers](https://github.com/Gozala/reducers) has an interesting method,
262 +where synchronous transformations propagate back pressure instantly!
263 +
264 +This means you can have two "smart" streams doing io at the ends, and lots of dumb
265 +streams in the middle, and back pressure will work perfectly, as if the dumb streams
266 +are not there.
267 +
268 +This makes laziness work right.
269 +
270 +### handling end, error, and abort.
271 +
272 +in pull streams, any part of the stream (source, sink, or through)
273 +may terminate the stream. (this is the case with node streams too,
274 +but it's not handled well).
275 +
276 +#### source: end, error
277 +
278 +A source may end (`cb(true)` after read) or error (`cb(error)` after read)
279 +After ending, the source *must* never `cb(null, data)`
280 +
281 +#### sink: abort
282 +
283 +Sinks do not normally end the stream, but if they decide they do
284 +not need any more data they may "abort" the source by calling `read(true, cb)`.
285 +An abort (`read(true, cb)`) may be called before a preceding read call
286 +has called back.
287 +
288 +### handling end/abort/error in through streams
289 +
290 +Rules for implementing `read` in a through stream:
291 +1) Sink wants to stop. sink aborts the through
292 +
293 + just forward the exact read() call to your source,
294 + any future read calls should cb(true).
295 +
296 +2) We want to stop. (abort from the middle of the stream)
297 +
298 + abort your source, and then cb(true) to tell the sink we have ended.
299 + If the source errored during abort, end the sink by cb read with `cb(err)`.
300 + (this will be an ordinary end/error for the sink)
301 +
302 +3) Source wants to stop. (`read(null, cb) -> cb(err||true)`)
303 +
304 + forward that exact callback towards the sink chain,
305 + we must respond to any future read calls with `cb(err||true)`.
306 +
307 +In none of the above cases data is flowing!
308 +4) If data is flowing (normal operation: `read(null, cb) -> cb(null, data)`)
309 +
310 + forward data downstream (towards the Sink)
311 + do none of the above!
312 +
313 +There either is data flowing (4) OR you have the error/abort cases (1-3), never both.
314 +
315 +
316 +## 1:1 read-callback ratio
317 +
318 +A pull stream source (and thus transform) returns *exactly one value* per read.
319 +
320 +This differs from node streams, which can use `this.push(value)` and an internal
321 +buffer to create transforms that write many values from a single read value.
322 +
323 +Pull streams don't come with their own buffering mechanism, but [there are ways
324 +to get around this](https://github.com/dominictarr/pull-stream-examples/blob/master/buffering.js).
325 +
326 +
327 +## Minimal bundle
328 +
329 +If you need only the `pull` function from this package you can reduce the size
330 +of the imported code (for instance to reduce a Browserify bundle) by requiring
331 +it directly:
332 +
333 +
334 +```js
335 +var pull = require('pull-stream/pull')
336 +
337 +pull(createRandomStream(5), logger())
338 +```
339 +
340 +
341 +## Further Examples
342 +
343 +- [dominictarr/pull-stream-examples](https://github.com/dominictarr/pull-stream-examples)
344 +- [./docs/examples](./docs/examples.md)
345 +
346 +Explore this repo further for more information about
347 +[sources](./docs/sources/index.md),
348 +[throughs](./docs/throughs/index.md),
349 +[sinks](./docs/sinks/index.md), and
350 +[glossary](./docs/glossary.md).
351 +
352 +
353 +## License
354 +
355 +MIT
356 +
357 +
node_modules/pull-stream/benchmarks/node.jsView
@@ -1,0 +1,97 @@
1 +var stream = require('stream')
2 +var inherits = require('util').inherits
3 +
4 +inherits(Values, stream.Readable)
5 +
6 +function Values (v) {
7 + this.i = 0
8 + this.values = v
9 + stream.Readable.call(this, {objectMode: true})
10 +}
11 +
12 +Values.prototype._read = function () {
13 + if(this.i >= this.values.length)
14 + this.push(null)
15 + else
16 + this.push(this.values[this.i++])
17 +}
18 +
19 +
20 +inherits(Async, stream.Transform)
21 +
22 +function Async (fn) {
23 + this._map = fn
24 + stream.Transform.call(this, {objectMode: true})
25 +}
26 +
27 +Async.prototype._transform = function (chunk, _, callback) {
28 + var self = this
29 + this._map(chunk, function (err, data) {
30 + self.push(JSON.parse(data))
31 + //it seems that this HAS to be async, which slows this down a lot.
32 + setImmediate(callback)
33 + })
34 +}
35 +Async.prototype._flush = function (callback) {
36 + this.push(null)
37 + setImmediate(callback)
38 +}
39 +
40 +inherits(Collect, stream.Writable)
41 +
42 +function Collect (cb) {
43 + this._ary = []
44 + this._cb = cb
45 + stream.Writable.call(this, {objectMode: true})
46 +}
47 +
48 +Collect.prototype._write = function (chunk, _, callback) {
49 + this._ary.push(chunk)
50 + setImmediate(callback)
51 +}
52 +
53 +//I couldn't figure out which method you are meant to override to implement a writable
54 +//stream so I ended up just using .end and that worked.
55 +
56 +//Collect.prototype._destroy = Collect.prototype._final = function (callback) {
57 +// this._cb(this._ary)
58 +// callback()
59 +//}
60 +//
61 +//Collect.prototype._flush = function (callback) {
62 +// this._cb(this._ary)
63 +// callback()
64 +//}
65 +//
66 +Collect.prototype.end = function () {
67 + this._cb(null, this._ary)
68 +}
69 +
70 +var bench = require('fastbench')
71 +const values = [
72 + JSON.stringify({ hello: 'world' }),
73 + JSON.stringify({ foo: 'bar' }),
74 + JSON.stringify({ bin: 'baz' })
75 +]
76 +
77 +const run = bench([
78 + function pull3 (done) {
79 + var c = new Collect(function (err, array) {
80 + if (err) return console.error(err)
81 + if(array.length < 3) throw new Error('wrong array')
82 + setImmediate(done)
83 + })
84 +
85 + new Values(values)
86 + .pipe(new Async(function (val, done) {
87 + done(null, val)
88 + }))
89 + .pipe(c)
90 + }]
91 +, N=100000)
92 +
93 +var heap = process.memoryUsage().heapUsed
94 +run(function () {
95 + console.log((process.memoryUsage().heapUsed - heap)/N)
96 +})
97 +
node_modules/pull-stream/benchmarks/pull.jsView
@@ -1,0 +1,57 @@
1 +const bench = require('fastbench')
2 +const pull = require('../')
3 +
4 +const values = [
5 + JSON.stringify({ hello: 'world' }),
6 + JSON.stringify({ foo: 'bar' }),
7 + JSON.stringify({ bin: 'baz' })
8 +]
9 +
10 +const run = bench([
11 + function pull3 (done) {
12 + const source = pull.values(values)
13 + const through = pull.asyncMap(function (val, done) {
14 + const json = JSON.parse(val)
15 + done(null, json)
16 + })
17 +
18 + const sink = pull.collect(function (err, array) {
19 + if (err) return console.error(err)
20 + setImmediate(done)
21 + })
22 + pull(source, through, sink)
23 + }/*,
24 + function pull_compose (done) {
25 + const source = pull.values(values)
26 + const through = pull.asyncMap(function (val, done) {
27 + const json = JSON.parse(val)
28 + done(null, json)
29 + })
30 +
31 + const sink = pull.collect(function (err, array) {
32 + if (err) return console.error(err)
33 + setImmediate(done)
34 + })
35 + pull(source, pull(through, sink))
36 + },
37 + function pull_chain (done) {
38 + const source = pull.values(values)
39 + const through = pull.asyncMap(function (val, done) {
40 + const json = JSON.parse(val)
41 + done(null, json)
42 + })
43 +
44 + const sink = pull.collect(function (err, array) {
45 + if (err) return console.error(err)
46 + setImmediate(done)
47 + })
48 + pull(pull(source, through), sink)
49 + }*/
50 +], N=100000)
51 +
52 +var heap = process.memoryUsage().heapUsed
53 +run(function () {
54 + console.log((process.memoryUsage().heapUsed - heap)/N)
55 +})
56 +
57 +
node_modules/pull-stream/docs/examples.mdView
@@ -1,0 +1,92 @@
1 +
2 +This document describes some examples of where various features
3 +of pull streams are used in simple real-world examples.
4 +
5 +Much of the focus here is handling the error cases. Indeed,
6 +distributed systems are _all about_ handling the error cases.
7 +
8 +# A simple source that ends correctly. (read, end)
9 +
10 +A normal file (source) is read, and sent to a sink stream
11 +that computes some aggregation upon that input such as
12 +the number of bytes, or number of occurrences of the `\n`
13 +character (i.e. the number of lines).
14 +
15 +The source reads a chunk of the file each time it is called;
16 +there is some optimum size depending on your operating system,
17 +file system, physical hardware,
18 +and how many other files are being read concurrently.
19 +
20 +When the sink gets a chunk, it iterates over the characters in it
21 +counting the `\n` characters. When the source returns `end` to the
22 +sink, the sink calls a user provided callback.
23 +
24 +# A source that may fail. (read, err, end)
25 +
26 +A file is downloaded over http and written to a file.
27 +The network should always be considered to be unreliable,
28 +and you must design your system to recover if the download
29 +fails. (For example if the wifi were to cut out).
30 +
31 +The read stream is just the http download, and the sink
32 +writes it to a temporary file. If the source ends normally,
33 +the temporary file is moved to the correct location.
34 +If the source errors, the temporary file is deleted.
35 +
36 +(You could also write the file to the correct location,
37 +and delete it if it errors, but the temporary file method has the advantage
38 +that if the computer or process crashes it leaves only a temporary file
39 +and not a file that appears valid. Stray temporary files can be cleaned up
40 +or resumed when the process restarts.)
41 +
42 +# A sink that may fail
43 +
44 +If we read a file from disk, and upload it, then the upload is the sink that may error.
45 +The file system is probably faster than the upload and
46 +so it will mostly be waiting for the sink to ask for more data.
47 +Usually the sink calls `read(null, cb)` and the source retrieves chunks of the file
48 +until the file ends. If the sink errors, it then calls `read(true, cb)`
49 +and the source closes the file descriptor and stops reading.
50 +In this case the whole file is never loaded into memory.
51 +
52 +# A sink that may fail out of turn.
53 +
54 +A http client connects to a log server and tails a log in realtime.
55 +(Another process will write to the log file,
56 +but we don't need to worry about that.)
57 +
58 +The source is the server's log stream, and the sink is the client.
59 +First the source outputs the old data, this will always be a fast
60 +response, because that data is already at hand. When the old data is all
61 +written then the output rate may drop significantly because the server (the source) will
62 +wait for new data to be added to the file. Therefore,
63 +it becomes much more likely that the sink will error (for example if the network connection
64 +drops) while the source is waiting for new data. Because of this,
65 +it's necessary to be able to abort the stream reading (after you called
66 +read, but before it called back). If it was not possible to abort
67 +out of turn, you'd have to wait for the next read before you can abort
68 +but, depending on the source of the stream, the next read may never come.
69 +
70 +# A through stream that needs to abort.
71 +
72 +Say we wish to read from a file (source), parse each line as JSON (through),
73 +and then output to another file (sink).
74 +If the parser encounters illegal JSON then it will error and,
75 +if this parsing is a fatal error, then the parser needs to abort the pipeline
76 +from the middle. Here the source reads normally, but then the through fails.
77 +When the through finds an invalid line, it should first abort the source,
78 +and then callback to the sink with an error. This way,
79 +by the time the sink receives the error, the entire stream has been cleaned up.
80 +
81 +(You could abort the source and error back to the sink in parallel.
82 +However, if something happened to the source while aborting, then for the user
83 +to discover this error they would have to call the source again with another callback. As this
84 +situation would occur only rarely, users would be inclined not to handle it, leading to
85 +the possibility of undetected errors.
86 +Therefore, as it is better to have one callback at the sink, wait until the source
87 +has finished cleaning up before calling back to the sink with an error.)
88 +
89 +In some cases you may want the stream to continue, and the through stream can just ignore
90 +any lines that do not parse. An example where you definitely
91 +want a through stream to abort on invalid input would be an encrypted stream, which
92 +should be broken into chunks that are encrypted separately.
node_modules/pull-stream/docs/glossary.mdView
@@ -1,0 +1,51 @@
1 +# Glossary
2 +
3 +## read (end, cb)
4 +
5 +A function that retrieves the next chunk.
6 +All readable streams (sources, and throughs)
7 +must return a `read` function.
8 +
9 +## reader (read,...)
10 +
11 +A function to create a reader. It takes a `read` function
12 +as the first argument, and any other options after that.
13 +
14 +When passed to `pipeable` or `pipeableSource`,
15 +a new function is created that adds `.pipe(dest)`
16 +
17 +## Lazy vs Eager
18 +
19 +Lazy means to avoid doing something until you know you have
20 +to do it.
21 +
22 +Eager means to do something early, so you have it ready
23 +immediately when you need it.
24 +
25 +## [Source](sources/index.md)
26 +
27 +The first stream in the pipeline. The Source is not a reader (not writable).
28 +
29 +## [Sink](sinks/index.md)
30 +
31 +The last stream in the pipeline. The Sink is not readable.
32 +
33 +## [Through](throughs/index.md)
34 +
35 +The stream (or streams) in the middle of the pipeline, between your source and sink. A through is a reader and readable.
36 +
37 +## Push vs Pull
38 +
39 +A pull-stream is a stream where the movement of data
40 +is initiated by the sink, and a push-stream
41 +is a stream where the movement of data is initiated
42 +by the source.
43 +
44 +## Reader vs Writable
45 +
46 +In push streams, destination streams (Through and Sink),
47 +are _writable_. They are written to by the source streams.
48 +
49 +In pull streams, destination streams _read_ from the source
50 +streams. They are the active participant, so they are called
51 +_readers_ rather than _writables_.
node_modules/pull-stream/docs/pull.mdView
@@ -1,0 +1,143 @@
1 +# pull-stream/pull
2 +
3 +> pipe many pull streams into a pipeline
4 +
5 +## Background
6 +
7 +In pull-streams, you need a complete pipeline before data will flow.
8 +
9 +That means: a source, zero or more throughs, and a sink.
10 +
11 +But you can still create a _partial_ pipeline, which is great for tiny pull-stream modules.
12 +
13 +## Usage
14 +
15 +```js
16 +var pull = require('pull-stream/pull')
17 +```
18 +
19 +Create a simple complete pipeline:
20 +
21 +```js
22 +pull(source, sink) => undefined
23 +```
24 +
25 +Create a source modified by a through:
26 +
27 +```js
28 +pull(source, through) => source
29 +```
30 +
31 +Create a sink, but modify its input before it goes.
32 +
33 +```js
34 +pull(through, sink) => sink
35 +```
36 +
37 +Create a through by chaining several throughs:
38 +
39 +```js
40 +pull(through1, through2) => through
41 +```
42 +
43 +These streams combine just like normal streams.
44 +
45 +```js
46 +pull(
47 + pull(source, through),
48 + pull(through1, through2),
49 + pull(through, sink)
50 +) => undefined
51 +```
52 +
53 +The complete pipeline returns undefined, because it cannot be piped to anything else.
54 +
55 +Pipe duplex streams like this:
56 +
57 +```js
58 +var a = duplex()
59 +var b = duplex()
60 +
61 +pull(a.source, b.sink)
62 +pull(b.source, a.sink)
63 +
64 +//which is the same as
65 +
66 +b.sink(a.source); a.sink(b.source)
67 +
68 +//but the easiest way is to allow pull to handle this
69 +
70 +pull(a, b, a)
71 +
72 +//"pull from a to b and then back to a"
73 +```
74 +
75 +## Continuable
76 +
77 +[Continuables](https://github.com/Raynos/continuable) let you defer a stream and handle the completion of the sink stream. For example:
78 +
79 +```js
80 +var cont = pull(...streams, sink)
81 +
82 +// ...
83 +
84 +cont(function (err) {
85 + // stream finished
86 +})
87 +```
88 +
89 +Or call beside it if you are not deferring:
90 +
91 +```js
92 +pull(...streams, sink)(function (err) {
93 + // stream finished
94 +})
95 +```
96 +
97 +They are created by making a sink stream return a continuable, which uses its callback and reads:
98 +
99 +```js
100 +function sink (read) {
101 + return function continuable (done) {
102 + // Do reads and eventually call `done`
103 + read(null, function (end, data) {
104 + if (end === true) return done(null)
105 + if (end) return done(end)
106 + // ... otherwise use `data`
107 + })
108 + }
109 +}
110 +```
111 +
112 +## API
113 +
114 +```js
115 +var pull = require('pull-stream/pull')
116 +```
117 +
118 +### `pull(...streams)`
119 +
120 +`pull` is a function that receives n-arity stream arguments and connects them into a pipeline.
121 +
122 +`pull` detects the type of stream by checking function arity, if the function takes only one argument it's either a sink or a through. Otherwise it's a source. A duplex stream is an object with the shape `{ source, sink }`.
123 +
124 +If the pipeline is complete (reduces into a source being passed into a sink), then `pull` returns `undefined`, as the data is flowing.
125 +
126 +If the pipeline is partial (reduces into either a source, a through, or a sink), then `pull` returns the partial pipeline, as it must be composed with other streams before the data will flow.
127 +
128 +## Install
129 +
130 +With [npm](https://npmjs.org/) installed, run
131 +
132 +```sh
133 +$ npm install pull-stream
134 +```
135 +
136 +## See Also
137 +
138 +- [`mafintosh/pump`](https://github.com/mafintosh/pump)
139 +- [`mafintosh/pumpify`](https://github.com/mafintosh/pumpify)
140 +
141 +## License
142 +
143 +[MIT](https://tldrlegal.com/license/mit-license)
node_modules/pull-stream/docs/sinks/collect.mdView
@@ -1,0 +1,10 @@
1 +# pull-stream/sinks/collect
2 +
3 +## usage
4 +
5 +### `collect = require('pull-stream/sinks/collect')`
6 +
7 +### `collect(cb)`
8 +
9 +Read the stream into an array, then callback.
10 +
node_modules/pull-stream/docs/sinks/concat.mdView
@@ -1,0 +1,9 @@
1 +# pull-stream/sinks/concat
2 +
3 +## usage
4 +
5 +### `concat = require('pull-stream/sinks/concat')`
6 +
7 +### `concat(cb)`
8 +
9 +concat stream of strings into single string, then callback.
node_modules/pull-stream/docs/sinks/drain.mdView
@@ -1,0 +1,11 @@
1 +# pull-stream/sinks/drain
2 +
3 +## usage
4 +
5 +### `drain = require('pull-stream/sinks/drain')`
6 +
7 +### `drain(op?, done?)`
8 +
9 +Drain the stream, calling `op` on each `data`.
10 +call `done` when stream is finished.
11 +If op returns `===false`, abort the stream.
node_modules/pull-stream/docs/sinks/index.mdView
@@ -1,0 +1,22 @@
1 +# Sinks
2 +
3 +A Sink is a stream that is not readable.
4 +You *must* have a sink at the end of a pipeline
5 +for data to move towards.
6 +
7 +You can only use _one_ sink per pipeline.
8 +
9 +``` js
10 +pull(source, through, sink)
11 +```
12 +
13 +See also:
14 +* [Sources](../sources/index.md)
15 +* [Throughs](../throughs/index.md)
16 +
17 +## [drain](./drain.md)
18 +## [reduce](./reduce.md)
19 +## [concat](./concat.md)
20 +## [collect](./collect.md)
21 +## [onEnd](./on-end.md)
22 +## [log](./log.md)
node_modules/pull-stream/docs/sinks/log.mdView
@@ -1,0 +1,9 @@
1 +# pull-stream/sinks/log
2 +
3 +## usage
4 +
5 +### `log = require('pull-stream/sinks/log')`
6 +
7 +### `log()`
8 +
9 +output the stream to `console.log`
node_modules/pull-stream/docs/sinks/on-end.mdView
@@ -1,0 +1,9 @@
1 +# pull-stream/sinks/on-end
2 +
3 +## usage
4 +
5 +### `onEnd = require('pull-stream/sinks/on-end')`
6 +
7 +### `onEnd(cb)`
8 +
9 +Drain the stream and then callback when done.
node_modules/pull-stream/docs/sinks/reduce.mdView
@@ -1,0 +1,9 @@
1 +# pull-stream/sinks/reduce
2 +
3 +## usage
4 +
5 +### `reduce = require('pull-stream/sinks/reduce')`
6 +
7 +### `reduce (reduce, initial, cb)`
8 +
9 +reduce stream into single value, then callback.
node_modules/pull-stream/docs/sources/count.mdView
@@ -1,0 +1,12 @@
1 +# pull-stream/sources/count
2 +
3 +## usage
4 +
5 +### `count = require('pull-stream/sources/count')`
6 +
7 +### `count(max, onAbort)`
8 +
9 +create a stream that outputs `0 ... max`.
10 +by default, `max = Infinity`, see
11 +[take](../throughs/take.md)
12 +
node_modules/pull-stream/docs/sources/empty.mdView
@@ -1,0 +1,20 @@
1 +# pull-stream/sources/empty
2 +
3 +## usage
4 +
5 +### `empty = require('pull-stream/sources/empty')`
6 +
7 +### `empty()`
8 +
9 +A stream with no contents (it just ends immediately)
10 +
11 +``` js
12 +pull(
13 + pull.empty(),
14 + pull.collect(function (err, ary) {
15 +    console.log(ary)
16 +    // ==> []
17 +  })
18 +)
19 +```
20 +
node_modules/pull-stream/docs/sources/error.mdView
@@ -1,0 +1,9 @@
1 +# pull-stream/sources/error
2 +
3 +## usage
4 +
5 +### `error = require('pull-stream/sources/error')`
6 +
7 +### `error(err)`
8 +
9 +a stream that errors immediately
node_modules/pull-stream/docs/sources/index.mdView
@@ -1,0 +1,23 @@
1 +# Sources
2 +
3 +A source is a stream that is not writable.
4 +You *must* have a source at the start of a pipeline
5 +for data to move through.
6 +
7 +in general:
8 +
9 +``` js
10 +pull(source, through, sink)
11 +```
12 +
13 +See also:
14 +* [Throughs](../throughs/index.md)
15 +* [Sinks](../sinks/index.md)
16 +
17 +## [values](./values.md)
18 +## [keys](./keys.md)
19 +## [count](./count.md)
20 +## [infinite](./infinite.md)
21 +## [empty](./empty.md)
22 +## [once](./once.md)
23 +## [error](./error.md)
node_modules/pull-stream/docs/sources/infinite.mdView
@@ -1,0 +1,11 @@
1 +# pull-stream/sources/infinite
2 +
3 +## usage
4 +
5 +### `infinite = require('pull-stream/sources/infinite')`
6 +
7 +### `infinite(generator, onAbort)`
8 +
9 +create an unending stream by repeatedly calling a generator
10 +function (by default, `Math.random`)
11 +see [take](../throughs/take.md)
node_modules/pull-stream/docs/sources/keys.mdView
@@ -1,0 +1,10 @@
1 +# pull-stream/sources/keys
2 +
3 +## usage
4 +
5 +### `keys = require('pull-stream/sources/keys')`
6 +
7 +### `keys(array | object, onAbort)`
8 +
9 +stream the key names from an object (or array)
10 +
node_modules/pull-stream/docs/sources/once.mdView
@@ -1,0 +1,9 @@
1 +# pull-stream/sources/once
2 +
3 +## usage
4 +
5 +### `once = require('pull-stream/sources/once')`
6 +
7 +### `once(value, onAbort)`
8 +
9 +a stream with a single value
node_modules/pull-stream/docs/sources/values.mdView
@@ -1,0 +1,9 @@
1 +# pull-stream/sources/values
2 +
3 +## usage
4 +
5 +### `values = require('pull-stream/sources/values')`
6 +
7 +### `values(array | object, onAbort)`
8 +
9 +create a SourceStream that reads the values from an array or object and then stops.
node_modules/pull-stream/docs/spec.mdView
@@ -1,0 +1,67 @@
1 +# Synopsis
2 +
3 +In Pull-Streams, there are two fundamental types of streams: `Source`s and `Sink`s. There are two composite types of streams: `Through` (aka transform) and `Duplex`. A Through Stream is a sink stream that reads what goes into the Source Stream; it can also be written to. A duplex stream is a pair of streams (`{Source, Sink}`).
4 +
5 +# Pull-Streams
6 +## Source Streams
7 +
8 +A Source Stream (aka readable stream) is an asynchronous function that may be called repeatedly until it returns a terminal state. Pull-streams have back pressure, but it is implicit instead of sending an explicit back pressure signal. If a source
9 +needs the sink to slow down, it may delay returning a read. If a sink needs the source to slow down, it just waits until it reads the source again.
10 +
11 +For example, the Source Stream `fn(abort, cb)` may have an internal implementation that will read data from a disk or network. If `fn` is called with the first argument (`abort`) being truthy, the callback will be passed `abort` as its first argument. The callback has three different argument configurations...
12 +
13 + 1. `cb(null, data)`, indicates that there is data.
14 + 2. `cb(true)`, indicates the stream has ended normally.
15 + 3. `cb(error)`, indicates that there was an error.
16 +
17 +The read method *must not* be called until the previous call has returned, except for a call to abort the stream.
18 +
19 +### End
20 +The stream may be terminated, for example `cb(err|end)`. The read method *must not* be called after it has terminated. As a normal stream end is propagated up the pipeline, an error should be propagated also, because it also means the end of the stream. If `cb(end=true)` that is an "end" which means it's a valid termination, if `cb(err)` that is an error.
21 +`error` and `end` are mostly the same. If you are buffering inputs and see an `end`, process those inputs and then return the end.
22 +If you are buffering inputs and get an `error`, then you _may_ throw away that buffer and return the end.
23 +
24 +### Abort
25 +Sometimes it's the sink that errors, and if it can't read anymore then we _must_ abort the source. (example, source is a file stream from local fs, and sink is an http upload. Perhaps the network drops or the remote server crashes; in this case we should abort the source, so that its resources can be released.)
26 +
27 +To abort the source, call read with a truthy first argument. You may abort a source _before_ it has returned from a regular read. (if you wait for the previous read to complete, it's possible you'd get a deadlock, if you are reading a stream that takes a long time, example, `tail -f` is reading a file, but nothing has appended to that file yet).
28 +
29 +When a stream is aborted during a read, the callback provided to the read function *must* be called first, with an error, and then the abort callback.
30 +
31 +## Sink Streams
32 +
33 +A Sink Stream (aka writable stream) is a function that a Source Stream is passed to. The Sink Stream calls the `read` function of the Source Stream, abiding by the rules about when it may not call.
34 +
35 +### Abort
36 +The Sink Stream may also abort the source if it can no longer read from it.
37 +
38 +## Through Streams
39 +
40 +A through stream is a sink stream that returns another source when it is passed a source.
41 +A through stream may be thought of as wrapping a source.
42 +
43 +## Duplex Streams
44 +
45 +A pair of independent streams, one Source and one Sink. The purpose of a duplex stream is not transformation of the data that passes though it. It's meant for communication only.
46 +
47 +# Composing Streams
48 +
49 +Since a Sink is a function that takes a Source, a Source may be fed into a Sink by simply passing the Source to the Sink.
50 +For example, `sink(source)`. Since a transform is a Sink that returns a Source, you can just add to that pattern by wrapping the source. For example, `sink(transform(source))`. This works, but it reads from right-to-left, and we are used to left-to-right.
51 +
52 +A method for creating a left-to-right reading pipeline of pull-streams. For example, a method could implement the following interface...
53 +
54 +```
55 +pull([source] [,transform ...] [,sink ...])
56 +```
57 +
58 +The interface could allow for the following scenarios...
59 +
60 +1. Connect a complete pipeline: `pull(source, transform,* sink)` this connects a source to a sink via zero or more transforms.
61 +
62 +2. If a sink is not provided: `pull(source, transform+)` then pull should return the last `source`,
63 +this way streams can be easily combined in a functional way.
64 +
65 +3. If a source is not provided: `pull(transform,* sink)` then pull should return a sink that will complete the pipeline when
66 +it's passed a source. `function (source) { return pull(source, pipeline) }`
67 +If neither a source or a sink are provided, this will return a source that will return another source (via 2) i.e. a through stream.
node_modules/pull-stream/docs/throughs/async-map.mdView
@@ -1,0 +1,10 @@
1 +# pull-stream/throughs/async-map
2 +
3 +## usage
4 +
5 +### `asyncMap = require('pull-stream/throughs/async-map')`
6 +
7 +### `asyncMap(fn)`
8 +
9 +Like [`map`](./map.md) but the signature of `fn` must be
10 +`function (data, cb) { cb(null, data) }`
node_modules/pull-stream/docs/throughs/filter-not.mdView
@@ -1,0 +1,9 @@
1 +# pull-stream/throughs/filter-not
2 +
3 +## usage
4 +
5 +### `filterNot = require('pull-stream/throughs/filter-not')`
6 +
7 +### `filterNot(test)`
8 +
9 +Like [`filter`](./filter.md), but remove items where the filter returns true.
node_modules/pull-stream/docs/throughs/filter.mdView
@@ -1,0 +1,14 @@
1 +# pull-stream/throughs/filter
2 +
3 +## usage
4 +
5 +### `filter = require('pull-stream/throughs/filter')`
6 +
7 +### `filter(test)`
8 +
9 +Like `[].filter(function (data) {return true || false})`
10 +only `data` where `test(data) == true` are let through
11 +to the next stream.
12 +
13 +`test` defaults to `function id (e) { return e }` this means
14 +any truthy javascript value is allowed through.
node_modules/pull-stream/docs/throughs/flatten.mdView
@@ -1,0 +1,42 @@
1 +# pull-stream/throughs/flatten
2 +
3 +## usage
4 +### `flatten = require('pull-stream/throughs/flatten')`
5 +### `flatten(streams)`
6 +Turn a stream of streams or a stream of arrays into a stream of their items, (undoes group).
7 +
8 +
9 +## example
10 +```js
11 +test('flatten arrays', function (t) {
12 + pull(
13 + pull.values([
14 + [1, 2, 3],
15 + [4, 5, 6],
16 + [7, 8, 9]
17 + ]),
18 + pull.flatten(),
19 + pull.collect(function (err, numbers) {
20 + t.deepEqual([1, 2, 3, 4, 5, 6, 7, 8, 9], numbers)
21 + t.end()
22 + })
23 + )
24 +})
25 +
26 +test('flatten stream of streams', function (t) {
27 +
28 + pull(
29 + pull.values([
30 + pull.values([1, 2, 3]),
31 + pull.values([4, 5, 6]),
32 + pull.values([7, 8, 9])
33 + ]),
34 + pull.flatten(),
35 + pull.collect(function (err, numbers) {
36 + t.deepEqual([1, 2, 3, 4, 5, 6, 7, 8, 9], numbers)
37 + t.end()
38 + })
39 + )
40 +
41 +})
42 +```
node_modules/pull-stream/docs/throughs/index.mdView
@@ -1,0 +1,46 @@
1 +# Throughs
2 +
3 +A Through is a stream that both reads and is read by
4 +another stream.
5 +
6 +Through streams are optional.
7 +
8 +Put through streams in-between [sources](../sources/index.md) and [sinks](../sinks/index.md),
9 +like this:
10 +
11 +```js
12 +pull(source, through, sink)
13 +```
14 +
15 +Also, if you don't have the source/sink yet,
16 +you can pipe multiple through streams together
17 +to get one through stream!
18 +
19 +```js
20 +var throughABC = function () {
21 + return pull(
22 + throughA(),
23 + throughB(),
24 + throughC()
25 + )
26 +}
27 +```
28 +
29 +Which can then be treated like a normal through stream!
30 +
31 +```js
32 +pull(source(), throughABC(), sink())
33 +```
34 +
35 +See also:
36 +* [Sources](../sources/index.md)
37 +* [Sinks](../sinks/index.md)
38 +
39 +## [map](./map.md)
40 +## [asyncMap](./async-map.md)
41 +## [filter](./filter.md)
42 +## [filterNot](./filter-not.md)
43 +## [unique](./unique.md)
44 +## [nonUnique](./non-unique.md)
45 +## [take](./take.md)
46 +## [flatten](./flatten.md)
node_modules/pull-stream/docs/throughs/map.mdView
@@ -1,0 +1,54 @@
1 +# pull-stream/throughs/map
2 +
3 +> [].map for pull-streams
4 +
5 +## Background
6 +
7 +Pull-streams are arrays of data in time rather than space.
8 +
9 +As with a [`[].map`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/map), we may want to map a function over a stream.
10 +
11 +## Example
12 +
13 +```js
14 +var map = require('pull-stream/throughs/map')
15 +```
16 +
17 +```js
18 +pull(
19 + values([0, 1, 2, 3]),
20 + map(function (x) {
21 + return x * x
22 + }),
23 + log()
24 +)
25 +// 0
26 +// 1
27 +// 4
28 +// 9
29 +```
30 +
31 +## Usage
32 +
33 +### `map = require('pull-stream/throughs/map')`
34 +
35 +### `map((data) => data)`
36 +
37 +`map(fn)` returns a through stream that calls the given `fn` for each chunk of incoming data and outputs the return value, in the same order as before.
38 +
39 +## Install
40 +
41 +With [npm](https://npmjs.org/) installed, run
42 +
43 +```
44 +$ npm install pull-stream
45 +```
46 +
47 +## See Also
48 +
49 +- [`brycebaril/through2-map`](https://github.com/brycebaril/through2-map)
49 +- [`Rx.Observable#map`](http://xgrommx.github.io/rx-book/content/observable/observable_instance_methods/map.html)
51 +
52 +## License
53 +
54 +[MIT](https://tldrlegal.com/license/mit-license)
node_modules/pull-stream/docs/throughs/non-unique.mdView
@@ -1,0 +1,10 @@
1 +# pull-stream/throughs/non-unique
2 +
3 +## usage
4 +
5 +### `nonUnique = require('pull-stream/throughs/non-unique')`
6 +
7 +### `nonUnique(prop)`
8 +
9 +Filter unique items -- get the duplicates.
10 +The inverse of [`unique`](./unique.md)
node_modules/pull-stream/docs/throughs/take.mdView
@@ -1,0 +1,55 @@
1 +# pull-stream/throughs/take
2 +
3 +## Example usage
4 +
5 +```js
6 +var pull = require('pull-stream')
7 +// var take = require('pull-stream/throughs/take') // if you just need take
8 +
9 +pull(
10 + pull.values(['a', 'b', 'c', 'd', 'e']),
11 + pull.take(3),
12 + pull.collect((err, data) => {
13 + console.log(data)
14 + // => ['a', 'b', 'c']
15 + })
16 +)
17 +```
18 +
19 +## API
20 +
21 +take has 2 valid signatures:
22 +
23 +### `take(n) => through`
24 +
25 +Where `n` is a positive integer.
26 +`take` pulls n values from the source and then closes the stream.
27 +This is really useful for limiting how much you pull.
28 +
29 +### `take(testFn [, opts]) => through`
30 +
31 +If `testFn` is a function, read data from the source stream and forward it downstream until `testFn(data)` returns false, then close the stream.
32 +
33 +`opts` is an optional Object of form `{ last: Boolean }`, where `opts.last` determines whether the last value tested (before closing the stream) is included or excluded (default). e.g.
34 +
35 +```js
36 +pull(
37 + pull.values([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]),
38 +  pull.take(n => n < 4.6, { last: true }), // include the last value tested (5)
39 +  pull.collect(function (err, results) {
40 +    console.log(results)
41 +    // => [1, 2, 3, 4, 5]
42 +  })
43 +)
44 +```
45 +
46 +```js
47 +pull(
48 + pull.values([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]),
49 +  pull.take(n => n < 4.6, { last: false }), // exclude the last value tested (5)
50 +  pull.collect(function (err, results) {
51 +    console.log(results)
52 +    // => [1, 2, 3, 4]
53 +  })
54 +)
55 +```
node_modules/pull-stream/docs/throughs/through.mdView
@@ -1,0 +1,5 @@
1 +# pull-stream/throughs/through
2 +
3 +## usage
4 +
5 +### `through = require('pull-stream/throughs/through')`