Commit 37c7f336bf09edb2faf7bbc393a9c945e1524224
remove persistent global encoding
Signed-off-by: wanderer <mjbecze@gmail.com>
wanderer committed on 4/3/2018, 1:14:59 AM
Parent: eadc4c5881ee57090e37e64e5d62ca6033a84aaa
Files changed
index.js | changed
tests/index.js | changed

index.js
@@ -55,81 +55,28 @@
 function encode (annotations) {
   const stream = new Stream()
   encodeCustomSection('types', annotations, stream, encodeType)
   encodeCustomSection('typeMap', annotations, stream, encodeTypeMap)
-  encodeCustomSection('persist', annotations, stream, encodePersist)
 
   return stream.buffer
 }
 
 function encodeCustomSection (name, json, stream, encodingFunc) {
   let payload = new Stream()
   json = json[name]
 
-  if (json) {
-    stream.write([0])
-    // encode type
-    leb.unsigned.write(name.length, payload)
-    payload.write(name)
-    encodingFunc(json, payload)
-    // write the size of the payload
-    leb.unsigned.write(payload.bytesWrote, stream)
-    stream.write(payload.buffer)
-  }
+  stream.write([0])
+  // encode type
+  leb.unsigned.write(name.length, payload)
+  payload.write(name)
+  encodingFunc(json, payload)
+  // write the size of the payload
+  leb.unsigned.write(payload.bytesWrote, stream)
+  stream.write(payload.buffer)
   return stream
 }
 
 /**
- * encodes the type annoations for persist
- * @param {Object} annoations
- * @param {buffer-pipe} [stream]
- * @return {Buffer}
- */
-function encodePersist (annotations, stream = new Stream()) {
-  leb.unsigned.write(annotations.length, stream)
-  for (const entry of annotations) {
-    const form = EXTERNAL_KIND_STRG[entry.form]
-    leb.unsigned.write(form, stream)
-    leb.unsigned.write(entry.index, stream)
-    leb.unsigned.write(LANGUAGE_TYPES_STRG[entry.type], stream)
-  }
-  return stream.buffer
-}
-
-/**
- * decodes the persist annotations
- * @param {Buffer} buf
- * @param {Object}
- */
-function decodePersist (buf) {
-  const stream = new Stream(Buffer.from(buf))
-  let numOfEntries = leb.unsigned.read(stream)
-  const json = []
-  while (numOfEntries--) {
-    const form = EXTERNAL_KIND_BIN[leb.unsigned.readBn(stream).toNumber()]
-    if (!form) {
-      throw new Error('invalid form')
-    }
-    const index = leb.unsigned.readBn(stream).toNumber()
-    const type = LANGUAGE_TYPES_BIN[leb.unsigned.readBn(stream).toNumber()]
-    if (!type) {
-      throw new Error('invalid param')
-    }
-    json.push({
-      form,
-      index,
-      type
-    })
-  }
-
-  if (stream.buffer.length) {
-    throw new Error('invalid buffer length')
-  }
-
-  return json
-}
-
-/**
  * encodes a typeMap definition
  * @param {Object} definition
  * @param {buffer-pipe} [stream]
  * @return {Buffer}
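For orientation (not part of the commit): a rough sketch of the byte layout that encodeCustomSection now always emits, written with plain Node Buffers instead of the buffer-pipe stream used above. The section name and body bytes here are made up for illustration.

    // Illustration only: mirrors the write order in encodeCustomSection above.
    // A WebAssembly custom section is: id 0x00, payload length (LEB128),
    // then the payload, whose first bytes are the name length and name.
    const name = Buffer.from('types')        // example section name
    const body = Buffer.from([0x01, 0x02])   // hypothetical encodingFunc output
    const payload = Buffer.concat([Buffer.from([name.length]), name, body])
    const section = Buffer.concat([
      Buffer.from([0]),                      // custom section id
      Buffer.from([payload.length]),         // payload size; lengths < 128 fit in one LEB128 byte
      payload
    ])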
@@ -269,13 +216,12 @@
 function mergeTypeSections (json) {
   const result = {
     types: [],
     indexes: {},
-    exports: {},
-    persist: []
+    exports: {}
   }
 
-  const wantedSections = ['types', 'typeMap', 'persist', 'type', 'import', 'function', 'export']
+  const wantedSections = ['types', 'typeMap', 'type', 'import', 'function', 'export']
   const iterator = findSections(json, wantedSections)
   const mappedFuncs = new Map()
   const mappedTypes = new Map()
   const {value: customType} = iterator.next()
@@ -287,13 +233,8 @@
   if (typeMap) {
     decodeTypeMap(typeMap.payload).forEach(map => mappedFuncs.set(map.func, map.type))
   }
 
-  let {value: persist} = iterator.next()
-  if (persist) {
-    result.persist = decodePersist(persist.payload)
-  }
-
   const {value: type} = iterator.next()
   const {value: imports = {entries: []}} = iterator.next()
   const {value: functions = {entries: []}} = iterator.next()
   functions.entries.forEach((typeIndex, funcIndex) => {
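As the test updates below reflect, the object mergeTypeSections builds no longer carries a persist array. A hedged sketch of the resulting shape; the values are copied from the updated caller.wast test, and the comments are my reading of the code rather than something stated in the commit.

    // Sketch of the merged result after this change (illustrative values):
    const merged = {
      types: [/* decoded custom type entries */],
      indexes: { '2': 1 },      // presumably maps function indexes to entries in types
      exports: { 'call': 1 }    // export name mapping, per the test expectations
    }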
@@ -343,12 +284,10 @@
   injectCustomSection,
   encodeAndInject,
   decodeType,
   decodeTypeMap,
-  decodePersist,
   encodeType,
   encodeTypeMap,
-  encodePersist,
   encode,
   mergeTypeSections,
   LANGUAGE_TYPES_BIN,
   LANGUAGE_TYPES_STRG
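Since the `if (json)` guard is gone from encodeCustomSection, encode presumably expects the annotation object to always carry both of the keys it encodes. A minimal usage sketch, using the `types` module name from the tests; the annotation contents are placeholders, not from the repo.

    // Minimal sketch, assuming annotations always provide both sections.
    const annotations = {
      types: [/* type annotation entries */],
      typeMap: [/* function-to-type mappings */]
    }
    const customSections = types.encode(annotations)  // Buffer containing two custom sections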
tests/index.js
@@ -8,14 +8,13 @@
   const wat = fs.readFileSync(`${__dirname}/wast/caller.wast`).toString()
   const json = JSON.parse(fs.readFileSync(`${__dirname}/wast/caller.json`))
   const mod = wabt.parseWat('module.wast', wat)
 
-  const buf = types.encode(json)
   const r = mod.toBinary({
     log: true
   })
   let binary = Buffer.from(r.buffer)
-  binary = types.injectCustomSection(buf, binary)
+  binary = types.encodeAndInject(json, binary)
   const moduleJSON = wasm2json(binary)
 
   const mergedJson = types.mergeTypeSections(moduleJSON)
   const expectedJson = {
@@ -35,10 +34,9 @@
       '2': 1
     },
     'exports': {
       'call': 1
-    },
-    'persist': []
+    }
   }
 
   t.deepEquals(mergedJson, expectedJson)
   t.end()
@@ -57,54 +55,16 @@
   const mergedJson = types.mergeTypeSections(moduleJSON)
   const expectedJson = {
     'types': [],
     'indexes': {},
-    'exports': {},
-    'persist': []
+    'exports': {}
   }
 
   t.deepEquals(mergedJson, expectedJson)
   t.end()
 })
 
-tape('globals', t => {
-  const wat = fs.readFileSync(`${__dirname}/wast/globals.wast`).toString()
-  const json = JSON.parse(fs.readFileSync(`${__dirname}/wast/globals.json`))
-  const mod = wabt.parseWat('module.wast', wat)
 
-  const r = mod.toBinary({
-    log: true
-  })
-
-  let binary = Buffer.from(r.buffer)
-  binary = types.encodeAndInject(json, binary)
-  const moduleJSON = wasm2json(binary)
-  const mergedJson = types.mergeTypeSections(moduleJSON)
-  const expectedJson = {
-    'types': [{
-      'form': 'func',
-      'params': []
-    }],
-    'indexes': {
-      '2': 0,
-      '3': 0
-    },
-    'exports': {
-      'load': 3,
-      'store': 2
-    },
-    'persist': [{
-      'form': 'global',
-      'index': 0,
-      'type': 'data'
-    }]
-  }
-
-  t.deepEquals(mergedJson, expectedJson)
-
-  t.end()
-})
-
 tape('invalid function type', t => {
   const wat = fs.readFileSync(`${__dirname}/wast/invalid.wast`).toString()
   const mod = wabt.parseWat('module.wast', wat)
   const r = mod.toBinary({
@@ -200,32 +160,4 @@
   } catch (e) {
     t.pass('should catch invalid typeMap encodings')
   }
 })
-
-tape('invalid persist encoding', t => {
-  t.plan(3)
-  const json = JSON.parse(fs.readFileSync(`${__dirname}/wast/globals.json`))
-  let buf = types.encodePersist(json.persist)
-  try {
-    const invalidBuf = Buffer.concat([buf, Buffer.from([0])])
-    types.decodePersist(invalidBuf)
-  } catch (e) {
-    t.pass('should catch invalid persist encodings')
-  }
-
-  try {
-    const invalidParam = Buffer.from(buf)
-    invalidParam[3] = 77
-    types.decodePersist(invalidParam)
-  } catch (e) {
-    t.pass('should catch invalid persist type encodings')
-  }
-
-  try {
-    const invalidParam = Buffer.from(buf)
-    invalidParam[1] = 77
-    types.decodePersist(invalidParam)
-  } catch (e) {
-    t.pass('should catch invalid persist form encodings')
-  }
-})
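The tests now call types.encodeAndInject, which, judging by its name and the export list in index.js, presumably folds the old two-step flow into one call; a hedged sketch of the substitution the updated test relies on.

    // Old flow, removed from the test:
    //   const buf = types.encode(json)
    //   binary = types.injectCustomSection(buf, binary)
    // New flow, assumed to be equivalent:
    binary = types.encodeAndInject(json, binary)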