diff --git a/README.md b/README.md
index 1b42cac..88bd7a1 100644
--- a/README.md
+++ b/README.md
@@ -29,7 +29,7 @@ This `encode` function will generate standard CBOR without any extensions that s
 Cbor-x modules are standard ESM modules and can be loaded directly from the [deno.land registry for cbor](https://deno.land/x/cbor) for use in Deno. The standard encode and decode functionality is available on Deno, like other platforms.
 
 ### Streams
-We can use the including streaming functionality (which further improves performance). The `EncoderStream` is a NodeJS transform stream that can be used to serialize objects to a binary stream (writing to network/socket, IPC, etc.), and the `DecoderStream` can be used to deserialize objects from a binary sream (reading from network/socket, etc.):
+We can use the included streaming functionality (which further improves performance). The `EncoderStream` is a NodeJS transform stream that can be used to serialize objects to a binary stream (writing to network/socket, IPC, etc.), and the `DecoderStream` can be used to deserialize objects from a binary stream (reading from network/socket, etc.):
 ```JavaScript
 import { EncoderStream } from 'cbor-x';
@@ -166,7 +166,7 @@ The principle driver for this feature is to support `application/senml+cbor`cont
 Records are also supported in conjunction with keyMaps, but these are disabled by default when keyMaps are specified, as use of the two features does not introduce any additional compression efficiency unless the data arrays are quite large (> 10 items).
 ```JavaScript
-import { Encoder } from 'cbor-x'
+import { Decoder, Encoder } from 'cbor-x'
 const data = [
   { bn: '/3303/0/5700', bt: 1278887, v: 35.5 },
   { t: 10, v: 34 },
@@ -183,7 +183,8 @@ let senmlBuff = senmlCbor.encode(data)
 let basicBuff = basicCbor.encode(data)
 console.log('Senml CBOR size:', senmlBuff.length) // 77
 console.log('Basic CBOR size:', basicBuff.length) // 90
-assert.deepEqual(senmlEncoder.decode(senmlBuff), data)
+let senmlDecoder = new Decoder({ keyMap: senmlKeys });
+assert.deepEqual(senmlDecoder.decode(senmlBuff), data)
 ```
@@ -228,6 +229,18 @@ Note, that the performance is decreased with decimal rounding by about 20-25%, a
 In addition, cbor-x exports a `roundFloat32(number)` function that can be used to round floating point numbers to the maximum significant decimal digits that can be stored in a 32-bit float, just as DECIMAL_ROUND does when decoding. This can be useful for determining how a number will be decoded prior to encoding it.
 
+### Setting Size Limits
+You can set size limits on objects, arrays, and maps to prevent resource exhaustion when decoding. This can be done by calling the `setSizeLimits` export. Each of the properties is optional (only provide
+the properties you want to change), for example (with the defaults):
+```JavaScript
+import { setSizeLimits } from 'cbor-x';
+setSizeLimits({
+	maxArraySize: 112810000,
+	maxMapSize: 16810000,
+	maxObjectSize: 16710000
+});
+```
+
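+Since these limits are module-level settings, they apply to all subsequent decode calls. Below is a minimal sketch of the guard in action (the limit value here is purely illustrative, not a recommendation):
+```JavaScript
+import { encode, decode, setSizeLimits } from 'cbor-x';
+setSizeLimits({ maxArraySize: 1000 }); // only overrides the array limit
+try {
+	decode(encode(new Array(5000).fill(0)));
+} catch (error) {
+	console.log(error.message); // Array length exceeds 1000
+}
+```
+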
 ## Performance
 Cbor-x is fast. Really fast. Here is comparison with the next fastest JS projects using the benchmark tool from `msgpack-lite` (and the sample data is from some clinical research data we use that has a good mix of different value types and structures).
 It also includes comparison to V8 native JSON functionality, and JavaScript Avro (`avsc`, a very optimized Avro implementation):
diff --git a/decode.d.ts b/decode.d.ts
index c836f26..7d888e8 100644
--- a/decode.d.ts
+++ b/decode.d.ts
@@ -1,2 +1,2 @@
 export { decode, decodeMultiple, Decoder, addExtension, clearSource,roundFloat32, isNativeAccelerationEnabled,
-	Extension, Options, FLOAT32_OPTIONS} from '.'
+	Extension, Options, FLOAT32_OPTIONS, setSizeLimits, SizeLimitOptions } from '.'
diff --git a/decode.js b/decode.js
index 379c22b..362ff36 100644
--- a/decode.js
+++ b/decode.js
@@ -14,6 +14,13 @@ const BUNDLED_STRINGS_ID = 0xdff9
 const PACKED_TABLE_TAG_ID = 51
 const PACKED_REFERENCE_TAG_ID = 6
 const STOP_CODE = {}
+let maxArraySize = 112810000 // This is the maximum array size in V8. We would potentially detect and set it higher
+// for JSC, but this is pretty large and should be sufficient for most use cases
+let maxMapSize = 16810000 // JavaScript has a fixed maximum map size of about 16710000, but JS itself enforces this,
+// so we don't need to enforce it ourselves; the default just needs to sit near that ceiling
+
+let maxObjectSize = 16710000; // This is the maximum number of keys in a Map. It takes over a minute to create this
+// many keys in a plain object, so it is probably a reasonable limit for objects as well.
 let strings = EMPTY_ARRAY
 let stringPosition = 0
 let currentDecoder = {}
@@ -294,6 +301,7 @@ export function read() {
 		let array = []
 		let value, i = 0
 		while ((value = read()) != STOP_CODE) {
+			if (i >= maxArraySize) throw new Error(`Array length exceeds ${maxArraySize}`)
 			array[i++] = value
 		}
 		return majorType == 4 ? array : majorType == 3 ? array.join('') : Buffer.concat(array)
@@ -301,8 +309,19 @@
 		let key
 		if (currentDecoder.mapsAsObjects) {
 			let object = {}
-			if (currentDecoder.keyMap) while((key = read()) != STOP_CODE) object[safeKey(currentDecoder.decodeKey(key))] = read()
-			else while ((key = read()) != STOP_CODE) object[safeKey(key)] = read()
+			let i = 0;
+			if (currentDecoder.keyMap) {
+				while((key = read()) != STOP_CODE) {
+					if (i++ >= maxObjectSize) throw new Error(`Property count exceeds ${maxObjectSize}`)
+					object[safeKey(currentDecoder.decodeKey(key))] = read()
+				}
+			}
+			else {
+				while ((key = read()) != STOP_CODE) {
+					if (i++ >= maxObjectSize) throw new Error(`Property count exceeds ${maxObjectSize}`)
+					object[safeKey(key)] = read()
+				}
+			}
 			return object
 		} else {
 			if (restoreMapsAsObject) {
@@ -310,8 +329,24 @@
 				restoreMapsAsObject = false
 			}
 			let map = new Map()
-			if (currentDecoder.keyMap) while((key = read()) != STOP_CODE) map.set(currentDecoder.decodeKey(key), read())
-			else while ((key = read()) != STOP_CODE) map.set(key, read())
+			if (currentDecoder.keyMap) {
+				let i = 0;
+				while((key = read()) != STOP_CODE) {
+					if (i++ >= maxMapSize) {
+						throw new Error(`Map size exceeds ${maxMapSize}`);
+					}
+					map.set(currentDecoder.decodeKey(key), read())
+				}
+			}
+			else {
+				let i = 0;
+				while ((key = read()) != STOP_CODE) {
+					if (i++ >= maxMapSize) {
+						throw new Error(`Map size exceeds ${maxMapSize}`);
+					}
+					map.set(key, read())
+				}
+			}
 			return map
 		}
 	case 7:
@@ -342,12 +377,14 @@
 			}
 			return readFixedString(token)
 		case 4: // array
+			if (token >= maxArraySize) throw new Error(`Array length exceeds ${maxArraySize}`)
 			let array = new Array(token)
 			//if (currentDecoder.keyMap) for (let i = 0; i < token; i++) array[i] = currentDecoder.decodeKey(read())
 			//else for (let i = 0; i < token; i++) array[i] = read()
 			return array
 		case 5: // map
+			if (token >= maxMapSize) throw new Error(`Map size exceeds ${maxMapSize}`)
 			if (currentDecoder.mapsAsObjects) {
 				let object = {}
 				if (currentDecoder.keyMap) for (let i = 0; i < token; i++) object[safeKey(currentDecoder.decodeKey(read()))] = read()
@@ -449,6 +486,7 @@ export function read() {
 	}
 const validName = /^[a-zA-Z_$][a-zA-Z\d_$]*$/
 function createStructureReader(structure) {
+	if (!structure) throw new Error('Structure is required in record definition');
 	function readObject() {
 		// get the array size from the header
 		let length = src[position++]
@@ -502,7 +540,12 @@ function createStructureReader(structure) {
 }
 
 function safeKey(key) {
-	return key === '__proto__' ? '__proto_' : key
+	// protect against prototype pollution
+	if (typeof key === 'string') return key === '__proto__' ? '__proto_' : key
+	if (typeof key === 'number' || typeof key === 'boolean' || typeof key === 'bigint') return key.toString();
+	if (key == null) return key + '';
+	// protect against expensive (DoS) string conversions
+	throw new Error('Invalid property name type ' + typeof key);
 }
 
 let readFixedString = readStringJS
@@ -881,7 +924,7 @@ currentExtensions[2] = (buffer) => { // bigint extension
 	let value = BigInt(0)
 	for (let i = 0, l = buffer.byteLength; i < l; i++) {
-		value = BigInt(buffer[i]) + value << BigInt(8)
+		value = BigInt(buffer[i]) + (value << BigInt(8))
 	}
 	return value
 }
@@ -937,9 +980,18 @@ currentExtensions[27] = (data) => { // http://cbor.schmorp.de/generic-object
 	return (glbl[data[0]] || Error)(data[1], data[2])
 }
 const packedTable = (read) => {
-	if (src[position++] != 0x84)
-		throw new Error('Packed values structure must be followed by a 4 element array')
+	if (src[position++] != 0x84) {
+		let error = new Error('Packed values structure must be followed by a 4 element array')
+		if (src.length < position)
+			error.incomplete = true
+		throw error
+	}
 	let newPackedValues = read() // packed values
+	if (!newPackedValues || !newPackedValues.length) {
+		let error = new Error('Packed values structure must be followed by a 4 element array')
+		error.incomplete = true
+		throw error
+	}
 	packedValues = packedValues ? newPackedValues.concat(packedValues.slice(newPackedValues.length)) : newPackedValues
 	packedValues.prefixes = read()
 	packedValues.suffixes = read()
@@ -957,7 +1009,10 @@ currentExtensions[PACKED_REFERENCE_TAG_ID] = (data) => { // packed reference
 	}
 	if (typeof data == 'number')
 		return packedValues[16 + (data >= 0 ? 2 * data : (-2 * data - 1))]
-	throw new Error('No support for non-integer packed references yet')
+	let error = new Error('No support for non-integer packed references yet')
+	if (data === undefined)
+		error.incomplete = true
+	throw error
 }
 
 // The following code is an incomplete implementation of http://cbor.schmorp.de/stringref
@@ -984,6 +1039,7 @@ currentExtensions[28] = (read) => {
 		referenceMap.id = 0
 	}
 	let id = referenceMap.id++
+	let startingPosition = position
 	let token = src[position]
 	let target
 	// TODO: handle Maps, Sets, and other types that can cycle; this is complicated, because you potentially need to read
@@ -996,8 +1052,20 @@
 	let refEntry = { target } // a placeholder object
 	referenceMap.set(id, refEntry)
 	let targetProperties = read() // read the next value as the target object to id
-	if (refEntry.used) // there is a cycle, so we have to assign properties to original target
+	if (refEntry.used) { // there is a cycle, so we have to assign properties to the original target
+		if (Object.getPrototypeOf(target) !== Object.getPrototypeOf(targetProperties)) {
+			// the returned value does not match the placeholder target, so we need to rerun the read so that
+			// the correctly created instance is assigned as the reference, and then copy the properties back
+			// to that target
+			// reset the position so that the read can be repeated
+			position = startingPosition
+			// the returned instance is our new target for references
+			target = targetProperties
+			referenceMap.set(id, { target })
+			targetProperties = read()
+		}
 		return Object.assign(target, targetProperties)
+	}
 	refEntry.target = targetProperties // the placeholder wasn't used, replace with the deserialized one
 	return targetProperties // no cycle, can just use the returned read object
}
@@ -1079,11 +1147,19 @@ function registerTypedArray(TypedArray, tag) {
 	for (let littleEndian = 0; littleEndian < 2; littleEndian++) {
 		if (!littleEndian && bytesPerElement == 1) continue
-		let sizeShift = bytesPerElement == 2 ? 1 : bytesPerElement == 4 ? 2 : 3
+		let sizeShift = bytesPerElement == 2 ? 1 : bytesPerElement == 4 ? 2 : bytesPerElement == 8 ? 3 : 0
 		currentExtensions[littleEndian ? tag : (tag - 4)] = (bytesPerElement == 1 || littleEndian == isLittleEndianMachine) ?
 			(buffer) => {
 				if (!TypedArray)
 					throw new Error('Could not find typed array for code ' + tag)
-				// we have to always slice/copy here to get a new ArrayBuffer that is word/byte aligned
+				if (!currentDecoder.copyBuffers) {
+					// try to provide a direct view, but this will only work if the data is element-aligned
+					if (bytesPerElement === 1 ||
+						bytesPerElement === 2 && !(buffer.byteOffset & 1) ||
+						bytesPerElement === 4 && !(buffer.byteOffset & 3) ||
+						bytesPerElement === 8 && !(buffer.byteOffset & 7))
+						return new TypedArray(buffer.buffer, buffer.byteOffset, buffer.byteLength >> sizeShift);
+				}
+				// we have to slice/copy here to get a new ArrayBuffer if the data is not element-aligned
 				return new TypedArray(Uint8Array.prototype.slice.call(buffer, 0).buffer)
 			} : buffer => {
 				if (!TypedArray)
@@ -1198,6 +1274,12 @@ export function addExtension(extension) {
 	currentExtensions[extension.tag] = extension.decode
 }
 
+export function setSizeLimits(limits) {
+	if (limits.maxMapSize) maxMapSize = limits.maxMapSize;
+	if (limits.maxArraySize) maxArraySize = limits.maxArraySize;
+	if (limits.maxObjectSize) maxObjectSize = limits.maxObjectSize;
+}
+
 export const mult10 = new Array(147) // this is a table matching binary exponents to the multiplier to determine significant digit rounding
 for (let i = 0; i < 256; i++) {
 	mult10[i] = +('1e' + Math.floor(45.15 - i * 0.30103))
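Note the behavioral consequence of the new aligned fast path in the typed-array extensions above: when `copyBuffers` is not set and the element data happens to be suitably aligned, the decoded typed array is a direct view over the source buffer rather than a copy, so it is only valid as long as that buffer's contents are. A minimal sketch (illustrative only; whether the view or the copy path is taken depends on the byte offset at decode time):
```JavaScript
import { Decoder, encode } from 'cbor-x';

let buffer = encode({ nums: new Uint32Array([1, 2, 3]) });
let zeroCopy = new Decoder({}).decode(buffer); // may alias `buffer` when element-aligned
let copied = new Decoder({ copyBuffers: true }).decode(buffer); // always copies into a fresh ArrayBuffer
console.log(zeroCopy.nums, copied.nums); // Uint32Array(3) [1, 2, 3], twice
```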
diff --git a/encode.js b/encode.js
index fbfa7b6..1e22443 100644
--- a/encode.js
+++ b/encode.js
@@ -469,7 +469,7 @@
 			}
 			let constructor = value.constructor
 			if (constructor === Object) {
-				writeObject(value, true)
+				writeObject(value)
 			} else if (constructor === Array) {
 				length = value.length
 				if (length < 0x18) {
@@ -564,8 +564,8 @@
 						return encode(json)
 					}
-					// no extension found, write as object
-					writeObject(value, !value.hasOwnProperty) // if it doesn't have hasOwnProperty, don't do hasOwnProperty checks
+					// no extension found, write as a plain object
+					writeObject(value)
 				}
 			}
 		} else if (type === 'boolean') {
@@ -585,7 +585,19 @@
 				target[position++] = 0xfb
 				targetView.setFloat64(position, Number(value))
 			} else {
-				throw new RangeError(value + ' was too large to fit in CBOR 64-bit integer format, set largeBigIntToFloat to convert to float-64')
+				if (value >= BigInt(0))
+					target[position++] = 0xc2 // tag 2 (positive bignum)
+				else {
+					target[position++] = 0xc3 // tag 3 (negative bignum)
+					value = BigInt(-1) - value;
+				}
+				let bytes = [];
+				while (value) {
+					bytes.push(Number(value & BigInt(0xff)));
+					value >>= BigInt(8);
+				}
+				writeBuffer(new Uint8Array(bytes.reverse()), makeRoom);
+				return;
 			}
 		}
 		position += 8
@@ -628,19 +640,19 @@
 				}
 			}
 		} :
-		(object, safePrototype) => {
+		(object) => {
 			target[position++] = 0xb9 // always use map 16, so we can preallocate and set the length afterwards
 			let objectOffset = position - start
 			position += 2
 			let size = 0
-			if (encoder.keyMap) {
-				for (let key in object) if (safePrototype || object.hasOwnProperty(key)) {
+			if (encoder.keyMap) {
+				for (let key in object) if (typeof object.hasOwnProperty !== 'function' || object.hasOwnProperty(key)) {
 					encode(encoder.encodeKey(key))
 					encode(object[key])
 					size++
 				}
 			} else {
-				for (let key in object) if (safePrototype || object.hasOwnProperty(key)) {
+				for (let key in object) if (typeof object.hasOwnProperty !== 'function' || object.hasOwnProperty(key)) {
 					encode(key)
 					encode(object[key])
 					size++
@@ -649,7 +661,7 @@
 			target[objectOffset++ + start] = size >> 8
 			target[objectOffset + start] = size & 0xff
 		} :
-		(object, safePrototype) => {
+		(object, skipValues) => {
 			let nextTransition, transition = structures.transitions || (structures.transitions = Object.create(null))
 			let newTransitions = 0
 			let length = 0
@@ -668,7 +680,7 @@
 					transition = nextTransition
 				}
 			} else {
-				for (let key in object) if (safePrototype || object.hasOwnProperty(key)) {
+				for (let key in object) if (typeof object.hasOwnProperty !== 'function' || object.hasOwnProperty(key)) {
 					nextTransition = transition[key]
 					if (!nextTransition) {
 						if (transition[RECORD_SYMBOL] & 0x100000) { // this indicates it is a branchable/extendable terminal node, so we will use this record id and extend it
@@ -728,9 +740,9 @@
 				writeArrayHeader(length + 2)
 				encode(0xe000 + recordId)
 				encode(keys)
-				if (safePrototype === null) return; // special exit for iterator
+				if (skipValues) return; // special exit for iterator
 				for (let key in object)
-					if (safePrototype || object.hasOwnProperty(key))
+					if (typeof object.hasOwnProperty !== 'function' || object.hasOwnProperty(key))
 						encode(object[key])
 				return
 			}
@@ -740,9 +752,9 @@
 			} else {
 				writeArrayHeader(length)
 			}
-			if (safePrototype === null) return; // special exit for iterator
+			if (skipValues) return; // special exit for iterator
 			for (let key in object)
-				if (safePrototype || object.hasOwnProperty(key))
+				if (typeof object.hasOwnProperty !== 'function' || object.hasOwnProperty(key))
 					encode(object[key])
 		}
 	const makeRoom = (end) => {
@@ -780,7 +792,7 @@
 			if (constructor === Object) {
 				let useRecords = encoder.useRecords !== false;
 				if (useRecords)
-					writeObject(object, null); // write the record identifier
+					writeObject(object, true); // write the record identifier and keys (values are encoded below)
 				else
 					writeEntityLength(Object.keys(object).length, 0xa0);
 				for (let key in object) {
@@ -805,7 +817,7 @@
 						yield* tryEncode(value, iterateProperties, 'element');
 					} else encode(value);
 				}
-			} else if (object[Symbol.iterator]) {
+			} else if (object[Symbol.iterator] && !object.buffer) { // iterator, but exclude typed arrays
 				target[position++] = 0x9f; // start indefinite array
 				for (let value of object) {
 					if (value && (typeof value === 'object' || position - start > chunkThreshold)) {
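With the encode.js change above, BigInts that do not fit in the CBOR 64-bit integer representation are written as tag 2/tag 3 bignums instead of throwing a RangeError, so they round-trip by default (the updated bigint test below exercises this). A minimal sketch:
```JavaScript
import { encode, decode } from 'cbor-x';

let data = { big: 2n ** 66n, negBig: -(2n ** 66n) };
let buffer = encode(data); // written as tag 2 / tag 3 byte-string bignums
console.log(decode(buffer).big === 2n ** 66n);       // true
console.log(decode(buffer).negBig === -(2n ** 66n)); // true
```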
number +} \ No newline at end of file diff --git a/index.js b/index.js index e2e9757..7618204 100644 --- a/index.js +++ b/index.js @@ -1,3 +1,3 @@ export { Encoder, addExtension, encode, encodeAsIterable, encodeAsAsyncIterable, NEVER, ALWAYS, DECIMAL_ROUND, DECIMAL_FIT, REUSE_BUFFER_MODE } from './encode.js' -export { Tag, Decoder, decodeMultiple, decode, FLOAT32_OPTIONS, clearSource, roundFloat32, isNativeAccelerationEnabled } from './decode.js' +export { Tag, Decoder, decodeMultiple, decode, FLOAT32_OPTIONS, clearSource, roundFloat32, isNativeAccelerationEnabled, setSizeLimits } from './decode.js' export { decodeIter, encodeIter } from './iterators.js' diff --git a/node-index.js b/node-index.js index fc5b3fa..7e5a394 100644 --- a/node-index.js +++ b/node-index.js @@ -1,5 +1,5 @@ export { Encoder, addExtension, encode, encodeAsIterable, encodeAsAsyncIterable, NEVER, ALWAYS, DECIMAL_ROUND, DECIMAL_FIT, REUSE_BUFFER_MODE } from './encode.js' -export { Tag, Decoder, decodeMultiple, decode, FLOAT32_OPTIONS, clearSource, roundFloat32, isNativeAccelerationEnabled } from './decode.js' +export { Tag, Decoder, decodeMultiple, decode, FLOAT32_OPTIONS, clearSource, roundFloat32, isNativeAccelerationEnabled, setSizeLimits } from './decode.js' export { EncoderStream, DecoderStream } from './stream.js' export { decodeIter, encodeIter } from './iterators.js' export const useRecords = false diff --git a/package.json b/package.json index 2e1d989..edb94e5 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "cbor-x", "author": "Kris Zyp", - "version": "1.5.4", + "version": "1.6.0", "description": "Ultra-fast and conformant CBOR (RFC 8949) implementation with support for numerous tag extensions including records and structured cloning", "license": "MIT", "types": "./index.d.ts", @@ -55,8 +55,14 @@ "import": "./decode.js" } }, - "./decode-no-eval": "./dist/decode-no-eval.cjs", - "./index-no-eval": "./dist/index-no-eval.cjs", + "./decode-no-eval": { + "types": "./decode.d.ts", + "default": "./dist/decode-no-eval.cjs" + }, + "./index-no-eval": { + "types": "./index.d.ts", + "default": "./dist/index-no-eval.cjs" + }, "./package.json": "./package.json" }, "files": [ @@ -69,7 +75,7 @@ "node:buffer": false }, "optionalDependencies": { - "cbor-extract": "^2.1.1" + "cbor-extract": "^2.2.0" }, "devDependencies": { "@rollup/plugin-json": "^5.0.1", diff --git a/stream.js b/stream.js index 144f23d..38b193c 100644 --- a/stream.js +++ b/stream.js @@ -13,10 +13,12 @@ export class EncoderStream extends Transform { this.encoder = options.encoder || new Encoder(options) } async _transform(value, encoding, callback) { - for await (let chunk of this.encoder.encodeAsAsyncIterable(value)) { - this.push(chunk) - } - callback() + try { + for await (let chunk of this.encoder.encodeAsAsyncIterable(value)) { + this.push(chunk) + } + callback() + } catch(error) { callback (error) } } } @@ -41,9 +43,9 @@ export class DecoderStream extends Transform { if (error.incomplete) { this.incompleteBuffer = chunk.slice(error.lastPosition) values = error.values + } else { + return callback(error) } - else - throw error } finally { for (let value of values || []) { if (value === null) @@ -51,7 +53,7 @@ export class DecoderStream extends Transform { this.push(value) } } - if (callback) callback() + callback() } getNullValue() { return Symbol.for(null) diff --git a/tests/test.js b/tests/test.js index c78d2ed..fefda28 100644 --- a/tests/test.js +++ b/tests/test.js @@ -1,4 +1,4 @@ -import * as CBOR from '../index.js' +import * as CBOR 
diff --git a/tests/test.js b/tests/test.js
index c78d2ed..fefda28 100644
--- a/tests/test.js
+++ b/tests/test.js
@@ -1,4 +1,4 @@
-import * as CBOR from '../index.js'
+import * as CBOR from '../node-index.js'
 import chai from 'chai'
 import { readFileSync } from 'fs'
 const sampleData = JSON.parse(readFileSync(new URL('./example4.json', import.meta.url)))
@@ -274,11 +274,11 @@ suite('CBOR basic tests', function(){
 		assert.deepEqual(deserialized1, data1)
 		assert.deepEqual(deserialized2, data2)
 	})
-
-	test('extended class encode/decode', function(){
+	test('extended class encode/decode', function() {
 		function Extended() {
 
 		}
+
 		Extended.prototype.getDouble = function() {
 			return this.value * 2
 		}
@@ -305,10 +305,37 @@ suite('CBOR basic tests', function(){
 				return encoder.encode([instance.value, instance.string])
 			}
 		})
-		var serialized = encode(data)
-		var deserialized = decode(serialized)
-		assert.deepEqual(data, deserialized)
-		assert.equal(deserialized.extendedInstance.getDouble(), 8)
+	})
+	test('extended class encode/decode with self reference in structured clone', function(){
+		function Extended() {
+
+		}
+		addExtension({
+			Class: Extended,
+			tag: 301,
+			decode: function(data) {
+				let e = new Extended()
+				e.value = data[0]
+				e.string = data[1]
+				return e
+			},
+			encode: function(instance, encode) {
+				return encode([instance.value, instance.string])
+			}
+		})
+		var instance = new Extended()
+		instance.value = instance;
+		instance.string = 'hi'
+		let data = {
+			extended: instance
+		}
+		let encoder = new Encoder({
+			structuredClone: true,
+		})
+		let serialized = encoder.encode(data)
+		let deserialized = encoder.decode(serialized)
+		assert(deserialized.extended.value === deserialized.extended)
+		assert(deserialized.extended instanceof Extended)
 	})
 
 	test('addExtension with map', function(){
@@ -452,6 +479,12 @@
 		var deserialized = decode(serialized)
 		assert.deepEqual(deserialized, data)
 	})
+	test('object with __proto__', function(){
+		const data = { foo: 'bar', __proto__: { isAdmin: true } };
+		var serialized = encode(data)
+		var deserialized = decode(serialized)
+		assert.deepEqual(deserialized, { foo: 'bar' });
+	})
 	test('big buffer', function() {
 		var size = 100000000
@@ -646,23 +679,27 @@
 		var data = {
 			bigintSmall: 352n,
 			bigintSmallNegative: -333335252n,
-			bigintBig: 2n**64n - 1n, // biggest possible
+			bigintBig: 2n**64n - 1n, // biggest possible 64-bit value
 			bigintBigNegative: -(2n**63n), // largest negative
 			mixedWithNormal: 44,
 		}
 		var serialized = encode(data)
 		var deserialized = decode(serialized)
 		assert.deepEqual(deserialized, data)
-		var tooBigInt = {
-			tooBig: 2n**66n
+		var evenBiggerInt = {
+			big: 2n**66n,
+			bigger: 53285732853728573289573289573289573289583725892358732859532n,
+			negBig: -93025879203578903275903285903285903289502n,
 		}
-		assert.throws(function(){ serialized = encode(tooBigInt) })
+		var serialized = encode(evenBiggerInt)
+		var deserialized = decode(serialized)
+		assert.deepEqual(deserialized, evenBiggerInt)
 		let encoder = new Encoder({
 			largeBigIntToFloat: true
 		})
-		serialized = encoder.encode(tooBigInt)
+		serialized = encoder.encode(evenBiggerInt)
 		deserialized = decode(serialized)
-		assert.isTrue(deserialized.tooBig > 2n**65n)
+		assert.isTrue(deserialized.bigger > 2n**65n)
 	})
 
 	test('buffers', function() {
@@ -720,6 +757,11 @@
 		let badInput = Buffer.from('7b2273657269616c6e6f223a2265343a30222c226970223a223139322e3136382e312e3335222c226b6579223a226770735f736563726574227d', 'hex');
 		assert.throws(function(){ decode(badInput) }) // should throw, not crash
 	})
+	test('buffer key', function() {
+		let encoder = new Encoder({ mapsAsObjects: false })
+		let test = encoder.decode(Buffer.from('D87982A1446E616D654361626301', 'hex'));
+		console.log(test);
+	});
 	test('encode as iterator', function() {
 		let hasIterables = {
 			a: 1,
@@ -812,6 +854,15 @@
 		let deserialized = decode(Buffer.concat(result));
 		console.log(performance.now() - start, result.length);
 	});
+
+	test('little-endian typed array with aligned data', function() {
+		// array[1] { uint32-little-endian-typed-array { bytes <00 00 00 00> } }
+		let data = new Uint8Array([ 129, 216, 70, 68, 0, 0, 0, 0 ]);
+		assert.deepEqual(decode(data), [new Uint32Array([0])]);
+
+		let value = {x: new Float32Array([1, 2, 3])};
+		assert.deepEqual(decode(encode(value)), value);
+	});
 })
 suite('CBOR performance tests', function(){
 	test('performance JSON.parse', function() {