diff --git a/README.md b/README.md
index 9992c97..4969f87 100644
--- a/README.md
+++ b/README.md
@@ -71,7 +71,7 @@ const schema = array({
   c: bits(5)
 });
 
-const encoder = new PackBytes(schema);
+const { encode, decode } = PackBytes(schema);
 ```
 ### Encode:
 ```javascript
@@ -81,7 +81,7 @@ const data = [
   { a: true, b: 3, c: 31 }
 ];
 
-const buf = encoder.encode(data);
+const buf = encode(data);
 
 // buf.length == 3, encoded to 3 bytes, 24x smaller than JSON.stringify(data) at 73 bytes
 
@@ -90,7 +90,7 @@ saveToDisk(buf);
 ```
 ### Decode:
 ```javascript
-const data = encoder.decode(buf);
+const data = decode(buf);
 
 console.log(data);
 // [
@@ -154,10 +154,10 @@ schema = schemas({ schema1: schema, schema2: schema, .. }) // multiple schemas m
 
 // create encoder by providing schema:
 // accepts schema object or JSON.stringify(schema) string for easy transfer from server to client:
-encoder = new PackBytes(schema)
+const { encode, decode } = PackBytes(schema)
 
-buf = encoder.encode(data) // encode data
-buf = encoder.encode(schema_name, data) // encode data with specific schema from 'schemas' type
+buf = encode(data) // encode data
+buf = encode(schema_name, data) // encode data with specific schema from 'schemas' type
 
-data = encoder.decode(buf) // decode, returns original data
+data = decode(buf) // decode, returns original data
 ```
diff --git a/packbytes.mjs b/packbytes.mjs
index 0e290b2..3d6e96a 100644
--- a/packbytes.mjs
+++ b/packbytes.mjs
@@ -1,313 +1,347 @@
-export class PackBytes {
-  constructor(schema) {
-    this.schema = PackBytes.parse(schema);
-    this.type(this.schema).init?.(this.schema);
-    this.dataview = new DataView(new ArrayBuffer(2 ** 14)); // 16,384
-  }
-  encode = (schema, data) => {
-    data = this.inputs(schema, data);
-    this.offset = 0;
-    this.type(this.schema).encode(this.schema, data);
-    return new Uint8Array(this.dataview.buffer, 0, this.offset);
-  }
-  decode = (buf) => {
-    this.offset = 0;
-    this.decodeDV = buf.buffer ? new DataView(buf.buffer, buf.byteOffset, buf.byteLength) : new DataView(buf);
-    return this.type(this.schema).decode(this.schema);
-  }
-  types = {
+export const PackBytes = (schema) => { // takes schema object or JSON.stringify(schema) string
+  const constructor = () => {
+    schema = parse(schema);
+    initialize(schema);
+    setEncodeBuffer(new ArrayBuffer(2 ** 14));
+  };
+  const encode = (schemaName, data) => { // takes (data) or (schemaName, data)
+    offset = 0;
+    data = parseInputs(schemaName, data);
+    encodeSchema(schema, data);
+    return new Uint8Array(encodeAB, 0, offset);
+  };
+  const decode = (buf) => { // takes Buffer, TypedArray, ArrayBuffer
+    offset = 0;
+    setDecodeBuffer(buf);
+    return decodeSchema(schema);
+  };
+  const types = {
     bool: {
-      encode: (schema, data = 0) => this.writeUint(data, 1),
-      decode: (schema) => Boolean(this.readUint(1)),
+      encode: (schema, data = 0) => writeUint(data, 1),
+      decode: (schema) => Boolean(readUint(1)),
       init: (schema) => {
-        schema.bits = 1;
+        schema.bits = 1; // schemas with "bits" field get packed into 32 bit spaces by packInts() if schema is child of object, skipping encode() fn
+        schema.maxInt = 1;
         schema.bool = true;
       },
     },
     bits: {
-      encode: (schema, data = 0) => this.writeUint(Math.max(0, Math.min(data, schema.max)), schema.bytes),
-      decode: (schema) => this.readUint(schema.bytes),
-      init: (schema, objSchema) => {
-        if (objSchema) schema.bits = schema.val;
-        schema.bytes = Math.ceil((schema.val) / 8);
-        schema.max = 2**(schema.bytes * 8) - 1;
+      encode: (schema, data = 0) => {
+        if (!(data >= 0 && data <= schema.maxByteInt)) throw rangeError(schema, data);
+        writeUint(data, schema.bytes);
+      },
+      decode: (schema) => readUint(schema.bytes),
+      init: (schema) => {
+        if (!(schema.val >= 1 && schema.val <= 32)) throw TypeError(`bit size must be 1 - 32, received "${schema.val}"`);
+        schema.bits = schema.val;
+        schema.bytes = Math.ceil(schema.bits / 8);
+        schema.maxInt = 2**schema.bits - 1;
+        schema.maxByteInt = 2**(schema.bytes * 8) - 1;
       },
     },
     float: {
-      encode: (schema, data = 0) => this.writeFloat(data, schema.bytes),
-      decode: (schema) => this.readFloat(schema.bytes),
-      init: (schema) => schema.bytes = schema.val / 8,
+      encode: (schema, data = 0) => writeFloat(data, schema.bytes),
+      decode: (schema) => readFloat(schema.bytes),
+      init: (schema) => {
+        if (schema.val != 32 && schema.val != 64) throw TypeError(`float must be 32 or 64 bit, received "${schema.val}"`);
+        schema.bytes = schema.val / 8;
+      },
     },
     varint: {
-      encode: (schema, data = 0) => this.writeVarInt(data),
-      decode: (schema) => this.readVarInt(),
+      encode: (schema, data = 0) => {
+        if (!(data >= 0 && data <= schema.maxByteInt)) throw rangeError(schema, data);
+        writeVarInt(data);
+      },
+      decode: (schema) => readVarInt(),
+      init: (schema) => schema.maxByteInt = 2**30 - 1,
     },
     string: {
-      encode: (schema, data = '') => schema.map ? this.writeUint(schema.map.values[data], schema.map.bytes) : this.writeString(data),
-      decode: (schema) => schema.map ? schema.map.index[this.readUint(schema.map.bytes)] : this.readString(),
+      encode: (schema, data = '') => {
+        if (schema.map) {
+          const int = schema.map.values[data];
+          if (int === undefined) throw RangeError(`field "${schema[fieldName]}" with string "${data}" not found in [${schema.map.index}]`);
+          writeUint(int, schema.map.bytes);
+        } else writeString(data);
+      },
+      decode: (schema) => schema.map ? schema.map.index[readUint(schema.map.bytes)] : readString(),
       init: (schema) => {
         if (schema.val) {
-          schema.map = PackBytes.genMap(schema.val);
+          if (!schema.val.length) throw TypeError(`schema string(value) must be array of strings`);
+          schema.map = genMap(schema.val);
           schema.bits = schema.map.bits;
+          schema.maxInt = 2**schema.bits - 1;
         }
       },
     },
     blob: {
-      encode: (schema, data = PackBytes.defaultBlob) => this.writeBlob(data, schema.val),
-      decode: (schema) => this.readBlob(schema.val),
+      encode: (schema, data = defaultBlob) => writeBlob(data, schema.val),
+      decode: (schema) => readBlob(schema.val),
     },
     objectid: {
-      encode: (schema, data = PackBytes.defaultObjectID) => this.writeBlob(data.id, 12),
-      decode: (schema) => PackBytes.uint8arrayToHex(this.readBlob(12)),
+      encode: (schema, data = defaultObjectID) => writeBlob(data.id, 12),
+      decode: (schema) => uint8arrayToHex(readBlob(12)),
     },
     uuid: {
-      encode: (schema, data = PackBytes.defaultUUID) => this.writeBlob(data.buffer, 16),
-      decode: (schema) => this.readBlob(16),
+      encode: (schema, data = defaultUUID) => writeBlob(data.buffer, 16),
+      decode: (schema) => readBlob(16),
    },
     date: {
-      encode: (schema, data = PackBytes.defaultDate) => {
+      encode: (schema, data = defaultDate) => {
         const seconds = Math.floor(data.getTime() / 1000);
         if (data < 0 || seconds > 4_294_967_295) throw Error(`date ${date} outside range ${new Date(0)} - ${new Date(4294967295000)}`);
-        this.writeUint(seconds, 4);
+        writeUint(seconds, 4);
       },
-      decode: (schema) => new Date(this.readUint(4) * 1000),
+      decode: (schema) => new Date(readUint(4) * 1000),
     },
     lonlat: {
-      encode: (schema, data = PackBytes.defaultLonlat) => {
-        this.writeUint((data[0] + 180) * 1e7, 4);
-        this.writeUint((data[1] + 90) * 1e7, 4);
+      encode: (schema, data = defaultLonlat) => {
+        writeUint((data[0] + 180) * 1e7, 4);
+        writeUint((data[1] + 90) * 1e7, 4);
       },
-      decode: (schema) => [ this.readUint(4) / 1e7 - 180, this.readUint(4) / 1e7 - 90 ],
+      decode: (schema) => [ readUint(4) / 1e7 - 180, readUint(4) / 1e7 - 90 ],
     },
     array: {
       encode: (schema, data = []) => {
-        if (!schema._size) this.writeVarInt(data.length);
-        for (const item of data) this.type(schema.val).encode(schema.val, item);
+        if (!schema._size) writeVarInt(data.length);
+        for (const item of data) encodeSchema(schema.val, item);
       },
       decode: (schema) => {
         const arr = [];
-        const length = schema._size || this.readVarInt();
+        const length = schema._size || readVarInt();
         for (let i = length; i > 0; i--) {
-          const x = this.type(schema.val).decode(schema.val)
+          const x = decodeSchema(schema.val);
           arr.push(x);
         }
         return arr;
       },
-      init: (schema) => this.type(schema.val).init?.(schema.val),
+      init: (schema) => initialize(schema.val),
     },
     schemas: {
       encode: (schema, data) => {
         const index = schema.map.values[data[0]];
         if (index === undefined) throw Error(`Packbytes: schema "${data[0]}" not found in ${JSON.stringify(schema.map.index)}`);
-        this.writeVarInt(index);
+        writeVarInt(index);
         const dataSchema = schema.val[data[0]];
-        this.type(dataSchema).encode(dataSchema, data[1]);
+        encodeSchema(dataSchema, data[1]);
       },
       decode: (schema) => {
-        const name = schema.map.index[this.readVarInt()];
+        const name = schema.map.index[readVarInt()];
         const dataSchema = schema.val[name];
-        return [ name, this.type(dataSchema).decode(dataSchema) ];
+        return [ name, decodeSchema(dataSchema) ];
       },
       init: (schema) => {
-        schema.map = PackBytes.genMap(Object.keys(schema.val));
-        Object.values(schema.val).forEach(schema => this.type(schema).init?.(schema));
+        schema.map = genMap(Object.keys(schema.val));
+        Object.values(schema.val).forEach(schema => initialize(schema));
       }
     },
     object: {
       encode: (schema, data) => {
-        const o = schema[PackBytes.objSchema];
+        const o = schema[objSchema];
         if (o) {
-          PackBytes.setData(schema, data); // attaches bits data to schema
-          if (o.int8.length) for (const ints of o.int8) this.writeInts(1, ints);
-          if (o.int16.length) for (const ints of o.int16) this.writeInts(2, ints);
-          if (o.int32.length) for (const ints of o.int32) this.writeInts(4, ints);
+          setData(schema, data); // attaches bits data to schema
+          if (o.int8.length) for (const ints of o.int8) writeInts(1, ints);
+          if (o.int16.length) for (const ints of o.int16) writeInts(2, ints);
+          if (o.int32.length) for (const ints of o.int32) writeInts(4, ints);
         }
         for (const field in schema) {
           const childSchema = schema[field];
           const childData = data[field];
-          if (!childSchema.bits) this.type(childSchema).encode(childSchema, childData);
+          if (!childSchema.bits) encodeSchema(childSchema, childData);
         }
       },
       decode: (schema) => {
-        const obj = {}, o = schema[PackBytes.objSchema];
+        const obj = {}, o = schema[objSchema];
         if (o) {
-          if (o.int8.length) for (const ints of o.int8) this.readInts(1, ints);
-          if (o.int16.length) for (const ints of o.int16) this.readInts(2, ints);
-          if (o.int32.length) for (const ints of o.int32) this.readInts(4, ints);
+          if (o.int8.length) for (const ints of o.int8) readInts(1, ints); // attaches decoded value to schema
+          if (o.int16.length) for (const ints of o.int16) readInts(2, ints);
+          if (o.int32.length) for (const ints of o.int32) readInts(4, ints);
         }
         for (const field in schema) {
           const childSchema = schema[field];
-          obj[field] = childSchema.decoded ?? this.type(childSchema).decode(childSchema);
+          obj[field] = childSchema.decoded ?? decodeSchema(childSchema);
         }
         return obj;
       },
-      init: (schema, objSchema) => {
-        const _objSchema = objSchema || (schema[PackBytes.objSchema] = PackBytes.newObjSchema());
+      init: (schema, parentObjSchema) => {
+        const o = parentObjSchema || (schema[objSchema] = newObjSchema()); // use parent objectSchema else create new objectSchema and attach to object
         for (const field in schema) {
           const childSchema = schema[field];
-          this.type(childSchema).init?.(childSchema, _objSchema);
-          if (childSchema.bits) _objSchema.ints.push(childSchema);
+          childSchema[fieldName] = field;
+          initialize(childSchema, o);
+          if (childSchema.bits) o.ints.push(childSchema);
         }
-        if (!objSchema && _objSchema.ints.length) PackBytes.packInts(_objSchema);
+        if (!parentObjSchema && o.ints.length) packInts(o); // packInts if current object has no parent object
       },
     },
     null: { encode: () => {}, decode: () => null },
-  }
-  static genMap(values) {
-    const bits = PackBytes.numberToBits(values.length - 1);
-    const z = {
-      bits,
-      bytes: Math.ceil(bits / 8),
-      index: values,
-      values: values.reduce((obj, v, i) => (obj[v] = i, obj), {})
-    }
-    return z;
-  }
-  static packInts(o) {
-    o.ints.sort((a, b) => b.bits - a.bits);
-    while (o.ints.length) {
-      let ints32 = [], remaining = 32;
-      for (let i = 0; i < o.ints.length; i++) {
-        if (o.ints[i].bits <= remaining) {
-          remaining -= o.ints[i].bits;
-          ints32.push(...o.ints.splice(i--, 1));
-          if (!remaining) break;
-        }
-      }
-      (remaining < 16 ? o.int32 : remaining < 24 ? o.int16 : o.int8).push(ints32);
-    }
-  }
-  static setData(schema, data) {
-    for (const field in schema) {
-      const childSchema = schema[field];
-      const childData = data[field];
-      if (childSchema.bits) childSchema.data = childData; // attaches data to schema
-      if (!childSchema._type) {
-        if (childData === undefined) throw Error(`Packbytes: no data for field "${field}"`);
-        PackBytes.setData(childSchema, childData);
-      }
-    }
-  }
-  writeInts(bytes, ints) {
-    let packed = 0;
-    for (const int of ints) {
-      packed <<= int.bits;
-      packed |= int.map ? int.map.values[int.data] : int.data;
-    }
-    this.writeUint(packed >>> 0, bytes);
   };
-  readInts(bytes, ints) {
-    let packed = this.readUint(bytes);
-    if (ints.length > 1) for (let i = ints.length - 1; i >= 0; i--) {
-      const val = packed % (1 << ints[i].bits);
-      ints[i].decoded = ints[i].bool ? Boolean(val) : ints[i].map?.index[val] ?? val;
-      packed >>>= ints[i].bits;
-    } else ints[0].decoded = ints[0].bool ? Boolean(packed) : ints[0].map?.index[packed] ?? packed;
-  }
-  readString() {
-    const length = this.readVarInt();
-    const str = length ? PackBytes.textDecoder.decode(new Uint8Array(this.decodeDV.buffer, this.offset, length)) : '';
-    this.offset += length;
+  const type = (schema) => types[schema ? schema._type || 'object' : 'null'];
+  const initialize = (schema, objSchema) => type(schema).init?.(schema, objSchema);
+  const parseInputs = (schemaName, data) => data ? [ schemaName, data ] : schemaName;
+  const encodeSchema = (schema, data) => type(schema).encode(schema, data);
+  const decodeSchema = (schema, data) => type(schema).decode(schema);
+  const setEncodeBuffer = (arrayBuffer) => {
+    encodeAB = arrayBuffer;
+    encodeDV = new DataView(arrayBuffer);
+    encodeUA = new Uint8Array(arrayBuffer);
+  };
+  const setDecodeBuffer = (buf) => {
+    decodeDV = buf.buffer ? new DataView(buf.buffer, buf.byteOffset, buf.byteLength) : new DataView(buf);
+    decodeUA = buf.buffer ? new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength) : new Uint8Array(buf);
+  };
+  const readString = () => {
+    const length = readVarInt();
+    const str = length ? textDecoder.decode(decodeUA.subarray(offset, offset + length)) : '';
+    offset += length;
     return str;
-  }
-  writeString(str) {
-    const uint8array = PackBytes.textEncoder.encode(str);
-    this.writeVarInt(uint8array.length);
-    this.checkSize(uint8array.length);
-    new Uint8Array(this.dataview.buffer, this.offset, uint8array.length).set(uint8array);
-    this.offset += uint8array.length;
-  }
-  readBlob(bytes) {
-    const length = bytes || this.readVarInt();
-    const blob = new Uint8Array(this.decodeDV.buffer, this.offset, length);
-    this.offset += length;
+  };
+  const writeString = (str) => {
+    const uint8array = textEncoder.encode(str);
+    writeVarInt(uint8array.length);
+    checkSize(uint8array.length);
+    new Uint8Array(encodeDV.buffer, offset, uint8array.length).set(uint8array);
+    offset += uint8array.length;
+  };
+  const readBlob = (bytes) => {
+    const length = bytes || readVarInt();
+    const blob = decodeUA.subarray(offset, offset + length);
+    offset += length;
     return blob;
-  }
-  writeBlob(buf, bytes) { // takes Buffer, TypedArray
+  };
+  const writeBlob = (buf, bytes) => { // takes TypedArray, Buffer, ArrayBuffer
+    if (buf.byteLength === undefined) throw TypeError(`writeBlob() expected TypedArray, Buffer, or ArrayBuffer, received "${buf}"`);
+    if (!buf.buffer) buf = new Uint8Array(buf); // ArrayBuffer
     const length = bytes || buf.byteLength;
-    if (bytes) {
-      if (buf.byteLength > bytes) buf = buf.subarray(0, bytes);
-      else if (buf.byteLength < bytes) { // fill zero
-        const newBuf = new Uint8Array(bytes);
-        newBuf.set(buf);
-        buf = newBuf;
-      }
-    } else this.writeVarInt(length);
-    this.checkSize(length);
-    new Uint8Array(this.dataview.buffer, this.offset, length).set(buf);
-    this.offset += length;
-  }
-  readUint(bytes) {
-    var int = this.decodeDV[({ 1: 'getUint8', 2: 'getUint16', 4: 'getUint32' })[bytes]](this.offset);
-    this.offset += bytes;
+    if (!bytes) writeVarInt(length);
+    else if (buf.byteLength != bytes) throw RangeError(`buffer size mismatch: "${buf.byteLength}" != "${bytes}" for buffer "${buf}"`);
+    checkSize(length);
+    encodeUA.set(buf, offset);
+    offset += length;
+  };
+  const readUint = (bytes) => {
+    var int = decodeDV[({ 1: 'getUint8', 2: 'getUint16', 4: 'getUint32' })[bytes]](offset);
+    offset += bytes;
     return int;
-  }
-  writeUint(val, bytes) {
-    this.checkSize(bytes);
-    this.dataview[({ 1: 'setUint8', 2: 'setUint16', 4: 'setUint32' })[bytes]](this.offset, val);
-    this.offset += bytes;
-  }
-  readFloat(bytes) {
-    const float = this.decodeDV[({ 4: 'getFloat32', 8: 'getFloat64' })[bytes]](this.offset);
-    this.offset += bytes;
+  };
+  const writeUint = (val, bytes) => {
+    checkSize(bytes);
+    encodeDV[({ 1: 'setUint8', 2: 'setUint16', 4: 'setUint32' })[bytes]](offset, val);
+    offset += bytes;
+  };
+  const readFloat = (bytes) => {
+    const float = decodeDV[({ 4: 'getFloat32', 8: 'getFloat64' })[bytes]](offset);
+    offset += bytes;
     return float;
-  }
-  writeFloat(val, bytes) {
-    this.checkSize(bytes);
-    this.dataview[({ 4: 'setFloat32', 8: 'setFloat64'})[bytes]](this.offset, val);
-    this.offset += bytes;
-  }
-  readVarInt() {
-    let val = this.readUint(1);
+  };
+  const writeFloat = (val, bytes) => {
+    checkSize(bytes);
+    encodeDV[({ 4: 'setFloat32', 8: 'setFloat64'})[bytes]](offset, val);
+    offset += bytes;
+  };
+  const readVarInt = () => {
+    let val = readUint(1);
     if (val < 128) return val;
-    this.offset--; val = this.readUint(2);
+    offset--; val = readUint(2);
     if (!(val & 0b1000_0000)) return ((val & 0b111_1111_0000_0000) >> 1) | (val & 0b111_1111);
-    this.offset -= 2; val = this.readUint(4);
+    offset -= 2; val = readUint(4);
     return ((val & 0b111_1111_0000_0000_0000_0000_0000_0000) >> 1) | (val & 0b111_1111_1111_1111_1111_1111);
-  }
-  writeVarInt(int) {
-    if (int < 128) return this.writeUint(int, 1);
-    if (int < 16_384) return this.writeUint(((int & 0b11_1111_1000_0000) << 1) | (int & 0b111_1111) | 0b1000_0000_0000_0000, 2);
-    if (int < 1_073_741_824) return this.writeUint(((int & 0b11_1111_1000_0000_0000_0000_0000_0000) << 1) | (int & 0b111_1111_1111_1111_1111_1111) | 0b1000_0000_1000_0000_0000_0000_0000_0000, 4);
-    throw Error(`varInt max 1,073,741,823 exceeded: ${int}`);
-  }
-  checkSize(bytes) {
-    if (bytes + this.offset > this.dataview.byteLength) {
-      if (this.dataview.buffer.transfer) this.dataview = new DataView(this.dataview.buffer.transfer(this.dataview.byteLength * 2));
+  };
+  const writeVarInt = (int) => {
+    if (int <= 127) return writeUint(int, 1);
+    if (int <= 16_383) return writeUint(((int & 0b11_1111_1000_0000) << 1) | (int & 0b111_1111) | 0b1000_0000_0000_0000, 2);
+    if (int <= 1_073_741_823) return writeUint(((int & 0b11_1111_1000_0000_0000_0000_0000_0000) << 1) | (int & 0b111_1111_1111_1111_1111_1111) | 0b1000_0000_1000_0000_0000_0000_0000_0000, 4);
+    throw RangeError(`varInt max 1,073,741,823 exceeded: "${int}"`);
+  };
+  const readInts = (bytes, ints) => {
+    let packed = readUint(bytes);
+    if (ints.length > 1) for (let i = ints.length - 1; i >= 0; i--) {
+      const val = packed % (1 << ints[i].bits);
+      ints[i].decoded = ints[i].bool ? Boolean(val) : ints[i].map?.index[val] ?? val;
+      packed >>>= ints[i].bits;
+    } else ints[0].decoded = ints[0].bool ? Boolean(packed) : ints[0].map?.index[packed] ?? packed;
+  };
+  const writeInts = (bytes, ints) => {
+    let packed = 0;
+    for (const int of ints) {
+      const value = int.map ? int.map.values[int.data] : int.bool ? int.data ? 1 : 0 : int.data;
+      if (!(value >= 0 && value <= int.maxInt)) throw rangeError(int, value, int.maxInt);
+      packed <<= int.bits;
+      packed |= value;
+    }
+    writeUint(packed >>> 0, bytes);
+  };
+  const checkSize = (bytes) => {
+    if (bytes + offset > encodeAB.byteLength) {
+      if (encodeAB.transfer) setEncodeBuffer(encodeAB.transfer(encodeAB.byteLength * 2));
       else { // backwards compatible for <= Node v20
-        const arraybuffer = new ArrayBuffer(this.dataview.byteLength * 2);
-        new Uint8Array(arraybuffer).set(new Uint8Array(this.dataview.buffer));
-        this.dataview = new DataView(arraybuffer);
+        const uint8Array = encodeUA;
+        setEncodeBuffer(new ArrayBuffer(encodeAB.byteLength * 2));
+        encodeUA.set(uint8Array);
       }
-      this.checkSize(bytes);
+      checkSize(bytes);
     }
+  };
+
+  constructor();
+  return { encode, decode };
+  var offset, encodeAB, encodeDV, encodeUA, decodeDV, decodeUA;
++};
+
+const genMap = (values) => {
+  const bits = numberToBits(values.length - 1);
+  return {
+    bits,
+    bytes: Math.ceil(bits / 8),
+    index: values,
+    values: values.reduce((obj, v, i) => (obj[v] = i, obj), {}),
+  };
+};
+const packInts = (o) => {
+  o.ints.sort((a, b) => b.bits - a.bits);
+  while (o.ints.length) {
+    let ints32 = [], remaining = 32;
+    for (let i = 0; i < o.ints.length; i++) {
+      if (o.ints[i].bits <= remaining) {
+        remaining -= o.ints[i].bits;
+        ints32.push(...o.ints.splice(i--, 1));
+        if (!remaining) break;
+      }
+    }
+    (remaining < 16 ? o.int32 : remaining < 24 ? o.int16 : o.int8).push(ints32);
   }
-  type(schema) { return this.types[schema ? schema._type || 'object' : 'null']; }
-  inputs(schema, data) { return this.schema._type == 'schemas' ? [ schema, data ] : schema; }
-  static parse(schema) { return JSON.parse(typeof schema == 'string' ? schema : JSON.stringify(schema)); }
-  static numberToBits(num) { return Math.ceil(Math.log2(num + 1)) || 1; }
-  static newObjSchema() { return ({ ints: [], int8: [], int16: [], int32: [] }); }
-  static getVarIntSize(int) { return int < 128 ? 1 : int < 16_384 ? 2 : 4; }
-  static strByteLength(str = '') { let s = str.length; for (let i = str.length - 1; i >= 0; i--) { const code = str.charCodeAt(i); if (code > 0x7f && code <= 0x7ff) s++; else if (code > 0x7ff && code <= 0xffff) s += 2; if (code >= 0xDC00 && code <= 0xDFFF) i--; } return s; }
-  static strTotalLength(str = '') { const length = PackBytes.strByteLength(str); return length + PackBytes.getVarIntSize(length); }
-  static uint8arrayToHex(uint8) { return Array.from(uint8).map(a => PackBytes.byteToHex[a]).join(''); }
-  static byteToHex = Array.from(Array(256)).map((a, i) => i.toString(16).padStart(2, '0'));
-  static objSchema = Symbol('objSchema');
-  static defaultBlob = new Uint8Array(0);
-  static defaultObjectID = { id: new Uint8Array(12) };
-  static defaultUUID = { buffer: new Uint8Array(16) };
-  static defaultDate = new Date(0);
-  static defaultLonlat = [ 0, 0 ];
-  static textEncoder = new TextEncoder();
-  static textDecoder = new TextDecoder();
-  static size(s) { this._size = s; return this; }
-  static genType(_type) {
-    const fn = val => ({ _type, val, size: PackBytes.size });
-    fn.toJSON = () => ({ _type });
-    fn._type = _type;
-    return fn;
+};
+const setData = (schema, data) => {
+  for (const field in schema) {
+    const childSchema = schema[field];
+    const childData = data[field];
+    if (childSchema.bits) childSchema.data = childData; // attaches data to schema
+    if (!childSchema._type) {
+      if (childData === undefined) throw Error(`Packbytes: no data for field "${field}"`);
+      setData(childSchema, childData);
+    }
   }
+};
+const parse = (schema) => JSON.parse(typeof schema == 'string' ? schema : JSON.stringify(schema));
+const numberToBits = (num) => Math.ceil(Math.log2(num + 1)) || 1;
+const newObjSchema = () => ({ ints: [], int8: [], int16: [], int32: [] });
+const getVarIntSize = (int) => int < 128 ? 1 : int < 16_384 ? 2 : 4;
+const uint8arrayToHex = (uint8) => uint8.reduce((hex, byte) => hex + byte.toString(16).padStart(2, '0'), '');
+const fieldName = Symbol('fieldName');
+const objSchema = Symbol('objSchema');
+const defaultBlob = new Uint8Array(0);
+const defaultObjectID = { id: new Uint8Array(12) };
+const defaultUUID = { buffer: new Uint8Array(16) };
+const defaultDate = new Date(0);
+const defaultLonlat = [ 0, 0 ];
+const textEncoder = new TextEncoder();
+const textDecoder = new TextDecoder();
+const rangeError = (schema, data, max) => RangeError(`field "${schema[fieldName]}" with value "${data}" out of range [ 0 - ${max || schema.maxByteInt} ]`);
+const genType = (_type) => {
+  const fn = val => ({ _type, val, size: function(s) { this._size = s; return this; } });
+  fn.toJSON = () => ({ _type });
+  fn._type = _type;
+  return fn;
+};
 export const [ bool, bits, float, varint, string, blob, objectid, uuid, date, lonlat, array, schemas ] =
-  [ 'bool', 'bits', 'float', 'varint', 'string', 'blob', 'objectid', 'uuid', 'date', 'lonlat', 'array', 'schemas' ].map(PackBytes.genType);
+  [ 'bool', 'bits', 'float', 'varint', 'string', 'blob', 'objectid', 'uuid', 'date', 'lonlat', 'array', 'schemas' ].map(genType);
diff --git a/test/test.mjs b/test/test.mjs
index edb457b..45168a1 100644
--- a/test/test.mjs
+++ b/test/test.mjs
@@ -1,7 +1,6 @@
 import { bool, bits, string, array, float, blob, schemas, PackBytes } from '../packbytes.mjs';
 export const logs = [];
 const log = (...msg) => console.log(...msg) || logs.push(msg);
-const isNode = PackBytes.isNode;
 
 const tests = [
   { schema: bool, data: true },
@@ -11,11 +10,11 @@ const tests = [
   { schema: bits(8), data: 255 },
   { schema: bits(32), data: 4294967295 },
   { schema: string, data: 'str' },
-  { schema: string('str1', 'str2'), data: 'str2' },
+  { schema: string([ 'str1', 'str2' ]), data: 'str2' },
   //{ schema: float(32), data: 1.33 },
   //{ schema: float(64), data: 12345678.901234 },
-  { schema: blob, data: isNode ? Buffer.from([ 0, 1 ]) : new Uint8Array([ 0, 1 ]) },
-  { schema: blob(3), data: isNode ? Buffer.from([ 0, 1, 2 ]) : new Uint8Array([ 0, 1, 2 ]) },
+  { schema: blob, data: new Uint8Array([ 0, 1 ]) },
+  { schema: blob(3), data: new Uint8Array([ 0, 1, 2 ]) },
   { schema: array(bits(2)), data: [ 0, 1, 2, 3 ] },
   { schema: schemas({ s1: null, s2: bits(3) }), data: [ 's2', 3 ] },
 ];
@@ -41,15 +40,15 @@ let fail;
 tests.forEach((t, i) => {
   if (fail) return;
   const json = JSON.stringify(t.schema);
-  const encoder = new PackBytes(json);
+  const { encode, decode } = PackBytes(json);
   log('');
   log('TEST', i + 1);
-  log(json);
-  log(JSON.stringify(t.data));
+  log('schema:', json);
+  log('data:', JSON.stringify(t.data));
   try {
-    var buf = encoder.encode(t.data);
-    log(buf, buf.length || buf.byteLength);
-    var result = encoder.decode(buf);
+    var buf = encode(t.data);
+    log('buf:', buf, buf.length || buf.byteLength);
+    var result = decode(buf);
   } catch (e) {
     log('');
     log('FAIL:');
@@ -60,8 +59,8 @@ tests.forEach((t, i) => {
   if (JSON.stringify(result) == JSON.stringify(t.data)) return;
   log('');
   log('FAIL:');
-  log(JSON.stringify(t.data));
-  log(JSON.stringify(result));
+  log('data:', JSON.stringify(t.data));
+  log('result:', JSON.stringify(result));
   log('');
  fail = true;
 });
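
A minimal sketch of the new call style for anyone trying this branch locally, mirroring the README example changed above (Node ESM and a local `packbytes.mjs` import path are assumed):

```javascript
// Sketch only: mirrors the README example; the import path is an assumption.
import { PackBytes, array, bool, bits } from './packbytes.mjs';

const schema = array({ a: bool, b: bits(2), c: bits(5) }); // 1 + 2 + 5 bits pack into one byte per item
const { encode, decode } = PackBytes(schema);              // no `new`; returns { encode, decode }

const buf = encode([ { a: true, b: 3, c: 31 } ]);          // Uint8Array: 1 varint length byte + 1 packed byte
console.log(decode(buf));                                  // [ { a: true, b: 3, c: 31 } ]
```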
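When the top-level schema is a `schemas` map, `encode` takes the target schema name as its first argument, as documented in the README; the schema names below are hypothetical:

```javascript
// Sketch only: 'chat' and 'move' are made-up schema names for illustration.
import { PackBytes, schemas, string, bits } from './packbytes.mjs';

const { encode, decode } = PackBytes(schemas({
  chat: { user: string, msg: string },
  move: bits(9),
}));

const buf = encode('move', 257); // 1 varint byte for the schema index + 2 bytes for bits(9)
console.log(decode(buf));        // [ 'move', 257 ]
```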