Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

More interpreter optimizations #107

Closed
wants to merge 28 commits into from
Closed
Changes from all commits
Commits
Show all changes
28 commits
Select commit Hold shift + click to select a range
1c5d3d1
Trying to optimize interpreter
Saiv46 Jun 4, 2020
a8419b9
Merge branch 'master' of https://github.com/ProtoDef-io/node-protodef
Saiv46 Jun 13, 2020
019bb99
Revert "Merge branch 'master' of https://github.com/ProtoDef-io/node-…
Saiv46 Jun 13, 2020
e08e58e
Revert "Revert "Merge branch 'master' of https://github.com/ProtoDef-…
Saiv46 Jun 13, 2020
a68af84
Refactored things
Saiv46 Jun 13, 2020
ee00fca
Merge branch 'patch-1'
Saiv46 Jun 13, 2020
ee6b8b3
Implemented removeType & organizing changes
Saiv46 Jun 13, 2020
43f7401
Implemented `createEncoding` (closes #69)
Saiv46 Jun 13, 2020
2a34b91
Removed `DATATYPE_NOCOPY`
Saiv46 Jun 13, 2020
63de431
Removed DATATYPE_NOCOPY constant
Saiv46 Jun 15, 2020
3d168b0
Revert "Removed `DATATYPE_NOCOPY`"
Saiv46 Jun 16, 2020
b444547
Update interpreter.js
Saiv46 Jun 16, 2020
ec4c9db
Merge branch 'master' of https://github.com/Saiv46/node-protodef
Saiv46 Jun 16, 2020
48220c8
fix(interpreter/structures): Bugfix
Saiv46 Jun 17, 2020
932809e
fix(interpreter.js): Continuing experiment with DATATYPE_NOCOPY
Saiv46 Jun 17, 2020
dc47292
Lint quickfix
Saiv46 Jun 17, 2020
f1610e1
Optimize `constructProduceArgs`
Saiv46 Jan 3, 2021
9b6285c
Optimize encoding and `tryDoc`
Saiv46 Jan 3, 2021
84c1fad
Update serializer.js
Saiv46 Jan 3, 2021
86f3ba9
Optimize type extension
Saiv46 Jan 3, 2021
21f81af
Using `Result` struct
Saiv46 Jan 3, 2021
f927c8b
Using upstream
Saiv46 Jan 3, 2021
90288f1
Removed `DATATYPE_NOCOPY`
Saiv46 Jan 3, 2021
8dadd50
Refactor
Saiv46 Jan 3, 2021
88eb235
Refactor
Saiv46 Jan 3, 2021
359a3c6
More bound functions optimizations
Saiv46 Jan 3, 2021
3df8aca
Update utils.js
Saiv46 Jan 3, 2021
fcedeed
Update utils.js
Saiv46 Jan 3, 2021
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 0 additions & 2 deletions .npmignore

This file was deleted.

11 changes: 9 additions & 2 deletions package.json
Original file line number Diff line number Diff line change
@@ -4,6 +4,7 @@
"description": "A simple yet powerful way to define binary protocols",
"main": "index.js",
"author": "roblabla <robinlambertz.dev@gmail.com>",
"sideEffects": false,
"scripts": {
"prepare": "require-self",
"lint": "standard",
@@ -15,13 +16,14 @@
"tonicExampleFilename": "example.js",
"license": "MIT",
"dependencies": {
"lodash.clonedeep": "^4.5.0",
"lodash.get": "^4.4.2",
"lodash.reduce": "^4.6.0",
"protodef-validator": "^1.2.2",
"readable-stream": "^3.0.3"
},
"engines": {
"node": ">=6"
"node": ">=12"
},
"bugs": {
"url": "https://github.com/ProtoDef-io/node-protodef/issues"
@@ -38,5 +40,10 @@
"mocha": "^5.2.0",
"require-self": "^0.1.0",
"standard": "^12.0.1"
}
},
"files": [
"src/",
"ProtoDef/schemas/",
"*.js"
]
}
271 changes: 151 additions & 120 deletions src/compiler.js

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
@@ -1,6 +1,10 @@
const {
Enum: { CompilerTypeKind: { PARAMETRIZABLE } }
} = require('../../utils')

module.exports = {
Read: {
'switch': ['parametrizable', (compiler, struct) => {
'switch': [PARAMETRIZABLE, (compiler, struct) => {
let compare = struct.compareTo ? struct.compareTo : struct.compareToValue
let args = []
if (compare.startsWith('$')) args.push(compare)
@@ -17,19 +21,19 @@ module.exports = {
code += `}`
return compiler.wrapCode(code, args)
}],
'option': ['parametrizable', (compiler, type) => {
'option': [PARAMETRIZABLE, (compiler, type) => {
let code = 'const {value} = ctx.bool(buffer, offset)\n'
code += 'if (value) {\n'
code += ' const { value, size } = ' + compiler.callType(type, 'offset + 1') + '\n'
code += ' return { value, size: size + 1 }\n'
code += ' const result = ' + compiler.callType(type, 'offset + 1') + '\n'
code += ' return new Result(result.value, result.size + 1)\n'
code += '}\n'
code += 'return { value: undefined, size: 1}'
code += 'return new Result(undefined, 1)'
return compiler.wrapCode(code)
}]
},

Write: {
'switch': ['parametrizable', (compiler, struct) => {
'switch': [PARAMETRIZABLE, (compiler, struct) => {
let compare = struct.compareTo ? struct.compareTo : struct.compareToValue
let args = []
if (compare.startsWith('$')) args.push(compare)
@@ -46,7 +50,7 @@ module.exports = {
code += `}`
return compiler.wrapCode(code, args)
}],
'option': ['parametrizable', (compiler, type) => {
'option': [PARAMETRIZABLE, (compiler, type) => {
let code = 'if (value != null) {\n'
code += ' offset = ctx.bool(1, buffer, offset)\n'
code += ' offset = ' + compiler.callType('value', type) + '\n'
@@ -59,7 +63,7 @@ module.exports = {
},

SizeOf: {
'switch': ['parametrizable', (compiler, struct) => {
'switch': [PARAMETRIZABLE, (compiler, struct) => {
let compare = struct.compareTo ? struct.compareTo : struct.compareToValue
let args = []
if (compare.startsWith('$')) args.push(compare)
@@ -76,7 +80,7 @@ module.exports = {
code += `}`
return compiler.wrapCode(code, args)
}],
'option': ['parametrizable', (compiler, type) => {
'option': [PARAMETRIZABLE, (compiler, type) => {
let code = 'if (value != null) {\n'
code += ' return 1 + ' + compiler.callType('value', type) + '\n'
code += '}\n'
30 changes: 30 additions & 0 deletions src/datatypes/compiler/index.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
const { Enum: { CompilerTypeKind: { NATIVE } } } = require('../../utils')
const conditionalDatatypes = require('./conditional')
const structuresDatatypes = require('./structures')
const utilsDatatypes = require('./utils')
const sharedDatatypes = require('../shared')

module.exports = {
Read: {
...conditionalDatatypes.Read,
...structuresDatatypes.Read,
...utilsDatatypes.Read
},
Write: {
...conditionalDatatypes.Write,
...structuresDatatypes.Write,
...utilsDatatypes.Write
},
SizeOf: {
...conditionalDatatypes.SizeOf,
...structuresDatatypes.SizeOf,
...utilsDatatypes.SizeOf
}
}

for (const k in sharedDatatypes) {
const [ read, write, sizeOf ] = sharedDatatypes[k]
module.exports.Read[k] = [NATIVE, read]
module.exports.Write[k] = [NATIVE, write]
module.exports.SizeOf[k] = [NATIVE, sizeOf]
}
Original file line number Diff line number Diff line change
@@ -1,6 +1,10 @@
const {
Enum: { CompilerTypeKind: { PARAMETRIZABLE } }
} = require('../../utils')

module.exports = {
Read: {
'array': ['parametrizable', (compiler, array) => {
'array': [PARAMETRIZABLE, (compiler, array) => {
let code = ''
if (array.countType) {
code += 'const { value: count, size: countSize } = ' + compiler.callType(array.countType) + '\n'
@@ -10,21 +14,22 @@ module.exports = {
} else {
throw new Error('Array must contain either count or countType')
}
code += 'if (count > 0xffffff) throw new Error("array size is abnormally large, not reading: " + count)\n'
code += 'const data = []\n'
code += 'let size = countSize\n'
code += 'for (let i = 0; i < count; i++) {\n'
code += ' const elem = ' + compiler.callType(array.type, 'offset + size') + '\n'
code += ' data.push(elem.value)\n'
code += ' size += elem.size\n'
code += '}\n'
code += 'return { value: data, size }'
code += 'return new Result(data, size)'
return compiler.wrapCode(code)
}],
'count': ['parametrizable', (compiler, type) => {
'count': [PARAMETRIZABLE, (compiler, type) => {
let code = 'return ' + compiler.callType(type.type)
return compiler.wrapCode(code)
}],
'container': ['parametrizable', (compiler, values) => {
'container': [PARAMETRIZABLE, (compiler, values) => {
values = containerInlining(values)

let code = ''
@@ -56,13 +61,13 @@ module.exports = {
const sizes = offsetExpr.split(' + ')
sizes.shift()
if (sizes.length === 0) sizes.push('0')
code += 'return { value: { ' + names.join(', ') + ' }, size: ' + sizes.join(' + ') + '}'
code += 'return new Result({ ' + names.join(', ') + ' }, ' + sizes.join(' + ') + ')'
return compiler.wrapCode(code)
}]
},

Write: {
'array': ['parametrizable', (compiler, array) => {
'array': [PARAMETRIZABLE, (compiler, array) => {
let code = ''
if (array.countType) {
code += 'offset = ' + compiler.callType('value.length', array.countType) + '\n'
@@ -75,11 +80,11 @@ module.exports = {
code += 'return offset'
return compiler.wrapCode(code)
}],
'count': ['parametrizable', (compiler, type) => {
'count': [PARAMETRIZABLE, (compiler, type) => {
let code = 'return ' + compiler.callType('value', type.type)
return compiler.wrapCode(code)
}],
'container': ['parametrizable', (compiler, values) => {
'container': [PARAMETRIZABLE, (compiler, values) => {
values = containerInlining(values)
let code = ''
for (const i in values) {
@@ -106,7 +111,7 @@ module.exports = {
},

SizeOf: {
'array': ['parametrizable', (compiler, array) => {
'array': [PARAMETRIZABLE, (compiler, array) => {
let code = ''
if (array.countType) {
code += 'let size = ' + compiler.callType('value.length', array.countType) + '\n'
@@ -125,11 +130,11 @@ module.exports = {
code += 'return size'
return compiler.wrapCode(code)
}],
'count': ['parametrizable', (compiler, type) => {
'count': [PARAMETRIZABLE, (compiler, type) => {
let code = 'return ' + compiler.callType('value', type.type)
return compiler.wrapCode(code)
}],
'container': ['parametrizable', (compiler, values) => {
'container': [PARAMETRIZABLE, (compiler, values) => {
values = containerInlining(values)
let code = 'let size = 0\n'
for (const i in values) {
36 changes: 20 additions & 16 deletions src/datatypes/compiler-utils.js → src/datatypes/compiler/utils.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,10 @@
const {
Enum: { CompilerTypeKind: { PARAMETRIZABLE } }
} = require('../../utils')

module.exports = {
Read: {
'pstring': ['parametrizable', (compiler, string) => {
'pstring': [PARAMETRIZABLE, (compiler, string) => {
let code = ''
if (string.countType) {
code += 'const { value: count, size: countSize } = ' + compiler.callType(string.countType) + '\n'
@@ -14,10 +18,10 @@ module.exports = {
code += 'if (offset + count > buffer.length) {\n'
code += ' throw new PartialReadError("Missing characters in string, found size is " + buffer.length + " expected size was " + (offset + count))\n'
code += '}\n'
code += 'return { value: buffer.toString(\'utf8\', offset, offset + count), size: count + countSize }'
code += 'return new Result(buffer.toString(\'utf8\', offset, offset + count), count + countSize)'
return compiler.wrapCode(code)
}],
'buffer': ['parametrizable', (compiler, buffer) => {
'buffer': [PARAMETRIZABLE, (compiler, buffer) => {
let code = ''
if (buffer.countType) {
code += 'const { value: count, size: countSize } = ' + compiler.callType(buffer.countType) + '\n'
@@ -31,10 +35,10 @@ module.exports = {
code += 'if (offset + count > buffer.length) {\n'
code += ' throw new PartialReadError()\n'
code += '}\n'
code += 'return { value: buffer.slice(offset, offset + count), size: count + countSize }'
code += 'return new Result(buffer.slice(offset, offset + count), count + countSize)'
return compiler.wrapCode(code)
}],
'bitfield': ['parametrizable', (compiler, values) => {
'bitfield': [PARAMETRIZABLE, (compiler, values) => {
let code = ''
const totalBytes = Math.ceil(values.reduce((acc, { size }) => acc + size, 0) / 8)
code += `if ( offset + ${totalBytes} > buffer.length) { throw new PartialReadError() }\n`
@@ -55,18 +59,18 @@ module.exports = {
if (name === trueName) names.push(name)
else names.push(`${name}: ${trueName}`)
}
code += 'return { value: { ' + names.join(', ') + ` }, size: ${totalBytes} }`
code += 'return new Result({ ' + names.join(', ') + ` }, ${totalBytes})`
return compiler.wrapCode(code)
}],
'mapper': ['parametrizable', (compiler, mapper) => {
'mapper': [PARAMETRIZABLE, (compiler, mapper) => {
let code = 'const { value, size } = ' + compiler.callType(mapper.type) + '\n'
code += 'return { value: ' + JSON.stringify(sanitizeMappings(mapper.mappings)) + '[value], size }'
code += 'return new Result(' + JSON.stringify(sanitizeMappings(mapper.mappings)) + '[value], size)'
return compiler.wrapCode(code)
}]
},

Write: {
'pstring': ['parametrizable', (compiler, string) => {
'pstring': [PARAMETRIZABLE, (compiler, string) => {
let code = 'const length = Buffer.byteLength(value, \'utf8\')\n'
if (string.countType) {
code += 'offset = ' + compiler.callType('length', string.countType) + '\n'
@@ -77,7 +81,7 @@ module.exports = {
code += 'return offset + length'
return compiler.wrapCode(code)
}],
'buffer': ['parametrizable', (compiler, buffer) => {
'buffer': [PARAMETRIZABLE, (compiler, buffer) => {
let code = ''
if (buffer.countType) {
code += 'offset = ' + compiler.callType('value.length', buffer.countType) + '\n'
@@ -88,7 +92,7 @@ module.exports = {
code += 'return offset + value.length'
return compiler.wrapCode(code)
}],
'bitfield': ['parametrizable', (compiler, values) => {
'bitfield': [PARAMETRIZABLE, (compiler, values) => {
let toWrite = ''
let bits = 0
let code = ''
@@ -116,15 +120,15 @@ module.exports = {
code += 'return offset'
return compiler.wrapCode(code)
}],
'mapper': ['parametrizable', (compiler, mapper) => {
'mapper': [PARAMETRIZABLE, (compiler, mapper) => {
const mappings = JSON.stringify(swapMappings(mapper.mappings))
const code = 'return ' + compiler.callType(`${mappings}[value]`, mapper.type)
return compiler.wrapCode(code)
}]
},

SizeOf: {
'pstring': ['parametrizable', (compiler, string) => {
'pstring': [PARAMETRIZABLE, (compiler, string) => {
let code = 'let size = Buffer.byteLength(value, \'utf8\')\n'
if (string.countType) {
code += 'size += ' + compiler.callType('size', string.countType) + '\n'
@@ -134,7 +138,7 @@ module.exports = {
code += 'return size'
return compiler.wrapCode(code)
}],
'buffer': ['parametrizable', (compiler, buffer) => {
'buffer': [PARAMETRIZABLE, (compiler, buffer) => {
let code = 'let size = value.length\n'
if (buffer.countType) {
code += 'size += ' + compiler.callType('size', buffer.countType) + '\n'
@@ -144,11 +148,11 @@ module.exports = {
code += 'return size'
return compiler.wrapCode(code)
}],
'bitfield': ['parametrizable', (compiler, values) => {
'bitfield': [PARAMETRIZABLE, (compiler, values) => {
const totalBytes = Math.ceil(values.reduce((acc, { size }) => acc + size, 0) / 8)
return `${totalBytes}`
}],
'mapper': ['parametrizable', (compiler, mapper) => {
'mapper': [PARAMETRIZABLE, (compiler, mapper) => {
const mappings = JSON.stringify(swapMappings(mapper.mappings))
const code = 'return ' + compiler.callType(`${mappings}[value]`, mapper.type)
return compiler.wrapCode(code)
56 changes: 0 additions & 56 deletions src/datatypes/conditional.js

This file was deleted.

70 changes: 70 additions & 0 deletions src/datatypes/interpreter/conditional.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
const { getField, getFieldInfo, tryDoc, PartialReadError, Result } = require('../../utils')
const schema = require('../../../ProtoDef/schemas/conditional.json')

/**
 * Reads the field selected by a `switch`.
 * The discriminant is either the literal `compareToValue` or the value of the
 * `compareTo` field resolved against `rootNode`. Falls back to `default` when
 * no branch matches.
 * Fixes: previously this wrote `fields['default'] = defVal` (even when defVal
 * was undefined, polluting the shared typeArgs object) before throwing, and
 * the error message reported 'default' instead of the missing discriminant.
 */
function readSwitch (buffer, offset, { compareTo, fields, compareToValue, 'default': defVal }, rootNode) {
  compareTo = compareToValue !== undefined ? compareToValue : getField(compareTo, rootNode)
  let matched = fields[compareTo]
  if (matched === undefined) {
    if (defVal === undefined) {
      throw new Error(`${compareTo} has no associated fieldInfo in switch`)
    }
    matched = defVal
    compareTo = 'default'
  }
  const fieldInfo = getFieldInfo(matched)
  return tryDoc(this.read.bind(this, buffer, offset, fieldInfo, rootNode), compareTo)
}

/**
 * Writes the field selected by a `switch`; returns the new offset.
 * Mirrors readSwitch: resolves the discriminant, falls back to `default`.
 * Fixes: no longer mutates the shared `fields` object before a potential
 * throw, and the error message reports the actual missing discriminant.
 */
function writeSwitch (value, buffer, offset, { compareTo, fields, compareToValue, 'default': defVal }, rootNode) {
  compareTo = compareToValue !== undefined ? compareToValue : getField(compareTo, rootNode)
  let matched = fields[compareTo]
  if (matched === undefined) {
    if (defVal === undefined) {
      throw new Error(`${compareTo} has no associated fieldInfo in switch`)
    }
    matched = defVal
    compareTo = 'default'
  }
  const fieldInfo = getFieldInfo(matched)
  return tryDoc(this.write.bind(this, value, buffer, offset, fieldInfo, rootNode), compareTo)
}

/**
 * Sizes the field selected by a `switch`.
 * Mirrors readSwitch/writeSwitch: resolves the discriminant, falls back to
 * `default`. Fixes: no longer mutates the shared `fields` object before a
 * potential throw, and the error message reports the actual discriminant.
 */
function sizeOfSwitch (value, { compareTo, fields, compareToValue, 'default': defVal }, rootNode) {
  compareTo = compareToValue !== undefined ? compareToValue : getField(compareTo, rootNode)
  let matched = fields[compareTo]
  if (matched === undefined) {
    if (defVal === undefined) {
      throw new Error(`${compareTo} has no associated fieldInfo in switch`)
    }
    matched = defVal
    compareTo = 'default'
  }
  const fieldInfo = getFieldInfo(matched)
  return tryDoc(this.sizeOf.bind(this, value, fieldInfo, rootNode), compareTo)
}

/**
 * Reads an `option`: one presence byte, then the wrapped type when present.
 * Returns a Result whose size includes the presence byte.
 */
function readOption (buffer, offset, typeArgs, context) {
  if (offset >= buffer.length) throw new PartialReadError()
  const flag = buffer.readUInt8(offset)
  if (flag === 0) return new Result(undefined, 1)
  const result = this.read(buffer, offset + 1, typeArgs, context)
  result.size++
  return result
}

/**
 * Writes an `option`: a 0/1 presence byte, then the payload when present.
 * Returns the new offset.
 */
function writeOption (value, buffer, offset, typeArgs, context) {
  if (value == null) {
    buffer.writeUInt8(0, offset)
    return offset + 1
  }
  buffer.writeUInt8(1, offset)
  return this.write(value, buffer, offset + 1, typeArgs, context)
}

/**
 * Sizes an `option`: one presence byte, plus the payload size when present.
 */
function sizeOfOption (value, typeArgs, context) {
  if (value == null) return 1
  return this.sizeOf(value, typeArgs, context) + 1
}

// Each entry: [read, write, sizeOf, validation schema].
module.exports = {
  'switch': [readSwitch, writeSwitch, sizeOfSwitch, schema['switch']],
  'option': [readOption, writeOption, sizeOfOption, schema['option']]
}
11 changes: 11 additions & 0 deletions src/datatypes/interpreter/index.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
const conditionalDatatypes = require('./conditional')
const structuresDatatypes = require('./structures')
const utilsDatatypes = require('./utils')
const sharedDatatypes = require('../shared')

module.exports = {
...conditionalDatatypes,
...structuresDatatypes,
...utilsDatatypes,
...sharedDatatypes
}
90 changes: 90 additions & 0 deletions src/datatypes/interpreter/structures.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,90 @@
const { getField, getCount, sendCount, calcCount, tryDoc, Enum: { ParentSymbol }, Result } = require('../../utils')
const schema = require('../../../ProtoDef/schemas/structures.json')

/**
 * Reads an `array`: a count (prefix or referenced field, via getCount),
 * followed by that many elements of typeArgs.type.
 * Returns a Result whose size includes the count prefix.
 */
function readArray (buffer, offset, typeArgs, rootNode) {
  const counted = getCount.call(this, buffer, offset, typeArgs, rootNode)
  const items = []
  let cursor = offset + counted.size
  let total = counted.size
  for (let index = 0; index < counted.count; index++) {
    // tryDoc annotates any decode error with the element index.
    const element = tryDoc(this.read.bind(this, buffer, cursor, typeArgs.type, rootNode), index)
    items.push(element.value)
    cursor += element.size
    total += element.size
  }
  return new Result(items, total)
}

/**
 * Writes an `array`: the length prefix (via sendCount), then each element.
 * Returns the new offset.
 */
function writeArray (value, buffer, offset, typeArgs, rootNode) {
  let cursor = sendCount.call(this, value.length, buffer, offset, typeArgs, rootNode)
  value.forEach((element, index) => {
    // tryDoc annotates any encode error with the element index.
    cursor = tryDoc(this.write.bind(this, element, buffer, cursor, typeArgs.type, rootNode), index)
  })
  return cursor
}

/**
 * Sizes an `array`: the length-prefix size (via calcCount) plus the sizes
 * of all elements.
 */
function sizeOfArray (value, typeArgs, rootNode) {
  const prefix = calcCount.call(this, value.length, typeArgs, rootNode)
  return value.reduce(
    (total, element, index) =>
      total + tryDoc(this.sizeOf.bind(this, element, typeArgs.type, rootNode), index),
    prefix
  )
}

/**
 * Reads a `count`: simply delegates to the underlying numeric type.
 */
function readCount (buffer, offset, typeArgs, rootNode) {
  return this.read(buffer, offset, typeArgs.type, rootNode)
}

// Writes the length of the field named by `countFor` (looked up in rootNode)
// using the numeric `type`. The `value` parameter is ignored by design.
function writeCount (value, buffer, offset, { countFor, type }, rootNode) {
  // Actually gets the required field, and writes its length. Value is unused.
  // TODO : a bit hackityhack.
  return this.write(getField(countFor, rootNode).length, buffer, offset, type, rootNode)
}

// Sizes a `count` using the referenced field's length (mirrors writeCount);
// `value` is ignored here too.
function sizeOfCount (value, { countFor, type }, rootNode) {
  // TODO : should I use value or getField().length ?
  return this.sizeOf(getField(countFor, rootNode).length, type, rootNode)
}

// Decodes a sequence of named fields into one object. The parent context is
// temporarily attached under ParentSymbol so nested reads can resolve
// references to outer fields.
function readContainer (buffer, offset, typeArgs, context) {
  const value = { [ParentSymbol]: context }
  let size = 0
  for (const { type, name, anon } of typeArgs) {
    // tryDoc annotates decode errors with the field name.
    const res = tryDoc(this.read.bind(this, buffer, offset, type, value), name || 'unknown')
    size += res.size
    offset += res.size
    // Anonymous fields are flattened: their keys are merged into this container.
    if (anon && res.value !== undefined) {
      for (const k in res.value) {
        value[k] = res.value[k]
      }
      continue
    }
    value[name] = res.value
  }
  // Detach the parent link; set to undefined rather than `delete` —
  // presumably to keep the object's shape stable (TODO confirm intent).
  value[ParentSymbol] = undefined
  return new Result(value, size)
}

// Encodes each named field in order; returns the new offset.
// NOTE: temporarily mutates the caller's `value` with a ParentSymbol link so
// nested writes can resolve outer fields, then clears it.
function writeContainer (value, buffer, offset, typeArgs, context) {
  value[ParentSymbol] = context
  for (const { type, name, anon } of typeArgs) {
    // Anonymous fields serialize the container itself instead of a sub-object.
    offset = tryDoc(this.write.bind(this, anon ? value : value[name], buffer, offset, type, value), name || 'unknown')
  }
  value[ParentSymbol] = undefined
  return offset
}

// Sums the sizes of all fields; mirrors writeContainer, including the
// temporary ParentSymbol link on the caller's `value`.
function sizeOfContainer (value, typeArgs, context) {
  value[ParentSymbol] = context
  let size = 0
  for (const { type, name, anon } of typeArgs) {
    size += tryDoc(this.sizeOf.bind(this, anon ? value : value[name], type, value), name || 'unknown')
  }
  value[ParentSymbol] = undefined
  return size
}

// Each entry: [read, write, sizeOf, validation schema].
module.exports = {
  'array': [readArray, writeArray, sizeOfArray, schema['array']],
  'count': [readCount, writeCount, sizeOfCount, schema['count']],
  'container': [readContainer, writeContainer, sizeOfContainer, schema['container']]
}
141 changes: 141 additions & 0 deletions src/datatypes/interpreter/utils.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,141 @@
const { getCount, sendCount, calcCount, PartialReadError, Result } = require('../../utils')
const schema = require('../../../ProtoDef/schemas/utils.json')

/**
 * Reads a `mapper`: decodes the raw key with the underlying type, then
 * translates it through the mappings table. Keys may be strings or numbers,
 * hence the loose numeric fallback comparison.
 */
function readMapper (buffer, offset, { type, mappings }, rootNode) {
  const raw = this.read(buffer, offset, type, rootNode)
  const value = raw.value
  for (const key of Object.keys(mappings)) {
    if (key === value || +key === +value) {
      return new Result(mappings[key], raw.size)
    }
  }
  throw new Error(`${typeof value} "${value}" is not in the mappings value`)
}

/**
 * Writes a `mapper`: reverse lookup — finds the key whose mapped value
 * matches (loosely, to tolerate string/number keys) and writes that key.
 */
function writeMapper (value, buffer, offset, { type, mappings }, rootNode) {
  for (const [key, mapped] of Object.entries(mappings)) {
    if (mapped === value || +mapped === +value) {
      return this.write(key, buffer, offset, type, rootNode)
    }
  }
  throw new Error(`${value} is not in the mappings value`)
}

/**
 * Sizes a `mapper`: reverse lookup like writeMapper, then sizes the key
 * with the underlying type.
 */
function sizeOfMapper (value, { type, mappings }, rootNode) {
  for (const [key, mapped] of Object.entries(mappings)) {
    if (mapped === value || +mapped === +value) {
      return this.sizeOf(key, type, rootNode)
    }
  }
  throw new Error(`${value} is not in the mappings value`)
}

// Reads a length-prefixed UTF-8 string: the count (prefix or referenced
// field, via getCount), then that many bytes decoded as UTF-8.
function readPString (buffer, offset, typeArgs, rootNode) {
  const { size, count } = getCount.call(this, buffer, offset, typeArgs, rootNode)
  const cursor = offset + size
  const strEnd = cursor + count
  if (strEnd > buffer.length) {
    throw new PartialReadError(`Missing characters in string, found size is ${buffer.length} expected size was ${strEnd}`)
  }
  // Total size = count-prefix bytes + string bytes.
  return new Result(buffer.toString('utf8', cursor, strEnd), size + count)
}

/**
 * Writes a length-prefixed UTF-8 string: byte-length prefix (via sendCount),
 * then the UTF-8 payload. Returns the new offset.
 */
function writePString (value, buffer, offset, typeArgs, rootNode) {
  const byteLen = Buffer.byteLength(value, 'utf8')
  const start = sendCount.call(this, byteLen, buffer, offset, typeArgs, rootNode)
  buffer.write(value, start, byteLen, 'utf8')
  return start + byteLen
}

/**
 * Sizes a length-prefixed string: prefix size (via calcCount) plus the
 * UTF-8 byte length of the payload.
 */
function sizeOfPString (value, typeArgs, rootNode) {
  const byteLen = Buffer.byteLength(value, 'utf8')
  return byteLen + calcCount.call(this, byteLen, typeArgs, rootNode)
}

// Reads length-prefixed raw bytes: the count (via getCount), then a slice of
// that many bytes. NOTE: slice shares memory with the source buffer.
function readBuffer (buffer, offset, typeArgs, rootNode) {
  const { size, count } = getCount.call(this, buffer, offset, typeArgs, rootNode)
  offset += size
  if (offset + count > buffer.length) throw new PartialReadError()
  // Total size = count-prefix bytes + payload bytes.
  return new Result(buffer.slice(offset, offset + count), size + count)
}

/**
 * Writes length-prefixed raw bytes: the length (via sendCount), then the
 * payload copied after the prefix. Returns the new offset.
 */
function writeBuffer (value, buffer, offset, typeArgs, rootNode) {
  const start = sendCount.call(this, value.length, buffer, offset, typeArgs, rootNode)
  const copied = value.copy(buffer, start)
  return start + copied
}

/**
 * Sizes a length-prefixed buffer: prefix size (via calcCount) plus payload
 * length.
 */
function sizeOfBuffer (value, typeArgs, rootNode) {
  const prefix = calcCount.call(this, value.length, typeArgs, rootNode)
  return prefix + value.length
}

// Mask with the low n bits set. Uses 32-bit signed shift semantics, so it is
// only meaningful for 0 <= n < 32 (callers here use n <= 8).
function generateBitMask (n) {
  return (1 << n) - 1
}

// Unpacks consecutive MSB-first bit fields into an object keyed by field
// name. Fields may span byte boundaries; signed fields are sign-extended.
function readBitField (buffer, offset, typeArgs) {
  const value = {}
  const beginOffset = offset
  let curVal = null // current byte being consumed
  let bits = 0 // bits still unconsumed in curVal
  for (const { size, signed, name } of typeArgs) {
    let currentSize = size
    let val = 0
    while (currentSize > 0) {
      if (bits === 0) {
        // Need a fresh byte.
        if (buffer.length < offset + 1) { throw new PartialReadError() }
        curVal = buffer[offset++]
        bits = 8
      }
      const bitsToRead = Math.min(currentSize, bits)
      // Take the top `bitsToRead` of the remaining bits in curVal.
      val = (val << bitsToRead) | (curVal & generateBitMask(bits)) >> (bits - bitsToRead)
      bits -= bitsToRead
      currentSize -= bitsToRead
    }
    // Two's-complement sign extension for signed fields.
    if (signed && val >= 1 << (size - 1)) { val -= 1 << size }
    value[name] = val
  }
  return new Result(value, offset - beginOffset)
}

/**
 * Packs named fields MSB-first into consecutive bytes; returns the new offset.
 * A trailing partial byte is flushed left-aligned.
 * Fixes: the lower-bound check compared the container object (`value < min`,
 * always false, so the check never fired) instead of the field value, and the
 * signed upper bound `(1 << (size-1)) - 1` rejected the largest legal signed
 * value (e.g. 127 for an 8-bit signed field).
 */
function writeBitField (value, buffer, offset, typeArgs) {
  let toWrite = 0
  let bits = 0
  for (let { size, signed, name } of typeArgs) {
    const val = value[name]
    // Legal range: signed => [-2^(n-1), 2^(n-1) - 1], unsigned => [0, 2^n - 1].
    const min = signed ? -(1 << (size - 1)) : 0
    const max = signed ? (1 << (size - 1)) : (1 << size)
    if (val < min) { throw new Error(val + ' < ' + min) }
    if (val >= max) { throw new Error(val + ' >= ' + max) }
    while (size > 0) {
      const writeBits = Math.min(8 - bits, size)
      toWrite = toWrite << writeBits |
        ((val >> (size - writeBits)) & generateBitMask(writeBits))
      size -= writeBits
      bits += writeBits
      if (bits === 8) {
        buffer[offset++] = toWrite
        bits = 0
        toWrite = 0
      }
    }
  }
  // Flush a trailing partial byte, left-aligned.
  if (bits !== 0) {
    buffer[offset++] = toWrite << (8 - bits)
  }
  return offset
}

/**
 * Total bit count of all fields, rounded up to whole bytes; `value` is
 * unused because bitfield width is fixed by the type.
 */
function sizeOfBitField (value, typeArgs) {
  const totalBits = typeArgs.reduce((sum, field) => sum + field.size, 0)
  return Math.ceil(totalBits / 8)
}

// Each entry: [read, write, sizeOf, validation schema].
module.exports = {
  'pstring': [readPString, writePString, sizeOfPString, schema['pstring']],
  'buffer': [readBuffer, writeBuffer, sizeOfBuffer, schema['buffer']],
  'bitfield': [readBitField, writeBitField, sizeOfBitField, schema['bitfield']],
  'mapper': [readMapper, writeMapper, sizeOfMapper, schema['mapper']]
}
5 changes: 5 additions & 0 deletions src/datatypes/shared/index.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
const numeric = require('./numeric')
const utils = require('./utils')

// There's no need to compile these types, just optimize them instead
module.exports = { ...numeric, ...utils }
71 changes: 30 additions & 41 deletions src/datatypes/numeric.js → src/datatypes/shared/numeric.js
Original file line number Diff line number Diff line change
@@ -1,11 +1,8 @@
const { PartialReadError } = require('../utils')
const { PartialReadError, Result } = require('../../utils')

function readI64 (buffer, offset) {
if (offset + 8 > buffer.length) { throw new PartialReadError() }
return {
value: [buffer.readInt32BE(offset), buffer.readInt32BE(offset + 4)],
size: 8
}
return new Result([buffer.readInt32BE(offset), buffer.readInt32BE(offset + 4)], 8)
}

function writeI64 (value, buffer, offset) {
@@ -16,10 +13,7 @@ function writeI64 (value, buffer, offset) {

function readLI64 (buffer, offset) {
if (offset + 8 > buffer.length) { throw new PartialReadError() }
return {
value: [buffer.readInt32LE(offset + 4), buffer.readInt32LE(offset)],
size: 8
}
return new Result([buffer.readInt32LE(offset + 4), buffer.readInt32LE(offset)], 8)
}

function writeLI64 (value, buffer, offset) {
@@ -30,10 +24,7 @@ function writeLI64 (value, buffer, offset) {

function readU64 (buffer, offset) {
if (offset + 8 > buffer.length) { throw new PartialReadError() }
return {
value: [buffer.readUInt32BE(offset), buffer.readUInt32BE(offset + 4)],
size: 8
}
return new Result([buffer.readUInt32BE(offset), buffer.readUInt32BE(offset + 4)], 8)
}

function writeU64 (value, buffer, offset) {
@@ -44,10 +35,7 @@ function writeU64 (value, buffer, offset) {

function readLU64 (buffer, offset) {
if (offset + 8 > buffer.length) { throw new PartialReadError() }
return {
value: [buffer.readUInt32LE(offset + 4), buffer.readUInt32LE(offset)],
size: 8
}
return new Result([buffer.readUInt32LE(offset + 4), buffer.readUInt32LE(offset)], 8)
}

function writeLU64 (value, buffer, offset) {
@@ -56,22 +44,6 @@ function writeLU64 (value, buffer, offset) {
return offset + 8
}

function generateFunctions (bufferReader, bufferWriter, size, schema) {
const reader = (buffer, offset) => {
if (offset + size > buffer.length) { throw new PartialReadError() }
const value = buffer[bufferReader](offset)
return {
value: value,
size: size
}
}
const writer = (value, buffer, offset) => {
buffer[bufferWriter](value, offset)
return offset + size
}
return [reader, writer, size, schema]
}

const nums = {
'i8': ['readInt8', 'writeInt8', 1],
'u8': ['readUInt8', 'writeUInt8', 1],
@@ -91,13 +63,30 @@ const nums = {
'lf64': ['readDoubleLE', 'writeDoubleLE', 8]
}

const types = Object.keys(nums).reduce((types, num) => {
types[num] = generateFunctions(nums[num][0], nums[num][1], nums[num][2], require('../../ProtoDef/schemas/numeric.json')[num])
return types
}, {})
types['i64'] = [readI64, writeI64, 8, require('../../ProtoDef/schemas/numeric.json')['i64']]
types['li64'] = [readLI64, writeLI64, 8, require('../../ProtoDef/schemas/numeric.json')['li64']]
types['u64'] = [readU64, writeU64, 8, require('../../ProtoDef/schemas/numeric.json')['u64']]
types['lu64'] = [readLU64, writeLU64, 8, require('../../ProtoDef/schemas/numeric.json')['lu64']]
const types = {
i64: [readI64, writeI64, 8, require('../../../ProtoDef/schemas/numeric.json')['i64']],
li64: [readLI64, writeLI64, 8, require('../../../ProtoDef/schemas/numeric.json')['li64']],
u64: [readU64, writeU64, 8, require('../../../ProtoDef/schemas/numeric.json')['u64']],
lu64: [readLU64, writeLU64, 8, require('../../../ProtoDef/schemas/numeric.json')['lu64']]
}

/**
 * Generic fixed-width numeric reader; `method` names a Buffer read method
 * (e.g. 'readUInt16BE') and `size` is its width in bytes.
 */
function readIntN (method, size, buffer, offset) {
  if (buffer.length < offset + size) throw new PartialReadError()
  const value = buffer[method](offset)
  return new Result(value, size)
}

// Generic fixed-width numeric writer; Buffer write methods return the offset
// just past the written value, which is exactly the contract needed here.
function writeIntN (method, value, buffer, offset) {
  return buffer[method](value, offset)
}

for (const num in nums) {
const [ bufferReader, bufferWriter, size ] = nums[num]
types[num] = [
readIntN.bind(null, bufferReader, size),
writeIntN.bind(null, bufferWriter),
size,
require('../../../ProtoDef/schemas/numeric.json')[num]
]
}

module.exports = types
68 changes: 68 additions & 0 deletions src/datatypes/shared/utils.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,68 @@
const { PartialReadError, Result } = require('../../utils')
const schema = require('../../../ProtoDef/schemas/utils.json')

const LOG2 = Math.log2(0x7F)
/**
 * Reads an LEB128-style varint: 7 payload bits per byte, least-significant
 * group first; the high bit marks continuation.
 * Fixes: the bounds check used to run only after the loop, so a truncated
 * varint first folded `undefined` bytes into the arithmetic before throwing.
 * The check now happens before each byte is read; the externally visible
 * behavior (PartialReadError on truncation) is unchanged.
 */
function readVarInt (buffer, offset) {
  let value = 0
  let size = 0
  let byte
  do {
    if (offset + size >= buffer.length) throw new PartialReadError()
    byte = buffer[offset + size]
    value |= (byte & 0x7F) << (size++ * 7)
  } while ((byte & 0x80) !== 0)
  return new Result(value, size)
}

/**
 * Returns the encoded byte length of a varint.
 * Fixes: the previous log-ratio approximation (log2(value)/log2(127))
 * overestimated near 7-bit group boundaries — e.g. sizeOfVarInt(16383)
 * returned 3 while writeVarInt emits 2 bytes — desynchronizing sizeOf from
 * write. This counts 7-bit groups exactly.
 */
function sizeOfVarInt (value) {
  // Negative values sign-extend to the full 5-byte 32-bit encoding.
  if (value < 0) return 5
  let size = 1
  let rest = value >>> 7
  while (rest !== 0) {
    size++
    rest >>>= 7
  }
  return size
}

/**
 * Writes a varint: 7 bits per byte, least-significant group first, high bit
 * set on every byte except the last. Returns the new offset.
 */
function writeVarInt (value, buffer, offset) {
  let cursor = offset
  let rest = value
  while ((rest & ~0x7F) !== 0) {
    buffer[cursor] = (rest & 0x7F) | 0x80
    cursor++
    rest >>>= 7
  }
  buffer[cursor] = rest | 0
  return cursor + 1
}

/**
 * Reads one byte as a boolean. Only the exact value 1 decodes as true —
 * NOTE(review): any other non-zero byte reads as false; confirm this strict
 * comparison is intended rather than `!== 0`.
 */
function readBool (buffer, offset) {
  if (offset >= buffer.length) throw new PartialReadError()
  const byte = buffer[offset]
  return new Result(byte === 1, 1)
}

/**
 * Writes a boolean as one byte, keeping only the low bit of the input
 * (so `true` -> 1, `false`/0 -> 0). Returns the new offset.
 */
function writeBool (value, buffer, offset) {
  buffer.writeUInt8(value & 1, offset)
  return offset + 1
}

// A void field consumes no bytes and decodes to undefined.
function readVoid () {
  return new Result(undefined, 0)
}

// A void field writes nothing; the offset is returned unchanged.
function writeVoid (value, buffer, offset) {
  return offset
}

/**
 * Reads a NUL-terminated UTF-8 string.
 * Fixes: `buffer.indexOf(0x00)` ignored `offset`, so a NUL byte belonging to
 * earlier data made reads at offset > 0 return an empty/garbled string with a
 * wrong (possibly negative) size. The search now starts at `offset`.
 * The returned size includes the terminating NUL byte.
 */
function readCString (buffer, offset) {
  const end = buffer.indexOf(0x00, offset)
  if (end === -1) throw new PartialReadError()
  return new Result(buffer.toString('utf8', offset, end), end - offset + 1)
}

/**
 * Writes a NUL-terminated UTF-8 string: the payload, then a single 0x00.
 * Returns the offset just past the terminator.
 */
function writeCString (value, buffer, offset) {
  const byteLen = Buffer.byteLength(value, 'utf8')
  buffer.write(value, offset, byteLen, 'utf8')
  return buffer.writeInt8(0x00, offset + byteLen)
}

// Encoded size of a cstring: UTF-8 byte length plus the NUL terminator.
function sizeOfCString (value) {
  return 1 + Buffer.byteLength(value, 'utf8')
}

module.exports = {
'varint': [readVarInt, writeVarInt, sizeOfVarInt, schema['varint']],
'bool': [readBool, writeBool, 1, schema['bool']],
'void': [readVoid, writeVoid, 0, schema['void']],
'cstring': [readCString, writeCString, sizeOfCString, schema['cstring']]
}
90 changes: 0 additions & 90 deletions src/datatypes/structures.js

This file was deleted.

260 changes: 0 additions & 260 deletions src/datatypes/utils.js

This file was deleted.

23 changes: 14 additions & 9 deletions src/index.js
Original file line number Diff line number Diff line change
@@ -1,12 +1,17 @@
const ProtoDef = require('./protodef')
const proto = new ProtoDef()
const { Serializer, Parser, FullPacketParser } = require('./serializer')
const ProtoDef = require('./interpreter')
const Compiler = require('./compiler')
const utils = require('./utils')
const types = require('./datatypes/interpreter')
const { createEncoding } = utils

module.exports = {
ProtoDef: ProtoDef,
Serializer: require('./serializer').Serializer,
Parser: require('./serializer').Parser,
FullPacketParser: require('./serializer').FullPacketParser,
Compiler: require('./compiler'),
types: proto.types,
utils: require('./utils')
ProtoDef,
Compiler,
Serializer,
Parser,
FullPacketParser,
createEncoding,
utils,
types
}
182 changes: 182 additions & 0 deletions src/interpreter.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,182 @@
const { getFieldInfo, isFieldInfo, tryCatch } = require('./utils')
const reduce = require('lodash.reduce')
const get = require('lodash.get')
const clonedeep = require('lodash.clonedeep')
const Validator = require('protodef-validator')
const defaultDatatypes = require('./datatypes/interpreter')

class ProtoDef {
  // Interpreter-based ProtoDef. Each registered type is a tuple
  // [read, write, sizeOf, schema?] where sizeOf is either a function or a
  // fixed byte count.
  constructor (validation = true) {
    // When validation is enabled, added types/protocols are checked
    // against the ProtoDef meta-schemas via protodef-validator.
    this.validator = validation ? new Validator() : null
    this.clearTypes()
    this.addTypes(defaultDatatypes)
  }

  // Register every `types` section found while walking `protocolData`
  // along `path` (an array of successive sub-keys).
  addProtocol (protocolData, path) {
    if (this.validator) { this.validator.validateProtocol(protocolData) }
    this.recursiveAddTypes(protocolData, path)
  }

  // Descends one `path` segment per call. NOTE: consumes `path`
  // destructively via Array#shift; recursion stops once the lookup
  // yields undefined.
  recursiveAddTypes (protocolData, path) {
    if (protocolData === undefined) return
    if (protocolData.types) { this.addTypes(protocolData.types) }
    this.recursiveAddTypes(get(protocolData, path.shift()), path)
  }

  // Register a single type. `functions` is one of:
  //  - the string 'native'  : placeholder, only announced to the validator;
  //  - field-info (string / [type, args] / { type, ... }) : alias of an
  //    existing type, optionally specialised with default typeArgs;
  //  - a [read, write, sizeOf, schema?] tuple : concrete implementation.
  addType (name, functions, validate = true) {
    if (functions === 'native') {
      if (this.validator) { this.validator.addType(name) }
      return
    }
    if (isFieldInfo(functions)) {
      if (this.validator) {
        if (validate) { this.validator.validateType(functions) }
        this.validator.addType(name)
      }

      // With typeArgs the base type's reader/writer/sizeOf are wrapped so
      // the defaults (and their $-parameters) are filled in at call time.
      let { type, typeArgs } = getFieldInfo(functions)
      this.types[name] = typeArgs ? extendType.call(this, this.types[type], typeArgs) : this.types[type]
    } else {
      if (this.validator) {
        // functions[3] is the optional JSON schema for this datatype.
        if (functions[3]) {
          this.validator.addType(name, functions[3])
        } else { this.validator.addType(name) }
      }

      this.types[name] = functions
    }
  }

  // Unregister a type (no validator bookkeeping is performed).
  removeType (name) {
    delete this.types[name]
  }

  // Bulk-register types. Passes `this.validator` (the object itself, any
  // truthy value) as the `validate` flag of addType.
  addTypes (types) {
    for (const name in types) {
      this.addType(name, types[name], this.validator)
    }
  }

  // Reset the registry to an empty object.
  clearTypes () {
    this.types = {}
  }

  // Decode `_fieldInfo` from `buffer` at `cursor`; returns the reader's
  // result (a { value, size } Result from the built-in datatypes).
  read (buffer, cursor, _fieldInfo, rootNodes) {
    const { type, typeArgs } = getFieldInfo(_fieldInfo)
    const typeFunctions = this.types[type]
    if (!typeFunctions) { throw new Error(`missing data type: ${type}`) }
    return typeFunctions[0].call(this, buffer, cursor, typeArgs, rootNodes)
  }

  // Encode `value` into `buffer` at `offset`; returns the new offset.
  write (value, buffer, offset, _fieldInfo, rootNode) {
    const { type, typeArgs } = getFieldInfo(_fieldInfo)
    const typeFunctions = this.types[type]
    if (!typeFunctions) { throw new Error(`missing data type: ${type}`) }
    return typeFunctions[1].call(this, value, buffer, offset, typeArgs, rootNode)
  }

  // Byte length `value` would occupy; tuple slot 2 may be a fixed number
  // instead of a function for constant-size types.
  sizeOf (value, _fieldInfo, rootNode) {
    const { type, typeArgs } = getFieldInfo(_fieldInfo)
    const typeFunctions = this.types[type]
    if (!typeFunctions) { throw new Error(`missing data type: ${type}`) }
    if (typeof typeFunctions[2] === 'function') {
      return typeFunctions[2].call(this, value, typeArgs, rootNode)
    } else {
      return typeFunctions[2]
    }
  }

  // The three handlers below prefix the failing field's path (added by
  // tryDoc along the way) onto the error message, then rethrow.
  _readErrorHandler (e) {
    e.message = `Read error for ${e.field} : ${e.message}`
    throw e
  }

  _writeErrorHandler (e) {
    e.message = `Write error for ${e.field} : ${e.message}`
    throw e
  }

  _sizeOfErrorHandler (e) {
    e.message = `SizeOf error for ${e.field} : ${e.message}`
    throw e
  }

  // Serialize `packet` as `type` into a fresh Buffer.
  // Uses allocUnsafe: assumes write() fills every byte that sizeOf()
  // accounted for — TODO confirm sizeOf never over-estimates.
  createPacketBuffer (type, packet) {
    const length = tryCatch(this.sizeOf.bind(this, packet, type, {}), this._sizeOfErrorHandler)
    const buffer = Buffer.allocUnsafe(length)
    tryCatch(this.write.bind(this, packet, buffer, 0, type, {}), this._writeErrorHandler)
    return buffer
  }

  // Parse one packet of `type` from the start of `buffer`.
  // Returns { data, metadata: { size }, buffer: <consumed slice> }.
  parsePacketBuffer (type, buffer) {
    const result = tryCatch(this.read.bind(this, buffer, 0, type, {}), this._readErrorHandler)
    return {
      data: result.value,
      metadata: { size: result.size },
      buffer: buffer.slice(0, result.size)
    }
  }
}

/**
 * Collect '$'-parameter references from a default-typeArgs tree.
 * Appends `{ path, val }` entries to `acc`, where `path` is the
 * dot-joined location of the reference and `val` the parameter name
 * (the '$'-prefixed string minus the '$').
 *
 * Improvements: drops the lodash.reduce dependency in favour of
 * Object.entries (same iteration order for arrays and plain objects),
 * and guards `null` explicitly instead of relying on lodash tolerating
 * it (`typeof null === 'object'`).
 *
 * @param {Array} acc - accumulator, mutated and returned
 * @param {*} v - current node of the tree
 * @param {string|number} k - key of `v` within its parent
 * @returns {Array} `acc`
 */
function findArgs (acc, v, k) {
  if (typeof v === 'string' && v.charAt(0) === '$') {
    acc.push({ path: k, val: v.slice(1) })
  } else if (v !== null && typeof v === 'object') {
    for (const [childKey, child] of Object.entries(v)) {
      for (const found of findArgs([], child, childKey)) {
        acc.push({ path: `${k}.${found.path}`, val: found.val })
      }
    }
  }
  return acc
}

// Materialise the effective typeArgs: deep-clone the defaults, then patch
// every '$'-parameter position (precomputed in argPos) with the caller's
// value. With no caller typeArgs the shared default object is returned
// untouched.
function produceArgsObject (defaultTypeArgs, argPos, typeArgs) {
  if (typeArgs === undefined) return defaultTypeArgs
  const resolved = clonedeep(defaultTypeArgs)
  for (const { path, val } of argPos) {
    const segments = path.split('.')
    const last = segments.pop()
    let target = resolved
    for (const segment of segments) {
      target = target[segment]
    }
    target[last] = typeArgs[val]
  }
  return resolved
}

// Identity producer, used when the defaults contain no '$'-references:
// the shared default value can be handed out as-is.
function produceArgsStatic (args) {
  return args
}

/**
 * Build the `produceArgs(typeArgs)` closure for an extended type.
 *
 * Improvement: the '$'-parameter scan is now skipped entirely for
 * non-object defaults — the old code computed `argPos` first and then
 * threw it away when returning the static producer.
 *
 * @param {*} defaultTypeArgs - the alias's default typeArgs
 * @returns {Function} producer mapping caller typeArgs to effective args
 */
function constructProduceArgs (defaultTypeArgs) {
  if (typeof defaultTypeArgs !== 'object') return produceArgsStatic.bind(this, defaultTypeArgs)
  const argPos = reduce(defaultTypeArgs, findArgs, [])
  return produceArgsObject.bind(this, defaultTypeArgs, argPos)
}

// Wrapped reader for extended types: resolves the effective typeArgs,
// then delegates to the base type's reader with `this` preserved.
function extendedRead (_read, _produceArgs, buffer, offset, typeArgs, context) {
  const resolvedArgs = _produceArgs(typeArgs)
  return _read.call(this, buffer, offset, resolvedArgs, context)
}

// Wrapped writer for extended types: resolves the effective typeArgs,
// then delegates to the base type's writer with `this` preserved.
function extendedWrite (_write, _produceArgs, value, buffer, offset, typeArgs, context) {
  const resolvedArgs = _produceArgs(typeArgs)
  return _write.call(this, value, buffer, offset, resolvedArgs, context)
}

// Wrapped sizeOf for extended types: resolves the effective typeArgs,
// then delegates to the base type's sizeOf with `this` preserved.
function extendedSizeOf (_sizeOf, _produceArgs, value, typeArgs, context) {
  const resolvedArgs = _produceArgs(typeArgs)
  return _sizeOf.call(this, value, resolvedArgs, context)
}

// sizeOf stand-in for constant-size base types: once the fixed byte
// count is bound in, every call simply returns it.
function staticSizeOf (fixedSize) {
  return fixedSize
}

// Specialise a base type tuple with default typeArgs. Produces a new
// [read, write, sizeOf] tuple whose members resolve '$'-parameters at
// call time; a numeric sizeOf stays effectively constant via staticSizeOf.
function extendType ([ _read, _write, _sizeOf ], defaultTypeArgs) {
  const produceArgs = constructProduceArgs(defaultTypeArgs)
  const boundRead = extendedRead.bind(this, _read, produceArgs)
  const boundWrite = extendedWrite.bind(this, _write, produceArgs)
  const boundSizeOf = typeof _sizeOf === 'function'
    ? extendedSizeOf.bind(this, _sizeOf, produceArgs)
    : staticSizeOf.bind(this, _sizeOf)
  return [boundRead, boundWrite, boundSizeOf]
}

module.exports = ProtoDef
164 changes: 0 additions & 164 deletions src/protodef.js

This file was deleted.

24 changes: 13 additions & 11 deletions src/serializer.js
Original file line number Diff line number Diff line change
@@ -13,7 +13,7 @@ class Serializer extends Transform {
}

_transform (chunk, enc, cb) {
let buf
let buf // DO NOT REMOVE THIS WORKAROUND!
try {
buf = this.createPacketBuffer(chunk)
} catch (e) {
@@ -39,7 +39,7 @@ class Parser extends Transform {
_transform (chunk, enc, cb) {
this.queue = Buffer.concat([this.queue, chunk])
while (true) {
let packet
let packet // DO NOT REMOVE THIS WORKAROUND!
try {
packet = this.parsePacketBuffer(this.queue)
} catch (e) {
@@ -57,10 +57,11 @@ class Parser extends Transform {
}

class FullPacketParser extends Transform {
constructor (proto, mainType) {
constructor (proto, mainType, noErrorLogging = false) {
super({ readableObjectMode: true })
this.proto = proto
this.mainType = mainType
this.noErrorLogging = noErrorLogging
}

parsePacketBuffer (buffer) {
@@ -71,20 +72,21 @@ class FullPacketParser extends Transform {
let packet
try {
packet = this.parsePacketBuffer(chunk)
if (packet.metadata.size !== chunk.length) {
console.log('Chunk size is ' + chunk.length + ' but only ' + packet.metadata.size + ' was read ; partial packet : ' +
JSON.stringify(packet.data) + '; buffer :' + chunk.toString('hex'))
if (packet.metadata.size !== chunk.length && !this.noErrorLogging) {
console.log(`Chunk size is ${chunk.length} but only ${packet.metadata.size} was read ; partial packet : ${JSON.stringify(packet.data)}; buffer : ${chunk.toString('hex')}`)
}
} catch (e) {
if (e.partialReadError) {
if (!this.noErrorLogging) {
console.log(e.stack)
}
return cb()
}
return cb(e)
}
this.push(packet)
cb()
}
}

module.exports = {
Serializer: Serializer,
Parser: Parser,
FullPacketParser: FullPacketParser
}
module.exports = { Serializer, Parser, FullPacketParser }
189 changes: 134 additions & 55 deletions src/utils.js
Original file line number Diff line number Diff line change
@@ -1,85 +1,164 @@
// Kinds a compiler datatype can declare.
const CompilerTypeKind = {
  NATIVE: 0,
  CONTEXT: 1,
  PARAMETRIZABLE: 2
}

const Enum = Object.freeze({
  CompilerTypeKind,
  // Key under which a context object links to its parent; falls back to
  // the legacy '..' string where Symbol is unavailable.
  ParentSymbol: typeof Symbol === 'undefined' ? '..' : Symbol('ProtoDefContext')
})

class Result {
  // Shared wrapper for every reader's (value, size) pair. Reusing one
  // class gives all results the same hidden class, so V8 inline-caches
  // property access — measurably faster than fresh `{ value, size }`
  // object literals.
  constructor (value, size = 0) {
    this.value = value
    this.size = size
  }

  // Alias for readers whose "value" semantically is a count.
  get count () {
    return this.value
  }
}

class ExtendableError extends Error {
  /**
   * Base error that fixes up `name` so subclasses report their own class
   * name instead of `Error`, and trims the captured stack trace.
   * @param {string} message - human-readable error description
   */
  constructor (message) {
    super(message)
    this.name = this.constructor.name
    this.message = message
    if (Error.captureStackTrace != null) {
      // Fix: V8's captureStackTrace expects the constructor *function* as
      // its second argument (frames above it are hidden). The previous
      // code passed the name string, which silently disabled trimming.
      Error.captureStackTrace(this, this.constructor)
    }
  }
}

// Raised when a buffer ends before a value is fully decoded. The
// `partialReadError` flag is what the stream parsers check (see
// serializer.js) to decide to wait for more data instead of failing.
class PartialReadError extends ExtendableError {
  constructor (message) {
    super(message)
    this.partialReadError = true
  }
}

// Run `tryfn`, routing any thrown error through `catchfn`. If `catchfn`
// does not rethrow, the call yields undefined.
function tryCatch (tryfn, catchfn) {
  try {
    return tryfn()
  } catch (e) {
    catchfn(e)
  }
}

// Prepend `field` onto the error's dotted field path and rethrow, so the
// path grows outward as the error bubbles through nested types.
function typeDocErrorHandler (field, e) {
  if (e.field) {
    e.field = `${field}.${e.field}`
  } else {
    e.field = field
  }
  throw e
}

// Run `tryfn`, tagging any error with `field` (via typeDocErrorHandler)
// before it propagates.
function tryDoc (tryfn, field) {
  const handler = typeDocErrorHandler.bind(this, field)
  return tryCatch(tryfn, handler)
}

function getField (countField, context) {
if (countField.startsWith('/')) {
while (context.hasOwnProperty(Enum.ParentSymbol)) {
context = context[Enum.ParentSymbol]
}
countField = countField.slice(1)
}
const countFieldArr = countField.split('/')
let i = 0
if (countFieldArr[i] === '') {
while (context.hasOwnProperty('..')) { context = context['..'] }
i++
for (const field of countFieldArr) {
context = context[field]
}
for (; i < countFieldArr.length; i++) { context = context[countFieldArr[i]] }
return context
}

function getFieldInfo (fieldInfo) {
if (typeof fieldInfo === 'string') { return { type: fieldInfo } } else if (Array.isArray(fieldInfo)) { return { type: fieldInfo[0], typeArgs: fieldInfo[1] } } else if (typeof fieldInfo.type === 'string') { return fieldInfo } else { throw new Error('Not a fieldinfo') }
switch (true) {
case typeof fieldInfo === 'string':
return { type: fieldInfo }
case Array.isArray(fieldInfo):
return { type: fieldInfo[0], typeArgs: fieldInfo[1] }
case typeof fieldInfo.type === 'string':
return fieldInfo
default:
throw new Error(`${fieldInfo} is not a fieldinfo`)
}
}

// Loose check that `type` looks like field-info: a type name, a
// [name, args] pair, or an object carrying a `type` property (in that
// last case the property's value itself is returned, preserving the
// original truthy/falsy contract).
function isFieldInfo (type) {
  const isName = typeof type === 'string'
  const isPair = Array.isArray(type) && typeof type[0] === 'string'
  return isName || isPair || type.type
}

function getCount (buffer, offset, { count, countType }, rootNode) {
let c = 0
let size = 0
if (typeof count === 'number') { c = count } else if (typeof count !== 'undefined') {
c = getField(count, rootNode)
} else if (typeof countType !== 'undefined') {
({ size, value: c } = tryDoc(() => this.read(buffer, offset, getFieldInfo(countType), rootNode), '$count'))
} else { // TODO : broken schema, should probably error out.
c = 0
if (count !== undefined) {
count = typeof count === 'number' ? count : getField(count, rootNode)
return new Result(count, 0)
}
return { count: c, size }
if (countType !== undefined) {
const { size, value } = tryDoc(this.read.bind(this, buffer, offset, getFieldInfo(countType), rootNode), '$count')
return new Result(value, size)
}
throw new Error('Broken schema, neither count nor countType defined')
}

function sendCount (len, buffer, offset, { count, countType }, rootNode) {
if (typeof count !== 'undefined' && len !== count) {
// TODO: Throw
} else if (typeof countType !== 'undefined') {
offset = this.write(len, buffer, offset, getFieldInfo(countType), rootNode)
} else {
// TODO: Throw
if (count !== undefined) {
if (typeof count === 'number' && len !== count) {
throw new Error('Datatype length is not equal to count defined in schema')
}
return offset
}
return offset
if (countType !== undefined) {
return this.write(len, buffer, offset, getFieldInfo(countType), rootNode)
}
throw new Error('Broken schema, neither count nor countType defined')
}

function calcCount (len, { count, countType }, rootNode) {
if (typeof count === 'undefined' && typeof countType !== 'undefined') { return tryDoc(() => this.sizeOf(len, getFieldInfo(countType), rootNode), '$count') } else { return 0 }
if (count === undefined && countType !== undefined) {
return tryDoc(this.sizeOf.bind(this, len, getFieldInfo(countType), rootNode), '$count')
}
return 0
}

function addErrorField (e, field) {
e.field = e.field ? field + '.' + e.field : field
throw e
}
class ProtoDefEncoding {
constructor (inst, type) {
this.inst = inst
this.type = type
this.encode.bytes = 0
this.decode.bytes = 0
}

function tryCatch (tryfn, catchfn) {
try { return tryfn() } catch (e) { catchfn(e) }
}
encode (obj, buffer, offset) {
if (buffer) {
this.encode.bytes = this.inst.write(obj, buffer, offset, this.type)
} else {
buffer = this.inst.createPacketBuffer(this.type, obj)
this.encode.bytes = buffer.length
}
return buffer
}

function tryDoc (tryfn, field) {
return tryCatch(tryfn, (e) => addErrorField(e, field))
}
decode (buffer, start, end) {
const { value, size } = this.inst.read(buffer.slice(start, end), 0, this.type)
this.decode.bytes = size
return value
}

class ExtendableError extends Error {
constructor (message) {
super(message)
this.name = this.constructor.name
this.message = message
if (Error.captureStackTrace != null) {
Error.captureStackTrace(this, this.constructor.name)
}
encodingLength (obj) {
return this.inst.sizeOf(obj, this.type)
}
}

class PartialReadError extends ExtendableError {
constructor (message) {
super(message)
this.partialReadError = true
}
// Factory for the abstract-encoding-style wrapper (encode/decode/
// encodingLength) around a ProtoDef instance and one registered type.
function createEncoding (inst, type) {
  const encoding = new ProtoDefEncoding(inst, type)
  return encoding
}

module.exports = {
getField: getField,
getFieldInfo: getFieldInfo,
addErrorField: addErrorField,
getCount: getCount,
sendCount: sendCount,
calcCount: calcCount,
tryCatch: tryCatch,
tryDoc: tryDoc,
PartialReadError: PartialReadError
Enum,
Result,
PartialReadError,
tryCatch,
tryDoc,
getField,
getFieldInfo,
isFieldInfo,
getCount,
sendCount,
calcCount,
createEncoding
}