diff --git a/.npmignore b/.npmignore
deleted file mode 100644
index 5171c54..0000000
--- a/.npmignore
+++ /dev/null
@@ -1,2 +0,0 @@
-node_modules
-npm-debug.log
\ No newline at end of file
diff --git a/package.json b/package.json
index ef4a79c..0cb2de9 100644
--- a/package.json
+++ b/package.json
@@ -4,6 +4,7 @@
   "description": "A simple yet powerful way to define binary protocols",
   "main": "index.js",
   "author": "roblabla <robinlambertz.dev@gmail.com>",
+  "sideEffects": false,
   "scripts": {
     "prepare": "require-self",
     "lint": "standard",
@@ -15,13 +16,14 @@
   "tonicExampleFilename": "example.js",
   "license": "MIT",
   "dependencies": {
+    "lodash.clonedeep": "^4.5.0",
     "lodash.get": "^4.4.2",
     "lodash.reduce": "^4.6.0",
     "protodef-validator": "^1.2.2",
     "readable-stream": "^3.0.3"
   },
   "engines": {
-    "node": ">=6"
+    "node": ">=12"
   },
   "bugs": {
     "url": "https://github.com/ProtoDef-io/node-protodef/issues"
@@ -38,5 +40,10 @@
     "mocha": "^5.2.0",
     "require-self": "^0.1.0",
     "standard": "^12.0.1"
-  }
+  },
+  "files": [
+    "src/",
+    "ProtoDef/schemas/",
+    "*.js"
+  ]
 }
diff --git a/src/compiler.js b/src/compiler.js
index 5c3aba6..3f23c7c 100644
--- a/src/compiler.js
+++ b/src/compiler.js
@@ -1,11 +1,10 @@
-const numeric = require('./datatypes/numeric')
-const utils = require('./datatypes/utils')
-
-const conditionalDatatypes = require('./datatypes/compiler-conditional')
-const structuresDatatypes = require('./datatypes/compiler-structures')
-const utilsDatatypes = require('./datatypes/compiler-utils')
-
-const { tryCatch } = require('./utils')
+const {
+  tryCatch,
+  Enum: {
+    CompilerTypeKind: { NATIVE, CONTEXT, PARAMETRIZABLE }
+  }
+} = require('./utils')
+const defaultDatatypes = require('./datatypes/compiler')
 
 class ProtoDefCompiler {
   constructor () {
@@ -60,19 +59,19 @@ class CompiledProtodef {
 
   read (buffer, cursor, type) {
     const readFn = this.readCtx[type]
-    if (!readFn) { throw new Error('missing data type: ' + type) }
+    if (!readFn) { throw new Error(`missing data type: ${type}`) }
     return readFn(buffer, cursor)
   }
 
   write (value, buffer, cursor, type) {
     const writeFn = this.writeCtx[type]
-    if (!writeFn) { throw new Error('missing data type: ' + type) }
+    if (!writeFn) { throw new Error(`missing data type: ${type}`) }
     return writeFn(value, buffer, cursor)
   }
 
   sizeOf (value, type) {
     const sizeFn = this.sizeOfCtx[type]
-    if (!sizeFn) { throw new Error('missing data type: ' + type) }
+    if (!sizeFn) { throw new Error(`missing data type: ${type}`) }
     if (typeof sizeFn === 'function') {
       return sizeFn(value)
     } else {
@@ -80,63 +79,119 @@ class CompiledProtodef {
     }
   }
 
+  _readErrorHandler (e) {
+    e.message = `Read error for ${e.field} : ${e.message}`
+    throw e
+  }
+
+  _writeErrorHandler (e) {
+    e.message = `Write error for ${e.field} : ${e.message}`
+    throw e
+  }
+
+  _sizeOfErrorHandler (e) {
+    e.message = `SizeOf error for ${e.field} : ${e.message}`
+    throw e
+  }
+
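+  // Serializes `packet` as `type` into a freshly allocated buffer sized with sizeOf().
+  // Usage sketch (hypothetical `proto` instance):
+  //   const buf = proto.createPacketBuffer('packet', { id: 1 })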
   createPacketBuffer (type, packet) {
-    const length = tryCatch(() => this.sizeOf(packet, type),
-      (e) => {
-        e.message = `SizeOf error for ${e.field} : ${e.message}`
-        throw e
-      })
+    const length = tryCatch(this.sizeOf.bind(this, packet, type), this._sizeOfErrorHandler)
     const buffer = Buffer.allocUnsafe(length)
-    tryCatch(() => this.write(packet, buffer, 0, type),
-      (e) => {
-        e.message = `Write error for ${e.field} : ${e.message}`
-        throw e
-      })
+    tryCatch(this.write.bind(this, packet, buffer, 0, type), this._writeErrorHandler)
     return buffer
   }
 
   parsePacketBuffer (type, buffer) {
-    const { value, size } = tryCatch(() => this.read(buffer, 0, type),
-      (e) => {
-        e.message = `Read error for ${e.field} : ${e.message}`
-        throw e
-      })
+    const result = tryCatch(this.read.bind(this, buffer, 0, type), this._readErrorHandler)
     return {
-      data: value,
-      metadata: { size },
-      buffer: buffer.slice(0, size)
+      data: result.value,
+      metadata: { size: result.size },
+      buffer: buffer.slice(0, result.size)
     }
   }
 }
 
 class Compiler {
   constructor () {
+    this.scopeStack = []
+    this.clearTypes()
+  }
+
+  /**
+   * See `addNativeType` / `addContextType` / `addParametrizableType`
+   * @param {String} name
+   * @param {Function} fn
+   * @param {*} kind
+   */
+  addType (name, fn, kind = NATIVE) {
+    switch (kind) {
+      case NATIVE:
+        this.addNativeType(name, fn); break
+      case CONTEXT:
+        this.addContextType(name, fn); break
+      case PARAMETRIZABLE:
+        this.addParametrizableType(name, fn); break
+      default:
+        throw new Error('Unknown datatype kind ' + kind)
+    }
+  }
+
+  /**
+   * @param {String} name
+   * @param {Function} fn
+   * @param {*} kind
+   */
+  removeType (name, fn, kind = NATIVE) {
+    switch (kind) {
+      case NATIVE:
+        delete this.primitiveTypes[name]
+        delete this.native[name]
+        delete this.types[name]
+        break
+      case CONTEXT:
+        delete this.primitiveTypes[name]
+        delete this.context[name]
+        break
+      case PARAMETRIZABLE:
+        delete this.parameterizableTypes[name]
+        break
+      default:
+        throw new Error('Unknown datatype kind ' + kind)
+    }
+  }
+
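+  /**
+   * Registers several datatypes at once; each entry is a `[kind, implementation]` pair.
+   * Usage sketch (hypothetical type names):
+   *   compiler.addTypes({
+   *     varlong: [NATIVE, readVarLong],
+   *     entityLoop: [PARAMETRIZABLE, makeEntityLoop]
+   *   })
+   * @param {Object} types
+   */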
+  addTypes (types) {
+    for (const [type, [kind, fn]] of Object.entries(types)) {
+      this.addType(type, fn, kind)
+    }
+  }
+
+  clearTypes () {
     this.primitiveTypes = {}
     this.native = {}
     this.context = {}
     this.types = {}
-    this.scopeStack = []
     this.parameterizableTypes = {}
   }
 
   /**
    * A native type is a type read or written by a function that will be called in its
    * original context.
-   * @param {*} type
-   * @param {*} fn
+   * @param {String} type
+   * @param {Function} fn
    */
   addNativeType (type, fn) {
     this.primitiveTypes[type] = `native.${type}`
     this.native[type] = fn
-    this.types[type] = 'native'
+    this.types[type] = NATIVE
   }
 
   /**
    * A context type is a type that will be called in the protocol's context. It can refer to
    * registered native types using native.{type}() or context types (provided and generated)
    * using ctx.{type}(), but cannot access its original context.
-   * @param {*} type
-   * @param {*} fn
+   * @param {String} type
+   * @param {Function} fn
    */
   addContextType (type, fn) {
     this.primitiveTypes[type] = `ctx.${type}`
@@ -146,42 +201,41 @@ class Compiler {
   /**
    * A parametrizable type is a type whose code is generated at compile time by the
    * provided maker function
-   * @param {*} type
-   * @param {*} maker
+   * @param {String} type
+   * @param {Function} maker
    */
   addParametrizableType (type, maker) {
     this.parameterizableTypes[type] = maker
   }
 
-  addTypes (types) {
-    for (const [type, [kind, fn]] of Object.entries(types)) {
-      if (kind === 'native') this.addNativeType(type, fn)
-      else if (kind === 'context') this.addContextType(type, fn)
-      else if (kind === 'parametrizable') this.addParametrizableType(type, fn)
-    }
-  }
-
   addTypesToCompile (types) {
     for (const [type, json] of Object.entries(types)) {
       // Replace native type, otherwise first in wins
-      if (!this.types[type] || this.types[type] === 'native') this.types[type] = json
+      if (!this.types[type] || this.types[type] === NATIVE) this.types[type] = json
     }
   }
 
   addProtocol (protocolData, path) {
     const self = this
     function recursiveAddTypes (protocolData, path) {
-      if (protocolData === undefined) { return }
+      if (protocolData === undefined) return
       if (protocolData.types) { self.addTypesToCompile(protocolData.types) }
       recursiveAddTypes(protocolData[path.shift()], path)
     }
     recursiveAddTypes(protocolData, path.slice(0))
   }
 
+  /**
+   * @param {String} code
+   * @param {String} indent
+   */
   indent (code, indent = '  ') {
-    return code.split('\n').map((line) => indent + line).join('\n')
+    return code.split('\n').map(line => indent + line).join('\n')
   }
 
+  /**
+   * @param {String} name Slash-separated field name
+   */
   getField (name) {
     let path = name.split('/')
     let i = this.scopeStack.length - 1
@@ -207,9 +261,13 @@ class Compiler {
       scope[field] = field + (count || '') // If the name is already used, add a number
       return scope[field]
     }
-    throw new Error('Unknown field ' + path)
+    throw new Error(`Unknown field ${path}`)
   }
 
+  /**
+   * Generates code to eval
+   * @private
+   */
   generate () {
     this.scopeStack = [{}]
     let functions = []
@@ -218,7 +276,7 @@ class Compiler {
     }
     for (const type in this.types) {
       if (!functions[type]) {
-        if (this.types[type] !== 'native') {
+        if (this.types[type] !== NATIVE) {
           functions[type] = this.compileType(this.types[type])
           if (functions[type].startsWith('ctx')) { functions[type] = this.wrapCode(functions[type]) }
           if (!isNaN(functions[type])) { functions[type] = this.wrapCode('  return ' + functions[type]) }
@@ -234,12 +292,12 @@ class Compiler {
 
   /**
    * Compile the given js code, providing native.{type} to the context, return the compiled types
-   * @param {*} code
+   * @param {String} code
    */
   compile (code) {
     // Local variable to provide some context to eval()
     const native = this.native // eslint-disable-line
-    const { PartialReadError } = require('./utils') // eslint-disable-line
+    const { PartialReadError, Result } = require('./utils') // eslint-disable-line
     return eval(code)() // eslint-disable-line
   }
 }
@@ -247,118 +305,89 @@ class Compiler {
 class ReadCompiler extends Compiler {
   constructor () {
     super()
-
-    this.addTypes(conditionalDatatypes.Read)
-    this.addTypes(structuresDatatypes.Read)
-    this.addTypes(utilsDatatypes.Read)
-
-    // Add default types
-    for (const key in numeric) {
-      this.addNativeType(key, numeric[key][0])
-    }
-    for (const key in utils) {
-      this.addNativeType(key, utils[key][0])
-    }
+    this.addTypes(defaultDatatypes.Read)
   }
 
   compileType (type) {
     if (type instanceof Array) {
-      if (this.parameterizableTypes[type[0]]) { return this.parameterizableTypes[type[0]](this, type[1]) }
-      if (this.types[type[0]] && this.types[type[0]] !== 'native') {
+      if (this.parameterizableTypes[type[0]]) {
+        return this.parameterizableTypes[type[0]](this, type[1])
+      }
+      if (this.types[type[0]] && this.types[type[0]] !== NATIVE) {
         return this.wrapCode('return ' + this.callType(type[0], 'offset', Object.values(type[1])))
       }
-      throw new Error('Unknown parametrizable type: ' + type[0])
+      throw new Error(`Unknown parametrizable type: ${type[0]}`)
     } else { // Primitive type
-      if (type === 'native') return 'null'
-      if (this.types[type]) { return 'ctx.' + type }
+      if (type === NATIVE) return 'null'
+      if (this.types[type]) { return `ctx.${type}` }
       return this.primitiveTypes[type]
     }
   }
 
   wrapCode (code, args = []) {
-    if (args.length > 0) return '(buffer, offset, ' + args.join(', ') + ') => {\n' + this.indent(code) + '\n}'
-    return '(buffer, offset) => {\n' + this.indent(code) + '\n}'
+    if (args.length > 0) return `(buffer, offset, ${args.join(', ')}) => {\n${this.indent(code)}\n}`
+    return `(buffer, offset) => {\n${this.indent(code)}\n}`
   }
 
   callType (type, offsetExpr = 'offset', args = []) {
     if (type instanceof Array) {
-      if (this.types[type[0]] && this.types[type[0]] !== 'native') {
+      if (this.types[type[0]] && this.types[type[0]] !== NATIVE) {
         return this.callType(type[0], offsetExpr, Object.values(type[1]))
       }
     }
     if (type instanceof Array && type[0] === 'container') this.scopeStack.push({})
     const code = this.compileType(type)
     if (type instanceof Array && type[0] === 'container') this.scopeStack.pop()
-    if (args.length > 0) return '(' + code + `)(buffer, ${offsetExpr}, ` + args.map(name => this.getField(name)).join(', ') + ')'
-    return '(' + code + `)(buffer, ${offsetExpr})`
+    if (args.length > 0) return `(${code})(buffer, ${offsetExpr}, ${args.map(name => this.getField(name)).join(', ')})`
+    return `(${code})(buffer, ${offsetExpr})`
   }
 }
 
 class WriteCompiler extends Compiler {
   constructor () {
     super()
-
-    this.addTypes(conditionalDatatypes.Write)
-    this.addTypes(structuresDatatypes.Write)
-    this.addTypes(utilsDatatypes.Write)
-
-    // Add default types
-    for (const key in numeric) {
-      this.addNativeType(key, numeric[key][1])
-    }
-    for (const key in utils) {
-      this.addNativeType(key, utils[key][1])
-    }
+    this.addTypes(defaultDatatypes.Write)
   }
 
   compileType (type) {
     if (type instanceof Array) {
-      if (this.parameterizableTypes[type[0]]) { return this.parameterizableTypes[type[0]](this, type[1]) }
-      if (this.types[type[0]] && this.types[type[0]] !== 'native') {
+      if (this.parameterizableTypes[type[0]]) {
+        return this.parameterizableTypes[type[0]](this, type[1])
+      }
+      if (this.types[type[0]] && this.types[type[0]] !== NATIVE) {
         return this.wrapCode('return ' + this.callType('value', type[0], 'offset', Object.values(type[1])))
       }
-      throw new Error('Unknown parametrizable type: ' + type[0])
+      throw new Error(`Unknown parametrizable type: ${type[0]}`)
     } else { // Primitive type
-      if (type === 'native') return 'null'
-      if (this.types[type]) { return 'ctx.' + type }
+      if (type === NATIVE) return 'null'
+      if (this.types[type]) { return `ctx.${type}` }
       return this.primitiveTypes[type]
     }
   }
 
   wrapCode (code, args = []) {
-    if (args.length > 0) return '(value, buffer, offset, ' + args.join(', ') + ') => {\n' + this.indent(code) + '\n}'
-    return '(value, buffer, offset) => {\n' + this.indent(code) + '\n}'
+    if (args.length > 0) return `(value, buffer, offset, ${args.join(', ')}) => {\n${this.indent(code)}\n}`
+    return `(value, buffer, offset) => {\n${this.indent(code)}\n}`
   }
 
   callType (value, type, offsetExpr = 'offset', args = []) {
     if (type instanceof Array) {
-      if (this.types[type[0]] && this.types[type[0]] !== 'native') {
+      if (this.types[type[0]] && this.types[type[0]] !== NATIVE) {
         return this.callType(value, type[0], offsetExpr, Object.values(type[1]))
       }
     }
     if (type instanceof Array && type[0] === 'container') this.scopeStack.push({})
     const code = this.compileType(type)
     if (type instanceof Array && type[0] === 'container') this.scopeStack.pop()
-    if (args.length > 0) return '(' + code + `)(${value}, buffer, ${offsetExpr}, ` + args.map(name => this.getField(name)).join(', ') + ')'
-    return '(' + code + `)(${value}, buffer, ${offsetExpr})`
+    if (args.length > 0) return `(${code})(${value}, buffer, ${offsetExpr}, ${args.map(this.getField, this).join(', ')})`
+    return `(${code})(${value}, buffer, ${offsetExpr})`
   }
 }
 
 class SizeOfCompiler extends Compiler {
   constructor () {
     super()
-
-    this.addTypes(conditionalDatatypes.SizeOf)
-    this.addTypes(structuresDatatypes.SizeOf)
-    this.addTypes(utilsDatatypes.SizeOf)
-
-    // Add default types
-    for (const key in numeric) {
-      this.addNativeType(key, numeric[key][2])
-    }
-    for (const key in utils) {
-      this.addNativeType(key, utils[key][2])
-    }
+    this.addTypes(defaultDatatypes.SizeOf)
   }
 
   /**
@@ -370,36 +399,38 @@ class SizeOfCompiler extends Compiler {
   addNativeType (type, fn) {
     this.primitiveTypes[type] = `native.${type}`
     if (!isNaN(fn)) {
-      this.native[type] = (value) => { return fn }
+      this.native[type] = () => fn
     } else {
       this.native[type] = fn
     }
-    this.types[type] = 'native'
+    this.types[type] = NATIVE
   }
 
   compileType (type) {
     if (type instanceof Array) {
-      if (this.parameterizableTypes[type[0]]) { return this.parameterizableTypes[type[0]](this, type[1]) }
-      if (this.types[type[0]] && this.types[type[0]] !== 'native') {
+      if (this.parameterizableTypes[type[0]]) {
+        return this.parameterizableTypes[type[0]](this, type[1])
+      }
+      if (this.types[type[0]] && this.types[type[0]] !== NATIVE) {
         return this.wrapCode('return ' + this.callType('value', type[0], Object.values(type[1])))
       }
-      throw new Error('Unknown parametrizable type: ' + type[0])
+      throw new Error(`Unknown parametrizable type: ${type[0]}`)
     } else { // Primitive type
-      if (type === 'native') return 'null'
+      if (type === NATIVE) return 'null'
       if (!isNaN(this.primitiveTypes[type])) return this.primitiveTypes[type]
-      if (this.types[type]) { return 'ctx.' + type }
+      if (this.types[type]) { return `ctx.${type}` }
       return this.primitiveTypes[type]
     }
   }
 
   wrapCode (code, args = []) {
-    if (args.length > 0) return '(value, ' + args.join(', ') + ') => {\n' + this.indent(code) + '\n}'
-    return '(value) => {\n' + this.indent(code) + '\n}'
+    if (args.length > 0) return `(value, ${args.join(', ')}) => {\n${this.indent(code)}\n}`
+    return `(value) => {\n${this.indent(code)}\n}`
   }
 
   callType (value, type, args = []) {
     if (type instanceof Array) {
-      if (this.types[type[0]] && this.types[type[0]] !== 'native') {
+      if (this.types[type[0]] && this.types[type[0]] !== NATIVE) {
         return this.callType(value, type[0], Object.values(type[1]))
       }
     }
@@ -407,8 +438,8 @@ class SizeOfCompiler extends Compiler {
     const code = this.compileType(type)
     if (type instanceof Array && type[0] === 'container') this.scopeStack.pop()
     if (!isNaN(code)) return code
-    if (args.length > 0) return '(' + code + `)(${value}, ` + args.map(name => this.getField(name)).join(', ') + ')'
-    return '(' + code + `)(${value})`
+    if (args.length > 0) return `(${code})(${value}, ${args.map(this.getField, this).join(', ')})`
+    return `(${code})(${value})`
   }
 }
 
diff --git a/src/datatypes/compiler-conditional.js b/src/datatypes/compiler/conditional.js
similarity index 83%
rename from src/datatypes/compiler-conditional.js
rename to src/datatypes/compiler/conditional.js
index 9408cb0..7abd6a9 100644
--- a/src/datatypes/compiler-conditional.js
+++ b/src/datatypes/compiler/conditional.js
@@ -1,6 +1,10 @@
+const {
+  Enum: { CompilerTypeKind: { PARAMETRIZABLE } }
+} = require('../../utils')
+
 module.exports = {
   Read: {
-    'switch': ['parametrizable', (compiler, struct) => {
+    'switch': [PARAMETRIZABLE, (compiler, struct) => {
       let compare = struct.compareTo ? struct.compareTo : struct.compareToValue
       let args = []
       if (compare.startsWith('$')) args.push(compare)
@@ -17,19 +21,19 @@ module.exports = {
       code += `}`
       return compiler.wrapCode(code, args)
     }],
-    'option': ['parametrizable', (compiler, type) => {
+    'option': [PARAMETRIZABLE, (compiler, type) => {
       let code = 'const {value} = ctx.bool(buffer, offset)\n'
       code += 'if (value) {\n'
-      code += '  const { value, size } = ' + compiler.callType(type, 'offset + 1') + '\n'
-      code += '  return { value, size: size + 1 }\n'
+      code += '  const result = ' + compiler.callType(type, 'offset + 1') + '\n'
+      code += '  return new Result(result.value, result.size + 1)\n'
       code += '}\n'
-      code += 'return { value: undefined, size: 1}'
+      code += 'return new Result(undefined, 1)'
       return compiler.wrapCode(code)
     }]
   },
 
   Write: {
-    'switch': ['parametrizable', (compiler, struct) => {
+    'switch': [PARAMETRIZABLE, (compiler, struct) => {
       let compare = struct.compareTo ? struct.compareTo : struct.compareToValue
       let args = []
       if (compare.startsWith('$')) args.push(compare)
@@ -46,7 +50,7 @@ module.exports = {
       code += `}`
       return compiler.wrapCode(code, args)
     }],
-    'option': ['parametrizable', (compiler, type) => {
+    'option': [PARAMETRIZABLE, (compiler, type) => {
       let code = 'if (value != null) {\n'
       code += '  offset = ctx.bool(1, buffer, offset)\n'
       code += '  offset = ' + compiler.callType('value', type) + '\n'
@@ -59,7 +63,7 @@ module.exports = {
   },
 
   SizeOf: {
-    'switch': ['parametrizable', (compiler, struct) => {
+    'switch': [PARAMETRIZABLE, (compiler, struct) => {
       let compare = struct.compareTo ? struct.compareTo : struct.compareToValue
       let args = []
       if (compare.startsWith('$')) args.push(compare)
@@ -76,7 +80,7 @@ module.exports = {
       code += `}`
       return compiler.wrapCode(code, args)
     }],
-    'option': ['parametrizable', (compiler, type) => {
+    'option': [PARAMETRIZABLE, (compiler, type) => {
       let code = 'if (value != null) {\n'
       code += '  return 1 + ' + compiler.callType('value', type) + '\n'
       code += '}\n'
diff --git a/src/datatypes/compiler/index.js b/src/datatypes/compiler/index.js
new file mode 100644
index 0000000..5eb641f
--- /dev/null
+++ b/src/datatypes/compiler/index.js
@@ -0,0 +1,30 @@
+const { Enum: { CompilerTypeKind: { NATIVE } } } = require('../../utils')
+const conditionalDatatypes = require('./conditional')
+const structuresDatatypes = require('./structures')
+const utilsDatatypes = require('./utils')
+const sharedDatatypes = require('../shared')
+
+module.exports = {
+  Read: {
+    ...conditionalDatatypes.Read,
+    ...structuresDatatypes.Read,
+    ...utilsDatatypes.Read
+  },
+  Write: {
+    ...conditionalDatatypes.Write,
+    ...structuresDatatypes.Write,
+    ...utilsDatatypes.Write
+  },
+  SizeOf: {
+    ...conditionalDatatypes.SizeOf,
+    ...structuresDatatypes.SizeOf,
+    ...utilsDatatypes.SizeOf
+  }
+}
+
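+// Shared datatypes are plain [read, write, sizeOf, schema] tuples usable by the
+// interpreter as-is; expose them to every compiler pass as native types.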
+for (const k in sharedDatatypes) {
+  const [ read, write, sizeOf ] = sharedDatatypes[k]
+  module.exports.Read[k] = [NATIVE, read]
+  module.exports.Write[k] = [NATIVE, write]
+  module.exports.SizeOf[k] = [NATIVE, sizeOf]
+}
diff --git a/src/datatypes/compiler-structures.js b/src/datatypes/compiler/structures.js
similarity index 90%
rename from src/datatypes/compiler-structures.js
rename to src/datatypes/compiler/structures.js
index 7578020..e0871de 100644
--- a/src/datatypes/compiler-structures.js
+++ b/src/datatypes/compiler/structures.js
@@ -1,6 +1,10 @@
+const {
+  Enum: { CompilerTypeKind: { PARAMETRIZABLE } }
+} = require('../../utils')
+
 module.exports = {
   Read: {
-    'array': ['parametrizable', (compiler, array) => {
+    'array': [PARAMETRIZABLE, (compiler, array) => {
       let code = ''
       if (array.countType) {
         code += 'const { value: count, size: countSize } = ' + compiler.callType(array.countType) + '\n'
@@ -10,6 +14,7 @@ module.exports = {
       } else {
         throw new Error('Array must contain either count or countType')
       }
+      code += 'if (count > 0xffffff) throw new Error("array size is abnormally large, not reading: " + count)\n'
       code += 'const data = []\n'
       code += 'let size = countSize\n'
       code += 'for (let i = 0; i < count; i++) {\n'
@@ -17,14 +22,14 @@ module.exports = {
       code += '  data.push(elem.value)\n'
       code += '  size += elem.size\n'
       code += '}\n'
-      code += 'return { value: data, size }'
+      code += 'return new Result(data, size)'
       return compiler.wrapCode(code)
     }],
-    'count': ['parametrizable', (compiler, type) => {
+    'count': [PARAMETRIZABLE, (compiler, type) => {
       let code = 'return ' + compiler.callType(type.type)
       return compiler.wrapCode(code)
     }],
-    'container': ['parametrizable', (compiler, values) => {
+    'container': [PARAMETRIZABLE, (compiler, values) => {
       values = containerInlining(values)
 
       let code = ''
@@ -56,13 +61,13 @@ module.exports = {
       const sizes = offsetExpr.split(' + ')
       sizes.shift()
       if (sizes.length === 0) sizes.push('0')
-      code += 'return { value: { ' + names.join(', ') + ' }, size: ' + sizes.join(' + ') + '}'
+      code += 'return new Result({ ' + names.join(', ') + ' }, ' + sizes.join(' + ') + ')'
       return compiler.wrapCode(code)
     }]
   },
 
   Write: {
-    'array': ['parametrizable', (compiler, array) => {
+    'array': [PARAMETRIZABLE, (compiler, array) => {
       let code = ''
       if (array.countType) {
         code += 'offset = ' + compiler.callType('value.length', array.countType) + '\n'
@@ -75,11 +80,11 @@ module.exports = {
       code += 'return offset'
       return compiler.wrapCode(code)
     }],
-    'count': ['parametrizable', (compiler, type) => {
+    'count': [PARAMETRIZABLE, (compiler, type) => {
       let code = 'return ' + compiler.callType('value', type.type)
       return compiler.wrapCode(code)
     }],
-    'container': ['parametrizable', (compiler, values) => {
+    'container': [PARAMETRIZABLE, (compiler, values) => {
       values = containerInlining(values)
       let code = ''
       for (const i in values) {
@@ -106,7 +111,7 @@ module.exports = {
   },
 
   SizeOf: {
-    'array': ['parametrizable', (compiler, array) => {
+    'array': [PARAMETRIZABLE, (compiler, array) => {
       let code = ''
       if (array.countType) {
         code += 'let size = ' + compiler.callType('value.length', array.countType) + '\n'
@@ -125,11 +130,11 @@ module.exports = {
       code += 'return size'
       return compiler.wrapCode(code)
     }],
-    'count': ['parametrizable', (compiler, type) => {
+    'count': [PARAMETRIZABLE, (compiler, type) => {
       let code = 'return ' + compiler.callType('value', type.type)
       return compiler.wrapCode(code)
     }],
-    'container': ['parametrizable', (compiler, values) => {
+    'container': [PARAMETRIZABLE, (compiler, values) => {
       values = containerInlining(values)
       let code = 'let size = 0\n'
       for (const i in values) {
diff --git a/src/datatypes/compiler-utils.js b/src/datatypes/compiler/utils.js
similarity index 84%
rename from src/datatypes/compiler-utils.js
rename to src/datatypes/compiler/utils.js
index 740c6ca..45bcb5f 100644
--- a/src/datatypes/compiler-utils.js
+++ b/src/datatypes/compiler/utils.js
@@ -1,6 +1,10 @@
+const {
+  Enum: { CompilerTypeKind: { PARAMETRIZABLE } }
+} = require('../../utils')
+
 module.exports = {
   Read: {
-    'pstring': ['parametrizable', (compiler, string) => {
+    'pstring': [PARAMETRIZABLE, (compiler, string) => {
       let code = ''
       if (string.countType) {
         code += 'const { value: count, size: countSize } = ' + compiler.callType(string.countType) + '\n'
@@ -14,10 +18,10 @@ module.exports = {
       code += 'if (offset + count > buffer.length) {\n'
       code += '  throw new PartialReadError("Missing characters in string, found size is " + buffer.length + " expected size was " + (offset + count))\n'
       code += '}\n'
-      code += 'return { value: buffer.toString(\'utf8\', offset, offset + count), size: count + countSize }'
+      code += 'return new Result(buffer.toString(\'utf8\', offset, offset + count), count + countSize)'
       return compiler.wrapCode(code)
     }],
-    'buffer': ['parametrizable', (compiler, buffer) => {
+    'buffer': [PARAMETRIZABLE, (compiler, buffer) => {
       let code = ''
       if (buffer.countType) {
         code += 'const { value: count, size: countSize } = ' + compiler.callType(buffer.countType) + '\n'
@@ -31,10 +35,10 @@ module.exports = {
       code += 'if (offset + count > buffer.length) {\n'
       code += '  throw new PartialReadError()\n'
       code += '}\n'
-      code += 'return { value: buffer.slice(offset, offset + count), size: count + countSize }'
+      code += 'return new Result(buffer.slice(offset, offset + count), count + countSize)'
       return compiler.wrapCode(code)
     }],
-    'bitfield': ['parametrizable', (compiler, values) => {
+    'bitfield': [PARAMETRIZABLE, (compiler, values) => {
       let code = ''
       const totalBytes = Math.ceil(values.reduce((acc, { size }) => acc + size, 0) / 8)
       code += `if ( offset + ${totalBytes} > buffer.length) { throw new PartialReadError() }\n`
@@ -55,18 +59,18 @@ module.exports = {
         if (name === trueName) names.push(name)
         else names.push(`${name}: ${trueName}`)
       }
-      code += 'return { value: { ' + names.join(', ') + ` }, size: ${totalBytes} }`
+      code += 'return new Result({ ' + names.join(', ') + ` }, ${totalBytes})`
       return compiler.wrapCode(code)
     }],
-    'mapper': ['parametrizable', (compiler, mapper) => {
+    'mapper': [PARAMETRIZABLE, (compiler, mapper) => {
       let code = 'const { value, size } = ' + compiler.callType(mapper.type) + '\n'
-      code += 'return { value: ' + JSON.stringify(sanitizeMappings(mapper.mappings)) + '[value], size }'
+      code += 'return new Result(' + JSON.stringify(sanitizeMappings(mapper.mappings)) + '[value], size)'
       return compiler.wrapCode(code)
     }]
   },
 
   Write: {
-    'pstring': ['parametrizable', (compiler, string) => {
+    'pstring': [PARAMETRIZABLE, (compiler, string) => {
       let code = 'const length = Buffer.byteLength(value, \'utf8\')\n'
       if (string.countType) {
         code += 'offset = ' + compiler.callType('length', string.countType) + '\n'
@@ -77,7 +81,7 @@ module.exports = {
       code += 'return offset + length'
       return compiler.wrapCode(code)
     }],
-    'buffer': ['parametrizable', (compiler, buffer) => {
+    'buffer': [PARAMETRIZABLE, (compiler, buffer) => {
       let code = ''
       if (buffer.countType) {
         code += 'offset = ' + compiler.callType('value.length', buffer.countType) + '\n'
@@ -88,7 +92,7 @@ module.exports = {
       code += 'return offset + value.length'
       return compiler.wrapCode(code)
     }],
-    'bitfield': ['parametrizable', (compiler, values) => {
+    'bitfield': [PARAMETRIZABLE, (compiler, values) => {
       let toWrite = ''
       let bits = 0
       let code = ''
@@ -116,7 +120,7 @@ module.exports = {
       code += 'return offset'
       return compiler.wrapCode(code)
     }],
-    'mapper': ['parametrizable', (compiler, mapper) => {
+    'mapper': [PARAMETRIZABLE, (compiler, mapper) => {
       const mappings = JSON.stringify(swapMappings(mapper.mappings))
       const code = 'return ' + compiler.callType(`${mappings}[value]`, mapper.type)
       return compiler.wrapCode(code)
@@ -124,7 +128,7 @@ module.exports = {
   },
 
   SizeOf: {
-    'pstring': ['parametrizable', (compiler, string) => {
+    'pstring': [PARAMETRIZABLE, (compiler, string) => {
       let code = 'let size = Buffer.byteLength(value, \'utf8\')\n'
       if (string.countType) {
         code += 'size += ' + compiler.callType('size', string.countType) + '\n'
@@ -134,7 +138,7 @@ module.exports = {
       code += 'return size'
       return compiler.wrapCode(code)
     }],
-    'buffer': ['parametrizable', (compiler, buffer) => {
+    'buffer': [PARAMETRIZABLE, (compiler, buffer) => {
       let code = 'let size = value.length\n'
       if (buffer.countType) {
         code += 'size += ' + compiler.callType('size', buffer.countType) + '\n'
@@ -144,11 +148,11 @@ module.exports = {
       code += 'return size'
       return compiler.wrapCode(code)
     }],
-    'bitfield': ['parametrizable', (compiler, values) => {
+    'bitfield': [PARAMETRIZABLE, (compiler, values) => {
       const totalBytes = Math.ceil(values.reduce((acc, { size }) => acc + size, 0) / 8)
       return `${totalBytes}`
     }],
-    'mapper': ['parametrizable', (compiler, mapper) => {
+    'mapper': [PARAMETRIZABLE, (compiler, mapper) => {
       const mappings = JSON.stringify(swapMappings(mapper.mappings))
       const code = 'return ' + compiler.callType(`${mappings}[value]`, mapper.type)
       return compiler.wrapCode(code)
diff --git a/src/datatypes/conditional.js b/src/datatypes/conditional.js
deleted file mode 100644
index 438f880..0000000
--- a/src/datatypes/conditional.js
+++ /dev/null
@@ -1,56 +0,0 @@
-const { getField, getFieldInfo, tryDoc, PartialReadError } = require('../utils')
-
-module.exports = {
-  'switch': [readSwitch, writeSwitch, sizeOfSwitch, require('../../ProtoDef/schemas/conditional.json')['switch']],
-  'option': [readOption, writeOption, sizeOfOption, require('../../ProtoDef/schemas/conditional.json')['option']]
-}
-
-function readSwitch (buffer, offset, { compareTo, fields, compareToValue, 'default': defVal }, rootNode) {
-  compareTo = compareToValue !== undefined ? compareToValue : getField(compareTo, rootNode)
-  if (typeof fields[compareTo] === 'undefined' && typeof defVal === 'undefined') { throw new Error(compareTo + ' has no associated fieldInfo in switch') }
-
-  const caseDefault = typeof fields[compareTo] === 'undefined'
-  const resultingType = caseDefault ? defVal : fields[compareTo]
-  const fieldInfo = getFieldInfo(resultingType)
-  return tryDoc(() => this.read(buffer, offset, fieldInfo, rootNode), caseDefault ? 'default' : compareTo)
-}
-
-function writeSwitch (value, buffer, offset, { compareTo, fields, compareToValue, 'default': defVal }, rootNode) {
-  compareTo = compareToValue !== undefined ? compareToValue : getField(compareTo, rootNode)
-  if (typeof fields[compareTo] === 'undefined' && typeof defVal === 'undefined') { throw new Error(compareTo + ' has no associated fieldInfo in switch') }
-
-  const caseDefault = typeof fields[compareTo] === 'undefined'
-  const fieldInfo = getFieldInfo(caseDefault ? defVal : fields[compareTo])
-  return tryDoc(() => this.write(value, buffer, offset, fieldInfo, rootNode), caseDefault ? 'default' : compareTo)
-}
-
-function sizeOfSwitch (value, { compareTo, fields, compareToValue, 'default': defVal }, rootNode) {
-  compareTo = compareToValue !== undefined ? compareToValue : getField(compareTo, rootNode)
-  if (typeof fields[compareTo] === 'undefined' && typeof defVal === 'undefined') { throw new Error(compareTo + ' has no associated fieldInfo in switch') }
-
-  const caseDefault = typeof fields[compareTo] === 'undefined'
-  const fieldInfo = getFieldInfo(caseDefault ? defVal : fields[compareTo])
-  return tryDoc(() => this.sizeOf(value, fieldInfo, rootNode), caseDefault ? 'default' : compareTo)
-}
-
-function readOption (buffer, offset, typeArgs, context) {
-  if (buffer.length < offset + 1) { throw new PartialReadError() }
-  const val = buffer.readUInt8(offset++)
-  if (val !== 0) {
-    const retval = this.read(buffer, offset, typeArgs, context)
-    retval.size++
-    return retval
-  } else { return { size: 1 } }
-}
-
-function writeOption (value, buffer, offset, typeArgs, context) {
-  if (value != null) {
-    buffer.writeUInt8(1, offset++)
-    offset = this.write(value, buffer, offset, typeArgs, context)
-  } else { buffer.writeUInt8(0, offset++) }
-  return offset
-}
-
-function sizeOfOption (value, typeArgs, context) {
-  return value == null ? 1 : this.sizeOf(value, typeArgs, context) + 1
-}
diff --git a/src/datatypes/interpreter/conditional.js b/src/datatypes/interpreter/conditional.js
new file mode 100644
index 0000000..948417c
--- /dev/null
+++ b/src/datatypes/interpreter/conditional.js
@@ -0,0 +1,70 @@
+const { getField, getFieldInfo, tryDoc, PartialReadError, Result } = require('../../utils')
+const schema = require('../../../ProtoDef/schemas/conditional.json')
+
+function readSwitch (buffer, offset, { compareTo, fields, compareToValue, 'default': defVal }, rootNode) {
+  compareTo = compareToValue !== undefined ? compareToValue : getField(compareTo, rootNode)
+  if (fields[compareTo] === undefined) {
+    compareTo = 'default'
+    fields[compareTo] = defVal
+    if (defVal === undefined) {
+      throw new Error(`${compareTo} has no associated fieldInfo in switch`)
+    }
+  }
+  const fieldInfo = getFieldInfo(fields[compareTo])
+  return tryDoc(this.read.bind(this, buffer, offset, fieldInfo, rootNode), compareTo)
+}
+
+function writeSwitch (value, buffer, offset, { compareTo, fields, compareToValue, 'default': defVal }, rootNode) {
+  compareTo = compareToValue !== undefined ? compareToValue : getField(compareTo, rootNode)
+  if (fields[compareTo] === undefined) {
+    compareTo = 'default'
+    fields[compareTo] = defVal
+    if (defVal === undefined) {
+      throw new Error(`${compareTo} has no associated fieldInfo in switch`)
+    }
+  }
+  const fieldInfo = getFieldInfo(fields[compareTo])
+  return tryDoc(this.write.bind(this, value, buffer, offset, fieldInfo, rootNode), compareTo)
+}
+
+function sizeOfSwitch (value, { compareTo, fields, compareToValue, 'default': defVal }, rootNode) {
+  compareTo = compareToValue !== undefined ? compareToValue : getField(compareTo, rootNode)
+  if (fields[compareTo] === undefined) {
+    compareTo = 'default'
+    fields[compareTo] = defVal
+    if (defVal === undefined) {
+      throw new Error(`${compareTo} has no associated fieldInfo in switch`)
+    }
+  }
+  const fieldInfo = getFieldInfo(fields[compareTo])
+  return tryDoc(this.sizeOf.bind(this, value, fieldInfo, rootNode), compareTo)
+}
+
+function readOption (buffer, offset, typeArgs, context) {
+  if (buffer.length < offset + 1) { throw new PartialReadError() }
+  const isPresent = buffer.readUInt8(offset++) !== 0
+  if (isPresent) {
+    const retval = this.read(buffer, offset, typeArgs, context)
+    retval.size++
+    return retval
+  }
+  return new Result(undefined, 1)
+}
+
+function writeOption (value, buffer, offset, typeArgs, context) {
+  const isPresent = value != null
+  buffer.writeUInt8(isPresent & 1, offset++)
+  if (isPresent) {
+    offset = this.write(value, buffer, offset, typeArgs, context)
+  }
+  return offset
+}
+
+function sizeOfOption (value, typeArgs, context) {
+  return (value != null && this.sizeOf(value, typeArgs, context)) + 1
+}
+
+module.exports = {
+  'switch': [readSwitch, writeSwitch, sizeOfSwitch, schema['switch']],
+  'option': [readOption, writeOption, sizeOfOption, schema['option']]
+}
diff --git a/src/datatypes/interpreter/index.js b/src/datatypes/interpreter/index.js
new file mode 100644
index 0000000..025901c
--- /dev/null
+++ b/src/datatypes/interpreter/index.js
@@ -0,0 +1,11 @@
+const conditionalDatatypes = require('./conditional')
+const structuresDatatypes = require('./structures')
+const utilsDatatypes = require('./utils')
+const sharedDatatypes = require('../shared')
+
+module.exports = {
+  ...conditionalDatatypes,
+  ...structuresDatatypes,
+  ...utilsDatatypes,
+  ...sharedDatatypes
+}
diff --git a/src/datatypes/interpreter/structures.js b/src/datatypes/interpreter/structures.js
new file mode 100644
index 0000000..70e0e71
--- /dev/null
+++ b/src/datatypes/interpreter/structures.js
@@ -0,0 +1,90 @@
+const { getField, getCount, sendCount, calcCount, tryDoc, Enum: { ParentSymbol }, Result } = require('../../utils')
+const schema = require('../../../ProtoDef/schemas/structures.json')
+
+function readArray (buffer, offset, typeArgs, rootNode) {
+  const value = []
+  let { count, size } = getCount.call(this, buffer, offset, typeArgs, rootNode)
+  offset += size
+  for (let i = 0; i < count; i++) {
+    const res = tryDoc(this.read.bind(this, buffer, offset, typeArgs.type, rootNode), i)
+    size += res.size
+    offset += res.size
+    value.push(res.value)
+  }
+  return new Result(value, size)
+}
+
+function writeArray (value, buffer, offset, typeArgs, rootNode) {
+  offset = sendCount.call(this, value.length, buffer, offset, typeArgs, rootNode)
+  for (let i = 0, l = value.length; i < l; i++) {
+    offset = tryDoc(this.write.bind(this, value[i], buffer, offset, typeArgs.type, rootNode), i)
+  }
+  return offset
+}
+
+function sizeOfArray (value, typeArgs, rootNode) {
+  let size = calcCount.call(this, value.length, typeArgs, rootNode)
+  for (let i = 0, l = value.length; i < l; i++) {
+    size += tryDoc(this.sizeOf.bind(this, value[i], typeArgs.type, rootNode), i)
+  }
+  return size
+}
+
+function readCount (buffer, offset, { type }, rootNode) {
+  return this.read(buffer, offset, type, rootNode)
+}
+
+function writeCount (value, buffer, offset, { countFor, type }, rootNode) {
+  // Actually gets the required field, and writes its length. Value is unused.
+  // TODO : a bit hackityhack.
+  return this.write(getField(countFor, rootNode).length, buffer, offset, type, rootNode)
+}
+
+function sizeOfCount (value, { countFor, type }, rootNode) {
+  // TODO : should I use value or getField().length ?
+  return this.sizeOf(getField(countFor, rootNode).length, type, rootNode)
+}
+
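+// Containers expose their parent scope to child reads through ParentSymbol
+// (this replaces the '..' string key used by the old interpreter datatypes).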
+function readContainer (buffer, offset, typeArgs, context) {
+  const value = { [ParentSymbol]: context }
+  let size = 0
+  for (const { type, name, anon } of typeArgs) {
+    const res = tryDoc(this.read.bind(this, buffer, offset, type, value), name || 'unknown')
+    size += res.size
+    offset += res.size
+    if (anon && res.value !== undefined) {
+      for (const k in res.value) {
+        value[k] = res.value[k]
+      }
+      continue
+    }
+    value[name] = res.value
+  }
+  value[ParentSymbol] = undefined
+  return new Result(value, size)
+}
+
+function writeContainer (value, buffer, offset, typeArgs, context) {
+  value[ParentSymbol] = context
+  for (const { type, name, anon } of typeArgs) {
+    offset = tryDoc(this.write.bind(this, anon ? value : value[name], buffer, offset, type, value), name || 'unknown')
+  }
+  value[ParentSymbol] = undefined
+  return offset
+}
+
+function sizeOfContainer (value, typeArgs, context) {
+  value[ParentSymbol] = context
+  let size = 0
+  for (const { type, name, anon } of typeArgs) {
+    size += tryDoc(this.sizeOf.bind(this, anon ? value : value[name], type, value), name || 'unknown')
+  }
+  value[ParentSymbol] = undefined
+  return size
+}
+
+module.exports = {
+  'array': [readArray, writeArray, sizeOfArray, schema['array']],
+  'count': [readCount, writeCount, sizeOfCount, schema['count']],
+  'container': [readContainer, writeContainer, sizeOfContainer, schema['container']]
+}
diff --git a/src/datatypes/interpreter/utils.js b/src/datatypes/interpreter/utils.js
new file mode 100644
index 0000000..e9b0152
--- /dev/null
+++ b/src/datatypes/interpreter/utils.js
@@ -0,0 +1,141 @@
+const { getCount, sendCount, calcCount, PartialReadError, Result } = require('../../utils')
+const schema = require('../../../ProtoDef/schemas/utils.json')
+
+function readMapper (buffer, offset, { type, mappings }, rootNode) {
+  const { size, value } = this.read(buffer, offset, type, rootNode)
+  for (const key in mappings) {
+    if (key === value || +key === +value) {
+      return new Result(mappings[key], size)
+    }
+  }
+  throw new Error(`${typeof value} "${value}" is not in the mappings value`)
+}
+
+function writeMapper (value, buffer, offset, { type, mappings }, rootNode) {
+  for (const key in mappings) {
+    const writeValue = mappings[key]
+    if (writeValue === value || +writeValue === +value) {
+      return this.write(key, buffer, offset, type, rootNode)
+    }
+  }
+  throw new Error(`${value} is not in the mappings value`)
+}
+
+function sizeOfMapper (value, { type, mappings }, rootNode) {
+  for (const key in mappings) {
+    const sizeValue = mappings[key]
+    if (sizeValue === value || +sizeValue === +value) {
+      return this.sizeOf(key, type, rootNode)
+    }
+  }
+  throw new Error(`${value} is not in the mappings value`)
+}
+
+function readPString (buffer, offset, typeArgs, rootNode) {
+  const { size, count } = getCount.call(this, buffer, offset, typeArgs, rootNode)
+  const cursor = offset + size
+  const strEnd = cursor + count
+  if (strEnd > buffer.length) {
+    throw new PartialReadError(`Missing characters in string, found size is ${buffer.length} expected size was ${strEnd}`)
+  }
+  return new Result(buffer.toString('utf8', cursor, strEnd), size + count)
+}
+
+function writePString (value, buffer, offset, typeArgs, rootNode) {
+  const length = Buffer.byteLength(value, 'utf8')
+  offset = sendCount.call(this, length, buffer, offset, typeArgs, rootNode)
+  buffer.write(value, offset, length, 'utf8')
+  return offset + length
+}
+
+function sizeOfPString (value, typeArgs, rootNode) {
+  const length = Buffer.byteLength(value, 'utf8')
+  const size = calcCount.call(this, length, typeArgs, rootNode)
+  return size + length
+}
+
+function readBuffer (buffer, offset, typeArgs, rootNode) {
+  const { size, count } = getCount.call(this, buffer, offset, typeArgs, rootNode)
+  offset += size
+  if (offset + count > buffer.length) throw new PartialReadError()
+  return new Result(buffer.slice(offset, offset + count), size + count)
+}
+
+function writeBuffer (value, buffer, offset, typeArgs, rootNode) {
+  offset = sendCount.call(this, value.length, buffer, offset, typeArgs, rootNode)
+  return offset + value.copy(buffer, offset)
+}
+
+function sizeOfBuffer (value, typeArgs, rootNode) {
+  return calcCount.call(this, value.length, typeArgs, rootNode) + value.length
+}
+
+function generateBitMask (n) {
+  return (1 << n) - 1
+}
+
+function readBitField (buffer, offset, typeArgs) {
+  const value = {}
+  const beginOffset = offset
+  let curVal = null
+  let bits = 0
+  for (const { size, signed, name } of typeArgs) {
+    let currentSize = size
+    let val = 0
+    while (currentSize > 0) {
+      if (bits === 0) {
+        if (buffer.length < offset + 1) { throw new PartialReadError() }
+        curVal = buffer[offset++]
+        bits = 8
+      }
+      const bitsToRead = Math.min(currentSize, bits)
+      val = (val << bitsToRead) | (curVal & generateBitMask(bits)) >> (bits - bitsToRead)
+      bits -= bitsToRead
+      currentSize -= bitsToRead
+    }
+    if (signed && val >= 1 << (size - 1)) { val -= 1 << size }
+    value[name] = val
+  }
+  return new Result(value, offset - beginOffset)
+}
+
+function writeBitField (value, buffer, offset, typeArgs) {
+  let toWrite = 0
+  let bits = 0
+  for (let { size, signed, name } of typeArgs) {
+    const val = value[name]
+    const min = +signed && -(1 << (size - 1))
+    const max = (1 << (size - signed)) - signed
+    if (val < min) { throw new Error(val + ' < ' + min) }
+    if (val >= max) { throw new Error(val + ' >= ' + max) }
+    while (size > 0) {
+      const writeBits = Math.min(8 - bits, size)
+      toWrite = toWrite << writeBits |
+        ((val >> (size - writeBits)) & generateBitMask(writeBits))
+      size -= writeBits
+      bits += writeBits
+      if (bits === 8) {
+        buffer[offset++] = toWrite
+        bits = 0
+        toWrite = 0
+      }
+    }
+  }
+  if (bits !== 0) {
+    buffer[offset++] = toWrite << (8 - bits)
+  }
+  return offset
+}
+
+function sizeOfBitField (value, typeArgs) {
+  let i = 0
+  for (const { size } of typeArgs) { i += size }
+  return Math.ceil(i / 8)
+}
+
+module.exports = {
+  'pstring': [readPString, writePString, sizeOfPString, schema['pstring']],
+  'buffer': [readBuffer, writeBuffer, sizeOfBuffer, schema['buffer']],
+  'bitfield': [readBitField, writeBitField, sizeOfBitField, schema['bitfield']],
+  'mapper': [readMapper, writeMapper, sizeOfMapper, schema['mapper']]
+}
diff --git a/src/datatypes/shared/index.js b/src/datatypes/shared/index.js
new file mode 100644
index 0000000..d3a4675
--- /dev/null
+++ b/src/datatypes/shared/index.js
@@ -0,0 +1,5 @@
+const numeric = require('./numeric')
+const utils = require('./utils')
+
+// There's no need to compile these types; just let the engine optimize them instead
+module.exports = { ...numeric, ...utils }
diff --git a/src/datatypes/numeric.js b/src/datatypes/shared/numeric.js
similarity index 54%
rename from src/datatypes/numeric.js
rename to src/datatypes/shared/numeric.js
index 60c0a72..f9c3afc 100644
--- a/src/datatypes/numeric.js
+++ b/src/datatypes/shared/numeric.js
@@ -1,11 +1,8 @@
-const { PartialReadError } = require('../utils')
+const { PartialReadError, Result } = require('../../utils')
 
 function readI64 (buffer, offset) {
   if (offset + 8 > buffer.length) { throw new PartialReadError() }
-  return {
-    value: [buffer.readInt32BE(offset), buffer.readInt32BE(offset + 4)],
-    size: 8
-  }
+  return new Result([buffer.readInt32BE(offset), buffer.readInt32BE(offset + 4)], 8)
 }
 
 function writeI64 (value, buffer, offset) {
@@ -16,10 +13,7 @@ function writeI64 (value, buffer, offset) {
 
 function readLI64 (buffer, offset) {
   if (offset + 8 > buffer.length) { throw new PartialReadError() }
-  return {
-    value: [buffer.readInt32LE(offset + 4), buffer.readInt32LE(offset)],
-    size: 8
-  }
+  return new Result([buffer.readInt32LE(offset + 4), buffer.readInt32LE(offset)], 8)
 }
 
 function writeLI64 (value, buffer, offset) {
@@ -30,10 +24,7 @@ function writeLI64 (value, buffer, offset) {
 
 function readU64 (buffer, offset) {
   if (offset + 8 > buffer.length) { throw new PartialReadError() }
-  return {
-    value: [buffer.readUInt32BE(offset), buffer.readUInt32BE(offset + 4)],
-    size: 8
-  }
+  return new Result([buffer.readUInt32BE(offset), buffer.readUInt32BE(offset + 4)], 8)
 }
 
 function writeU64 (value, buffer, offset) {
@@ -44,10 +35,7 @@ function writeU64 (value, buffer, offset) {
 
 function readLU64 (buffer, offset) {
   if (offset + 8 > buffer.length) { throw new PartialReadError() }
-  return {
-    value: [buffer.readUInt32LE(offset + 4), buffer.readUInt32LE(offset)],
-    size: 8
-  }
+  return new Result([buffer.readUInt32LE(offset + 4), buffer.readUInt32LE(offset)], 8)
 }
 
 function writeLU64 (value, buffer, offset) {
@@ -56,22 +44,6 @@ function writeLU64 (value, buffer, offset) {
   return offset + 8
 }
 
-function generateFunctions (bufferReader, bufferWriter, size, schema) {
-  const reader = (buffer, offset) => {
-    if (offset + size > buffer.length) { throw new PartialReadError() }
-    const value = buffer[bufferReader](offset)
-    return {
-      value: value,
-      size: size
-    }
-  }
-  const writer = (value, buffer, offset) => {
-    buffer[bufferWriter](value, offset)
-    return offset + size
-  }
-  return [reader, writer, size, schema]
-}
-
 const nums = {
   'i8': ['readInt8', 'writeInt8', 1],
   'u8': ['readUInt8', 'writeUInt8', 1],
@@ -91,13 +63,30 @@ const nums = {
   'lf64': ['readDoubleLE', 'writeDoubleLE', 8]
 }
 
-const types = Object.keys(nums).reduce((types, num) => {
-  types[num] = generateFunctions(nums[num][0], nums[num][1], nums[num][2], require('../../ProtoDef/schemas/numeric.json')[num])
-  return types
-}, {})
-types['i64'] = [readI64, writeI64, 8, require('../../ProtoDef/schemas/numeric.json')['i64']]
-types['li64'] = [readLI64, writeLI64, 8, require('../../ProtoDef/schemas/numeric.json')['li64']]
-types['u64'] = [readU64, writeU64, 8, require('../../ProtoDef/schemas/numeric.json')['u64']]
-types['lu64'] = [readLU64, writeLU64, 8, require('../../ProtoDef/schemas/numeric.json')['lu64']]
+const types = {
+  i64: [readI64, writeI64, 8, require('../../../ProtoDef/schemas/numeric.json')['i64']],
+  li64: [readLI64, writeLI64, 8, require('../../../ProtoDef/schemas/numeric.json')['li64']],
+  u64: [readU64, writeU64, 8, require('../../../ProtoDef/schemas/numeric.json')['u64']],
+  lu64: [readLU64, writeLU64, 8, require('../../../ProtoDef/schemas/numeric.json')['lu64']]
+}
+
+function readIntN (method, size, buffer, offset) {
+  if (offset + size > buffer.length) throw new PartialReadError()
+  return new Result(buffer[method](offset), size)
+}
+
+function writeIntN (method, value, buffer, offset) {
+  return buffer[method](value, offset)
+}
+
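+// Generate the remaining fixed-size numeric types from the table above; for example
+// 'u8' becomes readIntN bound to ('readUInt8', 1), i.e.
+// (buffer, offset) => new Result(buffer.readUInt8(offset), 1).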
+for (const num in nums) {
+  const [ bufferReader, bufferWriter, size ] = nums[num]
+  types[num] = [
+    readIntN.bind(null, bufferReader, size),
+    writeIntN.bind(null, bufferWriter),
+    size,
+    require('../../../ProtoDef/schemas/numeric.json')[num]
+  ]
+}
 
 module.exports = types
diff --git a/src/datatypes/shared/utils.js b/src/datatypes/shared/utils.js
new file mode 100644
index 0000000..097aee0
--- /dev/null
+++ b/src/datatypes/shared/utils.js
@@ -0,0 +1,68 @@
+const { PartialReadError, Result } = require('../../utils')
+const schema = require('../../../ProtoDef/schemas/utils.json')
+
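+// Varints here are LEB128-style: 7 data bits per byte, high bit set on all but the
+// last byte. Worked example: 300 (0b1_0010_1100) encodes to [0xAC, 0x02], and
+// readVarInt(Buffer.from([0xac, 0x02]), 0) yields a Result with value 300, size 2.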
+const LOG2 = Math.log2(0x7F)
+function readVarInt (buffer, offset) {
+  let value = 0
+  let size = 0
+  while (true) {
+    const v = buffer[offset + size]
+    value |= (v & 0x7F) << (size++ * 7)
+    if ((v & 0x80) === 0) break
+  }
+  if (offset + size > buffer.length) throw new PartialReadError()
+  return new Result(value, size)
+}
+
+function sizeOfVarInt (value) {
+  return value >= 0 ? Math.ceil(Math.log2(Math.max(value, 127)) / LOG2) : 5
+}
+
+function writeVarInt (value, buffer, offset) {
+  while (value & ~0x7F) {
+    buffer[offset++] = (value & 0xFF) | 0x80
+    value >>>= 7
+  }
+  buffer[offset++] = value | 0
+  return offset
+}
+
+function readBool (buffer, offset) {
+  if (buffer.length <= offset) throw new PartialReadError()
+  return new Result(buffer[offset] === 1, 1)
+}
+
+function writeBool (value, buffer, offset) {
+  return buffer.writeUInt8(value & 1, offset)
+}
+
+function readVoid () {
+  return new Result(undefined, 0)
+}
+
+function writeVoid (value, buffer, offset) {
+  return offset
+}
+
+function readCString (buffer, offset) {
+  const index = buffer.indexOf(0x00, offset)
+  if (index === -1) throw new PartialReadError()
+  return new Result(buffer.toString('utf8', offset, index), index - offset + 1)
+}
+
+function writeCString (value, buffer, offset) {
+  const length = Buffer.byteLength(value, 'utf8')
+  buffer.write(value, offset, length, 'utf8')
+  return buffer.writeInt8(0x00, offset + length)
+}
+
+function sizeOfCString (value) {
+  return Buffer.byteLength(value, 'utf8') + 1
+}
+
+module.exports = {
+  'varint': [readVarInt, writeVarInt, sizeOfVarInt, schema['varint']],
+  'bool': [readBool, writeBool, 1, schema['bool']],
+  'void': [readVoid, writeVoid, 0, schema['void']],
+  'cstring': [readCString, writeCString, sizeOfCString, schema['cstring']]
+}
diff --git a/src/datatypes/structures.js b/src/datatypes/structures.js
deleted file mode 100644
index 7677bf4..0000000
--- a/src/datatypes/structures.js
+++ /dev/null
@@ -1,90 +0,0 @@
-const { getField, getCount, sendCount, calcCount, tryDoc } = require('../utils')
-
-module.exports = {
-  'array': [readArray, writeArray, sizeOfArray, require('../../ProtoDef/schemas/structures.json')['array']],
-  'count': [readCount, writeCount, sizeOfCount, require('../../ProtoDef/schemas/structures.json')['count']],
-  'container': [readContainer, writeContainer, sizeOfContainer, require('../../ProtoDef/schemas/structures.json')['container']]
-}
-
-function readArray (buffer, offset, typeArgs, rootNode) {
-  const results = {
-    value: [],
-    size: 0
-  }
-  let value
-  let { count, size } = getCount.call(this, buffer, offset, typeArgs, rootNode)
-  offset += size
-  results.size += size
-  for (let i = 0; i < count; i++) {
-    ({ size, value } = tryDoc(() => this.read(buffer, offset, typeArgs.type, rootNode), i))
-    results.size += size
-    offset += size
-    results.value.push(value)
-  }
-  return results
-}
-
-function writeArray (value, buffer, offset, typeArgs, rootNode) {
-  offset = sendCount.call(this, value.length, buffer, offset, typeArgs, rootNode)
-  return value.reduce((offset, v, index) => tryDoc(() => this.write(v, buffer, offset, typeArgs.type, rootNode), index), offset)
-}
-
-function sizeOfArray (value, typeArgs, rootNode) {
-  let size = calcCount.call(this, value.length, typeArgs, rootNode)
-  size = value.reduce((size, v, index) => tryDoc(() => size + this.sizeOf(v, typeArgs.type, rootNode), index), size)
-  return size
-}
-
-function readContainer (buffer, offset, typeArgs, context) {
-  const results = {
-    value: { '..': context },
-    size: 0
-  }
-  typeArgs.forEach(({ type, name, anon }) => {
-    tryDoc(() => {
-      const readResults = this.read(buffer, offset, type, results.value)
-      results.size += readResults.size
-      offset += readResults.size
-      if (anon) {
-        if (readResults.value !== undefined) {
-          Object.keys(readResults.value).forEach(key => {
-            results.value[key] = readResults.value[key]
-          })
-        }
-      } else { results.value[name] = readResults.value }
-    }, name || 'unknown')
-  })
-  delete results.value['..']
-  return results
-}
-
-function writeContainer (value, buffer, offset, typeArgs, context) {
-  value['..'] = context
-  offset = typeArgs.reduce((offset, { type, name, anon }) =>
-    tryDoc(() => this.write(anon ? value : value[name], buffer, offset, type, value), name || 'unknown'), offset)
-  delete value['..']
-  return offset
-}
-
-function sizeOfContainer (value, typeArgs, context) {
-  value['..'] = context
-  const size = typeArgs.reduce((size, { type, name, anon }) =>
-    size + tryDoc(() => this.sizeOf(anon ? value : value[name], type, value), name || 'unknown'), 0)
-  delete value['..']
-  return size
-}
-
-function readCount (buffer, offset, { type }, rootNode) {
-  return this.read(buffer, offset, type, rootNode)
-}
-
-function writeCount (value, buffer, offset, { countFor, type }, rootNode) {
-  // Actually gets the required field, and writes its length. Value is unused.
-  // TODO : a bit hackityhack.
-  return this.write(getField(countFor, rootNode).length, buffer, offset, type, rootNode)
-}
-
-function sizeOfCount (value, { countFor, type }, rootNode) {
-  // TODO : should I use value or getField().length ?
-  return this.sizeOf(getField(countFor, rootNode).length, type, rootNode)
-}
diff --git a/src/datatypes/utils.js b/src/datatypes/utils.js
deleted file mode 100644
index bf539d2..0000000
--- a/src/datatypes/utils.js
+++ /dev/null
@@ -1,260 +0,0 @@
-const assert = require('assert')
-
-const { getCount, sendCount, calcCount, PartialReadError } = require('../utils')
-
-module.exports = {
-  'varint': [readVarInt, writeVarInt, sizeOfVarInt, require('../../ProtoDef/schemas/utils.json')['varint']],
-  'bool': [readBool, writeBool, 1, require('../../ProtoDef/schemas/utils.json')['bool']],
-  'pstring': [readPString, writePString, sizeOfPString, require('../../ProtoDef/schemas/utils.json')['pstring']],
-  'buffer': [readBuffer, writeBuffer, sizeOfBuffer, require('../../ProtoDef/schemas/utils.json')['buffer']],
-  'void': [readVoid, writeVoid, 0, require('../../ProtoDef/schemas/utils.json')['void']],
-  'bitfield': [readBitField, writeBitField, sizeOfBitField, require('../../ProtoDef/schemas/utils.json')['bitfield']],
-  'cstring': [readCString, writeCString, sizeOfCString, require('../../ProtoDef/schemas/utils.json')['cstring']],
-  'mapper': [readMapper, writeMapper, sizeOfMapper, require('../../ProtoDef/schemas/utils.json')['mapper']]
-}
-
-function mapperEquality (a, b) {
-  return a === b || parseInt(a) === parseInt(b)
-}
-
-function readMapper (buffer, offset, { type, mappings }, rootNode) {
-  const { size, value } = this.read(buffer, offset, type, rootNode)
-  let mappedValue = null
-  const keys = Object.keys(mappings)
-  for (let i = 0; i < keys.length; i++) {
-    if (mapperEquality(keys[i], value)) {
-      mappedValue = mappings[keys[i]]
-      break
-    }
-  }
-  if (mappedValue == null) throw new Error(value + ' is not in the mappings value')
-  return {
-    size: size,
-    value: mappedValue
-  }
-}
-
-function writeMapper (value, buffer, offset, { type, mappings }, rootNode) {
-  const keys = Object.keys(mappings)
-  let mappedValue = null
-  for (let i = 0; i < keys.length; i++) {
-    if (mapperEquality(mappings[keys[i]], value)) {
-      mappedValue = keys[i]
-      break
-    }
-  }
-  if (mappedValue == null) throw new Error(value + ' is not in the mappings value')
-  return this.write(mappedValue, buffer, offset, type, rootNode)
-}
-
-function sizeOfMapper (value, { type, mappings }, rootNode) {
-  const keys = Object.keys(mappings)
-  let mappedValue = null
-  for (let i = 0; i < keys.length; i++) {
-    if (mapperEquality(mappings[keys[i]], value)) {
-      mappedValue = keys[i]
-      break
-    }
-  }
-  if (mappedValue == null) throw new Error(value + ' is not in the mappings value')
-  return this.sizeOf(mappedValue, type, rootNode)
-}
-
-function readVarInt (buffer, offset) {
-  let result = 0
-  let shift = 0
-  let cursor = offset
-
-  while (true) {
-    if (cursor + 1 > buffer.length) { throw new PartialReadError() }
-    const b = buffer.readUInt8(cursor)
-    result |= ((b & 0x7f) << shift) // Add the bits to our number, except MSB
-    cursor++
-    if (!(b & 0x80)) { // If the MSB is not set, we return the number
-      return {
-        value: result,
-        size: cursor - offset
-      }
-    }
-    shift += 7 // we only have 7 bits, MSB being the return-trigger
-    assert.ok(shift < 64, 'varint is too big') // Make sure our shift don't overflow.
-  }
-}
-
-function sizeOfVarInt (value) {
-  let cursor = 0
-  while (value & ~0x7F) {
-    value >>>= 7
-    cursor++
-  }
-  return cursor + 1
-}
-
-function writeVarInt (value, buffer, offset) {
-  let cursor = 0
-  while (value & ~0x7F) {
-    buffer.writeUInt8((value & 0xFF) | 0x80, offset + cursor)
-    cursor++
-    value >>>= 7
-  }
-  buffer.writeUInt8(value, offset + cursor)
-  return offset + cursor + 1
-}
-
-function readPString (buffer, offset, typeArgs, rootNode) {
-  const { size, count } = getCount.call(this, buffer, offset, typeArgs, rootNode)
-  const cursor = offset + size
-  const strEnd = cursor + count
-  if (strEnd > buffer.length) {
-    throw new PartialReadError('Missing characters in string, found size is ' + buffer.length +
-    ' expected size was ' + strEnd)
-  }
-
-  return {
-    value: buffer.toString('utf8', cursor, strEnd),
-    size: strEnd - offset
-  }
-}
-
-function writePString (value, buffer, offset, typeArgs, rootNode) {
-  const length = Buffer.byteLength(value, 'utf8')
-  offset = sendCount.call(this, length, buffer, offset, typeArgs, rootNode)
-  buffer.write(value, offset, length, 'utf8')
-  return offset + length
-}
-
-function sizeOfPString (value, typeArgs, rootNode) {
-  const length = Buffer.byteLength(value, 'utf8')
-  const size = calcCount.call(this, length, typeArgs, rootNode)
-  return size + length
-}
-
-function readBool (buffer, offset) {
-  if (offset + 1 > buffer.length) throw new PartialReadError()
-  const value = buffer.readInt8(offset)
-  return {
-    value: !!value,
-    size: 1
-  }
-}
-
-function writeBool (value, buffer, offset) {
-  buffer.writeInt8(+value, offset)
-  return offset + 1
-}
-
-function readBuffer (buffer, offset, typeArgs, rootNode) {
-  const { size, count } = getCount.call(this, buffer, offset, typeArgs, rootNode)
-  offset += size
-  if (offset + count > buffer.length) throw new PartialReadError()
-  return {
-    value: buffer.slice(offset, offset + count),
-    size: size + count
-  }
-}
-
-function writeBuffer (value, buffer, offset, typeArgs, rootNode) {
-  offset = sendCount.call(this, value.length, buffer, offset, typeArgs, rootNode)
-  value.copy(buffer, offset)
-  return offset + value.length
-}
-
-function sizeOfBuffer (value, typeArgs, rootNode) {
-  const size = calcCount.call(this, value.length, typeArgs, rootNode)
-  return size + value.length
-}
-
-function readVoid () {
-  return {
-    value: undefined,
-    size: 0
-  }
-}
-
-function writeVoid (value, buffer, offset) {
-  return offset
-}
-
-function generateBitMask (n) {
-  return (1 << n) - 1
-}
-
-function readBitField (buffer, offset, typeArgs) {
-  const beginOffset = offset
-  let curVal = null
-  let bits = 0
-  const results = {}
-  results.value = typeArgs.reduce((acc, { size, signed, name }) => {
-    let currentSize = size
-    let val = 0
-    while (currentSize > 0) {
-      if (bits === 0) {
-        if (buffer.length < offset + 1) { throw new PartialReadError() }
-        curVal = buffer[offset++]
-        bits = 8
-      }
-      const bitsToRead = Math.min(currentSize, bits)
-      val = (val << bitsToRead) | (curVal & generateBitMask(bits)) >> (bits - bitsToRead)
-      bits -= bitsToRead
-      currentSize -= bitsToRead
-    }
-    if (signed && val >= 1 << (size - 1)) { val -= 1 << size }
-    acc[name] = val
-    return acc
-  }, {})
-  results.size = offset - beginOffset
-  return results
-}
-function writeBitField (value, buffer, offset, typeArgs) {
-  let toWrite = 0
-  let bits = 0
-  typeArgs.forEach(({ size, signed, name }) => {
-    const val = value[name]
-    if ((!signed && val < 0) || (signed && val < -(1 << (size - 1)))) { throw new Error(value + ' < ' + signed ? (-(1 << (size - 1))) : 0) } else if ((!signed && val >= 1 << size) ||
-        (signed && val >= (1 << (size - 1)) - 1)) { throw new Error(value + ' >= ' + signed ? (1 << size) : ((1 << (size - 1)) - 1)) }
-    while (size > 0) {
-      const writeBits = Math.min(8 - bits, size)
-      toWrite = toWrite << writeBits |
-        ((val >> (size - writeBits)) & generateBitMask(writeBits))
-      size -= writeBits
-      bits += writeBits
-      if (bits === 8) {
-        buffer[offset++] = toWrite
-        bits = 0
-        toWrite = 0
-      }
-    }
-  })
-  if (bits !== 0) { buffer[offset++] = toWrite << (8 - bits) }
-  return offset
-}
-
-function sizeOfBitField (value, typeArgs) {
-  return Math.ceil(typeArgs.reduce((acc, { size }) => {
-    return acc + size
-  }, 0) / 8)
-}
-
-function readCString (buffer, offset) {
-  let size = 0
-  while (offset + size < buffer.length && buffer[offset + size] !== 0x00) { size++ }
-  if (buffer.length < offset + size + 1) { throw new PartialReadError() }
-
-  return {
-    value: buffer.toString('utf8', offset, offset + size),
-    size: size + 1
-  }
-}
-
-function writeCString (value, buffer, offset) {
-  const length = Buffer.byteLength(value, 'utf8')
-  buffer.write(value, offset, length, 'utf8')
-  offset += length
-  buffer.writeInt8(0x00, offset)
-  return offset + 1
-}
-
-function sizeOfCString (value) {
-  const length = Buffer.byteLength(value, 'utf8')
-  return length + 1
-}
diff --git a/src/index.js b/src/index.js
index dd9a32e..3d0d926 100644
--- a/src/index.js
+++ b/src/index.js
@@ -1,12 +1,17 @@
-const ProtoDef = require('./protodef')
-const proto = new ProtoDef()
+const { Serializer, Parser, FullPacketParser } = require('./serializer')
+const ProtoDef = require('./interpreter')
+const Compiler = require('./compiler')
+const utils = require('./utils')
+const types = require('./datatypes/interpreter')
+const { createEncoding } = utils
 
 module.exports = {
-  ProtoDef: ProtoDef,
-  Serializer: require('./serializer').Serializer,
-  Parser: require('./serializer').Parser,
-  FullPacketParser: require('./serializer').FullPacketParser,
-  Compiler: require('./compiler'),
-  types: proto.types,
-  utils: require('./utils')
+  ProtoDef,
+  Compiler,
+  Serializer,
+  Parser,
+  FullPacketParser,
+  createEncoding,
+  utils,
+  types
 }
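
The entry point now re-exports the interpreter, the compiler, the stream helpers and createEncoding under flat names, so consumer code stays a one-liner. A sketch, assuming the published package name protodef and the existing Serializer(proto, mainType) signature:

    const { ProtoDef, Serializer } = require('protodef')
    const proto = new ProtoDef()
    const serializer = new Serializer(proto, 'varint')
    serializer.on('data', (buf) => console.log(buf)) // <Buffer ac 02>
    serializer.write(300)
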
diff --git a/src/interpreter.js b/src/interpreter.js
new file mode 100644
index 0000000..f6f824b
--- /dev/null
+++ b/src/interpreter.js
@@ -0,0 +1,182 @@
+const { getFieldInfo, isFieldInfo, tryCatch } = require('./utils')
+const reduce = require('lodash.reduce')
+const get = require('lodash.get')
+const clonedeep = require('lodash.clonedeep')
+const Validator = require('protodef-validator')
+const defaultDatatypes = require('./datatypes/interpreter')
+
+class ProtoDef {
+  constructor (validation = true) {
+    this.validator = validation ? new Validator() : null
+    this.clearTypes()
+    this.addTypes(defaultDatatypes)
+  }
+
+  addProtocol (protocolData, path) {
+    if (this.validator) { this.validator.validateProtocol(protocolData) }
+    this.recursiveAddTypes(protocolData, path)
+  }
+
+  recursiveAddTypes (protocolData, path) {
+    if (protocolData === undefined) return
+    if (protocolData.types) { this.addTypes(protocolData.types) }
+    this.recursiveAddTypes(get(protocolData, path.shift()), path)
+  }
+
+  addType (name, functions, validate = true) {
+    if (functions === 'native') {
+      if (this.validator) { this.validator.addType(name) }
+      return
+    }
+    if (isFieldInfo(functions)) {
+      if (this.validator) {
+        if (validate) { this.validator.validateType(functions) }
+        this.validator.addType(name)
+      }
+
+      let { type, typeArgs } = getFieldInfo(functions)
+      this.types[name] = typeArgs ? extendType.call(this, this.types[type], typeArgs) : this.types[type]
+    } else {
+      if (this.validator) {
+        if (functions[3]) {
+          this.validator.addType(name, functions[3])
+        } else { this.validator.addType(name) }
+      }
+
+      this.types[name] = functions
+    }
+  }
+
+  removeType (name) {
+    delete this.types[name]
+  }
+
+  addTypes (types) {
+    for (const name in types) {
+      this.addType(name, types[name], this.validator)
+    }
+  }
+
+  clearTypes () {
+    this.types = {}
+  }
+
+  read (buffer, cursor, _fieldInfo, rootNodes) {
+    const { type, typeArgs } = getFieldInfo(_fieldInfo)
+    const typeFunctions = this.types[type]
+    if (!typeFunctions) { throw new Error(`missing data type: ${type}`) }
+    return typeFunctions[0].call(this, buffer, cursor, typeArgs, rootNodes)
+  }
+
+  write (value, buffer, offset, _fieldInfo, rootNode) {
+    const { type, typeArgs } = getFieldInfo(_fieldInfo)
+    const typeFunctions = this.types[type]
+    if (!typeFunctions) { throw new Error(`missing data type: ${type}`) }
+    return typeFunctions[1].call(this, value, buffer, offset, typeArgs, rootNode)
+  }
+
+  sizeOf (value, _fieldInfo, rootNode) {
+    const { type, typeArgs } = getFieldInfo(_fieldInfo)
+    const typeFunctions = this.types[type]
+    if (!typeFunctions) { throw new Error(`missing data type: ${type}`) }
+    if (typeof typeFunctions[2] === 'function') {
+      return typeFunctions[2].call(this, value, typeArgs, rootNode)
+    } else {
+      return typeFunctions[2]
+    }
+  }
+
+  _readErrorHandler (e) {
+    e.message = `Read error for ${e.field} : ${e.message}`
+    throw e
+  }
+
+  _writeErrorHandler (e) {
+    e.message = `Write error for ${e.field} : ${e.message}`
+    throw e
+  }
+
+  _sizeOfErrorHandler (e) {
+    e.message = `SizeOf error for ${e.field} : ${e.message}`
+    throw e
+  }
+
+  createPacketBuffer (type, packet) {
+    const length = tryCatch(this.sizeOf.bind(this, packet, type, {}), this._sizeOfErrorHandler)
+    const buffer = Buffer.allocUnsafe(length)
+    tryCatch(this.write.bind(this, packet, buffer, 0, type, {}), this._writeErrorHandler)
+    return buffer
+  }
+
+  parsePacketBuffer (type, buffer) {
+    const result = tryCatch(this.read.bind(this, buffer, 0, type, {}), this._readErrorHandler)
+    return {
+      data: result.value,
+      metadata: { size: result.size },
+      buffer: buffer.slice(0, result.size)
+    }
+  }
+}
+
+function findArgs (acc, v, k) {
+  if (typeof v === 'string' && v.charAt(0) === '$') {
+    acc.push({ path: k, val: v.substr(1) })
+  } else if (Array.isArray(v) || typeof v === 'object') {
+    acc = acc.concat(reduce(v, findArgs, []).map((v) => ({ path: `${k}.${v.path}`, val: v.val })))
+  }
+  return acc
+}
+
+function produceArgsObject (defaultTypeArgs, argPos, typeArgs) {
+  if (typeArgs === undefined) return defaultTypeArgs
+  const args = clonedeep(defaultTypeArgs)
+  for (const { path, val } of argPos) {
+    // Walk the dotted path and overwrite the cloned default with the caller-supplied value
+    const c = path.split('.').reverse()
+    let into = args
+    while (c.length > 1) {
+      into = into[c.pop()]
+    }
+    into[c.pop()] = typeArgs[val]
+  }
+  return args
+}
+
+function produceArgsStatic (defaultTypeArgs) {
+  return defaultTypeArgs
+}
+
+function constructProduceArgs (defaultTypeArgs) {
+  if (typeof defaultTypeArgs !== 'object') return produceArgsStatic.bind(this, defaultTypeArgs)
+  const argPos = reduce(defaultTypeArgs, findArgs, [])
+  return produceArgsObject.bind(this, defaultTypeArgs, argPos)
+}
+
+function extendedRead (_read, _produceArgs, buffer, offset, typeArgs, context) {
+  return _read.call(this, buffer, offset, _produceArgs(typeArgs), context)
+}
+
+function extendedWrite (_write, _produceArgs, value, buffer, offset, typeArgs, context) {
+  return _write.call(this, value, buffer, offset, _produceArgs(typeArgs), context)
+}
+
+function extendedSizeOf (_sizeOf, _produceArgs, value, typeArgs, context) {
+  return _sizeOf.call(this, value, _produceArgs(typeArgs), context)
+}
+
+function staticSizeOf (_sizeOf) {
+  return _sizeOf
+}
+
+function extendType ([ _read, _write, _sizeOf ], defaultTypeArgs) {
+  const produceArgs = constructProduceArgs(defaultTypeArgs)
+  return [
+    extendedRead.bind(this, _read, produceArgs),
+    extendedWrite.bind(this, _write, produceArgs),
+    typeof _sizeOf === 'function'
+      ? extendedSizeOf.bind(this, _sizeOf, produceArgs)
+      : staticSizeOf.bind(this, _sizeOf)
+  ]
+}
+
+module.exports = ProtoDef
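
The findArgs / produceArgsObject / extendType trio resolves '$'-prefixed placeholders when a type is declared once and parametrized at each use site: the default typeArgs are deep-cloned and every recorded placeholder path is overwritten with the caller-supplied value. A hedged sketch of that flow, with illustrative names and assuming 'pstring' remains among the interpreter defaults:

    const { ProtoDef } = require('protodef')
    const proto = new ProtoDef()
    // '$lenType' is recorded by findArgs; produceArgsObject patches it in per call
    proto.addType('my_string', ['pstring', { countType: '$lenType' }])
    const buf = proto.createPacketBuffer(['my_string', { lenType: 'varint' }], 'hi')
    // buf is <Buffer 02 68 69>: a varint length prefix followed by the utf8 bytes
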
diff --git a/src/protodef.js b/src/protodef.js
deleted file mode 100644
index 60cbf08..0000000
--- a/src/protodef.js
+++ /dev/null
@@ -1,164 +0,0 @@
-const { getFieldInfo, tryCatch } = require('./utils')
-const reduce = require('lodash.reduce')
-const get = require('lodash.get')
-const Validator = require('protodef-validator')
-
-function isFieldInfo (type) {
-  return typeof type === 'string' ||
-    (Array.isArray(type) && typeof type[0] === 'string') ||
-    type.type
-}
-
-function findArgs (acc, v, k) {
-  if (typeof v === 'string' && v.charAt(0) === '$') { acc.push({ 'path': k, 'val': v.substr(1) }) } else if (Array.isArray(v) || typeof v === 'object') { acc = acc.concat(reduce(v, findArgs, []).map((v) => ({ 'path': k + '.' + v.path, 'val': v.val }))) }
-  return acc
-}
-
-function setField (path, val, into) {
-  const c = path.split('.').reverse()
-  while (c.length > 1) {
-    into = into[c.pop()]
-  }
-  into[c.pop()] = val
-}
-
-function extendType (functions, defaultTypeArgs) {
-  const json = JSON.stringify(defaultTypeArgs)
-  const argPos = reduce(defaultTypeArgs, findArgs, [])
-  function produceArgs (typeArgs) {
-    const args = JSON.parse(json)
-    argPos.forEach((v) => {
-      setField(v.path, typeArgs[v.val], args)
-    })
-    return args
-  }
-  return [function read (buffer, offset, typeArgs, context) {
-    return functions[0].call(this, buffer, offset, produceArgs(typeArgs), context)
-  }, function write (value, buffer, offset, typeArgs, context) {
-    return functions[1].call(this, value, buffer, offset, produceArgs(typeArgs), context)
-  }, function sizeOf (value, typeArgs, context) {
-    if (typeof functions[2] === 'function') { return functions[2].call(this, value, produceArgs(typeArgs), context) } else { return functions[2] }
-  }]
-}
-
-class ProtoDef {
-  constructor (validation = true) {
-    this.types = {}
-    this.validator = validation ? new Validator() : null
-    this.addDefaultTypes()
-  }
-
-  addDefaultTypes () {
-    this.addTypes(require('./datatypes/numeric'))
-    this.addTypes(require('./datatypes/utils'))
-    this.addTypes(require('./datatypes/structures'))
-    this.addTypes(require('./datatypes/conditional'))
-  }
-
-  addProtocol (protocolData, path) {
-    const self = this
-    function recursiveAddTypes (protocolData, path) {
-      if (protocolData === undefined) { return }
-      if (protocolData.types) { self.addTypes(protocolData.types) }
-      recursiveAddTypes(get(protocolData, path.shift()), path)
-    }
-
-    if (this.validator) { this.validator.validateProtocol(protocolData) }
-
-    recursiveAddTypes(protocolData, path)
-  }
-
-  addType (name, functions, validate = true) {
-    if (functions === 'native') {
-      if (this.validator) { this.validator.addType(name) }
-      return
-    }
-    if (isFieldInfo(functions)) {
-      if (this.validator) {
-        if (validate) { this.validator.validateType(functions) }
-        this.validator.addType(name)
-      }
-
-      let { type, typeArgs } = getFieldInfo(functions)
-      this.types[name] = typeArgs ? extendType(this.types[type], typeArgs) : this.types[type]
-    } else {
-      if (this.validator) {
-        if (functions[3]) {
-          this.validator.addType(name, functions[3])
-        } else { this.validator.addType(name) }
-      }
-
-      this.types[name] = functions
-    }
-  }
-
-  addTypes (types) {
-    Object.keys(types).forEach((name) => this.addType(name, types[name], false))
-    if (this.validator) {
-      Object.keys(types).forEach((name) => {
-        if (isFieldInfo(types[name])) {
-          this.validator.validateType(types[name])
-        }
-      })
-    }
-  }
-
-  read (buffer, cursor, _fieldInfo, rootNodes) {
-    let { type, typeArgs } = getFieldInfo(_fieldInfo)
-    const typeFunctions = this.types[type]
-    if (!typeFunctions) { throw new Error('missing data type: ' + type) }
-    return typeFunctions[0].call(this, buffer, cursor, typeArgs, rootNodes)
-  }
-
-  write (value, buffer, offset, _fieldInfo, rootNode) {
-    let { type, typeArgs } = getFieldInfo(_fieldInfo)
-    const typeFunctions = this.types[type]
-    if (!typeFunctions) { throw new Error('missing data type: ' + type) }
-    return typeFunctions[1].call(this, value, buffer, offset, typeArgs, rootNode)
-  }
-
-  sizeOf (value, _fieldInfo, rootNode) {
-    let { type, typeArgs } = getFieldInfo(_fieldInfo)
-    const typeFunctions = this.types[type]
-    if (!typeFunctions) {
-      throw new Error('missing data type: ' + type)
-    }
-    if (typeof typeFunctions[2] === 'function') {
-      return typeFunctions[2].call(this, value, typeArgs, rootNode)
-    } else {
-      return typeFunctions[2]
-    }
-  }
-
-  createPacketBuffer (type, packet) {
-    const length = tryCatch(() => this.sizeOf(packet, type, {}),
-      (e) => {
-        e.message = `SizeOf error for ${e.field} : ${e.message}`
-        throw e
-      })
-    const buffer = Buffer.allocUnsafe(length)
-    tryCatch(() => this.write(packet, buffer, 0, type, {}),
-      (e) => {
-        e.message = `Write error for ${e.field} : ${e.message}`
-        throw e
-      })
-    return buffer
-  }
-
-  parsePacketBuffer (type, buffer) {
-    const { value, size } = tryCatch(() => this.read(buffer, 0, type, {}),
-      (e) => {
-        e.message = `Read error for ${e.field} : ${e.message}`
-        throw e
-      })
-    return {
-      data: value,
-      metadata: {
-        size: size
-      },
-      buffer: buffer.slice(0, size)
-    }
-  }
-}
-
-module.exports = ProtoDef
diff --git a/src/serializer.js b/src/serializer.js
index f7632df..c81a71c 100644
--- a/src/serializer.js
+++ b/src/serializer.js
@@ -13,7 +13,7 @@ class Serializer extends Transform {
   }
 
   _transform (chunk, enc, cb) {
-    let buf
+    let buf // DO NOT REMOVE THIS WORKAROUND!
     try {
       buf = this.createPacketBuffer(chunk)
     } catch (e) {
@@ -39,7 +39,7 @@ class Parser extends Transform {
   _transform (chunk, enc, cb) {
     this.queue = Buffer.concat([this.queue, chunk])
     while (true) {
-      let packet
+      let packet // DO NOT REMOVE THIS WORKAROUND!
       try {
         packet = this.parsePacketBuffer(this.queue)
       } catch (e) {
@@ -57,10 +57,11 @@ class Parser extends Transform {
 }
 
 class FullPacketParser extends Transform {
-  constructor (proto, mainType) {
+  constructor (proto, mainType, noErrorLogging = false) {
     super({ readableObjectMode: true })
     this.proto = proto
     this.mainType = mainType
+    this.noErrorLogging = noErrorLogging
   }
 
   parsePacketBuffer (buffer) {
@@ -71,11 +72,16 @@ class FullPacketParser extends Transform {
     let packet
     try {
       packet = this.parsePacketBuffer(chunk)
-      if (packet.metadata.size !== chunk.length) {
-        console.log('Chunk size is ' + chunk.length + ' but only ' + packet.metadata.size + ' was read ; partial packet : ' +
-          JSON.stringify(packet.data) + '; buffer :' + chunk.toString('hex'))
+      if (packet.metadata.size !== chunk.length && !this.noErrorLogging) {
+        console.log(`Chunk size is ${chunk.length} but only ${packet.metadata.size} was read ; partial packet : ${JSON.stringify(packet.data)}; buffer : ${chunk.toString('hex')}`)
       }
     } catch (e) {
+      if (e.partialReadError) {
+        if (!this.noErrorLogging) {
+          console.log(e.stack)
+        }
+        return cb()
+      }
       return cb(e)
     }
     this.push(packet)
@@ -83,8 +89,4 @@ class FullPacketParser extends Transform {
   }
 }
 
-module.exports = {
-  Serializer: Serializer,
-  Parser: Parser,
-  FullPacketParser: FullPacketParser
-}
+module.exports = { Serializer, Parser, FullPacketParser }
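
The new noErrorLogging flag lets embedders silence both the partial-packet stack trace and the oversized-chunk warning, while partialReadError is still swallowed instead of destroying the stream. A sketch of the intended call site (proto, the 'packet' type and handlePacket are assumptions, not part of this diff):

    const { FullPacketParser } = require('protodef')
    const parser = new FullPacketParser(proto, 'packet', true) // third arg: no console noise
    parser.on('data', ({ data, metadata }) => handlePacket(data))
    socket.pipe(parser)
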
diff --git a/src/utils.js b/src/utils.js
index e9c3e16..3351069 100644
--- a/src/utils.js
+++ b/src/utils.js
@@ -1,85 +1,164 @@
+const Enum = Object.freeze({
+  CompilerTypeKind: {
+    NATIVE: 0,
+    CONTEXT: 1,
+    PARAMETRIZABLE: 2
+  },
+  ParentSymbol: typeof Symbol !== 'undefined' ? Symbol('ProtoDefContext') : '..'
+})
+
+class Result {
+  // Using this wrapper is up to 30% faster than constructing
+  // plain objects ({ value, size }). V8 will use inline caching
+  // and hidden classes to speed this up.
+  constructor (value = undefined, size = 0) {
+    this.value = value
+    this.size = size
+  }
+  // This getter will be inlined
+  get count () { return this.value }
+}
+
+class ExtendableError extends Error {
+  constructor (message) {
+    super(message)
+    this.name = this.constructor.name
+    this.message = message
+    if (Error.captureStackTrace != null) {
+      Error.captureStackTrace(this, this.constructor)
+    }
+  }
+}
+
+class PartialReadError extends ExtendableError {
+  constructor (message) {
+    super(message)
+    this.partialReadError = true
+  }
+}
+
+function tryCatch (tryfn, catchfn) {
+  try { return tryfn() } catch (e) { catchfn(e) }
+}
+
+function typeDocErrorHandler (field, e) {
+  e.field = e.field ? `${field}.${e.field}` : field
+  throw e
+}
+
+function tryDoc (tryfn, field) {
+  return tryCatch(tryfn, typeDocErrorHandler.bind(this, field))
+}
+
 function getField (countField, context) {
+  if (countField.startsWith('/')) {
+    while (context.hasOwnProperty(Enum.ParentSymbol)) {
+      context = context[Enum.ParentSymbol]
+    }
+    countField = countField.slice(1)
+  }
   const countFieldArr = countField.split('/')
-  let i = 0
-  if (countFieldArr[i] === '') {
-    while (context.hasOwnProperty('..')) { context = context['..'] }
-    i++
+  for (const field of countFieldArr) {
+    context = context[field]
   }
-  for (; i < countFieldArr.length; i++) { context = context[countFieldArr[i]] }
   return context
 }
 
 function getFieldInfo (fieldInfo) {
-  if (typeof fieldInfo === 'string') { return { type: fieldInfo } } else if (Array.isArray(fieldInfo)) { return { type: fieldInfo[0], typeArgs: fieldInfo[1] } } else if (typeof fieldInfo.type === 'string') { return fieldInfo } else { throw new Error('Not a fieldinfo') }
+  switch (true) {
+    case typeof fieldInfo === 'string':
+      return { type: fieldInfo }
+    case Array.isArray(fieldInfo):
+      return { type: fieldInfo[0], typeArgs: fieldInfo[1] }
+    case typeof fieldInfo.type === 'string':
+      return fieldInfo
+    default:
+      throw new Error(`${fieldInfo} is not a fieldinfo`)
+  }
+}
+
+function isFieldInfo (type) {
+  return typeof type === 'string' ||
+    (Array.isArray(type) && typeof type[0] === 'string') ||
+    type.type
 }
 
 function getCount (buffer, offset, { count, countType }, rootNode) {
-  let c = 0
-  let size = 0
-  if (typeof count === 'number') { c = count } else if (typeof count !== 'undefined') {
-    c = getField(count, rootNode)
-  } else if (typeof countType !== 'undefined') {
-    ({ size, value: c } = tryDoc(() => this.read(buffer, offset, getFieldInfo(countType), rootNode), '$count'))
-  } else { // TODO : broken schema, should probably error out.
-    c = 0
+  if (count !== undefined) {
+    count = typeof count === 'number' ? count : getField(count, rootNode)
+    return new Result(count, 0)
   }
-  return { count: c, size }
+  if (countType !== undefined) {
+    const { size, value } = tryDoc(this.read.bind(this, buffer, offset, getFieldInfo(countType), rootNode), '$count')
+    return new Result(value, size)
+  }
+  throw new Error('Broken schema, neither count nor countType defined')
 }
 
 function sendCount (len, buffer, offset, { count, countType }, rootNode) {
-  if (typeof count !== 'undefined' && len !== count) {
-    // TODO: Throw
-  } else if (typeof countType !== 'undefined') {
-    offset = this.write(len, buffer, offset, getFieldInfo(countType), rootNode)
-  } else {
-    // TODO: Throw
+  if (count !== undefined) {
+    if (typeof count === 'number' && len !== count) {
+      throw new Error('Datatype length is not equal to count defined in schema')
+    }
+    return offset
   }
-  return offset
+  if (countType !== undefined) {
+    return this.write(len, buffer, offset, getFieldInfo(countType), rootNode)
+  }
+  throw new Error('Broken schema, neither count nor countType defined')
 }
 
 function calcCount (len, { count, countType }, rootNode) {
-  if (typeof count === 'undefined' && typeof countType !== 'undefined') { return tryDoc(() => this.sizeOf(len, getFieldInfo(countType), rootNode), '$count') } else { return 0 }
+  if (count === undefined && countType !== undefined) {
+    return tryDoc(this.sizeOf.bind(this, len, getFieldInfo(countType), rootNode), '$count')
+  }
+  return 0
 }
 
-function addErrorField (e, field) {
-  e.field = e.field ? field + '.' + e.field : field
-  throw e
-}
+class ProtoDefEncoding {
+  constructor (inst, type) {
+    this.inst = inst
+    this.type = type
+    this.encode.bytes = 0
+    this.decode.bytes = 0
+  }
 
-function tryCatch (tryfn, catchfn) {
-  try { return tryfn() } catch (e) { catchfn(e) }
-}
+  encode (obj, buffer, offset = 0) {
+    if (buffer) {
+      this.encode.bytes = this.inst.write(obj, buffer, offset, this.type) - offset
+    } else {
+      buffer = this.inst.createPacketBuffer(this.type, obj)
+      this.encode.bytes = buffer.length
+    }
+    return buffer
+  }
 
-function tryDoc (tryfn, field) {
-  return tryCatch(tryfn, (e) => addErrorField(e, field))
-}
+  decode (buffer, start, end) {
+    const { value, size } = this.inst.read(buffer.slice(start, end), 0, this.type)
+    this.decode.bytes = size
+    return value
+  }
 
-class ExtendableError extends Error {
-  constructor (message) {
-    super(message)
-    this.name = this.constructor.name
-    this.message = message
-    if (Error.captureStackTrace != null) {
-      Error.captureStackTrace(this, this.constructor.name)
-    }
+  encodingLength (obj) {
+    return this.inst.sizeOf(obj, this.type)
   }
 }
 
-class PartialReadError extends ExtendableError {
-  constructor (message) {
-    super(message)
-    this.partialReadError = true
-  }
+function createEncoding (inst, type) {
+  return new ProtoDefEncoding(inst, type)
 }
 
 module.exports = {
-  getField: getField,
-  getFieldInfo: getFieldInfo,
-  addErrorField: addErrorField,
-  getCount: getCount,
-  sendCount: sendCount,
-  calcCount: calcCount,
-  tryCatch: tryCatch,
-  tryDoc: tryDoc,
-  PartialReadError: PartialReadError
+  Enum,
+  Result,
+  PartialReadError,
+  tryCatch,
+  tryDoc,
+  getField,
+  getFieldInfo,
+  isFieldInfo,
+  getCount,
+  sendCount,
+  calcCount,
+  createEncoding
 }
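
createEncoding wraps a ProtoDef (or CompiledProtodef) instance in an abstract-encoding style object: encode/decode/encodingLength plus the .bytes side channels, the interface expected by modules such as length-prefixed framing streams. A minimal sketch, assuming the default 'varint' type:

    const { ProtoDef, createEncoding } = require('protodef')
    const enc = createEncoding(new ProtoDef(), 'varint')
    const buf = enc.encode(300) // allocates a buffer when none is supplied
    console.log(enc.encode.bytes) // 2
    console.log(enc.decode(buf, 0, buf.length)) // 300
    console.log(enc.decode.bytes) // 2
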