From daf91b08c2d9320b7ad35b1e905b982a110b49c6 Mon Sep 17 00:00:00 2001 From: Jake Chitel Date: Thu, 25 Jan 2018 12:08:47 -0600 Subject: [PATCH 01/15] refactored tokenizer into 'lexer' which is now PURELY FUNCTIONAL --- src/@types/types.d.ts | 16 +- src/core.ts | 101 ++++++++ src/extensions.ts | 35 +++ src/index.ts | 26 +-- src/parser/lexer/char-stream.ts | 97 ++++++++ src/parser/lexer/index.ts | 2 + src/parser/lexer/lexer-state.ts | 172 ++++++++++++++ src/parser/lexer/lexer.ts | 396 ++++++++++++++++++++++++++++++++ src/parser/lexer/token.ts | 64 ++++++ src/require-hook.js | 26 +++ src/runner.ts | 57 +++++ src/utils/OrderedMap.ts | 59 +++++ src/utils/Scope.ts | 36 +++ src/utils/box-error.ts | 11 + src/utils/lazy-list.ts | 226 ++++++++++++++++++ src/utils/lazy.ts | 43 ++++ src/utils/preVisit.ts | 25 ++ src/utils/utils.ts | 38 +++ 18 files changed, 1410 insertions(+), 20 deletions(-) create mode 100644 src/core.ts create mode 100644 src/extensions.ts create mode 100644 src/parser/lexer/char-stream.ts create mode 100644 src/parser/lexer/index.ts create mode 100644 src/parser/lexer/lexer-state.ts create mode 100644 src/parser/lexer/lexer.ts create mode 100644 src/parser/lexer/token.ts create mode 100644 src/require-hook.js create mode 100644 src/runner.ts create mode 100644 src/utils/OrderedMap.ts create mode 100644 src/utils/Scope.ts create mode 100644 src/utils/box-error.ts create mode 100644 src/utils/lazy-list.ts create mode 100644 src/utils/lazy.ts create mode 100644 src/utils/preVisit.ts create mode 100644 src/utils/utils.ts diff --git a/src/@types/types.d.ts b/src/@types/types.d.ts index 8e05d2f..918b0ca 100644 --- a/src/@types/types.d.ts +++ b/src/@types/types.d.ts @@ -24,12 +24,20 @@ interface ObjectConstructor { * constraint and just say that it must be any kind of * function, with a prototype of the specified type. */ -interface Class extends Function { +interface Class extends Function { prototype: T; } type Optional = T | null; -type bool = boolean; - -type ArrayOrSingle = T | T[]; +/** + * A read-only version of Map. + * This is backed by a regular Map, but only exposes APIs that + * don't mutate the map. To mutate the map, use `mapSet()` + * from `~/utils/utils`. + */ +interface ReadonlyMap extends Iterable<[K, V]> { + get(key: K): V | undefined; + has(key: K): boolean; + readonly size: number; +} diff --git a/src/core.ts b/src/core.ts new file mode 100644 index 0000000..cccdb95 --- /dev/null +++ b/src/core.ts @@ -0,0 +1,101 @@ +export interface FilePosition { + readonly type: 'FilePosition'; + readonly path: string; + readonly position: [number, number]; + readonly computeRange: (image: string) => FileRange; + readonly nextLine: () => FilePosition; + readonly nextColumn: () => FilePosition; +} + +export function FilePosition(path: string, position: [number, number]): FilePosition { + return { type: 'FilePosition', path, position, computeRange, nextLine, nextColumn }; +} + +function computeRange(this: FilePosition, image: string) { + if (!image.includes('\n')) return FileRange(this.path, this.position, [this.position[0], this.position[1] + image.length - 1]); + const length = image.length; + // if the image ends with a newline, we have to ignore it because it is included within the previous line + const search = image.endsWith('\n') ? 
image.substring(0, length - 2) : image; + // number of line breaks in the string + const numBreaks = [...search].filter(c => c === '\n').length; + // number of characters after the previous line break (use the real length here) + const trailing = length - search.lastIndexOf('\n') - 1; + return FileRange(this.path, this.position, [this.position[0] + numBreaks, trailing]); +} + +function nextLine(this: FilePosition): FilePosition { + return { ...this, position: [this.position[0] + 1, 0] }; +} + +function nextColumn(this: FilePosition): FilePosition { + return { ...this, position: [this.position[0], this.position[1] + 1] }; +} + +/** + * Represents a range of text in a specific file on this system: + * - the path of the file + * - the start line/column of the range + * - the end line/column of the range + */ +export interface FileRange { + readonly type: 'FileRange'; + readonly path: string; + readonly start: [number, number]; + readonly end: [number, number]; + readonly merge: (location: FileRange) => FileRange; +} + +export function FileRange(path: string, start: [number, number], end: [number, number]): FileRange { + return { type: 'FileRange', path, start, end, merge }; +} + +/** + * Create a new location that contains both this location and the specified location + */ +function merge(this: FileRange, location: FileRange): FileRange { + if (this.path !== location.path) throw new Error('Two locations in different files cannot be merged.'); + let start = this.start; + let end = this.end; + if (location.start[0] < this.start[0] || location.start[0] === this.start[0] && location.start[1] < this.start[1]) { + [start[0], start[1]] = [location.start[0], location.start[0]]; + } else if (location.end[0] > this.end[0] || location.end[0] === this.end[0] && location.end[1] > this.end[1]) { + [end[0], end[1]] = [location.end[0], location.end[1]]; + } + return FileRange(this.path, start, end); +} + +/** + * The level of a diagnostic, listed in order so that comparison operators can be used: + * - Verbose: diagnostics that should only appear when the user requests as much information as possible + * - Message: diagnostics that serve to notify the user, and can be safely ignored + * - Warning: diagnostics that indicate a problem that will not trigger a failure + * - Error: diagnostics that indicate a problem that will trigger a failure + * - Fatal: diagnostics that indicate a problem that causes compilation to immediately fail + */ +export enum DiagnosticLevel { + Verbose = 1, + Message = 2, + Warning = 3, + Error = 4, + Fatal = 5, +} + +/** + * Represents a message to report to the user as an output of compilation. 
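+ *
+ * For example (the message and path here are illustrative), a diagnostic created as
+ * Diagnostic('Unexpected token', FilePosition('/src/main.ren', [3, 5])) defaults to
+ * DiagnosticLevel.Error, and its toString() renders as
+ * "Error: Unexpected token (/src/main.ren:3:5)".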
+ */ +export interface Diagnostic { + readonly level: DiagnosticLevel; + readonly message: string; + readonly location: FileRange; + readonly toString: () => string; +} + +export function Diagnostic(message: string, location: FileRange | FilePosition, level: DiagnosticLevel = DiagnosticLevel.Error): Diagnostic { + if (location.type === 'FilePosition') location = FileRange(location.path, location.position, location.position); + return { message, location, level, toString: diagToString }; +} + +function diagToString(this: Diagnostic) { + const { path, start: [line, column] } = this.location; + return `${DiagnosticLevel[this.level]}: ${this.message} (${path}:${line}:${column})`; +} diff --git a/src/extensions.ts b/src/extensions.ts new file mode 100644 index 0000000..86a8448 --- /dev/null +++ b/src/extensions.ts @@ -0,0 +1,35 @@ +interface Array { + /** Get the last item in this array, or undefined if the array is empty */ + last(): T; + /** Get the last {count} items in this array */ + last(count: number): T[]; + /** Get the number of items in this array that match the given predicate */ + count(predicate: (item: T) => boolean): number; +} + +interface ReadonlyArray { + /** Get the last item in this array, or undefined if the array is empty */ + last(): T; + /** Get the last {count} items in this array */ + last(count: number): T[]; + /** Get the number of items in this array that match the given predicate */ + count(predicate: (item: T) => boolean): number; +} + +Array.prototype.last = function last(count?: number) { + if (typeof count === 'undefined') return this[this.length - 1]; + return this.slice(this.length - count, this.length); +} + +Array.prototype.count = function count(predicate: (item: T) => boolean): number { + return this.filter(predicate).length; +} + +interface String { + /** Get the substring from this string containing the last {count} characters in this string */ + last(count?: number): string; +} + +String.prototype.last = function last(count = 1) { + return this.slice(this.length - count, this.length); +} diff --git a/src/index.ts b/src/index.ts index ef35007..f294ce3 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,23 +1,17 @@ -import { readFileSync as readFile } from 'fs'; import { resolve } from 'path'; +/** Allows us to use require path aliases (~, ~test) */ import './require-hook'; -import parse from './parser'; -import typecheck from './typecheck'; -import translate from './translator'; -import interpret from './interpreter'; +/** Injects extensions into built-in APIs */ +import './extensions'; +import { runProgram } from './runner'; -// read contents of specified file -const path = resolve(process.argv[2]); -const contents = readFile(path).toString(); -// parse them -const parsed = parse(contents); -// type check the parsed result -const checked = typecheck(parsed, path); -// translate the type checker result to IR -const translated = translate(checked); -// execute the IR in the interpreter and get the exit code of the program -const exitCode = interpret(translated, process.argv.slice(3)); +// extract the program path and arguments +const [path, ...args] = process.argv.slice(2); + +// run the program +const exitCode = runProgram(resolve(process.cwd(), path), args); + // exit the process process.exit(exitCode); diff --git a/src/parser/lexer/char-stream.ts b/src/parser/lexer/char-stream.ts new file mode 100644 index 0000000..e291ad7 --- /dev/null +++ b/src/parser/lexer/char-stream.ts @@ -0,0 +1,97 @@ +import { openSync as open, readSync as read } from 'fs'; 
+import { StringDecoder } from 'string_decoder'; +import { LazyList, NonEmptyLazyList, fromIterable, infList } from '~/utils/lazy-list'; +import { FilePosition } from '~/core'; + + +export type CharStream = EmptyCharStream | NonEmptyCharStream; + +export interface EmptyCharStream { + readonly empty: true; + /** The file position of the end of the file */ + readonly position: FilePosition; +} + +export interface NonEmptyCharStream { + readonly empty: false; + /** The file position of the next character in the stream */ + readonly position: FilePosition; + /** Reads one character from the stream and returns it */ + readonly first: () => string; + /** Reads one character from the stream, and returns it with the remaining stream */ + readonly read: () => { char: string, stream: CharStream }; + /** Reads as many characters from the stream as possible, up to {count} */ + readonly forceRead: (count: number) => { chars: string, stream: CharStream }; +} + +interface InternalCharStream extends NonEmptyCharStream { + readonly list: NonEmptyLazyList; +} + +/** + * Reads a single byte from a file at the specified position. + * If the position is higher than the length of the file, + * a buffer of length 0 will be returned. + */ +function readByte(fd: number, position: number) { + const buf = new Buffer(1); + const bytesRead = read(fd, buf, 0, 1, position); + return bytesRead ? buf : new Buffer(0); +} + +/** + * Returns a lazy list of bytes from the file at the specified path. + */ +function createByteStream(path: string): LazyList { + const fd = open(path, 'r'); + return infList() + .map(i => readByte(fd, i)) + .takeWhile(b => b.length > 0); +} + +/** + * Returns a lazy list of characters from the file at the specified path. + */ +export default function createCharStream(path: string): CharStream { + const decoder = new StringDecoder('utf8'); + const list = createByteStream(path) + .flatMap(byte => decoder.write(byte)) + .concat(fromIterable(decoder.end())); + if (list.empty) return { empty: true, position: FilePosition(path, [1, 1]) }; + return { + empty: false, + list, + position: FilePosition(path, [1, 1]), + read: readChar, + first: readFirst, + forceRead, + } as InternalCharStream; +} + +function readChar(this: InternalCharStream): { char: string, stream: CharStream } { + const char = this.list.head; + const empty = this.list.tail.empty; + const position = char === '\n' ? 
this.position.nextLine() : this.position.nextColumn(); + if (empty) return { char, stream: { empty: true, position } }; + return { + char, + stream: { ...this, list: this.list.tail, position } as InternalCharStream, + } +} + +function readFirst(this: InternalCharStream): string { + return this.list.head; +} + +function forceRead(this: InternalCharStream, count: number): { chars: string, stream: CharStream } { + // if we don't need any more, return the base of the recursion + if (count === 0) return { chars: '', stream: this }; + // read one from the front + const { char, stream } = this.read(); + // if it's now empty, just return that + if (stream.empty) return { chars: char, stream }; + // otherwise we've reached the recursion state, descend one level + const { chars, stream: stream1 } = stream.forceRead(count - 1); + // prepend the current character + return { chars: char + chars, stream: stream1 }; +} diff --git a/src/parser/lexer/index.ts b/src/parser/lexer/index.ts new file mode 100644 index 0000000..2f53aec --- /dev/null +++ b/src/parser/lexer/index.ts @@ -0,0 +1,2 @@ +export { createTokenStream } from './lexer'; +export { TokenType, Token } from './Token'; diff --git a/src/parser/lexer/lexer-state.ts b/src/parser/lexer/lexer-state.ts new file mode 100644 index 0000000..36aed14 --- /dev/null +++ b/src/parser/lexer/lexer-state.ts @@ -0,0 +1,172 @@ +import { FilePosition } from '~/core'; +import { TokenType, Token } from './token'; +import { CharStream, EmptyCharStream, NonEmptyCharStream } from './char-stream'; + + +export interface TokenResult { + final: Token; + remaining: CharStream; +} + +// #region IfHasNextOperation + +interface IfHasNextOperation { + readonly previous: LexerState; + readonly result: Optional; + /** + * In the event of a false result for the previous ifHasNext(), try again with a different predicate. + * This method can chain so that the first successful predicate will propagate through to the last else. + */ + readonly elseIf: (count: number, pred: (values: string[]) => boolean, + then: (state: LexerState, accepted: string) => LexerState + ) => IfHasNextOperation; + /** + * In the event of a false result for the previous ifHasNext(), return an alternate result. + * This method will end a chain, so the first successful result will return from this method. + */ + readonly else: (fn: (state: LexerState) => LexerState) => LexerState; +} + +function IfHasNextOperation(previous: LexerState, result: Optional = null): IfHasNextOperation { + return { previous, result, elseIf: IfHasNextOperation.elseIf, else: IfHasNextOperation._else }; +} + +namespace IfHasNextOperation { + /** + * If we already have a result, then skip this else-if and return it so it propagates to the end. + * Otherwise, execute ifHasNext() on the original with the new parameters. + */ + export function elseIf(this: IfHasNextOperation, count: number, pred: (values: string[]) => boolean, + then: (state: LexerState, accepted: string) => LexerState + ) { + if (this.result) return this; + return this.previous.ifHasNext(count, pred, then); + } + + /** + * End of the chain, if a previous predicate yielded a result, return it. + * Otherwise return the alternate. 
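+ *
+ * Shape of a typical chain (see consumeNumber() in lexer.ts for a real one;
+ * count/pred/then/fallback stand for the parameters described above):
+ *   state.ifHasNext(count, pred, then).elseIf(count, pred, then).else(fallback)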
+ */ + export function _else(this: IfHasNextOperation, fn: (state: LexerState) => LexerState) { + if (this.result) return this.result; + return fn(this.previous); + } +} + +// #endregion + +export type LexerState = EmptyLexerState | NonEmptyLexerState; + +interface LexerStateBase { + /** The expected resulting type of token, can be changed with setType() */ + readonly type: TokenType; + /** The start position of the token */ + readonly position: FilePosition; + /** The progressing image of the consumed token, can be appended to with consume() */ + readonly image: string; + /** The expeceted resulting value of the token, can be set with setValue() */ + readonly value?: any; + /** Returns a new LexerState with the provided type */ + readonly setType: (type: TokenType) => LexerState; + /** Returns a new LexerState with a value based on the current image */ + readonly setValue: (fn: (image: string) => any) => LexerState; + /** Returns a new LexerState with a value based on the current value */ + readonly mapValue: (fn: (value: any) => any) => LexerState; + /** + * This is a very useful tool to handle conditional consumption. + * First, specify the number of characters from the stream you wish to analyze. + * Then, provide a predicate that will be called with those characters ONLY if there were that many available. + * That predicate should return true if the characters should be consumed, and false if not. + * Then, provide a 'then' function that will be called with the resulting consumed state + * and the string of the accepted characters, which should return the state to return from the operation. + * This method will return a chainable object that can be used to append more checks in the instance that one fails. + */ + readonly ifHasNext: (count: number, pred: (values: string[]) => boolean, + then: (state: LexerState, accepted: string) => LexerState + ) => IfHasNextOperation; + /** Returns a completed token and remaining stream based on this LexerState */ + readonly finish: () => TokenResult; +} + +interface EmptyLexerState extends LexerStateBase { + /** Determines whether this LexerState is empty */ + readonly empty: true; + /** The remaining available character stream */ + readonly stream: EmptyCharStream; +} + +interface NonEmptyLexerState extends LexerStateBase { + /** Determines whether this LexerState is empty */ + readonly empty: false; + /** The remaining available character stream */ + readonly stream: NonEmptyCharStream; + /** Consumes at least one character from the stream and appends it to the image, returning a new LexerState */ + readonly consume: (count?: number) => LexerState; +} + +/** + * Tracks the state for the consumption of one token. 
+ * EmptyLexerState and NonEmptyLexerState have the same properties, except: + * - empty is true for Empty, false for NonEmpty (the discriminant) + * - stream is empty for Empty, non-empty for NonEmpty + * - consume() is available for NonEmpty, not for Empty + */ +export function LexerState(position: FilePosition, char: string, stream: CharStream) { + return LexerState.init(position, char, stream); +} + +export namespace LexerState { + export function init(position: FilePosition, char: string, stream: CharStream): LexerState { + const base = { type: TokenType.NONE, position, image: char, setType, setValue, mapValue, ifHasNext, finish }; + if (stream.empty) return { ...base, empty: true, stream }; + return { ...base, empty: false, stream, consume }; + } + + function consume(this: NonEmptyLexerState, count = 1): LexerState { + let chars: string, stream: CharStream; + if (count === 1) { + ({ char: chars, stream } = this.stream.read()); + } else { + ({ chars, stream } = this.stream.forceRead(count)); + } + const image = this.image + chars; + return (stream.empty + ? { ...this, empty: true, image, stream } + : { ...this, empty: false, image, stream }) as LexerState; + } + + function setType(this: LexerState, type: TokenType): LexerState { + return { ...this, type }; + } + + function setValue(this: LexerState, fn: (image: string) => any): LexerState { + return { ...this, value: fn(this.image) }; + } + + function mapValue(this: LexerState, fn: (value: any) => any): LexerState { + return { ...this, value: fn(this.value) }; + } + + function ifHasNext(this: LexerState, count: number, pred: (values: string[]) => boolean, + then: (state: LexerState, accepted: string) => LexerState + ): IfHasNextOperation { + const state = this; + // not enough chars + if (state.empty) return IfHasNextOperation(state); + const { chars, stream } = state.stream.forceRead(count); + // not enough chars + if (chars.length !== count) return IfHasNextOperation(state); + // predicate deemed it not so + if (!pred([...chars])) return IfHasNextOperation(state); + // predicate deemed it so + const image = state.image + chars; + const result = (stream.empty + ? 
{ ...state, empty: true, image, stream } + : { ...state, empty: false, image, stream }) as LexerState; + return IfHasNextOperation(state, then(result, chars)); + } + + function finish(this: LexerState) { + return { final: Token(this.type, this.position, this.image, this.value), remaining: this.stream }; + } +} diff --git a/src/parser/lexer/lexer.ts b/src/parser/lexer/lexer.ts new file mode 100644 index 0000000..14f9244 --- /dev/null +++ b/src/parser/lexer/lexer.ts @@ -0,0 +1,396 @@ +import { LazyList, empty, create } from '~/utils/lazy-list'; +import createCharStream, { CharStream } from './char-stream'; +import { TokenType, Token } from './token'; +import { TokenResult, LexerState } from './lexer-state'; +import BoxError from '~/utils/box-error'; +import { Diagnostic } from '~/core'; + + +// #region Token/character sets + +/** + * Full list of identifiers that are classified as reserved words + */ +export const RESERVED = [ + 'as', // used for renaming imports + 'any', // supertype of all types + 'bool', // boolean type name + 'break', // statement to break from a loop + 'byte', // byte type name (alias of u8) + 'catch', // denotes a catch block in a try-catch block + 'char', // character type name + 'const', // constant declaration keyword + 'continue', // statement to skip to the next iteration of a loop + 'default', // used to declare a default export, also for a default case in a pattern match block + 'do', // denotes the start of a do-while loop + 'double', // double type name (alias for f64) + 'else', // denotes the start of an else clause + 'export', // declares a module export + 'f32', // 32-bit floating point type (equivalent to 'float') + 'f64', // 64-bit floating point type (equivalent to 'double') + 'false', // boolean false value + 'finally', // denotes a finally block in a try-catch-finally block + 'float', // float type name (alias for f32) + 'for', // denotes the start of a for loop + 'from', // used in import and export declarations to specify the name of another module + 'func', // denotes a named function declaration + 'i16', // 16 bit signed integer type + 'i32', // 32 bit signed integer type (equivalent to 'int') + 'i64', // 64 bit signed integer type (equivalent to 'long') + 'i8', // 8 bit signed integer type + 'if', // denotes the start of an if block + 'import', // declares a module import + 'in', // separates iterator variable and iterable expression in for statements + 'int', // int type name (alias for i32) + 'integer', // integer type name (true integer, infinite capacity) + 'long', // long type name (alias for i64) + 'return', // denotes a return statement to return a value from a function + 'short', // short type name (alias for u16) + 'string', // string type name + 'throw', // denotes a throw statement to throw an exception from a function + 'true', // boolean true value + 'try', // denotes the start of a try-catch block + 'type', // denotes the start of a type declaration + 'u16', // 16 bit unsigned integer type (equivalent to 'short') + 'u32', // 32 bit unsigned integer type + 'u64', // 64 bit unsigned integer type + 'u8', // 8 bit unsigned integer type (equivalent to 'byte') + 'void', // return type of functions, indicates no value is returned (alias for '()') + 'while', // denotes the start of a while loop +] + +/** + * Operators are dynamic tokens that serve a semantic purpose. + * They can be composed of any combination of these characters, + * except when that combination matches a symbol. 
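+ *
+ * For example, per the rule above, '=' and '=>' are intended to lex as SYMBOLs
+ * only when the next character is not another operator character; a longer run
+ * such as '=>>' is consumed as a single OPER token instead (see consumeSymbol()
+ * and consumeOperator() below).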
+ */ +const OPER_CHARS = '~!$%^&*+-=|<>?/'; + +/** + * Anything that matches one of these symbols is parsed as a SYMBOL, + * except when one of the symbols contains an OPER_CHARS character. + * In that case, the symbol must be followed by a non-OPER_CHARS character. + */ +const RESERVED_SYMBOLS = [ + ':', // colon separates value names from their types, and also serves as a general delimiter + '{', // braces wrap struct literals and body statements + '}', + '(', // parentheses wrap tuple literals and serve as explicit delimiters for types and expressions + ')', + '[', // brackets wrap array literals and are used for index expressions + ']', + ',', // commas separate lists of syntax elements + '=', // equals sign is used to assign values to variables + '=>', // fat arrow is used to separate a lambda function's parameter list from its body + '`', // backticks are not yet used in the language, but may have a way to use named functions as operators + '.', // dots are used in field access expressions and other related operations + ';', // semicolons are used to explicitly separate statements that exist on the same line +]; + +// types of tokens that are ignored by the parser +const IGNORED_TYPES = [TokenType.COMMENT, TokenType.WHITESPACE]; + +// #endregion + +// #region Token stream logic + +/** + * Reads a stream of characters from the file at the specified path and performs lexical analysis on the stream, + * returning a stream of tokens. + */ +export function createTokenStream(path: string, ignoreMode = true): LazyList { + const list = consumeTokens(createCharStream(path)); + if (!ignoreMode) return list; + return list.filter(t => !IGNORED_TYPES.includes(t.type)); +} + +/** + * Consumes a token and recurses until an EOF token is reached + */ +function consumeTokens(charStream: CharStream): LazyList { + const { final, remaining } = consumeToken(charStream); + return create(final, () => final.type === TokenType.EOF ? 
empty() : consumeTokens(remaining)); +} + +/** + * Consumes a single token from the front of the stream and + * returns the token and the remaining stream + */ +function consumeToken(charStream: CharStream): TokenResult { + // stream is empty, return the final EOF token + if (charStream.empty) return { final: Token(TokenType.EOF, charStream.position, ''), remaining: charStream }; + // read a single character from the stream + const { char, stream } = charStream.read(); + + return LexerState(charStream.position, char, stream) + .ifHasNext(2, ([c1, c2]) => c1 === '/' && c2 === '/', + state => consumeSingleLineComment(state.setType(TokenType.COMMENT))) + .elseIf(2, ([c1, c2]) => c1 === '/' && c2 === '*', + state => consumeMultiLineComment(state.setType(TokenType.COMMENT))) + .elseIf(1, ([c]) => ['upper', 'lower', '_'].includes(kind(c)), consumeIdentifierOrReserved) + .elseIf(1, ([c]) => kind(c) === 'num', consumeNumber) + .elseIf(1, ([c]) => c === '"', + state => consumeStringLiteral(state.setType(TokenType.STRING_LITERAL).setValue(_ => ''))) + .elseIf(1, ([c]) => c === "'", + state => consumeCharLiteral(state.setType(TokenType.CHARACTER_LITERAL))) + .elseIf(1, ([c]) => RESERVED_SYMBOLS.some(s => s.startsWith(c)), + state => consumeSymbol(state.setType(TokenType.SYMBOL))) + .elseIf(1, ([c]) => OPER_CHARS.includes(c), + state => consumeOperator(state.setType(TokenType.OPER))) + .elseIf(1, ([c]) => c === '\n', state => state.setType(TokenType.NEWLINE)) + .elseIf(2, ([c1, c2]) => c1 === '\r' && c2 === '\n', state => state.setType(TokenType.NEWLINE)) + .elseIf(1, ([c]) => c === ' ' || c === '\t', + state => consumeWhitespace(state.setType(TokenType.WHITESPACE))) + .else(() => { + // otherwise it is not a valid character (for now) + throw new BoxError(Diagnostic(`Invalid character '${char}'`, charStream.position)); + }) + .finish(); +} + +// #endregion + +// #region Consumers + +/** + * A single line comment is an ignored area of code delimited by a '//' sequence at the start + * and a new line at the end. + */ +function consumeSingleLineComment(pending: LexerState): LexerState { + // we can't use ifHasNext() here because comments can be long and we need tail recursion + if (pending.empty) return pending; + if (pending.stream.first() === '\n') return pending.consume(); + return consumeSingleLineComment(pending.consume()); +} + +enum MutliLineCommentState { + START, // only the first '/' consumed + BODY, // '/*' consumed + MAYBE_END, // a '*' is consumed in the body +} + +/** + * A multi line comment is an ignored area of code delimited by a '/*' sequence at the start + * and a '*\/' (no backslash) sequence at the end. 
+ */ +function consumeMultiLineComment(pending: LexerState, state = MutliLineCommentState.START): LexerState { + // we can't use ifHasNext() here because a) we need tail recursion b) we have a state parameter + if (pending.empty) throw new BoxError(Diagnostic('Unterminated comment', pending.stream.position)); + const first = pending.stream.first(); + let nextState = state; + if (state === MutliLineCommentState.START) { + state = MutliLineCommentState.BODY; + } else if (state === MutliLineCommentState.BODY) { + if (first === '*') state = MutliLineCommentState.MAYBE_END; + } else if (state === MutliLineCommentState.MAYBE_END) { + if (first === '/') return pending.consume(); + state = MutliLineCommentState.BODY + } + return consumeMultiLineComment(pending.consume(), nextState); +} + +/** + * An identifier is a sequence of alphanumeric characters and '_' (but can't start with a number) + * that serves as the name of a code element such as a variable or type. + * Some valid identifier sequences are reserved words in the language, and these + * are parsed as RESERVED tokens instead of IDENT tokens. + */ +function consumeIdentifierOrReserved(pending: LexerState): LexerState { + return pending.ifHasNext(1, ([c]) => ['upper', 'lower', 'num', '_'].includes(kind(c)), consumeIdentifierOrReserved) + .else(state => RESERVED.includes(state.image) ? state.setType(TokenType.RESERVED) : state.setType(TokenType.IDENT)); +} + +/** + * Consume either: hex, binary, float, decimal + */ +function consumeNumber(pending: LexerState): LexerState { + if (pending.image === '0') { + return pending + .ifHasNext(2, ([c1, c2]) => c1.toLowerCase() === 'x' && isHex(c2), + state => consumeHexLiteral(state.setType(TokenType.INTEGER_LITERAL))) + .elseIf(2, ([c1, c2]) => c1.toLowerCase() === 'b' && '01'.includes(c2), + state => consumeBinLiteral(state.setType(TokenType.INTEGER_LITERAL))) + .elseIf(2, ([c1, c2]) => '.e'.includes(c1.toLowerCase()) && kind(c2) === 'num', + (state, img) => consumeFloatLiteral(state.setType(TokenType.FLOAT_LITERAL), + img[0] === '.' ? 
FloatLiteralState.FRACTION : FloatLiteralState.EXPONENT)) + .else(consumeDecLiteral); + } + return consumeDecLiteral(pending); +} + +/** + * Hexadecimal literals: 0[xX][0-9a-fA-F]+ + */ +function consumeHexLiteral(pending: LexerState): LexerState { + return pending.ifHasNext(1, ([c]) => isHex(c), consumeHexLiteral).else(state => state.setValue(parseHex)); +} + +/** + * Binary literals: 0[bB][01]+ + */ +function consumeBinLiteral(pending: LexerState): LexerState { + return pending.ifHasNext(1, ([c]) => '01'.includes(c), consumeBinLiteral).else(state => state.setValue(parseBin)); +} + +enum FloatLiteralState { + FRACTION, // segment after the decimal point + EXPONENT, // segment after the 'e' +} + +/** + * Float literals: whole number portion + (fractional portion and/or exponent portion) + */ +function consumeFloatLiteral(pending: LexerState, state: FloatLiteralState): LexerState { + if (state === FloatLiteralState.FRACTION) { + return pending + .ifHasNext(2, ([c1, c2]) => c1.toLowerCase() === 'e' && kind(c2) === 'num', + s => consumeFloatLiteral(s, FloatLiteralState.EXPONENT)) + .elseIf(1, ([c]) => kind(c) === 'num', + s => consumeFloatLiteral(s, state)) + .else(s => s.setValue(parseFloat)); + } else { + return pending.ifHasNext(1, ([c]) => kind(c) === 'num', s => consumeFloatLiteral(s, state)) + .else(s => s.setValue(parseFloat)); + } +} + +/** + * Decimal literals: sequence of numbers + */ +function consumeDecLiteral(pending: LexerState): LexerState { + return pending + .ifHasNext(2, ([c1, c2]) => '.e'.includes(c1.toLowerCase()) && kind(c2) === 'num', + (state, img) => consumeFloatLiteral(state, + img[0] === '.' ? FloatLiteralState.FRACTION : FloatLiteralState.EXPONENT)) + .elseIf(1, ([c]) => kind(c) === 'num', consumeDecLiteral) + .else(state => state.setValue(parseInt)); +} + +const ESCAPE: { readonly [key: string]: string } = { n: '\n', r: '\r', t: '\t', f: '\f', b: '\b', v: '\v' }; + +/** + * Literals of character sequences + */ +function consumeStringLiteral(pending: LexerState): LexerState { + if (pending.empty) throw new BoxError(Diagnostic('Unterminated string', pending.stream.position)); + const next = pending + // end of string + .ifHasNext(1, ([c]) => c === '"', state => state) + // basic escape codes + .elseIf(2, ([c1, c2]) => c1 === '\\' && 'nrtfbv'.includes(c2), + (state, cs) => state.mapValue(v => v + ESCAPE[cs[1]])) + // ascii escape codes + .elseIf(4, ([c1, c2, c3, c4]) => c1 === '\\' && c2.toLowerCase() === 'x' && isHex(c3) && isHex(c4), + (state, cs) => state.mapValue(v => v + String.fromCodePoint(parseInt(cs.last(2), 16)))) + // 4-byte unicode escape codes + .elseIf(6, ([c1, c2, ...cs]) => c1 === '\\' && c2.toLowerCase() === 'u' && cs.every(isHex), + (state, cs) => state.mapValue(v => v + String.fromCodePoint(parseInt(cs.last(4), 16)))) + // 5-byte unicode escape codes + .elseIf(9, ([c1, c2, ...cs]) => c1 === '\\' && c2.toLowerCase() === 'u' && cs[0] === '{' && cs[6] === '}' && cs.slice(1, 6).every(isHex), + (state, cs) => state.mapValue(v => v + String.fromCodePoint(parseInt(cs.slice(3, 9), 16)))) + // 6-byte unicode escape codes + .elseIf(10, ([c1, c2, ...cs]) => c1 === '\\' && c2.toLowerCase() === 'u' && cs[0] === '{' && cs[7] === '}' && cs.slice(1, 7).every(isHex), + (state, cs) => state.mapValue(v => v + String.fromCodePoint(parseInt(cs.slice(3, 10), 16)))) + // all other escaped characters + .elseIf(2, ([c]) => c === '\\', (state, cs) => state.mapValue(v => v + cs[1])) + // all other characters + .elseIf(1, () => true, (state, c) => state.mapValue(v => v + c)) + 
// the last case was a catch-all, so we can guarantee that this will be non-null + .result!; + const last = next.image.last(2); + if (last[0] !== '\\' && last[1] === '"') return next; + return consumeStringLiteral(next); +} + +/** + * Literals of single characters + */ +function consumeCharLiteral(pending: LexerState): LexerState { + if (pending.empty) throw new BoxError(Diagnostic('Unterminated character', pending.stream.position)); + const next = pending + .ifHasNext(1, ([c]) => c === "'", + () => { throw new BoxError(Diagnostic('Empty character', pending.stream.position)) }) + // basic escape codes + .elseIf(2, ([c1, c2]) => c1 === '\\' && 'nrtfbv'.includes(c2), + (state, cs) => state.setValue(() => ESCAPE[cs[1]])) + // ascii escape codes + .elseIf(4, ([c1, c2, c3, c4]) => c1 === '\\' && c2.toLowerCase() === 'x' && isHex(c3) && isHex(c4), + (state, cs) => state.setValue(() => String.fromCodePoint(parseInt(cs.last(2), 16)))) + // 4-byte unicode escape codes + .elseIf(6, ([c1, c2, ...cs]) => c1 === '\\' && c2.toLowerCase() === 'u' && cs.every(isHex), + (state, cs) => state.setValue(() => String.fromCodePoint(parseInt(cs.last(4), 16)))) + // 5-byte unicode escape codes + .elseIf(9, ([c1, c2, ...cs]) => c1 === '\\' && c2.toLowerCase() === 'u' && cs[0] === '{' && cs[6] === '}' && cs.slice(1, 6).every(isHex), + (state, cs) => state.setValue(() => String.fromCodePoint(parseInt(cs.slice(3, 9), 16)))) + // 6-byte unicode escape codes + .elseIf(10, ([c1, c2, ...cs]) => c1 === '\\' && c2.toLowerCase() === 'u' && cs[0] === '{' && cs[7] === '}' && cs.slice(1, 7).every(isHex), + (state, cs) => state.setValue(() => String.fromCodePoint(parseInt(cs.slice(3, 10), 16)))) + // all other escaped characters + .elseIf(2, ([c]) => c === '\\', (state, cs) => state.setValue(() => cs[1])) + // all other characters + .elseIf(1, () => true, (state, c) => state.setValue(() => c)) + // the last case was a catch-all, so we can guarantee that this will be non-null + .result!; + // the next character must absolutely be a ' and nothing else + return next + .ifHasNext(1, ([c]) => c == "'", state => state) + .else(() => { throw new BoxError(Diagnostic('Unterminated character', next.stream.position)) }); +} + +function consumeSymbol(pending: LexerState): LexerState { + // equals is a special case because it's dumb (can be present in both symbols and operators) + if (pending.image === '=') { + return pending + .ifHasNext(1, ([c]) => c === '>', state => state + // oper following =>, definitely oper + .ifHasNext(1, ([c]) => OPER_CHARS.includes(c), s => consumeOperator(s.setType(TokenType.OPER))) + // empty or non-oper following =>, definitely symbol + .else(s => s)) + // oper following =, definitely oper + .elseIf(1, ([c]) => OPER_CHARS.includes(c), state => consumeOperator(state.setType(TokenType.OPER))) + // empty or non-oper following =, definitely symbol + .else(state => state); + } + // all of our other symbols today are only one character long, so we're already good (for now) + return pending; +} + +function consumeOperator(pending: LexerState): LexerState { + // < and > have special behavior in the parser, so we tokenize them individually + if (pending.image === '<' || pending.image === '>') return pending; + // continue to consume oper chars until there aren't anymore + return pending.ifHasNext(1, ([c]) => OPER_CHARS.includes(c), consumeOperator) + .else(state => state); +} + +function consumeWhitespace(pending: LexerState): LexerState { + return pending.ifHasNext(1, ([c]) => ' \t'.includes(c), consumeWhitespace) + 
.else(state => state); +} + +// #endregion + +// #region Helpers + +function kind(char: string) { + if (char >= 'a' && char <= 'z') return 'lower'; + else if (char >= 'A' && char <= 'Z') return 'upper'; + else if (char >= '0' && char <= '9') return 'num'; + else return char; +} + +function isHex(c: string) { + if (!c) return false; + const low = c.toLowerCase(); + return (c >= '0' && c <= '9') || (low >= 'a' && low <= 'f'); +} + +function parseHex(image: string) { + return parseInt(image, 16); +} + +function parseBin(image: string) { + return parseInt(image.replace(/0b/i, ''), 2); +} + +// #endregion diff --git a/src/parser/lexer/token.ts b/src/parser/lexer/token.ts new file mode 100644 index 0000000..c193bfe --- /dev/null +++ b/src/parser/lexer/token.ts @@ -0,0 +1,64 @@ +import { FilePosition, FileRange } from '~/core'; + + +/** + * Categorizes tokens by syntactic type + */ +export enum TokenType { + NONE = 1, // default + COMMENT, // characters ignored from code + IDENT, // identifier + RESERVED, // reserved word + INTEGER_LITERAL, // integer number literals + FLOAT_LITERAL, // floating-point number literals + STRING_LITERAL, // character string literals + CHARACTER_LITERAL, // single character literals + OPER, // operators + SYMBOL, // any special syntactic symbols + WHITESPACE, // any non-new-line whitespace (spaces, tabs, etc.) + NEWLINE, // any character sequence that produces a new line (inluding ;) + SEMI, // semicolon, special delimiter that behaves as a new line + EOF // special end-of-file token +} + +/** + * Represents a single token extracted from the source string. + * 'type' specifies what kind of terminal the token represents, and is used by the parser. + * 'location' is the text range in the source file where the token is located + * 'image' is an exact copy of the token from the original source string. + * 'value' is an optional value that represents the parsed value of the token, if it makes sense for the token type (numbers, strings, etc.). + */ +export interface Token { + readonly type: TokenType; + readonly location: FileRange; + readonly image: string; + readonly value?: any; + toString(): string; +} + +/** Creates a new token */ +export function Token(type: TokenType, position: FilePosition, image: string, value?: any): Token { + return Token.create(type, position, image, value); +} + +export namespace Token { + /** Creates a new token */ + export function create(type: TokenType, position: FilePosition, image: string, value?: any): Token { + return { + type, image, value, location: position.computeRange(image), + toString, + }; + } + + /** + * Creates a new token that has no type, useful for creating tokens after parsing is done + * and types don't matter anymore. + */ + export function fromLocation(position: FilePosition, image: string) { + return create(TokenType.NONE, position, image); + } + + function toString(this: Token) { + return this.image; + } +} diff --git a/src/require-hook.js b/src/require-hook.js new file mode 100644 index 0000000..ce0c432 --- /dev/null +++ b/src/require-hook.js @@ -0,0 +1,26 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const Module = require("module"); +const req = Module.prototype.require; +/** + * We make use of typescript's "paths" option to allow us to not + * have to specify a bunch of ".." in module import paths. + * Having a simple path such as "~" as the root of the src/ directory + * looks much cleaner. 
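For example, lexer.ts can import '~/utils/lazy-list' instead of spelling out '../../utils/lazy-list'.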
However, typescript doesn't translate these + * imports to the proper paths, so we still need a mechanism to + * translate the imports at runtime. + * + * This file should be imported for any code that will run a module + * in this package. The src/index.ts file already imports it, + * and the test command is configured to do so as well. + */ +Module.prototype.require = function (path, ...args) { + let resolved = path; + if (path.startsWith('~/')) { + resolved = path.replace(/^~/, __dirname); + } + else if (path.startsWith('~test/')) { + resolved = path.replace(/^~test/, `${__dirname}/../test`); + } + return req.call(this, resolved, ...args); +}; diff --git a/src/runner.ts b/src/runner.ts new file mode 100644 index 0000000..93adbf4 --- /dev/null +++ b/src/runner.ts @@ -0,0 +1,57 @@ +import { mapSet } from '~/utils/utils'; +import { parseModule } from '~/parser'; +import { Diagnostic, DiagnosticLevel } from '~/core'; + + +/** + * Runs the program at the given (absolute) path with the provided arguments. + */ +export function runProgram(path: string, args: string[]) { + let program: Program = { + modules: new Map(), + diagnostics: [], + addModule, + }; + // add the main module (this will be a recursive operation for any imported modules) + program = program.addModule(path); + // we will eventually provide a verbosity option, but for now just set it to Message + const diags = program.diagnostics.filter(d => d.level >= DiagnosticLevel.Message); + const errCount = diags.count(d => d.level >= DiagnosticLevel.Error); + const warnCount = diags.count(d => d.level === DiagnosticLevel.Warning); + if (errCount > 0) { + // there were errors, print all messages and exit + process.stderr.write(`Errors: ${errCount}, Warnings: ${warnCount}\n\n`); + process.stderr.write(diags.map(d => `${d}\n`).join()); + process.stderr.write('\nCompilation failed\n'); + return 1; + } else if (diags.length) { + // otherwise, just print all messages and continue + process.stderr.write(`Warnings: ${warnCount}\n\n`) + process.stderr.write(diags.map(d => `${d}\n`).join()); + const suffix = warnCount > 0 ? 
' with warnings' : ''; + process.stderr.write(`\nCompilation succeeded${suffix}\n\n`); + } + // compiled successfully, run the main module + return runModule(program, path, args); +} + +export function addModule(this: Program, path: string): Program { + const { module, diagnostics } = parseModule(path); + return { + ...this, + modules: mapSet(this.modules, path, module), + diagnostics: [...this.diagnostics, ...diagnostics], + } +} + +export function runModule(program: Program, path: string, args: string[]): number { + const module = program.modules.get(path); +} + +export interface Program { + readonly modules: ReadonlyMap; + readonly diagnostics: ReadonlyArray; + readonly addModule: typeof addModule; +} + +export interface Module {} diff --git a/src/utils/OrderedMap.ts b/src/utils/OrderedMap.ts new file mode 100644 index 0000000..9bfdb1c --- /dev/null +++ b/src/utils/OrderedMap.ts @@ -0,0 +1,59 @@ +export default class OrderedMap { + fieldOrder: string[]; + private _values: { [key: string]: V }; + + constructor() { + this.fieldOrder = []; + this._values = {}; + } + + add(key: string, value: V) { + this.fieldOrder.push(key); + this._values[key] = value; + } + + getKey(i: number) { + return this.fieldOrder[i]; + } + + get(key: string) { + return this._values[key]; + } + + getValue(i: number) { + return this.get(this.getKey(i)); + } + + get length() { + return this.fieldOrder.length; + } + + keys() { + return this.fieldOrder; + } + + values() { + return [...this]; + } + + *[Symbol.iterator]() { + for (const key of this.fieldOrder) { + yield this._values[key]; + } + } + + some(predicate: (item: V) => bool) { + for (const i of this) { + if (predicate(i)) return true; + } + return false; + } + + map(mapper: (item: V, key?: string) => T) { + const map = new OrderedMap(); + for (const key of this.fieldOrder) { + map.add(key, mapper(this._values[key], key)); + } + return map; + } +} diff --git a/src/utils/Scope.ts b/src/utils/Scope.ts new file mode 100644 index 0000000..12d90d8 --- /dev/null +++ b/src/utils/Scope.ts @@ -0,0 +1,36 @@ +/** + * Data structure made to represent scoped names. + * Internally, this is a stack of maps. + * When getting a value, the stack is searched from + * the top down, and the first scope that contains the + * value is used. If no scope contains the value, + * the top one is used. + */ +export default class Scope { + private scopes: { [name: string]: T }[] = [{}]; + + private getScope(name: string): { [name: string]: T } { + for (let i = this.scopes.length - 1; i >= 0; --i) { + if (name in this.scopes[i]) return this.scopes[i]; + } + // if the name exists at no level, return the top one + return this.scopes[this.scopes.length - 1]; + } + + get(name: string): T { + return this.getScope(name)[name]; + } + + set(name: string, value: T) { + this.getScope(name)[name] = value; + } + + push() { + this.scopes.push({}); + } + + pop() { + if (this.scopes.length === 1) throw new Error('Cannot pop the last entry of a scope'); + this.scopes.pop(); + } +} diff --git a/src/utils/box-error.ts b/src/utils/box-error.ts new file mode 100644 index 0000000..ff56a17 --- /dev/null +++ b/src/utils/box-error.ts @@ -0,0 +1,11 @@ +/** + * This type of error is used to hold onto a value that is the output + * of an operation that reaches a fail condition. + * This should be used only as an container for a value used to escape + * from a process with an output value. 
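+ *
+ * For example, the lexer wraps a Diagnostic in a BoxError and throws it
+ * (see consumeToken() in lexer.ts) to escape its recursive consumers
+ * with the Diagnostic as the payload.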
+ */ +export default class BoxError extends Error { + constructor(public readonly value: T) { + super(); + } +} diff --git a/src/utils/lazy-list.ts b/src/utils/lazy-list.ts new file mode 100644 index 0000000..da64798 --- /dev/null +++ b/src/utils/lazy-list.ts @@ -0,0 +1,226 @@ +import Lazy, { lazy } from '~/utils/lazy'; + + +/** + * A LazyList is a linked list composed of a head, which is a single item, and a lazily-evaluated + * tail. Because the tail is lazily evaluated, LazyLists can be infinite. Note that the 'tail' property is + * lazy, but it is not a Lazy value, it implements its own laziness mechanism. Additionally, a tail can either + * be another LazyList or a special empty LazyList that has no head and no tail. It represents the end + * of the list. + */ +export type LazyList = NonEmptyLazyList | EmptyLazyList; + +interface LazyListOperations extends Iterable { + // #region functional operations + + /** + * Transforms this list by applying a mapper function to each item + */ + map(mapper: (item: T) => R): LazyList; + + /** + * Applies a mapper function to each item, concatenating each yielded result into one list + */ + flatMap(mapper: (item: T) => Iterable): LazyList; + + /** + * Creates a new list containing only items that return true for the predicate + */ + filter(predicate: (item: T) => boolean): LazyList; + + /** + * Starting with an initial value, combine all items in this list + * into the value using a reducer function + */ + reduce(reducer: (value: R, item: T) => R, init: R): R; + + /** + * Same as reduce(), but the initial value used is the first item of the list + */ + reduceSelf(reducer: (value: T, item: T) => T): T; + + /** + * Appends a list to the end of this one + */ + concat(list: LazyList): LazyList; + + /** + * Prepends an item to this list, returning another list. + * NOTE: This is a constant-time operation + */ + prepend(item: T): LazyList; + + /** + * Copies each item of this list to a new one, stopping for the first item + * that returns false for the specified predicate + */ + takeWhile(predicate?: (item: T) => boolean): LazyList; + + // #endregion + + // #region shift operations + + /** + * Returns the specified number of items from the beginning + * of this list, as well as the tail following the last item + */ + shift(count: number): { values: T[], tail: LazyList }; + + /** + * Returns only the specified number of items from the beginning + * of this list. + */ + peek(count: number): T[]; + + // #endregion +} + +export interface NonEmptyLazyList extends LazyListOperations { + readonly empty: false; + readonly head: T; + readonly tail: LazyList; +} + +export interface EmptyLazyList extends LazyListOperations { + readonly empty: true; +} + +class LazyListImpl implements NonEmptyLazyList { + public readonly empty = false; + private readonly _tail: Lazy>; + + constructor(public readonly head: T, getTail: () => LazyList) { + this._tail = lazy(getTail); + } + + public get tail(): LazyList { + return this._tail.value; + } + + *[Symbol.iterator](): Iterator { + let list: LazyList = this; + while (!list.empty) { + yield list.head; + list = list.tail; + } + } + + public map(mapper: (item: T) => R): LazyList { + return new LazyListImpl(mapper(this.head), () => this.tail.map(mapper)); + } + + public flatMap(mapper: (item: T) => Iterable): LazyList { + const list = fromIterable(mapper(this.head)); + return list.empty + ? 
this.tail.flatMap(mapper) + : new LazyListImpl(list.head, () => list.tail.concat(this.tail.flatMap(mapper))); + } + + public filter(predicate: (item: T) => boolean): LazyList { + if (predicate(this.head)) return new LazyListImpl(this.head, () => this.tail.filter(predicate)); + return this.tail.filter(predicate); + } + + public reduce(reducer: (value: R, item: T) => R, init: R): R { + return this.tail.reduce(reducer, reducer(init, this.head)); + } + + public reduceSelf(reducer: (value: T, item: T) => T): T { + return this.tail.reduce(reducer, this.head); + } + + public concat(list: LazyList): LazyList { + return new LazyListImpl(this.head, () => this.tail.concat(list)); + } + + public prepend(item: T): LazyList { + return new LazyListImpl(item, () => this); + } + + public takeWhile(predicate?: (item: T) => boolean): LazyList { + if (!predicate) return this.takeWhile(i => !!i); + if (predicate(this.head)) return new LazyListImpl(this.head, () => this.tail.takeWhile(predicate)); + return new EmptyLazyListImpl(); + } + + public shift(count: number): { values: T[], tail: LazyList } { + if (count === 0) return { values: [], tail: this }; + const { values, tail } = this.tail.shift(count - 1); + return { values: [this.head, ...values], tail }; + } + + public peek(count: number) { + if (count === 0) return []; + return [this.head, ...this.tail.peek(count - 1)]; + } +} + +class EmptyLazyListImpl implements EmptyLazyList { + public readonly empty = true; + + [Symbol.iterator](): Iterator { + return { next() { return { done: true, value: {} as T } } }; + } + + public map(_mapper: (item: T) => R): LazyList { return new EmptyLazyListImpl(); } + public flatMap(_mapper: (item: T) => Iterable): LazyList { return new EmptyLazyListImpl(); } + public filter(_predicate: (item: T) => boolean): LazyList { return new EmptyLazyListImpl(); } + public reduce(_reducer: (value: R, item: T) => R, init: R): R { return init; } + public reduceSelf(_reducer: (value: T, item: T) => T): never { + throw new Error('Cannot call reduceSelf() on an empty list. Try reduce() instead'); + } + public concat(list: LazyList): LazyList { return list; } + public prepend(item: T): LazyList { return new LazyListImpl(item, () => this); } + public takeWhile(_predicate?: (item: T) => boolean): LazyList { return new EmptyLazyListImpl(); } + public shift(_count: number): { values: T[], tail: LazyList } { + return { values: [], tail: this }; + } + public peek(_count: number) { return []; } +} + +/** + * Creates a lazy list of the provided type + */ +export function create(head: T, getTail: () => LazyList): LazyList { + return new LazyListImpl(head, getTail); +} + +/** + * Creates a lazy list containing a single item + */ +export function single(item: T): LazyList { + return new LazyListImpl(item, empty); +} + +/** + * Creates an empty lazy list of the provided type. + */ +export function empty(): LazyList { + return new EmptyLazyListImpl(); +} + +/** + * Creates an infinitely incrementing lazy list starting + * from the specified number (or 0 by default). + */ +export function infList(start = 0): LazyList { + return new LazyListImpl(start, () => infList(start + 1)); +} + +/** + * Creates a lazy list from an iterable, lazily evaluating the + * iterable's iterator for each request of the tail. 
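+ *
+ * For example, fromIterable('abc') yields 'a', 'b' and 'c' (strings are
+ * iterable), advancing the underlying iterator one element at a time as the
+ * list is consumed; char-stream.ts uses this for the decoder's trailing output.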
+ */ +export function fromIterable(iterable: Iterable): LazyList { + return fromIterator(iterable[Symbol.iterator]()); +} + +/** + * Creates a lazy list from an iterator, lazily evaluating the + * iterator from its current location for each request of the tail. + */ +export function fromIterator(iterator: Iterator): LazyList { + const next = iterator.next(); + if (next.done) return new EmptyLazyListImpl(); + return new LazyListImpl(next.value, () => fromIterator(iterator)); +} diff --git a/src/utils/lazy.ts b/src/utils/lazy.ts new file mode 100644 index 0000000..a270ff6 --- /dev/null +++ b/src/utils/lazy.ts @@ -0,0 +1,43 @@ +/** + * This is a simple interface for lazy evaluation in TS. + * + * The following is the specification of a lazy value: + * - A lazy value should not be evaluated until it is required + * - A lazy value should be evaluated at most once + * - A lazy value should never change + * + * A simple implementation of lazy logic is provided by the lazy() + * function in this module. This is sufficient in almost all + * cases, but Lazy values can be created by other means as well. + * Just as long as the value implements this interface and + * conforms to the above specification, it is a Lazy value. + */ +export default interface Lazy { + readonly value: T; +} + +interface SimpleLazy extends Lazy { + readonly _evaluator: () => T; + _value?: T; +} + +/** + * This creates a simple lazy value which uses delayed evaluation + * and memoization to implement laziness. The value is taken from + * the return value of the provided evaluator function. + * The evaluator should NOT have any side effects. + * + * NOTE: This memoization technique is not referentially transparent + * because it saves the evaluated result internally, but this is required + * because JavaScript doesn't have a first-class memoization + * mechanism. + */ +export function lazy(evaluator: () => T): Lazy { + const obj: SimpleLazy = { + _evaluator: evaluator, + get value(): T { + return !('_value' in this) ? (this._value = this._evaluator()) : this._value as T; + }, + }; + return obj; +} diff --git a/src/utils/preVisit.ts b/src/utils/preVisit.ts new file mode 100644 index 0000000..2dc996b --- /dev/null +++ b/src/utils/preVisit.ts @@ -0,0 +1,25 @@ +interface IPreVisitable { + preVisit(func: () => T, visitee: V): T; +} + +/** + * A decorator to specify that a "preVisit()" method should be called before + * invoking a class's visitor methods. This method must conform to a specific + * signature, taking a bound visitor method that can be called to invoke + * the original visitor method, and the visitee used to invoke the visitor. + * The preVisit() method can perform operations or set context, and it can + * optionally not invoke the original visitor, but it must return a value + * of the visitor's type. 
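+ *
+ * Rough shape of a conforming preVisit() method (the names below are illustrative):
+ *
+ *   preVisit(visit: () => Result, node: MyNode): Result {
+ *     // set up context here, then delegate to (or skip) the original visitor
+ *     return visit();
+ *   }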
+ */ +export default function preVisit>() { + return function(cls: Class): void { + const visitors: Function[] = Object.values(cls.prototype) + .filter(m => typeof m === 'function' && m.name.startsWith('visit')); + for (const visitor of visitors) { + Reflect.set(cls.prototype, visitor.name, function newVisitor(this: Visitor, visitee: Visitee) { + const bound = visitor.bind(this, visitee); + return this.preVisit(bound, visitee); + }); + } + } +} diff --git a/src/utils/utils.ts b/src/utils/utils.ts new file mode 100644 index 0000000..a5294e4 --- /dev/null +++ b/src/utils/utils.ts @@ -0,0 +1,38 @@ +/** + * Creates an array of incrementing numbers, starting at 0, of the specified length + */ +export function range(length: number) { + return [...Array(length)].map((_, i) => i); +} + +/** + * Function to immutably set a key in a ReadonlyMap without exposing Map. + */ +export function mapSet(map: ReadonlyMap, key: K, value: V): ReadonlyMap { + const clone = new Map(map); + clone.set(key, value); + return clone; +} + +/** + * Creates an instance of the specified class with the specified properties. + * This can be used to bypass the class's constructor. + * This should ONLY be used internally as an alternative to JS's lack + * of constructor overloading. + */ +export function createInstance(cls: Class, props: Partial = {}) { + const obj = Object.create(cls.prototype); + return Object.assign(obj, props); +} + +/** + * Clones an instance of a class, optionally overriding properties + * with the specified properties. + */ +export function cloneInstance(obj: T, props: Partial = {}) { + const clone = Object.create(Object.getPrototypeOf(obj)); + for (const key of Object.keys(obj)) { + clone[key] = obj[key]; + } + return Object.assign(clone, props); +} From c475736930f849d579d2e64c531363ee0aeabaa8 Mon Sep 17 00:00:00 2001 From: Jake Chitel Date: Fri, 23 Feb 2018 18:32:27 -0600 Subject: [PATCH 02/15] removing capital P parser to prevent weirdness --- src/parser/Parser.ts | 596 ------------------------------------------- 1 file changed, 596 deletions(-) delete mode 100644 src/parser/Parser.ts diff --git a/src/parser/Parser.ts b/src/parser/Parser.ts deleted file mode 100644 index b6a8218..0000000 --- a/src/parser/Parser.ts +++ /dev/null @@ -1,596 +0,0 @@ -import ASTNode from '~/syntax/ASTNode'; -import LazyList from './LazyList'; -import Tokenizer, { Token, TokenType } from './Tokenizer'; -import ParserError from './ParserError'; -import { ParserMessageKey, getMessage } from './ParserMessages'; -import { ParseExpression, ParseOptions, ParseSequence, ParseExpressionInstance } from './ParseExpression'; - - -/** - * Shorthand function for creating a new parse expression with options. - * - * NOTE: this will return a ParseExpressionInstance, which is not exposed to - * logic external to the parser. - */ -export function exp(expression: ParseExpression, opts: ParseOptions = {}): ParseExpressionInstance { - return new ParseExpressionInstance(expression, opts); -} - -/** - * When a nested parse is used, this is the object that is returned. - */ -export type ParseResult = { - [key: string]: ArrayOrSingle; -} - -/** Parse results that contain sub-nodes */ -type ParseParentNode = ASTNode | ParseResult; -/** All Parse results */ -export type ParseNode = ParseParentNode | Token; - -/** - * A Parser can perform a high-level parse operation. - * Its intended usage is to simply create it with a source code string - * and parse a specific ASTNode subclass, as shown here: - * ```ts - * class Program extends ASTNode { ... 
} - * const source = 'my program\'s code'; - * const parsed = new Parser(source).parse(Program); - * ``` - * - * ## Ren Parser - * - * ### Grammar Definition Overview - * - * The grammar logic is defined in each ASTNode subclass using a set of decorators and functions: - * - @parser(expression, options?): defines a method of the class as the target for the parse - * result of a specific expression of the sequence defined by the class. More info below. - * - @nonTerminal(options): defines additional attributes onto an ASTNode class, primarily - * related to non-terminal inheritance. More info below. - * - exp(expression, options?): creates a parse expression exactly the same way as @parser, - * but for a value not tied to an ASTNode class. This is usefulf for defining ParseSequences. - * - * ### Context Free Grammars - * - * This parser logic is based on core concepts of Context Free Grammars, with additional sugar - * that makes it much easier to describe a grammar in code with little overhead. - * - * A Context Free Grammar (CFG) is composed of a list of "expansions", where an expansion is - * of the form: - * ``` - * Non-Terminal ::= - * ``` - * - * #### Non-Terminals (Syntax Tree Nodes) - * - * A non-terminal is a symbol that "expands" to one or more sequences of terminal symbols - * or other non-terminal symbols. The tree-like structure of a program comes from - * these non-terminals. Each non-terminal is a node in a "syntax tree", and the expanded - * sequence is its children. The term "AST node" can also be used to mean "non-terminal", - * and in fact the ASTNode class is used to represent non-terminals in Ren's parser. - * - * Examples of non-terminals: - * - programs - * - declarations - * - types - * - expressions - * - statements - * - * #### Terminals (Tokens) - * - * A terminal is called so because it cannot be expanded, it is a concrete unit of code. - * It is called so because expansion "terminates" at terminals; likewise, non-terminals - * are called so because they do not "terminate" expansion. Terminals are the leaf nodes - * of the syntax tree. If you take all of the leaf nodes of the syntax tree from left to - * right and arrange them in a flat list, you've reproduced the original source code. - * The word "token" can also be used to mean "terminal", and in fact the Token class is - * used to represent terminals in Ren's parser. - * - * Examples of terminals: - * - identifiers (abc, _myVar123, etc.) - * - literals (1, 1.5, "hello world", etc.) - * - symbols (+, -, etc.) - * - * ### Grammar Definition - * - * In this parser, expansions can be thought of as the "definition" of non-terminals. - * The primary way to define a non-terminal is by creating a sub-class of the ASTNode - * class, which is the abstract parent class of all formal non-terminals in Ren. - * These classes can contain any fields required to logically represent the node's - * purpose in the program. To define an expansion on the class, you add methods to the class - * annotated with the @parser decorator. The first parameter of this decorator is the - * "parse expression" represented by this element of the expansion. There are five types - * of parse expressions: - * - token types - * - token images - * - non-terminal class references - * - in-place sequences - * - choices - * See below for more info on Parse Expressions. - * - * The second parameter of @parser is an optional object of options for the expression. 
- * These options are: - * - definite: if true, this expression is the expansion's "decision point", meaning - * that if the parser is parsing this non-terminal as optional or a choice, this - * expression is the point where the parser should start to fail if it hits any - * further failures. This will become more clear with the Soft Mode section below. - * - optional: if true, this expression should be skipped if it fails - * - repeat: either '+' or '*', if specified this expression will be repeatedly parsed - * into a list until the first failure. '+' means there must be at least one, and '*' - * means there can be zero or more. The 'sep' option can be used with this to specify - * an expression that should appear between each repetition as a "separator". The - * 'sepOptions' option can be used to specify the separator as 'definite'. - * - err: this can be a ParserMessageKey, and when specified, the corresponding message - * will be used to assemble an error in the instance of this expression's failed parse - * (assuming the parser is currently in hard mode). - * - * The target of @parser must be a method that takes one of five parameters: - * - a Token: if the expression is a TokenType or a token image - * - an ASTNode subclass instance: if the expression is an ASTNode subclass - * - a ParseResult: if the expression is an in-place sequence - * - a union of any of the above: if the expression is a choice - * - a list of any of the above: if the expression had a 'repeat' option - * (the list of separators will also be passed as a second parameter if 'sep' was specified) - * - * Upon successful parse of the expression, this method is invoked with the result. - * It is **not** invoked if the expression is optional and fails to parse. - * - * This method is intended to take the parse result and perform any additional processing - * or transformation to map it to the class's semantic properties. - * - * NOTE: the methods should be put into the correct order of the expansion. The parser - * is dependent on this. - * - * ### Parse Expressions - * - * #### Terminal Expressions - * - * Token types and images are ways to specify terminals in an expansion. There is a - * specific set of token types that you can use (see `enum TokenType` in ~/parser/Tokenizer), - * and you can also specify a specific token image (the source string of the token) - * to parse. Specifying one of these tells the parser to get the next token and compare - * its type or image to parse it. - * - * #### Non-Terminal Expressions - * - * The remaining three expression types are ways to specify a non-terminal in an expansion. - * You can provide another ASTNode class reference, specifying that the parser should - * attempt to expand that non-terminal once it reaches that point. You can also specify - * an in-place ParseSequence, which is another way to define a non-terminal (described below). - * And you can specify a list of other expressions as choices, which the parser will iterate through - * until it reaches its first successful option, returning that one. - * - * ##### Parse Sequences - * - * Sometimes, it can be cumbersome to define non-terminal classes when those non-terminals - * might only be used for the parsing process and discarded afterward. Examples of this are - * definitions with complex syntax that require several nested non-terminals. 
- * - * In these instances, you can create in-place sequences, which are parsed the same way as - * non-terminal classes, but the result is a simple JS object that is ultimately passed to - * an actual non-terminal method. Sequences are defined as JS objects, where the keys - * will be the keys of the parsed result, and the values are parse expressions. These - * expressions are defined in the exact same way as @parser(), but using the exp() function - * instead. If the expression has no options, the call to exp() can be omitted. - * Just like with @parser() methods, these keys must be ordered correctly for the parser - * to work properly. - * - * Parse sequences can be nested, allowing you to define very complex grammars in a concise - * manner. You can save sequences to variables so they can be reused in multiple places - * as well. It is not recommended to use parse sequences outside of the parsing logic, - * but there's nothing technically stopping you. - * - * ### Non-terminal inheritance - * - * When defining a complex grammar for a typical programming language, certain non-terminals - * tend to emerge as "abstract" nodes. Examples of these are: - * - Declaration (functions, types, etc.) - * - Type (int, class, etc.) - * - Expression (1+1, myFunc(), etc.) - * - Statement (if (x) y else z, try {} catch () {}, etc.) - * - * These can be represented using choices, but they typically end up mapping to an object-oriented - * pattern of inheritance, in which there is an abstract parent class that doesn't know - * what its subclasses are, and each of the subclasses knows what its superclass is. - * This is the opposite of choice expressions, where instead the parent knows all of its - * children, and the children don't know about the parent. - * - * For these cases, where there is a single abstract parent extended by several different - * types of children, we have a concept of "non-terminal inheritance". In this model, - * each subclass is "registered" as one choice of an abstract non-terminal. When the abstract - * non-terminal is parsed, the list of subclasses is used as the list of parse choices. - * - * To specify a non-terminal as abstract or as an implementor of an abstract non-terminal, - * use the @nonTerminal() decorator. These are the available options: - * - abstract: if true, this class is marked as an abstract non-terminal. The class itself - * should also be abstract because it will never be instantiated, but it doesn't technically - * have to be. - * - implements: should be a reference to an abstract class, this defines an inheritance relationship. - * The class itself should actually be a subclass of the abstract class, but it doesn't - * technically have to be. - * - leftRecursive: if the subclass is left-recursive (its first expression is the same as its - * parent class), it must be defined as left-recursive to avoid infinite recursion in the parser. - * - before: in the instance that one or more children of the same class contain the exact - * expansion of another child in their own expansion, there is a conflict. in these instances, - * the non-terminal with the longer expansion must come first. This option can be a list - * of non-terminal classes that this class must come before. If any of the items in the list - * are already present when this is registered, it will be inserted before all of them. 
- */ -export default class Parser { - public soft: boolean; - public tokenizer: LazyList; - - constructor(source: string) { - // start with 'soft' as false because Program is implicitly definite - this.soft = false; - this.tokenizer = new LazyList(new Tokenizer(source)); - } - - /** - * Top-level parser method. Call this with an AST node class to perform a parse of that class. - * NOTE: this should **not** be called from within the parser's internals. - */ - parse(cls: Class): T { - try { - return this.parseNonTerminal(cls); - } catch (err) { - if (err instanceof SoftParserError) { - throw new Error('an error occurred in the parser, a soft error was not converted to a hard error'); - } - throw err; - } - } - - /** - * Parse a token (terminal) by type - */ - parseTokenType(type: TokenType) { - const tok = this.getNextToken(); - if (tok.type !== type) throw new SoftParserError(tok); - return tok; - } - - /** - * Parse a token (terminal) by image - */ - parseTokenImage(image: string) { - const tok = this.getNextToken(); - if (tok.image !== image) throw new SoftParserError(tok); - return tok; - } - - /** - * Parse an instance of a non-terminal class. - * NOTE: the 'cls' variable must be an ASTNode subclass, - * but because some of these can be abstract classes, - * it is not possible to represent these in TypeScript. - */ - parseNonTerminal(cls: Class): T { - if (Reflect.has(cls, 'abstract') && Reflect.get(cls, 'abstract')) { - // abstract node, parse all implementing classes as choices - const cfg: decorators.AbstractNonTerminalConfig = Reflect.get(cls, 'abstract'); - if (cfg.suffixes.length === 0) return this.parseChoices(cfg.choices) as T; - return this.parseLeftRecursive(cfg) as T; - } - // normal node, parse as sequence - const entries = this.getEntries(cls); - // invoke the actual constructor so that we get initializers - return this.parseSequenceInternal(Reflect.construct(cls, []) as T, entries); - } - - /** - * Parse a sequence of parse expressions - */ - parseSequence(seq: ParseSequence): ParseResult { - const entries = Object.keys(seq).map(k => ({ - methodName: k, - exp: new ParseExpressionInstance(seq[k]) - })); - return this.parseSequenceInternal({} as ParseResult, entries); - } - - /** - * Parse a list of choices - */ - parseChoices(choices: ParseExpressionInstance[]) { - for (const choice of choices) { - try { - return this.forkAndParse(true, choice); - } catch {} // with choices, we ignore **all** failures and just skip it - } - // no match, soft failure - throw new SoftParserError(); - } - - /** - * Parse a list of left-recursive choices - */ - private parseLeftRecursive(cfg: decorators.AbstractNonTerminalConfig) { - let base = this.parseChoices(cfg.choices) as ASTNode; - retry: while (true) { - for (const suff of cfg.suffixes) { - try { - // suffixes must be non-terminal classes - const cls = suff.exp.nonTerminal!; - const suffNode = Reflect.construct(cls, []); - this.invokeSetter(suffNode, suff.baseName, false, base); - // do the parse - const subParser = this.withSoft(true); - subParser.parseSequenceInternal(suffNode, this.getEntries(cls)); - this.tokenizer = subParser.tokenizer; - base = suffNode; - continue retry; - } catch {} // with suffixes, we ignore **all** failures and just skip it - } - break; - } - return base; - } - - private parseSequenceInternal(inst: T, entries: decorators.ParseEntry[]): T { - // make sure there is a definite flag on at least one entry - checkForDefinite(entries); - for (const { methodName, exp } of entries) { - // parse the expression - const 
result = this.parseExpressionInternal(exp); - if (Array.isArray(result)) this.invokeSetter(inst, methodName, exp.flatten, ...result); - else if (result) this.invokeSetter(inst, methodName, exp.flatten, result); - } - return inst; - } - - /** - * This is the core parse logic. All expression modifiers are processed here. - */ - private parseExpressionInternal(exp: ParseExpressionInstance) { - if (exp.repeat) { - // repetitions are handled a bit differently - return this.parseRepetition(exp); - } - try { - return this.forkAndParse(exp.optional || this.soft, exp); - } catch (err) { - // forward hard errors - if (!(err instanceof SoftParserError)) throw err; - // if it was optional, ignore the failure and move on, otherwise fail - if (!exp.optional) this.processParseFailure(exp.err, err.token); - } - // We only get here if the expression was optional and not parsed, so undefined will be returned - } - - /** - * Repetition mode - */ - private parseRepetition(exp: ParseExpressionInstance): [ParseNode[], Token[]] { - const items: ParseNode[] = [], seps: Token[] = []; - let wasSeparator = false; // flipped to true after every separator - let handleOneOrMore = exp.repeat === '+'; // flipped to false after the first item - // enter repetition loop - while (true) { - // the first of a "+" repetition, and the node after a separator, are both required - const required = wasSeparator || handleOneOrMore; - try { - // accept node - items.push(this.forkAndParse(this.soft || !required, exp)); - // after the first iteration this should always be false - handleOneOrMore = false; - } catch (err) { - // hard error, forward the error - if (!(err instanceof SoftParserError)) throw err; - // node was required here, throw as a hard failure - if (required) this.processParseFailure(exp.err, err.token); - // this is fine, we just finish repetition - return [items, seps]; - } - if (exp.sep) { - try { - // handle separator if there is one - seps.push(this.forkAndParse(true, exp.sep) as Token); - wasSeparator = true; - } catch (err) { - if (!(err instanceof SoftParserError)) throw err; - // this is fine, we just finish repetition - return [items, seps]; - } - } - } - } - - /** - * To maintain proper immutable state, we create "sub-parsers" - * that clone the soft flag and the tokenizer. - * In the event of a failure, an exception will be thrown. - * Otherwise, we copy the successful tokenizer and set soft to true - * if there was a success - */ - private forkAndParse(soft: boolean, exp: ParseExpressionInstance): ParseNode { - const subParser = this.withSoft(exp.optional || soft); - const result = exp.parse(subParser); - // if we got here, node was accepted successfully, copy the tokenizer to the parent parser - this.tokenizer = subParser.tokenizer; - if (exp.definite) this.soft = false; - return result; - } - - /** - * Gets the next token from the tokenizer, saving the resulting tokenizer. - */ - private getNextToken() { - const [next, newTokenizer] = this.tokenizer.shift(); - this.tokenizer = newTokenizer; - return next; - } - - /** - * In the event of a successful parse of a sequence's child, we need to add the - * result to the container object. 
- */ - private invokeSetter(inst: ParseParentNode, name: string, flatten: boolean | undefined, ...values: any[]) { - if (inst instanceof ASTNode) { - // non-terminal instance - const setter = Reflect.get(inst, name) as (...values: any[]) => void; - setter.call(inst, ...values); - } else { - // nested parse, this is just a simple object - if (values.length > 1) { - // contains separators, apply each as a separate property, ignore flatten - const items = values[0], seps = values[1]; - Object.assign(inst, { [name]: items, [`${name}_sep`]: seps }); - } else { - // single value, flatten or assign - const value = values[0]; - Object.assign(inst, flatten ? value : { [name]: value }); - } - } - } - - /** - * Copies this instance's fields onto a new instance of copy, changing the value of 'soft' to the one provided - */ - private withSoft(soft: boolean) { - return Object.assign(Object.create(Object.getPrototypeOf(this)), { - ...(this as Parser), - soft, - }); - } - - /** - * In the event of a parse failure, one of three things will happen: - * - we are parsing softly, in which case it is a soft failure and we should reset any peeked tokens and return false - * - we are parsing definitely but there is no message available, in which case we return false and let the parent take care of the message - * - we are parsing definitely, in which case we assemble an error and throw it - */ - private processParseFailure(key: ParserMessageKey | undefined, node: Token | undefined): never { - if (this.soft || !key) throw new SoftParserError(node); - const tok = node as Token; // TODO what about when this isn't a token? - throw new ParserError(getMessage(key, tok), tok.line, tok.column); - } - - private getEntries(cls: Class) { - if (!Reflect.has(cls, 'parser')) throw new Error('AST node class requires at least one @parser() decorator on a method.'); - return Reflect.get(cls, 'parser') as decorators.ParseEntry[]; - } -} - -/** - * A soft parser error is how we represent a failure to parse a value - * where that failure may not actually cause the parse to fail: - * - an optional element - * - a repeated element (as long as one isn't required at that location) - * - a separator in a repetition - * - a choice - * - a left-recursive suffix - */ -class SoftParserError { - token?: Token; - - constructor(token?: Token) { - this.token = token; - } -} - -/** - * This will be a common source of bugs, so we run a check for it for every sequential expansion - */ -function checkForDefinite(es: decorators.ParseEntry[]) { - if (es.some(e => !!(e.exp.definite || e.exp.sep && e.exp.sep.definite))) return; - throw new Error('No definite set on a sequential expansion'); -} - -namespace decorators { - /** - * When a ParserDef is stored on a class (using the 'parser' decorator) - * the name of the decorated method needs to be stored too. - */ - export interface ParseEntry { - methodName: string; - exp: ParseExpressionInstance; - } - - /** - * Decorator for a method that defines a parser definition for an AST class. - * This decorator is required to define parser logic. - */ - export function parser(e: ParseExpression, opts: ParseOptions = {}): MethodDecorator { - return function(cls: T, key: string) { - if (!Reflect.has(cls.constructor, 'parser')) Reflect.set(cls.constructor, 'parser', []); - const parser = Reflect.get(cls.constructor, 'parser') as ParseEntry[]; - parser.push({ methodName: key, exp: new ParseExpressionInstance(e, opts) }); - } - } - - /** - * This is a configuration for defining a non-terminal. 
- * This is not required for all non-terminals, only if non-terminal - * inheritance is going to be used - */ - interface NonTerminalDef { - abstract?: boolean, - implements?: Class, // ideally this would be Class but you can't pass abstract classes as classes - leftRecursive?: string, // this is the name of the recursive field - before?: Class[], // list of classes that this class should come before in the registration list - } - - export interface AbstractNonTerminalConfig { - choices: ParseExpressionInstance[]; - suffixes: { - baseName: string; - exp: ParseExpressionInstance; - }[]; - } - - /** - * Decorator for an AST class that defines a non-terminal definition on it. - * This is not required for all classes, just for defining inheritance relationships. - */ - export function nonTerminal(def: NonTerminalDef): ClassDecorator { - return function(cls: Class) { - if (def.abstract) { - const cfg: AbstractNonTerminalConfig = { choices: [], suffixes: [] }; - Reflect.defineProperty(cls, 'abstract', { value: cfg }); - } else { - // we need to do this because classes inherit the static properties of their parents - Reflect.defineProperty(cls, 'abstract', { value: false }); - } - if (def.implements) { - if (!Reflect.has(def.implements, 'abstract')) throw new Error('Non-terminal cannot implement non-abstract non-terminal'); - const cfg: AbstractNonTerminalConfig = Reflect.get(def.implements, 'abstract'); - if (def.leftRecursive) { - const suffix = { baseName: def.leftRecursive, exp: new ParseExpressionInstance(cls) }; - if (def.before) { - const indices = findIndices(cfg.suffixes, def.before, (s, c) => s.exp.hasNonTerminal(c)); - if (indices.length) { - cfg.suffixes.splice(Math.min(...indices), 0, suffix); - return; - } - } - cfg.suffixes.push(suffix); - } else { - const choice = new ParseExpressionInstance(cls); - if (def.before) { - const indices = findIndices(cfg.choices, def.before, (i, c) => i.hasNonTerminal(c)); - if (indices.length) { - cfg.choices.splice(Math.min(...indices), 0, choice); - return; - } - } - cfg.choices.push(choice); - } - } - } - } - - /** - * Searches a list for the indices of items in a "terms list" using a specified predicate function. - * The resulting list is the list of indices of items in 'termsList' that were present in 'listToSearch' - * according to the predicate. - */ - function findIndices(listToSearch: T[], termsList: T1[], predicate: (t: T, t1: T1) => boolean) { - return termsList - .map(t1 => listToSearch.findIndex(t => predicate(t, t1))) - .filter(i => i !== -1); - } -} - -export const parser = decorators.parser; -export const nonTerminal = decorators.nonTerminal; From adc2f91cff7b6c77ec8eb9353e0ec519a238c85b Mon Sep 17 00:00:00 2001 From: Jake Chitel Date: Wed, 28 Feb 2018 21:44:34 -0600 Subject: [PATCH 03/15] Refactored parser and syntax to be more idiomatic This involves quite a few changes: Parser: The parser has been completely redone from the ground up. To facilitate the new compiler restriction of pure functional programming, the parser logic makes use of function composition. The result is a dead-simple API. There are 5 types of parse expressions: tok (tokens), seq (sequences), select (selections), optional, and repeat. Each one has a corresponding function that can be used to build a parse function capable of parsing any type of node. The whole thing is now strongly typed. Sequences are strongly typed by way of overloads, and they are parsed as arrays instead of objects, with a transform function for converting the arrays to objects. 
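To make the shape of this API concrete, here is a minimal sketch of what combinators like these can look like. The Token, ParseState, ParseResult, and ParseFunc shapes, the two-element seq overload, and the signedInt example below are illustrative assumptions, not the actual definitions in src/parser/parser.ts.

```ts
// Illustrative combinator sketch over an immutable token stream.
// All names and signatures here are assumptions for illustration only.
interface Token { type: string; image: string; }

interface ParseState {
    readonly tokens: ReadonlyArray<Token>;
    readonly index: number;
}

type ParseResult<T> = { success: true; value: T; state: ParseState } | { success: false };
type ParseFunc<T> = (state: ParseState) => ParseResult<T>;

/** tok: consume a single token, matched by type or by exact image */
function tok(typeOrImage: string): ParseFunc<Token> {
    return state => {
        const t = state.tokens[state.index];
        if (!t || (t.type !== typeOrImage && t.image !== typeOrImage)) return { success: false };
        return { success: true, value: t, state: { ...state, index: state.index + 1 } };
    };
}

/** seq: parse each expression in order, then transform the result array (two-element form only) */
function seq<A, B, T>(a: ParseFunc<A>, b: ParseFunc<B>, transform: (parts: [A, B]) => T): ParseFunc<T> {
    return state => {
        const ra = a(state);
        if (!ra.success) return { success: false };
        const rb = b(ra.state);
        if (!rb.success) return { success: false };
        return { success: true, value: transform([ra.value, rb.value]), state: rb.state };
    };
}

/** select: try each choice in order, returning the first success */
function select<T>(...choices: ParseFunc<T>[]): ParseFunc<T> {
    return state => {
        for (const choice of choices) {
            const result = choice(state);
            if (result.success) return result;
        }
        return { success: false };
    };
}

/** optional: succeed with null instead of failing */
function optional<T>(parse: ParseFunc<T>): ParseFunc<T | null> {
    return state => {
        const result = parse(state);
        return result.success ? result : { success: true, value: null, state };
    };
}

/** repeat: greedily parse zero or more occurrences */
function repeat<T>(parse: ParseFunc<T>): ParseFunc<T[]> {
    return state => {
        const values: T[] = [];
        let result = parse(state);
        while (result.success) {
            values.push(result.value);
            state = result.state;
            result = parse(state);
        }
        return { success: true, value: values, state };
    };
}

// usage: an optional sign followed by an integer token, transformed to a number
const signedInt = seq(optional(tok('-')), tok('INTEGER_LITERAL'),
    ([sign, int]) => (sign ? -1 : 1) * parseInt(int.image, 10));
```

A node's parse function is then just a composition of these five building blocks, and purity follows from threading the immutable state through every call.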
The concepts of 'soft' and 'definite' are now gone, because that was overkill. The parser now greedily consumes as much input as possible, and only throws an error when all options are exhausted. This means that ordering needs to be enforced even more heavily than before. The parser also makes use of the new pure lexer, so the entire process is purely functional. Additionally, much of the complex logic around the various types of repetitions and left recursion has been replaced with desugaring to simpler constructs, making the full set of logic simpler as a whole. "Abstract" node types are explained in the Syntax Environment section below.

Syntax:

The syntax has scrapped the use of classes and now uses interfaces with discriminated unions. The base type of all syntax node types is now NodeBase, which defines only a 'location' property. All sub-interfaces are required to specify a 'syntaxType' property set to a single specific SyntaxType enum value. This property is the discriminant, which we will make use of in the future. This whole structure is made possible by the extreme flexibility of the parser, which can now return any kind of value, not just class instances. The high-level node types (declarations, types, expressions, and statements) are now just unions of their corresponding node types, and there is one high-level Node type that is the union of all of them. The Program type is now called ModuleRoot, and ModuleRoot and all import and export types are now separated from the other declarations, because they are part of their own domain.

Syntax Environment:

The new parser API is specified in-place, not using functions. This means that node types that reference each other circularly will not work out of the box. We need to make use of mechanisms such as scope hoisting and referencing undeclared variables within functions to make it work properly. The only problem is that these mechanisms do not work cross-module. To make this work, we have introduced the concept of a "syntax environment", which is a function that loads any circularly-referencing syntax types on demand. All of the high-level node types have their parser specifications declared inside functions within the syntax environment. Types that depend on these do not declare their syntax at the module root; instead, they declare it in "register()" functions that take their dependencies as parameters. The syntax environment's module imports all of these register functions and calls them within the environment function, where it has access to the high-level parse functions. This means that to get access to the parse functions of all syntax types, you call the SyntaxEnvironment function, which returns a fresh environment with all circular references resolved.
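As a rough illustration of the node shape this describes, here is a minimal sketch. The specific enum members, node fields, the simplified location type, and the evaluate() helper are assumptions for illustration, not the actual definitions under src/syntax.

```ts
// Illustrative sketch of discriminated-union syntax nodes.
// Enum members and node fields here are assumptions, not the real definitions.
enum SyntaxType { IntegerLiteral, BinaryExpression }

interface NodeBase {
    // simplified stand-in for the real location type
    readonly location: { start: [number, number]; end: [number, number] };
}

interface IntegerLiteral extends NodeBase {
    readonly syntaxType: SyntaxType.IntegerLiteral;
    readonly value: number;
}

interface BinaryExpression extends NodeBase {
    readonly syntaxType: SyntaxType.BinaryExpression;
    readonly operator: string;
    readonly left: Expression;
    readonly right: Expression;
}

// high-level node types are plain unions of their member node types
type Expression = IntegerLiteral | BinaryExpression;

// the syntaxType discriminant lets later phases switch exhaustively
function evaluate(node: Expression): number {
    switch (node.syntaxType) {
        case SyntaxType.IntegerLiteral:
            return node.value;
        case SyntaxType.BinaryExpression:
            return node.operator === '+'
                ? evaluate(node.left) + evaluate(node.right)
                : evaluate(node.left) - evaluate(node.right);
    }
}
```

Because every node carries its syntaxType discriminant, later phases can switch exhaustively over a union like this instead of relying on instanceof checks against classes.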
--- .gitignore | 2 + src/parser/LazyList.ts | 98 --- src/parser/ParseExpression.ts | 115 --- src/parser/ParserMessages.ts | 2 +- src/parser/Tokenizer.ts | 689 ------------------ src/parser/index.ts | 14 +- src/parser/lexer/token.ts | 22 +- src/parser/parser.ts | 186 +++++ src/runtime/Module.ts | 119 --- src/syntax/ASTNode.ts | 34 - src/syntax/ModuleRoot.ts | 32 + .../declarations/ConstantDeclaration.ts | 66 +- src/syntax/declarations/ExportDeclaration.ts | 155 ++-- .../declarations/ExportForwardDeclaration.ts | 164 ++--- .../declarations/FunctionDeclaration.ts | 190 +++-- src/syntax/declarations/ImportDeclaration.ts | 150 ++-- src/syntax/declarations/Program.ts | 38 - src/syntax/declarations/TypeDeclaration.ts | 149 ++-- src/syntax/declarations/index.ts | 13 +- src/syntax/environment.ts | 380 ++++++++++ src/syntax/expressions/ArrayAccess.ts | 56 +- src/syntax/expressions/ArrayLiteral.ts | 42 +- src/syntax/expressions/BinaryExpression.ts | 103 ++- src/syntax/expressions/BoolLiteral.ts | 33 +- src/syntax/expressions/CharLiteral.ts | 32 +- src/syntax/expressions/Expression.ts | 6 - src/syntax/expressions/FieldAccess.ts | 53 +- src/syntax/expressions/FloatLiteral.ts | 32 +- src/syntax/expressions/FunctionApplication.ts | 70 +- .../expressions/IdentifierExpression.ts | 32 +- src/syntax/expressions/IfElseExpression.ts | 62 +- src/syntax/expressions/IntegerLiteral.ts | 32 +- src/syntax/expressions/LambdaExpression.ts | 136 ++-- .../expressions/ParenthesizedExpression.ts | 43 +- src/syntax/expressions/StringLiteral.ts | 32 +- src/syntax/expressions/StructLiteral.ts | 61 +- src/syntax/expressions/TupleLiteral.ts | 42 +- src/syntax/expressions/UnaryExpression.ts | 86 ++- src/syntax/expressions/VarDeclaration.ts | 47 +- src/syntax/expressions/index.ts | 38 +- src/syntax/index.ts | 18 +- src/syntax/statements/Block.ts | 43 +- src/syntax/statements/BreakStatement.ts | 38 +- src/syntax/statements/ContinueStatement.ts | 38 +- src/syntax/statements/DoWhileStatement.ts | 57 +- src/syntax/statements/ExpressionStatement.ts | 30 +- src/syntax/statements/ForStatement.ts | 69 +- src/syntax/statements/ReturnStatement.ts | 38 +- src/syntax/statements/Statement.ts | 6 - src/syntax/statements/ThrowStatement.ts | 38 +- src/syntax/statements/TryCatchStatement.ts | 95 ++- src/syntax/statements/WhileStatement.ts | 53 +- src/syntax/statements/index.ts | 21 +- src/syntax/types/ArrayType.ts | 46 +- src/syntax/types/BuiltInType.ts | 59 +- src/syntax/types/FunctionType.ts | 54 +- src/syntax/types/IdentifierType.ts | 33 +- src/syntax/types/NamespaceAccessType.ts | 52 +- src/syntax/types/ParenthesizedType.ts | 45 +- src/syntax/types/SpecificType.ts | 61 +- src/syntax/types/StructType.ts | 58 +- src/syntax/types/TupleType.ts | 41 +- src/syntax/types/Type.ts | 6 - src/syntax/types/UnionType.ts | 50 +- src/syntax/types/index.ts | 21 +- 65 files changed, 1980 insertions(+), 2746 deletions(-) delete mode 100644 src/parser/LazyList.ts delete mode 100644 src/parser/ParseExpression.ts delete mode 100644 src/parser/Tokenizer.ts create mode 100644 src/parser/parser.ts delete mode 100644 src/runtime/Module.ts delete mode 100644 src/syntax/ASTNode.ts create mode 100644 src/syntax/ModuleRoot.ts delete mode 100644 src/syntax/declarations/Program.ts create mode 100644 src/syntax/environment.ts delete mode 100644 src/syntax/expressions/Expression.ts delete mode 100644 src/syntax/statements/Statement.ts delete mode 100644 src/syntax/types/Type.ts diff --git a/.gitignore b/.gitignore index 2257912..90b16ba 100644 --- a/.gitignore +++ 
b/.gitignore @@ -1,3 +1,5 @@ +.DS_Store + # Logs logs *.log diff --git a/src/parser/LazyList.ts b/src/parser/LazyList.ts deleted file mode 100644 index 7b272e0..0000000 --- a/src/parser/LazyList.ts +++ /dev/null @@ -1,98 +0,0 @@ -class LazyListSource { - private iterator: Iterator; - public list: T[]; - public done: boolean; - - constructor(iterator: Iterator) { - this.iterator = iterator; - this.list = []; - this.done = false; - } - - requestItems(start: number, count: number): T[] { - while (start + count > this.list.length && !this.done) { - const next = this.iterator.next(); - if (next.done) { - this.done = true; - } else { - this.list.push(next.value); - } - } - return this.list.slice(start, start + count); - } -} - -/** - * A lazy list is the effective equivalent of a Haskell list. - * It is an immutable structure that is constructed from back to front - * and destructured from front to back. - * To enforce this immutable property, the list can be constructed lazily - * from front to back by placing items onto a lazy "thunk", which is - * an operation that will resolve to the remainder of the list (which can also - * contain more thunks) when it is requested, much like JS's iterators. - * - * To allow this lazy property to work with JS, this structure will be backed - * by an iterator. - * - * Such structures will make it much easier to implement operations that require - * immutability, but with far less overhead than alternatives. - */ -export default class LazyList { - private source: LazyListSource; - public start: number; - - constructor(iterable: Iterable) { - // create a new lazy list source from the iterable, - // which will provide the items for this list and any lists created from this one - this.source = new LazyListSource(iterable[Symbol.iterator]()); - // this is the starting item for this list, this value will never change - this.start = 0; - } - - /** - * This is like a "pop" operation to grab the first item from the list, - * returning the item and a new list for the remainder. - */ - shift(): [T, LazyList] { - const [item] = this.source.requestItems(this.start, 1); - const list = this.createNewList(this.start + 1); - return [item, list]; - } - - shifts(num: number): [T[], LazyList] { - const items = this.source.requestItems(this.start, num); - const list = this.createNewList(this.start + num); - return [items, list]; - } - - /** - * Same as shift(), but doesn't return a new list as well - */ - peek(): T { - return this.source.requestItems(this.start, 1)[0]; - } - - peeks(num: number): T[] { - return this.source.requestItems(this.start, num); - } - - /** - * Returns true if this list is empty, false otherwise. 
- */ - empty() { - // enumerate just one more item to determine if the source's iterator is done - this.source.requestItems(this.start, 1); - return this.source.done && this.start >= this.source.list.length; - } - - /** - * Generate a new LazyList instance from this one, with the provided start value - * @private - */ - private createNewList(newStart: number) { - const newList = Object.create>(LazyList.prototype); - newList.source = this.source; - newList.start = newStart; - return newList; - } -} diff --git a/src/parser/ParseExpression.ts b/src/parser/ParseExpression.ts deleted file mode 100644 index c9a8365..0000000 --- a/src/parser/ParseExpression.ts +++ /dev/null @@ -1,115 +0,0 @@ -import { TokenType } from './Tokenizer'; -import { ParserMessageKey } from './ParserMessages'; -import Parser, { ParseNode } from './Parser'; -import ASTNode from '~/syntax/ASTNode'; - - -/** - * A parse expression is an element of a non-terminal expansion, - * and is a key component of defining a grammar. - * A parse expression can be one of five things: - * - a token type (for parsing a specific token type) - * - a token image (for parsing a specific token image) - * - an ASTNode subclass (for parsing a formal non-terminal) - * - a ParseSequence (for parsing an informal non-terminal) - * - an array of any of the above four (for parsing one of a list of choices) - */ -export type ParseExpression = ArrayOrSingle | ParseSequence>; - -export class ParseExpressionInstance { - tokenType?: TokenType; - tokenImage?: string; - nonTerminal?: Class; - choices?: ParseExpressionInstance[]; - sequence?: ParseSequence; - - repeat?: '+' | '*'; - optional?: true; - definite?: true; - flatten?: true; - sep?: ParseExpressionInstance; - err?: ParserMessageKey; - - constructor(expression: ParseExpression | ParseExpressionInstance, options: ParseOptions = {}) { - if (expression instanceof ParseExpressionInstance) { - return Object.assign(this, expression); - } - this.repeat = options.repeat; - this.optional = options.optional; - this.definite = options.definite; - this.flatten = options.flatten; - this.err = options.err; - if (options.sep) this.sep = new ParseExpressionInstance(options.sep, options.sepOptions); - - if (Array.isArray(expression)) { - this.choices = expression.map(e => new ParseExpressionInstance(e)); - } else if (typeof expression === 'number') { - this.tokenType = expression; - } else if (typeof expression === 'string') { - this.tokenImage = expression; - } else if (typeof expression === 'function') { - this.nonTerminal = expression; - } else if (expression) { - this.sequence = expression; - } else { - throw new Error('falsy expression passed to constructor'); - } - } - - hasNonTerminal(cls: Function) { - return this.nonTerminal === cls; - } - - /** - * This is the main "switching" logic for all types of expressions. - * If you want to parse an arbitrary expression, call this. - */ - parse(parser: Parser): ParseNode { - if (this.tokenType) { - return parser.parseTokenType(this.tokenType); - } else if (this.tokenImage) { - return parser.parseTokenImage(this.tokenImage); - } else if (this.nonTerminal) { - return parser.parseNonTerminal(this.nonTerminal); - } else if (this.sequence) { - return parser.parseSequence(this.sequence); - } else if (this.choices) { - return parser.parseChoices(this.choices); - } else { - throw new Error('never'); - } - } -} - -/** - * This is the base configuration for defining a parser. 
- * - repeat: either + (one or more) or * (zero or more) indicating that the parsed - * element can be repeated. - * - sep: If repeat is specified, 'sep' can also be specified to indicate that - * a token must be used to separate repeated elements. - * - definite: indicates the "choosing point" for a non-terminal. If the parse fails - * at or before this element, it will move to the next choice if one exists. If it - * fails after, it will be a hard failure. - * - flatten: for nested ParserDefs, this indicates that the fields inside this parsed - * element should be moved up to the parent element, for convenience. - * - err: if this is specified and the parse fails, this message or function will be used - * to create an error message for the failed element. - */ -export interface ParseOptions { - repeat?: '+' | '*', - optional?: true, - definite?: true, - flatten?: true, - sep?: ParseExpression, - sepOptions?: { definite?: true }, - err?: ParserMessageKey, -} - -/** - * This is used only with the 'parse' option of a ParserDef, when a nested - * definition is going to be used. This definition can be either an object - * of key-value pairs or a list of such objects (for a choice definition). - */ -export type ParseSequence = { - [key: string]: ParseExpression | ParseExpressionInstance; -} \ No newline at end of file diff --git a/src/parser/ParserMessages.ts b/src/parser/ParserMessages.ts index 66167c8..93ad7f8 100644 --- a/src/parser/ParserMessages.ts +++ b/src/parser/ParserMessages.ts @@ -1,4 +1,4 @@ -import { Token } from './Tokenizer'; +import { Token } from './lexer'; const _messages = { diff --git a/src/parser/Tokenizer.ts b/src/parser/Tokenizer.ts deleted file mode 100644 index 22fee8e..0000000 --- a/src/parser/Tokenizer.ts +++ /dev/null @@ -1,689 +0,0 @@ -import ParserError from './ParserError'; -import LazyList from './LazyList'; - - -export class Location { - startLine: number; - startColumn: number; - endLine: number; - endColumn: number; - - constructor(startLine: number, startColumn: number, endLine: number, endColumn: number) { - this.startLine = startLine; - this.startColumn = startColumn; - this.endLine = endLine; - this.endColumn = endColumn; - } - - /** - * Create a new location that contains both this location and the specified location - */ - merge(location: Location) { - let startLine = this.startLine, startColumn = this.startColumn; - let endLine = this.endLine, endColumn = this.endColumn; - if (location.startLine < this.startLine || location.startLine === this.startLine && location.startColumn < this.startColumn) { - [startLine, startColumn] = [location.startLine, location.startColumn]; - } else if (location.endLine > this.endLine || location.endLine === this.endLine && location.endColumn > this.endColumn) { - [endLine, endColumn] = [location.endLine, location.endLine]; - } - return new Location(startLine, startColumn, endLine, endColumn); - } -} - -/** - * Represents a single token extracted from the source string. - * 'type' specifies what kind of terminal the token represents, and is used by the parser. - * 'offset' is the position in the source file of the first character of the token. - * 'image' is an exact copy of the token from the original source string. - * 'value' is an optional value that represents the parsed value of the token, if it makes sense for the token type (numbers, strings, etc.). 
- */ -export class Token { - type: TokenType; - line: number; - column: number; - image: string; - value?: any; - - constructor(type: TokenType, line: number, column: number, image: string, value?: any) { - this.type = type; - this.line = line; - this.column = column; - this.image = image; - this.value = value; - } - - getLocation(): Location { - return new Location(this.line, this.column, this.line, this.column + this.image.length - 1); - } -} - -export enum TokenType { - COMMENT, // characters ignored from code - IDENT, // identifier - RESERVED, // reserved word - INTEGER_LITERAL, // integer number literals - FLOAT_LITERAL, // floating-point number literals - STRING_LITERAL, // character string literals - CHARACTER_LITERAL, // single character literals - OPER, // operators - COLON, // colon (:) symbol - LBRACE, // left brace ({) symbol - RBRACE, // right brace (}) symbol - LPAREN, // left parenthesis (() symbol - RPAREN, // right parenthesis ()) symbol - LBRACK, // left bracket ([) symbol - RBRACK, // right bracket (]) symbol - COMMA, // comma (,) symbol - EQUALS, // equals (=) symbol - FAT_ARROW, // fat arrow (=>) symbol - BACKTICK, // backtick (`) symbol - DOT, // dot (.) symbol - WHITESPACE, // any non-new-line whitespace (spaces, tabs, etc.) - NEWLINE, // any character sequence that produces a new line (inluding ;) - EOF // special end-of-file token -} - -export const RESERVED = [ - 'as', // used for renaming imports - 'any', // supertype of all types - 'bool', // boolean type name - 'break', // statement to break from a loop - 'byte', // byte type name (alias of u8) - 'catch', // denotes a catch block in a try-catch block - 'char', // character type name - 'const', // constant declaration keyword - 'continue', // statement to skip to the next iteration of a loop - 'default', // used to declare a default export, also for a default case in a pattern match block - 'do', // denotes the start of a do-while loop - 'double', // double type name (alias for f64) - 'else', // denotes the start of an else clause - 'export', // declares a module export - 'f32', // 32-bit floating point type (equivalent to 'float') - 'f64', // 64-bit floating point type (equivalent to 'double') - 'false', // boolean false value - 'finally', // denotes a finally block in a try-catch-finally block - 'float', // float type name (alias for f32) - 'for', // denotes the start of a for loop - 'from', // used in import and export declarations to specify the name of another module - 'func', // denotes a named function declaration - 'i16', // 16 bit signed integer type - 'i32', // 32 bit signed integer type (equivalent to 'int') - 'i64', // 64 bit signed integer type (equivalent to 'long') - 'i8', // 8 bit signed integer type - 'if', // denotes the start of an if block - 'import', // declares a module import - 'in', // separates iterator variable and iterable expression in for statements - 'int', // int type name (alias for i32) - 'integer', // integer type name (true integer, infinite capacity) - 'long', // long type name (alias for i64) - 'return', // denotes a return statement to return a value from a function - 'short', // short type name (alias for u16) - 'string', // string type name - 'throw', // denotes a throw statement to throw an exception from a function - 'true', // boolean true value - 'try', // denotes the start of a try-catch block - 'type', // denotes the start of a type declaration - 'u16', // 16 bit unsigned integer type (equivalent to 'short') - 'u32', // 32 bit unsigned integer type - 'u64', // 64 bit 
unsigned integer type - 'u8', // 8 bit unsigned integer type (equivalent to 'byte') - 'void', // return type of functions, indicates no value is returned (alias for '()') - 'while', // denotes the start of a while loop -] - -/** - * Class responsible for lexical analysis of a source string: splitting the source string into tokens. - * This class implements the Iterable interface, so it must be iterated, which yields each token in the source string. - */ -export default class Tokenizer { - // set of characters allowed for semantic operators (note that equals by itself or followed by a greater-than are both reserved) - static OPER_CHARS = ['~', '!', '$', '%', '^', '&', '*', '+', '-', '=', '|', '<', '>', '?', '/']; - - // reserved syntactic symbols, no operator can contain these (except =) - static SYMBOL_MAP: { [sym: string]: TokenType } = { - ':': TokenType.COLON, - '{': TokenType.LBRACE, - '}': TokenType.RBRACE, - '(': TokenType.LPAREN, - ')': TokenType.RPAREN, - '[': TokenType.LBRACK, - ']': TokenType.RBRACK, - ',': TokenType.COMMA, - '=': TokenType.EQUALS, - '=>': TokenType.FAT_ARROW, - '`': TokenType.BACKTICK, - '.': TokenType.DOT, - }; - - public list: LazyList; - private gen: Iterator; - private lineNumber: number; - private currentLineOffset: number; - public ignoreMode: boolean; - - /** - * - source: a reference to the original source string - * - iterator: the lookahead iterator providing characters from the source string - * - gen: generator that yields tokens, logic contained in _generator() - * - lineNumber: the current line number - * - currentLineOffset: the offset in the source string off the current line, used to determine column numbers of tokens and errors - * - ignoreMode: if true, all comments, whitespace, and semicolons are ignored from the yielded token output; if false, all tokens are yielded - */ - constructor(source: string) { - this.list = new LazyList(source); - this.gen = this.generator(); - this.lineNumber = 1; - this.currentLineOffset = 0; - this.ignoreMode = true; - } - - /** - * Computes the 1-based column number of the *previously consumed* character. - * Ex: in the source code string "\nabcde", these are the columns: - * Character | Offset | Line # | Column # - * -------------------------------------- - * SOF | 0 | 1 | 0 (SOF is not a character, it just represents the initial state, a.k.a. no characters yet consumed) - * \n | 1 | 1 | 1 (once \n is consumed, the iterator offset is 1, the character itself is still on line 1, column 1) - * a | 2 | 2 | 1 (we are now on a new line, 1st character of the line) - * b | 3 | 2 | 2 - * c | 4 | 2 | 3 - * d | 5 | 2 | 4 - * e | 6 | 2 | 5 - * EOF | 7 | 2 | 6 - * We can compute the column of a character by subtracting the offset of the start of the line from the iterator offset. - * = this.iterator.offset - this.currentLineOffset - * We can compute the column of the start of the token by subtracting the length of the token from the column of the last character and adding 1. - * = this.iterator.offset - this.currentLineOffset - token.length + 1 - * - * For example: using the token 'abcde' above: - * - this.iterator.offset = 6 - * - this.currentLineOffset = 1 - * - token.length = 5 - * - token.column = 6 - 1 - 5 + 1 = 1 - */ - get columnNumber() { - return this.list.start - this.currentLineOffset; - } - - /** - * Given a token length (should be equal to the distance between the start of the token and the current offset, whatever that may be), - * compute the column number of that token. 
- */ - getColumnNumber(tokenLength: number) { - return (this.columnNumber - tokenLength) + 1; - } - - [Symbol.iterator]() { - return this; - } - - next() { - return this.gen.next(); - } - - moveNext() { - let next; - [next, this.list] = this.list.shift(); // eslint-disable-line prefer-const - return next; - } - - appendTo(image: string) { - let next; - [next, this.list] = this.list.shift(); // eslint-disable-line prefer-const - return image + next; - } - - /** - * Fully iterate the entire source string, extracting tokens according to the language's grammar rules and yielding each one. - */ - private *generator() { - let c; - // fully evaluate the lazy list - while (!this.list.empty()) { - [c, this.list] = this.list.shift(); - // "kind" is either "uppercase", "lowercase", "number", or the character - const kind = this.kind(c); - const [c1] = this.list.shift(); - - // This logic follows a specific order for attempting to extract tokens: - // 1. Identifiers (also checks keywords that match an identifier string) - // 2. Numbers (includes 0, hex, binary, float, and decimal integers) - // 3. String literals - // 4. Char literals - // 5. Special syntactic symbols (reserved symbols that have syntactic meaning, ignoring =) - // 6. Symbols starting with = (just '=' and '=>', fall back to operator if neither matches) - // 7. Operators (tokens made from a specific list of allowed operator characters) - // 8. New lines (\n and ;, semi is for separating a line in two without an actual new line) - // 9. CRLF new lines (\r is treated as normal whitespace if not followed by a \n) - // 10. Whitespace (space and tab) - // 11. Everything else (throws an error for now) - - if (c === '/' && c1 === '/') { - // single-line comment - const com = this.consumeSingleLineComment(c); - if (!this.ignoreMode) yield com; - } else if (c === '/' && c1 === '*') { - // multi-line comment - const com = this.consumeMultiLineComment(c); - if (!this.ignoreMode) yield com; - } else if (kind === 'uppercase' || kind === 'lowercase' || c === '_') { - // valid identifier start, consume an identifier - yield this.consumeIdentifier(c); - } else if (kind === 'number') { - // consume the number - yield this.consumeNumber(c); - } else if (c === '"') { - // consume a string literal - yield this.consumeStringLiteral(c); - } else if (c === "'") { - // consume a character literal - yield this.consumeCharacterLiteral(c); - } else if (Tokenizer.SYMBOL_MAP[c] && c !== '=') { - // consume a symbol - yield new Token(Tokenizer.SYMBOL_MAP[c], this.lineNumber, this.columnNumber, c); - } else if (c === '=') { - // consume an equals, a fat arrow, or an operator starting with equals - if (c1 === '>') { - // fat arrow (NOTE: this will ignore any other operator characters that come immediately after, fat arrow takes precedence) - this.moveNext(); - yield new Token(Tokenizer.SYMBOL_MAP['=>'], this.lineNumber, this.columnNumber - 1, '=>'); - } else if (Tokenizer.OPER_CHARS.includes(c1)) { - // other non-greater-than operator character, consume as operator - yield this.consumeOperator(c); - } else { - // otherwise it's a lone equals symbol - yield new Token(Tokenizer.SYMBOL_MAP[c], this.lineNumber, this.columnNumber, c); - } - } else if (Tokenizer.OPER_CHARS.includes(c)) { - // consume as operator - yield this.consumeOperator(c); - } else if (c === '\n' || c === ';') { - // new line character - if (!this.ignoreMode) yield new Token(TokenType.NEWLINE, this.lineNumber, this.columnNumber, c); - if (c === '\n') { - // increment line number - this.lineNumber++; - 
this.currentLineOffset = this.list.start; - } - } else if (c === '\r') { - if (c1 === '\n') { - // treat the whole thing as a new line - this.moveNext(); - if (!this.ignoreMode) yield new Token(TokenType.NEWLINE, this.lineNumber, this.getColumnNumber(2), '\r\n'); - // increment line number - this.lineNumber++; - this.currentLineOffset = this.list.start; - } else { - // otherwise treat it as normal whitespace - const w = this.consumeWhitespace(c); - if (!this.ignoreMode) yield w; - } - } else if (c === ' ' || c === '\t') { - // consume whitespace - const w = this.consumeWhitespace(c); - if (!this.ignoreMode) yield w; - } else { - // otherwise it is not a valid character (for now) - throw new ParserError(`Invalid character '${c}'`, this.lineNumber, this.columnNumber); - } - } - // yield a EOF token - yield new Token(TokenType.EOF, this.lineNumber, this.getColumnNumber(0), ''); - } - - /** - * Determine if a character is a lowercase character, an uppercase character, or a number. - * Return the character as-is for anything else. - */ - kind(char: string) { - if (char >= 'a' && char <= 'z') return 'lowercase'; - else if (char >= 'A' && char <= 'Z') return 'uppercase'; - else if (char >= '0' && char <= '9') return 'number'; - else return char; - } - - consumeSingleLineComment(image: string) { - while (this.list.peek() !== '\n' && !this.list.empty()) { - image = this.appendTo(image); - } - if (this.list.peek() === '\n') image = this.appendTo(image); - return new Token(TokenType.COMMENT, this.lineNumber, this.getColumnNumber(image.length), image); - } - - consumeMultiLineComment(image: string) { - const lineNumber = this.lineNumber; - const columnNumber = this.getColumnNumber(1); - image = this.appendTo(image); - while (this.list.peek() && this.list.peek() !== '*' && this.list.peeks(2)[1] !== '/') { - if (this.list.peek() === '\n') { - this.lineNumber++; - this.currentLineOffset = this.list.start + 1; - } - image = this.appendTo(image); - } - if (this.list.peek() === '*') image = this.appendTo(image); - if (this.list.peek() === '/') image = this.appendTo(image); - return new Token(TokenType.COMMENT, lineNumber, columnNumber, image); - } - - /** - * image is alphanumeric (or underscore), consume an identifier. - * This may match a keyword, in which case that will be returned instead. - */ - consumeIdentifier(image: string) { - const next = this.list.peek(); - const kind = this.kind(next); - // if the next character is a valid identifier character, loop to get all the remaining ones - if (kind === 'uppercase' || kind === 'lowercase' || kind === 'number' || next === '_') { - while (true) { - image = this.appendTo(image); - const kind1 = this.kind(this.list.peek()); - // if the next character will not be a valid identifier character, then break - if (kind1 !== 'uppercase' && kind1 !== 'lowercase' && kind1 !== 'number' && kind1 !== '_') break; - } - } - // if the identifier we captured is a reserved word, return the reserved word - if (RESERVED.includes(image)) return new Token(TokenType.RESERVED, this.lineNumber, this.getColumnNumber(image.length), image); - // otherwise, return an identifier - else return new Token(TokenType.IDENT, this.lineNumber, this.getColumnNumber(image.length), image); - } - - /** - * All that we know is that the image represents the start of a number. - * Figure out what kind and return a token. 
- */ - consumeNumber(image: string) { - const [c, c1] = this.list.peeks(2); - if (image.endsWith('0') && c && c1) { - // literals that start with 0 are a special case, check for alternative bases. - if (c.toLowerCase() === 'x') { - // in order for this to be a valid hex literal, the '0x' must be followed by at least 1 hex digit - if (this.isHexidecimalDigit(c1)) { - // consume hexadecimals, return hex literal token - return this.consumeHexLiteral(image); - } - } else if (c.toLowerCase() === 'b') { - // in order for this to be a valid binary literal, the '0b' must be followed by at least 1 binary digit - if (c1 === '0' || c1 === '1') { - // consume binary digits, return binary literal token - return this.consumeBinaryLiteral(image); - } - } else if (c === '.' || c.toLowerCase() === 'e') { - // in order for this to be valid, it must be followed by a number - if (this.kind(c1) === 'number') { - return this.consumeFloatLiteral(image); - } - } - } - // if this is a 0 that is not followed by a 'x', 'b', '.', or 'e', or it is not a 0 at all, consume it as a normal decimal integer - return this.consumeIntegerLiteral(image); - } - - /** - * Given a starting image (containing '0') and a lookahead character (verified to be 'x') - * and it is known that the next lookahead character is a hex digit, consume an entire hex literal. - */ - consumeHexLiteral(image: string) { - // it has already been verified that next is 'x' and the following character is a hex digit, skip ahead to the hex digits. - image = this.appendTo(image); - // take the first digit - image = this.appendTo(image); - // while the next character is a hex digit, add it to the image - while (this.isHexidecimalDigit(this.list.peek())) image = this.appendTo(image); - return new Token(TokenType.INTEGER_LITERAL, this.lineNumber, this.getColumnNumber(image.length), image, parseInt(image, 16)); - } - - /** - * Given a starting image (containing '0') and a lookahead character (verified to be 'b') - * and it is known that the next lookahead character is a binary digit, consume an entire binary literal. - */ - consumeBinaryLiteral(image: string) { - // it has already been verified that next is 'b' and the following character is binary, skip ahead to the digits. - image = this.appendTo(image); - // take the first digit - image = this.appendTo(image); - // while the next character is a binary digit, add it to the image - while (this.isBinaryDigit(this.list.peek())) image = this.appendTo(image); - // JS doesn't have binary literals, so we need to remove the prefix when parsing - return new Token(TokenType.INTEGER_LITERAL, this.lineNumber, this.getColumnNumber(image.length), image, parseInt(image.replace(/0b/i, ''), 2)); - } - - isBinaryDigit = (c: string) => c === '0' || c === '1'; - - /** - * Given a starting image (containing some sequence of numbers) and a lookahead character (must be either '.' or 'e') - * and it is know that the next lookahead character is a number, consume an entire floating point literal. 
- */ - consumeFloatLiteral(image: string): Token { - const next = this.list.peek(); - // next is either a dot or 'e', accept it, skip ahead two characters, next must be a number, accept it right away - image = this.appendTo(image); - image = this.appendTo(image); - - if (next === '.') { - // handle fractional portion, consume all numbers following - while (this.kind(this.list.peek()) === 'number') image = this.appendTo(image); - // next character is e, handle exponent portion - const [c1, c2] = this.list.peeks(2); - if (c1 && c1.toLowerCase() === 'e') { - // but only do it if there is a number after e - if (this.kind(c2) === 'number') { - // recurse, this will only happen once because the next character is an e - return this.consumeFloatLiteral(image); - } - } - } else { - // must be e, handle exponent portion - while (this.kind(this.list.peek()) === 'number') image = this.appendTo(image); - } - // we arrive here when we've consumed as much floating point characters as we can - return new Token(TokenType.FLOAT_LITERAL, this.lineNumber, this.getColumnNumber(image.length), image, parseFloat(image)); - } - - /** - * Given a starting image (some sequence of numbers) and a lookahead character (can possibly be any character), - * consume an entire integer literal (may encounter a floating point literal) - */ - consumeIntegerLiteral(image: string) { - let next = this.list.peek(); - // if next is number, we want to consume more numbers if there are any - if (this.kind(next) === 'number') { - image = this.appendTo(image); - // consume all subsequenct numbers - while (this.kind(this.list.peek()) === 'number') { - image = this.appendTo(image); - } - // if the next is now a dot or e that is followed by a number, we have a float, defer to that logic - next = this.list.peek(); - if (next === '.' || next === 'e') { - if (this.kind(this.list.peeks(2)[1]) === 'number') { - return this.consumeFloatLiteral(image); - } - } - } else if (next === '.' 
|| next === 'e') { - // if the current next is a dot or e followed by a number, parse as float - if (this.kind(this.list.peeks(2)[1]) === 'number') { - return this.consumeFloatLiteral(image); - } - } - // otherwise take the numbers we have enumerated so far and parse them as an int - return new Token(TokenType.INTEGER_LITERAL, this.lineNumber, this.getColumnNumber(image.length), image, parseInt(image, 10)); - } - - /** - * Given a starting image (") and an initial next character, consume an entire string literal - */ - consumeStringLiteral(image: string) { - // if there is no next character, throw an error - if (!this.list.peek()) throw new ParserError('Unterminated string', this.lineNumber, this.columnNumber); - - let value = ''; - if (this.list.peek() !== '"') { - do { - const c = this.moveNext(); - image += c; - if (c === '\\') { - const next = this.list.peek(); - if (next !== 'x' && next !== 'u') { - switch (next) { - case 'n': value += '\n'; break; // new line - case 'r': value += '\r'; break; // carriage return - case 't': value += '\t'; break; // tab - case 'f': value += '\f'; break; - case 'b': value += '\b'; break; // backspace - case 'v': value += '\v'; break; // vertical tab - default: value += next; break; - } - // skip ahead for a basic escape sequence because there are only two characters - image = this.appendTo(image); - } else if (next === 'x') { - // ascii escape code - const [c1, c2, c3] = this.list.peeks(3); - if (this.isHexidecimalDigit(c2) && this.isHexidecimalDigit(c3)) { - image += (c1 + c2 + c3); - value += String.fromCodePoint(parseInt(c2 + c3, 16)); - for (let i = 0; i < 3; ++i) this.moveNext(); - } else { - // invalid escape code, treat it like \x - value += c1; - image += c1; - this.moveNext(); - } - } else { - // unicode escape code - const [c1, c2, c3, c4, c5, c6, c7, c8, c9] = this.list.peeks(9); - if ([c2, c3, c4, c5].every(ch => this.isHexidecimalDigit(ch))) { - image += (c1 + c2 + c3 + c4 + c5); - value += String.fromCodePoint(parseInt(c2 + c3 + c4 + c5, 16)); - for (let i = 0; i < 5; ++i) this.moveNext(); - } else if (c2 === '{' && [c3, c4, c5, c6, c7].every(ch => this.isHexidecimalDigit(ch)) && c8 === '}') { - image += [c1, c2, c3, c4, c5, c6, c7, c8].join(''); - value += String.fromCodePoint(parseInt(c3 + c4 + c5 + c6 + c7, 16)); - for (let i = 0; i < 8; ++i) this.moveNext(); - } else if (c2 === '{' && [c3, c4, c5, c6, c7, c8].every(ch => this.isHexidecimalDigit(ch)) && c9 === '}') { - image += [c1, c2, c3, c4, c5, c6, c7, c8, c9].join(''); - value += String.fromCodePoint(parseInt(c3 + c4 + c5 + c6 + c7 + c8, 16)); - for (let i = 0; i < 9; ++i) this.moveNext(); - } else { - // invalid, treat it like \u - value += c1; - image += c1; - this.moveNext(); - } - } - } else { - // just a normal everyday character - value += c; - } - } while (this.list.peek() && (this.list.peek() !== '"' || image.endsWith('\\'))); - // no next character, throw an error - if (!this.list.peek()) throw new ParserError('Unterminated string', this.lineNumber, this.columnNumber); - } - // next character is double quote - this.moveNext(); - image += '"'; - return new Token(TokenType.STRING_LITERAL, this.lineNumber, this.getColumnNumber(image.length), image, value); - } - - /** - * Given a starting image (') and an initial next character, consume an entire character literal - */ - consumeCharacterLiteral(image: string) { - // if there is no next character, throw an error - if (!this.list.peek()) throw new ParserError('Unterminated character', this.lineNumber, this.columnNumber); - if 
(this.list.peek() === "'") throw new ParserError('Empty character', this.lineNumber, this.columnNumber + 1); - - let value; - const c = this.moveNext(); - image += c; - if (c === '\\') { - // escape sequence - const next = this.list.peek(); - if (next !== 'x' && next !== 'u') { - switch (next) { - case 'n': value = '\n'; break; // new line - case 'r': value = '\r'; break; // carriage return - case 't': value = '\t'; break; // tab - case 'f': value = '\f'; break; - case 'b': value = '\b'; break; // backspace - case 'v': value = '\v'; break; // vertical tab - default: value = next; break; - } - // skip ahead for a basic escape sequence because there are only two characters - image = this.appendTo(image); - } else if (next === 'x') { - // ascii escape code - const [c1, c2, c3] = this.list.peeks(3); - if (this.isHexidecimalDigit(c2) && this.isHexidecimalDigit(c3)) { - image += (c1 + c2 + c3); - value = String.fromCodePoint(parseInt(c2 + c3, 16)); - for (let i = 0; i < 3; ++i) this.moveNext(); - } else { - // invalid escape code, treat it like \x - value = c1; - image += c1; - this.moveNext(); - } - } else { - // unicode escape code - const [c1, c2, c3, c4, c5, c6, c7, c8, c9] = this.list.peeks(9); - if ([c2, c3, c4, c5].every(ch => this.isHexidecimalDigit(ch))) { - image += (c1 + c2 + c3 + c4 + c5); - value = String.fromCodePoint(parseInt(c2 + c3 + c4 + c5, 16)); - for (let i = 0; i < 5; ++i) this.moveNext(); - } else if (c2 === '{' && [c3, c4, c5, c6, c7].every(ch => this.isHexidecimalDigit(ch)) && c8 === '}') { - image += [c1, c2, c3, c4, c5, c6, c7, c8].join(''); - value = String.fromCodePoint(parseInt(c3 + c4 + c5 + c6 + c7, 16)); - for (let i = 0; i < 8; ++i) this.moveNext(); - } else if (c2 === '{' && [c3, c4, c5, c6, c7, c8].every(ch => this.isHexidecimalDigit(ch)) && c9 === '}') { - image += [c1, c2, c3, c4, c5, c6, c7, c8, c9].join(''); - value = String.fromCodePoint(parseInt(c3 + c4 + c5 + c6 + c7 + c8, 16)); - for (let i = 0; i < 9; ++i) this.moveNext(); - } else { - // invalid, treat it like \u - value = c1; - image += c1; - this.moveNext(); - } - } - } else { - // just a normal everyday character - value = c; - } - // no next character, throw an error - if (!this.list.peek()) throw new ParserError('Unterminated character', this.lineNumber, this.columnNumber); - // next character is single quote - this.moveNext(); - image += "'"; - return new Token(TokenType.CHARACTER_LITERAL, this.lineNumber, this.getColumnNumber(image.length), image, value); - } - - /** - * Consume a sequence of valid operator characters - */ - consumeOperator(image: string) { - // < and > have special behavior in the parser, so we tokenize them individually - if (image === '<' || image === '>') return new Token(TokenType.OPER, this.lineNumber, this.getColumnNumber(image.length), image); - while (Tokenizer.OPER_CHARS.includes(this.list.peek())) image = this.appendTo(image); - return new Token(TokenType.OPER, this.lineNumber, this.getColumnNumber(image.length), image); - } - - /** - * Consume any amount of spaces and tabs - */ - consumeWhitespace(image: string) { - while (this.isWhitespace(this.list.peek())) image = this.appendTo(image); - return new Token(TokenType.WHITESPACE, this.lineNumber, this.getColumnNumber(image.length), image); - } - - isWhitespace = (c: string) => c === ' ' || c === '\t'; - - /** - * Returns true if c is a hexadecimal character - */ - isHexidecimalDigit(c: string) { - if (!c) return false; - const low = c.toLowerCase(); - return (c >= '0' && c <= '9') || (low >= 'a' && low <= 'f'); - } 
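// For reference, a minimal sketch of the numeric-literal rules implemented above by
// consumeNumber / consumeHexLiteral / consumeBinaryLiteral / consumeFloatLiteral /
// consumeIntegerLiteral. `classifyNumber` is an illustrative helper, not code from this
// repo, and it only mirrors the dispatch rules, not the character-by-character consumption:
function classifyNumber(image: string): 'hex' | 'binary' | 'float' | 'integer' {
    // '0x' / '0b' prefixes only count when followed by at least one valid digit
    if (/^0x[0-9a-f]/i.test(image)) return 'hex';
    if (/^0b[01]/.test(image)) return 'binary';
    // '.' or 'e' only starts a fractional/exponent part when a digit follows it
    if (/^[0-9]+[.e][0-9]/i.test(image)) return 'float';
    return 'integer';
}
// classifyNumber('0x1F')  === 'hex'     -> INTEGER_LITERAL with value 31
// classifyNumber('0b101') === 'binary'  -> INTEGER_LITERAL with value 5
// classifyNumber('1.5e2') === 'float'   -> FLOAT_LITERAL with value 150
// classifyNumber('10')    === 'integer' -> INTEGER_LITERAL with value 10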
-} diff --git a/src/parser/index.ts b/src/parser/index.ts index b13439f..89cb680 100644 --- a/src/parser/index.ts +++ b/src/parser/index.ts @@ -1,7 +1,11 @@ -import { Program } from '~/syntax'; -import Parser from './Parser'; +import { ModuleRoot } from '~/syntax'; +import { createTokenStream } from './lexer'; +import { Parser } from './parser'; +import { SyntaxEnvironment } from '~/syntax/environment'; -export default function parse(source: string) { - return new Parser(source).parse(Program) as Program; -} +export function parseModule(path: string): ModuleRoot { + const parser = Parser(createTokenStream(path)); + const env = SyntaxEnvironment(); + return parser.parse(env.ModuleRoot); +} \ No newline at end of file diff --git a/src/parser/lexer/token.ts b/src/parser/lexer/token.ts index c193bfe..c108bc4 100644 --- a/src/parser/lexer/token.ts +++ b/src/parser/lexer/token.ts @@ -16,7 +16,7 @@ export enum TokenType { OPER, // operators SYMBOL, // any special syntactic symbols WHITESPACE, // any non-new-line whitespace (spaces, tabs, etc.) - NEWLINE, // any character sequence that produces a new line (inluding ;) + NEWLINE, // \r\n and \n, has syntactic significance SEMI, // semicolon, special delimiter that behaves as a new line EOF // special end-of-file token } @@ -34,6 +34,7 @@ export interface Token { readonly image: string; readonly value?: any; toString(): string; + with(props: Partial): Token; } /** Creates a new token */ @@ -42,12 +43,16 @@ export function Token(type: TokenType, position: FilePosition, image: string, va } export namespace Token { + const tokenSymbol = Symbol('Token'); + /** Creates a new token */ export function create(type: TokenType, position: FilePosition, image: string, value?: any): Token { - return { + const token: Token = { type, image, value, location: position.computeRange(image), - toString, + toString, with: _with, }; + // separate symbol assignment so that we catch excessive property errors above + return { ...token, [tokenSymbol]: tokenSymbol } as Token; } /** @@ -58,7 +63,18 @@ export namespace Token { return create(TokenType.NONE, position, image); } + /** + * Determines if an object is a token. 
+ */ + export function isToken(token: {}): token is Token { + return tokenSymbol in token; + } + function toString(this: Token) { return this.image; } + + function _with(this: Token, props: Partial) { + return { ...this, ...props }; + } } diff --git a/src/parser/parser.ts b/src/parser/parser.ts new file mode 100644 index 0000000..619a803 --- /dev/null +++ b/src/parser/parser.ts @@ -0,0 +1,186 @@ +import { FileRange } from '~/core'; +import { Token, TokenType } from '~/parser/lexer'; +import { LazyList, NonEmptyLazyList, EmptyLazyList } from '~/utils/lazy-list'; + + +type RepeatKey = '+' | '*'; + +export type ParseFunc = (ctx: Parser) => ParseResult; + +export interface ParseResult { + result: Optional; + remaining: Parser; +} + +interface ParseResultInternal { + result: Optional; + remaining: ParserInternal; +} + +// #region Parser + +export interface Parser { + parse(fn: ParseFunc): T; +} + +interface BaseParserInternal extends Parser { + readonly failToken: Optional; + readonly successLocation: Optional; + fail(token: Optional): ParserInternal; + succeed(location: Optional): ParserInternal; +} + +interface NonEmptyParserInternal extends BaseParserInternal { + readonly empty: false; + readonly tokens: NonEmptyLazyList; + next(): { token: Token, remaining: ParserInternal }; +} + +interface EmptyParserInternal extends BaseParserInternal { + readonly empty: true; + readonly tokens: EmptyLazyList; +} + +type ParserInternal = NonEmptyParserInternal | EmptyParserInternal; + +export function Parser(tokenStream: LazyList): Parser { + return Parser.init(tokenStream); +} + +export namespace Parser { + export function init(tokenStream: LazyList): Parser { + const common = { parse, succeed, fail, successLocation: null, failToken: null }; + if (tokenStream.empty) { + const parser: EmptyParserInternal = { ...common, empty: true, tokens: tokenStream }; + return parser; + } else { + const parser: NonEmptyParserInternal = { ...common, empty: false, tokens: tokenStream, next }; + return parser; + } + } + + function parse(this: ParserInternal, fn: ParseFunc): T { + const { result, remaining } = fn(this) as ParseResultInternal; + if (!result) throw new Error('FAIL: unable to parse'); // TODO this is not how this should work + if (!remaining.empty) throw new Error('FAIL: unprocessed input remains'); // TODO pass token + return result; + } + + function next(this: NonEmptyParserInternal): { token: Token, remaining: ParserInternal } { + return { token: this.tokens.head, remaining: init(this.tokens.tail) as ParserInternal }; + } + + function succeed(this: ParserInternal, location: Optional): ParserInternal { + return { ...this, successLocation: location, failToken: null }; + } + + function fail(this: ParserInternal, token: Optional): ParserInternal { + return { ...this, failToken: token, successLocation: null }; + } +} + +// #endregion + +export function optional(fn: ParseFunc): ParseFunc> { + return parser => { + const { result, remaining } = fn(parser) as ParseResultInternal; + return { result, remaining: remaining.succeed(remaining.successLocation) }; + } +} + +export function repeat(fn: ParseFunc, key: RepeatKey, sep?: ParseFunc): ParseFunc { + if (key === '+' && !sep) { + // desugar: T+ => (T T*) + return seq( + fn, + repeat(fn, '*'), + ([first, rest]) => [first, ...rest]) + } else if (key === '*' && sep) { + // desugar: T(* sep s) => (T (s T)*)? 
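// e.g. with fn = tok(TokenType.IDENT) and sep = tok(','), this matches "a, b, c" as one
// IDENT followed by zero or more (',' IDENT) pairs; wrapping the whole sequence in
// optional() is what lets an empty list succeed as well.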
+ return optional(seq( + fn, + repeat(seq( + sep, + fn, + ([_, res]) => res), '*'), + ([first, rest]) => [first, ...rest])); + } else if (key === '+' && sep) { + // desugar: T(+ sep s) => (T (s T)*) + return seq( + fn, + repeat(seq( + sep, + fn, + ([_, res]) => res), '*'), + ([first, rest]) => [first, ...rest]); + } else { + // base case, collect successful results repeatedly until first failure + return parser => { + const results: T[] = []; + let next = parser as ParserInternal; + let location: Optional = null; + while (true) { + const { result, remaining } = fn(next) as ParseResultInternal; + if (result) { + results.push(result); + next = remaining; + location = location ? location.merge(remaining.successLocation!) : remaining.successLocation; + } else { + return { result: results, remaining: next.succeed(location) }; + } + } + }; + } +} + +export function seq(f1: ParseFunc, toResult: (s: T1, location: FileRange) => T): ParseFunc; +export function seq(f1: ParseFunc, f2: ParseFunc, toResult: (s: [T1, T2], location: FileRange) => T): ParseFunc; +export function seq(f1: ParseFunc, f2: ParseFunc, f3: ParseFunc, toResult: (s: [T1, T2, T3], location: FileRange) => T): ParseFunc; +export function seq(f1: ParseFunc, f2: ParseFunc, f3: ParseFunc, f4: ParseFunc, toResult: (s: [T1, T2, T3, T4], location: FileRange) => T): ParseFunc; +export function seq(f1: ParseFunc, f2: ParseFunc, f3: ParseFunc, f4: ParseFunc, f5: ParseFunc, toResult: (s: [T1, T2, T3, T4, T5], location: FileRange) => T): ParseFunc; +export function seq(f1: ParseFunc, f2: ParseFunc, f3: ParseFunc, f4: ParseFunc, f5: ParseFunc, f6: ParseFunc, toResult: (s: [T1, T2, T3, T4, T5, T6], location: FileRange) => T): ParseFunc; +export function seq(f1: ParseFunc, f2: ParseFunc, f3: ParseFunc, f4: ParseFunc, f5: ParseFunc, f6: ParseFunc, f7: ParseFunc, toResult: (s: [T1, T2, T3, T4, T5, T6, T7], location: FileRange) => T): ParseFunc; +export function seq(...args: any[]): ParseFunc { + const fns = args.slice(0, args.length - 1) as ParseFunc<{}>[]; + const toResult = args[args.length - 1] as (s: Array>, location: FileRange) => T; + + return (parser: ParserInternal) => { + let next = parser; + const results: Array> = []; + let location: Optional = null; + for (const fn of fns) { + const { result, remaining } = fn(next) as ParseResultInternal<{}>; + if (remaining.failToken) return { result: null, remaining } + results.push(result); + location = result ? location ? location.merge(remaining.successLocation!) 
: remaining.successLocation : location; + } + return { result: toResult(results, location!), remaining: next.succeed(location) }; + } +} + +export function tok(image: string): ParseFunc; +export function tok(type: TokenType): ParseFunc; +export function tok(t: string | TokenType): ParseFunc { + return (parser: ParserInternal) => { + if (parser.empty) throw new Error('token stream was empty'); + const { token, remaining } = parser.next(); + if (typeof t === 'string') { + if (token.image !== t) return { result: null, remaining: parser.fail(token) }; + } else { + if (token.type !== t) return { result: null, remaining: parser.fail(token) }; + } + return { result: token, remaining: remaining.succeed(token.location) }; + } +} + +export function select(...fns: ParseFunc[]): ParseFunc { + return (parser: ParserInternal) => { + for (const fn of fns) { + const { result, remaining } = fn(parser) as ParseResultInternal; + if (remaining.failToken) continue; + return { result, remaining: remaining.succeed(remaining.successLocation) }; + } + if (parser.empty) throw new Error('token stream was empty'); + return { result: null, remaining: parser.fail(parser.next().token) }; + } +} diff --git a/src/runtime/Module.ts b/src/runtime/Module.ts deleted file mode 100644 index ae066dd..0000000 --- a/src/runtime/Module.ts +++ /dev/null @@ -1,119 +0,0 @@ -import { resolve, dirname, join } from 'path'; -import { existsSync as exists, lstatSync as lstat, readFileSync as readFile } from 'fs'; - -import ASTNode from '~/syntax/ASTNode'; -import parse from '~/parser'; -import Func from '~/translator/Func'; -import { ImportDeclaration, ExportForwardDeclaration, Program, - TypeDeclaration, FunctionDeclaration, ConstantDeclaration } from '~/syntax'; - - -export interface Import { - moduleId: number; - exportName: string; - kind: 'type' | 'func' | 'const' | 'namespace'; - ast: ImportDeclaration | ExportForwardDeclaration; -} - -export interface Export { - kind: 'type' | 'func' | 'const' | 'namespace'; - valueName: string; -} - -export interface ModuleElement { - ast: T; - imported?: boolean; - resolving?: boolean; - func: Func; -} - -/** - * Container class for a Ren module, - * including all information required to identify and execute the module - */ -export default class Module { - id: number; - path: string; - ast: Program; - imports: { [name: string]: Import }; - types: { [name: string]: ModuleElement }; - functions: { [name: string]: ModuleElement }; - constants: { [name: string]: ModuleElement }; - namespaces: { [name: string]: number }; // module id of the namespace - exports: { [name: string]: Export }; - - /** - * Create a new module. 
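// For reference, a rough sketch of how the combinators defined in parser.ts above compose
// into a grammar rule. The `Alias` rule below is illustrative only (it is not part of this
// grammar), the path is a placeholder, and real top-level rules finish by consuming
// TokenType.EOF, as ModuleRoot does:
//
//     const Alias = seq(
//         tok(TokenType.IDENT),
//         optional(seq(tok('as'), tok(TokenType.IDENT), ([_, alias]) => alias)),
//         ([name, alias], location) => ({ name, alias, location })
//     );
//     const result = Parser(createTokenStream('/path/to/module.ren')).parse(Alias);
//
// Each ParseFunc either succeeds (carrying a result plus the remaining, success-marked
// parser) or fails (recording the offending token), which is what lets select() try
// alternatives and repeat() stop cleanly at the first failure.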
- * id: a number uniquely identifying the module in a given runtime environment - * path: the absolute path of the module (this MUST be absolute, and it MUST correspond to an existing path) - * ast: the parsed syntax tree of the code inside the module (if it is not provided, the file at the specified path will be parsed) - */ - constructor(id: number, path: string, ast?: Program) { - this.id = id; - this.path = path; - this.ast = ast || this.parseModule(); - // symbol tables - this.imports = {}; // values imported from other modules - this.exports = {}; // values exported from this module - this.types = {}; // types declared in this module - this.functions = {}; // functions declared in this module - this.constants = {}; // constants declared in this module (only possible as part of an export, or an import of one from another module) - this.namespaces = {}; - } - - /** - * Given a path of a module imported into this module, - * resolve the absolute path of that module. - */ - resolvePath(path: string) { - // if it is a relative path, resolve the relation and determine if it exists - if (path.startsWith('.')) { - const resolved = resolve(dirname(this.path), path); - return this.resolveDirectPath(resolved); - } - // otherwise, it is a package import - let dir = dirname(this.path); - while (dir) { - // we want to check the path '{currentModuleDir}/packages/{importPath}' for a valid module - const resolved = this.resolveDirectPath(join(dir, 'packages', path)); - // valid path, use it - if (resolved) return resolved; - // if it didn't exist, we want to continue to check parent directories until we reach the fs root - if (dir === dirname(dir)) break; - dir = dirname(dir); - } - return null; - } - - /** - * Given an absolute path to an imported module (it may not exist), - * follow the module system rules for module resolution - * to determine the exact path to the module file, or return null - * if it does not exist. - */ - private resolveDirectPath(path: string) { - // first check the direct path as-is - if (exists(path)) { - // check as if it is a directory - if (exists(join(path, 'index.ren'))) return join(path, 'index.ren'); - // return the path as long as it's not a directory - if (!lstat(path).isDirectory()) return path; - } - // then check it with a .ren extension - if (exists(`${path}.ren`)) return `${path}.ren`; - // doesn't exist according to the rules of the module system - return null; - } - - /** - * If an AST wasn't passed to the constructor, it means this module is being loaded - * during type checking. We need to parse the AST from the code ourselves if that is the - * case. In addition, the type checker expects the AST to be reduced. - */ - parseModule() { - // read the file - const contents = readFile(this.path).toString(); - // parse it - return parse(contents) as Program; - } -} diff --git a/src/syntax/ASTNode.ts b/src/syntax/ASTNode.ts deleted file mode 100644 index 6bf2cfb..0000000 --- a/src/syntax/ASTNode.ts +++ /dev/null @@ -1,34 +0,0 @@ -import { Location } from '~/parser/Tokenizer'; -import { TType } from '~/typecheck/types'; -import INodeVisitor from './INodeVisitor'; - - -/** - * Base class for all AST (Abstract Syntax Tree) nodes. - * - * An "Abstract Syntax Tree" is a tree containing the simplest logical syntax elements - * that correspond to semantically significant parts of the source code. - * The original source code cannot be produced from it, but an equivalent version - * of the source code can be produced. 
- * These classes are the primary data structures used throughout the compiler - * frontend, and they contain operations such as: - * - resolving the type of a syntax node - * - transforming a syntax node to IR instructions - * These nodes also store location information, which can be used in errors - * to indicate the location of an error in the source code. - */ -export default abstract class ASTNode { - locations: { [key: string]: Location }; - type: TType; - - registerLocation(key: string, value: Location) { - if (!this.locations) this.locations = {}; - this.locations[key] = value; - } - - createAndRegisterLocation(key: string, start: Location, end: Location) { - this.registerLocation(key, start.merge(end)); - } - - abstract visit(visitor: INodeVisitor): T; -} \ No newline at end of file diff --git a/src/syntax/ModuleRoot.ts b/src/syntax/ModuleRoot.ts new file mode 100644 index 0000000..90429e1 --- /dev/null +++ b/src/syntax/ModuleRoot.ts @@ -0,0 +1,32 @@ +import { TokenType } from '~/parser/lexer'; +import { ImportDeclaration } from './declarations/ImportDeclaration'; +import { NodeBase, SyntaxType, Declaration } from '~/syntax/environment'; +import { ExportDeclaration, ExportForwardDeclaration } from '~/syntax'; +import { ParseFunc, seq, repeat, select, tok } from '~/parser/parser'; + + +export interface ModuleRoot extends NodeBase { + readonly syntaxType: SyntaxType.ModuleRoot; + readonly imports: ReadonlyArray; + readonly declarations: ReadonlyArray; +} + +export function register(Declaration: ParseFunc, ExportDeclaration: ParseFunc) { + const ModuleRoot: ParseFunc = seq( + repeat(ImportDeclaration, '*'), + repeat(select( + Declaration, + ExportDeclaration, + ExportForwardDeclaration + ), '*'), + tok(TokenType.EOF), + ([imports, declarations], location) => ({ + syntaxType: SyntaxType.ModuleRoot as SyntaxType.ModuleRoot, + location, + imports, + declarations + }) + ); + + return { ModuleRoot }; +} diff --git a/src/syntax/declarations/ConstantDeclaration.ts b/src/syntax/declarations/ConstantDeclaration.ts index 68bbe9a..2ba0600 100644 --- a/src/syntax/declarations/ConstantDeclaration.ts +++ b/src/syntax/declarations/ConstantDeclaration.ts @@ -1,42 +1,30 @@ -import { Token, TokenType } from '~/parser/Tokenizer'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { parser, nonTerminal } from '~/parser/Parser'; -import { Declaration } from './Program'; -import { Expression } from '~/syntax/expressions/Expression'; +import { NodeBase, SyntaxType, Expression } from '~/syntax/environment'; +import { ParseFunc, seq, tok, optional } from '~/parser/parser'; +import { TokenType, Token } from '~/parser/lexer'; -/** - * ConstantDeclaration ::= 'const' IDENT? 
EQUALS Expression - */ -@nonTerminal({ implements: Declaration }) -export class ConstantDeclaration extends Declaration { - @parser('const', { definite: true }) - setConstToken(token: Token) { - this.registerLocation('self', token.getLocation()); - } - - @parser(TokenType.IDENT, { optional: true }) - setName(token: Token) { - this.name = token.image; - this.registerLocation('name', token.getLocation()); - } - - @parser(TokenType.EQUALS, { err: 'CONST_MISSING_EQUALS' }) setEquals() {} - - @parser(Expression, { err: 'INVALID_EXPRESSION' }) - setValue(exp: Expression) { - this.value = exp; - this.createAndRegisterLocation('self', this.locations.self, exp.locations.self); - } - - name: string = ''; +export interface ConstantDeclaration extends NodeBase { + syntaxType: SyntaxType.ConstantDeclaration; + name: Optional; value: Expression; - - visit(visitor: INodeVisitor): T { - return visitor.visitConstantDeclaration(this); - } - - prettyName() { - return `const ${this.name}`; - } -} \ No newline at end of file +} + +export function register(Expression: ParseFunc) { + /** + * ConstantDeclaration ::= 'const' IDENT? EQUALS Expression + */ + const ConstantDeclaration: ParseFunc = seq( + tok('const'), + optional(tok(TokenType.IDENT)), + tok('='), + Expression, + ([_1, name, _2, value], location) => ({ + syntaxType: SyntaxType.ConstantDeclaration as SyntaxType.ConstantDeclaration, + location, + name, + value + }) + ); + + return { ConstantDeclaration }; +} diff --git a/src/syntax/declarations/ExportDeclaration.ts b/src/syntax/declarations/ExportDeclaration.ts index 3e25aa6..954c4ad 100644 --- a/src/syntax/declarations/ExportDeclaration.ts +++ b/src/syntax/declarations/ExportDeclaration.ts @@ -1,93 +1,86 @@ -import { Location, Token, TokenType } from '~/parser/Tokenizer'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { parser, nonTerminal, exp, ParseResult } from '~/parser/Parser'; -import { Declaration, NonImportDeclaration } from './Program'; -import { NameAlias } from './ImportDeclaration'; +import { NodeBase, SyntaxType, Declaration } from '~/syntax/environment'; +import { ParseFunc, seq, tok, select, repeat } from '~/parser/parser'; +import { Token, TokenType } from '~/parser/lexer'; /** - * ExportDeclaration ::= DefaultExportDeclaration | NamedExportDeclaration - * - * We implemented this using the inheritance model because it ended up being simpler that way. 
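// For reference: the register() pattern used by ConstantDeclaration above (and by the
// other syntax modules below) exists to break circular imports between grammar rules.
// Each module exports a register function that takes the ParseFuncs it depends on and
// returns the ones it defines; SyntaxEnvironment (in syntax/environment.ts) performs the
// actual wiring, roughly:
//
//     const { ConstantDeclaration } = register_ConstantDeclaration(Expression);
//
// where register_ConstantDeclaration is an import alias for this module's register
// export, in the style of the aliased imports at the bottom of environment.ts.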
+ * Cases: + * - Default export of a name (export name = default, value name = value name, NO value) + * - Named export of a name (export name AND value name = value name, NO value) + * - Named export with an alias (export name = alias, value name = value name, NO value) + * - Default export of a named value (export name = default, value name = name from value, value = value) + * - Default export of an anonymous value (export name = default, NO value name, value = value) + * - Named export of a named value (export name AND value name = name from value, value = value) */ -@nonTerminal({ abstract: true, implements: NonImportDeclaration }) -export abstract class ExportDeclaration extends NonImportDeclaration { - /** - * Cases: - * - Default export of a name (export name = default, value name = value name, NO value) - * - Named export of a name (export name AND value name = value name, NO value) - * - Named export with an alias (export name = alias, value name = value name, NO value) - * - Default export of a named value (export name = default, value name = name from value, value = value) - * - Default export of an anonymous value (export name = default, NO value name, value = value) - * - Named export of a named value (export name AND value name = name from value, value = value) - */ - exports: { - // export name is always present - exportName: string, - exportNameLocation: Location, - // value name is always present (##DEFAULT for anonymous default exports) - valueName: string, - valueNameLocation: Location, - // value is not present for exports of existing names - value?: Declaration, - }[] = []; - - visit(visitor: INodeVisitor): T { - return visitor.visitExportDeclaration(this); - } - - protected addExport(exportName: string, exportNameLocation: Location, valueName: string, valueNameLocation: Location, value?: Declaration) { - this.exports.push({ exportName, exportNameLocation, valueName, valueNameLocation, value }); - } +interface Export { + // export name is always present but may not be set TODO: this should ALWAYS be present, we should split out anonymous declarations + readonly exportName: Optional; + // value name is present for all but anonymous default exports + readonly valueName: Optional; + // value is not present for exports of existing names + readonly value?: Declaration; } -@nonTerminal({ implements: ExportDeclaration }) -export class DefaultExportDeclaration extends ExportDeclaration { - @parser('export') setExportToken() {} +export interface ExportDeclaration extends NodeBase { + readonly syntaxType: SyntaxType.ExportDeclaration; + readonly exports: ReadonlyArray; +} - @parser('default') - setDefaultToken(token: Token) { - this.registerLocation('default', token.getLocation()); - } +export function register(Declaration: ParseFunc) { + const DefaultExportDeclaration: ParseFunc = seq( + tok('export'), + tok('default'), + select( + Declaration, + tok(TokenType.IDENT) + ), + ([_, def, value], location) => ({ + location, + syntaxType: SyntaxType.ExportDeclaration as SyntaxType.ExportDeclaration, + exports: Token.isToken(value) + ? [{ exportName: def, valueName: value }] + : [{ exportName: def, valueName: value.name, value }] + }) + ); - @parser([Declaration, TokenType.IDENT], { definite: true }) - setValue(value: Declaration | Token) { - if (value instanceof Token) { - super.addExport('default', this.locations.default, value.image, value.getLocation()); - } else { - // anonymous declarations have no name, and thus no name location - const loc = value.name ? 
value.locations.name : this.locations.default; - super.addExport('default', this.locations.default, value.name, loc, value); - } - } -} + /** + * NamedExports ::= '{' (IDENT | (IDENT 'as' IDENT))(+ sep ',') '}' + */ + const NamedExports: ParseFunc = seq( + tok('{'), + repeat(select( + seq( + tok(TokenType.IDENT), + tok('as'), + tok(TokenType.IDENT), + ([name, _, alias]) => ({ exportName: alias, valueName: name }) + ), + seq(tok(TokenType.IDENT), name => ({ exportName: name, valueName: name })) + ), '+', tok(',')), + tok('}'), + ([_1, names, _2]) => names + ); -/** - * NamedExports ::= LBRACE (IDENT | ImportAlias) (+ sep COMMA) RBRACE - */ -const NamedExports = { - '{': exp(TokenType.LBRACE, { definite: true }), - exports: exp([NameAlias, TokenType.IDENT], { repeat: '+', sep: TokenType.COMMA }), - '}': TokenType.RBRACE -}; + const NamedExportDeclaration: ParseFunc = seq( + tok('export'), + select( + Declaration, + NamedExports + ), + ([_, value], location) => ({ + location, + syntaxType: SyntaxType.ExportDeclaration as SyntaxType.ExportDeclaration, + exports: Array.isArray(value) ? value : [{ exportName: value.name, valueName: value.name, value }] + }) + ); -@nonTerminal({ implements: ExportDeclaration }) -export class NamedExportDeclaration extends ExportDeclaration { - @parser('export') setExportToken() {} + /** + * ExportDeclaration ::= DefaultExportDeclaration | NamedExportDeclaration + */ + const ExportDeclaration: ParseFunc = select( + DefaultExportDeclaration, + NamedExportDeclaration + ); - @parser([Declaration, NamedExports], { definite: true }) - setValue(value: Declaration | ParseResult) { - if (value instanceof Declaration) { - super.addExport(value.name, value.locations.name, value.name, value.locations.name, value); - } else { - for (const e of value.exports as (ParseResult | Token)[]) { - if (e instanceof Token) { - super.addExport(e.image, e.getLocation(), e.image, e.getLocation()); - } else { - const alias = e.alias as Token, name = e.name as Token; - super.addExport(alias.image, alias.getLocation(), name.image, name.getLocation()); - } - } - } - } + return { ExportDeclaration }; } diff --git a/src/syntax/declarations/ExportForwardDeclaration.ts b/src/syntax/declarations/ExportForwardDeclaration.ts index c9922ea..9541905 100644 --- a/src/syntax/declarations/ExportForwardDeclaration.ts +++ b/src/syntax/declarations/ExportForwardDeclaration.ts @@ -1,120 +1,78 @@ -import { Location, Token, TokenType } from '~/parser/Tokenizer'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { parser, nonTerminal, exp, ParseResult } from '~/parser/Parser'; -import { NonImportDeclaration } from './Program'; import { ImportList } from './ImportDeclaration'; +import { ParseFunc, seq, select, tok, optional } from '~/parser/parser'; +import { Token, TokenType } from '~/parser/lexer'; +import { NodeBase, SyntaxType } from '~/syntax/environment'; -/** - * ExportForwardDeclaration ::= DefaultExportForwardDeclaration | NamedExportForwardDeclaration - * - * We implemented this using the inheritance model because it ended up being simpler that way. 
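// For reference, how the export forms above map onto the Export records (names shown as
// plain strings here; the real records hold Tokens), using illustrative names:
//   export default foo                exports: [{ exportName: 'default', valueName: 'foo' }]
//   export <declaration named foo>    exports: [{ exportName: 'foo', valueName: 'foo', value: <that declaration> }]
//   export { foo, bar as baz }        exports: [{ exportName: 'foo', valueName: 'foo' },
//                                               { exportName: 'baz', valueName: 'bar' }]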
- */ -@nonTerminal({ abstract: true, implements: NonImportDeclaration }) -export abstract class ExportForwardDeclaration extends NonImportDeclaration { - moduleName: string; - forwards: { - importName: string, - importLocation: Location, - exportName: string, - exportLocation: Location, - }[] = []; - - visit(visitor: INodeVisitor): T { - return visitor.visitExportForwardDeclaration(this); - } +export interface Forward { + readonly importName: Token; + readonly exportName: Token; +} - protected addForward(importName: string, importLocation: Location, exportName: string, exportLocation: Location) { - this.forwards.push({ importName, importLocation, exportName, exportLocation }); - } +export interface ExportForwardDeclaration extends NodeBase { + readonly syntaxType: SyntaxType.ExportForwardDeclaration; + readonly moduleName: Token; + readonly forwards: ReadonlyArray; } /** * DefaultExportForwards ::= COLON (LBRACE IDENT RBRACE | '*') */ -const DefaultExportForwards = { - ':': exp(TokenType.COLON, { definite: true }), - value: [{ - '{': exp(TokenType.LBRACE, { definite: true }), - name: TokenType.IDENT, - '}': TokenType.RBRACE, - }, '*'] -}; +const DefaultExportForwards: ParseFunc = seq( + tok(':'), + select( + tok('*'), + seq( + tok('{'), + tok(TokenType.IDENT), + tok('}'), + ([_1, name, _2]) => name + ) + ), + ([_, exp]) => exp +); /** * DefaultExportForwardDeclaration ::= EXPORT DEFAULT FROM STRING_LITERAL DefaultExportForwards? */ -@nonTerminal({ implements: ExportForwardDeclaration }) -// @ts-ignore: Decorator registers this class, so it is used -class DefaultExportForwardDeclaration extends ExportForwardDeclaration { - @parser('export') setExportToken() {} - - @parser('default') - setDefaultToken(token: Token) { - this.registerLocation('default', token.getLocation()); - } - - @parser('from', { definite: true }) setFromToken() {} - - @parser(TokenType.STRING_LITERAL, { err: 'INVALID_IMPORT_MODULE' }) - setModuleName(token: Token) { - this.moduleName = token.value; - this.registerLocation('moduleName', token.getLocation()); - super.addForward('default', this.locations.default, 'default', this.locations.default); - } - - @parser(DefaultExportForwards, { optional: true }) - setForwards(result: ParseResult) { - this.forwards = []; - const value = result.value as (ParseResult | Token); - if (value instanceof Token) super.addForward('*', value.getLocation(), 'default', this.locations.default); - else { - const name = value.name as Token; - super.addForward(name.image, name.getLocation(), 'default', this.locations.default); - } - } -} +const DefaultExportForwardDeclaration: ParseFunc = seq( + tok('export'), + tok('default'), + tok('from'), + tok(TokenType.STRING_LITERAL), + optional(DefaultExportForwards), + ([_1, def, _2, moduleName, fwd], location) => ({ + syntaxType: SyntaxType.ExportForwardDeclaration as SyntaxType.ExportForwardDeclaration, + location, + moduleName, + forwards: [{ importName: fwd || def, exportName: def }] + }) +); /** - * DefaultExportForwardDeclaration ::= EXPORT DEFAULT FROM STRING_LITERAL DefaultExportForwards? 
+ * DefaultExportForwardDeclaration ::= EXPORT FROM STRING_LITERAL ':' (ImportList | '*') */ -@nonTerminal({ implements: ExportForwardDeclaration }) -// @ts-ignore: Decorator registers this class, so it is used -class NamedExportForwardDeclaration extends ExportForwardDeclaration { - @parser('export') setExportToken() {} - - @parser('from', { definite: true }) setFromToken() {} - - @parser(TokenType.STRING_LITERAL, { err: 'INVALID_IMPORT_MODULE' }) - setModuleName(token: Token) { - this.moduleName = token.value; - this.registerLocation('moduleName', token.getLocation()); - } - - @parser(TokenType.COLON, { err: 'INVALID_IMPORT' }) setColon() {} +const NamedExportForwardDeclaration: ParseFunc = seq( + tok('export'), + tok('from'), + tok(TokenType.STRING_LITERAL), + tok(':'), + select( + seq(ImportList, imps => imps.map(({ importName, aliasName }) => ({ importName, exportName: aliasName }))), + seq(tok('*'), _ => [{ importName: _, exportName: _ }]) + ), + ([_1, _2, moduleName, _3, forwards], location) => ({ + syntaxType: SyntaxType.ExportForwardDeclaration as SyntaxType.ExportForwardDeclaration, + location, + moduleName, + forwards + }) +); - @parser([...ImportList, '*']) - setValue(result: ParseResult | Token) { - if (result instanceof Token) { - // wildcard to wildcard - super.addForward('*', result.getLocation(), '*', result.getLocation()); - } else { - if (result.defaultImport instanceof Token) { - const defaultImport = result.defaultImport as Token; - this.addForward('default', defaultImport.getLocation(), defaultImport.image, defaultImport.getLocation()); - } - if (result['*'] instanceof Token) { - const w = result['*'] as Token, a = result.alias as Token; - this.addForward('*', w.getLocation(), a.image, a.getLocation()); - } - if (Array.isArray(result.names)) { - for (const i of result.names as (ParseResult | Token)[]) { - const [importName, exportName] = (i instanceof Token) ? [i, i] - : (i.name) ? 
[i.name as Token, i.alias as Token] - : [i['*'] as Token, i.alias as Token]; - this.addForward(importName.image, importName.getLocation(), exportName.image, exportName.getLocation()); - } - } - } - } -} +/** + * ExportForwardDeclaration ::= DefaultExportForwardDeclaration | NamedExportForwardDeclaration + */ +export const ExportForwardDeclaration: ParseFunc = select( + DefaultExportForwardDeclaration, + NamedExportForwardDeclaration +); diff --git a/src/syntax/declarations/FunctionDeclaration.ts b/src/syntax/declarations/FunctionDeclaration.ts index 6c8dd16..a0d5fd1 100644 --- a/src/syntax/declarations/FunctionDeclaration.ts +++ b/src/syntax/declarations/FunctionDeclaration.ts @@ -1,109 +1,93 @@ -import { Token, TokenType } from '~/parser/Tokenizer'; -import ASTNode from '~/syntax/ASTNode'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { parser, nonTerminal, exp, ParseResult } from '~/parser/Parser'; -import { Declaration } from './Program'; import { TypeParamList, TypeParam } from './TypeDeclaration'; -import { Statement } from '~/syntax/statements/Statement'; -import { Block } from '~/syntax/statements/Block'; -import { Expression } from '~/syntax/expressions/Expression'; -import { Type } from '~/syntax/types/Type'; +import { ParseFunc, seq, tok, repeat, select, optional } from '~/parser/parser'; +import { TypeNode, Expression, NodeBase, SyntaxType, Statement } from '~/syntax/environment'; +import { Token, TokenType } from '~/parser/lexer'; +import { Block } from '~/syntax'; -/** - * Param ::= Type IDENT - */ -export class Param extends ASTNode { - @parser(Type, { definite: true }) - setType(type: Type) { - this.typeNode = type; - } - - @parser(TokenType.IDENT, { err: 'INVALID_PARAMETER_NAME' }) - setName(name: Token) { - this.name = name.image; - this.registerLocation('name', name.getLocation()); - } - - name: string; - typeNode: Type; - - visit(visitor: INodeVisitor): T { - return visitor.visitParam(this); - } - - prettyName() { - return `${this.type} ${this.name}`; - } +export interface Param extends NodeBase { + readonly syntaxType: SyntaxType.Param; + readonly name: Token; + readonly typeNode: Optional; // optional to support lambda params } -/** - * ParameterList ::= LPAREN Param(* sep COMMA) RPAREN - */ -const ParameterList = { - '(': exp(TokenType.LPAREN, { definite: true }), - params: exp(Param, { repeat: '*', sep: TokenType.COMMA }), - ')': exp(TokenType.RPAREN, { err: 'MISSING_CLOSE_PAREN' }), -}; - -/** - * FunctionBody ::= Block | Expression | Statement - * - * Put block before expression because there is a conflict - * between empty blocks and empty structs. - */ -export const FunctionBody = [Block, Expression, Statement]; - -/** - * FunctionDeclaration ::= 'func' Type IDENT TypeParamList? 
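// For reference, the forward-declaration forms accepted above, with illustrative module
// paths and names (shown as plain strings; the real records hold Tokens):
//   export default from "./mod"              forwards: [{ importName: 'default', exportName: 'default' }]
//   export default from "./mod": { name }    forwards: [{ importName: 'name', exportName: 'default' }]
//   export from "./mod": { a, b as c }       forwards: [{ importName: 'a', exportName: 'a' },
//                                                        { importName: 'b', exportName: 'c' }]
//   export from "./mod": *                   forwards: [{ importName: '*', exportName: '*' }]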
ParameterList FAT_ARROW FunctionBody - */ -@nonTerminal({ implements: Declaration }) -export class FunctionDeclaration extends Declaration { - @parser('func', { definite: true }) - setFuncToken(token: Token) { - this.registerLocation('self', token.getLocation()); - } - - @parser(Type, { err: 'INVALID_RETURN_TYPE' }) - setReturnType(type: Type) { - this.returnType = type; - } - - @parser(TokenType.IDENT, { optional: true }) - setFunctionName(token: Token) { - this.name = token.image; - this.registerLocation('name', token.getLocation()); - } - - @parser(TypeParamList, { optional: true }) - setTypeParams(result: ParseResult) { - this.typeParams = result.params as TypeParam[]; - } - - @parser(ParameterList, { err: 'INVALID_PARAMETER_LIST' }) - setParams(result: ParseResult) { - this.params = result.params as Param[]; - } - - @parser(TokenType.FAT_ARROW, { err: 'INVALID_FAT_ARROW' }) setFatArrow() {} - - @parser(FunctionBody) - setFunctionBody(body: Expression | Statement) { - this.body = body; - this.createAndRegisterLocation('self', this.locations.self, body.locations.self); - } - - returnType: Type; - name: string = ''; - typeParams: TypeParam[] = []; - params: Param[]; - body: Expression | Statement; - - visit(visitor: INodeVisitor): T { - return visitor.visitFunctionDeclaration(this); - } +export interface FunctionDeclaration extends NodeBase { + readonly syntaxType: SyntaxType.FunctionDeclaration; + readonly returnType: TypeNode; + readonly name: Optional; + readonly typeParams: ReadonlyArray; + readonly params: ReadonlyArray; + readonly body: Expression | Statement; +} - prettyName() { - return `${this.name}(${this.params.map(p => p.prettyName()).join(', ')})`; - } -} \ No newline at end of file +export function register( + TypeNode: ParseFunc, + Expression: ParseFunc, + Statement: ParseFunc, + Block: ParseFunc, + TypeParamList: ParseFunc +) { + /** + * Param ::= Type IDENT + */ + const Param: ParseFunc = seq( + TypeNode, + tok(TokenType.IDENT), + ([typeNode, name], location) => ({ + syntaxType: SyntaxType.Param as SyntaxType.Param, + location, + typeNode, + name + }) + ); + + /** + * ParameterList ::= LPAREN Param(* sep COMMA) RPAREN + */ + const ParamList: ParseFunc = seq( + tok('('), + repeat(Param, '*', tok(',')), + tok(')'), + ([_1, params, _2]) => params + ); + + /** + * FunctionBody ::= Block | Expression | Statement + * + * Put block before expression because there is a conflict + * between empty blocks and empty structs. + */ + const FunctionBody: ParseFunc = select( + Block, + Expression, + Statement + ); + + /** + * FunctionDeclaration ::= 'func' Type IDENT? TypeParamList? ParamList FAT_ARROW FunctionBody + */ + const FunctionDeclaration: ParseFunc = seq( + tok('func'), + TypeNode, + optional(tok(TokenType.IDENT)), + optional(TypeParamList), + ParamList, + tok('=>'), + FunctionBody, + ([_1, returnType, name, typeParams, params, _2, body], location) => ({ + syntaxType: SyntaxType.FunctionDeclaration as SyntaxType.FunctionDeclaration, + location, + name, + returnType, + typeParams: typeParams ? 
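// e.g. (illustrative, assuming `int` is a built-in type name):
//   func int add(int a, int b) => a + b
// parses as returnType `int`, name `add`, no type params, params [int a, int b], and an
// expression body; the optional name and type-param list, and the block/expression/statement
// body distinction, are all handled by the optional()/select() combinators above.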
typeParams.params : [], + params, + body + }) + ); + + return { + FunctionDeclaration, + Param, + FunctionBody + }; +} diff --git a/src/syntax/declarations/ImportDeclaration.ts b/src/syntax/declarations/ImportDeclaration.ts index 7f063f6..ef6d33a 100644 --- a/src/syntax/declarations/ImportDeclaration.ts +++ b/src/syntax/declarations/ImportDeclaration.ts @@ -1,39 +1,41 @@ -import { Location, Token, TokenType } from '~/parser/Tokenizer'; -import ASTNode from '~/syntax/ASTNode'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { parser, exp, ParseResult } from '~/parser/Parser'; +import { TokenType, Token } from '~/parser/lexer'; +import { NodeBase, SyntaxType } from '~/syntax/environment'; +import { ParseFunc, seq, tok, select, repeat } from '~/parser/parser'; /** * NameAlias ::= IDENT 'as' IDENT */ -export const NameAlias = { - name: TokenType.IDENT, - as: exp('as', { definite: true }), - alias: exp(TokenType.IDENT, { err: 'INVALID_IMPORT' }) -}; +export const NameAlias: ParseFunc = seq( + tok(TokenType.IDENT), + tok('as'), + tok(TokenType.IDENT), + ([importName, _, aliasName]) => ({ importName, aliasName }) +); /** * WildcardImport ::= '*' 'as' IDENT */ -const WildcardImport = { - '*': exp('*', { definite: true }), - as: 'as', - alias: TokenType.IDENT, -}; +const WildcardImport: ParseFunc = seq( + tok('*'), + tok('as'), + tok(TokenType.IDENT), + ([importName, _, aliasName]) => ({ importName, aliasName }) +); /** * NamedImports ::= LBRACE (AliasImport | IDENT | WildcardImport)+(sep COMMA) RBRACE */ -const NamedImports = { - '{': exp(TokenType.LBRACE, { definite: true }), - names: exp([NameAlias, TokenType.IDENT, WildcardImport], { - repeat: '+', - sep: TokenType.COMMA, - err: 'INVALID_IMPORT' - }), - '}': exp(TokenType.RBRACE, { err: 'INVALID_IMPORT' }) -}; +const NamedImports: ParseFunc = seq( + tok('{'), + repeat(select( + NameAlias, + tok(TokenType.IDENT), + WildcardImport + ), '+', tok(',')), + tok('}'), + ([_1, names, _2]) => names.map(n => Token.isToken(n) ? 
{ importName: n, aliasName: n } : n) +); /** * ImportList ::= NamedImports # just named imports @@ -42,66 +44,50 @@ const NamedImports = { * | IDENT COMMA WildcardImport # default and wildcard import * | IDENT # just default import */ -export const ImportList = [NamedImports, { - defaultImport: TokenType.IDENT, - ',': TokenType.COMMA, - named: exp(NamedImports, { definite: true, flatten: true }), -}, WildcardImport, { - defaultImport: TokenType.IDENT, - ',': TokenType.COMMA, - wildcard: exp(WildcardImport, { definite: true, flatten: true }), -}, { - defaultImport: exp(TokenType.IDENT, { definite: true }) -}]; +export const ImportList: ParseFunc = select( + NamedImports, + seq( + tok(TokenType.IDENT), + tok(','), + NamedImports, + ([def, _, named]) => [defaultImport(def), ...named] + ), + seq(WildcardImport, i => [i]), + seq( + tok(TokenType.IDENT), + tok(','), + WildcardImport, + ([def, _, wildcard]) => [defaultImport(def), wildcard] + ), + seq(tok(TokenType.IDENT), i => [defaultImport(i)]) +); -/** - * ImportDeclaration ::= 'import' 'from' STRING_LITERAL COLON ImportList - */ -export class ImportDeclaration extends ASTNode { - @parser('import', { definite: true }) setImportToken() {} - @parser('from', { err: 'INVALID_IMPORT' }) setFromToken() {} +interface Import { + importName: Token; + aliasName: Token; +} - @parser(TokenType.STRING_LITERAL, { err: 'INVALID_IMPORT_MODULE' }) - setModuleName(token: Token) { - this.moduleName = token.value; - this.registerLocation('moduleName', token.getLocation()); - } +export interface ImportDeclaration extends NodeBase { + readonly syntaxType: SyntaxType.ImportDeclaration; + readonly moduleName: Token; + readonly imports: ReadonlyArray; +} - @parser(TokenType.COLON, { err: 'INVALID_IMPORT' }) setColonToken() {} - - @parser(ImportList, { err: 'INVALID_IMPORT' }) - setImports(imports: ParseResult) { - if (imports.defaultImport instanceof Token) { - const defaultImport = imports.defaultImport as Token; - this.addImport('default', defaultImport.getLocation(), defaultImport.image, defaultImport.getLocation()); - } - if (imports['*'] instanceof Token) { - const w = imports['*'] as Token, a = imports.alias as Token; - this.addImport('*', w.getLocation(), a.image, a.getLocation()); - } - if (Array.isArray(imports.names)) { - for (const i of imports.names as (ParseResult | Token)[]) { - const [importName, aliasName] = (i instanceof Token) ? [i, i] - : (i.name) ? 
[i.name as Token, i.alias as Token] - : [i['*'] as Token, i.alias as Token]; - this.addImport(importName.image, importName.getLocation(), aliasName.image, aliasName.getLocation()); - } - } - } +/** + * ImportDeclaration ::= 'import' 'from' STRING_LITERAL ':' ImportList + */ +export const ImportDeclaration: ParseFunc = seq( + tok('import'), + tok('from'), + tok(TokenType.STRING_LITERAL), + tok(':'), + ImportList, + ([_1, _2, moduleName, _3, imports], location) => ({ + syntaxType: SyntaxType.ImportDeclaration as SyntaxType.ImportDeclaration, + location, + moduleName, + imports + }) +); - moduleName: string; - imports: { - importName: string, - importLocation: Location, - aliasName: string, - aliasLocation: Location, - }[] = []; - - visit(visitor: INodeVisitor): T { - return visitor.visitImportDeclaration(this); - } - - private addImport(importName: string, importLocation: Location, aliasName: string, aliasLocation: Location) { - this.imports.push({ importName, importLocation, aliasName, aliasLocation }); - } -} +const defaultImport = (token: Token) => ({ importName: token.with({ image: 'default' }), aliasName: token }); diff --git a/src/syntax/declarations/Program.ts b/src/syntax/declarations/Program.ts deleted file mode 100644 index 47f331d..0000000 --- a/src/syntax/declarations/Program.ts +++ /dev/null @@ -1,38 +0,0 @@ -import { Token, TokenType } from '~/parser/Tokenizer'; -import ASTNode from '~/syntax/ASTNode'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { parser, nonTerminal } from '~/parser/Parser'; -import { ImportDeclaration } from './ImportDeclaration'; - - -@nonTerminal({ abstract: true }) -export abstract class NonImportDeclaration extends ASTNode {} - -@nonTerminal({ abstract: true, implements: NonImportDeclaration }) -export abstract class Declaration extends NonImportDeclaration { - abstract name: string; -} - -/** - * Program ::= ImportDeclaration* NonImportDeclaration* EOF - */ -export class Program extends ASTNode { - @parser(ImportDeclaration, { repeat: '*', definite: true }) - setImports(value: ImportDeclaration[]) { - this.imports = value; - } - - @parser(NonImportDeclaration, { repeat: '*', definite: true }) - setDeclarations(value: NonImportDeclaration[]) { - this.declarations = value; - } - - @parser(TokenType.EOF, { definite: true }) setEof(_value: Token) {} - - imports: ImportDeclaration[] = []; - declarations: NonImportDeclaration[] = []; - - visit(visitor: INodeVisitor): T { - return visitor.visitProgram(this); - } -} diff --git a/src/syntax/declarations/TypeDeclaration.ts b/src/syntax/declarations/TypeDeclaration.ts index b71155f..8ec65d6 100644 --- a/src/syntax/declarations/TypeDeclaration.ts +++ b/src/syntax/declarations/TypeDeclaration.ts @@ -1,97 +1,78 @@ -import { Token, TokenType } from '~/parser/Tokenizer'; -import ASTNode from '~/syntax/ASTNode'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { parser, nonTerminal, exp, ParseResult } from '~/parser/Parser'; -import { Declaration } from './Program'; -import { Type } from '~/syntax/types/Type'; +import { TypeNode, SyntaxType, NodeBase } from '~/syntax/environment'; +import { ParseFunc, seq, optional, select, tok, repeat } from '~/parser/parser'; +import { TokenType, Token } from '~/parser/lexer'; -/** - * TypeConstraint ::= COLON Type - */ -const TypeConstraint = { - ':': exp(TokenType.COLON, { definite: true }), - type: exp(Type, { err: 'INVALID_TYPE_PARAM' }), -}; - -/** - * TypeParam = ('+' | '-')? IDENT TypeConstraint? 
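// For reference, the import forms accepted by ImportList/ImportDeclaration above, with
// illustrative module paths and names:
//   import from "./mod": foo                     default import, bound locally as `foo`
//   import from "./mod": { a, b as c, * as ns }  named imports (aliases and a wildcard are allowed inside the braces)
//   import from "./mod": * as ns                 wildcard import only
//   import from "./mod": foo, * as ns            default import plus wildcard import
// In each case the result is a list of { importName, aliasName } token pairs, with
// defaultImport() rewriting the import name to `default` for the default-import forms.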
- */ -export class TypeParam extends ASTNode { - @parser(['+', '-'], { optional: true, definite: true }) - setVarianceOp(op: Token) { - this.varianceOp = op.image; - this.registerLocation('variance', op.getLocation()); - } - - @parser(TokenType.IDENT, { definite: true, err: 'INVALID_TYPE_PARAM' }) - setName(name: Token) { - this.name = name.image; - this.registerLocation('name', name.getLocation()); - const start = this.varianceOp ? this.locations.variance : this.locations.name; - this.createAndRegisterLocation('self', start, this.locations.name); - } - - @parser(TypeConstraint, { optional: true }) - setConstraint(constraint: ParseResult) { - this.typeConstraint = constraint.type as Type; - const colon = constraint[':'] as Token; - this.createAndRegisterLocation('constraint', colon.getLocation(), this.typeConstraint.locations.self); - this.createAndRegisterLocation('self', this.locations.self, this.typeConstraint.locations.self); - } +export interface TypeParam extends NodeBase { + readonly syntaxType: SyntaxType.TypeParam; + readonly name: Token; + readonly varianceOp: Optional; + readonly typeConstraint: Optional; +} - name: string; - varianceOp?: string; - typeConstraint?: Type; - - visit(visitor: INodeVisitor): T { - return visitor.visitTypeParam(this); - } +export interface TypeParamList { + readonly params: ReadonlyArray; } -/** - * TypeParamList ::= '<' TypeParam(+ sep COMMA) '>' - */ -export const TypeParamList = { - '<': '<', - params: exp(TypeParam, { repeat: '+', sep: TokenType.COMMA }), - '>': exp('>', { definite: true }), +export interface TypeDeclaration extends NodeBase { + readonly syntaxType: SyntaxType.TypeDeclaration; + readonly name: Optional; + readonly typeParams: ReadonlyArray; + readonly typeNode: TypeNode; } /** - * TypeDeclaration ::= 'type' IDENT? TypeParamList? EQUALS Type + * Registration function to handle circular dependency. */ -@nonTerminal({ implements: Declaration }) -export class TypeDeclaration extends Declaration { - @parser('type', { definite: true }) - setTypeToken(token: Token) { - this.registerLocation('self', token.getLocation()); - } - - @parser(TokenType.IDENT, { optional: true }) - setName(token: Token) { - this.name = token.image; - this.registerLocation('name', token.getLocation()); - } +export function register(TypeNode: ParseFunc) { + /** + * TypeParam = ('+' | '-')? IDENT (':' TypeNode)? + */ + const TypeParam: ParseFunc = seq( + optional(select(tok('+'), tok('-'))), + tok(TokenType.IDENT), + optional(seq( + tok(':'), + TypeNode, + (([_, type]) => type) + )), + ([varianceOp, name, typeConstraint], location) => ({ + syntaxType: SyntaxType.TypeParam as SyntaxType.TypeParam, + location, + name, + varianceOp, + typeConstraint + }) + ); - @parser(TypeParamList, { optional: true }) - setTypeParams(result: ParseResult) { - this.typeParams = result.params as TypeParam[]; - } + const TypeParamList: ParseFunc = seq( + tok('<'), + repeat(TypeParam, '+', tok(',')), + tok('>'), + ([_1, params, _2]) => ({ params }) + ); - @parser(TokenType.EQUALS, { err: 'TYPE_DECL_MISSING_EQUALS' }) setEquals() {} + /** + * TypeDeclaration ::= 'type' IDENT? TypeParamList? EQUALS Type + */ + const TypeDeclaration: ParseFunc = seq( + tok('type'), + optional(tok(TokenType.IDENT)), + optional(TypeParamList), + tok('='), + TypeNode, + ([_1, name, params, _2, typeNode], location) => ({ + syntaxType: SyntaxType.TypeDeclaration as SyntaxType.TypeDeclaration, + location, + name, + typeParams: params ? 
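// e.g. (illustrative; `Box`, `T`, and `SomeBound` are placeholder names): `type Box<+T: SomeBound> = ...`
// would parse with name `Box` and a single TypeParam whose varianceOp is `+`, name is `T`,
// and typeConstraint is the type named by `SomeBound`; the variance operator and the
// constraint are both optional, as is the type-parameter list itself.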
params.params : [], + typeNode + }) + ); - @parser(Type, { err: 'INVALID_TYPE' }) - setType(type: Type) { - this.typeNode = type; - this.createAndRegisterLocation('self', this.locations.self, type.locations.self); + return { + TypeParam, + TypeParamList, + TypeDeclaration } - - name: string = ''; - typeParams: TypeParam[] = []; - typeNode: Type; - - visit(visitor: INodeVisitor): T { - return visitor.visitTypeDeclaration(this); - } -} \ No newline at end of file +} diff --git a/src/syntax/declarations/index.ts b/src/syntax/declarations/index.ts index 8323e61..557764f 100644 --- a/src/syntax/declarations/index.ts +++ b/src/syntax/declarations/index.ts @@ -1,7 +1,6 @@ -export * from './ImportDeclaration'; -export * from './Program'; -export * from './ConstantDeclaration'; -export * from './ExportDeclaration'; -export * from './ExportForwardDeclaration'; -export * from './TypeDeclaration'; -export * from './FunctionDeclaration'; +export { ImportDeclaration } from './ImportDeclaration'; +export { ConstantDeclaration } from './ConstantDeclaration'; +export { ExportDeclaration } from './ExportDeclaration'; +export { ExportForwardDeclaration } from './ExportForwardDeclaration'; +export { TypeDeclaration, TypeParam } from './TypeDeclaration'; +export { FunctionDeclaration, Param } from './FunctionDeclaration'; diff --git a/src/syntax/environment.ts b/src/syntax/environment.ts new file mode 100644 index 0000000..f286446 --- /dev/null +++ b/src/syntax/environment.ts @@ -0,0 +1,380 @@ +import { FileRange } from '~/core'; +import { ParseFunc, Parser, ParseResult, select, seq, repeat } from '~/parser/parser'; +import { + // module + ModuleRoot, ImportDeclaration, ExportDeclaration, ExportForwardDeclaration, + // declaration + TypeDeclaration, FunctionDeclaration, ConstantDeclaration, + // type + BuiltInType, StructType, TupleType, ArrayType, FunctionType, UnionType, IdentifierType, ParenthesizedType, + SpecificType, NamespaceAccessType, + // expression + IntegerLiteral, FloatLiteral, CharLiteral, BoolLiteral, StringLiteral, StructLiteral, + LambdaExpression, ParenthesizedExpression, TupleLiteral, IdentifierExpression, UnaryExpression, IfElseExpression, + FunctionApplication, BinaryExpression, ArrayAccess, FieldAccess, ArrayLiteral, VarDeclaration, + // statement + Block, ExpressionStatement, ForStatement, WhileStatement, DoWhileStatement, TryCatchStatement, ReturnStatement, + ThrowStatement, BreakStatement, ContinueStatement, + // other + Param, TypeParam +} from '.'; + +// all of the imports below are "internal" and required only for parsing +import { register as register_FunctionType } from './types/FunctionType'; +import { register as register_ParenthesizedType } from './types/ParenthesizedType'; +import { register as register_StructType } from './types/StructType'; +import { register as register_TupleType } from './types/TupleType'; +import { ArrayTypeSuffix } from './types/ArrayType'; +import { UnionTypeSuffix, register as register_UnionType } from './types/UnionType'; +import { NamespaceAccessTypeSuffix } from './types/NamespaceAccessType'; +import { SpecificTypeSuffix, register as register_SpecificTypeSuffix } from './types/SpecificType'; +import { register as register_StructLiteral } from './expressions/StructLiteral'; +import { register as register_LambdaExpression } from './expressions/LambdaExpression'; +import { register as register_ParenthesizedExpression } from './expressions/ParenthesizedExpression'; +import { register as register_TupleLiteral } from './expressions/TupleLiteral'; +import 
{ register as register_ArrayLiteral } from './expressions/ArrayLiteral'; +import { register as register_VarDeclaration } from './expressions/VarDeclaration'; +import { PostfixExpressionSuffix, register as register_UnaryExpression } from './expressions/UnaryExpression'; +import { register as register_IfElseExpression } from './expressions/IfElseExpression'; +import { FunctionApplicationSuffix, register as register_FunctionApplication } from './expressions/FunctionApplication'; +import { BinaryExpressionSuffix, register as register_BinaryExpression } from './expressions/BinaryExpression'; +import { ArrayAccessSuffix, register as register_ArrayAccess } from './expressions/ArrayAccess'; +import { FieldAccessSuffix } from './expressions/FieldAccess'; +import { register as register_Block } from './statements/Block'; +import { register as register_ExpressionStatement } from './statements/ExpressionStatement'; +import { register as register_ForStatement } from './statements/ForStatement'; +import { register as register_WhileStatement } from './statements/WhileStatement'; +import { register as register_DoWhileStatement } from './statements/DoWhileStatement'; +import { register as register_TryCatchStatement } from './statements/TryCatchStatement'; +import { register as register_ReturnStatement } from './statements/ReturnStatement'; +import { register as register_ThrowStatement } from './statements/ThrowStatement'; +import { register as register_ExportDeclaration } from './declarations/ExportDeclaration'; +import { register as register_TypeDeclaration } from './declarations/TypeDeclaration'; +import { register as register_FunctionDeclaration } from './declarations/FunctionDeclaration'; +import { register as register_ConstantDeclaration } from './declarations/ConstantDeclaration'; +import { register as register_ModuleRoot } from './ModuleRoot'; + +/** + * The full enumeration of types of syntax nodes in the language. + */ +export enum SyntaxType { + // #region module + ModuleRoot, + ImportDeclaration, + ExportDeclaration, + ExportForwardDeclaration, + // #endregion + // #region declarations + TypeDeclaration, + TypeParam, + FunctionDeclaration, + Param, + ConstantDeclaration, + // #endregion + // #region types + BuiltInType, + StructType, + TupleType, + ArrayType, + FunctionType, + UnionType, + IdentifierType, + ParenthesizedType, + SpecificType, + NamespaceAccessType, + // #endregion + // #region expressions + IntegerLiteral, + FloatLiteral, + CharLiteral, + BoolLiteral, + StringLiteral, + StructLiteral, + TupleLiteral, + ArrayLiteral, + IdentifierExpression, + ParenthesizedExpression, + VarDeclaration, + UnaryExpression, + BinaryExpression, + FunctionApplication, + ArrayAccess, + FieldAccess, + IfElseExpression, + LambdaExpression, + // #endregion + // #region statements + Block, + ExpressionStatement, + ForStatement, + WhileStatement, + DoWhileStatement, + TryCatchStatement, + ReturnStatement, + ThrowStatement, + BreakStatement, + ContinueStatement, + // #endregion +} + +/** + * Our syntax is understandably recursive. The only issue with that is that + * JS code doesn't handle circular references very well. It works just fine + * for types because of multiple passes, but values in JS can only be circularly + * defined in two instances: + * 1. Referencing an uninitialized value in a function (because it must be initialized before the function is called) + * 2. 
Referencing a scope-hoisted function (because JS allows functions to be accessed as long as they exist in scope) + * + * The former is impossible to do cross-module, but the second one can via this mechanism here. + * + * This function is responsible for dependency injection of all syntax types that + * are dependent on the four recursive syntax types: declarations, types, expressions, and statements. + * Any syntax type that is dependent on one of these needs to use a registration function + * that declares its dependencies. Likewise, any type that is dependent on a type that + * is dependent on a recursive type must also use a registration function like this. + * Types that are not directly or indirectly dependent on a recursive type can declare + * their parse functions in the module scope without issue. + * + * This function will import all the registration functions and manually + * inject the right dependencies. The four parse functions for the recursive + * types are defined within this function *after* they are injected, which works + * because of scope hoisting. + * + * Because all syntax types flow through here, this function is the source of truth + * for all syntax types and their parse functions. The return value is a massive + * object mapping all of the syntax types to a corresponding parse function. + * The compiler will use this environment to get the ModuleRoot function, which + * is required to parse a program. + */ +export function SyntaxEnvironment() { + // types + const { FunctionType } = register_FunctionType(TypeNode); + const { ParenthesizedType } = register_ParenthesizedType(TypeNode); + const { StructType } = register_StructType(TypeNode); + const { TupleType } = register_TupleType(TypeNode); + const { UnionTypeSuffix } = register_UnionType(TypeNode); + const { SpecificTypeSuffix, TypeArgList } = register_SpecificTypeSuffix(TypeNode); + + // expressions + const { StructLiteral } = register_StructLiteral(Expression); + const { ParenthesizedExpression } = register_ParenthesizedExpression(Expression); + const { TupleLiteral } = register_TupleLiteral(Expression); + const { ArrayLiteral } = register_ArrayLiteral(Expression); + const { VarDeclaration } = register_VarDeclaration(Expression); + const { PrefixExpression, PostfixExpressionSuffix } = register_UnaryExpression(Expression); + const { IfElseExpression } = register_IfElseExpression(Expression); + const { FunctionApplicationSuffix } = register_FunctionApplication(Expression, TypeArgList); + const { BinaryExpressionSuffix } = register_BinaryExpression(Expression); + const { ArrayAccessSuffix } = register_ArrayAccess(Expression); + + // statements + const { Block } = register_Block(Statement); + const { ExpressionStatement } = register_ExpressionStatement(Expression); + const { ForStatement } = register_ForStatement(Expression, Statement); + const { WhileStatement } = register_WhileStatement(Expression, Statement); + const { DoWhileStatement } = register_DoWhileStatement(Expression, Statement); + const { ReturnStatement } = register_ReturnStatement(Expression); + const { ThrowStatement } = register_ThrowStatement(Expression); + + // declarations + const { TypeDeclaration, TypeParamList } = register_TypeDeclaration(TypeNode); + const { FunctionDeclaration, Param, FunctionBody } = register_FunctionDeclaration(TypeNode, Expression, Statement, Block, TypeParamList); + const { ConstantDeclaration } = register_ConstantDeclaration(Expression); + + // requires Param/FunctionBody from FunctionDeclaration + const { 
LambdaExpression, ShorthandLambdaExpression } = register_LambdaExpression(Param, FunctionBody); + const { TryCatchStatement } = register_TryCatchStatement(Statement, Param); + + // module + const { ExportDeclaration } = register_ExportDeclaration(Declaration); + const { ModuleRoot } = register_ModuleRoot(Declaration, ExportDeclaration); + + function Declaration(parser: Parser): ParseResult { + const fn: ParseFunc = select( + TypeDeclaration, + FunctionDeclaration, + ConstantDeclaration + ); + return fn(parser); + } + + function TypeNode(parser: Parser): ParseResult { + const fn: ParseFunc = seq( + select( + BuiltInType, // must be before IdentifierType + FunctionType, // must be before IdentifierType, ParenthesizedType, TupleType + ParenthesizedType, // must be before TupleType + StructType, + TupleType, + IdentifierType + ), + repeat(select( + ArrayTypeSuffix, + UnionTypeSuffix, + SpecificTypeSuffix, + NamespaceAccessTypeSuffix + ), '*'), + ([base, suffixes]) => suffixes.reduce((base, suffix) => suffix.setBase(base), base) + ); + return fn(parser); + } + + function Expression(parser: Parser): ParseResult { + const fn: ParseFunc = seq( + select( + IntegerLiteral, + FloatLiteral, + CharLiteral, + BoolLiteral, // must be before IdentifierExpression + StringLiteral, + StructLiteral, + LambdaExpression, // must be before TupleLiteral, ParenthesizedExpression + ParenthesizedExpression, // must be before TupleLiteral + TupleLiteral, + ArrayLiteral, + VarDeclaration, // must be before ShorthandLambdaExpression, IdentifierExpression + ShorthandLambdaExpression, // must be before IdentifierExpression + IdentifierExpression, + PrefixExpression, + IfElseExpression + ), + repeat(select( + FunctionApplicationSuffix, // must be before BinaryExpression, PostfixExpression + BinaryExpressionSuffix, // must be before PostfixExpression + PostfixExpressionSuffix, + ArrayAccessSuffix, + FieldAccessSuffix + ), '*'), + ([base, suffixes]) => suffixes.reduce((base, suffix) => suffix.setBase(base), base) + ); + return fn(parser); + } + + function Statement(parser: Parser): ParseResult { + const fn: ParseFunc = select( + Block, // must be before ExpressionStatement + ExpressionStatement, + ForStatement, + WhileStatement, + DoWhileStatement, + TryCatchStatement, + ReturnStatement, + ThrowStatement, + BreakStatement, + ContinueStatement + ); + return fn(parser); + } + + return { + ModuleRoot + }; +} + +/** + * The base type of all syntax nodes. + * All nodes have: + * - a location (range of text in a file) + * + * All nodes also have a node type (of type SyntaxType). + * However, specifying that here does nothing and is actually + * more likely to hide errors than reveal them. The Node type + * expects every node type to have a 'nodeType' field defined, + * so it will be very clear if one doesn't. 
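+ *
+ * For example (illustrative only, mirroring the BoolLiteral node defined
+ * later in this patch), each concrete node declares its own narrowed
+ * syntaxType (the "node type" field referred to above) instead of
+ * inheriting a wider one from NodeBase:
+ *
+ *   interface BoolLiteral extends NodeBase {
+ *       syntaxType: SyntaxType.BoolLiteral;
+ *       value: Token;
+ *   }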
+ */ +export interface NodeBase { + readonly location: FileRange; +} + +/** + * The discriminated union of all declaration nodes + */ +export type Declaration = + | TypeDeclaration + | FunctionDeclaration + | ConstantDeclaration; + +/** + * The discriminated union of all type nodes + */ +export type TypeNode = + | BuiltInType + | StructType + | TupleType + | ArrayType + | FunctionType + | UnionType + | IdentifierType + | ParenthesizedType + | SpecificType + | NamespaceAccessType; + +type TypeNode_LeftRecursive = + | ArrayTypeSuffix + | UnionTypeSuffix + | NamespaceAccessTypeSuffix + | SpecificTypeSuffix; + +/** + * The discriminated union of all expression nodes + */ +export type Expression = + | IntegerLiteral + | FloatLiteral + | CharLiteral + | BoolLiteral + | StringLiteral + | StructLiteral + | TupleLiteral + | ArrayLiteral + | IdentifierExpression + | ParenthesizedExpression + | VarDeclaration + | UnaryExpression + | BinaryExpression + | FunctionApplication + | ArrayAccess + | FieldAccess + | IfElseExpression + | LambdaExpression; + +type Expression_LeftRecursive = + | FunctionApplicationSuffix + | BinaryExpressionSuffix + | PostfixExpressionSuffix + | ArrayAccessSuffix + | FieldAccessSuffix; + +/** + * The discriminated union of all statement nodes + */ +export type Statement = + | Block + | ExpressionStatement + | ForStatement + | WhileStatement + | DoWhileStatement + | TryCatchStatement + | ReturnStatement + | ThrowStatement + | BreakStatement + | ContinueStatement; + +/** + * The discriminated union of all syntax nodes + */ +export type Node = + // module root is a special node type + | ModuleRoot + // types related to the module system + | ImportDeclaration + | ExportDeclaration + | ExportForwardDeclaration + // types that do not fit into any of the general categories + | TypeParam + | Param + // the general categories + | Declaration + | TypeNode + | Expression + | Statement; diff --git a/src/syntax/expressions/ArrayAccess.ts b/src/syntax/expressions/ArrayAccess.ts index 728c412..f354a2c 100644 --- a/src/syntax/expressions/ArrayAccess.ts +++ b/src/syntax/expressions/ArrayAccess.ts @@ -1,31 +1,37 @@ -import { Expression } from './Expression'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { nonTerminal, parser } from '~/parser/Parser'; -import { TokenType, Token } from '~/parser/Tokenizer'; +import { NodeBase, SyntaxType, Expression } from '~/syntax/environment'; +import { ParseFunc, seq, tok } from '~/parser/parser'; -@nonTerminal({ implements: Expression, leftRecursive: 'setTarget' }) -export class ArrayAccess extends Expression { - setTarget(exp: Expression) { - this.target = exp; - } - - @parser(TokenType.LBRACK, { definite: true }) setOpenBracket() {} +export interface ArrayAccess extends NodeBase { + syntaxType: SyntaxType.ArrayAccess; + target: Expression; + index: Expression; +} - @parser(Expression, { err: 'INVALID_EXPRESSION' }) - setIndexExp(exp: Expression) { - this.indexExp = exp; - } +export interface ArrayAccessSuffix extends NodeBase { + syntaxType: SyntaxType.ArrayAccess; + index: Expression; + setBase(target: Expression): ArrayAccess; +} - @parser(TokenType.RBRACK, { err: 'ARRAY_ACCESS_MISSING_CLOSE_BRACKET' }) - setCloseBracket(token: Token) { - this.createAndRegisterLocation('self', this.target.locations.self, token.getLocation()); - } +export function register(Expression: ParseFunc) { + const ArrayAccessSuffix: ParseFunc = seq( + tok('['), + Expression, + tok(']'), + ([_1, index, _2], location) => ({ + syntaxType: SyntaxType.ArrayAccess as 
SyntaxType.ArrayAccess, + location, + index, + setBase(target: Expression) { + return { + ...this, + target, + location: this.location.merge(target.location) + } + } + }) + ); - target: Expression; - indexExp: Expression; - - visit(visitor: INodeVisitor) { - return visitor.visitArrayAccess(this); - } + return { ArrayAccessSuffix }; } diff --git a/src/syntax/expressions/ArrayLiteral.ts b/src/syntax/expressions/ArrayLiteral.ts index 0d92232..2035369 100644 --- a/src/syntax/expressions/ArrayLiteral.ts +++ b/src/syntax/expressions/ArrayLiteral.ts @@ -1,29 +1,23 @@ -import { Expression } from './Expression'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { nonTerminal, parser } from '~/parser/Parser'; -import { Token, TokenType } from '~/parser/Tokenizer'; +import { SyntaxType, Expression, NodeBase } from '~/syntax/environment'; +import { ParseFunc, seq, tok, repeat } from '~/parser/parser'; -@nonTerminal({ implements: Expression }) -export class ArrayLiteral extends Expression { - @parser(TokenType.LBRACK, { definite: true }) - setOpenBracket(token: Token) { - this.registerLocation('openBracket', token.getLocation()); - } - - @parser(Expression, { repeat: '*', err: 'INVALID_EXPRESSION', sep: TokenType.COMMA }) - setItems(items: Expression[]) { - this.items = items; - } +export interface ArrayLiteral extends NodeBase { + syntaxType: SyntaxType.ArrayLiteral; + items: ReadonlyArray; +} - @parser(TokenType.RBRACK) - setCloseBracket(token: Token) { - this.createAndRegisterLocation('self', this.locations.openBracket, token.getLocation()); - } +export function register(Expression: ParseFunc) { + const ArrayLiteral: ParseFunc = seq( + tok('['), + repeat(Expression, '*', tok(',')), + tok(']'), + ([_1, items, _2], location) => ({ + syntaxType: SyntaxType.ArrayLiteral as SyntaxType.ArrayLiteral, + location, + items + }) + ); - items: Expression[]; - - visit(visitor: INodeVisitor) { - return visitor.visitArrayLiteral(this); - } + return { ArrayLiteral }; } diff --git a/src/syntax/expressions/BinaryExpression.ts b/src/syntax/expressions/BinaryExpression.ts index 64de583..764e6e0 100644 --- a/src/syntax/expressions/BinaryExpression.ts +++ b/src/syntax/expressions/BinaryExpression.ts @@ -1,41 +1,45 @@ -import { Expression } from './Expression'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { nonTerminal, parser } from '~/parser/Parser'; -import { BinaryOperator, getOperatorMetadata, verifyMultiOperator } from '~/runtime/operators'; -import { TokenType, Token, Location } from '~/parser/Tokenizer'; -import { PostfixExpression } from '~/syntax/expressions/UnaryExpression'; +import { verifyMultiOperator, getOperatorMetadata } from '~/runtime/operators'; +import { NodeBase, SyntaxType, Expression } from '~/syntax/environment'; +import { Token, TokenType } from '~/parser/lexer'; +import { ParseFunc, seq, repeat, tok } from '~/parser/parser'; -@nonTerminal({ implements: Expression, before: [PostfixExpression], leftRecursive: 'setLeft' }) -export class BinaryExpression extends Expression { - setLeft(exp: Expression) { - this.left = exp; - } +export interface BinaryExpression extends NodeBase { + syntaxType: SyntaxType.BinaryExpression; + left: Expression; + symbol: Token; + right: Expression; +} - // operators have to be parsed as oneOrMore because < and > screw everything up - @parser(TokenType.OPER, { repeat: '+' }) - setOperator(tokens: Token[]) { - const oper = verifyMultiOperator(tokens); - this.symbol = oper.image; - this.registerLocation('oper', oper.getLocation()); - } +export 
interface BinaryExpressionSuffix extends NodeBase { + syntaxType: SyntaxType.BinaryExpression; + symbol: Token; + right: Expression; + setBase(left: Expression): BinaryExpression; +} - @parser(Expression, { definite: true }) - setRight(exp: Expression) { - this.right = exp; - this.createAndRegisterLocation('self', this.left.locations.self, this.right.locations.self); - // TODO: find a more performant way to do this so that it doesn't happen on every recursion - resolvePrecedence(this); - } +export function register(Expression: ParseFunc) { + const BinaryExpressionSuffix: ParseFunc = seq( + repeat(tok(TokenType.OPER), '+'), + Expression, + ([symbol, right], location) => ({ + syntaxType: SyntaxType.BinaryExpression as SyntaxType.BinaryExpression, + location, + symbol: verifyMultiOperator(symbol), // TODO: make sure this works + right, + setBase(left: Expression) { + return resolvePrecedence({ // TODO: this will get run more than necessary + syntaxType: this.syntaxType, + symbol: this.symbol, + right: this.right, + left, + location: this.location.merge(left.location) + }) + } + }) + ); - left: Expression; - right: Expression; - symbol: string; - operator: BinaryOperator; - - visit(visitor: INodeVisitor) { - return visitor.visitBinaryExpression(this); - } + return { BinaryExpressionSuffix }; } /** @@ -52,7 +56,7 @@ function resolvePrecedence(exp: BinaryExpression) { const operStack: Token[] = []; while (items.length) { const item = items.shift() as (Expression | Token); - if (item instanceof Expression) { + if (!Token.isToken(item)) { expStack.push(item); } else { while (operStack.length && shouldPopOperator(item, operStack[operStack.length - 1])) { @@ -66,12 +70,7 @@ function resolvePrecedence(exp: BinaryExpression) { expStack.push(createNewBinExpression(expStack.pop()!, expStack.pop()!, operStack.pop()!)); } // final expression tree is the only element left on the exp stack - const result = expStack[0] as BinaryExpression; - // apply the resulting properties onto the target expression - exp.left = result.left; - exp.right = result.right; - exp.symbol = result.symbol; - exp.locations = result.locations; + return expStack[0] as BinaryExpression; } function shouldPopOperator(nextToken: Token, stackToken: Token) { @@ -83,14 +82,14 @@ function shouldPopOperator(nextToken: Token, stackToken: Token) { function binaryExpressionToList(exp: BinaryExpression) { const items: (Token | Expression)[] = []; // the tree is left-associative, so we assemble the list from right to left - let operToken = createNewOperToken(exp.locations.oper, exp.symbol); + let operToken = createNewOperToken(exp.symbol); let left = exp.left, right = exp.right; while (true) { items.unshift(right); items.unshift(operToken); - if (left instanceof BinaryExpression) { + if (left.syntaxType === SyntaxType.BinaryExpression) { right = left.right; - operToken = createNewOperToken(left.locations.oper, left.symbol); + operToken = createNewOperToken(left.symbol); left = left.left; } else { items.unshift(left); @@ -100,16 +99,16 @@ function binaryExpressionToList(exp: BinaryExpression) { return items; } -function createNewOperToken(loc: Location, symbol: string) { - return new Token(TokenType.OPER, loc.startLine, loc.startColumn, symbol); +function createNewOperToken(tok: Token) { + return tok.with({}); } function createNewBinExpression(right: Expression, left: Expression, oper: Token) { - const exp = new BinaryExpression(); - exp.right = right; - exp.left = left; - exp.symbol = oper.image; - exp.registerLocation('oper', oper.getLocation()); 
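resolvePrecedence above re-associates the flat operator chain produced by the left-associative parse, using an expression stack and an operator stack (a shunting-yard pass over the list built by binaryExpressionToList). A simplified sketch of that re-association, with a hypothetical precedence table standing in for getOperatorMetadata and left-associativity assumed throughout:

    // Items as produced by binaryExpressionToList: operands and operators, in order.
    type Item = { kind: 'exp', value: string } | { kind: 'oper', symbol: string };

    // Hypothetical precedence values; the real code consults getOperatorMetadata.
    const prec: Record<string, number> = { '+': 1, '*': 2 };

    function reassociate(items: Item[]): string {
        const exps: string[] = [];
        const opers: string[] = [];
        const apply = () => {
            // pop right operand first, then left, matching the stack order above
            const right = exps.pop()!, left = exps.pop()!;
            exps.push(`(${left} ${opers.pop()} ${right})`);
        };
        for (const item of items) {
            if (item.kind === 'exp') exps.push(item.value);
            else {
                // pop while the operator on the stack binds at least as tightly
                while (opers.length && prec[opers[opers.length - 1]] >= prec[item.symbol]) apply();
                opers.push(item.symbol);
            }
        }
        while (opers.length) apply();
        return exps[0];
    }

    // reassociate over "a + b * c" yields "(a + (b * c))"
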
- exp.createAndRegisterLocation('self', exp.left.locations.self, exp.right.locations.self); - return exp; + return { + syntaxType: SyntaxType.BinaryExpression as SyntaxType.BinaryExpression, + location: left.location.merge(right.location), + left, + symbol: oper, + right + }; } diff --git a/src/syntax/expressions/BoolLiteral.ts b/src/syntax/expressions/BoolLiteral.ts index 9994919..399dad6 100644 --- a/src/syntax/expressions/BoolLiteral.ts +++ b/src/syntax/expressions/BoolLiteral.ts @@ -1,21 +1,18 @@ -import { Expression } from './Expression'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { nonTerminal, parser } from '~/parser/Parser'; -import { Token } from '~/parser/Tokenizer'; -import { IdentifierExpression } from '~/syntax/expressions/IdentifierExpression'; +import { NodeBase, SyntaxType } from '~/syntax/environment'; +import { Token } from '~/parser/lexer'; +import { ParseFunc, seq, select, tok } from '~/parser/parser'; -@nonTerminal({ implements: Expression, before: [IdentifierExpression] }) -export class BoolLiteral extends Expression { - @parser(['true', 'false'], { definite: true }) - setValue(token: Token) { - this.value = token.image === 'true'; - this.registerLocation('self', token.getLocation()); - } - - value: boolean; - - visit(visitor: INodeVisitor): T { - return visitor.visitBoolLiteral(this); - } +export interface BoolLiteral extends NodeBase { + syntaxType: SyntaxType.BoolLiteral; + value: Token; } + +export const BoolLiteral: ParseFunc = seq( + select(tok('true'), tok('false')), + (value, location) => ({ + syntaxType: SyntaxType.BoolLiteral as SyntaxType.BoolLiteral, + location, + value + }) +); diff --git a/src/syntax/expressions/CharLiteral.ts b/src/syntax/expressions/CharLiteral.ts index c9b34e3..e6cc0ec 100644 --- a/src/syntax/expressions/CharLiteral.ts +++ b/src/syntax/expressions/CharLiteral.ts @@ -1,20 +1,18 @@ -import { Expression } from './Expression'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { nonTerminal, parser } from '~/parser/Parser'; -import { TokenType, Token } from '~/parser/Tokenizer'; +import { NodeBase, SyntaxType } from '~/syntax/environment'; +import { Token, TokenType } from '~/parser/lexer'; +import { ParseFunc, seq, tok } from '~/parser/parser'; -@nonTerminal({ implements: Expression }) -export class CharLiteral extends Expression { - @parser(TokenType.CHARACTER_LITERAL, { definite: true }) - setValue(token: Token) { - this.value = token.value; - this.registerLocation('self', token.getLocation()); - } - - value: string; - - visit(visitor: INodeVisitor): T { - return visitor.visitCharLiteral(this); - } +export interface CharLiteral extends NodeBase { + syntaxType: SyntaxType.CharLiteral; + value: Token; } + +export const CharLiteral: ParseFunc = seq( + tok(TokenType.CHARACTER_LITERAL), + (value, location) => ({ + syntaxType: SyntaxType.CharLiteral as SyntaxType.CharLiteral, + location, + value + }) +); diff --git a/src/syntax/expressions/Expression.ts b/src/syntax/expressions/Expression.ts deleted file mode 100644 index cf25ea5..0000000 --- a/src/syntax/expressions/Expression.ts +++ /dev/null @@ -1,6 +0,0 @@ -import ASTNode from '~/syntax/ASTNode'; -import { nonTerminal } from '~/parser/Parser'; - - -@nonTerminal({ abstract: true }) -export abstract class Expression extends ASTNode {} diff --git a/src/syntax/expressions/FieldAccess.ts b/src/syntax/expressions/FieldAccess.ts index 79f6570..8751b15 100644 --- a/src/syntax/expressions/FieldAccess.ts +++ b/src/syntax/expressions/FieldAccess.ts @@ -1,28 +1,33 @@ -import 
{ Expression } from './Expression'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { nonTerminal, parser } from '~/parser/Parser'; -import { TokenType, Token } from '~/parser/Tokenizer'; +import { NodeBase, SyntaxType, Expression } from '~/syntax/environment'; +import { Token, TokenType } from '~/parser/lexer'; +import { ParseFunc, seq, tok } from '~/parser/parser'; -@nonTerminal({ implements: Expression, leftRecursive: 'setTarget' }) -export class FieldAccess extends Expression { - setTarget(exp: Expression) { - this.target = exp; - } - - @parser(TokenType.DOT, { definite: true }) setDot() {} - - @parser(TokenType.IDENT, { err: 'FIELD_ACCESS_INVALID_FIELD_NAME' }) - setField(token: Token) { - this.field = token.image; - this.registerLocation('field', token.getLocation()); - this.createAndRegisterLocation('self', this.target.locations.self, token.getLocation()); - } - +export interface FieldAccess extends NodeBase { + syntaxType: SyntaxType.FieldAccess; target: Expression; - field: string; - - visit(visitor: INodeVisitor) { - return visitor.visitFieldAccess(this); - } + field: Token; } + +export interface FieldAccessSuffix extends NodeBase { + syntaxType: SyntaxType.FieldAccess; + field: Token; + setBase(target: Expression): FieldAccess; +} + +export const FieldAccessSuffix: ParseFunc = seq( + tok('.'), + tok(TokenType.IDENT), + ([_, field], location) => ({ + syntaxType: SyntaxType.FieldAccess as SyntaxType.FieldAccess, + location, + field, + setBase(target: Expression) { + return { + ...this, + target, + location: this.location.merge(target.location) + } + } + }) +); diff --git a/src/syntax/expressions/FloatLiteral.ts b/src/syntax/expressions/FloatLiteral.ts index 1d2db80..b440a9c 100644 --- a/src/syntax/expressions/FloatLiteral.ts +++ b/src/syntax/expressions/FloatLiteral.ts @@ -1,20 +1,18 @@ -import { Expression } from './Expression'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { nonTerminal, parser } from '~/parser/Parser'; -import { TokenType, Token } from '~/parser/Tokenizer'; +import { NodeBase, SyntaxType } from '~/syntax/environment'; +import { Token, TokenType } from '~/parser/lexer'; +import { ParseFunc, seq, tok } from '~/parser/parser'; -@nonTerminal({ implements: Expression }) -export class FloatLiteral extends Expression { - @parser(TokenType.FLOAT_LITERAL, { definite: true }) - setValue(token: Token) { - this.value = token.value; - this.registerLocation('self', token.getLocation()); - } - - value: number; - - visit(visitor: INodeVisitor): T { - return visitor.visitFloatLiteral(this); - } +export interface FloatLiteral extends NodeBase { + syntaxType: SyntaxType.FloatLiteral; + value: Token; } + +export const FloatLiteral: ParseFunc = seq( + tok(TokenType.FLOAT_LITERAL), + (value, location) => ({ + syntaxType: SyntaxType.FloatLiteral as SyntaxType.FloatLiteral, + location, + value + }) +); diff --git a/src/syntax/expressions/FunctionApplication.ts b/src/syntax/expressions/FunctionApplication.ts index f304d13..aadf354 100644 --- a/src/syntax/expressions/FunctionApplication.ts +++ b/src/syntax/expressions/FunctionApplication.ts @@ -1,41 +1,41 @@ -import { Expression } from './Expression'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { nonTerminal, parser, ParseResult } from '~/parser/Parser'; -import { TokenType, Token } from '~/parser/Tokenizer'; -import { TypeArgList } from '~/syntax/types/SpecificType'; -import { Type } from '~/syntax/types/Type'; -import { BinaryExpression } from './BinaryExpression'; -import { PostfixExpression } 
from './UnaryExpression'; +import { NodeBase, SyntaxType, Expression, TypeNode } from '~/syntax/environment'; +import { ParseFunc, seq, tok, optional, repeat } from '~/parser/parser'; -@nonTerminal({ implements: Expression, leftRecursive: 'setTarget', before: [BinaryExpression, PostfixExpression] }) -export class FunctionApplication extends Expression { - setTarget(exp: Expression) { - this.target = exp; - } - - @parser(TypeArgList, { optional: true }) - setTypeArgs(result: ParseResult) { - this.typeArgs = result.types as Type[]; - } - - @parser(TokenType.LPAREN, { definite: true }) setOpenParen() {} - - @parser(Expression, { repeat: '*', err: 'INVALID_EXPRESSION', sep: TokenType.COMMA }) - setArgs(args: Expression[]) { - this.args = args; - } - - @parser(TokenType.RPAREN) - setCloseParen(token: Token) { - this.createAndRegisterLocation('self', this.target.locations.self, token.getLocation()); - } - +export interface FunctionApplication extends NodeBase { + syntaxType: SyntaxType.FunctionApplication; target: Expression; - typeArgs?: Type[]; + typeArgs: TypeNode[]; args: Expression[]; +} + +export interface FunctionApplicationSuffix extends NodeBase { + syntaxType: SyntaxType.FunctionApplication; + typeArgs: TypeNode[]; + args: Expression[]; + setBase(target: Expression): FunctionApplication; +} + +export function register(Expression: ParseFunc, TypeArgList: ParseFunc) { + const FunctionApplicationSuffix: ParseFunc = seq( + optional(TypeArgList), + tok('('), + repeat(Expression, '*', tok(',')), + tok(')'), + ([typeArgs, _1, args, _2], location) => ({ + syntaxType: SyntaxType.FunctionApplication as SyntaxType.FunctionApplication, + location, + typeArgs: typeArgs || [], + args, + setBase(target: Expression) { + return { + ...this, + target, + location: this.location.merge(target.location) + } + } + }) + ); - visit(visitor: INodeVisitor) { - return visitor.visitFunctionApplication(this); - } + return { FunctionApplicationSuffix }; } diff --git a/src/syntax/expressions/IdentifierExpression.ts b/src/syntax/expressions/IdentifierExpression.ts index 4f675c8..7265d0e 100644 --- a/src/syntax/expressions/IdentifierExpression.ts +++ b/src/syntax/expressions/IdentifierExpression.ts @@ -1,20 +1,18 @@ -import { Expression } from './Expression'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { nonTerminal, parser } from '~/parser/Parser'; -import { Token, TokenType } from '~/parser/Tokenizer'; +import { NodeBase, SyntaxType } from '~/syntax/environment'; +import { Token, TokenType } from '~/parser/lexer'; +import { ParseFunc, seq, tok } from '~/parser/parser'; -@nonTerminal({ implements: Expression }) -export class IdentifierExpression extends Expression { - @parser(TokenType.IDENT, { definite: true }) - setName(token: Token) { - this.name = token.image; - this.registerLocation('self', token.getLocation()); - } - - name: string; - - visit(visitor: INodeVisitor): T { - return visitor.visitIdentifierExpression(this); - } +export interface IdentifierExpression extends NodeBase { + syntaxType: SyntaxType.IdentifierExpression; + name: Token; } + +export const IdentifierExpression: ParseFunc = seq( + tok(TokenType.IDENT), + (name, location) => ({ + syntaxType: SyntaxType.IdentifierExpression as SyntaxType.IdentifierExpression, + location, + name + }) +); diff --git a/src/syntax/expressions/IfElseExpression.ts b/src/syntax/expressions/IfElseExpression.ts index c838a00..345f44e 100644 --- a/src/syntax/expressions/IfElseExpression.ts +++ b/src/syntax/expressions/IfElseExpression.ts @@ -1,43 +1,31 @@ 
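Suffix parsers like FunctionApplicationSuffix above return a partial node plus a setBase method; the Expression and TypeNode parsers in SyntaxEnvironment then fold a list of such suffixes onto the base expression or type that precedes them (the reduce over suffixes). A generic sketch of that fold, with Suffix as a hypothetical stand-in for the suffix node types:

    // Hypothetical minimal form of the suffix contract used by the left-recursive
    // expression and type parsers: each parsed suffix knows how to attach itself
    // to the expression (or type) that precedes it.
    interface Suffix<T> {
        setBase(base: T): T;
    }

    // Fold the suffixes onto the base from left to right, so that
    // `target[index].field` becomes FieldAccess(ArrayAccess(target, index), field).
    function applySuffixes<T>(base: T, suffixes: ReadonlyArray<Suffix<T>>): T {
        return suffixes.reduce((acc, suffix) => suffix.setBase(acc), base);
    }
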
-import { Expression } from './Expression'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { nonTerminal, parser } from '~/parser/Parser'; -import { Token, TokenType } from '~/parser/Tokenizer'; +import { NodeBase, SyntaxType, Expression } from '~/syntax/environment'; +import { ParseFunc, seq, tok } from '~/parser/parser'; -@nonTerminal({ implements: Expression }) -export class IfElseExpression extends Expression { - @parser('if', { definite: true }) - setIfToken(token: Token) { - this.registerLocation('if', token.getLocation()); - } - - @parser(TokenType.LPAREN, { err: 'IF_MISSING_OPEN_PAREN' }) setOpenParen() {} - - @parser(Expression, { err: 'INVALID_EXPRESSION' }) - setCondition(exp: Expression) { - this.condition = exp; - } - - @parser(TokenType.RPAREN, { err: 'IF_MISSING_CLOSE_PAREN' }) setCloseParen() {} - - @parser(Expression, { err: 'INVALID_EXPRESSION' }) - setConsequent(exp: Expression) { - this.consequent = exp; - } - - @parser('else', { err: 'IF_MISSING_ELSE' }) setElse() {} - - @parser(Expression, { err: 'INVALID_EXPRESSION' }) - setAlternate(exp: Expression) { - this.alternate = exp; - this.createAndRegisterLocation('self', this.locations.if, exp.locations.self); - } - +export interface IfElseExpression extends NodeBase { + syntaxType: SyntaxType.IfElseExpression; condition: Expression; consequent: Expression; alternate: Expression; - - visit(visitor: INodeVisitor) { - return visitor.visitIfElseExpression(this); - } +} + +export function register(Expression: ParseFunc) { + const IfElseExpression: ParseFunc = seq( + tok('if'), + tok('('), + Expression, + tok(')'), + Expression, + tok('else'), + Expression, + ([_1, _2, condition, _3, consequent, _4, alternate], location) => ({ + syntaxType: SyntaxType.IfElseExpression as SyntaxType.IfElseExpression, + location, + condition, + consequent, + alternate + }) + ); + + return { IfElseExpression }; } diff --git a/src/syntax/expressions/IntegerLiteral.ts b/src/syntax/expressions/IntegerLiteral.ts index caf57f6..84e313c 100644 --- a/src/syntax/expressions/IntegerLiteral.ts +++ b/src/syntax/expressions/IntegerLiteral.ts @@ -1,20 +1,18 @@ -import { Expression } from './Expression'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { nonTerminal, parser } from '~/parser/Parser'; -import { TokenType, Token } from '~/parser/Tokenizer'; +import { NodeBase, SyntaxType } from '~/syntax/environment'; +import { Token, TokenType } from '~/parser/lexer'; +import { ParseFunc, seq, tok } from '~/parser/parser'; -@nonTerminal({ implements: Expression }) -export class IntegerLiteral extends Expression { - @parser(TokenType.INTEGER_LITERAL, { definite: true }) - setValue(token: Token) { - this.value = token.value; - this.registerLocation('self', token.getLocation()); - } - - value: number; - - visit(visitor: INodeVisitor): T { - return visitor.visitIntegerLiteral(this); - } +export interface IntegerLiteral extends NodeBase { + syntaxType: SyntaxType.IntegerLiteral; + value: Token; } + +export const IntegerLiteral: ParseFunc = seq( + tok(TokenType.INTEGER_LITERAL), + (value, location) => ({ + syntaxType: SyntaxType.IntegerLiteral as SyntaxType.IntegerLiteral, + location, + value + }) +); diff --git a/src/syntax/expressions/LambdaExpression.ts b/src/syntax/expressions/LambdaExpression.ts index b4b568d..a691c26 100644 --- a/src/syntax/expressions/LambdaExpression.ts +++ b/src/syntax/expressions/LambdaExpression.ts @@ -1,93 +1,57 @@ -import { Expression } from './Expression'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { 
nonTerminal, parser } from '~/parser/Parser'; -import { TFunction } from '~/typecheck/types'; -import ASTNode from '~/syntax/ASTNode'; -import { TokenType, Token } from '~/parser/Tokenizer'; -import { ParenthesizedExpression } from '~/syntax/expressions/ParenthesizedExpression'; -import { TupleLiteral } from '~/syntax/expressions/TupleLiteral'; -import { Param, FunctionBody } from '~/syntax/declarations/FunctionDeclaration'; -import { Statement } from '~/syntax/statements/Statement'; -import { IdentifierExpression } from '~/syntax/expressions/IdentifierExpression'; +import { NodeBase, SyntaxType, Statement, Expression } from '~/syntax/environment'; +import { Param } from '~/syntax'; +import { Token, TokenType } from '~/parser/lexer'; +import { ParseFunc, seq, tok, repeat, select } from '~/parser/parser'; -export class LambdaParam extends ASTNode { - @parser(TokenType.IDENT, { definite: true }) - setName(token: Token) { - this.name = token.image; - this.registerLocation('name', token.getLocation()); - } - - name: string; - - visit(visitor: INodeVisitor) { - return visitor.visitLambdaParam(this); - } - - prettyName() { - return this.name; - } -} - -export abstract class BaseLambdaExpression extends Expression { - params: (Param | LambdaParam)[]; +export interface LambdaExpression extends NodeBase { + syntaxType: SyntaxType.LambdaExpression; + params: ReadonlyArray; body: Expression | Statement; - type: TFunction; - - visit(visitor: INodeVisitor) { - return visitor.visitLambdaExpression(this); - } - - prettyName() { - return `(${this.params.map(p => p.prettyName()).join(', ')})`; - } } -/** - * LambdaExpression ::= LPAREN (Param | LambdaParam)(* sep COMMA) RPAREN FAT_ARROW FunctionBody - */ -@nonTerminal({ implements: Expression, before: [ParenthesizedExpression, TupleLiteral] }) -export class LambdaExpression extends BaseLambdaExpression { - @parser(TokenType.LPAREN) - setOpenParenToken(token: Token) { - this.registerLocation('openParen', token.getLocation()); - } - - @parser([Param, LambdaParam], { repeat: '*', sep: TokenType.COMMA }) - setParams(params: (Param | LambdaParam)[]) { - this.params = params; - } - - @parser(TokenType.RPAREN) setCloseParen() {} - @parser(TokenType.FAT_ARROW, { definite: true }) setFatArrow() {} - - @parser(FunctionBody, { err: 'INVALID_FUNCTION_BODY' }) - setBody(body: Statement | Expression) { - this.body = body; - this.createAndRegisterLocation('self', this.locations.openParen, body.locations.self); - } +export function register(Param: ParseFunc, FunctionBody: ParseFunc) { + /** + * LambdaExpression ::= '(' (Param | IDENT)(* sep ',') ')' '=>' FunctionBody + */ + const LambdaExpression: ParseFunc = seq( + tok('('), + repeat(select( + Param, + tok(TokenType.IDENT) + ), '*', tok(',')), + tok(')'), + tok('=>'), + FunctionBody, + ([_1, params, _2, _3, body], location) => ({ + syntaxType: SyntaxType.LambdaExpression as SyntaxType.LambdaExpression, + location, + params: params.map(p => Token.isToken(p) ? 
lambdaParam(p) : p), + body + }) + ); + + /** + * ShorthandLambdaExpression ::= IDENT '=>' FunctionBody + */ + const ShorthandLambdaExpression: ParseFunc = seq( + tok(TokenType.IDENT), + tok('=>'), + FunctionBody, + ([param, _, body], location) => ({ + syntaxType: SyntaxType.LambdaExpression as SyntaxType.LambdaExpression, + location, + params: [lambdaParam(param)], + body + }) + ); + + return { LambdaExpression, ShorthandLambdaExpression }; } -/** - * ShorthandLambdaExpression ::= IDENT FAT_ARROW FunctionBody - * - * This is a special version that only applies when you have a single - * parameter whose type is implicit, so that no parentheses are required. - */ -@nonTerminal({ implements: Expression, before: [IdentifierExpression] }) -export class ShorthandLambdaExpression extends BaseLambdaExpression { - @parser(TokenType.IDENT) - setParamName(token: Token) { - const param = new LambdaParam(); - param.setName(token); - this.params = [param]; - } - - @parser(TokenType.FAT_ARROW, { definite: true }) setFatArrow() {} - - @parser(FunctionBody) - setBody(body: Statement | Expression) { - this.body = body; - this.createAndRegisterLocation('self', this.params[0].locations.name, body.locations.self); - } -} +const lambdaParam = (p: Token): Param => ({ + syntaxType: SyntaxType.Param as SyntaxType.Param, + location: p.location, + name: p, + typeNode: null +}); diff --git a/src/syntax/expressions/ParenthesizedExpression.ts b/src/syntax/expressions/ParenthesizedExpression.ts index fcdf66d..965fbb3 100644 --- a/src/syntax/expressions/ParenthesizedExpression.ts +++ b/src/syntax/expressions/ParenthesizedExpression.ts @@ -1,30 +1,23 @@ -import { Expression } from './Expression'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { nonTerminal, parser } from '~/parser/Parser'; -import { Token, TokenType } from '~/parser/Tokenizer'; -import { TupleLiteral } from '~/syntax/expressions/TupleLiteral'; +import { NodeBase, SyntaxType, Expression } from '~/syntax/environment'; +import { ParseFunc, seq, tok } from '~/parser/parser'; -@nonTerminal({ implements: Expression, before: [TupleLiteral] }) -export class ParenthesizedExpression extends Expression { - @parser(TokenType.LPAREN) - setOpenParen(token: Token) { - this.registerLocation('openParen', token.getLocation()); - } - - @parser(Expression) - setInner(exp: Expression) { - this.inner = exp; - } +export interface ParenthesizedExpression extends NodeBase { + syntaxType: SyntaxType.ParenthesizedExpression; + inner: Expression; +} - @parser(TokenType.RPAREN, { definite: true }) - setCloseParen(token: Token) { - this.createAndRegisterLocation('self', this.locations.openParen, token.getLocation()); - } +export function register(Expression: ParseFunc) { + const ParenthesizedExpression: ParseFunc = seq( + tok('('), + Expression, + tok(')'), + ([_1, inner, _2], location) => ({ + syntaxType: SyntaxType.ParenthesizedExpression as SyntaxType.ParenthesizedExpression, + location, + inner + }) + ); - inner: Expression; - - visit(visitor: INodeVisitor) { - return visitor.visitParenthesizedExpression(this); - } + return { ParenthesizedExpression }; } diff --git a/src/syntax/expressions/StringLiteral.ts b/src/syntax/expressions/StringLiteral.ts index 436e7cf..33c9a6b 100644 --- a/src/syntax/expressions/StringLiteral.ts +++ b/src/syntax/expressions/StringLiteral.ts @@ -1,20 +1,18 @@ -import { Expression } from './Expression'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { nonTerminal, parser } from '~/parser/Parser'; -import { TokenType, Token } from 
'~/parser/Tokenizer'; +import { NodeBase, SyntaxType } from '~/syntax/environment'; +import { Token, TokenType } from '~/parser/lexer'; +import { ParseFunc, seq, tok } from '~/parser/parser'; -@nonTerminal({ implements: Expression }) -export class StringLiteral extends Expression { - @parser(TokenType.STRING_LITERAL, { definite: true }) - setValue(token: Token) { - this.value = token.value; - this.registerLocation('self', token.getLocation()); - } - - value: string; - - visit(visitor: INodeVisitor): T { - return visitor.visitStringLiteral(this); - } +export interface StringLiteral extends NodeBase { + syntaxType: SyntaxType.StringLiteral; + value: Token; } + +export const StringLiteral: ParseFunc = seq( + tok(TokenType.STRING_LITERAL), + (value, location) => ({ + syntaxType: SyntaxType.StringLiteral as SyntaxType.StringLiteral, + location, + value + }) +); diff --git a/src/syntax/expressions/StructLiteral.ts b/src/syntax/expressions/StructLiteral.ts index bc5d303..396f752 100644 --- a/src/syntax/expressions/StructLiteral.ts +++ b/src/syntax/expressions/StructLiteral.ts @@ -1,39 +1,34 @@ -import { Expression } from './Expression'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { nonTerminal, exp, parser, ParseResult } from '~/parser/Parser'; -import { TokenType, Token } from '~/parser/Tokenizer'; +import { NodeBase, SyntaxType, Expression } from '~/syntax/environment'; +import { Token, TokenType } from '~/parser/lexer'; +import { ParseFunc, seq, tok, repeat } from '~/parser/parser'; -export const StructEntry = { - key: exp(TokenType.IDENT, { definite: true }), - ':': exp(TokenType.COLON, { err: 'STRUCT_LITERAL_MISSING_COLON' }), - value: exp(Expression, { err: 'INVALID_EXPRESSION' }), -}; - -@nonTerminal({ implements: Expression }) -export class StructLiteral extends Expression { - @parser(TokenType.LBRACE, { definite: true }) - setOpenBrace(token: Token) { - this.registerLocation('openBrace', token.getLocation()); - } +interface StructEntry { + key: Token; + value: Expression; +} - @parser(StructEntry, { repeat: '*', sep: TokenType.COMMA }) - setEntries(result: ParseResult[]) { - this.entries = result.map(e => { - const key = e.key as Token; - this.registerLocation(`key_${key.image}`, key.getLocation()); - return { key: key.image, value: e.value as Expression }; - }); - } +export interface StructLiteral extends NodeBase { + syntaxType: SyntaxType.StructLiteral; + entries: ReadonlyArray; +} - @parser(TokenType.RBRACE) - setCloseBrace(token: Token) { - this.createAndRegisterLocation('self', this.locations.openBrace, token.getLocation()); - } +export function register(Expression: ParseFunc) { + const StructLiteral: ParseFunc = seq( + tok('{'), + repeat(seq( + tok(TokenType.IDENT), + tok(':'), + Expression, + ([key, _, value]) => ({ key, value }) + ), '*', tok(',')), + tok('}'), + ([_1, entries, _2], location) => ({ + syntaxType: SyntaxType.StructLiteral as SyntaxType.StructLiteral, + location, + entries + }) + ); - entries: { key: string, value: Expression }[]; - - visit(visitor: INodeVisitor) { - return visitor.visitStructLiteral(this); - } + return { StructLiteral }; } diff --git a/src/syntax/expressions/TupleLiteral.ts b/src/syntax/expressions/TupleLiteral.ts index ac19f5c..2755765 100644 --- a/src/syntax/expressions/TupleLiteral.ts +++ b/src/syntax/expressions/TupleLiteral.ts @@ -1,29 +1,23 @@ -import { Expression } from './Expression'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { nonTerminal, parser } from '~/parser/Parser'; -import { TokenType, Token } from 
'~/parser/Tokenizer'; +import { NodeBase, SyntaxType, Expression } from '~/syntax/environment'; +import { ParseFunc, seq, tok, repeat } from '~/parser/parser'; -@nonTerminal({ implements: Expression }) -export class TupleLiteral extends Expression { - @parser(TokenType.LPAREN, { definite: true }) - setOpenParen(token: Token) { - this.registerLocation('openParen', token.getLocation()); - } - - @parser(Expression, { repeat: '*', err: 'INVALID_EXPRESSION', sep: TokenType.COMMA }) - setItems(items: Expression[]) { - this.items = items; - } +export interface TupleLiteral extends NodeBase { + syntaxType: SyntaxType.TupleLiteral; + items: ReadonlyArray; +} - @parser(TokenType.RPAREN) - setCloseParen(token: Token) { - this.createAndRegisterLocation('self', this.locations.openParen, token.getLocation()); - } +export function register(Expression: ParseFunc) { + const TupleLiteral: ParseFunc = seq( + tok('('), + repeat(Expression, '*', tok(',')), + tok(')'), + ([_1, items, _2], location) => ({ + syntaxType: SyntaxType.TupleLiteral as SyntaxType.TupleLiteral, + location, + items + }) + ); - items: Expression[]; - - visit(visitor: INodeVisitor) { - return visitor.visitTupleLiteral(this); - } + return { TupleLiteral }; } diff --git a/src/syntax/expressions/UnaryExpression.ts b/src/syntax/expressions/UnaryExpression.ts index 33c8972..7823e69 100644 --- a/src/syntax/expressions/UnaryExpression.ts +++ b/src/syntax/expressions/UnaryExpression.ts @@ -1,54 +1,52 @@ -import { Expression } from './Expression'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { nonTerminal, parser } from '~/parser/Parser'; -import { UnaryOperator, verifyMultiOperator } from '~/runtime/operators'; -import { Token, TokenType } from '~/parser/Tokenizer'; +import { NodeBase, SyntaxType, Expression } from '~/syntax/environment'; +import { Token, TokenType } from '~/parser/lexer'; +import { ParseFunc, seq, tok, repeat } from '~/parser/parser'; +import { verifyMultiOperator } from '~/runtime/operators'; -export abstract class UnaryExpression extends Expression { +export interface UnaryExpression extends NodeBase { + syntaxType: SyntaxType.UnaryExpression; target: Expression; - symbol: string; + symbol: Token; prefix: boolean; - operator: UnaryOperator; - - visit(visitor: INodeVisitor) { - return visitor.visitUnaryExpression(this); - } - - protected setOperator(tokens: Token[]) { - const oper = verifyMultiOperator(tokens); - this.symbol = oper.image; - this.registerLocation('oper', oper.getLocation()); - } } -@nonTerminal({ implements: Expression }) -export class PrefixExpression extends UnaryExpression { - // operators have to be parsed as + because < and > screw everything up - @parser(TokenType.OPER, { repeat: '+', definite: true }) - setOperatorToken(tokens: Token[]) { - super.setOperator(tokens); - this.prefix = true; - } - - @parser(Expression, { err: 'INVALID_EXPRESSION' }) - setTarget(exp: Expression) { - this.target = exp; - this.createAndRegisterLocation('self', this.locations.oper, exp.locations.self); - } +export interface PostfixExpressionSuffix extends NodeBase { + syntaxType: SyntaxType.UnaryExpression; + symbol: Token; + prefix: false; + setBase(target: Expression): UnaryExpression; } -@nonTerminal({ implements: Expression, leftRecursive: 'setTarget' }) -export class PostfixExpression extends UnaryExpression { - setTarget(exp: Expression) { - this.target = exp; - } +export function register(Expression: ParseFunc) { + const PrefixExpression: ParseFunc = seq( + repeat(tok(TokenType.OPER), '+'), + Expression, + 
([symbol, target], location) => ({ + syntaxType: SyntaxType.UnaryExpression as SyntaxType.UnaryExpression, + location, + target, + symbol: verifyMultiOperator(symbol), // TODO: make sure this works + prefix: true + }) + ); + + const PostfixExpressionSuffix: ParseFunc = seq( + repeat(tok(TokenType.OPER), '+'), + (symbol, location) => ({ + syntaxType: SyntaxType.UnaryExpression as SyntaxType.UnaryExpression, + location, + symbol: verifyMultiOperator(symbol), // TODO: make sure this works + prefix: false as false, + setBase(target: Expression) { + return { + ...this, + target, + location: this.location.merge(target.location) + } + } + }) + ); - // operators have to be parsed as + because < and > screw everything up - @parser(TokenType.OPER, { repeat: '+', definite: true }) - setOperatorToken(tokens: Token[]) { - super.setOperator(tokens); - this.prefix = false; - this.createAndRegisterLocation('self', this.target.locations.self, this.locations.oper); - } + return { PrefixExpression, PostfixExpressionSuffix }; } diff --git a/src/syntax/expressions/VarDeclaration.ts b/src/syntax/expressions/VarDeclaration.ts index 3825e08..3f583f7 100644 --- a/src/syntax/expressions/VarDeclaration.ts +++ b/src/syntax/expressions/VarDeclaration.ts @@ -1,31 +1,26 @@ -import { Expression } from './Expression'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { nonTerminal, parser } from '~/parser/Parser'; -import { TokenType, Token } from '~/parser/Tokenizer'; -import { ShorthandLambdaExpression } from '~/syntax/expressions/LambdaExpression'; -import { IdentifierExpression } from '~/syntax/expressions/IdentifierExpression'; +import { NodeBase, SyntaxType, Expression } from '~/syntax/environment'; +import { Token, TokenType } from '~/parser/lexer'; +import { ParseFunc, seq, tok } from '~/parser/parser'; -@nonTerminal({ implements: Expression, before: [ShorthandLambdaExpression, IdentifierExpression] }) -export class VarDeclaration extends Expression { - @parser(TokenType.IDENT) - setName(token: Token) { - this.name = token.image; - this.registerLocation('name', token.getLocation()); - } - - @parser(TokenType.EQUALS, { definite: true }) setEquals() {} +export interface VarDeclaration extends NodeBase { + syntaxType: SyntaxType.VarDeclaration; + name: Token; + init: Expression; +} - @parser(Expression, { err: 'INVALID_INITIAL_VALUE' }) - setInitExp(exp: Expression) { - this.initExp = exp; - this.createAndRegisterLocation('self', this.locations.name, exp.locations.self); - } +export function register(Expression: ParseFunc) { + const VarDeclaration: ParseFunc = seq( + tok(TokenType.IDENT), + tok('='), + Expression, + ([name, _, init], location) => ({ + syntaxType: SyntaxType.VarDeclaration as SyntaxType.VarDeclaration, + location, + name, + init + }) + ); - name: string; - initExp: Expression; - - visit(visitor: INodeVisitor) { - return visitor.visitVarDeclaration(this); - } + return { VarDeclaration }; } diff --git a/src/syntax/expressions/index.ts b/src/syntax/expressions/index.ts index 077bbab..aafc13c 100644 --- a/src/syntax/expressions/index.ts +++ b/src/syntax/expressions/index.ts @@ -1,20 +1,18 @@ -// order is important here to avoid circular dependencies -export * from './Expression'; -export * from './ArrayAccess'; -export * from './ArrayLiteral'; -export * from './UnaryExpression'; -export * from './BinaryExpression'; -export * from './IdentifierExpression'; -export * from './BoolLiteral'; -export * from './CharLiteral'; -export * from './FieldAccess'; -export * from './FloatLiteral'; -export * from 
'./FunctionApplication'; -export * from './IfElseExpression'; -export * from './IntegerLiteral'; -export * from './TupleLiteral'; -export * from './ParenthesizedExpression'; -export * from './LambdaExpression'; -export * from './StringLiteral'; -export * from './StructLiteral'; -export * from './VarDeclaration'; +export { ArrayAccess } from './ArrayAccess'; +export { ArrayLiteral } from './ArrayLiteral'; +export { UnaryExpression } from './UnaryExpression'; +export { BinaryExpression } from './BinaryExpression'; +export { IdentifierExpression } from './IdentifierExpression'; +export { BoolLiteral } from './BoolLiteral'; +export { CharLiteral } from './CharLiteral'; +export { FieldAccess } from './FieldAccess'; +export { FloatLiteral } from './FloatLiteral'; +export { FunctionApplication } from './FunctionApplication'; +export { IfElseExpression } from './IfElseExpression'; +export { IntegerLiteral } from './IntegerLiteral'; +export { TupleLiteral } from './TupleLiteral'; +export { ParenthesizedExpression } from './ParenthesizedExpression'; +export { LambdaExpression } from './LambdaExpression'; +export { StringLiteral } from './StringLiteral'; +export { StructLiteral } from './StructLiteral'; +export { VarDeclaration } from './VarDeclaration'; diff --git a/src/syntax/index.ts b/src/syntax/index.ts index 6eb04ba..9f06a82 100644 --- a/src/syntax/index.ts +++ b/src/syntax/index.ts @@ -1,19 +1,9 @@ -export { default as ASTNode } from './ASTNode'; -// get the top-level node types out of the way first -import './declarations/Program'; -import './types/Type'; -import './statements/Statement'; -import './expressions/Expression'; +export { Node } from './environment'; -// export in this order: -// - declarations are dependent on types, expressions, and statements -// - statements are dependent on expressions -// - expressions are dependent on types -// - types aren't dependent on anything -// This may not be entirely necessary, but we don't want to have to deal with nonsense export * from './types'; export * from './expressions'; export * from './statements'; export * from './declarations'; -// INodeVisitor is dependent on EVERYTHING -export { default as INodeVisitor } from './INodeVisitor'; +export { ModuleRoot } from './ModuleRoot'; + +export { default as INodeVisitor } from './visitors/interfaces/INodeVisitor'; diff --git a/src/syntax/statements/Block.ts b/src/syntax/statements/Block.ts index 23c6e05..3e5c485 100644 --- a/src/syntax/statements/Block.ts +++ b/src/syntax/statements/Block.ts @@ -1,30 +1,23 @@ -import { Statement } from '~/syntax/statements/Statement'; -import { nonTerminal, parser } from '~/parser/Parser'; -import { TokenType, Token } from '~/parser/Tokenizer'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { ExpressionStatement } from '~/syntax/statements/ExpressionStatement'; +import { NodeBase, SyntaxType, Statement } from '~/syntax/environment'; +import { ParseFunc, seq, tok, repeat } from '~/parser/parser'; -@nonTerminal({ implements: Statement, before: [ExpressionStatement] }) -export class Block extends Statement { - @parser(TokenType.LBRACE, { definite: true }) - setOpenBrace(token: Token) { - this.registerLocation('openBrace', token.getLocation()); - } - - @parser(Statement, { repeat: '*' }) - setStatements(statements: Statement[]) { - this.statements = statements; - } +export interface Block extends NodeBase { + syntaxType: SyntaxType.Block; + statements: ReadonlyArray; +} - @parser(TokenType.RBRACE, { err: 'MISSING_CLOSE_BRACE' }) - setCloseBrace(token: 
Token) { - this.createAndRegisterLocation('self', this.locations.openBrace, token.getLocation()); - } +export function register(Statement: ParseFunc<Statement>) { + const Block: ParseFunc<Block> = seq( + tok('{'), + repeat(Statement, '*'), + tok('}'), + ([_1, statements, _2], location) => ({ + syntaxType: SyntaxType.Block as SyntaxType.Block, + location, + statements + }) + ); - statements: Statement[]; - - visit(visitor: INodeVisitor) { - return visitor.visitBlock(this); - } + return { Block }; } diff --git a/src/syntax/statements/BreakStatement.ts b/src/syntax/statements/BreakStatement.ts index 4c64577..0556048 100644 --- a/src/syntax/statements/BreakStatement.ts +++ b/src/syntax/statements/BreakStatement.ts @@ -1,25 +1,19 @@ -import { Statement } from '~/syntax/statements/Statement'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { nonTerminal, parser } from '~/parser/Parser'; -import { Token, TokenType } from '~/parser/Tokenizer'; +import { NodeBase, SyntaxType } from '~/syntax/environment'; +import { Token, TokenType } from '~/parser/lexer'; +import { ParseFunc, seq, tok, optional } from '~/parser/parser'; -@nonTerminal({ implements: Statement }) -export class BreakStatement extends Statement { - @parser('break', { definite: true }) - setBreakToken(token: Token) { - this.registerLocation('self', token.getLocation()); - } - - @parser(TokenType.INTEGER_LITERAL, { optional: true }) - setLoopNumber(token: Token) { - this.loopNumber = token.value; - this.createAndRegisterLocation('self', this.locations.self, token.getLocation()); - } - - loopNumber: number = 0; - - visit(visitor: INodeVisitor) { - return visitor.visitBreakStatement(this); - } +export interface BreakStatement extends NodeBase { + syntaxType: SyntaxType.BreakStatement; + loopNumber: Optional<Token>; } + +export const BreakStatement: ParseFunc<BreakStatement> = seq( + tok('break'), + optional(tok(TokenType.INTEGER_LITERAL)), + ([_, loopNumber], location) => ({ + syntaxType: SyntaxType.BreakStatement as SyntaxType.BreakStatement, + location, + loopNumber + }) +); diff --git a/src/syntax/statements/ContinueStatement.ts b/src/syntax/statements/ContinueStatement.ts index ac14bd6..c17d1f7 100644 --- a/src/syntax/statements/ContinueStatement.ts +++ b/src/syntax/statements/ContinueStatement.ts @@ -1,25 +1,19 @@ -import { Statement } from '~/syntax/statements/Statement'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { nonTerminal, parser } from '~/parser/Parser'; -import { Token, TokenType } from '~/parser/Tokenizer'; +import { NodeBase, SyntaxType } from '~/syntax/environment'; +import { Token, TokenType } from '~/parser/lexer'; +import { ParseFunc, seq, tok, optional } from '~/parser/parser'; -@nonTerminal({ implements: Statement }) -export class ContinueStatement extends Statement { - @parser('continue', { definite: true }) - setContinueToken(token: Token) { - this.registerLocation('self', token.getLocation()); - } - - @parser(TokenType.INTEGER_LITERAL, { optional: true }) - setLoopNumber(token: Token) { - this.loopNumber = token.value; - this.createAndRegisterLocation('self', this.locations.self, token.getLocation()); - } - - loopNumber: number = 0; - - visit(visitor: INodeVisitor) { - return visitor.visitContinueStatement(this); - } +export interface ContinueStatement extends NodeBase { + syntaxType: SyntaxType.ContinueStatement; + loopNumber: Optional<Token>; } + +export const ContinueStatement: ParseFunc<ContinueStatement> = seq( + tok('continue'), + optional(tok(TokenType.INTEGER_LITERAL)), + ([_, loopNumber], location) => ({ + syntaxType:
SyntaxType.ContinueStatement as SyntaxType.ContinueStatement, + location, + loopNumber + }) +); diff --git a/src/syntax/statements/DoWhileStatement.ts b/src/syntax/statements/DoWhileStatement.ts index e5fb4b7..ed7409c 100644 --- a/src/syntax/statements/DoWhileStatement.ts +++ b/src/syntax/statements/DoWhileStatement.ts @@ -1,39 +1,28 @@ -import { Statement } from '~/syntax/statements/Statement'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { nonTerminal, parser } from '~/parser/Parser'; -import { Token, TokenType } from '~/parser/Tokenizer'; -import { Expression } from '~/syntax/expressions/Expression'; +import { NodeBase, SyntaxType, Statement, Expression } from '~/syntax/environment'; +import { ParseFunc, seq, tok } from '~/parser/parser'; -@nonTerminal({ implements: Statement }) -export class DoWhileStatement extends Statement { - @parser('do', { definite: true }) - setDoToken(token: Token) { - this.registerLocation('do', token.getLocation()); - } - - @parser(Statement, { err: 'INVALID_STATEMENT' }) - setBody(stmt: Statement) { - this.body = stmt; - } - - @parser('while', { err: 'DO_WHILE_MISSING_WHILE' }) setWhileToken() {} - @parser(TokenType.LPAREN, { err: 'WHILE_MISSING_OPEN_PAREN' }) setOpenParen() {} - - @parser(Expression, { err: 'INVALID_EXPRESSION' }) - setCondition(exp: Expression) { - this.conditionExp = exp; - } +export interface DoWhileStatement extends NodeBase { + syntaxType: SyntaxType.DoWhileStatement; + body: Statement; + condition: Expression; +} - @parser(TokenType.RPAREN, { err: 'WHILE_MISSING_CLOSE_PAREN' }) - setCloseParen(token: Token) { - this.createAndRegisterLocation('self', this.locations.do, token.getLocation()); - } +export function register(Expression: ParseFunc, Statement: ParseFunc) { + const DoWhileStatement: ParseFunc = seq( + tok('do'), + Statement, + tok('while'), + tok('('), + Expression, + tok(')'), + ([_1, body, _2, _3, condition, _4], location) => ({ + syntaxType: SyntaxType.DoWhileStatement as SyntaxType.DoWhileStatement, + location, + body, + condition + }) + ); - body: Statement; - conditionExp: Expression; - - visit(visitor: INodeVisitor) { - return visitor.visitDoWhileStatement(this); - } + return { DoWhileStatement }; } diff --git a/src/syntax/statements/ExpressionStatement.ts b/src/syntax/statements/ExpressionStatement.ts index c93807b..56260ac 100644 --- a/src/syntax/statements/ExpressionStatement.ts +++ b/src/syntax/statements/ExpressionStatement.ts @@ -1,19 +1,21 @@ -import { Statement } from '~/syntax/statements/Statement'; -import { parser, nonTerminal } from '~/parser/Parser'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { Expression } from '~/syntax/expressions/Expression'; +import { NodeBase, SyntaxType, Expression } from '~/syntax/environment'; +import { ParseFunc, seq } from '~/parser/parser'; -@nonTerminal({ implements: Statement }) -export class ExpressionStatement extends Statement { - @parser(Expression, { definite: true }) - setExpression(exp: Expression) { - this.expression = exp; - } - +export interface ExpressionStatement extends NodeBase { + syntaxType: SyntaxType.ExpressionStatement; expression: Expression; +} + +export function register(Expression: ParseFunc) { + const ExpressionStatement: ParseFunc = seq( + Expression, + (expression, location) => ({ + syntaxType: SyntaxType.ExpressionStatement as SyntaxType.ExpressionStatement, + location, + expression + }) + ); - visit(visitor: INodeVisitor) { - return visitor.visitExpressionStatement(this); - } + return { ExpressionStatement }; } diff 
--git a/src/syntax/statements/ForStatement.ts b/src/syntax/statements/ForStatement.ts index f07fc0a..aba5463 100644 --- a/src/syntax/statements/ForStatement.ts +++ b/src/syntax/statements/ForStatement.ts @@ -1,45 +1,32 @@ -import { Statement } from '~/syntax/statements/Statement'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { nonTerminal, parser } from '~/parser/Parser'; -import { Token, TokenType } from '~/parser/Tokenizer'; -import { Expression } from '~/syntax/expressions/Expression'; +import { NodeBase, SyntaxType, Expression, Statement } from '~/syntax/environment'; +import { Token, TokenType } from '~/parser/lexer'; +import { ParseFunc, seq, tok } from '~/parser/parser'; -@nonTerminal({ implements: Statement }) -export class ForStatement extends Statement { - @parser('for', { definite: true }) - setForToken(token: Token) { - this.registerLocation('for', token.getLocation()); - } - - @parser(TokenType.LPAREN, { err: 'FOR_MISSING_OPEN_PAREN' }) setOpenParen() {} - - @parser(TokenType.IDENT, { err: 'FOR_INVALID_ITER_IDENT' }) - setIterVar(token: Token) { - this.iterVar = token.image; - this.registerLocation('iterVar', token.getLocation()); - } - - @parser('in', { err: 'FOR_MISSING_IN' }) setIn() {} - - @parser(Expression, { err: 'INVALID_EXPRESSION' }) - setIterable(exp: Expression) { - this.iterableExp = exp; - } - - @parser(TokenType.RPAREN, { err: 'FOR_MISSING_CLOSE_PAREN' }) setCloseParen() {} - - @parser(Statement, { err: 'INVALID_STATEMENT' }) - setBody(stmt: Statement) { - this.body = stmt; - this.createAndRegisterLocation('self', this.locations.for, stmt.locations.self); - } - - iterVar: string; - iterableExp: Expression; +export interface ForStatement extends NodeBase { + syntaxType: SyntaxType.ForStatement; + variable: Token; + iterable: Expression; body: Statement; - - visit(visitor: INodeVisitor) { - return visitor.visitForStatement(this); - } +} + +export function register(Expression: ParseFunc, Statement: ParseFunc) { + const ForStatement: ParseFunc = seq( + tok('for'), + tok('('), + tok(TokenType.IDENT), + tok('in'), + Expression, + tok(')'), + Statement, + ([_1, _2, variable, _3, iterable, _4, body], location) => ({ + syntaxType: SyntaxType.ForStatement as SyntaxType.ForStatement, + location, + variable, + iterable, + body + }) + ); + + return { ForStatement }; } diff --git a/src/syntax/statements/ReturnStatement.ts b/src/syntax/statements/ReturnStatement.ts index f1499b9..2a01804 100644 --- a/src/syntax/statements/ReturnStatement.ts +++ b/src/syntax/statements/ReturnStatement.ts @@ -1,26 +1,22 @@ -import { Statement } from '~/syntax/statements/Statement'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { nonTerminal, parser } from '~/parser/Parser'; -import { Token } from '~/parser/Tokenizer'; -import { Expression } from '~/syntax/expressions/Expression'; +import { NodeBase, SyntaxType, Expression } from '~/syntax/environment'; +import { ParseFunc, seq, tok, optional } from '~/parser/parser'; -@nonTerminal({ implements: Statement }) -export class ReturnStatement extends Statement { - @parser('return', { definite: true }) - setReturnToken(token: Token) { - this.registerLocation('self', token.getLocation()); - } +export interface ReturnStatement extends NodeBase { + syntaxType: SyntaxType.ReturnStatement; + exp: Optional; +} - @parser(Expression, { optional: true }) - setExpression(exp: Expression) { - this.exp = exp; - this.createAndRegisterLocation('self', this.locations.self, exp.locations.self); - } +export function register(Expression: 
ParseFunc) { + const ReturnStatement: ParseFunc = seq( + tok('return'), + optional(Expression), + ([_, exp], location) => ({ + syntaxType: SyntaxType.ReturnStatement as SyntaxType.ReturnStatement, + location, + exp + }) + ); - exp?: Expression; - - visit(visitor: INodeVisitor) { - return visitor.visitReturnStatement(this); - } + return { ReturnStatement }; } diff --git a/src/syntax/statements/Statement.ts b/src/syntax/statements/Statement.ts deleted file mode 100644 index 1c7c27a..0000000 --- a/src/syntax/statements/Statement.ts +++ /dev/null @@ -1,6 +0,0 @@ -import ASTNode from '~/syntax/ASTNode'; -import { nonTerminal } from '~/parser/Parser'; - - -@nonTerminal({ abstract: true }) -export abstract class Statement extends ASTNode {} diff --git a/src/syntax/statements/ThrowStatement.ts b/src/syntax/statements/ThrowStatement.ts index 9cb6cf9..1721e24 100644 --- a/src/syntax/statements/ThrowStatement.ts +++ b/src/syntax/statements/ThrowStatement.ts @@ -1,26 +1,22 @@ -import { Statement } from '~/syntax/statements/Statement'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { nonTerminal, parser } from '~/parser/Parser'; -import { Token } from '~/parser/Tokenizer'; -import { Expression } from '~/syntax/expressions/Expression'; +import { NodeBase, SyntaxType, Expression } from '~/syntax/environment'; +import { ParseFunc, seq, tok } from '~/parser/parser'; -@nonTerminal({ implements: Statement }) -export class ThrowStatement extends Statement { - @parser('throw', { definite: true }) - setThrowToken(token: Token) { - this.registerLocation('throw', token.getLocation()); - } +export interface ThrowStatement extends NodeBase { + syntaxType: SyntaxType.ThrowStatement; + exp: Expression; +} - @parser(Expression, { err: 'INVALID_EXPRESSION' }) - setExpression(exp: Expression) { - this.exp = exp; - this.createAndRegisterLocation('self', this.locations.throw, exp.locations.self); - } +export function register(Expression: ParseFunc) { + const ThrowStatement: ParseFunc = seq( + tok('throw'), + Expression, + ([_, exp], location) => ({ + syntaxType: SyntaxType.ThrowStatement as SyntaxType.ThrowStatement, + location, + exp + }) + ); - exp: Expression; - - visit(visitor: INodeVisitor) { - return visitor.visitThrowStatement(this); - } + return { ThrowStatement }; } diff --git a/src/syntax/statements/TryCatchStatement.ts b/src/syntax/statements/TryCatchStatement.ts index b17b68c..eca4cfd 100644 --- a/src/syntax/statements/TryCatchStatement.ts +++ b/src/syntax/statements/TryCatchStatement.ts @@ -1,56 +1,49 @@ -import { Statement } from '~/syntax/statements/Statement'; -import { nonTerminal, parser, ParseResult, exp } from '~/parser/Parser'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { Token } from '~/parser/Tokenizer'; -import { Param } from '~/syntax/declarations/FunctionDeclaration'; +import { NodeBase, SyntaxType, Statement } from '~/syntax/environment'; +import { Param } from '~/syntax'; +import { ParseFunc, seq, tok, repeat, optional } from '~/parser/parser'; -export const CatchClause = { - catch: exp('catch', { definite: true }), - '(': exp('(', { err: 'TRY_CATCH_MISSING_OPEN_PAREN' }), - param: exp(Param, { err: 'CATCH_INVALID_PARAM' }), - ')': exp(')', { err: 'TRY_CATCH_MISSING_CLOSE_PAREN' }), - body: exp(Statement, { err: 'INVALID_STATEMENT' }) +interface Catch { + param: Param; + body: Statement; } -export const FinallyClause = { - finally: exp('finally', { definite: true }), - body: exp(Statement, { err: 'INVALID_STATEMENT' }) -}; - -type Catch = { param: Param, body: 
Statement }; - -@nonTerminal({ implements: Statement }) -export class TryCatchStatement extends Statement { - @parser('try', { definite: true }) - setTryToken(token: Token) { - this.registerLocation('try', token.getLocation()); - } - - @parser(Statement, { err: 'INVALID_STATEMENT' }) - setTryBody(stmt: Statement) { - this.try = stmt; - } - - @parser(CatchClause, { repeat: '+', err: 'TRY_CATCH_MISSING_CATCH' }) - setCatches(result: ParseResult[]) { - this.catches = result.map(c => ({ - param: c.param as Param, - body: c.body as Statement, - })) - } - - @parser(FinallyClause, { optional: true }) - setFinally(result: ParseResult) { - this.finally = result.body as Statement; - this.createAndRegisterLocation('self', this.locations.try, this.finally.locations.self); - } - +export interface TryCatchStatement extends NodeBase { + syntaxType: SyntaxType.TryCatchStatement; try: Statement; - catches: Catch[]; - finally?: Statement; - - visit(visitor: INodeVisitor) { - return visitor.visitTryCatchStatement(this); - } -} \ No newline at end of file + catches: ReadonlyArray<Catch>; + finally: Optional<Statement>; +} + +export function register(Statement: ParseFunc<Statement>, Param: ParseFunc<Param>) { + const CatchClause: ParseFunc<Catch> = seq( + tok('catch'), + tok('('), + Param, + tok(')'), + Statement, + ([_1, _2, param, _3, body]) => ({ param, body }) + ); + + const FinallyClause: ParseFunc<Statement> = seq( + tok('finally'), + Statement, + ([_, body]) => body + ); + + const TryCatchStatement: ParseFunc<TryCatchStatement> = seq( + tok('try'), + Statement, + repeat(CatchClause, '+'), + optional(FinallyClause), + ([_, _try, catches, _finally], location) => ({ + syntaxType: SyntaxType.TryCatchStatement as SyntaxType.TryCatchStatement, + location, + try: _try, + catches, + finally: _finally + }) + ); + + return { TryCatchStatement }; +} diff --git a/src/syntax/statements/WhileStatement.ts b/src/syntax/statements/WhileStatement.ts index 15e0b8e..05cde07 100644 --- a/src/syntax/statements/WhileStatement.ts +++ b/src/syntax/statements/WhileStatement.ts @@ -1,36 +1,27 @@ -import INodeVisitor from '~/syntax/INodeVisitor'; -import { Statement } from '~/syntax/statements/Statement'; -import { nonTerminal, parser } from '~/parser/Parser'; -import { Token, TokenType } from '~/parser/Tokenizer'; -import { Expression } from '~/syntax/expressions/Expression'; +import { NodeBase, SyntaxType, Expression, Statement } from '~/syntax/environment'; +import { ParseFunc, seq, tok } from '~/parser/parser'; -@nonTerminal({ implements: Statement }) -export class WhileStatement extends Statement { - @parser('while', { definite: true }) - setWhileToken(token: Token) { - this.registerLocation('while', token.getLocation()); - } - - @parser(TokenType.LPAREN, { err: 'WHILE_MISSING_OPEN_PAREN' }) setOpenParen() {} - - @parser(Expression, { err: 'INVALID_EXPRESSION' }) - setCondition(exp: Expression) { - this.conditionExp = exp; - } - - @parser(TokenType.RPAREN, { err: 'WHILE_MISSING_CLOSE_PAREN' }) setCloseParen() {} +export interface WhileStatement extends NodeBase { + syntaxType: SyntaxType.WhileStatement; + condition: Expression; + body: Statement; +} - @parser(Statement, { err: 'INVALID_STATEMENT' }) - setBody(stmt: Statement) { - this.body = stmt; - this.createAndRegisterLocation('self', this.locations.while, stmt.locations.self); - } +export function register(Expression: ParseFunc<Expression>, Statement: ParseFunc<Statement>) { + const WhileStatement: ParseFunc<WhileStatement> = seq( + tok('while'), + tok('('), + Expression, + tok(')'), + Statement, + ([_1, _2, condition, _3, body], location) => ({ + syntaxType:
SyntaxType.WhileStatement as SyntaxType.WhileStatement, + location, + condition, + body + }) + ); - conditionExp: Expression; - body: Statement; - - visit(visitor: INodeVisitor) { - return visitor.visitWhileStatement(this); - } + return { WhileStatement }; } diff --git a/src/syntax/statements/index.ts b/src/syntax/statements/index.ts index 74962a2..c2ef5d2 100644 --- a/src/syntax/statements/index.ts +++ b/src/syntax/statements/index.ts @@ -1,11 +1,10 @@ -export * from './Statement'; -export * from './ExpressionStatement'; -export * from './Block'; -export * from './BreakStatement'; -export * from './ContinueStatement'; -export * from './DoWhileStatement'; -export * from './ForStatement'; -export * from './ReturnStatement'; -export * from './ThrowStatement'; -export * from './TryCatchStatement'; -export * from './WhileStatement'; +export { ExpressionStatement } from './ExpressionStatement'; +export { Block } from './Block'; +export { BreakStatement } from './BreakStatement'; +export { ContinueStatement } from './ContinueStatement'; +export { DoWhileStatement } from './DoWhileStatement'; +export { ForStatement } from './ForStatement'; +export { ReturnStatement } from './ReturnStatement'; +export { ThrowStatement } from './ThrowStatement'; +export { TryCatchStatement } from './TryCatchStatement'; +export { WhileStatement } from './WhileStatement'; diff --git a/src/syntax/types/ArrayType.ts b/src/syntax/types/ArrayType.ts index b1ba980..5a30f41 100644 --- a/src/syntax/types/ArrayType.ts +++ b/src/syntax/types/ArrayType.ts @@ -1,25 +1,29 @@ -import { Type } from '~/syntax/types/Type'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { nonTerminal, parser } from '~/parser/Parser'; -import { TokenType, Token } from '~/parser/Tokenizer'; +import { NodeBase, SyntaxType, TypeNode } from '~/syntax/environment'; +import { ParseFunc, seq, tok } from '~/parser/parser'; -@nonTerminal({ implements: Type, leftRecursive: 'setBaseType' }) -export class ArrayType extends Type { - setBaseType(baseType: Type) { - this.baseType = baseType; - } - - @parser(TokenType.LBRACK, { definite: true }) setOpenBracket() {} - - @parser(TokenType.RBRACK) - setCloseBracket(token: Token) { - this.createAndRegisterLocation('self', this.baseType.locations.self, token.getLocation()); - } +export interface ArrayType extends NodeBase { + syntaxType: SyntaxType.ArrayType; + baseType: TypeNode; +} - baseType: Type; - - visit(visitor: INodeVisitor) { - return visitor.visitArrayType(this); - } +export interface ArrayTypeSuffix extends NodeBase { + syntaxType: SyntaxType.ArrayType; + setBase(baseType: TypeNode): ArrayType; } + +export const ArrayTypeSuffix: ParseFunc = seq( + tok('['), + tok(']'), + ([_1, _2], location) => ({ + syntaxType: SyntaxType.ArrayType as SyntaxType.ArrayType, + location, + setBase(baseType: TypeNode) { + return { + ...this, + baseType, + location: this.location.merge(baseType.location) + } + } + }) +); diff --git a/src/syntax/types/BuiltInType.ts b/src/syntax/types/BuiltInType.ts index 4314499..feca739 100644 --- a/src/syntax/types/BuiltInType.ts +++ b/src/syntax/types/BuiltInType.ts @@ -1,36 +1,29 @@ -import { Type } from './Type'; -import { Token } from '~/parser/Tokenizer'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { parser, nonTerminal } from '~/parser/Parser'; -import { IdentifierType } from '~/syntax/types/IdentifierType'; +import { ParseFunc, tok, select, seq } from '~/parser/parser'; +import { NodeBase, SyntaxType } from '~/syntax/environment'; +import { Token } from 
'~/parser/lexer'; -export const builtInTypes = [ - 'u8', 'i8', 'byte', - 'u16', 'i16', 'short', - 'u32', 'i32', 'integer', - 'u64', 'i64', 'long', - 'int', - 'f32', 'float', - 'f64', 'double', - 'string', - 'char', - 'bool', - 'void', - 'any', -]; - -@nonTerminal({ implements: Type, before: [IdentifierType] }) -export class BuiltInType extends Type { - @parser(builtInTypes, { definite: true }) - setType(token: Token) { - this.typeNode = token.image; - this.registerLocation('self', token.getLocation()); - } - - typeNode: string; - - visit(visitor: INodeVisitor) { - return visitor.visitBuiltInType(this); - } +export interface BuiltInType extends NodeBase { + syntaxType: SyntaxType.BuiltInType; + name: Token; } + +export const BuiltInType: ParseFunc = seq(select( + tok('u8'), tok('i8'), tok('byte'), + tok('u16'), tok('i16'), tok('short'), + tok('u32'), tok('i32'), tok('integer'), + tok('u64'), tok('i64'), tok('long'), + tok('int'), + tok('f32'), tok('float'), + tok('f64'), tok('double'), + tok('string'), + tok('char'), + tok('bool'), + tok('void'), + tok('any'), + tok('never') +), (name, location) => ({ + location, + syntaxType: SyntaxType.BuiltInType as SyntaxType.BuiltInType, + name +})); diff --git a/src/syntax/types/FunctionType.ts b/src/syntax/types/FunctionType.ts index dcdc182..b90e40a 100644 --- a/src/syntax/types/FunctionType.ts +++ b/src/syntax/types/FunctionType.ts @@ -1,37 +1,27 @@ -import INodeVisitor from '~/syntax/INodeVisitor'; -import { Type } from '~/syntax/types/Type'; -import { nonTerminal, parser } from '~/parser/Parser'; -import { IdentifierType } from '~/syntax/types/IdentifierType'; -import { TokenType, Token } from '~/parser/Tokenizer'; -import { ParenthesizedType } from '~/syntax/types/ParenthesizedType'; -import { TupleType } from '~/syntax/types/TupleType'; +import { seq, tok, ParseFunc, repeat } from '~/parser/parser'; +import { TypeNode, NodeBase, SyntaxType } from '~/syntax/environment'; -@nonTerminal({ implements: Type, before: [IdentifierType, ParenthesizedType, TupleType] }) -export class FunctionType extends Type { - @parser(TokenType.LPAREN) - setOpenParen(token: Token) { - this.registerLocation('openParen', token.getLocation()); - } - - @parser(Type, { repeat: '*', err: 'INVALID_TYPE', sep: TokenType.COMMA }) - setParamTypes(types: Type[]) { - this.paramTypes = types; - } - - @parser(TokenType.RPAREN) setCloseParen() {} - @parser(TokenType.FAT_ARROW, { definite: true }) setFatArrow() {} +export interface FunctionType extends NodeBase { + syntaxType: SyntaxType.FunctionType; + paramTypes: TypeNode[]; + returnType: TypeNode; +} - @parser(Type, { err: 'FUNCTION_TYPE_INVALID_RETURN_TYPE' }) - setReturnType(type: Type) { - this.returnType = type; - this.createAndRegisterLocation('self', this.locations.openParen, type.locations.self); - } +export function register(TypeNode: ParseFunc) { + const FunctionType: ParseFunc = seq( + tok('('), + repeat(TypeNode, '*', tok(',')), + tok(')'), + tok('=>'), + TypeNode, + ([_1, paramTypes, _2, _3, returnType], location) => ({ + syntaxType: SyntaxType.FunctionType as SyntaxType.FunctionType, + location, + paramTypes, + returnType + }) + ); - paramTypes: Type[]; - returnType: Type; - - visit(visitor: INodeVisitor) { - return visitor.visitFunctionType(this); - } + return { FunctionType }; } diff --git a/src/syntax/types/IdentifierType.ts b/src/syntax/types/IdentifierType.ts index deaa8df..acc8dcd 100644 --- a/src/syntax/types/IdentifierType.ts +++ b/src/syntax/types/IdentifierType.ts @@ -1,20 +1,19 @@ -import { Type } from 
'./Type'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { TokenType, Token } from '~/parser/Tokenizer'; -import { parser, nonTerminal } from '~/parser/Parser'; +import { NodeBase, SyntaxType } from '~/syntax/environment'; +import { Token, TokenType } from '~/parser/lexer'; +import { seq, tok, ParseFunc } from '~/parser/parser'; -@nonTerminal({ implements: Type }) -export class IdentifierType extends Type { - @parser(TokenType.IDENT, { definite: true }) - setIdentifier(token: Token) { - this.name = token.image; - this.registerLocation('self', token.getLocation()); - } - name: string; - - visit(visitor: INodeVisitor) { - return visitor.visitIdentifierType(this); - } -} \ No newline at end of file +export interface IdentifierType extends NodeBase { + syntaxType: SyntaxType.IdentifierType; + name: Token; +} + +export const IdentifierType: ParseFunc = seq( + tok(TokenType.IDENT), + (name, location) => ({ + syntaxType: SyntaxType.IdentifierType as SyntaxType.IdentifierType, + location, + name + }) +); diff --git a/src/syntax/types/NamespaceAccessType.ts b/src/syntax/types/NamespaceAccessType.ts index 5e773c0..a26f1a0 100644 --- a/src/syntax/types/NamespaceAccessType.ts +++ b/src/syntax/types/NamespaceAccessType.ts @@ -1,27 +1,33 @@ -import { Type } from '~/syntax/types/Type'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { nonTerminal, parser } from '~/parser/Parser'; -import { TokenType, Token } from '~/parser/Tokenizer'; +import { NodeBase, SyntaxType, TypeNode } from '~/syntax/environment'; +import { Token, TokenType } from '~/parser/lexer'; +import { ParseFunc, seq, tok } from '~/parser/parser'; -@nonTerminal({ implements: Type, leftRecursive: 'setBaseType' }) -export class NamespaceAccessType extends Type { - setBaseType(type: Type) { - this.baseType = type; - } +export interface NamespaceAccessType extends NodeBase { + syntaxType: SyntaxType.NamespaceAccessType; + baseType: TypeNode; + typeName: Token; +} - @parser(TokenType.DOT, { definite: true }) setDot() {} +export interface NamespaceAccessTypeSuffix extends NodeBase { + syntaxType: SyntaxType.NamespaceAccessType; + typeName: Token; + setBase(baseType: TypeNode): NamespaceAccessType; +} - @parser(TokenType.IDENT) - setTypeName(token: Token) { - this.typeName = token.image; - this.createAndRegisterLocation('self', this.baseType.locations.self, token.getLocation()); - } - - baseType: Type; - typeName: string; - - visit(visitor: INodeVisitor) { - return visitor.visitNamespaceAccessType(this); - } -} \ No newline at end of file +export const NamespaceAccessTypeSuffix: ParseFunc = seq( + tok('.'), + tok(TokenType.IDENT), + ([_1, typeName], location) => ({ + syntaxType: SyntaxType.NamespaceAccessType as SyntaxType.NamespaceAccessType, + location, + typeName, + setBase(baseType: TypeNode) { + return { + ...this, + baseType, + location: this.location.merge(baseType.location) + } + } + }) +); diff --git a/src/syntax/types/ParenthesizedType.ts b/src/syntax/types/ParenthesizedType.ts index 8b72bb8..8bab0a6 100644 --- a/src/syntax/types/ParenthesizedType.ts +++ b/src/syntax/types/ParenthesizedType.ts @@ -1,30 +1,23 @@ -import { Type } from '~/syntax/types/Type'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { nonTerminal, parser } from '~/parser/Parser'; -import { TokenType, Token } from '~/parser/Tokenizer'; -import { TupleType } from '~/syntax/types/TupleType'; +import { ParseFunc, seq, tok } from '~/parser/parser'; +import { TypeNode, NodeBase, SyntaxType } from '~/syntax/environment'; -@nonTerminal({ 
implements: Type, before: [TupleType] }) -export class ParenthesizedType extends Type { - @parser(TokenType.LPAREN) - setOpenParen(token: Token) { - this.registerLocation('openParen', token.getLocation()); - } +export interface ParenthesizedType extends NodeBase { + syntaxType: SyntaxType.ParenthesizedType; + inner: TypeNode; +} - @parser(Type) - setInnerType(type: Type) { - this.inner = type; - } +export function register(TypeNode: ParseFunc) { + const ParenthesizedType: ParseFunc = seq( + tok('('), + TypeNode, + tok(')'), + ([_1, inner, _2], location) => ({ + syntaxType: SyntaxType.ParenthesizedType as SyntaxType.ParenthesizedType, + location, + inner + }) + ); - @parser(TokenType.RPAREN, { definite: true }) - setCloseParen(token: Token){ - this.createAndRegisterLocation('self', this.locations.openParen, token.getLocation()); - } - - inner: Type; - - visit(visitor: INodeVisitor) { - return visitor.visitParenthesizedType(this); - } -} \ No newline at end of file + return { ParenthesizedType }; +} diff --git a/src/syntax/types/SpecificType.ts b/src/syntax/types/SpecificType.ts index b1eec5b..5174e0d 100644 --- a/src/syntax/types/SpecificType.ts +++ b/src/syntax/types/SpecificType.ts @@ -1,31 +1,42 @@ -import { Type } from '~/syntax/types/Type'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { nonTerminal, ParseResult, parser, exp } from '~/parser/Parser'; -import { TokenType, Token } from '~/parser/Tokenizer'; +import { NodeBase, SyntaxType, TypeNode } from '~/syntax/environment'; +import { ParseFunc, seq, repeat, tok } from '~/parser/parser'; -export const TypeArgList = { - '<': exp('<', { definite: true }), - types: exp(Type, { repeat: '*', sep: TokenType.COMMA, err: 'INVALID_TYPE_ARG' }), - '>': exp('>', { err: 'INVALID_TYPE_ARG_LIST' }) +export interface SpecificType extends NodeBase { + syntaxType: SyntaxType.SpecificType; + typeNode: TypeNode; + typeArgs: ReadonlyArray; } -@nonTerminal({ implements: Type, leftRecursive: 'setGenericType' }) -export class SpecificType extends Type { - setGenericType(type: Type) { - this.typeNode = type; - } +export interface SpecificTypeSuffix extends NodeBase { + syntaxType: SyntaxType.SpecificType; + typeArgs: ReadonlyArray; + setBase(typeNode: TypeNode): SpecificType; +} + +export function register(TypeNode: ParseFunc) { + const TypeArgList: ParseFunc = seq( + tok('<'), + repeat(TypeNode, '*', tok(',')), + tok('>'), + ([_1, types, _2]) => types + ); - @parser(TypeArgList, { definite: true }) - setTypeArgs(result: ParseResult) { - this.typeArgs = result.types as Type[]; - this.createAndRegisterLocation('self', this.typeNode.locations.self, (result['>'] as Token).getLocation()); - } + const SpecificTypeSuffix: ParseFunc = seq( + TypeArgList, + (typeArgs, location) => ({ + syntaxType: SyntaxType.SpecificType as SyntaxType.SpecificType, + location, + typeArgs, + setBase(typeNode: TypeNode) { + return { + ...this, + typeNode, + location: this.location.merge(typeNode.location) + } + } + }) + ); - typeNode: Type; - typeArgs: Type[]; - - visit(visitor: INodeVisitor) { - return visitor.visitSpecificType(this); - } -} \ No newline at end of file + return { SpecificTypeSuffix, TypeArgList }; +} diff --git a/src/syntax/types/StructType.ts b/src/syntax/types/StructType.ts index e2e91b9..73596a5 100644 --- a/src/syntax/types/StructType.ts +++ b/src/syntax/types/StructType.ts @@ -1,38 +1,34 @@ -import { Type } from '~/syntax/types/Type'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { nonTerminal, parser, exp, ParseResult } from 
'~/parser/Parser'; -import { TokenType, Token } from '~/parser/Tokenizer'; +import { TypeNode, NodeBase, SyntaxType } from '~/syntax/environment'; +import { Token, TokenType } from '~/parser/lexer'; +import { ParseFunc, seq, tok, repeat } from '~/parser/parser'; -const Field = { - typeNode: exp(Type, { definite: true }), - name: exp(TokenType.IDENT, { err: 'INVALID_FIELD_NAME' }), -}; -@nonTerminal({ implements: Type }) -export class StructType extends Type { - @parser(TokenType.LBRACE, { definite: true }) - setOpenBrace(token: Token) { - this.registerLocation('openBrace', token.getLocation()); - } +interface Field { + typeNode: TypeNode; + name: Token; +} - @parser(Field, { repeat: '*' }) - setFields(fields: ParseResult[]) { - for (const field of fields) { - const name = field.name as Token; - this.fields.push({ type: field.typeNode as Type, name: name.image }); - this.registerLocation(`field_${name}`, name.getLocation()); - } - } +export interface StructType extends NodeBase { + syntaxType: SyntaxType.StructType; + fields: ReadonlyArray; +} - @parser(TokenType.RBRACE, { err: 'INVALID_STRUCT_NO_CLOSE_BRACE' }) - setCloseBrace(token: Token) { - this.createAndRegisterLocation('self', this.locations.openBrace, token.getLocation()); - } +export function register(TypeNode: ParseFunc) { + const StructType: ParseFunc = seq( + tok('{'), + repeat(seq( + TypeNode, + tok(TokenType.IDENT), + ([typeNode, name]) => ({ typeNode, name }) + ), '*'), + tok('}'), + ([_1, fields, _2], location) => ({ + syntaxType: SyntaxType.StructType as SyntaxType.StructType, + location, + fields + }) + ); - fields: { type: Type, name: string }[] = []; - - visit(visitor: INodeVisitor) { - return visitor.visitStructType(this); - } + return { StructType }; } diff --git a/src/syntax/types/TupleType.ts b/src/syntax/types/TupleType.ts index 5f1ec12..9508863 100644 --- a/src/syntax/types/TupleType.ts +++ b/src/syntax/types/TupleType.ts @@ -1,29 +1,24 @@ -import { Type } from '~/syntax/types/Type'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { nonTerminal, parser } from '~/parser/Parser'; -import { Token, TokenType } from '~/parser/Tokenizer'; +import { NodeBase, SyntaxType, TypeNode } from '~/syntax/environment'; +import { ParseFunc, seq, tok, repeat } from '~/parser/parser'; -@nonTerminal({ implements: Type }) -export class TupleType extends Type { - @parser(TokenType.LPAREN, { definite: true }) - setOpenParen(token: Token) { - this.registerLocation('openParen', token.getLocation()); - } - @parser(Type, { repeat: '*', sep: TokenType.COMMA }) - setTypes(types: Type[]) { - this.types = types; - } +export interface TupleType extends NodeBase { + syntaxType: SyntaxType.TupleType; + types: ReadonlyArray; +} - @parser(TokenType.RPAREN) - setCloseParen(token: Token) { - this.createAndRegisterLocation('self', this.locations.openParen, token.getLocation()); - } +export function register(TypeNode: ParseFunc) { + const TupleType: ParseFunc = seq( + tok('('), + repeat(TypeNode, '*', tok(',')), + tok(')'), + ([_1, types, _2], location) => ({ + syntaxType: SyntaxType.TupleType as SyntaxType.TupleType, + location, + types + }) + ); - types: Type[]; - - visit(visitor: INodeVisitor) { - return visitor.visitTupleType(this); - } + return { TupleType }; } diff --git a/src/syntax/types/Type.ts b/src/syntax/types/Type.ts deleted file mode 100644 index d3b9ab8..0000000 --- a/src/syntax/types/Type.ts +++ /dev/null @@ -1,6 +0,0 @@ -import ASTNode from '~/syntax/ASTNode'; -import { nonTerminal } from '~/parser/Parser'; - - -@nonTerminal({ 
abstract: true }) -export abstract class Type extends ASTNode {} diff --git a/src/syntax/types/UnionType.ts b/src/syntax/types/UnionType.ts index 8e18d40..a90ac56 100644 --- a/src/syntax/types/UnionType.ts +++ b/src/syntax/types/UnionType.ts @@ -1,26 +1,36 @@ -import { Type } from '~/syntax/types/Type'; -import INodeVisitor from '~/syntax/INodeVisitor'; -import { nonTerminal, parser } from '~/parser/Parser'; +import { NodeBase, SyntaxType, TypeNode } from '~/syntax/environment'; +import { ParseFunc, seq, tok } from '~/parser/parser'; -@nonTerminal({ implements: Type, leftRecursive: 'setLeft' }) -export class UnionType extends Type { - setLeft(left: Type) { - this.types = [left]; - } +export interface UnionType extends NodeBase { + syntaxType: SyntaxType.UnionType; + left: TypeNode; + right: TypeNode; +} - @parser('|', { definite: true }) setVbarToken() {} +export interface UnionTypeSuffix extends NodeBase { + syntaxType: SyntaxType.UnionType; + right: TypeNode; + setBase(left: TypeNode): UnionType; +} - @parser(Type, { err: 'INVALID_UNION_TYPE' }) - setRight(right: Type) { - if (right instanceof UnionType) this.types.push(...right.types); - else this.types.push(right); - this.createAndRegisterLocation('self', this.types[0].locations.self, this.types[this.types.length - 1].locations.self); - } +export function register(TypeNode: ParseFunc) { + const UnionTypeSuffix: ParseFunc = seq( + tok('|'), + TypeNode, + ([_1, right], location) => ({ + syntaxType: SyntaxType.UnionType as SyntaxType.UnionType, + location, + right, + setBase(left: TypeNode) { + return { + ...this, + left, + location: this.location.merge(left.location) + }; + } + }) + ); - types: Type[]; - - visit(visitor: INodeVisitor) { - return visitor.visitUnionType(this); - } + return { UnionTypeSuffix }; } diff --git a/src/syntax/types/index.ts b/src/syntax/types/index.ts index 8c0c595..96abe52 100644 --- a/src/syntax/types/index.ts +++ b/src/syntax/types/index.ts @@ -1,11 +1,10 @@ -export * from './Type'; -export * from './ArrayType'; -export * from './IdentifierType'; -export * from './BuiltInType'; -export * from './TupleType'; -export * from './ParenthesizedType'; -export * from './FunctionType'; -export * from './NamespaceAccessType'; -export * from './SpecificType'; -export * from './StructType'; -export * from './UnionType'; +export { ArrayType } from './ArrayType'; +export { IdentifierType } from './IdentifierType'; +export { BuiltInType } from './BuiltInType'; +export { TupleType } from './TupleType'; +export { ParenthesizedType } from './ParenthesizedType'; +export { FunctionType } from './FunctionType'; +export { NamespaceAccessType } from './NamespaceAccessType'; +export { SpecificType } from './SpecificType'; +export { StructType } from './StructType'; +export { UnionType } from './UnionType'; From b68bd9736716185bb38b621950a69f2e0ac7c6f1 Mon Sep 17 00:00:00 2001 From: Jake Chitel Date: Sun, 29 Apr 2018 07:54:14 -0500 Subject: [PATCH 04/15] added jest config, moving "old" code to src_old for increased clarity --- istanbul.json | 5 - package.json | 34 +- scripts/test.js | 24 - src/require-hook.js | 26 - src/syntax/INodeVisitor.ts | 60 - src/typecheck/TypeCheckError.ts | 17 - src/typecheck/TypeChecker.ts | 411 --- src/typecheck/TypeCheckerMessages.ts | 26 - src/typecheck/types/OrderedMap.ts | 44 - src/typecheck/types/TArray.ts | 23 - src/typecheck/types/TBool.ts | 17 - src/typecheck/types/TChar.ts | 17 - src/typecheck/types/TGeneric.ts | 42 - src/typecheck/types/TInferred.ts | 9 - src/typecheck/types/TParam.ts | 45 - 
src/typecheck/visitors/ITypeVisitor.ts | 35 - src/typecheck/visitors/SpecifyTypeVisitor.ts | 94 - src/typecheck/visitors/index.ts | 7 - src/utils/OrderedMap.ts | 59 - {src => src_old}/interpreter/Interpreter.ts | 0 {src => src_old}/interpreter/frames.ts | 0 {src => src_old}/interpreter/index.ts | 0 {src => src_old}/parser/ParserError.ts | 0 {src => src_old}/parser/ParserMessages.ts | 0 {src => src_old}/runtime/instructions.ts | 0 {src => src_old}/runtime/operators.ts | 0 {src => src_old}/runtime/types/RArray.ts | 0 {src => src_old}/runtime/types/RBool.ts | 0 {src => src_old}/runtime/types/RChar.ts | 0 {src => src_old}/runtime/types/RFloat.ts | 0 {src => src_old}/runtime/types/RFunction.ts | 0 {src => src_old}/runtime/types/RInteger.ts | 0 {src => src_old}/runtime/types/RString.ts | 0 {src => src_old}/runtime/types/RStruct.ts | 0 {src => src_old}/runtime/types/RTuple.ts | 0 {src => src_old}/runtime/types/RValue.ts | 0 {src => src_old}/runtime/types/index.ts | 0 .../syntax/visitors/DeclarationNameVisitor.ts | 260 ++ .../syntax/visitors/DeclarationTypeVisitor.ts | 161 + .../syntax/visitors}/TranslationVisitor.ts | 10 +- .../syntax}/visitors/TypeCheckVisitor.ts | 295 +- .../syntax/visitors/TypeResolutionVisitor.ts | 263 ++ .../interfaces/IDeclarationVisitor.ts | 20 + .../visitors/interfaces/IExpressionVisitor.ts | 26 + .../visitors/interfaces/INodeVisitor.ts | 11 + .../visitors/interfaces/IStatementVisitor.ts | 18 + .../visitors/interfaces/ITypeVisitor.ts | 18 + {src => src_old}/translator/ConstFunc.ts | 0 {src => src_old}/translator/Func.ts | 27 +- {src => src_old}/translator/Translator.ts | 0 {src => src_old}/translator/index.ts | 0 .../typecheck/TypeCheckContext.ts | 0 src_old/typecheck/TypeChecker.ts | 157 + src_old/typecheck/resolveModule.ts | 47 + src_old/typecheck/types.ts | 252 ++ {src => src_old}/typecheck/types/TAny.ts | 11 +- src_old/typecheck/types/TArray.ts | 24 + src_old/typecheck/types/TBool.ts | 22 + src_old/typecheck/types/TChar.ts | 22 + {src => src_old}/typecheck/types/TFloat.ts | 12 +- {src => src_old}/typecheck/types/TFunction.ts | 29 +- src_old/typecheck/types/TGeneric.ts | 75 + src_old/typecheck/types/TInferred.ts | 16 + {src => src_old}/typecheck/types/TInteger.ts | 14 +- .../typecheck/types/TNamespace.ts | 11 +- {src => src_old}/typecheck/types/TNever.ts | 11 +- src_old/typecheck/types/TParam.ts | 62 + .../typecheck/types/TRecursive.ts | 14 +- {src => src_old}/typecheck/types/TStruct.ts | 12 +- {src => src_old}/typecheck/types/TTuple.ts | 12 +- {src => src_old}/typecheck/types/TType.ts | 24 +- {src => src_old}/typecheck/types/TUnion.ts | 12 +- {src => src_old}/typecheck/types/TUnknown.ts | 0 {src => src_old}/typecheck/types/index.ts | 4 +- .../visitors/AssertAssignmentVisitor.ts | 330 ++ .../typecheck/visitors/AssignmentVisitor.ts | 11 + src_old/typecheck/visitors/CloneVisitor.ts | 131 + src_old/typecheck/visitors/ITypeVisitor.ts | 38 + .../visitors/InferTypeArgsVisitor.ts | 9 +- .../typecheck/visitors/IsVisitors.ts | 49 +- .../typecheck/visitors/SpecifyTypeVisitor.ts | 48 + src_old/utils/OrderedMap.ts | 62 + {src => src_old}/utils/Scope.ts | 0 {src => src_old}/utils/preVisit.ts | 0 test/setup.js | 4 + yarn.lock | 3177 ++++++++++++++--- 86 files changed, 4985 insertions(+), 1821 deletions(-) delete mode 100644 istanbul.json delete mode 100644 scripts/test.js delete mode 100644 src/require-hook.js delete mode 100644 src/syntax/INodeVisitor.ts delete mode 100644 src/typecheck/TypeCheckError.ts delete mode 100644 src/typecheck/TypeChecker.ts delete mode 100644 
src/typecheck/TypeCheckerMessages.ts delete mode 100644 src/typecheck/types/OrderedMap.ts delete mode 100644 src/typecheck/types/TArray.ts delete mode 100644 src/typecheck/types/TBool.ts delete mode 100644 src/typecheck/types/TChar.ts delete mode 100644 src/typecheck/types/TGeneric.ts delete mode 100644 src/typecheck/types/TInferred.ts delete mode 100644 src/typecheck/types/TParam.ts delete mode 100644 src/typecheck/visitors/ITypeVisitor.ts delete mode 100644 src/typecheck/visitors/SpecifyTypeVisitor.ts delete mode 100644 src/typecheck/visitors/index.ts delete mode 100644 src/utils/OrderedMap.ts rename {src => src_old}/interpreter/Interpreter.ts (100%) rename {src => src_old}/interpreter/frames.ts (100%) rename {src => src_old}/interpreter/index.ts (100%) rename {src => src_old}/parser/ParserError.ts (100%) rename {src => src_old}/parser/ParserMessages.ts (100%) rename {src => src_old}/runtime/instructions.ts (100%) rename {src => src_old}/runtime/operators.ts (100%) rename {src => src_old}/runtime/types/RArray.ts (100%) rename {src => src_old}/runtime/types/RBool.ts (100%) rename {src => src_old}/runtime/types/RChar.ts (100%) rename {src => src_old}/runtime/types/RFloat.ts (100%) rename {src => src_old}/runtime/types/RFunction.ts (100%) rename {src => src_old}/runtime/types/RInteger.ts (100%) rename {src => src_old}/runtime/types/RString.ts (100%) rename {src => src_old}/runtime/types/RStruct.ts (100%) rename {src => src_old}/runtime/types/RTuple.ts (100%) rename {src => src_old}/runtime/types/RValue.ts (100%) rename {src => src_old}/runtime/types/index.ts (100%) create mode 100644 src_old/syntax/visitors/DeclarationNameVisitor.ts create mode 100644 src_old/syntax/visitors/DeclarationTypeVisitor.ts rename {src/translator => src_old/syntax/visitors}/TranslationVisitor.ts (98%) rename {src/typecheck => src_old/syntax}/visitors/TypeCheckVisitor.ts (63%) create mode 100644 src_old/syntax/visitors/TypeResolutionVisitor.ts create mode 100644 src_old/syntax/visitors/interfaces/IDeclarationVisitor.ts create mode 100644 src_old/syntax/visitors/interfaces/IExpressionVisitor.ts create mode 100644 src_old/syntax/visitors/interfaces/INodeVisitor.ts create mode 100644 src_old/syntax/visitors/interfaces/IStatementVisitor.ts create mode 100644 src_old/syntax/visitors/interfaces/ITypeVisitor.ts rename {src => src_old}/translator/ConstFunc.ts (100%) rename {src => src_old}/translator/Func.ts (89%) rename {src => src_old}/translator/Translator.ts (100%) rename {src => src_old}/translator/index.ts (100%) rename {src => src_old}/typecheck/TypeCheckContext.ts (100%) create mode 100644 src_old/typecheck/TypeChecker.ts create mode 100644 src_old/typecheck/resolveModule.ts create mode 100644 src_old/typecheck/types.ts rename {src => src_old}/typecheck/types/TAny.ts (65%) create mode 100644 src_old/typecheck/types/TArray.ts create mode 100644 src_old/typecheck/types/TBool.ts create mode 100644 src_old/typecheck/types/TChar.ts rename {src => src_old}/typecheck/types/TFloat.ts (65%) rename {src => src_old}/typecheck/types/TFunction.ts (83%) create mode 100644 src_old/typecheck/types/TGeneric.ts create mode 100644 src_old/typecheck/types/TInferred.ts rename {src => src_old}/typecheck/types/TInteger.ts (66%) rename {src => src_old}/typecheck/types/TNamespace.ts (74%) rename {src => src_old}/typecheck/types/TNever.ts (65%) create mode 100644 src_old/typecheck/types/TParam.ts rename {src => src_old}/typecheck/types/TRecursive.ts (57%) rename {src => src_old}/typecheck/types/TStruct.ts (53%) rename {src => 
src_old}/typecheck/types/TTuple.ts (50%) rename {src => src_old}/typecheck/types/TType.ts (69%) rename {src => src_old}/typecheck/types/TUnion.ts (53%) rename {src => src_old}/typecheck/types/TUnknown.ts (100%) rename {src => src_old}/typecheck/types/index.ts (90%) create mode 100644 src_old/typecheck/visitors/AssertAssignmentVisitor.ts rename {src => src_old}/typecheck/visitors/AssignmentVisitor.ts (95%) create mode 100644 src_old/typecheck/visitors/CloneVisitor.ts create mode 100644 src_old/typecheck/visitors/ITypeVisitor.ts rename {src => src_old}/typecheck/visitors/InferTypeArgsVisitor.ts (89%) rename {src => src_old}/typecheck/visitors/IsVisitors.ts (81%) create mode 100644 src_old/typecheck/visitors/SpecifyTypeVisitor.ts create mode 100644 src_old/utils/OrderedMap.ts rename {src => src_old}/utils/Scope.ts (100%) rename {src => src_old}/utils/preVisit.ts (100%) create mode 100644 test/setup.js diff --git a/istanbul.json b/istanbul.json deleted file mode 100644 index 6baef21..0000000 --- a/istanbul.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "instrumentation": { - "excludes": ["scripts/*.js", "dist/src/require-hook.js"] - } -} \ No newline at end of file diff --git a/package.json b/package.json index 8491437..0b0fb90 100644 --- a/package.json +++ b/package.json @@ -7,26 +7,30 @@ "build": "node scripts/build.js", "circular-deps": "yarn run build && madge --circular dist/src", "deps-graph": "yarn run build && madge --image graph.svg dist/src", - "test": "node scripts/test.js", + "test": "yarn run build && jest", "coverage": "istanbul --config=istanbul.json cover scripts/test.js" }, "repository": "https://github.com/jchitel/renlang.git", "author": "Jake Chitel ", "license": "MIT", + "jest": { + "globalSetup": "./test/setup.js", + "transform": { + "^.+\\.tsx?$": "ts-jest" + }, + "testRegex": "/test/.*\\.ts$", + "moduleFileExtensions": [ + "ts" + ] + }, "devDependencies": { - "@types/chai": "^4.0.4", - "@types/chai-subset": "^1.3.1", - "@types/mocha": "^2.2.43", - "@types/node": "^8.0.34", - "@types/sinon": "^2.3.7", - "chai": "^4.0.2", - "chai-subset": "^1.6.0", - "istanbul": "^1.0.0-alpha.2", - "madge": "^2.2.0", - "mocha": "^3.4.2", - "sinon": "^4.1.2", - "tslib": "^1.8.0", - "tslint": "^5.7.0", - "typescript": "^2.6.0" + "@types/jest": "^22.0.1", + "@types/node": "^9.3.0", + "jest": "^22.1.4", + "madge": "^3.0.0", + "ts-jest": "^22.0.1", + "tslib": "^1.9.0", + "tslint": "^5.9.1", + "typescript": "^2.8.3" } } diff --git a/scripts/test.js b/scripts/test.js deleted file mode 100644 index 7f8b55f..0000000 --- a/scripts/test.js +++ /dev/null @@ -1,24 +0,0 @@ -const Mocha = require('mocha'); -const { sync: glob } = require('glob'); -const { resolve } = require('path'); -const chai = require('chai'); - -const { run } = require('./util'); -require('../dist/src/require-hook'); -chai.use(require('chai-subset')); - - -run('build'); - -/** - * Process pulled from mocha/bin/_mocha - */ -const mocha = new Mocha(); -mocha.reporter('spec'); // default reporter -mocha.useColors(true); // we love colors! 
-mocha.ui('bdd'); // default ui -mocha.enableTimeouts(false); // disable timeouts cuz we be debuggin' -// add each file under generated test directory -glob('dist/test/**/*.js').map(f => resolve(f)).forEach(f => mocha.addFile(f)); -// run, using the default process for exiting that mocha uses -mocha.run(code => process.on('exit', () => process.exit(Math.min(code, 255)))); diff --git a/src/require-hook.js b/src/require-hook.js deleted file mode 100644 index ce0c432..0000000 --- a/src/require-hook.js +++ /dev/null @@ -1,26 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const Module = require("module"); -const req = Module.prototype.require; -/** - * We make use of typescript's "paths" option to allow us to not - * have to specify a bunch of ".." in module import paths. - * Having a simple path such as "~" as the root of the src/ directory - * looks much cleaner. However, typescript doesn't translate these - * imports to the proper paths, so we still need a mechanism to - * translate the imports at runtime. - * - * This file should be imported for any code that will run a module - * in this package. The src/index.ts file already imports it, - * and the test command is configured to do so as well. - */ -Module.prototype.require = function (path, ...args) { - let resolved = path; - if (path.startsWith('~/')) { - resolved = path.replace(/^~/, __dirname); - } - else if (path.startsWith('~test/')) { - resolved = path.replace(/^~test/, `${__dirname}/../test`); - } - return req.call(this, resolved, ...args); -}; diff --git a/src/syntax/INodeVisitor.ts b/src/syntax/INodeVisitor.ts deleted file mode 100644 index 2d1f78c..0000000 --- a/src/syntax/INodeVisitor.ts +++ /dev/null @@ -1,60 +0,0 @@ -import * as ast from '.'; - - -export default interface INodeVisitor { - // declarations - visitProgram(program: ast.Program): T; - visitImportDeclaration(decl: ast.ImportDeclaration): T; - visitTypeDeclaration(decl: ast.TypeDeclaration): T; - visitTypeParam(param: ast.TypeParam): T; - visitFunctionDeclaration(decl: ast.FunctionDeclaration): T; - visitParam(param: ast.Param): T; - visitLambdaParam(param: ast.LambdaParam): T; - visitConstantDeclaration(decl: ast.ConstantDeclaration): T; - visitExportDeclaration(decl: ast.ExportDeclaration): T; - visitExportForwardDeclaration(decl: ast.ExportForwardDeclaration): T; - - // types - visitBuiltInType(type: ast.BuiltInType): T; - visitIdentifierType(type: ast.IdentifierType): T; - visitArrayType(type: ast.ArrayType): T; - visitFunctionType(type: ast.FunctionType): T; - visitParenthesizedType(type: ast.ParenthesizedType): T; - visitSpecificType(type: ast.SpecificType): T; - visitStructType(type: ast.StructType): T; - visitTupleType(type: ast.TupleType): T; - visitUnionType(type: ast.UnionType): T; - visitNamespaceAccessType(type: ast.NamespaceAccessType): T; - - // statements - visitBlock(block: ast.Block): T; - visitExpressionStatement(exp: ast.ExpressionStatement): T; - visitBreakStatement(stmt: ast.BreakStatement): T; - visitContinueStatement(stmt: ast.ContinueStatement): T; - visitDoWhileStatement(stmt: ast.DoWhileStatement): T; - visitForStatement(stmt: ast.ForStatement): T; - visitReturnStatement(stmt: ast.ReturnStatement): T; - visitThrowStatement(stmt: ast.ThrowStatement): T; - visitTryCatchStatement(stmt: ast.TryCatchStatement): T; - visitWhileStatement(stmt: ast.WhileStatement): T; - - // expressions - visitBoolLiteral(lit: ast.BoolLiteral): T; - visitCharLiteral(lit: ast.CharLiteral): T; - visitFloatLiteral(lit: 
ast.FloatLiteral): T; - visitIntegerLiteral(lit: ast.IntegerLiteral): T; - visitStringLiteral(lit: ast.StringLiteral): T; - visitIdentifierExpression(exp: ast.IdentifierExpression): T; - visitArrayAccess(acc: ast.ArrayAccess): T; - visitArrayLiteral(lit: ast.ArrayLiteral): T; - visitBinaryExpression(exp: ast.BinaryExpression): T; - visitFieldAccess(acc: ast.FieldAccess): T; - visitFunctionApplication(app: ast.FunctionApplication): T; - visitIfElseExpression(exp: ast.IfElseExpression): T; - visitLambdaExpression(exp: ast.BaseLambdaExpression): T; - visitParenthesizedExpression(exp: ast.ParenthesizedExpression): T; - visitStructLiteral(lit: ast.StructLiteral): T; - visitTupleLiteral(lit: ast.TupleLiteral): T; - visitUnaryExpression(exp: ast.UnaryExpression): T; - visitVarDeclaration(decl: ast.VarDeclaration): T; -} diff --git a/src/typecheck/TypeCheckError.ts b/src/typecheck/TypeCheckError.ts deleted file mode 100644 index 7cf8eda..0000000 --- a/src/typecheck/TypeCheckError.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Location } from '~/parser/Tokenizer'; - - -export default class TypeCheckError extends Error { - file: string; - location: Location; - - constructor(message: string, file: string, location: Location) { - super(TypeCheckError.constructMessage(message, file, location)); - this.file = file; - this.location = location; - } - - static constructMessage(message: string, file: string, location: Location) { - return `${message} [${file}:${location.startLine}:${location.startColumn}]`; - } -} diff --git a/src/typecheck/TypeChecker.ts b/src/typecheck/TypeChecker.ts deleted file mode 100644 index 6effb65..0000000 --- a/src/typecheck/TypeChecker.ts +++ /dev/null @@ -1,411 +0,0 @@ -import Module, { ModuleElement } from '~/runtime/Module'; -import TypeCheckError from './TypeCheckError'; -import * as mess from './TypeCheckerMessages'; -import { TType, TUnknown, TRecursive, TNamespace } from './types'; -import { Location } from '~/parser/Tokenizer'; -import { TypeCheckVisitor } from './visitors'; -import { Program, TypeDeclaration, FunctionDeclaration, ConstantDeclaration, - ExportDeclaration, ExportForwardDeclaration, ImportDeclaration } from '~/syntax'; - - -export type SymbolTable = { [symbol: string]: T }; - -/** - * Semantic analysis class - */ -export default class TypeChecker { - // Module table - modules: Module[]; - moduleCache: { [path: string]: number }; - // list of errors gathered during type checking - errors: TypeCheckError[]; - // pointer to the main module - mainModule: Module; - - constructor() { - // array of all modules loaded in the application, where each index is the id of the module - this.modules = []; - // object mapping absolute paths of modules to the corresponding module id - this.moduleCache = {}; - // list of errors to emit in the event of type check errors - this.errors = []; - } - - /** - * Helper function to add a type check error and return a resolved type at the same time. - * Because most errors result in an unknown type, the default is TUnknown. - */ - pushError(message: string, modulePath: string, location: Location, resolvedType: TType = new TUnknown()) { - this.errors.push(new TypeCheckError(message, modulePath, location)); - return resolvedType; - } - - /** - * Perform semantic analysis on the program, starting with the AST and file path of the - * entry point (known as "main") module. - * The outputted value will be a table of all modules in the program, - * with the main module at position 0. 
- */ - check(mainAst: Program, mainModulePath: string) { - // create a module for the main AST - this.mainModule = new Module(0, mainModulePath, mainAst); - this.modules.push(this.mainModule); - this.moduleCache = { [mainModulePath]: 0 }; - // process all declarations, recursively traversing all modules - this.processDeclarations(this.mainModule); - // analyze types of declarations, type check expressions and statements - this.resolveTypes(); - if (this.errors.length) { - // if there were any errors, throw a combined one - throw new Error(this.errors.map(e => e.message).join('\n')); - } - // the program is now type checked and all modules are loaded. Return them. - return this.modules; - } - - /** - * Process all name declarations in a module. - * This will organize all imports and exports of a module. - * It will also organize all available names in the module into one of three categories: - * types, functions, or constants. - * When this is done, all modules in the environment will be loaded, - * and all available names in each module will be available for access. - */ - processDeclarations(module: Module) { - for (const imp of module.ast.imports) this.processImport(module, imp); - for (const decl of module.ast.declarations) { - if (decl instanceof TypeDeclaration) this.processType(module, decl); - else if (decl instanceof FunctionDeclaration) this.processFunction(module, decl); - else if (decl instanceof ConstantDeclaration) this.processConstant(module, decl); - else if (decl instanceof ExportDeclaration) this.processExport(module, decl); - else if (decl instanceof ExportForwardDeclaration) this.processForward(module, decl); - } - } - - /** - * Traverse each module, resolving the type of every declaration - * and performing type checking for all statements and expressions. - * Ignore imported declarations because they don't exist in the module. - * Those will be resolved as they are used. - */ - resolveTypes() { - for (const module of this.modules) { - // types, functions, and constants need to be resolved - const toResolve = [...Object.values(module.types), ...Object.values(module.functions), ...Object.values(module.constants)].filter(t => !t.imported); - for (const decl of toResolve) { - this.resolveType(module, decl); - } - } - } - - // ////////////////////////// - // PROCESSING DECLARATIONS // - // ////////////////////////// - - private resolveModule(sourceModule: Module, moduleName: string): Optional { - // resolve the module's path - const importPath = sourceModule.resolvePath(moduleName); - if (!importPath) return null; - // load the module. if it has been loaded already, get it from the cache - let imported; - if (!this.moduleCache[importPath]) { - imported = new Module(this.modules.length, importPath); - this.modules.push(imported); - this.moduleCache[importPath] = imported.id; - // this is a new module, so we need to process it before we can proceed - this.processDeclarations(imported); - } else { - imported = this.modules[this.moduleCache[importPath]]; - } - return imported; - } - - /** - * Process an import of a module. - * This will load the imported module (if not already loaded), - * determine the validity of each imported name, - * and organize each imported value into the modules symbol tables. 
- */ - processImport(module: Module, imp: ImportDeclaration) { - // resolve the module - const imported = this.resolveModule(module, imp.moduleName); - if (!imported) { - // invalid module path specified - this.errors.push(new TypeCheckError(mess.MODULE_PATH_NOT_EXIST(imp.moduleName), module.path, imp.locations.moduleName)); - return; - } - // process the imports - for (const { importName, importLocation, aliasName, aliasLocation } of imp.imports) { - // verify that the module exports the name, only if it isn't a wildcard import - if (importName !== '*' && !imported.exports[importName]) { - this.pushError(mess.MODULE_DOESNT_EXPORT_NAME(imp.moduleName, importName), module.path, importLocation); - continue; - } - // verify that the alias doesn't already exist; imports always come first, so we only need to check imports - if (module.imports[aliasName]) { - this.pushError(mess.NAME_CLASH(aliasName), module.path, aliasLocation); - continue; - } - // valid import, create an import entry linking the import to the export - module.imports[aliasName] = { - moduleId: imported.id, - exportName: importName, - kind: importName !== '*' ? imported.exports[importName].kind : 'namespace', - ast: imp - }; - // add the value to the appropriate table with a flag indicating that it exists in another module - switch (module.imports[aliasName].kind) { - case 'type': module.types[aliasName] = { imported: true } as ModuleElement; break; - case 'func': module.functions[aliasName] = { imported: true } as ModuleElement; break; - case 'const': module.constants[aliasName] = { imported: true } as ModuleElement; break; - case 'namespace': module.namespaces[aliasName] = imported.id; break; - } - } - } - - /** - * Process a type declared in a module. - * This will determine the validity of the type name - * and organize it into the type symbol table in the module. - */ - processType(module: Module, typ: TypeDeclaration) { - const name = typ.name; - // handle name clashes - if (module.types[name]) { - this.errors.push(new TypeCheckError(mess.NAME_CLASH(name), module.path, typ.locations.name)); - return; - } - if (module.functions[name]) this.addNameClash(name, module.path, module.functions[name].ast.locations.name, typ.locations.name); - else if (module.constants[name]) this.addNameClash(name, module.path, module.constants[name].ast.locations.name, typ.locations.name); - module.types[name] = { ast: typ } as ModuleElement; - } - - /** - * Process a function declared in a module. - * This will determine the validity of the function name - * and organize it into the function symbol table in the module. - */ - processFunction(module: Module, func: FunctionDeclaration) { - const name = func.name; - // handle name clashes - if (module.functions[name]) { - this.errors.push(new TypeCheckError(mess.NAME_CLASH(name), module.path, func.locations.name)); - return; - } - if (module.types[name]) this.addNameClash(name, module.path, module.types[name].ast.locations.name, func.locations.name); - else if (module.constants[name]) this.addNameClash(name, module.path, module.constants[name].ast.locations.name, func.locations.name); - module.functions[name] = { ast: func } as ModuleElement; - } - - /** - * Process a constant declared in a module. - * This will determine the validity of the constant name - * and organize it into the constant symbol table in the module. 
- */ - processConstant(module: Module, con: ConstantDeclaration) { - const name = con.name; - // handle name clashed - if (module.constants[name]) { - this.errors.push(new TypeCheckError(mess.NAME_CLASH(name), module.path, con.locations.name)); - return; - } - if (module.types[name]) this.addNameClash(name, module.path, module.types[name].ast.locations.name, con.locations.name); - else if (module.functions[name]) this.addNameClash(name, module.path, module.functions[name].ast.locations.name, con.locations.name); - module.constants[name] = { ast: con } as ModuleElement; - } - - /** - * Process an export declared in a module. - * This will determine the validity of the export name and the exported value's name, - * and organize the exported value into the module's symbol tables. - */ - processExport(module: Module, exp: ExportDeclaration) { - for (const { exportName, exportNameLocation, valueName, valueNameLocation, value } of exp.exports) { - // exports are in their own scope, so we only need to check against export names - if (module.exports[exportName]) { - this.pushError(mess.EXPORT_CLASH(exportName), module.path, exportNameLocation); - continue; - } - // determine the kind and match it with a value - if (value) { - // inline export, the value will need to be added to the module, special name used for default export - if (value instanceof TypeDeclaration) { - module.exports[exportName] = { kind: 'type', valueName }; - this.processType(module, value); - } else if (value instanceof FunctionDeclaration) { - module.exports[exportName] = { kind: 'func', valueName }; - this.processFunction(module, value); - } else if (value instanceof ConstantDeclaration) { - module.exports[exportName] = { kind: 'const', valueName }; - this.processConstant(module, value); - } - } else if (valueName) { // this will always be true if there is no inline value - // export of existing value, get the kind from that value - if (module.imports[valueName]) { - // re-export of an import, get the kind from the import - module.exports[exportName] = { kind: module.imports[valueName].kind, valueName: valueName }; - } else if (module.types[valueName]) { - module.exports[exportName] = { kind: 'type', valueName }; - } else if (module.functions[valueName]) { - module.exports[exportName] = { kind: 'func', valueName }; - } else if (module.constants[valueName]) { - module.exports[exportName] = { kind: 'const', valueName }; - } else { - // exporting a non-declared value - this.pushError(mess.VALUE_NOT_DEFINED(valueName), module.path, valueNameLocation!); - } - } - } - } - - /** - * Process an export forward declared in a module. - * This will determine the validity of the export name nad the import name, - * and organize the import and export into the module's symbol tables. 
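processExport() above classifies a re-exported name by probing the module's symbol tables in a fixed order: imports first (so a re-exported import keeps its original kind), then types, functions and constants, and it reports VALUE_NOT_DEFINED when nothing matches. A reduced sketch of just that classification step; the table shapes and the classifyExport name are simplified assumptions, not the project's types.

    // Simplified sketch of how a re-exported name is classified in processExport().
    type ExportKind = 'type' | 'func' | 'const' | 'namespace';

    interface ModuleTables {
        imports: { [name: string]: { kind: ExportKind } };
        types: { [name: string]: unknown };
        functions: { [name: string]: unknown };
        constants: { [name: string]: unknown };
    }

    /** Returns the kind of an already-declared value, or null if the name is not defined. */
    function classifyExport(module: ModuleTables, valueName: string): ExportKind | null {
        // a re-exported import keeps the kind recorded when the import was processed
        if (valueName in module.imports) return module.imports[valueName].kind;
        if (valueName in module.types) return 'type';
        if (valueName in module.functions) return 'func';
        if (valueName in module.constants) return 'const';
        return null;   // exporting a non-declared value is reported as VALUE_NOT_DEFINED
    }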
- */ - processForward(module: Module, fwd: ExportForwardDeclaration) { - // resolve the module - const imported = this.resolveModule(module, fwd.moduleName); - if (!imported) { - // invalid module path specified - this.errors.push(new TypeCheckError(mess.MODULE_PATH_NOT_EXIST(fwd.moduleName), module.path, fwd.locations.moduleName)); - return; - } - // process the forwards, both as an import and as an export - for (const { importName, importLocation, exportName, exportLocation } of fwd.forwards) { - // verify that the module exports the name, only if it is not a wildcard import - if (importName !== '*' && !imported.exports[importName]) { - this.pushError(mess.MODULE_DOESNT_EXPORT_NAME(fwd.moduleName, importName), module.path, importLocation); - continue; - } - // verify that the export isn't already declared, only if it is not a wildcard export - if (exportName !== '*' && module.exports[exportName]) { - this.pushError(mess.EXPORT_CLASH(exportName), module.path, exportLocation); - continue; - } - if (exportName !== '*') { - // valid forward, create an import and export entry, use the module name and the import name as a dummy ID - const dummyImport = `"${fwd.moduleName}"_${importName}`; - module.imports[dummyImport] = { - moduleId: imported.id, - exportName: importName, - kind: importName !== '*' ? imported.exports[importName].kind : 'namespace', - ast: fwd - }; - module.exports[exportName] = { kind: module.imports[dummyImport].kind, valueName: dummyImport }; - } else { - // wildcard export, this forwards ALL exports of the forwarded module - for (const imp of Object.keys(imported.exports)) { - // verify that the forward isn't already exported - if (module.exports[imp]) { - this.pushError(mess.EXPORT_CLASH(imp), module.path, exportLocation); - continue; - } - // valid, setup forward entries - const dummyImport = `"${fwd.moduleName}"_${imp}`; - module.imports[dummyImport] = { moduleId: imported.id, exportName: imp, kind: imported.exports[imp].kind, ast: fwd }; - module.exports[imp] = { kind: module.imports[dummyImport].kind, valueName: dummyImport }; - } - } - } - } - - private addNameClash(name: string, path: string, loc1: Location, loc2: Location) { - // set the error on whichever comes last - if (loc1.startLine < loc2.startLine || (loc1.startLine === loc2.startLine && loc1.startColumn < loc2.startColumn)) { - this.errors.push(new TypeCheckError(mess.NAME_CLASH(name), path, loc2)); - } else { - this.errors.push(new TypeCheckError(mess.NAME_CLASH(name), path, loc1)); - } - } - - // ////////////////// - // RESOLVING TYPES // - // ////////////////// - - /** - * Type check a declaration. - * Do nothing if is has already been checked. - * If it is already resolving, we have a circular dependency that can't be resolved, which is an error. - * Otherwise, it hasn't been resolved yet, and we visit the top level of the declaration's AST. - * If a type resolution reaches a name, it will resolve that name in place, calling either getType() or getValueType() below. - * To prevent double resolution, we track which ones have already been resolved. 
- */ - resolveType(module: Module, decl: ModuleElement) { - if (decl.ast.type) return decl.ast.type; // resolved already - if (decl.resolving) { - // type recursion is handled in getType(), so this will only happen for recursively defined constants - this.errors.push(new TypeCheckError(mess.CIRCULAR_DEPENDENCY, module.path, decl.ast.locations.self)); - // set the type to Unknown so that this error only occurs once - decl.ast.type = new TUnknown(); - return decl.ast.type; - } - if (decl.ast instanceof FunctionDeclaration) { - // function declarations can be recursive, and they always contain their type right in their declaration - decl.ast.visit(new TypeCheckVisitor(this, module)); - } else { - // Set a flag on each declaration as we resolve it so that we can track circular dependencies - decl.resolving = true; - decl.ast.visit(new TypeCheckVisitor(this, module)); - decl.resolving = false; - } - return decl.ast.type; - } - - /** - * Given a module and the name of a type, get the Type instance of the type. - * The type may exist in another module, so this method resolves imports and exports - * to track down the actual declaration. - * The type is also resolved here if it hasn't been already. - */ - getType(module: Module, name: string): Optional { - if (module.namespaces.hasOwnProperty(name)) { - // namespaces can be present in types - return new TNamespace(module.namespaces[name]); - } - const type = module.types[name]; - if (!type) return null; - if (type.imported) { - // type is imported, resolve the import to the corresponding export in the imported module - const imp = module.imports[name]; - const importedModule = this.modules[imp.moduleId]; - const exp = importedModule.exports[imp.exportName]; - // get the type from that module, recursively so that we can handle forwarded imports - return this.getType(importedModule, exp.valueName); - } else { - // the type was declared in this module, return it if it has already been type checked - if (type.ast.type) return type.ast.type; - // if the type is resolving, we have a recursive type, return the recursive reference because we don't have an actual type yet - if (type.resolving) return new TRecursive(type.ast); - // otherwise resolve it and return the resolved type - return this.resolveType(module, type); - } - } - - /** - * Given a module and the name of some value (either a function or a constant), get the Type instance of the value. - * The value may exist in another module, so this method resolves imports and exports - * to track down the actual declaration. - * The type is also resolves here if it hasn't been already. 
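getType() above resolves an imported name by hopping through the import entry to the exporting module's export table and then recursing, so a chain of forwards eventually bottoms out in the module that actually declares the type. A reduced sketch of that link-following step; ModuleLike, lookupType and the string-valued localTypes table are illustrative simplifications, not the real TType machinery.

    // Simplified sketch of following import/export links across modules, as in getType().
    interface ImportEntry { moduleId: number; exportName: string; }

    interface ModuleLike {
        imports: { [name: string]: ImportEntry };
        exports: { [name: string]: { valueName: string } };
        localTypes: { [name: string]: string };   // name -> resolved type (a string stands in for TType)
    }

    function lookupType(modules: ModuleLike[], module: ModuleLike, name: string): string | null {
        const imp = module.imports[name];
        if (imp) {
            // imported name: hop to the exporting module and resolve there, recursively,
            // so forwarded imports (A imports from B, B forwards from C) still resolve
            const exporting = modules[imp.moduleId];
            const exp = exporting.exports[imp.exportName];
            return lookupType(modules, exporting, exp.valueName);
        }
        // otherwise the name is declared locally, or not at all
        return module.localTypes[name] ?? null;
    }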
- */ - getValueType(module: Module, name: string): Optional { - if (module.namespaces.hasOwnProperty(name)) { - return new TNamespace(module.namespaces[name]); - } - const value = module.functions[name] || module.constants[name]; - if (!value) return null; - if (value.imported) { - // value is imported, resolve the import to the corresponding export in the imported module - const imp = module.imports[name]; - const importedModule = this.modules[imp.moduleId]; - const exp = importedModule.exports[imp.exportName]; - // get the value from that module, recursively so that we can handle forwarded imports - return this.getValueType(importedModule, exp.valueName); - } else { - // the value was declared in this module, return it if it has already been type checked - if (value.ast.type) return value.ast.type; - // otherwise resolve it and return the resolved type - return this.resolveType(module, value); - } - } -} diff --git a/src/typecheck/TypeCheckerMessages.ts b/src/typecheck/TypeCheckerMessages.ts deleted file mode 100644 index 084de2e..0000000 --- a/src/typecheck/TypeCheckerMessages.ts +++ /dev/null @@ -1,26 +0,0 @@ -import { TType } from './types'; - - -export const MODULE_PATH_NOT_EXIST = (path: string) => `Module "${path}" does not exist`; -export const MODULE_DOESNT_EXPORT_NAME = (path: string, name: string) => `Module "${path}" does not have an export with name "${name}"`; -export const NAME_CLASH = (name: string) => `A value with name "${name}" is already declared`; -export const EXPORT_CLASH = (name: string) => `An export with name "${name}" is already declared`; -export const VALUE_NOT_DEFINED = (name: string) => `Value "${name}" is not defined`; -export const TYPE_NOT_DEFINED = (name: string) => `Type "${name}" is not defined`; -export const CIRCULAR_DEPENDENCY = 'Cannot resolve type, circular dependency found'; -export const TYPE_MISMATCH = (actual: TType, expected: string) => `Type "${actual}" is not assignable to type "${expected}"`; -export const INVALID_BREAK_STATEMENT = '"break" statement cannot be present outside loop'; -export const INVALID_LOOP_NUM = (actual: number, expected: number) => `Invalid loop number ${actual} in loop with depth ${expected}`; -export const INVALID_CONTINUE_STATEMENT = '"continue" statement cannot be present outside loop'; -export const INVALID_UNARY_OPERATOR = (oper: string, type: TType) => `Operator "${oper}" does not operate on type "${type}"`; -export const INVALID_BINARY_OPERATOR = (oper: string, left: TType, right: TType) => `Operator "${oper}" does not operate on types "${left}" and "${right}"`; -export const NOT_INVOKABLE = 'Cannot invoke a value that is not a function'; -export const NOT_STRUCT = 'Cannot access field of a value that is not a struct or a namespace'; -export const NOT_ARRAY = 'Cannot access index of a value that is not an array'; -export const CONFLICTING_ASSOCIATIVITY = (oper1: string, oper2: string) => `Precedence order between operators "${oper1}" and "${oper2}" could not be established because they have conflicting associativity`; -export const INVALID_ARG_COUNT = (expected: number, actual: number) => `Invalid argument count: expected ${expected}, actual ${actual}`; -export const NOT_GENERIC = 'Type is not generic'; -export const NOT_GENERIC_FUNCTION = 'Function is not generic'; -export const INVALID_TYPE_ARG = (type: TType, name: string, constraint: TType) => `Type "${type}" is not assignable to type parameter "${name}" with constraint "${constraint}"`; -export const INVALID_TYPE_ARG_COUNT = (expected: number, actual: 
number) => `Invalid type argument count: expected ${expected}, actual ${actual}`; -export const NOT_NAMESPACE = (name: string) => `Value "${name}" is not a namespace`; diff --git a/src/typecheck/types/OrderedMap.ts b/src/typecheck/types/OrderedMap.ts deleted file mode 100644 index 745064f..0000000 --- a/src/typecheck/types/OrderedMap.ts +++ /dev/null @@ -1,44 +0,0 @@ -export default class OrderedMap { - fieldOrder: string[]; - private _values: { [key: string]: V }; - - constructor() { - this.fieldOrder = []; - this._values = {}; - } - - add(key: string, value: V) { - this.fieldOrder.push(key); - this._values[key] = value; - } - - getKey(i: number) { - return this.fieldOrder[i]; - } - - get(key: string) { - return this._values[key]; - } - - getValue(i: number) { - return this.get(this.getKey(i)); - } - - get length() { - return this.fieldOrder.length; - } - - keys() { - return this.fieldOrder; - } - - values() { - return [...this]; - } - - *[Symbol.iterator]() { - for (const key of this.fieldOrder) { - yield this._values[key]; - } - } -} diff --git a/src/typecheck/types/TArray.ts b/src/typecheck/types/TArray.ts deleted file mode 100644 index 1161cbf..0000000 --- a/src/typecheck/types/TArray.ts +++ /dev/null @@ -1,23 +0,0 @@ -import TType from './TType'; -import ITypeVisitor from '~/typecheck/visitors'; - - -/** - * Array type, variable sized list of homogeneous values (only one type). - */ -export default class TArray extends TType { - baseType: TType; - - constructor(baseType: TType) { - super(); - this.baseType = baseType; - } - - visit(visitor: ITypeVisitor) { - return visitor.visitArray(this); - } - - toString() { - return this.baseType ? `${this.baseType}[]` : '?[]'; - } -} \ No newline at end of file diff --git a/src/typecheck/types/TBool.ts b/src/typecheck/types/TBool.ts deleted file mode 100644 index b0c16c8..0000000 --- a/src/typecheck/types/TBool.ts +++ /dev/null @@ -1,17 +0,0 @@ -import TType from './TType'; -import ITypeVisitor from '~/typecheck/visitors'; - - -/** - * Boolean type, contains two values: true and false. - * Has a wide array of uses. - */ -export default class TBool extends TType { - visit(visitor: ITypeVisitor) { - return visitor.visitBool(this); - } - - toString() { - return 'bool'; - } -} \ No newline at end of file diff --git a/src/typecheck/types/TChar.ts b/src/typecheck/types/TChar.ts deleted file mode 100644 index 71bc2c7..0000000 --- a/src/typecheck/types/TChar.ts +++ /dev/null @@ -1,17 +0,0 @@ -import TType from './TType'; -import ITypeVisitor from '~/typecheck/visitors'; - - -/** - * Unicode character type, represents the set of unicode characters. - * There is only one possible character type. - */ -export default class TChar extends TType { - visit(visitor: ITypeVisitor) { - return visitor.visitChar(this); - } - - toString() { - return 'char'; - } -} \ No newline at end of file diff --git a/src/typecheck/types/TGeneric.ts b/src/typecheck/types/TGeneric.ts deleted file mode 100644 index 1f03d3d..0000000 --- a/src/typecheck/types/TGeneric.ts +++ /dev/null @@ -1,42 +0,0 @@ -import TType from './TType'; -import TParam from './TParam'; -import { SymbolTable } from '~/typecheck/TypeCheckContext'; -import OrderedMap from './OrderedMap'; -import ITypeVisitor from '~/typecheck/visitors'; - - -/** - * Represents a type with type params. - * 'typeParams' is an object mapping the type parameter names to TParam types. - * 'type' is the definition of the type, which makes use of the type parameters. 
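The OrderedMap deleted above is an insertion-ordered map: add() records both the key order and the value, so positional lookups (getKey/getValue) and iteration follow insertion order rather than key order, which is how generic type parameters keep their declared order. A small usage sketch of that contract; the class body here is a compressed restatement for illustration, not the project file.

    // Usage sketch of the insertion-ordered map contract described above.
    class OrderedMap<V> {
        private fieldOrder: string[] = [];
        private _values: { [key: string]: V } = {};

        add(key: string, value: V) { this.fieldOrder.push(key); this._values[key] = value; }
        get(key: string) { return this._values[key]; }
        getKey(i: number) { return this.fieldOrder[i]; }
        getValue(i: number) { return this.get(this.getKey(i)); }
        get length() { return this.fieldOrder.length; }
        *[Symbol.iterator]() { for (const key of this.fieldOrder) yield this._values[key]; }
    }

    const params = new OrderedMap<string>();
    params.add('T', 'covariant');
    params.add('U', 'invariant');
    params.getKey(0);     // 'T': positional lookups follow insertion order
    params.getValue(1);   // 'invariant'
    [...params];          // ['covariant', 'invariant']: iteration yields values in insertion order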
- */ -export default class TGeneric extends TType { - typeParams: OrderedMap; - type: TType; - - constructor(typeParams: OrderedMap, type: TType) { - super(); - this.typeParams = typeParams; - this.type = type; - } - - visit(visitor: ITypeVisitor) { - return visitor.visitGeneric(this); - } - - /** - * Here, we need to clone the type definition and visit it, specifying - * all instances of TParam. This is where we check the type constraint. - */ - specifyGenericType(args: TType[]) { - const specific = this.type.clone(); - // create map of param name -> provided arg - const argMap: SymbolTable = {}; - for (let i = 0; i < args.length; ++i) { - const name = this.typeParams.getKey(i); - argMap[name] = this.typeParams.get(name).createTypeArg(args[i]); - } - // visit the type with the map so that params can be replaced with actual types - return specific.specifyTypeParams(argMap); - } -} \ No newline at end of file diff --git a/src/typecheck/types/TInferred.ts b/src/typecheck/types/TInferred.ts deleted file mode 100644 index cb28f89..0000000 --- a/src/typecheck/types/TInferred.ts +++ /dev/null @@ -1,9 +0,0 @@ -import TType from './TType'; -import ITypeVisitor from '~/typecheck/visitors'; - - -export default class TInferred extends TType { - visit(visitor: ITypeVisitor): T { - return visitor.visitInferred(this); - } -} \ No newline at end of file diff --git a/src/typecheck/types/TParam.ts b/src/typecheck/types/TParam.ts deleted file mode 100644 index 535490f..0000000 --- a/src/typecheck/types/TParam.ts +++ /dev/null @@ -1,45 +0,0 @@ -import TType from './TType'; -import ITypeVisitor from '~/typecheck/visitors'; - - -export type Variance = 'covariant' | 'contravariant' | 'invariant'; - -/** - * Represents the type of an untyped type parameter, used in TGeneric and wherever - * a type parameters is used. 
- */ -export default class TParam extends TType { - name: string; - variance: Variance; - constraint: TType; - - constructor(name: string, variance: Variance, constraint: TType) { - super(); - this.name = name; - this.variance = variance; - this.constraint = constraint; - } - - visit(visitor: ITypeVisitor) { - return visitor.visitParam(this); - } - - createTypeArg(t: TType) { - return new TArg(this, t); - } -} - -export class TArg extends TType { - variance: Variance; - type: TType; - - constructor(param: TParam, type: TType) { - super(); - this.variance = param.variance; - this.type = type; - } - - visit(visitor: ITypeVisitor) { - return visitor.visitArg(this); - } -} diff --git a/src/typecheck/visitors/ITypeVisitor.ts b/src/typecheck/visitors/ITypeVisitor.ts deleted file mode 100644 index aaebe96..0000000 --- a/src/typecheck/visitors/ITypeVisitor.ts +++ /dev/null @@ -1,35 +0,0 @@ -import { - TInteger, TFloat, TChar, TBool, TArray, TStruct, TTuple, TFunction, - TGeneric, TParam, TArg, TUnion, TAny, TNever, TRecursive, TInferred, - TNamespace -} from '~/typecheck/types'; - - -export default interface ITypeVisitor { - // primitive types - visitInteger(type: TInteger): T; - visitFloat(type: TFloat): T; - visitChar(type: TChar): T; - visitBool(type: TBool): T; - - // structured types - visitArray(type: TArray): T; - visitStruct(type: TStruct): T; - visitTuple(type: TTuple): T; - - // complex types - visitFunction(type: TFunction): T; - visitGeneric(type: TGeneric): T; - visitParam(type: TParam): T; - visitArg(type: TArg): T; - visitUnion(type: TUnion): T; - - // special types - visitAny(type: TAny): T; - visitNever(type: TNever): T; - - // hidden types - visitRecursive(type: TRecursive): T; - visitInferred(type: TInferred): T; - visitNamespace(type: TNamespace): T; -} diff --git a/src/typecheck/visitors/SpecifyTypeVisitor.ts b/src/typecheck/visitors/SpecifyTypeVisitor.ts deleted file mode 100644 index 95f69cf..0000000 --- a/src/typecheck/visitors/SpecifyTypeVisitor.ts +++ /dev/null @@ -1,94 +0,0 @@ -import ITypeVisitor from './ITypeVisitor'; -import { - TType, TInteger, TFloat, TChar, TBool, TArray, TStruct, TTuple, TFunction, - TGeneric, TParam, TArg, TUnion, TAny, TNever, TRecursive, TInferred -} from '~/typecheck/types'; -import { SymbolTable } from '~/typecheck/TypeCheckContext'; - - -/** - * This visitor specifies generic types, which must happen - * every time a generic type is used in order to resolve the - * usage to a specific type. - * - * The main goal of this process is to replace usages of type - * parameters in a generic type with the provided type arguments - * corresponding to those type parameters. - * See "visitParam()" for that logic. - * All other logic is simply to clone the current type and visit - * all component types within those types, if any exist. 
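SpecifyTypeVisitor, described above and implemented next, is a structural walk: composite types are cloned with their component types revisited, and the only interesting leaf is a type parameter, which is swapped for the argument bound to its name. A toy model of that substitution over a deliberately reduced Type shape; the union members and the specify name are assumptions made for illustration.

    // Toy model of specify-type substitution: replace named params with bound arguments.
    // This reduced Type union is a stand-in for the project's TType hierarchy.
    type Type =
        | { kind: 'int' }
        | { kind: 'array'; element: Type }
        | { kind: 'function'; params: Type[]; returns: Type }
        | { kind: 'param'; name: string };

    function specify(type: Type, args: { [name: string]: Type }): Type {
        switch (type.kind) {
            case 'int': return type;                                    // primitives: nothing to substitute
            case 'array': return { kind: 'array', element: specify(type.element, args) };
            case 'function': return {
                kind: 'function',
                params: type.params.map(p => specify(p, args)),
                returns: specify(type.returns, args),
            };
            case 'param': return args[type.name];                       // the leaf: substitute the bound argument
        }
    }

    // specify(array-of-T, { T: int }) yields array-of-int
    const listOfT: Type = { kind: 'array', element: { kind: 'param', name: 'T' } };
    specify(listOfT, { T: { kind: 'int' } });   // { kind: 'array', element: { kind: 'int' } }

Running specify on an array-of-T with { T: int } yields an array-of-int; as in the visitor above, the param case is the only place any real work happens.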
- */ -export default class SpecifyTypeVisitor implements ITypeVisitor { - args: SymbolTable; - - constructor(args: SymbolTable) { - this.args = args; - } - - // primitive types have no component types, so we leave them alone - visitInteger(type: TInteger): TType { return type.clone(); } - visitFloat(type: TFloat): TType { return type.clone(); } - visitChar(type: TChar): TType { return type.clone(); } - visitBool(type: TBool): TType { return type.clone(); } - - visitArray(type: TArray): TType { - const specific = type.clone(); - specific.baseType = specific.baseType.visit(this); - return specific; - } - - visitStruct(type: TStruct): TType { - const specific = type.clone(); - specific.fields = {}; - for (const k of Object.keys(type.fields)) { - specific.fields[k] = type.fields[k].visit(this); - } - return specific; - } - - visitTuple(type: TTuple): TType { - const specific = type.clone(); - specific.types = specific.types.map(t => t.visit(this)); - return specific; - } - - visitFunction(type: TFunction): TType { - const specific = type.clone(); - specific.paramTypes = specific.paramTypes.map(t => t.visit(this)); - specific.returnType = specific.returnType.visit(this); - return specific; - } - - visitGeneric(_type: TGeneric): TType { - // this should never be called on a generic type - throw new Error("Method not implemented."); - } - - visitNamespace(): TType { - // this should never be called on a namespace - throw new Error("Method not implemented."); - } - - /** - * This is the "leaf" operation of this visitor. - * Once we reach a type parameter, we can use the provided args table - * to get the corresponding type provided for the parameter. - */ - visitParam(type: TParam): TType { - return this.args[type.name]; - } - - // already been specified, just return it - visitArg(type: TArg): TType { return type.clone(); } - - visitUnion(type: TUnion): TType { - const specific = type.clone(); - specific.types = specific.types.map(t => t.visit(this)); - return specific; - } - - visitAny(type: TAny): TType { return type.clone(); } - visitNever(type: TNever): TType { return type.clone(); } - visitRecursive(type: TRecursive): TType { return type.clone(); } - visitInferred(type: TInferred): TType { return type.clone(); } -} diff --git a/src/typecheck/visitors/index.ts b/src/typecheck/visitors/index.ts deleted file mode 100644 index 0de4039..0000000 --- a/src/typecheck/visitors/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import ITypeVisitor from './ITypeVisitor'; -export default ITypeVisitor; -export * from './IsVisitors'; -export { default as AssignmentVisitor } from './AssignmentVisitor'; -export { default as SpecifyTypeVisitor } from './SpecifyTypeVisitor'; -export { default as InferTypeArgsVisitor } from './InferTypeArgsVisitor'; -export { default as TypeCheckVisitor } from './TypeCheckVisitor'; diff --git a/src/utils/OrderedMap.ts b/src/utils/OrderedMap.ts deleted file mode 100644 index 9bfdb1c..0000000 --- a/src/utils/OrderedMap.ts +++ /dev/null @@ -1,59 +0,0 @@ -export default class OrderedMap { - fieldOrder: string[]; - private _values: { [key: string]: V }; - - constructor() { - this.fieldOrder = []; - this._values = {}; - } - - add(key: string, value: V) { - this.fieldOrder.push(key); - this._values[key] = value; - } - - getKey(i: number) { - return this.fieldOrder[i]; - } - - get(key: string) { - return this._values[key]; - } - - getValue(i: number) { - return this.get(this.getKey(i)); - } - - get length() { - return this.fieldOrder.length; - } - - keys() { - return this.fieldOrder; - } - - 
values() { - return [...this]; - } - - *[Symbol.iterator]() { - for (const key of this.fieldOrder) { - yield this._values[key]; - } - } - - some(predicate: (item: V) => bool) { - for (const i of this) { - if (predicate(i)) return true; - } - return false; - } - - map(mapper: (item: V, key?: string) => T) { - const map = new OrderedMap(); - for (const key of this.fieldOrder) { - map.add(key, mapper(this._values[key], key)); - } - return map; - } -} diff --git a/src/interpreter/Interpreter.ts b/src_old/interpreter/Interpreter.ts similarity index 100% rename from src/interpreter/Interpreter.ts rename to src_old/interpreter/Interpreter.ts diff --git a/src/interpreter/frames.ts b/src_old/interpreter/frames.ts similarity index 100% rename from src/interpreter/frames.ts rename to src_old/interpreter/frames.ts diff --git a/src/interpreter/index.ts b/src_old/interpreter/index.ts similarity index 100% rename from src/interpreter/index.ts rename to src_old/interpreter/index.ts diff --git a/src/parser/ParserError.ts b/src_old/parser/ParserError.ts similarity index 100% rename from src/parser/ParserError.ts rename to src_old/parser/ParserError.ts diff --git a/src/parser/ParserMessages.ts b/src_old/parser/ParserMessages.ts similarity index 100% rename from src/parser/ParserMessages.ts rename to src_old/parser/ParserMessages.ts diff --git a/src/runtime/instructions.ts b/src_old/runtime/instructions.ts similarity index 100% rename from src/runtime/instructions.ts rename to src_old/runtime/instructions.ts diff --git a/src/runtime/operators.ts b/src_old/runtime/operators.ts similarity index 100% rename from src/runtime/operators.ts rename to src_old/runtime/operators.ts diff --git a/src/runtime/types/RArray.ts b/src_old/runtime/types/RArray.ts similarity index 100% rename from src/runtime/types/RArray.ts rename to src_old/runtime/types/RArray.ts diff --git a/src/runtime/types/RBool.ts b/src_old/runtime/types/RBool.ts similarity index 100% rename from src/runtime/types/RBool.ts rename to src_old/runtime/types/RBool.ts diff --git a/src/runtime/types/RChar.ts b/src_old/runtime/types/RChar.ts similarity index 100% rename from src/runtime/types/RChar.ts rename to src_old/runtime/types/RChar.ts diff --git a/src/runtime/types/RFloat.ts b/src_old/runtime/types/RFloat.ts similarity index 100% rename from src/runtime/types/RFloat.ts rename to src_old/runtime/types/RFloat.ts diff --git a/src/runtime/types/RFunction.ts b/src_old/runtime/types/RFunction.ts similarity index 100% rename from src/runtime/types/RFunction.ts rename to src_old/runtime/types/RFunction.ts diff --git a/src/runtime/types/RInteger.ts b/src_old/runtime/types/RInteger.ts similarity index 100% rename from src/runtime/types/RInteger.ts rename to src_old/runtime/types/RInteger.ts diff --git a/src/runtime/types/RString.ts b/src_old/runtime/types/RString.ts similarity index 100% rename from src/runtime/types/RString.ts rename to src_old/runtime/types/RString.ts diff --git a/src/runtime/types/RStruct.ts b/src_old/runtime/types/RStruct.ts similarity index 100% rename from src/runtime/types/RStruct.ts rename to src_old/runtime/types/RStruct.ts diff --git a/src/runtime/types/RTuple.ts b/src_old/runtime/types/RTuple.ts similarity index 100% rename from src/runtime/types/RTuple.ts rename to src_old/runtime/types/RTuple.ts diff --git a/src/runtime/types/RValue.ts b/src_old/runtime/types/RValue.ts similarity index 100% rename from src/runtime/types/RValue.ts rename to src_old/runtime/types/RValue.ts diff --git a/src/runtime/types/index.ts 
b/src_old/runtime/types/index.ts similarity index 100% rename from src/runtime/types/index.ts rename to src_old/runtime/types/index.ts diff --git a/src_old/syntax/visitors/DeclarationNameVisitor.ts b/src_old/syntax/visitors/DeclarationNameVisitor.ts new file mode 100644 index 0000000..99b2669 --- /dev/null +++ b/src_old/syntax/visitors/DeclarationNameVisitor.ts @@ -0,0 +1,260 @@ +import * as ast from '~/syntax'; +import IDeclarationVisitor from './interfaces/IDeclarationVisitor'; +import TypeChecker from '~/typecheck/TypeChecker'; +import { Token } from '~/parser/Tokenizer'; +import TypeErrorContext from '~/typecheck/TypeErrorContext'; +import resolveModule from '~/typecheck/resolveModule'; +import parse from '~/parser'; + + +/** + * Visits all declarations in a module, registering each declaration by name. + * This has two main parts: + * - Registering imports and exports (and loading all imported modules for the whole app) + * - Registering module-scope definitions by name, checking for name clashes with other declarations + */ +export default class DeclarationNameVisitor implements IDeclarationVisitor { + private error: TypeErrorContext; + + constructor(private typeChecker: TypeChecker) { + this.error = new TypeErrorContext(typeChecker.errors); + } + /** + * Asserts that the module at the specified path has been loaded, + * parsed, and all declarations inside it have been registered. + */ + private loadModule(path: string) { + if (!this.typeChecker.names[path]) { + const module = parse(path); + // this will make sure that circular dependencies work TODO maybe? + this.typeChecker.names[path] = {}; + this.typeChecker.exports[path] = {}; + module.visit(this); + } + } + + /** + * Gives a declaration a unique id and registers it under the specified name + */ + private addName(module: string, name: Token, decl: ast.Declaration) { + const id = this.typeChecker.declarations.length; + this.typeChecker.declarations.push(decl); + const names = this.typeChecker.names[module]; + if (!names[name.image]) names[name.image] = []; + names[name.image].push({ id, location: name.location }); + } + + /** + * Registers an existing declaration id under a specified name + */ + private addExport(module: string, name: Token, id: number) { + const exports = this.typeChecker.exports[module]; + if (!exports[name.image]) exports[name.image] = []; + exports[name.image].push({ id, location: name.location }); + } + + /** + * Gives a declaration a unique it and registers it under a specified + * declaration name and export name. 
+ */ + private addNameAndExport(module: string, exportName: Token, valueName: Token, decl: ast.Declaration) { + const id = this.typeChecker.declarations.length; + this.addName(module, valueName, decl); + this.addExport(module, exportName, id); + } + + /** + * Gets the list of declarations of a given name in a given module + */ + private getDeclarations(module: string, name: string) { + if (!(name in this.typeChecker.names[module])) return []; + return this.typeChecker.names[module][name].map(({ id, location }) => ({ + name: Token.fromLocation(location, name), + declaration: this.typeChecker.declarations[id] + })); + } + + /** + * Gets the list of exports of a given name in a given module + */ + private getExports(module: string, name: string) { + return this.typeChecker.exports[module][name].map(({ id, location }) => ({ + name: Token.fromLocation(location, name), + declaration: this.typeChecker.declarations[id] + })); + } + + /** + * Adds the declaration ids of the specified export name under the specified import name + */ + private link(module: string, name: Token, importedModule: string, importedName: string) { + const names = this.typeChecker.names[module], exports = this.typeChecker.exports[importedModule]; + if (!names[name.image]) names[name.image] = []; + const decls = exports[importedName].map(({ id }) => ({ id, location: name.location })); + names[name.image].push(...decls); + } + + private getExport(path: string, name: string) { + return this.typeChecker.exports[path][name]; + } + + /** + * Once all declarations and exports have been added for a module, + * go through them all to verify that there are no name clashes. + * Constants and namespaces can have no clashes, only one declaration is allowed per name. + * All other declarations can be overloaded, so more than one is allowed, but names + * all have to be of the same type. + */ + private checkNameClashes(module: string) { + // constants and namespaces can have no clashes + const noClashes: Function[] = [ast.ConstantDeclaration, ast.NamespaceDeclaration]; + for (const name of Object.keys(this.typeChecker.names[module])) { + const list = this.getDeclarations(module, name); + if (list.some(d => noClashes.includes(d.declaration.constructor)) && list.length > 1 + || list.some(d => list.some(d1 => d.declaration.constructor !== d1.declaration.constructor))) { + for (const { name: token } of list) this.error.declNameClash(token); + } + } + for (const name of Object.keys(this.typeChecker.exports[module])) { + const list = this.getExports(module, name); + if (list.some(d => noClashes.includes(d.declaration.constructor)) && list.length > 1 + || list.some(d => list.some(d1 => d.declaration.constructor !== d1.declaration.constructor))) { + for (const { name: token } of list) this.error.exportClash(token); + } + } + } + + visitModule(module: ast.Module): void { + // process imports first to enumerate all modules + for (const imp of module.imports) imp.visit(this); + // process module-scoped declarations + for (const decl of module.declarations.filter(d => !(d instanceof ast.ExportDeclaration))) decl.visit(this); + // process exports last so all overloads are available + for (const exp of module.declarations.filter(d => d instanceof ast.ExportDeclaration)) exp.visit(this); + // add name clash messages + this.checkNameClashes(module.location.path); + } + + /** + * An import declaration exposes an export of another module as a local name in the module + * containing the declaration. 
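The registry that DeclarationNameVisitor writes into (typeChecker.declarations, names and exports above) reduces to one flat array of declarations plus per-module tables that map each name to a list of ids into that array: overloads are simply several ids under one name, and linking an import is copying the exporter's ids under the importer's alias. A reduced sketch of those tables; the Registry class and the simplified entry shape are assumptions, while the method names mirror the code above.

    // Reduced sketch of the declaration registry written by DeclarationNameVisitor.
    interface NameEntry { id: number; }   // the real entries also carry a source location

    class Registry {
        declarations: object[] = [];                                         // every declaration, addressed by id
        names: { [module: string]: { [name: string]: NameEntry[] } } = {};   // module-local names -> ids
        exports: { [module: string]: { [name: string]: NameEntry[] } } = {}; // exported names -> ids

        /** Register a declaration under a module-local name; overloads pile up under one name. */
        addName(module: string, name: string, decl: object): number {
            const id = this.declarations.length;
            this.declarations.push(decl);
            const names = this.names[module] || (this.names[module] = {});
            (names[name] || (names[name] = [])).push({ id });
            return id;
        }

        /** Expose an already-registered declaration id under an export name. */
        addExport(module: string, name: string, id: number) {
            const exports = this.exports[module] || (this.exports[module] = {});
            (exports[name] || (exports[name] = [])).push({ id });
        }

        /** Import: the alias in `module` points at whatever ids `fromModule` exports under `exportName`. */
        link(module: string, alias: string, fromModule: string, exportName: string) {
            const ids = (this.exports[fromModule] || {})[exportName] || [];
            const names = this.names[module] || (this.names[module] = {});
            (names[alias] || (names[alias] = [])).push(...ids);
        }
    }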
To process it, we need to resolve the imported module path, + * make sure that the requested export name exists, make sure that the requested alias name + * does not clash with any already declared names, and then add the name to the module, + * linking it to the exported declaration in the other module. + */ + visitImportDeclaration(decl: ast.ImportDeclaration): void { + const currentModule = decl.location.path; + // resolve the module + const importedModule = resolveModule(currentModule, decl.moduleName.value); + // invalid module path specified + if (!importedModule) return this.error.noModule(decl.moduleName); + // make sure the module has been loaded + this.loadModule(importedModule); + // process the imports + for (const { importName, aliasName } of decl.imports) { + // if wildcard, process it as a namespace, not an import + if (importName.image === '*') { + const namespace = new ast.NamespaceDeclaration(importedModule, aliasName, decl.location); + namespace.visit(this); + continue; + } + // regular import, verify that the module exports the name + if (!this.getExport(importedModule, importName.image)) { + this.error.noModuleExport(decl.moduleName.value, importName); + continue; + } + // register the alias name to the module using the imported export + this.link(currentModule, aliasName, importedModule, importName.image); + } + } + + /** + * All declarations are processed the same: + * - make sure it has a name + * - add the name under the containing module + */ + private processDeclaration(decl: ast.Declaration) { + const name = decl.name; + if (!name) return this.error.noName(decl); + this.addName(decl.location.path, name, decl); + } + + visitNamespaceDeclaration(decl: ast.NamespaceDeclaration) { this.processDeclaration(decl); } + visitTypeDeclaration(decl: ast.TypeDeclaration): void { this.processDeclaration(decl); } + visitFunctionDeclaration(decl: ast.FunctionDeclaration): void { this.processDeclaration(decl); } + visitConstantDeclaration(decl: ast.ConstantDeclaration): void { this.processDeclaration(decl); } + + visitExportDeclaration(decl: ast.ExportDeclaration): void { + const module = decl.location.path; + for (const { exportName, valueName, value } of decl.exports) { + if (!exportName) { + this.error.noName(decl); + continue; + } + // determine the kind and match it with a value + if (value) { + // if the declaration has a name, add it to the module's names + if (valueName) this.addNameAndExport(module, exportName, valueName, value); + // otherwise, just make it an export + else { + const id = this.typeChecker.declarations.length; + this.typeChecker.declarations.push(value); + this.addExport(module, exportName, id); + } + } else if (valueName) { // this will always be true if there is no inline value + // exporting a non-declared value + if (!(valueName.image in this.typeChecker.names[module])) { + this.error.valueNotDefined(valueName); + continue; + } + // add each declaration as an export + for (const { id } of this.typeChecker.names[module][valueName.image]) + this.addExport(module, exportName, id); + } + } + } + + visitExportForwardDeclaration(decl: ast.ExportForwardDeclaration): void { + const currentModule = decl.location.path; + // resolve the module + const importedModule = resolveModule(currentModule, decl.moduleName.value); + // invalid module path specified + if (!importedModule) return this.error.noModule(decl.moduleName); + // make sure the module has been loaded + this.loadModule(importedModule); + // process the forwards + for (const { importName, exportName 
} of decl.forwards) { + if (importName.image === '*' && exportName.image !== '*') { + // if wildcard, export a namespace + const namespace = new ast.NamespaceDeclaration(importedModule, exportName, decl.location); + const id = this.typeChecker.declarations.length; + this.typeChecker.declarations.push(namespace); + this.addExport(currentModule, exportName, id); + } else if (importName.image === '*' && exportName.image === '*') { + // forward all exports + for (const imp of Object.keys(this.typeChecker.exports[importedModule])) { + for (const { id } of this.typeChecker.exports[importedModule][imp]) { + const token = Token.fromLocation(exportName.location, imp); + this.addExport(currentModule, token, id); + } + } + } else { + // named export, verify that the module exports the name + if (!this.getExport(importedModule, importName.image)) { + this.error.noModuleExport(decl.moduleName.value, importName); + continue; + } + // add the export + for (const { id } of this.typeChecker.exports[importedModule][importName.image]) + this.addExport(currentModule, importName, id); + } + } + } + + /** TypeParams are not module-scoped declarations */ + visitTypeParam(_param: ast.TypeParam): void { throw new Error("Method not implemented"); } + /** Params are not module-scoped declarations */ + visitParam(_param: ast.Param): void { throw new Error("Method not implemented"); } + /** LambdaParams are not module-scoped declarations */ + visitLambdaParam(_param: ast.LambdaParam): void { throw new Error("Method not implemented"); } +} \ No newline at end of file diff --git a/src_old/syntax/visitors/DeclarationTypeVisitor.ts b/src_old/syntax/visitors/DeclarationTypeVisitor.ts new file mode 100644 index 0000000..f131857 --- /dev/null +++ b/src_old/syntax/visitors/DeclarationTypeVisitor.ts @@ -0,0 +1,161 @@ +import * as ast from '~/syntax'; +import IDeclarationVisitor from './interfaces/IDeclarationVisitor'; +import TypeChecker from '~/typecheck/TypeChecker'; +import * as types from '~/typecheck/types'; +import { Variance } from '~/typecheck/types/TParam'; +import TypeResolutionVisitor from './TypeResolutionVisitor'; +import preVisit from '~/utils/preVisit'; +import OrderedMap from '~/utils/OrderedMap'; +import { createNamespace } from '~/typecheck/types'; + + +/** + * This visitor is the top-level of pass 2 of the type checker. + * It is responsible for resolving the types of all declarations + * of a module. + * By this point it is expected that pass 1 (DeclarationNameVisitor) + * has already been run, so that all declarations are available by name. + */ +@preVisit() +export default class DeclarationTypeVisitor implements IDeclarationVisitor { + private visitor: TypeResolutionVisitor; + + constructor(private typeChecker: TypeChecker) {} + + /** + * Some declarations (types and constants) need to have recursion detection + * to prevent an infinite loop. This will call a function with recursion + * detection enabled during the call. 
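visitExportForwardDeclaration() above covers three shapes of forward: a named forward re-exports specific declaration ids, a wildcard import forwarded under a name becomes a namespace declaration, and a full wildcard forward copies every export of the source module into the current module's export table under the same names. A reduced sketch of that last case; forwardAllExports and the ExportTable shape are illustrative, not the project's API.

    // Reduced sketch of the wildcard forward case (`export * from "m"`):
    // every export of the source module is re-exported by the current module under the same name.
    type ExportTable = { [name: string]: number[] };   // export name -> declaration ids

    function forwardAllExports(
        exportsByModule: { [module: string]: ExportTable },
        currentModule: string,
        sourceModule: string,
    ) {
        const source = exportsByModule[sourceModule] || {};
        const target = exportsByModule[currentModule] || (exportsByModule[currentModule] = {});
        for (const name of Object.keys(source)) {
            // only the ids are copied, so the same declaration stays reachable through both modules
            (target[name] || (target[name] = [])).push(...source[name]);
        }
    }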
+ */ + private callWithRecursionDetection(decl: ast.Declaration, cb: () => types.Type): types.Type { + // already resolving, avoid infinite recursion + if (this.typeChecker.resolving.has(decl)) return types.createRecursive(decl); + // add to set so circular references are handled + this.typeChecker.resolving.add(decl); + const val = cb(); + this.typeChecker.resolving.delete(decl); + return val; + } + + /** + * Pre-visitor that provides memoization + */ + preVisit(visitor: () => types.Type, visitee: ast.Declaration): types.Type { + if (visitee.type) return visitee.type; + return visitee.type = visitor(); + } + + /** + * The type of a namespace is simply a namespace. + * The namespace has a reference to the module it was imported from, + * so we just need to grab all of the declaration ids of that module + * and create a new namespace type. + */ + visitNamespaceDeclaration(decl: ast.NamespaceDeclaration): types.Type { + const names = this.typeChecker.names[decl.modulePath]; + const namespace: { [name: string]: number[] } = {}; + for (const name of Object.keys(names)) { + namespace[name] = names[name].map(({ id }) => id); + } + return createNamespace(namespace); + } + + /** + * Type declarations need to have their type node (the definition) + * resolved. If the declaration has type parameters, then it is a + * generic type. Generic types need to initialize the type params + * scope of the visitor before visiting the node. + */ + visitTypeDeclaration(decl: ast.TypeDeclaration): types.Type { + return this.callWithRecursionDetection(decl, () => { + // initialize a new visitor for this declaration + this.visitor = new TypeResolutionVisitor(this.typeChecker); + if (decl.typeParams) { + // if there are type parameters, this is a generic type + const typeParams = new OrderedMap(); + for (const p of decl.typeParams) { + p.visit(this); + this.visitor.typeParams.set(p.name.image, p.type as types.ParamType); + typeParams.add(p.name.image, p.type as types.ParamType); + } + this.visitor.typeParams.push(); + const type = types.createGeneric(typeParams, decl.typeNode.visit(this.visitor)); + this.visitor.typeParams.pop(); + return type; + } else { + // otherwise, it just resolves to the type of the type definition + return decl.typeNode.visit(this.visitor); + } + }); + } + + visitTypeParam(param: ast.TypeParam): types.ParamType { + // no defined variance means it needs to be inferred from how it is used + let variance: Variance = 'invariant'; + if (param.varianceOp) variance = param.varianceOp.image === '+' ? 
'covariant' : 'contravariant'; + // no defined constraint means it defaults to any + let constraint: types.Type = types.createAny(); + if (param.typeConstraint) { + constraint = param.typeConstraint.visit(this.visitor); + } + return types.createParam(param.name.image, constraint, variance); + } + + visitFunctionDeclaration(decl: ast.FunctionDeclaration): types.Type { + // initialize a new visitor for this declaration + this.visitor = new TypeResolutionVisitor(this.typeChecker); + // resolve type parameter types (this must be done first because param and return types may use them) + let typeParams: OrderedMap | undefined; + if (decl.typeParams) { + typeParams = new OrderedMap(); + for (const p of decl.typeParams) { + p.visit(this); + this.visitor.typeParams.set(p.name.image, p.type as types.ParamType); + typeParams.add(p.name.image, p.type as types.ParamType); + } + } + // resolve types of parameters and return type + const paramTypes = decl.params.map(p => { p.visit(this); return p.type; }); + const returnType = decl.returnType.visit(this.visitor); + // save the type to the instance right away so recursion will work + decl.type = types.createFunction(paramTypes, returnType); + // TODO: this is just wrong + if (typeParams) decl.type = types.createGeneric(typeParams, decl.type); + // add each parameter to the visitor's params scope + for (let i = 0; i < decl.params.length; ++i) { + this.visitor.params.set(decl.params[i].name.image, paramTypes[i]); + } + // type check the function body, the ternary is necessary because + // Statement.visit() and Expression.visit() are not properly unioned + const actualReturnType = decl.body instanceof ast.Expression + ? decl.body.visit(this.visitor) + : decl.body.visit(this.visitor); + returnType.assertAssignableFrom(actualReturnType, decl.returnType); + return decl.type; + + if (!(returnType instanceof TUnknown) && !returnType.isAssignableFrom(actualReturnType)) { + this.pushError(p => TypeError.typeMismatch(p, actualReturnType, returnType, decl.returnType)); + } + } + + visitParam(param: ast.Param): types.Type { + throw new Error("Method not implemented."); + } + + visitLambdaParam(param: ast.LambdaParam): types.Type { + throw new Error("Method not implemented."); + } + + visitConstantDeclaration(decl: ast.ConstantDeclaration): types.Type { + throw new Error("Method not implemented."); + } + + /** modules do not have a type */ + visitModule(_module: ast.Module): types.Type { throw new Error("Method not implemented."); } + /** imports do not have a type */ + visitImportDeclaration(_decl: ast.ImportDeclaration): types.Type { throw new Error("Method not implemented."); } + /** exports do not have a type */ + visitExportDeclaration(_decl: ast.ExportDeclaration): types.Type { throw new Error("Method not implemented."); } + /** forwards do not have a type */ + visitExportForwardDeclaration(_decl: ast.ExportForwardDeclaration): types.Type { throw new Error("Method not implemented."); } +} diff --git a/src/translator/TranslationVisitor.ts b/src_old/syntax/visitors/TranslationVisitor.ts similarity index 98% rename from src/translator/TranslationVisitor.ts rename to src_old/syntax/visitors/TranslationVisitor.ts index 6a540aa..609c0be 100644 --- a/src/translator/TranslationVisitor.ts +++ b/src_old/syntax/visitors/TranslationVisitor.ts @@ -1,13 +1,13 @@ -import INodeVisitor from '../syntax/INodeVisitor'; -import Translator from './Translator'; -import Func from './Func'; +import INodeVisitor from './interfaces/INodeVisitor'; +import Translator from 
'~/translator/Translator'; +import Func from '~/translator/Func'; import Instruction, { PushScopeFrame, PopFrame, Break, Continue, PushLoopFrame, TrueBranch, SetBoolRef, SetIntegerRef, InteropReference, FalseBranch, AddToScope, ReferenceMutate, Jump, Noop, SetTupleRef, Return, Throw, PushTryFrame, ErrorRef, SetCharRef, SetFloatRef, SetStringRef, ArrayAccessRef, SetArrayRef, BinaryOperatorRef, FieldAccessRef, FunctionCallRef, CopyRef, SetStructRef, UnaryOperatorRef -} from '../runtime/instructions'; -import { RBool, RInteger } from '../runtime/types'; +} from '~/runtime/instructions'; +import { RBool, RInteger } from '~/runtime/types'; import * as ast from '~/syntax'; diff --git a/src/typecheck/visitors/TypeCheckVisitor.ts b/src_old/syntax/visitors/TypeCheckVisitor.ts similarity index 63% rename from src/typecheck/visitors/TypeCheckVisitor.ts rename to src_old/syntax/visitors/TypeCheckVisitor.ts index c648348..c8ca607 100644 --- a/src/typecheck/visitors/TypeCheckVisitor.ts +++ b/src_old/syntax/visitors/TypeCheckVisitor.ts @@ -1,15 +1,14 @@ import ASTNode from '~/syntax/ASTNode'; -import { Location } from '~/parser/Tokenizer'; -import INodeVisitor from '~/syntax/INodeVisitor'; +import INodeVisitor from '~/syntax/visitors/interfaces/INodeVisitor'; import { TType, TInteger, TFloat, TChar, TBool, TArray, TStruct, TTuple, TInferred, TFunction, TUnion, TGeneric, TParam, TAny, TNever, TUnknown, determineGeneralType } from '~/typecheck/types'; import Module from '~/runtime/Module'; import TypeChecker from '~/typecheck/TypeChecker'; import TypeCheckContext, { SymbolTable } from '~/typecheck/TypeCheckContext'; -import * as mess from '~/typecheck/TypeCheckerMessages'; -import OrderedMap from '~/typecheck/types/OrderedMap'; +import OrderedMap from '~/utils/OrderedMap'; import { createUnary, createBinary } from '~/runtime/operators'; import * as ast from '~/syntax'; +import TypeError from '~/typecheck/TypeError'; /** @@ -57,119 +56,24 @@ export default class TypeCheckVisitor implements INodeVisitor { * Adds a type checking error with the specified message and location. 
* Returns a resolved type, this type defaults to Unknown */ - pushError(message: string, location: Location, resolvedType: TType = new TUnknown()) { - return this.typeChecker.pushError(message, this.module.path, location, resolvedType); - } - - typeMismatch(actual: TType, expected: string, node: ASTNode) { - return this.pushError(mess.TYPE_MISMATCH(actual, expected), node.locations.self); - } - - nameClash(name: string, location: Location) { - return this.pushError(mess.NAME_CLASH(name), location); - } - - typeNotDefined(name: string, node: ASTNode) { - return this.pushError(mess.TYPE_NOT_DEFINED(name), node.locations.self); - } - - valueNotDefined(name: string, location: Location) { - return this.pushError(mess.VALUE_NOT_DEFINED(name), location); - } - - notGeneric(node: ASTNode) { - return this.pushError(mess.NOT_GENERIC, node.locations.self); - } - - notArray(node: ASTNode) { - return this.pushError(mess.NOT_ARRAY, node.locations.self); - } - - notNamespace(name: string, node: ASTNode) { - return this.pushError(mess.NOT_NAMESPACE(name), node.locations.self); - } - - notStruct(node: ASTNode) { - return this.pushError(mess.NOT_STRUCT, node.locations.self); - } - - notInvokable(node: ASTNode) { - return this.pushError(mess.NOT_INVOKABLE, node.locations.self); - } - - notGenericFunction(node: ASTNode) { - return this.pushError(mess.NOT_GENERIC_FUNCTION, node.locations.self); - } - - invalidTypeArgCount(expected: number, actual: number, node: ASTNode) { - return this.pushError(mess.INVALID_TYPE_ARG_COUNT(expected, actual), node.locations.self); - } - - invalidTypeArg(arg: TType, param: TParam, node: ASTNode) { - return this.pushError(mess.INVALID_TYPE_ARG(arg, param.name, param.constraint), node.locations.self); - } - - invalidArgCount(expected: number, actual: number, node: ASTNode) { - return this.pushError(mess.INVALID_ARG_COUNT(expected, actual), node.locations.self); - } - - invalidBreak(node: ASTNode) { - return this.pushError(mess.INVALID_BREAK_STATEMENT, node.locations.self); - } - - invalidContinue(node: ASTNode) { - return this.pushError(mess.INVALID_CONTINUE_STATEMENT, node.locations.self); - } - - invalidLoopNum(stmt: ast.BreakStatement | ast.ContinueStatement) { - return this.pushError(mess.INVALID_LOOP_NUM(stmt.loopNumber, this.context.loopNumber), stmt.locations.self); - } - - invalidBinaryOp(exp: ast.BinaryExpression, left: TType, right: TType) { - return this.pushError(mess.INVALID_BINARY_OPERATOR(exp.symbol, left, right), exp.locations.self); - } - - invalidUnaryOp(exp: ast.UnaryExpression, target: TType) { - return this.pushError(mess.INVALID_UNARY_OPERATOR(exp.symbol, target), exp.locations.self); + pushError(error: (modulePath: string) => TypeError, resolvedType: TType = new TUnknown()) { + return this.typeChecker.pushError(error(this.module.path), resolvedType); } /**************** * DECLARATIONS * ****************/ - visitProgram(_program: ast.Program): TType { throw new Error("Method not implemented."); } + visitModule(_program: ast.Module): TType { throw new Error("Method not implemented."); } + visitNamespaceDeclaration(_decl: ast.NamespaceDeclaration): TType {} visitImportDeclaration(_decl: ast.ImportDeclaration): TType { throw new Error("Method not implemented."); } visitExportDeclaration(_decl: ast.ExportDeclaration): TType { throw new Error("Method not implemented."); } visitExportForwardDeclaration(_decl: ast.ExportForwardDeclaration): TType { throw new Error("Method not implemented."); } - - @baseCheck - visitTypeDeclaration(decl: ast.TypeDeclaration): TType { 
- this.context = new TypeCheckContext(); - // if there are type parameters, this is a generic type - if (decl.typeParams) { - const typeParams = new OrderedMap(); - for (const p of decl.typeParams) { - this.context.typeParams[p.name] = p.visit(this) as TParam; - typeParams.add(p.name, this.context.typeParams[p.name]); - } - return new TGeneric(typeParams, decl.typeNode.visit(this)); - } - // otherwise, it just resolves to the type of the type definition - return decl.typeNode.visit(this); - } - - @baseCheck - visitTypeParam(param: ast.TypeParam): TType { - // no defined variance means it needs to be inferred from how it is used - const variance = param.varianceOp === '+' ? 'covariant' : param.varianceOp === '-' ? 'contravariant' : 'invariant'; - // no defined constraint means it defaults to any - const constraint = param.typeConstraint ? param.typeConstraint.visit(this) : new TAny(); - return new TParam(param.name, variance, constraint); - } + visitTypeDeclaration(decl: ast.TypeDeclaration): TType {} + visitTypeParam(param: ast.TypeParam): TType {} @baseCheck visitFunctionDeclaration(decl: ast.FunctionDeclaration): TType { - let type: TType; this.context = new TypeCheckContext(); // resolve type parameter types (this must be done first because param and return types may use them) let typeParams: OrderedMap | undefined; @@ -183,19 +87,20 @@ export default class TypeCheckVisitor implements INodeVisitor { // resolve types of parameters and return type const paramTypes = decl.params.map(p => p.visit(this)); const returnType = decl.returnType.visit(this); - // the type of the function will be unknown if any component types are unknown, otherwise it has a function type - if (paramTypes.some(t => t instanceof TUnknown) || returnType instanceof TUnknown) type = new TUnknown(); - else type = new TFunction(paramTypes, returnType, typeParams); + // save the type to the instance right away so recursion will work + decl.type = TFunction.create(paramTypes, returnType, typeParams); // create a symbol table initialized to contain the parameters for (let i = 0; i < decl.params.length; ++i) { this.context.symbolTable[decl.params[i].name] = paramTypes[i]; } // type check the function body, passing along the starting symbol table and the return type of the function as the expected type of the body - const actualReturnType = decl.body.visit(this) as TType; + const actualReturnType = decl.body.visit(this); + returnType.assertAssignableFrom(actualReturnType, decl.returnType); + if (!(returnType instanceof TUnknown) && !returnType.isAssignableFrom(actualReturnType)) { - this.typeMismatch(actualReturnType, returnType.toString(), decl.returnType); + this.pushError(p => TypeError.typeMismatch(p, actualReturnType, returnType, decl.returnType)); } - return type; + return decl.type; } @baseCheck @@ -221,133 +126,16 @@ export default class TypeCheckVisitor implements INodeVisitor { * TYPES * *********/ - @baseCheck - visitBuiltInType(type: ast.BuiltInType): TType { - switch (type.typeNode) { - case 'u8': case 'byte': return new TInteger(8, false); - case 'i8': return new TInteger(8, true); - case 'u16': case 'short': return new TInteger(16, false); - case 'i16': return new TInteger(16, true); - case 'u32': return new TInteger(32, false); - case 'i32': case 'integer': return new TInteger(32, true); - case 'u64': return new TInteger(64, false); - case 'i64': case 'long': return new TInteger(64, true); - case 'int': return new TInteger(Infinity, true); - case 'f32': case 'float': return new TFloat(32); - case 'f64': case 
'double': return new TFloat(64); - case 'char': return new TChar(); - case 'string': return new TArray(new TChar()); - case 'bool': return new TBool(); - case 'void': return new TTuple([]); - case 'any': return new TAny(); - default: throw new Error(`Invalid built-in type ${type.typeNode}`); - } - } - - @baseCheck - visitIdentifierType(type: ast.IdentifierType): TType { - // check for a type param first - if (this.context.typeParams[type.name]) { - return this.context.typeParams[type.name]; - } else { - const moduleType = this.getModuleType(type.name); - if (!moduleType) return this.typeNotDefined(type.name, type); - return moduleType; - } - } - - @baseCheck - visitArrayType(type: ast.ArrayType): TType { - const baseType = type.baseType.visit(this); - if (baseType instanceof TUnknown) return new TUnknown(); - else return new TArray(baseType); - } - - /** - * TODO: does it make sense for explicit function types to have type params? - * If so, the syntax will have to be extended to allow for that... - */ - @baseCheck - visitFunctionType(type: ast.FunctionType): TType { - const paramTypes = type.paramTypes.map(t => t.visit(this)); - const returnType = type.returnType.visit(this); - if (paramTypes.some(t => t instanceof TUnknown) || returnType instanceof TUnknown) return new TUnknown(); - else return new TFunction(paramTypes, returnType); - } - - @baseCheck - visitParenthesizedType(type: ast.ParenthesizedType): TType { - return type.inner.visit(this); - } - - /** - * A specific type resolves to a particular "instantiation" of its corresponding generic type. - * What is returned is a clone of the generic type's underlying type, but with the - * type parameters replaced with the corresponding type arguments. - * Those type arguments keep the variance constraints from the parameters - * so that we know what types are assignable to the specific type. 
- */ - @baseCheck - visitSpecificType(type: ast.SpecificType): TType { - // first, resolve the TGeneric - const genericType = type.typeNode.visit(this); - if (!genericType.isGeneric()) return this.notGeneric(type); - // second, resolve all type arguments - const typeArgs = type.typeArgs.map(a => a.visit(this)); - // third, make sure the number of type arguments is correct - const paramTypes = genericType.getTypeParams(); - const numParams = paramTypes.length; - if (typeArgs.length !== numParams) return this.invalidTypeArgCount(numParams, typeArgs.length, type); - // fourth, make sure each type argument is assignable to the corresponding type parameter - for (let i = 0; i < typeArgs.length; ++i) { - const [param, arg] = [paramTypes.getValue(i), typeArgs[i]]; - if (param.isAssignableFrom(arg)) return this.invalidTypeArg(arg, param, type.typeArgs[i]); - } - // fifth, specify the generic type - return (genericType as TGeneric).specifyGenericType(typeArgs); - } - - @baseCheck - visitStructType(type: ast.StructType): TType { - const fields: { [name: string]: TType } = {}; - for (const field of type.fields) { - if (fields[field.name]) return this.nameClash(field.name, type.locations[`field_${field.name}`]); - fields[field.name] = field.type.visit(this); - if (fields[field.name] instanceof TUnknown) return new TUnknown(); - } - return new TStruct(fields); - } - - @baseCheck - visitTupleType(type: ast.TupleType): TType { - const types = type.types.map(t => t.visit(this)); - if (types.some(t => t instanceof TUnknown)) return new TUnknown(); - else return new TTuple(types); - } - - @baseCheck - visitUnionType(type: ast.UnionType): TType { - const types = type.types.map(t => t.visit(this)); - if (types.some(t => t instanceof TUnknown)) return new TUnknown(); - else return new TUnion(types); - } - - @baseCheck - visitNamespaceAccessType(type: ast.NamespaceAccessType): TType { - const baseType = type.baseType.visit(this); - /** - * The lowest-level node that can resolve to a namespace is an identifier type, - * but those don't have to be namespaces, so the actual error-throwing happens here. 
- */ - if (!baseType.isNamespace() && type.baseType instanceof ast.IdentifierType) - return this.notNamespace(type.baseType.name, type.baseType); - // resolve the module of the namespace - const module = this.typeChecker.modules[baseType.getModuleId()]; - // resolve the corresponding type - const resolvedType = this.typeChecker.getType(module, type.typeName); - if (!resolvedType) return this.typeNotDefined(type.typeName, type); - return resolvedType; - } + visitBuiltInType(type: ast.BuiltInType): TType {} + visitIdentifierType(type: ast.IdentifierType): TType {} + visitArrayType(type: ast.ArrayType): TType {} + visitFunctionType(type: ast.FunctionType): TType {} + visitParenthesizedType(type: ast.ParenthesizedType): TType {} + visitSpecificType(type: ast.SpecificType): TType {} + visitStructType(type: ast.StructType): TType {} + visitTupleType(type: ast.TupleType): TType {} + visitUnionType(type: ast.UnionType): TType {} + visitNamespaceAccessType(type: ast.NamespaceAccessType): TType {} /************** * STATEMENTS * @@ -405,7 +193,7 @@ export default class TypeCheckVisitor implements INodeVisitor { // type check the condition const conditionType = stmt.conditionExp.visit(this); if (!conditionType.isBool()) { - this.typeMismatch(conditionType, 'bool', stmt.conditionExp); + this.typeMismatch(conditionType, new TBool(), stmt.conditionExp); } return returnType; } @@ -416,7 +204,7 @@ export default class TypeCheckVisitor implements INodeVisitor { const arrayType = stmt.iterableExp.visit(this); let iterType; if (!arrayType.isArray()) { - iterType = this.typeMismatch(arrayType, '?[]', stmt.iterableExp); + iterType = this.notArray(stmt.iterableExp); } else { iterType = arrayType.getBaseType(); } @@ -433,7 +221,7 @@ export default class TypeCheckVisitor implements INodeVisitor { @baseCheck visitReturnStatement(stmt: ast.ReturnStatement): TType { // no return value, assumed to be () - if (!stmt.exp) return new TTuple([]); + if (!stmt.exp) return TTuple.create(); // otherwise check the return value return stmt.exp.visit(this); } @@ -468,7 +256,7 @@ export default class TypeCheckVisitor implements INodeVisitor { // type check the condition const conditionType = stmt.conditionExp.visit(this); if (!conditionType.isBool()) { - this.typeMismatch(conditionType, 'bool', stmt.conditionExp); + this.typeMismatch(conditionType, new TBool(), stmt.conditionExp); } // increment the loop number this.context.loopNumber++; @@ -520,7 +308,7 @@ export default class TypeCheckVisitor implements INodeVisitor { @baseCheck visitStringLiteral(_lit: ast.StringLiteral): TType { - return new TArray(new TChar()); + return TArray.create(new TChar()); } @baseCheck @@ -537,7 +325,7 @@ export default class TypeCheckVisitor implements INodeVisitor { if (!arrayType.isArray()) return this.notArray(acc.target); // verify that the index expression is an integer const indexExpType = acc.indexExp.visit(this); - if (!indexExpType.isInteger()) this.typeMismatch(indexExpType, 'unsigned int', acc.indexExp); + if (!indexExpType.isInteger()) this.typeMismatch(indexExpType, new TInteger(32, false), acc.indexExp); // type is the base type of the array return arrayType.getBaseType(); } @@ -545,8 +333,7 @@ export default class TypeCheckVisitor implements INodeVisitor { @baseCheck visitArrayLiteral(lit: ast.ArrayLiteral): TType { // for all items, make sure there is one base assignable type for them all - const baseType = lit.items.map(i => i.visit(this)).reduce(determineGeneralType, new TNever()) - return new TArray(baseType); + return 
TArray.create(lit.items.map(i => i.visit(this)).reduce(determineGeneralType, new TNever())); } @baseCheck @@ -643,7 +430,7 @@ export default class TypeCheckVisitor implements INodeVisitor { for (let i = 0; i < fromTypes.length; ++i) { if (fromTypes[i] instanceof TUnknown) continue; // skip errors if (!toTypes[i].isAssignableFrom(fromTypes[i])) { - this.typeMismatch(fromTypes[i], toTypes[i].toString(), nodes[i]); + this.typeMismatch(fromTypes[i], toTypes[i], nodes[i]); error = true; } } @@ -664,7 +451,7 @@ export default class TypeCheckVisitor implements INodeVisitor { @baseCheck visitIfElseExpression(exp: ast.IfElseExpression): TType { const conditionType = exp.condition.visit(this); - if (!conditionType.isBool()) this.typeMismatch(conditionType, 'bool', exp.condition); + if (!conditionType.isBool()) this.typeMismatch(conditionType, new TBool(), exp.condition); const type = exp.consequent.visit(this); const altType = exp.alternate.visit(this); return determineGeneralType(type, altType); @@ -674,7 +461,7 @@ export default class TypeCheckVisitor implements INodeVisitor { visitLambdaExpression(exp: ast.LambdaExpression): TType { const paramTypes = exp.params.map(p => p.visit(this)); // can't infer return type, that will happen when we are checking types - return new TFunction(paramTypes, new TInferred()); + return TFunction.create(paramTypes, new TInferred()); } /** @@ -690,7 +477,7 @@ export default class TypeCheckVisitor implements INodeVisitor { // type check the function body, passing along the starting symbol table const actualReturnType = exp.body.visit(this); if (!exp.type.returnType.isAssignableFrom(actualReturnType)) - this.typeMismatch(actualReturnType, exp.type.returnType.toString(), exp); + this.typeMismatch(actualReturnType, exp.type.returnType, exp); } @baseCheck @@ -704,16 +491,12 @@ export default class TypeCheckVisitor implements INodeVisitor { for (const { key, value } of lit.entries) { fields[key] = value.visit(this); } - return new TStruct(fields); + return TStruct.create(fields); } @baseCheck visitTupleLiteral(lit: ast.TupleLiteral): TType { - const itemTypes = []; - for (const item of lit.items) { - itemTypes.push(item.visit(this)); - } - return new TTuple(itemTypes); + return TTuple.create(lit.items.map(i => i.visit(this))); } @baseCheck diff --git a/src_old/syntax/visitors/TypeResolutionVisitor.ts b/src_old/syntax/visitors/TypeResolutionVisitor.ts new file mode 100644 index 0000000..1560cb3 --- /dev/null +++ b/src_old/syntax/visitors/TypeResolutionVisitor.ts @@ -0,0 +1,263 @@ +import ITypeVisitor from '~/syntax/visitors/interfaces/ITypeVisitor'; +import IExpressionVisitor from '~/syntax/visitors/interfaces/IExpressionVisitor'; +import IStatementVisitor from '~/syntax/visitors/interfaces/IStatementVisitor'; +import * as types from '~/typecheck/types'; +import * as ast from '~/syntax'; +import Scope from '~/utils/Scope'; +import TypeChecker from '~/typecheck/TypeChecker'; +import TypeErrorContext from '~/typecheck/TypeErrorContext'; +import preVisit from '~/utils/preVisit'; +import { Token } from '~/parser/Tokenizer'; +import DeclarationTypeVisitor from '~/syntax/visitors/DeclarationTypeVisitor'; +import SpecifyTypeVisitor from '~/typecheck/visitors/SpecifyTypeVisitor'; + + +/** + * Resolves the type of a node in a specific declaration. + * This will keep track of any variables or type parameters + * that are in scope. 
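+ *
+ * For illustration (a hedged reading of the code below, not a statement of the final design):
+ * while resolving a generic declaration, its type parameters are registered in the `typeParams`
+ * scope and its parameters in the `params` scope, and visitIdentifierType() consults the
+ * type-parameter scope before falling back to module-level names via getModuleType().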
+ */ +@preVisit() +export default class TypeResolutionVisitor implements ITypeVisitor, IExpressionVisitor, IStatementVisitor { + public typeParams = new Scope(); + public params = new Scope(); + private error: TypeErrorContext; + + constructor(private typeChecker: TypeChecker) { + this.error = new TypeErrorContext(typeChecker.errors); + } + + /** + * This pre-visitor method memoizes already-resolved types + */ + preVisit(visitor: () => types.Type, visitee: ast.ASTNode): types.Type { + if (visitee.type) return visitee.type; + return visitee.type = visitor(); + } + + /** + * Resolves the type of a named type in the current module + */ + private getModuleType(name: Token): Optional { + // resolve the module, make sure that the name exists + const module = name.location.path; + const names = this.typeChecker.names[module]; + if (!(name.image in names)) return null; + // get all declarations + const decls = names[name.image].map(({ id }) => this.typeChecker.declarations[id]); + if (!(decls[0] instanceof ast.TypeDeclaration)) return null; + // resolve all of the types + const resolved: types.Type[] = decls.map(type => type.visit(new DeclarationTypeVisitor(this.typeChecker))); + // if the name resolves to more than one type, it is overloaded + return resolved.length === 1 ? resolved[0] : new TOverloadedGeneric(resolved); + } + + // #region Types + + visitBuiltInType(type: ast.BuiltInType): TType { + switch (type.typeNode) { + case 'u8': case 'byte': return new TInteger(type.location, 8, false); + case 'i8': return new TInteger(type.location, 8, true); + case 'u16': case 'short': return new TInteger(type.location, 16, false); + case 'i16': return new TInteger(type.location, 16, true); + case 'u32': return new TInteger(type.location, 32, false); + case 'i32': case 'integer': return new TInteger(type.location, 32, true); + case 'u64': return new TInteger(type.location, 64, false); + case 'i64': case 'long': return new TInteger(type.location, 64, true); + case 'int': return new TInteger(type.location, Infinity, true); + case 'f32': case 'float': return new TFloat(type.location, 32); + case 'f64': case 'double': return new TFloat(type.location, 64); + case 'char': return new TChar(type.location); + case 'string': return new TArray(new TChar(), type.location); + case 'bool': return new TBool(type.location); + case 'void': return new TTuple(type.location); + case 'any': return new TAny(type.location); + default: throw new Error(`Invalid built-in type ${type.typeNode}`); + } + } + + visitIdentifierType(type: ast.IdentifierType): TType { + // check for a type param first + const typeParam = this.typeParams.get(type.name); + if (typeParam) { + return typeParam; + } else { + const name = Token.fromLocation(type.location, type.name); + const moduleType = this.getModuleType(name); + if (!moduleType) return this.error.typeNotDefined(name); + return Object.assign(moduleType.clone(), { location: type.location }); + } + } + + visitArrayType(type: ast.ArrayType): TType { + return new TArray(type.baseType.visit(this), type.location); + } + + /** + * TODO: does it make sense for explicit function types to have type params? + * If so, the syntax will have to be extended to allow for that... 
+ */ + visitFunctionType(type: ast.FunctionType): TType { + const paramTypes = type.paramTypes.map(t => t.visit(this)); + const returnType = type.returnType.visit(this); + return new TFunction(paramTypes, returnType, type.location); + } + + visitParenthesizedType(type: ast.ParenthesizedType): TType { + return type.inner.visit(this); + } + + visitSpecificType(type: ast.SpecificType): TType { + // resolve the TGeneric + const genericType = type.typeNode.visit(this); + if (!genericType.isGeneric()) return this.error.notGeneric(type.location); + // resolve all type arguments + const typeArgs = type.typeArgs.map(a => a.visit(this)); + // specify the type + const visitor = new SpecifyTypeVisitor(this.typeChecker) + const specificType = new TSpecific(genericType) + + // third, make sure the number of type arguments is correct + const paramTypes = genericType.getTypeParams(); + const numParams = paramTypes.length; + if (typeArgs.length !== numParams) return this.invalidTypeArgCount(numParams, typeArgs.length, type); + // fourth, make sure each type argument is assignable to the corresponding type parameter + for (let i = 0; i < typeArgs.length; ++i) { + const [param, arg] = [paramTypes.getValue(i), typeArgs[i]]; + if (param.isAssignableFrom(arg)) return this.invalidTypeArg(arg, param, type.typeArgs[i]); + } + // fifth, specify the generic type + return (genericType as TGeneric).specifyGenericType(typeArgs); + } + + visitStructType(type: ast.StructType): TType { + const fields: { [name: string]: TType } = {}; + for (const field of type.fields) { + if (fields[field.name.image]) { + fields[field.name.image] = this.error.nameClash(field.name); + } else { + fields[field.name.image] = field.type.visit(this); + } + } + return new TStruct(type.location, fields); + } + + visitTupleType(type: ast.TupleType): TType { + return new TTuple(type.location, type.types.map(t => t.visit(this))); + } + + visitUnionType(type: ast.UnionType): TType { + return new TUnion(type.location, type.types.map(t => t.visit(this))); + } + + visitNamespaceAccessType(type: ast.NamespaceAccessType): TType { + const baseType = type.baseType.visit(this); + if (!baseType.isNamespace()) return this.error.notNamespace(type.baseType.location); + const name = type.typeName.image; + const names = baseType.getNamespaceNames(); + // verify the type exists + if (!(name in names)) return this.error.typeNotDefined(type.typeName); + // get all declarations + const decls = names[type.typeName.image].map(id => this.typeChecker.declarations[id]); + if (!(decls[0] instanceof ast.TypeDeclaration)) return this.error.typeNotDefined(type.typeName); + // resolve all of the types + const resolved: TType[] = decls.map(type => type.visit(new DeclarationTypeVisitor(this.typeChecker))); + // if the name resolves to more than one type, it is overloaded + const nsType = resolved.length === 1 ? 
resolved[0] : new TOverloadedGeneric(resolved); + return Object.assign(nsType.clone(), { location: type.location }); + } + + // #endregion + // #region Expressions + + visitBoolLiteral(lit: ast.BoolLiteral): TType { + throw new Error("Method not implemented."); + } + visitCharLiteral(lit: ast.CharLiteral): TType { + throw new Error("Method not implemented."); + } + visitFloatLiteral(lit: ast.FloatLiteral): TType { + throw new Error("Method not implemented."); + } + visitIntegerLiteral(lit: ast.IntegerLiteral): TType { + throw new Error("Method not implemented."); + } + visitStringLiteral(lit: ast.StringLiteral): TType { + throw new Error("Method not implemented."); + } + visitIdentifierExpression(exp: ast.IdentifierExpression): TType { + throw new Error("Method not implemented."); + } + visitArrayAccess(acc: ast.ArrayAccess): TType { + throw new Error("Method not implemented."); + } + visitArrayLiteral(lit: ast.ArrayLiteral): TType { + throw new Error("Method not implemented."); + } + visitBinaryExpression(exp: ast.BinaryExpression): TType { + throw new Error("Method not implemented."); + } + visitFieldAccess(acc: ast.FieldAccess): TType { + throw new Error("Method not implemented."); + } + visitFunctionApplication(app: ast.FunctionApplication): TType { + throw new Error("Method not implemented."); + } + visitIfElseExpression(exp: ast.IfElseExpression): TType { + throw new Error("Method not implemented."); + } + visitLambdaExpression(exp: ast.BaseLambdaExpression): TType { + throw new Error("Method not implemented."); + } + visitParenthesizedExpression(exp: ast.ParenthesizedExpression): TType { + throw new Error("Method not implemented."); + } + visitStructLiteral(lit: ast.StructLiteral): TType { + throw new Error("Method not implemented."); + } + visitTupleLiteral(lit: ast.TupleLiteral): TType { + throw new Error("Method not implemented."); + } + visitUnaryExpression(exp: ast.UnaryExpression): TType { + throw new Error("Method not implemented."); + } + visitVarDeclaration(decl: ast.VarDeclaration): TType { + throw new Error("Method not implemented."); + } + + // #endregion + // #region Statements + + visitBlock(block: ast.Block): TType { + throw new Error("Method not implemented."); + } + visitExpressionStatement(exp: ast.ExpressionStatement): TType { + throw new Error("Method not implemented."); + } + visitBreakStatement(stmt: ast.BreakStatement): TType { + throw new Error("Method not implemented."); + } + visitContinueStatement(stmt: ast.ContinueStatement): TType { + throw new Error("Method not implemented."); + } + visitDoWhileStatement(stmt: ast.DoWhileStatement): TType { + throw new Error("Method not implemented."); + } + visitForStatement(stmt: ast.ForStatement): TType { + throw new Error("Method not implemented."); + } + visitReturnStatement(stmt: ast.ReturnStatement): TType { + throw new Error("Method not implemented."); + } + visitThrowStatement(stmt: ast.ThrowStatement): TType { + throw new Error("Method not implemented."); + } + visitTryCatchStatement(stmt: ast.TryCatchStatement): TType { + throw new Error("Method not implemented."); + } + visitWhileStatement(stmt: ast.WhileStatement): TType { + throw new Error("Method not implemented."); + } + + // #endregion +} \ No newline at end of file diff --git a/src_old/syntax/visitors/interfaces/IDeclarationVisitor.ts b/src_old/syntax/visitors/interfaces/IDeclarationVisitor.ts new file mode 100644 index 0000000..e3d598f --- /dev/null +++ b/src_old/syntax/visitors/interfaces/IDeclarationVisitor.ts @@ -0,0 +1,20 @@ +import * as ast from 
'~/syntax/declarations'; +import { LambdaParam } from '~/syntax/expressions'; + + +/** + * A visitor type for only declaration node types + */ +export default interface IDeclarationVisitor { + visitModule(program: ast.Module): T; + visitImportDeclaration(decl: ast.ImportDeclaration): T; + visitTypeDeclaration(decl: ast.TypeDeclaration): T; + visitTypeParam(param: ast.TypeParam): T; + visitFunctionDeclaration(decl: ast.FunctionDeclaration): T; + visitParam(param: ast.Param): T; + visitLambdaParam(param: LambdaParam): T; + visitConstantDeclaration(decl: ast.ConstantDeclaration): T; + visitExportDeclaration(decl: ast.ExportDeclaration): T; + visitExportForwardDeclaration(decl: ast.ExportForwardDeclaration): T; + visitNamespaceDeclaration(decl: ast.NamespaceDeclaration): T; +} diff --git a/src_old/syntax/visitors/interfaces/IExpressionVisitor.ts b/src_old/syntax/visitors/interfaces/IExpressionVisitor.ts new file mode 100644 index 0000000..9c54016 --- /dev/null +++ b/src_old/syntax/visitors/interfaces/IExpressionVisitor.ts @@ -0,0 +1,26 @@ +import * as ast from '~/syntax/expressions'; + + +/** + * A visitor type for only expression node types + */ +export default interface IExpressionVisitor { + visitBoolLiteral(lit: ast.BoolLiteral): T; + visitCharLiteral(lit: ast.CharLiteral): T; + visitFloatLiteral(lit: ast.FloatLiteral): T; + visitIntegerLiteral(lit: ast.IntegerLiteral): T; + visitStringLiteral(lit: ast.StringLiteral): T; + visitIdentifierExpression(exp: ast.IdentifierExpression): T; + visitArrayAccess(acc: ast.ArrayAccess): T; + visitArrayLiteral(lit: ast.ArrayLiteral): T; + visitBinaryExpression(exp: ast.BinaryExpression): T; + visitFieldAccess(acc: ast.FieldAccess): T; + visitFunctionApplication(app: ast.FunctionApplication): T; + visitIfElseExpression(exp: ast.IfElseExpression): T; + visitLambdaExpression(exp: ast.BaseLambdaExpression): T; + visitParenthesizedExpression(exp: ast.ParenthesizedExpression): T; + visitStructLiteral(lit: ast.StructLiteral): T; + visitTupleLiteral(lit: ast.TupleLiteral): T; + visitUnaryExpression(exp: ast.UnaryExpression): T; + visitVarDeclaration(decl: ast.VarDeclaration): T; +} diff --git a/src_old/syntax/visitors/interfaces/INodeVisitor.ts b/src_old/syntax/visitors/interfaces/INodeVisitor.ts new file mode 100644 index 0000000..6f2b2d9 --- /dev/null +++ b/src_old/syntax/visitors/interfaces/INodeVisitor.ts @@ -0,0 +1,11 @@ +import IDeclarationVisitor from './IDeclarationVisitor'; +import ITypeVisitor from './ITypeVisitor'; +import IExpressionVisitor from './IExpressionVisitor'; +import IStatementVisitor from './IStatementVisitor'; + + +/** + * A visitor type for all node types + */ +export default interface INodeVisitor + extends IDeclarationVisitor, ITypeVisitor, IExpressionVisitor, IStatementVisitor {} diff --git a/src_old/syntax/visitors/interfaces/IStatementVisitor.ts b/src_old/syntax/visitors/interfaces/IStatementVisitor.ts new file mode 100644 index 0000000..ce351c6 --- /dev/null +++ b/src_old/syntax/visitors/interfaces/IStatementVisitor.ts @@ -0,0 +1,18 @@ +import * as ast from '~/syntax/statements'; + + +/** + * A visitor type for only statement node types + */ +export default interface IStatementVisitor { + visitBlock(block: ast.Block): T; + visitExpressionStatement(exp: ast.ExpressionStatement): T; + visitBreakStatement(stmt: ast.BreakStatement): T; + visitContinueStatement(stmt: ast.ContinueStatement): T; + visitDoWhileStatement(stmt: ast.DoWhileStatement): T; + visitForStatement(stmt: ast.ForStatement): T; + visitReturnStatement(stmt: 
ast.ReturnStatement): T; + visitThrowStatement(stmt: ast.ThrowStatement): T; + visitTryCatchStatement(stmt: ast.TryCatchStatement): T; + visitWhileStatement(stmt: ast.WhileStatement): T; +} diff --git a/src_old/syntax/visitors/interfaces/ITypeVisitor.ts b/src_old/syntax/visitors/interfaces/ITypeVisitor.ts new file mode 100644 index 0000000..ed505ec --- /dev/null +++ b/src_old/syntax/visitors/interfaces/ITypeVisitor.ts @@ -0,0 +1,18 @@ +import * as ast from '~/syntax/types'; + + +/** + * A visitor type for only type node types + */ +export default interface ITypeVisitor { + visitBuiltInType(type: ast.BuiltInType): T; + visitIdentifierType(type: ast.IdentifierType): T; + visitArrayType(type: ast.ArrayType): T; + visitFunctionType(type: ast.FunctionType): T; + visitParenthesizedType(type: ast.ParenthesizedType): T; + visitSpecificType(type: ast.SpecificType): T; + visitStructType(type: ast.StructType): T; + visitTupleType(type: ast.TupleType): T; + visitUnionType(type: ast.UnionType): T; + visitNamespaceAccessType(type: ast.NamespaceAccessType): T; +} diff --git a/src/translator/ConstFunc.ts b/src_old/translator/ConstFunc.ts similarity index 100% rename from src/translator/ConstFunc.ts rename to src_old/translator/ConstFunc.ts diff --git a/src/translator/Func.ts b/src_old/translator/Func.ts similarity index 89% rename from src/translator/Func.ts rename to src_old/translator/Func.ts index 828f34b..ec21632 100644 --- a/src/translator/Func.ts +++ b/src_old/translator/Func.ts @@ -1,8 +1,9 @@ import ASTNode from '~/syntax/ASTNode'; import Instruction, { Return, ParamRef, AddToScope } from '~/runtime/instructions'; import Translator from './Translator'; -import TranslationVisitor from './TranslationVisitor'; +import TranslationVisitor from '~/syntax/visitors/TranslationVisitor'; import { FunctionDeclaration, LambdaExpression, Expression } from '~/syntax'; +import Scope from '~/utils/Scope'; type ASTFunction = FunctionDeclaration | LambdaExpression; @@ -18,7 +19,7 @@ export default abstract class Func { moduleId: number; modulePath: string; instructions: Instruction[]; - scope: { [key: string]: number }[]; + scope: Scope; constructor(id: number, moduleFunction: { ast: ASTNode }, moduleId: number, modulePath: string = '') { // id of the function, a target for callers @@ -31,7 +32,7 @@ export default abstract class Func { // the list of instructions that make up this function this.instructions = []; // the scope stack (just for the translation process, not the runtime table) - this.scope = [{}]; + this.scope = new Scope(); } abstract translate(translator: Translator): void; @@ -71,14 +72,7 @@ export default abstract class Func { * Get the reference of a scope variable */ getFromScope(name: string): number { - let scope = this.scope[this.scope.length - 1]; - for (let i = this.scope.length - 1; i >= 0; --i) { - if (name in this.scope[i]) { - scope = this.scope[i]; - break; - } - } - return scope[name]; + return this.scope.get(name); } /** @@ -87,14 +81,7 @@ export default abstract class Func { */ addToScope(name: string, ref: number, inst: Instruction) { this.addInstruction(inst); - let scope = this.scope[this.scope.length - 1]; - for (let i = this.scope.length - 1; i >= 0; --i) { - if (name in this.scope[i]) { - scope = this.scope[i]; - break; - } - } - scope[name] = ref; + this.scope.add(name, ref); return inst; } @@ -104,7 +91,7 @@ export default abstract class Func { */ pushScope(inst: T) { this.addInstruction(inst); - this.scope.push({}); + this.scope.push(); return inst; } diff --git 
a/src/translator/Translator.ts b/src_old/translator/Translator.ts
similarity index 100%
rename from src/translator/Translator.ts
rename to src_old/translator/Translator.ts
diff --git a/src/translator/index.ts b/src_old/translator/index.ts
similarity index 100%
rename from src/translator/index.ts
rename to src_old/translator/index.ts
diff --git a/src/typecheck/TypeCheckContext.ts b/src_old/typecheck/TypeCheckContext.ts
similarity index 100%
rename from src/typecheck/TypeCheckContext.ts
rename to src_old/typecheck/TypeCheckContext.ts
diff --git a/src_old/typecheck/TypeChecker.ts b/src_old/typecheck/TypeChecker.ts
new file mode 100644
index 0000000..ecc27a3
--- /dev/null
+++ b/src_old/typecheck/TypeChecker.ts
@@ -0,0 +1,157 @@
+import { TypeError } from './TypeErrorContext';
+import * as types from './types';
+import TypeCheckVisitor from '~/syntax/visitors/TypeCheckVisitor';
+import { TypeDeclaration, FunctionDeclaration, ConstantDeclaration, Module, Declaration } from '~/syntax';
+import DeclarationNameVisitor from '~/syntax/visitors/DeclarationNameVisitor';
+import DeclarationTypeVisitor from '~/syntax/visitors/DeclarationTypeVisitor';
+import { Location } from '~/parser/Tokenizer';
+
+
+/**
+ * A registry keyed by module path, then declaration name.
+ * References the id of the corresponding declaration,
+ * and the location of the name in code.
+ */
+interface NameRegistry {
+    [path: string]: {
+        [name: string]: {
+            id: number,
+            location: Location,
+        }[]
+    }
+}
+
+/**
+ * Semantic analysis class
+ */
+export default class TypeChecker {
+    /** The main module */
+    main: Module;
+    /** Registry of all module-scoped names in the program */
+    names: NameRegistry = {};
+    /** Registry of all module exports in the program */
+    exports: NameRegistry = {};
+    /** Table of all declarations by unique id */
+    declarations: Declaration[] = [];
+    /** List of errors gathered during type checking */
+    errors: TypeError[] = [];
+    /** Set of currently-resolving declarations for recursion tracking */
+    resolving = new Set();
+
+    constructor(module: Module) {
+        this.main = module;
+        this.names[module.location.path] = {};
+        this.exports[module.location.path] = {};
+    }
+
+    /**
+     * Perform semantic analysis on the program, starting with the entry point (known as "main") module.
+     * The output is the table of all declarations in the program, indexed by unique id.
+     */
+    check() {
+        // 1st pass: process all declarations, recursively traversing all modules
+        this.main.visit(new DeclarationNameVisitor(this));
+        // 2nd pass: analyze types of declarations, type check expressions and statements
+        for (const decl of this.declarations) decl.visit(new DeclarationTypeVisitor(this));
+        if (this.errors.length) {
+            // if there were any errors, throw a combined one
+            throw new Error(this.errors.map(e => e.message).join('\n'));
+        }
+        // the program is now type checked and all declarations are loaded. Return them.
+        return this.declarations;
+    }
+
+    // //////////////////
+    // RESOLVING TYPES //
+    // //////////////////
+
+    /**
+     * Type check a declaration.
+     * Do nothing if it has already been checked.
+     * If it is already resolving, we have a circular dependency that can't be resolved, which is an error.
+     * Otherwise, it hasn't been resolved yet, and we visit the top level of the declaration's AST.
+     * If a type resolution reaches a name, it will resolve that name in place, calling either getType() or getValueType() below.
+     * To prevent double resolution, we track which ones have already been resolved.
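+     *
+     * A hedged illustration only (hypothetical source, not from this change):
+     *
+     *   const a = b
+     *   const b = a
+     *
+     * Resolving `a` flags it as resolving and reaches `b`; resolving `b` reaches `a` again while
+     * the flag is still set, so a circular dependency is reported and the type is set to Unknown.
+     * Recursive *types* are instead handled in getType(), which returns a recursive reference,
+     * and function declarations are allowed to refer to themselves.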
+     */
+    resolveType(module: Module, decl: ModuleElement) {
+        if (decl.ast.type) return decl.ast.type; // resolved already
+        if (decl.resolving) {
+            // type recursion is handled in getType(), so this will only happen for recursively defined constants
+            this.errors.push(new TypeCheckError(mess.CIRCULAR_DEPENDENCY, module.path, decl.ast.location));
+            // set the type to Unknown so that this error only occurs once
+            decl.ast.type = new TUnknown();
+            return decl.ast.type;
+        }
+        if (decl.ast instanceof FunctionDeclaration) {
+            // function declarations can be recursive, and they always contain their type right in their declaration
+            decl.ast.visit(new TypeCheckVisitor(this, module));
+        } else {
+            // Set a flag on each declaration as we resolve it so that we can track circular dependencies
+            decl.resolving = true;
+            decl.ast.visit(new TypeCheckVisitor(this, module));
+            decl.resolving = false;
+        }
+        return decl.ast.type;
+    }
+
+    /**
+     * Given a module and the name of a type, get the Type instance of the type.
+     * The type may exist in another module, so this method resolves imports and exports
+     * to track down the actual declaration.
+     * The type is also resolved here if it hasn't been already.
+     */
+    getType(module: Module, name: string): Optional {
+        if (module.namespaces.hasOwnProperty(name)) {
+            // namespaces can be present in types
+            return new TNamespace(module.namespaces[name]);
+        }
+        const types = module.types[name];
+        if (!types) return null;
+        const resolved = types.map(type => {
+            if (type.imported) {
+                // type is imported, resolve the import to the corresponding export in the imported module
+                const imp = module.imports[name];
+                const importedModule = this.modules[imp.modulePath];
+                const exp = importedModule.exports[imp.exportName];
+                // get the type from that module, recursively so that we can handle forwarded imports
+                return this.getType(importedModule, exp.valueName);
+            } else {
+                // the type was declared in this module, return it if it has already been type checked
+                if (type.ast.type) return type.ast.type;
+                // if the type is resolving, we have a recursive type, return the recursive reference because we don't have an actual type yet
+                if (type.resolving) return new TRecursive(type.ast);
+                // otherwise resolve it and return the resolved type
+                return this.resolveType(module, type);
+            }
+        });
+        return resolved.length === 1 ? resolved[0] : new TOverloadedGeneric(resolved);
+    }
+
+    /**
+     * Given a module and the name of some value (either a function or a constant), get the Type instance of the value.
+     * The value may exist in another module, so this method resolves imports and exports
+     * to track down the actual declaration.
+     * The type is also resolved here if it hasn't been already.
+ */ + getValueType(module: Module, name: string): Optional { + if (module.namespaces.hasOwnProperty(name)) { + return new TNamespace(module.namespaces[name]); + } + const value = module.functions[name] || module.constants[name]; + if (!value) return null; + if (value.imported) { + // value is imported, resolve the import to the corresponding export in the imported module + const imp = module.imports[name]; + const importedModule = this.modules[imp.moduleId]; + const exp = importedModule.exports[imp.exportName]; + // get the value from that module, recursively so that we can handle forwarded imports + return this.getValueType(importedModule, exp.valueName); + } else { + // the value was declared in this module, return it if it has already been type checked + if (value.ast.type) return value.ast.type; + // otherwise resolve it and return the resolved type + return this.resolveType(module, value); + } + } +} diff --git a/src_old/typecheck/resolveModule.ts b/src_old/typecheck/resolveModule.ts new file mode 100644 index 0000000..63bd9b5 --- /dev/null +++ b/src_old/typecheck/resolveModule.ts @@ -0,0 +1,47 @@ +import { resolve, dirname, join } from 'path'; +import { existsSync as exists, lstatSync as lstat } from 'fs'; + + +/** + * Given a path of a module imported into this module, + * resolve the absolute path of that module. + */ +export default function resolveModule(from: string, path: string) { + // if it is a relative path, resolve the relation and determine if it exists + if (path.startsWith('.')) { + const resolved = resolve(dirname(from), path); + return resolveDirectPath(resolved); + } + // otherwise, it is a package import + let dir = dirname(from); + while (dir) { + // we want to check the path '{currentModuleDir}/packages/{importPath}' for a valid module + const resolved = resolveDirectPath(join(dir, 'packages', path)); + // valid path, use it + if (resolved) return resolved; + // if it didn't exist, we want to continue to check parent directories until we reach the fs root + if (dir === dirname(dir)) break; + dir = dirname(dir); + } + return null; +} + +/** + * Given an absolute path to an imported module (it may not exist), + * follow the module system rules for module resolution + * to determine the exact path to the module file, or return null + * if it does not exist. + */ +function resolveDirectPath(path: string) { + // first check the direct path as-is + if (exists(path)) { + // check as if it is a directory + if (exists(join(path, 'index.ren'))) return join(path, 'index.ren'); + // return the path as long as it's not a directory + if (!lstat(path).isDirectory()) return path; + } + // then check it with a .ren extension + if (exists(`${path}.ren`)) return `${path}.ren`; + // doesn't exist according to the rules of the module system + return null; +} diff --git a/src_old/typecheck/types.ts b/src_old/typecheck/types.ts new file mode 100644 index 0000000..f11a4b0 --- /dev/null +++ b/src_old/typecheck/types.ts @@ -0,0 +1,252 @@ +import OrderedMap from '~/utils/OrderedMap'; +import { Location } from '~/parser/Tokenizer'; +import { Declaration } from '~/syntax'; + + +/** + * These flags drive the majority of type definition logic. + * + * ## Normal types + * + * Normal types are the base types of the language, + * and consist of primitive types and structured types. + * Primitive types are atomic types, and structured types + * are types that are composed of other types. 
+ * Normal types can only ever be assignable to or from + * their own kind, and may contain further properties that + * determine assignability from there. 'char' and 'bool' + * are the only types that do not have further properties. + * Structured types contain other types, and have special + * logic for how to determine assignability of those inner + * types. + * The flags for normal types determine whether the type + * can be used as one of the normal types, i.e. whether + * the type is assignable to the normal type. + * If a type has multiple normal flags, then it can be + * used as multiple kinds of types (intersection). + * + * ## Union types + * + * Union types are a kind of computed type that form types + * that can be assigned from multiple kinds of types. + * For example, the type 'int | string' can be assigned + * either an int or a string. This is useful for when + * multiple types can be used in a certain place. The + * drawback of this is that while multiple types are + * assignable to the union, the union itself is often + * assignable to very few types, and very often is not + * assignable to any type at all (except itself). + * Because each flag indicates what type(s) a type is + * assignable *to*, the flags do not support union types + * out of the box. When a type is a union, the 'Union' + * flag should be turned on, which effectively inverts + * the meaning of all flags. Basically, a normal type flag + * flipped on for a union type means that that type is *NOT* + * included in the union. Any flag turned off *IS* included. + * This means that when determining if a type is a kind of + * normal type, if the type is a union, the expected flag + * should be the only one turned off. + * + * ## 'any' + * + * 'any' is a built-in type that represents the union of + * all types. Being this, it is assignable to only itself, + * but all types are assignable to it. This is a short-circuit + * check. When checking assignability, if the 'to' type is 'any', + * the logic will automatically return true, even if the 'from' + * type is structured. If the 'from' type is any, false will + * be returned, unless the 'to' type is any. + * + * ## 'never' + * + * 'never' is a built-in type that represents the intersection + * of all types. Being this, no types are assignable to it, + * but it is assignable to all types. This is a short-circuit + * check. When checking assignability, if the 'from' type is + * 'never', the logic will automatically return true, even if + * the 'to' type is structured. If the 'to' type is never, false + * will be returned, unless the 'from' type is never. 
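+ *
+ * As a rough illustration of the flag arithmetic described above (not code from this change):
+ * a hypothetical union of int and bool would carry the Union flag plus every *other* normal
+ * flag, leaving Integer and Bool cleared, since a cleared normal flag marks a member of the union:
+ *
+ *     const intOrBool = TypeFlags.Union | (TypeFlags.Normal & ~(TypeFlags.Integer | TypeFlags.Bool));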
+ */ +export const enum TypeFlags { + // #region Primitives + /** Type is an integer, sets properties 'signed' and 'size' */ + Integer = 1 << 0, + /** Type is a float, set property 'size' */ + Float = 1 << 1, + /** Type is a char */ + Char = 1 << 2, + /** Type is a bool */ + Bool = 1 << 3, + // #endregion + + // #region Structured + /** Type is an array, sets property 'baseType' */ + Array = 1 << 4, + /** Type is a struct, sets property 'fields' */ + Struct = 1 << 5, + /** Type is a tuple, sets property 'tupleTypes' */ + Tuple = 1 << 6, + /** Type is a function, sets properties 'paramTypes' and 'returnType' */ + Function = 1 << 7, + // #endregion + + // #region Nuanced (special types that don't fit into the category of "normal") + /** Type is any (union of all types) */ + Any = 1 << 8, + /** Type is never (intersection of all types) */ + Never = 1 << 9, + /** Type is inferred, sets property 'inference' */ + Inferred = 1 << 10, + /** Type is unknown (assignable to/from all types) */ + Unknown = 1 << 11, + // #endregion + + // #region Attributes + /** Flag for objects that have to be "types" for processing purposes but aren't actually types */ + NotAType = 1 << 12, + /** Type is overloaded (special logic for overloaded functions/generics) */ + Overloaded = 1 << 13, + /** Indicates that the "normal" flags (0-7) should be treated as inverted */ + Union = 1 << 14, + // #endregion + + // #region Aggregated + Primitive = Integer | Float | Char | Bool, + Structured = Array | Struct | Tuple | Function, + Normal = Primitive | Structured, + // #endregion +} + +export type Variance = 'covariant' | 'contravariant' | 'invariant'; + +export interface Type { + readonly flags: TypeFlags; + variance: Variance; + location?: Location; + signed?: boolean; + size?: number; + baseType?: Type; + fields?: { [key: string]: Type } + tupleTypes?: Type[]; + paramTypes?: Type[]; + returnType?: Type; + inference?: Inference; +} + +export interface GenericType extends Type { + typeParams: OrderedMap; +} + +export interface ParamType extends Type { + name: string; +} + +export interface Namespace extends Type { + names: { [name: string]: number[] }; +} + +export interface Recursive extends Type { + decl: Declaration; +} + +type AllTypeProps = { location?: Location, variance: Variance }; +const defaultTypeProps: AllTypeProps = { variance: 'covariant' }; + +export interface Inference {/* TODO */} + +function is(flags: TypeFlags, flag: TypeFlags) { + return (flags & TypeFlags.Union) + ? 
(~flags === flag) // if union, it must match exactly TODO ignore other flags
+        : !!(flags & flag); // otherwise, the flag must simply be turned on
+}
+
+export const isInteger = (type: Type) => is(type.flags, TypeFlags.Integer);
+export const isFloat = (type: Type) => is(type.flags, TypeFlags.Float);
+export const isChar = (type: Type) => is(type.flags, TypeFlags.Char);
+export const isBool = (type: Type) => is(type.flags, TypeFlags.Bool);
+
+export const isArray = (type: Type) => is(type.flags, TypeFlags.Array);
+export const isStruct = (type: Type) => is(type.flags, TypeFlags.Struct);
+export const isTuple = (type: Type) => is(type.flags, TypeFlags.Tuple);
+export const isFunction = (type: Type) => is(type.flags, TypeFlags.Function);
+
+export const isAny = (type: Type) => !!(type.flags & TypeFlags.Any);
+export const isNever = (type: Type) => !!(type.flags & TypeFlags.Never);
+export const isInferred = (type: Type) => !!(type.flags & TypeFlags.Inferred);
+export const isUnknown = (type: Type) => !!(type.flags & TypeFlags.Unknown);
+
+export const isOverloaded = (type: Type) => !!(type.flags & TypeFlags.Overloaded);
+export const isUnion = (type: Type) => !!(type.flags & TypeFlags.Union);
+
+export const isNotAType = (type: Type) => !!(type.flags & TypeFlags.NotAType);
+
+export const createInteger = (signed: boolean, size: number, props = defaultTypeProps): Type => ({
+    flags: TypeFlags.Integer,
+    signed,
+    size,
+    ...props,
+});
+export const createFloat = (size: number, props = defaultTypeProps): Type => ({
+    flags: TypeFlags.Float,
+    size,
+    ...props,
+});
+export const createChar = (props = defaultTypeProps): Type => ({ flags: TypeFlags.Char, ...props });
+export const createBool = (props = defaultTypeProps): Type => ({ flags: TypeFlags.Bool, ...props });
+
+export const createArray = (baseType: Type, props = defaultTypeProps): Type => ({
+    flags: TypeFlags.Array,
+    baseType,
+    ...props,
+});
+export const createStruct = (fields: { [key: string]: Type }, props = defaultTypeProps): Type => ({
+    flags: TypeFlags.Struct,
+    fields,
+    ...props,
+});
+export const createTuple = (tupleTypes: Type[], props = defaultTypeProps): Type => ({
+    flags: TypeFlags.Tuple,
+    tupleTypes,
+    ...props,
+});
+export const createFunction = (paramTypes: Type[], returnType: Type, props = defaultTypeProps): Type => ({
+    flags: TypeFlags.Function,
+    paramTypes,
+    returnType,
+    ...props,
+});
+
+export const createAny = (props = defaultTypeProps): Type => ({ flags: TypeFlags.Any, ...props });
+export const createNever = (props = defaultTypeProps): Type => ({ flags: TypeFlags.Never, ...props });
+
+export const createGeneric = (typeParams: OrderedMap, type: Type, props = defaultTypeProps): GenericType => ({
+    ...type,
+    flags: type.flags | TypeFlags.NotAType,
+    typeParams,
+    ...props,
+    variance: type.variance,
+});
+
+export const createParam = (name: string, constraint: Type, variance: Variance, props = defaultTypeProps): ParamType => ({
+    ...constraint,
+    name,
+    ...props,
+    variance,
+});
+
+export const createNamespace = (names: { [name: string]: number[] }, props = defaultTypeProps): Namespace => ({
+    flags: TypeFlags.NotAType,
+    names,
+    ...props,
+});
+
+export const createRecursive = (decl: Declaration, props = defaultTypeProps): Recursive => ({
+    flags: TypeFlags.NotAType,
+    decl,
+    ...props,
+});
+
+export function checkAssignment(from: Type, to: Type, hasLocation: 'from' | 'to' = 'from'): boolean {
+    if (isGeneric(from) || isGeneric(to) || isNamespace(from) || isNamespace(to))
+        return //TODO you were
here +} diff --git a/src/typecheck/types/TAny.ts b/src_old/typecheck/types/TAny.ts similarity index 65% rename from src/typecheck/types/TAny.ts rename to src_old/typecheck/types/TAny.ts index 67db471..91c4fed 100644 --- a/src/typecheck/types/TAny.ts +++ b/src_old/typecheck/types/TAny.ts @@ -1,5 +1,6 @@ import TType from './TType'; -import ITypeVisitor from '~/typecheck/visitors'; +import ITypeVisitor from '~/typecheck/visitors/ITypeVisitor'; +import { Location } from '~/parser/Tokenizer'; /** @@ -13,8 +14,12 @@ import ITypeVisitor from '~/typecheck/visitors'; * 'any' should be thought of as the supertype of all types. */ export default class TAny extends TType { - visit(visitor: ITypeVisitor) { - return visitor.visitAny(this); + constructor(public location?: Location) { + super(); + } + + visit(visitor: ITypeVisitor, param?: P) { + return visitor.visitAny(this, param); } toString() { diff --git a/src_old/typecheck/types/TArray.ts b/src_old/typecheck/types/TArray.ts new file mode 100644 index 0000000..14a6249 --- /dev/null +++ b/src_old/typecheck/types/TArray.ts @@ -0,0 +1,24 @@ +import TType from './TType'; +import ITypeVisitor from '~/typecheck/visitors/ITypeVisitor'; +import { Location } from '~/parser/Tokenizer'; + + +/** + * Array type, variable sized list of homogeneous values (only one type). + */ +export default class TArray extends TType { + /** + * Constructor not public, use TArray.create() instead. + */ + constructor(public baseType: TType, public location?: Location) { + super(); + } + + visit(visitor: ITypeVisitor, param?: P) { + return visitor.visitArray(this, param); + } + + toString() { + return this.baseType ? `${this.baseType}[]` : '?[]'; + } +} \ No newline at end of file diff --git a/src_old/typecheck/types/TBool.ts b/src_old/typecheck/types/TBool.ts new file mode 100644 index 0000000..491afc5 --- /dev/null +++ b/src_old/typecheck/types/TBool.ts @@ -0,0 +1,22 @@ +import TType from './TType'; +import ITypeVisitor from '~/typecheck/visitors/ITypeVisitor'; +import { Location } from '~/parser/Tokenizer'; + + +/** + * Boolean type, contains two values: true and false. + * Has a wide array of uses. + */ +export default class TBool extends TType { + constructor(public location?: Location) { + super(); + } + + visit(visitor: ITypeVisitor, param?: P) { + return visitor.visitBool(this, param); + } + + toString() { + return 'bool'; + } +} \ No newline at end of file diff --git a/src_old/typecheck/types/TChar.ts b/src_old/typecheck/types/TChar.ts new file mode 100644 index 0000000..06e14aa --- /dev/null +++ b/src_old/typecheck/types/TChar.ts @@ -0,0 +1,22 @@ +import TType from './TType'; +import ITypeVisitor from '~/typecheck/visitors/ITypeVisitor'; +import { Location } from '~/parser/Tokenizer'; + + +/** + * Unicode character type, represents the set of unicode characters. + * There is only one possible character type. 
+ */ +export default class TChar extends TType { + constructor(public location?: Location) { + super(); + } + + visit(visitor: ITypeVisitor, param?: P) { + return visitor.visitChar(this, param); + } + + toString() { + return 'char'; + } +} \ No newline at end of file diff --git a/src/typecheck/types/TFloat.ts b/src_old/typecheck/types/TFloat.ts similarity index 65% rename from src/typecheck/types/TFloat.ts rename to src_old/typecheck/types/TFloat.ts index 9a71515..2422a4c 100644 --- a/src/typecheck/types/TFloat.ts +++ b/src_old/typecheck/types/TFloat.ts @@ -1,5 +1,6 @@ import TType from './TType'; -import ITypeVisitor from '~/typecheck/visitors'; +import ITypeVisitor from '~/typecheck/visitors/ITypeVisitor'; +import { Location } from '~/parser/Tokenizer'; /** @@ -10,15 +11,12 @@ import ITypeVisitor from '~/typecheck/visitors'; * and the sign of the number. */ export default class TFloat extends TType { - size: number; - - constructor(size: number = 64) { + constructor(public location?: Location, public size: number = 64) { super(); - this.size = size; } - visit(visitor: ITypeVisitor) { - return visitor.visitFloat(this); + visit(visitor: ITypeVisitor, param?: P) { + return visitor.visitFloat(this, param); } toString() { diff --git a/src/typecheck/types/TFunction.ts b/src_old/typecheck/types/TFunction.ts similarity index 83% rename from src/typecheck/types/TFunction.ts rename to src_old/typecheck/types/TFunction.ts index 8a272ed..c74f7c8 100644 --- a/src/typecheck/types/TFunction.ts +++ b/src_old/typecheck/types/TFunction.ts @@ -1,29 +1,34 @@ import TType from './TType'; import TNever from './TNever'; import { SymbolTable } from '~/typecheck/TypeCheckContext'; -import TParam from './TParam'; -import OrderedMap from './OrderedMap'; +import TParam, { TParams } from './TParam'; +import OrderedMap from '~/utils/OrderedMap'; import TInferred from './TInferred'; -import ITypeVisitor, { InferTypeArgsVisitor } from '~/typecheck/visitors'; +import ITypeVisitor from '~/typecheck/visitors/ITypeVisitor'; +import { Location } from '~/parser/Tokenizer'; +import InferTypeArgsVisitor from '~/typecheck/visitors/InferTypeArgsVisitor'; /** * Function type, represented by a group of parameter types and a single return type. */ export default class TFunction extends TType { - paramTypes: TType[]; - returnType: TType; - typeParamTypes: OrderedMap; + public typeParams: TParams; - constructor(paramTypes: TType[], returnType: TType, typeParamTypes: OrderedMap = new OrderedMap()) { + /** + * Private constructor, use TFunction.create() instead. 
+ */ + constructor( + public paramTypes: TType[], + public returnType: TType, + public location?: Location, + typeParams: OrderedMap = new OrderedMap()) { super(); - this.paramTypes = paramTypes; - this.returnType = returnType; - if (typeParamTypes) this.typeParamTypes = typeParamTypes; + this.typeParams = new TParams(typeParams); } - visit(visitor: ITypeVisitor) { - return visitor.visitFunction(this); + visit(visitor: ITypeVisitor, param?: P) { + return visitor.visitFunction(this, param); } /** diff --git a/src_old/typecheck/types/TGeneric.ts b/src_old/typecheck/types/TGeneric.ts new file mode 100644 index 0000000..beead6a --- /dev/null +++ b/src_old/typecheck/types/TGeneric.ts @@ -0,0 +1,75 @@ +import TType from './TType'; +import TParam, { TParams } from './TParam'; +import { SymbolTable } from '~/typecheck/TypeCheckContext'; +import OrderedMap from '~/utils/OrderedMap'; +import ITypeVisitor from '~/typecheck/visitors/ITypeVisitor'; +import { Location } from '~/parser/Tokenizer'; + + +/** + * Represents a type with type params. + * 'typeParams' is an object mapping the type parameter names to TParam types. + * 'type' is the definition of the type, which makes use of the type parameters. + */ +export default class TGeneric extends TType { + typeParams: TParams; + + constructor(typeParams: OrderedMap, public type: TType, public location?: Location) { + super(); + this.typeParams = new TParams(typeParams); + } + + visit(visitor: ITypeVisitor, param?: P) { + return visitor.visitGeneric(this, param); + } + + /** + * Here, we need to clone the type definition and visit it, specifying + * all instances of TParam. This is where we check the type constraint. + */ + specifyGenericType(args: TType[]) { + const specific = this.type.clone(); + // create map of param name -> provided arg + const argMap: SymbolTable = {}; + for (let i = 0; i < args.length; ++i) { + const name = this.typeParams.getKey(i); + argMap[name] = this.typeParams.get(name).createTypeArg(args[i]); + } + // visit the type with the map so that params can be replaced with actual types + return specific.specifyTypeParams(argMap); + } +} + +/** + * An overloaded generic type is the "union" of types + * with the same name. When resolving a module-scoped + * type, if there are more than one type of the given + * name, they will be grouped in an overloaded generic + * type. + * + * Where overloaded generics differ from overloaded + * functions is that a type with no type parameters + * is never "invoked" (made specific). It is just used. + * So when an overloaded generic is "used" in any way + * that is not specifying type parameters, it is as if + * the non-generic overload is being used, so that is + * how it will behave (only if a non-generic overload + * is present). + */ +export class TOverloadedGeneric extends TType { + constructor(public types: TType[], public location?: Location) { + super(); + } + + visit(visitor: ITypeVisitor, param?: P) { + return visitor.visitOverloadedGeneric(this, param); + } + + /** + * In normal type operations, all that is required from + * this is the parameter-less type, if there is one. 
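+     *
+     * Illustration only (hypothetical declarations, not from this change): if a module declares
+     * both a plain `Box` and a generic `Box<T>`, a bare use of `Box` behaves as the parameter-less
+     * overload returned here, while `Box<int>` selects the generic overload by specifying its
+     * type parameters.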
+ */ + getParamLessType(): TType | undefined { + return this.types.filter(t => !t.isGeneric())[0]; + } +} diff --git a/src_old/typecheck/types/TInferred.ts b/src_old/typecheck/types/TInferred.ts new file mode 100644 index 0000000..a7a150c --- /dev/null +++ b/src_old/typecheck/types/TInferred.ts @@ -0,0 +1,16 @@ +import TType from './TType'; +import ITypeVisitor from '~/typecheck/visitors/ITypeVisitor'; +import { Location } from '~/parser/Tokenizer'; + + +export default class TInferred extends TType { + type?: TType; + + constructor(public location?: Location) { + super(); + } + + visit(visitor: ITypeVisitor, param?: P) { + return visitor.visitInferred(this, param); + } +} \ No newline at end of file diff --git a/src/typecheck/types/TInteger.ts b/src_old/typecheck/types/TInteger.ts similarity index 66% rename from src/typecheck/types/TInteger.ts rename to src_old/typecheck/types/TInteger.ts index 5099152..bd6564a 100644 --- a/src/typecheck/types/TInteger.ts +++ b/src_old/typecheck/types/TInteger.ts @@ -1,5 +1,6 @@ import TType from './TType'; -import ITypeVisitor from '~/typecheck/visitors'; +import ITypeVisitor from '~/typecheck/visitors/ITypeVisitor'; +import { Location } from '~/parser/Tokenizer'; /** @@ -8,17 +9,12 @@ import ITypeVisitor from '~/typecheck/visitors'; * and a signed flag, indicating whether or not negative values are included. */ export default class TInteger extends TType { - size: number; - signed: boolean; - - constructor(size: number = Infinity, signed: boolean = true) { + constructor(public location?: Location, public size: number = Infinity, public signed: boolean = true) { super(); - this.size = size; - this.signed = signed; } - visit(visitor: ITypeVisitor) { - return visitor.visitInteger(this); + visit(visitor: ITypeVisitor, param?: P) { + return visitor.visitInteger(this, param); } toString() { diff --git a/src/typecheck/types/TNamespace.ts b/src_old/typecheck/types/TNamespace.ts similarity index 74% rename from src/typecheck/types/TNamespace.ts rename to src_old/typecheck/types/TNamespace.ts index 7e12ac8..4620355 100644 --- a/src/typecheck/types/TNamespace.ts +++ b/src_old/typecheck/types/TNamespace.ts @@ -1,7 +1,12 @@ import TType from './TType'; import ITypeVisitor from '~/typecheck/visitors/ITypeVisitor'; +import { Location } from '~/parser/Tokenizer'; +export interface NamespaceNames { + [name: string]: number[]; +} + /** * Namespaces are the result of having the ability to declare wildcard imports, * whereby all exports of a module are grouped under a single "object". @@ -15,11 +20,11 @@ import ITypeVisitor from '~/typecheck/visitors/ITypeVisitor'; * will require a type checker and the namespace's module to resolve it. 
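 *
 * Illustration only (hypothetical syntax and names): a wildcard import such as
 * `import * as myLib from './myLib'` would bind `myLib` to a namespace whose `names` table
 * maps each exported name of './myLib' to its declaration ids; a member access like
 * `myLib.someExport` is then resolved lazily through the type checker.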
*/ export default class TNamespace extends TType { - constructor(public moduleId: number) { + constructor(public names: NamespaceNames, public location?: Location) { super(); } - visit(visitor: ITypeVisitor): T { - return visitor.visitNamespace(this); + visit(visitor: ITypeVisitor, param?: P) { + return visitor.visitNamespace(this, param); } } diff --git a/src/typecheck/types/TNever.ts b/src_old/typecheck/types/TNever.ts similarity index 65% rename from src/typecheck/types/TNever.ts rename to src_old/typecheck/types/TNever.ts index f32f538..baa4ecf 100644 --- a/src/typecheck/types/TNever.ts +++ b/src_old/typecheck/types/TNever.ts @@ -1,5 +1,6 @@ import TType from './TType'; -import ITypeVisitor from '~/typecheck/visitors'; +import ITypeVisitor from '~/typecheck/visitors/ITypeVisitor'; +import { Location } from '~/parser/Tokenizer'; /** @@ -15,8 +16,12 @@ import ITypeVisitor from '~/typecheck/visitors'; * Thus, it is assignable to all types. */ export default class TNever extends TType { - visit(visitor: ITypeVisitor) { - return visitor.visitNever(this); + constructor(public location?: Location) { + super(); + } + + visit(visitor: ITypeVisitor, param?: P) { + return visitor.visitNever(this, param); } toString() { diff --git a/src_old/typecheck/types/TParam.ts b/src_old/typecheck/types/TParam.ts new file mode 100644 index 0000000..9404b17 --- /dev/null +++ b/src_old/typecheck/types/TParam.ts @@ -0,0 +1,62 @@ +import TType from './TType'; +import ITypeVisitor from '~/typecheck/visitors/ITypeVisitor'; +import { Location } from '~/parser/Tokenizer'; +import OrderedMap from '~/utils/OrderedMap'; + + +export type Variance = 'covariant' | 'contravariant' | 'invariant'; + +/** + * Represents the type of an untyped type parameter, used in TGeneric and wherever + * a type parameters is used. 
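// An analogy in plain TypeScript for the rule stated above (TNever is
// assignable to all types): a `never`-returning function's result can flow
// into any expected type, because it never actually produces a value.
function failSketch(msg: string): never {
    throw new Error(msg);
}

function useNeverSketch(): number {
    // Type-checks because `never` is assignable to `number`,
    // mirroring why TNever is accepted wherever any TType is expected.
    return failSketch('unreachable');
}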
+ */ +export default class TParam extends TType { + constructor( + public name: string, + public variance: Variance, + public constraint: TType, + public location?: Location) { + super(); + } + + visit(visitor: ITypeVisitor, param?: P) { + return visitor.visitParam(this, param); + } + + createTypeArg(t: TType) { + return new TArg(this.variance, t); + } +} + +export class TParams extends TType { + constructor( + public params: OrderedMap, + public location?: Location + ) { + super(); + } + + visit(visitor: ITypeVisitor, param?: P) { + return visitor.visitParams(this, param); + } +} + +export class TArg extends TType { + constructor(public variance: Variance, public type: TType, public location?: Location) { + super(); + } + + visit(visitor: ITypeVisitor, param?: P) { + return visitor.visitArg(this, param); + } +} + +export class TArgs extends TType { + constructor(public args: TArg[], public location?: Location) { + super(); + } + + visit(visitor: ITypeVisitor, param?: P) { + return visitor.visitArgs(this, param); + } +} diff --git a/src/typecheck/types/TRecursive.ts b/src_old/typecheck/types/TRecursive.ts similarity index 57% rename from src/typecheck/types/TRecursive.ts rename to src_old/typecheck/types/TRecursive.ts index 45f50e3..1e1064b 100644 --- a/src/typecheck/types/TRecursive.ts +++ b/src_old/typecheck/types/TRecursive.ts @@ -1,6 +1,7 @@ import TType from './TType'; -import { TypeDeclaration } from '~/syntax'; -import ITypeVisitor from '~/typecheck/visitors'; +import { Declaration } from '~/syntax'; +import ITypeVisitor from '~/typecheck/visitors/ITypeVisitor'; +import { Location } from '~/parser/Tokenizer'; /** @@ -11,15 +12,12 @@ import ITypeVisitor from '~/typecheck/visitors'; * TODO: this may not work the way we want it to. */ export default class TRecursive extends TType { - decl: TypeDeclaration; - - constructor(decl: TypeDeclaration) { + constructor(public decl: Declaration, public location?: Location) { super(); - this.decl = decl; } - visit(visitor: ITypeVisitor) { - return visitor.visitRecursive(this); + visit(visitor: ITypeVisitor, param?: P) { + return visitor.visitRecursive(this, param); } toString() { diff --git a/src/typecheck/types/TStruct.ts b/src_old/typecheck/types/TStruct.ts similarity index 53% rename from src/typecheck/types/TStruct.ts rename to src_old/typecheck/types/TStruct.ts index 963fb57..47e13ac 100644 --- a/src/typecheck/types/TStruct.ts +++ b/src_old/typecheck/types/TStruct.ts @@ -1,5 +1,6 @@ import TType from './TType'; -import ITypeVisitor from '~/typecheck/visitors'; +import ITypeVisitor from '~/typecheck/visitors/ITypeVisitor'; +import { Location } from '~/parser/Tokenizer'; type StructFieldTypes = { [name: string]: TType }; @@ -8,15 +9,12 @@ type StructFieldTypes = { [name: string]: TType }; * Struct type, extension of tuple type where the values have names (fields). 
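// A self-contained sketch of the three checks the Variance values above imply
// for a TArg-style wrapper (names and the string-keyed `assignable` callback
// are illustrative, not the project's API).
type VarianceSketch = 'covariant' | 'contravariant' | 'invariant';

function argAcceptsSketch(
    variance: VarianceSketch,
    assignable: (to: string, from: string) => boolean,  // stand-in for the real check
    declared: string,
    provided: string,
): boolean {
    if (variance === 'covariant') return assignable(declared, provided);      // provided flows into declared
    if (variance === 'contravariant') return assignable(provided, declared);  // declared flows into provided
    // invariant: both directions must hold
    return assignable(declared, provided) && assignable(provided, declared);
}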
*/ export default class TStruct extends TType { - fields: StructFieldTypes; - - constructor(fields: StructFieldTypes = {}) { + constructor(public location?: Location, public fields: StructFieldTypes = {}) { super(); - this.fields = fields; } - visit(visitor: ITypeVisitor) { - return visitor.visitStruct(this); + visit(visitor: ITypeVisitor, param?: P) { + return visitor.visitStruct(this, param); } toString() { diff --git a/src/typecheck/types/TTuple.ts b/src_old/typecheck/types/TTuple.ts similarity index 50% rename from src/typecheck/types/TTuple.ts rename to src_old/typecheck/types/TTuple.ts index 83cd1c6..983068c 100644 --- a/src/typecheck/types/TTuple.ts +++ b/src_old/typecheck/types/TTuple.ts @@ -1,20 +1,18 @@ import TType from './TType'; -import ITypeVisitor from '~/typecheck/visitors'; +import ITypeVisitor from '~/typecheck/visitors/ITypeVisitor'; +import { Location } from '~/parser/Tokenizer'; /** * Tuple type, represents a group of values of several heterogeneous types, including no values at all. */ export default class TTuple extends TType { - types: TType[]; - - constructor(types: TType[] = []) { + constructor(public location?: Location, public types: TType[] = []) { super(); - this.types = types; } - visit(visitor: ITypeVisitor) { - return visitor.visitTuple(this); + visit(visitor: ITypeVisitor, param?: P) { + return visitor.visitTuple(this, param); } toString() { diff --git a/src/typecheck/types/TType.ts b/src_old/typecheck/types/TType.ts similarity index 69% rename from src/typecheck/types/TType.ts rename to src_old/typecheck/types/TType.ts index ffeb92c..ad002bf 100644 --- a/src/typecheck/types/TType.ts +++ b/src_old/typecheck/types/TType.ts @@ -1,5 +1,10 @@ import { SymbolTable } from '~/typecheck/TypeCheckContext'; -import ITypeVisitor, * as visitors from '~/typecheck/visitors'; +import ITypeVisitor from '~/typecheck/visitors/ITypeVisitor'; +import ASTNode from '~/syntax/ASTNode'; +import AssignmentVisitor from '~/typecheck/visitors/AssignmentVisitor'; +import SpecifyTypeVisitor from '~/typecheck/visitors/SpecifyTypeVisitor'; +import * as visitors from '~/typecheck/visitors/IsVisitors'; +import { Location } from '~/parser/Tokenizer'; /** @@ -9,18 +14,26 @@ import ITypeVisitor, * as visitors from '~/typecheck/visitors'; * between types. */ export default abstract class TType { - abstract visit(visitor: ITypeVisitor): T; + abstract location?: Location; + + abstract visit(visitor: ITypeVisitor, param?: P): T; /** * Determines if a type can be assigned to this type. 
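// Sketch of the extra `param` argument now threaded through visit() above:
// the visitor method receives both the concrete type and the caller-supplied
// parameter. The names below are illustrative, not the project's interfaces.
interface ParamVisitorSketch<T, P> {
    visitLeaf(value: number, param: P): T;
}

class LeafSketch {
    constructor(private value: number) {}
    visit<T, P>(visitor: ParamVisitorSketch<T, P>, param: P): T {
        return visitor.visitLeaf(this.value, param);   // forward the param unchanged
    }
}

// e.g. a formatter that needs an indent width passed in per call:
const indentFormatterSketch: ParamVisitorSketch<string, number> = {
    visitLeaf: (value, indent) => ' '.repeat(indent) + String(value),
};
// new LeafSketch(42).visit(indentFormatterSketch, 4) === '    42'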
* @see visitors.AssignmentVisitor */ isAssignableFrom(from: TType) { - return this.visit(new visitors.AssignmentVisitor(from)); + return this.visit(new AssignmentVisitor(from)); + } + + assertAssignableFrom(from: TType, node: ASTNode) { + // never and inferred types are assignable to all types + if (from.isNever() || from.constructor.name === 'TInferred') return true; + return this.visit(new AssertAssignmentVisitor(from, node)); } specifyTypeParams(args: SymbolTable) { - return this.visit(new visitors.SpecifyTypeVisitor(args)); + return this.visit(new SpecifyTypeVisitor(args)); } /** @@ -45,12 +58,13 @@ export default abstract class TType { getSize() { return this.visit(new visitors.GetSizeVisitor()); } getBaseType(): TType { return this.visit(new visitors.GetBaseTypeVisitor()); } + getFields() { return this.visit(new visitors.GetFieldsVisitor()); } getField(field: string) { return this.visit(new visitors.GetFieldVisitor(field)); } getTupleTypes() { return this.visit(new visitors.GetTupleTypesVisitor()); } getParams() { return this.visit(new visitors.GetParamsVisitor()); } getTypeParams() { return this.visit(new visitors.GetTypeParamsVisitor()); } getReturnType() { return this.visit(new visitors.GetReturnTypeVisitor()); } - getModuleId() { return this.visit(new visitors.GetModuleIdVisitor()); } + getNamespaceNames() { return this.visit(new visitors.GetNamespaceNamesVisitor()); } /** * Return an exact (shallow) copy of this instance diff --git a/src/typecheck/types/TUnion.ts b/src_old/typecheck/types/TUnion.ts similarity index 53% rename from src/typecheck/types/TUnion.ts rename to src_old/typecheck/types/TUnion.ts index 2ec68c7..1bd5fb9 100644 --- a/src/typecheck/types/TUnion.ts +++ b/src_old/typecheck/types/TUnion.ts @@ -1,5 +1,6 @@ import TType from './TType'; -import ITypeVisitor from '~/typecheck/visitors'; +import ITypeVisitor from '~/typecheck/visitors/ITypeVisitor'; +import { Location } from '~/parser/Tokenizer'; /** @@ -7,15 +8,12 @@ import ITypeVisitor from '~/typecheck/visitors'; * These are structured as a binary tree. 
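// Sketch of the "one small visitor per query" pattern behind the getField /
// getTupleTypes / getReturnType helpers above (names here are illustrative).
interface QuerySketch<T> {
    onStruct(fields: Record<string, string>): T;
    onOther(): T;
}

class FieldQuerySketch implements QuerySketch<string | undefined> {
    constructor(private field: string) {}
    onStruct(fields: Record<string, string>) { return fields[this.field]; }
    onOther() { return undefined; }
}

// A type's getField(name) then reduces to this.visit(new FieldQuerySketch(name)),
// keeping the per-type branching inside the visitor rather than in TType itself.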
*/ export default class TUnion extends TType { - types: TType[]; - - constructor(types: TType[] = []) { + constructor(public location?: Location, public types: TType[] = []) { super(); - this.types = types; } - visit(visitor: ITypeVisitor) { - return visitor.visitUnion(this); + visit(visitor: ITypeVisitor, param?: P) { + return visitor.visitUnion(this, param); } toString() { diff --git a/src/typecheck/types/TUnknown.ts b/src_old/typecheck/types/TUnknown.ts similarity index 100% rename from src/typecheck/types/TUnknown.ts rename to src_old/typecheck/types/TUnknown.ts diff --git a/src/typecheck/types/index.ts b/src_old/typecheck/types/index.ts similarity index 90% rename from src/typecheck/types/index.ts rename to src_old/typecheck/types/index.ts index 1a76efb..f86dd60 100644 --- a/src/typecheck/types/index.ts +++ b/src_old/typecheck/types/index.ts @@ -4,10 +4,10 @@ export { default as TBool } from './TBool'; export { default as TChar } from './TChar'; export { default as TFloat } from './TFloat'; export { default as TFunction } from './TFunction'; -export { default as TGeneric } from './TGeneric'; +export { default as TGeneric, TOverloadedGeneric } from './TGeneric'; export { default as TInteger } from './TInteger'; export { default as TNever } from './TNever'; -export { default as TParam, TArg } from './TParam'; +export { default as TParam, TParams, TArg, TArgs } from './TParam'; export { default as TRecursive } from './TRecursive'; export { default as TStruct } from './TStruct'; export { default as TTuple } from './TTuple'; diff --git a/src_old/typecheck/visitors/AssertAssignmentVisitor.ts b/src_old/typecheck/visitors/AssertAssignmentVisitor.ts new file mode 100644 index 0000000..7313e84 --- /dev/null +++ b/src_old/typecheck/visitors/AssertAssignmentVisitor.ts @@ -0,0 +1,330 @@ +import ITypeVisitor from './ITypeVisitor'; +import { + TType, TInteger, TFloat, TChar, TBool, TArray, TStruct, TTuple, TFunction, + TGeneric, TParam, TArg, TUnion, TAny, TNever, TRecursive, TInferred, TParams, TArgs, + TNamespace +} from '~/typecheck/types'; +import { TOverloadedGeneric } from '~/typecheck/types/TGeneric'; +import TypeChecker from '~/typecheck/TypeChecker'; +import TypeErrorContext from '~/typecheck/TypeErrorContext'; +import preVisit from '~/utils/preVisit'; +import { range } from '~/utils/utils'; + + +/** + * Determines if any type is assignable from a type. + * + * Reading the below can seem a bit confusing. + * Here's some context to try to make sense of it. + * + * ## Assignability + * + * When we talk about "assignability", we're talking about + * the ability of values of a "from" type to be "assigned" + * to values of a "to" type. + * For example, 16-bit integers are assignable to 32-bit + * integers because the set of 16-bit integers falls into + * the set of 32-bit integers. The reverse is not true + * because there are 32-bit integers that cannot be represented + * by a 16-bit value. + * + * ## Assignability to vs. from + * + * The reason that we check assignability "from" as opposed + * to assignability "to" is because when these checks need to be made, + * it is usually the "to" type that is known, and the "from" type can + * be any type. + * For example, when we are doing an "add" operation, we expect numbers + * to be used. So when we are checking the type of value being used + * in the operation, we don't need to know that "from" type, but we do + * know that the "to" type is a number. 
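// A tiny illustration of the direction described above: assignability is
// checked "from" an arbitrary type into a known "to" type. The int encoding
// here is illustrative, not the project's TInteger.
interface IntSketch { size: number; signed: boolean; }

// A signed 16-bit value fits into a signed 32-bit slot, but not the reverse.
function assignableFromSketch(to: IntSketch, from: IntSketch): boolean {
    if (!to.signed && from.signed) return false;  // can't put signed values into an unsigned slot
    return from.size <= to.size;                  // the source range must fit the target
}

// assignableFromSketch({ size: 32, signed: true }, { size: 16, signed: true }) === true
// assignableFromSketch({ size: 16, signed: true }, { size: 32, signed: true }) === false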
+ * This holds true with most type checking operations, so we treat the + * operation as checking if a specific type is assignable "from" some + * arbitrary type. + * + * ## Assignability with the visitor pattern + * + * With the visitor pattern, you have an operation that exists for + * each type in a given set of types, and that operation may have + * parameters. To the "visitee", those parameters are provided + * as parameters to its "visit()" (or "accept()") function. + * But the "visitor" (this class for example) stores the operation + * parameters on the instance of itself, and the object being + * operated on is provided as a parameter to each visit() method. + * Because we check assignability "from", this means that the object + * being operated on is the "to" type, and the parameter of the + * operation is the "from" type. Thus, the "from" type is stored + * on the AssignmentVisitor instance, and the "to" type is provided + * as a parameter to each "visit()" method. + * To illustrate this, here is an example call stack for an + * AssignmentVisitor operation (assuming "to" is a TInteger): + * + * 1. someplace: toType.isAssignableFrom(from: fromType) + * 2. TInteger.isAssignableFrom(): this.visit(visitor: new AssignmentVisitor(from: fromType)) + * 3. TInteger.visit(): visitor.visitInteger(to: this) + * 4. AssignmentVisitor.visitInteger(): Now `this.from` and parameter `to` are the operands. + */ + + +class EscapeError extends Error {} + +/** + * This visitor takes an 'expected' type and compares it with another 'actual' type for assignment. + * There is an optional 'inverted' flag that controls the direction of the relationship: + * - if 'inverted' is false, you are seeing if the actual type is assignable to the expected type (normal case) + * - if 'inverted' is true, you are seeing if the expected type is assignable to the actual type (inverted case) + * The check will have one of three results: + * - the assignment will be valid, and the visitor will return true + * - the assignment will be found invalid, an error will be created using the 'actual' type's locataion and an + * escape error will be thrown to exit the visitor + * - the assignment will be found invalid, but the 'actual' type has no location, so false will be returned, + * and the first of its ancestors that has a location will be used to create the error + */ +@preVisit() +export default class AssertAssignmentVisitor implements ITypeVisitor { + private error: TypeErrorContext; + private running: boolean = false; + + constructor(private typeChecker: TypeChecker, private target: 'from' | 'to') { + this.error = new TypeErrorContext(typeChecker.errors); + } + + private getTarget(to: TType, from: TType) { + return this.target === 'from' ? from : to; + } + + private invertTarget() { + if (this.target === 'from') return 'to'; + return 'from'; + } + + private typeMismatch(to: TType, from: TType) { + const target = this.getTarget(to, from); + // this level has no location, forward to parent + if (!target.location) return false; + // there is a location, add error and throw + this.error.typeMismatch(from, to, target.location); + throw new EscapeError(); + } + + /** + * When the expected type needs to be drilled down into, + * a more complex child visit is required, which actually + * needs to create a new visitor off of this one. + * For all intents and purposes, the new visitor can be thought of + * as identical to the original, just with a new expected type, + * and optionally a new 'inverted' flag (e.g. for function params). 
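// A self-contained sketch of the four-step call stack listed above: the "to"
// type receives the visitor, while the "from" type lives on the visitor
// instance. SketchInt stands in for TInteger; the size rule is illustrative.
interface SketchAssignVisitor {
    visitInt(to: SketchInt): boolean;
}

class SketchInt {
    constructor(public size: number) {}
    // steps 1 + 2: toType.isAssignableFrom(from) wraps `from` in a visitor
    isAssignableFrom(from: SketchInt): boolean {
        return this.visit(new SketchAssignment(from));
    }
    // step 3: dispatch to the method for this concrete type
    visit(visitor: SketchAssignVisitor): boolean {
        return visitor.visitInt(this);
    }
}

class SketchAssignment implements SketchAssignVisitor {
    constructor(private from: SketchInt) {}
    // step 4: both operands are now in scope
    visitInt(to: SketchInt): boolean {
        return this.from.size <= to.size;   // e.g. a 16-bit int fits into a 32-bit int
    }
}

// new SketchInt(32).isAssignableFrom(new SketchInt(16)) === true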
+ */ + private fork(to: TType, from: TType, target = this.target) { + const newVisitor = new AssertAssignmentVisitor(this.typeChecker, target); + newVisitor.running = true; + return to.visit(newVisitor, from); + } + + private callWithCatch(func: () => bool) { + try { + return func(); + } catch (err) { + if (err instanceof EscapeError) return false; + throw err; + } + } + + /** + * For nested types with mutliple child types, we need to handle + * the true/false/throw logic correctly. + * Basically, in all cases, all children need to be visited. + * If all return true, the result is true. + * If none throw but at least one returns false, the result is false. + * In that instance the parent can create an error just like normal. + * If at least one throws, the result is to throw. + */ + private wrapChildren(children: Iterable, func: (child: T) => bool) { + let success = true, thrown = false; + for (const child of children) { + try { + success = func(child); + } catch (err) { + if (err instanceof EscapeError) thrown = true; + else throw err; + } + } + if (success) return true; + if (thrown) throw new EscapeError(); + return false; + } + + /** + * This pre-visitor is designed to capture where the visitor is entered. + * When the first visit is called, 'this.running' will be false, meaning + * that the visitor has just started. 'this.running' is immediately + * set to true so that child callers know that they are not the top-level. + * Only in the instance of the top-level, the visitor is wrapped in a try-catch, + * so that escape errors are caught and converted to a false result. + * All other visitors are simply called so that the escape error ascends + * to the top. + */ + preVisit(visitor: () => bool) { + const running = !this.running; + this.running = true; + if (running) { + return this.callWithCatch(() => { + const result = visitor(); + if (!result) throw new Error('No location specified for assignment error'); + return result; + }); + } else { + return visitor(); + } + } + + visitInteger(to: TInteger, from: TType): boolean { + // only integers can be assigned to other integers + if (!from.isInteger()) return this.typeMismatch(to, from); + // signed ints cannot be assigned to unsigned ints + const toSigned = to.isSigned(), fromSigned = from.isSigned(); + if (!toSigned && fromSigned) return this.typeMismatch(to, from); + // ints of size n can't be assigned to ints of size ( { + if (!from.hasField(k)) return this.typeMismatch(to, from); + return to.getField(k).visit(this, from.getField(k)); + }); + if (!success) return this.typeMismatch(to, from); + return true; + } + + visitTuple(to: TTuple, from: TType): boolean { + // only tuples can be assigned to other tuples + if (!from.isTuple()) return this.typeMismatch(to, from); + // 'from' is assignable to 'to' only if 'from' has at least as many items as 'to' + // and the types of those items are assignable + const fromTypes = from.getTupleTypes(), toTypes = to.getTupleTypes(); + if (fromTypes.length < toTypes.length) return this.typeMismatch(to, from); + const success = this.wrapChildren(range(toTypes.length), i => { + return to.getTupleTypes()[i].visit(this, from.getTupleTypes()[i]); + }); + if (!success) return this.typeMismatch(to, from); + return true; + } + + /** + * Function assignability is more complex than other types. + * Function return types are covariant (just like most types) + * because they are 'read' values rather than 'write' values. + * Function parameters are contravariant because they are + * 'write' values. 
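// A sketch of the aggregation rule wrapChildren() documents above, taken as a
// direct reading of the three cases in its comment: all children true => true,
// any escape => rethrow, otherwise false. SketchChildEscape mirrors EscapeError;
// this version accumulates results so an early `false` is never overwritten.
class SketchChildEscape extends Error {}

function sketchWrapChildren<T>(children: Iterable<T>, check: (child: T) => boolean): boolean {
    let allTrue = true;
    let anyThrew = false;
    for (const child of children) {          // every child is visited, no early exit
        try {
            if (!check(child)) allTrue = false;
        } catch (err) {
            if (err instanceof SketchChildEscape) { anyThrew = true; allTrue = false; }
            else throw err;
        }
    }
    if (anyThrew) throw new SketchChildEscape();   // let an ancestor's catch handle it
    return allTrue;
}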
This means that the assignability relationship + * for function parameters is inverted. + */ + visitFunction(to: TFunction, from: TType): boolean { + // only functions can be assigned to other functions + if (!from.isFunction()) return this.typeMismatch(to, from); + // 'from' needs to have *at most* the same number of params as 'to' + // because the extra params passed to it won't do anything + const fromParams = from.getParams(), toParams = to.getParams(); + if (fromParams.length > toParams.length) return this.typeMismatch(to, from); + // the return types need to be assignable + if (!to.getReturnType().visit(this, from.getReturnType())) return this.typeMismatch(to, from); + // the param types need to be assignable (using the reverse relationship as described above) + const success = this.wrapChildren(range(toParams.length), i => { + if (i >= fromParams.length) return true; + return this.fork(from.getParams()[i], to.getParams()[i], this.invertTarget()); + }) + if (!success) return this.typeMismatch(to, from); + return true; + } + + visitParam(to: TParam, from: TType): boolean { + return to.constraint.visit(this, from); + } + + visitArg(to: TArg, from: TType): boolean { + if (to.variance === 'covariant') { + // the type must be assignable to our type + return to.type.visit(this, from); + } else if (to.variance === 'contravariant') { + // our type must be assignable to the type + return this.fork(from, to.type, this.invertTarget()); + } else { + // invariant, both must be true + return to.type.visit(this, from) && this.fork(from, to.type, this.invertTarget()); + } + } + + visitUnion(to: TUnion, from: TType): boolean { + // the type just needs to be assignable to one of the types in the union + for (const tt of to.types) { + if (tt.visit(this, from)) return true; + } + return this.typeMismatch(to, from); + } + + visitAny(_to: TAny, _from: TType): boolean { + // all types are assignable to any + return true; + } + + visitNever(_to: TNever, _from: TType): boolean { + // no types are assignable to never + return false; + } + + visitRecursive(to: TRecursive, from: TType): boolean { + return to.decl.type.visit(this, from); + } + + visitInferred(to: TInferred, from: TType): boolean { + if (to.type) return to.type.visit(this, from); + // reaching this point means that we can assign the inferred type + to.type = from; + return true; + } + + visitOverloadedGeneric(type: TOverloadedGeneric): boolean { + throw new Error("Method not implemented."); + } + + /** + * Types that will never be visited for assignment + */ + visitGeneric(_actual: TGeneric): boolean { throw new Error("Method not implemented."); } + visitParams(_type: TParams): boolean { throw new Error("Method not implemented."); } + visitArgs(_type: TArgs): boolean { throw new Error("Method not implemented."); } + visitNamespace(_type: TNamespace): boolean { throw new Error("Method not implemented."); } +} diff --git a/src/typecheck/visitors/AssignmentVisitor.ts b/src_old/typecheck/visitors/AssignmentVisitor.ts similarity index 95% rename from src/typecheck/visitors/AssignmentVisitor.ts rename to src_old/typecheck/visitors/AssignmentVisitor.ts index 6368237..96efd85 100644 --- a/src/typecheck/visitors/AssignmentVisitor.ts +++ b/src_old/typecheck/visitors/AssignmentVisitor.ts @@ -3,6 +3,7 @@ import { TType, TInteger, TFloat, TChar, TBool, TArray, TStruct, TTuple, TFunction, TGeneric, TParam, TArg, TUnion, TAny, TNever, TRecursive, TInferred } from '~/typecheck/types'; +import { TOverloadedGeneric } from '~/typecheck/types/TGeneric'; /** @@ 
-176,6 +177,16 @@ export default class AssignmentVisitor implements ITypeVisitor { return false; } + @baseCheck + visitOverloadedGeneric(to: TOverloadedGeneric): boolean { + // if we are trying to assign to an overloaded generic type, + // it means we are trying to assign to the param-less type + const paramLess = to.getParamLessType(); + if (paramLess) return paramLess.visit(this); + // there is no param-less type, so this shouldn't even be happening + return false; + } + visitNamespace() { // namespaces can't just be passed around return false; diff --git a/src_old/typecheck/visitors/CloneVisitor.ts b/src_old/typecheck/visitors/CloneVisitor.ts new file mode 100644 index 0000000..2948673 --- /dev/null +++ b/src_old/typecheck/visitors/CloneVisitor.ts @@ -0,0 +1,131 @@ +import ITypeVisitor from '~/typecheck/visitors/ITypeVisitor'; +import { + TType, TInteger, TFloat, TChar, TBool, TArray, TStruct, TTuple, + TFunction, TGeneric, TParam, TArg, TUnion, TAny, TNever, TRecursive, + TInferred, TNamespace, TOverloadedGeneric, TParams, TArgs +} from '~/typecheck/types'; +import preVisit from '~/utils/preVisit'; + + +/** + * This is a simple visitor that deep clones a type. + * It is intended to be overridden for different operations + * that clone types, such as specifying generic types. + */ +@preVisit() +export default class CloneVisitor implements ITypeVisitor { + /** + * This pre-visitor strips the locations from the cloned types, + * because the clones should never correspond to the same locations + * as the originals. + */ + preVisit(visitor: () => TType) { + return Object.assign(visitor(), { location: undefined }); + } + + visitInteger(type: TInteger): TType { + return type.clone(); + } + + visitFloat(type: TFloat): TType { + return type.clone(); + } + + visitChar(type: TChar): TType { + return type.clone(); + } + + visitBool(type: TBool): TType { + return type.clone(); + } + + visitArray(type: TArray): TType { + return Object.assign(type.clone(), { + baseType: type.baseType.visit(this), + }); + } + + visitStruct(type: TStruct): TType { + return Object.assign(type.clone(), { + fields: Object.keys(type.fields) + .reduce((obj, k) => ({ ...obj, [k]: type.fields[k].visit(this) }), {}), + }); + } + + visitTuple(type: TTuple): TType { + return Object.assign(type.clone(), { + types: type.types.map(t => t.visit(this)), + }); + } + + visitFunction(type: TFunction): TType { + return Object.assign(type.clone(), { + typeParamTypes: type.typeParamTypes.map(t => t.visit(this) as TParam), + paramTypes: type.paramTypes.map(t => t.visit(this)), + returnType: type.returnType.visit(this), + }); + } + + visitGeneric(type: TGeneric): TType { + return Object.assign(type.clone(), { + typeParams: type.typeParams.visit(this), + type: type.type.visit(this), + }); + } + + visitParam(type: TParam): TType { + return Object.assign(type.clone(), { + constraint: type.constraint ? 
type.constraint.visit(this) : undefined, + }); + } + + visitParams(type: TParams): TType { + return Object.assign(type.clone(), { + params: type.params.map(t => t.visit(this) as TParam), + }); + } + + visitArg(type: TArg): TType { + return Object.assign(type.clone(), { + type: type.type.visit(this), + }); + } + + visitArgs(type: TArgs): TType { + return Object.assign(type.clone(), { + args: type.args.map(t => t.visit(this)), + }); + } + + visitUnion(type: TUnion): TType { + return Object.assign(type.clone(), { + types: type.types.map(t => t.visit(this)), + }); + } + + visitAny(type: TAny): TType { + return type.clone(); + } + + visitNever(type: TNever): TType { + return type.clone(); + } + + visitRecursive(type: TRecursive): TType { + return type.clone(); + } + + visitInferred(type: TInferred): TType { + return type.clone(); + } + + visitNamespace(type: TNamespace): TType { + return type.clone(); + } + + visitOverloadedGeneric(type: TOverloadedGeneric): TType { + return Object.assign(type.clone(), { + types: type.types.map(t => t.visit(this)), + }); + } +} \ No newline at end of file diff --git a/src_old/typecheck/visitors/ITypeVisitor.ts b/src_old/typecheck/visitors/ITypeVisitor.ts new file mode 100644 index 0000000..c480f7d --- /dev/null +++ b/src_old/typecheck/visitors/ITypeVisitor.ts @@ -0,0 +1,38 @@ +import { + TInteger, TFloat, TChar, TBool, TArray, TStruct, TTuple, TFunction, + TGeneric, TParam, TArg, TUnion, TAny, TNever, TRecursive, TInferred, + TNamespace, TOverloadedGeneric, TParams, TArgs +} from '~/typecheck/types'; + + +export default interface ITypeVisitor { + // primitive types + visitInteger(type: TInteger, param?: P): T; + visitFloat(type: TFloat, param?: P): T; + visitChar(type: TChar, param?: P): T; + visitBool(type: TBool, param?: P): T; + + // structured types + visitArray(type: TArray, param?: P): T; + visitStruct(type: TStruct, param?: P): T; + visitTuple(type: TTuple, param?: P): T; + + // complex types + visitFunction(type: TFunction, param?: P): T; + visitGeneric(type: TGeneric, param?: P): T; + visitParam(type: TParam, param?: P): T; + visitParams(type: TParams, param?: P): T; + visitArg(type: TArg, param?: P): T; + visitArgs(type: TArgs, param?: P): T; + visitUnion(type: TUnion, param?: P): T; + + // special types + visitAny(type: TAny, param?: P): T; + visitNever(type: TNever, param?: P): T; + + // hidden types + visitRecursive(type: TRecursive, param?: P): T; + visitInferred(type: TInferred, param?: P): T; + visitNamespace(type: TNamespace, param?: P): T; + visitOverloadedGeneric(type: TOverloadedGeneric, param?: P): T; +} diff --git a/src/typecheck/visitors/InferTypeArgsVisitor.ts b/src_old/typecheck/visitors/InferTypeArgsVisitor.ts similarity index 89% rename from src/typecheck/visitors/InferTypeArgsVisitor.ts rename to src_old/typecheck/visitors/InferTypeArgsVisitor.ts index e014cbe..c802b5d 100644 --- a/src/typecheck/visitors/InferTypeArgsVisitor.ts +++ b/src_old/typecheck/visitors/InferTypeArgsVisitor.ts @@ -1,7 +1,7 @@ import ITypeVisitor from './ITypeVisitor'; import { TType, TInteger, TFloat, TChar, TBool, TArray, TStruct, TTuple, TFunction, - TGeneric, TParam, TArg, TUnion, TAny, TNever, TRecursive, TInferred + TGeneric, TParam, TArg, TUnion, TAny, TNever, TRecursive, TInferred, TOverloadedGeneric } from '~/typecheck/types'; import { SymbolTable } from '~/typecheck/TypeCheckContext'; @@ -53,6 +53,13 @@ export default class InferTypeArgsVisitor implements ITypeVisitor { // this should never be called on generic throw new Error("Method not 
implemented."); } + + visitOverloadedGeneric(type: TOverloadedGeneric): void { + // if this is being called, it is being called on the param-less type + const paramLess = type.getParamLessType(); + if (paramLess) return paramLess.visit(this); + throw new Error('This should never be called on an overloaded generic with no parameter-less type'); + } visitNamespace(): void { // this should never be called on a namespace diff --git a/src/typecheck/visitors/IsVisitors.ts b/src_old/typecheck/visitors/IsVisitors.ts similarity index 81% rename from src/typecheck/visitors/IsVisitors.ts rename to src_old/typecheck/visitors/IsVisitors.ts index 9f0bac9..4b1b5e3 100644 --- a/src/typecheck/visitors/IsVisitors.ts +++ b/src_old/typecheck/visitors/IsVisitors.ts @@ -2,9 +2,9 @@ import ITypeVisitor from './ITypeVisitor'; import { TType, TInteger, TFloat, TChar, TBool, TArray, TStruct, TTuple, TFunction, TGeneric, TParam, TArg, TUnion, TAny, TNever, TRecursive, TInferred, - TNamespace + TNamespace, TOverloadedGeneric, TParams, TArgs } from '~/typecheck/types'; -import OrderedMap from '~/typecheck/types/OrderedMap'; +import { NamespaceNames } from '~/typecheck/types/TNamespace'; /** @@ -69,8 +69,15 @@ abstract class GenericVisitor implements ITypeVisitor { visitTuple(_type: TTuple): T { throw new Error("Method not implemented."); } visitFunction(_type: TFunction): T { throw new Error("Method not implemented."); } visitGeneric(_type: TGeneric): T { throw new Error("Method not implemented."); } + visitOverloadedGeneric(type: TOverloadedGeneric): T { + const t = type.getParamLessType(); + if (t) return t.visit(this); + else throw new Error("Method not implemented."); + } visitParam(type: TParam): T { return type.constraint.visit(this); } + visitParams(_type: TParams): T { throw new Error("Method not implemented."); } visitArg(type: TArg): T { return type.type.visit(this); } + visitArgs(_type: TArgs): T { throw new Error("Method not implemented."); } visitUnion(_type: TUnion): T { throw new Error("Method not implemented."); } visitNamespace(_type: TNamespace): T { throw new Error("Method not implemented."); } visitAny(_type: TAny): T { throw new Error("Method not implemented."); } @@ -94,14 +101,20 @@ abstract class IsXVisitor implements ITypeVisitor { visitTuple(_type: TTuple): boolean { return false; } visitFunction(_type: TFunction): boolean { return false; } visitGeneric(_type: TGeneric): boolean { return false; } + visitOverloadedGeneric(type: TOverloadedGeneric): boolean { + const t = type.getParamLessType(); + return t ? t.visit(this) : false; + } visitParam(type: TParam): boolean { return type.constraint.visit(this); } + visitParams(_type: TParams): boolean { return false; } visitArg(type: TArg): boolean { return type.type.visit(this); } + visitArgs(_type: TArgs): boolean { return false; } visitUnion(type: TUnion): boolean { return type.types.every(t => t.visit(this)); } visitNamespace(_type: TNamespace): boolean { return false; } visitAny(_type: TAny): boolean { return false; } visitNever(_type: TNever): boolean { return true; } visitRecursive(type: TRecursive): boolean { return type.decl.type.visit(this); } - visitInferred(_type: TInferred): boolean { return false; /* TODO: ??? */} + visitInferred(_type: TInferred): boolean { return false; /* TODO: ??? 
*/ } } /** @@ -130,7 +143,8 @@ export class IsFunctionVisitor extends IsXVisitor { } export class IsGenericVisitor extends IsXVisitor { visitGeneric() { return true; } - visitFunction(type: TFunction) { return !!type.typeParamTypes.length; } + visitOverloadedGeneric() { return true; } + visitFunction(type: TFunction) { return !!type.typeParams.params.length; } visitUnion() { return false; } } export class IsNamespaceVisitor extends IsXVisitor { @@ -170,10 +184,18 @@ export class GetSizeVisitor extends GenericVisitor { export class GetBaseTypeVisitor extends GenericVisitor { visitArray(type: TArray) { return type.baseType; } - visitUnion(type: TUnion): TType { return new TUnion(type.types.map(t => t.visit(this))); } + visitUnion(type: TUnion): TType { return new TUnion(undefined, type.types.map(t => t.visit(this))); } visitNever(type: TNever) { return type; } } +export class GetFieldsVisitor extends GenericVisitor { + visitStruct(type: TStruct) { return Object.keys(type.fields); } + visitUnion(type: TUnion): string[] { + const fieldses = type.types.map(t => t.visit(this)); + return fieldses[0].filter(f => fieldses.every(fs => fs.includes(f))); + } +} + export class GetFieldVisitor extends GenericVisitor { field: string; @@ -183,7 +205,7 @@ export class GetFieldVisitor extends GenericVisitor { } visitStruct(type: TStruct) { return type.fields[this.field]; } - visitUnion(type: TUnion): TType { return new TUnion(type.types.map(t => t.visit(this))); } + visitUnion(type: TUnion): TType { return new TUnion(undefined, type.types.map(t => t.visit(this))); } } export class GetTupleTypesVisitor extends GenericVisitor { @@ -192,7 +214,7 @@ export class GetTupleTypesVisitor extends GenericVisitor { const tupleTypes = []; const types = type.types.map(t => t.visit(this)); for (let i = 0; i < types[0].length; ++i) { - tupleTypes.push(new TUnion(types.map(ts => ts[i]))); + tupleTypes.push(new TUnion(undefined, types.map(ts => ts[i]))); } return tupleTypes; } @@ -204,22 +226,23 @@ export class GetParamsVisitor extends GenericVisitor { const paramTypes = []; const types = type.types.map(t => t.visit(this)); for (let i = 0; i < types[0].length; ++i) { - paramTypes.push(new TUnion(types.map(ts => ts[i]))); + paramTypes.push(new TUnion(undefined, types.map(ts => ts[i]))); } return paramTypes; } } -export class GetTypeParamsVisitor extends GenericVisitor> { +export class GetTypeParamsVisitor extends GenericVisitor { visitGeneric(type: TGeneric) { return type.typeParams; } - visitFunction(type: TFunction) { return type.typeParamTypes; } + visitOverloadedGeneric(): never { throw new Error('A generic type overload must be chosen'); } + visitFunction(type: TFunction) { return type.typeParams; } } export class GetReturnTypeVisitor extends GenericVisitor { visitFunction(type: TFunction) { return type.returnType; } - visitUnion(type: TUnion): TType { return new TUnion(type.types.map(t => t.visit(this))); } + visitUnion(type: TUnion): TType { return new TUnion(undefined, type.types.map(t => t.visit(this))); } } -export class GetModuleIdVisitor extends GenericVisitor { - visitNamespace(type: TNamespace) { return type.moduleId; } +export class GetNamespaceNamesVisitor extends GenericVisitor { + visitNamespace(type: TNamespace) { return type.names; } } diff --git a/src_old/typecheck/visitors/SpecifyTypeVisitor.ts b/src_old/typecheck/visitors/SpecifyTypeVisitor.ts new file mode 100644 index 0000000..dd2b347 --- /dev/null +++ b/src_old/typecheck/visitors/SpecifyTypeVisitor.ts @@ -0,0 +1,48 @@ +import { + TType, TGeneric, 
TParam, TArg, TOverloadedGeneric +} from '~/typecheck/types'; +import { SymbolTable } from '~/typecheck/TypeCheckContext'; +import CloneVisitor from '~/typecheck/visitors/CloneVisitor'; + + +/** + * This visitor specifies generic types, which must happen + * every time a generic type is used in order to resolve the + * usage to a specific type. + * + * The main goal of this process is to replace usages of type + * parameters in a generic type with the provided type arguments + * corresponding to those type parameters. + * See "visitParam()" for that logic. + * All other logic is simply to clone the current type and visit + * all component types within those types, if any exist. + */ +export default class SpecifyTypeVisitor extends CloneVisitor { + args: SymbolTable; + + constructor(args: SymbolTable) { + super(); + this.args = args; + } + + visitGeneric(_type: TGeneric): TType { + // this should never be called on a generic type + throw new Error("Method not implemented."); + } + + visitOverloadedGeneric(type: TOverloadedGeneric): TType { + // TODO this should operate on the non-generic type, but this is getting obnoxious + } + + /** + * This is the "leaf" operation of this visitor. + * Once we reach a type parameter, we can use the provided args table + * to get the corresponding type provided for the parameter. + */ + visitParam(type: TParam): TType { + return this.args[type.name]; + } + + // already been specified, just return it + visitArg(type: TArg): TType { return type.clone(); } +} diff --git a/src_old/utils/OrderedMap.ts b/src_old/utils/OrderedMap.ts new file mode 100644 index 0000000..7244b73 --- /dev/null +++ b/src_old/utils/OrderedMap.ts @@ -0,0 +1,62 @@ +import { mapSet } from '~/utils/utils'; + +export interface OrderedMap { + add(key: K, value: V): OrderedMap; + getKey(i: number): K; + get(key: K): V | undefined; + getValue(i: number): V | undefined; + size(): number; + keys(): ReadonlyArray; + values(): ReadonlyArray; + [Symbol.iterator](): IterableIterator; + some(predicate: (item: V) => boolean): boolean; + map(mapper: (item: V, key?: K) => T): OrderedMap; +} + +interface OrderedMapInternal extends OrderedMap { + readonly keyOrder: ReadonlyArray; + readonly innerMap: ReadonlyMap; +} + +export function OrderedMap(): OrderedMap { + const map: OrderedMapInternal = { + keyOrder: [], + innerMap: new Map(), + add(key: K, value: V): OrderedMap { + const map: OrderedMapInternal = { + ...this, + keyOrder: [...this.keyOrder, key], + innerMap: mapSet(this.innerMap, key, value) + }; + return map; + }, + getKey(i: number): K { return this.keyOrder[i]; }, + get(key: K): V | undefined { return this.innerMap.get(key); }, + getValue(i: number): V | undefined { return this.innerMap.get(this.keyOrder[i]); }, + size(): number { return this.keyOrder.length; }, + keys(): ReadonlyArray { return this.keyOrder; }, + values(): ReadonlyArray { return [...this]; }, + *[Symbol.iterator](): IterableIterator { + for (const key of this.keyOrder) { + yield this.innerMap.get(key)!; + } + }, + some(predicate: (item: V) => boolean): boolean { + for (const i of this) { + if (predicate(i)) return true; + } + return false; + }, + map(mapper: (item: V, key: K) => T): OrderedMap { + const map: OrderedMapInternal = { + ...OrderedMap(), + keyOrder: this.keyOrder, + innerMap: new Map( + this.keyOrder.map<[K, T]>(k => [k, mapper(this.innerMap.get(k)!, k)]) + ) + }; + return map; + } + } + return map; +} diff --git a/src/utils/Scope.ts b/src_old/utils/Scope.ts similarity index 100% rename from src/utils/Scope.ts 
rename to src_old/utils/Scope.ts diff --git a/src/utils/preVisit.ts b/src_old/utils/preVisit.ts similarity index 100% rename from src/utils/preVisit.ts rename to src_old/utils/preVisit.ts diff --git a/test/setup.js b/test/setup.js new file mode 100644 index 0000000..ee92599 --- /dev/null +++ b/test/setup.js @@ -0,0 +1,4 @@ +module.exports = async function () { + // require-hook allows us to use our import path aliases + require('../dist/src/require-hook'); +} diff --git a/yarn.lock b/yarn.lock index a33aa9c..d31511d 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2,32 +2,60 @@ # yarn lockfile v1 -"@types/chai-subset@^1.3.1": - version "1.3.1" - resolved "https://registry.yarnpkg.com/@types/chai-subset/-/chai-subset-1.3.1.tgz#114af342bb0a3e04d23f55af279b0a4d383c2edd" +"@babel/code-frame@^7.0.0-beta.35": + version "7.0.0-beta.37" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.0.0-beta.37.tgz#2da1dd3b1b57bfdea777ddc378df7cd12fe40171" dependencies: - "@types/chai" "*" + chalk "^2.0.0" + esutils "^2.0.2" + js-tokens "^3.0.0" -"@types/chai@*", "@types/chai@^4.0.4": - version "4.0.4" - resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.0.4.tgz#fe86315d9a66827feeb16f73bc954688ec950e18" +"@types/jest@^22.0.1": + version "22.0.1" + resolved "https://registry.yarnpkg.com/@types/jest/-/jest-22.0.1.tgz#6370a6d60cce3845e4cd5d00bf65f654264685bc" -"@types/mocha@^2.2.43": - version "2.2.43" - resolved "https://registry.yarnpkg.com/@types/mocha/-/mocha-2.2.43.tgz#03c54589c43ad048cbcbfd63999b55d0424eec27" +"@types/node@*": + version "9.3.0" + resolved "https://registry.yarnpkg.com/@types/node/-/node-9.3.0.tgz#3a129cda7c4e5df2409702626892cb4b96546dd5" -"@types/node@^8.0.34": - version "8.0.34" - resolved "https://registry.yarnpkg.com/@types/node/-/node-8.0.34.tgz#55f801fa2ddb2a40dd6dfc15ecfe1dde9c129fe9" +"@types/node@^9.3.0": + version "9.6.7" + resolved "https://registry.yarnpkg.com/@types/node/-/node-9.6.7.tgz#5f3816d1db2155edcde1b2e3aa5d0e5c520cb564" -"@types/sinon@^2.3.7": - version "2.3.7" - resolved "https://registry.yarnpkg.com/@types/sinon/-/sinon-2.3.7.tgz#e92c2fed3297eae078d78d1da032b26788b4af86" +abab@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/abab/-/abab-1.0.4.tgz#5faad9c2c07f60dd76770f71cf025b62a63cfd4e" -abbrev@1, abbrev@1.0.x: +abbrev@1: version "1.0.9" resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.0.9.tgz#91b4792588a7738c25f35dd6f63752a2f8776135" +acorn-globals@^4.0.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/acorn-globals/-/acorn-globals-4.1.0.tgz#ab716025dbe17c54d3ef81d32ece2b2d99fe2538" + dependencies: + acorn "^5.0.0" + +acorn@^5.0.0, acorn@^5.1.2: + version "5.3.0" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-5.3.0.tgz#7446d39459c54fb49a80e6ee6478149b940ec822" + +ajv@^4.9.1: + version "4.11.8" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-4.11.8.tgz#82ffb02b29e662ae53bdc20af15947706739c536" + dependencies: + co "^4.6.0" + json-stable-stringify "^1.0.1" + +ajv@^5.1.0: + version "5.5.2" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-5.5.2.tgz#73b5eeca3fab653e3d3f9422b341ad42205dc965" + dependencies: + co "^4.6.0" + fast-deep-equal "^1.0.0" + fast-json-stable-stringify "^2.0.0" + json-schema-traverse "^0.3.0" + align-text@^0.1.1, align-text@^0.1.3: version "0.1.4" resolved "https://registry.yarnpkg.com/align-text/-/align-text-0.1.4.tgz#0cd90a561093f35d0a99256c22b7069433fad117" @@ -40,21 +68,48 @@ amdefine@>=0.0.4: version "1.0.1" resolved 
"https://registry.yarnpkg.com/amdefine/-/amdefine-1.0.1.tgz#4a5282ac164729e93619bcfd3ad151f817ce91f5" +ansi-escapes@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-3.0.0.tgz#ec3e8b4e9f8064fc02c3ac9b65f1c275bda8ef92" + ansi-regex@^2.0.0: version "2.1.1" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" +ansi-regex@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998" + ansi-styles@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" +ansi-styles@^3.1.0, ansi-styles@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.0.tgz#c159b8d5be0f9e5a6f346dab94f16ce022161b88" + dependencies: + color-convert "^1.9.0" + +ansi-styles@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" + dependencies: + color-convert "^1.9.0" + any-promise@^1.0.0: version "1.3.0" resolved "https://registry.yarnpkg.com/any-promise/-/any-promise-1.3.0.tgz#abc6afeedcea52e809cdc0376aed3ce39635d17f" -app-module-path@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/app-module-path/-/app-module-path-1.1.0.tgz#a6ac5368450f209b9f5b86e9a3e4a6ab6fe7531c" +anymatch@^1.3.0: + version "1.3.2" + resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-1.3.2.tgz#553dcb8f91e3c889845dfdba34c77721b90b9d7a" + dependencies: + micromatch "^2.1.5" + normalize-path "^2.0.0" + +app-module-path@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/app-module-path/-/app-module-path-2.2.0.tgz#641aa55dfb7d6a6f0a8141c4b9c0aa50b6c24dd5" append-transform@^0.4.0: version "0.4.0" @@ -62,21 +117,82 @@ append-transform@^0.4.0: dependencies: default-require-extensions "^1.0.0" +aproba@^1.0.3: + version "1.2.0" + resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a" + +are-we-there-yet@~1.1.2: + version "1.1.4" + resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-1.1.4.tgz#bb5dca382bb94f05e15194373d16fd3ba1ca110d" + dependencies: + delegates "^1.0.0" + readable-stream "^2.0.6" + argparse@^1.0.7: version "1.0.9" resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.9.tgz#73d83bc263f86e97f8cc4f6bae1b0e90a7d22c86" dependencies: sprintf-js "~1.0.2" -assertion-error@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/assertion-error/-/assertion-error-1.0.2.tgz#13ca515d86206da0bac66e834dd397d87581094c" +arr-diff@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-2.0.0.tgz#8f3b827f955a8bd669697e4a4256ac3ceae356cf" + dependencies: + arr-flatten "^1.0.1" + +arr-flatten@^1.0.1: + version "1.1.0" + resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1" + +array-equal@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/array-equal/-/array-equal-1.0.0.tgz#8c2a5ef2472fd9ea742b04c77a75093ba2757c93" + +array-filter@~0.0.0: + version "0.0.1" + resolved "https://registry.yarnpkg.com/array-filter/-/array-filter-0.0.1.tgz#7da8cf2e26628ed732803581fd21f67cacd2eeec" + +array-map@~0.0.0: + version "0.0.0" + resolved "https://registry.yarnpkg.com/array-map/-/array-map-0.0.0.tgz#88a2bab73d1cf7bcd5c1b118a003f66f665fa662" + 
+array-reduce@~0.0.0: + version "0.0.0" + resolved "https://registry.yarnpkg.com/array-reduce/-/array-reduce-0.0.0.tgz#173899d3ffd1c7d9383e4479525dbe278cab5f2b" + +array-unique@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.2.1.tgz#a1d97ccafcbc2625cc70fadceb36a50c58b01a53" + +arrify@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/arrify/-/arrify-1.0.1.tgz#898508da2226f380df904728456849c1501a4b0d" + +asn1@~0.2.3: + version "0.2.3" + resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.3.tgz#dac8787713c9966849fc8180777ebe9c1ddf3b86" + +assert-plus@1.0.0, assert-plus@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" + +assert-plus@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-0.2.0.tgz#d74e1b87e7affc0db8aadb7021f3fe48101ab234" ast-module-types@^2.3.1, ast-module-types@^2.3.2: version "2.3.2" resolved "https://registry.yarnpkg.com/ast-module-types/-/ast-module-types-2.3.2.tgz#4bb1de2d729678824429e22a628d03e87df4ad11" -async@1.x, async@^1.4.0: +astral-regex@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/astral-regex/-/astral-regex-1.0.0.tgz#6c8c3fb827dd43ee3918f27b82782ab7658a6fd9" + +async-each@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.1.tgz#19d386a1d9edc6e7c1c85d388aedbcc56d33602d" + +async@^1.4.0: version "1.5.2" resolved "https://registry.yarnpkg.com/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a" @@ -86,6 +202,22 @@ async@^2.1.4: dependencies: lodash "^4.14.0" +asynckit@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" + +aws-sign2@~0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.6.0.tgz#14342dd38dbcc94d0e5b87d763cd63612c0e794f" + +aws-sign2@~0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" + +aws4@^1.2.1, aws4@^1.6.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.6.0.tgz#83ef5ca860b2b32e4a0deedee8c771b9db57471e" + babel-code-frame@^6.22.0: version "6.22.0" resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.22.0.tgz#027620bee567a88c32561574e7fd0801d33118e4" @@ -94,6 +226,38 @@ babel-code-frame@^6.22.0: esutils "^2.0.2" js-tokens "^3.0.0" +babel-code-frame@^6.26.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.26.0.tgz#63fd43f7dc1e3bb7ce35947db8fe369a3f58c74b" + dependencies: + chalk "^1.1.3" + esutils "^2.0.2" + js-tokens "^3.0.2" + +babel-core@^6.0.0, babel-core@^6.24.1, babel-core@^6.26.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-core/-/babel-core-6.26.0.tgz#af32f78b31a6fcef119c87b0fd8d9753f03a0bb8" + dependencies: + babel-code-frame "^6.26.0" + babel-generator "^6.26.0" + babel-helpers "^6.24.1" + babel-messages "^6.23.0" + babel-register "^6.26.0" + babel-runtime "^6.26.0" + babel-template "^6.26.0" + babel-traverse "^6.26.0" + babel-types "^6.26.0" + babylon "^6.18.0" + convert-source-map "^1.5.0" + debug "^2.6.8" + json5 "^0.5.1" + lodash "^4.17.4" + minimatch "^3.0.4" + path-is-absolute "^1.0.1" + private "^0.1.7" + slash "^1.0.0" + source-map "^0.5.6" + babel-generator@^6.18.0: version "6.25.0" resolved 
"https://registry.yarnpkg.com/babel-generator/-/babel-generator-6.25.0.tgz#33a1af70d5f2890aeb465a4a7793c1df6a9ea9fc" @@ -107,12 +271,101 @@ babel-generator@^6.18.0: source-map "^0.5.0" trim-right "^1.0.1" +babel-generator@^6.26.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-generator/-/babel-generator-6.26.0.tgz#ac1ae20070b79f6e3ca1d3269613053774f20dc5" + dependencies: + babel-messages "^6.23.0" + babel-runtime "^6.26.0" + babel-types "^6.26.0" + detect-indent "^4.0.0" + jsesc "^1.3.0" + lodash "^4.17.4" + source-map "^0.5.6" + trim-right "^1.0.1" + +babel-helpers@^6.24.1: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-helpers/-/babel-helpers-6.24.1.tgz#3471de9caec388e5c850e597e58a26ddf37602b2" + dependencies: + babel-runtime "^6.22.0" + babel-template "^6.24.1" + +babel-jest@^22.4.3: + version "22.4.3" + resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-22.4.3.tgz#4b7a0b6041691bbd422ab49b3b73654a49a6627a" + dependencies: + babel-plugin-istanbul "^4.1.5" + babel-preset-jest "^22.4.3" + babel-messages@^6.23.0: version "6.23.0" resolved "https://registry.yarnpkg.com/babel-messages/-/babel-messages-6.23.0.tgz#f3cdf4703858035b2a2951c6ec5edf6c62f2630e" dependencies: babel-runtime "^6.22.0" +babel-plugin-istanbul@^4.1.4, babel-plugin-istanbul@^4.1.5: + version "4.1.5" + resolved "https://registry.yarnpkg.com/babel-plugin-istanbul/-/babel-plugin-istanbul-4.1.5.tgz#6760cdd977f411d3e175bb064f2bc327d99b2b6e" + dependencies: + find-up "^2.1.0" + istanbul-lib-instrument "^1.7.5" + test-exclude "^4.1.1" + +babel-plugin-jest-hoist@^22.1.0: + version "22.1.0" + resolved "https://registry.yarnpkg.com/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-22.1.0.tgz#c1281dd7887d77a1711dc760468c3b8285dde9ee" + +babel-plugin-jest-hoist@^22.4.3: + version "22.4.3" + resolved "https://registry.yarnpkg.com/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-22.4.3.tgz#7d8bcccadc2667f96a0dcc6afe1891875ee6c14a" + +babel-plugin-syntax-object-rest-spread@^6.13.0: + version "6.13.0" + resolved "https://registry.yarnpkg.com/babel-plugin-syntax-object-rest-spread/-/babel-plugin-syntax-object-rest-spread-6.13.0.tgz#fd6536f2bce13836ffa3a5458c4903a597bb3bf5" + +babel-plugin-transform-es2015-modules-commonjs@^6.24.1: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-modules-commonjs/-/babel-plugin-transform-es2015-modules-commonjs-6.26.0.tgz#0d8394029b7dc6abe1a97ef181e00758dd2e5d8a" + dependencies: + babel-plugin-transform-strict-mode "^6.24.1" + babel-runtime "^6.26.0" + babel-template "^6.26.0" + babel-types "^6.26.0" + +babel-plugin-transform-strict-mode@^6.24.1: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-strict-mode/-/babel-plugin-transform-strict-mode-6.24.1.tgz#d5faf7aa578a65bbe591cf5edae04a0c67020758" + dependencies: + babel-runtime "^6.22.0" + babel-types "^6.24.1" + +babel-preset-jest@^22.0.1: + version "22.1.0" + resolved "https://registry.yarnpkg.com/babel-preset-jest/-/babel-preset-jest-22.1.0.tgz#ff4e704102f9642765e2254226050561d8942ec9" + dependencies: + babel-plugin-jest-hoist "^22.1.0" + babel-plugin-syntax-object-rest-spread "^6.13.0" + +babel-preset-jest@^22.4.3: + version "22.4.3" + resolved "https://registry.yarnpkg.com/babel-preset-jest/-/babel-preset-jest-22.4.3.tgz#e92eef9813b7026ab4ca675799f37419b5a44156" + dependencies: + babel-plugin-jest-hoist "^22.4.3" + babel-plugin-syntax-object-rest-spread "^6.13.0" + +babel-register@^6.26.0: + version "6.26.0" + resolved 
"https://registry.yarnpkg.com/babel-register/-/babel-register-6.26.0.tgz#6ed021173e2fcb486d7acb45c6009a856f647071" + dependencies: + babel-core "^6.26.0" + babel-runtime "^6.26.0" + core-js "^2.5.0" + home-or-tmp "^2.0.0" + lodash "^4.17.4" + mkdirp "^0.5.1" + source-map-support "^0.4.15" + babel-runtime@^6.0.0, babel-runtime@^6.22.0: version "6.23.0" resolved "https://registry.yarnpkg.com/babel-runtime/-/babel-runtime-6.23.0.tgz#0a9489f144de70efb3ce4300accdb329e2fc543b" @@ -120,6 +373,13 @@ babel-runtime@^6.0.0, babel-runtime@^6.22.0: core-js "^2.4.0" regenerator-runtime "^0.10.0" +babel-runtime@^6.26.0, babel-runtime@^6.9.2: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-runtime/-/babel-runtime-6.26.0.tgz#965c7058668e82b55d7bfe04ff2337bc8b5647fe" + dependencies: + core-js "^2.4.0" + regenerator-runtime "^0.11.0" + babel-template@^6.16.0: version "6.25.0" resolved "https://registry.yarnpkg.com/babel-template/-/babel-template-6.25.0.tgz#665241166b7c2aa4c619d71e192969552b10c071" @@ -130,6 +390,16 @@ babel-template@^6.16.0: babylon "^6.17.2" lodash "^4.2.0" +babel-template@^6.24.1, babel-template@^6.26.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-template/-/babel-template-6.26.0.tgz#de03e2d16396b069f46dd9fff8521fb1a0e35e02" + dependencies: + babel-runtime "^6.26.0" + babel-traverse "^6.26.0" + babel-types "^6.26.0" + babylon "^6.18.0" + lodash "^4.17.4" + babel-traverse@^6.18.0, babel-traverse@^6.25.0: version "6.25.0" resolved "https://registry.yarnpkg.com/babel-traverse/-/babel-traverse-6.25.0.tgz#2257497e2fcd19b89edc13c4c91381f9512496f1" @@ -144,6 +414,20 @@ babel-traverse@^6.18.0, babel-traverse@^6.25.0: invariant "^2.2.0" lodash "^4.2.0" +babel-traverse@^6.26.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-traverse/-/babel-traverse-6.26.0.tgz#46a9cbd7edcc62c8e5c064e2d2d8d0f4035766ee" + dependencies: + babel-code-frame "^6.26.0" + babel-messages "^6.23.0" + babel-runtime "^6.26.0" + babel-types "^6.26.0" + babylon "^6.18.0" + debug "^2.6.8" + globals "^9.18.0" + invariant "^2.2.2" + lodash "^4.17.4" + babel-types@^6.18.0, babel-types@^6.25.0: version "6.25.0" resolved "https://registry.yarnpkg.com/babel-types/-/babel-types-6.25.0.tgz#70afb248d5660e5d18f811d91c8303b54134a18e" @@ -153,10 +437,23 @@ babel-types@^6.18.0, babel-types@^6.25.0: lodash "^4.2.0" to-fast-properties "^1.0.1" -babylon@^6.17.0, babylon@^6.17.2, babylon@^6.17.4: +babel-types@^6.24.1, babel-types@^6.26.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-types/-/babel-types-6.26.0.tgz#a3b073f94ab49eb6fa55cd65227a334380632497" + dependencies: + babel-runtime "^6.26.0" + esutils "^2.0.2" + lodash "^4.17.4" + to-fast-properties "^1.0.3" + +babylon@^6.17.0, babylon@^6.17.2: version "6.17.4" resolved "https://registry.yarnpkg.com/babylon/-/babylon-6.17.4.tgz#3e8b7402b88d22c3423e137a1577883b15ff869a" +babylon@^6.18.0: + version "6.18.0" + resolved "https://registry.yarnpkg.com/babylon/-/babylon-6.18.0.tgz#af2f3b88fa6f5c1e4c634d1a0f8eac4f55b395e3" + babylon@~6.8.1: version "6.8.4" resolved "https://registry.yarnpkg.com/babylon/-/babylon-6.8.4.tgz#097306b8dabae95159225cf29b3ea55912053180" @@ -167,6 +464,40 @@ balanced-match@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" +bcrypt-pbkdf@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.1.tgz#63bc5dcb61331b92bc05fd528953c33462a06f8d" + dependencies: + 
tweetnacl "^0.14.3" + +binary-extensions@^1.0.0: + version "1.11.0" + resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.11.0.tgz#46aa1751fb6a2f93ee5e689bb1087d4b14c6c205" + +block-stream@*: + version "0.0.9" + resolved "https://registry.yarnpkg.com/block-stream/-/block-stream-0.0.9.tgz#13ebfe778a03205cfe03751481ebb4b3300c126a" + dependencies: + inherits "~2.0.0" + +boom@2.x.x: + version "2.10.1" + resolved "https://registry.yarnpkg.com/boom/-/boom-2.10.1.tgz#39c8918ceff5799f83f9492a848f625add0c766f" + dependencies: + hoek "2.x.x" + +boom@4.x.x: + version "4.3.1" + resolved "https://registry.yarnpkg.com/boom/-/boom-4.3.1.tgz#4f8a3005cb4a7e3889f749030fd25b96e01d2e31" + dependencies: + hoek "4.x.x" + +boom@5.x.x: + version "5.2.0" + resolved "https://registry.yarnpkg.com/boom/-/boom-5.2.0.tgz#5dd9da6ee3a5f302077436290cb717d3f4a54e02" + dependencies: + hoek "4.x.x" + brace-expansion@^1.1.7: version "1.1.8" resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.8.tgz#c07b211c7c952ec1f8efd51a77ef0d1d3990a292" @@ -174,14 +505,50 @@ brace-expansion@^1.1.7: balanced-match "^1.0.0" concat-map "0.0.1" -browser-stdout@1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/browser-stdout/-/browser-stdout-1.3.0.tgz#f351d32969d32fa5d7a5567154263d928ae3bd1f" +braces@^1.8.2: + version "1.8.5" + resolved "https://registry.yarnpkg.com/braces/-/braces-1.8.5.tgz#ba77962e12dff969d6b76711e914b737857bf6a7" + dependencies: + expand-range "^1.8.1" + preserve "^0.2.0" + repeat-element "^1.1.2" + +browser-process-hrtime@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/browser-process-hrtime/-/browser-process-hrtime-0.1.2.tgz#425d68a58d3447f02a04aa894187fce8af8b7b8e" + +browser-resolve@^1.11.2: + version "1.11.2" + resolved "https://registry.yarnpkg.com/browser-resolve/-/browser-resolve-1.11.2.tgz#8ff09b0a2c421718a1051c260b32e48f442938ce" + dependencies: + resolve "1.1.7" + +bser@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/bser/-/bser-2.0.0.tgz#9ac78d3ed5d915804fd87acb158bc797147a1719" + dependencies: + node-int64 "^0.4.0" + +builtin-modules@^1.0.0, builtin-modules@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-1.1.1.tgz#270f076c5a72c02f5b65a47df94c5fe3a278892f" + +callsites@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/callsites/-/callsites-2.0.0.tgz#06eb84f00eea413da86affefacbffb36093b3c50" camelcase@^1.0.2: version "1.2.1" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-1.2.1.tgz#9bb5304d2e0b56698b2c758b08a3eaa9daa58a39" +camelcase@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-4.1.0.tgz#d545635be1e33c542649c69173e5de6acfae34dd" + +caseless@~0.12.0: + version "0.12.0" + resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" + center-align@^0.1.1: version "0.1.3" resolved "https://registry.yarnpkg.com/center-align/-/center-align-0.1.3.tgz#aa0d32629b6ee972200411cbd4461c907bc2b7ad" @@ -189,22 +556,7 @@ center-align@^0.1.1: align-text "^0.1.3" lazy-cache "^1.0.3" -chai-subset@^1.6.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/chai-subset/-/chai-subset-1.6.0.tgz#a5d0ca14e329a79596ed70058b6646bd6988cfe9" - -chai@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/chai/-/chai-4.0.2.tgz#2f7327c4de6f385dd7787999e2ab02697a32b83b" - dependencies: - assertion-error "^1.0.1" - check-error "^1.0.1" - deep-eql 
"^2.0.1" - get-func-name "^2.0.0" - pathval "^1.0.0" - type-detect "^4.0.0" - -chalk@1.1.3, chalk@^1.0.0, chalk@^1.1.0, chalk@^1.1.1: +chalk@^1.1.0, chalk@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" dependencies: @@ -214,9 +566,40 @@ chalk@1.1.3, chalk@^1.0.0, chalk@^1.1.0, chalk@^1.1.1: strip-ansi "^3.0.0" supports-color "^2.0.0" -check-error@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/check-error/-/check-error-1.0.2.tgz#574d312edd88bb5dd8912e9286dd6c0aed4aac82" +chalk@^2.0.0, chalk@^2.0.1: + version "2.3.0" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.3.0.tgz#b5ea48efc9c1793dccc9b4767c93914d3f2d52ba" + dependencies: + ansi-styles "^3.1.0" + escape-string-regexp "^1.0.5" + supports-color "^4.0.0" + +chalk@^2.1.0, chalk@^2.3.0, chalk@^2.3.2: + version "2.4.1" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.1.tgz#18c49ab16a037b6eb0152cc83e3471338215b66e" + dependencies: + ansi-styles "^3.2.1" + escape-string-regexp "^1.0.5" + supports-color "^5.3.0" + +chokidar@^1.6.0: + version "1.7.0" + resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-1.7.0.tgz#798e689778151c8076b4b360e5edd28cda2bb468" + dependencies: + anymatch "^1.3.0" + async-each "^1.0.0" + glob-parent "^2.0.0" + inherits "^2.0.1" + is-binary-path "^1.0.0" + is-glob "^2.0.0" + path-is-absolute "^1.0.0" + readdirp "^2.0.0" + optionalDependencies: + fsevents "^1.0.0" + +ci-info@^1.0.0: + version "1.1.2" + resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-1.1.2.tgz#03561259db48d0474c8bdc90f5b47b068b6bbfb4" cli-cursor@^2.1.0: version "2.1.0" @@ -224,9 +607,9 @@ cli-cursor@^2.1.0: dependencies: restore-cursor "^2.0.0" -cli-spinners@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-1.1.0.tgz#f1847b168844d917a671eb9d147e3df497c90d06" +cli-spinners@^1.0.1: + version "1.3.1" + resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-1.3.1.tgz#002c1990912d0d59580c93bd36c056de99e4259a" cliui@^2.1.0: version "2.1.0" @@ -236,27 +619,57 @@ cliui@^2.1.0: right-align "^0.1.1" wordwrap "0.0.2" -colors@^1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/colors/-/colors-1.1.2.tgz#168a4701756b6a7f51a12ce0c97bfa28c084ed63" +cliui@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-4.0.0.tgz#743d4650e05f36d1ed2575b59638d87322bfbbcc" + dependencies: + string-width "^2.1.1" + strip-ansi "^4.0.0" + wrap-ansi "^2.0.0" + +co@^4.6.0: + version "4.6.0" + resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" + +code-point-at@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" -commander@2.9.0: - version "2.9.0" - resolved "https://registry.yarnpkg.com/commander/-/commander-2.9.0.tgz#9c99094176e12240cb22d6c5146098400fe0f7d4" +color-convert@^1.9.0: + version "1.9.1" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.1.tgz#c1261107aeb2f294ebffec9ed9ecad529a6097ed" dependencies: - graceful-readlink ">= 1.0.0" + color-name "^1.1.1" + +color-name@^1.1.1: + version "1.1.3" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" -commander@^2.11.0, commander@^2.6.0, commander@^2.8.1, commander@^2.9.0: +combined-stream@^1.0.5, combined-stream@~1.0.5: + version "1.0.5" + resolved 
"https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.5.tgz#938370a57b4a51dea2c77c15d5c5fdf895164009" + dependencies: + delayed-stream "~1.0.0" + +commander@2.13.0: + version "2.13.0" + resolved "https://registry.yarnpkg.com/commander/-/commander-2.13.0.tgz#6964bca67685df7c1f1430c584f07d7597885b9c" + +commander@^2.11.0, commander@^2.6.0, commander@^2.8.1: version "2.11.0" resolved "https://registry.yarnpkg.com/commander/-/commander-2.11.0.tgz#157152fd1e7a6c8d98a5b715cf376df928004563" +commander@^2.12.1, commander@^2.13.0: + version "2.15.1" + resolved "https://registry.yarnpkg.com/commander/-/commander-2.15.1.tgz#df46e867d0fc2aec66a34662b406a9ccafff5b0f" + commander@~2.8.1: version "2.8.1" resolved "https://registry.yarnpkg.com/commander/-/commander-2.8.1.tgz#06be367febfda0c330aa1e2a072d3dc9762425d4" dependencies: graceful-readlink ">= 1.0.0" -commondir@1.0.1: +commondir@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" @@ -264,25 +677,81 @@ concat-map@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" +console-control-strings@^1.0.0, console-control-strings@~1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e" + +content-type-parser@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/content-type-parser/-/content-type-parser-1.0.2.tgz#caabe80623e63638b2502fd4c7f12ff4ce2352e7" + +convert-source-map@^1.4.0, convert-source-map@^1.5.0: + version "1.5.1" + resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.5.1.tgz#b8278097b9bc229365de5c62cf5fcaed8b5599e5" + core-js@^2.4.0: version "2.4.1" resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.4.1.tgz#4de911e667b0eae9124e34254b53aea6fc618d3e" -core-util-is@~1.0.0: +core-js@^2.5.0: + version "2.5.3" + resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.5.3.tgz#8acc38345824f16d8365b7c9b4259168e8ed603e" + +core-util-is@1.0.2, core-util-is@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" -debug@2.2.0, debug@~2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/debug/-/debug-2.2.0.tgz#f87057e995b1a1f6ae6a4960664137bc56f039da" +cpx@^1.5.0: + version "1.5.0" + resolved "https://registry.yarnpkg.com/cpx/-/cpx-1.5.0.tgz#185be018511d87270dedccc293171e37655ab88f" + dependencies: + babel-runtime "^6.9.2" + chokidar "^1.6.0" + duplexer "^0.1.1" + glob "^7.0.5" + glob2base "^0.0.12" + minimatch "^3.0.2" + mkdirp "^0.5.1" + resolve "^1.1.7" + safe-buffer "^5.0.1" + shell-quote "^1.6.1" + subarg "^1.0.0" + +cross-spawn@^5.0.1: + version "5.1.0" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-5.1.0.tgz#e8bd0efee58fcff6f8f94510a0a554bbfa235449" + dependencies: + lru-cache "^4.0.1" + shebang-command "^1.2.0" + which "^1.2.9" + +cryptiles@2.x.x: + version "2.0.5" + resolved "https://registry.yarnpkg.com/cryptiles/-/cryptiles-2.0.5.tgz#3bdfecdc608147c1c67202fa291e7dca59eaa3b8" + dependencies: + boom "2.x.x" + +cryptiles@3.x.x: + version "3.1.2" + resolved "https://registry.yarnpkg.com/cryptiles/-/cryptiles-3.1.2.tgz#a89fbb220f5ce25ec56e8c4aa8a4fd7b5b0d29fe" dependencies: - ms "0.7.1" + boom "5.x.x" -debug@2.6.0: - version "2.6.0" - resolved 
"https://registry.yarnpkg.com/debug/-/debug-2.6.0.tgz#bc596bcabe7617f11d9fa15361eded5608b8499b" +cssom@0.3.x, "cssom@>= 0.3.2 < 0.4.0": + version "0.3.2" + resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.3.2.tgz#b8036170c79f07a90ff2f16e22284027a243848b" + +"cssstyle@>= 0.2.37 < 0.3.0": + version "0.2.37" + resolved "https://registry.yarnpkg.com/cssstyle/-/cssstyle-0.2.37.tgz#541097234cb2513c83ceed3acddc27ff27987d54" + dependencies: + cssom "0.3.x" + +dashdash@^1.12.0: + version "1.14.1" + resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" dependencies: - ms "0.7.2" + assert-plus "^1.0.0" debug@^2.2.0, debug@^2.6.3: version "2.6.8" @@ -290,22 +759,22 @@ debug@^2.2.0, debug@^2.6.3: dependencies: ms "2.0.0" +debug@^2.6.8: + version "2.6.9" + resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" + dependencies: + ms "2.0.0" + debug@^3.0.1, debug@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261" dependencies: ms "2.0.0" -decamelize@^1.0.0: +decamelize@^1.0.0, decamelize@^1.1.1: version "1.2.0" resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" -deep-eql@^2.0.1: - version "2.0.2" - resolved "https://registry.yarnpkg.com/deep-eql/-/deep-eql-2.0.2.tgz#b1bac06e56f0a76777686d50c9feb75c2ed7679a" - dependencies: - type-detect "^3.0.0" - deep-extend@~0.4.0: version "0.4.2" resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.4.2.tgz#48b699c27e334bf89f10892be432f6e4c7d34a7f" @@ -320,14 +789,29 @@ default-require-extensions@^1.0.0: dependencies: strip-bom "^2.0.0" -dependency-tree@5.11.0: - version "5.11.0" - resolved "https://registry.yarnpkg.com/dependency-tree/-/dependency-tree-5.11.0.tgz#928464d6f9273607d3f66b9a57e259e635667755" +define-properties@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.2.tgz#83a73f2fea569898fb737193c8f873caf6d45c94" + dependencies: + foreach "^2.0.5" + object-keys "^1.0.8" + +delayed-stream@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + +delegates@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a" + +dependency-tree@^6.0.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/dependency-tree/-/dependency-tree-6.1.0.tgz#bfef43e1236778f7f8c387a3a718a79eec1d1a29" dependencies: commander "^2.6.0" - debug "^2.2.0" - filing-cabinet "^1.9.0" - precinct "^3.8.0" + debug "^3.1.0" + filing-cabinet "^1.13.0" + precinct "^4.1.0" detect-indent@^4.0.0: version "4.0.0" @@ -335,6 +819,14 @@ detect-indent@^4.0.0: dependencies: repeating "^2.0.0" +detect-libc@^1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b" + +detect-newline@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-2.1.0.tgz#f41f1c10be4b00e87b5f13da680759f2c5bfd3e2" + detective-amd@^2.4.0: version "2.4.0" resolved "https://registry.yarnpkg.com/detective-amd/-/detective-amd-2.4.0.tgz#5eb0df4ef5c18a94033b07daf136dbcd5fc75cd5" @@ -357,14 +849,23 @@ detective-es6@^1.2.0: dependencies: node-source-walk "^3.3.0" -detective-less@1.0.0: - version "1.0.0" - resolved 
"https://registry.yarnpkg.com/detective-less/-/detective-less-1.0.0.tgz#426c78c9ab6e3275bf66cc91abac0053bb452d7d" +detective-less@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/detective-less/-/detective-less-1.0.1.tgz#06ce19dfdeea53291074ce2888cc4b644bc94c09" dependencies: - debug "~2.2.0" + debug "^3.1.0" gonzales-pe "^3.4.4" node-source-walk "^3.2.0" +detective-postcss@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/detective-postcss/-/detective-postcss-2.0.0.tgz#1f62c312a401bfb6cdd9a2ffc5e952d112afd3f2" + dependencies: + debug "^3.1.0" + is-url "^1.2.4" + postcss "^6.0.21" + postcss-values-parser "^1.5.0" + detective-sass@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/detective-sass/-/detective-sass-2.0.1.tgz#05660aa1b95cfd87f574643bface3e8a268112a1" @@ -385,30 +886,40 @@ detective-stylus@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/detective-stylus/-/detective-stylus-1.0.0.tgz#50aee7db8babb990381f010c63fabba5b58e54cd" -detective-typescript@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/detective-typescript/-/detective-typescript-1.0.1.tgz#6affa0d4bf8ca500194f30c28be445c99fecf81d" +detective-typescript@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/detective-typescript/-/detective-typescript-2.0.0.tgz#dc619fc3d69ccd59021412c5855a6ff22f41a710" dependencies: node-source-walk "3.2.0" - typescript "2.0.10" - typescript-eslint-parser "1.0.2" + typescript "^2.6.1" + typescript-eslint-parser "^9.0.0" -diff@3.2.0, diff@^3.2.0: +diff@^3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/diff/-/diff-3.2.0.tgz#c9ce393a4b7cbd0b058a725c93df299027868ff9" -diff@^3.1.0: - version "3.4.0" - resolved "https://registry.yarnpkg.com/diff/-/diff-3.4.0.tgz#b1d85507daf3964828de54b37d0d73ba67dda56c" +domexception@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/domexception/-/domexception-1.0.0.tgz#81fe5df81b3f057052cde3a9fa9bf536a85b9ab0" -enhanced-resolve@~3.0.3: - version "3.0.3" - resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-3.0.3.tgz#df14c06b5fc5eecade1094c9c5a12b4b3edc0b62" +duplexer@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/duplexer/-/duplexer-0.1.1.tgz#ace6ff808c1ce66b57d1ebf97977acb02334cfc1" + +ecc-jsbn@~0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz#0fc73a9ed5f0d53c38193398523ef7e543777505" + dependencies: + jsbn "~0.1.0" + +enhanced-resolve@^3.4.1: + version "3.4.1" + resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-3.4.1.tgz#0421e339fd71419b3da13d129b3979040230476e" dependencies: graceful-fs "^4.1.2" memory-fs "^0.4.0" object-assign "^4.0.1" - tapable "^0.2.5" + tapable "^0.2.7" errno@^0.1.3: version "0.1.4" @@ -416,11 +927,35 @@ errno@^0.1.3: dependencies: prr "~0.0.0" -escape-string-regexp@1.0.5, escape-string-regexp@^1.0.2: +error-ex@^1.2.0: + version "1.3.1" + resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.1.tgz#f855a86ce61adc4e8621c3cda21e7a7612c3a8dc" + dependencies: + is-arrayish "^0.2.1" + +es-abstract@^1.5.1: + version "1.10.0" + resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.10.0.tgz#1ecb36c197842a00d8ee4c2dfd8646bb97d60864" + dependencies: + es-to-primitive "^1.1.1" + function-bind "^1.1.1" + has "^1.0.1" + is-callable "^1.1.3" + is-regex "^1.0.4" + +es-to-primitive@^1.1.1: + version "1.1.1" + resolved 
"https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.1.1.tgz#45355248a88979034b6792e19bb81f2b7975dd0d" + dependencies: + is-callable "^1.1.1" + is-date-object "^1.0.1" + is-symbol "^1.0.1" + +escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" -escodegen@^1.8.0: +escodegen@^1.8.0, escodegen@^1.9.0: version "1.9.0" resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-1.9.0.tgz#9811a2f265dc1cd3894420ee3717064b632b8852" dependencies: @@ -439,10 +974,6 @@ esprima@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.0.tgz#4499eddcd1110e0b218bacf2fa7f7f59f55ca804" -esprima@~1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/esprima/-/esprima-1.0.4.tgz#9f557e08fc3b4d26ece9dd34f8fbf476b62585ad" - estraverse@^4.2.0: version "4.2.0" resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.2.0.tgz#0dee3fed31fcd469618ce7342099fc1afa0bdb13" @@ -451,64 +982,262 @@ esutils@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.2.tgz#0abf4f1caa5bcb1f7a9d8acc6dea4faaa04bac9b" -fast-levenshtein@~2.0.4: - version "2.0.6" - resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" - -file-exists@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/file-exists/-/file-exists-1.0.0.tgz#e6d269b56567b8922581398e990dd7078f72d616" +exec-sh@^0.2.0: + version "0.2.1" + resolved "https://registry.yarnpkg.com/exec-sh/-/exec-sh-0.2.1.tgz#163b98a6e89e6b65b47c2a28d215bc1f63989c38" + dependencies: + merge "^1.1.3" -fileset@^2.0.2: - version "2.0.3" - resolved "https://registry.yarnpkg.com/fileset/-/fileset-2.0.3.tgz#8e7548a96d3cc2327ee5e674168723a333bba2a0" +execa@^0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/execa/-/execa-0.7.0.tgz#944becd34cc41ee32a63a9faf27ad5a65fc59777" dependencies: - glob "^7.0.3" - minimatch "^3.0.3" + cross-spawn "^5.0.1" + get-stream "^3.0.0" + is-stream "^1.1.0" + npm-run-path "^2.0.0" + p-finally "^1.0.0" + signal-exit "^3.0.0" + strip-eof "^1.0.0" + +exit@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" -filing-cabinet@^1.9.0: - version "1.12.0" - resolved "https://registry.yarnpkg.com/filing-cabinet/-/filing-cabinet-1.12.0.tgz#6fb93fd968e83bedf1b4298f28236a8aafdf7d78" +expand-brackets@^0.1.4: + version "0.1.5" + resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-0.1.5.tgz#df07284e342a807cd733ac5af72411e581d1177b" dependencies: - app-module-path "^1.1.0" - commander "^2.8.1" - debug "^2.2.0" - enhanced-resolve "~3.0.3" - is-relative-path "^1.0.1" - module-definition "^2.2.4" - module-lookup-amd "^4.0.2" - object-assign "^4.0.1" - resolve "^1.1.7" - resolve-dependency-path "^1.0.2" - sass-lookup "^1.1.0" - stylus-lookup "^1.0.1" - typescript "^2.4.2" + is-posix-bracket "^0.1.0" -find@0.2.6: - version "0.2.6" - resolved "https://registry.yarnpkg.com/find/-/find-0.2.6.tgz#0d218b5d48c3424193f64cea59d389f8daa71d01" +expand-range@^1.8.1: + version "1.8.2" + resolved "https://registry.yarnpkg.com/expand-range/-/expand-range-1.8.2.tgz#a299effd335fe2721ebae8e257ec79644fc85337" dependencies: - traverse-chain "~0.1.0" + fill-range "^2.1.0" -formatio@1.2.0, formatio@^1.2.0: - version "1.2.0" - resolved 
"https://registry.yarnpkg.com/formatio/-/formatio-1.2.0.tgz#f3b2167d9068c4698a8d51f4f760a39a54d818eb" +expect@^22.1.0: + version "22.1.0" + resolved "https://registry.yarnpkg.com/expect/-/expect-22.1.0.tgz#f8f9b019ab275d859cbefed531fbaefe8972431d" dependencies: - samsam "1.x" + ansi-styles "^3.2.0" + jest-diff "^22.1.0" + jest-get-type "^22.1.0" + jest-matcher-utils "^22.1.0" + jest-message-util "^22.1.0" + jest-regex-util "^22.1.0" -fs-extra@~0.6.4: - version "0.6.4" - resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-0.6.4.tgz#f46f0c75b7841f8d200b3348cd4d691d5a099d15" +expect@^22.4.3: + version "22.4.3" + resolved "https://registry.yarnpkg.com/expect/-/expect-22.4.3.tgz#d5a29d0a0e1fb2153557caef2674d4547e914674" dependencies: - jsonfile "~1.0.1" - mkdirp "0.3.x" - ncp "~0.4.2" - rimraf "~2.2.0" + ansi-styles "^3.2.0" + jest-diff "^22.4.3" + jest-get-type "^22.4.3" + jest-matcher-utils "^22.4.3" + jest-message-util "^22.4.3" + jest-regex-util "^22.4.3" -fs.realpath@^1.0.0: +extend@~3.0.0, extend@~3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.1.tgz#a755ea7bc1adfcc5a31ce7e762dbaadc5e636444" + +extglob@^0.3.1: + version "0.3.2" + resolved "https://registry.yarnpkg.com/extglob/-/extglob-0.3.2.tgz#2e18ff3d2f49ab2765cec9023f011daa8d8349a1" + dependencies: + is-extglob "^1.0.0" + +extsprintf@1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05" + +extsprintf@^1.2.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.0.tgz#e2689f8f356fad62cca65a3a91c5df5f9551692f" + +fast-deep-equal@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-1.0.0.tgz#96256a3bc975595eb36d82e9929d060d893439ff" + +fast-json-stable-stringify@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz#d5142c0caee6b1189f87d3a76111064f86c8bbf2" + +fast-levenshtein@~2.0.4: + version "2.0.6" + resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" + +fb-watchman@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/fb-watchman/-/fb-watchman-2.0.0.tgz#54e9abf7dfa2f26cd9b1636c588c1afc05de5d58" + dependencies: + bser "^2.0.0" + +file-exists@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/file-exists/-/file-exists-2.0.0.tgz#a24150665150e62d55bc5449281d88d2b0810dca" + +filename-regex@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/filename-regex/-/filename-regex-2.0.1.tgz#c1c4b9bee3e09725ddb106b75c1e301fe2f18b26" + +fileset@^2.0.2: + version "2.0.3" + resolved "https://registry.yarnpkg.com/fileset/-/fileset-2.0.3.tgz#8e7548a96d3cc2327ee5e674168723a333bba2a0" + dependencies: + glob "^7.0.3" + minimatch "^3.0.3" + +filing-cabinet@^1.13.0: + version "1.14.0" + resolved "https://registry.yarnpkg.com/filing-cabinet/-/filing-cabinet-1.14.0.tgz#629d9db0a8410c463ce222364b5a708f2cbd71b3" + dependencies: + app-module-path "^2.2.0" + commander "^2.13.0" + debug "^3.1.0" + enhanced-resolve "^3.4.1" + is-relative-path "^1.0.2" + module-definition "^2.2.4" + module-lookup-amd "^5.0.1" + resolve "^1.5.0" + resolve-dependency-path "^1.0.2" + sass-lookup "^1.1.0" + stylus-lookup "^1.0.1" + typescript "^2.4.2" + +fill-range@^2.1.0: + version "2.2.3" + resolved 
"https://registry.yarnpkg.com/fill-range/-/fill-range-2.2.3.tgz#50b77dfd7e469bc7492470963699fe7a8485a723" + dependencies: + is-number "^2.1.0" + isobject "^2.0.0" + randomatic "^1.1.3" + repeat-element "^1.1.2" + repeat-string "^1.5.2" + +find-index@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/find-index/-/find-index-0.1.1.tgz#675d358b2ca3892d795a1ab47232f8b6e2e0dde4" + +find-up@^1.0.0: + version "1.1.2" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-1.1.2.tgz#6b2e9822b1a2ce0a60ab64d610eccad53cb24d0f" + dependencies: + path-exists "^2.0.0" + pinkie-promise "^2.0.0" + +find-up@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7" + dependencies: + locate-path "^2.0.0" + +find@^0.2.8: + version "0.2.9" + resolved "https://registry.yarnpkg.com/find/-/find-0.2.9.tgz#4b73f1ff9e56ad91b76e716407fe5ffe6554bb8c" + dependencies: + traverse-chain "~0.1.0" + +flatten@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/flatten/-/flatten-1.0.2.tgz#dae46a9d78fbe25292258cc1e780a41d95c03782" + +for-in@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" + +for-own@^0.1.4: + version "0.1.5" + resolved "https://registry.yarnpkg.com/for-own/-/for-own-0.1.5.tgz#5265c681a4f294dabbf17c9509b6763aa84510ce" + dependencies: + for-in "^1.0.1" + +foreach@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/foreach/-/foreach-2.0.5.tgz#0bee005018aeb260d0a3af3ae658dd0136ec1b99" + +forever-agent@~0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" + +form-data@~2.1.1: + version "2.1.4" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.1.4.tgz#33c183acf193276ecaa98143a69e94bfee1750d1" + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.5" + mime-types "^2.1.12" + +form-data@~2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.1.tgz#6fb94fbd71885306d73d15cc497fe4cc4ecd44bf" + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.5" + mime-types "^2.1.12" + +fs-extra@4.0.3: + version "4.0.3" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-4.0.3.tgz#0d852122e5bc5beb453fb028e9c0c9bf36340c94" + dependencies: + graceful-fs "^4.1.2" + jsonfile "^4.0.0" + universalify "^0.1.0" + +fs-extra@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-5.0.0.tgz#414d0110cdd06705734d055652c5411260c31abd" + dependencies: + graceful-fs "^4.1.2" + jsonfile "^4.0.0" + universalify "^0.1.0" + +fs.realpath@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" +fsevents@^1.0.0, fsevents@^1.1.1: + version "1.1.3" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.1.3.tgz#11f82318f5fe7bb2cd22965a108e9306208216d8" + dependencies: + nan "^2.3.0" + node-pre-gyp "^0.6.39" + +fstream-ignore@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/fstream-ignore/-/fstream-ignore-1.0.5.tgz#9c31dae34767018fe1d249b24dada67d092da105" + dependencies: + fstream "^1.0.0" + inherits "2" + minimatch "^3.0.0" + +fstream@^1.0.0, fstream@^1.0.10, fstream@^1.0.2: + version "1.0.11" + resolved "https://registry.yarnpkg.com/fstream/-/fstream-1.0.11.tgz#5c1fb1f117477114f0632a0eb4b71b3cb0fd3171" + dependencies: + graceful-fs "^4.1.2" + 
inherits "~2.0.0" + mkdirp ">=0.5 0" + rimraf "2" + +function-bind@^1.0.2, function-bind@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" + +gauge@~2.7.3: + version "2.7.4" + resolved "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7" + dependencies: + aproba "^1.0.3" + console-control-strings "^1.0.0" + has-unicode "^2.0.0" + object-assign "^4.1.0" + signal-exit "^3.0.0" + string-width "^1.0.1" + strip-ansi "^3.0.1" + wide-align "^1.1.0" + get-amd-module-type@^2.0.4: version "2.0.5" resolved "https://registry.yarnpkg.com/get-amd-module-type/-/get-amd-module-type-2.0.5.tgz#e671ec5a96ad5fbf53a3a22a289e9238c772ddb0" @@ -516,22 +1245,44 @@ get-amd-module-type@^2.0.4: ast-module-types "^2.3.2" node-source-walk "^3.2.0" -get-func-name@^2.0.0: +get-caller-file@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-1.0.2.tgz#f702e63127e7e231c160a80c1554acb70d5047e5" + +get-own-enumerable-property-symbols@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-2.0.1.tgz#5c4ad87f2834c4b9b4e84549dc1e0650fb38c24b" + +get-stream@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-3.0.0.tgz#8e943d1358dc37555054ecbe2edb05aa174ede14" + +getpass@^0.1.1: + version "0.1.7" + resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" + dependencies: + assert-plus "^1.0.0" + +glob-base@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/glob-base/-/glob-base-0.3.0.tgz#dbb164f6221b1c0b1ccf82aea328b497df0ea3c4" + dependencies: + glob-parent "^2.0.0" + is-glob "^2.0.0" + +glob-parent@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/get-func-name/-/get-func-name-2.0.0.tgz#ead774abee72e20409433a066366023dd6887a41" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-2.0.0.tgz#81383d72db054fcccf5336daa902f182f6edbb28" + dependencies: + is-glob "^2.0.0" -glob@7.1.1: - version "7.1.1" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.1.tgz#805211df04faaf1c63a3600306cdf5ade50b2ec8" +glob2base@^0.0.12: + version "0.0.12" + resolved "https://registry.yarnpkg.com/glob2base/-/glob2base-0.0.12.tgz#9d419b3e28f12e83a362164a277055922c9c0d56" dependencies: - fs.realpath "^1.0.0" - inflight "^1.0.4" - inherits "2" - minimatch "^3.0.2" - once "^1.3.0" - path-is-absolute "^1.0.0" + find-index "^0.1.1" -glob@^7.0.3, glob@^7.0.5, glob@^7.1.1: +glob@^7.0.3, glob@^7.0.5, glob@^7.1.1, glob@^7.1.2: version "7.1.2" resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.2.tgz#c19c9df9a028702d678612384a6552404c636d15" dependencies: @@ -542,7 +1293,7 @@ glob@^7.0.3, glob@^7.0.5, glob@^7.1.1: once "^1.3.0" path-is-absolute "^1.0.0" -globals@^9.0.0: +globals@^9.0.0, globals@^9.18.0: version "9.18.0" resolved "https://registry.yarnpkg.com/globals/-/globals-9.18.0.tgz#aa3896b3e69b487f17e31ed2143d69a8e30c2d8a" @@ -552,7 +1303,7 @@ gonzales-pe@^3.4.4: dependencies: minimist "1.1.x" -graceful-fs@^4.1.2: +graceful-fs@^4.1.11, graceful-fs@^4.1.2, graceful-fs@^4.1.6: version "4.1.11" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.1.11.tgz#0e8bdfe4d1ddb8854d64e04ea7c00e2a026e5658" @@ -560,15 +1311,15 @@ graceful-fs@^4.1.2: version "1.0.1" resolved 
"https://registry.yarnpkg.com/graceful-readlink/-/graceful-readlink-1.0.1.tgz#4cafad76bc62f02fa039b2f94e9a3dd3a391a725" -graphviz@0.0.8: +graphviz@^0.0.8: version "0.0.8" resolved "https://registry.yarnpkg.com/graphviz/-/graphviz-0.0.8.tgz#e599e40733ef80e1653bfe89a5f031ecf2aa4aaa" dependencies: temp "~0.4.0" -growl@1.9.2: - version "1.9.2" - resolved "https://registry.yarnpkg.com/growl/-/growl-1.9.2.tgz#0ea7743715db8d8de2c5ede1775e1b45ac85c02f" +growly@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/growly/-/growly-1.3.0.tgz#f10748cbe76af964b7c96c93c6bcc28af120c081" handlebars@^4.0.3: version "4.0.10" @@ -580,6 +1331,28 @@ handlebars@^4.0.3: optionalDependencies: uglify-js "^2.6" +har-schema@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-1.0.5.tgz#d263135f43307c02c602afc8fe95970c0151369e" + +har-schema@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92" + +har-validator@~4.2.1: + version "4.2.1" + resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-4.2.1.tgz#33481d0f1bbff600dd203d75812a6a5fba002e2a" + dependencies: + ajv "^4.9.1" + har-schema "^1.0.5" + +har-validator@~5.0.3: + version "5.0.3" + resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.0.3.tgz#ba402c266194f15956ef15e0fcf242993f6a7dfd" + dependencies: + ajv "^5.1.0" + har-schema "^2.0.0" + has-ansi@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" @@ -594,6 +1367,98 @@ has-flag@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-2.0.0.tgz#e8207af1cc7b30d446cc70b734b5e8be18f88d51" +has-flag@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" + +has-unicode@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9" + +has@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/has/-/has-1.0.1.tgz#8461733f538b0837c9361e39a9ab9e9704dc2f28" + dependencies: + function-bind "^1.0.2" + +hawk@3.1.3, hawk@~3.1.3: + version "3.1.3" + resolved "https://registry.yarnpkg.com/hawk/-/hawk-3.1.3.tgz#078444bd7c1640b0fe540d2c9b73d59678e8e1c4" + dependencies: + boom "2.x.x" + cryptiles "2.x.x" + hoek "2.x.x" + sntp "1.x.x" + +hawk@~6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/hawk/-/hawk-6.0.2.tgz#af4d914eb065f9b5ce4d9d11c1cb2126eecc3038" + dependencies: + boom "4.x.x" + cryptiles "3.x.x" + hoek "4.x.x" + sntp "2.x.x" + +hoek@2.x.x: + version "2.16.3" + resolved "https://registry.yarnpkg.com/hoek/-/hoek-2.16.3.tgz#20bb7403d3cea398e91dc4710a8ff1b8274a25ed" + +hoek@4.x.x: + version "4.2.0" + resolved "https://registry.yarnpkg.com/hoek/-/hoek-4.2.0.tgz#72d9d0754f7fe25ca2d01ad8f8f9a9449a89526d" + +home-or-tmp@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/home-or-tmp/-/home-or-tmp-2.0.0.tgz#e36c3f2d2cae7d746a857e38d18d5f32a7882db8" + dependencies: + os-homedir "^1.0.0" + os-tmpdir "^1.0.1" + +hosted-git-info@^2.1.4: + version "2.5.0" + resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.5.0.tgz#6d60e34b3abbc8313062c3b798ef8d901a07af3c" + +html-encoding-sniffer@^1.0.1: + version "1.0.2" + resolved 
"https://registry.yarnpkg.com/html-encoding-sniffer/-/html-encoding-sniffer-1.0.2.tgz#e70d84b94da53aa375e11fe3a351be6642ca46f8" + dependencies: + whatwg-encoding "^1.0.1" + +http-signature@~1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.1.1.tgz#df72e267066cd0ac67fb76adf8e134a8fbcf91bf" + dependencies: + assert-plus "^0.2.0" + jsprim "^1.2.2" + sshpk "^1.7.0" + +http-signature@~1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1" + dependencies: + assert-plus "^1.0.0" + jsprim "^1.2.2" + sshpk "^1.7.0" + +iconv-lite@0.4.19: + version "0.4.19" + resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.19.tgz#f7468f60135f5e5dad3399c0a81be9a1603a082b" + +import-local@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/import-local/-/import-local-1.0.0.tgz#5e4ffdc03f4fe6c009c6729beb29631c2f8227bc" + dependencies: + pkg-dir "^2.0.0" + resolve-cwd "^2.0.0" + +imurmurhash@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" + +indexes-of@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/indexes-of/-/indexes-of-1.0.1.tgz#f30f716c8e2bd346c7b67d3df3915566a7c05607" + inflight@^1.0.4: version "1.0.6" resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" @@ -601,7 +1466,7 @@ inflight@^1.0.4: once "^1.3.0" wrappy "1" -inherits@2, inherits@~2.0.3: +inherits@2, inherits@^2.0.1, inherits@~2.0.0, inherits@~2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" @@ -609,35 +1474,157 @@ ini@~1.3.0: version "1.3.4" resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.4.tgz#0537cb79daf59b59a1a517dff706c86ec039162e" -invariant@^2.2.0: +invariant@^2.2.0, invariant@^2.2.2: version "2.2.2" resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.2.tgz#9e1f56ac0acdb6bf303306f338be3b204ae60360" dependencies: loose-envify "^1.0.0" +invert-kv@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-1.0.0.tgz#104a8e4aaca6d3d8cd157a8ef8bfab2d7a3ffdb6" + +irregular-plurals@^1.0.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/irregular-plurals/-/irregular-plurals-1.4.0.tgz#2ca9b033651111855412f16be5d77c62a458a766" + +is-arrayish@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" + +is-binary-path@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-1.0.1.tgz#75f16642b480f187a711c814161fd3a4a7655898" + dependencies: + binary-extensions "^1.0.0" + is-buffer@^1.1.5: version "1.1.5" resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.5.tgz#1f3b26ef613b214b88cbca23cc6c01d87961eecc" -is-finite@^1.0.0, is-finite@^1.0.1: +is-builtin-module@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-builtin-module/-/is-builtin-module-1.0.0.tgz#540572d34f7ac3119f8f76c30cbc1b1e037affbe" + dependencies: + builtin-modules "^1.0.0" + +is-callable@^1.1.1, is-callable@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.1.3.tgz#86eb75392805ddc33af71c92a0eedf74ee7604b2" + +is-ci@^1.0.10: + version "1.1.0" + resolved 
"https://registry.yarnpkg.com/is-ci/-/is-ci-1.1.0.tgz#247e4162e7860cebbdaf30b774d6b0ac7dcfe7a5" + dependencies: + ci-info "^1.0.0" + +is-date-object@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.1.tgz#9aa20eb6aeebbff77fbd33e74ca01b33581d3a16" + +is-dotfile@^1.0.0: + version "1.0.3" + resolved "https://registry.yarnpkg.com/is-dotfile/-/is-dotfile-1.0.3.tgz#a6a2f32ffd2dfb04f5ca25ecd0f6b83cf798a1e1" + +is-equal-shallow@^0.1.3: + version "0.1.3" + resolved "https://registry.yarnpkg.com/is-equal-shallow/-/is-equal-shallow-0.1.3.tgz#2238098fc221de0bcfa5d9eac4c45d638aa1c534" + dependencies: + is-primitive "^2.0.0" + +is-extendable@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" + +is-extglob@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-1.0.0.tgz#ac468177c4943405a092fc8f29760c6ffc6206c0" + +is-finite@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/is-finite/-/is-finite-1.0.2.tgz#cc6677695602be550ef11e8b4aa6305342b6d0aa" dependencies: number-is-nan "^1.0.0" -is-relative-path@^1.0.1, is-relative-path@~1.0.0: +is-fullwidth-code-point@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" + dependencies: + number-is-nan "^1.0.0" + +is-fullwidth-code-point@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" + +is-generator-fn@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-generator-fn/-/is-generator-fn-1.0.0.tgz#969d49e1bb3329f6bb7f09089be26578b2ddd46a" + +is-glob@^2.0.0, is-glob@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-2.0.1.tgz#d096f926a3ded5600f3fdfd91198cb0888c2d863" + dependencies: + is-extglob "^1.0.0" + +is-number@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-number/-/is-number-2.1.0.tgz#01fcbbb393463a548f2f466cce16dece49db908f" + dependencies: + kind-of "^3.0.2" + +is-number@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195" + dependencies: + kind-of "^3.0.2" + +is-obj@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-1.0.1.tgz#3e4729ac1f5fde025cd7d83a896dab9f4f67db0f" + +is-posix-bracket@^0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/is-posix-bracket/-/is-posix-bracket-0.1.1.tgz#3334dc79774368e92f016e6fbc0a88f5cd6e6bc4" + +is-primitive@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/is-primitive/-/is-primitive-2.0.0.tgz#207bab91638499c07b2adf240a41a87210034575" + +is-regex@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.0.4.tgz#5517489b547091b0930e095654ced25ee97e9491" + dependencies: + has "^1.0.1" + +is-regexp@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-regexp/-/is-regexp-1.0.0.tgz#fd2d883545c46bac5a633e7b9a09e87fa2cb5069" + +is-relative-path@^1.0.2, is-relative-path@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/is-relative-path/-/is-relative-path-1.0.2.tgz#091b46a0d67c1ed0fe85f1f8cfdde006bb251d46" +is-stream@^1.1.0: + version "1.1.0" + resolved 
"https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" + +is-symbol@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.1.tgz#3cc59f00025194b6ab2e38dbae6689256b660572" + +is-typedarray@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" + +is-url@^1.2.4: + version "1.2.4" + resolved "https://registry.yarnpkg.com/is-url/-/is-url-1.2.4.tgz#04a4df46d28c4cff3d73d01ff06abeb318a1aa52" + is-utf8@^0.2.0: version "0.2.1" resolved "https://registry.yarnpkg.com/is-utf8/-/is-utf8-0.2.1.tgz#4b0da1442104d1b336340e80797e865cf39f7d72" -isarray@0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" - -isarray@~1.0.0: +isarray@1.0.0, isarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" @@ -645,18 +1632,28 @@ isexe@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" -istanbul-api@^1.0.0-alpha: - version "1.1.10" - resolved "https://registry.yarnpkg.com/istanbul-api/-/istanbul-api-1.1.10.tgz#f27e5e7125c8de13f6a80661af78f512e5439b2b" +isobject@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" + dependencies: + isarray "1.0.0" + +isstream@~0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" + +istanbul-api@^1.1.14: + version "1.2.1" + resolved "https://registry.yarnpkg.com/istanbul-api/-/istanbul-api-1.2.1.tgz#0c60a0515eb11c7d65c6b50bba2c6e999acd8620" dependencies: async "^2.1.4" fileset "^2.0.2" istanbul-lib-coverage "^1.1.1" - istanbul-lib-hook "^1.0.7" - istanbul-lib-instrument "^1.7.3" - istanbul-lib-report "^1.1.1" - istanbul-lib-source-maps "^1.2.1" - istanbul-reports "^1.1.1" + istanbul-lib-hook "^1.1.0" + istanbul-lib-instrument "^1.9.1" + istanbul-lib-report "^1.1.2" + istanbul-lib-source-maps "^1.2.2" + istanbul-reports "^1.1.3" js-yaml "^3.7.0" mkdirp "^0.5.1" once "^1.4.0" @@ -665,27 +1662,27 @@ istanbul-lib-coverage@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-1.1.1.tgz#73bfb998885299415c93d38a3e9adf784a77a9da" -istanbul-lib-hook@^1.0.7: - version "1.0.7" - resolved "https://registry.yarnpkg.com/istanbul-lib-hook/-/istanbul-lib-hook-1.0.7.tgz#dd6607f03076578fe7d6f2a630cf143b49bacddc" +istanbul-lib-hook@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-hook/-/istanbul-lib-hook-1.1.0.tgz#8538d970372cb3716d53e55523dd54b557a8d89b" dependencies: append-transform "^0.4.0" -istanbul-lib-instrument@^1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-1.7.3.tgz#925b239163eabdd68cc4048f52c2fa4f899ecfa7" +istanbul-lib-instrument@^1.7.5, istanbul-lib-instrument@^1.8.0, istanbul-lib-instrument@^1.9.1: + version "1.9.1" + resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-1.9.1.tgz#250b30b3531e5d3251299fdd64b0b2c9db6b558e" dependencies: babel-generator "^6.18.0" babel-template "^6.16.0" babel-traverse "^6.18.0" babel-types "^6.18.0" - babylon "^6.17.4" + babylon "^6.18.0" istanbul-lib-coverage "^1.1.1" semver "^5.3.0" 
-istanbul-lib-report@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-1.1.1.tgz#f0e55f56655ffa34222080b7a0cd4760e1405fc9" +istanbul-lib-report@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-1.1.2.tgz#922be27c13b9511b979bd1587359f69798c1d425" dependencies: istanbul-lib-coverage "^1.1.1" mkdirp "^0.5.1" @@ -702,51 +1699,494 @@ istanbul-lib-source-maps@^1.2.1: rimraf "^2.6.1" source-map "^0.5.3" -istanbul-reports@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-1.1.1.tgz#042be5c89e175bc3f86523caab29c014e77fee4e" +istanbul-lib-source-maps@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-1.2.2.tgz#750578602435f28a0c04ee6d7d9e0f2960e62c1c" + dependencies: + debug "^3.1.0" + istanbul-lib-coverage "^1.1.1" + mkdirp "^0.5.1" + rimraf "^2.6.1" + source-map "^0.5.3" + +istanbul-reports@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-1.1.3.tgz#3b9e1e8defb6d18b1d425da8e8b32c5a163f2d10" dependencies: handlebars "^4.0.3" -istanbul@^1.0.0-alpha.2: - version "1.0.0-alpha.2" - resolved "https://registry.yarnpkg.com/istanbul/-/istanbul-1.0.0-alpha.2.tgz#06096bc08e98baad744aae46962d8df9fac63d08" +jest-changed-files@^22.4.3: + version "22.4.3" + resolved "https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-22.4.3.tgz#8882181e022c38bd46a2e4d18d44d19d90a90fb2" dependencies: - abbrev "1.0.x" - async "1.x" - istanbul-api "^1.0.0-alpha" - js-yaml "3.x" - mkdirp "0.5.x" - nopt "3.x" - which "^1.1.1" - wordwrap "^1.0.0" + throat "^4.0.0" -js-tokens@^3.0.0: +jest-cli@^22.4.3: + version "22.4.3" + resolved "https://registry.yarnpkg.com/jest-cli/-/jest-cli-22.4.3.tgz#bf16c4a5fb7edc3fa5b9bb7819e34139e88a72c7" + dependencies: + ansi-escapes "^3.0.0" + chalk "^2.0.1" + exit "^0.1.2" + glob "^7.1.2" + graceful-fs "^4.1.11" + import-local "^1.0.0" + is-ci "^1.0.10" + istanbul-api "^1.1.14" + istanbul-lib-coverage "^1.1.1" + istanbul-lib-instrument "^1.8.0" + istanbul-lib-source-maps "^1.2.1" + jest-changed-files "^22.4.3" + jest-config "^22.4.3" + jest-environment-jsdom "^22.4.3" + jest-get-type "^22.4.3" + jest-haste-map "^22.4.3" + jest-message-util "^22.4.3" + jest-regex-util "^22.4.3" + jest-resolve-dependencies "^22.4.3" + jest-runner "^22.4.3" + jest-runtime "^22.4.3" + jest-snapshot "^22.4.3" + jest-util "^22.4.3" + jest-validate "^22.4.3" + jest-worker "^22.4.3" + micromatch "^2.3.11" + node-notifier "^5.2.1" + realpath-native "^1.0.0" + rimraf "^2.5.4" + slash "^1.0.0" + string-length "^2.0.0" + strip-ansi "^4.0.0" + which "^1.2.12" + yargs "^10.0.3" + +jest-config@^22.0.1: + version "22.1.1" + resolved "https://registry.yarnpkg.com/jest-config/-/jest-config-22.1.1.tgz#642ffc0c704ca66a598eae1f2a473d1f9096056d" + dependencies: + chalk "^2.0.1" + glob "^7.1.1" + jest-environment-jsdom "^22.1.0" + jest-environment-node "^22.1.0" + jest-get-type "^22.1.0" + jest-jasmine2 "^22.1.1" + jest-regex-util "^22.1.0" + jest-resolve "^22.1.0" + jest-util "^22.1.0" + jest-validate "^22.1.0" + pretty-format "^22.1.0" + +jest-config@^22.4.3: + version "22.4.3" + resolved "https://registry.yarnpkg.com/jest-config/-/jest-config-22.4.3.tgz#0e9d57db267839ea31309119b41dc2fa31b76403" + dependencies: + chalk "^2.0.1" + glob "^7.1.1" + jest-environment-jsdom "^22.4.3" + jest-environment-node "^22.4.3" + jest-get-type "^22.4.3" + 
jest-jasmine2 "^22.4.3" + jest-regex-util "^22.4.3" + jest-resolve "^22.4.3" + jest-util "^22.4.3" + jest-validate "^22.4.3" + pretty-format "^22.4.3" + +jest-diff@^22.1.0: + version "22.1.0" + resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-22.1.0.tgz#0fad9d96c87b453896bf939df3dc8aac6919ac38" + dependencies: + chalk "^2.0.1" + diff "^3.2.0" + jest-get-type "^22.1.0" + pretty-format "^22.1.0" + +jest-diff@^22.4.3: + version "22.4.3" + resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-22.4.3.tgz#e18cc3feff0aeef159d02310f2686d4065378030" + dependencies: + chalk "^2.0.1" + diff "^3.2.0" + jest-get-type "^22.4.3" + pretty-format "^22.4.3" + +jest-docblock@^22.4.3: + version "22.4.3" + resolved "https://registry.yarnpkg.com/jest-docblock/-/jest-docblock-22.4.3.tgz#50886f132b42b280c903c592373bb6e93bb68b19" + dependencies: + detect-newline "^2.1.0" + +jest-environment-jsdom@^22.1.0: + version "22.1.0" + resolved "https://registry.yarnpkg.com/jest-environment-jsdom/-/jest-environment-jsdom-22.1.0.tgz#d0b83359a1dd4dc7faa9be27895da1859692b9e8" + dependencies: + jest-mock "^22.1.0" + jest-util "^22.1.0" + jsdom "^11.5.1" + +jest-environment-jsdom@^22.4.3: + version "22.4.3" + resolved "https://registry.yarnpkg.com/jest-environment-jsdom/-/jest-environment-jsdom-22.4.3.tgz#d67daa4155e33516aecdd35afd82d4abf0fa8a1e" + dependencies: + jest-mock "^22.4.3" + jest-util "^22.4.3" + jsdom "^11.5.1" + +jest-environment-node@^22.1.0: + version "22.1.0" + resolved "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-22.1.0.tgz#372d590c5229a349e882d9404808c4e99bd40f62" + dependencies: + jest-mock "^22.1.0" + jest-util "^22.1.0" + +jest-environment-node@^22.4.3: + version "22.4.3" + resolved "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-22.4.3.tgz#54c4eaa374c83dd52a9da8759be14ebe1d0b9129" + dependencies: + jest-mock "^22.4.3" + jest-util "^22.4.3" + +jest-get-type@^22.1.0: + version "22.1.0" + resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-22.1.0.tgz#4e90af298ed6181edc85d2da500dbd2753e0d5a9" + +jest-get-type@^22.4.3: + version "22.4.3" + resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-22.4.3.tgz#e3a8504d8479342dd4420236b322869f18900ce4" + +jest-haste-map@^22.4.3: + version "22.4.3" + resolved "https://registry.yarnpkg.com/jest-haste-map/-/jest-haste-map-22.4.3.tgz#25842fa2ba350200767ac27f658d58b9d5c2e20b" + dependencies: + fb-watchman "^2.0.0" + graceful-fs "^4.1.11" + jest-docblock "^22.4.3" + jest-serializer "^22.4.3" + jest-worker "^22.4.3" + micromatch "^2.3.11" + sane "^2.0.0" + +jest-jasmine2@^22.1.1: + version "22.1.1" + resolved "https://registry.yarnpkg.com/jest-jasmine2/-/jest-jasmine2-22.1.1.tgz#990a13cd62803ae44bcb6e34909d3b0ebd2dcb13" + dependencies: + callsites "^2.0.0" + chalk "^2.0.1" + co "^4.6.0" + expect "^22.1.0" + graceful-fs "^4.1.11" + is-generator-fn "^1.0.0" + jest-diff "^22.1.0" + jest-matcher-utils "^22.1.0" + jest-message-util "^22.1.0" + jest-snapshot "^22.1.0" + source-map-support "^0.5.0" + +jest-jasmine2@^22.4.3: + version "22.4.3" + resolved "https://registry.yarnpkg.com/jest-jasmine2/-/jest-jasmine2-22.4.3.tgz#4daf64cd14c793da9db34a7c7b8dcfe52a745965" + dependencies: + chalk "^2.0.1" + co "^4.6.0" + expect "^22.4.3" + graceful-fs "^4.1.11" + is-generator-fn "^1.0.0" + jest-diff "^22.4.3" + jest-matcher-utils "^22.4.3" + jest-message-util "^22.4.3" + jest-snapshot "^22.4.3" + jest-util "^22.4.3" + source-map-support "^0.5.0" + +jest-leak-detector@^22.4.3: + 
version "22.4.3" + resolved "https://registry.yarnpkg.com/jest-leak-detector/-/jest-leak-detector-22.4.3.tgz#2b7b263103afae8c52b6b91241a2de40117e5b35" + dependencies: + pretty-format "^22.4.3" + +jest-matcher-utils@^22.1.0: + version "22.1.0" + resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-22.1.0.tgz#e164665b5d313636ac29f7f6fe9ef0a6ce04febc" + dependencies: + chalk "^2.0.1" + jest-get-type "^22.1.0" + pretty-format "^22.1.0" + +jest-matcher-utils@^22.4.3: + version "22.4.3" + resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-22.4.3.tgz#4632fe428ebc73ebc194d3c7b65d37b161f710ff" + dependencies: + chalk "^2.0.1" + jest-get-type "^22.4.3" + pretty-format "^22.4.3" + +jest-message-util@^22.1.0: + version "22.1.0" + resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-22.1.0.tgz#51ba0794cb6e579bfc4e9adfac452f9f1a0293fc" + dependencies: + "@babel/code-frame" "^7.0.0-beta.35" + chalk "^2.0.1" + micromatch "^2.3.11" + slash "^1.0.0" + stack-utils "^1.0.1" + +jest-message-util@^22.4.3: + version "22.4.3" + resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-22.4.3.tgz#cf3d38aafe4befddbfc455e57d65d5239e399eb7" + dependencies: + "@babel/code-frame" "^7.0.0-beta.35" + chalk "^2.0.1" + micromatch "^2.3.11" + slash "^1.0.0" + stack-utils "^1.0.1" + +jest-mock@^22.1.0: + version "22.1.0" + resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-22.1.0.tgz#87ec21c0599325671c9a23ad0e05c86fb5879b61" + +jest-mock@^22.4.3: + version "22.4.3" + resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-22.4.3.tgz#f63ba2f07a1511772cdc7979733397df770aabc7" + +jest-regex-util@^22.1.0: + version "22.1.0" + resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-22.1.0.tgz#5daf2fe270074b6da63e5d85f1c9acc866768f53" + +jest-regex-util@^22.4.3: + version "22.4.3" + resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-22.4.3.tgz#a826eb191cdf22502198c5401a1fc04de9cef5af" + +jest-resolve-dependencies@^22.4.3: + version "22.4.3" + resolved "https://registry.yarnpkg.com/jest-resolve-dependencies/-/jest-resolve-dependencies-22.4.3.tgz#e2256a5a846732dc3969cb72f3c9ad7725a8195e" + dependencies: + jest-regex-util "^22.4.3" + +jest-resolve@^22.1.0: + version "22.1.0" + resolved "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-22.1.0.tgz#5f4307f48b93c1abdbeacc9ed80642ffcb246294" + dependencies: + browser-resolve "^1.11.2" + chalk "^2.0.1" + +jest-resolve@^22.4.3: + version "22.4.3" + resolved "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-22.4.3.tgz#0ce9d438c8438229aa9b916968ec6b05c1abb4ea" + dependencies: + browser-resolve "^1.11.2" + chalk "^2.0.1" + +jest-runner@^22.4.3: + version "22.4.3" + resolved "https://registry.yarnpkg.com/jest-runner/-/jest-runner-22.4.3.tgz#298ddd6a22b992c64401b4667702b325e50610c3" + dependencies: + exit "^0.1.2" + jest-config "^22.4.3" + jest-docblock "^22.4.3" + jest-haste-map "^22.4.3" + jest-jasmine2 "^22.4.3" + jest-leak-detector "^22.4.3" + jest-message-util "^22.4.3" + jest-runtime "^22.4.3" + jest-util "^22.4.3" + jest-worker "^22.4.3" + throat "^4.0.0" + +jest-runtime@^22.4.3: + version "22.4.3" + resolved "https://registry.yarnpkg.com/jest-runtime/-/jest-runtime-22.4.3.tgz#b69926c34b851b920f666c93e86ba2912087e3d0" + dependencies: + babel-core "^6.0.0" + babel-jest "^22.4.3" + babel-plugin-istanbul "^4.1.5" + chalk "^2.0.1" + convert-source-map "^1.4.0" + exit "^0.1.2" + graceful-fs "^4.1.11" + jest-config "^22.4.3" + 
jest-haste-map "^22.4.3" + jest-regex-util "^22.4.3" + jest-resolve "^22.4.3" + jest-util "^22.4.3" + jest-validate "^22.4.3" + json-stable-stringify "^1.0.1" + micromatch "^2.3.11" + realpath-native "^1.0.0" + slash "^1.0.0" + strip-bom "3.0.0" + write-file-atomic "^2.1.0" + yargs "^10.0.3" + +jest-serializer@^22.4.3: + version "22.4.3" + resolved "https://registry.yarnpkg.com/jest-serializer/-/jest-serializer-22.4.3.tgz#a679b81a7f111e4766235f4f0c46d230ee0f7436" + +jest-snapshot@^22.1.0: + version "22.1.0" + resolved "https://registry.yarnpkg.com/jest-snapshot/-/jest-snapshot-22.1.0.tgz#4a9b27a1974cff0c48ff0f86bbbefd1a6cc3c5f5" + dependencies: + chalk "^2.0.1" + jest-diff "^22.1.0" + jest-matcher-utils "^22.1.0" + mkdirp "^0.5.1" + natural-compare "^1.4.0" + pretty-format "^22.1.0" + +jest-snapshot@^22.4.3: + version "22.4.3" + resolved "https://registry.yarnpkg.com/jest-snapshot/-/jest-snapshot-22.4.3.tgz#b5c9b42846ffb9faccb76b841315ba67887362d2" + dependencies: + chalk "^2.0.1" + jest-diff "^22.4.3" + jest-matcher-utils "^22.4.3" + mkdirp "^0.5.1" + natural-compare "^1.4.0" + pretty-format "^22.4.3" + +jest-util@^22.1.0: + version "22.1.0" + resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-22.1.0.tgz#2ce0ead08a00a38383c308d0bd3431a9f159cbaa" + dependencies: + callsites "^2.0.0" + chalk "^2.0.1" + graceful-fs "^4.1.11" + is-ci "^1.0.10" + jest-message-util "^22.1.0" + jest-validate "^22.1.0" + mkdirp "^0.5.1" + +jest-util@^22.4.3: + version "22.4.3" + resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-22.4.3.tgz#c70fec8eec487c37b10b0809dc064a7ecf6aafac" + dependencies: + callsites "^2.0.0" + chalk "^2.0.1" + graceful-fs "^4.1.11" + is-ci "^1.0.10" + jest-message-util "^22.4.3" + mkdirp "^0.5.1" + source-map "^0.6.0" + +jest-validate@^22.1.0: + version "22.1.0" + resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-22.1.0.tgz#69d570687029e2349ca5779e209bdd360898746c" + dependencies: + chalk "^2.0.1" + jest-get-type "^22.1.0" + leven "^2.1.0" + pretty-format "^22.1.0" + +jest-validate@^22.4.3: + version "22.4.3" + resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-22.4.3.tgz#0780954a5a7daaeec8d3c10834b9280865976b30" + dependencies: + chalk "^2.0.1" + jest-config "^22.4.3" + jest-get-type "^22.4.3" + leven "^2.1.0" + pretty-format "^22.4.3" + +jest-worker@^22.4.3: + version "22.4.3" + resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-22.4.3.tgz#5c421417cba1c0abf64bf56bd5fb7968d79dd40b" + dependencies: + merge-stream "^1.0.1" + +jest@^22.1.4: + version "22.4.3" + resolved "https://registry.yarnpkg.com/jest/-/jest-22.4.3.tgz#2261f4b117dc46d9a4a1a673d2150958dee92f16" + dependencies: + import-local "^1.0.0" + jest-cli "^22.4.3" + +js-tokens@^3.0.0, js-tokens@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.2.tgz#9866df395102130e38f7f996bceb65443209c25b" -js-yaml@3.x, js-yaml@^3.7.0: +js-yaml@^3.7.0: version "3.9.0" resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.9.0.tgz#4ffbbf25c2ac963b8299dc74da7e3740de1c18ce" dependencies: argparse "^1.0.7" esprima "^4.0.0" +jsbn@~0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" + +jsdom@^11.5.1: + version "11.5.1" + resolved "https://registry.yarnpkg.com/jsdom/-/jsdom-11.5.1.tgz#5df753b8d0bca20142ce21f4f6c039f99a992929" + dependencies: + abab "^1.0.3" + acorn "^5.1.2" + acorn-globals "^4.0.0" + array-equal "^1.0.0" + browser-process-hrtime "^0.1.2" + 
content-type-parser "^1.0.1" + cssom ">= 0.3.2 < 0.4.0" + cssstyle ">= 0.2.37 < 0.3.0" + domexception "^1.0.0" + escodegen "^1.9.0" + html-encoding-sniffer "^1.0.1" + left-pad "^1.2.0" + nwmatcher "^1.4.3" + parse5 "^3.0.2" + pn "^1.0.0" + request "^2.83.0" + request-promise-native "^1.0.3" + sax "^1.2.1" + symbol-tree "^3.2.1" + tough-cookie "^2.3.3" + webidl-conversions "^4.0.2" + whatwg-encoding "^1.0.1" + whatwg-url "^6.3.0" + xml-name-validator "^2.0.1" + jsesc@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-1.3.0.tgz#46c3fec8c1892b12b0833db9bc7622176dbab34b" -json3@3.3.2: - version "3.3.2" - resolved "https://registry.yarnpkg.com/json3/-/json3-3.3.2.tgz#3c0434743df93e2f5c42aee7b19bcb483575f4e1" +json-schema-traverse@^0.3.0: + version "0.3.1" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz#349a6d44c53a51de89b40805c5d5e59b417d3340" -jsonfile@~1.0.1: +json-schema@0.2.3: + version "0.2.3" + resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13" + +json-stable-stringify@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-1.0.1.tgz#ea5efe40b83690b98667614a7392fc60e842c0dd" + resolved "https://registry.yarnpkg.com/json-stable-stringify/-/json-stable-stringify-1.0.1.tgz#9a759d39c5f2ff503fd5300646ed445f88c4f9af" + dependencies: + jsonify "~0.0.0" -just-extend@^1.1.26: - version "1.1.27" - resolved "https://registry.yarnpkg.com/just-extend/-/just-extend-1.1.27.tgz#ec6e79410ff914e472652abfa0e603c03d60e905" +json-stringify-safe@~5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" + +json5@^0.5.1: + version "0.5.1" + resolved "https://registry.yarnpkg.com/json5/-/json5-0.5.1.tgz#1eade7acc012034ad84e2396767ead9fa5495821" + +jsonfile@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-4.0.0.tgz#8771aae0799b64076b76640fca058f9c10e33ecb" + optionalDependencies: + graceful-fs "^4.1.6" + +jsonify@~0.0.0: + version "0.0.0" + resolved "https://registry.yarnpkg.com/jsonify/-/jsonify-0.0.0.tgz#2c74b6ee41d93ca51b7b5aaee8f503631d252a73" + +jsprim@^1.2.2: + version "1.4.1" + resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2" + dependencies: + assert-plus "1.0.0" + extsprintf "1.3.0" + json-schema "0.2.3" + verror "1.10.0" kind-of@^3.0.2: version "3.2.2" @@ -754,10 +2194,30 @@ kind-of@^3.0.2: dependencies: is-buffer "^1.1.5" +kind-of@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-4.0.0.tgz#20813df3d712928b207378691a45066fae72dd57" + dependencies: + is-buffer "^1.1.5" + lazy-cache@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/lazy-cache/-/lazy-cache-1.0.4.tgz#a1d78fc3a50474cb80845d3b3b6e1da49a446e8e" +lcid@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/lcid/-/lcid-1.0.0.tgz#308accafa0bc483a3867b4b6f2b9506251d1b835" + dependencies: + invert-kv "^1.0.0" + +left-pad@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/left-pad/-/left-pad-1.2.0.tgz#d30a73c6b8201d8f7d8e7956ba9616087a68e0ee" + +leven@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/leven/-/leven-2.1.0.tgz#c2e7a9f772094dee9d34202ae8acce4687875580" + levn@~0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" @@ 
-765,84 +2225,40 @@ levn@~0.3.0: prelude-ls "~1.1.2" type-check "~0.3.2" -lodash._baseassign@^3.0.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/lodash._baseassign/-/lodash._baseassign-3.2.0.tgz#8c38a099500f215ad09e59f1722fd0c52bfe0a4e" - dependencies: - lodash._basecopy "^3.0.0" - lodash.keys "^3.0.0" - -lodash._basecopy@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/lodash._basecopy/-/lodash._basecopy-3.0.1.tgz#8da0e6a876cf344c0ad8a54882111dd3c5c7ca36" - -lodash._basecreate@^3.0.0: - version "3.0.3" - resolved "https://registry.yarnpkg.com/lodash._basecreate/-/lodash._basecreate-3.0.3.tgz#1bc661614daa7fc311b7d03bf16806a0213cf821" - -lodash._getnative@^3.0.0: - version "3.9.1" - resolved "https://registry.yarnpkg.com/lodash._getnative/-/lodash._getnative-3.9.1.tgz#570bc7dede46d61cdcde687d65d3eecbaa3aaff5" - -lodash._isiterateecall@^3.0.0: - version "3.0.9" - resolved "https://registry.yarnpkg.com/lodash._isiterateecall/-/lodash._isiterateecall-3.0.9.tgz#5203ad7ba425fae842460e696db9cf3e6aac057c" - -lodash.create@3.1.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/lodash.create/-/lodash.create-3.1.1.tgz#d7f2849f0dbda7e04682bb8cd72ab022461debe7" +load-json-file@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-1.1.0.tgz#956905708d58b4bab4c2261b04f59f31c99374c0" dependencies: - lodash._baseassign "^3.0.0" - lodash._basecreate "^3.0.0" - lodash._isiterateecall "^3.0.0" - -lodash.get@^4.4.2: - version "4.4.2" - resolved "https://registry.yarnpkg.com/lodash.get/-/lodash.get-4.4.2.tgz#2d177f652fa31e939b4438d5341499dfa3825e99" - -lodash.isarguments@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz#2f573d85c6a24289ff00663b491c1d338ff3458a" - -lodash.isarray@^3.0.0: - version "3.0.4" - resolved "https://registry.yarnpkg.com/lodash.isarray/-/lodash.isarray-3.0.4.tgz#79e4eb88c36a8122af86f844aa9bcd851b5fbb55" + graceful-fs "^4.1.2" + parse-json "^2.2.0" + pify "^2.0.0" + pinkie-promise "^2.0.0" + strip-bom "^2.0.0" -lodash.keys@^3.0.0: - version "3.1.2" - resolved "https://registry.yarnpkg.com/lodash.keys/-/lodash.keys-3.1.2.tgz#4dbc0472b156be50a0b286855d1bd0b0c656098a" +locate-path@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e" dependencies: - lodash._getnative "^3.0.0" - lodash.isarguments "^3.0.0" - lodash.isarray "^3.0.0" + p-locate "^2.0.0" + path-exists "^3.0.0" -lodash.tostring@^4.0.0: - version "4.1.4" - resolved "https://registry.yarnpkg.com/lodash.tostring/-/lodash.tostring-4.1.4.tgz#560c27d1f8eadde03c2cce198fef5c031d8298fb" +lodash.sortby@^4.7.0: + version "4.7.0" + resolved "https://registry.yarnpkg.com/lodash.sortby/-/lodash.sortby-4.7.0.tgz#edd14c824e2cc9c1e0b0a1b42bb5210516a42438" -lodash.unescape@4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/lodash.unescape/-/lodash.unescape-4.0.0.tgz#36debfc492b81478471ef974cd3783e202eb6cef" - dependencies: - lodash.tostring "^4.0.0" +lodash.unescape@4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/lodash.unescape/-/lodash.unescape-4.0.1.tgz#bf2249886ce514cda112fae9218cdc065211fc9c" -lodash@^4.14.0, lodash@^4.2.0: +lodash@^4.13.1, lodash@^4.14.0, lodash@^4.17.4, lodash@^4.2.0: version "4.17.4" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.4.tgz#78203a4d1c328ae1d86dca6460e369b57f4055ae" -log-symbols@^1.0.2: - version "1.0.2" - resolved 
"https://registry.yarnpkg.com/log-symbols/-/log-symbols-1.0.2.tgz#376ff7b58ea3086a0f09facc74617eca501e1a18" - dependencies: - chalk "^1.0.0" - -lolex@^1.6.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/lolex/-/lolex-1.6.0.tgz#3a9a0283452a47d7439e72731b9e07d7386e49f6" - -lolex@^2.2.0: +log-symbols@^2.1.0: version "2.2.0" - resolved "https://registry.yarnpkg.com/lolex/-/lolex-2.2.0.tgz#628af0882dfc6438723b0a98c2966681d2d4e4ff" + resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-2.2.0.tgz#5740e1c5d6f0dfda4ad9323b5332107ef6b4c40a" + dependencies: + chalk "^2.0.1" longest@^1.0.1: version "1.0.1" @@ -854,22 +2270,41 @@ loose-envify@^1.0.0: dependencies: js-tokens "^3.0.0" -madge@^2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/madge/-/madge-2.2.0.tgz#560ae7fe73a31a9e784fe402d9b3f79c039527cc" - dependencies: - chalk "1.1.3" - commander "2.9.0" - commondir "1.0.1" - debug "2.2.0" - dependency-tree "5.11.0" - graphviz "0.0.8" - mz "2.4.0" - ora "1.2.0" - pluralize "4.0.0" - pretty-ms "2.1.0" - rc "1.1.6" - walkdir "0.0.11" +lru-cache@^4.0.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.1.tgz#622e32e82488b49279114a4f9ecf45e7cd6bba55" + dependencies: + pseudomap "^1.0.2" + yallist "^2.1.2" + +madge@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/madge/-/madge-3.0.1.tgz#c289ddc4b0e9d8f9f22f8464349a7d643dc021d5" + dependencies: + chalk "^2.3.0" + commander "2.13.0" + commondir "^1.0.1" + debug "^3.1.0" + dependency-tree "^6.0.0" + graphviz "^0.0.8" + mz "^2.7.0" + ora "1.4.0" + pluralize "^7.0.0" + pretty-ms "^3.0.1" + rc "1.2.5" + walkdir "^0.0.12" + +makeerror@1.0.x: + version "1.0.11" + resolved "https://registry.yarnpkg.com/makeerror/-/makeerror-1.0.11.tgz#e01a5c9109f2af79660e4e8b9587790184f5a96c" + dependencies: + tmpl "1.0.x" + +mem@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/mem/-/mem-1.1.0.tgz#5edd52b485ca1d900fe64895505399a0dfa45f76" + dependencies: + mimic-fn "^1.0.0" memory-fs@^0.4.0: version "0.4.1" @@ -878,11 +2313,49 @@ memory-fs@^0.4.0: errno "^0.1.3" readable-stream "^2.0.1" +merge-stream@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-1.0.1.tgz#4041202d508a342ba00174008df0c251b8c135e1" + dependencies: + readable-stream "^2.0.1" + +merge@^1.1.3: + version "1.2.0" + resolved "https://registry.yarnpkg.com/merge/-/merge-1.2.0.tgz#7531e39d4949c281a66b8c5a6e0265e8b05894da" + +micromatch@^2.1.5, micromatch@^2.3.11: + version "2.3.11" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-2.3.11.tgz#86677c97d1720b363431d04d0d15293bd38c1565" + dependencies: + arr-diff "^2.0.0" + array-unique "^0.2.1" + braces "^1.8.2" + expand-brackets "^0.1.4" + extglob "^0.3.1" + filename-regex "^2.0.0" + is-extglob "^1.0.0" + is-glob "^2.0.1" + kind-of "^3.0.2" + normalize-path "^2.0.1" + object.omit "^2.0.0" + parse-glob "^3.0.4" + regex-cache "^0.4.2" + +mime-db@~1.30.0: + version "1.30.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.30.0.tgz#74c643da2dd9d6a45399963465b26d5ca7d71f01" + +mime-types@^2.1.12, mime-types@~2.1.17, mime-types@~2.1.7: + version "2.1.17" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.17.tgz#09d7a393f03e995a79f8af857b70a9e0ab16557a" + dependencies: + mime-db "~1.30.0" + mimic-fn@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-1.1.0.tgz#e667783d92e89dbd342818b5230b9d62a672ad18" -minimatch@^3.0.2, minimatch@^3.0.3, 
minimatch@^3.0.4: +minimatch@^3.0.0, minimatch@^3.0.2, minimatch@^3.0.3, minimatch@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" dependencies: @@ -896,36 +2369,16 @@ minimist@1.1.x: version "1.1.3" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.1.3.tgz#3bedfd91a92d39016fcfaa1c681e8faa1a1efda8" -minimist@^1.2.0: +minimist@^1.1.0, minimist@^1.1.1, minimist@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.0.tgz#a35008b20f41383eec1fb914f4cd5df79a264284" -mkdirp@0.3.x: - version "0.3.5" - resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.3.5.tgz#de3e5f8961c88c787ee1368df849ac4413eca8d7" - -mkdirp@0.5.1, mkdirp@0.5.x, mkdirp@^0.5.1: +"mkdirp@>=0.5 0", mkdirp@^0.5.1: version "0.5.1" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903" dependencies: minimist "0.0.8" -mocha@^3.4.2: - version "3.4.2" - resolved "https://registry.yarnpkg.com/mocha/-/mocha-3.4.2.tgz#d0ef4d332126dbf18d0d640c9b382dd48be97594" - dependencies: - browser-stdout "1.3.0" - commander "2.9.0" - debug "2.6.0" - diff "3.2.0" - escape-string-regexp "1.0.5" - glob "7.1.1" - growl "1.9.2" - json3 "3.3.2" - lodash.create "3.1.1" - mkdirp "0.5.1" - supports-color "3.1.2" - module-definition@^2.2.4: version "2.2.4" resolved "https://registry.yarnpkg.com/module-definition/-/module-definition-2.2.4.tgz#c0a3771de58cf6bcf12aed2476706c596ad4b2cb" @@ -933,50 +2386,65 @@ module-definition@^2.2.4: ast-module-types "^2.3.2" node-source-walk "^3.0.0" -module-lookup-amd@^4.0.2: - version "4.0.5" - resolved "https://registry.yarnpkg.com/module-lookup-amd/-/module-lookup-amd-4.0.5.tgz#58e353f9dc01ecec057b1ccdd00ed059484acca5" +module-lookup-amd@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/module-lookup-amd/-/module-lookup-amd-5.0.1.tgz#7ed9c6a81a0c3317df0649e9f89877c9531594e0" dependencies: commander "^2.8.1" debug "^3.1.0" - file-exists "~1.0.0" - find "0.2.6" - requirejs "~2.2.0" - requirejs-config-file "~2.0.0" - -ms@0.7.1: - version "0.7.1" - resolved "https://registry.yarnpkg.com/ms/-/ms-0.7.1.tgz#9cd13c03adbff25b65effde7ce864ee952017098" - -ms@0.7.2: - version "0.7.2" - resolved "https://registry.yarnpkg.com/ms/-/ms-0.7.2.tgz#ae25cf2512b3885a1d95d7f037868d8431124765" + file-exists "^2.0.0" + find "^0.2.8" + requirejs "^2.3.5" + requirejs-config-file "^3.0.0" ms@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" -mz@2.4.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/mz/-/mz-2.4.0.tgz#987ba9624d89395388c37cb4741e2caf4dd13b1a" +mz@^2.7.0: + version "2.7.0" + resolved "https://registry.yarnpkg.com/mz/-/mz-2.7.0.tgz#95008057a56cafadc2bc63dde7f9ff6955948e32" dependencies: any-promise "^1.0.0" object-assign "^4.0.1" thenify-all "^1.0.0" -ncp@~0.4.2: - version "0.4.2" - resolved "https://registry.yarnpkg.com/ncp/-/ncp-0.4.2.tgz#abcc6cbd3ec2ed2a729ff6e7c1fa8f01784a8574" +nan@^2.3.0: + version "2.8.0" + resolved "https://registry.yarnpkg.com/nan/-/nan-2.8.0.tgz#ed715f3fe9de02b57a5e6252d90a96675e1f085a" -nise@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/nise/-/nise-1.2.0.tgz#079d6cadbbcb12ba30e38f1c999f36ad4d6baa53" +natural-compare@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" + +node-int64@^0.4.0: + version "0.4.0" + 
resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" + +node-notifier@^5.2.1: + version "5.2.1" + resolved "https://registry.yarnpkg.com/node-notifier/-/node-notifier-5.2.1.tgz#fa313dd08f5517db0e2502e5758d664ac69f9dea" + dependencies: + growly "^1.3.0" + semver "^5.4.1" + shellwords "^0.1.1" + which "^1.3.0" + +node-pre-gyp@^0.6.39: + version "0.6.39" + resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.6.39.tgz#c00e96860b23c0e1420ac7befc5044e1d78d8649" dependencies: - formatio "^1.2.0" - just-extend "^1.1.26" - lolex "^1.6.0" - path-to-regexp "^1.7.0" - text-encoding "^0.6.4" + detect-libc "^1.0.2" + hawk "3.1.3" + mkdirp "^0.5.1" + nopt "^4.0.1" + npmlog "^4.0.2" + rc "^1.1.7" + request "2.81.0" + rimraf "^2.6.1" + semver "^5.3.0" + tar "^2.2.1" + tar-pack "^3.4.0" node-source-walk@3.2.0: version "3.2.0" @@ -990,21 +2458,78 @@ node-source-walk@^3.0.0, node-source-walk@^3.2.0, node-source-walk@^3.3.0: dependencies: babylon "^6.17.0" -nopt@3.x: - version "3.0.6" - resolved "https://registry.yarnpkg.com/nopt/-/nopt-3.0.6.tgz#c6465dbf08abcd4db359317f79ac68a646b28ff9" +nopt@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/nopt/-/nopt-4.0.1.tgz#d0d4685afd5415193c8c7505602d0d17cd64474d" dependencies: abbrev "1" + osenv "^0.1.4" + +normalize-package-data@^2.3.2: + version "2.4.0" + resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.4.0.tgz#12f95a307d58352075a04907b84ac8be98ac012f" + dependencies: + hosted-git-info "^2.1.4" + is-builtin-module "^1.0.0" + semver "2 || 3 || 4 || 5" + validate-npm-package-license "^3.0.1" + +normalize-path@^2.0.0, normalize-path@^2.0.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9" + dependencies: + remove-trailing-separator "^1.0.1" + +npm-run-path@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" + dependencies: + path-key "^2.0.0" + +npmlog@^4.0.2: + version "4.1.2" + resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-4.1.2.tgz#08a7f2a8bf734604779a9efa4ad5cc717abb954b" + dependencies: + are-we-there-yet "~1.1.2" + console-control-strings "~1.1.0" + gauge "~2.7.3" + set-blocking "~2.0.0" number-is-nan@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" -object-assign@^4.0.1: +nwmatcher@^1.4.3: + version "1.4.3" + resolved "https://registry.yarnpkg.com/nwmatcher/-/nwmatcher-1.4.3.tgz#64348e3b3d80f035b40ac11563d278f8b72db89c" + +oauth-sign@~0.8.1, oauth-sign@~0.8.2: + version "0.8.2" + resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.8.2.tgz#46a6ab7f0aead8deae9ec0565780b7d4efeb9d43" + +object-assign@^4.0.1, object-assign@^4.1.0: version "4.1.1" resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" -once@^1.3.0, once@^1.4.0: +object-keys@^1.0.8: + version "1.0.11" + resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.0.11.tgz#c54601778ad560f1142ce0e01bcca8b56d13426d" + +object.getownpropertydescriptors@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.0.3.tgz#8758c846f5b407adab0f236e0986f14b051caa16" + dependencies: + define-properties "^1.1.2" + es-abstract "^1.5.1" 
+ +object.omit@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/object.omit/-/object.omit-2.0.1.tgz#1a9c744829f39dbb858c76ca3579ae2a54ebd1fa" + dependencies: + for-own "^0.1.4" + is-extendable "^0.1.1" + +once@^1.3.0, once@^1.3.3, once@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" dependencies: @@ -1034,59 +2559,186 @@ optionator@^0.8.1: type-check "~0.3.2" wordwrap "~1.0.0" -ora@1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/ora/-/ora-1.2.0.tgz#32fb3183500efe83f5ea89101785f0ee6060fec9" +ora@1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/ora/-/ora-1.4.0.tgz#884458215b3a5d4097592285f93321bb7a79e2e5" dependencies: - chalk "^1.1.1" + chalk "^2.1.0" cli-cursor "^2.1.0" - cli-spinners "^1.0.0" - log-symbols "^1.0.2" + cli-spinners "^1.0.1" + log-symbols "^2.1.0" + +os-homedir@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" + +os-locale@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-2.1.0.tgz#42bc2900a6b5b8bd17376c8e882b65afccf24bf2" + dependencies: + execa "^0.7.0" + lcid "^1.0.0" + mem "^1.1.0" + +os-tmpdir@^1.0.0, os-tmpdir@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" + +osenv@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.4.tgz#42fe6d5953df06c8064be6f176c3d05aaaa34644" + dependencies: + os-homedir "^1.0.0" + os-tmpdir "^1.0.0" + +p-finally@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" + +p-limit@^1.1.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-1.2.0.tgz#0e92b6bedcb59f022c13d0f1949dc82d15909f1c" + dependencies: + p-try "^1.0.0" + +p-locate@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43" + dependencies: + p-limit "^1.1.0" + +p-try@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/p-try/-/p-try-1.0.0.tgz#cbc79cdbaf8fd4228e13f621f2b1a237c1b207b3" + +parse-glob@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/parse-glob/-/parse-glob-3.0.4.tgz#b2c376cfb11f35513badd173ef0bb6e3a388391c" + dependencies: + glob-base "^0.3.0" + is-dotfile "^1.0.0" + is-extglob "^1.0.0" + is-glob "^2.0.0" + +parse-json@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-2.2.0.tgz#f480f40434ef80741f8469099f8dea18f55a4dc9" + dependencies: + error-ex "^1.2.0" parse-ms@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/parse-ms/-/parse-ms-1.0.1.tgz#56346d4749d78f23430ca0c713850aef91aa361d" -path-is-absolute@^1.0.0: +parse5@^3.0.2: + version "3.0.3" + resolved "https://registry.yarnpkg.com/parse5/-/parse5-3.0.3.tgz#042f792ffdd36851551cf4e9e066b3874ab45b5c" + dependencies: + "@types/node" "*" + +path-exists@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-2.1.0.tgz#0feb6c64f0fc518d9a754dd5efb62c7022761f4b" + dependencies: + pinkie-promise "^2.0.0" + +path-exists@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" + +path-is-absolute@^1.0.0, path-is-absolute@^1.0.1: version "1.0.1" resolved 
"https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" +path-key@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" + path-parse@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.5.tgz#3c1adf871ea9cd6c9431b6ea2bd74a0ff055c4c1" -path-to-regexp@^1.7.0: - version "1.7.0" - resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-1.7.0.tgz#59fde0f435badacba103a84e9d3bc64e96b9937d" +path-type@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/path-type/-/path-type-1.1.0.tgz#59c44f7ee491da704da415da5a4070ba4f8fe441" + dependencies: + graceful-fs "^4.1.2" + pify "^2.0.0" + pinkie-promise "^2.0.0" + +performance-now@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-0.2.0.tgz#33ef30c5c77d4ea21c5a53869d91b56d8f2555e5" + +performance-now@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" + +pify@^2.0.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" + +pinkie-promise@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" + dependencies: + pinkie "^2.0.0" + +pinkie@^2.0.0: + version "2.0.4" + resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" + +pkg-dir@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-2.0.0.tgz#f6d5d1109e19d63edf428e0bd57e12777615334b" + dependencies: + find-up "^2.1.0" + +plur@^2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/plur/-/plur-2.1.2.tgz#7482452c1a0f508e3e344eaec312c91c29dc655a" dependencies: - isarray "0.0.1" + irregular-plurals "^1.0.0" -pathval@^1.0.0: +pluralize@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/pluralize/-/pluralize-7.0.0.tgz#298b89df8b93b0221dbf421ad2b1b1ea23fc6777" + +pn@^1.0.0: version "1.1.0" - resolved "https://registry.yarnpkg.com/pathval/-/pathval-1.1.0.tgz#b942e6d4bde653005ef6b71361def8727d0645e0" + resolved "https://registry.yarnpkg.com/pn/-/pn-1.1.0.tgz#e2f4cef0e219f463c179ab37463e4e1ecdccbafb" -plur@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/plur/-/plur-1.0.0.tgz#db85c6814f5e5e5a3b49efc28d604fec62975156" +postcss-values-parser@^1.5.0: + version "1.5.0" + resolved "https://registry.yarnpkg.com/postcss-values-parser/-/postcss-values-parser-1.5.0.tgz#5d9fa63e2bcb0179ce48f3235303765eb89f3047" + dependencies: + flatten "^1.0.2" + indexes-of "^1.0.1" + uniq "^1.0.1" -pluralize@4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/pluralize/-/pluralize-4.0.0.tgz#59b708c1c0190a2f692f1c7618c446b052fd1762" +postcss@^6.0.21: + version "6.0.21" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-6.0.21.tgz#8265662694eddf9e9a5960db6da33c39e4cd069d" + dependencies: + chalk "^2.3.2" + source-map "^0.6.1" + supports-color "^5.3.0" -precinct@^3.8.0: - version "3.8.0" - resolved "https://registry.yarnpkg.com/precinct/-/precinct-3.8.0.tgz#259a9490a85477a1f26989fbdf2fb26c44f2475a" +precinct@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/precinct/-/precinct-4.1.0.tgz#8b7a365e950324c4204b7edb476737606d49725f" dependencies: commander "^2.11.0" 
debug "^3.0.1" detective-amd "^2.4.0" detective-cjs "^2.0.0" detective-es6 "^1.2.0" - detective-less "1.0.0" + detective-less "^1.0.1" + detective-postcss "^2.0.0" detective-sass "^2.0.0" detective-scss "^1.0.0" detective-stylus "^1.0.0" - detective-typescript "^1.0.0" + detective-typescript "^2.0.0" module-definition "^2.2.4" node-source-walk "^3.3.0" @@ -1094,13 +2746,34 @@ prelude-ls@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" -pretty-ms@2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/pretty-ms/-/pretty-ms-2.1.0.tgz#4257c256df3fb0b451d6affaab021884126981dc" +preserve@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/preserve/-/preserve-0.2.0.tgz#815ed1f6ebc65926f865b310c0713bcb3315ce4b" + +pretty-format@^22.1.0: + version "22.1.0" + resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-22.1.0.tgz#2277605b40ed4529ae4db51ff62f4be817647914" + dependencies: + ansi-regex "^3.0.0" + ansi-styles "^3.2.0" + +pretty-format@^22.4.3: + version "22.4.3" + resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-22.4.3.tgz#f873d780839a9c02e9664c8a082e9ee79eaac16f" + dependencies: + ansi-regex "^3.0.0" + ansi-styles "^3.2.0" + +pretty-ms@^3.0.1: + version "3.1.0" + resolved "https://registry.yarnpkg.com/pretty-ms/-/pretty-ms-3.1.0.tgz#e9cac9c76bf6ee52fe942dd9c6c4213153b12881" dependencies: - is-finite "^1.0.1" parse-ms "^1.0.0" - plur "^1.0.0" + plur "^2.1.2" + +private@^0.1.7: + version "0.1.8" + resolved "https://registry.yarnpkg.com/private/-/private-0.1.8.tgz#2381edb3689f7a53d653190060fcf822d2f368ff" process-nextick-args@~1.0.6: version "1.0.7" @@ -1110,16 +2783,67 @@ prr@~0.0.0: version "0.0.0" resolved "https://registry.yarnpkg.com/prr/-/prr-0.0.0.tgz#1a84b85908325501411853d0081ee3fa86e2926a" -rc@1.1.6: - version "1.1.6" - resolved "https://registry.yarnpkg.com/rc/-/rc-1.1.6.tgz#43651b76b6ae53b5c802f1151fa3fc3b059969c9" +pseudomap@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" + +punycode@^1.4.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" + +punycode@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.0.tgz#5f863edc89b96db09074bad7947bf09056ca4e7d" + +qs@~6.4.0: + version "6.4.0" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.4.0.tgz#13e26d28ad6b0ffaa91312cd3bf708ed351e7233" + +qs@~6.5.1: + version "6.5.1" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.1.tgz#349cdf6eef89ec45c12d7d5eb3fc0c870343a6d8" + +randomatic@^1.1.3: + version "1.1.7" + resolved "https://registry.yarnpkg.com/randomatic/-/randomatic-1.1.7.tgz#c7abe9cc8b87c0baa876b19fde83fd464797e38c" + dependencies: + is-number "^3.0.0" + kind-of "^4.0.0" + +rc@1.2.5: + version "1.2.5" + resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.5.tgz#275cd687f6e3b36cc756baa26dfee80a790301fd" + dependencies: + deep-extend "~0.4.0" + ini "~1.3.0" + minimist "^1.2.0" + strip-json-comments "~2.0.1" + +rc@^1.1.7: + version "1.2.3" + resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.3.tgz#51575a900f8dd68381c710b4712c2154c3e2035b" dependencies: deep-extend "~0.4.0" ini "~1.3.0" minimist "^1.2.0" - strip-json-comments "~1.0.4" + strip-json-comments "~2.0.1" -readable-stream@^2.0.1: +read-pkg-up@^1.0.1: + version "1.0.1" + resolved 
"https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-1.0.1.tgz#9d63c13276c065918d57f002a57f40a1b643fb02" + dependencies: + find-up "^1.0.0" + read-pkg "^1.0.0" + +read-pkg@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-1.1.0.tgz#f5ffaa5ecd29cb31c0474bca7d756b6bb29e3f28" + dependencies: + load-json-file "^1.0.0" + normalize-package-data "^2.3.2" + path-type "^1.0.0" + +readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.0.6, readable-stream@^2.1.4: version "2.3.3" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.3.tgz#368f2512d79f9d46fdfc71349ae7878bbc1eb95c" dependencies: @@ -1131,10 +2855,43 @@ readable-stream@^2.0.1: string_decoder "~1.0.3" util-deprecate "~1.0.1" +readdirp@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.1.0.tgz#4ed0ad060df3073300c48440373f72d1cc642d78" + dependencies: + graceful-fs "^4.1.2" + minimatch "^3.0.2" + readable-stream "^2.0.2" + set-immediate-shim "^1.0.1" + +realpath-native@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/realpath-native/-/realpath-native-1.0.0.tgz#7885721a83b43bd5327609f0ddecb2482305fdf0" + dependencies: + util.promisify "^1.0.0" + regenerator-runtime@^0.10.0: version "0.10.5" resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.10.5.tgz#336c3efc1220adcedda2c9fab67b5a7955a33658" +regenerator-runtime@^0.11.0: + version "0.11.1" + resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz#be05ad7f9bf7d22e056f9726cee5017fbf19e2e9" + +regex-cache@^0.4.2: + version "0.4.4" + resolved "https://registry.yarnpkg.com/regex-cache/-/regex-cache-0.4.4.tgz#75bdc58a2a1496cec48a12835bc54c8d562336dd" + dependencies: + is-equal-shallow "^0.1.3" + +remove-trailing-separator@^1.0.1: + version "1.1.0" + resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef" + +repeat-element@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.2.tgz#ef089a178d1483baae4d93eb98b4f9e4e11d990a" + repeat-string@^1.5.2: version "1.6.1" resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" @@ -1145,28 +2902,124 @@ repeating@^2.0.0: dependencies: is-finite "^1.0.0" -requirejs-config-file@~2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/requirejs-config-file/-/requirejs-config-file-2.0.1.tgz#1f2912703e3c4df8982b2c7bddd7a2a64fd16fb9" +request-promise-core@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/request-promise-core/-/request-promise-core-1.1.1.tgz#3eee00b2c5aa83239cfb04c5700da36f81cd08b6" dependencies: - esprima "~1.0.4" - fs-extra "~0.6.4" - stringify-object "~0.1.7" + lodash "^4.13.1" -requirejs@~2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/requirejs/-/requirejs-2.2.0.tgz#0f2b1538af2b8d0a4fffffde5d367aa9cd4cfe84" +request-promise-native@^1.0.3: + version "1.0.5" + resolved "https://registry.yarnpkg.com/request-promise-native/-/request-promise-native-1.0.5.tgz#5281770f68e0c9719e5163fd3fab482215f4fda5" + dependencies: + request-promise-core "1.1.1" + stealthy-require "^1.1.0" + tough-cookie ">=2.3.3" + +request@2.81.0: + version "2.81.0" + resolved "https://registry.yarnpkg.com/request/-/request-2.81.0.tgz#c6928946a0e06c5f8d6f8a9333469ffda46298a0" + dependencies: + aws-sign2 "~0.6.0" + aws4 "^1.2.1" + caseless 
"~0.12.0" + combined-stream "~1.0.5" + extend "~3.0.0" + forever-agent "~0.6.1" + form-data "~2.1.1" + har-validator "~4.2.1" + hawk "~3.1.3" + http-signature "~1.1.0" + is-typedarray "~1.0.0" + isstream "~0.1.2" + json-stringify-safe "~5.0.1" + mime-types "~2.1.7" + oauth-sign "~0.8.1" + performance-now "^0.2.0" + qs "~6.4.0" + safe-buffer "^5.0.1" + stringstream "~0.0.4" + tough-cookie "~2.3.0" + tunnel-agent "^0.6.0" + uuid "^3.0.0" + +request@^2.83.0: + version "2.83.0" + resolved "https://registry.yarnpkg.com/request/-/request-2.83.0.tgz#ca0b65da02ed62935887808e6f510381034e3356" + dependencies: + aws-sign2 "~0.7.0" + aws4 "^1.6.0" + caseless "~0.12.0" + combined-stream "~1.0.5" + extend "~3.0.1" + forever-agent "~0.6.1" + form-data "~2.3.1" + har-validator "~5.0.3" + hawk "~6.0.2" + http-signature "~1.2.0" + is-typedarray "~1.0.0" + isstream "~0.1.2" + json-stringify-safe "~5.0.1" + mime-types "~2.1.17" + oauth-sign "~0.8.2" + performance-now "^2.1.0" + qs "~6.5.1" + safe-buffer "^5.1.1" + stringstream "~0.0.5" + tough-cookie "~2.3.3" + tunnel-agent "^0.6.0" + uuid "^3.1.0" + +require-directory@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" + +require-main-filename@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-1.0.1.tgz#97f717b69d48784f5f526a6c5aa8ffdda055a4d1" + +requirejs-config-file@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/requirejs-config-file/-/requirejs-config-file-3.0.0.tgz#0eff582d6bda711099437941803ad47a5a451783" + dependencies: + esprima "^4.0.0" + fs-extra "^5.0.0" + stringify-object "^3.2.1" + +requirejs@^2.3.5: + version "2.3.5" + resolved "https://registry.yarnpkg.com/requirejs/-/requirejs-2.3.5.tgz#617b9acbbcb336540ef4914d790323a8d4b861b0" + +resolve-cwd@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-2.0.0.tgz#00a9f7387556e27038eae232caa372a6a59b665a" + dependencies: + resolve-from "^3.0.0" resolve-dependency-path@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/resolve-dependency-path/-/resolve-dependency-path-1.0.2.tgz#6abe93a6de3e4f9dce7b5e8261e1f47aa1af4dc2" +resolve-from@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-3.0.0.tgz#b22c7af7d9d6881bc8b6e653335eebcb0a188748" + +resolve@1.1.7: + version "1.1.7" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.1.7.tgz#203114d82ad2c5ed9e8e0411b3932875e889e97b" + resolve@^1.1.7, resolve@^1.3.2: version "1.3.3" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.3.3.tgz#655907c3469a8680dc2de3a275a8fdd69691f0e5" dependencies: path-parse "^1.0.5" +resolve@^1.5.0: + version "1.7.1" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.7.1.tgz#aadd656374fd298aee895bc026b8297418677fd3" + dependencies: + path-parse "^1.0.5" + restore-cursor@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-2.0.0.tgz#9f7ee287f82fd326d4fd162923d62129eee0dfaf" @@ -1180,23 +3033,35 @@ right-align@^0.1.1: dependencies: align-text "^0.1.1" +rimraf@2, rimraf@^2.5.1, rimraf@^2.5.4: + version "2.6.2" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.2.tgz#2ed8150d24a16ea8651e6d6ef0f47c4158ce7a36" + dependencies: + glob "^7.0.5" + rimraf@^2.6.1: version "2.6.1" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.1.tgz#c2338ec643df7a1b7fe5c54fa86f57428a55f33d" 
dependencies: glob "^7.0.5" -rimraf@~2.2.0: - version "2.2.8" - resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.2.8.tgz#e439be2aaee327321952730f99a8929e4fc50582" - -safe-buffer@~5.1.0, safe-buffer@~5.1.1: +safe-buffer@^5.0.1, safe-buffer@^5.1.1, safe-buffer@~5.1.0, safe-buffer@~5.1.1: version "5.1.1" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.1.tgz#893312af69b2123def71f57889001671eeb2c853" -samsam@1.x: - version "1.3.0" - resolved "https://registry.yarnpkg.com/samsam/-/samsam-1.3.0.tgz#8d1d9350e25622da30de3e44ba692b5221ab7c50" +sane@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/sane/-/sane-2.2.0.tgz#d6d2e2fcab00e3d283c93b912b7c3a20846f1d56" + dependencies: + anymatch "^1.3.0" + exec-sh "^0.2.0" + fb-watchman "^2.0.0" + minimatch "^3.0.2" + minimist "^1.1.1" + walker "~1.0.5" + watch "~0.18.0" + optionalDependencies: + fsevents "^1.1.1" sass-lookup@^1.1.0: version "1.1.0" @@ -1205,25 +3070,80 @@ sass-lookup@^1.1.0: commander "~2.8.1" is-relative-path "~1.0.0" +sax@^1.2.1: + version "1.2.4" + resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" + +"semver@2 || 3 || 4 || 5", semver@5.4.1, semver@^5.4.1: + version "5.4.1" + resolved "https://registry.yarnpkg.com/semver/-/semver-5.4.1.tgz#e059c09d8571f0540823733433505d3a2f00b18e" + semver@^5.3.0: version "5.3.0" resolved "https://registry.yarnpkg.com/semver/-/semver-5.3.0.tgz#9b2ce5d3de02d17c6012ad326aa6b4d0cf54f94f" -signal-exit@^3.0.2: +set-blocking@^2.0.0, set-blocking@~2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" + +set-immediate-shim@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/set-immediate-shim/-/set-immediate-shim-1.0.1.tgz#4b2b1b27eb808a9f8dcc481a58e5e56f599f3f61" + +shebang-command@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea" + dependencies: + shebang-regex "^1.0.0" + +shebang-regex@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" + +shell-quote@^1.6.1: + version "1.6.1" + resolved "https://registry.yarnpkg.com/shell-quote/-/shell-quote-1.6.1.tgz#f4781949cce402697127430ea3b3c5476f481767" + dependencies: + array-filter "~0.0.0" + array-map "~0.0.0" + array-reduce "~0.0.0" + jsonify "~0.0.0" + +shellwords@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/shellwords/-/shellwords-0.1.1.tgz#d6b9181c1a48d397324c84871efbcfc73fc0654b" + +signal-exit@^3.0.0, signal-exit@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d" -sinon@^4.1.2: - version "4.1.2" - resolved "https://registry.yarnpkg.com/sinon/-/sinon-4.1.2.tgz#65610521d926fb53742dd84cd599f0b89a82f440" +slash@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/slash/-/slash-1.0.0.tgz#c41f2f6c39fc16d1cd17ad4b5d896114ae470d55" + +sntp@1.x.x: + version "1.0.9" + resolved "https://registry.yarnpkg.com/sntp/-/sntp-1.0.9.tgz#6541184cc90aeea6c6e7b35e2659082443c66198" + dependencies: + hoek "2.x.x" + +sntp@2.x.x: + version "2.1.0" + resolved "https://registry.yarnpkg.com/sntp/-/sntp-2.1.0.tgz#2c6cec14fedc2222739caf9b5c3d85d1cc5a2cc8" + dependencies: + hoek "4.x.x" + +source-map-support@^0.4.15: + version "0.4.18" + resolved 
"https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.4.18.tgz#0286a6de8be42641338594e97ccea75f0a2c585f" dependencies: - diff "^3.1.0" - formatio "1.2.0" - lodash.get "^4.4.2" - lolex "^2.2.0" - nise "^1.2.0" - supports-color "^4.4.0" - type-detect "^4.0.0" + source-map "^0.5.6" + +source-map-support@^0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.0.tgz#2018a7ad2bdf8faf2691e5fddab26bed5a2bacab" + dependencies: + source-map "^0.6.0" source-map@^0.4.4: version "0.4.4" @@ -1235,35 +3155,123 @@ source-map@^0.5.0, source-map@^0.5.3, source-map@~0.5.1, source-map@~0.5.6: version "0.5.6" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.6.tgz#75ce38f52bf0733c5a7f0c118d81334a2bb5f412" +source-map@^0.5.6: + version "0.5.7" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" + +source-map@^0.6.0, source-map@^0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" + +spdx-correct@~1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-1.0.2.tgz#4b3073d933ff51f3912f03ac5519498a4150db40" + dependencies: + spdx-license-ids "^1.0.2" + +spdx-expression-parse@~1.0.0: + version "1.0.4" + resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-1.0.4.tgz#9bdf2f20e1f40ed447fbe273266191fced51626c" + +spdx-license-ids@^1.0.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-1.2.2.tgz#c9df7a3424594ade6bd11900d596696dc06bac57" + sprintf-js@~1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" +sshpk@^1.7.0: + version "1.13.1" + resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.13.1.tgz#512df6da6287144316dc4c18fe1cf1d940739be3" + dependencies: + asn1 "~0.2.3" + assert-plus "^1.0.0" + dashdash "^1.12.0" + getpass "^0.1.1" + optionalDependencies: + bcrypt-pbkdf "^1.0.0" + ecc-jsbn "~0.1.1" + jsbn "~0.1.0" + tweetnacl "~0.14.0" + +stack-utils@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-1.0.1.tgz#d4f33ab54e8e38778b0ca5cfd3b3afb12db68620" + +stealthy-require@^1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/stealthy-require/-/stealthy-require-1.1.1.tgz#35b09875b4ff49f26a777e509b3090a3226bf24b" + +string-length@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/string-length/-/string-length-2.0.0.tgz#d40dbb686a3ace960c1cffca562bf2c45f8363ed" + dependencies: + astral-regex "^1.0.0" + strip-ansi "^4.0.0" + +string-width@^1.0.1, string-width@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" + dependencies: + code-point-at "^1.0.0" + is-fullwidth-code-point "^1.0.0" + strip-ansi "^3.0.0" + +string-width@^2.0.0, string-width@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" + dependencies: + is-fullwidth-code-point "^2.0.0" + strip-ansi "^4.0.0" + string_decoder@~1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.0.3.tgz#0fc67d7c141825de94282dd536bec6b9bce860ab" dependencies: safe-buffer "~5.1.0" -stringify-object@~0.1.7: - version "0.1.8" - resolved 
"https://registry.yarnpkg.com/stringify-object/-/stringify-object-0.1.8.tgz#463348f38fdcd4fec1c011084c24a59ac653c1ee" +stringify-object@^3.2.1: + version "3.2.2" + resolved "https://registry.yarnpkg.com/stringify-object/-/stringify-object-3.2.2.tgz#9853052e5a88fb605a44cd27445aa257ad7ffbcd" + dependencies: + get-own-enumerable-property-symbols "^2.0.1" + is-obj "^1.0.1" + is-regexp "^1.0.0" + +stringstream@~0.0.4, stringstream@~0.0.5: + version "0.0.5" + resolved "https://registry.yarnpkg.com/stringstream/-/stringstream-0.0.5.tgz#4e484cd4de5a0bbbee18e46307710a8a81621878" -strip-ansi@^3.0.0: +strip-ansi@^3.0.0, strip-ansi@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" dependencies: ansi-regex "^2.0.0" +strip-ansi@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" + dependencies: + ansi-regex "^3.0.0" + +strip-bom@3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" + strip-bom@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-2.0.0.tgz#6219a85616520491f35788bdbf1447a99c7e6b0e" dependencies: is-utf8 "^0.2.0" -strip-json-comments@~1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-1.0.4.tgz#1e15fbcac97d3ee99bf2d73b4c656b082bbafb91" +strip-eof@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf" + +strip-json-comments@~2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" stylus-lookup@^1.0.1: version "1.0.2" @@ -1273,33 +3281,76 @@ stylus-lookup@^1.0.1: debug "^3.1.0" is-relative-path "~1.0.0" -supports-color@3.1.2, supports-color@^3.1.2: - version "3.1.2" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-3.1.2.tgz#72a262894d9d408b956ca05ff37b2ed8a6e2a2d5" +subarg@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/subarg/-/subarg-1.0.0.tgz#f62cf17581e996b48fc965699f54c06ae268b8d2" dependencies: - has-flag "^1.0.0" + minimist "^1.1.0" supports-color@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" -supports-color@^4.4.0: +supports-color@^3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-3.1.2.tgz#72a262894d9d408b956ca05ff37b2ed8a6e2a2d5" + dependencies: + has-flag "^1.0.0" + +supports-color@^4.0.0: version "4.5.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-4.5.0.tgz#be7a0de484dec5c5cddf8b3d59125044912f635b" dependencies: has-flag "^2.0.0" -tapable@^0.2.5: +supports-color@^5.3.0: + version "5.4.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.4.0.tgz#1c6b337402c2137605efe19f10fec390f6faab54" + dependencies: + has-flag "^3.0.0" + +symbol-tree@^3.2.1: + version "3.2.2" + resolved "https://registry.yarnpkg.com/symbol-tree/-/symbol-tree-3.2.2.tgz#ae27db38f660a7ae2e1c3b7d1bc290819b8519e6" + +tapable@^0.2.7: version "0.2.8" resolved "https://registry.yarnpkg.com/tapable/-/tapable-0.2.8.tgz#99372a5c999bf2df160afc0d74bed4f47948cd22" +tar-pack@^3.4.0: + version "3.4.1" + resolved 
"https://registry.yarnpkg.com/tar-pack/-/tar-pack-3.4.1.tgz#e1dbc03a9b9d3ba07e896ad027317eb679a10a1f" + dependencies: + debug "^2.2.0" + fstream "^1.0.10" + fstream-ignore "^1.0.5" + once "^1.3.3" + readable-stream "^2.1.4" + rimraf "^2.5.1" + tar "^2.2.1" + uid-number "^0.0.6" + +tar@^2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/tar/-/tar-2.2.1.tgz#8e4d2a256c0e2185c6b18ad694aec968b83cb1d1" + dependencies: + block-stream "*" + fstream "^1.0.2" + inherits "2" + temp@~0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/temp/-/temp-0.4.0.tgz#671ad63d57be0fe9d7294664b3fc400636678a60" -text-encoding@^0.6.4: - version "0.6.4" - resolved "https://registry.yarnpkg.com/text-encoding/-/text-encoding-0.6.4.tgz#e399a982257a276dae428bb92845cb71bdc26d19" +test-exclude@^4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-4.1.1.tgz#4d84964b0966b0087ecc334a2ce002d3d9341e26" + dependencies: + arrify "^1.0.1" + micromatch "^2.3.11" + object-assign "^4.1.0" + read-pkg-up "^1.0.1" + require-main-filename "^1.0.1" thenify-all@^1.0.0: version "1.6.0" @@ -1313,10 +3364,30 @@ thenify-all@^1.0.0: dependencies: any-promise "^1.0.0" -to-fast-properties@^1.0.1: +throat@^4.0.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/throat/-/throat-4.1.0.tgz#89037cbc92c56ab18926e6ba4cbb200e15672a6a" + +tmpl@1.0.x: + version "1.0.4" + resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.4.tgz#23640dd7b42d00433911140820e5cf440e521dd1" + +to-fast-properties@^1.0.1, to-fast-properties@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-1.0.3.tgz#b83571fa4d8c25b82e231b06e3a3055de4ca1a47" +tough-cookie@>=2.3.3, tough-cookie@^2.3.3, tough-cookie@~2.3.0, tough-cookie@~2.3.3: + version "2.3.3" + resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.3.3.tgz#0b618a5565b6dea90bf3425d04d55edc475a7561" + dependencies: + punycode "^1.4.1" + +tr46@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-1.0.1.tgz#a8b13fd6bfd2489519674ccde55ba3693b706d09" + dependencies: + punycode "^2.1.0" + traverse-chain@~0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/traverse-chain/-/traverse-chain-0.1.0.tgz#61dbc2d53b69ff6091a12a168fd7d433107e40f1" @@ -1325,30 +3396,61 @@ trim-right@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/trim-right/-/trim-right-1.0.1.tgz#cb2e1203067e0c8de1f614094b9fe45704ea6003" -tslib@^1.7.1, tslib@^1.8.0: +ts-jest@^22.0.1: + version "22.0.1" + resolved "https://registry.yarnpkg.com/ts-jest/-/ts-jest-22.0.1.tgz#48942936a466c2e76e259b02e2f1356f1839afc3" + dependencies: + babel-core "^6.24.1" + babel-plugin-istanbul "^4.1.4" + babel-plugin-transform-es2015-modules-commonjs "^6.24.1" + babel-preset-jest "^22.0.1" + cpx "^1.5.0" + fs-extra "4.0.3" + jest-config "^22.0.1" + pkg-dir "^2.0.0" + source-map-support "^0.5.0" + yargs "^10.0.3" + +tslib@^1.8.0: version "1.8.0" resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.8.0.tgz#dc604ebad64bcbf696d613da6c954aa0e7ea1eb6" -tslint@^5.7.0: - version "5.7.0" - resolved "https://registry.yarnpkg.com/tslint/-/tslint-5.7.0.tgz#c25e0d0c92fa1201c2bc30e844e08e682b4f3552" +tslib@^1.8.1, tslib@^1.9.0: + version "1.9.0" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.9.0.tgz#e37a86fda8cbbaf23a057f473c9f4dc64e5fc2e8" + +tslint@^5.9.1: + version "5.9.1" + resolved "https://registry.yarnpkg.com/tslint/-/tslint-5.9.1.tgz#1255f87a3ff57eb0b0e1f0e610a8b4748046c9ae" dependencies: 
babel-code-frame "^6.22.0" - colors "^1.1.2" - commander "^2.9.0" + builtin-modules "^1.1.1" + chalk "^2.3.0" + commander "^2.12.1" diff "^3.2.0" glob "^7.1.1" + js-yaml "^3.7.0" minimatch "^3.0.4" resolve "^1.3.2" semver "^5.3.0" - tslib "^1.7.1" - tsutils "^2.8.1" + tslib "^1.8.0" + tsutils "^2.12.1" + +tsutils@^2.12.1: + version "2.26.2" + resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-2.26.2.tgz#a9f9f63434a456a5e0c95a45d9a59181cb32d3bf" + dependencies: + tslib "^1.8.1" -tsutils@^2.8.1: - version "2.12.1" - resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-2.12.1.tgz#f4d95ce3391c8971e46e54c4cf0edb0a21dd5b24" +tunnel-agent@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" dependencies: - tslib "^1.7.1" + safe-buffer "^5.0.1" + +tweetnacl@^0.14.3, tweetnacl@~0.14.0: + version "0.14.5" + resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" type-check@~0.3.2: version "0.3.2" @@ -1356,29 +3458,21 @@ type-check@~0.3.2: dependencies: prelude-ls "~1.1.2" -type-detect@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-3.0.0.tgz#46d0cc8553abb7b13a352b0d6dea2fd58f2d9b55" - -type-detect@^4.0.0: - version "4.0.3" - resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.3.tgz#0e3f2670b44099b0b46c284d136a7ef49c74c2ea" - -typescript-eslint-parser@1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/typescript-eslint-parser/-/typescript-eslint-parser-1.0.2.tgz#fd2abacf2ee3d9382ab3e449c8762b6beae4d0d7" +typescript-eslint-parser@^9.0.0: + version "9.0.1" + resolved "https://registry.yarnpkg.com/typescript-eslint-parser/-/typescript-eslint-parser-9.0.1.tgz#1497a565d192ca2a321bc5bbf89dcab0a2da75e8" dependencies: - lodash.unescape "4.0.0" - object-assign "^4.0.1" - -typescript@2.0.10: - version "2.0.10" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-2.0.10.tgz#ccdd4ed86fd5550a407101a0814012e1b3fac3dd" + lodash.unescape "4.0.1" + semver "5.4.1" -typescript@^2.4.2, typescript@^2.6.0: +typescript@^2.4.2: version "2.6.1" resolved "https://registry.yarnpkg.com/typescript/-/typescript-2.6.1.tgz#ef39cdea27abac0b500242d6726ab90e0c846631" +typescript@^2.6.1, typescript@^2.8.3: + version "2.8.3" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-2.8.3.tgz#5d817f9b6f31bb871835f4edf0089f21abe6c170" + uglify-js@^2.6: version "2.8.29" resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-2.8.29.tgz#29c5733148057bb4e1f75df35b7a9cb72e6a59dd" @@ -1392,20 +3486,99 @@ uglify-to-browserify@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/uglify-to-browserify/-/uglify-to-browserify-1.0.2.tgz#6e0924d6bda6b5afe349e39a6d632850a0f882b7" +uid-number@^0.0.6: + version "0.0.6" + resolved "https://registry.yarnpkg.com/uid-number/-/uid-number-0.0.6.tgz#0ea10e8035e8eb5b8e4449f06da1c730663baa81" + +uniq@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/uniq/-/uniq-1.0.1.tgz#b31c5ae8254844a3a8281541ce2b04b865a734ff" + +universalify@^0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.1.tgz#fa71badd4437af4c148841e3b3b165f9e9e590b7" + util-deprecate@~1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" -walkdir@0.0.11: - version "0.0.11" - resolved 
"https://registry.yarnpkg.com/walkdir/-/walkdir-0.0.11.tgz#a16d025eb931bd03b52f308caed0f40fcebe9532" +util.promisify@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/util.promisify/-/util.promisify-1.0.0.tgz#440f7165a459c9a16dc145eb8e72f35687097030" + dependencies: + define-properties "^1.1.2" + object.getownpropertydescriptors "^2.0.3" + +uuid@^3.0.0, uuid@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.1.0.tgz#3dd3d3e790abc24d7b0d3a034ffababe28ebbc04" + +validate-npm-package-license@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.1.tgz#2804babe712ad3379459acfbe24746ab2c303fbc" + dependencies: + spdx-correct "~1.0.0" + spdx-expression-parse "~1.0.0" + +verror@1.10.0: + version "1.10.0" + resolved "https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400" + dependencies: + assert-plus "^1.0.0" + core-util-is "1.0.2" + extsprintf "^1.2.0" + +walkdir@^0.0.12: + version "0.0.12" + resolved "https://registry.yarnpkg.com/walkdir/-/walkdir-0.0.12.tgz#2f24f1ade64aab1e458591d4442c8868356e9281" + +walker@~1.0.5: + version "1.0.7" + resolved "https://registry.yarnpkg.com/walker/-/walker-1.0.7.tgz#2f7f9b8fd10d677262b18a884e28d19618e028fb" + dependencies: + makeerror "1.0.x" + +watch@~0.18.0: + version "0.18.0" + resolved "https://registry.yarnpkg.com/watch/-/watch-0.18.0.tgz#28095476c6df7c90c963138990c0a5423eb4b986" + dependencies: + exec-sh "^0.2.0" + minimist "^1.2.0" + +webidl-conversions@^4.0.1, webidl-conversions@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-4.0.2.tgz#a855980b1f0b6b359ba1d5d9fb39ae941faa63ad" + +whatwg-encoding@^1.0.1: + version "1.0.3" + resolved "https://registry.yarnpkg.com/whatwg-encoding/-/whatwg-encoding-1.0.3.tgz#57c235bc8657e914d24e1a397d3c82daee0a6ba3" + dependencies: + iconv-lite "0.4.19" + +whatwg-url@^6.3.0: + version "6.4.0" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-6.4.0.tgz#08fdf2b9e872783a7a1f6216260a1d66cc722e08" + dependencies: + lodash.sortby "^4.7.0" + tr46 "^1.0.0" + webidl-conversions "^4.0.1" -which@^1.1.1: - version "1.2.14" - resolved "https://registry.yarnpkg.com/which/-/which-1.2.14.tgz#9a87c4378f03e827cecaf1acdf56c736c01c14e5" +which-module@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a" + +which@^1.2.12, which@^1.2.9, which@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/which/-/which-1.3.0.tgz#ff04bdfc010ee547d780bec38e1ac1c2777d253a" dependencies: isexe "^2.0.0" +wide-align@^1.1.0: + version "1.1.2" + resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.2.tgz#571e0f1b0604636ebc0dfc21b0339bbe31341710" + dependencies: + string-width "^1.0.2" + window-size@0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/window-size/-/window-size-0.1.0.tgz#5438cd2ea93b202efa3a19fe8887aee7c94f9c9d" @@ -1414,18 +3587,68 @@ wordwrap@0.0.2: version "0.0.2" resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.2.tgz#b79669bb42ecb409f83d583cad52ca17eaa1643f" -wordwrap@^1.0.0, wordwrap@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" - wordwrap@~0.0.2: version "0.0.3" resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.3.tgz#a3d5da6cd5c0bc0008d37234bbaf1bed63059107" 
+wordwrap@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" + +wrap-ansi@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-2.1.0.tgz#d8fc3d284dd05794fe84973caecdd1cf824fdd85" + dependencies: + string-width "^1.0.1" + strip-ansi "^3.0.1" + wrappy@1: version "1.0.2" resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" +write-file-atomic@^2.1.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-2.3.0.tgz#1ff61575c2e2a4e8e510d6fa4e243cce183999ab" + dependencies: + graceful-fs "^4.1.11" + imurmurhash "^0.1.4" + signal-exit "^3.0.2" + +xml-name-validator@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-2.0.1.tgz#4d8b8f1eccd3419aa362061becef515e1e559635" + +y18n@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/y18n/-/y18n-3.2.1.tgz#6d15fba884c08679c0d77e88e7759e811e07fa41" + +yallist@^2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52" + +yargs-parser@^8.1.0: + version "8.1.0" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-8.1.0.tgz#f1376a33b6629a5d063782944da732631e966950" + dependencies: + camelcase "^4.1.0" + +yargs@^10.0.3: + version "10.1.1" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-10.1.1.tgz#5fe1ea306985a099b33492001fa19a1e61efe285" + dependencies: + cliui "^4.0.0" + decamelize "^1.1.1" + find-up "^2.1.0" + get-caller-file "^1.0.1" + os-locale "^2.0.0" + require-directory "^2.1.1" + require-main-filename "^1.0.1" + set-blocking "^2.0.0" + string-width "^2.0.0" + which-module "^2.0.0" + y18n "^3.2.1" + yargs-parser "^8.1.0" + yargs@~3.10.0: version "3.10.0" resolved "https://registry.yarnpkg.com/yargs/-/yargs-3.10.0.tgz#f7ee7bd857dd7c1d2d38c0e74efbd681d1431fd1" From 3601fdf080b79313d8bf803adc33b46b6b25135b Mon Sep 17 00:00:00 2001 From: Jake Chitel Date: Sun, 29 Apr 2018 08:10:12 -0500 Subject: [PATCH 05/15] converted everything back to classes, started working on new type checking process --- src/core.ts | 150 +++++++------ src/parser/index.ts | 12 +- src/parser/lexer/char-stream.ts | 103 +++++---- src/parser/lexer/lexer-state.ts | 210 +++++++++--------- src/parser/lexer/lexer.ts | 27 ++- src/parser/lexer/token.ts | 55 +---- src/parser/parser.ts | 98 ++++---- src/runner.ts | 44 +--- src/syntax/ModuleRoot.ts | 3 +- .../declarations/ConstantDeclaration.ts | 3 +- src/syntax/declarations/ExportDeclaration.ts | 5 +- .../declarations/ExportForwardDeclaration.ts | 3 +- .../declarations/FunctionDeclaration.ts | 6 +- src/syntax/declarations/ImportDeclaration.ts | 7 +- src/syntax/declarations/TypeDeclaration.ts | 6 +- src/syntax/environment.ts | 106 +++++---- src/syntax/expressions/ArrayAccess.ts | 6 +- src/syntax/expressions/ArrayLiteral.ts | 3 +- src/syntax/expressions/BinaryExpression.ts | 16 +- src/syntax/expressions/BoolLiteral.ts | 3 +- src/syntax/expressions/CharLiteral.ts | 3 +- src/syntax/expressions/FieldAccess.ts | 6 +- src/syntax/expressions/FloatLiteral.ts | 3 +- src/syntax/expressions/FunctionApplication.ts | 6 +- .../expressions/IdentifierExpression.ts | 3 +- src/syntax/expressions/IfElseExpression.ts | 3 +- src/syntax/expressions/IntegerLiteral.ts | 3 +- src/syntax/expressions/LambdaExpression.ts | 5 +- .../expressions/ParenthesizedExpression.ts | 3 +- 
src/syntax/expressions/StringLiteral.ts | 3 +- src/syntax/expressions/StructLiteral.ts | 3 +- src/syntax/expressions/TupleLiteral.ts | 3 +- src/syntax/expressions/UnaryExpression.ts | 6 +- src/syntax/expressions/VarDeclaration.ts | 3 +- src/syntax/index.ts | 4 +- src/syntax/statements/Block.ts | 3 +- src/syntax/statements/BreakStatement.ts | 3 +- src/syntax/statements/ContinueStatement.ts | 3 +- src/syntax/statements/DoWhileStatement.ts | 3 +- src/syntax/statements/ExpressionStatement.ts | 3 +- src/syntax/statements/ForStatement.ts | 3 +- src/syntax/statements/ReturnStatement.ts | 3 +- src/syntax/statements/ThrowStatement.ts | 3 +- src/syntax/statements/TryCatchStatement.ts | 3 +- src/syntax/statements/WhileStatement.ts | 3 +- src/syntax/types/ArrayType.ts | 6 +- src/syntax/types/BuiltInType.ts | 3 +- src/syntax/types/FunctionType.ts | 3 +- src/syntax/types/IdentifierType.ts | 3 +- src/syntax/types/NamespaceAccessType.ts | 6 +- src/syntax/types/ParenthesizedType.ts | 3 +- src/syntax/types/SpecificType.ts | 6 +- src/syntax/types/StructType.ts | 4 +- src/syntax/types/TupleType.ts | 3 +- src/syntax/types/UnionType.ts | 6 +- src/syntax/visitor.ts | 41 ++++ src/typecheck/checker.ts | 123 ++++++++++ src/typecheck/error-context.ts | 48 ++++ src/typecheck/index.ts | 8 +- .../node-visitors/declaration-name-visitor.ts | 0 src/typecheck/node-visitors/module-visitor.ts | 78 +++++++ src/typecheck/program.ts | 72 ++++++ .../typecheck/resolver.ts | 0 src/utils/lazy-list.ts | 93 ++++---- src/utils/lazy.ts | 24 +- src/utils/utils.ts | 23 -- 66 files changed, 870 insertions(+), 636 deletions(-) create mode 100644 src/syntax/visitor.ts create mode 100644 src/typecheck/checker.ts create mode 100644 src/typecheck/error-context.ts create mode 100644 src/typecheck/node-visitors/declaration-name-visitor.ts create mode 100644 src/typecheck/node-visitors/module-visitor.ts create mode 100644 src/typecheck/program.ts rename src_old/typecheck/resolveModule.ts => src/typecheck/resolver.ts (100%) diff --git a/src/core.ts b/src/core.ts index cccdb95..59927dd 100644 --- a/src/core.ts +++ b/src/core.ts @@ -1,34 +1,50 @@ -export interface FilePosition { - readonly type: 'FilePosition'; - readonly path: string; - readonly position: [number, number]; - readonly computeRange: (image: string) => FileRange; - readonly nextLine: () => FilePosition; - readonly nextColumn: () => FilePosition; +/** + * Base type of all types in this project. + * JS classes are not particularly well-suited for immutable operations, + * so this provides some base-level operations to handle that. + * The type parameter must be the same type, because TS doesn't handle + * 'this' type properly for some reason. + */ +export class CoreObject> { + /** + * Creates a clone of 'this', applying an optional set of properties to the new object. + * Note that the type parameter is to allow private properties to be added. + * There will be an error if invalid types are provided for public properties. 
+ */ + clone>(props: C = {} as C): T { + // TS does not know how to properly handle spreads + const _props = { ...(this as any), ...(props as any) }; + return Object.assign(Object.create(Object.getPrototypeOf(this)), _props); + } } -export function FilePosition(path: string, position: [number, number]): FilePosition { - return { type: 'FilePosition', path, position, computeRange, nextLine, nextColumn }; -} +export class FilePosition extends CoreObject { + constructor( + readonly path: string, + readonly position: [number, number] + ) { + super(); + } -function computeRange(this: FilePosition, image: string) { - if (!image.includes('\n')) return FileRange(this.path, this.position, [this.position[0], this.position[1] + image.length - 1]); - const length = image.length; - // if the image ends with a newline, we have to ignore it because it is included within the previous line - const search = image.endsWith('\n') ? image.substring(0, length - 2) : image; - // number of line breaks in the string - const numBreaks = [...search].filter(c => c === '\n').length; - // number of characters after the previous line break (use the real length here) - const trailing = length - search.lastIndexOf('\n') - 1; - return FileRange(this.path, this.position, [this.position[0] + numBreaks, trailing]); -} + computeRange(image: string): FileRange { + if (!image.includes('\n')) return new FileRange(this.path, this.position, [this.position[0], this.position[1] + image.length - 1]); + const length = image.length; + // if the image ends with a newline, we have to ignore it because it is included within the previous line + const search = image.endsWith('\n') ? image.substring(0, length - 2) : image; + // number of line breaks in the string + const numBreaks = [...search].filter(c => c === '\n').length; + // number of characters after the previous line break (use the real length here) + const trailing = length - search.lastIndexOf('\n') - 1; + return new FileRange(this.path, this.position, [this.position[0] + numBreaks, trailing]); + } -function nextLine(this: FilePosition): FilePosition { - return { ...this, position: [this.position[0] + 1, 0] }; -} + nextLine(): FilePosition { + return this.clone({ position: [this.position[0] + 1, 0] }); + } -function nextColumn(this: FilePosition): FilePosition { - return { ...this, position: [this.position[0], this.position[1] + 1] }; + nextColumn(): FilePosition { + return this.clone({ position: [this.position[0], this.position[1] + 1] }); + } } /** @@ -37,65 +53,65 @@ function nextColumn(this: FilePosition): FilePosition { * - the start line/column of the range * - the end line/column of the range */ -export interface FileRange { - readonly type: 'FileRange'; - readonly path: string; - readonly start: [number, number]; - readonly end: [number, number]; - readonly merge: (location: FileRange) => FileRange; -} - -export function FileRange(path: string, start: [number, number], end: [number, number]): FileRange { - return { type: 'FileRange', path, start, end, merge }; -} +export class FileRange extends CoreObject { + constructor( + readonly path: string, + readonly start: [number, number], + readonly end: [number, number] + ) { + super(); + } -/** - * Create a new location that contains both this location and the specified location - */ -function merge(this: FileRange, location: FileRange): FileRange { - if (this.path !== location.path) throw new Error('Two locations in different files cannot be merged.'); - let start = this.start; - let end = this.end; - if (location.start[0] < 
this.start[0] || location.start[0] === this.start[0] && location.start[1] < this.start[1]) { - [start[0], start[1]] = [location.start[0], location.start[0]]; - } else if (location.end[0] > this.end[0] || location.end[0] === this.end[0] && location.end[1] > this.end[1]) { - [end[0], end[1]] = [location.end[0], location.end[1]]; + /** + * Create a new location that contains both this location and the specified location + */ + merge(location: FileRange): FileRange { + if (this.path !== location.path) throw new Error('Two locations in different files cannot be merged.'); + let start = this.start; + let end = this.end; + if (location.start[0] < this.start[0] || location.start[0] === this.start[0] && location.start[1] < this.start[1]) { + [start[0], start[1]] = [location.start[0], location.start[1]]; + } else if (location.end[0] > this.end[0] || location.end[0] === this.end[0] && location.end[1] > this.end[1]) { + [end[0], end[1]] = [location.end[0], location.end[1]]; + } + return new FileRange(this.path, start, end); } - return FileRange(this.path, start, end); } /** - * The level of a diagnostic, listed in order so that comparison operators can be used: - * - Verbose: diagnostics that should only appear when the user requests as much information as possible - * - Message: diagnostics that serve to notify the user, and can be safely ignored - * - Warning: diagnostics that indicate a problem that will not trigger a failure - * - Error: diagnostics that indicate a problem that will trigger a failure - * - Fatal: diagnostics that indicate a problem that causes compilation to immediately fail + * The level of a diagnostic, listed in order so that comparison operators can be used */ export enum DiagnosticLevel { + /** Diagnostics that should only appear when the user requests as much information as possible */ Verbose = 1, + /** Diagnostics that serve to notify the user, and can be safely ignored */ Message = 2, + /** Diagnostics that indicate a problem that will not trigger a failure, but may trigger a failure later on */ Warning = 3, + /** Diagnostics that indicate a problem that will trigger a failure */ Error = 4, + /** Diagnostics that indicate a problem that causes compilation to immediately fail */ Fatal = 5, } /** * Represents a message to report to the user as an output of compilation.
*/ -export interface Diagnostic { - readonly level: DiagnosticLevel; - readonly message: string; +export class Diagnostic extends CoreObject { readonly location: FileRange; - readonly toString: () => string; -} -export function Diagnostic(message: string, location: FileRange | FilePosition, level: DiagnosticLevel = DiagnosticLevel.Error): Diagnostic { - if (location.type === 'FilePosition') location = FileRange(location.path, location.position, location.position); - return { message, location, level, toString: diagToString }; -} + constructor( + readonly message: string, + location: FileRange | FilePosition, + readonly level: DiagnosticLevel = DiagnosticLevel.Error + ) { + super(); + if (location instanceof FilePosition) location = new FileRange(location.path, location.position, location.position); + this.location = location; + } -function diagToString(this: Diagnostic) { - const { path, start: [line, column] } = this.location; - return `${DiagnosticLevel[this.level]}: ${this.message} (${path}:${line}:${column})`; + diagToString(): string { + const { path, start: [line, column] } = this.location; + return `${DiagnosticLevel[this.level]}: ${this.message} (${path}:${line}:${column})`; + } } diff --git a/src/parser/index.ts b/src/parser/index.ts index 89cb680..94b82d9 100644 --- a/src/parser/index.ts +++ b/src/parser/index.ts @@ -1,11 +1,15 @@ import { ModuleRoot } from '~/syntax'; import { createTokenStream } from './lexer'; -import { Parser } from './parser'; +import { createParser } from './parser'; import { SyntaxEnvironment } from '~/syntax/environment'; +import { Diagnostic } from '~/core'; -export function parseModule(path: string): ModuleRoot { - const parser = Parser(createTokenStream(path)); +export function parseModule(path: string): { module: Optional, diagnostics: ReadonlyArray } { + const { tokens, diagnostics: _diags } = createTokenStream(path); + if (_diags.length) return { module: null, diagnostics: _diags }; + const parser = createParser(tokens); const env = SyntaxEnvironment(); - return parser.parse(env.ModuleRoot); + const { result: module, diagnostics } = parser.parse(env.ModuleRoot); + return { module, diagnostics }; } \ No newline at end of file diff --git a/src/parser/lexer/char-stream.ts b/src/parser/lexer/char-stream.ts index e291ad7..222e240 100644 --- a/src/parser/lexer/char-stream.ts +++ b/src/parser/lexer/char-stream.ts @@ -1,31 +1,63 @@ import { openSync as open, readSync as read } from 'fs'; import { StringDecoder } from 'string_decoder'; import { LazyList, NonEmptyLazyList, fromIterable, infList } from '~/utils/lazy-list'; -import { FilePosition } from '~/core'; +import { FilePosition, CoreObject } from '~/core'; export type CharStream = EmptyCharStream | NonEmptyCharStream; -export interface EmptyCharStream { - readonly empty: true; - /** The file position of the end of the file */ - readonly position: FilePosition; +export class EmptyCharStream extends CoreObject { + readonly empty = true; + + constructor( + /** The file position of the end of the file */ + readonly position: FilePosition + ) { + super(); + } } -export interface NonEmptyCharStream { - readonly empty: false; - /** The file position of the next character in the stream */ - readonly position: FilePosition; +export class NonEmptyCharStream extends CoreObject { + readonly empty = false; + + constructor( + /** The file position of the next character in the stream */ + readonly position: FilePosition, + private readonly list: NonEmptyLazyList + ) { + super(); + } + /** Reads one character from the 
stream and returns it */ - readonly first: () => string; + first(): string { + return this.list.head; + } + /** Reads one character from the stream, and returns it with the remaining stream */ - readonly read: () => { char: string, stream: CharStream }; - /** Reads as many characters from the stream as possible, up to {count} */ - readonly forceRead: (count: number) => { chars: string, stream: CharStream }; -} + read(): { char: string, stream: CharStream } { + const char = this.list.head; + const empty = this.list.tail.empty; + const position = char === '\n' ? this.position.nextLine() : this.position.nextColumn(); + if (empty) return { char, stream: new EmptyCharStream(position) }; + return { + char, + stream: this.clone({ list: this.list.tail, position }), + } + } -interface InternalCharStream extends NonEmptyCharStream { - readonly list: NonEmptyLazyList; + /** Reads as many characters from the stream as possible, up to {count} */ + forceRead(count: number): { chars: string, stream: CharStream } { + // if we don't need any more, return the base of the recursion + if (count === 0) return { chars: '', stream: this }; + // read one from the front + const { char, stream } = this.read(); + // if it's now empty, just return that + if (stream.empty) return { chars: char, stream }; + // otherwise we've reached the recursion state, descend one level + const { chars, stream: stream1 } = stream.forceRead(count - 1); + // prepend the current character + return { chars: char + chars, stream: stream1 }; + } } /** @@ -57,41 +89,6 @@ export default function createCharStream(path: string): CharStream { const list = createByteStream(path) .flatMap(byte => decoder.write(byte)) .concat(fromIterable(decoder.end())); - if (list.empty) return { empty: true, position: FilePosition(path, [1, 1]) }; - return { - empty: false, - list, - position: FilePosition(path, [1, 1]), - read: readChar, - first: readFirst, - forceRead, - } as InternalCharStream; -} - -function readChar(this: InternalCharStream): { char: string, stream: CharStream } { - const char = this.list.head; - const empty = this.list.tail.empty; - const position = char === '\n' ? 
this.position.nextLine() : this.position.nextColumn(); - if (empty) return { char, stream: { empty: true, position } }; - return { - char, - stream: { ...this, list: this.list.tail, position } as InternalCharStream, - } -} - -function readFirst(this: InternalCharStream): string { - return this.list.head; -} - -function forceRead(this: InternalCharStream, count: number): { chars: string, stream: CharStream } { - // if we don't need any more, return the base of the recursion - if (count === 0) return { chars: '', stream: this }; - // read one from the front - const { char, stream } = this.read(); - // if it's now empty, just return that - if (stream.empty) return { chars: char, stream }; - // otherwise we've reached the recursion state, descend one level - const { chars, stream: stream1 } = stream.forceRead(count - 1); - // prepend the current character - return { chars: char + chars, stream: stream1 }; + if (list.empty) return new EmptyCharStream(new FilePosition(path, [1, 1])); + return new NonEmptyCharStream(new FilePosition(path, [1, 1]), list); } diff --git a/src/parser/lexer/lexer-state.ts b/src/parser/lexer/lexer-state.ts index 36aed14..1097ddf 100644 --- a/src/parser/lexer/lexer-state.ts +++ b/src/parser/lexer/lexer-state.ts @@ -1,4 +1,4 @@ -import { FilePosition } from '~/core'; +import { FilePosition, CoreObject } from '~/core'; import { TokenType, Token } from './token'; import { CharStream, EmptyCharStream, NonEmptyCharStream } from './char-stream'; @@ -8,70 +8,84 @@ export interface TokenResult { remaining: CharStream; } -// #region IfHasNextOperation +class IfHasNextOperation extends CoreObject { + constructor( + private readonly previous: LexerState, + readonly result: Optional = null + ) { super(); } -interface IfHasNextOperation { - readonly previous: LexerState; - readonly result: Optional; /** * In the event of a false result for the previous ifHasNext(), try again with a different predicate. * This method can chain so that the first successful predicate will propagate through to the last else. */ - readonly elseIf: (count: number, pred: (values: string[]) => boolean, - then: (state: LexerState, accepted: string) => LexerState - ) => IfHasNextOperation; - /** - * In the event of a false result for the previous ifHasNext(), return an alternate result. - * This method will end a chain, so the first successful result will return from this method. - */ - readonly else: (fn: (state: LexerState) => LexerState) => LexerState; -} - -function IfHasNextOperation(previous: LexerState, result: Optional = null): IfHasNextOperation { - return { previous, result, elseIf: IfHasNextOperation.elseIf, else: IfHasNextOperation._else }; -} - -namespace IfHasNextOperation { - /** - * If we already have a result, then skip this else-if and return it so it propagates to the end. - * Otherwise, execute ifHasNext() on the original with the new parameters. - */ - export function elseIf(this: IfHasNextOperation, count: number, pred: (values: string[]) => boolean, - then: (state: LexerState, accepted: string) => LexerState - ) { + elseIf(count: number, pred: (values: string[]) => boolean, then: (state: LexerState, accepted: string) => LexerState): IfHasNextOperation { + // If we already have a result, then skip this else-if and return it so it propagates to the end. if (this.result) return this; + // Otherwise, execute ifHasNext() on the original with the new parameters. return this.previous.ifHasNext(count, pred, then); } /** - * End of the chain, if a previous predicate yielded a result, return it. 
- * Otherwise return the alternate. + * In the event of a false result for the previous ifHasNext(), return an alternate result. + * This method will end a chain, so the first successful result will return from this method. */ - export function _else(this: IfHasNextOperation, fn: (state: LexerState) => LexerState) { + else(fn: (state: LexerState) => LexerState): LexerState { + // End of the chain, if a previous predicate yielded a result, return it. if (this.result) return this.result; + // Otherwise return the alternate. return fn(this.previous); } } -// #endregion - +/** + * Tracks the state for the consumption of one token. + * EmptyLexerState and NonEmptyLexerState have the same properties, except: + * - empty is true for Empty, false for NonEmpty (the discriminant) + * - stream is empty for Empty, non-empty for NonEmpty + * - consume() is available for NonEmpty, not for Empty + */ export type LexerState = EmptyLexerState | NonEmptyLexerState; -interface LexerStateBase { +export function LexerState(position: FilePosition, char: string, stream: CharStream): LexerState { + if (stream.empty) return new EmptyLexerState(position, char, stream); + return new NonEmptyLexerState(position, char, stream); +} + +abstract class LexerStateBase extends CoreObject { + abstract readonly empty: boolean; + abstract readonly stream: CharStream; + /** The expected resulting type of token, can be changed with setType() */ - readonly type: TokenType; - /** The start position of the token */ - readonly position: FilePosition; + readonly type: TokenType = TokenType.NONE; /** The progressing image of the consumed token, can be appended to with consume() */ readonly image: string; /** The expeceted resulting value of the token, can be set with setValue() */ readonly value?: any; + + constructor( + /** The start position of the token */ + readonly position: FilePosition, + char: string, + ) { + super(); + this.image = char; + } + /** Returns a new LexerState with the provided type */ - readonly setType: (type: TokenType) => LexerState; + setType(type: TokenType): LexerState { + return this.clone({ type }) as LexerState; + } + /** Returns a new LexerState with a value based on the current image */ - readonly setValue: (fn: (image: string) => any) => LexerState; + setValue(fn: (image: string) => any): LexerState { + return this.clone({ value: fn(this.image) }) as LexerState; + } + /** Returns a new LexerState with a value based on the current value */ - readonly mapValue: (fn: (value: any) => any) => LexerState; + mapValue(fn: (value: any) => any): LexerState { + return this.clone({ value: fn(this.value) }) as LexerState; + } + /** * This is a very useful tool to handle conditional consumption. * First, specify the number of characters from the stream you wish to analyze. @@ -81,48 +95,61 @@ interface LexerStateBase { * and the string of the accepted characters, which should return the state to return from the operation. * This method will return a chainable object that can be used to append more checks in the instance that one fails. 
*/ - readonly ifHasNext: (count: number, pred: (values: string[]) => boolean, + ifHasNext( + count: number, + pred: (values: string[]) => boolean, then: (state: LexerState, accepted: string) => LexerState - ) => IfHasNextOperation; + ): IfHasNextOperation { + const state = this as LexerState; + // not enough chars + if (state.empty) return new IfHasNextOperation(state); + const { chars, stream } = state.stream.forceRead(count); + // not enough chars + if (chars.length !== count) return new IfHasNextOperation(state); + // predicate deemed it not so + if (!pred([...chars])) return new IfHasNextOperation(state); + // predicate deemed it so + const image = state.image + chars; + const result = (stream.empty + ? { ...state, empty: true, image, stream } + : { ...state, empty: false, image, stream }) as LexerState; + return new IfHasNextOperation(state, then(result, chars)); + } + + /** Returns a completed token and remaining stream based on this LexerState */ - readonly finish: () => TokenResult; + finish(): TokenResult { + return { + final: new Token(this.type, this.position, this.image, this.value), + remaining: this.stream + }; + } } -interface EmptyLexerState extends LexerStateBase { +class EmptyLexerState extends LexerStateBase { /** Determines whether this LexerState is empty */ - readonly empty: true; - /** The remaining available character stream */ - readonly stream: EmptyCharStream; + readonly empty = true; + + constructor( + position: FilePosition, + char: string, + /** The remaining available character stream */ + readonly stream: EmptyCharStream + ) { super(position, char); } } -interface NonEmptyLexerState extends LexerStateBase { +class NonEmptyLexerState extends LexerStateBase { /** Determines whether this LexerState is empty */ - readonly empty: false; - /** The remaining available character stream */ - readonly stream: NonEmptyCharStream; - /** Consumes at least one character from the stream and appends it to the image, returning a new LexerState */ - readonly consume: (count?: number) => LexerState; -} - -/** - * Tracks the state for the consumption of one token. - * EmptyLexerState and NonEmptyLexerState have the same properties, except: - * - empty is true for Empty, false for NonEmpty (the discriminant) - * - stream is empty for Empty, non-empty for NonEmpty - * - consume() is available for NonEmpty, not for Empty - */ -export function LexerState(position: FilePosition, char: string, stream: CharStream) { - return LexerState.init(position, char, stream); -} + readonly empty = false; -export namespace LexerState { - export function init(position: FilePosition, char: string, stream: CharStream): LexerState { - const base = { type: TokenType.NONE, position, image: char, setType, setValue, mapValue, ifHasNext, finish }; - if (stream.empty) return { ...base, empty: true, stream }; - return { ...base, empty: false, stream, consume }; - } + constructor( + position: FilePosition, + char: string, + /** The remaining available character stream */ + readonly stream: NonEmptyCharStream + ) { super(position, char); } - function consume(this: NonEmptyLexerState, count = 1): LexerState { + consume(count = 1): LexerState { let chars: string, stream: CharStream; if (count === 1) { ({ char: chars, stream } = this.stream.read()); @@ -130,43 +157,8 @@ export namespace LexerState { ({ chars, stream } = this.stream.forceRead(count)); } const image = this.image + chars; - return (stream.empty - ? 
{ ...this, empty: true, image, stream } - : { ...this, empty: false, image, stream }) as LexerState; - } - - function setType(this: LexerState, type: TokenType): LexerState { - return { ...this, type }; - } - - function setValue(this: LexerState, fn: (image: string) => any): LexerState { - return { ...this, value: fn(this.image) }; - } - - function mapValue(this: LexerState, fn: (value: any) => any): LexerState { - return { ...this, value: fn(this.value) }; - } - - function ifHasNext(this: LexerState, count: number, pred: (values: string[]) => boolean, - then: (state: LexerState, accepted: string) => LexerState - ): IfHasNextOperation { - const state = this; - // not enough chars - if (state.empty) return IfHasNextOperation(state); - const { chars, stream } = state.stream.forceRead(count); - // not enough chars - if (chars.length !== count) return IfHasNextOperation(state); - // predicate deemed it not so - if (!pred([...chars])) return IfHasNextOperation(state); - // predicate deemed it so - const image = state.image + chars; - const result = (stream.empty - ? { ...state, empty: true, image, stream } - : { ...state, empty: false, image, stream }) as LexerState; - return IfHasNextOperation(state, then(result, chars)); - } - - function finish(this: LexerState) { - return { final: Token(this.type, this.position, this.image, this.value), remaining: this.stream }; + return stream.empty + ? new EmptyLexerState(this.position, image, stream) + : this.clone({ empty: false, image, stream }) as LexerState; } } diff --git a/src/parser/lexer/lexer.ts b/src/parser/lexer/lexer.ts index 14f9244..ce355b4 100644 --- a/src/parser/lexer/lexer.ts +++ b/src/parser/lexer/lexer.ts @@ -98,10 +98,15 @@ const IGNORED_TYPES = [TokenType.COMMENT, TokenType.WHITESPACE]; * Reads a stream of characters from the file at the specified path and performs lexical analysis on the stream, * returning a stream of tokens. 
*/ -export function createTokenStream(path: string, ignoreMode = true): LazyList { - const list = consumeTokens(createCharStream(path)); - if (!ignoreMode) return list; - return list.filter(t => !IGNORED_TYPES.includes(t.type)); +export function createTokenStream(path: string, ignoreMode = true): { tokens: LazyList, diagnostics: ReadonlyArray } { + try { + const list = consumeTokens(createCharStream(path)); + if (!ignoreMode) return { tokens: list, diagnostics: [] }; + return { tokens: list.filter(t => !IGNORED_TYPES.includes(t.type)), diagnostics: [] }; + } catch (err) { + if (!(err instanceof BoxError)) throw err; + return { tokens: empty(), diagnostics: [err.value] }; + } } /** @@ -118,7 +123,7 @@ function consumeTokens(charStream: CharStream): LazyList { */ function consumeToken(charStream: CharStream): TokenResult { // stream is empty, return the final EOF token - if (charStream.empty) return { final: Token(TokenType.EOF, charStream.position, ''), remaining: charStream }; + if (charStream.empty) return { final: new Token(TokenType.EOF, charStream.position, ''), remaining: charStream }; // read a single character from the stream const { char, stream } = charStream.read(); @@ -143,7 +148,7 @@ function consumeToken(charStream: CharStream): TokenResult { state => consumeWhitespace(state.setType(TokenType.WHITESPACE))) .else(() => { // otherwise it is not a valid character (for now) - throw new BoxError(Diagnostic(`Invalid character '${char}'`, charStream.position)); + throw new BoxError(new Diagnostic(`Invalid character '${char}'`, charStream.position)); }) .finish(); } @@ -175,7 +180,7 @@ enum MutliLineCommentState { */ function consumeMultiLineComment(pending: LexerState, state = MutliLineCommentState.START): LexerState { // we can't use ifHasNext() here because a) we need tail recursion b) we have a state parameter - if (pending.empty) throw new BoxError(Diagnostic('Unterminated comment', pending.stream.position)); + if (pending.empty) throw new BoxError(new Diagnostic('Unterminated comment', pending.stream.position)); const first = pending.stream.first(); let nextState = state; if (state === MutliLineCommentState.START) { @@ -272,7 +277,7 @@ const ESCAPE: { readonly [key: string]: string } = { n: '\n', r: '\r', t: '\t', * Literals of character sequences */ function consumeStringLiteral(pending: LexerState): LexerState { - if (pending.empty) throw new BoxError(Diagnostic('Unterminated string', pending.stream.position)); + if (pending.empty) throw new BoxError(new Diagnostic('Unterminated string', pending.stream.position)); const next = pending // end of string .ifHasNext(1, ([c]) => c === '"', state => state) @@ -306,10 +311,10 @@ function consumeStringLiteral(pending: LexerState): LexerState { * Literals of single characters */ function consumeCharLiteral(pending: LexerState): LexerState { - if (pending.empty) throw new BoxError(Diagnostic('Unterminated character', pending.stream.position)); + if (pending.empty) throw new BoxError(new Diagnostic('Unterminated character', pending.stream.position)); const next = pending .ifHasNext(1, ([c]) => c === "'", - () => { throw new BoxError(Diagnostic('Empty character', pending.stream.position)) }) + () => { throw new BoxError(new Diagnostic('Empty character', pending.stream.position)) }) // basic escape codes .elseIf(2, ([c1, c2]) => c1 === '\\' && 'nrtfbv'.includes(c2), (state, cs) => state.setValue(() => ESCAPE[cs[1]])) @@ -334,7 +339,7 @@ function consumeCharLiteral(pending: LexerState): LexerState { // the next character must absolutely be 
a ' and nothing else return next .ifHasNext(1, ([c]) => c == "'", state => state) - .else(() => { throw new BoxError(Diagnostic('Unterminated character', next.stream.position)) }); + .else(() => { throw new BoxError(new Diagnostic('Unterminated character', next.stream.position)) }); } function consumeSymbol(pending: LexerState): LexerState { diff --git a/src/parser/lexer/token.ts b/src/parser/lexer/token.ts index c108bc4..ef79325 100644 --- a/src/parser/lexer/token.ts +++ b/src/parser/lexer/token.ts @@ -1,4 +1,4 @@ -import { FilePosition, FileRange } from '~/core'; +import { FilePosition, FileRange, CoreObject } from '~/core'; /** @@ -28,53 +28,20 @@ export enum TokenType { * 'image' is an exact copy of the token from the original source string. * 'value' is an optional value that represents the parsed value of the token, if it makes sense for the token type (numbers, strings, etc.). */ -export interface Token { - readonly type: TokenType; +export class Token extends CoreObject { readonly location: FileRange; - readonly image: string; - readonly value?: any; - toString(): string; - with(props: Partial): Token; -} - -/** Creates a new token */ -export function Token(type: TokenType, position: FilePosition, image: string, value?: any): Token { - return Token.create(type, position, image, value); -} - -export namespace Token { - const tokenSymbol = Symbol('Token'); - - /** Creates a new token */ - export function create(type: TokenType, position: FilePosition, image: string, value?: any): Token { - const token: Token = { - type, image, value, location: position.computeRange(image), - toString, with: _with, - }; - // separate symbol assignment so that we catch excessive property errors above - return { ...token, [tokenSymbol]: tokenSymbol } as Token; - } - - /** - * Creates a new token that has no type, useful for creating tokens after parsing is done - * and types don't matter anymore. - */ - export function fromLocation(position: FilePosition, image: string) { - return create(TokenType.NONE, position, image); - } - /** - * Determines if an object is a token. 
- */ - export function isToken(token: {}): token is Token { - return tokenSymbol in token; + constructor( + readonly type: TokenType, + position: FilePosition, + readonly image: string, + readonly value?: any + ) { + super(); + this.location = position.computeRange(image); } - function toString(this: Token) { + toString() { return this.image; } - - function _with(this: Token, props: Partial) { - return { ...this, ...props }; - } } diff --git a/src/parser/parser.ts b/src/parser/parser.ts index 619a803..ff8c100 100644 --- a/src/parser/parser.ts +++ b/src/parser/parser.ts @@ -1,4 +1,4 @@ -import { FileRange } from '~/core'; +import { FileRange, Diagnostic, CoreObject } from '~/core'; import { Token, TokenType } from '~/parser/lexer'; import { LazyList, NonEmptyLazyList, EmptyLazyList } from '~/utils/lazy-list'; @@ -14,68 +14,66 @@ export interface ParseResult { interface ParseResultInternal { result: Optional; - remaining: ParserInternal; + remaining: Parser; } // #region Parser -export interface Parser { - parse(fn: ParseFunc): T; -} +abstract class ParserBase extends CoreObject { + abstract readonly empty: boolean; + abstract readonly tokens: LazyList; -interface BaseParserInternal extends Parser { - readonly failToken: Optional; - readonly successLocation: Optional; - fail(token: Optional): ParserInternal; - succeed(location: Optional): ParserInternal; -} - -interface NonEmptyParserInternal extends BaseParserInternal { - readonly empty: false; - readonly tokens: NonEmptyLazyList; - next(): { token: Token, remaining: ParserInternal }; -} + readonly failToken: Optional = null; + readonly successLocation: Optional = null; -interface EmptyParserInternal extends BaseParserInternal { - readonly empty: true; - readonly tokens: EmptyLazyList; -} + fail(token: Optional): Parser { + return this.clone({ failToken: token, successLocation: null }) as Parser; + } -type ParserInternal = NonEmptyParserInternal | EmptyParserInternal; + succeed(location: Optional): Parser { + return this.clone({ successLocation: location, failToken: null }) as Parser; + } -export function Parser(tokenStream: LazyList): Parser { - return Parser.init(tokenStream); + parse(fn: ParseFunc): { result: Optional, diagnostics: ReadonlyArray } { + const { result, remaining } = fn(this as Parser) as ParseResultInternal; + if (!remaining.empty) throw new Error('Unprocessed input remains, you likely need to include an EOF in your syntax definition'); + return { + result, + // TODO: proper error system + diagnostics: remaining.failToken + ? 
[new Diagnostic(`Unexpected "${remaining.failToken.image}" token`, remaining.failToken.location)] + : [] + }; + } } -export namespace Parser { - export function init(tokenStream: LazyList): Parser { - const common = { parse, succeed, fail, successLocation: null, failToken: null }; - if (tokenStream.empty) { - const parser: EmptyParserInternal = { ...common, empty: true, tokens: tokenStream }; - return parser; - } else { - const parser: NonEmptyParserInternal = { ...common, empty: false, tokens: tokenStream, next }; - return parser; - } - } +class NonEmptyParser extends ParserBase { + readonly empty: false = false; - function parse(this: ParserInternal, fn: ParseFunc): T { - const { result, remaining } = fn(this) as ParseResultInternal; - if (!result) throw new Error('FAIL: unable to parse'); // TODO this is not how this should work - if (!remaining.empty) throw new Error('FAIL: unprocessed input remains'); // TODO pass token - return result; + constructor(readonly tokens: NonEmptyLazyList) { + super(); } - function next(this: NonEmptyParserInternal): { token: Token, remaining: ParserInternal } { - return { token: this.tokens.head, remaining: init(this.tokens.tail) as ParserInternal }; + next(): { token: Token, remaining: Parser } { + return { token: this.tokens.head, remaining: createParser(this.tokens.tail) }; } +} - function succeed(this: ParserInternal, location: Optional): ParserInternal { - return { ...this, successLocation: location, failToken: null }; +class EmptyParser extends ParserBase { + readonly empty: true = true; + + constructor(readonly tokens: EmptyLazyList) { + super(); } +} - function fail(this: ParserInternal, token: Optional): ParserInternal { - return { ...this, failToken: token, successLocation: null }; +export type Parser = NonEmptyParser | EmptyParser; + +export function createParser(tokenStream: LazyList): Parser { + if (tokenStream.empty) { + return new EmptyParser(tokenStream); + } else { + return new NonEmptyParser(tokenStream); } } @@ -117,7 +115,7 @@ export function repeat(fn: ParseFunc, key: RepeatKey, sep?: ParseFunc { const results: T[] = []; - let next = parser as ParserInternal; + let next = parser; let location: Optional = null; while (true) { const { result, remaining } = fn(next) as ParseResultInternal; @@ -144,7 +142,7 @@ export function seq(...args: any[]): ParseFunc { const fns = args.slice(0, args.length - 1) as ParseFunc<{}>[]; const toResult = args[args.length - 1] as (s: Array>, location: FileRange) => T; - return (parser: ParserInternal) => { + return (parser) => { let next = parser; const results: Array> = []; let location: Optional = null; @@ -161,7 +159,7 @@ export function seq(...args: any[]): ParseFunc { export function tok(image: string): ParseFunc; export function tok(type: TokenType): ParseFunc; export function tok(t: string | TokenType): ParseFunc { - return (parser: ParserInternal) => { + return (parser) => { if (parser.empty) throw new Error('token stream was empty'); const { token, remaining } = parser.next(); if (typeof t === 'string') { @@ -174,7 +172,7 @@ export function tok(t: string | TokenType): ParseFunc { } export function select(...fns: ParseFunc[]): ParseFunc { - return (parser: ParserInternal) => { + return (parser) => { for (const fn of fns) { const { result, remaining } = fn(parser) as ParseResultInternal; if (remaining.failToken) continue; diff --git a/src/runner.ts b/src/runner.ts index 93adbf4..c414c0c 100644 --- a/src/runner.ts +++ b/src/runner.ts @@ -1,19 +1,14 @@ -import { mapSet } from '~/utils/utils'; -import { 
parseModule } from '~/parser'; -import { Diagnostic, DiagnosticLevel } from '~/core'; +import { DiagnosticLevel } from '~/core'; +import typecheck from '~/typecheck'; +import { Program } from '~/typecheck/program'; /** * Runs the program at the given (absolute) path with the provided arguments. */ export function runProgram(path: string, args: string[]) { - let program: Program = { - modules: new Map(), - diagnostics: [], - addModule, - }; - // add the main module (this will be a recursive operation for any imported modules) - program = program.addModule(path); + // perform type checking on the specified path, which will enumerate all modules in the Program + const program = typecheck(path); // we will eventually provide a verbosity option, but for now just set it to Message const diags = program.diagnostics.filter(d => d.level >= DiagnosticLevel.Message); const errCount = diags.count(d => d.level >= DiagnosticLevel.Error); @@ -31,27 +26,12 @@ export function runProgram(path: string, args: string[]) { const suffix = warnCount > 0 ? ' with warnings' : ''; process.stderr.write(`\nCompilation succeeded${suffix}\n\n`); } - // compiled successfully, run the main module - return runModule(program, path, args); + // semantically good, translate the program + const executable = translate(program); + // compiled successfully, run the program + return interpret(executable, args); } -export function addModule(this: Program, path: string): Program { - const { module, diagnostics } = parseModule(path); - return { - ...this, - modules: mapSet(this.modules, path, module), - diagnostics: [...this.diagnostics, ...diagnostics], - } -} - -export function runModule(program: Program, path: string, args: string[]): number { - const module = program.modules.get(path); -} - -export interface Program { - readonly modules: ReadonlyMap; - readonly diagnostics: ReadonlyArray; - readonly addModule: typeof addModule; -} - -export interface Module {} +// TODO +const translate = (_program: Program) => ({}); +const interpret = (_executable: {}, _args: string[]) => 0; diff --git a/src/syntax/ModuleRoot.ts b/src/syntax/ModuleRoot.ts index 90429e1..24d10e0 100644 --- a/src/syntax/ModuleRoot.ts +++ b/src/syntax/ModuleRoot.ts @@ -5,8 +5,7 @@ import { ExportDeclaration, ExportForwardDeclaration } from '~/syntax'; import { ParseFunc, seq, repeat, select, tok } from '~/parser/parser'; -export interface ModuleRoot extends NodeBase { - readonly syntaxType: SyntaxType.ModuleRoot; +export interface ModuleRoot extends NodeBase { readonly imports: ReadonlyArray; readonly declarations: ReadonlyArray; } diff --git a/src/syntax/declarations/ConstantDeclaration.ts b/src/syntax/declarations/ConstantDeclaration.ts index 2ba0600..8345d8d 100644 --- a/src/syntax/declarations/ConstantDeclaration.ts +++ b/src/syntax/declarations/ConstantDeclaration.ts @@ -3,8 +3,7 @@ import { ParseFunc, seq, tok, optional } from '~/parser/parser'; import { TokenType, Token } from '~/parser/lexer'; -export interface ConstantDeclaration extends NodeBase { - syntaxType: SyntaxType.ConstantDeclaration; +export interface ConstantDeclaration extends NodeBase { name: Optional; value: Expression; } diff --git a/src/syntax/declarations/ExportDeclaration.ts b/src/syntax/declarations/ExportDeclaration.ts index 954c4ad..efb27db 100644 --- a/src/syntax/declarations/ExportDeclaration.ts +++ b/src/syntax/declarations/ExportDeclaration.ts @@ -21,8 +21,7 @@ interface Export { readonly value?: Declaration; } -export interface ExportDeclaration extends NodeBase { - readonly 
syntaxType: SyntaxType.ExportDeclaration; +export interface ExportDeclaration extends NodeBase { readonly exports: ReadonlyArray; } @@ -37,7 +36,7 @@ export function register(Declaration: ParseFunc) { ([_, def, value], location) => ({ location, syntaxType: SyntaxType.ExportDeclaration as SyntaxType.ExportDeclaration, - exports: Token.isToken(value) + exports: value instanceof Token ? [{ exportName: def, valueName: value }] : [{ exportName: def, valueName: value.name, value }] }) diff --git a/src/syntax/declarations/ExportForwardDeclaration.ts b/src/syntax/declarations/ExportForwardDeclaration.ts index 9541905..270c2de 100644 --- a/src/syntax/declarations/ExportForwardDeclaration.ts +++ b/src/syntax/declarations/ExportForwardDeclaration.ts @@ -9,8 +9,7 @@ export interface Forward { readonly exportName: Token; } -export interface ExportForwardDeclaration extends NodeBase { - readonly syntaxType: SyntaxType.ExportForwardDeclaration; +export interface ExportForwardDeclaration extends NodeBase { readonly moduleName: Token; readonly forwards: ReadonlyArray; } diff --git a/src/syntax/declarations/FunctionDeclaration.ts b/src/syntax/declarations/FunctionDeclaration.ts index a0d5fd1..1edcdf0 100644 --- a/src/syntax/declarations/FunctionDeclaration.ts +++ b/src/syntax/declarations/FunctionDeclaration.ts @@ -5,14 +5,12 @@ import { Token, TokenType } from '~/parser/lexer'; import { Block } from '~/syntax'; -export interface Param extends NodeBase { - readonly syntaxType: SyntaxType.Param; +export interface Param extends NodeBase { readonly name: Token; readonly typeNode: Optional; // optional to support lambda params } -export interface FunctionDeclaration extends NodeBase { - readonly syntaxType: SyntaxType.FunctionDeclaration; +export interface FunctionDeclaration extends NodeBase { readonly returnType: TypeNode; readonly name: Optional; readonly typeParams: ReadonlyArray; diff --git a/src/syntax/declarations/ImportDeclaration.ts b/src/syntax/declarations/ImportDeclaration.ts index ef6d33a..a1584d5 100644 --- a/src/syntax/declarations/ImportDeclaration.ts +++ b/src/syntax/declarations/ImportDeclaration.ts @@ -34,7 +34,7 @@ const NamedImports: ParseFunc = seq( WildcardImport ), '+', tok(',')), tok('}'), - ([_1, names, _2]) => names.map(n => Token.isToken(n) ? { importName: n, aliasName: n } : n) + ([_1, names, _2]) => names.map(n => n instanceof Token ? 
{ importName: n, aliasName: n } : n) ); /** @@ -67,8 +67,7 @@ interface Import { aliasName: Token; } -export interface ImportDeclaration extends NodeBase { - readonly syntaxType: SyntaxType.ImportDeclaration; +export interface ImportDeclaration extends NodeBase { readonly moduleName: Token; readonly imports: ReadonlyArray; } @@ -90,4 +89,4 @@ export const ImportDeclaration: ParseFunc = seq( }) ); -const defaultImport = (token: Token) => ({ importName: token.with({ image: 'default' }), aliasName: token }); +const defaultImport = (token: Token) => ({ importName: token.clone({ image: 'default' }), aliasName: token }); diff --git a/src/syntax/declarations/TypeDeclaration.ts b/src/syntax/declarations/TypeDeclaration.ts index 8ec65d6..f1765b1 100644 --- a/src/syntax/declarations/TypeDeclaration.ts +++ b/src/syntax/declarations/TypeDeclaration.ts @@ -3,8 +3,7 @@ import { ParseFunc, seq, optional, select, tok, repeat } from '~/parser/parser'; import { TokenType, Token } from '~/parser/lexer'; -export interface TypeParam extends NodeBase { - readonly syntaxType: SyntaxType.TypeParam; +export interface TypeParam extends NodeBase { readonly name: Token; readonly varianceOp: Optional; readonly typeConstraint: Optional; @@ -14,8 +13,7 @@ export interface TypeParamList { readonly params: ReadonlyArray; } -export interface TypeDeclaration extends NodeBase { - readonly syntaxType: SyntaxType.TypeDeclaration; +export interface TypeDeclaration extends NodeBase { readonly name: Optional; readonly typeParams: ReadonlyArray; readonly typeNode: TypeNode; diff --git a/src/syntax/environment.ts b/src/syntax/environment.ts index f286446..a149c3f 100644 --- a/src/syntax/environment.ts +++ b/src/syntax/environment.ts @@ -59,61 +59,63 @@ import { register as register_ModuleRoot } from './ModuleRoot'; */ export enum SyntaxType { // #region module - ModuleRoot, - ImportDeclaration, - ExportDeclaration, - ExportForwardDeclaration, + ModuleRoot = 'ModuleRoot', + ImportDeclaration = 'ImportDeclaration', + ExportDeclaration = 'ExportDeclaration', + ExportForwardDeclaration = 'ExportForwardDeclaration', // #endregion // #region declarations - TypeDeclaration, - TypeParam, - FunctionDeclaration, - Param, - ConstantDeclaration, + TypeDeclaration = 'TypeDeclaration', + FunctionDeclaration = 'FunctionDeclaration', + ConstantDeclaration = 'ConstantDeclaration', // #endregion // #region types - BuiltInType, - StructType, - TupleType, - ArrayType, - FunctionType, - UnionType, - IdentifierType, - ParenthesizedType, - SpecificType, - NamespaceAccessType, + BuiltInType = 'BuiltInType', + StructType = 'StructType', + TupleType = 'TupleType', + ArrayType = 'ArrayType', + FunctionType = 'FunctionType', + UnionType = 'UnionType', + IdentifierType = 'IdentifierType', + ParenthesizedType = 'ParenthesizedType', + SpecificType = 'SpecificType', + NamespaceAccessType = 'NamespaceAccessType', // #endregion // #region expressions - IntegerLiteral, - FloatLiteral, - CharLiteral, - BoolLiteral, - StringLiteral, - StructLiteral, - TupleLiteral, - ArrayLiteral, - IdentifierExpression, - ParenthesizedExpression, - VarDeclaration, - UnaryExpression, - BinaryExpression, - FunctionApplication, - ArrayAccess, - FieldAccess, - IfElseExpression, - LambdaExpression, + IntegerLiteral = 'IntegerLiteral', + FloatLiteral = 'FloatLiteral', + CharLiteral = 'CharLiteral', + BoolLiteral = 'BoolLiteral', + StringLiteral = 'StringLiteral', + StructLiteral = 'StructLiteral', + TupleLiteral = 'TupleLiteral', + ArrayLiteral = 'ArrayLiteral', + IdentifierExpression = 
'IdentifierExpression', + ParenthesizedExpression = 'ParenthesizedExpression', + VarDeclaration = 'VarDeclaration', + UnaryExpression = 'UnaryExpression', + BinaryExpression = 'BinaryExpression', + FunctionApplication = 'FunctionApplication', + ArrayAccess = 'ArrayAccess', + FieldAccess = 'FieldAccess', + IfElseExpression = 'IfElseExpression', + LambdaExpression = 'LambdaExpression', // #endregion // #region statements - Block, - ExpressionStatement, - ForStatement, - WhileStatement, - DoWhileStatement, - TryCatchStatement, - ReturnStatement, - ThrowStatement, - BreakStatement, - ContinueStatement, + Block = 'Block', + ExpressionStatement = 'ExpressionStatement', + ForStatement = 'ForStatement', + WhileStatement = 'WhileStatement', + DoWhileStatement = 'DoWhileStatement', + TryCatchStatement = 'TryCatchStatement', + ReturnStatement = 'ReturnStatement', + ThrowStatement = 'ThrowStatement', + BreakStatement = 'BreakStatement', + ContinueStatement = 'ContinueStatement', + // #endregion + // #region other + TypeParam = 'TypeParam', + Param = 'Param', // #endregion } @@ -275,15 +277,11 @@ export function SyntaxEnvironment() { * The base type of all syntax nodes. * All nodes have: * - a location (range of text in a file) - * - * All nodes also have a node type (of type SyntaxType). - * However, specifying that here does nothing and is actually - * more likely to hide errors than reveal them. The Node type - * expects every node type to have a 'nodeType' field defined, - * so it will be very clear if one doesn't. + * - a syntax type (the discriminant for the various node union types) */ -export interface NodeBase { +export interface NodeBase { readonly location: FileRange; + readonly syntaxType: K; } /** diff --git a/src/syntax/expressions/ArrayAccess.ts b/src/syntax/expressions/ArrayAccess.ts index f354a2c..730fec5 100644 --- a/src/syntax/expressions/ArrayAccess.ts +++ b/src/syntax/expressions/ArrayAccess.ts @@ -2,14 +2,12 @@ import { NodeBase, SyntaxType, Expression } from '~/syntax/environment'; import { ParseFunc, seq, tok } from '~/parser/parser'; -export interface ArrayAccess extends NodeBase { - syntaxType: SyntaxType.ArrayAccess; +export interface ArrayAccess extends NodeBase { target: Expression; index: Expression; } -export interface ArrayAccessSuffix extends NodeBase { - syntaxType: SyntaxType.ArrayAccess; +export interface ArrayAccessSuffix extends NodeBase { index: Expression; setBase(target: Expression): ArrayAccess; } diff --git a/src/syntax/expressions/ArrayLiteral.ts b/src/syntax/expressions/ArrayLiteral.ts index 2035369..98a5fcd 100644 --- a/src/syntax/expressions/ArrayLiteral.ts +++ b/src/syntax/expressions/ArrayLiteral.ts @@ -2,8 +2,7 @@ import { SyntaxType, Expression, NodeBase } from '~/syntax/environment'; import { ParseFunc, seq, tok, repeat } from '~/parser/parser'; -export interface ArrayLiteral extends NodeBase { - syntaxType: SyntaxType.ArrayLiteral; +export interface ArrayLiteral extends NodeBase { items: ReadonlyArray; } diff --git a/src/syntax/expressions/BinaryExpression.ts b/src/syntax/expressions/BinaryExpression.ts index 764e6e0..ca97606 100644 --- a/src/syntax/expressions/BinaryExpression.ts +++ b/src/syntax/expressions/BinaryExpression.ts @@ -4,15 +4,13 @@ import { Token, TokenType } from '~/parser/lexer'; import { ParseFunc, seq, repeat, tok } from '~/parser/parser'; -export interface BinaryExpression extends NodeBase { - syntaxType: SyntaxType.BinaryExpression; +export interface BinaryExpression extends NodeBase { left: Expression; symbol: Token; right: 
Expression; } -export interface BinaryExpressionSuffix extends NodeBase { - syntaxType: SyntaxType.BinaryExpression; +export interface BinaryExpressionSuffix extends NodeBase { symbol: Token; right: Expression; setBase(left: Expression): BinaryExpression; @@ -56,7 +54,7 @@ function resolvePrecedence(exp: BinaryExpression) { const operStack: Token[] = []; while (items.length) { const item = items.shift() as (Expression | Token); - if (!Token.isToken(item)) { + if (!(item instanceof Token)) { expStack.push(item); } else { while (operStack.length && shouldPopOperator(item, operStack[operStack.length - 1])) { @@ -82,14 +80,14 @@ function shouldPopOperator(nextToken: Token, stackToken: Token) { function binaryExpressionToList(exp: BinaryExpression) { const items: (Token | Expression)[] = []; // the tree is left-associative, so we assemble the list from right to left - let operToken = createNewOperToken(exp.symbol); + let operToken = exp.symbol.clone(); let left = exp.left, right = exp.right; while (true) { items.unshift(right); items.unshift(operToken); if (left.syntaxType === SyntaxType.BinaryExpression) { right = left.right; - operToken = createNewOperToken(left.symbol); + operToken = left.symbol.clone(); left = left.left; } else { items.unshift(left); @@ -99,10 +97,6 @@ function binaryExpressionToList(exp: BinaryExpression) { return items; } -function createNewOperToken(tok: Token) { - return tok.with({}); -} - function createNewBinExpression(right: Expression, left: Expression, oper: Token) { return { syntaxType: SyntaxType.BinaryExpression as SyntaxType.BinaryExpression, diff --git a/src/syntax/expressions/BoolLiteral.ts b/src/syntax/expressions/BoolLiteral.ts index 399dad6..e5e0550 100644 --- a/src/syntax/expressions/BoolLiteral.ts +++ b/src/syntax/expressions/BoolLiteral.ts @@ -3,8 +3,7 @@ import { Token } from '~/parser/lexer'; import { ParseFunc, seq, select, tok } from '~/parser/parser'; -export interface BoolLiteral extends NodeBase { - syntaxType: SyntaxType.BoolLiteral; +export interface BoolLiteral extends NodeBase { value: Token; } diff --git a/src/syntax/expressions/CharLiteral.ts b/src/syntax/expressions/CharLiteral.ts index e6cc0ec..c8d6c73 100644 --- a/src/syntax/expressions/CharLiteral.ts +++ b/src/syntax/expressions/CharLiteral.ts @@ -3,8 +3,7 @@ import { Token, TokenType } from '~/parser/lexer'; import { ParseFunc, seq, tok } from '~/parser/parser'; -export interface CharLiteral extends NodeBase { - syntaxType: SyntaxType.CharLiteral; +export interface CharLiteral extends NodeBase { value: Token; } diff --git a/src/syntax/expressions/FieldAccess.ts b/src/syntax/expressions/FieldAccess.ts index 8751b15..92a95f4 100644 --- a/src/syntax/expressions/FieldAccess.ts +++ b/src/syntax/expressions/FieldAccess.ts @@ -3,14 +3,12 @@ import { Token, TokenType } from '~/parser/lexer'; import { ParseFunc, seq, tok } from '~/parser/parser'; -export interface FieldAccess extends NodeBase { - syntaxType: SyntaxType.FieldAccess; +export interface FieldAccess extends NodeBase { target: Expression; field: Token; } -export interface FieldAccessSuffix extends NodeBase { - syntaxType: SyntaxType.FieldAccess; +export interface FieldAccessSuffix extends NodeBase { field: Token; setBase(target: Expression): FieldAccess; } diff --git a/src/syntax/expressions/FloatLiteral.ts b/src/syntax/expressions/FloatLiteral.ts index b440a9c..4285d46 100644 --- a/src/syntax/expressions/FloatLiteral.ts +++ b/src/syntax/expressions/FloatLiteral.ts @@ -3,8 +3,7 @@ import { Token, TokenType } from '~/parser/lexer'; 
import { ParseFunc, seq, tok } from '~/parser/parser'; -export interface FloatLiteral extends NodeBase { - syntaxType: SyntaxType.FloatLiteral; +export interface FloatLiteral extends NodeBase { value: Token; } diff --git a/src/syntax/expressions/FunctionApplication.ts b/src/syntax/expressions/FunctionApplication.ts index aadf354..7419d2c 100644 --- a/src/syntax/expressions/FunctionApplication.ts +++ b/src/syntax/expressions/FunctionApplication.ts @@ -2,15 +2,13 @@ import { NodeBase, SyntaxType, Expression, TypeNode } from '~/syntax/environment import { ParseFunc, seq, tok, optional, repeat } from '~/parser/parser'; -export interface FunctionApplication extends NodeBase { - syntaxType: SyntaxType.FunctionApplication; +export interface FunctionApplication extends NodeBase { target: Expression; typeArgs: TypeNode[]; args: Expression[]; } -export interface FunctionApplicationSuffix extends NodeBase { - syntaxType: SyntaxType.FunctionApplication; +export interface FunctionApplicationSuffix extends NodeBase { typeArgs: TypeNode[]; args: Expression[]; setBase(target: Expression): FunctionApplication; diff --git a/src/syntax/expressions/IdentifierExpression.ts b/src/syntax/expressions/IdentifierExpression.ts index 7265d0e..8ae7d3a 100644 --- a/src/syntax/expressions/IdentifierExpression.ts +++ b/src/syntax/expressions/IdentifierExpression.ts @@ -3,8 +3,7 @@ import { Token, TokenType } from '~/parser/lexer'; import { ParseFunc, seq, tok } from '~/parser/parser'; -export interface IdentifierExpression extends NodeBase { - syntaxType: SyntaxType.IdentifierExpression; +export interface IdentifierExpression extends NodeBase { name: Token; } diff --git a/src/syntax/expressions/IfElseExpression.ts b/src/syntax/expressions/IfElseExpression.ts index 345f44e..80bdf6c 100644 --- a/src/syntax/expressions/IfElseExpression.ts +++ b/src/syntax/expressions/IfElseExpression.ts @@ -2,8 +2,7 @@ import { NodeBase, SyntaxType, Expression } from '~/syntax/environment'; import { ParseFunc, seq, tok } from '~/parser/parser'; -export interface IfElseExpression extends NodeBase { - syntaxType: SyntaxType.IfElseExpression; +export interface IfElseExpression extends NodeBase { condition: Expression; consequent: Expression; alternate: Expression; diff --git a/src/syntax/expressions/IntegerLiteral.ts b/src/syntax/expressions/IntegerLiteral.ts index 84e313c..8c1e33c 100644 --- a/src/syntax/expressions/IntegerLiteral.ts +++ b/src/syntax/expressions/IntegerLiteral.ts @@ -3,8 +3,7 @@ import { Token, TokenType } from '~/parser/lexer'; import { ParseFunc, seq, tok } from '~/parser/parser'; -export interface IntegerLiteral extends NodeBase { - syntaxType: SyntaxType.IntegerLiteral; +export interface IntegerLiteral extends NodeBase { value: Token; } diff --git a/src/syntax/expressions/LambdaExpression.ts b/src/syntax/expressions/LambdaExpression.ts index a691c26..d1861d1 100644 --- a/src/syntax/expressions/LambdaExpression.ts +++ b/src/syntax/expressions/LambdaExpression.ts @@ -4,8 +4,7 @@ import { Token, TokenType } from '~/parser/lexer'; import { ParseFunc, seq, tok, repeat, select } from '~/parser/parser'; -export interface LambdaExpression extends NodeBase { - syntaxType: SyntaxType.LambdaExpression; +export interface LambdaExpression extends NodeBase { params: ReadonlyArray; body: Expression | Statement; } @@ -26,7 +25,7 @@ export function register(Param: ParseFunc, FunctionBody: ParseFunc ({ syntaxType: SyntaxType.LambdaExpression as SyntaxType.LambdaExpression, location, - params: params.map(p => Token.isToken(p) ? 
lambdaParam(p) : p), + params: params.map(p => p instanceof Token ? lambdaParam(p) : p), body }) ); diff --git a/src/syntax/expressions/ParenthesizedExpression.ts b/src/syntax/expressions/ParenthesizedExpression.ts index 965fbb3..4030f5a 100644 --- a/src/syntax/expressions/ParenthesizedExpression.ts +++ b/src/syntax/expressions/ParenthesizedExpression.ts @@ -2,8 +2,7 @@ import { NodeBase, SyntaxType, Expression } from '~/syntax/environment'; import { ParseFunc, seq, tok } from '~/parser/parser'; -export interface ParenthesizedExpression extends NodeBase { - syntaxType: SyntaxType.ParenthesizedExpression; +export interface ParenthesizedExpression extends NodeBase { inner: Expression; } diff --git a/src/syntax/expressions/StringLiteral.ts b/src/syntax/expressions/StringLiteral.ts index 33c9a6b..e2e0187 100644 --- a/src/syntax/expressions/StringLiteral.ts +++ b/src/syntax/expressions/StringLiteral.ts @@ -3,8 +3,7 @@ import { Token, TokenType } from '~/parser/lexer'; import { ParseFunc, seq, tok } from '~/parser/parser'; -export interface StringLiteral extends NodeBase { - syntaxType: SyntaxType.StringLiteral; +export interface StringLiteral extends NodeBase { value: Token; } diff --git a/src/syntax/expressions/StructLiteral.ts b/src/syntax/expressions/StructLiteral.ts index 396f752..aff77f2 100644 --- a/src/syntax/expressions/StructLiteral.ts +++ b/src/syntax/expressions/StructLiteral.ts @@ -8,8 +8,7 @@ interface StructEntry { value: Expression; } -export interface StructLiteral extends NodeBase { - syntaxType: SyntaxType.StructLiteral; +export interface StructLiteral extends NodeBase { entries: ReadonlyArray; } diff --git a/src/syntax/expressions/TupleLiteral.ts b/src/syntax/expressions/TupleLiteral.ts index 2755765..20b42d7 100644 --- a/src/syntax/expressions/TupleLiteral.ts +++ b/src/syntax/expressions/TupleLiteral.ts @@ -2,8 +2,7 @@ import { NodeBase, SyntaxType, Expression } from '~/syntax/environment'; import { ParseFunc, seq, tok, repeat } from '~/parser/parser'; -export interface TupleLiteral extends NodeBase { - syntaxType: SyntaxType.TupleLiteral; +export interface TupleLiteral extends NodeBase { items: ReadonlyArray; } diff --git a/src/syntax/expressions/UnaryExpression.ts b/src/syntax/expressions/UnaryExpression.ts index 7823e69..e25d85f 100644 --- a/src/syntax/expressions/UnaryExpression.ts +++ b/src/syntax/expressions/UnaryExpression.ts @@ -4,15 +4,13 @@ import { ParseFunc, seq, tok, repeat } from '~/parser/parser'; import { verifyMultiOperator } from '~/runtime/operators'; -export interface UnaryExpression extends NodeBase { - syntaxType: SyntaxType.UnaryExpression; +export interface UnaryExpression extends NodeBase { target: Expression; symbol: Token; prefix: boolean; } -export interface PostfixExpressionSuffix extends NodeBase { - syntaxType: SyntaxType.UnaryExpression; +export interface PostfixExpressionSuffix extends NodeBase { symbol: Token; prefix: false; setBase(target: Expression): UnaryExpression; diff --git a/src/syntax/expressions/VarDeclaration.ts b/src/syntax/expressions/VarDeclaration.ts index 3f583f7..4018aa3 100644 --- a/src/syntax/expressions/VarDeclaration.ts +++ b/src/syntax/expressions/VarDeclaration.ts @@ -3,8 +3,7 @@ import { Token, TokenType } from '~/parser/lexer'; import { ParseFunc, seq, tok } from '~/parser/parser'; -export interface VarDeclaration extends NodeBase { - syntaxType: SyntaxType.VarDeclaration; +export interface VarDeclaration extends NodeBase { name: Token; init: Expression; } diff --git a/src/syntax/index.ts b/src/syntax/index.ts index 
9f06a82..a77dc63 100644 --- a/src/syntax/index.ts +++ b/src/syntax/index.ts @@ -6,4 +6,6 @@ export * from './statements'; export * from './declarations'; export { ModuleRoot } from './ModuleRoot'; -export { default as INodeVisitor } from './visitors/interfaces/INodeVisitor'; +export { Declaration, TypeNode, Expression, Statement } from './environment'; + +export * from './visitor'; diff --git a/src/syntax/statements/Block.ts b/src/syntax/statements/Block.ts index 3e5c485..60ebb59 100644 --- a/src/syntax/statements/Block.ts +++ b/src/syntax/statements/Block.ts @@ -2,8 +2,7 @@ import { NodeBase, SyntaxType, Statement } from '~/syntax/environment'; import { ParseFunc, seq, tok, repeat } from '~/parser/parser'; -export interface Block extends NodeBase { - syntaxType: SyntaxType.Block; +export interface Block extends NodeBase { statements: ReadonlyArray; } diff --git a/src/syntax/statements/BreakStatement.ts b/src/syntax/statements/BreakStatement.ts index 0556048..675cab0 100644 --- a/src/syntax/statements/BreakStatement.ts +++ b/src/syntax/statements/BreakStatement.ts @@ -3,8 +3,7 @@ import { Token, TokenType } from '~/parser/lexer'; import { ParseFunc, seq, tok, optional } from '~/parser/parser'; -export interface BreakStatement extends NodeBase { - syntaxType: SyntaxType.BreakStatement; +export interface BreakStatement extends NodeBase { loopNumber: Optional; } diff --git a/src/syntax/statements/ContinueStatement.ts b/src/syntax/statements/ContinueStatement.ts index c17d1f7..8b3a961 100644 --- a/src/syntax/statements/ContinueStatement.ts +++ b/src/syntax/statements/ContinueStatement.ts @@ -3,8 +3,7 @@ import { Token, TokenType } from '~/parser/lexer'; import { ParseFunc, seq, tok, optional } from '~/parser/parser'; -export interface ContinueStatement extends NodeBase { - syntaxType: SyntaxType.ContinueStatement; +export interface ContinueStatement extends NodeBase { loopNumber: Optional; } diff --git a/src/syntax/statements/DoWhileStatement.ts b/src/syntax/statements/DoWhileStatement.ts index ed7409c..b0fe07b 100644 --- a/src/syntax/statements/DoWhileStatement.ts +++ b/src/syntax/statements/DoWhileStatement.ts @@ -2,8 +2,7 @@ import { NodeBase, SyntaxType, Statement, Expression } from '~/syntax/environmen import { ParseFunc, seq, tok } from '~/parser/parser'; -export interface DoWhileStatement extends NodeBase { - syntaxType: SyntaxType.DoWhileStatement; +export interface DoWhileStatement extends NodeBase { body: Statement; condition: Expression; } diff --git a/src/syntax/statements/ExpressionStatement.ts b/src/syntax/statements/ExpressionStatement.ts index 56260ac..93520aa 100644 --- a/src/syntax/statements/ExpressionStatement.ts +++ b/src/syntax/statements/ExpressionStatement.ts @@ -2,8 +2,7 @@ import { NodeBase, SyntaxType, Expression } from '~/syntax/environment'; import { ParseFunc, seq } from '~/parser/parser'; -export interface ExpressionStatement extends NodeBase { - syntaxType: SyntaxType.ExpressionStatement; +export interface ExpressionStatement extends NodeBase { expression: Expression; } diff --git a/src/syntax/statements/ForStatement.ts b/src/syntax/statements/ForStatement.ts index aba5463..1220f98 100644 --- a/src/syntax/statements/ForStatement.ts +++ b/src/syntax/statements/ForStatement.ts @@ -3,8 +3,7 @@ import { Token, TokenType } from '~/parser/lexer'; import { ParseFunc, seq, tok } from '~/parser/parser'; -export interface ForStatement extends NodeBase { - syntaxType: SyntaxType.ForStatement; +export interface ForStatement extends NodeBase { variable: Token; iterable: 
Expression; body: Statement; diff --git a/src/syntax/statements/ReturnStatement.ts b/src/syntax/statements/ReturnStatement.ts index 2a01804..de2c5ff 100644 --- a/src/syntax/statements/ReturnStatement.ts +++ b/src/syntax/statements/ReturnStatement.ts @@ -2,8 +2,7 @@ import { NodeBase, SyntaxType, Expression } from '~/syntax/environment'; import { ParseFunc, seq, tok, optional } from '~/parser/parser'; -export interface ReturnStatement extends NodeBase { - syntaxType: SyntaxType.ReturnStatement; +export interface ReturnStatement extends NodeBase { exp: Optional; } diff --git a/src/syntax/statements/ThrowStatement.ts b/src/syntax/statements/ThrowStatement.ts index 1721e24..9ae082c 100644 --- a/src/syntax/statements/ThrowStatement.ts +++ b/src/syntax/statements/ThrowStatement.ts @@ -2,8 +2,7 @@ import { NodeBase, SyntaxType, Expression } from '~/syntax/environment'; import { ParseFunc, seq, tok } from '~/parser/parser'; -export interface ThrowStatement extends NodeBase { - syntaxType: SyntaxType.ThrowStatement; +export interface ThrowStatement extends NodeBase { exp: Expression; } diff --git a/src/syntax/statements/TryCatchStatement.ts b/src/syntax/statements/TryCatchStatement.ts index eca4cfd..0812678 100644 --- a/src/syntax/statements/TryCatchStatement.ts +++ b/src/syntax/statements/TryCatchStatement.ts @@ -8,8 +8,7 @@ interface Catch { body: Statement; } -export interface TryCatchStatement extends NodeBase { - syntaxType: SyntaxType.TryCatchStatement; +export interface TryCatchStatement extends NodeBase { try: Statement; catches: ReadonlyArray; finally: Optional; diff --git a/src/syntax/statements/WhileStatement.ts b/src/syntax/statements/WhileStatement.ts index 05cde07..1d94b66 100644 --- a/src/syntax/statements/WhileStatement.ts +++ b/src/syntax/statements/WhileStatement.ts @@ -2,8 +2,7 @@ import { NodeBase, SyntaxType, Expression, Statement } from '~/syntax/environmen import { ParseFunc, seq, tok } from '~/parser/parser'; -export interface WhileStatement extends NodeBase { - syntaxType: SyntaxType.WhileStatement; +export interface WhileStatement extends NodeBase { condition: Expression; body: Statement; } diff --git a/src/syntax/types/ArrayType.ts b/src/syntax/types/ArrayType.ts index 5a30f41..e1a8f37 100644 --- a/src/syntax/types/ArrayType.ts +++ b/src/syntax/types/ArrayType.ts @@ -2,13 +2,11 @@ import { NodeBase, SyntaxType, TypeNode } from '~/syntax/environment'; import { ParseFunc, seq, tok } from '~/parser/parser'; -export interface ArrayType extends NodeBase { - syntaxType: SyntaxType.ArrayType; +export interface ArrayType extends NodeBase { baseType: TypeNode; } -export interface ArrayTypeSuffix extends NodeBase { - syntaxType: SyntaxType.ArrayType; +export interface ArrayTypeSuffix extends NodeBase { setBase(baseType: TypeNode): ArrayType; } diff --git a/src/syntax/types/BuiltInType.ts b/src/syntax/types/BuiltInType.ts index feca739..c094dad 100644 --- a/src/syntax/types/BuiltInType.ts +++ b/src/syntax/types/BuiltInType.ts @@ -3,8 +3,7 @@ import { NodeBase, SyntaxType } from '~/syntax/environment'; import { Token } from '~/parser/lexer'; -export interface BuiltInType extends NodeBase { - syntaxType: SyntaxType.BuiltInType; +export interface BuiltInType extends NodeBase { name: Token; } diff --git a/src/syntax/types/FunctionType.ts b/src/syntax/types/FunctionType.ts index b90e40a..917a3b0 100644 --- a/src/syntax/types/FunctionType.ts +++ b/src/syntax/types/FunctionType.ts @@ -2,8 +2,7 @@ import { seq, tok, ParseFunc, repeat } from '~/parser/parser'; import { TypeNode, NodeBase, 
SyntaxType } from '~/syntax/environment'; -export interface FunctionType extends NodeBase { - syntaxType: SyntaxType.FunctionType; +export interface FunctionType extends NodeBase { paramTypes: TypeNode[]; returnType: TypeNode; } diff --git a/src/syntax/types/IdentifierType.ts b/src/syntax/types/IdentifierType.ts index acc8dcd..9288a29 100644 --- a/src/syntax/types/IdentifierType.ts +++ b/src/syntax/types/IdentifierType.ts @@ -4,8 +4,7 @@ import { seq, tok, ParseFunc } from '~/parser/parser'; -export interface IdentifierType extends NodeBase { - syntaxType: SyntaxType.IdentifierType; +export interface IdentifierType extends NodeBase { name: Token; } diff --git a/src/syntax/types/NamespaceAccessType.ts b/src/syntax/types/NamespaceAccessType.ts index a26f1a0..60d375d 100644 --- a/src/syntax/types/NamespaceAccessType.ts +++ b/src/syntax/types/NamespaceAccessType.ts @@ -3,14 +3,12 @@ import { Token, TokenType } from '~/parser/lexer'; import { ParseFunc, seq, tok } from '~/parser/parser'; -export interface NamespaceAccessType extends NodeBase { - syntaxType: SyntaxType.NamespaceAccessType; +export interface NamespaceAccessType extends NodeBase { baseType: TypeNode; typeName: Token; } -export interface NamespaceAccessTypeSuffix extends NodeBase { - syntaxType: SyntaxType.NamespaceAccessType; +export interface NamespaceAccessTypeSuffix extends NodeBase { typeName: Token; setBase(baseType: TypeNode): NamespaceAccessType; } diff --git a/src/syntax/types/ParenthesizedType.ts b/src/syntax/types/ParenthesizedType.ts index 8bab0a6..e15501b 100644 --- a/src/syntax/types/ParenthesizedType.ts +++ b/src/syntax/types/ParenthesizedType.ts @@ -2,8 +2,7 @@ import { ParseFunc, seq, tok } from '~/parser/parser'; import { TypeNode, NodeBase, SyntaxType } from '~/syntax/environment'; -export interface ParenthesizedType extends NodeBase { - syntaxType: SyntaxType.ParenthesizedType; +export interface ParenthesizedType extends NodeBase { inner: TypeNode; } diff --git a/src/syntax/types/SpecificType.ts b/src/syntax/types/SpecificType.ts index 5174e0d..3a52a5d 100644 --- a/src/syntax/types/SpecificType.ts +++ b/src/syntax/types/SpecificType.ts @@ -2,14 +2,12 @@ import { NodeBase, SyntaxType, TypeNode } from '~/syntax/environment'; import { ParseFunc, seq, repeat, tok } from '~/parser/parser'; -export interface SpecificType extends NodeBase { - syntaxType: SyntaxType.SpecificType; +export interface SpecificType extends NodeBase { typeNode: TypeNode; typeArgs: ReadonlyArray; } -export interface SpecificTypeSuffix extends NodeBase { - syntaxType: SyntaxType.SpecificType; +export interface SpecificTypeSuffix extends NodeBase { typeArgs: ReadonlyArray; setBase(typeNode: TypeNode): SpecificType; } diff --git a/src/syntax/types/StructType.ts b/src/syntax/types/StructType.ts index 73596a5..13047f0 100644 --- a/src/syntax/types/StructType.ts +++ b/src/syntax/types/StructType.ts @@ -3,14 +3,12 @@ import { Token, TokenType } from '~/parser/lexer'; import { ParseFunc, seq, tok, repeat } from '~/parser/parser'; - interface Field { typeNode: TypeNode; name: Token; } -export interface StructType extends NodeBase { - syntaxType: SyntaxType.StructType; +export interface StructType extends NodeBase { fields: ReadonlyArray; } diff --git a/src/syntax/types/TupleType.ts b/src/syntax/types/TupleType.ts index 9508863..b2058ba 100644 --- a/src/syntax/types/TupleType.ts +++ b/src/syntax/types/TupleType.ts @@ -3,8 +3,7 @@ import { ParseFunc, seq, tok, repeat } from '~/parser/parser'; -export interface TupleType extends NodeBase { - syntaxType: 
SyntaxType.TupleType; +export interface TupleType extends NodeBase { types: ReadonlyArray; } diff --git a/src/syntax/types/UnionType.ts b/src/syntax/types/UnionType.ts index a90ac56..f3872bf 100644 --- a/src/syntax/types/UnionType.ts +++ b/src/syntax/types/UnionType.ts @@ -2,14 +2,12 @@ import { NodeBase, SyntaxType, TypeNode } from '~/syntax/environment'; import { ParseFunc, seq, tok } from '~/parser/parser'; -export interface UnionType extends NodeBase { - syntaxType: SyntaxType.UnionType; +export interface UnionType extends NodeBase { left: TypeNode; right: TypeNode; } -export interface UnionTypeSuffix extends NodeBase { - syntaxType: SyntaxType.UnionType; +export interface UnionTypeSuffix extends NodeBase { right: TypeNode; setBase(left: TypeNode): UnionType; } diff --git a/src/syntax/visitor.ts b/src/syntax/visitor.ts new file mode 100644 index 0000000..3989f56 --- /dev/null +++ b/src/syntax/visitor.ts @@ -0,0 +1,41 @@ +import { Declaration, TypeNode, Expression, Statement, Node } from '~/syntax/environment'; + + +/** + * Describes a visitor for a specific set of node types. + * This visitor type is designed to work in a pure functional manner, + * so each visitor accepts a node of the given type, and a value + * of the return type, and should return a processed version of that + * value. For example, a type checking visitor might be: + * + * type TypeCheckVisitor = NodeVisitor; + * + * And then each visitor function would be defined as: + * + * const visitor: TypeCheckVisitor = { + * ... + * [SyntaxType.Something]: (node: Something, checker: TypeChecker): TypeChecker => { ... } + * ... + * }; + * + * so the input checker would do some processing based on the node, + * and return a new checker incorporating that node. + * + * This generic Visitor type can visit any set of node types; + * the other types exported by this module are predefined for the + * known sets of node types. + */ +export type Visitor = { + [P in N['syntaxType']]: (node: N, thing: T) => T; +}; + +/** A visitor of declaration nodes */ +export type DeclarationVisitor = Visitor; +/** A visitor of type nodes */ +export type TypeNodeVisitor = Visitor; +/** A visitor of expression nodes */ +export type ExpressionVisitor = Visitor; +/** A visitor of statement nodes */ +export type StatementVisitor = Visitor; +/** A visitor of all node types */ +export type NodeVisitor = Visitor; diff --git a/src/typecheck/checker.ts b/src/typecheck/checker.ts new file mode 100644 index 0000000..9a80585 --- /dev/null +++ b/src/typecheck/checker.ts @@ -0,0 +1,123 @@ +import { Diagnostic, FileRange } from '~/core'; +import { Program } from './program'; +import { parseModule } from '~/parser'; +import { ModuleVisitor } from './node-visitors/module-visitor'; +import { Declaration, ModuleRoot } from '~/syntax'; +import { TypeCheckErrorContext } from './error-context'; + + +export interface TypeChecker { + /** + * Top-level interface for type checking. + * Pass the path of an entry-point of a program, and get a fully type-checked + * Program as a result. The Program will contain any errors found during checking, + * and a reference to every successfully parsed module. + */ + check(path: string): Program; + /** + * Add an error to this type checker, using the specified + * error generator function. This function will be passed + * a context object that contains several built-in message + * generator functions. 
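+ * For example, a call site reporting an unresolved module might look like
+ * (sketch; `moduleNameToken` is a hypothetical Token for the offending name):
+ *
+ *   const next = checker.error(ctx => ctx.noModule(moduleNameToken));
+ *
+ * The call returns a new TypeChecker whose diagnostics include the generated
+ * Diagnostic; the original checker is left unchanged.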
+ */ + error(fn: (ctx: TypeCheckErrorContext) => Diagnostic): TypeChecker; + /** + * Given an absolute path to a module file, parse the module + * and add it to the type checker's internal module registry. + */ + parseModule(path: string): TypeChecker; + /** + * Add a diagnostic to the type checker. + */ + addDiagnostic(diagnostic: Diagnostic): TypeChecker; + /** + * Add a list of diagnostics to the type checker. + */ + addDiagnostics(diagnostics: ReadonlyArray): TypeChecker; +} + +interface NameEntry { + moduleId: number; + location: FileRange; +} + +interface TypeCheckerInternal extends TypeChecker { + readonly diagnostics: ReadonlyArray; + readonly modules: ReadonlyArray; + readonly declarations: ReadonlyArray; + readonly names: ReadonlyMap>; + readonly exports: ReadonlyMap>; + readonly errorContext: TypeCheckErrorContext; + createProgram(): Program; +} + +export function TypeChecker() { + return TypeChecker.init(); +} + +export namespace TypeChecker { + export function init(): TypeChecker { + const checker: TypeCheckerInternal = { + diagnostics: [], + modules: [], + declarations: [], + names: new Map(), + exports: new Map(), + errorContext: TypeCheckErrorContext, + check, + error, + parseModule: _parseModule, + addDiagnostic, + addDiagnostics, + createProgram, + }; + return checker; + } + + function check(this: TypeCheckerInternal, path: string): Program { + // we can't do anything until we have a parsed module, so do that first + const withModule = this.parseModule(path) as TypeCheckerInternal; + // if there is no module, there was a parse error, and we should return right away + if (!withModule.modules.length) return withModule.createProgram(); + // 1st pass: resolve all names + const module = withModule.modules[0]; + const firstPass = ModuleVisitor[module.syntaxType](module, this) as TypeCheckerInternal; + // 2nd pass: resolve all types + const secondPass = firstPass.declarations.reduce((tc, d) => DeclarationTypeVisitor[d.syntaxType](d, tc), firstPass); + // 3rd pass: handle name clashes (overloads are valid for all declarations) + const thirdPass = secondPass.modules.reduce((tc, m) => NameClashVisitor[m.syntaxType](m, tc), secondPass); + // everything has been type checked, return the program + return thirdPass.createProgram(); + } + + function error(this: TypeCheckerInternal, fn: (ctx: TypeCheckErrorContext) => Diagnostic) { + return this.addDiagnostic(fn(this.errorContext)); + } + + function _parseModule(this: TypeCheckerInternal, path: string) { + const { module, diagnostics } = parseModule(path); + return { + ...this, + modules: module ? [...this.modules, module] : this.modules, + diagnostics: [...this.diagnostics, ...diagnostics] + }; + } + + function addDiagnostic(this: TypeCheckerInternal, diagnostic: Diagnostic) { + return this.addDiagnostics([diagnostic]); + } + + function addDiagnostics(this: TypeCheckerInternal, diagnostics: ReadonlyArray) { + return { + ...this, + diagnostics: [...this.diagnostics, ...diagnostics] + } + } + + function createProgram(this: TypeCheckerInternal) { + return new Program().clone({ + // TODO: modules? 
+ diagnostics: this.diagnostics + }); + } +} \ No newline at end of file diff --git a/src/typecheck/error-context.ts b/src/typecheck/error-context.ts new file mode 100644 index 0000000..93a8e18 --- /dev/null +++ b/src/typecheck/error-context.ts @@ -0,0 +1,48 @@ +import { Token } from '~/parser/lexer'; +import { FileRange, Diagnostic } from '~/core'; +import { TType, TParam, TUnknown } from './types'; +import { BreakStatement, ContinueStatement, UnaryExpression, BinaryExpression, Declaration } from '~/syntax'; + + +/** + * A context for adding type errors for a specific module. + * Pass the list of errors from a TypeChecker instance. + */ +const _context = { + // #region name resolution errors, don't need to return unknown + noModule: (name: Token) => new Diagnostic(`Module "${name.value}" does not exist`, name.location), + noModuleExport: (path: string, name: Token) => new Diagnostic(`Module "${path}" does not have an export with name "${name.image}"`, name.location), + exportClash: (name: Token) => new Diagnostic(`An export with name "${name.image}" is already declared`, name.location), + noName: (decl: Declaration) => { + const { path, start } = decl.location; + const location = new FileRange(path, start, start); + return new Diagnostic('Declarations that are not part of a default export must have a name', location); + }, + declNameClash: (name: Token) => new Diagnostic(`The name ${name.image} is already declared.`, name.location), + // #endregion + // #region type resolution errors, should return unknown + circularDependency: (location: FileRange) => new Diagnostic('Circular dependency found', location), + typeMismatch: (from: TType, to: TType, location: FileRange) => new Diagnostic(`Type "${from}" is not assignable to type "${to}"`, location), + nameClash: (name: Token) => new Diagnostic(`A value with name "${name.image}" is already declared`, name.location), + typeNotDefined: (name: Token) => new Diagnostic(`Type "${name.image}" is not defined`, name.location), + valueNotDefined: (name: Token) => new Diagnostic(`Value "${name.image}" is not defined`, name.location), + notGeneric: (location: FileRange) => new Diagnostic('Type is not generic', location), + notArray: (location: FileRange) => new Diagnostic('Cannot access index of a value that is not an array', location), + notNamespace: (location: FileRange) => new Diagnostic(`Type is not a namespace`, location), + notStruct: (location: FileRange) => new Diagnostic('Cannot access field of a value that is not a struct or a namespace', location), + notInvokable: (location: FileRange) => new Diagnostic('Cannot invoke a value that is not a function', location), + notGenericFunction: (location: FileRange) => new Diagnostic('Function is not generic', location), + invalidTypeArgCount: (expected: number, actual: number, location: FileRange) => new Diagnostic(`Invalid type argument count: expected ${expected}, actual ${actual}`, location), + invalidTypeArg: (arg: TType, param: TParam, location: FileRange) => new Diagnostic(`Type "${arg}" is not assignable to type parameter "${param.name}" with constraint "${param.constraint}"`, location), + invalidArgCount: (expected: number, actual: number, location: FileRange) => new Diagnostic(`Invalid argument count: expected ${expected}, actual ${actual}`, location), + invalidBreak: (location: FileRange) => new Diagnostic('"break" statement cannot be present outside loop', location), + invalidContinue: (location: FileRange) => new Diagnostic('"continue" statement cannot be present outside loop', location), + 
invalidLoopNum: (expected: number, node: BreakStatement | ContinueStatement) => new Diagnostic(`Invalid loop number ${node.loopNumber} in loop with depth ${expected}`, node.location), + invalidUnary: (exp: UnaryExpression, target: TType) => new Diagnostic(`Operator "${exp.symbol}" does not operate on type "${target}"`, exp.location), + invalidBinary: (exp: BinaryExpression, left: TType, right: TType) => new Diagnostic(`Operator "${exp.symbol}" does not operate on types "${left}" and "${right}"`, exp.location), + assocConflict: (oper1: string, oper2: string, location: FileRange) => new Diagnostic(`Precedence order between operators "${oper1}" and "${oper2}" could not be established because they have conflicting associativity`, location), + // #endregion +}; + +export type TypeCheckErrorContext = typeof _context; +export const TypeCheckErrorContext = _context; diff --git a/src/typecheck/index.ts b/src/typecheck/index.ts index 3e9587a..c7168e0 100644 --- a/src/typecheck/index.ts +++ b/src/typecheck/index.ts @@ -1,7 +1,7 @@ -import TypeChecker from './TypeChecker'; -import { Program } from '~/syntax'; +import { TypeChecker } from './checker'; -export default function typecheck(ast: Program, path: string) { - return new TypeChecker().check(ast, path); +export default function typecheck(path: string) { + const checker = TypeChecker(); + return checker.check(path); } diff --git a/src/typecheck/node-visitors/declaration-name-visitor.ts b/src/typecheck/node-visitors/declaration-name-visitor.ts new file mode 100644 index 0000000..e69de29 diff --git a/src/typecheck/node-visitors/module-visitor.ts b/src/typecheck/node-visitors/module-visitor.ts new file mode 100644 index 0000000..6bf2693 --- /dev/null +++ b/src/typecheck/node-visitors/module-visitor.ts @@ -0,0 +1,78 @@ +import { Visitor } from '~/syntax/visitor'; +import { ModuleRoot, ImportDeclaration, ExportDeclaration, ExportForwardDeclaration, TypeDeclaration, FunctionDeclaration, ConstantDeclaration } from '~/syntax'; +import { TypeChecker } from '~/typecheck/checker'; +import { SyntaxType, Declaration } from '~/syntax/environment'; +import resolveModule from '~/typecheck/resolver'; + + +type ModuleNode = ModuleRoot | ImportDeclaration | ExportDeclaration | ExportForwardDeclaration | Declaration; + +const isDeclaration = (node: ExportDeclaration | ExportForwardDeclaration | Declaration): node is Declaration => + ![SyntaxType.ExportDeclaration, SyntaxType.ExportForwardDeclaration].includes(node.syntaxType); + +/** + * This visitor is responsible for enumerating all modules and declarations in the program + * to prepare for import and export resolution in the next pass. + */ +export const ModuleVisitor: Visitor = { + [SyntaxType.ModuleRoot]: (node: ModuleRoot, checker: TypeChecker) => { + // process imports first to enumerate all modules + const withImports = node.imports.reduce((c, i) => ModuleVisitor[i.syntaxType](i, c), checker); + // process module-scoped declarations + const withDeclarations = node.declarations + .filter(isDeclaration) + .reduce((c, d) => ModuleVisitor[d.syntaxType](d, c), withImports); + // process exports last so all overloads are available + return node.declarations + .filter(d => !isDeclaration(d)) + .reduce((c, d) => ModuleVisitor[d.syntaxType](d, c), withDeclarations); + }, + /** + * An import declaration exposes an export of another module as a local name in the module + * containing the declaration. 
To process it, we need to resolve the imported module path, + * make sure that the requested export name exists, make sure that the requested alias name + * does not clash with any already declared names, and then add the name to the module, + * linking it to the exported declaration in the other module. + */ + [SyntaxType.ImportDeclaration]: (node: ImportDeclaration, checker: TypeChecker) => { + const currentModule = node.location.path; + // resolve the module + const importedModule = resolveModule(currentModule, node.moduleName.value); + // invalid module path specified + if (!importedModule) return checker.error(_ => _.noModule(node.moduleName)); + // make sure the module has been loaded + this.loadModule(importedModule); + // process the imports + let tc = checker; + for (const { importName, aliasName } of node.imports) { + // if wildcard, process it as a namespace, not an import + if (importName.image === '*') { + const namespace = new ast.NamespaceDeclaration(importedModule, aliasName, node.location); + namespace.visit(this); + continue; + } + // regular import, verify that the module exports the name + if (!this.getExport(importedModule, importName.image)) { + tc = tc.error(_ => _.noModuleExport(node.moduleName.value, importName)); + continue; + } + // register the alias name to the module using the imported export + this.link(currentModule, aliasName, importedModule, importName.image); + } + }, + [SyntaxType.ExportDeclaration]: (node: ExportDeclaration, checker: TypeChecker) => { + // + }, + [SyntaxType.ExportForwardDeclaration]: (node: ExportForwardDeclaration, checker: TypeChecker) => { + // + }, + [SyntaxType.TypeDeclaration]: (node: TypeDeclaration, checker: TypeChecker) => { + // + }, + [SyntaxType.FunctionDeclaration]: (node: FunctionDeclaration, checker: TypeChecker) => { + // + }, + [SyntaxType.ConstantDeclaration]: (node: ConstantDeclaration, checker: TypeChecker) => { + // + } +}; diff --git a/src/typecheck/program.ts b/src/typecheck/program.ts new file mode 100644 index 0000000..6568667 --- /dev/null +++ b/src/typecheck/program.ts @@ -0,0 +1,72 @@ +import { Diagnostic, CoreObject } from '~/core'; +import * as syntax from '~/syntax'; + + +/** + * A complete semantic program. This is the top-level data structure + * for the semantic process of the compiler. + */ +export class Program extends CoreObject { + readonly modules: ReadonlyMap = new Map(); + readonly declarations: ReadonlyArray = []; + readonly diagnostics: ReadonlyArray = []; +} + +/** + * A semantic container for a module in a program. + * A module contains a list of declarations (by name) accessible inside the module + * and a list of exports (by name) accessible from outside the module + */ +export class Module extends CoreObject { + readonly declarations: ReadonlyMap = new Map(); + readonly exports: ReadonlyMap = new Map(); +} + +/** + * For a given name, the mapping of resolved declaration id to the next link + * in the resolution chain. There are two kinds of mappings: + * - local: the end of a chain. the resolved declaration exists in this module. + * - import: pointer to another link in the chain, containing the module/export combination to look up next. 
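+ * For example (illustrative only): if module A imports 'x' from module B, and B
+ * re-exports 'x' from module C, where it is declared, then resolving 'x' from A
+ * follows two import mappings and ends at a local mapping:
+ *
+ *   'x' in A  ->  import(B, 'x')  ->  import(C, 'x')  ->  local (declared in C)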
+ */ +export interface MappingGroup { + readonly mappings: ReadonlyMap; +} + +interface LocalMapping { + kind: 'local'; +} + +interface ImportMapping { + kind: 'import'; + modulePath: string; + exportName: string; +} + +/** + * A semantic declaration is a node that is ultimately associated with a name, either: + * - a function + * - a type + * - a constant + * - a namespace, created via a wildcard import or export forward + */ +export type Declaration = FunctionDeclaration | TypeDeclaration | ConstantDeclaration | Namespace; + +export interface FunctionDeclaration { + kind: 'function'; + syntaxNode: syntax.FunctionDeclaration; +} + +export interface TypeDeclaration { + kind: 'type'; + syntaxNode: syntax.TypeDeclaration; +} + +export interface ConstantDeclaration { + kind: 'constant'; + syntaxNode: syntax.ConstantDeclaration; +} + +export interface Namespace { + kind: 'namespace'; + syntaxNode: syntax.ImportDeclaration | syntax.ExportForwardDeclaration; +} diff --git a/src_old/typecheck/resolveModule.ts b/src/typecheck/resolver.ts similarity index 100% rename from src_old/typecheck/resolveModule.ts rename to src/typecheck/resolver.ts diff --git a/src/utils/lazy-list.ts b/src/utils/lazy-list.ts index da64798..f32a960 100644 --- a/src/utils/lazy-list.ts +++ b/src/utils/lazy-list.ts @@ -10,51 +10,53 @@ import Lazy, { lazy } from '~/utils/lazy'; */ export type LazyList = NonEmptyLazyList | EmptyLazyList; -interface LazyListOperations extends Iterable { +abstract class AbstractLazyList implements Iterable { + abstract [Symbol.iterator](): Iterator; + // #region functional operations /** * Transforms this list by applying a mapper function to each item */ - map(mapper: (item: T) => R): LazyList; + abstract map(mapper: (item: T) => R): LazyList; /** * Applies a mapper function to each item, concatenating each yielded result into one list */ - flatMap(mapper: (item: T) => Iterable): LazyList; + abstract flatMap(mapper: (item: T) => Iterable): LazyList; /** * Creates a new list containing only items that return true for the predicate */ - filter(predicate: (item: T) => boolean): LazyList; + abstract filter(predicate: (item: T) => boolean): LazyList; /** * Starting with an initial value, combine all items in this list * into the value using a reducer function */ - reduce(reducer: (value: R, item: T) => R, init: R): R; + abstract reduce(reducer: (value: R, item: T) => R, init: R): R; /** * Same as reduce(), but the initial value used is the first item of the list */ - reduceSelf(reducer: (value: T, item: T) => T): T; + abstract reduceSelf(reducer: (value: T, item: T) => T): T; /** * Appends a list to the end of this one */ - concat(list: LazyList): LazyList; + abstract concat(list: LazyList): LazyList; /** * Prepends an item to this list, returning another list. 
* NOTE: This is a constant-time operation */ - prepend(item: T): LazyList; + abstract prepend(item: T): LazyList; /** * Copies each item of this list to a new one, stopping for the first item * that returns false for the specified predicate */ - takeWhile(predicate?: (item: T) => boolean): LazyList; + abstract takeWhile(predicate?: (item: T) => boolean): LazyList; // #endregion @@ -64,32 +66,23 @@ interface LazyListOperations extends Iterable { * Returns the specified number of items from the beginning * of this list, as well as the tail following the last item */ - shift(count: number): { values: T[], tail: LazyList }; + abstract shift(count: number): { values: T[], tail: LazyList }; /** * Returns only the specified number of items from the beginning * of this list. */ - peek(count: number): T[]; + abstract peek(count: number): T[]; // #endregion } -export interface NonEmptyLazyList extends LazyListOperations { - readonly empty: false; - readonly head: T; - readonly tail: LazyList; -} - -export interface EmptyLazyList extends LazyListOperations { - readonly empty: true; -} - -class LazyListImpl implements NonEmptyLazyList { - public readonly empty = false; +export class NonEmptyLazyList extends AbstractLazyList { + readonly empty = false; private readonly _tail: Lazy>; - constructor(public readonly head: T, getTail: () => LazyList) { + constructor(readonly head: T, getTail: () => LazyList) { + super(); this._tail = lazy(getTail); } @@ -106,18 +99,18 @@ class LazyListImpl implements NonEmptyLazyList { } public map(mapper: (item: T) => R): LazyList { - return new LazyListImpl(mapper(this.head), () => this.tail.map(mapper)); + return new NonEmptyLazyList(mapper(this.head), () => this.tail.map(mapper)); } public flatMap(mapper: (item: T) => Iterable): LazyList { const list = fromIterable(mapper(this.head)); return list.empty ? 
this.tail.flatMap(mapper) - : new LazyListImpl(list.head, () => list.tail.concat(this.tail.flatMap(mapper))); + : new NonEmptyLazyList(list.head, () => list.tail.concat(this.tail.flatMap(mapper))); } public filter(predicate: (item: T) => boolean): LazyList { - if (predicate(this.head)) return new LazyListImpl(this.head, () => this.tail.filter(predicate)); + if (predicate(this.head)) return new NonEmptyLazyList(this.head, () => this.tail.filter(predicate)); return this.tail.filter(predicate); } @@ -130,17 +123,17 @@ class LazyListImpl implements NonEmptyLazyList { } public concat(list: LazyList): LazyList { - return new LazyListImpl(this.head, () => this.tail.concat(list)); + return new NonEmptyLazyList(this.head, () => this.tail.concat(list)); } public prepend(item: T): LazyList { - return new LazyListImpl(item, () => this); + return new NonEmptyLazyList(item, () => this); } public takeWhile(predicate?: (item: T) => boolean): LazyList { if (!predicate) return this.takeWhile(i => !!i); - if (predicate(this.head)) return new LazyListImpl(this.head, () => this.tail.takeWhile(predicate)); - return new EmptyLazyListImpl(); + if (predicate(this.head)) return new NonEmptyLazyList(this.head, () => this.tail.takeWhile(predicate)); + return new EmptyLazyList(); } public shift(count: number): { values: T[], tail: LazyList } { @@ -149,54 +142,54 @@ class LazyListImpl implements NonEmptyLazyList { return { values: [this.head, ...values], tail }; } - public peek(count: number) { + public peek(count: number): T[] { if (count === 0) return []; return [this.head, ...this.tail.peek(count - 1)]; } } -class EmptyLazyListImpl implements EmptyLazyList { - public readonly empty = true; +export class EmptyLazyList extends AbstractLazyList { + readonly empty = true; [Symbol.iterator](): Iterator { return { next() { return { done: true, value: {} as T } } }; } - public map(_mapper: (item: T) => R): LazyList { return new EmptyLazyListImpl(); } - public flatMap(_mapper: (item: T) => Iterable): LazyList { return new EmptyLazyListImpl(); } - public filter(_predicate: (item: T) => boolean): LazyList { return new EmptyLazyListImpl(); } - public reduce(_reducer: (value: R, item: T) => R, init: R): R { return init; } - public reduceSelf(_reducer: (value: T, item: T) => T): never { + map(_mapper: (item: T) => R): LazyList { return new EmptyLazyList(); } + flatMap(_mapper: (item: T) => Iterable): LazyList { return new EmptyLazyList(); } + filter(_predicate: (item: T) => boolean): LazyList { return new EmptyLazyList(); } + reduce(_reducer: (value: R, item: T) => R, init: R): R { return init; } + reduceSelf(_reducer: (value: T, item: T) => T): never { throw new Error('Cannot call reduceSelf() on an empty list. 
Try reduce() instead'); } - public concat(list: LazyList): LazyList { return list; } - public prepend(item: T): LazyList { return new LazyListImpl(item, () => this); } - public takeWhile(_predicate?: (item: T) => boolean): LazyList { return new EmptyLazyListImpl(); } - public shift(_count: number): { values: T[], tail: LazyList } { + concat(list: LazyList): LazyList { return list; } + prepend(item: T): LazyList { return new NonEmptyLazyList(item, () => this); } + takeWhile(_predicate?: (item: T) => boolean): LazyList { return new EmptyLazyList(); } + shift(_count: number): { values: T[], tail: LazyList } { return { values: [], tail: this }; } - public peek(_count: number) { return []; } + peek(_count: number) { return []; } } /** * Creates a lazy list of the provided type */ export function create(head: T, getTail: () => LazyList): LazyList { - return new LazyListImpl(head, getTail); + return new NonEmptyLazyList(head, getTail); } /** * Creates a lazy list containing a single item */ export function single(item: T): LazyList { - return new LazyListImpl(item, empty); + return new NonEmptyLazyList(item, empty); } /** * Creates an empty lazy list of the provided type. */ export function empty(): LazyList { - return new EmptyLazyListImpl(); + return new EmptyLazyList(); } /** @@ -204,7 +197,7 @@ export function empty(): LazyList { * from the specified number (or 0 by default). */ export function infList(start = 0): LazyList { - return new LazyListImpl(start, () => infList(start + 1)); + return new NonEmptyLazyList(start, () => infList(start + 1)); } /** @@ -221,6 +214,6 @@ export function fromIterable(iterable: Iterable): LazyList { */ export function fromIterator(iterator: Iterator): LazyList { const next = iterator.next(); - if (next.done) return new EmptyLazyListImpl(); - return new LazyListImpl(next.value, () => fromIterator(iterator)); + if (next.done) return new EmptyLazyList(); + return new NonEmptyLazyList(next.value, () => fromIterator(iterator)); } diff --git a/src/utils/lazy.ts b/src/utils/lazy.ts index a270ff6..62580a9 100644 --- a/src/utils/lazy.ts +++ b/src/utils/lazy.ts @@ -1,3 +1,6 @@ +import { CoreObject } from '~/core'; + + /** * This is a simple interface for lazy evaluation in TS. * @@ -16,9 +19,16 @@ export default interface Lazy { readonly value: T; } -interface SimpleLazy extends Lazy { - readonly _evaluator: () => T; - _value?: T; +class SimpleLazy extends CoreObject> implements Lazy { + private _value?: T; + + constructor(private evaluator: () => T) { + super(); + } + + get value(): T { + return !('_value' in this) ? (this._value = this.evaluator()) : this._value as T; + } } /** @@ -33,11 +43,5 @@ interface SimpleLazy extends Lazy { * mechanism. */ export function lazy(evaluator: () => T): Lazy { - const obj: SimpleLazy = { - _evaluator: evaluator, - get value(): T { - return !('_value' in this) ? (this._value = this._evaluator()) : this._value as T; - }, - }; - return obj; + return new SimpleLazy(evaluator); } diff --git a/src/utils/utils.ts b/src/utils/utils.ts index a5294e4..21e23da 100644 --- a/src/utils/utils.ts +++ b/src/utils/utils.ts @@ -13,26 +13,3 @@ export function mapSet(map: ReadonlyMap, key: K, value: V): Readonly clone.set(key, value); return clone; } - -/** - * Creates an instance of the specified class with the specified properties. - * This can be used to bypass the class's constructor. - * This should ONLY be used internally as an alternative to JS's lack - * of constructor overloading. 
- */ -export function createInstance(cls: Class, props: Partial = {}) { - const obj = Object.create(cls.prototype); - return Object.assign(obj, props); -} - -/** - * Clones an instance of a class, optionally overriding properties - * with the specified properties. - */ -export function cloneInstance(obj: T, props: Partial = {}) { - const clone = Object.create(Object.getPrototypeOf(obj)); - for (const key of Object.keys(obj)) { - clone[key] = obj[key]; - } - return Object.assign(clone, props); -} From 2894497c7703323b864d3613b229cafdc418886c Mon Sep 17 00:00:00 2001 From: Jake Chitel Date: Sun, 29 Apr 2018 11:57:57 -0500 Subject: [PATCH 06/15] added namespace declaration syntax --- .../declarations/NamespaceDeclaration.ts | 44 +++++++++++++++++++ src/syntax/declarations/index.ts | 1 + src/syntax/environment.ts | 11 +++-- 3 files changed, 53 insertions(+), 3 deletions(-) create mode 100644 src/syntax/declarations/NamespaceDeclaration.ts diff --git a/src/syntax/declarations/NamespaceDeclaration.ts b/src/syntax/declarations/NamespaceDeclaration.ts new file mode 100644 index 0000000..de35c8e --- /dev/null +++ b/src/syntax/declarations/NamespaceDeclaration.ts @@ -0,0 +1,44 @@ +import { ParseFunc, seq, tok, repeat, select, optional } from '~/parser/parser'; +import { NodeBase, SyntaxType, Declaration } from '~/syntax/environment'; +import { ImportDeclaration } from './ImportDeclaration'; +import { ExportDeclaration, ExportForwardDeclaration } from '~/syntax'; +import { Token, TokenType } from '~/parser/lexer'; + + +export interface NamespaceDeclaration extends NodeBase { + readonly name: Optional; + readonly imports: ReadonlyArray; + readonly declarations: ReadonlyArray; +} + +export function register( + Declaration: ParseFunc, + ExportDeclaration: ParseFunc +) { + /** + * NamespaceDeclaration ::= 'namespace' IDENT? 
'{' ImportDeclaration* (Declaration | ExportDeclaration | ExportForwardDeclaration)* '}' + */ + const NamespaceDeclaration: ParseFunc = seq( + tok('namespace'), + optional(tok(TokenType.IDENT)), + tok('{'), + repeat(ImportDeclaration, '*'), + repeat(select( + Declaration, + ExportDeclaration, + ExportForwardDeclaration + ), '*'), + tok('}'), + ([_1, name, _2, imports, declarations], location) => ({ + syntaxType: SyntaxType.NamespaceDeclaration as SyntaxType.NamespaceDeclaration, + location, + name, + imports, + declarations + }) + ); + + return { + NamespaceDeclaration + }; +} diff --git a/src/syntax/declarations/index.ts b/src/syntax/declarations/index.ts index 557764f..665c50c 100644 --- a/src/syntax/declarations/index.ts +++ b/src/syntax/declarations/index.ts @@ -4,3 +4,4 @@ export { ExportDeclaration } from './ExportDeclaration'; export { ExportForwardDeclaration } from './ExportForwardDeclaration'; export { TypeDeclaration, TypeParam } from './TypeDeclaration'; export { FunctionDeclaration, Param } from './FunctionDeclaration'; +export { NamespaceDeclaration } from './NamespaceDeclaration'; diff --git a/src/syntax/environment.ts b/src/syntax/environment.ts index a149c3f..29e620e 100644 --- a/src/syntax/environment.ts +++ b/src/syntax/environment.ts @@ -4,7 +4,7 @@ import { // module ModuleRoot, ImportDeclaration, ExportDeclaration, ExportForwardDeclaration, // declaration - TypeDeclaration, FunctionDeclaration, ConstantDeclaration, + TypeDeclaration, FunctionDeclaration, ConstantDeclaration, NamespaceDeclaration, // type BuiltInType, StructType, TupleType, ArrayType, FunctionType, UnionType, IdentifierType, ParenthesizedType, SpecificType, NamespaceAccessType, @@ -52,6 +52,7 @@ import { register as register_ExportDeclaration } from './declarations/ExportDec import { register as register_TypeDeclaration } from './declarations/TypeDeclaration'; import { register as register_FunctionDeclaration } from './declarations/FunctionDeclaration'; import { register as register_ConstantDeclaration } from './declarations/ConstantDeclaration'; +import { register as register_NamespaceDeclaration } from './declarations/NamespaceDeclaration'; import { register as register_ModuleRoot } from './ModuleRoot'; /** @@ -68,6 +69,7 @@ export enum SyntaxType { TypeDeclaration = 'TypeDeclaration', FunctionDeclaration = 'FunctionDeclaration', ConstantDeclaration = 'ConstantDeclaration', + NamespaceDeclaration = 'NamespaceDeclaration', // #endregion // #region types BuiltInType = 'BuiltInType', @@ -189,13 +191,15 @@ export function SyntaxEnvironment() { // module const { ExportDeclaration } = register_ExportDeclaration(Declaration); + const { NamespaceDeclaration } = register_NamespaceDeclaration(Declaration, ExportDeclaration); const { ModuleRoot } = register_ModuleRoot(Declaration, ExportDeclaration); function Declaration(parser: Parser): ParseResult { const fn: ParseFunc = select( TypeDeclaration, FunctionDeclaration, - ConstantDeclaration + ConstantDeclaration, + NamespaceDeclaration ); return fn(parser); } @@ -290,7 +294,8 @@ export interface NodeBase { export type Declaration = | TypeDeclaration | FunctionDeclaration - | ConstantDeclaration; + | ConstantDeclaration + | NamespaceDeclaration; /** * The discriminated union of all type nodes From 88fe6faf9a6ceba9ca1956d6458df275920838d6 Mon Sep 17 00:00:00 2001 From: Jake Chitel Date: Mon, 30 Apr 2018 20:40:49 -0500 Subject: [PATCH 07/15] added high-level semantic types --- src/core.ts | 10 +- src/extensions.ts | 14 +- src/index.ts | 5 +- 
src/parser/lexer/char-stream.ts | 4 +- src/parser/lexer/lexer-state.ts | 4 +- src/parser/lexer/token.ts | 2 +- src/parser/parser.ts | 2 +- src/typecheck/checker.ts | 142 +++++++----------- src/typecheck/index.ts | 2 +- src/typecheck/node-visitors/module-visitor.ts | 6 +- src/typecheck/program.ts | 132 +++++++++++----- src/utils/lazy.ts | 2 +- 12 files changed, 181 insertions(+), 144 deletions(-) diff --git a/src/core.ts b/src/core.ts index 59927dd..37e8f92 100644 --- a/src/core.ts +++ b/src/core.ts @@ -5,20 +5,20 @@ * The type parameter must be the same type, because TS doesn't handle * 'this' type properly for some reason. */ -export class CoreObject> { +export class CoreObject { /** * Creates a clone of 'this', applying an optional set of properties to the new object. * Note that the type parameter is to allow private properties to be added. * There will be an error if invalid types are provided for public properties. */ - clone>(props: C = {} as C): T { + clone>(props: C = {} as C): T { // TS does not know how to properly handle spreads const _props = { ...(this as any), ...(props as any) }; return Object.assign(Object.create(Object.getPrototypeOf(this)), _props); } } -export class FilePosition extends CoreObject { +export class FilePosition extends CoreObject { constructor( readonly path: string, readonly position: [number, number] @@ -53,7 +53,7 @@ export class FilePosition extends CoreObject { * - the start line/column of the range * - the end line/column of the range */ -export class FileRange extends CoreObject { +export class FileRange extends CoreObject { constructor( readonly path: string, readonly start: [number, number], @@ -97,7 +97,7 @@ export enum DiagnosticLevel { /** * Represents a message to report to the user as an output of compilation. */ -export class Diagnostic extends CoreObject { +export class Diagnostic extends CoreObject { readonly location: FileRange; constructor( diff --git a/src/extensions.ts b/src/extensions.ts index 86a8448..dca7790 100644 --- a/src/extensions.ts +++ b/src/extensions.ts @@ -5,6 +5,8 @@ interface Array { last(count: number): T[]; /** Get the number of items in this array that match the given predicate */ count(predicate: (item: T) => boolean): number; + /** Mutates the value at the specified index using the specified mutator function, returning a new array containing the new value. */ + mutate(index: number, fn: (value: T) => T): Array; } interface ReadonlyArray { @@ -14,17 +16,25 @@ interface ReadonlyArray { last(count: number): T[]; /** Get the number of items in this array that match the given predicate */ count(predicate: (item: T) => boolean): number; + /** Mutates the value at the specified index using the specified mutator function, returning a new array containing the new value. 
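For example, [1, 2, 3].mutate(1, n => n * 10) evaluates to [1, 20, 3], leaving the original array untouched.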
*/ + mutate(index: number, fn: (value: T) => T): ReadonlyArray; } -Array.prototype.last = function last(count?: number) { +Array.prototype.last = function last(this: Array, count?: number) { if (typeof count === 'undefined') return this[this.length - 1]; return this.slice(this.length - count, this.length); } -Array.prototype.count = function count(predicate: (item: T) => boolean): number { +Array.prototype.count = function count(this: Array, predicate: (item: T) => boolean): number { return this.filter(predicate).length; } +Array.prototype.mutate = function mutate(this: Array, index: number, fn: (value: T) => T): Array { + const newArray = [...this]; + newArray.splice(index, 1, fn(this[index])); + return newArray; +} + interface String { /** Get the substring from this string containing the last {count} characters in this string */ last(count?: number): string; diff --git a/src/index.ts b/src/index.ts index f294ce3..6fef995 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,9 +1,10 @@ -import { resolve } from 'path'; - /** Allows us to use require path aliases (~, ~test) */ import './require-hook'; /** Injects extensions into built-in APIs */ import './extensions'; + + +import { resolve } from 'path'; import { runProgram } from './runner'; diff --git a/src/parser/lexer/char-stream.ts b/src/parser/lexer/char-stream.ts index 222e240..b8a4910 100644 --- a/src/parser/lexer/char-stream.ts +++ b/src/parser/lexer/char-stream.ts @@ -6,7 +6,7 @@ import { FilePosition, CoreObject } from '~/core'; export type CharStream = EmptyCharStream | NonEmptyCharStream; -export class EmptyCharStream extends CoreObject { +export class EmptyCharStream extends CoreObject { readonly empty = true; constructor( @@ -17,7 +17,7 @@ export class EmptyCharStream extends CoreObject { } } -export class NonEmptyCharStream extends CoreObject { +export class NonEmptyCharStream extends CoreObject { readonly empty = false; constructor( diff --git a/src/parser/lexer/lexer-state.ts b/src/parser/lexer/lexer-state.ts index 1097ddf..320791d 100644 --- a/src/parser/lexer/lexer-state.ts +++ b/src/parser/lexer/lexer-state.ts @@ -8,7 +8,7 @@ export interface TokenResult { remaining: CharStream; } -class IfHasNextOperation extends CoreObject { +class IfHasNextOperation extends CoreObject { constructor( private readonly previous: LexerState, readonly result: Optional = null @@ -51,7 +51,7 @@ export function LexerState(position: FilePosition, char: string, stream: CharStr return new NonEmptyLexerState(position, char, stream); } -abstract class LexerStateBase extends CoreObject { +abstract class LexerStateBase extends CoreObject { abstract readonly empty: boolean; abstract readonly stream: CharStream; diff --git a/src/parser/lexer/token.ts b/src/parser/lexer/token.ts index ef79325..20aa98b 100644 --- a/src/parser/lexer/token.ts +++ b/src/parser/lexer/token.ts @@ -28,7 +28,7 @@ export enum TokenType { * 'image' is an exact copy of the token from the original source string. * 'value' is an optional value that represents the parsed value of the token, if it makes sense for the token type (numbers, strings, etc.). 
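 * Because Token extends CoreObject, tokens are copied immutably via clone(): the
 * binary-expression changes earlier in this series replace createNewOperToken(tok)
 * with tok.clone(), which yields a fresh Token with all of its properties intact,
 * and clone({ ... }) can apply property overrides where needed.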
*/ -export class Token extends CoreObject { +export class Token extends CoreObject { readonly location: FileRange; constructor( diff --git a/src/parser/parser.ts b/src/parser/parser.ts index ff8c100..86309f7 100644 --- a/src/parser/parser.ts +++ b/src/parser/parser.ts @@ -19,7 +19,7 @@ interface ParseResultInternal { // #region Parser -abstract class ParserBase extends CoreObject { +abstract class ParserBase extends CoreObject { abstract readonly empty: boolean; abstract readonly tokens: LazyList; diff --git a/src/typecheck/checker.ts b/src/typecheck/checker.ts index 9a80585..3f14cb3 100644 --- a/src/typecheck/checker.ts +++ b/src/typecheck/checker.ts @@ -1,123 +1,89 @@ -import { Diagnostic, FileRange } from '~/core'; -import { Program } from './program'; +import { Diagnostic, CoreObject } from '~/core'; +import { Program, Module, Declaration, Namespace } from './program'; import { parseModule } from '~/parser'; import { ModuleVisitor } from './node-visitors/module-visitor'; -import { Declaration, ModuleRoot } from '~/syntax'; import { TypeCheckErrorContext } from './error-context'; +import { mapSet } from '~/utils/utils'; +import { ModuleRoot, Declaration as SyntaxDeclaration } from '~/syntax'; -export interface TypeChecker { +export class TypeChecker extends CoreObject { + readonly diagnostics: ReadonlyArray = []; + readonly syntaxModules: ReadonlyMap = new Map(); + readonly modules: ReadonlyMap = new Map(); + readonly syntaxDeclarations: ReadonlyArray = []; + readonly declarations: ReadonlyArray = []; + readonly namespaces: ReadonlyArray = []; + readonly dependencies: ReadonlyArray = []; + readonly errorContext: TypeCheckErrorContext = TypeCheckErrorContext; + /** * Top-level interface for type checking. * Pass the path of an entry-point of a program, and get a fully type-checked * Program as a result. The Program will contain any errors found during checking, * and a reference to every successfully parsed module. */ - check(path: string): Program; + check(path: string): Program { + // we can't do anything until we have a parsed module, so do that first + let checker = this.parseModule(path); + // if there is no module, there was a parse error, and we should return right away + if (!checker.syntaxModules.size) return checker.createProgram(); + // 1st pass: resolve all modules, namespaces, and declarations + const module = checker.syntaxModules.get(path)!; + checker = ModuleVisitor[module.syntaxType](module, this); + // 2nd pass: resolve all dependencies + checker = checker.dependencies.reduce((tc, d) => processDependency(d, tc), checker); + // 3rd pass: resolve all types + checker = checker.syntaxDeclarations.reduce((tc, d) => DeclarationTypeVisitor[d.syntaxType](d, tc), checker); + // 4th pass: handle name clashes (overloads are valid for all declarations) + checker = checker.modules.reduce((tc, m) => NameClashVisitor[m.syntaxType](m, tc), checker); + // everything has been type checked, return the program + return checker.createProgram(); + } + /** * Add an error to this type checker, using the specified * error generator function. This function will be passed * a context object that contains several built-in message * generator functions. */ - error(fn: (ctx: TypeCheckErrorContext) => Diagnostic): TypeChecker; + error(fn: (ctx: TypeCheckErrorContext) => Diagnostic): TypeChecker { + return this.addDiagnostic(fn(this.errorContext)); + } + /** * Given an absolute path to a module file, parse the module * and add it to the type checker's internal module registry. 
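 * For example (the path is hypothetical):
 *
 *   const next = checker.parseModule('/abs/path/to/module');
 *
 * adds the parsed module to the registry on success, records any parse diagnostics,
 * and returns a new TypeChecker; the receiver itself is never mutated.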
*/ - parseModule(path: string): TypeChecker; - /** - * Add a diagnostic to the type checker. - */ - addDiagnostic(diagnostic: Diagnostic): TypeChecker; - /** - * Add a list of diagnostics to the type checker. - */ - addDiagnostics(diagnostics: ReadonlyArray): TypeChecker; -} - -interface NameEntry { - moduleId: number; - location: FileRange; -} - -interface TypeCheckerInternal extends TypeChecker { - readonly diagnostics: ReadonlyArray; - readonly modules: ReadonlyArray; - readonly declarations: ReadonlyArray; - readonly names: ReadonlyMap>; - readonly exports: ReadonlyMap>; - readonly errorContext: TypeCheckErrorContext; - createProgram(): Program; -} - -export function TypeChecker() { - return TypeChecker.init(); -} - -export namespace TypeChecker { - export function init(): TypeChecker { - const checker: TypeCheckerInternal = { - diagnostics: [], - modules: [], - declarations: [], - names: new Map(), - exports: new Map(), - errorContext: TypeCheckErrorContext, - check, - error, - parseModule: _parseModule, - addDiagnostic, - addDiagnostics, - createProgram, - }; - return checker; - } - - function check(this: TypeCheckerInternal, path: string): Program { - // we can't do anything until we have a parsed module, so do that first - const withModule = this.parseModule(path) as TypeCheckerInternal; - // if there is no module, there was a parse error, and we should return right away - if (!withModule.modules.length) return withModule.createProgram(); - // 1st pass: resolve all names - const module = withModule.modules[0]; - const firstPass = ModuleVisitor[module.syntaxType](module, this) as TypeCheckerInternal; - // 2nd pass: resolve all types - const secondPass = firstPass.declarations.reduce((tc, d) => DeclarationTypeVisitor[d.syntaxType](d, tc), firstPass); - // 3rd pass: handle name clashes (overloads are valid for all declarations) - const thirdPass = secondPass.modules.reduce((tc, m) => NameClashVisitor[m.syntaxType](m, tc), secondPass); - // everything has been type checked, return the program - return thirdPass.createProgram(); - } - - function error(this: TypeCheckerInternal, fn: (ctx: TypeCheckErrorContext) => Diagnostic) { - return this.addDiagnostic(fn(this.errorContext)); - } - - function _parseModule(this: TypeCheckerInternal, path: string) { + parseModule(path: string): TypeChecker { const { module, diagnostics } = parseModule(path); - return { - ...this, - modules: module ? [...this.modules, module] : this.modules, + return this.clone({ + syntaxModules: module ? mapSet(this.syntaxModules, path, module) : this.syntaxModules, diagnostics: [...this.diagnostics, ...diagnostics] - }; + }); } - function addDiagnostic(this: TypeCheckerInternal, diagnostic: Diagnostic) { + /** + * Add a diagnostic to the type checker. + */ + addDiagnostic(diagnostic: Diagnostic): TypeChecker { return this.addDiagnostics([diagnostic]); } - function addDiagnostics(this: TypeCheckerInternal, diagnostics: ReadonlyArray) { - return { - ...this, + /** + * Add a list of diagnostics to the type checker. + */ + addDiagnostics(diagnostics: ReadonlyArray): TypeChecker { + return this.clone({ diagnostics: [...this.diagnostics, ...diagnostics] - } + }); } - function createProgram(this: TypeCheckerInternal) { + private createProgram(): Program { return new Program().clone({ - // TODO: modules? 
+ modules: this.modules, + declarations: this.declarations, diagnostics: this.diagnostics }); } -} \ No newline at end of file +} diff --git a/src/typecheck/index.ts b/src/typecheck/index.ts index c7168e0..20052f4 100644 --- a/src/typecheck/index.ts +++ b/src/typecheck/index.ts @@ -2,6 +2,6 @@ import { TypeChecker } from './checker'; export default function typecheck(path: string) { - const checker = TypeChecker(); + const checker = new TypeChecker(); return checker.check(path); } diff --git a/src/typecheck/node-visitors/module-visitor.ts b/src/typecheck/node-visitors/module-visitor.ts index 6bf2693..de22a5c 100644 --- a/src/typecheck/node-visitors/module-visitor.ts +++ b/src/typecheck/node-visitors/module-visitor.ts @@ -1,5 +1,5 @@ import { Visitor } from '~/syntax/visitor'; -import { ModuleRoot, ImportDeclaration, ExportDeclaration, ExportForwardDeclaration, TypeDeclaration, FunctionDeclaration, ConstantDeclaration } from '~/syntax'; +import { ModuleRoot, ImportDeclaration, ExportDeclaration, ExportForwardDeclaration, TypeDeclaration, FunctionDeclaration, ConstantDeclaration, NamespaceDeclaration } from '~/syntax'; import { TypeChecker } from '~/typecheck/checker'; import { SyntaxType, Declaration } from '~/syntax/environment'; import resolveModule from '~/typecheck/resolver'; @@ -13,6 +13,7 @@ const isDeclaration = (node: ExportDeclaration | ExportForwardDeclaration | Decl /** * This visitor is responsible for enumerating all modules and declarations in the program * to prepare for import and export resolution in the next pass. + * No dependencies or name linkages are resolved here. */ export const ModuleVisitor: Visitor = { [SyntaxType.ModuleRoot]: (node: ModuleRoot, checker: TypeChecker) => { @@ -74,5 +75,8 @@ export const ModuleVisitor: Visitor = { }, [SyntaxType.ConstantDeclaration]: (node: ConstantDeclaration, checker: TypeChecker) => { // + }, + [SyntaxType.NamespaceDeclaration]: (node: NamespaceDeclaration, checker: TypeChecker) => { + // } }; diff --git a/src/typecheck/program.ts b/src/typecheck/program.ts index 6568667..b825f24 100644 --- a/src/typecheck/program.ts +++ b/src/typecheck/program.ts @@ -1,72 +1,128 @@ import { Diagnostic, CoreObject } from '~/core'; -import * as syntax from '~/syntax'; /** * A complete semantic program. This is the top-level data structure * for the semantic process of the compiler. */ -export class Program extends CoreObject { +export class Program extends CoreObject { readonly modules: ReadonlyMap = new Map(); readonly declarations: ReadonlyArray = []; readonly diagnostics: ReadonlyArray = []; } /** - * A semantic container for a module in a program. - * A module contains a list of declarations (by name) accessible inside the module - * and a list of exports (by name) accessible from outside the module + * An abstract object representing a "namespace". + * Serves as a parent class for modules and declared namespaces, + * both of which are semantically "namespaces". + */ +export abstract class Namespace extends CoreObject { + readonly localNames: Map = new Map(); + readonly exports: Map = new Map(); + readonly declarations: Declaration[] = []; + + constructor( + readonly namespaceId: number + ) { super(); } +} + +/** + * A declared namespace within another namespace. + * It has a name, a parent namespace id, and a declaration id (because it is a declaration). 
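
// Illustrative sketch, not part of the patch: ModuleVisitor above follows a table-driven
// visitor shape, an object keyed by syntaxType whose handlers take the node plus the
// current checker and return a (possibly new) checker. The node and state types below are
// simplified stand-ins invented for this sketch, not the project's real Visitor types.
interface MiniState { readonly seen: ReadonlyArray<string>; }

const MiniVisitor = {
    ModuleRoot: (node: { path: string }, state: MiniState): MiniState =>
        ({ seen: [...state.seen, `module ${node.path}`] }),
    ImportDeclaration: (node: { from: string }, state: MiniState): MiniState =>
        ({ seen: [...state.seen, `import from ${node.from}`] }),
};

// Dispatch mirrors `ModuleVisitor[module.syntaxType](module, this)` in check():
const node = { syntaxType: 'ImportDeclaration', from: './other' } as const;
const nextState = MiniVisitor[node.syntaxType](node, { seen: [] });
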
*/ -export class Module extends CoreObject { - readonly declarations: ReadonlyMap = new Map(); - readonly exports: ReadonlyMap = new Map(); +export class DeclaredNamespace extends Namespace { + constructor( + namespaceId: number, + readonly name: string, + readonly parentNamespaceId: number, + readonly declarationId: number + ) { super(namespaceId); } } /** - * For a given name, the mapping of resolved declaration id to the next link - * in the resolution chain. There are two kinds of mappings: - * - local: the end of a chain. the resolved declaration exists in this module. - * - import: pointer to another link in the chain, containing the module/export combination to look up next. + * A semantic container for a module in a program. + * A module is a type of namespace, and can contain local names, exports, and declarations. + * Where it differs from a generic namespace is that it has no parent namespace, and is associated with a file path. */ -export interface MappingGroup { - readonly mappings: ReadonlyMap; +export class Module extends Namespace { + constructor( + namespaceId: number, + readonly absolutePath: string + ) { super(namespaceId); } } -interface LocalMapping { - kind: 'local'; +/** + * For any given name in a program, there are target(s) to which that name resolves. + * A target will always be either: + * - an export name of another module + * - a module's namespace + * - a locally-scoped name + * - a declaration inline with the name (only in the case of exported declarations) + */ +export type NameTarget = RemoteName | RemoteNamespace | LocalName | LocalDeclaration; + +/** + * A remote name is reference to an export name from another module. + */ +export class RemoteName extends CoreObject { + constructor( + readonly modulePath: string, + readonly exportName: string, + readonly resolvedDeclarationId: number + ) { super() } } -interface ImportMapping { - kind: 'import'; - modulePath: string; - exportName: string; +/** + * A remote namespace is a pointer to a module's top-level namespace + */ +export class RemoteNamespace extends CoreObject { + constructor( + readonly modulePath: string + ) { super() } } /** - * A semantic declaration is a node that is ultimately associated with a name, either: - * - a function - * - a type - * - a constant - * - a namespace, created via a wildcard import or export forward + * A local name is a reference to a name that is scoped to the current module */ -export type Declaration = FunctionDeclaration | TypeDeclaration | ConstantDeclaration | Namespace; +export class LocalName extends CoreObject { + constructor( + readonly name: string, + readonly resolvedDeclarationId: number + ) { super() } +} -export interface FunctionDeclaration { - kind: 'function'; - syntaxNode: syntax.FunctionDeclaration; +/** + * A local declaration is a reference to a declaration that has no name, + * i.e. in the case of an anonymous default export. 
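
// Illustrative sketch, not part of the patch: how the NameTarget variants in this file are
// meant to populate a namespace's name tables. It assumes Namespace.localNames and
// Namespace.exports map a name to the list of targets it can resolve to; the source
// statements, module paths, and declaration ids in the comments are hypothetical.
//
//   const exports = new Map<string, NameTarget[]>([
//       // `export { x }`, where x is a local declaration with id 3
//       ['x', [new LocalName('x', 3)]],
//       // `export default <anonymous declaration>`, declaration id 7
//       ['default', [new LocalDeclaration(7)]],
//   ]);
//   const localNames = new Map<string, NameTarget[]>([
//       // `import foo from './a'`: the other module's 'default' export, declaration id 12
//       ['foo', [new RemoteName('/abs/a', 'default', 12)]],
//       // `import * as A from './a'`: the whole top-level namespace of that module
//       ['A', [new RemoteNamespace('/abs/a')]],
//   ]);
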
+ */ +export class LocalDeclaration extends CoreObject { + constructor( + readonly resolvedDeclarationId: number + ) { super() } } -export interface TypeDeclaration { - kind: 'type'; - syntaxNode: syntax.TypeDeclaration; +/** + * A semantic declaration is a node that is ultimately associated with a name + */ +export type Declaration = DeclaredFunction | DeclaredType | DeclaredConstant | DeclaredNamespace; + +export class DeclaredFunction extends CoreObject { + constructor( + readonly name: string, + readonly declarationId: number + ) { super() } } -export interface ConstantDeclaration { - kind: 'constant'; - syntaxNode: syntax.ConstantDeclaration; +export class DeclaredType extends CoreObject { + constructor( + readonly name: string, + readonly declarationId: number + ) { super() } } -export interface Namespace { - kind: 'namespace'; - syntaxNode: syntax.ImportDeclaration | syntax.ExportForwardDeclaration; +export class DeclaredConstant extends CoreObject { + constructor( + readonly name: string, + readonly declarationId: number + ) { super() } } diff --git a/src/utils/lazy.ts b/src/utils/lazy.ts index 62580a9..bda360e 100644 --- a/src/utils/lazy.ts +++ b/src/utils/lazy.ts @@ -19,7 +19,7 @@ export default interface Lazy { readonly value: T; } -class SimpleLazy extends CoreObject> implements Lazy { +class SimpleLazy extends CoreObject implements Lazy { private _value?: T; constructor(private evaluator: () => T) { From 9ef3b64c8b909944e28e5346c3693fa4c86ffdf6 Mon Sep 17 00:00:00 2001 From: Jake Chitel Date: Mon, 30 Apr 2018 20:54:17 -0500 Subject: [PATCH 08/15] renamed "typecheck\: to "semantic" --- src/runner.ts | 4 ++-- src/{typecheck => semantic}/checker.ts | 0 src/{typecheck => semantic}/error-context.ts | 0 src/{typecheck => semantic}/index.ts | 0 .../node-visitors/declaration-name-visitor.ts | 0 src/{typecheck => semantic}/node-visitors/module-visitor.ts | 4 ++-- src/{typecheck => semantic}/program.ts | 0 src/{typecheck => semantic}/resolver.ts | 0 8 files changed, 4 insertions(+), 4 deletions(-) rename src/{typecheck => semantic}/checker.ts (100%) rename src/{typecheck => semantic}/error-context.ts (100%) rename src/{typecheck => semantic}/index.ts (100%) rename src/{typecheck => semantic}/node-visitors/declaration-name-visitor.ts (100%) rename src/{typecheck => semantic}/node-visitors/module-visitor.ts (97%) rename src/{typecheck => semantic}/program.ts (100%) rename src/{typecheck => semantic}/resolver.ts (100%) diff --git a/src/runner.ts b/src/runner.ts index c414c0c..f2aa8ca 100644 --- a/src/runner.ts +++ b/src/runner.ts @@ -1,6 +1,6 @@ import { DiagnosticLevel } from '~/core'; -import typecheck from '~/typecheck'; -import { Program } from '~/typecheck/program'; +import typecheck from '~/semantic'; +import { Program } from '~/semantic/program'; /** diff --git a/src/typecheck/checker.ts b/src/semantic/checker.ts similarity index 100% rename from src/typecheck/checker.ts rename to src/semantic/checker.ts diff --git a/src/typecheck/error-context.ts b/src/semantic/error-context.ts similarity index 100% rename from src/typecheck/error-context.ts rename to src/semantic/error-context.ts diff --git a/src/typecheck/index.ts b/src/semantic/index.ts similarity index 100% rename from src/typecheck/index.ts rename to src/semantic/index.ts diff --git a/src/typecheck/node-visitors/declaration-name-visitor.ts b/src/semantic/node-visitors/declaration-name-visitor.ts similarity index 100% rename from src/typecheck/node-visitors/declaration-name-visitor.ts rename to 
src/semantic/node-visitors/declaration-name-visitor.ts diff --git a/src/typecheck/node-visitors/module-visitor.ts b/src/semantic/node-visitors/module-visitor.ts similarity index 97% rename from src/typecheck/node-visitors/module-visitor.ts rename to src/semantic/node-visitors/module-visitor.ts index de22a5c..5eb405d 100644 --- a/src/typecheck/node-visitors/module-visitor.ts +++ b/src/semantic/node-visitors/module-visitor.ts @@ -1,8 +1,8 @@ import { Visitor } from '~/syntax/visitor'; import { ModuleRoot, ImportDeclaration, ExportDeclaration, ExportForwardDeclaration, TypeDeclaration, FunctionDeclaration, ConstantDeclaration, NamespaceDeclaration } from '~/syntax'; -import { TypeChecker } from '~/typecheck/checker'; +import { TypeChecker } from '~/semantic/checker'; import { SyntaxType, Declaration } from '~/syntax/environment'; -import resolveModule from '~/typecheck/resolver'; +import resolveModule from '~/semantic/resolver'; type ModuleNode = ModuleRoot | ImportDeclaration | ExportDeclaration | ExportForwardDeclaration | Declaration; diff --git a/src/typecheck/program.ts b/src/semantic/program.ts similarity index 100% rename from src/typecheck/program.ts rename to src/semantic/program.ts diff --git a/src/typecheck/resolver.ts b/src/semantic/resolver.ts similarity index 100% rename from src/typecheck/resolver.ts rename to src/semantic/resolver.ts From 4afbc6fef63978a99f8d5c2f7bdc3b74c5e6d7dc Mon Sep 17 00:00:00 2001 From: Jake Chitel Date: Mon, 30 Apr 2018 21:53:42 -0500 Subject: [PATCH 09/15] added passes skeleton --- src/semantic/checker.ts | 55 +++++++++++++++++++-- src/semantic/passes/enumeration/index.ts | 14 ++++++ src/semantic/passes/name-clash/index.ts | 0 src/semantic/passes/resolution/index.ts | 61 ++++++++++++++++++++++++ src/semantic/passes/typecheck/index.ts | 0 src/semantic/program.ts | 10 ++++ 6 files changed, 136 insertions(+), 4 deletions(-) create mode 100644 src/semantic/passes/enumeration/index.ts create mode 100644 src/semantic/passes/name-clash/index.ts create mode 100644 src/semantic/passes/resolution/index.ts create mode 100644 src/semantic/passes/typecheck/index.ts diff --git a/src/semantic/checker.ts b/src/semantic/checker.ts index 3f14cb3..3bde40a 100644 --- a/src/semantic/checker.ts +++ b/src/semantic/checker.ts @@ -18,10 +18,57 @@ export class TypeChecker extends CoreObject { readonly errorContext: TypeCheckErrorContext = TypeCheckErrorContext; /** - * Top-level interface for type checking. - * Pass the path of an entry-point of a program, and get a fully type-checked - * Program as a result. The Program will contain any errors found during checking, - * and a reference to every successfully parsed module. + * Top-level interface for semantic analysis. + * + * Pass 1 - Namespace Enumeration: + * - Starting with the first module, enumerate all declarations, including imports, exports, and forwards + * - Recursively enumerate all referenced modules and all namespaces + * - Inputs: + * - the main module path + * - Outputs: + * - module registry (all modules by path) + * - declaration registry (all declarations by id) + * - namespace registry (all namespaces by id) + * - dependency queue (a built queue of dependencies that need to be resolved) + * - any errors from the process + * - NOTE: this does NOT involve actually processing the internals of declarations, only names and references + * Pass 2 - Dependency Resolution: + * - One output of the first pass was a dependency queue. 
Now that enumeration is done, we must process those dependencies + * - This involves resolving all imports and exports to corresponding declarations, creting a reference chain + * - Inputs: + * - module registry + * - declaration registry + * - namespace registry + * - dependency queue + * - Outputs: + * - module registry (unchanged) + * - declaration registry (unchanged) + * - namespace registry, now with names and exports resolved + * - any errors from the process + * Pass 3 - Type Checking: + * - Now that we have all declarations enumerated, and all declaration references resolved, we resolve the types of everything + * - This involves setting the type of everything that is typeable + * - As well as making sure that assignability is correct + * - Inputs: + * - declaration registry + * - namespace registry + * - Outputs: + * - declaration registry, now with everything typed + * - namespace registry (unchanged) + * - any errors from the process + * Pass 4 - Name Clash Checking: + * - Once we have resolved the type of everything, we can make sure that everything that has the same name is able to do so + * - Some declarations can be merged, others cannot + * - Several things can be overloaded, but those overloads must be valid + * - Inputs: + * - namespace registry + * - declaration registry + * - Outputs: + * - namespace registry, now with name clashes processed (may create overloads, merges, etc.) + * - declaration registry (possibly unchanged, overloads and merges may need to change things) + * - any errors from the process + * + * Once we are done with all passes, we output a Program instance that contains all errors and all modules (which contain all namespaces, which contain all declarations). */ check(path: string): Program { // we can't do anything until we have a parsed module, so do that first diff --git a/src/semantic/passes/enumeration/index.ts b/src/semantic/passes/enumeration/index.ts new file mode 100644 index 0000000..d9c4b66 --- /dev/null +++ b/src/semantic/passes/enumeration/index.ts @@ -0,0 +1,14 @@ +import { Module, Namespace, Declaration } from '~/semantic/program'; +import { Dependency } from '~/semantic/passes/resolution'; + + +export interface NamespaceEnumerationOutput { + readonly modules: ReadonlyMap; + readonly namespaces: ReadonlyArray; + readonly declarations: ReadonlyArray; + readonly dependencyQueue: ReadonlyArray; +} + +export default function enumerateNamespaces(mainModulePath: string): NamespaceEnumerationOutput { + // +} \ No newline at end of file diff --git a/src/semantic/passes/name-clash/index.ts b/src/semantic/passes/name-clash/index.ts new file mode 100644 index 0000000..e69de29 diff --git a/src/semantic/passes/resolution/index.ts b/src/semantic/passes/resolution/index.ts new file mode 100644 index 0000000..bb3610e --- /dev/null +++ b/src/semantic/passes/resolution/index.ts @@ -0,0 +1,61 @@ +import { CoreObject } from '~/core'; + + +export type Dependency = ImportedName | ImportedNamespace | ForwardedName | ForwardedNamespace | PureForward | ExportedName | ExportedDeclaration; + +export class ImportedName extends CoreObject { + constructor( + readonly importModule: string, + readonly importName: string, + readonly exportModule: string, + readonly exportName: string + ) { super() } +} + +export class ImportedNamespace extends CoreObject { + constructor( + readonly importModule: string, + readonly importName: string, + readonly exportModule: string + ) { super() } +} + +export class ForwardedName extends CoreObject { + constructor( + readonly 
forwardModule: string, + readonly forwardName: string, + readonly exportModule: string, + readonly exportName: string + ) { super() } +} + +export class ForwardedNamespace extends CoreObject { + constructor( + readonly forwardModule: string, + readonly forwardName: string, + readonly exportModule: string + ) { super() } +} + +export class PureForward extends CoreObject { + constructor( + readonly forwardModule: string, + readonly exportModule: string + ) { super() } +} + +export class ExportedName extends CoreObject { + constructor( + readonly module: string, + readonly localName: string, + readonly exportName: string + ) { super() } +} + +export class ExportedDeclaration extends CoreObject { + constructor( + readonly module: string, + readonly declarationId: number, + readonly exportName: string + ) { super() } +} diff --git a/src/semantic/passes/typecheck/index.ts b/src/semantic/passes/typecheck/index.ts new file mode 100644 index 0000000..e69de29 diff --git a/src/semantic/program.ts b/src/semantic/program.ts index b825f24..56bbae7 100644 --- a/src/semantic/program.ts +++ b/src/semantic/program.ts @@ -106,6 +106,9 @@ export class LocalDeclaration extends CoreObject { */ export type Declaration = DeclaredFunction | DeclaredType | DeclaredConstant | DeclaredNamespace; +/** + * A semantic function entity, identified by a name. + */ export class DeclaredFunction extends CoreObject { constructor( readonly name: string, @@ -113,6 +116,10 @@ export class DeclaredFunction extends CoreObject { ) { super() } } +/** + * A semantic type entity, identified by a name. + * NOTE: this is different from the concept of a "type" in type checking TODO then what is? + */ export class DeclaredType extends CoreObject { constructor( readonly name: string, @@ -120,6 +127,9 @@ export class DeclaredType extends CoreObject { ) { super() } } +/** + * A semantic constant entity, identified by a name. 
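
// Illustrative sketch, not part of the patch: the kind of Dependency records the enumeration
// pass is expected to push onto its dependency queue for the resolution pass to consume.
// The source statements in the comments and the module paths are hypothetical; the
// constructors match the classes defined in passes/resolution above.
import { Dependency, ImportedName, ImportedNamespace, ExportedName } from '~/semantic/passes/resolution';

const dependencyQueue: Dependency[] = [
    // import foo from './a';   (a default import into /abs/main)
    new ImportedName('/abs/main', 'foo', '/abs/a', 'default'),
    // import * as A from './a';   (a wildcard import of the module's namespace)
    new ImportedNamespace('/abs/main', 'A', '/abs/a'),
    // export { bar };   (exporting an existing local name under the same name)
    new ExportedName('/abs/main', 'bar', 'bar'),
];
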
+ */ export class DeclaredConstant extends CoreObject { constructor( readonly name: string, From d5b84e22171467837bad9b9cec1cd2b9468b95fb Mon Sep 17 00:00:00 2001 From: Jake Chitel Date: Thu, 10 May 2018 16:02:21 -0500 Subject: [PATCH 10/15] changing syntax and parser structure to better support visitors --- src/parser/index.ts | 2 +- src/syntax/ModuleRoot.ts | 32 +- .../declarations/ConstantDeclaration.ts | 64 ++- src/syntax/declarations/ExportDeclaration.ts | 67 +-- .../declarations/ExportForwardDeclaration.ts | 52 ++- .../declarations/FunctionDeclaration.ts | 141 +++++-- src/syntax/declarations/ImportDeclaration.ts | 51 ++- .../declarations/NamespaceDeclaration.ts | 86 ++-- src/syntax/declarations/TypeDeclaration.ts | 111 +++-- src/syntax/declarations/index.ts | 8 +- src/syntax/declarations/parsing.ts | 7 + src/syntax/declarations/visitors.ts | 7 + src/syntax/environment.ts | 388 +++++++++--------- src/syntax/expressions/ArrayAccess.ts | 49 ++- src/syntax/expressions/ArrayLiteral.ts | 30 +- src/syntax/expressions/BinaryExpression.ts | 68 ++- src/syntax/expressions/BoolLiteral.ts | 24 +- src/syntax/expressions/CharLiteral.ts | 24 +- src/syntax/expressions/FieldAccess.ts | 43 +- src/syntax/expressions/FloatLiteral.ts | 24 +- src/syntax/expressions/FunctionApplication.ts | 58 +-- .../expressions/IdentifierExpression.ts | 24 +- src/syntax/expressions/IfElseExpression.ts | 40 +- src/syntax/expressions/IntegerLiteral.ts | 24 +- src/syntax/expressions/LambdaExpression.ts | 53 ++- .../expressions/ParenthesizedExpression.ts | 30 +- src/syntax/expressions/StringLiteral.ts | 24 +- src/syntax/expressions/StructLiteral.ts | 30 +- src/syntax/expressions/TupleLiteral.ts | 30 +- src/syntax/expressions/UnaryExpression.ts | 65 ++- src/syntax/expressions/VarDeclaration.ts | 33 +- src/syntax/expressions/index.ts | 36 +- src/syntax/expressions/parsing.ts | 18 + src/syntax/expressions/visitors.ts | 18 + src/syntax/index.ts | 2 +- src/syntax/parsing.ts | 5 + src/syntax/statements/Block.ts | 30 +- src/syntax/statements/BreakStatement.ts | 24 +- src/syntax/statements/ContinueStatement.ts | 24 +- src/syntax/statements/DoWhileStatement.ts | 35 +- src/syntax/statements/ExpressionStatement.ts | 30 +- src/syntax/statements/ForStatement.ts | 38 +- src/syntax/statements/ReturnStatement.ts | 30 +- src/syntax/statements/ThrowStatement.ts | 30 +- src/syntax/statements/TryCatchStatement.ts | 50 ++- src/syntax/statements/WhileStatement.ts | 35 +- src/syntax/statements/index.ts | 20 +- src/syntax/statements/parsing.ts | 10 + src/syntax/statements/visitors.ts | 10 + src/syntax/types/ArrayType.ts | 38 +- src/syntax/types/BuiltInType.ts | 24 +- src/syntax/types/FunctionType.ts | 37 +- src/syntax/types/IdentifierType.ts | 23 +- src/syntax/types/NamespaceAccessType.ts | 45 +- src/syntax/types/ParenthesizedType.ts | 32 +- src/syntax/types/SpecificType.ts | 55 +-- src/syntax/types/StructType.ts | 34 +- src/syntax/types/TupleType.ts | 31 +- src/syntax/types/UnionType.ts | 51 +-- src/syntax/types/index.ts | 20 +- src/syntax/types/parsing.ts | 10 + src/syntax/types/visitors.ts | 10 + src/syntax/visitor.ts | 89 +++- 63 files changed, 1595 insertions(+), 1038 deletions(-) create mode 100644 src/syntax/declarations/parsing.ts create mode 100644 src/syntax/declarations/visitors.ts create mode 100644 src/syntax/expressions/parsing.ts create mode 100644 src/syntax/expressions/visitors.ts create mode 100644 src/syntax/parsing.ts create mode 100644 src/syntax/statements/parsing.ts create mode 100644 src/syntax/statements/visitors.ts create 
mode 100644 src/syntax/types/parsing.ts create mode 100644 src/syntax/types/visitors.ts diff --git a/src/parser/index.ts b/src/parser/index.ts index 94b82d9..d993bc8 100644 --- a/src/parser/index.ts +++ b/src/parser/index.ts @@ -10,6 +10,6 @@ export function parseModule(path: string): { module: Optional, diagn if (_diags.length) return { module: null, diagnostics: _diags }; const parser = createParser(tokens); const env = SyntaxEnvironment(); - const { result: module, diagnostics } = parser.parse(env.ModuleRoot); + const { result: module, diagnostics } = parser.parse(env.parseModuleRoot); return { module, diagnostics }; } \ No newline at end of file diff --git a/src/syntax/ModuleRoot.ts b/src/syntax/ModuleRoot.ts index 24d10e0..35f0570 100644 --- a/src/syntax/ModuleRoot.ts +++ b/src/syntax/ModuleRoot.ts @@ -2,30 +2,30 @@ import { TokenType } from '~/parser/lexer'; import { ImportDeclaration } from './declarations/ImportDeclaration'; import { NodeBase, SyntaxType, Declaration } from '~/syntax/environment'; import { ExportDeclaration, ExportForwardDeclaration } from '~/syntax'; +import { parseImportDeclaration, parseExportForwardDeclaration } from './declarations/parsers'; import { ParseFunc, seq, repeat, select, tok } from '~/parser/parser'; +import { FileRange } from '~/core'; -export interface ModuleRoot extends NodeBase { - readonly imports: ReadonlyArray; - readonly declarations: ReadonlyArray; +export class ModuleRoot extends NodeBase { + constructor( + location: FileRange, + readonly imports: ReadonlyArray, + readonly declarations: ReadonlyArray + ) { super(location, SyntaxType.ModuleRoot) } } -export function register(Declaration: ParseFunc, ExportDeclaration: ParseFunc) { - const ModuleRoot: ParseFunc = seq( - repeat(ImportDeclaration, '*'), +export function register(parseDeclaration: ParseFunc, parseExportDeclaration: ParseFunc) { + const parseModuleRoot: ParseFunc = seq( + repeat(parseImportDeclaration, '*'), repeat(select( - Declaration, - ExportDeclaration, - ExportForwardDeclaration + parseDeclaration, + parseExportDeclaration, + parseExportForwardDeclaration ), '*'), tok(TokenType.EOF), - ([imports, declarations], location) => ({ - syntaxType: SyntaxType.ModuleRoot as SyntaxType.ModuleRoot, - location, - imports, - declarations - }) + ([imports, declarations], location) => new ModuleRoot(location, imports, declarations) ); - return { ModuleRoot }; + return { parseModuleRoot }; } diff --git a/src/syntax/declarations/ConstantDeclaration.ts b/src/syntax/declarations/ConstantDeclaration.ts index 8345d8d..d584feb 100644 --- a/src/syntax/declarations/ConstantDeclaration.ts +++ b/src/syntax/declarations/ConstantDeclaration.ts @@ -1,29 +1,61 @@ import { NodeBase, SyntaxType, Expression } from '~/syntax/environment'; -import { ParseFunc, seq, tok, optional } from '~/parser/parser'; +import { ParseFunc, seq, tok } from '~/parser/parser'; import { TokenType, Token } from '~/parser/lexer'; +import { FileRange } from '~/core'; -export interface ConstantDeclaration extends NodeBase { - name: Optional; - value: Expression; +export class ConstantDeclaration extends NodeBase { + constructor( + location: FileRange, + readonly name: Token, + readonly value: Expression + ) { super(location, SyntaxType.ConstantDeclaration) } + + accept(visitor: ConstantDeclarationVisitor, param: T): R { + return visitor.visitConstantDeclaration(this, param); + } +} + +export interface ConstantDeclarationVisitor { + visitConstantDeclaration(node: ConstantDeclaration, param: T): R; +} + +export class 
AnonymousConstantDeclaration extends NodeBase { + constructor( + location: FileRange, + readonly value: Expression + ) { super(location, SyntaxType.AnonymousConstantDeclaration) } + + accept(visitor: AnonymousConstantDeclarationVisitor, param: T): R { + return visitor.visitAnonymousConstantDeclaration(this, param); + } +} + +export interface AnonymousConstantDeclarationVisitor { + visitAnonymousConstantDeclaration(node: AnonymousConstantDeclaration, param: T): R; } -export function register(Expression: ParseFunc) { +export function register(parseExpression: ParseFunc) { + /** + * ConstantDeclaration ::= 'const' IDENT EQUALS Expression + */ + const parseConstantDeclaration: ParseFunc = seq( + tok('const'), + tok(TokenType.IDENT), + tok('='), + parseExpression, + ([_1, name, _2, value], location) => new ConstantDeclaration(location, name, value) + ); + /** - * ConstantDeclaration ::= 'const' IDENT? EQUALS Expression + * AnonymousConstantDeclaration ::= 'const' EQUALS Expression */ - const ConstantDeclaration: ParseFunc = seq( + const parseAnonymousConstantDeclaration: ParseFunc = seq( tok('const'), - optional(tok(TokenType.IDENT)), tok('='), - Expression, - ([_1, name, _2, value], location) => ({ - syntaxType: SyntaxType.ConstantDeclaration as SyntaxType.ConstantDeclaration, - location, - name, - value - }) + parseExpression, + ([_1, _2, value], location) => new AnonymousConstantDeclaration(location, value) ); - return { ConstantDeclaration }; + return { parseConstantDeclaration, parseAnonymousConstantDeclaration }; } diff --git a/src/syntax/declarations/ExportDeclaration.ts b/src/syntax/declarations/ExportDeclaration.ts index efb27db..4715896 100644 --- a/src/syntax/declarations/ExportDeclaration.ts +++ b/src/syntax/declarations/ExportDeclaration.ts @@ -1,6 +1,7 @@ -import { NodeBase, SyntaxType, Declaration } from '~/syntax/environment'; +import { NodeBase, SyntaxType, Declaration, AnonymousDeclaration, isDeclaration } from '~/syntax/environment'; import { ParseFunc, seq, tok, select, repeat } from '~/parser/parser'; import { Token, TokenType } from '~/parser/lexer'; +import { FileRange } from '~/core'; /** @@ -13,39 +14,48 @@ import { Token, TokenType } from '~/parser/lexer'; * - Named export of a named value (export name AND value name = name from value, value = value) */ interface Export { - // export name is always present but may not be set TODO: this should ALWAYS be present, we should split out anonymous declarations - readonly exportName: Optional; + // export name is always present + readonly exportName: Token; // value name is present for all but anonymous default exports readonly valueName: Optional; // value is not present for exports of existing names - readonly value?: Declaration; + readonly value?: Declaration | AnonymousDeclaration; } -export interface ExportDeclaration extends NodeBase { - readonly exports: ReadonlyArray; +export class ExportDeclaration extends NodeBase { + constructor( + location: FileRange, + readonly exports: ReadonlyArray + ) { super(location, SyntaxType.ExportDeclaration) } + + accept(visitor: ExportDeclarationVisitor, param: T): R { + return visitor.visitExportDeclaration(this, param); + } +} + +export interface ExportDeclarationVisitor { + visitExportDeclaration(node: ExportDeclaration, param: T): R; } -export function register(Declaration: ParseFunc) { - const DefaultExportDeclaration: ParseFunc = seq( +export function register(parseDeclaration: ParseFunc, parseAnonymousDeclaration: ParseFunc) { + const parseDefaultExportDeclaration: ParseFunc = 
seq( tok('export'), tok('default'), - select( - Declaration, + select( + parseDeclaration, + parseAnonymousDeclaration, tok(TokenType.IDENT) ), - ([_, def, value], location) => ({ - location, - syntaxType: SyntaxType.ExportDeclaration as SyntaxType.ExportDeclaration, - exports: value instanceof Token - ? [{ exportName: def, valueName: value }] - : [{ exportName: def, valueName: value.name, value }] - }) + ([_, def, value], location) => new ExportDeclaration(location, + value instanceof Token ? [{ exportName: def, valueName: value }] + : isDeclaration(value) ? [{ exportName: def, valueName: value.name, value }] + : [{ exportName: def, valueName: null, value }]) ); /** * NamedExports ::= '{' (IDENT | (IDENT 'as' IDENT))(+ sep ',') '}' */ - const NamedExports: ParseFunc = seq( + const parseNamedExports: ParseFunc = seq( tok('{'), repeat(select( seq( @@ -60,26 +70,23 @@ export function register(Declaration: ParseFunc) { ([_1, names, _2]) => names ); - const NamedExportDeclaration: ParseFunc = seq( + const parseNamedExportDeclaration: ParseFunc = seq( tok('export'), select( - Declaration, - NamedExports + parseDeclaration, + parseNamedExports ), - ([_, value], location) => ({ - location, - syntaxType: SyntaxType.ExportDeclaration as SyntaxType.ExportDeclaration, - exports: Array.isArray(value) ? value : [{ exportName: value.name, valueName: value.name, value }] - }) + ([_, value], location) => new ExportDeclaration(location, + Array.isArray(value) ? value : [{ exportName: value.name, valueName: value.name, value }]) ); /** * ExportDeclaration ::= DefaultExportDeclaration | NamedExportDeclaration */ - const ExportDeclaration: ParseFunc = select( - DefaultExportDeclaration, - NamedExportDeclaration + const parseExportDeclaration: ParseFunc = select( + parseDefaultExportDeclaration, + parseNamedExportDeclaration ); - return { ExportDeclaration }; + return { parseExportDeclaration }; } diff --git a/src/syntax/declarations/ExportForwardDeclaration.ts b/src/syntax/declarations/ExportForwardDeclaration.ts index 270c2de..54d2fa7 100644 --- a/src/syntax/declarations/ExportForwardDeclaration.ts +++ b/src/syntax/declarations/ExportForwardDeclaration.ts @@ -1,7 +1,8 @@ -import { ImportList } from './ImportDeclaration'; +import { parseImportList } from './ImportDeclaration'; import { ParseFunc, seq, select, tok, optional } from '~/parser/parser'; import { Token, TokenType } from '~/parser/lexer'; import { NodeBase, SyntaxType } from '~/syntax/environment'; +import { FileRange } from '~/core'; export interface Forward { @@ -9,15 +10,26 @@ export interface Forward { readonly exportName: Token; } -export interface ExportForwardDeclaration extends NodeBase { - readonly moduleName: Token; - readonly forwards: ReadonlyArray; +export class ExportForwardDeclaration extends NodeBase { + constructor( + location: FileRange, + readonly moduleName: Token, + readonly forwards: ReadonlyArray + ) { super(location, SyntaxType.ExportForwardDeclaration) } + + accept(visitor: ExportForwardDeclarationVisitor, param: T): R { + return visitor.visitExportForwardDeclaration(this, param); + } +} + +export interface ExportForwardDeclarationVisitor { + visitExportForwardDeclaration(node: ExportForwardDeclaration, param: T): R; } /** * DefaultExportForwards ::= COLON (LBRACE IDENT RBRACE | '*') */ -const DefaultExportForwards: ParseFunc = seq( +const parseDefaultExportForwards: ParseFunc = seq( tok(':'), select( tok('*'), @@ -34,44 +46,40 @@ const DefaultExportForwards: ParseFunc = seq( /** * DefaultExportForwardDeclaration ::= EXPORT 
DEFAULT FROM STRING_LITERAL DefaultExportForwards? */ -const DefaultExportForwardDeclaration: ParseFunc = seq( +const parseDefaultExportForwardDeclaration: ParseFunc = seq( tok('export'), tok('default'), tok('from'), tok(TokenType.STRING_LITERAL), - optional(DefaultExportForwards), - ([_1, def, _2, moduleName, fwd], location) => ({ - syntaxType: SyntaxType.ExportForwardDeclaration as SyntaxType.ExportForwardDeclaration, + optional(parseDefaultExportForwards), + ([_1, def, _2, moduleName, fwd], location) => new ExportForwardDeclaration( location, moduleName, - forwards: [{ importName: fwd || def, exportName: def }] - }) + [{ importName: fwd || def, exportName: def }] + ) ); /** * DefaultExportForwardDeclaration ::= EXPORT FROM STRING_LITERAL ':' (ImportList | '*') */ -const NamedExportForwardDeclaration: ParseFunc = seq( +const parseNamedExportForwardDeclaration: ParseFunc = seq( tok('export'), tok('from'), tok(TokenType.STRING_LITERAL), tok(':'), select( - seq(ImportList, imps => imps.map(({ importName, aliasName }) => ({ importName, exportName: aliasName }))), + seq(parseImportList, imps => imps.map(({ importName, aliasName }) => ({ importName, exportName: aliasName }))), seq(tok('*'), _ => [{ importName: _, exportName: _ }]) ), - ([_1, _2, moduleName, _3, forwards], location) => ({ - syntaxType: SyntaxType.ExportForwardDeclaration as SyntaxType.ExportForwardDeclaration, - location, - moduleName, - forwards - }) + ([_1, _2, moduleName, _3, forwards], location) => new ExportForwardDeclaration( + location, moduleName, forwards + ) ); /** * ExportForwardDeclaration ::= DefaultExportForwardDeclaration | NamedExportForwardDeclaration */ -export const ExportForwardDeclaration: ParseFunc = select( - DefaultExportForwardDeclaration, - NamedExportForwardDeclaration +export const parseExportForwardDeclaration: ParseFunc = select( + parseDefaultExportForwardDeclaration, + parseNamedExportForwardDeclaration ); diff --git a/src/syntax/declarations/FunctionDeclaration.ts b/src/syntax/declarations/FunctionDeclaration.ts index 1edcdf0..dfaecee 100644 --- a/src/syntax/declarations/FunctionDeclaration.ts +++ b/src/syntax/declarations/FunctionDeclaration.ts @@ -1,50 +1,86 @@ import { TypeParamList, TypeParam } from './TypeDeclaration'; import { ParseFunc, seq, tok, repeat, select, optional } from '~/parser/parser'; -import { TypeNode, Expression, NodeBase, SyntaxType, Statement } from '~/syntax/environment'; +import { Type, Expression, NodeBase, SyntaxType, Statement } from '~/syntax/environment'; import { Token, TokenType } from '~/parser/lexer'; import { Block } from '~/syntax'; +import { FileRange } from '~/core'; -export interface Param extends NodeBase { - readonly name: Token; - readonly typeNode: Optional; // optional to support lambda params +export class Param extends NodeBase { + constructor( + location: FileRange, + readonly name: Token, + readonly typeNode: Optional // optional to support lambda params + ) { super(location, SyntaxType.Param) } + + accept(visitor: ParamVisitor, param: T): R { + return visitor.visitParam(this, param); + } +} + +export interface ParamVisitor { + visitParam(node: Param, param: T): R; +} + +export class FunctionDeclaration extends NodeBase { + constructor( + location: FileRange, + readonly returnType: Type, + readonly name: Token, + readonly typeParams: ReadonlyArray, + readonly params: ReadonlyArray, + readonly body: Expression | Statement + ) { super(location, SyntaxType.FunctionDeclaration) } + + accept(visitor: FunctionDeclarationVisitor, param: T): R { + return 
visitor.visitFunctionDeclaration(this, param); + } } -export interface FunctionDeclaration extends NodeBase { - readonly returnType: TypeNode; - readonly name: Optional; - readonly typeParams: ReadonlyArray; - readonly params: ReadonlyArray; - readonly body: Expression | Statement; +export interface FunctionDeclarationVisitor { + visitFunctionDeclaration(node: FunctionDeclaration, param: T): R; +} + +export class AnonymousFunctionDeclaration extends NodeBase { + constructor( + location: FileRange, + readonly returnType: Type, + readonly typeParams: ReadonlyArray, + readonly params: ReadonlyArray, + readonly body: Expression | Statement + ) { super(location, SyntaxType.AnonymousFunctionDeclaration) } + + accept(visitor: AnonymousFunctionDeclarationVisitor, param: T): R { + return visitor.visitAnonymousFunctionDeclaration(this, param); + } +} + +export interface AnonymousFunctionDeclarationVisitor { + visitAnonymousFunctionDeclaration(node: AnonymousFunctionDeclaration, param: T): R; } export function register( - TypeNode: ParseFunc, - Expression: ParseFunc, - Statement: ParseFunc, - Block: ParseFunc, - TypeParamList: ParseFunc + parseType: ParseFunc, + parseExpression: ParseFunc, + parseStatement: ParseFunc, + parseBlock: ParseFunc, + parseTypeParamList: ParseFunc ) { /** * Param ::= Type IDENT */ - const Param: ParseFunc = seq( - TypeNode, + const parseParam: ParseFunc = seq( + parseType, tok(TokenType.IDENT), - ([typeNode, name], location) => ({ - syntaxType: SyntaxType.Param as SyntaxType.Param, - location, - typeNode, - name - }) + ([typeNode, name], location) => new Param(location, name, typeNode) ); /** * ParameterList ::= LPAREN Param(* sep COMMA) RPAREN */ - const ParamList: ParseFunc = seq( + const parseParamList: ParseFunc = seq( tok('('), - repeat(Param, '*', tok(',')), + repeat(parseParam, '*', tok(',')), tok(')'), ([_1, params, _2]) => params ); @@ -55,37 +91,56 @@ export function register( * Put block before expression because there is a conflict * between empty blocks and empty structs. */ - const FunctionBody: ParseFunc = select( - Block, - Expression, - Statement + const parseFunctionBody: ParseFunc = select( + parseBlock, + parseExpression, + parseStatement ); /** - * FunctionDeclaration ::= 'func' Type IDENT? TypeParamList? ParamList FAT_ARROW FunctionBody + * FunctionDeclaration ::= 'func' Type IDENT TypeParamList? ParamList FAT_ARROW FunctionBody */ - const FunctionDeclaration: ParseFunc = seq( + const parseFunctionDeclaration: ParseFunc = seq( tok('func'), - TypeNode, - optional(tok(TokenType.IDENT)), - optional(TypeParamList), - ParamList, + parseType, + tok(TokenType.IDENT), + optional(parseTypeParamList), + parseParamList, tok('=>'), - FunctionBody, - ([_1, returnType, name, typeParams, params, _2, body], location) => ({ - syntaxType: SyntaxType.FunctionDeclaration as SyntaxType.FunctionDeclaration, + parseFunctionBody, + ([_1, returnType, name, typeParams, params, _2, body], location) => new FunctionDeclaration( location, + returnType, name, + typeParams ? typeParams.params : [], + params, + body + ) + ); + + /** + * AnonymousFunctionDeclaration ::= 'func' Type TypeParamList? ParamList FAT_ARROW FunctionBody + */ + const parseAnonymousFunctionDeclaration: ParseFunc = seq( + tok('func'), + parseType, + optional(parseTypeParamList), + parseParamList, + tok('=>'), + parseFunctionBody, + ([_1, returnType, typeParams, params, _2, body], location) => new AnonymousFunctionDeclaration( + location, returnType, - typeParams: typeParams ? 
typeParams.params : [], + typeParams ? typeParams.params : [], params, body - }) + ) ); return { - FunctionDeclaration, - Param, - FunctionBody + parseFunctionDeclaration, + parseAnonymousFunctionDeclaration, + parseParam, + parseFunctionBody }; } diff --git a/src/syntax/declarations/ImportDeclaration.ts b/src/syntax/declarations/ImportDeclaration.ts index a1584d5..41a8f4c 100644 --- a/src/syntax/declarations/ImportDeclaration.ts +++ b/src/syntax/declarations/ImportDeclaration.ts @@ -1,12 +1,13 @@ import { TokenType, Token } from '~/parser/lexer'; import { NodeBase, SyntaxType } from '~/syntax/environment'; import { ParseFunc, seq, tok, select, repeat } from '~/parser/parser'; +import { FileRange } from '~/core'; /** * NameAlias ::= IDENT 'as' IDENT */ -export const NameAlias: ParseFunc = seq( +export const parseNameAlias: ParseFunc = seq( tok(TokenType.IDENT), tok('as'), tok(TokenType.IDENT), @@ -16,7 +17,7 @@ export const NameAlias: ParseFunc = seq( /** * WildcardImport ::= '*' 'as' IDENT */ -const WildcardImport: ParseFunc = seq( +const parseWildcardImport: ParseFunc = seq( tok('*'), tok('as'), tok(TokenType.IDENT), @@ -26,12 +27,12 @@ const WildcardImport: ParseFunc = seq( /** * NamedImports ::= LBRACE (AliasImport | IDENT | WildcardImport)+(sep COMMA) RBRACE */ -const NamedImports: ParseFunc = seq( +const parseNamedImports: ParseFunc = seq( tok('{'), repeat(select( - NameAlias, + parseNameAlias, tok(TokenType.IDENT), - WildcardImport + parseWildcardImport ), '+', tok(',')), tok('}'), ([_1, names, _2]) => names.map(n => n instanceof Token ? { importName: n, aliasName: n } : n) @@ -44,19 +45,19 @@ const NamedImports: ParseFunc = seq( * | IDENT COMMA WildcardImport # default and wildcard import * | IDENT # just default import */ -export const ImportList: ParseFunc = select( - NamedImports, +export const parseImportList: ParseFunc = select( + parseNamedImports, seq( tok(TokenType.IDENT), tok(','), - NamedImports, + parseNamedImports, ([def, _, named]) => [defaultImport(def), ...named] ), - seq(WildcardImport, i => [i]), + seq(parseWildcardImport, i => [i]), seq( tok(TokenType.IDENT), tok(','), - WildcardImport, + parseWildcardImport, ([def, _, wildcard]) => [defaultImport(def), wildcard] ), seq(tok(TokenType.IDENT), i => [defaultImport(i)]) @@ -67,26 +68,32 @@ interface Import { aliasName: Token; } -export interface ImportDeclaration extends NodeBase { - readonly moduleName: Token; - readonly imports: ReadonlyArray; +export class ImportDeclaration extends NodeBase { + constructor( + location: FileRange, + readonly moduleName: Token, + readonly imports: ReadonlyArray + ) { super(location, SyntaxType.ImportDeclaration) } + + accept(visitor: ImportDeclarationVisitor, param: T): R { + return visitor.visitImportDeclaration(this, param); + } +} + +export interface ImportDeclarationVisitor { + visitImportDeclaration(node: ImportDeclaration, param: T): R; } /** * ImportDeclaration ::= 'import' 'from' STRING_LITERAL ':' ImportList */ -export const ImportDeclaration: ParseFunc = seq( +export const parseImportDeclaration: ParseFunc = seq( tok('import'), tok('from'), tok(TokenType.STRING_LITERAL), tok(':'), - ImportList, - ([_1, _2, moduleName, _3, imports], location) => ({ - syntaxType: SyntaxType.ImportDeclaration as SyntaxType.ImportDeclaration, - location, - moduleName, - imports - }) + parseImportList, + ([_1, _2, moduleName, _3, imports], location) => new ImportDeclaration(location, moduleName, imports) ); -const defaultImport = (token: Token) => ({ importName: token.clone({ image: 'default' 
}), aliasName: token }); +const defaultImport = (token: Token): Import => ({ importName: token.clone({ image: 'default' }), aliasName: token }); diff --git a/src/syntax/declarations/NamespaceDeclaration.ts b/src/syntax/declarations/NamespaceDeclaration.ts index de35c8e..716d5e8 100644 --- a/src/syntax/declarations/NamespaceDeclaration.ts +++ b/src/syntax/declarations/NamespaceDeclaration.ts @@ -1,44 +1,84 @@ -import { ParseFunc, seq, tok, repeat, select, optional } from '~/parser/parser'; +import { ParseFunc, seq, tok, repeat, select } from '~/parser/parser'; import { NodeBase, SyntaxType, Declaration } from '~/syntax/environment'; -import { ImportDeclaration } from './ImportDeclaration'; +import { ImportDeclaration, parseImportDeclaration } from './ImportDeclaration'; import { ExportDeclaration, ExportForwardDeclaration } from '~/syntax'; import { Token, TokenType } from '~/parser/lexer'; +import { FileRange } from '~/core'; +import { parseExportForwardDeclaration } from './ExportForwardDeclaration'; -export interface NamespaceDeclaration extends NodeBase { - readonly name: Optional; - readonly imports: ReadonlyArray; - readonly declarations: ReadonlyArray; +export class NamespaceDeclaration extends NodeBase { + constructor( + location: FileRange, + readonly name: Token, + readonly imports: ReadonlyArray, + readonly declarations: ReadonlyArray + ) { super(location, SyntaxType.NamespaceDeclaration) } + + accept(visitor: NamespaceDeclarationVisitor, param: T): R { + return visitor.visitNamespaceDeclaration(this, param); + } +} + +export interface NamespaceDeclarationVisitor { + visitNamespaceDeclaration(node: NamespaceDeclaration, param: T): R; +} + +export class AnonymousNamespaceDeclaration extends NodeBase { + constructor( + location: FileRange, + readonly imports: ReadonlyArray, + readonly declarations: ReadonlyArray + ) { super(location, SyntaxType.AnonymousNamespaceDeclaration) } + + accept(visitor: AnonymousNamespaceDeclarationVisitor, param: T): R { + return visitor.visitAnonymousNamespaceDeclaration(this, param); + } +} + +export interface AnonymousNamespaceDeclarationVisitor { + visitAnonymousNamespaceDeclaration(node: AnonymousNamespaceDeclaration, param: T): R; } export function register( - Declaration: ParseFunc, - ExportDeclaration: ParseFunc + parseDeclaration: ParseFunc, + parseExportDeclaration: ParseFunc ) { /** - * NamespaceDeclaration ::= 'namespace' IDENT? 
'{' ImportDeclaration* (Declaration | ExportDeclaration | ExportForwardDeclaration)* '}' + * NamespaceDeclaration ::= 'namespace' IDENT '{' ImportDeclaration* (Declaration | ExportDeclaration | ExportForwardDeclaration)* '}' + */ + const parseNamespaceDeclaration: ParseFunc = seq( + tok('namespace'), + tok(TokenType.IDENT), + tok('{'), + repeat(parseImportDeclaration, '*'), + repeat(select( + parseDeclaration, + parseExportDeclaration, + parseExportForwardDeclaration + ), '*'), + tok('}'), + ([_1, name, _2, imports, declarations], location) => new NamespaceDeclaration(location, name, imports, declarations) + ); + + /** + * AnonymousNamespaceDeclaration ::= 'namespace' '{' ImportDeclaration* (Declaration | ExportDeclaration | ExportForwardDeclaration)* '}' */ - const NamespaceDeclaration: ParseFunc = seq( + const parseAnonymousNamespaceDeclaration: ParseFunc = seq( tok('namespace'), - optional(tok(TokenType.IDENT)), tok('{'), - repeat(ImportDeclaration, '*'), + repeat(parseImportDeclaration, '*'), repeat(select( - Declaration, - ExportDeclaration, - ExportForwardDeclaration + parseDeclaration, + parseExportDeclaration, + parseExportForwardDeclaration ), '*'), tok('}'), - ([_1, name, _2, imports, declarations], location) => ({ - syntaxType: SyntaxType.NamespaceDeclaration as SyntaxType.NamespaceDeclaration, - location, - name, - imports, - declarations - }) + ([_1, _2, imports, declarations], location) => new AnonymousNamespaceDeclaration(location, imports, declarations) ); return { - NamespaceDeclaration + parseNamespaceDeclaration, + parseAnonymousNamespaceDeclaration }; } diff --git a/src/syntax/declarations/TypeDeclaration.ts b/src/syntax/declarations/TypeDeclaration.ts index f1765b1..d2fb6cb 100644 --- a/src/syntax/declarations/TypeDeclaration.ts +++ b/src/syntax/declarations/TypeDeclaration.ts @@ -1,76 +1,115 @@ -import { TypeNode, SyntaxType, NodeBase } from '~/syntax/environment'; +import { Type, SyntaxType, NodeBase } from '~/syntax/environment'; import { ParseFunc, seq, optional, select, tok, repeat } from '~/parser/parser'; import { TokenType, Token } from '~/parser/lexer'; +import { FileRange } from '~/core'; -export interface TypeParam extends NodeBase { - readonly name: Token; - readonly varianceOp: Optional; - readonly typeConstraint: Optional; +export class TypeParam extends NodeBase { + constructor( + location: FileRange, + readonly name: Token, + readonly varianceOp: Optional, + readonly typeConstraint: Optional + ) { super(location, SyntaxType.TypeParam) } + + accept(visitor: TypeParamVisitor, param: T): R { + return visitor.visitTypeParam(this, param); + } +} + +export interface TypeParamVisitor { + visitTypeParam(node: TypeParam, param: T): R; } export interface TypeParamList { readonly params: ReadonlyArray; } -export interface TypeDeclaration extends NodeBase { - readonly name: Optional; - readonly typeParams: ReadonlyArray; - readonly typeNode: TypeNode; +export class TypeDeclaration extends NodeBase { + constructor( + location: FileRange, + readonly name: Token, + readonly typeParams: ReadonlyArray, + readonly typeNode: Type + ) { super(location, SyntaxType.TypeDeclaration) } + + accept(visitor: TypeDeclarationVisitor, param: T): R { + return visitor.visitTypeDeclaration(this, param); + } +} + +export interface TypeDeclarationVisitor { + visitTypeDeclaration(node: TypeDeclaration, param: T): R; +} + +export class AnonymousTypeDeclaration extends NodeBase { + constructor( + location: FileRange, + readonly typeParams: ReadonlyArray, + readonly typeNode: Type + ) { 
super(location, SyntaxType.AnonymousTypeDeclaration) } + + accept(visitor: AnonymousTypeDeclarationVisitor, param: T): R { + return visitor.visitAnonymousTypeDeclaration(this, param); + } +} + +export interface AnonymousTypeDeclarationVisitor { + visitAnonymousTypeDeclaration(node: AnonymousTypeDeclaration, param: T): R; } /** * Registration function to handle circular dependency. */ -export function register(TypeNode: ParseFunc) { +export function register(parseType: ParseFunc) { /** * TypeParam = ('+' | '-')? IDENT (':' TypeNode)? */ - const TypeParam: ParseFunc = seq( + const parseTypeParam: ParseFunc = seq( optional(select(tok('+'), tok('-'))), tok(TokenType.IDENT), optional(seq( tok(':'), - TypeNode, + parseType, (([_, type]) => type) )), - ([varianceOp, name, typeConstraint], location) => ({ - syntaxType: SyntaxType.TypeParam as SyntaxType.TypeParam, - location, - name, - varianceOp, - typeConstraint - }) + ([varianceOp, name, typeConstraint], location) => new TypeParam(location, name, varianceOp, typeConstraint) ); - const TypeParamList: ParseFunc = seq( + const parseTypeParamList: ParseFunc = seq( tok('<'), - repeat(TypeParam, '+', tok(',')), + repeat(parseTypeParam, '+', tok(',')), tok('>'), ([_1, params, _2]) => ({ params }) ); /** - * TypeDeclaration ::= 'type' IDENT? TypeParamList? EQUALS Type + * TypeDeclaration ::= 'type' IDENT TypeParamList? EQUALS Type + */ + const parseTypeDeclaration: ParseFunc = seq( + tok('type'), + tok(TokenType.IDENT), + optional(parseTypeParamList), + tok('='), + parseType, + ([_1, name, params, _2, typeNode], location) => new TypeDeclaration(location, name, params ? params.params : [], typeNode) + ); + + /** + * AnonymousTypeDeclaration ::= 'type' TypeParamList? EQUALS Type */ - const TypeDeclaration: ParseFunc = seq( + const parseAnonymousTypeDeclaration: ParseFunc = seq( tok('type'), - optional(tok(TokenType.IDENT)), - optional(TypeParamList), + optional(parseTypeParamList), tok('='), - TypeNode, - ([_1, name, params, _2, typeNode], location) => ({ - syntaxType: SyntaxType.TypeDeclaration as SyntaxType.TypeDeclaration, - location, - name, - typeParams: params ? params.params : [], - typeNode - }) + parseType, + ([_1, params, _2, typeNode], location) => new AnonymousTypeDeclaration(location, params ? 
params.params : [], typeNode) ); return { - TypeParam, - TypeParamList, - TypeDeclaration + parseTypeParam, + parseTypeParamList, + parseTypeDeclaration, + parseAnonymousTypeDeclaration } } diff --git a/src/syntax/declarations/index.ts b/src/syntax/declarations/index.ts index 665c50c..33a1d9a 100644 --- a/src/syntax/declarations/index.ts +++ b/src/syntax/declarations/index.ts @@ -1,7 +1,7 @@ export { ImportDeclaration } from './ImportDeclaration'; -export { ConstantDeclaration } from './ConstantDeclaration'; +export { ConstantDeclaration, AnonymousConstantDeclaration } from './ConstantDeclaration'; export { ExportDeclaration } from './ExportDeclaration'; export { ExportForwardDeclaration } from './ExportForwardDeclaration'; -export { TypeDeclaration, TypeParam } from './TypeDeclaration'; -export { FunctionDeclaration, Param } from './FunctionDeclaration'; -export { NamespaceDeclaration } from './NamespaceDeclaration'; +export { TypeDeclaration, TypeParam, AnonymousTypeDeclaration } from './TypeDeclaration'; +export { FunctionDeclaration, Param, AnonymousFunctionDeclaration } from './FunctionDeclaration'; +export { NamespaceDeclaration, AnonymousNamespaceDeclaration } from './NamespaceDeclaration'; diff --git a/src/syntax/declarations/parsing.ts b/src/syntax/declarations/parsing.ts new file mode 100644 index 0000000..90b4285 --- /dev/null +++ b/src/syntax/declarations/parsing.ts @@ -0,0 +1,7 @@ +export { register as registerConstantDeclaration } from './ConstantDeclaration'; +export { register as registerExportDeclaration } from './ExportDeclaration'; +export { parseExportForwardDeclaration } from './ExportForwardDeclaration'; +export { register as registerFunctionDeclaration } from './FunctionDeclaration'; +export { parseImportDeclaration } from './ImportDeclaration'; +export { register as registerNamespaceDeclaration } from './NamespaceDeclaration'; +export { register as registerTypeDeclaration } from './TypeDeclaration'; \ No newline at end of file diff --git a/src/syntax/declarations/visitors.ts b/src/syntax/declarations/visitors.ts new file mode 100644 index 0000000..49880a8 --- /dev/null +++ b/src/syntax/declarations/visitors.ts @@ -0,0 +1,7 @@ +export { ConstantDeclarationVisitor, AnonymousConstantDeclarationVisitor } from './ConstantDeclaration'; +export { ExportDeclarationVisitor } from './ExportDeclaration'; +export { ExportForwardDeclarationVisitor } from './ExportForwardDeclaration'; +export { FunctionDeclarationVisitor, AnonymousFunctionDeclarationVisitor } from './FunctionDeclaration'; +export { ImportDeclarationVisitor } from './ImportDeclaration'; +export { NamespaceDeclarationVisitor, AnonymousNamespaceDeclarationVisitor } from './NamespaceDeclaration'; +export { TypeDeclarationVisitor, AnonymousTypeDeclarationVisitor } from './TypeDeclaration'; \ No newline at end of file diff --git a/src/syntax/environment.ts b/src/syntax/environment.ts index 29e620e..e4f981f 100644 --- a/src/syntax/environment.ts +++ b/src/syntax/environment.ts @@ -1,59 +1,7 @@ -import { FileRange } from '~/core'; +import { FileRange, CoreObject } from '~/core'; import { ParseFunc, Parser, ParseResult, select, seq, repeat } from '~/parser/parser'; -import { - // module - ModuleRoot, ImportDeclaration, ExportDeclaration, ExportForwardDeclaration, - // declaration - TypeDeclaration, FunctionDeclaration, ConstantDeclaration, NamespaceDeclaration, - // type - BuiltInType, StructType, TupleType, ArrayType, FunctionType, UnionType, IdentifierType, ParenthesizedType, - SpecificType, NamespaceAccessType, - // 
expression - IntegerLiteral, FloatLiteral, CharLiteral, BoolLiteral, StringLiteral, StructLiteral, - LambdaExpression, ParenthesizedExpression, TupleLiteral, IdentifierExpression, UnaryExpression, IfElseExpression, - FunctionApplication, BinaryExpression, ArrayAccess, FieldAccess, ArrayLiteral, VarDeclaration, - // statement - Block, ExpressionStatement, ForStatement, WhileStatement, DoWhileStatement, TryCatchStatement, ReturnStatement, - ThrowStatement, BreakStatement, ContinueStatement, - // other - Param, TypeParam -} from '.'; - -// all of the imports below are "internal" and required only for parsing -import { register as register_FunctionType } from './types/FunctionType'; -import { register as register_ParenthesizedType } from './types/ParenthesizedType'; -import { register as register_StructType } from './types/StructType'; -import { register as register_TupleType } from './types/TupleType'; -import { ArrayTypeSuffix } from './types/ArrayType'; -import { UnionTypeSuffix, register as register_UnionType } from './types/UnionType'; -import { NamespaceAccessTypeSuffix } from './types/NamespaceAccessType'; -import { SpecificTypeSuffix, register as register_SpecificTypeSuffix } from './types/SpecificType'; -import { register as register_StructLiteral } from './expressions/StructLiteral'; -import { register as register_LambdaExpression } from './expressions/LambdaExpression'; -import { register as register_ParenthesizedExpression } from './expressions/ParenthesizedExpression'; -import { register as register_TupleLiteral } from './expressions/TupleLiteral'; -import { register as register_ArrayLiteral } from './expressions/ArrayLiteral'; -import { register as register_VarDeclaration } from './expressions/VarDeclaration'; -import { PostfixExpressionSuffix, register as register_UnaryExpression } from './expressions/UnaryExpression'; -import { register as register_IfElseExpression } from './expressions/IfElseExpression'; -import { FunctionApplicationSuffix, register as register_FunctionApplication } from './expressions/FunctionApplication'; -import { BinaryExpressionSuffix, register as register_BinaryExpression } from './expressions/BinaryExpression'; -import { ArrayAccessSuffix, register as register_ArrayAccess } from './expressions/ArrayAccess'; -import { FieldAccessSuffix } from './expressions/FieldAccess'; -import { register as register_Block } from './statements/Block'; -import { register as register_ExpressionStatement } from './statements/ExpressionStatement'; -import { register as register_ForStatement } from './statements/ForStatement'; -import { register as register_WhileStatement } from './statements/WhileStatement'; -import { register as register_DoWhileStatement } from './statements/DoWhileStatement'; -import { register as register_TryCatchStatement } from './statements/TryCatchStatement'; -import { register as register_ReturnStatement } from './statements/ReturnStatement'; -import { register as register_ThrowStatement } from './statements/ThrowStatement'; -import { register as register_ExportDeclaration } from './declarations/ExportDeclaration'; -import { register as register_TypeDeclaration } from './declarations/TypeDeclaration'; -import { register as register_FunctionDeclaration } from './declarations/FunctionDeclaration'; -import { register as register_ConstantDeclaration } from './declarations/ConstantDeclaration'; -import { register as register_NamespaceDeclaration } from './declarations/NamespaceDeclaration'; -import { register as register_ModuleRoot } from './ModuleRoot'; 
+import * as syntax from '.'; +import * as parsing from './parsing'; /** * The full enumeration of types of syntax nodes in the language. @@ -67,9 +15,13 @@ export enum SyntaxType { // #endregion // #region declarations TypeDeclaration = 'TypeDeclaration', + AnonymousTypeDeclaration = 'AnonymousTypeDeclaration', FunctionDeclaration = 'FunctionDeclaration', + AnonymousFunctionDeclaration = 'AnonymousFunctionDeclaration', ConstantDeclaration = 'ConstantDeclaration', + AnonymousConstantDeclaration = 'AnonymousConstantDeclaration', NamespaceDeclaration = 'NamespaceDeclaration', + AnonymousNamespaceDeclaration = 'AnonymousNamespaceDeclaration', // #endregion // #region types BuiltInType = 'BuiltInType', @@ -152,128 +104,138 @@ export enum SyntaxType { */ export function SyntaxEnvironment() { // types - const { FunctionType } = register_FunctionType(TypeNode); - const { ParenthesizedType } = register_ParenthesizedType(TypeNode); - const { StructType } = register_StructType(TypeNode); - const { TupleType } = register_TupleType(TypeNode); - const { UnionTypeSuffix } = register_UnionType(TypeNode); - const { SpecificTypeSuffix, TypeArgList } = register_SpecificTypeSuffix(TypeNode); + const { parseFunctionType } = parsing.registerFunctionType(parseType); + const { parseParenthesizedType } = parsing.registerParenthesizedType(parseType); + const { parseStructType } = parsing.registerStructType(parseType); + const { parseTupleType } = parsing.registerTupleType(parseType); + const { parseUnionTypeSuffix } = parsing.registerUnionType(parseType); + const { parseSpecificTypeSuffix, parseTypeArgList } = parsing.registerSpecificType(parseType); // expressions - const { StructLiteral } = register_StructLiteral(Expression); - const { ParenthesizedExpression } = register_ParenthesizedExpression(Expression); - const { TupleLiteral } = register_TupleLiteral(Expression); - const { ArrayLiteral } = register_ArrayLiteral(Expression); - const { VarDeclaration } = register_VarDeclaration(Expression); - const { PrefixExpression, PostfixExpressionSuffix } = register_UnaryExpression(Expression); - const { IfElseExpression } = register_IfElseExpression(Expression); - const { FunctionApplicationSuffix } = register_FunctionApplication(Expression, TypeArgList); - const { BinaryExpressionSuffix } = register_BinaryExpression(Expression); - const { ArrayAccessSuffix } = register_ArrayAccess(Expression); + const { parseStructLiteral } = parsing.registerStructLiteral(parseExpression); + const { parseParenthesizedExpression } = parsing.registerParenthesizedExpression(parseExpression); + const { parseTupleLiteral } = parsing.registerTupleLiteral(parseExpression); + const { parseArrayLiteral } = parsing.registerArrayLiteral(parseExpression); + const { parseVarDeclaration } = parsing.registerVarDeclaration(parseExpression); + const { parsePrefixExpression, parsePostfixExpressionSuffix } = parsing.registerUnaryExpression(parseExpression); + const { parseIfElseExpression } = parsing.registerIfElseExpression(parseExpression); + const { parseFunctionApplicationSuffix } = parsing.registerFunctionApplication(parseExpression, parseTypeArgList); + const { parseBinaryExpressionSuffix } = parsing.registerBinaryExpression(parseExpression); + const { parseArrayAccessSuffix } = parsing.registerArrayAccess(parseExpression); // statements - const { Block } = register_Block(Statement); - const { ExpressionStatement } = register_ExpressionStatement(Expression); - const { ForStatement } = register_ForStatement(Expression, Statement); - const { 
WhileStatement } = register_WhileStatement(Expression, Statement); - const { DoWhileStatement } = register_DoWhileStatement(Expression, Statement); - const { ReturnStatement } = register_ReturnStatement(Expression); - const { ThrowStatement } = register_ThrowStatement(Expression); + const { parseBlock } = parsing.registerBlock(parseStatement); + const { parseExpressionStatement } = parsing.registerExpressionStatement(parseExpression); + const { parseForStatement } = parsing.registerForStatement(parseExpression, parseStatement); + const { parseWhileStatement } = parsing.registerWhileStatement(parseExpression, parseStatement); + const { parseDoWhileStatement } = parsing.registerDoWhileStatement(parseExpression, parseStatement); + const { parseReturnStatement } = parsing.registerReturnStatement(parseExpression); + const { parseThrowStatement } = parsing.registerThrowStatement(parseExpression); // declarations - const { TypeDeclaration, TypeParamList } = register_TypeDeclaration(TypeNode); - const { FunctionDeclaration, Param, FunctionBody } = register_FunctionDeclaration(TypeNode, Expression, Statement, Block, TypeParamList); - const { ConstantDeclaration } = register_ConstantDeclaration(Expression); + const { parseTypeDeclaration, parseAnonymousTypeDeclaration, parseTypeParamList } = parsing.registerTypeDeclaration(parseType); + const { parseFunctionDeclaration, parseAnonymousFunctionDeclaration, parseParam, parseFunctionBody } = parsing.registerFunctionDeclaration(parseType, parseExpression, parseStatement, parseBlock, parseTypeParamList); + const { parseConstantDeclaration, parseAnonymousConstantDeclaration } = parsing.registerConstantDeclaration(parseExpression); // requires Param/FunctionBody from FunctionDeclaration - const { LambdaExpression, ShorthandLambdaExpression } = register_LambdaExpression(Param, FunctionBody); - const { TryCatchStatement } = register_TryCatchStatement(Statement, Param); + const { parseLambdaExpression, parseShorthandLambdaExpression } = parsing.registerLambdaExpression(parseParam, parseFunctionBody); + const { parseTryCatchStatement } = parsing.registerTryCatchStatement(parseStatement, parseParam); // module - const { ExportDeclaration } = register_ExportDeclaration(Declaration); - const { NamespaceDeclaration } = register_NamespaceDeclaration(Declaration, ExportDeclaration); - const { ModuleRoot } = register_ModuleRoot(Declaration, ExportDeclaration); + const { parseExportDeclaration } = parsing.registerExportDeclaration(parseDeclaration, parseAnonymousDeclaration); + const { parseNamespaceDeclaration, parseAnonymousNamespaceDeclaration } = parsing.registerNamespaceDeclaration(parseDeclaration, parseExportDeclaration); + const { parseModuleRoot } = parsing.registerModuleRoot(parseDeclaration, parseExportDeclaration); - function Declaration(parser: Parser): ParseResult { + function parseDeclaration(parser: Parser): ParseResult { const fn: ParseFunc = select( - TypeDeclaration, - FunctionDeclaration, - ConstantDeclaration, - NamespaceDeclaration + parseTypeDeclaration, + parseFunctionDeclaration, + parseConstantDeclaration, + parseNamespaceDeclaration + ); + return fn(parser); + } + + function parseAnonymousDeclaration(parser: Parser): ParseResult { + const fn: ParseFunc = select( + parseAnonymousTypeDeclaration, + parseAnonymousFunctionDeclaration, + parseAnonymousConstantDeclaration, + parseAnonymousNamespaceDeclaration ); return fn(parser); } - function TypeNode(parser: Parser): ParseResult { - const fn: ParseFunc = seq( - select( - BuiltInType, // must be 
before IdentifierType - FunctionType, // must be before IdentifierType, ParenthesizedType, TupleType - ParenthesizedType, // must be before TupleType - StructType, - TupleType, - IdentifierType + function parseType(parser: Parser): ParseResult { + const fn: ParseFunc = seq( + select( + parsing.parseBuiltInType, // must be before IdentifierType + parseFunctionType, // must be before IdentifierType, ParenthesizedType, TupleType + parseParenthesizedType, // must be before TupleType + parseStructType, + parseTupleType, + parsing.parseIdentifierType ), - repeat(select( - ArrayTypeSuffix, - UnionTypeSuffix, - SpecificTypeSuffix, - NamespaceAccessTypeSuffix + repeat(select( + parsing.parseArrayTypeSuffix, + parseUnionTypeSuffix, + parseSpecificTypeSuffix, + parsing.parseNamespaceAccessTypeSuffix ), '*'), - ([base, suffixes]) => suffixes.reduce((base, suffix) => suffix.setBase(base), base) + ([base, suffixes]) => suffixes.reduce((base, suffix) => suffix.setBase(base), base) ); return fn(parser); } - function Expression(parser: Parser): ParseResult { + function parseExpression(parser: Parser): ParseResult { const fn: ParseFunc = seq( select( - IntegerLiteral, - FloatLiteral, - CharLiteral, - BoolLiteral, // must be before IdentifierExpression - StringLiteral, - StructLiteral, - LambdaExpression, // must be before TupleLiteral, ParenthesizedExpression - ParenthesizedExpression, // must be before TupleLiteral - TupleLiteral, - ArrayLiteral, - VarDeclaration, // must be before ShorthandLambdaExpression, IdentifierExpression - ShorthandLambdaExpression, // must be before IdentifierExpression - IdentifierExpression, - PrefixExpression, - IfElseExpression + parsing.parseIntegerLiteral, + parsing.parseFloatLiteral, + parsing.parseCharLiteral, + parsing.parseBoolLiteral, // must be before IdentifierExpression + parsing.parseStringLiteral, + parseStructLiteral, + parseLambdaExpression, // must be before TupleLiteral, ParenthesizedExpression + parseParenthesizedExpression, // must be before TupleLiteral + parseTupleLiteral, + parseArrayLiteral, + parseVarDeclaration, // must be before ShorthandLambdaExpression, IdentifierExpression + parseShorthandLambdaExpression, // must be before IdentifierExpression + parsing.parseIdentifierExpression, + parsePrefixExpression, + parseIfElseExpression ), repeat(select( - FunctionApplicationSuffix, // must be before BinaryExpression, PostfixExpression - BinaryExpressionSuffix, // must be before PostfixExpression - PostfixExpressionSuffix, - ArrayAccessSuffix, - FieldAccessSuffix + parseFunctionApplicationSuffix, // must be before BinaryExpression, PostfixExpression + parseBinaryExpressionSuffix, // must be before PostfixExpression + parsePostfixExpressionSuffix, + parseArrayAccessSuffix, + parsing.parseFieldAccessSuffix ), '*'), ([base, suffixes]) => suffixes.reduce((base, suffix) => suffix.setBase(base), base) ); return fn(parser); } - function Statement(parser: Parser): ParseResult { + function parseStatement(parser: Parser): ParseResult { const fn: ParseFunc = select( - Block, // must be before ExpressionStatement - ExpressionStatement, - ForStatement, - WhileStatement, - DoWhileStatement, - TryCatchStatement, - ReturnStatement, - ThrowStatement, - BreakStatement, - ContinueStatement + parseBlock, // must be before ExpressionStatement + parseExpressionStatement, + parseForStatement, + parseWhileStatement, + parseDoWhileStatement, + parseTryCatchStatement, + parseReturnStatement, + parseThrowStatement, + parsing.parseBreakStatement, + parsing.parseContinueStatement ); 
return fn(parser); } return { - ModuleRoot + parseModuleRoot }; } @@ -281,103 +243,123 @@ export function SyntaxEnvironment() { * The base type of all syntax nodes. * All nodes have: * - a location (range of text in a file) - * - a syntax type (the discriminant for the various node union types) */ -export interface NodeBase { - readonly location: FileRange; - readonly syntaxType: K; +export abstract class NodeBase extends CoreObject { + constructor( + readonly location: FileRange, + readonly syntaxType: K + ) { super() } } /** * The discriminated union of all declaration nodes */ export type Declaration = - | TypeDeclaration - | FunctionDeclaration - | ConstantDeclaration - | NamespaceDeclaration; + | syntax.TypeDeclaration + | syntax.FunctionDeclaration + | syntax.ConstantDeclaration + | syntax.NamespaceDeclaration; + +export function isDeclaration(node: NodeBase): node is Declaration { + return [ + SyntaxType.TypeDeclaration, + SyntaxType.ConstantDeclaration, + SyntaxType.FunctionDeclaration, + SyntaxType.NamespaceDeclaration + ].includes(node.syntaxType); +} + +/** + * The discriminated union of all anonymous declaration nodes + */ +export type AnonymousDeclaration = + | syntax.AnonymousTypeDeclaration + | syntax.AnonymousFunctionDeclaration + | syntax.AnonymousConstantDeclaration + | syntax.AnonymousNamespaceDeclaration; /** * The discriminated union of all type nodes */ -export type TypeNode = - | BuiltInType - | StructType - | TupleType - | ArrayType - | FunctionType - | UnionType - | IdentifierType - | ParenthesizedType - | SpecificType - | NamespaceAccessType; +export type Type = + | syntax.BuiltInType + | syntax.StructType + | syntax.TupleType + | syntax.ArrayType + | syntax.FunctionType + | syntax.UnionType + | syntax.IdentifierType + | syntax.ParenthesizedType + | syntax.SpecificType + | syntax.NamespaceAccessType; -type TypeNode_LeftRecursive = - | ArrayTypeSuffix - | UnionTypeSuffix - | NamespaceAccessTypeSuffix - | SpecificTypeSuffix; +type Type_LeftRecursive = + | parsing.ArrayTypeSuffix + | parsing.UnionTypeSuffix + | parsing.NamespaceAccessTypeSuffix + | parsing.SpecificTypeSuffix; /** * The discriminated union of all expression nodes */ export type Expression = - | IntegerLiteral - | FloatLiteral - | CharLiteral - | BoolLiteral - | StringLiteral - | StructLiteral - | TupleLiteral - | ArrayLiteral - | IdentifierExpression - | ParenthesizedExpression - | VarDeclaration - | UnaryExpression - | BinaryExpression - | FunctionApplication - | ArrayAccess - | FieldAccess - | IfElseExpression - | LambdaExpression; + | syntax.IntegerLiteral + | syntax.FloatLiteral + | syntax.CharLiteral + | syntax.BoolLiteral + | syntax.StringLiteral + | syntax.StructLiteral + | syntax.TupleLiteral + | syntax.ArrayLiteral + | syntax.IdentifierExpression + | syntax.ParenthesizedExpression + | syntax.VarDeclaration + | syntax.UnaryExpression + | syntax.BinaryExpression + | syntax.FunctionApplication + | syntax.ArrayAccess + | syntax.FieldAccess + | syntax.IfElseExpression + | syntax.LambdaExpression; type Expression_LeftRecursive = - | FunctionApplicationSuffix - | BinaryExpressionSuffix - | PostfixExpressionSuffix - | ArrayAccessSuffix - | FieldAccessSuffix; + | parsing.FunctionApplicationSuffix + | parsing.BinaryExpressionSuffix + | parsing.PostfixExpressionSuffix + | parsing.ArrayAccessSuffix + | parsing.FieldAccessSuffix; /** * The discriminated union of all statement nodes */ export type Statement = - | Block - | ExpressionStatement - | ForStatement - | WhileStatement - | DoWhileStatement - | 
TryCatchStatement - | ReturnStatement - | ThrowStatement - | BreakStatement - | ContinueStatement; + | syntax.Block + | syntax.ExpressionStatement + | syntax.ForStatement + | syntax.WhileStatement + | syntax.DoWhileStatement + | syntax.TryCatchStatement + | syntax.ReturnStatement + | syntax.ThrowStatement + | syntax.BreakStatement + | syntax.ContinueStatement; /** * The discriminated union of all syntax nodes */ export type Node = // module root is a special node type - | ModuleRoot + | syntax.ModuleRoot // types related to the module system - | ImportDeclaration - | ExportDeclaration - | ExportForwardDeclaration + | syntax.ImportDeclaration + | syntax.ExportDeclaration + | syntax.ExportForwardDeclaration + | AnonymousDeclaration // types that do not fit into any of the general categories - | TypeParam - | Param + | syntax.TypeParam + | syntax.Param // the general categories | Declaration - | TypeNode + | Type | Expression | Statement; diff --git a/src/syntax/expressions/ArrayAccess.ts b/src/syntax/expressions/ArrayAccess.ts index 730fec5..07c3ee4 100644 --- a/src/syntax/expressions/ArrayAccess.ts +++ b/src/syntax/expressions/ArrayAccess.ts @@ -1,35 +1,40 @@ import { NodeBase, SyntaxType, Expression } from '~/syntax/environment'; import { ParseFunc, seq, tok } from '~/parser/parser'; +import { FileRange } from '~/core'; -export interface ArrayAccess extends NodeBase { - target: Expression; - index: Expression; +export class ArrayAccess extends NodeBase { + constructor( + location: FileRange, + readonly target: Expression, + readonly index: Expression + ) { super(location, SyntaxType.ArrayAccess) } + + accept(visitor: ArrayAccessVisitor, param: P) { + return visitor.visitArrayAccess(this, param); + } +} + +export interface ArrayAccessVisitor { + visitArrayAccess(node: ArrayAccess, param: P): R; } -export interface ArrayAccessSuffix extends NodeBase { - index: Expression; - setBase(target: Expression): ArrayAccess; +export class ArrayAccessSuffix extends NodeBase { + constructor( + location: FileRange, + readonly index: Expression + ) { super(location, SyntaxType.ArrayAccess) } + + setBase = (target: Expression) => new ArrayAccess(this.location, target, this.index); } -export function register(Expression: ParseFunc) { - const ArrayAccessSuffix: ParseFunc = seq( +export function register(parseExpression: ParseFunc) { + const parseArrayAccessSuffix: ParseFunc = seq( tok('['), - Expression, + parseExpression, tok(']'), - ([_1, index, _2], location) => ({ - syntaxType: SyntaxType.ArrayAccess as SyntaxType.ArrayAccess, - location, - index, - setBase(target: Expression) { - return { - ...this, - target, - location: this.location.merge(target.location) - } - } - }) + ([_1, index, _2], location) => new ArrayAccessSuffix(location, index) ); - return { ArrayAccessSuffix }; + return { parseArrayAccessSuffix }; } diff --git a/src/syntax/expressions/ArrayLiteral.ts b/src/syntax/expressions/ArrayLiteral.ts index 98a5fcd..6f5bef7 100644 --- a/src/syntax/expressions/ArrayLiteral.ts +++ b/src/syntax/expressions/ArrayLiteral.ts @@ -1,22 +1,30 @@ import { SyntaxType, Expression, NodeBase } from '~/syntax/environment'; import { ParseFunc, seq, tok, repeat } from '~/parser/parser'; +import { FileRange } from '~/core'; -export interface ArrayLiteral extends NodeBase { - items: ReadonlyArray; +export class ArrayLiteral extends NodeBase { + constructor( + location: FileRange, + readonly items: ReadonlyArray + ) { super(location, SyntaxType.ArrayLiteral) } + + accept(visitor: ArrayLiteralVisitor, param: P) { + 
return visitor.visitArrayLiteral(this, param); + } +} + +export interface ArrayLiteralVisitor { + visitArrayLiteral(node: ArrayLiteral, param: P): R; } -export function register(Expression: ParseFunc) { - const ArrayLiteral: ParseFunc = seq( +export function register(parseExpression: ParseFunc) { + const parseArrayLiteral: ParseFunc = seq( tok('['), - repeat(Expression, '*', tok(',')), + repeat(parseExpression, '*', tok(',')), tok(']'), - ([_1, items, _2], location) => ({ - syntaxType: SyntaxType.ArrayLiteral as SyntaxType.ArrayLiteral, - location, - items - }) + ([_1, items, _2], location) => new ArrayLiteral(location, items) ); - return { ArrayLiteral }; + return { parseArrayLiteral }; } diff --git a/src/syntax/expressions/BinaryExpression.ts b/src/syntax/expressions/BinaryExpression.ts index ca97606..064d9ca 100644 --- a/src/syntax/expressions/BinaryExpression.ts +++ b/src/syntax/expressions/BinaryExpression.ts @@ -2,42 +2,46 @@ import { verifyMultiOperator, getOperatorMetadata } from '~/runtime/operators'; import { NodeBase, SyntaxType, Expression } from '~/syntax/environment'; import { Token, TokenType } from '~/parser/lexer'; import { ParseFunc, seq, repeat, tok } from '~/parser/parser'; +import { FileRange } from '~/core'; -export interface BinaryExpression extends NodeBase { - left: Expression; - symbol: Token; - right: Expression; +export class BinaryExpression extends NodeBase { + constructor( + location: FileRange, + readonly left: Expression, + readonly symbol: Token, + readonly right: Expression + ) { super(location, SyntaxType.BinaryExpression) } + + accept(visitor: BinaryExpressionVisitor, param: P) { + return visitor.visitBinaryExpression(this, param); + } +} + +export interface BinaryExpressionVisitor { + visitBinaryExpression(node: BinaryExpression, param: P): R; } -export interface BinaryExpressionSuffix extends NodeBase { - symbol: Token; - right: Expression; - setBase(left: Expression): BinaryExpression; +export class BinaryExpressionSuffix extends NodeBase { + constructor( + location: FileRange, + readonly symbol: Token, + readonly right: Expression + ) { super(location, SyntaxType.BinaryExpression) } + + // TODO: this will get run more than necessary + setBase = (left: Expression) => resolvePrecedence(new BinaryExpression(this.location, left, this.symbol, this.right)); } -export function register(Expression: ParseFunc) { - const BinaryExpressionSuffix: ParseFunc = seq( +export function register(parseExpression: ParseFunc) { + const parseBinaryExpressionSuffix: ParseFunc = seq( repeat(tok(TokenType.OPER), '+'), - Expression, - ([symbol, right], location) => ({ - syntaxType: SyntaxType.BinaryExpression as SyntaxType.BinaryExpression, - location, - symbol: verifyMultiOperator(symbol), // TODO: make sure this works - right, - setBase(left: Expression) { - return resolvePrecedence({ // TODO: this will get run more than necessary - syntaxType: this.syntaxType, - symbol: this.symbol, - right: this.right, - left, - location: this.location.merge(left.location) - }) - } - }) + parseExpression, + // TODO: make sure this works + ([symbol, right], location) => new BinaryExpressionSuffix(location, verifyMultiOperator(symbol), right) ); - return { BinaryExpressionSuffix }; + return { parseBinaryExpressionSuffix }; } /** @@ -80,7 +84,7 @@ function shouldPopOperator(nextToken: Token, stackToken: Token) { function binaryExpressionToList(exp: BinaryExpression) { const items: (Token | Expression)[] = []; // the tree is left-associative, so we assemble the list from right to left - let 
operToken = exp.symbol.clone(); + let operToken: Token = exp.symbol.clone(); let left = exp.left, right = exp.right; while (true) { items.unshift(right); @@ -98,11 +102,5 @@ function binaryExpressionToList(exp: BinaryExpression) { } function createNewBinExpression(right: Expression, left: Expression, oper: Token) { - return { - syntaxType: SyntaxType.BinaryExpression as SyntaxType.BinaryExpression, - location: left.location.merge(right.location), - left, - symbol: oper, - right - }; + return new BinaryExpression(left.location.merge(right.location), left, oper, right); } diff --git a/src/syntax/expressions/BoolLiteral.ts b/src/syntax/expressions/BoolLiteral.ts index e5e0550..fe94de6 100644 --- a/src/syntax/expressions/BoolLiteral.ts +++ b/src/syntax/expressions/BoolLiteral.ts @@ -1,17 +1,25 @@ import { NodeBase, SyntaxType } from '~/syntax/environment'; import { Token } from '~/parser/lexer'; import { ParseFunc, seq, select, tok } from '~/parser/parser'; +import { FileRange } from '~/core'; -export interface BoolLiteral extends NodeBase { - value: Token; +export class BoolLiteral extends NodeBase { + constructor( + location: FileRange, + readonly value: Token + ) { super(location, SyntaxType.BoolLiteral) } + + accept(visitor: BoolLiteralVisitor, param: P) { + return visitor.visitBoolLiteral(this, param); + } +} + +export interface BoolLiteralVisitor { + visitBoolLiteral(node: BoolLiteral, param: P): R; } -export const BoolLiteral: ParseFunc = seq( +export const parseBoolLiteral: ParseFunc = seq( select(tok('true'), tok('false')), - (value, location) => ({ - syntaxType: SyntaxType.BoolLiteral as SyntaxType.BoolLiteral, - location, - value - }) + (value, location) => new BoolLiteral(location, value) ); diff --git a/src/syntax/expressions/CharLiteral.ts b/src/syntax/expressions/CharLiteral.ts index c8d6c73..0fc80ff 100644 --- a/src/syntax/expressions/CharLiteral.ts +++ b/src/syntax/expressions/CharLiteral.ts @@ -1,17 +1,25 @@ import { NodeBase, SyntaxType } from '~/syntax/environment'; import { Token, TokenType } from '~/parser/lexer'; import { ParseFunc, seq, tok } from '~/parser/parser'; +import { FileRange } from '~/core'; -export interface CharLiteral extends NodeBase { - value: Token; +export class CharLiteral extends NodeBase { + constructor( + location: FileRange, + readonly value: Token + ) { super(location, SyntaxType.CharLiteral) } + + accept(visitor: CharLiteralVisitor, param: P) { + return visitor.visitCharLiteral(this, param); + } +} + +export interface CharLiteralVisitor { + visitCharLiteral(node: CharLiteral, param: P): R; } -export const CharLiteral: ParseFunc = seq( +export const parseCharLiteral: ParseFunc = seq( tok(TokenType.CHARACTER_LITERAL), - (value, location) => ({ - syntaxType: SyntaxType.CharLiteral as SyntaxType.CharLiteral, - location, - value - }) + (value, location) => new CharLiteral(location, value) ); diff --git a/src/syntax/expressions/FieldAccess.ts b/src/syntax/expressions/FieldAccess.ts index 92a95f4..c0592e6 100644 --- a/src/syntax/expressions/FieldAccess.ts +++ b/src/syntax/expressions/FieldAccess.ts @@ -1,31 +1,36 @@ import { NodeBase, SyntaxType, Expression } from '~/syntax/environment'; import { Token, TokenType } from '~/parser/lexer'; import { ParseFunc, seq, tok } from '~/parser/parser'; +import { FileRange } from '~/core'; -export interface FieldAccess extends NodeBase { - target: Expression; - field: Token; +export class FieldAccess extends NodeBase { + constructor( + location: FileRange, + readonly target: Expression, + readonly field: Token + ) 
{ super(location, SyntaxType.FieldAccess) } + + accept(visitor: FieldAccessVisitor, param: P) { + return visitor.visitFieldAccess(this, param); + } +} + +export interface FieldAccessVisitor { + visitFieldAccess(node: FieldAccess, param: P): R; } -export interface FieldAccessSuffix extends NodeBase { - field: Token; - setBase(target: Expression): FieldAccess; +export class FieldAccessSuffix extends NodeBase { + constructor( + location: FileRange, + readonly field: Token + ) { super(location, SyntaxType.FieldAccess) } + + setBase = (target: Expression) => new FieldAccess(this.location.merge(target.location), target, this.field); } -export const FieldAccessSuffix: ParseFunc = seq( +export const parseFieldAccessSuffix: ParseFunc = seq( tok('.'), tok(TokenType.IDENT), - ([_, field], location) => ({ - syntaxType: SyntaxType.FieldAccess as SyntaxType.FieldAccess, - location, - field, - setBase(target: Expression) { - return { - ...this, - target, - location: this.location.merge(target.location) - } - } - }) + ([_, field], location) => new FieldAccessSuffix(location, field) ); diff --git a/src/syntax/expressions/FloatLiteral.ts b/src/syntax/expressions/FloatLiteral.ts index 4285d46..fcda0b9 100644 --- a/src/syntax/expressions/FloatLiteral.ts +++ b/src/syntax/expressions/FloatLiteral.ts @@ -1,17 +1,25 @@ import { NodeBase, SyntaxType } from '~/syntax/environment'; import { Token, TokenType } from '~/parser/lexer'; import { ParseFunc, seq, tok } from '~/parser/parser'; +import { FileRange } from '~/core'; -export interface FloatLiteral extends NodeBase { - value: Token; +export class FloatLiteral extends NodeBase { + constructor( + location: FileRange, + readonly value: Token + ) { super(location, SyntaxType.FloatLiteral) } + + accept(visitor: FloatLiteralVisitor, param: P) { + return visitor.visitFloatLiteral(this, param); + } +} + +export interface FloatLiteralVisitor { + visitFloatLiteral(node: FloatLiteral, param: P): R; } -export const FloatLiteral: ParseFunc = seq( +export const parseFloatLiteral: ParseFunc = seq( tok(TokenType.FLOAT_LITERAL), - (value, location) => ({ - syntaxType: SyntaxType.FloatLiteral as SyntaxType.FloatLiteral, - location, - value - }) + (value, location) => new FloatLiteral(location, value) ); diff --git a/src/syntax/expressions/FunctionApplication.ts b/src/syntax/expressions/FunctionApplication.ts index 7419d2c..25fda5b 100644 --- a/src/syntax/expressions/FunctionApplication.ts +++ b/src/syntax/expressions/FunctionApplication.ts @@ -1,39 +1,43 @@ -import { NodeBase, SyntaxType, Expression, TypeNode } from '~/syntax/environment'; +import { NodeBase, SyntaxType, Expression, Type } from '~/syntax/environment'; import { ParseFunc, seq, tok, optional, repeat } from '~/parser/parser'; +import { FileRange } from '~/core'; -export interface FunctionApplication extends NodeBase { - target: Expression; - typeArgs: TypeNode[]; - args: Expression[]; +export class FunctionApplication extends NodeBase { + constructor( + location: FileRange, + readonly target: Expression, + readonly typeArgs: Optional, + readonly args: Expression[] + ) { super(location, SyntaxType.FunctionApplication) } + + accept(visitor: FunctionApplicationVisitor, param: P) { + return visitor.visitFunctionApplication(this, param); + } +} + +export interface FunctionApplicationVisitor { + visitFunctionApplication(node: FunctionApplication, param: P): R; } -export interface FunctionApplicationSuffix extends NodeBase { - typeArgs: TypeNode[]; - args: Expression[]; - setBase(target: Expression): FunctionApplication; 
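Left-recursive forms (field access, array access, function application, binary and postfix expressions) are parsed as suffix nodes whose setBase method attaches the already-parsed left-hand side; the environment then folds the suffixes onto the base, which is what `suffixes.reduce((base, suffix) => suffix.setBase(base), base)` does. A minimal sketch of that fold, with toy stand-ins for the real FieldAccess/FieldAccessSuffix classes:

interface Expr { describe(): string; }

class Ident implements Expr {
    constructor(readonly name: string) {}
    describe() { return this.name; }
}

class FieldAccess implements Expr {
    constructor(readonly target: Expr, readonly field: string) {}
    describe() { return `${this.target.describe()}.${this.field}`; }
}

// A suffix only stores what sits to the right of the base; setBase attaches the
// already-parsed left side and yields the combined node.
class FieldAccessSuffix {
    constructor(readonly field: string) {}
    setBase = (target: Expr): Expr => new FieldAccess(target, this.field);
}

const base: Expr = new Ident('point');
const suffixes = [new FieldAccessSuffix('position'), new FieldAccessSuffix('x')];
const expr = suffixes.reduce<Expr>((acc, suffix) => suffix.setBase(acc), base);
console.log(expr.describe()); // point.position.x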
+export class FunctionApplicationSuffix extends NodeBase { + constructor( + location: FileRange, + readonly typeArgs: Optional, + readonly args: Expression[] + ) { super(location, SyntaxType.FunctionApplication) } + + setBase = (target: Expression) => new FunctionApplication(this.location.merge(target.location), target, this.typeArgs, this.args); } -export function register(Expression: ParseFunc, TypeArgList: ParseFunc) { - const FunctionApplicationSuffix: ParseFunc = seq( - optional(TypeArgList), +export function register(parseExpression: ParseFunc, parseTypeArgList: ParseFunc) { + const parseFunctionApplicationSuffix: ParseFunc = seq( + optional(parseTypeArgList), tok('('), - repeat(Expression, '*', tok(',')), + repeat(parseExpression, '*', tok(',')), tok(')'), - ([typeArgs, _1, args, _2], location) => ({ - syntaxType: SyntaxType.FunctionApplication as SyntaxType.FunctionApplication, - location, - typeArgs: typeArgs || [], - args, - setBase(target: Expression) { - return { - ...this, - target, - location: this.location.merge(target.location) - } - } - }) + ([typeArgs, _1, args, _2], location) => new FunctionApplicationSuffix(location, typeArgs, args) ); - return { FunctionApplicationSuffix }; + return { parseFunctionApplicationSuffix }; } diff --git a/src/syntax/expressions/IdentifierExpression.ts b/src/syntax/expressions/IdentifierExpression.ts index 8ae7d3a..98382cf 100644 --- a/src/syntax/expressions/IdentifierExpression.ts +++ b/src/syntax/expressions/IdentifierExpression.ts @@ -1,17 +1,25 @@ import { NodeBase, SyntaxType } from '~/syntax/environment'; import { Token, TokenType } from '~/parser/lexer'; import { ParseFunc, seq, tok } from '~/parser/parser'; +import { FileRange } from '~/core'; -export interface IdentifierExpression extends NodeBase { - name: Token; +export class IdentifierExpression extends NodeBase { + constructor( + location: FileRange, + readonly name: Token + ) { super(location, SyntaxType.IdentifierExpression) } + + accept(visitor: IdentifierExpressionVisitor, param: P) { + return visitor.visitIdentifierExpression(this, param); + } +} + +export interface IdentifierExpressionVisitor { + visitIdentifierExpression(node: IdentifierExpression, param: P): R; } -export const IdentifierExpression: ParseFunc = seq( +export const parseIdentifierExpression: ParseFunc = seq( tok(TokenType.IDENT), - (name, location) => ({ - syntaxType: SyntaxType.IdentifierExpression as SyntaxType.IdentifierExpression, - location, - name - }) + (name, location) => new IdentifierExpression(location, name) ); diff --git a/src/syntax/expressions/IfElseExpression.ts b/src/syntax/expressions/IfElseExpression.ts index 80bdf6c..22c9448 100644 --- a/src/syntax/expressions/IfElseExpression.ts +++ b/src/syntax/expressions/IfElseExpression.ts @@ -1,30 +1,36 @@ import { NodeBase, SyntaxType, Expression } from '~/syntax/environment'; import { ParseFunc, seq, tok } from '~/parser/parser'; +import { FileRange } from '~/core'; -export interface IfElseExpression extends NodeBase { - condition: Expression; - consequent: Expression; - alternate: Expression; +export class IfElseExpression extends NodeBase { + constructor( + location: FileRange, + readonly condition: Expression, + readonly consequent: Expression, + readonly alternate: Expression + ) { super(location, SyntaxType.IfElseExpression) } + + accept(visitor: IfElseExpressionVisitor, param: P) { + return visitor.visitIfElseExpression(this, param); + } +} + +export interface IfElseExpressionVisitor { + visitIfElseExpression(node: IfElseExpression, param: 
P): R; } -export function register(Expression: ParseFunc) { - const IfElseExpression: ParseFunc = seq( +export function register(parseExpression: ParseFunc) { + const parseIfElseExpression: ParseFunc = seq( tok('if'), tok('('), - Expression, + parseExpression, tok(')'), - Expression, + parseExpression, tok('else'), - Expression, - ([_1, _2, condition, _3, consequent, _4, alternate], location) => ({ - syntaxType: SyntaxType.IfElseExpression as SyntaxType.IfElseExpression, - location, - condition, - consequent, - alternate - }) + parseExpression, + ([_1, _2, condition, _3, consequent, _4, alternate], location) => new IfElseExpression(location, condition, consequent, alternate) ); - return { IfElseExpression }; + return { parseIfElseExpression }; } diff --git a/src/syntax/expressions/IntegerLiteral.ts b/src/syntax/expressions/IntegerLiteral.ts index 8c1e33c..8f56e36 100644 --- a/src/syntax/expressions/IntegerLiteral.ts +++ b/src/syntax/expressions/IntegerLiteral.ts @@ -1,17 +1,25 @@ import { NodeBase, SyntaxType } from '~/syntax/environment'; import { Token, TokenType } from '~/parser/lexer'; import { ParseFunc, seq, tok } from '~/parser/parser'; +import { FileRange } from '~/core'; -export interface IntegerLiteral extends NodeBase { - value: Token; +export class IntegerLiteral extends NodeBase { + constructor( + location: FileRange, + readonly value: Token + ) { super(location, SyntaxType.IntegerLiteral) } + + accept(visitor: IntegerLiteralVisitor, param: P) { + return visitor.visitIntegerLiteral(this, param); + } +} + +export interface IntegerLiteralVisitor { + visitIntegerLiteral(node: IntegerLiteral, param: P): R; } -export const IntegerLiteral: ParseFunc = seq( +export const parseIntegerLiteral: ParseFunc = seq( tok(TokenType.INTEGER_LITERAL), - (value, location) => ({ - syntaxType: SyntaxType.IntegerLiteral as SyntaxType.IntegerLiteral, - location, - value - }) + (value, location) => new IntegerLiteral(location, value) ); diff --git a/src/syntax/expressions/LambdaExpression.ts b/src/syntax/expressions/LambdaExpression.ts index d1861d1..9878af0 100644 --- a/src/syntax/expressions/LambdaExpression.ts +++ b/src/syntax/expressions/LambdaExpression.ts @@ -2,55 +2,52 @@ import { NodeBase, SyntaxType, Statement, Expression } from '~/syntax/environmen import { Param } from '~/syntax'; import { Token, TokenType } from '~/parser/lexer'; import { ParseFunc, seq, tok, repeat, select } from '~/parser/parser'; +import { FileRange } from '~/core'; -export interface LambdaExpression extends NodeBase { - params: ReadonlyArray; - body: Expression | Statement; +export class LambdaExpression extends NodeBase { + constructor( + location: FileRange, + readonly params: ReadonlyArray, + readonly body: Expression | Statement + ) { super(location, SyntaxType.LambdaExpression) } + + accept(visitor: LambdaExpressionVisitor, param: P) { + return visitor.visitLambdaExpression(this, param); + } +} + +export interface LambdaExpressionVisitor { + visitLambdaExpression(node: LambdaExpression, param: P): R; } -export function register(Param: ParseFunc, FunctionBody: ParseFunc) { +export function register(parseParam: ParseFunc, parseFunctionBody: ParseFunc) { /** * LambdaExpression ::= '(' (Param | IDENT)(* sep ',') ')' '=>' FunctionBody */ - const LambdaExpression: ParseFunc = seq( + const parseLambdaExpression: ParseFunc = seq( tok('('), repeat(select( - Param, + parseParam, tok(TokenType.IDENT) ), '*', tok(',')), tok(')'), tok('=>'), - FunctionBody, - ([_1, params, _2, _3, body], location) => ({ - syntaxType: 
SyntaxType.LambdaExpression as SyntaxType.LambdaExpression, - location, - params: params.map(p => p instanceof Token ? lambdaParam(p) : p), - body - }) + parseFunctionBody, + ([_1, params, _2, _3, body], location) => new LambdaExpression(location, params.map(p => p instanceof Token ? lambdaParam(p) : p), body) ); /** * ShorthandLambdaExpression ::= IDENT '=>' FunctionBody */ - const ShorthandLambdaExpression: ParseFunc = seq( + const parseShorthandLambdaExpression: ParseFunc = seq( tok(TokenType.IDENT), tok('=>'), - FunctionBody, - ([param, _, body], location) => ({ - syntaxType: SyntaxType.LambdaExpression as SyntaxType.LambdaExpression, - location, - params: [lambdaParam(param)], - body - }) + parseFunctionBody, + ([param, _, body], location) => new LambdaExpression(location, [lambdaParam(param)], body) ); - return { LambdaExpression, ShorthandLambdaExpression }; + return { parseLambdaExpression, parseShorthandLambdaExpression }; } -const lambdaParam = (p: Token): Param => ({ - syntaxType: SyntaxType.Param as SyntaxType.Param, - location: p.location, - name: p, - typeNode: null -}); +const lambdaParam = (p: Token): Param => new Param(p.location, p, null); diff --git a/src/syntax/expressions/ParenthesizedExpression.ts b/src/syntax/expressions/ParenthesizedExpression.ts index 4030f5a..a53d7f8 100644 --- a/src/syntax/expressions/ParenthesizedExpression.ts +++ b/src/syntax/expressions/ParenthesizedExpression.ts @@ -1,22 +1,30 @@ import { NodeBase, SyntaxType, Expression } from '~/syntax/environment'; import { ParseFunc, seq, tok } from '~/parser/parser'; +import { FileRange } from '~/core'; -export interface ParenthesizedExpression extends NodeBase { - inner: Expression; +export class ParenthesizedExpression extends NodeBase { + constructor( + location: FileRange, + readonly inner: Expression + ) { super(location, SyntaxType.ParenthesizedExpression) } + + accept(visitor: ParenthesizedExpressionVisitor, param: P) { + return visitor.visitParenthesizedExpression(this, param); + } +} + +export interface ParenthesizedExpressionVisitor { + visitParenthesizedExpression(node: ParenthesizedExpression, param: P): R; } -export function register(Expression: ParseFunc) { - const ParenthesizedExpression: ParseFunc = seq( +export function register(parseExpression: ParseFunc) { + const parseParenthesizedExpression: ParseFunc = seq( tok('('), - Expression, + parseExpression, tok(')'), - ([_1, inner, _2], location) => ({ - syntaxType: SyntaxType.ParenthesizedExpression as SyntaxType.ParenthesizedExpression, - location, - inner - }) + ([_1, inner, _2], location) => new ParenthesizedExpression(location, inner) ); - return { ParenthesizedExpression }; + return { parseParenthesizedExpression }; } diff --git a/src/syntax/expressions/StringLiteral.ts b/src/syntax/expressions/StringLiteral.ts index e2e0187..2d3b6a0 100644 --- a/src/syntax/expressions/StringLiteral.ts +++ b/src/syntax/expressions/StringLiteral.ts @@ -1,17 +1,25 @@ import { NodeBase, SyntaxType } from '~/syntax/environment'; import { Token, TokenType } from '~/parser/lexer'; import { ParseFunc, seq, tok } from '~/parser/parser'; +import { FileRange } from '~/core'; -export interface StringLiteral extends NodeBase { - value: Token; +export class StringLiteral extends NodeBase { + constructor( + location: FileRange, + readonly value: Token + ) { super(location, SyntaxType.StringLiteral) } + + accept(visitor: StringLiteralVisitor, param: P) { + return visitor.visitStringLiteral(this, param); + } +} + +export interface StringLiteralVisitor { + 
visitStringLiteral(node: StringLiteral, param: P): R; } -export const StringLiteral: ParseFunc = seq( +export const parseStringLiteral: ParseFunc = seq( tok(TokenType.STRING_LITERAL), - (value, location) => ({ - syntaxType: SyntaxType.StringLiteral as SyntaxType.StringLiteral, - location, - value - }) + (value, location) => new StringLiteral(location, value) ); diff --git a/src/syntax/expressions/StructLiteral.ts b/src/syntax/expressions/StructLiteral.ts index aff77f2..789f2d4 100644 --- a/src/syntax/expressions/StructLiteral.ts +++ b/src/syntax/expressions/StructLiteral.ts @@ -1,6 +1,7 @@ import { NodeBase, SyntaxType, Expression } from '~/syntax/environment'; import { Token, TokenType } from '~/parser/lexer'; import { ParseFunc, seq, tok, repeat } from '~/parser/parser'; +import { FileRange } from '~/core'; interface StructEntry { @@ -8,26 +9,33 @@ interface StructEntry { value: Expression; } -export interface StructLiteral extends NodeBase { - entries: ReadonlyArray; +export class StructLiteral extends NodeBase { + constructor( + location: FileRange, + readonly entries: ReadonlyArray + ) { super(location, SyntaxType.StructLiteral) } + + accept(visitor: StructLiteralVisitor, param: P) { + return visitor.visitStructLiteral(this, param); + } +} + +export interface StructLiteralVisitor { + visitStructLiteral(node: StructLiteral, param: P): R; } -export function register(Expression: ParseFunc) { - const StructLiteral: ParseFunc = seq( +export function register(parseExpression: ParseFunc) { + const parseStructLiteral: ParseFunc = seq( tok('{'), repeat(seq( tok(TokenType.IDENT), tok(':'), - Expression, + parseExpression, ([key, _, value]) => ({ key, value }) ), '*', tok(',')), tok('}'), - ([_1, entries, _2], location) => ({ - syntaxType: SyntaxType.StructLiteral as SyntaxType.StructLiteral, - location, - entries - }) + ([_1, entries, _2], location) => new StructLiteral(location, entries) ); - return { StructLiteral }; + return { parseStructLiteral }; } diff --git a/src/syntax/expressions/TupleLiteral.ts b/src/syntax/expressions/TupleLiteral.ts index 20b42d7..64f5936 100644 --- a/src/syntax/expressions/TupleLiteral.ts +++ b/src/syntax/expressions/TupleLiteral.ts @@ -1,22 +1,30 @@ import { NodeBase, SyntaxType, Expression } from '~/syntax/environment'; import { ParseFunc, seq, tok, repeat } from '~/parser/parser'; +import { FileRange } from '~/core'; -export interface TupleLiteral extends NodeBase { - items: ReadonlyArray; +export class TupleLiteral extends NodeBase { + constructor( + location: FileRange, + readonly items: ReadonlyArray + ) { super(location, SyntaxType.TupleLiteral) } + + accept(visitor: TupleLiteralVisitor, param: P) { + return visitor.visitTupleLiteral(this, param); + } +} + +export interface TupleLiteralVisitor { + visitTupleLiteral(node: TupleLiteral, param: P): R; } -export function register(Expression: ParseFunc) { - const TupleLiteral: ParseFunc = seq( +export function register(parseExpression: ParseFunc) { + const parseTupleLiteral: ParseFunc = seq( tok('('), - repeat(Expression, '*', tok(',')), + repeat(parseExpression, '*', tok(',')), tok(')'), - ([_1, items, _2], location) => ({ - syntaxType: SyntaxType.TupleLiteral as SyntaxType.TupleLiteral, - location, - items - }) + ([_1, items, _2], location) => new TupleLiteral(location, items) ); - return { TupleLiteral }; + return { parseTupleLiteral }; } diff --git a/src/syntax/expressions/UnaryExpression.ts b/src/syntax/expressions/UnaryExpression.ts index e25d85f..412b96a 100644 --- 
a/src/syntax/expressions/UnaryExpression.ts +++ b/src/syntax/expressions/UnaryExpression.ts @@ -2,49 +2,48 @@ import { NodeBase, SyntaxType, Expression } from '~/syntax/environment'; import { Token, TokenType } from '~/parser/lexer'; import { ParseFunc, seq, tok, repeat } from '~/parser/parser'; import { verifyMultiOperator } from '~/runtime/operators'; +import { FileRange } from '~/core'; -export interface UnaryExpression extends NodeBase { - target: Expression; - symbol: Token; - prefix: boolean; +export class UnaryExpression extends NodeBase { + constructor( + location: FileRange, + readonly target: Expression, + readonly symbol: Token, + readonly prefix: boolean + ) { super(location, SyntaxType.UnaryExpression) } + + accept(visitor: UnaryExpressionVisitor, param: P) { + return visitor.visitUnaryExpression(this, param); + } +} + +export interface UnaryExpressionVisitor { + visitUnaryExpression(node: UnaryExpression, param: P): R; } -export interface PostfixExpressionSuffix extends NodeBase { - symbol: Token; - prefix: false; - setBase(target: Expression): UnaryExpression; +export class PostfixExpressionSuffix extends NodeBase { + constructor( + location: FileRange, + readonly symbol: Token + ) { super(location, SyntaxType.UnaryExpression) } + + setBase = (target: Expression) => new UnaryExpression(this.location.merge(target.location), target, this.symbol, false) } -export function register(Expression: ParseFunc) { - const PrefixExpression: ParseFunc = seq( +export function register(parseExpression: ParseFunc) { + const parsePrefixExpression: ParseFunc = seq( repeat(tok(TokenType.OPER), '+'), - Expression, - ([symbol, target], location) => ({ - syntaxType: SyntaxType.UnaryExpression as SyntaxType.UnaryExpression, - location, - target, - symbol: verifyMultiOperator(symbol), // TODO: make sure this works - prefix: true - }) + parseExpression, + // TODO: make sure this works + ([symbol, target], location) => new UnaryExpression(location, target, verifyMultiOperator(symbol), true) ); - const PostfixExpressionSuffix: ParseFunc = seq( + const parsePostfixExpressionSuffix: ParseFunc = seq( repeat(tok(TokenType.OPER), '+'), - (symbol, location) => ({ - syntaxType: SyntaxType.UnaryExpression as SyntaxType.UnaryExpression, - location, - symbol: verifyMultiOperator(symbol), // TODO: make sure this works - prefix: false as false, - setBase(target: Expression) { - return { - ...this, - target, - location: this.location.merge(target.location) - } - } - }) + // TODO: make sure this works + (symbol, location) => new PostfixExpressionSuffix(location, verifyMultiOperator(symbol)) ); - return { PrefixExpression, PostfixExpressionSuffix }; + return { parsePrefixExpression, parsePostfixExpressionSuffix }; } diff --git a/src/syntax/expressions/VarDeclaration.ts b/src/syntax/expressions/VarDeclaration.ts index 4018aa3..8ddd9d9 100644 --- a/src/syntax/expressions/VarDeclaration.ts +++ b/src/syntax/expressions/VarDeclaration.ts @@ -1,25 +1,32 @@ import { NodeBase, SyntaxType, Expression } from '~/syntax/environment'; import { Token, TokenType } from '~/parser/lexer'; import { ParseFunc, seq, tok } from '~/parser/parser'; +import { FileRange } from '~/core'; -export interface VarDeclaration extends NodeBase { - name: Token; - init: Expression; +export class VarDeclaration extends NodeBase { + constructor( + location: FileRange, + readonly name: Token, + readonly init: Expression + ) { super(location, SyntaxType.VarDeclaration) } + + accept(visitor: VarDeclarationVisitor, param: P) { + return 
visitor.visitVarDeclaration(this, param); + } +} + +export interface VarDeclarationVisitor { + visitVarDeclaration(node: VarDeclaration, param: P): R; } -export function register(Expression: ParseFunc) { - const VarDeclaration: ParseFunc = seq( +export function register(parseExpression: ParseFunc) { + const parseVarDeclaration: ParseFunc = seq( tok(TokenType.IDENT), tok('='), - Expression, - ([name, _, init], location) => ({ - syntaxType: SyntaxType.VarDeclaration as SyntaxType.VarDeclaration, - location, - name, - init - }) + parseExpression, + ([name, _, init], location) => new VarDeclaration(location, name, init) ); - return { VarDeclaration }; + return { parseVarDeclaration }; } diff --git a/src/syntax/expressions/index.ts b/src/syntax/expressions/index.ts index aafc13c..37120ab 100644 --- a/src/syntax/expressions/index.ts +++ b/src/syntax/expressions/index.ts @@ -1,18 +1,18 @@ -export { ArrayAccess } from './ArrayAccess'; -export { ArrayLiteral } from './ArrayLiteral'; -export { UnaryExpression } from './UnaryExpression'; -export { BinaryExpression } from './BinaryExpression'; -export { IdentifierExpression } from './IdentifierExpression'; -export { BoolLiteral } from './BoolLiteral'; -export { CharLiteral } from './CharLiteral'; -export { FieldAccess } from './FieldAccess'; -export { FloatLiteral } from './FloatLiteral'; -export { FunctionApplication } from './FunctionApplication'; -export { IfElseExpression } from './IfElseExpression'; -export { IntegerLiteral } from './IntegerLiteral'; -export { TupleLiteral } from './TupleLiteral'; -export { ParenthesizedExpression } from './ParenthesizedExpression'; -export { LambdaExpression } from './LambdaExpression'; -export { StringLiteral } from './StringLiteral'; -export { StructLiteral } from './StructLiteral'; -export { VarDeclaration } from './VarDeclaration'; +export { ArrayAccess, ArrayAccessVisitor } from './ArrayAccess'; +export { ArrayLiteral, ArrayLiteralVisitor } from './ArrayLiteral'; +export { UnaryExpression, UnaryExpressionVisitor } from './UnaryExpression'; +export { BinaryExpression, BinaryExpressionVisitor } from './BinaryExpression'; +export { IdentifierExpression, IdentifierExpressionVisitor } from './IdentifierExpression'; +export { BoolLiteral, BoolLiteralVisitor } from './BoolLiteral'; +export { CharLiteral, CharLiteralVisitor } from './CharLiteral'; +export { FieldAccess, FieldAccessVisitor } from './FieldAccess'; +export { FloatLiteral, FloatLiteralVisitor } from './FloatLiteral'; +export { FunctionApplication, FunctionApplicationVisitor } from './FunctionApplication'; +export { IfElseExpression, IfElseExpressionVisitor } from './IfElseExpression'; +export { IntegerLiteral, IntegerLiteralVisitor } from './IntegerLiteral'; +export { TupleLiteral, TupleLiteralVisitor } from './TupleLiteral'; +export { ParenthesizedExpression, ParenthesizedExpressionVisitor } from './ParenthesizedExpression'; +export { LambdaExpression, LambdaExpressionVisitor } from './LambdaExpression'; +export { StringLiteral, StringLiteralVisitor } from './StringLiteral'; +export { StructLiteral, StructLiteralVisitor } from './StructLiteral'; +export { VarDeclaration, VarDeclarationVisitor } from './VarDeclaration'; diff --git a/src/syntax/expressions/parsing.ts b/src/syntax/expressions/parsing.ts new file mode 100644 index 0000000..a665262 --- /dev/null +++ b/src/syntax/expressions/parsing.ts @@ -0,0 +1,18 @@ +export { register as registerArrayAccess, ArrayAccessSuffix } from './ArrayAccess'; +export { register as registerArrayLiteral } from 
'./ArrayLiteral'; +export { register as registerBinaryExpression, BinaryExpressionSuffix } from './BinaryExpression'; +export { parseBoolLiteral } from './BoolLiteral'; +export { parseCharLiteral } from './CharLiteral'; +export { parseFieldAccessSuffix, FieldAccessSuffix } from './FieldAccess'; +export { parseFloatLiteral } from './FloatLiteral'; +export { register as registerFunctionApplication, FunctionApplicationSuffix } from './FunctionApplication'; +export { parseIdentifierExpression } from './IdentifierExpression'; +export { register as registerIfElseExpression } from './IfElseExpression'; +export { parseIntegerLiteral } from './IntegerLiteral'; +export { register as registerLambdaExpression } from './LambdaExpression'; +export { register as registerParenthesizedExpression } from './ParenthesizedExpression'; +export { parseStringLiteral } from './StringLiteral'; +export { register as registerStructLiteral } from './StructLiteral'; +export { register as registerTupleLiteral } from './TupleLiteral'; +export { register as registerUnaryExpression, PostfixExpressionSuffix } from './UnaryExpression'; +export { register as registerVarDeclaration } from './VarDeclaration'; \ No newline at end of file diff --git a/src/syntax/expressions/visitors.ts b/src/syntax/expressions/visitors.ts new file mode 100644 index 0000000..e1a1069 --- /dev/null +++ b/src/syntax/expressions/visitors.ts @@ -0,0 +1,18 @@ +export { ArrayAccessVisitor } from './ArrayAccess'; +export { ArrayLiteralVisitor } from './ArrayLiteral'; +export { BinaryExpressionVisitor } from './BinaryExpression'; +export { BoolLiteralVisitor } from './BoolLiteral'; +export { CharLiteralVisitor } from './CharLiteral'; +export { FieldAccessVisitor } from './FieldAccess'; +export { FloatLiteralVisitor } from './FloatLiteral'; +export { FunctionApplicationVisitor } from './FunctionApplication'; +export { IdentifierExpressionVisitor } from './IdentifierExpression'; +export { IfElseExpressionVisitor } from './IfElseExpression'; +export { IntegerLiteralVisitor } from './IntegerLiteral'; +export { LambdaExpressionVisitor } from './LambdaExpression'; +export { ParenthesizedExpressionVisitor } from './ParenthesizedExpression'; +export { StringLiteralVisitor } from './StringLiteral'; +export { StructLiteralVisitor } from './StructLiteral'; +export { TupleLiteralVisitor } from './TupleLiteral'; +export { UnaryExpressionVisitor } from './UnaryExpression'; +export { VarDeclarationVisitor } from './VarDeclaration'; \ No newline at end of file diff --git a/src/syntax/index.ts b/src/syntax/index.ts index a77dc63..cdc3f60 100644 --- a/src/syntax/index.ts +++ b/src/syntax/index.ts @@ -6,6 +6,6 @@ export * from './statements'; export * from './declarations'; export { ModuleRoot } from './ModuleRoot'; -export { Declaration, TypeNode, Expression, Statement } from './environment'; +export { Declaration, Type, Expression, Statement } from './environment'; export * from './visitor'; diff --git a/src/syntax/parsing.ts b/src/syntax/parsing.ts new file mode 100644 index 0000000..d0fcbf8 --- /dev/null +++ b/src/syntax/parsing.ts @@ -0,0 +1,5 @@ +export * from './declarations/parsing'; +export * from './expressions/parsing'; +export * from './statements/parsing'; +export * from './types/parsing'; +export { register as registerModuleRoot } from './ModuleRoot'; \ No newline at end of file diff --git a/src/syntax/statements/Block.ts b/src/syntax/statements/Block.ts index 60ebb59..97969ca 100644 --- a/src/syntax/statements/Block.ts +++ b/src/syntax/statements/Block.ts @@ 
-1,22 +1,30 @@ import { NodeBase, SyntaxType, Statement } from '~/syntax/environment'; import { ParseFunc, seq, tok, repeat } from '~/parser/parser'; +import { FileRange } from '~/core'; -export interface Block extends NodeBase { - statements: ReadonlyArray; +export class Block extends NodeBase { + constructor( + location: FileRange, + readonly statements: ReadonlyArray + ) { super(location, SyntaxType.Block) } + + accept(visitor: BlockVisitor, param: P) { + return visitor.visitBlock(this, param); + } +} + +export interface BlockVisitor { + visitBlock(node: Block, param: P): R; } -export function register(Statement: ParseFunc) { - const Block: ParseFunc = seq( +export function register(parseStatement: ParseFunc) { + const parseBlock: ParseFunc = seq( tok('{'), - repeat(Statement, '*'), + repeat(parseStatement, '*'), tok('}'), - ([_1, statements, _2], location) => ({ - syntaxType: SyntaxType.Block as SyntaxType.Block, - location, - statements - }) + ([_1, statements, _2], location) => new Block(location, statements) ); - return { Block }; + return { parseBlock }; } diff --git a/src/syntax/statements/BreakStatement.ts b/src/syntax/statements/BreakStatement.ts index 675cab0..2626a0f 100644 --- a/src/syntax/statements/BreakStatement.ts +++ b/src/syntax/statements/BreakStatement.ts @@ -1,18 +1,26 @@ import { NodeBase, SyntaxType } from '~/syntax/environment'; import { Token, TokenType } from '~/parser/lexer'; import { ParseFunc, seq, tok, optional } from '~/parser/parser'; +import { FileRange } from '~/core'; -export interface BreakStatement extends NodeBase { - loopNumber: Optional; +export class BreakStatement extends NodeBase { + constructor( + location: FileRange, + readonly loopNumber: Optional + ) { super(location, SyntaxType.BreakStatement) } + + accept(visitor: BreakStatementVisitor, param: P) { + return visitor.visitBreakStatement(this, param); + } +} + +export interface BreakStatementVisitor { + visitBreakStatement(node: BreakStatement, param: P): R; } -export const BreakStatement: ParseFunc = seq( +export const parseBreakStatement: ParseFunc = seq( tok('break'), optional(tok(TokenType.INTEGER_LITERAL)), - ([_, loopNumber], location) => ({ - syntaxType: SyntaxType.BreakStatement as SyntaxType.BreakStatement, - location, - loopNumber - }) + ([_, loopNumber], location) => new BreakStatement(location, loopNumber) ); diff --git a/src/syntax/statements/ContinueStatement.ts b/src/syntax/statements/ContinueStatement.ts index 8b3a961..551c4e6 100644 --- a/src/syntax/statements/ContinueStatement.ts +++ b/src/syntax/statements/ContinueStatement.ts @@ -1,18 +1,26 @@ import { NodeBase, SyntaxType } from '~/syntax/environment'; import { Token, TokenType } from '~/parser/lexer'; import { ParseFunc, seq, tok, optional } from '~/parser/parser'; +import { FileRange } from '~/core'; -export interface ContinueStatement extends NodeBase { - loopNumber: Optional; +export class ContinueStatement extends NodeBase { + constructor( + location: FileRange, + readonly loopNumber: Optional + ) { super(location, SyntaxType.ContinueStatement) } + + accept(visitor: ContinueStatementVisitor, param: P) { + return visitor.visitContinueStatement(this, param); + } +} + +export interface ContinueStatementVisitor { + visitContinueStatement(node: ContinueStatement, param: P): R; } -export const ContinueStatement: ParseFunc = seq( +export const parseContinueStatement: ParseFunc = seq( tok('continue'), optional(tok(TokenType.INTEGER_LITERAL)), - ([_, loopNumber], location) => ({ - syntaxType: SyntaxType.ContinueStatement as 
SyntaxType.ContinueStatement, - location, - loopNumber - }) + ([_, loopNumber], location) => new ContinueStatement(location, loopNumber) ); diff --git a/src/syntax/statements/DoWhileStatement.ts b/src/syntax/statements/DoWhileStatement.ts index b0fe07b..372755b 100644 --- a/src/syntax/statements/DoWhileStatement.ts +++ b/src/syntax/statements/DoWhileStatement.ts @@ -1,27 +1,34 @@ import { NodeBase, SyntaxType, Statement, Expression } from '~/syntax/environment'; import { ParseFunc, seq, tok } from '~/parser/parser'; +import { FileRange } from '~/core'; -export interface DoWhileStatement extends NodeBase { - body: Statement; - condition: Expression; +export class DoWhileStatement extends NodeBase { + constructor( + location: FileRange, + readonly body: Statement, + readonly condition: Expression + ) { super(location, SyntaxType.DoWhileStatement) } + + accept(visitor: DoWhileStatementVisitor, param: P) { + return visitor.visitDoWhileStatement(this, param); + } +} + +export interface DoWhileStatementVisitor { + visitDoWhileStatement(node: DoWhileStatement, param: P): R; } -export function register(Expression: ParseFunc, Statement: ParseFunc) { - const DoWhileStatement: ParseFunc = seq( +export function register(parseExpression: ParseFunc, parseStatement: ParseFunc) { + const parseDoWhileStatement: ParseFunc = seq( tok('do'), - Statement, + parseStatement, tok('while'), tok('('), - Expression, + parseExpression, tok(')'), - ([_1, body, _2, _3, condition, _4], location) => ({ - syntaxType: SyntaxType.DoWhileStatement as SyntaxType.DoWhileStatement, - location, - body, - condition - }) + ([_1, body, _2, _3, condition, _4], location) => new DoWhileStatement(location, body, condition) ); - return { DoWhileStatement }; + return { parseDoWhileStatement }; } diff --git a/src/syntax/statements/ExpressionStatement.ts b/src/syntax/statements/ExpressionStatement.ts index 93520aa..7c3b74e 100644 --- a/src/syntax/statements/ExpressionStatement.ts +++ b/src/syntax/statements/ExpressionStatement.ts @@ -1,20 +1,28 @@ import { NodeBase, SyntaxType, Expression } from '~/syntax/environment'; import { ParseFunc, seq } from '~/parser/parser'; +import { FileRange } from '~/core'; -export interface ExpressionStatement extends NodeBase { - expression: Expression; +export class ExpressionStatement extends NodeBase { + constructor( + location: FileRange, + readonly expression: Expression + ) { super(location, SyntaxType.ExpressionStatement) } + + accept(visitor: ExpressionStatementVisitor, param: P) { + return visitor.visitExpressionStatement(this, param); + } +} + +export interface ExpressionStatementVisitor { + visitExpressionStatement(node: ExpressionStatement, param: P): R; } -export function register(Expression: ParseFunc) { - const ExpressionStatement: ParseFunc = seq( - Expression, - (expression, location) => ({ - syntaxType: SyntaxType.ExpressionStatement as SyntaxType.ExpressionStatement, - location, - expression - }) +export function register(parseExpression: ParseFunc) { + const parseExpressionStatement: ParseFunc = seq( + parseExpression, + (expression, location) => new ExpressionStatement(location, expression) ); - return { ExpressionStatement }; + return { parseExpressionStatement }; } diff --git a/src/syntax/statements/ForStatement.ts b/src/syntax/statements/ForStatement.ts index 1220f98..a1ddba7 100644 --- a/src/syntax/statements/ForStatement.ts +++ b/src/syntax/statements/ForStatement.ts @@ -1,31 +1,37 @@ import { NodeBase, SyntaxType, Expression, Statement } from '~/syntax/environment'; import { 
Token, TokenType } from '~/parser/lexer'; import { ParseFunc, seq, tok } from '~/parser/parser'; +import { FileRange } from '~/core'; -export interface ForStatement extends NodeBase { - variable: Token; - iterable: Expression; - body: Statement; +export class ForStatement extends NodeBase { + constructor( + location: FileRange, + readonly variable: Token, + readonly iterable: Expression, + readonly body: Statement + ) { super(location, SyntaxType.ForStatement) } + + accept(visitor: ForStatementVisitor, param: P) { + return visitor.visitForStatement(this, param); + } +} + +export interface ForStatementVisitor { + visitForStatement(node: ForStatement, param: P): R; } -export function register(Expression: ParseFunc, Statement: ParseFunc) { - const ForStatement: ParseFunc = seq( +export function register(parseExpression: ParseFunc, parseStatement: ParseFunc) { + const parseForStatement: ParseFunc = seq( tok('for'), tok('('), tok(TokenType.IDENT), tok('in'), - Expression, + parseExpression, tok(')'), - Statement, - ([_1, _2, variable, _3, iterable, _4, body], location) => ({ - syntaxType: SyntaxType.ForStatement as SyntaxType.ForStatement, - location, - variable, - iterable, - body - }) + parseStatement, + ([_1, _2, variable, _3, iterable, _4, body], location) => new ForStatement(location, variable, iterable, body) ); - return { ForStatement }; + return { parseForStatement }; } diff --git a/src/syntax/statements/ReturnStatement.ts b/src/syntax/statements/ReturnStatement.ts index de2c5ff..23b1c8d 100644 --- a/src/syntax/statements/ReturnStatement.ts +++ b/src/syntax/statements/ReturnStatement.ts @@ -1,21 +1,29 @@ import { NodeBase, SyntaxType, Expression } from '~/syntax/environment'; import { ParseFunc, seq, tok, optional } from '~/parser/parser'; +import { FileRange } from '~/core'; -export interface ReturnStatement extends NodeBase { - exp: Optional; +export class ReturnStatement extends NodeBase { + constructor( + location: FileRange, + readonly exp: Optional + ) { super(location, SyntaxType.ReturnStatement) } + + accept(visitor: ReturnStatementVisitor, param: P) { + return visitor.visitReturnStatement(this, param); + } +} + +export interface ReturnStatementVisitor { + visitReturnStatement(node: ReturnStatement, param: P): R; } -export function register(Expression: ParseFunc) { - const ReturnStatement: ParseFunc = seq( +export function register(parseExpression: ParseFunc) { + const parseReturnStatement: ParseFunc = seq( tok('return'), - optional(Expression), - ([_, exp], location) => ({ - syntaxType: SyntaxType.ReturnStatement as SyntaxType.ReturnStatement, - location, - exp - }) + optional(parseExpression), + ([_, exp], location) => new ReturnStatement(location, exp) ); - return { ReturnStatement }; + return { parseReturnStatement }; } diff --git a/src/syntax/statements/ThrowStatement.ts b/src/syntax/statements/ThrowStatement.ts index 9ae082c..75a4d0b 100644 --- a/src/syntax/statements/ThrowStatement.ts +++ b/src/syntax/statements/ThrowStatement.ts @@ -1,21 +1,29 @@ import { NodeBase, SyntaxType, Expression } from '~/syntax/environment'; import { ParseFunc, seq, tok } from '~/parser/parser'; +import { FileRange } from '~/core'; -export interface ThrowStatement extends NodeBase { - exp: Expression; +export class ThrowStatement extends NodeBase { + constructor( + location: FileRange, + readonly exp: Expression + ) { super(location, SyntaxType.ThrowStatement) } + + accept(visitor: ThrowStatementVisitor, param: P) { + return visitor.visitThrowStatement(this, param); + } +} + +export interface 
ThrowStatementVisitor { + visitThrowStatement(node: ThrowStatement, param: P): R; } -export function register(Expression: ParseFunc) { - const ThrowStatement: ParseFunc = seq( +export function register(parseExpression: ParseFunc) { + const parseThrowStatement: ParseFunc = seq( tok('throw'), - Expression, - ([_, exp], location) => ({ - syntaxType: SyntaxType.ThrowStatement as SyntaxType.ThrowStatement, - location, - exp - }) + parseExpression, + ([_, exp], location) => new ThrowStatement(location, exp) ); - return { ThrowStatement }; + return { parseThrowStatement }; } diff --git a/src/syntax/statements/TryCatchStatement.ts b/src/syntax/statements/TryCatchStatement.ts index 0812678..22bfcff 100644 --- a/src/syntax/statements/TryCatchStatement.ts +++ b/src/syntax/statements/TryCatchStatement.ts @@ -1,6 +1,7 @@ import { NodeBase, SyntaxType, Statement } from '~/syntax/environment'; import { Param } from '~/syntax'; import { ParseFunc, seq, tok, repeat, optional } from '~/parser/parser'; +import { FileRange } from '~/core'; interface Catch { @@ -8,41 +9,46 @@ interface Catch { body: Statement; } -export interface TryCatchStatement extends NodeBase { - try: Statement; - catches: ReadonlyArray; - finally: Optional; +export class TryCatchStatement extends NodeBase { + constructor( + location: FileRange, + readonly _try: Statement, + readonly catches: ReadonlyArray, + readonly _finally: Optional + ) { super(location, SyntaxType.TryCatchStatement) } + + accept(visitor: TryCatchStatementVisitor, param: P) { + return visitor.visitTryCatchStatement(this, param); + } +} + +export interface TryCatchStatementVisitor { + visitTryCatchStatement(node: TryCatchStatement, param: P): R; } -export function register(Statement: ParseFunc, Param: ParseFunc) { - const CatchClause: ParseFunc = seq( +export function register(parseStatement: ParseFunc, parseParam: ParseFunc) { + const parseCatchClause: ParseFunc = seq( tok('catch'), tok('('), - Param, + parseParam, tok(')'), - Statement, + parseStatement, ([_1, _2, param, _3, body]) => ({ param, body }) ); - const FinallyClause: ParseFunc = seq( + const parseFinallyClause: ParseFunc = seq( tok('finally'), - Statement, + parseStatement, ([_, body]) => body ); - const TryCatchStatement: ParseFunc = seq( + const parseTryCatchStatement: ParseFunc = seq( tok('try'), - Statement, - repeat(CatchClause, '+'), - optional(FinallyClause), - ([_, _try, catches, _finally], location) => ({ - syntaxType: SyntaxType.TryCatchStatement as SyntaxType.TryCatchStatement, - location, - try: _try, - catches, - finally: _finally - }) + parseStatement, + repeat(parseCatchClause, '+'), + optional(parseFinallyClause), + ([_, _try, catches, _finally], location) => new TryCatchStatement(location, _try, catches, _finally) ); - return { TryCatchStatement }; + return { parseTryCatchStatement }; } diff --git a/src/syntax/statements/WhileStatement.ts b/src/syntax/statements/WhileStatement.ts index 1d94b66..8922560 100644 --- a/src/syntax/statements/WhileStatement.ts +++ b/src/syntax/statements/WhileStatement.ts @@ -1,26 +1,33 @@ import { NodeBase, SyntaxType, Expression, Statement } from '~/syntax/environment'; import { ParseFunc, seq, tok } from '~/parser/parser'; +import { FileRange } from '~/core'; -export interface WhileStatement extends NodeBase { - condition: Expression; - body: Statement; +export class WhileStatement extends NodeBase { + constructor( + location: FileRange, + readonly condition: Expression, + readonly body: Statement + ) { super(location, SyntaxType.WhileStatement) } + + 
accept(visitor: WhileStatementVisitor, param: P) { + return visitor.visitWhileStatement(this, param); + } +} + +export interface WhileStatementVisitor { + visitWhileStatement(node: WhileStatement, param: P): R; } -export function register(Expression: ParseFunc, Statement: ParseFunc) { - const WhileStatement: ParseFunc = seq( +export function register(parseExpression: ParseFunc, parseStatement: ParseFunc) { + const parseWhileStatement: ParseFunc = seq( tok('while'), tok('('), - Expression, + parseExpression, tok(')'), - Statement, - ([_1, _2, condition, _3, body], location) => ({ - syntaxType: SyntaxType.WhileStatement as SyntaxType.WhileStatement, - location, - condition, - body - }) + parseStatement, + ([_1, _2, condition, _3, body], location) => new WhileStatement(location, condition, body) ); - return { WhileStatement }; + return { parseWhileStatement }; } diff --git a/src/syntax/statements/index.ts b/src/syntax/statements/index.ts index c2ef5d2..6954fb4 100644 --- a/src/syntax/statements/index.ts +++ b/src/syntax/statements/index.ts @@ -1,10 +1,10 @@ -export { ExpressionStatement } from './ExpressionStatement'; -export { Block } from './Block'; -export { BreakStatement } from './BreakStatement'; -export { ContinueStatement } from './ContinueStatement'; -export { DoWhileStatement } from './DoWhileStatement'; -export { ForStatement } from './ForStatement'; -export { ReturnStatement } from './ReturnStatement'; -export { ThrowStatement } from './ThrowStatement'; -export { TryCatchStatement } from './TryCatchStatement'; -export { WhileStatement } from './WhileStatement'; +export { ExpressionStatement, ExpressionStatementVisitor } from './ExpressionStatement'; +export { Block, BlockVisitor } from './Block'; +export { BreakStatement, BreakStatementVisitor } from './BreakStatement'; +export { ContinueStatement, ContinueStatementVisitor } from './ContinueStatement'; +export { DoWhileStatement, DoWhileStatementVisitor } from './DoWhileStatement'; +export { ForStatement, ForStatementVisitor } from './ForStatement'; +export { ReturnStatement, ReturnStatementVisitor } from './ReturnStatement'; +export { ThrowStatement, ThrowStatementVisitor } from './ThrowStatement'; +export { TryCatchStatement, TryCatchStatementVisitor } from './TryCatchStatement'; +export { WhileStatement, WhileStatementVisitor } from './WhileStatement'; diff --git a/src/syntax/statements/parsing.ts b/src/syntax/statements/parsing.ts new file mode 100644 index 0000000..12d7732 --- /dev/null +++ b/src/syntax/statements/parsing.ts @@ -0,0 +1,10 @@ +export { register as registerBlock } from './Block'; +export { parseBreakStatement } from './BreakStatement'; +export { parseContinueStatement } from './ContinueStatement'; +export { register as registerDoWhileStatement } from './DoWhileStatement'; +export { register as registerExpressionStatement } from './ExpressionStatement'; +export { register as registerForStatement } from './ForStatement'; +export { register as registerReturnStatement } from './ReturnStatement'; +export { register as registerThrowStatement } from './ThrowStatement'; +export { register as registerTryCatchStatement } from './TryCatchStatement'; +export { register as registerWhileStatement } from './WhileStatement'; \ No newline at end of file diff --git a/src/syntax/statements/visitors.ts b/src/syntax/statements/visitors.ts new file mode 100644 index 0000000..0a924b8 --- /dev/null +++ b/src/syntax/statements/visitors.ts @@ -0,0 +1,10 @@ +export { BlockVisitor } from './Block'; +export { BreakStatementVisitor } from 
'./BreakStatement'; +export { ContinueStatementVisitor } from './ContinueStatement'; +export { DoWhileStatementVisitor } from './DoWhileStatement'; +export { ExpressionStatementVisitor } from './ExpressionStatement'; +export { ForStatementVisitor } from './ForStatement'; +export { ReturnStatementVisitor } from './ReturnStatement'; +export { ThrowStatementVisitor } from './ThrowStatement'; +export { TryCatchStatementVisitor } from './TryCatchStatement'; +export { WhileStatementVisitor } from './WhileStatement'; \ No newline at end of file diff --git a/src/syntax/types/ArrayType.ts b/src/syntax/types/ArrayType.ts index e1a8f37..f1d2861 100644 --- a/src/syntax/types/ArrayType.ts +++ b/src/syntax/types/ArrayType.ts @@ -1,27 +1,31 @@ -import { NodeBase, SyntaxType, TypeNode } from '~/syntax/environment'; +import { NodeBase, SyntaxType, Type } from '~/syntax/environment'; import { ParseFunc, seq, tok } from '~/parser/parser'; +import { FileRange } from '~/core'; -export interface ArrayType extends NodeBase { - baseType: TypeNode; +export class ArrayType extends NodeBase { + constructor( + location: FileRange, + readonly baseType: Type + ) { super(location, SyntaxType.ArrayType) } + + accept(visitor: ArrayTypeVisitor, param: P) { + return visitor.visitArrayType(this, param); + } +} + +export interface ArrayTypeVisitor { + visitArrayType(node: ArrayType, param: P): R; } -export interface ArrayTypeSuffix extends NodeBase { - setBase(baseType: TypeNode): ArrayType; +export class ArrayTypeSuffix extends NodeBase { + constructor(location: FileRange) { super(location, SyntaxType.ArrayType) } + + setBase = (baseType: Type) => new ArrayType(this.location.merge(baseType.location), baseType); } -export const ArrayTypeSuffix: ParseFunc = seq( +export const parseArrayTypeSuffix: ParseFunc = seq( tok('['), tok(']'), - ([_1, _2], location) => ({ - syntaxType: SyntaxType.ArrayType as SyntaxType.ArrayType, - location, - setBase(baseType: TypeNode) { - return { - ...this, - baseType, - location: this.location.merge(baseType.location) - } - } - }) + ([_1, _2], location) => new ArrayTypeSuffix(location) ); diff --git a/src/syntax/types/BuiltInType.ts b/src/syntax/types/BuiltInType.ts index c094dad..70aa1e4 100644 --- a/src/syntax/types/BuiltInType.ts +++ b/src/syntax/types/BuiltInType.ts @@ -1,13 +1,25 @@ import { ParseFunc, tok, select, seq } from '~/parser/parser'; import { NodeBase, SyntaxType } from '~/syntax/environment'; import { Token } from '~/parser/lexer'; +import { FileRange } from '~/core'; -export interface BuiltInType extends NodeBase { - name: Token; +export class BuiltInType extends NodeBase { + constructor( + location: FileRange, + readonly name: Token + ) { super(location, SyntaxType.BuiltInType) } + + accept(visitor: BuiltInTypeVisitor, param: P) { + return visitor.visitBuiltInType(this, param); + } +} + +export interface BuiltInTypeVisitor { + visitBuiltInType(node: BuiltInType, param: P): R; } -export const BuiltInType: ParseFunc = seq(select( +export const parseBuiltInType: ParseFunc = seq(select( tok('u8'), tok('i8'), tok('byte'), tok('u16'), tok('i16'), tok('short'), tok('u32'), tok('i32'), tok('integer'), @@ -21,8 +33,4 @@ export const BuiltInType: ParseFunc = seq(select( tok('void'), tok('any'), tok('never') -), (name, location) => ({ - location, - syntaxType: SyntaxType.BuiltInType as SyntaxType.BuiltInType, - name -})); +), (name, location) => new BuiltInType(location, name)); diff --git a/src/syntax/types/FunctionType.ts b/src/syntax/types/FunctionType.ts index 917a3b0..5078257 100644 
--- a/src/syntax/types/FunctionType.ts +++ b/src/syntax/types/FunctionType.ts @@ -1,26 +1,33 @@ import { seq, tok, ParseFunc, repeat } from '~/parser/parser'; -import { TypeNode, NodeBase, SyntaxType } from '~/syntax/environment'; +import { Type, NodeBase, SyntaxType } from '~/syntax/environment'; +import { FileRange } from '~/core'; -export interface FunctionType extends NodeBase { - paramTypes: TypeNode[]; - returnType: TypeNode; +export class FunctionType extends NodeBase { + constructor( + location: FileRange, + readonly paramTypes: Type[], + readonly returnType: Type + ) { super(location, SyntaxType.FunctionType) } + + accept(visitor: FunctionTypeVisitor, param: P) { + return visitor.visitFunctionType(this, param); + } +} + +export interface FunctionTypeVisitor { + visitFunctionType(node: FunctionType, param: P): R; } -export function register(TypeNode: ParseFunc) { - const FunctionType: ParseFunc = seq( +export function register(parseType: ParseFunc) { + const parseFunctionType: ParseFunc = seq( tok('('), - repeat(TypeNode, '*', tok(',')), + repeat(parseType, '*', tok(',')), tok(')'), tok('=>'), - TypeNode, - ([_1, paramTypes, _2, _3, returnType], location) => ({ - syntaxType: SyntaxType.FunctionType as SyntaxType.FunctionType, - location, - paramTypes, - returnType - }) + parseType, + ([_1, paramTypes, _2, _3, returnType], location) => new FunctionType(location, paramTypes, returnType) ); - return { FunctionType }; + return { parseFunctionType }; } diff --git a/src/syntax/types/IdentifierType.ts b/src/syntax/types/IdentifierType.ts index 9288a29..49090ee 100644 --- a/src/syntax/types/IdentifierType.ts +++ b/src/syntax/types/IdentifierType.ts @@ -1,18 +1,25 @@ import { NodeBase, SyntaxType } from '~/syntax/environment'; import { Token, TokenType } from '~/parser/lexer'; import { seq, tok, ParseFunc } from '~/parser/parser'; +import { FileRange } from '~/core'; +export class IdentifierType extends NodeBase { + constructor( + location: FileRange, + readonly name: Token + ) { super(location, SyntaxType.IdentifierType) } -export interface IdentifierType extends NodeBase { - name: Token; + accept(visitor: IdentifierTypeVisitor, param: P) { + return visitor.visitIdentifierType(this, param); + } } -export const IdentifierType: ParseFunc = seq( +export interface IdentifierTypeVisitor { + visitIdentifierType(node: IdentifierType, param: P): R; +} + +export const parseIdentifierType: ParseFunc = seq( tok(TokenType.IDENT), - (name, location) => ({ - syntaxType: SyntaxType.IdentifierType as SyntaxType.IdentifierType, - location, - name - }) + (name, location) => new IdentifierType(location, name) ); diff --git a/src/syntax/types/NamespaceAccessType.ts b/src/syntax/types/NamespaceAccessType.ts index 60d375d..bd6bc00 100644 --- a/src/syntax/types/NamespaceAccessType.ts +++ b/src/syntax/types/NamespaceAccessType.ts @@ -1,31 +1,36 @@ -import { NodeBase, SyntaxType, TypeNode } from '~/syntax/environment'; +import { NodeBase, SyntaxType, Type } from '~/syntax/environment'; import { Token, TokenType } from '~/parser/lexer'; import { ParseFunc, seq, tok } from '~/parser/parser'; +import { FileRange } from '~/core'; -export interface NamespaceAccessType extends NodeBase { - baseType: TypeNode; - typeName: Token; +export class NamespaceAccessType extends NodeBase { + constructor( + location: FileRange, + readonly baseType: Type, + readonly typeName: Token + ) { super(location, SyntaxType.NamespaceAccessType) } + + accept(visitor: NamespaceAccessTypeVisitor, param: P) { + return 
visitor.visitNamespaceAccessType(this, param); + } +} + +export interface NamespaceAccessTypeVisitor { + visitNamespaceAccessType(node: NamespaceAccessType, param: P): R; } -export interface NamespaceAccessTypeSuffix extends NodeBase { - typeName: Token; - setBase(baseType: TypeNode): NamespaceAccessType; +export class NamespaceAccessTypeSuffix extends NodeBase { + constructor( + location: FileRange, + readonly typeName: Token + ) { super(location, SyntaxType.NamespaceAccessType) } + + setBase = (baseType: Type) => new NamespaceAccessType(this.location.merge(baseType.location), baseType, this.typeName); } -export const NamespaceAccessTypeSuffix: ParseFunc = seq( +export const parseNamespaceAccessTypeSuffix: ParseFunc = seq( tok('.'), tok(TokenType.IDENT), - ([_1, typeName], location) => ({ - syntaxType: SyntaxType.NamespaceAccessType as SyntaxType.NamespaceAccessType, - location, - typeName, - setBase(baseType: TypeNode) { - return { - ...this, - baseType, - location: this.location.merge(baseType.location) - } - } - }) + ([_1, typeName], location) => new NamespaceAccessTypeSuffix(location, typeName) ); diff --git a/src/syntax/types/ParenthesizedType.ts b/src/syntax/types/ParenthesizedType.ts index e15501b..f473587 100644 --- a/src/syntax/types/ParenthesizedType.ts +++ b/src/syntax/types/ParenthesizedType.ts @@ -1,22 +1,30 @@ import { ParseFunc, seq, tok } from '~/parser/parser'; -import { TypeNode, NodeBase, SyntaxType } from '~/syntax/environment'; +import { Type, NodeBase, SyntaxType } from '~/syntax/environment'; +import { FileRange } from '~/core'; -export interface ParenthesizedType extends NodeBase { - inner: TypeNode; +export class ParenthesizedType extends NodeBase { + constructor( + location: FileRange, + readonly inner: Type + ) { super(location, SyntaxType.ParenthesizedType) } + + accept(visitor: ParenthesizedTypeVisitor, param: P) { + return visitor.visitParenthesizedType(this, param); + } +} + +export interface ParenthesizedTypeVisitor { + visitParenthesizedType(node: ParenthesizedType, param: P): R; } -export function register(TypeNode: ParseFunc) { - const ParenthesizedType: ParseFunc = seq( +export function register(parseTypeNode: ParseFunc) { + const parseParenthesizedType: ParseFunc = seq( tok('('), - TypeNode, + parseTypeNode, tok(')'), - ([_1, inner, _2], location) => ({ - syntaxType: SyntaxType.ParenthesizedType as SyntaxType.ParenthesizedType, - location, - inner - }) + ([_1, inner, _2], location) => new ParenthesizedType(location, inner) ); - return { ParenthesizedType }; + return { parseParenthesizedType }; } diff --git a/src/syntax/types/SpecificType.ts b/src/syntax/types/SpecificType.ts index 3a52a5d..5c913b0 100644 --- a/src/syntax/types/SpecificType.ts +++ b/src/syntax/types/SpecificType.ts @@ -1,40 +1,45 @@ -import { NodeBase, SyntaxType, TypeNode } from '~/syntax/environment'; +import { NodeBase, SyntaxType, Type } from '~/syntax/environment'; import { ParseFunc, seq, repeat, tok } from '~/parser/parser'; +import { FileRange } from '~/core'; -export interface SpecificType extends NodeBase { - typeNode: TypeNode; - typeArgs: ReadonlyArray; +export class SpecificType extends NodeBase { + constructor( + location: FileRange, + readonly typeNode: Type, + readonly typeArgs: ReadonlyArray + ) { super(location, SyntaxType.SpecificType) } + + accept(visitor: SpecificTypeVisitor, param: P) { + return visitor.visitSpecificType(this, param); + } +} + +export interface SpecificTypeVisitor { + visitSpecificType(node: SpecificType, param: P): R; } -export interface 
SpecificTypeSuffix extends NodeBase { - typeArgs: ReadonlyArray; - setBase(typeNode: TypeNode): SpecificType; +export class SpecificTypeSuffix extends NodeBase { + constructor( + location: FileRange, + readonly typeArgs: ReadonlyArray + ) { super(location, SyntaxType.SpecificType) } + + setBase = (typeNode: Type) => new SpecificType(this.location.merge(typeNode.location), typeNode, this.typeArgs); } -export function register(TypeNode: ParseFunc) { - const TypeArgList: ParseFunc = seq( +export function register(parseType: ParseFunc) { + const parseTypeArgList: ParseFunc = seq( tok('<'), - repeat(TypeNode, '*', tok(',')), + repeat(parseType, '*', tok(',')), tok('>'), ([_1, types, _2]) => types ); - const SpecificTypeSuffix: ParseFunc = seq( - TypeArgList, - (typeArgs, location) => ({ - syntaxType: SyntaxType.SpecificType as SyntaxType.SpecificType, - location, - typeArgs, - setBase(typeNode: TypeNode) { - return { - ...this, - typeNode, - location: this.location.merge(typeNode.location) - } - } - }) + const parseSpecificTypeSuffix: ParseFunc = seq( + parseTypeArgList, + (typeArgs, location) => new SpecificTypeSuffix(location, typeArgs) ); - return { SpecificTypeSuffix, TypeArgList }; + return { parseSpecificTypeSuffix, parseTypeArgList }; } diff --git a/src/syntax/types/StructType.ts b/src/syntax/types/StructType.ts index 13047f0..3e1da7b 100644 --- a/src/syntax/types/StructType.ts +++ b/src/syntax/types/StructType.ts @@ -1,32 +1,40 @@ -import { TypeNode, NodeBase, SyntaxType } from '~/syntax/environment'; +import { Type, NodeBase, SyntaxType } from '~/syntax/environment'; import { Token, TokenType } from '~/parser/lexer'; import { ParseFunc, seq, tok, repeat } from '~/parser/parser'; +import { FileRange } from '~/core'; interface Field { - typeNode: TypeNode; + typeNode: Type; name: Token; } -export interface StructType extends NodeBase { - fields: ReadonlyArray; +export class StructType extends NodeBase { + constructor( + location: FileRange, + readonly fields: ReadonlyArray + ) { super(location, SyntaxType.StructType) } + + accept(visitor: StructTypeVisitor, param: P) { + return visitor.visitStructType(this, param); + } +} + +export interface StructTypeVisitor { + visitStructType(node: StructType, param: P): R; } -export function register(TypeNode: ParseFunc) { - const StructType: ParseFunc = seq( +export function register(parseType: ParseFunc) { + const parseStructType: ParseFunc = seq( tok('{'), repeat(seq( - TypeNode, + parseType, tok(TokenType.IDENT), ([typeNode, name]) => ({ typeNode, name }) ), '*'), tok('}'), - ([_1, fields, _2], location) => ({ - syntaxType: SyntaxType.StructType as SyntaxType.StructType, - location, - fields - }) + ([_1, fields, _2], location) => new StructType(location, fields) ); - return { StructType }; + return { parseStructType }; } diff --git a/src/syntax/types/TupleType.ts b/src/syntax/types/TupleType.ts index b2058ba..8531bf6 100644 --- a/src/syntax/types/TupleType.ts +++ b/src/syntax/types/TupleType.ts @@ -1,23 +1,30 @@ -import { NodeBase, SyntaxType, TypeNode } from '~/syntax/environment'; +import { NodeBase, SyntaxType, Type } from '~/syntax/environment'; import { ParseFunc, seq, tok, repeat } from '~/parser/parser'; +import { FileRange } from '~/core'; +export class TupleType extends NodeBase { + constructor( + location: FileRange, + readonly types: ReadonlyArray + ) { super(location, SyntaxType.TupleType) } -export interface TupleType extends NodeBase { - types: ReadonlyArray; + accept(visitor: TupleTypeVisitor, param: P) { + return 
visitor.visitTupleType(this, param); + } } -export function register(TypeNode: ParseFunc) { - const TupleType: ParseFunc = seq( +export interface TupleTypeVisitor { + visitTupleType(node: TupleType, param: P): R; +} + +export function register(parseType: ParseFunc) { + const parseTupleType: ParseFunc = seq( tok('('), - repeat(TypeNode, '*', tok(',')), + repeat(parseType, '*', tok(',')), tok(')'), - ([_1, types, _2], location) => ({ - syntaxType: SyntaxType.TupleType as SyntaxType.TupleType, - location, - types - }) + ([_1, types, _2], location) => new TupleType(location, types) ); - return { TupleType }; + return { parseTupleType }; } diff --git a/src/syntax/types/UnionType.ts b/src/syntax/types/UnionType.ts index f3872bf..8fbae95 100644 --- a/src/syntax/types/UnionType.ts +++ b/src/syntax/types/UnionType.ts @@ -1,34 +1,39 @@ -import { NodeBase, SyntaxType, TypeNode } from '~/syntax/environment'; +import { NodeBase, SyntaxType, Type } from '~/syntax/environment'; import { ParseFunc, seq, tok } from '~/parser/parser'; +import { FileRange } from '~/core'; -export interface UnionType extends NodeBase { - left: TypeNode; - right: TypeNode; +export class UnionType extends NodeBase { + constructor( + location: FileRange, + readonly left: Type, + readonly right: Type + ) { super(location, SyntaxType.UnionType) } + + accept(visitor: UnionTypeVisitor, param: P) { + return visitor.visitUnionType(this, param); + } +} + +export interface UnionTypeVisitor { + visitUnionType(node: UnionType, param: P): R; } -export interface UnionTypeSuffix extends NodeBase { - right: TypeNode; - setBase(left: TypeNode): UnionType; +export class UnionTypeSuffix extends NodeBase { + constructor( + location: FileRange, + readonly right: Type + ) { super(location, SyntaxType.UnionType) } + + setBase = (left: Type) => new UnionType(this.location.merge(left.location), left, this.right) } -export function register(TypeNode: ParseFunc) { - const UnionTypeSuffix: ParseFunc = seq( +export function register(parseType: ParseFunc) { + const parseUnionTypeSuffix: ParseFunc = seq( tok('|'), - TypeNode, - ([_1, right], location) => ({ - syntaxType: SyntaxType.UnionType as SyntaxType.UnionType, - location, - right, - setBase(left: TypeNode) { - return { - ...this, - left, - location: this.location.merge(left.location) - }; - } - }) + parseType, + ([_1, right], location) => new UnionTypeSuffix(location, right) ); - return { UnionTypeSuffix }; + return { parseUnionTypeSuffix }; } diff --git a/src/syntax/types/index.ts b/src/syntax/types/index.ts index 96abe52..f9d7935 100644 --- a/src/syntax/types/index.ts +++ b/src/syntax/types/index.ts @@ -1,10 +1,10 @@ -export { ArrayType } from './ArrayType'; -export { IdentifierType } from './IdentifierType'; -export { BuiltInType } from './BuiltInType'; -export { TupleType } from './TupleType'; -export { ParenthesizedType } from './ParenthesizedType'; -export { FunctionType } from './FunctionType'; -export { NamespaceAccessType } from './NamespaceAccessType'; -export { SpecificType } from './SpecificType'; -export { StructType } from './StructType'; -export { UnionType } from './UnionType'; +export { ArrayType, ArrayTypeVisitor } from './ArrayType'; +export { IdentifierType, IdentifierTypeVisitor } from './IdentifierType'; +export { BuiltInType, BuiltInTypeVisitor } from './BuiltInType'; +export { TupleType, TupleTypeVisitor } from './TupleType'; +export { ParenthesizedType, ParenthesizedTypeVisitor } from './ParenthesizedType'; +export { FunctionType, FunctionTypeVisitor } from './FunctionType'; 
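The diffs above all follow the same shape: each syntax interface becomes a class extending NodeBase, carrying its FileRange and SyntaxType, plus an accept method that dispatches to a one-method visitor interface. A minimal self-contained sketch of that shape, using stand-in BaseNode/Loc types rather than the project's real NodeBase and FileRange (which are defined elsewhere in this series):

// stand-ins for the project's FileRange and NodeBase, for illustration only
type Loc = { path: string; start: [number, number]; end: [number, number] };

abstract class BaseNode {
    constructor(readonly location: Loc) {}
}

class Break extends BaseNode {
    constructor(location: Loc, readonly loopNumber: number | null) { super(location); }

    // each node class dispatches to exactly one method of its own small visitor interface
    accept<P, R>(visitor: BreakVisitor<P, R>, param: P): R {
        return visitor.visitBreak(this, param);
    }
}

interface BreakVisitor<P, R> {
    visitBreak(node: Break, param: P): R;
}

// a concrete visitor implements the per-node interfaces it cares about; full visitors
// are built by intersecting these small interfaces, as visitor.ts does further below
const printer: BreakVisitor<void, string> = {
    visitBreak: node => node.loopNumber === null ? 'break' : `break ${node.loopNumber}`,
};

const stmt = new Break({ path: 'example', start: [1, 1], end: [1, 5] }, null);
console.log(stmt.accept(printer, undefined)); // prints "break"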
+export { NamespaceAccessType, NamespaceAccessTypeVisitor } from './NamespaceAccessType'; +export { SpecificType, SpecificTypeVisitor } from './SpecificType'; +export { StructType, StructTypeVisitor } from './StructType'; +export { UnionType, UnionTypeVisitor } from './UnionType'; diff --git a/src/syntax/types/parsing.ts b/src/syntax/types/parsing.ts new file mode 100644 index 0000000..c7a96a4 --- /dev/null +++ b/src/syntax/types/parsing.ts @@ -0,0 +1,10 @@ +export { parseArrayTypeSuffix, ArrayTypeSuffix } from './ArrayType'; +export { parseBuiltInType } from './BuiltInType'; +export { register as registerFunctionType } from './FunctionType'; +export { parseIdentifierType } from './IdentifierType'; +export { parseNamespaceAccessTypeSuffix, NamespaceAccessTypeSuffix } from './NamespaceAccessType'; +export { register as registerParenthesizedType } from './ParenthesizedType'; +export { register as registerSpecificType, SpecificTypeSuffix } from './SpecificType'; +export { register as registerStructType } from './StructType'; +export { register as registerTupleType } from './TupleType'; +export { register as registerUnionType, UnionTypeSuffix } from './UnionType'; \ No newline at end of file diff --git a/src/syntax/types/visitors.ts b/src/syntax/types/visitors.ts new file mode 100644 index 0000000..7f43d01 --- /dev/null +++ b/src/syntax/types/visitors.ts @@ -0,0 +1,10 @@ +export { ArrayTypeVisitor } from './ArrayType'; +export { BuiltInTypeVisitor } from './BuiltInType'; +export { FunctionTypeVisitor } from './FunctionType'; +export { IdentifierTypeVisitor } from './IdentifierType'; +export { NamespaceAccessTypeVisitor } from './NamespaceAccessType'; +export { ParenthesizedTypeVisitor } from './ParenthesizedType'; +export { SpecificTypeVisitor } from './SpecificType'; +export { StructTypeVisitor } from './StructType'; +export { TupleTypeVisitor } from './TupleType'; +export { UnionTypeVisitor } from './UnionType'; \ No newline at end of file diff --git a/src/syntax/visitor.ts b/src/syntax/visitor.ts index 3989f56..78ccbc0 100644 --- a/src/syntax/visitor.ts +++ b/src/syntax/visitor.ts @@ -1,12 +1,15 @@ -import { Declaration, TypeNode, Expression, Statement, Node } from '~/syntax/environment'; +import * as decl from './declarations/visitors'; +import * as expr from './expressions/visitors'; +import * as stmt from './statements/visitors'; +import * as typs from './types/visitors'; /** * Describes a visitor for a specific set of node types. * This visitor type is designed to work in a pure functional manner, - * so each visitor accepts a node of the given type, and a value - * of the return type, and should return a processed version of that - * value. For example, a type checking visitor might be: + * so each visitor accepts a node of the given type and a value, + * and should return some other value (of the same type as the parameter by default). + * For example, a type checking visitor might be: * * type TypeCheckVisitor = NodeVisitor; * @@ -25,17 +28,67 @@ import { Declaration, TypeNode, Expression, Statement, Node } from '~/syntax/env * the other types exported by this module are predefined for the * known sets of node types. 
*/ -export type Visitor = { - [P in N['syntaxType']]: (node: N, thing: T) => T; -}; - -/** A visitor of declaration nodes */ -export type DeclarationVisitor = Visitor; -/** A visitor of type nodes */ -export type TypeNodeVisitor = Visitor; -/** A visitor of expression nodes */ -export type ExpressionVisitor = Visitor; -/** A visitor of statement nodes */ -export type StatementVisitor = Visitor; -/** A visitor of all node types */ -export type NodeVisitor = Visitor; + +export type SyntaxVisitor + = DeclarationVisitor + & ExpressionVisitor + & StatementVisitor + & TypeVisitor; + +export type DeclarationVisitor + = decl.ConstantDeclarationVisitor + & decl.AnonymousConstantDeclarationVisitor + & decl.ExportDeclarationVisitor + & decl.ExportForwardDeclarationVisitor + & decl.FunctionDeclarationVisitor + & decl.AnonymousFunctionDeclarationVisitor + & decl.ImportDeclarationVisitor + & decl.NamespaceDeclarationVisitor + & decl.AnonymousNamespaceDeclarationVisitor + & decl.TypeDeclarationVisitor + & decl.AnonymousTypeDeclarationVisitor; + +export type ExpressionVisitor + = expr.ArrayAccessVisitor + & expr.ArrayLiteralVisitor + & expr.BinaryExpressionVisitor + & expr.BoolLiteralVisitor + & expr.CharLiteralVisitor + & expr.FieldAccessVisitor + & expr.FloatLiteralVisitor + & expr.FunctionApplicationVisitor + & expr.IdentifierExpressionVisitor + & expr.IfElseExpressionVisitor + & expr.IntegerLiteralVisitor + & expr.LambdaExpressionVisitor + & expr.ParenthesizedExpressionVisitor + & expr.StringLiteralVisitor + & expr.StructLiteralVisitor + & expr.TupleLiteralVisitor + & expr.UnaryExpressionVisitor + & expr.VarDeclarationVisitor; + +export type StatementVisitor + = stmt.BlockVisitor + & stmt.BreakStatementVisitor + & stmt.ContinueStatementVisitor + & stmt.DoWhileStatementVisitor + & stmt.ExpressionStatementVisitor + & stmt.ForStatementVisitor + & stmt.ReturnStatementVisitor + & stmt.ThrowStatementVisitor + & stmt.TryCatchStatementVisitor + & stmt.WhileStatementVisitor; + +export type TypeVisitor + = typs.ArrayTypeVisitor + & typs.BuiltInTypeVisitor + & typs.FunctionTypeVisitor + & typs.IdentifierTypeVisitor + & typs.NamespaceAccessTypeVisitor + & typs.ParenthesizedTypeVisitor + & typs.SpecificTypeVisitor + & typs.StructTypeVisitor + & typs.TupleTypeVisitor + & typs.UnionTypeVisitor; + From 50e70fc8bfc4d9d77361447d2820c28cf38ab1f9 Mon Sep 17 00:00:00 2001 From: Jake Chitel Date: Tue, 15 May 2018 11:06:47 -0500 Subject: [PATCH 11/15] added first pass of type checker --- src/extensions.ts | 16 + src/parser/lexer/char-stream.ts | 10 +- src/semantic/checker.ts | 136 ------- src/semantic/index.ts | 71 +++- src/semantic/namespace.ts | 151 ++++++++ .../node-visitors/declaration-name-visitor.ts | 0 src/semantic/node-visitors/module-visitor.ts | 82 ----- src/semantic/passes/enumeration.ts | 337 ++++++++++++++++++ src/semantic/passes/enumeration/index.ts | 14 - src/semantic/passes/resolution/index.ts | 14 +- src/semantic/program.ts | 129 +------ src/syntax/visitor.ts | 116 +++--- src/utils/lazy-list.ts | 11 + src/utils/utils.ts | 9 - 14 files changed, 657 insertions(+), 439 deletions(-) delete mode 100644 src/semantic/checker.ts create mode 100644 src/semantic/namespace.ts delete mode 100644 src/semantic/node-visitors/declaration-name-visitor.ts delete mode 100644 src/semantic/node-visitors/module-visitor.ts create mode 100644 src/semantic/passes/enumeration.ts delete mode 100644 src/semantic/passes/enumeration/index.ts diff --git a/src/extensions.ts b/src/extensions.ts index dca7790..c7d1744 100644 --- 
a/src/extensions.ts +++ b/src/extensions.ts @@ -43,3 +43,19 @@ interface String { String.prototype.last = function last(count = 1) { return this.slice(this.length - count, this.length); } + +interface Map { + /** Set the value at a specified key immutably, returning a new map object without modifying the original */ + iset(key: K, value: V): Map; +} + +interface ReadonlyMap { + /** Set the value at a specified key immutably, returning a new map object without modifying the original */ + iset(key: K, value: V): ReadonlyMap; +} + +Map.prototype.iset = function iset(this: Map, key: K, value: V): Map { + const clone = new Map(this); + clone.set(key, value); + return clone; +} diff --git a/src/parser/lexer/char-stream.ts b/src/parser/lexer/char-stream.ts index b8a4910..6bf5e35 100644 --- a/src/parser/lexer/char-stream.ts +++ b/src/parser/lexer/char-stream.ts @@ -1,4 +1,4 @@ -import { openSync as open, readSync as read } from 'fs'; +import { openSync as open, readSync as read, closeSync as close } from 'fs'; import { StringDecoder } from 'string_decoder'; import { LazyList, NonEmptyLazyList, fromIterable, infList } from '~/utils/lazy-list'; import { FilePosition, CoreObject } from '~/core'; @@ -78,7 +78,13 @@ function createByteStream(path: string): LazyList { const fd = open(path, 'r'); return infList() .map(i => readByte(fd, i)) - .takeWhile(b => b.length > 0); + .takeWhile(b => { + // if there was a byte read, return it + if (b.length > 0) return true; + // otherwise, close the fd so we don't leave it dangling, and stop iteration + close(fd); + return false; + }); } /** diff --git a/src/semantic/checker.ts b/src/semantic/checker.ts deleted file mode 100644 index 3bde40a..0000000 --- a/src/semantic/checker.ts +++ /dev/null @@ -1,136 +0,0 @@ -import { Diagnostic, CoreObject } from '~/core'; -import { Program, Module, Declaration, Namespace } from './program'; -import { parseModule } from '~/parser'; -import { ModuleVisitor } from './node-visitors/module-visitor'; -import { TypeCheckErrorContext } from './error-context'; -import { mapSet } from '~/utils/utils'; -import { ModuleRoot, Declaration as SyntaxDeclaration } from '~/syntax'; - - -export class TypeChecker extends CoreObject { - readonly diagnostics: ReadonlyArray = []; - readonly syntaxModules: ReadonlyMap = new Map(); - readonly modules: ReadonlyMap = new Map(); - readonly syntaxDeclarations: ReadonlyArray = []; - readonly declarations: ReadonlyArray = []; - readonly namespaces: ReadonlyArray = []; - readonly dependencies: ReadonlyArray = []; - readonly errorContext: TypeCheckErrorContext = TypeCheckErrorContext; - - /** - * Top-level interface for semantic analysis. - * - * Pass 1 - Namespace Enumeration: - * - Starting with the first module, enumerate all declarations, including imports, exports, and forwards - * - Recursively enumerate all referenced modules and all namespaces - * - Inputs: - * - the main module path - * - Outputs: - * - module registry (all modules by path) - * - declaration registry (all declarations by id) - * - namespace registry (all namespaces by id) - * - dependency queue (a built queue of dependencies that need to be resolved) - * - any errors from the process - * - NOTE: this does NOT involve actually processing the internals of declarations, only names and references - * Pass 2 - Dependency Resolution: - * - One output of the first pass was a dependency queue. 
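The iset() method patched onto Map.prototype in src/extensions.ts above is the immutable counterpart of Map.set: it copies the map, sets the key on the copy, and returns the copy, leaving the receiver untouched. A small stand-alone illustration of the same behaviour, written as a free function instead of a prototype extension so it runs without the declaration merging in extensions.ts:

// local equivalent of Map.prototype.iset: copy, set on the copy, return the copy
function iset<K, V>(map: ReadonlyMap<K, V>, key: K, value: V): ReadonlyMap<K, V> {
    const clone = new Map(map);
    clone.set(key, value);
    return clone;
}

const before: ReadonlyMap<string, number> = new Map([['a', 1]]);
const after = iset(before, 'b', 2);

console.log(before.has('b')); // false, the original map is untouched
console.log(after.get('b'));  // 2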
Now that enumeration is done, we must process those dependencies - * - This involves resolving all imports and exports to corresponding declarations, creting a reference chain - * - Inputs: - * - module registry - * - declaration registry - * - namespace registry - * - dependency queue - * - Outputs: - * - module registry (unchanged) - * - declaration registry (unchanged) - * - namespace registry, now with names and exports resolved - * - any errors from the process - * Pass 3 - Type Checking: - * - Now that we have all declarations enumerated, and all declaration references resolved, we resolve the types of everything - * - This involves setting the type of everything that is typeable - * - As well as making sure that assignability is correct - * - Inputs: - * - declaration registry - * - namespace registry - * - Outputs: - * - declaration registry, now with everything typed - * - namespace registry (unchanged) - * - any errors from the process - * Pass 4 - Name Clash Checking: - * - Once we have resolved the type of everything, we can make sure that everything that has the same name is able to do so - * - Some declarations can be merged, others cannot - * - Several things can be overloaded, but those overloads must be valid - * - Inputs: - * - namespace registry - * - declaration registry - * - Outputs: - * - namespace registry, now with name clashes processed (may create overloads, merges, etc.) - * - declaration registry (possibly unchanged, overloads and merges may need to change things) - * - any errors from the process - * - * Once we are done with all passes, we output a Program instance that contains all errors and all modules (which contain all namespaces, which contain all declarations). - */ - check(path: string): Program { - // we can't do anything until we have a parsed module, so do that first - let checker = this.parseModule(path); - // if there is no module, there was a parse error, and we should return right away - if (!checker.syntaxModules.size) return checker.createProgram(); - // 1st pass: resolve all modules, namespaces, and declarations - const module = checker.syntaxModules.get(path)!; - checker = ModuleVisitor[module.syntaxType](module, this); - // 2nd pass: resolve all dependencies - checker = checker.dependencies.reduce((tc, d) => processDependency(d, tc), checker); - // 3rd pass: resolve all types - checker = checker.syntaxDeclarations.reduce((tc, d) => DeclarationTypeVisitor[d.syntaxType](d, tc), checker); - // 4th pass: handle name clashes (overloads are valid for all declarations) - checker = checker.modules.reduce((tc, m) => NameClashVisitor[m.syntaxType](m, tc), checker); - // everything has been type checked, return the program - return checker.createProgram(); - } - - /** - * Add an error to this type checker, using the specified - * error generator function. This function will be passed - * a context object that contains several built-in message - * generator functions. - */ - error(fn: (ctx: TypeCheckErrorContext) => Diagnostic): TypeChecker { - return this.addDiagnostic(fn(this.errorContext)); - } - - /** - * Given an absolute path to a module file, parse the module - * and add it to the type checker's internal module registry. - */ - parseModule(path: string): TypeChecker { - const { module, diagnostics } = parseModule(path); - return this.clone({ - syntaxModules: module ? mapSet(this.syntaxModules, path, module) : this.syntaxModules, - diagnostics: [...this.diagnostics, ...diagnostics] - }); - } - - /** - * Add a diagnostic to the type checker. 
- */ - addDiagnostic(diagnostic: Diagnostic): TypeChecker { - return this.addDiagnostics([diagnostic]); - } - - /** - * Add a list of diagnostics to the type checker. - */ - addDiagnostics(diagnostics: ReadonlyArray): TypeChecker { - return this.clone({ - diagnostics: [...this.diagnostics, ...diagnostics] - }); - } - - private createProgram(): Program { - return new Program().clone({ - modules: this.modules, - declarations: this.declarations, - diagnostics: this.diagnostics - }); - } -} diff --git a/src/semantic/index.ts b/src/semantic/index.ts index 20052f4..7311457 100644 --- a/src/semantic/index.ts +++ b/src/semantic/index.ts @@ -1,7 +1,70 @@ -import { TypeChecker } from './checker'; +import { Program } from './program'; +import enumerateNamespaces from './passes/enumeration'; -export default function typecheck(path: string) { - const checker = new TypeChecker(); - return checker.check(path); +/** + * Top-level interface for semantic analysis. + * + * Pass 1 - Namespace Enumeration: + * - Starting with the first module, enumerate all declarations, including imports, exports, and forwards + * - Recursively enumerate all referenced modules and all namespaces + * - Inputs: + * - the main module path + * - Outputs: + * - module registry (all modules by path) + * - declaration registry (all declarations by id) + * - namespace registry (all namespaces by id) + * - dependency queue (a built queue of dependencies that need to be resolved) + * - any errors from the process + * - NOTE: this does NOT involve actually processing the internals of declarations, only names and references + * Pass 2 - Dependency Resolution: + * - One output of the first pass was a dependency queue. Now that enumeration is done, we must process those dependencies + * - This involves resolving all imports and exports to corresponding declarations, creting a reference chain + * - Inputs: + * - module registry + * - declaration registry + * - namespace registry + * - dependency queue + * - Outputs: + * - module registry (unchanged) + * - declaration registry (unchanged) + * - namespace registry, now with names and exports resolved + * - any errors from the process + * Pass 3 - Type Checking: + * - Now that we have all declarations enumerated, and all declaration references resolved, we resolve the types of everything + * - This involves setting the type of everything that is typeable + * - As well as making sure that assignability is correct + * - Inputs: + * - declaration registry + * - namespace registry + * - Outputs: + * - declaration registry, now with everything typed + * - namespace registry (unchanged) + * - any errors from the process + * Pass 4 - Name Clash Checking: + * - Once we have resolved the type of everything, we can make sure that everything that has the same name is able to do so + * - Some declarations can be merged, others cannot + * - Several things can be overloaded, but those overloads must be valid + * - Inputs: + * - namespace registry + * - declaration registry + * - Outputs: + * - namespace registry, now with name clashes processed (may create overloads, merges, etc.) + * - declaration registry (possibly unchanged, overloads and merges may need to change things) + * - any errors from the process + * + * Once we are done with all passes, we output a Program instance that contains all errors and all modules (which contain all namespaces, which contain all declarations). 
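Of these four passes, only enumeration appears in full in this patch; the resolveDependencies, typecheck, and checkNameClashes functions called by analyze() below are not shown here. Purely as an illustration of the intended style (a pass as a pure function from the previous pass's registries to new registries plus diagnostics), with hypothetical names and fields:

// hypothetical shape only, not signatures defined anywhere in this series
interface PassOutput<T> {
    readonly result: T;
    readonly diagnostics: ReadonlyArray<string>; // the real passes use Diagnostic from ~/core
}

type Pass<In, Out> = (input: In) => PassOutput<Out>;

// composing passes then reduces to threading results and concatenating diagnostics
function runPasses<A, B, C>(p1: Pass<A, B>, p2: Pass<B, C>, input: A): PassOutput<C> {
    const first = p1(input);
    const second = p2(first.result);
    return { result: second.result, diagnostics: [...first.diagnostics, ...second.diagnostics] };
}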
+ */ +export default function analyze(path: string) { + // Pass 1: Enumeration + const enumeration = enumerateNamespaces(path); + // Pass 2: Resolution + const resolution = resolveDependencies(enumeration.modules, enumeration.declarations, enumeration.namespaces, enumeration.dependencyQueue); + // Pass 3: Typechecking + const typechecked = typecheck(enumeration.declarations, resolution.namespaces); + // Pass 4: Name clashes + const nameClash = checkNameClashes(typechecked.declarations, resolution.namespaces); + // Create program + const diagnostics = [...enumeration.diagnostics, ...resolution.diagnostics, ...typechecked.diagnostics, ...nameClash.diagnostics]; + return new Program(enumeration.modules, resolution.namespaces, nameClash.declarations, diagnostics); } diff --git a/src/semantic/namespace.ts b/src/semantic/namespace.ts new file mode 100644 index 0000000..482fad5 --- /dev/null +++ b/src/semantic/namespace.ts @@ -0,0 +1,151 @@ +import { CoreObject } from '~/core'; +import { FunctionDeclaration, TypeDeclaration, ConstantDeclaration, AnonymousFunctionDeclaration, AnonymousTypeDeclaration, AnonymousConstantDeclaration, NamespaceDeclaration, AnonymousNamespaceDeclaration } from '~/syntax'; + +/** + * An abstract object representing a "namespace". + * Serves as a parent class for modules and declared namespaces, + * both of which are semantically "namespaces". + */ +export abstract class Namespace extends CoreObject { + readonly localNames: ReadonlyMap> = new Map(); + readonly exports: ReadonlyMap> = new Map(); + + constructor( + readonly namespaceId: number + ) { super(); } + + addImport(targetModule: string, localName: string, exportName: string): Namespace { + let target: NameTarget; + if (exportName === '*') { + target = { modulePath: targetModule } as RemoteNamespace; + } else { + target = { modulePath: targetModule, exportName } as RemoteName; + } + const array = [...(this.localNames.get(localName) || []), target]; + return this.clone({ localNames: this.localNames.iset(localName, array) }); + } + + addForward(targetModule: string, forwardName: string, exportName: string): Namespace { + let target: NameTarget; + if (exportName === '*') { + // if it's a pure forward, we can't resolve anything right now + if (forwardName === '*') return this; + target = { modulePath: targetModule } as RemoteNamespace; + } else { + target = { modulePath: targetModule, exportName } as RemoteName; + } + const array = [...(this.exports.get(forwardName) || []), target]; + return this.clone({ exports: this.exports.iset(forwardName, array) }); + } +} + +/** + * A declared namespace within another namespace. + * It has a name, a parent namespace id, and a declaration id (because it is a declaration). + */ +export class DeclaredNamespace extends Namespace { + constructor( + namespaceId: number, + readonly parentNamespaceId: number, + readonly declarationId: number, + readonly namespaceDeclaration: NamespaceDeclaration | AnonymousNamespaceDeclaration + ) { super(namespaceId); } +} + +/** + * A semantic container for a module in a program. + * A module is a type of namespace, and can contain local names, exports, and declarations. + * Where it differs from a generic namespace is that it has no parent namespace, and is associated with a file path. + */ +export class Module extends Namespace { + constructor( + namespaceId: number, + readonly absolutePath: string + ) { super(namespaceId); } +} + +/** + * For any given name in a program, there are target(s) to which that name resolves. 
+ * A target will always be either: + * - an export name of another module + * - a module's namespace + * - a locally-scoped name + * - a declaration inline with the name (only in the case of exported declarations) + */ +export type NameTarget = RemoteName | RemoteNamespace | LocalName | LocalDeclaration; + +/** + * A remote name is reference to an export name from another module. + */ +export class RemoteName extends CoreObject { + constructor( + readonly modulePath: string, + readonly exportName: string, + readonly resolvedDeclarationId: number + ) { super() } +} + +/** + * A remote namespace is a pointer to a module's top-level namespace + */ +export class RemoteNamespace extends CoreObject { + constructor( + readonly modulePath: string + ) { super() } +} + +/** + * A local name is a reference to a name that is scoped to the current module + */ +export class LocalName extends CoreObject { + constructor( + readonly name: string, + readonly resolvedDeclarationId: number + ) { super() } +} + +/** + * A local declaration is a reference to a declaration that has no name, + * i.e. in the case of an anonymous default export. + */ +export class LocalDeclaration extends CoreObject { + constructor( + readonly resolvedDeclarationId: number + ) { super() } +} + +/** + * A semantic declaration is a node that is ultimately associated with a name + */ +export type DeclaredEntity = DeclaredFunction | DeclaredType | DeclaredConstant | DeclaredNamespace; + +/** + * A semantic function entity, identified by a name. + */ +export class DeclaredFunction extends CoreObject { + constructor( + readonly declarationId: number, + readonly functionDeclaration: FunctionDeclaration | AnonymousFunctionDeclaration + ) { super() } +} + +/** + * A semantic type entity, identified by a name. + * NOTE: this is different from the concept of a "type" in type checking TODO then what is? + */ +export class DeclaredType extends CoreObject { + constructor( + readonly declarationId: number, + readonly typeDeclaration: TypeDeclaration | AnonymousTypeDeclaration + ) { super() } +} + +/** + * A semantic constant entity, identified by a name. 
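The addImport/addForward helpers above reduce every import or forward to a NameTarget: a wildcard ('*') binds a whole module namespace to the local name, while anything else binds one specific export of that module. A stand-alone sketch of just that branching, using plain object literals in place of the RemoteName/RemoteNamespace classes above; './other' and 'myFunc' below are arbitrary example values:

// stand-in targets; the real code uses the RemoteName and RemoteNamespace classes above
type Target =
    | { kind: 'remoteNamespace'; modulePath: string }
    | { kind: 'remoteName'; modulePath: string; exportName: string };

function importTarget(targetModule: string, exportName: string): Target {
    // a wildcard import binds the whole module namespace to the local name;
    // otherwise the local name points at one specific export of that module
    return exportName === '*'
        ? { kind: 'remoteNamespace', modulePath: targetModule }
        : { kind: 'remoteName', modulePath: targetModule, exportName };
}

console.log(importTarget('./other', '*'));      // { kind: 'remoteNamespace', modulePath: './other' }
console.log(importTarget('./other', 'myFunc')); // { kind: 'remoteName', modulePath: './other', exportName: 'myFunc' }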
+ */ +export class DeclaredConstant extends CoreObject { + constructor( + readonly declarationId: number, + readonly constantDeclaration: ConstantDeclaration | AnonymousConstantDeclaration + ) { super() } +} diff --git a/src/semantic/node-visitors/declaration-name-visitor.ts b/src/semantic/node-visitors/declaration-name-visitor.ts deleted file mode 100644 index e69de29..0000000 diff --git a/src/semantic/node-visitors/module-visitor.ts b/src/semantic/node-visitors/module-visitor.ts deleted file mode 100644 index 5eb405d..0000000 --- a/src/semantic/node-visitors/module-visitor.ts +++ /dev/null @@ -1,82 +0,0 @@ -import { Visitor } from '~/syntax/visitor'; -import { ModuleRoot, ImportDeclaration, ExportDeclaration, ExportForwardDeclaration, TypeDeclaration, FunctionDeclaration, ConstantDeclaration, NamespaceDeclaration } from '~/syntax'; -import { TypeChecker } from '~/semantic/checker'; -import { SyntaxType, Declaration } from '~/syntax/environment'; -import resolveModule from '~/semantic/resolver'; - - -type ModuleNode = ModuleRoot | ImportDeclaration | ExportDeclaration | ExportForwardDeclaration | Declaration; - -const isDeclaration = (node: ExportDeclaration | ExportForwardDeclaration | Declaration): node is Declaration => - ![SyntaxType.ExportDeclaration, SyntaxType.ExportForwardDeclaration].includes(node.syntaxType); - -/** - * This visitor is responsible for enumerating all modules and declarations in the program - * to prepare for import and export resolution in the next pass. - * No dependencies or name linkages are resolved here. - */ -export const ModuleVisitor: Visitor = { - [SyntaxType.ModuleRoot]: (node: ModuleRoot, checker: TypeChecker) => { - // process imports first to enumerate all modules - const withImports = node.imports.reduce((c, i) => ModuleVisitor[i.syntaxType](i, c), checker); - // process module-scoped declarations - const withDeclarations = node.declarations - .filter(isDeclaration) - .reduce((c, d) => ModuleVisitor[d.syntaxType](d, c), withImports); - // process exports last so all overloads are available - return node.declarations - .filter(d => !isDeclaration(d)) - .reduce((c, d) => ModuleVisitor[d.syntaxType](d, c), withDeclarations); - }, - /** - * An import declaration exposes an export of another module as a local name in the module - * containing the declaration. To process it, we need to resolve the imported module path, - * make sure that the requested export name exists, make sure that the requested alias name - * does not clash with any already declared names, and then add the name to the module, - * linking it to the exported declaration in the other module. 
- */ - [SyntaxType.ImportDeclaration]: (node: ImportDeclaration, checker: TypeChecker) => { - const currentModule = node.location.path; - // resolve the module - const importedModule = resolveModule(currentModule, node.moduleName.value); - // invalid module path specified - if (!importedModule) return checker.error(_ => _.noModule(node.moduleName)); - // make sure the module has been loaded - this.loadModule(importedModule); - // process the imports - let tc = checker; - for (const { importName, aliasName } of node.imports) { - // if wildcard, process it as a namespace, not an import - if (importName.image === '*') { - const namespace = new ast.NamespaceDeclaration(importedModule, aliasName, node.location); - namespace.visit(this); - continue; - } - // regular import, verify that the module exports the name - if (!this.getExport(importedModule, importName.image)) { - tc = tc.error(_ => _.noModuleExport(node.moduleName.value, importName)); - continue; - } - // register the alias name to the module using the imported export - this.link(currentModule, aliasName, importedModule, importName.image); - } - }, - [SyntaxType.ExportDeclaration]: (node: ExportDeclaration, checker: TypeChecker) => { - // - }, - [SyntaxType.ExportForwardDeclaration]: (node: ExportForwardDeclaration, checker: TypeChecker) => { - // - }, - [SyntaxType.TypeDeclaration]: (node: TypeDeclaration, checker: TypeChecker) => { - // - }, - [SyntaxType.FunctionDeclaration]: (node: FunctionDeclaration, checker: TypeChecker) => { - // - }, - [SyntaxType.ConstantDeclaration]: (node: ConstantDeclaration, checker: TypeChecker) => { - // - }, - [SyntaxType.NamespaceDeclaration]: (node: NamespaceDeclaration, checker: TypeChecker) => { - // - } -}; diff --git a/src/semantic/passes/enumeration.ts b/src/semantic/passes/enumeration.ts new file mode 100644 index 0000000..e41a69e --- /dev/null +++ b/src/semantic/passes/enumeration.ts @@ -0,0 +1,337 @@ +import { Dependency, ImportedNamespace, ImportedName, PureForward, ForwardedNamespace, ForwardedName, ExportedName, ExportedDeclaration } from '~/semantic/passes/resolution'; +import { parseModule } from '~/parser'; +import { Diagnostic, FilePosition, CoreObject } from '~/core'; +import { ModuleRoot, ImportDeclaration, ExportDeclaration, ExportForwardDeclaration, Declaration as SyntaxDeclaration, TypeDeclaration, FunctionDeclaration, ConstantDeclaration, NamespaceDeclaration, AnonymousTypeDeclaration, AnonymousFunctionDeclaration, AnonymousConstantDeclaration, AnonymousNamespaceDeclaration } from '~/syntax'; +import { LazyList, single } from '~/utils/lazy-list'; +import { SyntaxType, AnonymousDeclaration } from '~/syntax/environment'; +import resolveModule from '~/semantic/resolver'; +import { resolve } from 'path'; +import { Namespace, DeclaredEntity, Module, DeclaredType, DeclaredFunction, DeclaredConstant, DeclaredNamespace } from '~/semantic/namespace'; + + +export interface NamespaceEnumerationOutput { + readonly modules: ReadonlyMap; + readonly namespaces: ReadonlyArray; + readonly declarations: ReadonlyArray; + readonly dependencyQueue: ReadonlyArray; + readonly diagnostics: ReadonlyArray; +} + +export interface EnumeratedModule { + readonly module: Optional; + readonly status: ModuleEnumerationStatus; +} + +export enum ModuleEnumerationStatus { + /** The initial state of any module that is referenced, including the entry. Nothing has been done with it yet. 
*/ + REFERENCED, + /** The module was found and parsed */ + SUCCESS, + /** The module was found, but failed to parse */ + UNPARSED, + /** The module was not found */ + NOT_FOUND +} + +export default function enumerateNamespaces(mainModulePath: string): NamespaceEnumerationOutput { + return new EnumerationProcess(mainModulePath).run(); +} + +type AnyDeclaration = ImportDeclaration | ExportDeclaration | ExportForwardDeclaration | SyntaxDeclaration | AnonymousDeclaration; + +class EnumerationProcess extends CoreObject { + readonly moduleQueue: LazyList; + readonly modules: ReadonlyMap; + readonly namespaces: ReadonlyArray = []; + readonly declarations: ReadonlyArray = []; + readonly dependencyQueue: ReadonlyArray = []; + readonly diagnostics: ReadonlyArray = []; + + constructor(readonly mainModulePath: string) { + super(); + this.moduleQueue = single(mainModulePath); + this.modules = new Map().iset(mainModulePath, { module: null, status: ModuleEnumerationStatus.REFERENCED }); + } + + run() { + const final = this.consumeModuleQueue(); + return final.output(); + } + + consumeModuleQueue(): EnumerationProcess { + if (this.moduleQueue.empty) return this; + const { head: modulePath, tail: moduleQueue } = this.moduleQueue; + let next: EnumerationProcess = this.clone({ moduleQueue }); + // parse the module + let moduleSyntax: Optional, parseDiagnostics: ReadonlyArray; + try { + ({ module: moduleSyntax, diagnostics: parseDiagnostics } = parseModule(modulePath)); + } catch (err) { + // "file not found" errors will be processed in the next pass so we get the import location, so just save as not found + if (err.code === 'ENOENT') { + next = next.setFailedModule(modulePath, ModuleEnumerationStatus.NOT_FOUND); + // however, if this was the main module, we do need a diagnostic, and we need to stop right here + if (modulePath === this.mainModulePath) return next.withEntryError(`Entry point "${this.mainModulePath}" not found.`); + return next.consumeModuleQueue(); + } + throw err; + } + // the module couldn't be parsed, save it as unparsed and let the next pass set the error + if (!moduleSyntax) { + next = next.setFailedModule(modulePath, ModuleEnumerationStatus.UNPARSED); + // if it was the main module, we stop right here + if (modulePath === this.mainModulePath) return next.withEntryError(`Entry point "${this.mainModulePath}" failed to parse.`); + return next.consumeModuleQueue(); + } + // add any parse diagnostics + next = next.addDiagnostics(parseDiagnostics); + // add the module to the module and namespace registries + next = next.setSuccessfulModule(modulePath); + const namespaceId = next.modules.get(modulePath)!.module!.namespaceId; + // module parsed successfully, time to enumerate its contents + for (const declaration of [...moduleSyntax.imports, ...moduleSyntax.declarations]) { + next = next.handleDeclaration(declaration, namespaceId, modulePath); + } + // continue + return next.consumeModuleQueue(); + } + + setFailedModule(path: string, status: ModuleEnumerationStatus): EnumerationProcess { + return this.clone({ modules: this.modules.iset(path, { module: null, status }) }); + } + + setSuccessfulModule(path: string): EnumerationProcess { + const namespaceId = this.namespaces.length; + const module = new Module(namespaceId, path); + return this.clone({ + modules: this.modules.iset(path, { module, status: ModuleEnumerationStatus.SUCCESS }), + namespaces: [...this.namespaces, module] + }); + } + + addReferencedModule(path: string): EnumerationProcess { + if (this.modules.has(path)) return this; + 
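/*
 * A minimal standalone sketch of the worklist pattern that consumeModuleQueue() above relies on:
 * each step pulls one entry off the queue and returns a brand-new state value instead of mutating
 * in place. SketchState and consume() are illustrative names only, not part of this codebase, and
 * a plain readonly array stands in for the LazyList used by the real process.
 */
interface SketchState {
    readonly queue: ReadonlyArray<string>;
    readonly seen: ReadonlyArray<string>;
}

function consume(state: SketchState): SketchState {
    // queue exhausted: the accumulated state is the result
    if (state.queue.length === 0) return state;
    const [head, ...tail] = state.queue;
    // "process" the head by recording it, producing a new state for the recursive step
    const next: SketchState = { queue: tail, seen: [...state.seen, head] };
    return consume(next);
}

// consume({ queue: ['main', 'imported-a'], seen: [] }).seen  ->  ['main', 'imported-a']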
return this.clone({ + modules: this.modules.iset(path, { module: null, status: ModuleEnumerationStatus.REFERENCED }), + moduleQueue: this.moduleQueue.append(path) + }); + } + + addImport(namespaceId: number, targetModule: string, localName: string, exportName: string): EnumerationProcess { + let dep: Dependency; + if (exportName === '*') { + dep = new ImportedNamespace(namespaceId, localName, targetModule); + } else { + dep = new ImportedName(namespaceId, localName, targetModule, exportName); + } + return this.clone({ + dependencyQueue: [...this.dependencyQueue, dep] + }); + } + + addForward(namespaceId: number, targetModule: string, forwardName: string, exportName: string): EnumerationProcess { + let dep: Dependency; + if (exportName === '*') { + if (forwardName === '*') { + dep = new PureForward(namespaceId, targetModule); + } else { + dep = new ForwardedNamespace(namespaceId, forwardName, targetModule); + } + } else { + dep = new ForwardedName(namespaceId, forwardName, targetModule, exportName); + } + return this.clone({ + dependencyQueue: [...this.dependencyQueue, dep] + }); + } + + addExportedName(namespaceId: number, exportName: string, localName: string): EnumerationProcess { + const dep = new ExportedName(namespaceId, localName, exportName); + return this.clone({ + dependencyQueue: [...this.dependencyQueue, dep] + }); + } + + addExportedDeclaration(namespaceId: number, exportName: string, declarationId: number): EnumerationProcess { + const dep = new ExportedDeclaration(namespaceId, declarationId, exportName); + return this.clone({ + dependencyQueue: [...this.dependencyQueue, dep] + }); + } + + addDiagnostics(diagnostics: ReadonlyArray): EnumerationProcess { + return this.clone({ diagnostics: [...this.diagnostics, ...diagnostics] }); + } + + withEntryError(error: string): EnumerationProcess { + return this.clone({ + diagnostics: [new Diagnostic(error, new FilePosition('', [0, 0]))] + }); + } + + handleDeclaration(node: AnyDeclaration, namespaceId: number, modulePath: string, containingExport: Optional = null): EnumerationProcess { + switch (node.syntaxType) { + case SyntaxType.ImportDeclaration: return this.handleImport(node, namespaceId, modulePath); + case SyntaxType.ExportDeclaration: return this.handleExport(node, namespaceId, modulePath); + case SyntaxType.ExportForwardDeclaration: return this.handleForward(node, namespaceId, modulePath); + case SyntaxType.TypeDeclaration: return this.handleType(node, namespaceId, containingExport); + case SyntaxType.AnonymousTypeDeclaration: return this.handleType(node, namespaceId, containingExport); + case SyntaxType.FunctionDeclaration: return this.handleFunction(node, namespaceId, containingExport); + case SyntaxType.AnonymousFunctionDeclaration: return this.handleFunction(node, namespaceId, containingExport); + case SyntaxType.ConstantDeclaration: return this.handleConstant(node, namespaceId, containingExport); + case SyntaxType.AnonymousConstantDeclaration: return this.handleConstant(node, namespaceId, containingExport); + case SyntaxType.NamespaceDeclaration: return this.handleNamespace(node, namespaceId, modulePath, containingExport); + case SyntaxType.AnonymousNamespaceDeclaration: return this.handleNamespace(node, namespaceId, modulePath, containingExport); + } + } + + /** + * Imports are processed by adding, for each name in the import, + * a name to the parent namespace with the corresponding target type + * and an entry to the dependency queue. 
+ * Additionally, the target module path should be resolved and added to the + * module queue and registry, only if it does not already exist in the registry. + */ + handleImport(node: ImportDeclaration, namespaceId: number, modulePath: string) { + let next = this as EnumerationProcess; + // handle the module name + let targetModule = resolveModule(modulePath, node.moduleName.value); + if (!targetModule) { + // resolve the would-be module path instead + targetModule = resolve(modulePath, node.moduleName.value); + next = next.setFailedModule(targetModule, ModuleEnumerationStatus.NOT_FOUND); + } else { + // add the referenced module (will be ignored if it was already referenced) + next = next.addReferencedModule(targetModule); + } + // add import names + for (const imp of node.imports) { + next = next.addImport(namespaceId, targetModule, imp.aliasName.image, imp.importName.image); + } + return next; + } + + /** + * Exports are processed by adding, for each name in the export, + * an export to the parent namespace with the corresponding target type + * and an entry to the dependency queue. + * If the export is an exported declaration, the declaration must also be processed. + */ + handleExport(node: ExportDeclaration, namespaceId: number, modulePath: string) { + let next = this as EnumerationProcess; + // add export names + for (const exp of node.exports) { + if (exp.value) { + // pass the baton to the declaration handler, which will add the export for us + next = next.handleDeclaration(exp.value, namespaceId, modulePath, exp.exportName.value); + } else { + // this is an exported name + next = next.addExportedName(namespaceId, exp.exportName.image, exp.valueName!.image); + } + } + return next; + } + + /** + * Forwards are processed by adding, for each name in the forward, + * an export to the parent namespace with the corresponding target type + * and an entry to the dependency queue. + * Additionally, the target module path should be resolved and added to the + * module queue and registry, only if it does not already exist in the registry. 
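/*
 * A rough standalone illustration of the branching that addImport() above performs for each
 * imported name: a '*' import becomes a namespace dependency, anything else a named dependency.
 * The plain object shapes and the sketchAddImport name are stand-ins for the real
 * ImportedNamespace/ImportedName classes, not the actual implementation.
 */
type SketchDependency =
    | { kind: 'namespace'; namespaceId: number; localName: string; targetModule: string }
    | { kind: 'name'; namespaceId: number; localName: string; targetModule: string; exportName: string };

function sketchAddImport(namespaceId: number, targetModule: string, localName: string, exportName: string): SketchDependency {
    return exportName === '*'
        ? { kind: 'namespace', namespaceId, localName, targetModule }
        : { kind: 'name', namespaceId, localName, targetModule, exportName };
}

// sketchAddImport(0, '/abs/path/other', 'alias', 'someName')  ->  a named dependency
// sketchAddImport(0, '/abs/path/other', 'other', '*')         ->  a namespace dependency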
+ */ + handleForward(node: ExportForwardDeclaration, namespaceId: number, modulePath: string) { + let next = this as EnumerationProcess; + // handle the module name + let targetModule = resolveModule(modulePath, node.moduleName.value); + if (!targetModule) { + // resolve the would-be module path instead + targetModule = resolve(modulePath, node.moduleName.value); + next = next.setFailedModule(targetModule, ModuleEnumerationStatus.NOT_FOUND); + } else { + // add the referenced module (will be ignored if it was already referenced) + next = next.addReferencedModule(targetModule); + } + // add forward names + for (const fwd of node.forwards) { + next = next.addForward(namespaceId, targetModule, fwd.exportName.image, fwd.importName.image); + } + return next; + } + + /** + * Things that need to happen: + * - Create a DeclaredType + * - Register the DeclaredType to the process's declaration registry + * - If there is a parent export, register the corresponding dependency + */ + handleType(node: TypeDeclaration | AnonymousTypeDeclaration, namespaceId: number, containingExport: Optional) { + const declarationId = this.declarations.length; + const declaredType = new DeclaredType(declarationId, node); + const next: EnumerationProcess = this.clone({ declarations: [...this.declarations, declaredType] }); + if (containingExport) + return next.addExportedDeclaration(namespaceId, containingExport, declarationId); + return next; + } + + /** + * Things that need to happen: + * - Create a DeclaredFunction + * - Register the DeclaredFunction to the process's declaration registry + * - If there is a parent export, register the corresponding dependency + */ + handleFunction(node: FunctionDeclaration | AnonymousFunctionDeclaration, namespaceId: number, containingExport: Optional) { + const declarationId = this.declarations.length; + const declaredFunction = new DeclaredFunction(declarationId, node); + const next: EnumerationProcess = this.clone({ declarations: [...this.declarations, declaredFunction] }); + if (containingExport) + return next.addExportedDeclaration(namespaceId, containingExport, declarationId); + return next; + } + + /** + * Things that need to happen: + * - Create a DeclaredConstant + * - Register the DeclaredConstant to the process's declaration registry + * - If there is a parent export, register the corresponding dependency + */ + handleConstant(node: ConstantDeclaration | AnonymousConstantDeclaration, namespaceId: number, containingExport: Optional) { + const declarationId = this.declarations.length; + const declaredConstant = new DeclaredConstant(declarationId, node); + const next: EnumerationProcess = this.clone({ declarations: [...this.declarations, declaredConstant] }); + if (containingExport) + return next.addExportedDeclaration(namespaceId, containingExport, declarationId); + return next; + } + + /** + * Things that need to happen: + * - Create a DeclaredNamespace + * - Register the DeclaredType to the process's declaration registry and namespace registry + * - If there is a parent export, register the corresponding dependency + * - Process all of the namespace's declarations + */ + handleNamespace(node: NamespaceDeclaration | AnonymousNamespaceDeclaration, parentNamespaceId: number, modulePath: string, containingExport: Optional) { + const namespaceId = this.namespaces.length; + const declarationId = this.declarations.length; + const declaredNamespace = new DeclaredNamespace(namespaceId, parentNamespaceId, declarationId, node); + let next: EnumerationProcess = this.clone({ + 
declarations: [...this.declarations, declaredNamespace], + namespaces: [...this.namespaces, declaredNamespace] + }); + if (containingExport) + next = next.addExportedDeclaration(parentNamespaceId, containingExport, declarationId); + // process all declarations in the namespace + for (const declaration of [...declaredNamespace.namespaceDeclaration.imports, ...declaredNamespace.namespaceDeclaration.declarations]) { + next = next.handleDeclaration(declaration, namespaceId, modulePath); + } + return next; + } + + output = (): NamespaceEnumerationOutput => ({ + modules: this.modules, + namespaces: this.namespaces, + declarations: this.declarations, + dependencyQueue: this.dependencyQueue, + diagnostics: this.diagnostics + }); +} diff --git a/src/semantic/passes/enumeration/index.ts b/src/semantic/passes/enumeration/index.ts deleted file mode 100644 index d9c4b66..0000000 --- a/src/semantic/passes/enumeration/index.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { Module, Namespace, Declaration } from '~/semantic/program'; -import { Dependency } from '~/semantic/passes/resolution'; - - -export interface NamespaceEnumerationOutput { - readonly modules: ReadonlyMap; - readonly namespaces: ReadonlyArray; - readonly declarations: ReadonlyArray; - readonly dependencyQueue: ReadonlyArray; -} - -export default function enumerateNamespaces(mainModulePath: string): NamespaceEnumerationOutput { - // -} \ No newline at end of file diff --git a/src/semantic/passes/resolution/index.ts b/src/semantic/passes/resolution/index.ts index bb3610e..43a8f56 100644 --- a/src/semantic/passes/resolution/index.ts +++ b/src/semantic/passes/resolution/index.ts @@ -5,7 +5,7 @@ export type Dependency = ImportedName | ImportedNamespace | ForwardedName | Forw export class ImportedName extends CoreObject { constructor( - readonly importModule: string, + readonly importNamespace: number, readonly importName: string, readonly exportModule: string, readonly exportName: string @@ -14,7 +14,7 @@ export class ImportedName extends CoreObject { export class ImportedNamespace extends CoreObject { constructor( - readonly importModule: string, + readonly importNamespace: number, readonly importName: string, readonly exportModule: string ) { super() } @@ -22,7 +22,7 @@ export class ImportedNamespace extends CoreObject { export class ForwardedName extends CoreObject { constructor( - readonly forwardModule: string, + readonly forwardNamespace: number, readonly forwardName: string, readonly exportModule: string, readonly exportName: string @@ -31,7 +31,7 @@ export class ForwardedName extends CoreObject { export class ForwardedNamespace extends CoreObject { constructor( - readonly forwardModule: string, + readonly forwardNamespace: number, readonly forwardName: string, readonly exportModule: string ) { super() } @@ -39,14 +39,14 @@ export class ForwardedNamespace extends CoreObject { export class PureForward extends CoreObject { constructor( - readonly forwardModule: string, + readonly forwardNamespace: number, readonly exportModule: string ) { super() } } export class ExportedName extends CoreObject { constructor( - readonly module: string, + readonly namespace: number, readonly localName: string, readonly exportName: string ) { super() } @@ -54,7 +54,7 @@ export class ExportedName extends CoreObject { export class ExportedDeclaration extends CoreObject { constructor( - readonly module: string, + readonly namespace: number, readonly declarationId: number, readonly exportName: string ) { super() } diff --git a/src/semantic/program.ts 
b/src/semantic/program.ts index 56bbae7..4c40a1a 100644 --- a/src/semantic/program.ts +++ b/src/semantic/program.ts @@ -1,4 +1,5 @@ import { Diagnostic, CoreObject } from '~/core'; +import { Module, DeclaredEntity } from './namespace'; /** @@ -7,132 +8,6 @@ import { Diagnostic, CoreObject } from '~/core'; */ export class Program extends CoreObject { readonly modules: ReadonlyMap = new Map(); - readonly declarations: ReadonlyArray = []; + readonly declarations: ReadonlyArray = []; readonly diagnostics: ReadonlyArray = []; } - -/** - * An abstract object representing a "namespace". - * Serves as a parent class for modules and declared namespaces, - * both of which are semantically "namespaces". - */ -export abstract class Namespace extends CoreObject { - readonly localNames: Map = new Map(); - readonly exports: Map = new Map(); - readonly declarations: Declaration[] = []; - - constructor( - readonly namespaceId: number - ) { super(); } -} - -/** - * A declared namespace within another namespace. - * It has a name, a parent namespace id, and a declaration id (because it is a declaration). - */ -export class DeclaredNamespace extends Namespace { - constructor( - namespaceId: number, - readonly name: string, - readonly parentNamespaceId: number, - readonly declarationId: number - ) { super(namespaceId); } -} - -/** - * A semantic container for a module in a program. - * A module is a type of namespace, and can contain local names, exports, and declarations. - * Where it differs from a generic namespace is that it has no parent namespace, and is associated with a file path. - */ -export class Module extends Namespace { - constructor( - namespaceId: number, - readonly absolutePath: string - ) { super(namespaceId); } -} - -/** - * For any given name in a program, there are target(s) to which that name resolves. - * A target will always be either: - * - an export name of another module - * - a module's namespace - * - a locally-scoped name - * - a declaration inline with the name (only in the case of exported declarations) - */ -export type NameTarget = RemoteName | RemoteNamespace | LocalName | LocalDeclaration; - -/** - * A remote name is reference to an export name from another module. - */ -export class RemoteName extends CoreObject { - constructor( - readonly modulePath: string, - readonly exportName: string, - readonly resolvedDeclarationId: number - ) { super() } -} - -/** - * A remote namespace is a pointer to a module's top-level namespace - */ -export class RemoteNamespace extends CoreObject { - constructor( - readonly modulePath: string - ) { super() } -} - -/** - * A local name is a reference to a name that is scoped to the current module - */ -export class LocalName extends CoreObject { - constructor( - readonly name: string, - readonly resolvedDeclarationId: number - ) { super() } -} - -/** - * A local declaration is a reference to a declaration that has no name, - * i.e. in the case of an anonymous default export. - */ -export class LocalDeclaration extends CoreObject { - constructor( - readonly resolvedDeclarationId: number - ) { super() } -} - -/** - * A semantic declaration is a node that is ultimately associated with a name - */ -export type Declaration = DeclaredFunction | DeclaredType | DeclaredConstant | DeclaredNamespace; - -/** - * A semantic function entity, identified by a name. 
- */ -export class DeclaredFunction extends CoreObject { - constructor( - readonly name: string, - readonly declarationId: number - ) { super() } -} - -/** - * A semantic type entity, identified by a name. - * NOTE: this is different from the concept of a "type" in type checking TODO then what is? - */ -export class DeclaredType extends CoreObject { - constructor( - readonly name: string, - readonly declarationId: number - ) { super() } -} - -/** - * A semantic constant entity, identified by a name. - */ -export class DeclaredConstant extends CoreObject { - constructor( - readonly name: string, - readonly declarationId: number - ) { super() } -} diff --git a/src/syntax/visitor.ts b/src/syntax/visitor.ts index 78ccbc0..49a340a 100644 --- a/src/syntax/visitor.ts +++ b/src/syntax/visitor.ts @@ -29,66 +29,66 @@ import * as typs from './types/visitors'; * known sets of node types. */ -export type SyntaxVisitor - = DeclarationVisitor - & ExpressionVisitor - & StatementVisitor - & TypeVisitor; +export interface SyntaxVisitor extends + DeclarationVisitor, + ExpressionVisitor, + StatementVisitor, + TypeVisitor {} -export type DeclarationVisitor - = decl.ConstantDeclarationVisitor - & decl.AnonymousConstantDeclarationVisitor - & decl.ExportDeclarationVisitor - & decl.ExportForwardDeclarationVisitor - & decl.FunctionDeclarationVisitor - & decl.AnonymousFunctionDeclarationVisitor - & decl.ImportDeclarationVisitor - & decl.NamespaceDeclarationVisitor - & decl.AnonymousNamespaceDeclarationVisitor - & decl.TypeDeclarationVisitor - & decl.AnonymousTypeDeclarationVisitor; +export interface DeclarationVisitor extends + decl.ConstantDeclarationVisitor, + decl.AnonymousConstantDeclarationVisitor, + decl.ExportDeclarationVisitor, + decl.ExportForwardDeclarationVisitor, + decl.FunctionDeclarationVisitor, + decl.AnonymousFunctionDeclarationVisitor, + decl.ImportDeclarationVisitor, + decl.NamespaceDeclarationVisitor, + decl.AnonymousNamespaceDeclarationVisitor, + decl.TypeDeclarationVisitor, + decl.AnonymousTypeDeclarationVisitor {} -export type ExpressionVisitor - = expr.ArrayAccessVisitor - & expr.ArrayLiteralVisitor - & expr.BinaryExpressionVisitor - & expr.BoolLiteralVisitor - & expr.CharLiteralVisitor - & expr.FieldAccessVisitor - & expr.FloatLiteralVisitor - & expr.FunctionApplicationVisitor - & expr.IdentifierExpressionVisitor - & expr.IfElseExpressionVisitor - & expr.IntegerLiteralVisitor - & expr.LambdaExpressionVisitor - & expr.ParenthesizedExpressionVisitor - & expr.StringLiteralVisitor - & expr.StructLiteralVisitor - & expr.TupleLiteralVisitor - & expr.UnaryExpressionVisitor - & expr.VarDeclarationVisitor; +export interface ExpressionVisitor extends + expr.ArrayAccessVisitor, + expr.ArrayLiteralVisitor, + expr.BinaryExpressionVisitor, + expr.BoolLiteralVisitor, + expr.CharLiteralVisitor, + expr.FieldAccessVisitor, + expr.FloatLiteralVisitor, + expr.FunctionApplicationVisitor, + expr.IdentifierExpressionVisitor, + expr.IfElseExpressionVisitor, + expr.IntegerLiteralVisitor, + expr.LambdaExpressionVisitor, + expr.ParenthesizedExpressionVisitor, + expr.StringLiteralVisitor, + expr.StructLiteralVisitor, + expr.TupleLiteralVisitor, + expr.UnaryExpressionVisitor, + expr.VarDeclarationVisitor {} -export type StatementVisitor - = stmt.BlockVisitor - & stmt.BreakStatementVisitor - & stmt.ContinueStatementVisitor - & stmt.DoWhileStatementVisitor - & stmt.ExpressionStatementVisitor - & stmt.ForStatementVisitor - & stmt.ReturnStatementVisitor - & stmt.ThrowStatementVisitor - & stmt.TryCatchStatementVisitor - & 
stmt.WhileStatementVisitor; +export interface StatementVisitor extends + stmt.BlockVisitor, + stmt.BreakStatementVisitor, + stmt.ContinueStatementVisitor, + stmt.DoWhileStatementVisitor, + stmt.ExpressionStatementVisitor, + stmt.ForStatementVisitor, + stmt.ReturnStatementVisitor, + stmt.ThrowStatementVisitor, + stmt.TryCatchStatementVisitor, + stmt.WhileStatementVisitor {} -export type TypeVisitor - = typs.ArrayTypeVisitor - & typs.BuiltInTypeVisitor - & typs.FunctionTypeVisitor - & typs.IdentifierTypeVisitor - & typs.NamespaceAccessTypeVisitor - & typs.ParenthesizedTypeVisitor - & typs.SpecificTypeVisitor - & typs.StructTypeVisitor - & typs.TupleTypeVisitor - & typs.UnionTypeVisitor; +export interface TypeVisitor extends + typs.ArrayTypeVisitor, + typs.BuiltInTypeVisitor, + typs.FunctionTypeVisitor, + typs.IdentifierTypeVisitor, + typs.NamespaceAccessTypeVisitor, + typs.ParenthesizedTypeVisitor, + typs.SpecificTypeVisitor, + typs.StructTypeVisitor, + typs.TupleTypeVisitor, + typs.UnionTypeVisitor {} diff --git a/src/utils/lazy-list.ts b/src/utils/lazy-list.ts index f32a960..56acc51 100644 --- a/src/utils/lazy-list.ts +++ b/src/utils/lazy-list.ts @@ -52,6 +52,11 @@ abstract class AbstractLazyList implements Iterable { */ abstract prepend(item: T): LazyList; + /** + * Shortcut concat that appends just a single item instead of a list. + */ + abstract append(item: T): LazyList; + /** * Copies each item of this list to a new one, stopping for the first item * that returns false for the specified predicate @@ -130,6 +135,10 @@ export class NonEmptyLazyList extends AbstractLazyList { return new NonEmptyLazyList(item, () => this); } + public append(item: T): LazyList { + return new NonEmptyLazyList(this.head, () => this.tail.append(item)); + } + public takeWhile(predicate?: (item: T) => boolean): LazyList { if (!predicate) return this.takeWhile(i => !!i); if (predicate(this.head)) return new NonEmptyLazyList(this.head, () => this.tail.takeWhile(predicate)); @@ -164,6 +173,8 @@ export class EmptyLazyList extends AbstractLazyList { } concat(list: LazyList): LazyList { return list; } prepend(item: T): LazyList { return new NonEmptyLazyList(item, () => this); } + /** Interestingly enough, for an empty list, an append and prepend are the same */ + append(item: T): LazyList { return this.prepend(item); } takeWhile(_predicate?: (item: T) => boolean): LazyList { return new EmptyLazyList(); } shift(_count: number): { values: T[], tail: LazyList } { return { values: [], tail: this }; diff --git a/src/utils/utils.ts b/src/utils/utils.ts index 21e23da..51bf644 100644 --- a/src/utils/utils.ts +++ b/src/utils/utils.ts @@ -4,12 +4,3 @@ export function range(length: number) { return [...Array(length)].map((_, i) => i); } - -/** - * Function to immutably set a key in a ReadonlyMap without exposing Map. 
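/*
 * A small usage sketch for the append() shortcut added to LazyList above, assuming the existing
 * single() helper and the iterable behaviour already exposed by ~/utils/lazy-list; the string
 * values are illustrative.
 */
import { single } from '~/utils/lazy-list';

const queue = single('main').append('dep-a').append('dep-b');
// append is lazy: the head stays 'main' and appended items only surface as the tail is walked
const drained = [...queue]; // ['main', 'dep-a', 'dep-b']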
- */ -export function mapSet(map: ReadonlyMap, key: K, value: V): ReadonlyMap { - const clone = new Map(map); - clone.set(key, value); - return clone; -} From a862bba50aa9b26ed7b7f5840fd68865c4c6ea55 Mon Sep 17 00:00:00 2001 From: Jake Chitel Date: Tue, 15 May 2018 17:56:57 -0500 Subject: [PATCH 12/15] started work on resolution pass --- src/semantic/index.ts | 1 + src/semantic/namespace.ts | 11 +- .../{resolution/index.ts => dependencies.ts} | 2 +- src/semantic/passes/enumeration.ts | 17 ++- src/semantic/passes/resolution.ts | 112 ++++++++++++++++++ src/semantic/program.ts | 5 +- 6 files changed, 134 insertions(+), 14 deletions(-) rename src/semantic/passes/{resolution/index.ts => dependencies.ts} (99%) create mode 100644 src/semantic/passes/resolution.ts diff --git a/src/semantic/index.ts b/src/semantic/index.ts index 7311457..4d680ed 100644 --- a/src/semantic/index.ts +++ b/src/semantic/index.ts @@ -1,5 +1,6 @@ import { Program } from './program'; import enumerateNamespaces from './passes/enumeration'; +import resolveDependencies from './passes/resolution'; /** diff --git a/src/semantic/namespace.ts b/src/semantic/namespace.ts index 482fad5..1703a58 100644 --- a/src/semantic/namespace.ts +++ b/src/semantic/namespace.ts @@ -72,7 +72,7 @@ export class Module extends Namespace { * - a locally-scoped name * - a declaration inline with the name (only in the case of exported declarations) */ -export type NameTarget = RemoteName | RemoteNamespace | LocalName | LocalDeclaration; +export type NameTarget = RemoteName | RemoteNamespace | LocalName | LocalDeclaration | DanglingReference | CircularReference; /** * A remote name is reference to an export name from another module. @@ -90,7 +90,8 @@ export class RemoteName extends CoreObject { */ export class RemoteNamespace extends CoreObject { constructor( - readonly modulePath: string + readonly modulePath: string, + readonly resolvedDeclarationId: number ) { super() } } @@ -114,6 +115,12 @@ export class LocalDeclaration extends CoreObject { ) { super() } } +/** Indicates that the name cannot be resolved because its target does not exist */ +export class DanglingReference extends CoreObject {} + +/** Indicates that the name cannot be resolved because it depends on itself */ +export class CircularReference extends CoreObject {} + /** * A semantic declaration is a node that is ultimately associated with a name */ diff --git a/src/semantic/passes/resolution/index.ts b/src/semantic/passes/dependencies.ts similarity index 99% rename from src/semantic/passes/resolution/index.ts rename to src/semantic/passes/dependencies.ts index 43a8f56..76233a5 100644 --- a/src/semantic/passes/resolution/index.ts +++ b/src/semantic/passes/dependencies.ts @@ -58,4 +58,4 @@ export class ExportedDeclaration extends CoreObject { readonly declarationId: number, readonly exportName: string ) { super() } -} +} \ No newline at end of file diff --git a/src/semantic/passes/enumeration.ts b/src/semantic/passes/enumeration.ts index e41a69e..796f20a 100644 --- a/src/semantic/passes/enumeration.ts +++ b/src/semantic/passes/enumeration.ts @@ -1,4 +1,4 @@ -import { Dependency, ImportedNamespace, ImportedName, PureForward, ForwardedNamespace, ForwardedName, ExportedName, ExportedDeclaration } from '~/semantic/passes/resolution'; +import { Dependency, ImportedNamespace, ImportedName, PureForward, ForwardedNamespace, ForwardedName, ExportedName, ExportedDeclaration } from './dependencies'; import { parseModule } from '~/parser'; import { Diagnostic, FilePosition, CoreObject } from 
'~/core'; import { ModuleRoot, ImportDeclaration, ExportDeclaration, ExportForwardDeclaration, Declaration as SyntaxDeclaration, TypeDeclaration, FunctionDeclaration, ConstantDeclaration, NamespaceDeclaration, AnonymousTypeDeclaration, AnonymousFunctionDeclaration, AnonymousConstantDeclaration, AnonymousNamespaceDeclaration } from '~/syntax'; @@ -18,7 +18,7 @@ export interface NamespaceEnumerationOutput { } export interface EnumeratedModule { - readonly module: Optional; + readonly namespaceId: Optional; readonly status: ModuleEnumerationStatus; } @@ -50,7 +50,7 @@ class EnumerationProcess extends CoreObject { constructor(readonly mainModulePath: string) { super(); this.moduleQueue = single(mainModulePath); - this.modules = new Map().iset(mainModulePath, { module: null, status: ModuleEnumerationStatus.REFERENCED }); + this.modules = new Map().iset(mainModulePath, { namespaceId: null, status: ModuleEnumerationStatus.REFERENCED }); } run() { @@ -87,7 +87,7 @@ class EnumerationProcess extends CoreObject { next = next.addDiagnostics(parseDiagnostics); // add the module to the module and namespace registries next = next.setSuccessfulModule(modulePath); - const namespaceId = next.modules.get(modulePath)!.module!.namespaceId; + const namespaceId = next.modules.get(modulePath)!.namespaceId!; // module parsed successfully, time to enumerate its contents for (const declaration of [...moduleSyntax.imports, ...moduleSyntax.declarations]) { next = next.handleDeclaration(declaration, namespaceId, modulePath); @@ -97,22 +97,21 @@ class EnumerationProcess extends CoreObject { } setFailedModule(path: string, status: ModuleEnumerationStatus): EnumerationProcess { - return this.clone({ modules: this.modules.iset(path, { module: null, status }) }); + return this.clone({ modules: this.modules.iset(path, { namespaceId: null, status }) }); } setSuccessfulModule(path: string): EnumerationProcess { const namespaceId = this.namespaces.length; - const module = new Module(namespaceId, path); return this.clone({ - modules: this.modules.iset(path, { module, status: ModuleEnumerationStatus.SUCCESS }), - namespaces: [...this.namespaces, module] + modules: this.modules.iset(path, { namespaceId, status: ModuleEnumerationStatus.SUCCESS }), + namespaces: [...this.namespaces, new Module(namespaceId, path)] }); } addReferencedModule(path: string): EnumerationProcess { if (this.modules.has(path)) return this; return this.clone({ - modules: this.modules.iset(path, { module: null, status: ModuleEnumerationStatus.REFERENCED }), + modules: this.modules.iset(path, { namespaceId: null, status: ModuleEnumerationStatus.REFERENCED }), moduleQueue: this.moduleQueue.append(path) }); } diff --git a/src/semantic/passes/resolution.ts b/src/semantic/passes/resolution.ts new file mode 100644 index 0000000..ea7f710 --- /dev/null +++ b/src/semantic/passes/resolution.ts @@ -0,0 +1,112 @@ +import { EnumeratedModule, ModuleEnumerationStatus } from './enumeration'; +import { DeclaredEntity, Namespace } from '../namespace'; +import { Dependency, ImportedName, ImportedNamespace, ForwardedName, ForwardedNamespace, PureForward, ExportedName } from './dependencies'; +import { CoreObject, Diagnostic } from '~/core'; +import { LazyList, fromIterable } from '~/utils/lazy-list'; + + +export interface DependencyResolutionOutput { + readonly namespaces: ReadonlyArray; + readonly diagnostics: ReadonlyArray; +} + +export default function resolveDependencies(modules: ReadonlyMap, declarations: ReadonlyArray, namespaces: ReadonlyArray, dependencyQueue: 
ReadonlyArray) { + return new ResolutionProcess(modules, declarations, namespaces, dependencyQueue).run(); +} + +/** The status of a given dependency, once resolution has begun */ +enum DependencyStatus { + /** Initial state, not yet determined whether the dependency is resolvable */ + Resolving = 1, + /** Ideal state, the dependency has been resolved to a set of declaration ids */ + Resolved, + /** The dependency target does not exist, needs to be tracked for posterity */ + Dangling, + /** The dependency target depends on itself, and thus cannot ever be resolved */ + Circular +} + +class ResolutionProcess extends CoreObject { + readonly dependencyQueue: LazyList; + /** namespace id -> name -> status */ + readonly localNameStatuses: ReadonlyMap> = new Map(); + /** namespace id -> name -> status */ + readonly exportNameStatuses: ReadonlyMap> = new Map(); + readonly diagnostics: ReadonlyArray = []; + + constructor( + readonly modules: ReadonlyMap, + readonly declarations: ReadonlyArray, + readonly namespaces: ReadonlyArray, + dependencyQueue: ReadonlyArray + ) { + super(); + this.dependencyQueue = fromIterable(dependencyQueue); + } + + /** + * The goal of this process is to populate the local and export names of every namespace in the program. + * All of the information required to do that is stored in the dependency queue, and all available + * modules and namespaces, including all available declarations within them, is stored in the + * corresponding registries. + * This process will simply consume the entire dependency queue, tracking the status of all dependencies + * until all of them are either resolved, dangling, or circular references. + */ + run(): DependencyResolutionOutput { + const processed = this.consumeDependencyQueue(); + return processed.output(); + } + + consumeDependencyQueue(): ResolutionProcess { + if (this.dependencyQueue.empty) return this; + const { head, tail } = this.dependencyQueue; + const next: ResolutionProcess = this.processDependency(head).clone({ dependencyQueue: tail }); + return next.consumeDependencyQueue(); + } + + processDependency(dependency: Dependency): ResolutionProcess { + if (dependency instanceof ImportedName) return this.processImportedName(dependency); + if (dependency instanceof ImportedNamespace) return this.processImportedNamespace(dependency); + if (dependency instanceof ForwardedName) return this.processForwardedName(dependency); + if (dependency instanceof ForwardedNamespace) return this.processForwardedNamespace(dependency); + if (dependency instanceof PureForward) return this.processPureForward(dependency); + if (dependency instanceof ExportedName) return this.processExportedName(dependency); + return this.processExportedDeclaration(dependency); + } + + processImportedName(dependency: ImportedName) { + // if it's already been processed, we're done here + if (this.isLocalNameDone(dependency.importNamespace, dependency.importName)) return this; + // flag it as resolving for successive dependencies + let next = this.setLocalNameStatus(dependency.importNamespace, dependency.importName, DependencyStatus.Resolving); + const { module, status } = next.modules.get(dependency.exportModule)!; + // make sure the module exists + if (status !== ModuleEnumerationStatus.SUCCESS) { + // module doesn't exist, the dependency is dangling + return next.setLocalNameStatus(dependency.importNamespace, dependency.importName, DependencyStatus.Dangling); + } + } + + /** + * Determines if a local name has reached a terminal status. 
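/*
 * A standalone sketch of the status bookkeeping described above, using plain mutable Maps instead
 * of the immutable ReadonlyMap/iset helpers. Status mirrors DependencyStatus, and isDone() mirrors
 * the terminal check used by isLocalNameDone(): any status other than Resolving is a final answer.
 * The names here are illustrative, not the real implementation.
 */
enum Status { Resolving = 1, Resolved, Dangling, Circular }

// namespace id -> name -> status
const statuses = new Map<number, Map<string, Status>>();

function setStatus(namespaceId: number, name: string, status: Status): void {
    const ns = statuses.get(namespaceId) ?? new Map<string, Status>();
    ns.set(name, status);
    statuses.set(namespaceId, ns);
}

function isDone(namespaceId: number, name: string): boolean {
    const status = statuses.get(namespaceId)?.get(name);
    return status !== undefined && status !== Status.Resolving;
}

// setStatus(0, 'foo', Status.Resolving); isDone(0, 'foo') -> false
// setStatus(0, 'foo', Status.Dangling);  isDone(0, 'foo') -> true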
+ */ + isLocalNameDone(namespaceId: number, name: string) { + const ns = this.localNameStatuses.get(namespaceId); + if (!ns) return false; + const n = ns.get(name); + if (!n) return false; + return n !== DependencyStatus.Resolving; + } + + setLocalNameStatus(namespaceId: number, name: string, status: DependencyStatus): ResolutionProcess { + const ns = this.localNameStatuses.get(namespaceId) || new Map(); + return this.clone({ + localNameStatuses: this.localNameStatuses.iset(namespaceId, ns.iset(name, status)) + }); + } + + output = (): DependencyResolutionOutput => ({ + namespaces: this.namespaces, + diagnostics: this.diagnostics + }); +} \ No newline at end of file diff --git a/src/semantic/program.ts b/src/semantic/program.ts index 4c40a1a..4fdba53 100644 --- a/src/semantic/program.ts +++ b/src/semantic/program.ts @@ -1,5 +1,5 @@ import { Diagnostic, CoreObject } from '~/core'; -import { Module, DeclaredEntity } from './namespace'; +import { DeclaredEntity, Namespace } from './namespace'; /** @@ -7,7 +7,8 @@ import { Module, DeclaredEntity } from './namespace'; * for the semantic process of the compiler. */ export class Program extends CoreObject { - readonly modules: ReadonlyMap = new Map(); + readonly modules: ReadonlyMap = new Map(); + readonly namespaces: ReadonlyArray = []; readonly declarations: ReadonlyArray = []; readonly diagnostics: ReadonlyArray = []; } From 0bc3aa9d52ca7bc4e84f00939ff42b1364683332 Mon Sep 17 00:00:00 2001 From: Jake Chitel Date: Wed, 16 May 2018 17:25:40 -0500 Subject: [PATCH 13/15] refactored CoreObject to use set and mutate methods instead of clone --- src/core.ts | 31 +++-- src/parser/lexer/char-stream.ts | 7 +- src/parser/lexer/lexer-state.ts | 8 +- src/parser/parser.ts | 4 +- src/semantic/namespace.ts | 69 ++++------ src/semantic/passes/enumeration.ts | 131 +++++++++---------- src/semantic/passes/resolution.ts | 6 +- src/syntax/declarations/ImportDeclaration.ts | 2 +- src/syntax/index.ts | 2 +- 9 files changed, 125 insertions(+), 135 deletions(-) diff --git a/src/core.ts b/src/core.ts index 37e8f92..0ca6c5d 100644 --- a/src/core.ts +++ b/src/core.ts @@ -7,14 +7,27 @@ */ export class CoreObject { /** - * Creates a clone of 'this', applying an optional set of properties to the new object. - * Note that the type parameter is to allow private properties to be added. - * There will be an error if invalid types are provided for public properties. + * Creates a shallow copy of 'this'. 
+ * TODO: this will be a problem for methods specified as arrow function properties */ - clone>(props: C = {} as C): T { - // TS does not know how to properly handle spreads - const _props = { ...(this as any), ...(props as any) }; - return Object.assign(Object.create(Object.getPrototypeOf(this)), _props); + clone(): this { + return Object.assign(Object.create(Object.getPrototypeOf(this)), this); + } + + /** + * Sets a property of 'this' to a value, returning a new object with the value + */ + set(key: K, value: this[K]): this { + const obj = this.clone(); + return Object.assign(obj, { [key]: value }); + } + + /** + * Mutates a property of 'this' using the specified function, returning a new object with the result value + */ + mutate(key: K, fn: (val: this[K]) => this[K]): this { + const obj = this.clone(); + return Object.assign(obj, { [key]: fn(this[key]) }); } } @@ -39,11 +52,11 @@ export class FilePosition extends CoreObject { } nextLine(): FilePosition { - return this.clone({ position: [this.position[0] + 1, 0] }); + return this.mutate('position', ([l]) => [l + 1, 0] as this['position']); } nextColumn(): FilePosition { - return this.clone({ position: [this.position[0], this.position[1] + 1] }); + return this.mutate('position', ([l, c]) => [l, c + 1] as this['position']); } } diff --git a/src/parser/lexer/char-stream.ts b/src/parser/lexer/char-stream.ts index 6bf5e35..87d32e3 100644 --- a/src/parser/lexer/char-stream.ts +++ b/src/parser/lexer/char-stream.ts @@ -23,7 +23,7 @@ export class NonEmptyCharStream extends CoreObject { constructor( /** The file position of the next character in the stream */ readonly position: FilePosition, - private readonly list: NonEmptyLazyList + readonly list: NonEmptyLazyList ) { super(); } @@ -36,12 +36,11 @@ export class NonEmptyCharStream extends CoreObject { /** Reads one character from the stream, and returns it with the remaining stream */ read(): { char: string, stream: CharStream } { const char = this.list.head; - const empty = this.list.tail.empty; const position = char === '\n' ? this.position.nextLine() : this.position.nextColumn(); - if (empty) return { char, stream: new EmptyCharStream(position) }; + if (this.list.tail.empty) return { char, stream: new EmptyCharStream(position) }; return { char, - stream: this.clone({ list: this.list.tail, position }), + stream: this.set('list', this.list.tail).set('position', position), } } diff --git a/src/parser/lexer/lexer-state.ts b/src/parser/lexer/lexer-state.ts index 320791d..9b3e47b 100644 --- a/src/parser/lexer/lexer-state.ts +++ b/src/parser/lexer/lexer-state.ts @@ -73,17 +73,17 @@ abstract class LexerStateBase extends CoreObject { /** Returns a new LexerState with the provided type */ setType(type: TokenType): LexerState { - return this.clone({ type }) as LexerState; + return this.set('type', type) as LexerState; } /** Returns a new LexerState with a value based on the current image */ setValue(fn: (image: string) => any): LexerState { - return this.clone({ value: fn(this.image) }) as LexerState; + return this.set('value', fn(this.image)) as LexerState; } /** Returns a new LexerState with a value based on the current value */ mapValue(fn: (value: any) => any): LexerState { - return this.clone({ value: fn(this.value) }) as LexerState; + return this.mutate('value', fn) as LexerState; } /** @@ -159,6 +159,6 @@ class NonEmptyLexerState extends LexerStateBase { const image = this.image + chars; return stream.empty ? 
new EmptyLexerState(this.position, image, stream) - : this.clone({ empty: false, image, stream }) as LexerState; + : this.set('empty', false).set('image', image).set('stream', stream) as LexerState; } } diff --git a/src/parser/parser.ts b/src/parser/parser.ts index 86309f7..db5d8ab 100644 --- a/src/parser/parser.ts +++ b/src/parser/parser.ts @@ -27,11 +27,11 @@ abstract class ParserBase extends CoreObject { readonly successLocation: Optional = null; fail(token: Optional): Parser { - return this.clone({ failToken: token, successLocation: null }) as Parser; + return this.set('failToken', token).set('successLocation', null) as Parser; } succeed(location: Optional): Parser { - return this.clone({ successLocation: location, failToken: null }) as Parser; + return this.set('successLocation', location).set('failToken', null) as Parser; } parse(fn: ParseFunc): { result: Optional, diagnostics: ReadonlyArray } { diff --git a/src/semantic/namespace.ts b/src/semantic/namespace.ts index 1703a58..3bfcd2b 100644 --- a/src/semantic/namespace.ts +++ b/src/semantic/namespace.ts @@ -1,41 +1,16 @@ import { CoreObject } from '~/core'; -import { FunctionDeclaration, TypeDeclaration, ConstantDeclaration, AnonymousFunctionDeclaration, AnonymousTypeDeclaration, AnonymousConstantDeclaration, NamespaceDeclaration, AnonymousNamespaceDeclaration } from '~/syntax'; +import * as syntax from '~/syntax'; -/** - * An abstract object representing a "namespace". - * Serves as a parent class for modules and declared namespaces, - * both of which are semantically "namespaces". - */ -export abstract class Namespace extends CoreObject { + +class NamespaceBase extends CoreObject { readonly localNames: ReadonlyMap> = new Map(); readonly exports: ReadonlyMap> = new Map(); - constructor( - readonly namespaceId: number - ) { super(); } - - addImport(targetModule: string, localName: string, exportName: string): Namespace { - let target: NameTarget; - if (exportName === '*') { - target = { modulePath: targetModule } as RemoteNamespace; - } else { - target = { modulePath: targetModule, exportName } as RemoteName; - } - const array = [...(this.localNames.get(localName) || []), target]; - return this.clone({ localNames: this.localNames.iset(localName, array) }); - } + constructor(readonly namespaceId: number) { super() } - addForward(targetModule: string, forwardName: string, exportName: string): Namespace { - let target: NameTarget; - if (exportName === '*') { - // if it's a pure forward, we can't resolve anything right now - if (forwardName === '*') return this; - target = { modulePath: targetModule } as RemoteNamespace; - } else { - target = { modulePath: targetModule, exportName } as RemoteName; - } - const array = [...(this.exports.get(forwardName) || []), target]; - return this.clone({ exports: this.exports.iset(forwardName, array) }); + addLocalDeclaration(name: string, declarationId: number) { + const existing = this.localNames.get(name) || []; + return this.mutate('localNames', _ => _.iset(name, [...existing, new LocalDeclaration(declarationId)])); } } @@ -43,12 +18,12 @@ export abstract class Namespace extends CoreObject { * A declared namespace within another namespace. * It has a name, a parent namespace id, and a declaration id (because it is a declaration). 
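/*
 * A self-contained sketch of the set()/mutate() style this patch introduces, applied to a toy
 * Counter class so it does not depend on CoreObject itself; Counter and its fields are
 * illustrative only. Each call copies the receiver and returns a new instance.
 */
class Counter {
    readonly count: number = 0;
    readonly label: string = '';

    private copy(): this {
        // shallow copy preserving the prototype, as the CoreObject.clone() above does
        return Object.assign(Object.create(Object.getPrototypeOf(this)), this);
    }

    set<K extends keyof this>(key: K, value: this[K]): this {
        return Object.assign(this.copy(), { [key]: value });
    }

    mutate<K extends keyof this>(key: K, fn: (val: this[K]) => this[K]): this {
        return Object.assign(this.copy(), { [key]: fn(this[key]) });
    }
}

// each call returns a fresh instance; the original is untouched
const a = new Counter();
const b = a.set('label', 'ticks').mutate('count', n => n + 1);
// a.count === 0, b.count === 1, b.label === 'ticks'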
*/ -export class DeclaredNamespace extends Namespace { +export class NestedNamespace extends NamespaceBase { constructor( namespaceId: number, readonly parentNamespaceId: number, readonly declarationId: number, - readonly namespaceDeclaration: NamespaceDeclaration | AnonymousNamespaceDeclaration + readonly node: syntax.NamespaceDeclaration | syntax.AnonymousNamespaceDeclaration ) { super(namespaceId); } } @@ -57,13 +32,15 @@ export class DeclaredNamespace extends Namespace { * A module is a type of namespace, and can contain local names, exports, and declarations. * Where it differs from a generic namespace is that it has no parent namespace, and is associated with a file path. */ -export class Module extends Namespace { +export class ModuleNamespace extends NamespaceBase { constructor( namespaceId: number, readonly absolutePath: string ) { super(namespaceId); } } +export type Namespace = ModuleNamespace | NestedNamespace; + /** * For any given name in a program, there are target(s) to which that name resolves. * A target will always be either: @@ -124,35 +101,41 @@ export class CircularReference extends CoreObject {} /** * A semantic declaration is a node that is ultimately associated with a name */ -export type DeclaredEntity = DeclaredFunction | DeclaredType | DeclaredConstant | DeclaredNamespace; +export type Declaration = FunctionDeclaration | TypeDeclaration | ConstantDeclaration | NamespaceDeclaration; /** * A semantic function entity, identified by a name. */ -export class DeclaredFunction extends CoreObject { +export class FunctionDeclaration extends CoreObject { constructor( readonly declarationId: number, - readonly functionDeclaration: FunctionDeclaration | AnonymousFunctionDeclaration + readonly node: syntax.FunctionDeclaration | syntax.AnonymousFunctionDeclaration ) { super() } } /** * A semantic type entity, identified by a name. - * NOTE: this is different from the concept of a "type" in type checking TODO then what is? */ -export class DeclaredType extends CoreObject { +export class TypeDeclaration extends CoreObject { constructor( readonly declarationId: number, - readonly typeDeclaration: TypeDeclaration | AnonymousTypeDeclaration + readonly node: syntax.TypeDeclaration | syntax.AnonymousTypeDeclaration ) { super() } } /** * A semantic constant entity, identified by a name. 
*/ -export class DeclaredConstant extends CoreObject { +export class ConstantDeclaration extends CoreObject { constructor( readonly declarationId: number, - readonly constantDeclaration: ConstantDeclaration | AnonymousConstantDeclaration + readonly node: syntax.ConstantDeclaration | syntax.AnonymousConstantDeclaration + ) { super() } +} + +export class NamespaceDeclaration extends CoreObject { + constructor( + readonly declarationId: number, + readonly namespaceId: number ) { super() } } diff --git a/src/semantic/passes/enumeration.ts b/src/semantic/passes/enumeration.ts index 796f20a..4a99eb8 100644 --- a/src/semantic/passes/enumeration.ts +++ b/src/semantic/passes/enumeration.ts @@ -1,18 +1,17 @@ import { Dependency, ImportedNamespace, ImportedName, PureForward, ForwardedNamespace, ForwardedName, ExportedName, ExportedDeclaration } from './dependencies'; import { parseModule } from '~/parser'; import { Diagnostic, FilePosition, CoreObject } from '~/core'; -import { ModuleRoot, ImportDeclaration, ExportDeclaration, ExportForwardDeclaration, Declaration as SyntaxDeclaration, TypeDeclaration, FunctionDeclaration, ConstantDeclaration, NamespaceDeclaration, AnonymousTypeDeclaration, AnonymousFunctionDeclaration, AnonymousConstantDeclaration, AnonymousNamespaceDeclaration } from '~/syntax'; +import * as syntax from '~/syntax'; import { LazyList, single } from '~/utils/lazy-list'; -import { SyntaxType, AnonymousDeclaration } from '~/syntax/environment'; import resolveModule from '~/semantic/resolver'; import { resolve } from 'path'; -import { Namespace, DeclaredEntity, Module, DeclaredType, DeclaredFunction, DeclaredConstant, DeclaredNamespace } from '~/semantic/namespace'; +import * as ns from '~/semantic/namespace'; export interface NamespaceEnumerationOutput { readonly modules: ReadonlyMap; - readonly namespaces: ReadonlyArray; - readonly declarations: ReadonlyArray; + readonly namespaces: ReadonlyArray; + readonly declarations: ReadonlyArray; readonly dependencyQueue: ReadonlyArray; readonly diagnostics: ReadonlyArray; } @@ -37,13 +36,18 @@ export default function enumerateNamespaces(mainModulePath: string): NamespaceEn return new EnumerationProcess(mainModulePath).run(); } -type AnyDeclaration = ImportDeclaration | ExportDeclaration | ExportForwardDeclaration | SyntaxDeclaration | AnonymousDeclaration; +type AnyDeclaration + = syntax.ImportDeclaration + | syntax.ExportDeclaration + | syntax.ExportForwardDeclaration + | syntax.Declaration + | syntax.AnonymousDeclaration; class EnumerationProcess extends CoreObject { readonly moduleQueue: LazyList; readonly modules: ReadonlyMap; - readonly namespaces: ReadonlyArray = []; - readonly declarations: ReadonlyArray = []; + readonly namespaces: ReadonlyArray = []; + readonly declarations: ReadonlyArray = []; readonly dependencyQueue: ReadonlyArray = []; readonly diagnostics: ReadonlyArray = []; @@ -61,9 +65,9 @@ class EnumerationProcess extends CoreObject { consumeModuleQueue(): EnumerationProcess { if (this.moduleQueue.empty) return this; const { head: modulePath, tail: moduleQueue } = this.moduleQueue; - let next: EnumerationProcess = this.clone({ moduleQueue }); + let next: EnumerationProcess = this.set('moduleQueue', moduleQueue); // parse the module - let moduleSyntax: Optional, parseDiagnostics: ReadonlyArray; + let moduleSyntax: Optional, parseDiagnostics: ReadonlyArray; try { ({ module: moduleSyntax, diagnostics: parseDiagnostics } = parseModule(modulePath)); } catch (err) { @@ -97,23 +101,21 @@ class EnumerationProcess extends CoreObject { 
} setFailedModule(path: string, status: ModuleEnumerationStatus): EnumerationProcess { - return this.clone({ modules: this.modules.iset(path, { namespaceId: null, status }) }); + return this.mutate('modules', _ => _.iset(path, { namespaceId: null, status })); } setSuccessfulModule(path: string): EnumerationProcess { const namespaceId = this.namespaces.length; - return this.clone({ - modules: this.modules.iset(path, { namespaceId, status: ModuleEnumerationStatus.SUCCESS }), - namespaces: [...this.namespaces, new Module(namespaceId, path)] - }); + return this + .mutate('modules', _ => _.iset(path, { namespaceId, status: ModuleEnumerationStatus.SUCCESS })) + .mutate('namespaces', _ => [..._, new ns.ModuleNamespace(namespaceId, path)]); } addReferencedModule(path: string): EnumerationProcess { if (this.modules.has(path)) return this; - return this.clone({ - modules: this.modules.iset(path, { namespaceId: null, status: ModuleEnumerationStatus.REFERENCED }), - moduleQueue: this.moduleQueue.append(path) - }); + return this + .mutate('modules', _ => _.iset(path, { namespaceId: null, status: ModuleEnumerationStatus.REFERENCED })) + .mutate('moduleQueue', _ => _.append(path)); } addImport(namespaceId: number, targetModule: string, localName: string, exportName: string): EnumerationProcess { @@ -123,9 +125,7 @@ class EnumerationProcess extends CoreObject { } else { dep = new ImportedName(namespaceId, localName, targetModule, exportName); } - return this.clone({ - dependencyQueue: [...this.dependencyQueue, dep] - }); + return this.mutate('dependencyQueue', _ => [..._, dep]); } addForward(namespaceId: number, targetModule: string, forwardName: string, exportName: string): EnumerationProcess { @@ -139,49 +139,43 @@ class EnumerationProcess extends CoreObject { } else { dep = new ForwardedName(namespaceId, forwardName, targetModule, exportName); } - return this.clone({ - dependencyQueue: [...this.dependencyQueue, dep] - }); + return this.mutate('dependencyQueue', _ => [..._, dep]); } addExportedName(namespaceId: number, exportName: string, localName: string): EnumerationProcess { const dep = new ExportedName(namespaceId, localName, exportName); - return this.clone({ - dependencyQueue: [...this.dependencyQueue, dep] - }); + return this.mutate('dependencyQueue', _ => [..._, dep]); } addExportedDeclaration(namespaceId: number, exportName: string, declarationId: number): EnumerationProcess { const dep = new ExportedDeclaration(namespaceId, declarationId, exportName); - return this.clone({ - dependencyQueue: [...this.dependencyQueue, dep] - }); + return this.mutate('dependencyQueue', _ => [..._, dep]); + } + + addLocalName(namespaceId: number, name: string, declarationId: number): EnumerationProcess { + return this.mutate('namespaces', _ => _.mutate(namespaceId, _ => _.addLocalDeclaration(name, declarationId))); } addDiagnostics(diagnostics: ReadonlyArray): EnumerationProcess { - return this.clone({ diagnostics: [...this.diagnostics, ...diagnostics] }); + return this.mutate('diagnostics', _ => [..._, ...diagnostics]); } withEntryError(error: string): EnumerationProcess { - return this.clone({ - diagnostics: [new Diagnostic(error, new FilePosition('', [0, 0]))] - }); + return this.addDiagnostics([new Diagnostic(error, new FilePosition('', [0, 0]))]); } handleDeclaration(node: AnyDeclaration, namespaceId: number, modulePath: string, containingExport: Optional = null): EnumerationProcess { - switch (node.syntaxType) { - case SyntaxType.ImportDeclaration: return this.handleImport(node, namespaceId, modulePath); - 
case SyntaxType.ExportDeclaration: return this.handleExport(node, namespaceId, modulePath); - case SyntaxType.ExportForwardDeclaration: return this.handleForward(node, namespaceId, modulePath); - case SyntaxType.TypeDeclaration: return this.handleType(node, namespaceId, containingExport); - case SyntaxType.AnonymousTypeDeclaration: return this.handleType(node, namespaceId, containingExport); - case SyntaxType.FunctionDeclaration: return this.handleFunction(node, namespaceId, containingExport); - case SyntaxType.AnonymousFunctionDeclaration: return this.handleFunction(node, namespaceId, containingExport); - case SyntaxType.ConstantDeclaration: return this.handleConstant(node, namespaceId, containingExport); - case SyntaxType.AnonymousConstantDeclaration: return this.handleConstant(node, namespaceId, containingExport); - case SyntaxType.NamespaceDeclaration: return this.handleNamespace(node, namespaceId, modulePath, containingExport); - case SyntaxType.AnonymousNamespaceDeclaration: return this.handleNamespace(node, namespaceId, modulePath, containingExport); - } + if (node instanceof syntax.ImportDeclaration) return this.handleImport(node, namespaceId, modulePath); + if (node instanceof syntax.ExportDeclaration) return this.handleExport(node, namespaceId, modulePath); + if (node instanceof syntax.ExportForwardDeclaration) return this.handleForward(node, namespaceId, modulePath); + if (node instanceof syntax.TypeDeclaration) return this.handleType(node, namespaceId, containingExport); + if (node instanceof syntax.AnonymousTypeDeclaration) return this.handleType(node, namespaceId, containingExport); + if (node instanceof syntax.FunctionDeclaration) return this.handleFunction(node, namespaceId, containingExport); + if (node instanceof syntax.AnonymousFunctionDeclaration) return this.handleFunction(node, namespaceId, containingExport); + if (node instanceof syntax.ConstantDeclaration) return this.handleConstant(node, namespaceId, containingExport); + if (node instanceof syntax.AnonymousConstantDeclaration) return this.handleConstant(node, namespaceId, containingExport); + if (node instanceof syntax.NamespaceDeclaration) return this.handleNamespace(node, namespaceId, modulePath, containingExport); + return this.handleNamespace(node, namespaceId, modulePath, containingExport); } /** @@ -191,7 +185,7 @@ class EnumerationProcess extends CoreObject { * Additionally, the target module path should be resolved and added to the * module queue and registry, only if it does not already exist in the registry. */ - handleImport(node: ImportDeclaration, namespaceId: number, modulePath: string) { + handleImport(node: syntax.ImportDeclaration, namespaceId: number, modulePath: string) { let next = this as EnumerationProcess; // handle the module name let targetModule = resolveModule(modulePath, node.moduleName.value); @@ -216,7 +210,7 @@ class EnumerationProcess extends CoreObject { * and an entry to the dependency queue. * If the export is an exported declaration, the declaration must also be processed. */ - handleExport(node: ExportDeclaration, namespaceId: number, modulePath: string) { + handleExport(node: syntax.ExportDeclaration, namespaceId: number, modulePath: string) { let next = this as EnumerationProcess; // add export names for (const exp of node.exports) { @@ -238,7 +232,7 @@ class EnumerationProcess extends CoreObject { * Additionally, the target module path should be resolved and added to the * module queue and registry, only if it does not already exist in the registry. 
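/*
 * A minimal illustration of the dispatch style switched to above: instanceof checks narrow class
 * instances directly, so no syntaxType tag is needed to pick the handler. Dog and Cat are toy
 * stand-ins for the syntax node classes.
 */
class Dog { bark() { return 'woof'; } }
class Cat { meow() { return 'meow'; } }
type Pet = Dog | Cat;

function speak(pet: Pet): string {
    if (pet instanceof Dog) return pet.bark(); // narrowed to Dog here
    return pet.meow();                         // only Cat remains
}

// speak(new Dog()) -> 'woof', speak(new Cat()) -> 'meow'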
*/ - handleForward(node: ExportForwardDeclaration, namespaceId: number, modulePath: string) { + handleForward(node: syntax.ExportForwardDeclaration, namespaceId: number, modulePath: string) { let next = this as EnumerationProcess; // handle the module name let targetModule = resolveModule(modulePath, node.moduleName.value); @@ -262,11 +256,12 @@ class EnumerationProcess extends CoreObject { * - Create a DeclaredType * - Register the DeclaredType to the process's declaration registry * - If there is a parent export, register the corresponding dependency + * TODO add local-declaration local-names for each declaration */ - handleType(node: TypeDeclaration | AnonymousTypeDeclaration, namespaceId: number, containingExport: Optional) { + handleType(node: syntax.TypeDeclaration | syntax.AnonymousTypeDeclaration, namespaceId: number, containingExport: Optional) { const declarationId = this.declarations.length; - const declaredType = new DeclaredType(declarationId, node); - const next: EnumerationProcess = this.clone({ declarations: [...this.declarations, declaredType] }); + const declaredType = new ns.TypeDeclaration(declarationId, node); + const next: EnumerationProcess = this.mutate('declarations', _ => [..._, declaredType]); if (containingExport) return next.addExportedDeclaration(namespaceId, containingExport, declarationId); return next; @@ -278,10 +273,10 @@ class EnumerationProcess extends CoreObject { * - Register the DeclaredFunction to the process's declaration registry * - If there is a parent export, register the corresponding dependency */ - handleFunction(node: FunctionDeclaration | AnonymousFunctionDeclaration, namespaceId: number, containingExport: Optional) { + handleFunction(node: syntax.FunctionDeclaration | syntax.AnonymousFunctionDeclaration, namespaceId: number, containingExport: Optional) { const declarationId = this.declarations.length; - const declaredFunction = new DeclaredFunction(declarationId, node); - const next: EnumerationProcess = this.clone({ declarations: [...this.declarations, declaredFunction] }); + const declaredFunction = new ns.FunctionDeclaration(declarationId, node); + const next: EnumerationProcess = this.mutate('declarations', _ => [..._, declaredFunction]); if (containingExport) return next.addExportedDeclaration(namespaceId, containingExport, declarationId); return next; @@ -293,10 +288,10 @@ class EnumerationProcess extends CoreObject { * - Register the DeclaredConstant to the process's declaration registry * - If there is a parent export, register the corresponding dependency */ - handleConstant(node: ConstantDeclaration | AnonymousConstantDeclaration, namespaceId: number, containingExport: Optional) { + handleConstant(node: syntax.ConstantDeclaration | syntax.AnonymousConstantDeclaration, namespaceId: number, containingExport: Optional) { const declarationId = this.declarations.length; - const declaredConstant = new DeclaredConstant(declarationId, node); - const next: EnumerationProcess = this.clone({ declarations: [...this.declarations, declaredConstant] }); + const declaredConstant = new ns.ConstantDeclaration(declarationId, node); + const next: EnumerationProcess = this.mutate('declarations', _ => [..._, declaredConstant]); if (containingExport) return next.addExportedDeclaration(namespaceId, containingExport, declarationId); return next; @@ -309,18 +304,20 @@ class EnumerationProcess extends CoreObject { * - If there is a parent export, register the corresponding dependency * - Process all of the namespace's declarations */ - 
handleNamespace(node: NamespaceDeclaration | AnonymousNamespaceDeclaration, parentNamespaceId: number, modulePath: string, containingExport: Optional) { + handleNamespace(node: syntax.NamespaceDeclaration | syntax.AnonymousNamespaceDeclaration, parentNamespaceId: number, modulePath: string, containingExport: Optional) { const namespaceId = this.namespaces.length; const declarationId = this.declarations.length; - const declaredNamespace = new DeclaredNamespace(namespaceId, parentNamespaceId, declarationId, node); - let next: EnumerationProcess = this.clone({ - declarations: [...this.declarations, declaredNamespace], - namespaces: [...this.namespaces, declaredNamespace] - }); + const nestedNamespace = new ns.NestedNamespace(namespaceId, parentNamespaceId, declarationId, node); + const declaredNamespace = new ns.NamespaceDeclaration(declarationId, namespaceId); + let next: EnumerationProcess = this + .mutate('declarations', _ => [..._, declaredNamespace]) + .mutate('namespaces', _ => [..._, nestedNamespace]); + if (node instanceof syntax.NamespaceDeclaration) + next = next.addLocalName(parentNamespaceId, node.name.image, declarationId); if (containingExport) next = next.addExportedDeclaration(parentNamespaceId, containingExport, declarationId); // process all declarations in the namespace - for (const declaration of [...declaredNamespace.namespaceDeclaration.imports, ...declaredNamespace.namespaceDeclaration.declarations]) { + for (const declaration of [...node.imports, ...node.declarations]) { next = next.handleDeclaration(declaration, namespaceId, modulePath); } return next; diff --git a/src/semantic/passes/resolution.ts b/src/semantic/passes/resolution.ts index ea7f710..06d7f15 100644 --- a/src/semantic/passes/resolution.ts +++ b/src/semantic/passes/resolution.ts @@ -60,7 +60,7 @@ class ResolutionProcess extends CoreObject { consumeDependencyQueue(): ResolutionProcess { if (this.dependencyQueue.empty) return this; const { head, tail } = this.dependencyQueue; - const next: ResolutionProcess = this.processDependency(head).clone({ dependencyQueue: tail }); + const next: ResolutionProcess = this.processDependency(head).set('dependencyQueue', tail); return next.consumeDependencyQueue(); } @@ -100,9 +100,7 @@ class ResolutionProcess extends CoreObject { setLocalNameStatus(namespaceId: number, name: string, status: DependencyStatus): ResolutionProcess { const ns = this.localNameStatuses.get(namespaceId) || new Map(); - return this.clone({ - localNameStatuses: this.localNameStatuses.iset(namespaceId, ns.iset(name, status)) - }); + return this.mutate('localNameStatuses', _ => _.iset(namespaceId, ns.iset(name, status))); } output = (): DependencyResolutionOutput => ({ diff --git a/src/syntax/declarations/ImportDeclaration.ts b/src/syntax/declarations/ImportDeclaration.ts index 41a8f4c..e72f8c8 100644 --- a/src/syntax/declarations/ImportDeclaration.ts +++ b/src/syntax/declarations/ImportDeclaration.ts @@ -96,4 +96,4 @@ export const parseImportDeclaration: ParseFunc = seq( ([_1, _2, moduleName, _3, imports], location) => new ImportDeclaration(location, moduleName, imports) ); -const defaultImport = (token: Token): Import => ({ importName: token.clone({ image: 'default' }), aliasName: token }); +const defaultImport = (token: Token): Import => ({ importName: token.set('image', 'default'), aliasName: token }); diff --git a/src/syntax/index.ts b/src/syntax/index.ts index cdc3f60..6d870d4 100644 --- a/src/syntax/index.ts +++ b/src/syntax/index.ts @@ -6,6 +6,6 @@ export * from './statements'; export * from 
'./declarations'; export { ModuleRoot } from './ModuleRoot'; -export { Declaration, Type, Expression, Statement } from './environment'; +export { Declaration, AnonymousDeclaration, Type, Expression, Statement } from './environment'; export * from './visitor'; From a1bafa8116002d5c377249c87f1a973841f1a0e4 Mon Sep 17 00:00:00 2001 From: Jake Chitel Date: Wed, 16 May 2018 17:35:30 -0500 Subject: [PATCH 14/15] added local names to namespaces for declarations --- src/semantic/passes/enumeration.ts | 12 +++++++++--- src/semantic/passes/resolution.ts | 6 +++--- 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/src/semantic/passes/enumeration.ts b/src/semantic/passes/enumeration.ts index 4a99eb8..4170662 100644 --- a/src/semantic/passes/enumeration.ts +++ b/src/semantic/passes/enumeration.ts @@ -261,7 +261,9 @@ class EnumerationProcess extends CoreObject { handleType(node: syntax.TypeDeclaration | syntax.AnonymousTypeDeclaration, namespaceId: number, containingExport: Optional) { const declarationId = this.declarations.length; const declaredType = new ns.TypeDeclaration(declarationId, node); - const next: EnumerationProcess = this.mutate('declarations', _ => [..._, declaredType]); + let next: EnumerationProcess = this.mutate('declarations', _ => [..._, declaredType]); + if (node instanceof syntax.TypeDeclaration) + next = next.addLocalName(namespaceId, node.name.image, declarationId); if (containingExport) return next.addExportedDeclaration(namespaceId, containingExport, declarationId); return next; @@ -276,7 +278,9 @@ class EnumerationProcess extends CoreObject { handleFunction(node: syntax.FunctionDeclaration | syntax.AnonymousFunctionDeclaration, namespaceId: number, containingExport: Optional) { const declarationId = this.declarations.length; const declaredFunction = new ns.FunctionDeclaration(declarationId, node); - const next: EnumerationProcess = this.mutate('declarations', _ => [..._, declaredFunction]); + let next: EnumerationProcess = this.mutate('declarations', _ => [..._, declaredFunction]); + if (node instanceof syntax.FunctionDeclaration) + next = next.addLocalName(namespaceId, node.name.image, declarationId); if (containingExport) return next.addExportedDeclaration(namespaceId, containingExport, declarationId); return next; @@ -291,7 +295,9 @@ class EnumerationProcess extends CoreObject { handleConstant(node: syntax.ConstantDeclaration | syntax.AnonymousConstantDeclaration, namespaceId: number, containingExport: Optional) { const declarationId = this.declarations.length; const declaredConstant = new ns.ConstantDeclaration(declarationId, node); - const next: EnumerationProcess = this.mutate('declarations', _ => [..._, declaredConstant]); + let next: EnumerationProcess = this.mutate('declarations', _ => [..._, declaredConstant]); + if (node instanceof syntax.ConstantDeclaration) + next = next.addLocalName(namespaceId, node.name.image, declarationId); if (containingExport) return next.addExportedDeclaration(namespaceId, containingExport, declarationId); return next; diff --git a/src/semantic/passes/resolution.ts b/src/semantic/passes/resolution.ts index 06d7f15..9c02b96 100644 --- a/src/semantic/passes/resolution.ts +++ b/src/semantic/passes/resolution.ts @@ -1,5 +1,5 @@ import { EnumeratedModule, ModuleEnumerationStatus } from './enumeration'; -import { DeclaredEntity, Namespace } from '../namespace'; +import { Declaration, Namespace } from '../namespace'; import { Dependency, ImportedName, ImportedNamespace, ForwardedName, ForwardedNamespace, PureForward, ExportedName } 
from './dependencies'; import { CoreObject, Diagnostic } from '~/core'; import { LazyList, fromIterable } from '~/utils/lazy-list'; @@ -10,7 +10,7 @@ export interface DependencyResolutionOutput { readonly diagnostics: ReadonlyArray; } -export default function resolveDependencies(modules: ReadonlyMap, declarations: ReadonlyArray, namespaces: ReadonlyArray, dependencyQueue: ReadonlyArray) { +export default function resolveDependencies(modules: ReadonlyMap, declarations: ReadonlyArray, namespaces: ReadonlyArray, dependencyQueue: ReadonlyArray) { return new ResolutionProcess(modules, declarations, namespaces, dependencyQueue).run(); } @@ -36,7 +36,7 @@ class ResolutionProcess extends CoreObject { constructor( readonly modules: ReadonlyMap, - readonly declarations: ReadonlyArray, + readonly declarations: ReadonlyArray, readonly namespaces: ReadonlyArray, dependencyQueue: ReadonlyArray ) { From e348d8238be9c1a013fa7d8595d4b9e7970b723a Mon Sep 17 00:00:00 2001 From: Jake Chitel Date: Fri, 15 Mar 2019 08:58:31 -0500 Subject: [PATCH 15/15] adding for posterity --- docs/01-parser.md | 76 ++++++ docs/02-parser-background.md | 93 +++++++ docs/03-lexer.md | 0 docs/README.md | 95 ++++++++ package.json | 2 +- src/extensions.ts | 123 +++++++++- src/parser/lexer/token.ts | 2 +- src/semantic/index.ts | 2 +- src/semantic/namespace.ts | 343 ++++++++++++++++++++++++-- src/semantic/passes/dependencies.ts | 166 +++++++++++-- src/semantic/passes/enumeration.ts | 113 ++++----- src/semantic/passes/resolution.ts | 363 ++++++++++++++++++++++------ src/semantic/program.ts | 4 +- yarn.lock | 6 +- 14 files changed, 1199 insertions(+), 189 deletions(-) create mode 100644 docs/01-parser.md create mode 100644 docs/02-parser-background.md create mode 100644 docs/03-lexer.md create mode 100644 docs/README.md diff --git a/docs/01-parser.md b/docs/01-parser.md new file mode 100644 index 0000000..21dd225 --- /dev/null +++ b/docs/01-parser.md @@ -0,0 +1,76 @@ +# Parser + +The first step of the compiler is the parser. There is a 4 step pipeline to parsing: + +1. Read raw bytes from a file, creating a byte stream (*data sourcing*) +2. Stream the bytes through a UTF-8 decoder to produce a character stream (*character decoding*) +3. Stream the characters through a tokenizer to produce a token stream (*lexical analysis*) +4. Stream the tokens through a parser to produce a syntax tree (*syntactic analysis*) + +I will not go into too much depth of what a lot of these terms mean. Here are some basic definitions: + +* *stream*: A stream is a sequence of data similar to a queue where each item in the sequence is processed one at a time in order and then discarded. Streams are typically *lazy*, meaning that the source of the data is only read on demand. This is opposed to reading in the entire contents of a data source into a single buffer and then processing the buffer. Processing data typically involves creating multiple intermediate sequences, each containing the next step of the processing. Streaming allows each step in the process to be arranged in a pipeline so no data is duplicated, which conserves memory and is more performant. +* *token*: A token is a single unit of text in a parser, always composed of one or more (in special cases zero) characters. Any symbol, word, or other sequence of characters that represents a single unit of text is a token. Lexical analysis (tokenization) is the process responsible for turning a character stream into a token stream by combining some of the characters into tokens according to lexical rules. 
+* *syntax tree*: While a token stream is purely linear, a syntax tree is (as you may have guessed) a tree structure, with a single root node and potentially several child nodes, where each node can have any number of children. All leaf nodes of the syntax tree appear in the token stream in the same order from left to right (some tokens may be discarded as syntactically meaningless). The parent nodes represent high-level structures created by sequences of tokens and other nodes. The root node of a syntax tree represents the full text that was parsed. Syntactic analysis (parsing) is the process responsible for restructuring a token stream into a syntax tree using a specified grammar. + +If you aren't familiar with parsing, [this](./02-parser-background.md) will give you a basic rundown of the theory and implementation of parsers, ending up with the rationale behind Ren's parsing strategy. + +The first step of parsing, "tokenization" or "lexing", is out of the scope of this file. See [here](./03-lexer.md) for information on the lexical analysis behind this parser. + +## Implementation + +The parser was particularly challenging to implement in a pure functional way, for many reasons: + +* Reading a file has side effects (potential errors) and is inherently not referentially transparent (a given file could have vastly different contents at different times). +* Parsers track a lot of state, and sometimes have to backtrack if a particular parse path was not matched. +* Syntax specification is hard when you don't want to just do it as custom logic (which we didn't, because that has involved a lot of repetition in the past) + +These problems were eventually resolved elegantly using a few strategies: + +* Ignore the fact that reading a file isn't pure functional. Just assume it is referentially transparent and handle edge cases elegantly. There are really no consequences of this in TypeScript. +* Use a lazy list to avoid reading the file all at once, as well as more than once. Because the lazy lists are immutable, backtracking involves just returning with a flag indicating that the next branch of logic should be attempted. +* Use a syntax specification system built by composing functions that implement the parsing logic. + +To describe how we landed here, for posterity's sake (and also because I'm very proud of it), I'd like to start at the very beginning of my journey into writing my own parser. + +### History + +#### From Humble Beginnings + +I started my journey with a basic template of the process of an interpreter. You read a file, parse it, translate it to IR, and interpret those IR instructions, returning the resulting exit code. + +The first thing I did was try to specify a basic grammar to start with. I ended up settling on a PEG because they are so beautiful to read and to write. This decision impacted most of the rest of the parser. + +I knew that I wanted to write the parser from scratch, for two main reasons: (1) primarily for bragging rights, and (2) I wanted full control over the resulting API and internals. From there, I just needed to know how to implement said parser. At the time I was only aware of top-down parsers (I may have considered a bottom-up parser instead if I were starting now), so after a few minutes of internet searching, I discovered that the standard "from scratch" way to implement a top-down parser is to implement a recursive descent parser. + +The next thing I wanted to do was implement the tokenizer, which took a while (see [here](03-lexer.md) for details). 
But once I got that into a good state, I was finally able to start working on the parser. As I got going, I started noticing certain patterns, so I extracted those into common methods. I realized that I needed a system to throw an error when invalid syntax was found, so I constructed a clever way to extract the location in the file based on the current place in the tokenizer. I also implemented AST nodes as classes, which looked quite nice to start out with. + +As time drew on, I was generally unsatisfied with what I was writing. It was overly verbose and I wasn't getting the simplicity that I was promised from a recursive descent parser. I hoped to resolve this slightly by including the grammar specification alongside each non-terminal accept function, and I still looked to extract common logic wherever possible. But I trudged on and implemented the whole parser using the patterns I had. + +Then when I started testing, I realized there were several places where ambiguity was going to be a problem. I ended up having to refactor a lot of the logic and reorder the parse order for several non-terminals to resolve that, but it effectively worked the same way: read one or more tokens to determine the path, try to consume child non-terminals, if successful continue, if not return false to indicate we need the next choice. I also consolidated my AST classes into fewer files because I hated having one file for each of the teeny tiny classes (I ended up backtracking on this decision much later). + +But I did it! I had a working and completely tested parser, as unsatisfied as I was with it. But I didn't want to work on it anymore, so I continued with type checking. It was quickly apparent that my syntax nodes were insufficient. The nodes themselves needed to store location information for error reporting, so I added that. They also needed to be simplified to just semantic information so that the type checker doesn't have to muddy around with tokens. So I added the concept of a "reduce" function, one for each node type, to extract just the semantic info. I also added a Module class as a semantic container for a module and all of its components (as opposed to the Program syntax node which stores the syntax, because that's not confusing). + +As needed by the type checker, I added elements to the syntax, which didn't prove too complicated. I added a "continue" keyword, "true" and "false" literals, an "any" type, and comments to the grammar. Eventually I got to the point where I needed to resolve the type of various nodes that have types. So I added another method to every typeable node for doing that. Suddenly these syntax classes were looking a bit bloated, and contained a whole lot of type checking logic, which isn't really their responsibility. + +Once type checking was done, I blasted through translation (super easy) and interpreting (a bit difficult but mostly easy). I had what was theoretically a working interpreter. The frontend aspects (parsing and type checking) are the most complicated pieces of that. Once I started to taste the end, I could start thinking about what comes next. I obviously implemented this whole thing with very simple features, but in order to be practical, this compiler was going to need to be able to support much more complex features. I identified 5 features that needed to be implemented in order to start considering the language practical: classes, interfaces, overloads, extension methods, and generics. 
I ordered those based on dependency and determined that I needed to start with generics. + +So I began work on generics! Step 1 was to add generics to the parser. I started doing that and realized that in the time since I was working on the parser, I forgot how it worked, and it wasn't immediately obvious based on just looking at it. That was a problem, because I would need to return to the parser a lot if I was to continue maintaining this language. From there, I got kinda bummed out because I was going to have to work on the parser again, and I fell off consistent development for a while. But I eventually got back into it, and decided to completely redefine the parser implementation into a generic framework. + +#### The First Refactor: A Generic Framework For Parsing + +The framework was composed of an "accept()" method which took a structure containing a specification of the non-terminal, very similar to a grammar specification. You could also specify qualifiers such as "?" (optional) or "*" (zero or more). You would also specify a class to use to construct a syntax node instance, and any other options. This was turning into something beautiful. However, it wasn't very elegant. The main issue was that the compiler would have two modes, one for "I need to parse this, and if it breaks the whole operation will fail" and "I'll try to parse this, and if it fails I should fail softly so that other choices can be considered." Because of this, I called the latter mode "soft mode", and the former... "hard... mode...?" That's super difficult to describe. Eventually I noticed a pattern, where soft mode was only going to be on until a "decision point" where we suddenly know for a fact that we are sticking with this path. So I abstracted the whole soft/hard thing behind a "definite" flag to place on one of the symbols in a production to indicate that it is that "decision point". + +And it worked! I reimplemented the whole parser using this new generic framework. Eventually I was able to convert from a class to a ton of parse functions that passed around a few flags and values to control the current parser state. I made the internal logic better defined by specifying several "modes" that the parser could be in depending on what kind of thing it was trying to parse. + +But it didn't *really* work. It turns out my testing was insufficient. The problem that I was running into was a classic mutation problem. The parser state was fully mutable, so when state changed low in the call stack and then had to return, there was no graceful way to rewind and pick up where we left off. The solution ended up being to make the whole tokenizer and other parser state immutable. More refactoring ensued. + +Then, once everything was done, I was able to finish the syntax for generics, and start implementing type checking for generics! Yea... turns out generics are complicated... I went through several months of nonsense trying to figure all that out, and getting discouraged again. Eventually, I needed a distraction, so I converted the whole project to TypeScript (that's right, everything before this point was in vanilla JS)! The TypeScript migration was one giant commit that touched basically the whole project. + +This had far-reaching impacts on the parser and the syntax types. I had a convoluted definition structure for each non-terminal that now had to be strongly typed. I found a way to basically do it. The difficult piece was the syntax. 
Remember the whole concept of "reducing" to make nodes simpler for type checking? I was reusing the same type for both the non-reduced and reduced nodes. That wasn't going to work anymore, so I had to split all the syntax types into two categories of syntax: CST (concrete syntax tree, for the parser output), and AST (abstract syntax tree, for the type checker). So now every syntax type needed two classes... + +Needless to say, this sucked. But I decided to put up with it for now. At that point, I could finish generic types. Which I did! However, then I noticed that the syntax nodes shouldn't have a ton of non-syntax logic inside them, so I converted all that to use visitors instead. Suddenly I had visitors everywhere! But now the logic wasn't mixed up all over the place, so I could be a bit happier. + +#### The Second Refactor: Consolidation of Syntax Using Decorators + +It was at this point that I went through a massive era of dissatisfaction with how the parser and syntax nodes worked. I did all refactoring under the sun. I split each syntax type up into several small pieces, then recombined them. I came out of this entire era with a system that I *kind of* liked, but as with most things, I thought it was the best thing since sliced bread. It was based on *decorators* on a *single node class* for each syntax type. Now I could have the grammar, reduction logic, and resulting AST structure in a single file! But even though diff --git a/docs/02-parser-background.md b/docs/02-parser-background.md new file mode 100644 index 0000000..00f0fbe --- /dev/null +++ b/docs/02-parser-background.md @@ -0,0 +1,93 @@ +# A Brief Explanation of Parsing + +Parsing isn't something everyone understands, so this is a basic runthrough of the problem of parsing, how it is typically solved, and how Ren has decided to go about it. + +Parsing is, at its most basic, converting text to meaningful structured data that can be further processed by a program. Parsing is one of the most important problems of programming. A large number of tasks involve converting text to structured data apart from interpreters and compilers, though it is certainly most prevalent in those kinds of programs because of the complexity of the text being parsed. Parsing is also particularly interesting because it is so prevalent, yet it requires a lot more base knowledge to understand than most problems in programming. + +While parsing has many different strategies, one thing that all parsers have in common is a *grammar*, which is a formal description of a language, such as a programming language. Grammars are complicated in and of themselves because they have several levels of complexity and various notation strategies. + +## Grammars and the Chomsky Hierarchy + +One of the pioneers in grammar and parsing theory was Noam Chomsky, who funnily enough was not actually a computer scientist, but a linguist. This fact alone describes how close programming language parsing is to natural language parsing. + +Noam Chomsky formalized what is called the "Chomsky Hierarchy" of languages, which specifies four levels of complexity in language/grammar. Before we can go into the hierarchy, we need to settle some terminology around languages: + +* *Language*: A language is a theoretical construct that specifies rules (a *grammar*) for how to process and understand written (or typed) text. A language is mostly an idea composed of several more concrete components. 
For example, Ren is a programming language that specifies a grammar for the syntax of the language, as well as a ton of other rules for how valid syntax is further processed semantically. +* *Grammar*: A grammar is a formal set of rules (called "productions") that specifies how to parse text of a specific language. It is represented as a list of productions, where each production is represented by two strings of symbols separated by an arrow (e.g. `abc -> def`). These symbols are either *terminals* or *non-terminals* (explained below). Each production represents an "expansion" from the left string to the right string. This is how syntax trees are produced. Every node is an instance of a production's left-side string, and its children are the symbols of that production's right-side string. Every grammar also has a "start symbol", which specifies which production to start at for the parsing process. +* *Terminal*: A terminal is a symbol that cannot be expanded, i.e. a token. Terminals are the concrete structures of the grammar; they specify where a parse path stops successfully, and that parsing should continue with the next terminal. +* *Non-terminal*: A non-terminal is a symbol that expands into a string of one or more other symbols, which can be either terminals or other non-terminals. While a non-terminal is being processed, it means that the parse path is not complete, as further expansion is required. +* *Automaton*: For every level of grammar, there is a corresponding automaton that can parse all grammars within that level. An automaton is a theoretical machine with a formalized state and method of operation, and is designed for processing strings of tokens according to a grammar. A parser is an implementation of an automaton. + +Now that that is out of the way, here is the Chomsky Hierarchy: + +### Type-3 (Regular Languages) + +Regular languages are the simplest types of languages: those that can be parsed from left to right in a linear operation. All productions of regular languages are of the form `a -> b` or `a -> bc`, where `a` is always a non-terminal, `b` is always a terminal, and `c` is always a single terminal or non-terminal. In this way, every production will always consume one terminal, and which production to use is always immediately decidable. + +If you're wondering if the word "regular" has anything to do with regular expressions, you would be correct. It turns out that regular grammars are useful enough that someone invented a syntax for creating parsers of regular languages, which is where regular expressions came from. + +The automaton used to parse regular languages is called a "finite state automaton" (FSA) or often just a "state machine". An FSA is a simple machine with several states linked by transition arrows. There is one start state and one or more completion states. Each consumed character chooses a transition arrow to transition to another state. The string is parsed successfully if the machine lands on a completion state after the input is finished. + +Regular language parsers are typically used to perform lexical analysis on programs to turn a stream of characters into a stream of tokens. In these "tokenizers", the terminals are individual characters, and the non-terminals that are parsed form tokens. + +### Type-2 (Context-Free Languages) + +Context-free languages are the type of language of concern to us in parsing. They are more complex than regular languages in that the right-side string has no restrictions; it can be composed of any number of terminals and non-terminals in any order. 
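+To make the shape of productions concrete, here is a tiny illustrative grammar (not Ren's actual grammar) for toy arithmetic expressions, written as TypeScript data purely for the sake of example. `Expr` and `Term` are non-terminals; `num`, `+`, `(`, and `)` are terminals.
+
+```typescript
+/** A production expands a single non-terminal (lhs) into a string of symbols (rhs). */
+interface Production {
+    readonly lhs: string;
+    readonly rhs: ReadonlyArray<string>;
+}
+
+// Start symbol: Expr. Note that the right-side strings mix terminals and non-terminals freely.
+const toyGrammar: ReadonlyArray<Production> = [
+    { lhs: 'Expr', rhs: ['Term', '+', 'Expr'] },
+    { lhs: 'Expr', rhs: ['Term'] },
+    { lhs: 'Term', rhs: ['num'] },
+    { lhs: 'Term', rhs: ['(', 'Expr', ')'] },
+];
+```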
+ +These languages are called "context-free" because any given non-terminal has a fixed set of potential productions that do not depend at all on the surrounding text. This may seem confusing, because programming languages are definitely context-sensitive. To account for this, parsers are usually implemented to parse a looser approximation of the actual language. This has several benefits, primarily that context-free languages are far easier to understand and far easier (and more performant) to implement. Any other context-sensitive logic is implemented in the next stage of the compiler. + +Context-free languages are parsed using an automaton called a "(non-deterministic) pushdown automaton". A pushdown automaton is simply a state machine with a stack alongside it. Each transition now not only factors in the next token in the stream but also the current symbol on top of the stack, and the transition specifies not only the next state but also optionally a new symbol to push onto the stack. In this way context-free parsers can try multiple parse paths, and go with the first successful path they find. If there is no valid transition specified given the current token and stack symbol, it will backtrack. + +I specified the "non-deterministic" qualifier in parentheses because technically given the nature of context-free grammars, it is possible to specify grammars for which there is more than one possible parse path for a given string, producing ambiguity. True context-free parsers need to be able to handle this, and theoretically they would wind up outputting potentially several equally valid syntax trees for any given source string. However, these kinds of languages are not useful in programming, where we expect only one result, so grammars for programming languages typically add restrictions to remove ambiguity, allowing a *deterministic* pushdown automaton to be used instead, which is far more practical in real-life scenarios. + +### Type-1 (Context-Sensitive Languages) and Type-0 (Recursively Enumerable Languages) + +Type-1 and Type-0 languages are typically not observed in the context of programming languages because they are far more complex to understand and to implement, and all practical scenarios are easily handled by context-free grammars. These languages remove more of the restrictions placed on productions of the prior levels, and Type-0 languages even specify that there can be any sequence of any kind of symbol on either side of the arrow (but the left-side must contain one non-terminal). + +The automaton capable of parsing these languages is called the Turing Machine, which expands further on the concept of a pushdown automaton. Any further explanation is beyond the scope of this documentation. + +The only thing I will mention is that Turing Machines are a theoretical basis of computing in general beyond just parsing. I could theoretically create a Type-1 or Type-0 grammar that fully describes the syntax and semantics of the Ren programming language, and use a Turing machine to not only parse Ren programs, but also execute them. However, the grammar would be massive and difficult to understand, and since Turing Machines have such a simple mechanism of operation, programs would take thousands of times longer to run than is acceptable in today's age. This is where the border between the theoretical and the practical is quite black and white. 
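+As a small aside, here is a minimal sketch (purely illustrative, not compiler code) of why the stack of a pushdown automaton adds power over a finite state automaton: recognizing balanced parentheses, the classic context-free-but-not-regular language, needs nothing more than a single stack.
+
+```typescript
+/** Returns true if every '(' in the input has a matching ')'. */
+function isBalanced(input: string): boolean {
+    const stack: string[] = [];
+    for (const char of input) {
+        if (char === '(') {
+            stack.push(char); // push on every open paren
+        } else if (char === ')') {
+            if (stack.pop() === undefined) return false; // a close paren with nothing to match
+        }
+    }
+    return stack.length === 0; // success only if everything was matched
+}
+```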
+ +### More Information + +See the [Wikipedia entry](https://en.wikipedia.org/wiki/Context-free_grammar) on Context-Free Grammars for more information if the above did not make sense. + +## Types of Parsers + +Now that we know how grammars are structured, we can go into how parsers actually work. + +There are many varieties of parser implementations. Three of the most important factors in choosing a parsing strategy are: + +> Is the text parsed left-to-right or right-to-left? + +This one is fairly simple. Do we start at the start of the input and consume left to right, or do we start at the end of the input and consume right to left? Nearly all parsers in use today consume their input left to right, so that the grammars are easier to write and the parser logic is easier to understand. Additionally, choosing the direction of input consumption determines how you need to handle recursion. Context-free grammars are allowed to be recursive, where the first symbol of the right side of a production is the same as the non-terminal on the left side. However, when implementing this practically, following the expansion process would result in infinite recursion, because we would never reach an expansion that actually consumes a token. When this happens in a grammar, it needs to be rewritten so that the recursion is eliminated. There is always a way to do this, which we will describe when we dig into the implementation. + +> Is the text parsed bottom-up or top-down? + +This one is a bit harder to understand, because when you look at a grammar, you'd think that top-down is the only way to do it. You start at the start symbol and descend productions until you are able to consume tokens, then continue. However, parsers that work this way can actually parse fewer languages than those that work bottom up. Bottom-up parsers work by consuming tokens from the input and trying to find a production that matches, working their way backwards toward the start symbol. There is a tradeoff here: top-down parsers are easier to write and understand but are less powerful, while bottom-up parsers are more powerful but harder to write. + +> How much lookahead do we want to allow? + +It is not always possible to take a symbol from the input and immediately know which production to follow. Several productions for the same non-terminal may begin with 4 or more symbols that are exactly the same. In these instances, you need to be able to "look ahead" to more symbols of the input in order to know which production to use. Some parsers have a fixed number of lookahead tokens, while others have a configurable number. The simplest parsers have only one token of lookahead, which can only be used to parse a subset of context-free languages. However, there are also parsers that don't care about lookahead. The purpose of lookahead is to choose what production to follow when there is a branch. Another option in this scenario is to simply follow all branches and pick the first one that works successfully. Parsers that do this are very easy to implement. The only other thing to consider for these parsers is the order in which the branches are attempted, which has a surprising impact on the parser logic. Usually you want the first branch to be the one that consumes the greatest number of tokens. + +There are several common parser types, each of which has a corresponding subset of context-free languages it can parse: +* LL(1): This is the simplest type of parser: "L" (left-to-right), "L" (top-down, same direction as consumption), with one token of lookahead. 
+* LR(1): "L" (left-to-right), "R" (bottom-up, opposite direction as consumption), with one token of lookahead +* LL(k): LL parser with a specific, but configurable, amount of lookahead +* LL(*): LL parser that automatically computes the required lookahead from the provided grammar +* LALR: A variation of LR that is simpler to implement and more performant at the expense of a small amount of power. Most generated parsers today are LALR +* PEG (parsing expression grammar): A different kind of parser which parses LL-like languages with no lookahead by specifying an order of precedence for branches. PEGs also include extra grammar syntax similar to regular expression syntax that make it much easier to specify grammars. + +## Implementing Parsers + +When implementing a parser, one needs to consider two things: +* How to specify the grammar (text representation or manually in code) +* The parsing algorithm (top-down parsers typically use some form of recursive descent, while bottom-up parsers typically use tables with a corresponding state machine) + +The second consideration is typically the important one, and it comes down to one decision: do I want to write the parser from scratch or do I want to generate the parser using a tool? This decision typically decides everything else for you. + +Writing a parser from scratch usually involves specifying a rough grammar which you will then implement effectively 1:1 as a "recursive descent" parser. In a recursive descent parser, each non-terminal of the grammar is implemented as a function. That function will do two things: (1) decide which production to follow, and (2) consume tokens and call other non-terminal functions to "follow" the chosen production. These kinds of parsers are very easy to understand because the implementation mirrors the grammar specification. PEG and LL are well-suited for this implementation. The benefit of this is that you have full control over the implementation. You can do it in whatever language you please, and the API can be exactly as you specify. The drawback is that you may suffer performance issues, because recursive descent parsers (especially those that do not use memoization) are far slower than generated alternatives. It is also more work because you need to write all the logic yourself. + +Generating a parser involves writing a grammar according to a grammar syntax specified by a tool (such as Yacc or ANTLR) and running the tool on the grammar to generate a library that provides an API to parse the language specified by that grammar. Most of these tools generate LALR parsers (though ANTLR uses a variant of LL(*)) that are implemented using parse tables. The benefits of this strategy are that all you have to write is the grammar, and the parser is likely to be very performant. The drawback is that you are limited by both the language(s) and API of the tool you're using. Most popular parser generators support several languages, but you are still forced to use whatever API the parser specifies, making it difficult to conform the parser to your compiler's paradigm or inject your own logic. + +Since most compilers end up being self-hosting (they are written in the very language they compile), it is generally better to roll your own parser, which is what we have done in Ren. Ren implements a recursive-descent PEG parser using a custom-built library designed to make parser implementation expressive. 
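+To illustrate the recursive-descent idea, here is a hedged sketch of a hand-rolled parser for the productions `Expr -> Term ('+' Term)*` and `Term -> num | '(' Expr ')'`. The token shape and names below are assumptions made for the example, not the actual Ren parser library API. Each non-terminal becomes a function that consumes tokens and either succeeds with a value plus the remaining input, or fails so that the caller can try the next alternative, PEG-style.
+
+```typescript
+// Illustrative only: each non-terminal is a function over an immutable token list.
+type Tok = { readonly kind: 'num' | '+' | '(' | ')'; readonly image: string };
+type Result<T> = { ok: true; value: T; rest: ReadonlyArray<Tok> } | { ok: false };
+
+// Expr -> Term ('+' Term)*
+function parseExpr(toks: ReadonlyArray<Tok>): Result<number> {
+    const first = parseTerm(toks);
+    if (!first.ok) return { ok: false };
+    let { value, rest } = first;
+    while (rest.length > 0 && rest[0].kind === '+') {
+        const next = parseTerm(rest.slice(1));
+        if (!next.ok) return { ok: false }; // '+' with no term after it fails this path
+        value += next.value;
+        rest = next.rest;
+    }
+    return { ok: true, value, rest };
+}
+
+// Term -> num | '(' Expr ')'
+function parseTerm(toks: ReadonlyArray<Tok>): Result<number> {
+    const [head, ...rest] = toks;
+    if (!head) return { ok: false };
+    if (head.kind === 'num') return { ok: true, value: Number(head.image), rest };
+    if (head.kind === '(') {
+        const inner = parseExpr(rest);
+        if (inner.ok && inner.rest.length > 0 && inner.rest[0].kind === ')') {
+            return { ok: true, value: inner.value, rest: inner.rest.slice(1) };
+        }
+    }
+    return { ok: false }; // failure is soft: the caller is free to try another alternative
+}
+```
+
+Backtracking falls out of immutability: because a failed alternative never mutates the token list, the caller can simply try its next alternative on the same input, which mirrors the immutable lazy-list approach described in the parser doc.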
\ No newline at end of file diff --git a/docs/03-lexer.md b/docs/03-lexer.md new file mode 100644 index 0000000..e69de29 diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 0000000..bb2efae --- /dev/null +++ b/docs/README.md @@ -0,0 +1,95 @@ +# renlang Compiler + +This is documentation related **strictly** to this TypeScript implementation of the renlang compiler. + +As it has grown, it has become annoying to have to read through the code to rediscover the rationale behind decisions made in the implementation, as well as just how the implementation works. This documentation is to make that smoother for myself and for others who may find themselves in here one day. + +## 100,000 Foot View + +This is a short description of the principles that drive this implementation. + +This compiler is designed to be as simple as possible, following the Ren semantics as to what programs are. There are a few core principles for the implementation: + +* The compiler must be **100% pure functional**. This means: + * Everything is immutable: *No changing the internal structure of objects after they are created; always use APIs that facilitate immutable transforms*. TypeScript's `readonly` keyword is key for making sure this happens. + * All functions must be *referentially transparent*, meaning that for a given input, the same output should always be returned. + * *No side effects*. This means no modifying values external to the scope of a function from within the function. All that a function has access to within itself are its parameters, any local variables it creates, and any other *pure* functions it may want to call. Modifying local variables is fine as long as it is the reference that is changing, not the internal structure. +* There is one exception to the above rule: reading from and writing to files. All compilers must do this at some point during their operation, and this is inherently a side-effect operation. Files may not exist, or there may be a fault with the operating system. However, the interface provided to deal with the file system must be as purely functional as possible. +* The 100% pure functional rule has far-reaching impact on the rest of the compiler. For example: + * Classes can (and typically should) be used, but the rules of immutability are still enforced. To facilitate this, the compiler libraries provide a base class called `CoreObject` that exposes helpers for modifying fields in an immutable way. + * Native data structures must use immutable interfaces: + * Instead of using `Array<T>` or `T[]`, use `ReadonlyArray<T>`. + * Instead of using `Map<K, V>`, use `ReadonlyMap<K, V>`. + * Instead of using `Set<T>`, use `ReadonlySet<T>`. + * Additionally, several other extensions to the above interfaces provide helpful immutable operations. +* Everything must be strongly typed. No using `any` as a shortcut. And in fact, now that the `unknown` type exists, `any` should never be used. A consequence of this is that some types can get very complex, but it pays off in maintainability and scalability. It also pushes us to write code in a way where it can be effectively typed. +* Organization is key. The src directory should be split up logically according to the various phases of the compiler. Common components belong in the top level (if they are considered "core" or "integral" to the project, e.g. classes/functions that are used in nearly every file, entry point functions, etc.) or a "utils" directory (if they are general-purpose rather than "compiler-specific" components, e.g. 
small utility functions, new data structures, etc.). + +The above core principles guide the high-level structure and decision-making of the project. When adding a new component or changing something, the addition must be: + +1. Pure functional +2. Fit well into the type system +3. Go in the right place + +## 10,000 Foot View + +This is a slightly more detailed description of the high-level architecture of this implementation. + +From the simplest point, the compiler is supposed to be thought of as a function: + +```typescript +function compiler(sourceFilePath: string): Program { /* Do all the things */ } +``` + +A source file goes in, a compiled program comes out. + +The internals get a bit hairier than this, however. That source file can declare dependencies on other source files, which may or may not exist; any of the resolved source files may not be of valid syntax; even if everything is parsable, the code itself may be semantically invalid; and then once everything is said and done, the compiler doesn't actually spit a program back out to the user, it writes a compiled binary to the disk. + +The high-level architecture of the compiler is more like this: + +```typescript +function compiler(sourceFilePath: string): void { + const program = getProgram(sourceFilePath); + writeFile('program', generateMachineCode(program)); +} + +function getProgram(sourceFilePath: string): Program { /* Parse the file, get all modules, typecheck the program */ } + +function generateMachineCode(program: Program): Buffer { /* Take the code in the modules and create an executable */ } +``` + +But at the root of it all is that same concept. Start with a single file (the entry point of the program), and create an executable from it. + +Looking at the code above, you see that we have two primary phases of the compiler: + +1. Frontend: turn source code into syntax, then into a semantic program +2. Backend: turn a semantic program into an executable + +This is true of effectively all compilers. There is typically one frontend, which turns program source code into some intermediate representation (IR), and several backends, each of which turns that IR into a system-specific binary. + +### Frontend + +The frontend of the compiler is responsible for 3 tasks: + +1. [Parsing](./01-parser.md): turning source code into syntax +2. [Semantic analysis](./03-semantic.md): turning syntax into a semantic program, verifying the semantics of the program along the way +3. [Translation](./00-something.md): turning a semantic program into IR + +### Backend + +TBD + +## The Top Level + +This compiler was originally implemented as an interpreter, so most of the logic is set up to "run" the program instead of compiling it. This will change as we re-progress through the process. For now, I'll describe this in terms of the old functionality. + +The entry point of this compiler is /src/index.ts. This bootstraps the environment, extracts the CLI arguments, passes the entry point file path into the runner, and exits the program with the emitted exit code from the program being run. + +The runner consists of a single function: `runProgram`. This function: + +1. Calls the typechecker to typecheck the program at the specified entry point path. This will internally parse and enumerate all source files of the program using the declared dependencies of each source file. It will then visit the entire program and make sure it is semantically correct, emitting errors and warnings along the way. 
The output of this function is a `Program` instance, which contains all modules, namespaces, and declarations of the program, as well as the full list of diagnostics. +2. Emits any errors and warnings to the console. If there were errors, it returns with an exit code of 1. +3. Calls the translator. This translates the semantic program into a list of executable instructions. This currently has no implementation, but there is a fully functional older implementation from a prior incarnation of this project. +4. Calls the interpreter with the resulting list of instructions and the CLI arguments, returning the result. This currently has no implementation, but there is a fully implemented (but not tested) interpreter from a prior incarnation of this project. + +At this point, we've gotten granular enough that the other pages in this folder can provide additional details. diff --git a/package.json b/package.json index 0b0fb90..396af07 100644 --- a/package.json +++ b/package.json @@ -31,6 +31,6 @@ "ts-jest": "^22.0.1", "tslib": "^1.9.0", "tslint": "^5.9.1", - "typescript": "^2.8.3" + "typescript": "^3.0.3" } } diff --git a/src/extensions.ts b/src/extensions.ts index c7d1744..65bba9e 100644 --- a/src/extensions.ts +++ b/src/extensions.ts @@ -1,3 +1,5 @@ +// #region Array extensions + interface Array { /** Get the last item in this array, or undefined if the array is empty */ last(): T; @@ -7,6 +9,12 @@ interface Array { count(predicate: (item: T) => boolean): number; /** Mutates the value at the specified index using the specified mutator function, returning a new array containing the new value. */ mutate(index: number, fn: (value: T) => T): Array; + /** Set the value at a specified index immutably, returning a new array without modifying the original */ + iset(index: number, value: T): Array; + /** Returns the array with all duplicates removed, NOTE: works only on primitives */ + unique(): Array; + /** Removes the item at the specified index, returning a new array */ + idelete(index: number): Array; } interface ReadonlyArray { @@ -18,6 +26,12 @@ interface ReadonlyArray { count(predicate: (item: T) => boolean): number; /** Mutates the value at the specified index using the specified mutator function, returning a new array containing the new value. 
*/ mutate(index: number, fn: (value: T) => T): ReadonlyArray; + /** Set the value at a specified index immutably, returning a new array without modifying the original */ + iset(index: number, value: T): ReadonlyArray; + /** Returns the array with all duplicates removed, NOTE: works only on primitives */ + unique(): ReadonlyArray; + /** Removes the item at the specified index, returning a new array */ + idelete(index: number): ReadonlyArray; } Array.prototype.last = function last(this: Array, count?: number) { @@ -30,11 +44,28 @@ Array.prototype.count = function count(this: Array, predicate: (item: T) = } Array.prototype.mutate = function mutate(this: Array, index: number, fn: (value: T) => T): Array { + return this.iset(index, fn(this[index])); +} + +Array.prototype.iset = function iset(this: Array, index: number, value: T): Array { + const newArray = [...this]; + newArray.splice(index, 1, value); + return newArray; +} + +Array.prototype.unique = function unique(this: Array): Array { + return [...new Set(this)]; +} + +Array.prototype.idelete = function idelete(this: Array, index: number): Array { const newArray = [...this]; - newArray.splice(index, 1, fn(this[index])); + newArray.splice(index, 1); return newArray; } +// #endregion +// #region String extensions + interface String { /** Get the substring from this string containing the last {count} characters in this string */ last(count?: number): string; @@ -44,14 +75,25 @@ String.prototype.last = function last(count = 1) { return this.slice(this.length - count, this.length); } +// #endregion +// #region Map extensions + interface Map { /** Set the value at a specified key immutably, returning a new map object without modifying the original */ iset(key: K, value: V): Map; + /** Mutate the value at a specified key immutably, returning a new map object without modifying the original */ + mutate(key: K, fn: (value: V) => V): Map; + /** Delete the value at a specified key immutably, returning a new map object without modifying the original */ + idelete(key: K): Map; } interface ReadonlyMap { /** Set the value at a specified key immutably, returning a new map object without modifying the original */ iset(key: K, value: V): ReadonlyMap; + /** Mutate the value at a specified key immutably, returning a new map object without modifying the original */ + mutate(key: K, fn: (value: V) => V): ReadonlyMap; + /** Delete the value at a specified key immutably, returning a new map object without modifying the original */ + idelete(key: K): ReadonlyMap; } Map.prototype.iset = function iset(this: Map, key: K, value: V): Map { @@ -59,3 +101,82 @@ Map.prototype.iset = function iset(this: Map, key: K, value: V): Map clone.set(key, value); return clone; } + +Map.prototype.mutate = function mutate(this: Map, key: K, fn: (value: V) => V): Map { + return this.iset(key, fn(this.get(key)!)); +} + +Map.prototype.idelete = function idelete(this: Map, key: K): Map { + const clone = new Map(this); + clone.delete(key); + return clone; +} + +// #endregion +// #region Set extensions + +interface Set { + /** Performs an array-like map operation on this set, returning a new set */ + map(fn: (value: T) => U): Set; + /** Performs an array-like filter operation on this set, returning a new set */ + filter(predicate: (value: T) => boolean): Set; + /** Performs an array-like reduce operation on this set, returning a new set */ + reduce(fn: (result: U, value: T) => U, initial: U): U; + /** Performs an array-like flatMap operation on this set, returning a new set */ + flatMap(fn: 
(value: T) => Iterable): Set; + /** Unions this set with another iterable, forming a new set */ + union(other: Iterable): Set; + /** Subtracts another iterable from this set, forming a new set containing only elements from this set that were not in the other */ + subtract(other: Set): Set; +} + +interface ReadonlySet { + /** Performs an array-like map operation on this set, returning a new set */ + map(fn: (value: T) => U): ReadonlySet; + /** Performs an array-like filter operation on this set, returning a new set */ + filter(predicate: (value: T) => boolean): ReadonlySet; + /** Performs an array-like reduce operation on this set, returning a new set */ + reduce(fn: (result: U, value: T) => U, initial: U): U; + /** Performs an array-like flatMap operation on this set, returning a new set */ + flatMap(fn: (value: T) => Iterable): ReadonlySet; + /** Unions this set with another set, forming a new set */ + union(other: Iterable): ReadonlySet; + /** Subtracts another set from this set, forming a new set containing only elements from this set that were not in the other */ + subtract(other: ReadonlySet): ReadonlySet; +} + +Set.prototype.map = function map(this: Set, fn: (value: T) => U): Set { + const newSet = new Set(); + for (const value of this) newSet.add(fn(value)); + return newSet; +} + +Set.prototype.filter = function filter(this: Set, predicate: (value: T) => boolean): Set { + const newSet = new Set(); + for (const value of this) if (predicate(value)) newSet.add(value); + return newSet; +} + +Set.prototype.reduce = function reduce(this: Set, fn: (result: U, value: T) => U, initial: U): U { + let result: U = initial; + for (const value of this) result = fn(result, value); + return result; +} + +Set.prototype.flatMap = function flatMap(this: Set, fn: (value: T) => Iterable): Set { + const newSet = new Set(); + for (const value of this) for (const subValue of fn(value)) newSet.add(subValue); + return newSet; +} + +Set.prototype.union = function union(this: Set, other: Iterable): Set { + const newSet = new Set(this); + for (const value of other) newSet.add(value); + return newSet; +} + +Set.prototype.subtract = function subtract(this: Set, other: Set): Set { + return this.filter(_ => !other.has(_)); +} + +// #endregion diff --git a/src/parser/lexer/token.ts b/src/parser/lexer/token.ts index 20aa98b..720c6b0 100644 --- a/src/parser/lexer/token.ts +++ b/src/parser/lexer/token.ts @@ -35,7 +35,7 @@ export class Token extends CoreObject { readonly type: TokenType, position: FilePosition, readonly image: string, - readonly value?: any + readonly value?: string | number ) { super(); this.location = position.computeRange(image); diff --git a/src/semantic/index.ts b/src/semantic/index.ts index 4d680ed..cf6dff2 100644 --- a/src/semantic/index.ts +++ b/src/semantic/index.ts @@ -60,7 +60,7 @@ export default function analyze(path: string) { // Pass 1: Enumeration const enumeration = enumerateNamespaces(path); // Pass 2: Resolution - const resolution = resolveDependencies(enumeration.modules, enumeration.declarations, enumeration.namespaces, enumeration.dependencyQueue); + const resolution = resolveDependencies(enumeration.modules, enumeration.declarations, enumeration.namespaces, enumeration.pureForwards); // Pass 3: Typechecking const typechecked = typecheck(enumeration.declarations, resolution.namespaces); // Pass 4: Name clashes diff --git a/src/semantic/namespace.ts b/src/semantic/namespace.ts index 3bfcd2b..1212e22 100644 --- a/src/semantic/namespace.ts +++ b/src/semantic/namespace.ts @@ -1,22 +1,96 
@@ import { CoreObject } from '~/core'; import * as syntax from '~/syntax'; +import { Dependency } from '~/semantic/passes/dependencies'; +// #region Namespaces + +/** Base class for namespaces, contains all reference collections and reference-related logic */ class NamespaceBase extends CoreObject { - readonly localNames: ReadonlyMap> = new Map(); - readonly exports: ReadonlyMap> = new Map(); + /** Contains all references for all names locally-scoped to the namespace */ + readonly locals: ReadonlyMap = new Map(); + /** Contains all references for all of this namespace's exported names */ + readonly exports: ReadonlyMap = new Map(); constructor(readonly namespaceId: number) { super() } + ensureLocalTarget(name: string, mutator?: (target: NameTarget) => NameTarget) { + let target = this.getLocalTarget(name); + target = mutator ? mutator(target) : target; + return this.mutate('locals', _ => _.iset(name, target)); + } + + ensureExportTarget(name: string, mutator?: (target: NameTarget) => NameTarget) { + let target = this.getExportTarget(name); + target = mutator ? mutator(target) : target; + return this.mutate('exports', _ => _.iset(name, target)); + } + + // #region helpers + + /** Gets the NameTarget corresponding to a local */ + getLocalTarget(name: string) { return this.locals.get(name) || new NameTarget() } + /** Gets the NameTarget corresponding to an export */ + getExportTarget(name: string) { return this.exports.get(name) || new NameTarget() } + + /** Modifies a local's NameTarget */ + mutateLocalTarget(name: string, fn: (target: NameTarget) => NameTarget) { + return this.mutate('locals', _ => _.iset(name, fn(this.getLocalTarget(name)))); + } + + /** Modifies an export's NameTarget */ + mutateExportTarget(name: string, fn: (target: NameTarget) => NameTarget) { + return this.mutate('exports', _ => _.iset(name, fn(this.getExportTarget(name)))); + } + + // #endregion + // #region local references + + /** Adds a resolved reference for an imported name */ + addImportedName(name: string, modulePath: string, exportName: string, declarationId: number) { + return this.mutateLocalTarget(name, _ => _.addReference(new RemoteName(modulePath, exportName, declarationId))); + } + + /** Adds a resolved reference for an imported namespace */ + addImportedNamespace(name: string, modulePath: string, declarationId: number) { + return this.mutateLocalTarget(name, _ => _.addReference(new RemoteNamespace(modulePath, declarationId))); + } + + /** Adds a resolved reference for a local declaration */ addLocalDeclaration(name: string, declarationId: number) { - const existing = this.localNames.get(name) || []; - return this.mutate('localNames', _ => _.iset(name, [...existing, new LocalDeclaration(declarationId)])); + return this.mutateLocalTarget(name, _ => _.addReference(new LocalDeclaration(declarationId))); + } + + // #endregion + // #region export references + + /** Adds a resolved reference for a forwarded name */ + addForwardedName(name: string, modulePath: string, exportName: string, declarationId: number) { + return this.mutateExportTarget(name, _ => _.addReference(new RemoteName(modulePath, exportName, declarationId))); + } + + /** Adds a resolved reference for a forwarded namespace */ + addExportedRemoteNamespace(name: string, modulePath: string, declarationId: number) { + return this.mutateExportTarget(name, _ => _.addReference(new RemoteNamespace(modulePath, declarationId))); + } + + /** Adds a resolved reference for an exported local name */ + addExportedName(name: string, local: string, 
declarationId: number) { + return this.mutateExportTarget(name, _ => _.addReference(new LocalName(local, declarationId))); + } + + /** Adds a resolved reference for an exported inline declaration */ + addExportedDeclaration(name: string, declarationId: number) { + return this.mutateExportTarget(name, _ => _.addReference(new LocalDeclaration(declarationId))); } + + // #endregion } /** - * A declared namespace within another namespace. - * It has a name, a parent namespace id, and a declaration id (because it is a declaration). + * A namespace declared within another namespace. + * This contains its parent's namespace id, and the corresponding id and syntax + * of its declaration. */ export class NestedNamespace extends NamespaceBase { constructor( @@ -28,9 +102,10 @@ export class NestedNamespace extends NamespaceBase { } /** - * A semantic container for a module in a program. - * A module is a type of namespace, and can contain local names, exports, and declarations. - * Where it differs from a generic namespace is that it has no parent namespace, and is associated with a file path. + * A module is a type of namespace, but has no parent, is not a declaration, + * and is alternatively identified by its file path. + * Other than that, it still has locals and exports just like any other namespace. + * Most namespaces in a program will be module namespaces. */ export class ModuleNamespace extends NamespaceBase { constructor( @@ -39,22 +114,138 @@ export class ModuleNamespace extends NamespaceBase { ) { super(namespaceId); } } +/** A namespace is either a module or a declared namespace nested within another namespace. */ export type Namespace = ModuleNamespace | NestedNamespace; +// #endregion + +// #region ModuleRefs + +interface NonSuccessModuleRef { + readonly namespaceId: null; + readonly status: ModuleStatus.REFERENCED | ModuleStatus.UNPARSED | ModuleStatus.NOT_FOUND; + readonly fullyResolved: boolean; +} + +interface SuccessModuleRef { + readonly namespaceId: number; + readonly status: ModuleStatus.SUCCESS; + readonly fullyResolved: boolean; +} + +/** + * A ModuleRef is a reference to a module namespace. + * It will either be successful (resolved), or not successful (either not yet resolved or unresolvable). + * All successful module references will have a resolved namespace id. + */ +export type ModuleRef = NonSuccessModuleRef | SuccessModuleRef; + +export enum ModuleStatus { + /** The initial state of any module that is referenced, including the entry. Nothing has been done with it yet. */ + REFERENCED, + /** The module was found and parsed */ + SUCCESS, + /** The module was found, but failed to parse */ + UNPARSED, + /** The module was not found */ + NOT_FOUND +} + +// #endregion + +// #region NameTargets and References + /** - * For any given name in a program, there are target(s) to which that name resolves. - * A target will always be either: - * - an export name of another module - * - a module's namespace - * - a locally-scoped name - * - a declaration inline with the name (only in the case of exported declarations) + * Name targets in a program have to be quite complex for a number of reasons, + * the primary ones being that: + * - namespaces can circularly reference one another + * - declaration merging (having a name resolve to multiple declarations) is valid + * + * A namespace has two lists: a list of locals (locally-scoped names) and a list of + * exports (externally-accessible names). These "lists" are mappings from a name + * to a name target. 
+ * + * A name target, once everything is said and done, has simply a list of references. + * + * A reference, once everything is said and done, can be one of 8 types, which are divided + * into 3 categories: + * - resolved references (references that correspond to a declaration id): + * - remote name (a reference to an export of another module) + * - remote namespace (a reference to the top-level namespace of a module) + * - local name (a reference to some locally-scoped name) + * - local declaration (a direct reference to a local declaration) + * - dangling references (references whose targets do not exist) + * - missing module (a reference to a non-existent module) + * - missing export (a reference to a non-existent export of an existent module) + * - missing local (a reference to a non-existent local) + * - circular references (references that depend indirectly to themselves, which are a special type of dangling reference) + * + * This structure forms a tree: namespace -(1:many)-> name -(1:1)-> name target -(1:many)-> reference + * + * This makes sense at face value, but becomes complex when you envision what has to happen + * during the process of resolving all of these names and references. The enumeration + * process, whose job it is to enumerate and register all namespaces and declarations in the program, + * will place only direct references (always local declarations) into name targets. + * Everything else is registered as a dependency on each name target. Dependencies have one + * type for each logical type of dependency (the various kinds of imports, exports, and forwards). + * + * To manage the potential for circular references and declaration merging, + * the concept of a "status" has to be introduced on all three levels of the namespace tree: + * - name targets need an "aggregate status" so that any other name targets referencing them + * know when they have all corresponding references fully resolved (even if they are unsuccessful) + * - references have an implicit status. the three categories of references correspond to the three + * terminal statuses: resolved, dangling, and circular. there is also a special intermediate reference + * type called a "stub reference" that serves as a placeholder to indicate when a reference is in the + * process of being resolved. this serves the purpose of preventing infinite recursion when dealing + * with circular references. 
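// To make the status bookkeeping described above concrete, here is a small standalone sketch
// (not part of this patch; names simplified and assumed) of how a name target's aggregate status
// can be derived from its references once they are all terminal: any resolved reference wins,
// then dangling, then circular, and a target whose chain resolves but never reaches a
// declaration ends up empty.
enum RefStatus { Resolved, Dangling, Circular, Empty }

function aggregateStatus(referenceStatuses: ReadonlyArray<RefStatus>): RefStatus {
    if (referenceStatuses.includes(RefStatus.Resolved)) return RefStatus.Resolved;
    if (referenceStatuses.includes(RefStatus.Dangling)) return RefStatus.Dangling;
    if (referenceStatuses.includes(RefStatus.Circular)) return RefStatus.Circular;
    return RefStatus.Empty;
}

// e.g. a name that resolves through one import but also has a dangling forward still counts
// as resolved for the purposes of later passes:
const example = aggregateStatus([RefStatus.Dangling, RefStatus.Resolved]); // RefStatus.Resolved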
*/ -export type NameTarget = RemoteName | RemoteNamespace | LocalName | LocalDeclaration | DanglingReference | CircularReference; +export class NameTarget extends CoreObject { + readonly status: NameTargetStatus = NameTargetStatus.NOT_RESOLVED; + readonly references: ReadonlyArray = []; + readonly dependencies: ReadonlyArray = []; + + addReference(ref: Reference): NameTarget { + return this.mutate('references', _ => [..._, ref]) + } + + addDependency(dep: Dependency): NameTarget { + return this.mutate('dependencies', _ => [..._, dep]); + } + + popDependency(): [Dependency, NameTarget] { + const [dep, ...deps] = this.dependencies; + return [dep, this.set('dependencies', deps)]; + } + + setAggregateStatus(): NameTarget { + if (this.references.some(_ => _.status === NameTargetStatus.FULLY_RESOLVED)) + return this.set('status', NameTargetStatus.FULLY_RESOLVED); + if (this.references.some(_ => _.status === NameTargetStatus.DANGLING)) + return this.set('status', NameTargetStatus.DANGLING); + if (this.references.some(_ => _.status === NameTargetStatus.CIRCULAR)) + return this.set('status', NameTargetStatus.CIRCULAR); + return this.set('status', NameTargetStatus.EMPTY); + } +} + +export enum NameTargetStatus { + NOT_RESOLVED = 1, // not yet visited or currently being visited + FULLY_RESOLVED, // done being visited + DANGLING, // no references could be resolved + CIRCULAR, // all references are circular + EMPTY // all references could be resolved, but none contain a declaration +} + +export type Reference = ResolvedReference | DanglingReference | CircularReference | EmptyReference; + +export type ResolvedReference = RemoteName | RemoteNamespace | LocalName | LocalDeclaration; /** - * A remote name is reference to an export name from another module. + * A reference to an export name from another module. */ export class RemoteName extends CoreObject { + readonly status = NameTargetStatus.FULLY_RESOLVED; + constructor( readonly modulePath: string, readonly exportName: string, @@ -63,9 +254,11 @@ export class RemoteName extends CoreObject { } /** - * A remote namespace is a pointer to a module's top-level namespace + * A pointer to a module's top-level namespace */ export class RemoteNamespace extends CoreObject { + readonly status = NameTargetStatus.FULLY_RESOLVED; + constructor( readonly modulePath: string, readonly resolvedDeclarationId: number @@ -73,9 +266,11 @@ export class RemoteNamespace extends CoreObject { } /** - * A local name is a reference to a name that is scoped to the current module + * A reference to a name that is scoped to the current module */ export class LocalName extends CoreObject { + readonly status = NameTargetStatus.FULLY_RESOLVED; + constructor( readonly name: string, readonly resolvedDeclarationId: number @@ -83,20 +278,116 @@ export class LocalName extends CoreObject { } /** - * A local declaration is a reference to a declaration that has no name, - * i.e. in the case of an anonymous default export. + * A direct reference to a local declaration. 
*/ export class LocalDeclaration extends CoreObject { + readonly status = NameTargetStatus.FULLY_RESOLVED; + constructor( readonly resolvedDeclarationId: number ) { super() } } -/** Indicates that the name cannot be resolved because its target does not exist */ -export class DanglingReference extends CoreObject {} +export type DanglingReference = MissingModule | MissingExport | MissingLocal; -/** Indicates that the name cannot be resolved because it depends on itself */ -export class CircularReference extends CoreObject {} +/** + * A reference to a module that doesn't exist. + * This applies for both named and wildcard imports/forwards. + */ +export class MissingModule extends CoreObject { + readonly status = NameTargetStatus.DANGLING; + + constructor( + readonly modulePath: string, + readonly exportName: Optional + ) { super() } +} + +/** + * A reference to a module's export where the module exists, + * but the export does not. + * This applies only for named imports/forwards. + */ +export class MissingExport extends CoreObject { + readonly status = NameTargetStatus.DANGLING; + + constructor( + readonly modulePath: string, + readonly exportName: string + ) { super() } +} + +/** + * A reference to a local that doesn't exist. + * This applies only to named exports. + */ +export class MissingLocal extends CoreObject { + readonly status = NameTargetStatus.DANGLING; + + constructor( + readonly localName: string + ) { super() } +} + +export type CircularReference = RemoteCircularReference | LocalCircularReference; + +/** + * A remote reference whose dependency chain circles back on itself. + * This applies only to named imports/forwards. + */ +export class RemoteCircularReference extends CoreObject { + readonly status = NameTargetStatus.CIRCULAR; + + constructor( + readonly modulePath: string, + readonly exportName: string + ) { super() } +} + +/** + * A local reference whose dependency chain circles back on itself. + * This applies only to named exports. + */ +export class LocalCircularReference extends CoreObject { + readonly status = NameTargetStatus.CIRCULAR; + + constructor( + readonly localName: string + ) { super() } +} + +export type EmptyReference = RemoteEmptyReference | LocalEmptyReference; + +/** + * This is a "chained" remote dangling or circular reference. + * The dependency could be resolved, but the end of the chain + * doesn't actually resolve to a declaration. + */ +export class RemoteEmptyReference extends CoreObject { + readonly status = NameTargetStatus.EMPTY; + + constructor( + readonly modulePath: string, + readonly exportName: string + ) { super() } +} + +/** + * This is a "chained" local dangling or circular reference. + * The dependency could be resolved, but the end of the chain + * doesn't actually resolve to a declaration. 
+ */ +export class LocalEmptyReference extends CoreObject { + readonly status = NameTargetStatus.EMPTY; + + constructor( + readonly localName: string + ) { super() } +} + +// #endregion + +// #region Declarations /** * A semantic declaration is a node that is ultimately associated with a name @@ -139,3 +430,5 @@ export class NamespaceDeclaration extends CoreObject { readonly namespaceId: number ) { super() } } + +// #endregion diff --git a/src/semantic/passes/dependencies.ts b/src/semantic/passes/dependencies.ts index 76233a5..23515cf 100644 --- a/src/semantic/passes/dependencies.ts +++ b/src/semantic/passes/dependencies.ts @@ -1,61 +1,187 @@ -import { CoreObject } from '~/core'; +import { CoreObject, FileRange } from '~/core'; +import { Token } from '~/parser/lexer'; +import { range } from '~/utils/utils'; -export type Dependency = ImportedName | ImportedNamespace | ForwardedName | ForwardedNamespace | PureForward | ExportedName | ExportedDeclaration; +export class PureForward extends CoreObject { + constructor( + readonly forwardNamespace: number, + readonly exportModule: string, + readonly exportModuleLocation: FileRange, + readonly starLocation: FileRange + ) { super() } +} + +export type Dependency = ImportedName | ImportedNamespace | ForwardedName | PureForwardReplacement | ForwardedNamespace | ExportedName; export class ImportedName extends CoreObject { constructor( readonly importNamespace: number, - readonly importName: string, + readonly importName: Token, readonly exportModule: string, - readonly exportName: string + readonly exportModuleLocation: FileRange, + readonly exportName: Token ) { super() } } export class ImportedNamespace extends CoreObject { constructor( readonly importNamespace: number, - readonly importName: string, - readonly exportModule: string + readonly importName: Token, + readonly exportModule: string, + readonly exportModuleLocation: FileRange, + readonly starLocation: FileRange ) { super() } } export class ForwardedName extends CoreObject { constructor( readonly forwardNamespace: number, - readonly forwardName: string, + readonly forwardName: Token, readonly exportModule: string, - readonly exportName: string + readonly exportModuleLocation: FileRange, + readonly exportName: Token ) { super() } } -export class ForwardedNamespace extends CoreObject { +/** + * Pure forwards are processed and replaced with adhoc forwarded names, + * but because they aren't explicit they don't have the same structure as ForwardedNames. + * The forwarded name itself is pulled from the export module's exports, + * and any errors will be applied to the star location from the original pure forward. 
+ */ +export class PureForwardReplacement extends CoreObject { constructor( readonly forwardNamespace: number, readonly forwardName: string, - readonly exportModule: string + readonly exportModule: string, + readonly exportModuleLocation: FileRange, + readonly starLocation: FileRange ) { super() } } -export class PureForward extends CoreObject { +export class ForwardedNamespace extends CoreObject { constructor( readonly forwardNamespace: number, - readonly exportModule: string + readonly forwardName: Token, + readonly exportModule: string, + readonly exportModuleLocation: FileRange, + readonly starLocation: FileRange ) { super() } } export class ExportedName extends CoreObject { constructor( readonly namespace: number, - readonly localName: string, - readonly exportName: string + readonly localName: Token, + readonly exportName: Token ) { super() } } -export class ExportedDeclaration extends CoreObject { - constructor( - readonly namespace: number, - readonly declarationId: number, - readonly exportName: string - ) { super() } +export class PureForwardGraph extends CoreObject { + readonly edgeGrid: ReadonlyArray>>; + + constructor(readonly size: number) { + super(); + this.edgeGrid = range(size).map(_ => range(size).map(_ => null)); + } + + getForward(exporter: number, forwarder: number): Optional { + return this.edgeGrid[exporter][forwarder]; + } + + addForward(exporter: number, forwarder: number, fwd: PureForward): PureForwardGraph { + return this.mutate('edgeGrid', _ => _.mutate(exporter, _1 => _1.iset(forwarder, fwd))); + } + + /** + * Get all namespaces which forward from this one + */ + getConsumers(exporter: number): ReadonlyArray { + const targets = this.edgeGrid[exporter]; + return range(this.size).filter(_ => !!targets[_]); + } + + /** + * Get all namespaces that this one has a forward for + */ + getSuppliers(forwarder: number): ReadonlyArray { + return range(this.size).filter(_ => !!this.edgeGrid[_][forwarder]); + } + + /** + * Determines the pure forward cycles in this graph. + * No node will appear in more than one cycle; + * any cycles that intersect will be merged into one "aggregate" cycle. + */ + getCycles(): ReadonlyMap> { + // initialize lists + const cycles: ReadonlyArray> = []; + const visited: ReadonlyArray = []; + const currentPath: ReadonlyArray = []; + // visit + const [finishedCycles] = this.cyclesVisitor(0, currentPath, visited, cycles); + // assemble map + const entries: Array<[number, ReadonlySet]> = []; + for (const cycle of finishedCycles) { + for (const ns of cycle) { + entries.push([ns, cycle]); + } + } + return new Map(entries); + } + + /** + * Performs a recursive aggregate cycles algorithm for a given namespace, current recursion path, + * set of visited namespaces, and current set of cycles. + * + * For each consumer of the namespace, check to see if it exists in the current recursion path. + * If it does, the path between the two namespaces either needs to be merged into an existing cycle + * or added as a new cycle. + * This is a depth-first search algorithm. + * Once every consumer of the namespace is visited, the namespace is marked visited and the algorithm will + * ascend back to the previous namespace. + * If there are no namespaces left in the chain, the algorithm moves to the next namespace in the graph and starts + * a new chain. + * The algorithm is finished once every namespace in the graph has been visited, either by recursion from + * an existing namespace or by iteration. 
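// A rough standalone sketch of the aggregate-cycle walk described above (illustrative only,
// not part of this patch). `consumers[n]` is assumed to list the namespaces that pure-forward
// from namespace n. Whenever the depth-first path loops back on itself, the looped segment is
// either merged into an intersecting cycle or recorded as a new one, so intersecting cycles
// collapse into a single aggregate cycle and no namespace appears in more than one.
function aggregateCycles(consumers: ReadonlyArray<ReadonlyArray<number>>): Array<Set<number>> {
    const cycles: Array<Set<number>> = [];
    const visited: boolean[] = new Array(consumers.length).fill(false);
    const visit = (node: number, path: ReadonlyArray<number>): void => {
        if (visited[node]) return;
        const nextPath = [...path, node];
        for (const consumer of consumers[node]) {
            if (consumer === node) continue; // a self-forward is not treated as a cycle
            const loopStart = nextPath.indexOf(consumer);
            if (loopStart >= 0) {
                // the path loops back on itself: everything from the first occurrence is a cycle
                const cycle = nextPath.slice(loopStart);
                const existing = cycles.find(c => cycle.some(n => c.has(n)));
                if (existing) cycle.forEach(n => existing.add(n)); // merge intersecting cycles
                else cycles.push(new Set(cycle));
            } else {
                visit(consumer, nextPath);
            }
        }
        visited[node] = true;
    };
    for (let node = 0; node < consumers.length; node++) visit(node, []);
    return cycles;
}

// e.g. 0 -> 1 -> 2 -> 0 and 2 -> 3 -> 2 intersect at namespace 2, so they merge:
// aggregateCycles([[1], [2], [0, 3], [2]]) yields [Set {0, 1, 2, 3}]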
*/
+ private cyclesVisitor(ns: number, currentPath: ReadonlyArray, visited: ReadonlyArray, cycles: ReadonlyArray>): [ReadonlyArray>, ReadonlyArray] {
+ // break recursion if we're out of namespaces or the namespace has already been visited
+ if (ns >= this.size || visited[ns]) return [cycles, visited];
+
+ const nextPath = [...currentPath, ns];
+ let nextCycles = cycles;
+ let nextVisited = visited;
+ for (const consumer of this.getConsumers(ns)) {
+ if (consumer === ns) {
+ // TODO: figure out how to get diagnostics here (possibly just add another method to check for this)
+ // for posterity: if this is true, the namespace has a pure forward to itself, which should just be a warning
+ } else if (nextPath.includes(consumer)) {
+ // we have a cycle, gather all namespaces in the path
+ const cycle = nextPath.slice(nextPath.indexOf(consumer));
+ // check to see if there is an existing cycle containing ANY of them
+ const index = nextCycles.findIndex(_ => cycle.some(_1 => _.has(_1)));
+ if (index > -1) {
+ // existing cycle, merge all of these into it
+ nextCycles = nextCycles.mutate(index, _ => _.union(cycle));
+ } else {
+ // no existing cycle, add one
+ nextCycles = [...nextCycles, new Set(cycle)];
+ }
+ } else {
+ // no cycle, recurse to consumer
+ [nextCycles, nextVisited] = this.cyclesVisitor(consumer, nextPath, nextVisited, nextCycles);
+ }
+ }
+ // namespace is now visited
+ nextVisited = nextVisited.iset(ns, true);
+ if (currentPath.length > 0) {
+ // ascend back up to the parent because it may have more namespaces to traverse
+ return [nextCycles, nextVisited];
+ } else {
+ // starting namespace in the path is finished, increment namespace number and recurse
+ return this.cyclesVisitor(ns + 1, currentPath, nextVisited, nextCycles);
+ }
+ }
}
\ No newline at end of file
diff --git a/src/semantic/passes/enumeration.ts b/src/semantic/passes/enumeration.ts
index 4170662..14edc3a 100644
--- a/src/semantic/passes/enumeration.ts
+++ b/src/semantic/passes/enumeration.ts
@@ -1,37 +1,22 @@
-import { Dependency, ImportedNamespace, ImportedName, PureForward, ForwardedNamespace, ForwardedName, ExportedName, ExportedDeclaration } from './dependencies';
+import { Dependency, ImportedNamespace, ImportedName, PureForward, ForwardedNamespace, ForwardedName, ExportedName } from './dependencies';
 import { parseModule } from '~/parser';
-import { Diagnostic, FilePosition, CoreObject } from '~/core';
+import { Diagnostic, FilePosition, CoreObject, FileRange } from '~/core';
 import * as syntax from '~/syntax';
 import { LazyList, single } from '~/utils/lazy-list';
 import resolveModule from '~/semantic/resolver';
 import { resolve } from 'path';
 import * as ns from '~/semantic/namespace';
+import { Token } from '~/parser/lexer';

 export interface NamespaceEnumerationOutput {
- readonly modules: ReadonlyMap;
+ readonly modules: ReadonlyMap;
 readonly namespaces: ReadonlyArray;
 readonly declarations: ReadonlyArray;
- readonly dependencyQueue: ReadonlyArray;
+ readonly pureForwards: ReadonlyArray; // TODO: try to integrate this into the namespaces
 readonly diagnostics: ReadonlyArray;
 }

-export interface EnumeratedModule {
- readonly namespaceId: Optional;
- readonly status: ModuleEnumerationStatus;
-}
-
-export enum ModuleEnumerationStatus {
- /** The initial state of any module that is referenced, including the entry. Nothing has been done with it yet.
*/ - REFERENCED, - /** The module was found and parsed */ - SUCCESS, - /** The module was found, but failed to parse */ - UNPARSED, - /** The module was not found */ - NOT_FOUND -} - export default function enumerateNamespaces(mainModulePath: string): NamespaceEnumerationOutput { return new EnumerationProcess(mainModulePath).run(); } @@ -45,16 +30,16 @@ type AnyDeclaration class EnumerationProcess extends CoreObject { readonly moduleQueue: LazyList; - readonly modules: ReadonlyMap; + readonly modules: ReadonlyMap; readonly namespaces: ReadonlyArray = []; readonly declarations: ReadonlyArray = []; - readonly dependencyQueue: ReadonlyArray = []; + readonly pureForwards: ReadonlyArray = []; readonly diagnostics: ReadonlyArray = []; constructor(readonly mainModulePath: string) { super(); this.moduleQueue = single(mainModulePath); - this.modules = new Map().iset(mainModulePath, { namespaceId: null, status: ModuleEnumerationStatus.REFERENCED }); + this.modules = new Map().iset(mainModulePath, { namespaceId: null, status: ns.ModuleStatus.REFERENCED, fullyResolved: false }); } run() { @@ -73,22 +58,22 @@ class EnumerationProcess extends CoreObject { } catch (err) { // "file not found" errors will be processed in the next pass so we get the import location, so just save as not found if (err.code === 'ENOENT') { - next = next.setFailedModule(modulePath, ModuleEnumerationStatus.NOT_FOUND); + next = next.setFailedModule(modulePath, ns.ModuleStatus.NOT_FOUND); // however, if this was the main module, we do need a diagnostic, and we need to stop right here if (modulePath === this.mainModulePath) return next.withEntryError(`Entry point "${this.mainModulePath}" not found.`); return next.consumeModuleQueue(); } throw err; } + // add any parse diagnostics + next = next.addDiagnostics(parseDiagnostics); // the module couldn't be parsed, save it as unparsed and let the next pass set the error if (!moduleSyntax) { - next = next.setFailedModule(modulePath, ModuleEnumerationStatus.UNPARSED); + next = next.setFailedModule(modulePath, ns.ModuleStatus.UNPARSED); // if it was the main module, we stop right here if (modulePath === this.mainModulePath) return next.withEntryError(`Entry point "${this.mainModulePath}" failed to parse.`); return next.consumeModuleQueue(); } - // add any parse diagnostics - next = next.addDiagnostics(parseDiagnostics); // add the module to the module and namespace registries next = next.setSuccessfulModule(modulePath); const namespaceId = next.modules.get(modulePath)!.namespaceId!; @@ -100,56 +85,55 @@ class EnumerationProcess extends CoreObject { return next.consumeModuleQueue(); } - setFailedModule(path: string, status: ModuleEnumerationStatus): EnumerationProcess { - return this.mutate('modules', _ => _.iset(path, { namespaceId: null, status })); + setFailedModule(path: string, status: Exclude): EnumerationProcess { + return this.mutate('modules', _ => _.iset(path, { namespaceId: null, status, fullyResolved: true })); } setSuccessfulModule(path: string): EnumerationProcess { const namespaceId = this.namespaces.length; return this - .mutate('modules', _ => _.iset(path, { namespaceId, status: ModuleEnumerationStatus.SUCCESS })) + .mutate('modules', _ => _.iset(path, { namespaceId, status: ns.ModuleStatus.SUCCESS, fullyResolved: false })) .mutate('namespaces', _ => [..._, new ns.ModuleNamespace(namespaceId, path)]); } addReferencedModule(path: string): EnumerationProcess { if (this.modules.has(path)) return this; return this - .mutate('modules', _ => _.iset(path, { namespaceId: null, 
status: ModuleEnumerationStatus.REFERENCED })) + .mutate('modules', _ => _.iset(path, { namespaceId: null, status: ns.ModuleStatus.REFERENCED, fullyResolved: false })) .mutate('moduleQueue', _ => _.append(path)); } - addImport(namespaceId: number, targetModule: string, localName: string, exportName: string): EnumerationProcess { + addImport(namespaceId: number, targetModule: string, targetModuleLocation: FileRange, localName: Token, exportName: Token): EnumerationProcess { let dep: Dependency; - if (exportName === '*') { - dep = new ImportedNamespace(namespaceId, localName, targetModule); + if (exportName.image === '*') { + dep = new ImportedNamespace(namespaceId, localName, targetModule, targetModuleLocation, exportName.location); } else { - dep = new ImportedName(namespaceId, localName, targetModule, exportName); + dep = new ImportedName(namespaceId, localName, targetModule, targetModuleLocation, exportName); } - return this.mutate('dependencyQueue', _ => [..._, dep]); + return this.mutate('namespaces', _ => _.mutate(namespaceId, _ => _.mutateLocalTarget(localName.image, _ => _.addDependency(dep)))); } - addForward(namespaceId: number, targetModule: string, forwardName: string, exportName: string): EnumerationProcess { + addForward(namespaceId: number, targetModule: string, targetModuleLocation: FileRange, forwardName: Token, exportName: Token): EnumerationProcess { let dep: Dependency; - if (exportName === '*') { - if (forwardName === '*') { - dep = new PureForward(namespaceId, targetModule); + if (exportName.image === '*') { + if (forwardName.image === '*') { + return this.mutate('pureForwards', _ => [..._, new PureForward(namespaceId, targetModule, targetModuleLocation, forwardName.location)]); } else { - dep = new ForwardedNamespace(namespaceId, forwardName, targetModule); + dep = new ForwardedNamespace(namespaceId, forwardName, targetModule, targetModuleLocation, exportName.location); } } else { - dep = new ForwardedName(namespaceId, forwardName, targetModule, exportName); + dep = new ForwardedName(namespaceId, forwardName, targetModule, targetModuleLocation, exportName); } - return this.mutate('dependencyQueue', _ => [..._, dep]); + return this.mutate('namespaces', _ => _.mutate(namespaceId, _ => _.mutateExportTarget(exportName.image, _ => _.addDependency(dep)))); } - addExportedName(namespaceId: number, exportName: string, localName: string): EnumerationProcess { + addExportedName(namespaceId: number, exportName: Token, localName: Token): EnumerationProcess { const dep = new ExportedName(namespaceId, localName, exportName); - return this.mutate('dependencyQueue', _ => [..._, dep]); + return this.mutate('namespaces', _ => _.mutate(namespaceId, _ => _.mutateExportTarget(exportName.image, _ => _.addDependency(dep)))); } - addExportedDeclaration(namespaceId: number, exportName: string, declarationId: number): EnumerationProcess { - const dep = new ExportedDeclaration(namespaceId, declarationId, exportName); - return this.mutate('dependencyQueue', _ => [..._, dep]); + addExportedDeclaration(namespaceId: number, exportName: Token, declarationId: number): EnumerationProcess { + return this.mutate('namespaces', _ => _.mutate(namespaceId, _ => _.addExportedDeclaration(exportName.image, declarationId))); } addLocalName(namespaceId: number, name: string, declarationId: number): EnumerationProcess { @@ -164,7 +148,7 @@ class EnumerationProcess extends CoreObject { return this.addDiagnostics([new Diagnostic(error, new FilePosition('', [0, 0]))]); } - handleDeclaration(node: AnyDeclaration, 
namespaceId: number, modulePath: string, containingExport: Optional = null): EnumerationProcess { + handleDeclaration(node: AnyDeclaration, namespaceId: number, modulePath: string, containingExport: Optional = null): EnumerationProcess { if (node instanceof syntax.ImportDeclaration) return this.handleImport(node, namespaceId, modulePath); if (node instanceof syntax.ExportDeclaration) return this.handleExport(node, namespaceId, modulePath); if (node instanceof syntax.ExportForwardDeclaration) return this.handleForward(node, namespaceId, modulePath); @@ -188,18 +172,18 @@ class EnumerationProcess extends CoreObject { handleImport(node: syntax.ImportDeclaration, namespaceId: number, modulePath: string) { let next = this as EnumerationProcess; // handle the module name - let targetModule = resolveModule(modulePath, node.moduleName.value); + let targetModule = resolveModule(modulePath, node.moduleName.value as string); if (!targetModule) { // resolve the would-be module path instead - targetModule = resolve(modulePath, node.moduleName.value); - next = next.setFailedModule(targetModule, ModuleEnumerationStatus.NOT_FOUND); + targetModule = resolve(modulePath, node.moduleName.value as string); + next = next.setFailedModule(targetModule, ns.ModuleStatus.NOT_FOUND); } else { // add the referenced module (will be ignored if it was already referenced) next = next.addReferencedModule(targetModule); } // add import names for (const imp of node.imports) { - next = next.addImport(namespaceId, targetModule, imp.aliasName.image, imp.importName.image); + next = next.addImport(namespaceId, targetModule, node.moduleName.location, imp.aliasName, imp.importName); } return next; } @@ -216,10 +200,10 @@ class EnumerationProcess extends CoreObject { for (const exp of node.exports) { if (exp.value) { // pass the baton to the declaration handler, which will add the export for us - next = next.handleDeclaration(exp.value, namespaceId, modulePath, exp.exportName.value); + next = next.handleDeclaration(exp.value, namespaceId, modulePath, exp.exportName); } else { // this is an exported name - next = next.addExportedName(namespaceId, exp.exportName.image, exp.valueName!.image); + next = next.addExportedName(namespaceId, exp.exportName, exp.valueName!); } } return next; @@ -235,18 +219,18 @@ class EnumerationProcess extends CoreObject { handleForward(node: syntax.ExportForwardDeclaration, namespaceId: number, modulePath: string) { let next = this as EnumerationProcess; // handle the module name - let targetModule = resolveModule(modulePath, node.moduleName.value); + let targetModule = resolveModule(modulePath, node.moduleName.value as string); if (!targetModule) { // resolve the would-be module path instead - targetModule = resolve(modulePath, node.moduleName.value); - next = next.setFailedModule(targetModule, ModuleEnumerationStatus.NOT_FOUND); + targetModule = resolve(modulePath, node.moduleName.value as string); + next = next.setFailedModule(targetModule, ns.ModuleStatus.NOT_FOUND); } else { // add the referenced module (will be ignored if it was already referenced) next = next.addReferencedModule(targetModule); } // add forward names for (const fwd of node.forwards) { - next = next.addForward(namespaceId, targetModule, fwd.exportName.image, fwd.importName.image); + next = next.addForward(namespaceId, targetModule, node.moduleName.location, fwd.exportName, fwd.importName); } return next; } @@ -256,9 +240,8 @@ class EnumerationProcess extends CoreObject { * - Create a DeclaredType * - Register the DeclaredType to the 
process's declaration registry * - If there is a parent export, register the corresponding dependency - * TODO add local-declaration local-names for each declaration */ - handleType(node: syntax.TypeDeclaration | syntax.AnonymousTypeDeclaration, namespaceId: number, containingExport: Optional) { + handleType(node: syntax.TypeDeclaration | syntax.AnonymousTypeDeclaration, namespaceId: number, containingExport: Optional) { const declarationId = this.declarations.length; const declaredType = new ns.TypeDeclaration(declarationId, node); let next: EnumerationProcess = this.mutate('declarations', _ => [..._, declaredType]); @@ -275,7 +258,7 @@ class EnumerationProcess extends CoreObject { * - Register the DeclaredFunction to the process's declaration registry * - If there is a parent export, register the corresponding dependency */ - handleFunction(node: syntax.FunctionDeclaration | syntax.AnonymousFunctionDeclaration, namespaceId: number, containingExport: Optional) { + handleFunction(node: syntax.FunctionDeclaration | syntax.AnonymousFunctionDeclaration, namespaceId: number, containingExport: Optional) { const declarationId = this.declarations.length; const declaredFunction = new ns.FunctionDeclaration(declarationId, node); let next: EnumerationProcess = this.mutate('declarations', _ => [..._, declaredFunction]); @@ -292,7 +275,7 @@ class EnumerationProcess extends CoreObject { * - Register the DeclaredConstant to the process's declaration registry * - If there is a parent export, register the corresponding dependency */ - handleConstant(node: syntax.ConstantDeclaration | syntax.AnonymousConstantDeclaration, namespaceId: number, containingExport: Optional) { + handleConstant(node: syntax.ConstantDeclaration | syntax.AnonymousConstantDeclaration, namespaceId: number, containingExport: Optional) { const declarationId = this.declarations.length; const declaredConstant = new ns.ConstantDeclaration(declarationId, node); let next: EnumerationProcess = this.mutate('declarations', _ => [..._, declaredConstant]); @@ -310,7 +293,7 @@ class EnumerationProcess extends CoreObject { * - If there is a parent export, register the corresponding dependency * - Process all of the namespace's declarations */ - handleNamespace(node: syntax.NamespaceDeclaration | syntax.AnonymousNamespaceDeclaration, parentNamespaceId: number, modulePath: string, containingExport: Optional) { + handleNamespace(node: syntax.NamespaceDeclaration | syntax.AnonymousNamespaceDeclaration, parentNamespaceId: number, modulePath: string, containingExport: Optional) { const namespaceId = this.namespaces.length; const declarationId = this.declarations.length; const nestedNamespace = new ns.NestedNamespace(namespaceId, parentNamespaceId, declarationId, node); @@ -333,7 +316,7 @@ class EnumerationProcess extends CoreObject { modules: this.modules, namespaces: this.namespaces, declarations: this.declarations, - dependencyQueue: this.dependencyQueue, + pureForwards: this.pureForwards, diagnostics: this.diagnostics }); } diff --git a/src/semantic/passes/resolution.ts b/src/semantic/passes/resolution.ts index 9c02b96..9467d3c 100644 --- a/src/semantic/passes/resolution.ts +++ b/src/semantic/passes/resolution.ts @@ -1,8 +1,7 @@ -import { EnumeratedModule, ModuleEnumerationStatus } from './enumeration'; -import { Declaration, Namespace } from '../namespace'; -import { Dependency, ImportedName, ImportedNamespace, ForwardedName, ForwardedNamespace, PureForward, ExportedName } from './dependencies'; -import { CoreObject, Diagnostic } from '~/core'; 
-import { LazyList, fromIterable } from '~/utils/lazy-list'; +import { Declaration, Namespace, ModuleRef, ModuleStatus, NameTargetStatus, NameTarget, RemoteCircularReference, Reference, MissingModule, MissingExport, RemoteName, ResolvedReference, RemoteEmptyReference } from '../namespace'; +import { Dependency, ImportedName, ImportedNamespace, ForwardedName, ForwardedNamespace, PureForward, ExportedName, PureForwardGraph, PureForwardReplacement } from './dependencies'; +import { CoreObject, Diagnostic, DiagnosticLevel, FileRange } from '~/core'; +import { range } from '~/utils/utils'; export interface DependencyResolutionOutput { @@ -10,99 +9,319 @@ export interface DependencyResolutionOutput { readonly diagnostics: ReadonlyArray; } -export default function resolveDependencies(modules: ReadonlyMap, declarations: ReadonlyArray, namespaces: ReadonlyArray, dependencyQueue: ReadonlyArray) { - return new ResolutionProcess(modules, declarations, namespaces, dependencyQueue).run(); -} - -/** The status of a given dependency, once resolution has begun */ -enum DependencyStatus { - /** Initial state, not yet determined whether the dependency is resolvable */ - Resolving = 1, - /** Ideal state, the dependency has been resolved to a set of declaration ids */ - Resolved, - /** The dependency target does not exist, needs to be tracked for posterity */ - Dangling, - /** The dependency target depends on itself, and thus cannot ever be resolved */ - Circular +/** + * Using namespace dependencies added during the enumeration process, + * populate the local and export references of all namespaces. + */ +export default function resolveDependencies(modules: ReadonlyMap, declarations: ReadonlyArray, namespaces: ReadonlyArray, pureForwards: ReadonlyArray) { + return new ResolutionProcess(modules, declarations, namespaces, pureForwards).run(); } class ResolutionProcess extends CoreObject { - readonly dependencyQueue: LazyList; - /** namespace id -> name -> status */ - readonly localNameStatuses: ReadonlyMap> = new Map(); - /** namespace id -> name -> status */ - readonly exportNameStatuses: ReadonlyMap> = new Map(); readonly diagnostics: ReadonlyArray = []; constructor( - readonly modules: ReadonlyMap, + readonly modules: ReadonlyMap, readonly declarations: ReadonlyArray, readonly namespaces: ReadonlyArray, - dependencyQueue: ReadonlyArray - ) { - super(); - this.dependencyQueue = fromIterable(dependencyQueue); - } + readonly pureForwards: ReadonlyArray + ) { super(); } /** * The goal of this process is to populate the local and export names of every namespace in the program. - * All of the information required to do that is stored in the dependency queue, and all available - * modules and namespaces, including all available declarations within them, is stored in the - * corresponding registries. - * This process will simply consume the entire dependency queue, tracking the status of all dependencies + * All of the information required to do that is stored in the dependency info object, + * and all available modules and namespaces, including all available declarations within them, + * is stored in the corresponding registries. + * This process will simply consume all dependencies, tracking the status of all dependencies * until all of them are either resolved, dangling, or circular references. 
*/ run(): DependencyResolutionOutput { - const processed = this.consumeDependencyQueue(); + const next = this.processPureForwards(); + const processed = this.namespaces.reduce((p, _) => p.processNamespace(_.namespaceId), next); return processed.output(); } - consumeDependencyQueue(): ResolutionProcess { - if (this.dependencyQueue.empty) return this; - const { head, tail } = this.dependencyQueue; - const next: ResolutionProcess = this.processDependency(head).set('dependencyQueue', tail); - return next.consumeDependencyQueue(); - } - - processDependency(dependency: Dependency): ResolutionProcess { - if (dependency instanceof ImportedName) return this.processImportedName(dependency); - if (dependency instanceof ImportedNamespace) return this.processImportedNamespace(dependency); - if (dependency instanceof ForwardedName) return this.processForwardedName(dependency); - if (dependency instanceof ForwardedNamespace) return this.processForwardedNamespace(dependency); - if (dependency instanceof PureForward) return this.processPureForward(dependency); - if (dependency instanceof ExportedName) return this.processExportedName(dependency); - return this.processExportedDeclaration(dependency); - } - - processImportedName(dependency: ImportedName) { - // if it's already been processed, we're done here - if (this.isLocalNameDone(dependency.importNamespace, dependency.importName)) return this; - // flag it as resolving for successive dependencies - let next = this.setLocalNameStatus(dependency.importNamespace, dependency.importName, DependencyStatus.Resolving); - const { module, status } = next.modules.get(dependency.exportModule)!; - // make sure the module exists - if (status !== ModuleEnumerationStatus.SUCCESS) { - // module doesn't exist, the dependency is dangling - return next.setLocalNameStatus(dependency.importNamespace, dependency.importName, DependencyStatus.Dangling); + /** + * Pure forwards make things quite complicated. + * We definitely want them because they allow for simple module aggregation. + * However, we have made the stipulation that if a dependency can theoretically be resolved, + * it should be resolvable in this language. + * Because cyclical pure forwards can technically be resolvable, we have to handle that case. + * What a cycle of pure forwards means is that all members of the cycle share the same pool of exports. + * + * After MONTHS of deliberation, I have determined that the only way to effectively handle + * cyclical pure forwards is to handle pure forwards in their own step, because pure forwards + * are ultimately just replaced with normal named forwards. + * The reason this is so complicated is that in order to fully resolve pure forwards, we need to recurse + * down a potentially long, winding, and cyclical chain. I was unable to find a way to deal with that + * while at the same time handling the declaration bundling that comes with module dependencies. + * + * The basic process for handling pure forwards is to arrange them into a graph, where each node is a namespace. + * From there, we can use the graph to detect any cycles in this graph. + * For namespaces that are not members of a cycle, we can evaluate their pure forwards by recursing + * until all dependent pure forwards have been replaced with normal forwards. + * For cycles, we can evaluate their pure forwards by determining all namespaces that "supply" the cycle, + * group all those exports together, and add each of those exports as exports of every cycle member. 
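// A simplified standalone sketch of the shared-export-pool rule described above (illustrative
// names, not part of this patch). `suppliers[n]` is assumed to list the namespaces that n has
// a pure forward from, and `exportsOf[n]` its own named exports. Every member of a cycle ends
// up re-exporting the pooled exports of the cycle and of its outside suppliers, minus the
// names it already exports itself.
function poolCycleExports(
    cycle: ReadonlySet<number>,
    suppliers: ReadonlyArray<ReadonlyArray<number>>,
    exportsOf: ReadonlyArray<ReadonlySet<string>>
): Map<number, Set<string>> {
    // gather the shared pool: the exports of every cycle member and of every outside supplier
    const pool = new Set<string>();
    for (const member of cycle) {
        for (const name of exportsOf[member]) pool.add(name);
        for (const supplier of suppliers[member]) {
            if (!cycle.has(supplier)) for (const name of exportsOf[supplier]) pool.add(name);
        }
    }
    // each member re-exports everything in the pool that it does not already export directly
    const result = new Map<number, Set<string>>();
    for (const member of cycle) {
        const added = new Set<string>();
        for (const name of pool) if (!exportsOf[member].has(name)) added.add(name);
        result.set(member, added);
    }
    return result;
}

// e.g. for a two-module cycle where namespace 0 exports "a" and namespace 1 exports "b",
// poolCycleExports(new Set([0, 1]), [[1], [0]], [new Set(["a"]), new Set(["b"])])
// gives namespace 0 the extra export "b" and namespace 1 the extra export "a".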
+ * + * Seems a whole hell of a lot more complicated than it should be, but that's where we're at. + */ + processPureForwards(): ResolutionProcess { + let next = this; + let graph = new PureForwardGraph(this.namespaces.length); + // every forward is either an error or an edge in the graph + for (const fwd of this.pureForwards) { + const moduleRef = this.modules.get(fwd.exportModule)!; + if (moduleRef.status !== ModuleStatus.SUCCESS) { + // module was unresolved, add an error + next = next.mutate('diagnostics', _ => [..._, new Diagnostic(`Module ${fwd.exportModule} could not be resolved`, fwd.exportModuleLocation)]); + } else { + // valid module, add the forward to the graph + graph = graph.addForward(moduleRef.namespaceId, fwd.forwardNamespace, fwd); + } + } + // visited array + let visited: ReadonlyArray = range(this.namespaces.length).map(() => false); + // get them cycles + const cycles = graph.getCycles(); + // iterate all namespaces + for (const ns of range(this.namespaces.length)) { + [next, visited] = next.replacePureForwards(ns, visited, graph, cycles); + } + return next; + } + + replacePureForwards(ns: number, visited: ReadonlyArray, graph: PureForwardGraph, cycles: ReadonlyMap>): [this, ReadonlyArray] { + // avoid duplicate logic + if (visited[ns]) return [this, visited]; + let next = this; + // check if it is part of a cycle, because that changes everything + const cycle = cycles.get(ns); + if (cycle) { + // get all suppliers of the cycle, including the members of the cycle + const suppliers = cycle.union(cycle.flatMap(_ => graph.getSuppliers(_))); + // iterate all suppliers, populating the aggregate list of exports + let exports: ReadonlyArray<[number, string]> = []; // [namespace, export] + for (const supplier of suppliers) { + if (!cycle.has(supplier)) { + // non-cyclic suppliers should be treated like normal: recurse to handle its suppliers + [next, visited] = next.replacePureForwards(supplier, visited, graph, cycles); + } + // for all suppliers, add all of their exports to the list + // for cycle members this means that only their own exports will be added + exports = [...exports, ...[...next.namespaces[supplier].exports.keys()].map<[number, string]>(_ => [supplier, _])]; + } + // now we have the full shared pool of cycle exports, so we can replace forwards for the cycle members + for (const member of cycle) { + const directSuppliers = graph.getSuppliers(member); + // this is the default namespace to use as the supplier of a cyclic forward + const firstForwardedMember = directSuppliers.filter(_ => cycle.has(_))[0]; + for (const [supplier, exp] of exports) { + // determine what module to use for the forward: + // 1. if the supplier is the module, ignore it + if (member === supplier) continue; + // 2. if the supplier has a forward in the module, use that + // 3. otherwise, use the first forward from a member of the cycle (see above) + const pure = graph.getForward(directSuppliers.includes(supplier) ? 
supplier : firstForwardedMember, member)!;
+ const fwd = new PureForwardReplacement(member, exp, pure.exportModule, pure.exportModuleLocation, pure.starLocation);
+ next = next.mutate('namespaces', _ => _.mutate(member, _ => _.ensureExportTarget(exp, _ => _.addDependency(fwd))));
+ }
+ // mark the member visited because the whole cycle is handled here
+ visited = visited.iset(member, true);
+ }
+ } else {
+ // non-cyclical, we can just evaluate its suppliers
+ for (const supplier of graph.getSuppliers(ns)) {
+ [next, visited] = next.replacePureForwards(supplier, visited, graph, cycles);
+ const pureFwd = graph.getForward(supplier, ns)!;
+ // add a named forward for each of the supplier's exports
+ for (const exp of next.namespaces[supplier].exports.keys()) {
+ const fwd = new PureForwardReplacement(ns, exp, pureFwd.exportModule, pureFwd.exportModuleLocation, pureFwd.starLocation);
+ next = next.mutate('namespaces', _ => _.mutate(ns, _ => _.ensureExportTarget(exp, _ => _.addDependency(fwd))));
+ }
+ }
+ }
+ // namespace is now visited
+ return [next, visited.iset(ns, true)];
+ }
+
+ /**
+ * Given the id of a namespace, iterate all of its local and export name targets,
+ * processing all registered dependencies in each one.
+ * Afterwards, every name target in the namespace will be left with a terminal status.
+ */
+ processNamespace(nsid: number): ResolutionProcess {
+ // process all exports (simple heuristic that is likely to process most locals in-line)
+ const exports = [...this.namespaces[nsid].exports.keys()];
+ let process = exports.reduce((p, _) => p.processExportName(nsid, _, []), this);
+ // process all locals
+ const locals = [...process.namespaces[nsid].locals.keys()];
+ return locals.reduce((p, _) => p.processLocalName(nsid, _, []), process);
+ }
+
+ /**
+ * Given the id of a namespace and the name of one of its exports,
+ * fully resolve the export, processing all of its dependencies.
+ */
+ processExportName(nsid: number, name: string, chain: ReadonlyArray): ResolutionProcess {
+ // if we have reached a terminal state, then we're done already
+ if (this.isExportNameDone(nsid, name)) return this;
+ let process: ResolutionProcess = this;
+ // process each dependency in the target
+ while (process.getExport(nsid, name).dependencies.length) {
+ // grab the first dependency
+ const dep = process.getExport(nsid, name).dependencies[0];
+ process = process.removeExportDependency(nsid, name, 0);
+ // process the dependency
+ process = process.processDependency(dep, chain);
+ }
+ // set the name target status to the aggregate status
+ return process.mutateExport(nsid, name, _ => _.setAggregateStatus());
+ }
+
+ /**
+ * Given the id of a namespace and the name of one of its locals,
+ * fully resolve the local, processing all of its dependencies.
+ */ + processLocalName(nsid: number, name: string, chain: ReadonlyArray): ResolutionProcess { + // if we have reached a terminal state, then we're done already + if (this.isLocalNameDone(nsid, name)) return this; + let process: ResolutionProcess = this; + // process each dependency in the target + while (process.getLocal(nsid, name).dependencies.length) { + // grab the first dependency + const dep = process.getLocal(nsid, name).dependencies[0]; + process = process.removeLocalDependency(nsid, name, 0); + // process the dependency + process = process.processDependency(dep, chain); + } + // set the name target status to the aggregate status + return process.mutateLocal(nsid, name, _ => _.setAggregateStatus()); + } + + /** + * Given a dependency and the current dependency chain, + * process the dependency, resulting in the dependency being replaced by a corresponding reference. + * The basic process here is to: + * 1. Check if the dependency is in the current chain, meaning that it is circular, and the chain should stop. + * 2. Check if the target of the dependency exists, and if not, it's a dangling reference. + * 3. Recurse to the target of the dependency to fully resolve it. + * 4. Set a resolved reference if the target was resolved to at least one declaration. + * 5. Set an empty reference if the target was dangling. + * 6. Set a circular reference if the target was circular. + */ + processDependency(dependency: Dependency, chain: ReadonlyArray): ResolutionProcess { + if (dependency instanceof ImportedName) return this.processImportedName(dependency, chain); + if (dependency instanceof ImportedNamespace) return this.processImportedNamespace(dependency, chain); + if (dependency instanceof ForwardedName) return this.processForwardedName(dependency, chain); + if (dependency instanceof PureForwardReplacement) return this.processPureForwardReplacement(dependency, chain); + if (dependency instanceof ForwardedNamespace) return this.processForwardedNamespace(dependency, chain); + return this.processExportedName(dependency, chain); + } + + /** + * Imported names result in remote local references to a specific export name. + * TODO: figure out if it is feasible to reduce duplicate logic across the different dependencies. + */ + processImportedName(dependency: ImportedName, chain: ReadonlyArray): ResolutionProcess { + const { importNamespace, importName, exportModule, exportName, exportModuleLocation } = dependency; + // circular check + if (chain.includes(dependency)) + return this.addLocalReference(importNamespace, importName.image, new RemoteCircularReference(exportModule, exportName.image)) + // TODO: we should only add a diagnostic if all references are circular + .addDiagnostic(`Dependency on export "${exportName.image}" from module "${exportModule}" is circular`, exportName.location); + // dangling module check + // TODO: need full path, not just the path of the dependency, should this be set by enumeration? 
+ const moduleRef = this.modules.get(exportModule); + if (!moduleRef || moduleRef.status !== ModuleStatus.SUCCESS) + return this.addLocalReference(importNamespace, importName.image, new MissingModule(exportModule, exportName.image)) + .addDiagnostic(`Module "${exportModule}" does not exist`, exportModuleLocation); + const exportNamespace = moduleRef.namespaceId; + // dangling export check + if (!this.namespaces[exportNamespace].exports.has(exportName.image)) + return this.addLocalReference(importNamespace, importName.image, new MissingExport(exportModule, exportName.image)) + .addDiagnostic(`Module "${exportModule}" has no exported member "${exportName.image}"`, exportName.location); + // export exists, traverse to it + let process = this.processExportName(exportNamespace, exportName.image, [...chain, dependency]); + // get the aggregate status + const exp = process.getExport(exportNamespace, exportName.image); + switch (exp.status) { + case NameTargetStatus.FULLY_RESOLVED: + // add a reference for each resolved reference of the target + return exp.references.filter((_): _ is ResolvedReference => _.status === NameTargetStatus.FULLY_RESOLVED) + .reduce((p, _) => p.addLocalReference(importNamespace, importName.image, new RemoteName(exportModule, exportName.image, _.resolvedDeclarationId)), process); + case NameTargetStatus.DANGLING: + case NameTargetStatus.EMPTY: + return process.addLocalReference(importNamespace, importName.image, new RemoteEmptyReference(exportModule, exportName.image)); + case NameTargetStatus.CIRCULAR: + return process.addLocalReference(importNamespace, importName.image, new RemoteCircularReference(exportModule, exportName.image)); + default: + throw new Error('This isn\'t supposed to happen'); } } /** - * Determines if a local name has reached a terminal status. + * Imported namespaces result in remote local references to a namespace. + * Interestingly enough, because we don't need to descend for these dependencies, it is impossible for them to be circular. */ - isLocalNameDone(namespaceId: number, name: string) { - const ns = this.localNameStatuses.get(namespaceId); - if (!ns) return false; - const n = ns.get(name); - if (!n) return false; - return n !== DependencyStatus.Resolving; + processImportedNamespace(dependency: ImportedNamespace, chain: ReadonlyArray): ResolutionProcess { + const { importNamespace, importName, exportModule, exportModuleLocation } = dependency; + // dangling module check + // TODO: need full path, not just the path of the dependency, should this be set by enumeration? 
+        const moduleRef = this.modules.get(exportModule);
+        if (!moduleRef || moduleRef.status !== ModuleStatus.SUCCESS)
+            return this.addLocalReference(importNamespace, importName.image, new MissingModule(exportModule, null))
+                .addDiagnostic(`Module "${exportModule}" does not exist`, exportModuleLocation);
+        // module exists, the dependency is immediately resolvable
+    }
+
+    // #region Helpers
+
+    isExportNameDone(nsid: number, name: string) {
+        return this.getExport(nsid, name).status !== NameTargetStatus.NOT_RESOLVED;
+    }
+
+    isLocalNameDone(nsid: number, name: string) {
+        return this.getLocal(nsid, name).status !== NameTargetStatus.NOT_RESOLVED;
+    }
+
+    getExport(nsid: number, name: string): NameTarget {
+        return this.namespaces[nsid].exports.get(name)!;
+    }
+
+    getLocal(nsid: number, name: string): NameTarget {
+        return this.namespaces[nsid].locals.get(name)!;
     }
 
-    setLocalNameStatus(namespaceId: number, name: string, status: DependencyStatus): ResolutionProcess {
-        const ns = this.localNameStatuses.get(namespaceId) || new Map();
-        return this.mutate('localNameStatuses', _ => _.iset(namespaceId, ns.iset(name, status)));
+    mutateExport(nsid: number, name: string, fn: (value: NameTarget) => NameTarget): ResolutionProcess {
+        return this.mutate('namespaces', _ => _.mutate(nsid, _ => _.mutateExportTarget(name, fn)));
     }
 
+    mutateLocal(nsid: number, name: string, fn: (value: NameTarget) => NameTarget) {
+        return this.mutate('namespaces', _ => _.mutate(nsid, _ => _.mutateLocalTarget(name, fn)));
+    }
+
+    removeExportDependency(nsid: number, name: string, idx: number) {
+        return this.mutateExport(nsid, name, _ => _.mutate('dependencies', _ => _.idelete(idx)));
+    }
+
+    removeLocalDependency(nsid: number, name: string, idx: number) {
+        return this.mutateLocal(nsid, name, _ => _.mutate('dependencies', _ => _.idelete(idx)));
+    }
+
+    addExportReference(nsid: number, name: string, ref: Reference) {
+        return this.mutateExport(nsid, name, _ => _.mutate('references', _ => [..._, ref]));
+    }
+
+    addLocalReference(nsid: number, name: string, ref: Reference) {
+        return this.mutateLocal(nsid, name, _ => _.mutate('references', _ => [..._, ref]));
+    }
+
+    addDiagnostic(message: string, location: FileRange, level = DiagnosticLevel.Error): ResolutionProcess {
+        return this.mutate('diagnostics', _ => [..._, new Diagnostic(message, location, level)]);
+    }
+
+    // #endregion
+
     output = (): DependencyResolutionOutput => ({
         namespaces: this.namespaces,
         diagnostics: this.diagnostics
diff --git a/src/semantic/program.ts b/src/semantic/program.ts
index 4fdba53..0715049 100644
--- a/src/semantic/program.ts
+++ b/src/semantic/program.ts
@@ -1,5 +1,5 @@
 import { Diagnostic, CoreObject } from '~/core';
-import { DeclaredEntity, Namespace } from './namespace';
+import { Declaration, Namespace } from './namespace';
 
 /**
@@ -9,6 +9,6 @@ import { DeclaredEntity, Namespace } from './namespace';
 export class Program extends CoreObject {
     readonly modules: ReadonlyMap = new Map();
     readonly namespaces: ReadonlyArray<Namespace> = [];
-    readonly declarations: ReadonlyArray<DeclaredEntity> = [];
+    readonly declarations: ReadonlyArray<Declaration> = [];
     readonly diagnostics: ReadonlyArray<Diagnostic> = [];
 }
diff --git a/yarn.lock b/yarn.lock
index d31511d..19ff3cc 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -3469,10 +3469,14 @@ typescript@^2.4.2:
   version "2.6.1"
   resolved "https://registry.yarnpkg.com/typescript/-/typescript-2.6.1.tgz#ef39cdea27abac0b500242d6726ab90e0c846631"
 
-typescript@^2.6.1, typescript@^2.8.3:
+typescript@^2.6.1:
   version "2.8.3"
"https://registry.yarnpkg.com/typescript/-/typescript-2.8.3.tgz#5d817f9b6f31bb871835f4edf0089f21abe6c170" +typescript@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.0.3.tgz#4853b3e275ecdaa27f78fda46dc273a7eb7fc1c8" + uglify-js@^2.6: version "2.8.29" resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-2.8.29.tgz#29c5733148057bb4e1f75df35b7a9cb72e6a59dd"