Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Parser Toolkit Grammar Compiler #9

Draft
wants to merge 20 commits into
base: master
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .envrc
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
use_nix
245 changes: 227 additions & 18 deletions build.zig
Original file line number Diff line number Diff line change
@@ -1,31 +1,240 @@
const std = @import("std");

/// Build graph for parser-toolkit: exposes the `parser-toolkit` module,
/// builds the `ptkgen` grammar-compiler executable, wires up unit and
/// integration tests, and builds/runs the calculator example.
///
/// NOTE(review): the diff as scraped interleaved removed and added lines
/// (duplicate `test_step` / `calculator_example` declarations); this is the
/// reconstructed post-change version with the stale pre-change lines removed.
pub fn build(b: *std.build.Builder) void {
    // build options:
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});

    // Top-level steps exposed on the `zig build` command line.
    const test_step = b.step("test", "Run library tests");
    const examples_step = b.step("examples", "Builds and installs examples");
    const run_calc_step = b.step("run-calculator", "Runs calculator example");

    // "all" aggregates install, tests and examples into one step.
    const all_step = b.step("all", "Builds everything, tests everything");
    all_step.dependOn(b.getInstallStep());
    all_step.dependOn(test_step);
    all_step.dependOn(examples_step);

    // dependencies (declared in build.zig.zon)

    const args_dep = b.dependency("args", .{});

    // external modules

    const args_mod = args_dep.module("args");

    // internal modules

    const ptk_mod = b.addModule("parser-toolkit", .{
        .source_file = .{ .path = "src/toolkit/main.zig" },
        .dependencies = &.{},
    });

    // Applications
    const ptkdef_exe = blk: {
        const ptkdef = b.addExecutable(.{
            .name = "ptkgen",
            .root_source_file = .{ .path = "src/ptkgen/main.zig" },
            .optimize = optimize,
            .target = target,
        });

        ptkdef.addModule("parser-toolkit", ptk_mod);
        ptkdef.addModule("args", args_mod);

        b.installArtifact(ptkdef);

        break :blk ptkdef;
    };

    // test suite
    {
        // unit tests for ptk:
        var ptk_tests = b.addTest(.{
            .root_source_file = ptk_mod.source_file,
            .optimize = optimize,
        });
        // Forward the module's dependencies into its test compilation.
        for (ptk_mod.dependencies.keys()) |dep_name| {
            ptk_tests.addModule(dep_name, ptk_mod.dependencies.get(dep_name).?);
        }
        test_step.dependOn(&b.addRunArtifact(ptk_tests).step);

        // unit tests for ptkgen:
        var ptkgen_tests = b.addTest(.{
            .root_source_file = .{ .path = "src/ptkgen/main.zig" },
            .optimize = optimize,
        });
        ptkgen_tests.addModule("parser-toolkit", ptk_mod);
        test_step.dependOn(&b.addRunArtifact(ptkgen_tests).step);

        // Integration tests for ptkgen: parser-level accept/reject files.
        for (parser_accept_files ++ parser_reject_files) |file| {
            const run = b.addRunArtifact(ptkdef_exe);
            run.addArg("--test_mode=parse_only");
            run.addFileArg(.{ .path = file });
            test_step.dependOn(&run.step);
        }

        // Integration tests for ptkgen: semantic analysis, no code generation.
        for (analyis_accept_files ++ analyis_reject_files) |file| {
            const run = b.addRunArtifact(ptkdef_exe);
            run.addArg("--test_mode=no_codegen");
            run.addFileArg(.{ .path = file });
            test_step.dependOn(&run.step);
        }
    }

    // examples
    {
        const calculator_example = b.addExecutable(.{
            .root_source_file = .{ .path = "examples/calculator.zig" },
            .name = "calculator",
            .optimize = optimize,
        });
        calculator_example.addModule("parser-toolkit", ptk_mod);
        examples_step.dependOn(&b.addInstallArtifact(calculator_example, .{}).step);

        run_calc_step.dependOn(&b.addRunArtifact(calculator_example).step);
    }
}

// Example grammars shipped with the repository; fed through the analysis
// integration tests (see analyis_accept_files).
// Fixed: the first entry was an absolute path to a developer's home
// directory (/home/felix/...), which broke the build on every other machine.
const example_files = [_][]const u8{
    "examples/ptkgen/grammar.ptk",
    "examples/ptkgen/ast-with-unions.ptk",
};

// Grammar files whose semantic analysis must succeed (run with
// --test_mode=no_codegen in the build's test suite).
// NOTE(review): identifier misspells "analysis" as "analyis"; it is
// referenced elsewhere in this file, so the name is kept as-is here.
const analyis_accept_files = [_][]const u8{
// Production matching: literals, groups, optionals, repetitions.
"test/analysis/accept/match-literal-rule.ptk",
"test/analysis/accept/match-literal-sequence.ptk",
"test/analysis/accept/match-literal-variants.ptk",
"test/analysis/accept/match-literal-sequence-variant.ptk",
"test/analysis/accept/match-group-one-item.ptk",
"test/analysis/accept/match-group-one-sequence.ptk",
"test/analysis/accept/match-group-many-item.ptk",
"test/analysis/accept/match-group-many-sequence.ptk",
"test/analysis/accept/match-group-nested.ptk",
"test/analysis/accept/match-optional-one-item.ptk",
"test/analysis/accept/match-optional-one-sequence.ptk",
"test/analysis/accept/match-optional-many-item.ptk",
"test/analysis/accept/match-optional-many-sequence.ptk",
"test/analysis/accept/match-optional-nested.ptk",
"test/analysis/accept/match-rep_zero-one-item.ptk",
"test/analysis/accept/match-rep_zero-one-sequence.ptk",
"test/analysis/accept/match-rep_zero-many-item.ptk",
"test/analysis/accept/match-rep_zero-many-sequence.ptk",
"test/analysis/accept/match-rep_zero-nested.ptk",
"test/analysis/accept/match-rep_one-one-item.ptk",
"test/analysis/accept/match-rep_one-one-sequence.ptk",
"test/analysis/accept/match-rep_one-many-item.ptk",
"test/analysis/accept/match-rep_one-many-sequence.ptk",
"test/analysis/accept/match-rep_one-nested.ptk",

// Start-symbol declaration.
"test/analysis/accept/start-decl.ptk",

// Token pattern kinds.
"test/analysis/accept/pattern-custom.ptk",
"test/analysis/accept/pattern-literal.ptk",
"test/analysis/accept/pattern-regex.ptk",
"test/analysis/accept/pattern-word.ptk",

// Token patterns with the "skip" modifier.
"test/analysis/accept/pattern-word-skip.ptk",
"test/analysis/accept/pattern-regex-skip.ptk",
"test/analysis/accept/pattern-literal-skip.ptk",
"test/analysis/accept/pattern-custom-skip.ptk",
} ++ example_files;

// Grammar files whose semantic analysis must fail (run with
// --test_mode=no_codegen in the build's test suite).
const analyis_reject_files = [_][]const u8{
// Duplicate top-level declarations.
"test/analysis/reject/duplicate-node.ptk",
"test/analysis/reject/duplicate-pattern.ptk",
"test/analysis/reject/duplicate-rule.ptk",

// NOTE(review): this entry lives in the accept/ directory but is listed
// among the reject files — confirm whether the path or the list is wrong.
"test/analysis/accept/expect-warn-missing-start.ptk",

// Start-symbol problems.
"test/analysis/reject/undeclared-start.ptk",
"test/analysis/reject/duplicate-undeclared-start.ptk",
"test/analysis/reject/duplicate-start.ptk",

// Duplicate fields in composite node types.
"test/analysis/reject/duplicate-field-record.ptk",
"test/analysis/reject/duplicate-field-variant.ptk",

// References to undeclared patterns/rules inside productions.
"test/analysis/reject/production-undeclared-pattern-ref.ptk",
"test/analysis/reject/production-undeclared-rule-ref.ptk",
};

// Grammar files the parser must accept (run with --test_mode=parse_only).
// Everything that passes analysis must also parse, hence the ++ at the end.
const parser_accept_files = [_][]const u8{
// Trivial documents and comments.
"test/parser/accept/empty.ptk",
"test/parser/accept/empty-with-comment-linefeed.ptk",
"test/parser/accept/empty-with-comment.ptk",
"test/parser/accept/identifiers.ptk",

// Quantifier syntax with and without surrounding whitespace.
"test/parser/accept/optional-nospace.ptk",
"test/parser/accept/optional-space.ptk",
"test/parser/accept/rep_one-nospace.ptk",
"test/parser/accept/rep_one-space.ptk",
"test/parser/accept/rep_zero-nospace.ptk",
"test/parser/accept/rep_zero-space.ptk",

// Rule and token references inside productions.
"test/parser/accept/basic-rule-ref.ptk",
"test/parser/accept/basic-token-ref.ptk",
"test/parser/accept/rule-primitive-sequence.ptk",

"test/parser/accept/document-start.ptk",

// Mapping expressions.
"test/parser/accept/mapping-value-ref.ptk",
"test/parser/accept/mapping-code-literal.ptk",
"test/parser/accept/mapping-user-value.ptk",

// Built-in function invocations with 0, 1, 5 and nested arguments.
"test/parser/accept/mapping-builtin-function-a0.ptk",
"test/parser/accept/mapping-builtin-function-a1.ptk",
"test/parser/accept/mapping-builtin-function-a5.ptk",
"test/parser/accept/mapping-builtin-function-nest.ptk",

// User function invocations with 0, 1, 5 and nested arguments.
"test/parser/accept/mapping-user-function-a0.ptk",
"test/parser/accept/mapping-user-function-a1.ptk",
"test/parser/accept/mapping-user-function-a5.ptk",
"test/parser/accept/mapping-user-function-nest.ptk",

// Array construction with 0, 1, 5 and nested elements.
"test/parser/accept/mapping-array-a0.ptk",
"test/parser/accept/mapping-array-a1.ptk",
"test/parser/accept/mapping-array-a5.ptk",
"test/parser/accept/mapping-array-nested.ptk",

"test/parser/accept/mapping-variant-init.ptk",

// Record initializers with one and three fields.
"test/parser/accept/mapping-record-init-f1.ptk",
"test/parser/accept/mapping-record-init-f3.ptk",

// Type specifiers on rules.
"test/parser/accept/rule-typespec-custom.ptk",
"test/parser/accept/rule-typespec-ref.ptk",
"test/parser/accept/rule-typespec-literal.ptk",

// Node declarations.
"test/parser/accept/node-alias.ptk",
"test/parser/accept/node-custom.ptk",
"test/parser/accept/node-literal.ptk",

"test/parser/accept/node-record-f1.ptk",
"test/parser/accept/node-record-f4.ptk",

"test/parser/accept/node-variant-f4.ptk",
"test/parser/accept/node-variant-f1.ptk",
} ++ analyis_accept_files;

// Grammar files the parser must reject (run with --test_mode=parse_only).
const parser_reject_files = [_][]const u8{
// Empty constructs that the grammar forbids.
"test/parser/reject/empty-rule.ptk",
"test/parser/reject/empty-group.ptk",
"test/parser/reject/empty-optional.ptk",
"test/parser/reject/empty-rep_one.ptk",
"test/parser/reject/empty-rep_zero.ptk",

"test/parser/reject/unexpected-token-string.ptk",

// Malformed mappings.
"test/parser/reject/empty-mapping.ptk",
"test/parser/reject/bad-mapping-invalid-token.ptk",
"test/parser/reject/bad-mapping-too-long.ptk",

// Declarations with missing or malformed parts.
"test/parser/reject/node-no-type.ptk",
"test/parser/reject/rule-no-type.ptk",
"test/parser/reject/rule-no-type-no-prod.ptk",
"test/parser/reject/rule-bad-prod.ptk",

"test/parser/reject/pattern-unexpected-token.ptk",
};
10 changes: 10 additions & 0 deletions build.zig.zon
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
// Zig package manifest for parser-toolkit.
.{
.name = "parser-toolkit",
.version = "0.2.0",
// Pinned third-party dependencies, fetched by the Zig package manager
// and verified against the content hash below.
.dependencies = .{
.args = .{
.url = "https://github.com/MasterQ32/zig-args/archive/7989929d055ef7618e60de84cc54644046516fdb.tar.gz",
.hash = "12207752d975a7f5d7cc65662ed1c6b117da8dec6d1bd7af9a39e1b65d90bf86e833",
},
},
}
4 changes: 4 additions & 0 deletions design/ptkdefv/design.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
# Parser Generator Language

Create basic recursive descent parsers with "well-known" patterns that output a Zig AST data structure.

43 changes: 43 additions & 0 deletions design/ptkdefv/grammar.ptk
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@


root <document>; # <...> is a "rule reference"

token identifier = regex "[A-Za-z_][A-Za-z0-9_]*"; # defines token "identifier" to match this regex

token line-comment = regex "//[^\n]*" skip; # ignores this token when parsing, but tokenizer recognizes it
token whitespace = regex "[ \t\r\n]" skip;

rule document =
# [ ... ] is a loop construct, can appear several times
[ <using> ] <namespace-decl> [ <toplevel-decl> ]*
;

rule toplevel-decl =
# | is a "either/or" scenario, with precedence from left to right (first come, first serve)
<namespace-group> | <interface-decl> | <module-decl>
;

rule interface-decl =
"interface" $identifier "(" ... ")" ";"
;

rule module-decl =
"module" $identifier "(" ... ")" "{" ... "}" ";"
;

rule using =
# "bla" is a literal token
# $bla is an explicitly defined token reference
# ...? is an optional part of a parse
"using" <compound-identifier> ";" ( "as" $identifier )?
;

rule namespace-decl =
"namespace" <compound-identifier> ";"
;

rule compound-identifier =
$identifier [ "." $identifier ]*
;


37 changes: 37 additions & 0 deletions design/ptkdefv/mapping-concept-01.ptk
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@

# "!id" is a type reference
# "$id" is a token reference
# "<id>" is a rule reference

# maps type "array" to a slice/arraylist of whatever "int" is
node array = sequence !int;

# "int" is the Zig type "i32"
node int = literal "i32";

# the initial rule is "list", also determines the root type of the ast
start <list>;

# "decimal" token is a decimal number sequence token
token decimal = regex "\d+";

# "list" is a sequence of decimals with comma separated, potential trailing comma,
# enclosed by square brackets
rule list = "[" [ $decimal "," ] $decimal? "]";
# $0 $1______________ $2_______ $3

# the rule "list" is mapped to the type "array"
# as a sequence of the second element (unwrapped into items) and
# the third item appended. square brackets in a map are the "construct array operator".
# if the array is not sequence of optionals, optional items are skipped in construction
map <list> !array = [ $1..., $2 ];

# the "decimal" token is mapped to i32 by invoking a Zig function called
# "parse" that takes the token as input and returns "i32":
map $decimal !int = @parse($0);






Loading