diff --git a/build.zig b/build.zig
index 0d899b8..9f8cd5d 100644
--- a/build.zig
+++ b/build.zig
@@ -33,6 +33,9 @@ pub fn build(b: *std.Build) void {
     exe.addModule("parser", b.addModule("parser", .{
         .source_file = .{ .path = "src/parser.zig" },
     }));
+    exe.addModule("treewalker", b.addModule("treewalker", .{
+        .source_file = .{ .path = "src/treewalker.zig" },
+    }));
 
     // This declares intent for the executable to be installed into the
     // standard location when the user invokes the "install" step (the default
diff --git a/src/main.zig b/src/main.zig
index 9eca92f..ff437e1 100644
--- a/src/main.zig
+++ b/src/main.zig
@@ -1,6 +1,7 @@
 const std = @import("std");
 const tokenize = @import("tokenizer.zig").tokenize;
 const parse = @import("parser.zig").parse;
+const treewalk = @import("treewalker.zig").interpret;
 
 pub fn main() !void
 {
@@ -35,5 +36,5 @@ pub fn main() !void
     var parserAllocator = std.heap.ArenaAllocator.init(std.heap.page_allocator);
     defer parserAllocator.deinit();
     const root = try parse(tokens, &parserAllocator);
-    root.dump(0);
+    try treewalk(root, allocator);
 }
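Note: src/treewalker.zig itself is not part of this patch. Judging purely from the new import and the call site above, the module presumably exports something shaped like the following hypothetical sketch (not the actual implementation):

// Hypothetical shape inferred from `@import("treewalker.zig").interpret`
// and the call `try treewalk(root, allocator)` in main.zig.
const std = @import("std");
const ChunkNode = @import("parser.zig").ChunkNode;

pub fn interpret(root: ChunkNode, allocator: std.mem.Allocator) !void
{
    // Walk the chunk's block and evaluate each statement in order.
    _ = root;
    _ = allocator;
}
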
diff --git a/src/parser.zig b/src/parser.zig
index cf666f2..729150b 100644
--- a/src/parser.zig
+++ b/src/parser.zig
@@ -2,14 +2,17 @@ const Token = @import("tokenizer.zig").Token;
 const TokenType = @import("tokenizer.zig").TokenType;
 const std = @import("std");
 const types = @import("types.zig");
+const CodeRegion = @import("types.zig").CodeRegion;
 
 pub const ChunkNode = struct
 {
     block: BlockNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
 
     pub fn dump(self: *const ChunkNode, indent: usize) void
     {
-        std.debug.print("ChunkNode:\n", .{});
+        std.debug.print("ChunkNode ({} - {}):\n", .{self.startRegion, self.endRegion});
         for (0..indent) |_|
         {
             std.debug.print("\t", .{});
@@ -33,6 +36,8 @@ pub const BlockNode = struct
 {
     stats: std.ArrayList(StatNode),
     retstat: ?RetstatNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
 
     fn dump(self: *const BlockNode, indent: usize) void
     {
@@ -85,9 +90,12 @@ pub const BlockNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const RetstatNode = struct
+pub const RetstatNode = struct
 {
     values: ?ExplistNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
+
     fn dump(self: *const RetstatNode, indent: usize) void
     {
         std.debug.print("Retstat Node:\n", .{});
@@ -117,7 +125,7 @@ const RetstatNode = struct
     }
 };
 
-const StatNode = union(enum)
+pub const StatNode = union(enum)
 {
     Semicolon,
     Assignment: AssignmentNode,
@@ -157,9 +165,12 @@ fn dumpStatNode(stat: StatNode, indent: usize) void
     }
 }
-const AssignmentNode = struct
+pub const AssignmentNode = struct
 {
     lhs: VarlistNode,
     rhs: ExplistNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
+
     fn dump(self: *const AssignmentNode, indent: usize) void
     {
         std.debug.print("Assignment Node:\n", .{});
@@ -187,10 +198,13 @@ const AssignmentNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const WhileNode = struct
+pub const WhileNode = struct
 {
     condition: ExpNode,
     body: BlockNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
+
     fn dump(self: *const WhileNode, indent: usize) void
     {
         std.debug.print("While Node:\n", .{});
@@ -218,10 +232,13 @@ const WhileNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const RepeatNode = struct
+pub const RepeatNode = struct
 {
     condition: ExpNode,
     body: BlockNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
+
     fn dump(self: *const RepeatNode, indent: usize) void
     {
         std.debug.print("Repeat Node:\n", .{});
@@ -249,12 +266,15 @@ const RepeatNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const IfNode = struct
+pub const IfNode = struct
 {
     condition: ExpNode,
     body: BlockNode,
     elseifs: std.ArrayList(ElseifNode),
     else_: ?BlockNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
+
     fn dump(self: *const IfNode, indent: usize) void
     {
         std.debug.print("If Node:\n", .{});
@@ -318,13 +338,16 @@ const IfNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const ForNumericalNode = struct
+pub const ForNumericalNode = struct
 {
     variable: []u8,
     start: ExpNode,
     end: ExpNode,
     change: ?ExpNode,
     body: BlockNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
+
     fn dump(self: *const ForNumericalNode, indent: usize) void
     {
         std.debug.print("For Numerical Node:\n", .{});
@@ -376,11 +399,14 @@ const ForNumericalNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const ForGenericNode = struct
+pub const ForGenericNode = struct
 {
     vars: std.ArrayList([]u8),
     exps: ExplistNode,
     body: BlockNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
+
     fn dump(self: *const ForGenericNode, indent: usize) void
     {
         std.debug.print("For Generic Node:\n", .{});
@@ -427,10 +453,13 @@ const ForGenericNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const FunctionNode = struct
+pub const FunctionNode = struct
 {
     name: FuncnameNode,
     body: FuncbodyNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
+
     fn dump(self: *const FunctionNode, indent: usize) void
     {
         std.debug.print("Function Node:\n", .{});
@@ -458,10 +487,13 @@ const FunctionNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const LocalFunctionNode = struct
+pub const LocalFunctionNode = struct
 {
     name: []u8,
     body: FuncbodyNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
+
     fn dump(self: *const LocalFunctionNode, indent: usize) void
     {
         std.debug.print("Local Function Node:\n", .{});
@@ -489,10 +521,13 @@ const LocalFunctionNode = struct
     }
 };
 
-const LocalNode = struct
+pub const LocalNode = struct
 {
     attnames: AttnamelistNode,
     values: ?ExplistNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
+
     fn dump(self: *const LocalNode, indent: usize) void
     {
         std.debug.print("Local Node:\n", .{});
@@ -527,11 +562,14 @@ const LocalNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const FunctioncallNode = struct
+pub const FunctioncallNode = struct
 {
     function: SuffixexpNode,
     objectArg: ?[]u8,
     args: ArgsNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
+
     fn dump(self: *const FunctioncallNode, indent: usize) void
     {
         std.debug.print("Functioncall Node:\n", .{});
@@ -572,9 +610,12 @@ const FunctioncallNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const VarlistNode = struct
+pub const VarlistNode = struct
 {
     vars: std.ArrayList(VarNode),
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
+
     fn dump(self: *const VarlistNode, indent: usize) void
     {
         std.debug.print("Varlist Node:\n", .{});
@@ -613,9 +654,12 @@ const VarlistNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const ExplistNode = struct
+pub const ExplistNode = struct
 {
     exps: std.ArrayList(ExpNode),
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
+
     fn dump(self: *const ExplistNode, indent: usize) void
     {
         std.debug.print("Explist Node:\n", .{});
@@ -654,7 +698,7 @@ const ExplistNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const ExpNode = union(enum)
+pub const ExpNode = union(enum)
 {
     Nil,
     False,
@@ -685,10 +729,13 @@ fn dumpExpNode(expNode: ExpNode, indent: usize) void
         .Binop => |*node| node.*.dump(indent),
     }
 }
-const UnopNode = struct
+pub const UnopNode = struct
 {
     unopType: UnopType,
     exp: *ExpNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
+
     fn dump(self: *const UnopNode, indent: usize) void
     {
         std.debug.print("Unop Node:\n", .{});
@@ -715,11 +762,14 @@ const UnopNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const BinopNode = struct
+pub const BinopNode = struct
 {
     lhs: ExpNode,
     op: BinopType,
     rhs: ExpNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
+
     fn dump(self: *const BinopNode, indent: usize) void
     {
         std.debug.print("Binop Node:\n", .{});
@@ -752,10 +802,13 @@ const BinopNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const ElseifNode = struct
+pub const ElseifNode = struct
 {
     condition: ExpNode,
     body: BlockNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
+
     fn dump(self: *const ElseifNode, indent: usize) void
     {
         std.debug.print("Elseif Node:\n", .{});
@@ -783,11 +836,14 @@ const ElseifNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const FuncnameNode = struct
+pub const FuncnameNode = struct
 {
     name: []u8,
     dottedNames: std.ArrayList([]u8),
     firstArg: ?[]u8,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
+
     fn dump(self: *const FuncnameNode, indent: usize) void
     {
         std.debug.print("Funcname Node:\n", .{});
@@ -845,10 +901,12 @@ const FuncnameNode = struct
     }
 };
 
-const FuncbodyNode = struct
+pub const FuncbodyNode = struct
 {
     pars: ?ParlistNode,
     body: BlockNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
 
     fn dump(self: *const FuncbodyNode, indent: usize) void
     {
@@ -883,9 +941,12 @@ const FuncbodyNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const AttnamelistNode = struct
+pub const AttnamelistNode = struct
 {
     attnames: std.ArrayList(AttnameNode),
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
+
     fn dump(self: *const AttnamelistNode, indent: usize) void
     {
         std.debug.print("Attnamelist Node:\n", .{});
@@ -924,10 +985,13 @@ const AttnamelistNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const AttnameNode = struct
+pub const AttnameNode = struct
 {
     name: []u8,
     attribute: ?[]u8,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
+
     fn dump(self: *const AttnameNode, indent: usize) void
     {
         std.debug.print("Funcname Node:\n", .{});
@@ -961,7 +1025,7 @@ const AttnameNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const SuffixexpNode = union(enum)
+pub const SuffixexpNode = union(enum)
 {
     Normal: NormalSuffixNode,
     Functioncall: *FunctioncallNode,
@@ -974,7 +1038,7 @@ fn dumpSuffixExpNode(suffixexpNode: SuffixexpNode, indent: usize) void
         .Functioncall => |*node| node.*.dump(indent),
     }
 }
-const ArgsNode = union(enum)
+pub const ArgsNode = union(enum)
 {
     Bracketed: ?ExplistNode,
     Tableconstructor: TableconstructorNode,
@@ -999,7 +1063,7 @@ fn dumpArgsNode(argsNode: ArgsNode, indent: usize) void
         .Literal => |*string| std.debug.print("Literal: '{s}'\n", .{string}),
     }
 }
-const VarNode = union(enum)
+pub const VarNode = union(enum)
 {
     Name: []u8,
     Indexed: IndexedVarNode,
@@ -1014,10 +1078,13 @@ fn dumpVarNode(varNode: VarNode, indent: usize) void
         .Member => |*node| node.dump(indent),
     }
 }
-const IndexedVarNode = struct
+pub const IndexedVarNode = struct
 {
     value: SuffixexpNode,
     index: ExpNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
+
     fn dump(self: *const IndexedVarNode, indent: usize) void
     {
         std.debug.print("Indexed Var Node:\n", .{});
@@ -1045,10 +1112,13 @@ const IndexedVarNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const MemberVarNode = struct
+pub const MemberVarNode = struct
 {
     value: SuffixexpNode,
     name: []u8,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
+
     fn dump(self: *const MemberVarNode, indent: usize) void
     {
         std.debug.print("Member Var Node:\n", .{});
@@ -1076,9 +1146,12 @@ const MemberVarNode = struct
     }
 };
 
-const TableconstructorNode = struct
+pub const TableconstructorNode = struct
 {
     exps: ?FieldlistNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
+
     fn dump(self: *const TableconstructorNode, indent: usize) void
     {
         std.debug.print("Tableconstructor Node:\n", .{});
@@ -1107,11 +1180,11 @@ const TableconstructorNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const UnopType = enum
+pub const UnopType = enum
 {
     Minus, LogicalNot, Length, BinaryNot,
 };
 
-const BinopType = enum
+pub const BinopType = enum
 {
     LogicalOr, LocicalAnd,
@@ -1125,10 +1198,13 @@ const BinopType = enum
     Mul, Div, IntDiv, Mod, Exp,
 };
 
-const ParlistNode = struct
+pub const ParlistNode = struct
 {
     names: std.ArrayList([]u8),
     hasVarargs: bool,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
+
     fn dump(self: *const ParlistNode, indent: usize) void
     {
         std.debug.print("Parlist Node:\n", .{});
@@ -1172,10 +1248,13 @@ const ParlistNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const NormalSuffixNode = struct
+pub const NormalSuffixNode = struct
 {
     firstPart: SuffixexpFirstPart,
     suffixes: std.ArrayList(SuffixexpSuffix),
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
+
     fn dump(self: *const NormalSuffixNode, indent: usize) void
     {
         std.debug.print("Normal Suffix Node:\n", .{});
@@ -1220,7 +1299,7 @@ const NormalSuffixNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const SuffixexpFirstPart = union(enum)
+pub const SuffixexpFirstPart = union(enum)
 {
     Name: []u8,
     BracketedExpr: ExpNode,
@@ -1233,7 +1312,7 @@ fn dumpSuffixExpFirstPart(suffixexpFirstPart: SuffixexpFirstPart, indent: usize)
         .BracketedExpr => |*node| dumpExpNode(node.*, indent),
     }
 }
-const SuffixexpSuffix = union(enum)
+pub const SuffixexpSuffix = union(enum)
 {
     Dot: []u8,
     Indexed: ExpNode,
@@ -1251,10 +1330,13 @@ fn dumpSuffixSuffix(suffixexpSuffix: SuffixexpSuffix, indent: usize) void
     }
 }
 
-const ArgsFirstArgNode = struct
+pub const ArgsFirstArgNode = struct
 {
     name: []u8,
     rest: ArgsNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
+
     fn dump(self: *const ArgsFirstArgNode, indent: usize) void
     {
         std.debug.print("Args First Arg Node:\n", .{});
@@ -1282,9 +1364,12 @@ const ArgsFirstArgNode = struct
     }
 };
 
-const FieldlistNode = struct
+pub const FieldlistNode = struct
 {
     exps: std.ArrayList(FieldNode),
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
+
     fn dump(self: *const FieldlistNode, indent: usize) void
     {
         std.debug.print("Fieldlist Node:\n", .{});
@@ -1323,16 +1408,19 @@ const FieldlistNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const FieldNode = union(enum)
+pub const FieldNode = union(enum)
 {
     IndexedAssignment: IndexedAssignmentNode,
     Assignment: FieldAssignmentNode,
     Exp: ExpNode,
 };
-const FieldAssignmentNode = struct
+pub const FieldAssignmentNode = struct
 {
     lhs: []u8,
     rhs: ExpNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
+
     fn dump(self: *const FieldAssignmentNode, indent: usize) void
     {
         std.debug.print("Field Assignment Node:\n", .{});
@@ -1368,10 +1456,13 @@ fn dumpFieldNode(fieldNode: FieldNode, indent: usize) void
         .Exp => |*node| dumpExpNode(node.*, indent),
     }
 }
-const IndexedAssignmentNode = struct
+pub const IndexedAssignmentNode = struct
 {
     index: ExpNode,
     rhs: ExpNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
+
     fn dump(self: *const IndexedAssignmentNode, indent: usize) void
     {
         std.debug.print("Indexed Assignment Node:\n", .{});
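Note: src/types.zig is not included in this patch. From the new CodeRegion fields above, and from the Location/Point definitions the tokenizer deletes below (start, length, line, col), the new types presumably mirror the old ones one-to-one; an inferred sketch, not the file's actual contents:

// Assumed shape of the types imported from src/types.zig, inferred from
// how the parser and tokenizer read and write them in this patch.
pub const CodeRegion = struct
{
    start: ?CodeLocation,
    length: usize,
};
pub const CodeLocation = struct
{
    line: usize,
    col: usize,
};
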
std.debug.print("{any}: data: {any}, type: {any}\n", .{tokens[i].location, tokens[i].tokenData, tokens[i].tokenType}); + std.debug.print("{any}: data: {any}, type: {any}\n", .{tokens[i].region, tokens[i].tokenData, tokens[i].tokenType}); return err; }; return maybeParsedChunk; @@ -1482,11 +1573,12 @@ const ParserError = error fn parseChunk(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !ChunkNode { - return ChunkNode { .block = try parseBlock(tokens, i, allocator) }; + const block = try parseBlock(tokens, i, allocator); + return ChunkNode { .block = block, .startRegion = block.startRegion, .endRegion = block.endRegion }; } fn parseBlock(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) ParserError!BlockNode { - var ret = BlockNode { .stats = std.ArrayList(StatNode).init(allocator.*.allocator()), .retstat = null }; + var ret = BlockNode { .stats = std.ArrayList(StatNode).init(allocator.*.allocator()), .retstat = null, .startRegion = tokens[i.*].region, .endRegion = tokens[i.*].region }; while(i.* < tokens.len and tokens[i.*].tokenType != TokenType.Return and tokens[i.*].tokenType != TokenType.End and @@ -1500,6 +1592,7 @@ fn parseBlock(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) P { ret.retstat = try parseRetstat(tokens, i, allocator); } + ret.endRegion = if(i.* - 1 < tokens.len) tokens[i.* - 1].region else tokens[tokens.len - 1].region; return ret; } fn parseStat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !StatNode @@ -1529,7 +1622,9 @@ fn parseStat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !S } if(tokens[i.*].tokenType == TokenType.Name) { - return StatNode { .Goto = tokens[i.*].tokenData.string }; + const name = tokens[i.*].tokenData.string; + i.* += 1; + return StatNode { .Goto = name }; } return error.ExpectedNameForGoto; }, @@ -1546,6 +1641,7 @@ fn parseStat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !S }, TokenType.While => { + const startRegion = tokens[i.*].region; i.* += 1; const condition = try parseExp(tokens, i, allocator); if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.Do) @@ -1557,22 +1653,26 @@ fn parseStat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !S { return error.MissingEndForWhileBody; } + const endRegion = tokens[i.*].region; i.* += 1; - return StatNode { .While = WhileNode { .body = body, .condition = condition } }; + return StatNode { .While = WhileNode { .body = body, .condition = condition, .startRegion = startRegion, .endRegion = endRegion } }; }, TokenType.Repeat => { + const startRegion = tokens[i.*].region; i.* += 1; const body = try parseBlock(tokens, i, allocator); if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.Until) { return error.ExpectedUntilAfterRepeatBody; } + const endRegion = tokens[i.*].region; i.* += 1; - return StatNode { .Repeat = RepeatNode { .body = body, .condition = try parseExp(tokens, i, allocator) } }; + return StatNode { .Repeat = RepeatNode { .body = body, .condition = try parseExp(tokens, i, allocator), .startRegion = startRegion, .endRegion = endRegion } }; }, TokenType.If => { + const startRegion = tokens[i.*].region; i.* += 1; const condition = try parseExp(tokens, i, allocator); if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.Then) @@ -1585,7 +1685,7 @@ fn parseStat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !S { return error.ReachedEOFAfterIfBody; } - var ifNode = IfNode { .body = body, .condition = condition, .elseifs = 
std.ArrayList(ElseifNode).init(allocator.*.allocator()), .else_ = null}; + var ifNode = IfNode { .body = body, .condition = condition, .elseifs = std.ArrayList(ElseifNode).init(allocator.*.allocator()), .else_ = null, .startRegion = startRegion, .endRegion = startRegion }; while(tokens[i.*].tokenType == TokenType.Elseif) { i.* += 1; @@ -1594,8 +1694,9 @@ fn parseStat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !S { return error.ExpectedThenAfterElseifCondition; } + const endRegion = tokens[i.*].region; i.* += 1; - try ifNode.elseifs.append(ElseifNode { .body = try parseBlock(tokens, i, allocator), .condition = elseifCondition }); + try ifNode.elseifs.append(ElseifNode { .body = try parseBlock(tokens, i, allocator), .condition = elseifCondition, .startRegion = startRegion, .endRegion = endRegion }); } if(i.* >= tokens.len) { @@ -1610,11 +1711,13 @@ fn parseStat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !S { return error.ExpectedEndClosingIf; } + ifNode.endRegion = tokens[i.*].region; i.* += 1; return StatNode { .If = ifNode }; }, TokenType.For => { + const startRegion = tokens[i.*].region; i.* += 1; if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.Name) { @@ -1654,8 +1757,9 @@ fn parseStat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !S { return error.ExpectedEndAfterForEqBody; } + const endRegion = tokens[i.*].region; i.* += 1; - return StatNode { .ForNumerical = ForNumericalNode { .variable = variable, .start = start, .end = end, .change = change, .body = body } }; + return StatNode { .ForNumerical = ForNumericalNode { .variable = variable, .start = start, .end = end, .change = change, .body = body, .startRegion = startRegion, .endRegion = endRegion } }; }, TokenType.Comma => { @@ -1691,8 +1795,9 @@ fn parseStat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !S { return error.ExpectedEndAfterForInBody; } + const endRegion = tokens[i.*].region; i.* += 1; - return StatNode { .ForGeneric = ForGenericNode { .vars = names, .exps = exps, .body = body } }; + return StatNode { .ForGeneric = ForGenericNode { .vars = names, .exps = exps, .body = body, .startRegion = startRegion, .endRegion = endRegion } }; }, TokenType.In => { @@ -1708,22 +1813,26 @@ fn parseStat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !S { return error.ExpectedEndAfterForInBody; } + const endRegion = tokens[i.*].region; i.* += 1; var names = try std.ArrayList([]u8).initCapacity(allocator.allocator(), 1); try names.insert(0, variable); - return StatNode { .ForGeneric = ForGenericNode { .vars = names, .exps = exps, .body = body } }; + return StatNode { .ForGeneric = ForGenericNode { .vars = names, .exps = exps, .body = body, .startRegion = startRegion, .endRegion = endRegion } }; }, else => return error.UnexpectedTokenAfterFirstNameInFor, } }, TokenType.Function => { + const startRegion = tokens[i.*].region; i.* += 1; const name = try parseFuncname(tokens, i, allocator); - return StatNode { .Function = FunctionNode { .name = name, .body = try parseFuncbody(tokens, i, allocator) } }; + const body = try parseFuncbody(tokens, i, allocator); + return StatNode { .Function = FunctionNode { .name = name, .body = body, .startRegion = startRegion, .endRegion = body.endRegion } }; }, TokenType.Local => { + const startRegion = tokens[i.*].region; i.* += 1; if(i.* >= tokens.len) { @@ -1737,16 +1846,18 @@ fn parseStat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !S return error.ExpectedLocalFunctionName; } 
const name = tokens[i.*].tokenData.string; + const endRegion = tokens[i.*].region; i.* += 1; - return StatNode { .LocalFunction = LocalFunctionNode { .name = name, .body = try parseFuncbody(tokens, i, allocator) } }; + return StatNode { .LocalFunction = LocalFunctionNode { .name = name, .body = try parseFuncbody(tokens, i, allocator), .startRegion = startRegion, .endRegion = endRegion } }; } else { - var ret = LocalNode { .attnames = try parseAttnamelist(tokens, i, allocator), .values = null }; + var ret = LocalNode { .attnames = try parseAttnamelist(tokens, i, allocator), .values = null, .startRegion = startRegion, .endRegion = startRegion }; if(i.* < tokens.len and tokens[i.*].tokenType == TokenType.Equals) { i.* += 1; ret.values = try parseExplist(tokens, i, allocator); + ret.endRegion = ret.values.?.endRegion; } return StatNode { .Local = ret }; } @@ -1769,6 +1880,7 @@ fn parseStat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !S }, TokenType.Name, TokenType.RoundOpen => { + const startRegion = tokens[i.*].region; const suffixExp = try parseSuffixExp(tokens, i, allocator); if(i.* >= tokens.len) { @@ -1784,15 +1896,17 @@ fn parseStat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !S { TokenType.Equals => { + const endRegion = tokens[i.*].region; i.* += 1; var lhs = std.ArrayList(VarNode).init(allocator.allocator()); - try lhs.append(try suffixExpToVar(suffixExp)); - return StatNode { .Assignment = AssignmentNode { .lhs = VarlistNode { .vars = lhs }, .rhs = try parseExplist(tokens, i, allocator) } }; + try lhs.append(try suffixExpToVar(suffixExp, startRegion, endRegion)); + const rhs = try parseExplist(tokens, i, allocator); + return StatNode { .Assignment = AssignmentNode { .lhs = VarlistNode { .vars = lhs, .startRegion = endRegion, .endRegion = tokens[@min(i.*, tokens.len) - 1].region }, .rhs = rhs, .startRegion = startRegion, .endRegion = rhs.endRegion } }; }, TokenType.Comma => { - var varlistNode = VarlistNode { .vars = std.ArrayList(VarNode).init(allocator.allocator()) }; - try varlistNode.vars.append(try suffixExpToVar(suffixExp)); + var varlistNode = VarlistNode { .vars = std.ArrayList(VarNode).init(allocator.allocator()), .startRegion = startRegion, .endRegion = startRegion }; + try varlistNode.vars.append(try suffixExpToVar(suffixExp, startRegion, tokens[@min(i.*, tokens.len) - 1].region)); while(tokens[i.*].tokenType == TokenType.Comma) { i.* += 1; @@ -1802,8 +1916,10 @@ fn parseStat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !S { return error.ExpectedEqAfterAssignmentVarList; } + varlistNode.endRegion = tokens[i.*].region; i.* += 1; - return StatNode { .Assignment = AssignmentNode { .lhs = varlistNode, .rhs = try parseExplist(tokens, i, allocator) } }; + const rhs = try parseExplist(tokens, i, allocator); + return StatNode { .Assignment = AssignmentNode { .lhs = varlistNode, .rhs = rhs, .startRegion = startRegion, .endRegion = rhs.endRegion } }; }, else => { @@ -1829,6 +1945,7 @@ fn parseRetstat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) { return error.ExpectedReturnStartingRetstat; } + const startRegion = tokens[i.*].region; i.* += 1; if(i.* >= tokens.len or tokens[i.*].tokenType == TokenType.Semicolon or tokens[i.*].tokenType == TokenType.Else or tokens[i.*].tokenType == TokenType.Elseif or tokens[i.*].tokenType == TokenType.End) @@ -1837,14 +1954,16 @@ fn parseRetstat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) { i.* += 1; } - return RetstatNode { .values = null }; + 
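Note: every statement branch above follows the same region protocol: capture the opening keyword's region before advancing past it, then stamp the closing token's region just before consuming it. Condensed into one illustrative standalone sketch (hypothetical function mirroring the While branch, not code from this patch):

// Sketch of the region-stamping pattern used by the statement parsers:
// remember where the construct opened, then record where it closed.
fn parseWhileSketch(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !WhileNode
{
    const startRegion = tokens[i.*].region; // at the 'while' keyword
    i.* += 1;
    const condition = try parseExp(tokens, i, allocator);
    i.* += 1; // past 'do' (error checks elided)
    const body = try parseBlock(tokens, i, allocator);
    const endRegion = tokens[i.*].region;   // at the matching 'end'
    i.* += 1;
    return WhileNode { .condition = condition, .body = body, .startRegion = startRegion, .endRegion = endRegion };
}
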
@@ -1829,6 +1945,7 @@ fn parseRetstat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator)
     {
         return error.ExpectedReturnStartingRetstat;
     }
+    const startRegion = tokens[i.*].region;
     i.* += 1;
     if(i.* >= tokens.len or tokens[i.*].tokenType == TokenType.Semicolon or
         tokens[i.*].tokenType == TokenType.Else or tokens[i.*].tokenType == TokenType.Elseif or tokens[i.*].tokenType == TokenType.End)
@@ -1837,14 +1954,16 @@ fn parseRetstat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator)
         {
             i.* += 1;
         }
-        return RetstatNode { .values = null };
+        return RetstatNode { .values = null, .startRegion = startRegion, .endRegion = tokens[@min(i.*, tokens.len) - 1].region };
     }
-    const ret = RetstatNode { .values = try parseExplist(tokens, i, allocator) };
+    const values = try parseExplist(tokens, i, allocator);
+    var endRegion = values.endRegion;
     if(i.* < tokens.len and tokens[i.*].tokenType == TokenType.Semicolon)
     {
+        endRegion = tokens[i.*].region;
         i.* += 1;
     }
-    return ret;
+    return RetstatNode { .values = values, .startRegion = startRegion, .endRegion = endRegion };
 }
 fn parseExp(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) ParserError!ExpNode
 {
@@ -1857,6 +1976,7 @@ fn parseExpPrimary(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocat
     {
         return error.ReachedEOFExpectedPrimaryExpression;
     }
+    const startRegion = tokens[i.*].region;
     switch(tokens[i.*].tokenType)
     {
         TokenType.Nil =>
@@ -1902,28 +2022,32 @@ fn parseExpPrimary(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocat
             i.* += 1;
             const unop = try allocator.allocator().create(ExpNode);
             unop.* = try parseExp(tokens, i, allocator);
-            return ExpNode { .Unop = UnopNode { .unopType = UnopType.Minus, .exp = unop } };
+            const endRegion = tokens[@min(i.*, tokens.len) - 1].region;
+            return ExpNode { .Unop = UnopNode { .unopType = UnopType.Minus, .exp = unop, .startRegion = startRegion, .endRegion = endRegion } };
         },
         TokenType.Hash =>
         {
             i.* += 1;
             const unop = try allocator.allocator().create(ExpNode);
             unop.* = try parseExp(tokens, i, allocator);
-            return ExpNode { .Unop = UnopNode { .unopType = UnopType.Length, .exp = unop } };
+            const endRegion = tokens[@min(i.*, tokens.len) - 1].region;
+            return ExpNode { .Unop = UnopNode { .unopType = UnopType.Length, .exp = unop, .startRegion = startRegion, .endRegion = endRegion } };
         },
         TokenType.Not =>
         {
             i.* += 1;
             const unop = try allocator.allocator().create(ExpNode);
             unop.* = try parseExp(tokens, i, allocator);
-            return ExpNode { .Unop = UnopNode { .unopType = UnopType.LogicalNot, .exp = unop } };
+            const endRegion = tokens[@min(i.*, tokens.len) - 1].region;
+            return ExpNode { .Unop = UnopNode { .unopType = UnopType.LogicalNot, .exp = unop, .startRegion = startRegion, .endRegion = endRegion } };
        },
         TokenType.Tilde =>
         {
             i.* += 1;
             const unop = try allocator.allocator().create(ExpNode);
             unop.* = try parseExp(tokens, i, allocator);
-            return ExpNode { .Unop = UnopNode { .unopType = UnopType.BinaryNot, .exp = unop } };
+            const endRegion = tokens[@min(i.*, tokens.len) - 1].region;
+            return ExpNode { .Unop = UnopNode { .unopType = UnopType.BinaryNot, .exp = unop, .startRegion = startRegion, .endRegion = endRegion } };
         },
         else =>
         {
@@ -1938,6 +2062,7 @@ fn parseExpPrecedence(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllo
     var currentLhs = lhs;
     while(i.* < tokens.len and isBinop(tokens[i.*]))
     {
+        const startRegion = tokens[i.*].region;
         const precedence = try getPrecedence(tokens[i.*]);
         if(precedence < minPrecedence)
         {
@@ -1954,7 +2079,7 @@ fn parseExpPrecedence(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllo
             rhs = try parseExpPrecedence(tokens, i, allocator, rhs, precedence + associativityBoost);
         }
         const binop = try allocator.allocator().create(BinopNode);
-        binop.* = BinopNode { .lhs = currentLhs, .op = op, .rhs = rhs };
+        binop.* = BinopNode { .lhs = currentLhs, .op = op, .rhs = rhs, .startRegion = startRegion, .endRegion = tokens[@min(i.*, tokens.len) - 1].region };
         currentLhs = ExpNode { .Binop = binop };
     }
     return currentLhs;
 }
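Note: parseExpPrecedence is a conventional precedence-climbing loop: it folds operators while their precedence stays at or above minPrecedence, and recurses with a bumped minimum (the associativityBoost) so equal-precedence, left-associative operators group leftwards. A self-contained toy with single-digit operands showing the same control flow (illustration only, not code from this patch):

fn prec(op: u8) u8
{
    return switch(op)
    {
        '+', '-' => 1,
        '*', '/' => 2,
        else => 0,
    };
}
// Evaluates e.g. "1+2*3" to 7: '*' binds tighter, and the +1 bump makes
// equal-precedence operators associate to the left.
fn climb(src: []const u8, i: *usize, minPrecedence: u8) i64
{
    var lhs: i64 = src[i.*] - '0';
    i.* += 1;
    while(i.* < src.len and prec(src[i.*]) >= minPrecedence)
    {
        const op = src[i.*];
        i.* += 1;
        const rhs = climb(src, i, prec(op) + 1);
        lhs = switch(op)
        {
            '+' => lhs + rhs,
            '-' => lhs - rhs,
            '*' => lhs * rhs,
            else => @divTrunc(lhs, rhs),
        };
    }
    return lhs;
}
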
@@ -2022,13 +2147,15 @@ fn isBinop(token: Token) bool
 }
 fn parseExplist(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !ExplistNode
 {
-    var ret = ExplistNode { .exps = std.ArrayList(ExpNode).init(allocator.allocator()) };
+    const startRegion = tokens[@min(i.*, tokens.len) - 1].region;
+    var ret = ExplistNode { .exps = std.ArrayList(ExpNode).init(allocator.allocator()), .startRegion = startRegion, .endRegion = startRegion };
     try ret.exps.append(try parseExp(tokens, i, allocator));
     while(i.* < tokens.len and tokens[i.*].tokenType == TokenType.Comma)
     {
         i.* += 1;
         try ret.exps.append(try parseExp(tokens, i, allocator));
     }
+    ret.endRegion = tokens[@min(i.*, tokens.len) - 1].region;
     return ret;
 }
 fn parseFuncname(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !FuncnameNode
@@ -2037,7 +2164,8 @@ fn parseFuncname(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator
     {
         return error.ExpectedFuncname;
     }
-    var ret = FuncnameNode { .name = tokens[i.*].tokenData.string, .dottedNames = std.ArrayList([]u8).init(allocator.allocator()), .firstArg = null };
+    const startRegion = tokens[i.*].region;
+    var ret = FuncnameNode { .name = tokens[i.*].tokenData.string, .dottedNames = std.ArrayList([]u8).init(allocator.allocator()), .firstArg = null, .startRegion = startRegion, .endRegion = startRegion };
     i.* += 1;
     while(i.* < tokens.len and tokens[i.*].tokenType == TokenType.Dot)
     {
@@ -2056,7 +2184,9 @@ fn parseFuncname(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator
             return error.ExpectedNameOfFirstArgInFuncname;
         }
         ret.firstArg = tokens[i.*].tokenData.string;
+        i.* += 1;
     }
+    ret.endRegion = tokens[@min(i.*, tokens.len) - 1].region;
     return ret;
 }
 fn parseFuncbody(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !FuncbodyNode
@@ -2065,6 +2195,7 @@ fn parseFuncbody(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator
     {
         return error.ExpectedRoundOpenStartingFuncbody;
     }
+    const startRegion = tokens[i.*].region;
     i.* += 1;
     if(i.* >= tokens.len)
     {
@@ -2084,7 +2215,7 @@ fn parseFuncbody(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator
         }
         i.* += 1;
     }
-    const ret = FuncbodyNode { .body = try parseBlock(tokens, i, allocator), .pars = pars };
+    const ret = FuncbodyNode { .body = try parseBlock(tokens, i, allocator), .pars = pars, .startRegion = startRegion, .endRegion = tokens[i.*].region };
     if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.End)
     {
         return error.ExpectedEndClosingFuncbody;
     }
@@ -2098,16 +2229,18 @@ fn parseParlist(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator)
     {
         return error.ReachedEOFInParlist;
     }
+    const startRegion = tokens[i.*].region;
     if(tokens[i.*].tokenType == TokenType.DotDotDot)
     {
+        const endRegion = tokens[i.*].region;
         i.* += 1;
-        return ParlistNode { .names = std.ArrayList([]u8).init(allocator.allocator()), .hasVarargs = true };
+        return ParlistNode { .names = std.ArrayList([]u8).init(allocator.allocator()), .hasVarargs = true, .startRegion = startRegion, .endRegion = endRegion };
     }
     if(tokens[i.*].tokenType != TokenType.Name)
     {
         return error.ExpectedNameStartingParlist;
     }
-    var ret = ParlistNode { .names = std.ArrayList([]u8).init(allocator.allocator()), .hasVarargs = false };
+    var ret = ParlistNode { .names = std.ArrayList([]u8).init(allocator.allocator()), .hasVarargs = false, .startRegion = startRegion, .endRegion = startRegion };
     try ret.names.append(tokens[i.*].tokenData.string);
     i.* += 1;
     while(i.* < tokens.len and tokens[i.*].tokenType == TokenType.Comma)
@@ -2133,17 +2266,20 @@ fn parseParlist(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator)
             else => return error.UnexpectedTokenInParlistNameList,
         }
     }
+    ret.endRegion = tokens[@min(i.*, tokens.len) - 1].region;
     return ret;
 }
 fn parseAttnamelist(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !AttnamelistNode
 {
-    var ret = AttnamelistNode { .attnames = std.ArrayList(AttnameNode).init(allocator.allocator()) };
+    // TODO: What happens if this reaches EOF?
+    var ret = AttnamelistNode { .attnames = std.ArrayList(AttnameNode).init(allocator.allocator()), .startRegion = tokens[i.*].region, .endRegion = tokens[i.*].region };
     try ret.attnames.append(try parseAttname(tokens, i));
     while(i.* < tokens.len and tokens[i.*].tokenType == TokenType.Comma)
     {
         i.* += 1;
         try ret.attnames.append(try parseAttname(tokens, i));
     }
+    ret.endRegion = tokens[@min(i.*, tokens.len) - 1].region;
     return ret;
 }
 fn parseSuffixExp(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !SuffixexpNode
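Note: the expression tokens[@min(i.*, tokens.len) - 1].region recurs throughout as the end-region stamp. Written out as a hypothetical helper (assuming the parser's Token and CodeRegion imports), the intent is:

// Hypothetical helper equivalent to the inline idiom above: clamp the
// cursor into the token slice, then step back one, so the region of the
// last consumed token is used even if `i` has run past the end of input.
fn lastConsumedRegion(tokens: []Token, i: usize) CodeRegion
{
    return tokens[@min(i, tokens.len) - 1].region;
}
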
@@ -2153,6 +2289,7 @@ fn parseSuffixExp(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocato
     {
         return error.ReachedEOFInSuffixExp;
     }
+    const startRegion = tokens[i.*].region;
     const firstPart = try switch(tokens[i.*].tokenType)
     {
         TokenType.Name =>
@@ -2202,14 +2339,16 @@ fn parseSuffixExp(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocato
             },
             TokenType.Colon =>
             {
+                const argsFirstArgStartRegion = tokens[i.*].region;
                 i.* += 1;
                 if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.Name)
                 {
                     return error.ExpectedNameInArgsFirstArgSuffixExp;
                 }
                 const name = tokens[i.*].tokenData.string;
+                const argsFirstArgEndRegion = tokens[i.*].region;
                 i.* += 1;
-                try suffixes.append(SuffixexpSuffix { .ArgsFirstArg = ArgsFirstArgNode { .name = name, .rest = try parseArgs(tokens, i, allocator) } });
+                try suffixes.append(SuffixexpSuffix { .ArgsFirstArg = ArgsFirstArgNode { .name = name, .rest = try parseArgs(tokens, i, allocator), .startRegion = argsFirstArgStartRegion, .endRegion = argsFirstArgEndRegion } });
             },
             TokenType.RoundOpen, TokenType.CurlyOpen, TokenType.StringLiteral =>
             {
@@ -2218,6 +2357,7 @@ fn parseSuffixExp(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocato
             },
             else => break,
         }
     }
+    const endRegion = tokens[@min(i.*, tokens.len) - 1].region;
     const last = suffixes.getLastOrNull();
     if(last != null)
     {
@@ -2229,9 +2369,11 @@ fn parseSuffixExp(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocato
                 const functioncall = try allocator.allocator().create(FunctioncallNode);
                 functioncall.* = FunctioncallNode
                 {
-                    .function = SuffixexpNode { .Normal = NormalSuffixNode { .firstPart = firstPart, .suffixes = suffixes } },
+                    .function = SuffixexpNode { .Normal = NormalSuffixNode { .firstPart = firstPart, .suffixes = suffixes, .startRegion = startRegion, .endRegion = endRegion } },
                     .args = args.*,
                     .objectArg = null,
+                    .startRegion = startRegion,
+                    .endRegion = endRegion,
                 };
                 return SuffixexpNode
                 {
@@ -2244,9 +2386,11 @@ fn parseSuffixExp(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocato
                 const functioncall = try allocator.allocator().create(FunctioncallNode);
                 functioncall.* = FunctioncallNode
                 {
-                    .function = SuffixexpNode { .Normal = NormalSuffixNode { .firstPart = firstPart, .suffixes = suffixes } },
+                    .function = SuffixexpNode { .Normal = NormalSuffixNode { .firstPart = firstPart, .suffixes = suffixes, .startRegion = startRegion, .endRegion = endRegion } },
                     .args = node.rest,
                     .objectArg = node.name,
+                    .startRegion = startRegion,
+                    .endRegion = endRegion,
                 };
                 return SuffixexpNode
                 {
@@ -2256,11 +2400,12 @@ fn parseSuffixExp(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocato
             else => {}
         }
     }
-    return SuffixexpNode { .Normal = NormalSuffixNode { .firstPart = firstPart, .suffixes = suffixes } };
+    return SuffixexpNode { .Normal = NormalSuffixNode { .firstPart = firstPart, .suffixes = suffixes, .startRegion = startRegion, .endRegion = endRegion } };
 }
 fn parseVar(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !VarNode
 {
-    return suffixExpToVar(try parseSuffixExp(tokens, i, allocator));
+    const startRegion = tokens[i.*].region;
+    return suffixExpToVar(try parseSuffixExp(tokens, i, allocator), startRegion, tokens[@min(i.*, tokens.len) - 1].region);
 }
 fn parseAttname(tokens: []Token, i: *usize) !AttnameNode
 {
@@ -2269,8 +2414,9 @@ fn parseAttname(tokens: []Token, i: *usize) !AttnameNode
         return error.ExpectednameInAttribName;
     }
     const name = tokens[i.*].tokenData.string;
+    const startRegion = tokens[i.*].region;
     i.* += 1;
-    var ret = AttnameNode { .name = name, .attribute = null };
+    var ret = AttnameNode { .name = name, .attribute = null, .startRegion = startRegion, .endRegion = startRegion };
     if(i.* < tokens.len and tokens[i.*].tokenType == TokenType.Lt)
     {
         ret.attribute = tokens[i.*].tokenData.string;
@@ -2279,6 +2425,7 @@ fn parseAttname(tokens: []Token, i: *usize) !AttnameNode
         {
             return error.ExpectedAttributeInAttrib;
         }
+        ret.endRegion = tokens[i.*].region;
         i.* += 1;
         if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.Gt)
         {
@@ -2320,7 +2467,7 @@ fn parseArgs(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !A
         {
             const value = tokens[i.*].tokenData.string;
             i.* += 1;
-            return ArgsNode { .Literal = value};
+            return ArgsNode { .Literal = value };
         },
         else => return error.UnexpectedTokenInArgs,
     }
@@ -2331,23 +2478,27 @@ fn parseTableconstructor(tokens: []Token, i: *usize, allocator: *std.heap.ArenaA
     {
         return error.ExpectedCurlyOpenOpeningTableconstructor;
     }
+    const startRegion = tokens[i.*].region;
     i.* += 1;
     if(i.* < tokens.len and tokens[i.*].tokenType == TokenType.CurlyClosed)
     {
+        const endRegion = tokens[i.*].region;
         i.* += 1;
-        return TableconstructorNode { .exps = null };
+        return TableconstructorNode { .exps = null, .startRegion = startRegion, .endRegion = endRegion };
     }
-    const ret = TableconstructorNode { .exps = try parseFieldlist(tokens, i, allocator) };
+    var ret = TableconstructorNode { .exps = try parseFieldlist(tokens, i, allocator), .startRegion = startRegion, .endRegion = startRegion };
     if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.CurlyClosed)
     {
         return error.ExpectedCurlyClosedClosingTableconstructor;
     }
+    ret.endRegion = tokens[i.*].region;
     i.* += 1;
     return ret;
 }
 fn parseFieldlist(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !FieldlistNode
 {
-    var ret = FieldlistNode { .exps = std.ArrayList(FieldNode).init(allocator.allocator()) };
+    const startRegion = tokens[@min(i.*, tokens.len) - 1].region;
+    var ret = FieldlistNode { .exps = std.ArrayList(FieldNode).init(allocator.allocator()), .startRegion = startRegion, .endRegion = startRegion };
     try ret.exps.append(try parseField(tokens, i, allocator));
     while(i.* < tokens.len and isFieldsep(tokens[i.*]))
@@ -2358,6 +2509,7 @@ fn parseFieldlist(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocato
     {
         i.* += 1;
     }
+    ret.endRegion = tokens[@min(i.*, tokens.len) - 1].region;
     return ret;
 }
 fn isFieldsep(token: Token) bool
@@ -2370,6 +2522,7 @@ fn parseField(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !
     {
         return error.ReachedEOFInField;
     }
+    const startRegion = tokens[i.*].region;
     switch(tokens[i.*].tokenType)
     {
         TokenType.SquareOpen =>
@@ -2385,8 +2538,9 @@ fn parseField(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !
             {
                 return error.ExpectedEqualsInIndexedFieldExpression;
             }
+            const endRegion = tokens[i.*].region;
             i.* += 1;
-            return FieldNode { .IndexedAssignment = IndexedAssignmentNode { .index = index, .rhs = try parseExp(tokens, i, allocator) } };
+            return FieldNode { .IndexedAssignment = IndexedAssignmentNode { .index = index, .rhs = try parseExp(tokens, i, allocator), .startRegion = startRegion, .endRegion = endRegion } };
         },
         TokenType.Name =>
         {
@@ -2394,14 +2548,14 @@ fn parseField(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !
             {
                 const name = tokens[i.*].tokenData.string;
                 i.* += 2;
-                return FieldNode { .Assignment = FieldAssignmentNode { .lhs = name, .rhs = try parseExp(tokens, i, allocator) } };
+                return FieldNode { .Assignment = FieldAssignmentNode { .lhs = name, .rhs = try parseExp(tokens, i, allocator), .startRegion = startRegion, .endRegion = tokens[i.* - 1].region } };
             }
             return FieldNode { .Exp = try parseExp(tokens, i, allocator) };
         },
         else => return FieldNode { .Exp = try parseExp(tokens, i, allocator) },
     }
 }
-fn suffixExpToVar(suffixexp: SuffixexpNode) !VarNode
+fn suffixExpToVar(suffixexp: SuffixexpNode, startRegion: CodeRegion, endRegion: CodeRegion) !VarNode
 {
     var exp = suffixexp.Normal;
     if(exp.suffixes.items.len == 0)
     {
@@ -2411,8 +2565,8 @@ fn suffixExpToVar(suffixexp: SuffixexpNode) !VarNode
     const last = exp.suffixes.pop();
     return switch(last)
     {
-        SuffixexpSuffix.Dot => |*name| VarNode { .Member = MemberVarNode { .name = name.*, .value = SuffixexpNode { .Normal = exp } } },
-        SuffixexpSuffix.Indexed => |*index| VarNode { .Indexed = IndexedVarNode { .index = index.*, .value = SuffixexpNode { .Normal = exp } } },
+        SuffixexpSuffix.Dot => |*name| VarNode { .Member = MemberVarNode { .name = name.*, .value = SuffixexpNode { .Normal = exp }, .startRegion = startRegion, .endRegion = endRegion } },
+        SuffixexpSuffix.Indexed => |*index| VarNode { .Indexed = IndexedVarNode { .index = index.*, .value = SuffixexpNode { .Normal = exp }, .startRegion = startRegion, .endRegion = endRegion } },
        else => error.ExpectedDotOrIndexedSuffixWhenConvertingSuffixExpToVar,
     };
 }
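Note: with tokens and AST nodes now carrying regions, diagnostics can point at source coordinates; the patch itself only prints the region with {any} in parse's error path. A hypothetical printer building on the CodeRegion shape assumed earlier:

// Hypothetical diagnostic helper, not part of the patch; assumes the
// CodeRegion/CodeLocation layout sketched above.
fn reportAt(region: CodeRegion, comptime msg: []const u8) void
{
    if(region.start) |start|
    {
        std.debug.print("{d}:{d}: " ++ msg ++ "\n", .{ start.line, start.col });
    }
    else
    {
        std.debug.print("?:?: " ++ msg ++ "\n", .{});
    }
}
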
diff --git a/src/tokenizer.zig b/src/tokenizer.zig
index af41427..a726091 100644
--- a/src/tokenizer.zig
+++ b/src/tokenizer.zig
@@ -1,5 +1,7 @@
 const types = @import("types.zig");
 const std = @import("std");
+const CodeRegion = @import("types.zig").CodeRegion;
+const CodeLocation = @import("types.zig").CodeLocation;
 
 pub const TokenType = enum
 {
@@ -29,18 +31,7 @@ pub const Token = struct
 {
     tokenType: TokenType,
     tokenData: TokenData,
-    location: Location,
-};
-
-pub const Location = struct
-{
-    start: ?Point,
-    length: usize,
-};
-pub const Point = struct
-{
-    line: usize,
-    col: usize,
+    region: CodeRegion,
 };
 const TokenizerState = enum
 {
@@ -72,38 +63,38 @@ const TokenizerState = enum
     Function,
 };
 
-fn tokenizeUpdateIndexAndState(lastIndex: *?usize, index: ?usize, state: *TokenizerState, newState: TokenizerState, location: *Location) void
+fn tokenizeUpdateIndexAndState(lastIndex: *?usize, index: ?usize, state: *TokenizerState, newState: TokenizerState, region: *CodeRegion) void
 {
     lastIndex.* = index;
     state.* = newState;
     if(index == null)
     {
-        location.*.start = null;
-        location.*.length = 0;
+        region.*.start = null;
+        region.*.length = 0;
     }
     else
     {
-        if(location.*.start == null)
+        if(region.*.start == null)
        {
             // TODO: There is no line/col info here and plumbing it to here would be pain.
-            location.*.start = Point { .col = 0, .line = 0 };
+            region.*.start = CodeLocation { .col = 0, .line = 0 };
         }
-        location.*.length += 1;
+        region.*.length += 1;
     }
 }
-fn tokenizeTerminalBase(lastIndex: *?usize, index: ?usize, tokenType: *?TokenType, state: *TokenizerState, newTokenType: ?TokenType, newState: TokenizerState, location: *Location) void
+fn tokenizeTerminalBase(lastIndex: *?usize, index: ?usize, tokenType: *?TokenType, state: *TokenizerState, newTokenType: ?TokenType, newState: TokenizerState, region: *CodeRegion) void
 {
-    tokenizeUpdateIndexAndState(lastIndex, index, state, newState, location);
+    tokenizeUpdateIndexAndState(lastIndex, index, state, newState, region);
     tokenType.* = newTokenType;
 }
-fn tokenizeTerminalStr(lastIndex: *?usize, index: usize, tokenType: *?TokenType, state: *TokenizerState, newTokenType: ?TokenType, newState: TokenizerState, tokenStr: *std.ArrayList(u8), ch: u8, location: *Location) !void
+fn tokenizeTerminalStr(lastIndex: *?usize, index: usize, tokenType: *?TokenType, state: *TokenizerState, newTokenType: ?TokenType, newState: TokenizerState, tokenStr: *std.ArrayList(u8), ch: u8, region: *CodeRegion) !void
 {
-    tokenizeTerminalBase(lastIndex, index, tokenType, state, newTokenType, newState, location);
+    tokenizeTerminalBase(lastIndex, index, tokenType, state, newTokenType, newState, region);
     try tokenStr.append(ch);
 }
-fn tokenizeTerminalIntNum(lastIndex: *?usize, index: usize, tokenType: *?TokenType, state: *TokenizerState, newTokenType: TokenType, newState: TokenizerState, tokenNumeral: *?types.Numeral, ch: u8, location: *Location) !void
+fn tokenizeTerminalIntNum(lastIndex: *?usize, index: usize, tokenType: *?TokenType, state: *TokenizerState, newTokenType: TokenType, newState: TokenizerState, tokenNumeral: *?types.Numeral, ch: u8, region: *CodeRegion) !void
 {
-    tokenizeTerminalBase(lastIndex, index, tokenType, state, newTokenType, newState, location);
+    tokenizeTerminalBase(lastIndex, index, tokenType, state, newTokenType, newState, region);
     if(!std.ascii.isDigit(ch))
     {
         return error.NoDigit;
     }
@@ -122,16 +113,16 @@ fn tokenizeTerminalIntNum(lastIndex: *?usize, index: usize, tokenType: *?TokenTy
         }
     }
 }
-fn tokenizeTerminalNoToken(lastIndex: *?usize, index: usize, state: *TokenizerState, newState: TokenizerState, tokenStr: *std.ArrayList(u8), ch: u8, location: *Location) !void
+fn tokenizeTerminalNoToken(lastIndex: *?usize, index: usize, state: *TokenizerState, newState: TokenizerState, tokenStr: *std.ArrayList(u8), ch: u8, region: *CodeRegion) !void
 {
-    tokenizeUpdateIndexAndState(lastIndex, index, state, newState, location);
+    tokenizeUpdateIndexAndState(lastIndex, index, state, newState, region);
     try tokenStr.*.append(ch);
 }
-fn tokenizeBacktrack(lastIndex: *?usize, index: *usize, tokens: *std.ArrayList(Token), tokenType: *?TokenType, tokenStr: *std.ArrayList(u8), tokenNumeral: *?types.Numeral, state: *TokenizerState, allocator: std.mem.Allocator, location: *Location) !void
+fn tokenizeBacktrack(lastIndex: *?usize, index: *usize, tokens: *std.ArrayList(Token), tokenType: *?TokenType, tokenStr: *std.ArrayList(u8), tokenNumeral: *?types.Numeral, state: *TokenizerState, allocator: std.mem.Allocator, region: *CodeRegion) !void
 {
-    try tokenizeBacktrackCustomToken(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, tokenType.*.?, allocator, location);
+    try tokenizeBacktrackCustomToken(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, tokenType.*.?, allocator, region);
 }
-fn tokenizeBacktrackCustomToken(lastIndex: *?usize, index: *usize, tokens: *std.ArrayList(Token), tokenType: *?TokenType, tokenStr: *std.ArrayList(u8), tokenNumeral: *?types.Numeral, state: *TokenizerState, newTokenType: TokenType, allocator: std.mem.Allocator, location: *Location) !void
+fn tokenizeBacktrackCustomToken(lastIndex: *?usize, index: *usize, tokens: *std.ArrayList(Token), tokenType: *?TokenType, tokenStr: *std.ArrayList(u8), tokenNumeral: *?types.Numeral, state: *TokenizerState, newTokenType: TokenType, allocator: std.mem.Allocator, region: *CodeRegion) !void
 {
     if(lastIndex.* == null or tokenType.* == null)
     {
@@ -141,32 +132,32 @@ fn tokenizeBacktrackCustomToken(lastIndex: *?usize, index: *usize, tokens: *std.
     {
         const content = try allocator.alloc(u8, tokenStr.*.items.len);
         @memcpy(content, tokenStr.*.items);
-        try tokens.append(Token { .tokenType = newTokenType, .tokenData = TokenData { .string = content }, .location = location.* });
+        try tokens.append(Token { .tokenType = newTokenType, .tokenData = TokenData { .string = content }, .region = region.* });
     }
     else
     {
-        try tokens.append(Token { .tokenType = newTokenType, .location = location.*, .tokenData = if(tokenType.*.? == TokenType.Numeral) TokenData { .numeral = tokenNumeral.*.? }
+        try tokens.append(Token { .tokenType = newTokenType, .region = region.*, .tokenData = if(tokenType.*.? == TokenType.Numeral) TokenData { .numeral = tokenNumeral.*.? }
             else TokenData.none });
     }
     tokenNumeral.* = null;
     index.* = lastIndex.*.?;
     tokenStr.*.clearAndFree();
-    // location is reset in tokenizeTerminalBase since null is passed as index
-    tokenizeTerminalBase(lastIndex, null, tokenType, state, null, TokenizerState.Start, location);
+    // region is reset in tokenizeTerminalBase since null is passed as index
+    tokenizeTerminalBase(lastIndex, null, tokenType, state, null, TokenizerState.Start, region);
 }
-fn tokenizeAlphanumericNonstart(lastIndex: *?usize, index: *usize, tokens: *std.ArrayList(Token), tokenType: *?TokenType, tokenStr: *std.ArrayList(u8), tokenNumeral: *?types.Numeral, state: *TokenizerState, ch: u8, newTokenType: TokenType, allocator: std.mem.Allocator, location: *Location) !void
+fn tokenizeAlphanumericNonstart(lastIndex: *?usize, index: *usize, tokens: *std.ArrayList(Token), tokenType: *?TokenType, tokenStr: *std.ArrayList(u8), tokenNumeral: *?types.Numeral, state: *TokenizerState, ch: u8, newTokenType: TokenType, allocator: std.mem.Allocator, region: *CodeRegion) !void
 {
     if(std.ascii.isAlphanumeric(ch) or ch == '_')
     {
-        try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.Name, tokenStr, ch, location);
+        try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.Name, tokenStr, ch, region);
     }
     else
     {
-        try tokenizeBacktrackCustomToken(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, newTokenType, allocator, location);
+        try tokenizeBacktrackCustomToken(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, newTokenType, allocator, region);
     }
 }
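Note: the tokenize entry point itself is untouched by this patch, so the driver below is only a guess at its shape, inferred from tokenizeChar's new signature: one mutable CodeRegion threaded through the state machine and reset by tokenizeTerminalBase whenever a token is emitted. A hypothetical sketch, not the project's actual code:

// Hypothetical driver loop; parameter order matches the tokenizeChar
// signature introduced below, everything else is assumption.
pub fn tokenize(source: []const u8, allocator: std.mem.Allocator) ![]Token
{
    var tokens = std.ArrayList(Token).init(allocator);
    var state = TokenizerState.Start;
    var lastIndex: ?usize = null;
    var index: usize = 0;
    var tokenType: ?TokenType = null;
    var tokenStr = std.ArrayList(u8).init(allocator);
    defer tokenStr.deinit();
    var tokenNumeral: ?types.Numeral = null;
    var longBracketLevel: u32 = 0;
    var region = CodeRegion { .start = null, .length = 0 };
    while(index < source.len)
    {
        try tokenizeChar(&state, source[index], &lastIndex, &index, &tokenType, &tokenStr, &tokenNumeral, &tokens, &longBracketLevel, &region, allocator);
        index += 1;
    }
    return tokens.toOwnedSlice();
}
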
-fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usize, tokenType: *?TokenType, tokenStr: *std.ArrayList(u8), tokenNumeral: *?types.Numeral, tokens: *std.ArrayList(Token), longBracketLevel: *u32, location: *Location, allocator: std.mem.Allocator) !void
+fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usize, tokenType: *?TokenType, tokenStr: *std.ArrayList(u8), tokenNumeral: *?types.Numeral, tokens: *std.ArrayList(Token), longBracketLevel: *u32, region: *CodeRegion, allocator: std.mem.Allocator) !void
 {
     switch(state.*)
     {
@@ -174,44 +165,44 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
         {
             switch(ch)
             {
-                '-' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Minus, TokenizerState.Minus, location),
-                ',' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Comma, TokenizerState.Comma, location),
-                '=' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Equals, TokenizerState.Equals, location),
-                '(' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.RoundOpen, TokenizerState.RoundOpen, location),
-                ')' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.RoundClosed, TokenizerState.RoundClosed, location),
-                '.' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Dot, TokenizerState.Dot, location),
-                ':' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Colon, TokenizerState.Colon, location),
-                '{' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.CurlyOpen, TokenizerState.CurlyOpen, location),
-                '}' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.CurlyClosed, TokenizerState.CurlyClosed, location),
-                '[' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.SquareOpen, TokenizerState.SquareOpen, location),
-                ']' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.SquareClosed, TokenizerState.SquareClosed, location),
-                '+' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Plus, TokenizerState.Plus, location),
-                '~' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Tilde, TokenizerState.Tilde, location),
-                '>' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Gt, TokenizerState.Gt, location),
-                '<' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Lt, TokenizerState.Lt, location),
-                '#' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Hash, TokenizerState.Hash, location),
-                '|' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Pipe, TokenizerState.Pipe, location),
-                '&' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Ampersand, TokenizerState.Ampersand, location),
-                '%' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Percent, TokenizerState.Percent, location),
-                '*' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Star, TokenizerState.Star, location),
-                '/' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Slash, TokenizerState.Slash, location),
-                ';' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Semicolon, TokenizerState.Semicolon, location),
-                '^' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Caret, TokenizerState.Caret, location),
-                'a' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.A, tokenStr, ch, location),
-                'b' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.B, tokenStr, ch, location),
-                'd' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.D, tokenStr, ch, location),
-                'e' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.E, tokenStr, ch, location),
-                'f' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.F, tokenStr, ch, location),
-                'i' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.I, tokenStr, ch, location),
-                'g' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.G, tokenStr, ch, location),
-                'l' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.L, tokenStr, ch, location),
-                'n' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.N, tokenStr, ch, location),
-                'o' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.O, tokenStr, ch, location),
-                'r' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.R, tokenStr, ch, location),
-                't' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.T, tokenStr, ch, location),
-                'u' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.U, tokenStr, ch, location),
-                'w' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.W, tokenStr, ch, location),
-                '0' => try tokenizeTerminalIntNum(lastIndex, index.*, tokenType, state, TokenType.Numeral, TokenizerState.Zero, tokenNumeral, ch, location),
+                '-' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Minus, TokenizerState.Minus, region),
+                ',' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Comma, TokenizerState.Comma, region),
+                '=' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Equals, TokenizerState.Equals, region),
+                '(' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.RoundOpen, TokenizerState.RoundOpen, region),
+                ')' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.RoundClosed, TokenizerState.RoundClosed, region),
+                '.' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Dot, TokenizerState.Dot, region),
+                ':' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Colon, TokenizerState.Colon, region),
+                '{' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.CurlyOpen, TokenizerState.CurlyOpen, region),
+                '}' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.CurlyClosed, TokenizerState.CurlyClosed, region),
+                '[' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.SquareOpen, TokenizerState.SquareOpen, region),
+                ']' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.SquareClosed, TokenizerState.SquareClosed, region),
+                '+' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Plus, TokenizerState.Plus, region),
+                '~' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Tilde, TokenizerState.Tilde, region),
+                '>' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Gt, TokenizerState.Gt, region),
+                '<' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Lt, TokenizerState.Lt, region),
+                '#' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Hash, TokenizerState.Hash, region),
+                '|' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Pipe, TokenizerState.Pipe, region),
+                '&' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Ampersand, TokenizerState.Ampersand, region),
+                '%' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Percent, TokenizerState.Percent, region),
+                '*' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Star, TokenizerState.Star, region),
+                '/' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Slash, TokenizerState.Slash, region),
+                ';' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Semicolon, TokenizerState.Semicolon, region),
+                '^' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Caret, TokenizerState.Caret, region),
+                'a' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.A, tokenStr, ch, region),
+                'b' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.B, tokenStr, ch, region),
+                'd' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.D, tokenStr, ch, region),
+                'e' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.E, tokenStr, ch, region),
+                'f' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.F, tokenStr, ch, region),
+                'i' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.I, tokenStr, ch, region),
+                'g' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.G, tokenStr, ch, region),
+                'l' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.L, tokenStr, ch, region),
+                'n' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.N, tokenStr, ch, region),
+                'o' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.O, tokenStr, ch, region),
+                'r' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.R, tokenStr, ch, region),
+                't' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.T, tokenStr, ch, region),
+                'u' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.U, tokenStr, ch, region),
+                'w' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.W, tokenStr, ch, region),
+                '0' => try tokenizeTerminalIntNum(lastIndex, index.*, tokenType, state, TokenType.Numeral, TokenizerState.Zero, tokenNumeral, ch, region),
                 '"' =>
                 {
                     tokenType.* = null;
@@ -230,11 +221,11 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
             }
             else if(std.ascii.isAlphabetic(ch) or ch == '_')
             {
-                try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.Name, tokenStr, ch, location);
+                try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.Name, tokenStr, ch, region);
             }
             else if(std.ascii.isDigit(ch))
             {
-                try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Numeral, TokenizerState.Name, tokenStr, ch, location);
+                try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Numeral, TokenizerState.Name, tokenStr, ch, region);
             }
             else
             {
@@ -249,7 +240,7 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
             switch(ch)
             {
                 '\\' => state.* = TokenizerState.QuoteBackslash,
-                '"' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.StringLiteral, TokenizerState.String, location),
+                '"' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.StringLiteral, TokenizerState.String, region),
                 else => try tokenStr.*.append(ch),
             }
         },
@@ -319,7 +310,7 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
             switch(ch)
             {
                 '\\' => state.* = TokenizerState.QuoteBackslash,
-                '"' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.StringLiteral, TokenizerState.String, location),
+                '"' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.StringLiteral, TokenizerState.String, region),
                 else =>
                 {
                     if(!std.ascii.isWhitespace(ch))
@@ -340,7 +331,7 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
             switch(ch)
             {
                 '\\' => state.* = TokenizerState.SingleQuoteBackslash,
-                '\'' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.StringLiteral, TokenizerState.String, location),
+                '\'' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.StringLiteral, TokenizerState.String, region),
                 else => try tokenStr.append(ch),
             }
         },
@@ -410,7 +401,7 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
             switch(ch)
             {
                 '\\' => state.* = TokenizerState.SingleQuoteBackslash,
-                '\'' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.StringLiteral, TokenizerState.String, location),
+                '\'' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.StringLiteral, TokenizerState.String, region),
                 else =>
                 {
                     if(!std.ascii.isWhitespace(ch))
@@ -426,8 +417,8 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
                 }
             }
         },
-        TokenizerState.String => try tokenizeBacktrackCustomToken(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, TokenType.StringLiteral, allocator, location),
-        TokenizerState.Name => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
+        TokenizerState.String => try tokenizeBacktrackCustomToken(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, TokenType.StringLiteral, allocator, region),
+        TokenizerState.Name => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
         TokenizerState.Zero =>
         {
             switch(ch)
@@ -457,7 +448,7 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
             }
             else
             {
-                try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location);
+                try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, region);
             }
         }
     }
@@ -479,7 +470,7 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
             }
             else
             {
-                try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location);
+                try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, region);
             }
         },
         TokenizerState.HexNumber =>
@@ -509,7 +500,7 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
             }
             else
            {
-                try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location);
+                try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, region);
             }
         }
     }
@@ -543,7 +534,7 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
             }
            else
             {
-                try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location);
+                try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, region);
             }
         }
 
@@ -556,79 +547,79 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
             TokenizerState.SquareClosed, TokenizerState.Pipe, TokenizerState.Ampersand, TokenizerState.Percent,
             TokenizerState.Star, TokenizerState.Semicolon, TokenizerState.Caret, TokenizerState.DotDotDot, TokenizerState.GtGt,
-            TokenizerState.LtLt, TokenizerState.SlashSlash => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location),
+            TokenizerState.LtLt, TokenizerState.SlashSlash => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, region),
         TokenizerState.Tilde =>
         {
             switch(ch)
             {
-                '=' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.TildeEquals, TokenizerState.TildeEquals, location),
-                else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location),
+                '=' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.TildeEquals, TokenizerState.TildeEquals, region),
+                else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, region),
             }
         },
         TokenizerState.Gt =>
         {
             switch (ch)
             {
-                '>' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.GtGt, TokenizerState.GtGt, location),
-                '=' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.GtEquals, TokenizerState.GtEquals, location),
-                else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location),
+                '>' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.GtGt, TokenizerState.GtGt, region),
+                '=' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state,
TokenType.GtEquals, TokenizerState.GtEquals, region), + else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, region), } }, TokenizerState.Lt => { switch(ch) { - '<' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.LtLt, TokenizerState.LtLt, location), - '=' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.LtEquals, TokenizerState.LtEquals, location), - else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location), + '<' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.LtLt, TokenizerState.LtLt, region), + '=' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.LtEquals, TokenizerState.LtEquals, region), + else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, region), } }, TokenizerState.Slash => { switch(ch) { - '/' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.SlashSlash, TokenizerState.SlashSlash, location), - else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location), + '/' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.SlashSlash, TokenizerState.SlashSlash, region), + else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, region), } }, TokenizerState.Dot => { switch(ch) { - '.' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.DotDot, TokenizerState.DotDot, location), - else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location), + '.' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.DotDot, TokenizerState.DotDot, region), + else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, region), } }, TokenizerState.DotDot => { switch(ch) { - '.' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.DotDotDot, TokenizerState.DotDotDot, location), - else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location), + '.' 
=> tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.DotDotDot, TokenizerState.DotDotDot, region), + else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, region), } }, TokenizerState.Colon => { switch(ch) { - ':' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.ColonColon, TokenizerState.ColonColon, location), - else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location), + ':' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.ColonColon, TokenizerState.ColonColon, region), + else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, region), } }, TokenizerState.Equals => { switch(ch) { - '=' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.EqualsEquals, TokenizerState.EqualsEquals, location), - else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location), + '=' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.EqualsEquals, TokenizerState.EqualsEquals, region), + else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, region), } }, TokenizerState.Minus => { switch(ch) { - '-' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, null, TokenizerState.SmallCommentStart, location), - else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location), + '-' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, null, TokenizerState.SmallCommentStart, region), + else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, region), } }, TokenizerState.SmallCommentStart => @@ -716,486 +707,486 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz { switch(ch) { - 'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.An, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.An, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, TokenizerState.An => { switch(ch) { - 'd' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.And, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'd' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.And, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, - TokenizerState.And => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.And, allocator, location), + TokenizerState.And => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.And, allocator, region), TokenizerState.W => { switch(ch) { - 'h' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Wh, tokenStr, 
ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'h' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Wh, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, TokenizerState.Wh => { switch(ch) { - 'i' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Whi, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'i' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Whi, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, TokenizerState.Whi => { switch(ch) { - 'l' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Whil, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'l' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Whil, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, TokenizerState.Whil => { switch(ch) { - 'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.While, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.While, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, - TokenizerState.While => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.While, allocator, location), + TokenizerState.While => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.While, allocator, region), TokenizerState.B => { switch(ch) { - 'r' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Br, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'r' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Br, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, TokenizerState.Br => { switch(ch) { - 'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Bre, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Bre, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, 
allocator, region), } }, TokenizerState.Bre => { switch(ch) { - 'a' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Brea, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'a' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Brea, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, TokenizerState.Brea => { switch(ch) { - 'k' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Break, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'k' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Break, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, - TokenizerState.Break => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Break, allocator, location), + TokenizerState.Break => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Break, allocator, region), TokenizerState.G => { switch(ch) { - 'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Go, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Go, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, TokenizerState.Go => { switch(ch) { - 't' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Got, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 't' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Got, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, TokenizerState.Got => { switch(ch) { - 'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Goto, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Goto, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, - TokenizerState.Goto => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Goto, allocator, location), + TokenizerState.Goto => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Goto, allocator, region), TokenizerState.R => { switch(ch) { - 'e' => 
try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Re, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Re, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, TokenizerState.Re => { switch(ch) { - 't' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Ret, tokenStr, ch, location), - 'p' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Rep, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 't' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Ret, tokenStr, ch, region), + 'p' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Rep, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, TokenizerState.Ret => { switch(ch) { - 'u' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Retu, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'u' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Retu, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, TokenizerState.Retu => { switch(ch) { - 'r' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Retur, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'r' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Retur, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, TokenizerState.Retur => { switch(ch) { - 'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Return, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Return, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, - TokenizerState.Return => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Return, allocator, location), + TokenizerState.Return => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Return, allocator, region), TokenizerState.Rep => { switch(ch) { - 'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Repe, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, 
tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Repe, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, TokenizerState.Repe => { switch(ch) { - 'a' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Repea, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'a' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Repea, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, TokenizerState.Repea => { switch(ch) { - 't' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Repeat, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 't' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Repeat, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, - TokenizerState.Repeat => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Repeat, allocator, location), + TokenizerState.Repeat => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Repeat, allocator, region), TokenizerState.N => { switch(ch) { - 'i' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Ni, tokenStr, ch, location), - 'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.No, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'i' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Ni, tokenStr, ch, region), + 'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.No, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, TokenizerState.No => { switch(ch) { - 't' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Not, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 't' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Not, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, - TokenizerState.Not => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Not, allocator, location), + TokenizerState.Not => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Not, allocator, region), TokenizerState.Ni => { switch(ch) { - 'l' => try 
tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Nil, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'l' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Nil, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, - TokenizerState.Nil => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Nil, allocator, location), + TokenizerState.Nil => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Nil, allocator, region), TokenizerState.T => { switch(ch) { - 'h' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Th, tokenStr, ch, location), - 'r' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Tr, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'h' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Th, tokenStr, ch, region), + 'r' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Tr, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, TokenizerState.Th => { switch(ch) { - 'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.The, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.The, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, TokenizerState.The => { switch(ch) { - 'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Then, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Then, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, - TokenizerState.Then => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Then, allocator, location), + TokenizerState.Then => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Then, allocator, region), TokenizerState.Tr => { switch(ch) { - 'u' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Tru, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'u' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Tru, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, 
tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, TokenizerState.Tru => { switch(ch) { - 'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.True, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.True, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, - TokenizerState.True => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.True, allocator, location), + TokenizerState.True => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.True, allocator, region), TokenizerState.E => { switch(ch) { - 'l' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.El, tokenStr, ch, location), - 'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.En, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'l' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.El, tokenStr, ch, region), + 'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.En, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, TokenizerState.En => { switch(ch) { - 'd' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.End, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'd' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.End, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, - TokenizerState.End => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.End, allocator, location), + TokenizerState.End => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.End, allocator, region), TokenizerState.El => { switch(ch) { - 's' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Els, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 's' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Els, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, TokenizerState.Els => { switch(ch) { - 'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Else, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'e' => try 
tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Else, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, TokenizerState.Else => { switch(ch) { - 'i' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Elsei, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Else, allocator, location), + 'i' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Elsei, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Else, allocator, region), } }, TokenizerState.Elsei => { switch(ch) { - 'f' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Elseif, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'f' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Elseif, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, - TokenizerState.Elseif => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Elseif, allocator, location), + TokenizerState.Elseif => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Elseif, allocator, region), TokenizerState.O => { switch(ch) { - 'r' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Or, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'r' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Or, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, - TokenizerState.Or => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Or, allocator, location), + TokenizerState.Or => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Or, allocator, region), TokenizerState.D => { switch(ch) { - 'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Do, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Do, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, - TokenizerState.Do => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Do, allocator, location), + TokenizerState.Do => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Do, allocator, region), TokenizerState.I => { switch(ch) { - 'f' => try 
tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.If, tokenStr, ch, location), - 'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.In, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'f' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.If, tokenStr, ch, region), + 'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.In, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, - TokenizerState.In => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.In, allocator, location), - TokenizerState.If => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.If, allocator, location), + TokenizerState.In => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.In, allocator, region), + TokenizerState.If => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.If, allocator, region), TokenizerState.F => { switch(ch) { - 'a' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Fa, tokenStr, ch, location), - 'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Fo, tokenStr, ch, location), - 'u' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Fu, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'a' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Fa, tokenStr, ch, region), + 'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Fo, tokenStr, ch, region), + 'u' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Fu, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, TokenizerState.Fu => { switch(ch) { - 'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Fun, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Fun, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, TokenizerState.Fun => { switch(ch) { - 'c' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Func, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'c' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Func, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, TokenizerState.Func => { switch(ch) { - 't' => try 
tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Funct, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 't' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Funct, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, TokenizerState.Funct => { switch(ch) { - 'i' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Functi, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'i' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Functi, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, TokenizerState.Functi => { switch(ch) { - 'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Functio, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Functio, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, TokenizerState.Functio => { switch(ch) { - 'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Function, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Function, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, - TokenizerState.Function => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Function, allocator, location), + TokenizerState.Function => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Function, allocator, region), TokenizerState.Fa => { switch(ch) { - 'l' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Fal, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'l' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Fal, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, TokenizerState.Fal => { switch(ch) { - 's' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Fals, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 's' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Fals, tokenStr, ch, region), 
+ else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, TokenizerState.Fals => { switch(ch) { - 'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.False, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.False, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, - TokenizerState.False => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.False, allocator, location), + TokenizerState.False => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.False, allocator, region), TokenizerState.Fo => { switch(ch) { - 'r' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.For, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'r' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.For, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, - TokenizerState.For => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.For, allocator, location), + TokenizerState.For => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.For, allocator, region), TokenizerState.L => { switch(ch) { - 'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Lo, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Lo, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, TokenizerState.Lo => { switch(ch) { - 'c' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Loc, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'c' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Loc, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, TokenizerState.Loc => { switch(ch) { - 'a' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Loca, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'a' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Loca, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, 
tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, TokenizerState.Loca => { switch(ch) { - 'l' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Local, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'l' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Local, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, - TokenizerState.Local => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Local, allocator, location), + TokenizerState.Local => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Local, allocator, region), TokenizerState.U => { switch(ch) { - 'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Un, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Un, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, TokenizerState.Un => { switch(ch) { - 't' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Unt, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 't' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Unt, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, TokenizerState.Unt => { switch(ch) { - 'i' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Unti, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'i' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Unti, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, TokenizerState.Unti => { switch(ch) { - 'l' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Until, tokenStr, ch, location), - else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), + 'l' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Until, tokenStr, ch, region), + else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region), } }, - TokenizerState.Until => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Until, allocator, location), + TokenizerState.Until => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, 
TokenType.Until, allocator, region), else => { std.debug.print("{}\n", .{state.*}); @@ -1215,24 +1206,24 @@ pub fn tokenize(fileContent: []u8, allocator: std.mem.Allocator) ![]Token defer tokenStr.deinit(); var tokenNumeral: ?types.Numeral = null; var longBracketLevel: u32 = 0; - var location = Location { .start = null, .length = 0 }; + var region = CodeRegion { .start = null, .length = 0 }; while(index < fileContent.len) { const ch = fileContent[index]; - try tokenizeChar(&state, ch, &lastIndex, &index, &tokenType, &tokenStr, &tokenNumeral, &tokens, &longBracketLevel, &location, allocator); - if(location.start != null and location.start.?.col == 0 and location.start.?.line == 0) + try tokenizeChar(&state, ch, &lastIndex, &index, &tokenType, &tokenStr, &tokenNumeral, &tokens, &longBracketLevel, &region, allocator); + if(region.start != null and region.start.?.col == 0 and region.start.?.line == 0) { - location.start = calculatePoint(fileContent, index); + region.start = calculatePoint(fileContent, index); } index += 1; } return tokens.toOwnedSlice(); } -fn calculatePoint(fileContent: []u8, index: usize) Point +fn calculatePoint(fileContent: []u8, index: usize) CodeLocation { - var ret = Point { .col = 1, .line = 1 }; + var ret = CodeLocation { .col = 1, .line = 1 }; for(0..index) |i| { ret.col += 1; diff --git a/src/treewalker.zig b/src/treewalker.zig new file mode 100644 index 0000000..6312081 --- /dev/null +++ b/src/treewalker.zig @@ -0,0 +1,111 @@ +const std = @import("std"); +const parser = @import("parser.zig"); +const types = @import("types.zig"); + +pub fn interpret(root: parser.ChunkNode, allocator: std.mem.Allocator) !void +{ + var _ENV = types.Table { .items = std.AutoArrayHashMap(types.Value, types.Value).init(allocator) }; + try walkChunk(root, &_ENV, allocator); +} + +fn walkChunk(node: parser.ChunkNode, environment: *types.Table, allocator: std.mem.Allocator) !void +{ + try walkBlock(node.block, environment, allocator); +} + +fn walkBlock(node: parser.BlockNode, environment: *types.Table, allocator: std.mem.Allocator) !void +{ + for(node.stats.items) |stat| + { + try walkStat(stat, environment, allocator); + } + if(node.retstat != null) + { + try walkRetstat(node.retstat.?, environment, allocator); + } +} + +fn walkStat(node: parser.StatNode, environment: *types.Table, allocator: std.mem.Allocator) !void +{ + switch(node) + { + .Assignment => |assignmentNode| + { + return try walkAssignmentNode(assignmentNode, environment, allocator); + }, + else => + { + std.debug.print("{any}\n", .{node}); + return error.NotImplemented; + } + } +} + +fn walkRetstat(node: parser.RetstatNode, environment: *types.Table, allocator: std.mem.Allocator) !void +{ + _ = node; + _ = environment; + _ = allocator; + return error.NotImplemented; +} + +fn walkAssignmentNode(node: parser.AssignmentNode, environment: *types.Table, allocator: std.mem.Allocator) !void +{ + const results = try walkExplist(node.rhs, environment, allocator); + var i: usize = 0; + _ = results; + _ = i; + for(node.lhs.vars.items) |variable| + { + switch(variable) + { + .Indexed => |indexedNode| + { + _ = indexedNode; + return error.NotImplemented; + }, + else => return error.NotImplemented, + } + } +} + +fn walkExplist(node: parser.ExplistNode, environment: *types.Table, allocator: std.mem.Allocator) ![]types.Value +{ + var results = std.ArrayList(types.Value).init(allocator); + for(node.exps.items) |exp| + { + try results.append(try walkExp(exp, environment, allocator, false)); + } + return results.toOwnedSlice(); +} + +fn walkExp(node: 
parser.ExpNode, environment: *types.Table, allocator: std.mem.Allocator, isVariadicFunction: bool) !types.Value +{ + _ = environment; + _ = allocator; + + switch(node) + { + .Nil => return types.Value.Nil, + .False => return types.Value { .Bool = false }, + .True => return types.Value { .Bool = true }, + .Numeral => |numeral| return types.Value { .Numeral = numeral }, + .LiteralString => |string| return types.Value { .String = string }, + .Varargs => + { + if(isVariadicFunction) + { + return error.NotImplemented; + } + else + { + return error.UseVarargsOutsideVariadicFunction; + } + }, + else => + { + std.debug.print("{}\n", .{node}); + return error.NotImplemented; + } + } +} diff --git a/src/types.zig b/src/types.zig index 411c6ca..2f26155 100644 --- a/src/types.zig +++ b/src/types.zig @@ -1,5 +1,41 @@ +const std = @import("std"); + pub const Numeral = union(enum) { Integer: i64, Float: f64, }; +pub const Table = struct +{ + items: std.AutoArrayHashMap(Value, Value), + + pub fn insert(self: *Table, key: Value, value: Value) !void + { + try self.items.put(key, value); + } + + pub fn get(self: *const Table, key: Value) Value + { + const value = self.items.get(key); + return if(value == null) Value.Nil else value.?; + } +}; +pub const Value = union(enum) +{ + Nil, + Bool: bool, + Numeral: Numeral, + String: []u8, + Table: *Table, +}; + +pub const CodeRegion = struct +{ + start: ?CodeLocation, + length: usize, +}; +pub const CodeLocation = struct +{ + line: usize, + col: usize, +};
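
As an aside on the new Value union: once walkStat grows support for WhileNode/IfNode, the walker will need Lua's truthiness rule for their conditions. The following is a standalone sketch, not part of the patch; truthy is a hypothetical helper and the two unions are simplified copies of the ones declared in src/types.zig, just to illustrate the rule that only nil and false are falsy:

const std = @import("std");

// Simplified mirrors of the unions in src/types.zig.
const Numeral = union(enum) { Integer: i64, Float: f64 };
const Value = union(enum) { Nil, Bool: bool, Numeral: Numeral, String: []const u8 };

// Lua truthiness: nil and false are falsy, every other value is truthy.
fn truthy(v: Value) bool
{
    return switch(v)
    {
        .Nil => false,
        .Bool => |b| b,
        else => true,
    };
}

pub fn main() void
{
    // Unlike in C, 0 is truthy in Lua.
    std.debug.print("{}\n", .{truthy(Value { .Numeral = .{ .Integer = 0 } })}); // true
    std.debug.print("{}\n", .{truthy(Value.Nil)}); // false
}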