Implement ast dumping and start treewalker

0x4261756D 2023-10-08 21:40:44 +02:00
parent b00a99ab6a
commit cdfa8d3f90
6 changed files with 641 additions and 345 deletions

build.zig

@@ -33,6 +33,9 @@ pub fn build(b: *std.Build) void {
     exe.addModule("parser", b.addModule("parser", .{
         .source_file = .{ .path = "src/parser.zig" },
     }));
+    exe.addModule("treewalker", b.addModule("treewalker", .{
+        .source_file = .{ .path = "src/treewalker.zig" },
+    }));
     // This declares intent for the executable to be installed into the
     // standard location when the user invokes the "install" step (the default
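Registering the module this way makes it importable by its registered name anywhere in the executable's sources. A minimal sketch of that usage, assuming the registration above (note that main.zig below actually imports the file by relative path instead, which also works since the sources share a directory):

// Hypothetical consumer of the registered module name; not part of this commit.
const treewalker = @import("treewalker");
const interpret = treewalker.interpret;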

src/main.zig

@@ -1,6 +1,7 @@
 const std = @import("std");
 const tokenize = @import("tokenizer.zig").tokenize;
 const parse = @import("parser.zig").parse;
+const treewalk = @import("treewalker.zig").interpret;
 pub fn main() !void
 {
@@ -35,5 +36,5 @@ pub fn main() !void
     var parserAllocator = std.heap.ArenaAllocator.init(std.heap.page_allocator);
     defer parserAllocator.deinit();
     const root = try parse(tokens, &parserAllocator);
-    root.dump(0);
+    try treewalk(root, allocator);
 }
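src/treewalker.zig is one of the six changed files but is not shown in this excerpt. Here is a minimal sketch of what its entry point could look like, inferred only from the call site above; everything beyond the signature is an assumption:

// src/treewalker.zig - hypothetical sketch, not the commit's actual contents.
// main.zig only implies the signature: interpret(root, allocator) returning an
// error union; the body below is a placeholder dispatch loop.
const std = @import("std");
const parser = @import("parser.zig");

pub fn interpret(root: parser.ChunkNode, allocator: std.mem.Allocator) !void
{
    _ = allocator; // a real interpreter would allocate environments here
    for(root.block.stats.items) |stat|
    {
        switch(stat)
        {
            .Semicolon => {}, // no-op statement
            else => return error.NotImplemented,
        }
    }
}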

src/parser.zig

@@ -2,14 +2,17 @@ const Token = @import("tokenizer.zig").Token;
 const TokenType = @import("tokenizer.zig").TokenType;
 const std = @import("std");
 const types = @import("types.zig");
+const CodeRegion = @import("types.zig").CodeRegion;
 pub const ChunkNode = struct
 {
     block: BlockNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
     pub fn dump(self: *const ChunkNode, indent: usize) void
     {
-        std.debug.print("ChunkNode:\n", .{});
+        std.debug.print("ChunkNode ({} - {}):\n", .{self.startRegion, self.endRegion});
         for (0..indent) |_|
         {
             std.debug.print("\t", .{});
@@ -33,6 +36,8 @@ pub const BlockNode = struct
 {
     stats: std.ArrayList(StatNode),
     retstat: ?RetstatNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
     fn dump(self: *const BlockNode, indent: usize) void
     {
@@ -85,9 +90,12 @@ pub const BlockNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const RetstatNode = struct
+pub const RetstatNode = struct
 {
     values: ?ExplistNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
     fn dump(self: *const RetstatNode, indent: usize) void
     {
         std.debug.print("Retstat Node:\n", .{});
@@ -117,7 +125,7 @@ const RetstatNode = struct
     }
 };
-const StatNode = union(enum)
+pub const StatNode = union(enum)
 {
     Semicolon,
     Assignment: AssignmentNode,
@@ -157,9 +165,12 @@ fn dumpStatNode(stat: StatNode, indent: usize) void
     }
 }
-const AssignmentNode = struct
+pub const AssignmentNode = struct
 {
     lhs: VarlistNode, rhs: ExplistNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
     fn dump(self: *const AssignmentNode, indent: usize) void
     {
         std.debug.print("Assignment Node:\n", .{});
@@ -187,10 +198,13 @@ const AssignmentNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const WhileNode = struct
+pub const WhileNode = struct
 {
     condition: ExpNode,
     body: BlockNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
     fn dump(self: *const WhileNode, indent: usize) void
     {
         std.debug.print("While Node:\n", .{});
@@ -218,10 +232,13 @@ const WhileNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const RepeatNode = struct
+pub const RepeatNode = struct
 {
     condition: ExpNode,
     body: BlockNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
     fn dump(self: *const RepeatNode, indent: usize) void
     {
         std.debug.print("Repeat Node:\n", .{});
@@ -249,12 +266,15 @@ const RepeatNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const IfNode = struct
+pub const IfNode = struct
 {
     condition: ExpNode,
     body: BlockNode,
     elseifs: std.ArrayList(ElseifNode),
     else_: ?BlockNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
     fn dump(self: *const IfNode, indent: usize) void
     {
         std.debug.print("If Node:\n", .{});
@@ -318,13 +338,16 @@ const IfNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const ForNumericalNode = struct
+pub const ForNumericalNode = struct
 {
     variable: []u8,
     start: ExpNode,
     end: ExpNode,
     change: ?ExpNode,
     body: BlockNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
     fn dump(self: *const ForNumericalNode, indent: usize) void
     {
         std.debug.print("For Numerical Node:\n", .{});
@@ -376,11 +399,14 @@ const ForNumericalNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const ForGenericNode = struct
+pub const ForGenericNode = struct
 {
     vars: std.ArrayList([]u8),
     exps: ExplistNode,
     body: BlockNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
     fn dump(self: *const ForGenericNode, indent: usize) void
     {
         std.debug.print("For Generic Node:\n", .{});
@@ -427,10 +453,13 @@ const ForGenericNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const FunctionNode = struct
+pub const FunctionNode = struct
 {
     name: FuncnameNode,
     body: FuncbodyNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
     fn dump(self: *const FunctionNode, indent: usize) void
     {
         std.debug.print("Function Node:\n", .{});
@@ -458,10 +487,13 @@ const FunctionNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const LocalFunctionNode = struct
+pub const LocalFunctionNode = struct
 {
     name: []u8,
     body: FuncbodyNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
     fn dump(self: *const LocalFunctionNode, indent: usize) void
     {
         std.debug.print("Local Function Node:\n", .{});
@@ -489,10 +521,13 @@ const LocalFunctionNode = struct
     }
 };
-const LocalNode = struct
+pub const LocalNode = struct
 {
     attnames: AttnamelistNode,
     values: ?ExplistNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
     fn dump(self: *const LocalNode, indent: usize) void
     {
         std.debug.print("Local Node:\n", .{});
@@ -527,11 +562,14 @@ const LocalNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const FunctioncallNode = struct
+pub const FunctioncallNode = struct
 {
     function: SuffixexpNode,
     objectArg: ?[]u8,
     args: ArgsNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
     fn dump(self: *const FunctioncallNode, indent: usize) void
     {
         std.debug.print("Functioncall Node:\n", .{});
@@ -572,9 +610,12 @@ const FunctioncallNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const VarlistNode = struct
+pub const VarlistNode = struct
 {
     vars: std.ArrayList(VarNode),
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
     fn dump(self: *const VarlistNode, indent: usize) void
     {
         std.debug.print("Varlist Node:\n", .{});
@@ -613,9 +654,12 @@ const VarlistNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const ExplistNode = struct
+pub const ExplistNode = struct
 {
     exps: std.ArrayList(ExpNode),
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
     fn dump(self: *const ExplistNode, indent: usize) void
     {
         std.debug.print("Explist Node:\n", .{});
@@ -654,7 +698,7 @@ const ExplistNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const ExpNode = union(enum)
+pub const ExpNode = union(enum)
 {
     Nil,
     False,
@@ -685,10 +729,13 @@ fn dumpExpNode(expNode: ExpNode, indent: usize) void
         .Binop => |*node| node.*.dump(indent),
     }
 }
-const UnopNode = struct
+pub const UnopNode = struct
 {
     unopType: UnopType,
     exp: *ExpNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
     fn dump(self: *const UnopNode, indent: usize) void
     {
         std.debug.print("Unop Node:\n", .{});
@@ -715,11 +762,14 @@ const UnopNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const BinopNode = struct
+pub const BinopNode = struct
 {
     lhs: ExpNode,
     op: BinopType,
     rhs: ExpNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
     fn dump(self: *const BinopNode, indent: usize) void
     {
         std.debug.print("Binop Node:\n", .{});
@@ -752,10 +802,13 @@ const BinopNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const ElseifNode = struct
+pub const ElseifNode = struct
 {
     condition: ExpNode,
     body: BlockNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
     fn dump(self: *const ElseifNode, indent: usize) void
     {
         std.debug.print("Elseif Node:\n", .{});
@@ -783,11 +836,14 @@ const ElseifNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const FuncnameNode = struct
+pub const FuncnameNode = struct
 {
     name: []u8,
     dottedNames: std.ArrayList([]u8),
     firstArg: ?[]u8,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
     fn dump(self: *const FuncnameNode, indent: usize) void
     {
         std.debug.print("Funcname Node:\n", .{});
@@ -845,10 +901,12 @@ const FuncnameNode = struct
     }
 };
-const FuncbodyNode = struct
+pub const FuncbodyNode = struct
 {
     pars: ?ParlistNode,
     body: BlockNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
     fn dump(self: *const FuncbodyNode, indent: usize) void
     {
@@ -883,9 +941,12 @@ const FuncbodyNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const AttnamelistNode = struct
+pub const AttnamelistNode = struct
 {
     attnames: std.ArrayList(AttnameNode),
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
     fn dump(self: *const AttnamelistNode, indent: usize) void
     {
         std.debug.print("Attnamelist Node:\n", .{});
@@ -924,10 +985,13 @@ const AttnamelistNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const AttnameNode = struct
+pub const AttnameNode = struct
 {
     name: []u8,
     attribute: ?[]u8,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
     fn dump(self: *const AttnameNode, indent: usize) void
     {
         std.debug.print("Funcname Node:\n", .{});
@@ -961,7 +1025,7 @@ const AttnameNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const SuffixexpNode = union(enum)
+pub const SuffixexpNode = union(enum)
 {
     Normal: NormalSuffixNode,
     Functioncall: *FunctioncallNode,
@@ -974,7 +1038,7 @@ fn dumpSuffixExpNode(suffixexpNode: SuffixexpNode, indent: usize) void
         .Functioncall => |*node| node.*.dump(indent),
     }
 }
-const ArgsNode = union(enum)
+pub const ArgsNode = union(enum)
 {
     Bracketed: ?ExplistNode,
     Tableconstructor: TableconstructorNode,
@@ -999,7 +1063,7 @@ fn dumpArgsNode(argsNode: ArgsNode, indent: usize) void
         .Literal => |*string| std.debug.print("Literal: '{s}'\n", .{string}),
     }
 }
-const VarNode = union(enum)
+pub const VarNode = union(enum)
 {
     Name: []u8,
     Indexed: IndexedVarNode,
@@ -1014,10 +1078,13 @@ fn dumpVarNode(varNode: VarNode, indent: usize) void
         .Member => |*node| node.dump(indent),
     }
 }
-const IndexedVarNode = struct
+pub const IndexedVarNode = struct
 {
     value: SuffixexpNode,
     index: ExpNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
     fn dump(self: *const IndexedVarNode, indent: usize) void
     {
         std.debug.print("Indexed Var Node:\n", .{});
@@ -1045,10 +1112,13 @@ const IndexedVarNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const MemberVarNode = struct
+pub const MemberVarNode = struct
 {
     value: SuffixexpNode,
     name: []u8,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
     fn dump(self: *const MemberVarNode, indent: usize) void
     {
         std.debug.print("Member Var Node:\n", .{});
@@ -1076,9 +1146,12 @@ const MemberVarNode = struct
     }
 };
-const TableconstructorNode = struct
+pub const TableconstructorNode = struct
 {
     exps: ?FieldlistNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
     fn dump(self: *const TableconstructorNode, indent: usize) void
     {
         std.debug.print("Tableconstructor Node:\n", .{});
@@ -1107,11 +1180,11 @@ const TableconstructorNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const UnopType = enum
+pub const UnopType = enum
 {
     Minus, LogicalNot, Length, BinaryNot,
 };
-const BinopType = enum
+pub const BinopType = enum
 {
     LogicalOr,
     LocicalAnd,
@@ -1125,10 +1198,13 @@ const BinopType = enum
     Mul, Div, IntDiv, Mod,
     Exp,
 };
-const ParlistNode = struct
+pub const ParlistNode = struct
 {
     names: std.ArrayList([]u8),
     hasVarargs: bool,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
     fn dump(self: *const ParlistNode, indent: usize) void
     {
         std.debug.print("Parlist Node:\n", .{});
@@ -1172,10 +1248,13 @@ const ParlistNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const NormalSuffixNode = struct
+pub const NormalSuffixNode = struct
 {
     firstPart: SuffixexpFirstPart,
     suffixes: std.ArrayList(SuffixexpSuffix),
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
     fn dump(self: *const NormalSuffixNode, indent: usize) void
     {
         std.debug.print("Normal Suffix Node:\n", .{});
@@ -1220,7 +1299,7 @@ const NormalSuffixNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const SuffixexpFirstPart = union(enum)
+pub const SuffixexpFirstPart = union(enum)
 {
     Name: []u8,
     BracketedExpr: ExpNode,
@@ -1233,7 +1312,7 @@ fn dumpSuffixExpFirstPart(suffixexpFirstPart: SuffixexpFirstPart, indent: usize)
         .BracketedExpr => |*node| dumpExpNode(node.*, indent),
     }
 }
-const SuffixexpSuffix = union(enum)
+pub const SuffixexpSuffix = union(enum)
 {
     Dot: []u8,
     Indexed: ExpNode,
@@ -1251,10 +1330,13 @@ fn dumpSuffixSuffix(suffixexpSuffix: SuffixexpSuffix, indent: usize) void
     }
 }
-const ArgsFirstArgNode = struct
+pub const ArgsFirstArgNode = struct
 {
     name: []u8,
     rest: ArgsNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
     fn dump(self: *const ArgsFirstArgNode, indent: usize) void
     {
         std.debug.print("Args First Arg Node:\n", .{});
@@ -1282,9 +1364,12 @@ const ArgsFirstArgNode = struct
     }
 };
-const FieldlistNode = struct
+pub const FieldlistNode = struct
 {
     exps: std.ArrayList(FieldNode),
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
     fn dump(self: *const FieldlistNode, indent: usize) void
     {
         std.debug.print("Fieldlist Node:\n", .{});
@@ -1323,16 +1408,19 @@ const FieldlistNode = struct
         std.debug.print("}}\n", .{});
     }
 };
-const FieldNode = union(enum)
+pub const FieldNode = union(enum)
 {
     IndexedAssignment: IndexedAssignmentNode,
     Assignment: FieldAssignmentNode,
     Exp: ExpNode,
 };
-const FieldAssignmentNode = struct
+pub const FieldAssignmentNode = struct
 {
     lhs: []u8,
     rhs: ExpNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
     fn dump(self: *const FieldAssignmentNode, indent: usize) void
     {
         std.debug.print("Field Assignment Node:\n", .{});
@@ -1368,10 +1456,13 @@ fn dumpFieldNode(fieldNode: FieldNode, indent: usize) void
         .Exp => |*node| dumpExpNode(node.*, indent),
     }
 }
-const IndexedAssignmentNode = struct
+pub const IndexedAssignmentNode = struct
 {
     index: ExpNode,
     rhs: ExpNode,
+    startRegion: CodeRegion,
+    endRegion: CodeRegion,
     fn dump(self: *const IndexedAssignmentNode, indent: usize) void
     {
         std.debug.print("Indexed Assignment Node:\n", .{});
@@ -1405,7 +1496,7 @@ pub fn parse(tokens: []Token, allocator: *std.heap.ArenaAllocator) !ChunkNode
     var i: usize = 0;
     const maybeParsedChunk = parseChunk(tokens, &i, allocator) catch |err|
     {
-        std.debug.print("{any}: data: {any}, type: {any}\n", .{tokens[i].location, tokens[i].tokenData, tokens[i].tokenType});
+        std.debug.print("{any}: data: {any}, type: {any}\n", .{tokens[i].region, tokens[i].tokenData, tokens[i].tokenType});
         return err;
     };
     return maybeParsedChunk;
@@ -1482,11 +1573,12 @@ const ParserError = error
 fn parseChunk(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !ChunkNode
 {
-    return ChunkNode { .block = try parseBlock(tokens, i, allocator) };
+    const block = try parseBlock(tokens, i, allocator);
+    return ChunkNode { .block = block, .startRegion = block.startRegion, .endRegion = block.endRegion };
 }
 fn parseBlock(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) ParserError!BlockNode
 {
-    var ret = BlockNode { .stats = std.ArrayList(StatNode).init(allocator.*.allocator()), .retstat = null };
+    var ret = BlockNode { .stats = std.ArrayList(StatNode).init(allocator.*.allocator()), .retstat = null, .startRegion = tokens[i.*].region, .endRegion = tokens[i.*].region };
     while(i.* < tokens.len and
         tokens[i.*].tokenType != TokenType.Return and
         tokens[i.*].tokenType != TokenType.End and
@@ -1500,6 +1592,7 @@ fn parseBlock(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) P
     {
         ret.retstat = try parseRetstat(tokens, i, allocator);
     }
+    ret.endRegion = if(i.* - 1 < tokens.len) tokens[i.* - 1].region else tokens[tokens.len - 1].region;
     return ret;
 }
 fn parseStat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !StatNode
@@ -1529,7 +1622,9 @@ fn parseStat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !S
             }
             if(tokens[i.*].tokenType == TokenType.Name)
            {
-                return StatNode { .Goto = tokens[i.*].tokenData.string };
+                const name = tokens[i.*].tokenData.string;
+                i.* += 1;
+                return StatNode { .Goto = name };
             }
             return error.ExpectedNameForGoto;
         },
@@ -1546,6 +1641,7 @@ fn parseStat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !S
         },
         TokenType.While =>
         {
+            const startRegion = tokens[i.*].region;
            i.* += 1;
            const condition = try parseExp(tokens, i, allocator);
            if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.Do)
@@ -1557,22 +1653,26 @@ fn parseStat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !S
            {
                return error.MissingEndForWhileBody;
            }
+            const endRegion = tokens[i.*].region;
            i.* += 1;
-            return StatNode { .While = WhileNode { .body = body, .condition = condition } };
+            return StatNode { .While = WhileNode { .body = body, .condition = condition, .startRegion = startRegion, .endRegion = endRegion } };
        },
        TokenType.Repeat =>
        {
+            const startRegion = tokens[i.*].region;
            i.* += 1;
            const body = try parseBlock(tokens, i, allocator);
            if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.Until)
            {
                return error.ExpectedUntilAfterRepeatBody;
            }
+            const endRegion = tokens[i.*].region;
            i.* += 1;
-            return StatNode { .Repeat = RepeatNode { .body = body, .condition = try parseExp(tokens, i, allocator) } };
+            return StatNode { .Repeat = RepeatNode { .body = body, .condition = try parseExp(tokens, i, allocator), .startRegion = startRegion, .endRegion = endRegion } };
        },
        TokenType.If =>
        {
+            const startRegion = tokens[i.*].region;
            i.* += 1;
            const condition = try parseExp(tokens, i, allocator);
            if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.Then)
@@ -1585,7 +1685,7 @@ fn parseStat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !S
            {
                return error.ReachedEOFAfterIfBody;
            }
-            var ifNode = IfNode { .body = body, .condition = condition, .elseifs = std.ArrayList(ElseifNode).init(allocator.*.allocator()), .else_ = null};
+            var ifNode = IfNode { .body = body, .condition = condition, .elseifs = std.ArrayList(ElseifNode).init(allocator.*.allocator()), .else_ = null, .startRegion = startRegion, .endRegion = startRegion };
            while(tokens[i.*].tokenType == TokenType.Elseif)
            {
                i.* += 1;
@@ -1594,8 +1694,9 @@ fn parseStat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !S
                {
                    return error.ExpectedThenAfterElseifCondition;
                }
+                const endRegion = tokens[i.*].region;
                i.* += 1;
-                try ifNode.elseifs.append(ElseifNode { .body = try parseBlock(tokens, i, allocator), .condition = elseifCondition });
+                try ifNode.elseifs.append(ElseifNode { .body = try parseBlock(tokens, i, allocator), .condition = elseifCondition, .startRegion = startRegion, .endRegion = endRegion });
            }
            if(i.* >= tokens.len)
            {
@@ -1610,11 +1711,13 @@ fn parseStat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !S
            {
                return error.ExpectedEndClosingIf;
            }
+            ifNode.endRegion = tokens[i.*].region;
            i.* += 1;
            return StatNode { .If = ifNode };
        },
        TokenType.For =>
        {
+            const startRegion = tokens[i.*].region;
            i.* += 1;
            if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.Name)
            {
@@ -1654,8 +1757,9 @@ fn parseStat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !S
                {
                    return error.ExpectedEndAfterForEqBody;
                }
+                const endRegion = tokens[i.*].region;
                i.* += 1;
-                return StatNode { .ForNumerical = ForNumericalNode { .variable = variable, .start = start, .end = end, .change = change, .body = body } };
+                return StatNode { .ForNumerical = ForNumericalNode { .variable = variable, .start = start, .end = end, .change = change, .body = body, .startRegion = startRegion, .endRegion = endRegion } };
            },
            TokenType.Comma =>
            {
@@ -1691,8 +1795,9 @@ fn parseStat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !S
                {
                    return error.ExpectedEndAfterForInBody;
                }
+                const endRegion = tokens[i.*].region;
                i.* += 1;
-                return StatNode { .ForGeneric = ForGenericNode { .vars = names, .exps = exps, .body = body } };
+                return StatNode { .ForGeneric = ForGenericNode { .vars = names, .exps = exps, .body = body, .startRegion = startRegion, .endRegion = endRegion } };
            },
            TokenType.In =>
            {
@@ -1708,22 +1813,26 @@ fn parseStat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !S
                {
                    return error.ExpectedEndAfterForInBody;
                }
+                const endRegion = tokens[i.*].region;
                i.* += 1;
                var names = try std.ArrayList([]u8).initCapacity(allocator.allocator(), 1);
                try names.insert(0, variable);
-                return StatNode { .ForGeneric = ForGenericNode { .vars = names, .exps = exps, .body = body } };
+                return StatNode { .ForGeneric = ForGenericNode { .vars = names, .exps = exps, .body = body, .startRegion = startRegion, .endRegion = endRegion } };
            },
            else => return error.UnexpectedTokenAfterFirstNameInFor,
            }
        },
        TokenType.Function =>
        {
+            const startRegion = tokens[i.*].region;
            i.* += 1;
            const name = try parseFuncname(tokens, i, allocator);
-            return StatNode { .Function = FunctionNode { .name = name, .body = try parseFuncbody(tokens, i, allocator) } };
+            const body = try parseFuncbody(tokens, i, allocator);
+            return StatNode { .Function = FunctionNode { .name = name, .body = body, .startRegion = startRegion, .endRegion = body.endRegion } };
        },
        TokenType.Local =>
        {
+            const startRegion = tokens[i.*].region;
            i.* += 1;
            if(i.* >= tokens.len)
            {
@@ -1737,16 +1846,18 @@ fn parseStat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !S
                {
                    return error.ExpectedLocalFunctionName;
                }
                const name = tokens[i.*].tokenData.string;
+                const endRegion = tokens[i.*].region;
                i.* += 1;
-                return StatNode { .LocalFunction = LocalFunctionNode { .name = name, .body = try parseFuncbody(tokens, i, allocator) } };
+                return StatNode { .LocalFunction = LocalFunctionNode { .name = name, .body = try parseFuncbody(tokens, i, allocator), .startRegion = startRegion, .endRegion = endRegion } };
            }
            else
            {
-                var ret = LocalNode { .attnames = try parseAttnamelist(tokens, i, allocator), .values = null };
+                var ret = LocalNode { .attnames = try parseAttnamelist(tokens, i, allocator), .values = null, .startRegion = startRegion, .endRegion = startRegion };
                if(i.* < tokens.len and tokens[i.*].tokenType == TokenType.Equals)
                {
                    i.* += 1;
                    ret.values = try parseExplist(tokens, i, allocator);
+                    ret.endRegion = ret.values.?.endRegion;
                }
                return StatNode { .Local = ret };
            }
@@ -1769,6 +1880,7 @@ fn parseStat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !S
        },
        TokenType.Name, TokenType.RoundOpen =>
        {
+            const startRegion = tokens[i.*].region;
            const suffixExp = try parseSuffixExp(tokens, i, allocator);
            if(i.* >= tokens.len)
            {
@@ -1784,15 +1896,17 @@ fn parseStat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !S
            {
                TokenType.Equals =>
                {
+                    const endRegion = tokens[i.*].region;
                    i.* += 1;
                    var lhs = std.ArrayList(VarNode).init(allocator.allocator());
-                    try lhs.append(try suffixExpToVar(suffixExp));
-                    return StatNode { .Assignment = AssignmentNode { .lhs = VarlistNode { .vars = lhs }, .rhs = try parseExplist(tokens, i, allocator) } };
+                    try lhs.append(try suffixExpToVar(suffixExp, startRegion, endRegion));
+                    const rhs = try parseExplist(tokens, i, allocator);
+                    return StatNode { .Assignment = AssignmentNode { .lhs = VarlistNode { .vars = lhs, .startRegion = endRegion, .endRegion = tokens[@min(i.*, tokens.len) - 1].region }, .rhs = rhs, .startRegion = startRegion, .endRegion = rhs.endRegion } };
                },
                TokenType.Comma =>
                {
-                    var varlistNode = VarlistNode { .vars = std.ArrayList(VarNode).init(allocator.allocator()) };
-                    try varlistNode.vars.append(try suffixExpToVar(suffixExp));
+                    var varlistNode = VarlistNode { .vars = std.ArrayList(VarNode).init(allocator.allocator()), .startRegion = startRegion, .endRegion = startRegion };
+                    try varlistNode.vars.append(try suffixExpToVar(suffixExp, startRegion, tokens[@min(i.*, tokens.len) - 1].region));
                    while(tokens[i.*].tokenType == TokenType.Comma)
                    {
                        i.* += 1;
@@ -1802,8 +1916,10 @@ fn parseStat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !S
                    {
                        return error.ExpectedEqAfterAssignmentVarList;
                    }
+                    varlistNode.endRegion = tokens[i.*].region;
                    i.* += 1;
-                    return StatNode { .Assignment = AssignmentNode { .lhs = varlistNode, .rhs = try parseExplist(tokens, i, allocator) } };
+                    const rhs = try parseExplist(tokens, i, allocator);
+                    return StatNode { .Assignment = AssignmentNode { .lhs = varlistNode, .rhs = rhs, .startRegion = startRegion, .endRegion = rhs.endRegion } };
                },
                else =>
                {
@@ -1829,6 +1945,7 @@ fn parseRetstat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator)
    {
        return error.ExpectedReturnStartingRetstat;
    }
+    const startRegion = tokens[i.*].region;
    i.* += 1;
    if(i.* >= tokens.len or
        tokens[i.*].tokenType == TokenType.Semicolon or tokens[i.*].tokenType == TokenType.Else or tokens[i.*].tokenType == TokenType.Elseif or tokens[i.*].tokenType == TokenType.End)
@@ -1837,14 +1954,16 @@ fn parseRetstat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator)
        {
            i.* += 1;
        }
-        return RetstatNode { .values = null };
+        return RetstatNode { .values = null, .startRegion = startRegion, .endRegion = tokens[@min(i.*, tokens.len) - 1].region };
    }
-    const ret = RetstatNode { .values = try parseExplist(tokens, i, allocator) };
+    const values = try parseExplist(tokens, i, allocator);
+    var endRegion = values.endRegion;
    if(i.* < tokens.len and tokens[i.*].tokenType == TokenType.Semicolon)
    {
+        endRegion = tokens[i.*].region;
        i.* += 1;
    }
-    return ret;
+    return RetstatNode { .values = values, .startRegion = startRegion, .endRegion = endRegion };
 }
 fn parseExp(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) ParserError!ExpNode
 {
@@ -1857,6 +1976,7 @@ fn parseExpPrimary(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocat
    {
        return error.ReachedEOFExpectedPrimaryExpression;
    }
+    const startRegion = tokens[i.*].region;
    switch(tokens[i.*].tokenType)
    {
        TokenType.Nil =>
@@ -1902,28 +2022,32 @@ fn parseExpPrimary(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocat
            i.* += 1;
            const unop = try allocator.allocator().create(ExpNode);
            unop.* = try parseExp(tokens, i, allocator);
-            return ExpNode { .Unop = UnopNode { .unopType = UnopType.Minus, .exp = unop } };
+            const endRegion = tokens[@min(i.*, tokens.len) - 1].region;
+            return ExpNode { .Unop = UnopNode { .unopType = UnopType.Minus, .exp = unop, .startRegion = startRegion, .endRegion = endRegion } };
        },
        TokenType.Hash =>
        {
            i.* += 1;
            const unop = try allocator.allocator().create(ExpNode);
            unop.* = try parseExp(tokens, i, allocator);
-            return ExpNode { .Unop = UnopNode { .unopType = UnopType.Length, .exp = unop } };
+            const endRegion = tokens[@min(i.*, tokens.len) - 1].region;
+            return ExpNode { .Unop = UnopNode { .unopType = UnopType.Length, .exp = unop, .startRegion = startRegion, .endRegion = endRegion } };
        },
        TokenType.Not =>
        {
            i.* += 1;
            const unop = try allocator.allocator().create(ExpNode);
            unop.* = try parseExp(tokens, i, allocator);
-            return ExpNode { .Unop = UnopNode { .unopType = UnopType.LogicalNot, .exp = unop } };
+            const endRegion = tokens[@min(i.*, tokens.len) - 1].region;
+            return ExpNode { .Unop = UnopNode { .unopType = UnopType.LogicalNot, .exp = unop, .startRegion = startRegion, .endRegion = endRegion } };
        },
        TokenType.Tilde =>
        {
            i.* += 1;
            const unop = try allocator.allocator().create(ExpNode);
            unop.* = try parseExp(tokens, i, allocator);
-            return ExpNode { .Unop = UnopNode { .unopType = UnopType.BinaryNot, .exp = unop } };
+            const endRegion = tokens[@min(i.*, tokens.len) - 1].region;
+            return ExpNode { .Unop = UnopNode { .unopType = UnopType.BinaryNot, .exp = unop, .startRegion = startRegion, .endRegion = endRegion } };
        },
        else =>
        {
@@ -1938,6 +2062,7 @@ fn parseExpPrecedence(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllo
    var currentLhs = lhs;
    while(i.* < tokens.len and isBinop(tokens[i.*]))
    {
+        const startRegion = tokens[i.*].region;
        const precedence = try getPrecedence(tokens[i.*]);
        if(precedence < minPrecedence)
        {
@@ -1954,7 +2079,7 @@ fn parseExpPrecedence(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllo
            rhs = try parseExpPrecedence(tokens, i, allocator, rhs, precedence + associativityBoost);
        }
        const binop = try allocator.allocator().create(BinopNode);
-        binop.* = BinopNode { .lhs = currentLhs, .op = op, .rhs = rhs };
+        binop.* = BinopNode { .lhs = currentLhs, .op = op, .rhs = rhs, .startRegion = startRegion, .endRegion = tokens[@min(i.*, tokens.len) - 1].region };
        currentLhs = ExpNode { .Binop = binop };
    }
    return currentLhs;
@@ -2022,13 +2147,15 @@ fn isBinop(token: Token) bool
 }
 fn parseExplist(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !ExplistNode
 {
-    var ret = ExplistNode { .exps = std.ArrayList(ExpNode).init(allocator.allocator()) };
+    const startRegion = tokens[@min(i.*, tokens.len) - 1].region;
+    var ret = ExplistNode { .exps = std.ArrayList(ExpNode).init(allocator.allocator()), .startRegion = startRegion, .endRegion = startRegion };
    try ret.exps.append(try parseExp(tokens, i, allocator));
    while(i.* < tokens.len and tokens[i.*].tokenType == TokenType.Comma)
    {
        i.* += 1;
        try ret.exps.append(try parseExp(tokens, i, allocator));
    }
+    ret.endRegion = tokens[@min(i.*, tokens.len) - 1].region;
    return ret;
 }
 fn parseFuncname(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !FuncnameNode
@@ -2037,7 +2164,8 @@ fn parseFuncname(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator
    {
        return error.ExpectedFuncname;
    }
-    var ret = FuncnameNode { .name = tokens[i.*].tokenData.string, .dottedNames = std.ArrayList([]u8).init(allocator.allocator()), .firstArg = null };
+    const startRange = tokens[i.*].region;
+    var ret = FuncnameNode { .name = tokens[i.*].tokenData.string, .dottedNames = std.ArrayList([]u8).init(allocator.allocator()), .firstArg = null, .startRegion = startRange, .endRegion = startRange };
    i.* += 1;
    while(i.* < tokens.len and tokens[i.*].tokenType == TokenType.Dot)
    {
@@ -2056,7 +2184,9 @@ fn parseFuncname(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator
            return error.ExpectedNameOfFirstArgInFuncname;
        }
        ret.firstArg = tokens[i.*].tokenData.string;
+        i.* += 1;
    }
+    ret.endRegion = tokens[@min(i.*, tokens.len) - 1].region;
    return ret;
 }
 fn parseFuncbody(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !FuncbodyNode
@@ -2065,6 +2195,7 @@ fn parseFuncbody(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator
    {
        return error.ExpectedRoundOpenStartingFuncbody;
    }
+    const startRegion = tokens[i.*].region;
    i.* += 1;
    if(i.* >= tokens.len)
    {
@@ -2084,7 +2215,7 @@ fn parseFuncbody(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator
        }
        i.* += 1;
    }
-    const ret = FuncbodyNode { .body = try parseBlock(tokens, i, allocator), .pars = pars };
+    const ret = FuncbodyNode { .body = try parseBlock(tokens, i, allocator), .pars = pars, .startRegion = startRegion, .endRegion = tokens[i.*].region };
    if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.End)
    {
        return error.ExpectedEndClosingFuncbody;
@@ -2098,16 +2229,18 @@ fn parseParlist(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator)
    {
        return error.ReachedEOFInParlist;
    }
+    const startRegion = tokens[i.*].region;
    if(tokens[i.*].tokenType == TokenType.DotDotDot)
    {
+        const endRegion = tokens[i.*].region;
        i.* += 1;
-        return ParlistNode { .names = std.ArrayList([]u8).init(allocator.allocator()), .hasVarargs = true };
+        return ParlistNode { .names = std.ArrayList([]u8).init(allocator.allocator()), .hasVarargs = true, .startRegion = startRegion, .endRegion = endRegion };
    }
    if(tokens[i.*].tokenType != TokenType.Name)
    {
        return error.ExpectedNameStartingParlist;
    }
-    var ret = ParlistNode { .names = std.ArrayList([]u8).init(allocator.allocator()), .hasVarargs = false };
+    var ret = ParlistNode { .names = std.ArrayList([]u8).init(allocator.allocator()), .hasVarargs = false, .startRegion = startRegion, .endRegion = startRegion };
    try ret.names.append(tokens[i.*].tokenData.string);
    i.* += 1;
    while(i.* < tokens.len and tokens[i.*].tokenType == TokenType.Comma)
@@ -2133,17 +2266,20 @@ fn parseParlist(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator)
            else => return error.UnexpectedTokenInParlistNameList,
        }
    }
+    ret.endRegion = tokens[@min(i.*, tokens.len) - 1].region;
    return ret;
 }
 fn parseAttnamelist(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !AttnamelistNode
 {
-    var ret = AttnamelistNode { .attnames = std.ArrayList(AttnameNode).init(allocator.allocator()) };
+    // TODO: What happens if this reaches EOF?
+    var ret = AttnamelistNode { .attnames = std.ArrayList(AttnameNode).init(allocator.allocator()), .startRegion = tokens[i.*].region, .endRegion = tokens[i.*].region };
    try ret.attnames.append(try parseAttname(tokens, i));
    while(i.* < tokens.len and tokens[i.*].tokenType == TokenType.Comma)
    {
        i.* += 1;
        try ret.attnames.append(try parseAttname(tokens, i));
    }
+    ret.endRegion = tokens[@min(i.*, tokens.len) - 1].region;
    return ret;
 }
 fn parseSuffixExp(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !SuffixexpNode
@@ -2153,6 +2289,7 @@ fn parseSuffixExp(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocato
    {
        return error.ReachedEOFInSuffixExp;
    }
+    const startRegion = tokens[i.*].region;
    const firstPart = try switch(tokens[i.*].tokenType)
    {
        TokenType.Name =>
@@ -2202,14 +2339,16 @@ fn parseSuffixExp(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocato
        },
        TokenType.Colon =>
        {
+            const argsFirstArgStartRegion = tokens[i.*].region;
            i.* += 1;
            if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.Name)
            {
                return error.ExpectedNameInArgsFirstArgSuffixExp;
            }
            const name = tokens[i.*].tokenData.string;
+            const argsFirstArgEndRegion = tokens[i.*].region;
            i.* += 1;
-            try suffixes.append(SuffixexpSuffix { .ArgsFirstArg = ArgsFirstArgNode { .name = name, .rest = try parseArgs(tokens, i, allocator) } });
+            try suffixes.append(SuffixexpSuffix { .ArgsFirstArg = ArgsFirstArgNode { .name = name, .rest = try parseArgs(tokens, i, allocator), .startRegion = argsFirstArgStartRegion, .endRegion = argsFirstArgEndRegion } });
        },
        TokenType.RoundOpen, TokenType.CurlyOpen, TokenType.StringLiteral =>
        {
@@ -2218,6 +2357,7 @@ fn parseSuffixExp(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocato
            else => break,
        }
    }
+    const endRegion = tokens[@min(i.*, tokens.len) - 1].region;
    const last = suffixes.getLastOrNull();
    if(last != null)
    {
@@ -2229,9 +2369,11 @@ fn parseSuffixExp(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocato
                const functioncall = try allocator.allocator().create(FunctioncallNode);
                functioncall.* = FunctioncallNode
                {
-                    .function = SuffixexpNode { .Normal = NormalSuffixNode { .firstPart = firstPart, .suffixes = suffixes } },
+                    .function = SuffixexpNode { .Normal = NormalSuffixNode { .firstPart = firstPart, .suffixes = suffixes, .startRegion = startRegion, .endRegion = endRegion } },
                    .args = args.*,
                    .objectArg = null,
+                    .startRegion = startRegion,
+                    .endRegion = endRegion,
                };
                return SuffixexpNode
                {
@@ -2244,9 +2386,11 @@ fn parseSuffixExp(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocato
                const functioncall = try allocator.allocator().create(FunctioncallNode);
                functioncall.* = FunctioncallNode
                {
-                    .function = SuffixexpNode { .Normal = NormalSuffixNode { .firstPart = firstPart, .suffixes = suffixes } },
+                    .function = SuffixexpNode { .Normal = NormalSuffixNode { .firstPart = firstPart, .suffixes = suffixes, .startRegion = startRegion, .endRegion = endRegion } },
                    .args = node.rest,
                    .objectArg = node.name,
+                    .startRegion = startRegion,
+                    .endRegion = endRegion,
                };
                return SuffixexpNode
                {
@@ -2256,11 +2400,12 @@ fn parseSuffixExp(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocato
            else => {}
        }
    }
-    return SuffixexpNode { .Normal = NormalSuffixNode { .firstPart = firstPart, .suffixes = suffixes } };
+    return SuffixexpNode { .Normal = NormalSuffixNode { .firstPart = firstPart, .suffixes = suffixes, .startRegion = startRegion, .endRegion = endRegion } };
 }
 fn parseVar(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !VarNode
 {
-    return suffixExpToVar(try parseSuffixExp(tokens, i, allocator));
+    const startRegion = tokens[i.*].region;
+    return suffixExpToVar(try parseSuffixExp(tokens, i, allocator), startRegion, tokens[@min(i.*, tokens.len) - 1].region);
 }
 fn parseAttname(tokens: []Token, i: *usize) !AttnameNode
 {
@@ -2269,8 +2414,9 @@ fn parseAttname(tokens: []Token, i: *usize) !AttnameNode
        return error.ExpectednameInAttribName;
    }
    const name = tokens[i.*].tokenData.string;
+    const startRegion = tokens[i.*].region;
    i.* += 1;
-    var ret = AttnameNode { .name = name, .attribute = null };
+    var ret = AttnameNode { .name = name, .attribute = null, .startRegion = startRegion, .endRegion = startRegion };
    if(i.* < tokens.len and tokens[i.*].tokenType == TokenType.Lt)
    {
        ret.attribute = tokens[i.*].tokenData.string;
@@ -2279,6 +2425,7 @@ fn parseAttname(tokens: []Token, i: *usize) !AttnameNode
        {
            return error.ExpectedAttributeInAttrib;
        }
+        ret.endRegion = tokens[i.*].region;
        i.* += 1;
        if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.Gt)
        {
@@ -2331,23 +2478,27 @@ fn parseTableconstructor(tokens: []Token, i: *usize, allocator: *std.heap.ArenaA
    {
        return error.ExpectedCurlyOpenOpeningTableconstructor;
    }
+    const startRegion = tokens[i.*].region;
    i.* += 1;
    if(i.* < tokens.len and tokens[i.*].tokenType == TokenType.CurlyClosed)
    {
+        const endRegion = tokens[i.*].region;
        i.* += 1;
-        return TableconstructorNode { .exps = null };
+        return TableconstructorNode { .exps = null, .startRegion = startRegion, .endRegion = endRegion };
    }
-    const ret = TableconstructorNode { .exps = try parseFieldlist(tokens, i, allocator) };
+    var ret = TableconstructorNode { .exps = try parseFieldlist(tokens, i, allocator), .startRegion = startRegion, .endRegion = startRegion };
    if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.CurlyClosed)
    {
        return error.ExpectedCurlyClosedClosingTableconstructor;
    }
+    ret.endRegion = tokens[i.*].region;
    i.* += 1;
    return ret;
 }
 fn parseFieldlist(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !FieldlistNode
 {
-    var ret = FieldlistNode { .exps = std.ArrayList(FieldNode).init(allocator.allocator()) };
+    const startRegion = tokens[@min(i.*, tokens.len) - 1].region;
+    var ret = FieldlistNode { .exps = std.ArrayList(FieldNode).init(allocator.allocator()), .startRegion = startRegion, .endRegion = startRegion };
    try ret.exps.append(try parseField(tokens, i, allocator));
    while(i.* < tokens.len and isFieldsep(tokens[i.*]))
    {
@@ -2358,6 +2509,7 @@ fn parseFieldlist(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocato
    {
        i.* += 1;
    }
+    ret.endRegion = tokens[@min(i.*, tokens.len) - 1].region;
    return ret;
 }
 fn isFieldsep(token: Token) bool
@@ -2370,6 +2522,7 @@ fn parseField(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !
    {
        return error.ReachedEOFInField;
    }
+    const startRegion = tokens[i.*].region;
    switch(tokens[i.*].tokenType)
    {
        TokenType.SquareOpen =>
@@ -2385,8 +2538,9 @@ fn parseField(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !
            {
                return error.ExpectedEqualsInIndexedFieldExpression;
            }
+            const endRegion = tokens[i.*].region;
            i.* += 1;
-            return FieldNode { .IndexedAssignment = IndexedAssignmentNode { .index = index, .rhs = try parseExp(tokens, i, allocator) } };
+            return FieldNode { .IndexedAssignment = IndexedAssignmentNode { .index = index, .rhs = try parseExp(tokens, i, allocator), .startRegion = startRegion, .endRegion = endRegion } };
        },
        TokenType.Name =>
        {
@@ -2394,14 +2548,14 @@ fn parseField(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !
            {
                const name = tokens[i.*].tokenData.string;
                i.* += 2;
-                return FieldNode { .Assignment = FieldAssignmentNode { .lhs = name, .rhs = try parseExp(tokens, i, allocator) } };
+                return FieldNode { .Assignment = FieldAssignmentNode { .lhs = name, .rhs = try parseExp(tokens, i, allocator), .startRegion = startRegion, .endRegion = tokens[i.* - 1].region } };
            }
            return FieldNode { .Exp = try parseExp(tokens, i, allocator) };
        },
        else => return FieldNode { .Exp = try parseExp(tokens, i, allocator) },
    }
 }
-fn suffixExpToVar(suffixexp: SuffixexpNode) !VarNode
+fn suffixExpToVar(suffixexp: SuffixexpNode, startRegion: CodeRegion, endRegion: CodeRegion) !VarNode
 {
    var exp = suffixexp.Normal;
    if(exp.suffixes.items.len == 0)
@@ -2411,8 +2565,8 @@ fn suffixExpToVar(suffixexp: SuffixexpNode) !VarNode
    const last = exp.suffixes.pop();
    return switch(last)
    {
-        SuffixexpSuffix.Dot => |*name| VarNode { .Member = MemberVarNode { .name = name.*, .value = SuffixexpNode { .Normal = exp } } },
-        SuffixexpSuffix.Indexed => |*index| VarNode { .Indexed = IndexedVarNode { .index = index.*, .value = SuffixexpNode { .Normal = exp } } },
+        SuffixexpSuffix.Dot => |*name| VarNode { .Member = MemberVarNode { .name = name.*, .value = SuffixexpNode { .Normal = exp }, .startRegion = startRegion, .endRegion = endRegion } },
+        SuffixexpSuffix.Indexed => |*index| VarNode { .Indexed = IndexedVarNode { .index = index.*, .value = SuffixexpNode { .Normal = exp }, .startRegion = startRegion, .endRegion = endRegion } },
        else => error.ExpectedDotOrIndexedSuffixWhenConvertingSuffixExpToVar,
    };
 }
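Nearly every parser hunk above follows the same region-threading pattern: record the opening token's region before advancing, record the closing token's region before consuming it, and store both on the resulting node. A self-contained sketch of the idea, using stand-in types rather than the commit's actual code:

const std = @import("std");

// Stand-ins mirroring the shapes used in parser.zig and types.zig; this is
// an illustration of the pattern, not the project's definitions.
const CodeLocation = struct { line: usize, col: usize };
const CodeRegion = struct { start: ?CodeLocation, length: usize };
const Token = struct { region: CodeRegion };
const SpanNode = struct { startRegion: CodeRegion, endRegion: CodeRegion };

fn parseSpan(tokens: []const Token, i: *usize) SpanNode
{
    const startRegion = tokens[i.*].region; // e.g. the `while` keyword
    i.* += 1;
    // ... the body would be parsed here, advancing i further ...
    const endRegion = tokens[i.*].region; // e.g. the matching `end`
    i.* += 1;
    return SpanNode { .startRegion = startRegion, .endRegion = endRegion };
}

pub fn main() void
{
    var i: usize = 0;
    const tokens = [_]Token
    {
        .{ .region = .{ .start = .{ .line = 1, .col = 1 }, .length = 5 } },
        .{ .region = .{ .start = .{ .line = 1, .col = 7 }, .length = 3 } },
    };
    const node = parseSpan(&tokens, &i);
    std.debug.print("{any} - {any}\n", .{node.startRegion, node.endRegion});
}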

src/tokenizer.zig

@ -1,5 +1,7 @@
const types = @import("types.zig"); const types = @import("types.zig");
const std = @import("std"); const std = @import("std");
const CodeRegion = @import("types.zig").CodeRegion;
const CodeLocation = @import("types.zig").CodeLocation;
pub const TokenType = enum pub const TokenType = enum
{ {
@ -29,18 +31,7 @@ pub const Token = struct
{ {
tokenType: TokenType, tokenType: TokenType,
tokenData: TokenData, tokenData: TokenData,
location: Location, region: CodeRegion,
};
pub const Location = struct
{
start: ?Point,
length: usize,
};
pub const Point = struct
{
line: usize,
col: usize,
}; };
const TokenizerState = enum const TokenizerState = enum
@ -72,38 +63,38 @@ const TokenizerState = enum
Function, Function,
}; };
fn tokenizeUpdateIndexAndState(lastIndex: *?usize, index: ?usize, state: *TokenizerState, newState: TokenizerState, location: *Location) void fn tokenizeUpdateIndexAndState(lastIndex: *?usize, index: ?usize, state: *TokenizerState, newState: TokenizerState, region: *CodeRegion) void
{ {
lastIndex.* = index; lastIndex.* = index;
state.* = newState; state.* = newState;
if(index == null) if(index == null)
{ {
location.*.start = null; region.*.start = null;
location.*.length = 0; region.*.length = 0;
} }
else else
{ {
if(location.*.start == null) if(region.*.start == null)
{ {
// TODO: There is no line/col info here and plumbing it to here would be pain. // TODO: There is no line/col info here and plumbing it to here would be pain.
location.*.start = Point { .col = 0, .line = 0 }; region.*.start = CodeLocation { .col = 0, .line = 0 };
} }
location.*.length += 1; region.*.length += 1;
} }
} }
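A sketch of the bookkeeping above, assuming the CodeRegion and CodeLocation shapes described earlier: a non-null index extends the current region by one character, while a null index, which is passed when a token has just been emitted, resets it.

const std = @import("std");

const CodeLocation = struct { line: usize, col: usize };
const CodeRegion = struct { start: ?CodeLocation, length: usize };

// The same grow-or-reset logic as tokenizeUpdateIndexAndState, minus the
// tokenizer state threading.
fn update(region: *CodeRegion, index: ?usize) void
{
    if(index == null)
    {
        region.start = null;
        region.length = 0;
        return;
    }
    if(region.start == null)
    {
        region.start = CodeLocation { .line = 0, .col = 0 };
    }
    region.length += 1;
}

test "region grows per character and resets on emit"
{
    var region = CodeRegion { .start = null, .length = 0 };
    update(&region, 0); // first character of a token
    update(&region, 1); // second character
    try std.testing.expectEqual(@as(usize, 2), region.length);
    update(&region, null); // token emitted, region reset
    try std.testing.expectEqual(@as(usize, 0), region.length);
}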
fn tokenizeTerminalBase(lastIndex: *?usize, index: ?usize, tokenType: *?TokenType, state: *TokenizerState, newTokenType: ?TokenType, newState: TokenizerState, location: *Location) void fn tokenizeTerminalBase(lastIndex: *?usize, index: ?usize, tokenType: *?TokenType, state: *TokenizerState, newTokenType: ?TokenType, newState: TokenizerState, region: *CodeRegion) void
{ {
tokenizeUpdateIndexAndState(lastIndex, index, state, newState, location); tokenizeUpdateIndexAndState(lastIndex, index, state, newState, region);
tokenType.* = newTokenType; tokenType.* = newTokenType;
} }
fn tokenizeTerminalStr(lastIndex: *?usize, index: usize, tokenType: *?TokenType, state: *TokenizerState, newTokenType: ?TokenType, newState: TokenizerState, tokenStr: *std.ArrayList(u8), ch: u8, location: *Location) !void fn tokenizeTerminalStr(lastIndex: *?usize, index: usize, tokenType: *?TokenType, state: *TokenizerState, newTokenType: ?TokenType, newState: TokenizerState, tokenStr: *std.ArrayList(u8), ch: u8, region: *CodeRegion) !void
{ {
tokenizeTerminalBase(lastIndex, index, tokenType, state, newTokenType, newState, location); tokenizeTerminalBase(lastIndex, index, tokenType, state, newTokenType, newState, region);
try tokenStr.append(ch); try tokenStr.append(ch);
} }
fn tokenizeTerminalIntNum(lastIndex: *?usize, index: usize, tokenType: *?TokenType, state: *TokenizerState, newTokenType: TokenType, newState: TokenizerState, tokenNumeral: *?types.Numeral, ch: u8, location: *Location) !void fn tokenizeTerminalIntNum(lastIndex: *?usize, index: usize, tokenType: *?TokenType, state: *TokenizerState, newTokenType: TokenType, newState: TokenizerState, tokenNumeral: *?types.Numeral, ch: u8, region: *CodeRegion) !void
{ {
tokenizeTerminalBase(lastIndex, index, tokenType, state, newTokenType, newState, location); tokenizeTerminalBase(lastIndex, index, tokenType, state, newTokenType, newState, region);
if(!std.ascii.isDigit(ch)) if(!std.ascii.isDigit(ch))
{ {
return error.NoDigit; return error.NoDigit;
@ -122,16 +113,16 @@ fn tokenizeTerminalIntNum(lastIndex: *?usize, index: usize, tokenType: *?TokenTy
} }
} }
} }
fn tokenizeTerminalNoToken(lastIndex: *?usize, index: usize, state: *TokenizerState, newState: TokenizerState, tokenStr: *std.ArrayList(u8), ch: u8, location: *Location) !void fn tokenizeTerminalNoToken(lastIndex: *?usize, index: usize, state: *TokenizerState, newState: TokenizerState, tokenStr: *std.ArrayList(u8), ch: u8, region: *CodeRegion) !void
{ {
tokenizeUpdateIndexAndState(lastIndex, index, state, newState, location); tokenizeUpdateIndexAndState(lastIndex, index, state, newState, region);
try tokenStr.*.append(ch); try tokenStr.*.append(ch);
} }
fn tokenizeBacktrack(lastIndex: *?usize, index: *usize, tokens: *std.ArrayList(Token), tokenType: *?TokenType, tokenStr: *std.ArrayList(u8), tokenNumeral: *?types.Numeral, state: *TokenizerState, allocator: std.mem.Allocator, location: *Location) !void fn tokenizeBacktrack(lastIndex: *?usize, index: *usize, tokens: *std.ArrayList(Token), tokenType: *?TokenType, tokenStr: *std.ArrayList(u8), tokenNumeral: *?types.Numeral, state: *TokenizerState, allocator: std.mem.Allocator, region: *CodeRegion) !void
{ {
try tokenizeBacktrackCustomToken(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, tokenType.*.?, allocator, location); try tokenizeBacktrackCustomToken(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, tokenType.*.?, allocator, region);
} }
fn tokenizeBacktrackCustomToken(lastIndex: *?usize, index: *usize, tokens: *std.ArrayList(Token), tokenType: *?TokenType, tokenStr: *std.ArrayList(u8), tokenNumeral: *?types.Numeral, state: *TokenizerState, newTokenType: TokenType, allocator: std.mem.Allocator, location: *Location) !void fn tokenizeBacktrackCustomToken(lastIndex: *?usize, index: *usize, tokens: *std.ArrayList(Token), tokenType: *?TokenType, tokenStr: *std.ArrayList(u8), tokenNumeral: *?types.Numeral, state: *TokenizerState, newTokenType: TokenType, allocator: std.mem.Allocator, region: *CodeRegion) !void
{ {
if(lastIndex.* == null or tokenType.* == null) if(lastIndex.* == null or tokenType.* == null)
{ {
@ -141,32 +132,32 @@ fn tokenizeBacktrackCustomToken(lastIndex: *?usize, index: *usize, tokens: *std.
{ {
const content = try allocator.alloc(u8, tokenStr.*.items.len); const content = try allocator.alloc(u8, tokenStr.*.items.len);
@memcpy(content, tokenStr.*.items); @memcpy(content, tokenStr.*.items);
try tokens.append(Token { .tokenType = newTokenType, .tokenData = TokenData { .string = content }, .location = location.* }); try tokens.append(Token { .tokenType = newTokenType, .tokenData = TokenData { .string = content }, .region = region.* });
} }
else else
{ {
try tokens.append(Token { .tokenType = newTokenType, .location = location.*, .tokenData = if(tokenType.*.? == TokenType.Numeral) TokenData { .numeral = tokenNumeral.*.? } try tokens.append(Token { .tokenType = newTokenType, .region = region.*, .tokenData = if(tokenType.*.? == TokenType.Numeral) TokenData { .numeral = tokenNumeral.*.? }
else TokenData.none else TokenData.none
}); });
} }
tokenNumeral.* = null; tokenNumeral.* = null;
index.* = lastIndex.*.?; index.* = lastIndex.*.?;
tokenStr.*.clearAndFree(); tokenStr.*.clearAndFree();
// location is reset in tokenizeTerminalBase since null is passed as the index // region is reset in tokenizeTerminalBase since null is passed as the index
tokenizeTerminalBase(lastIndex, null, tokenType, state, null, TokenizerState.Start, location); tokenizeTerminalBase(lastIndex, null, tokenType, state, null, TokenizerState.Start, region);
} }
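// In effect, lastIndex/index implement maximal munch: lastIndex marks where
// the most recent complete token ended, so when the scan later hits a
// character that fits no longer token, index is rewound to that spot, the
// remembered token is appended, and scanning restarts in TokenizerState.Start.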
fn tokenizeAlphanumericNonstart(lastIndex: *?usize, index: *usize, tokens: *std.ArrayList(Token), tokenType: *?TokenType, tokenStr: *std.ArrayList(u8), tokenNumeral: *?types.Numeral, state: *TokenizerState, ch: u8, newTokenType: TokenType, allocator: std.mem.Allocator, location: *Location) !void fn tokenizeAlphanumericNonstart(lastIndex: *?usize, index: *usize, tokens: *std.ArrayList(Token), tokenType: *?TokenType, tokenStr: *std.ArrayList(u8), tokenNumeral: *?types.Numeral, state: *TokenizerState, ch: u8, newTokenType: TokenType, allocator: std.mem.Allocator, region: *CodeRegion) !void
{ {
if(std.ascii.isAlphanumeric(ch) or ch == '_') if(std.ascii.isAlphanumeric(ch) or ch == '_')
{ {
try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.Name, tokenStr, ch, location); try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.Name, tokenStr, ch, region);
} }
else else
{ {
try tokenizeBacktrackCustomToken(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, newTokenType, allocator, location); try tokenizeBacktrackCustomToken(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, newTokenType, allocator, region);
} }
} }
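tokenizeAlphanumericNonstart is what turns the keyword states below back into plain identifiers: as long as identifier characters keep coming, the scanner stays in the Name state, and only the first non-identifier character emits the pending keyword token. A toy sketch of the end result (not the real tokenizer, which streams single characters through the state machine):

const std = @import("std");

fn classify(word: []const u8) enum { And, While, Name }
{
    if(std.mem.eql(u8, word, "and")) return .And;
    if(std.mem.eql(u8, word, "while")) return .While;
    return .Name;
}

test "extending a keyword demotes it to a Name"
{
    try std.testing.expectEqual(classify("and"), .And);
    // One extra identifier character and the And state falls back to Name.
    try std.testing.expectEqual(classify("andy"), .Name);
}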
fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usize, tokenType: *?TokenType, tokenStr: *std.ArrayList(u8), tokenNumeral: *?types.Numeral, tokens: *std.ArrayList(Token), longBracketLevel: *u32, location: *Location, allocator: std.mem.Allocator) !void fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usize, tokenType: *?TokenType, tokenStr: *std.ArrayList(u8), tokenNumeral: *?types.Numeral, tokens: *std.ArrayList(Token), longBracketLevel: *u32, region: *CodeRegion, allocator: std.mem.Allocator) !void
{ {
switch(state.*) switch(state.*)
{ {
@ -174,44 +165,44 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
{ {
switch(ch) switch(ch)
{ {
'-' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Minus, TokenizerState.Minus, location), '-' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Minus, TokenizerState.Minus, region),
',' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Comma, TokenizerState.Comma, location), ',' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Comma, TokenizerState.Comma, region),
'=' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Equals, TokenizerState.Equals, location), '=' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Equals, TokenizerState.Equals, region),
'(' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.RoundOpen, TokenizerState.RoundOpen, location), '(' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.RoundOpen, TokenizerState.RoundOpen, region),
')' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.RoundClosed, TokenizerState.RoundClosed, location), ')' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.RoundClosed, TokenizerState.RoundClosed, region),
'.' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Dot, TokenizerState.Dot, location), '.' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Dot, TokenizerState.Dot, region),
':' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Colon, TokenizerState.Colon, location), ':' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Colon, TokenizerState.Colon, region),
'{' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.CurlyOpen, TokenizerState.CurlyOpen, location), '{' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.CurlyOpen, TokenizerState.CurlyOpen, region),
'}' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.CurlyClosed, TokenizerState.CurlyClosed, location), '}' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.CurlyClosed, TokenizerState.CurlyClosed, region),
'[' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.SquareOpen, TokenizerState.SquareOpen, location), '[' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.SquareOpen, TokenizerState.SquareOpen, region),
']' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.SquareClosed, TokenizerState.SquareClosed, location), ']' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.SquareClosed, TokenizerState.SquareClosed, region),
'+' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Plus, TokenizerState.Plus, location), '+' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Plus, TokenizerState.Plus, region),
'~' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Tilde, TokenizerState.Tilde, location), '~' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Tilde, TokenizerState.Tilde, region),
'>' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Gt, TokenizerState.Gt, location), '>' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Gt, TokenizerState.Gt, region),
'<' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Lt, TokenizerState.Lt, location), '<' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Lt, TokenizerState.Lt, region),
'#' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Hash, TokenizerState.Hash, location), '#' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Hash, TokenizerState.Hash, region),
'|' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Pipe, TokenizerState.Pipe, location), '|' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Pipe, TokenizerState.Pipe, region),
'&' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Ampersand, TokenizerState.Ampersand, location), '&' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Ampersand, TokenizerState.Ampersand, region),
'%' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Percent, TokenizerState.Percent, location), '%' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Percent, TokenizerState.Percent, region),
'*' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Star, TokenizerState.Star, location), '*' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Star, TokenizerState.Star, region),
'/' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Slash, TokenizerState.Slash, location), '/' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Slash, TokenizerState.Slash, region),
';' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Semicolon, TokenizerState.Semicolon, location), ';' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Semicolon, TokenizerState.Semicolon, region),
'^' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Caret, TokenizerState.Caret, location), '^' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Caret, TokenizerState.Caret, region),
'a' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.A, tokenStr, ch, location), 'a' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.A, tokenStr, ch, region),
'b' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.B, tokenStr, ch, location), 'b' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.B, tokenStr, ch, region),
'd' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.D, tokenStr, ch, location), 'd' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.D, tokenStr, ch, region),
'e' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.E, tokenStr, ch, location), 'e' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.E, tokenStr, ch, region),
'f' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.F, tokenStr, ch, location), 'f' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.F, tokenStr, ch, region),
'i' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.I, tokenStr, ch, location), 'i' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.I, tokenStr, ch, region),
'g' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.G, tokenStr, ch, location), 'g' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.G, tokenStr, ch, region),
'l' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.L, tokenStr, ch, location), 'l' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.L, tokenStr, ch, region),
'n' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.N, tokenStr, ch, location), 'n' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.N, tokenStr, ch, region),
'o' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.O, tokenStr, ch, location), 'o' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.O, tokenStr, ch, region),
'r' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.R, tokenStr, ch, location), 'r' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.R, tokenStr, ch, region),
't' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.T, tokenStr, ch, location), 't' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.T, tokenStr, ch, region),
'u' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.U, tokenStr, ch, location), 'u' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.U, tokenStr, ch, region),
'w' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.W, tokenStr, ch, location), 'w' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.W, tokenStr, ch, region),
'0' => try tokenizeTerminalIntNum(lastIndex, index.*, tokenType, state, TokenType.Numeral, TokenizerState.Zero, tokenNumeral, ch, location), '0' => try tokenizeTerminalIntNum(lastIndex, index.*, tokenType, state, TokenType.Numeral, TokenizerState.Zero, tokenNumeral, ch, region),
'"' => '"' =>
{ {
tokenType.* = null; tokenType.* = null;
@ -230,11 +221,11 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
} }
else if(std.ascii.isAlphabetic(ch) or ch == '_') else if(std.ascii.isAlphabetic(ch) or ch == '_')
{ {
try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.Name, tokenStr, ch, location); try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.Name, tokenStr, ch, region);
} }
else if(std.ascii.isDigit(ch)) else if(std.ascii.isDigit(ch))
{ {
try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Numeral, TokenizerState.Name, tokenStr, ch, location); try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Numeral, TokenizerState.Name, tokenStr, ch, region);
} }
else else
{ {
@ -249,7 +240,7 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
switch(ch) switch(ch)
{ {
'\\' => state.* = TokenizerState.QuoteBackslash, '\\' => state.* = TokenizerState.QuoteBackslash,
'"' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.StringLiteral, TokenizerState.String, location), '"' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.StringLiteral, TokenizerState.String, region),
else => try tokenStr.*.append(ch), else => try tokenStr.*.append(ch),
} }
}, },
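// Note the two-step emit for string literals: the closing quote only moves
// the scanner into TokenizerState.String; the actual StringLiteral token is
// appended one character later, when the String state backtracks through
// tokenizeBacktrackCustomToken.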
@ -319,7 +310,7 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
switch(ch) switch(ch)
{ {
'\\' => state.* = TokenizerState.QuoteBackslash, '\\' => state.* = TokenizerState.QuoteBackslash,
'"' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.StringLiteral, TokenizerState.String, location), '"' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.StringLiteral, TokenizerState.String, region),
else => else =>
{ {
if(!std.ascii.isWhitespace(ch)) if(!std.ascii.isWhitespace(ch))
@ -340,7 +331,7 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
switch(ch) switch(ch)
{ {
'\\' => state.* = TokenizerState.SingleQuoteBackslash, '\\' => state.* = TokenizerState.SingleQuoteBackslash,
'\'' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.StringLiteral, TokenizerState.String, location), '\'' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.StringLiteral, TokenizerState.String, region),
else => try tokenStr.append(ch), else => try tokenStr.append(ch),
} }
}, },
@ -410,7 +401,7 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
switch(ch) switch(ch)
{ {
'\\' => state.* = TokenizerState.SingleQuoteBackslash, '\\' => state.* = TokenizerState.SingleQuoteBackslash,
'\'' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.StringLiteral, TokenizerState.String, location), '\'' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.StringLiteral, TokenizerState.String, region),
else => else =>
{ {
if(!std.ascii.isWhitespace(ch)) if(!std.ascii.isWhitespace(ch))
@ -426,8 +417,8 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
} }
} }
}, },
TokenizerState.String => try tokenizeBacktrackCustomToken(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, TokenType.StringLiteral, allocator, location), TokenizerState.String => try tokenizeBacktrackCustomToken(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, TokenType.StringLiteral, allocator, region),
TokenizerState.Name => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), TokenizerState.Name => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
TokenizerState.Zero => TokenizerState.Zero =>
{ {
switch(ch) switch(ch)
@ -457,7 +448,7 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
} }
else else
{ {
try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location); try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, region);
} }
} }
} }
@ -479,7 +470,7 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
} }
else else
{ {
try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location); try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, region);
} }
}, },
TokenizerState.HexNumber => TokenizerState.HexNumber =>
@ -509,7 +500,7 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
} }
else else
{ {
try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location); try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, region);
} }
} }
} }
@ -543,7 +534,7 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
} }
else else
{ {
try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location); try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, region);
} }
} }
@ -556,79 +547,79 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
TokenizerState.SquareClosed, TokenizerState.Pipe, TokenizerState.Ampersand, TokenizerState.SquareClosed, TokenizerState.Pipe, TokenizerState.Ampersand,
TokenizerState.Percent, TokenizerState.Star, TokenizerState.Semicolon, TokenizerState.Percent, TokenizerState.Star, TokenizerState.Semicolon,
TokenizerState.Caret, TokenizerState.DotDotDot, TokenizerState.GtGt, TokenizerState.Caret, TokenizerState.DotDotDot, TokenizerState.GtGt,
TokenizerState.LtLt, TokenizerState.SlashSlash => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location), TokenizerState.LtLt, TokenizerState.SlashSlash => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, region),
TokenizerState.Tilde => TokenizerState.Tilde =>
{ {
switch(ch) switch(ch)
{ {
'=' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.TildeEquals, TokenizerState.TildeEquals, location), '=' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.TildeEquals, TokenizerState.TildeEquals, region),
else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location), else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, region),
} }
}, },
TokenizerState.Gt => TokenizerState.Gt =>
{ {
switch (ch) switch (ch)
{ {
'>' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.GtGt, TokenizerState.GtGt, location), '>' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.GtGt, TokenizerState.GtGt, region),
'=' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.GtEquals, TokenizerState.GtEquals, location), '=' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.GtEquals, TokenizerState.GtEquals, region),
else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location), else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, region),
} }
}, },
TokenizerState.Lt => TokenizerState.Lt =>
{ {
switch(ch) switch(ch)
{ {
'<' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.LtLt, TokenizerState.LtLt, location), '<' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.LtLt, TokenizerState.LtLt, region),
'=' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.LtEquals, TokenizerState.LtEquals, location), '=' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.LtEquals, TokenizerState.LtEquals, region),
else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location), else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, region),
} }
}, },
TokenizerState.Slash => TokenizerState.Slash =>
{ {
switch(ch) switch(ch)
{ {
'/' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.SlashSlash, TokenizerState.SlashSlash, location), '/' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.SlashSlash, TokenizerState.SlashSlash, region),
else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location), else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, region),
} }
}, },
TokenizerState.Dot => TokenizerState.Dot =>
{ {
switch(ch) switch(ch)
{ {
'.' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.DotDot, TokenizerState.DotDot, location), '.' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.DotDot, TokenizerState.DotDot, region),
else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location), else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, region),
} }
}, },
TokenizerState.DotDot => TokenizerState.DotDot =>
{ {
switch(ch) switch(ch)
{ {
'.' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.DotDotDot, TokenizerState.DotDotDot, location), '.' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.DotDotDot, TokenizerState.DotDotDot, region),
else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location), else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, region),
} }
}, },
TokenizerState.Colon => TokenizerState.Colon =>
{ {
switch(ch) switch(ch)
{ {
':' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.ColonColon, TokenizerState.ColonColon, location), ':' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.ColonColon, TokenizerState.ColonColon, region),
else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location), else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, region),
} }
}, },
TokenizerState.Equals => TokenizerState.Equals =>
{ {
switch(ch) switch(ch)
{ {
'=' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.EqualsEquals, TokenizerState.EqualsEquals, location), '=' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.EqualsEquals, TokenizerState.EqualsEquals, region),
else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location), else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, region),
} }
}, },
TokenizerState.Minus => TokenizerState.Minus =>
{ {
switch(ch) switch(ch)
{ {
'-' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, null, TokenizerState.SmallCommentStart, location), '-' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, null, TokenizerState.SmallCommentStart, region),
else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location), else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, region),
} }
}, },
TokenizerState.SmallCommentStart => TokenizerState.SmallCommentStart =>
@ -716,486 +707,486 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
{ {
switch(ch) switch(ch)
{ {
'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.An, tokenStr, ch, location), 'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.An, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
} }
}, },
TokenizerState.An => TokenizerState.An =>
{ {
switch(ch) switch(ch)
{ {
'd' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.And, tokenStr, ch, location), 'd' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.And, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
} }
}, },
TokenizerState.And => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.And, allocator, location), TokenizerState.And => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.And, allocator, region),
TokenizerState.W => TokenizerState.W =>
{ {
switch(ch) switch(ch)
{ {
'h' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Wh, tokenStr, ch, location), 'h' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Wh, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
} }
}, },
TokenizerState.Wh => TokenizerState.Wh =>
{ {
switch(ch) switch(ch)
{ {
'i' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Whi, tokenStr, ch, location), 'i' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Whi, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
} }
}, },
TokenizerState.Whi => TokenizerState.Whi =>
{ {
switch(ch) switch(ch)
{ {
'l' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Whil, tokenStr, ch, location), 'l' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Whil, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
} }
}, },
TokenizerState.Whil => TokenizerState.Whil =>
{ {
switch(ch) switch(ch)
{ {
'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.While, tokenStr, ch, location), 'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.While, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
} }
}, },
TokenizerState.While => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.While, allocator, location), TokenizerState.While => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.While, allocator, region),
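// Worked trace for the chain above: the input "while" advances
// W -> Wh -> Whi -> Whil -> While, and the next non-identifier character
// emits TokenType.While; an input like "whilst" instead deviates at the
// 's' and falls back to collecting a TokenType.Name.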
TokenizerState.B => TokenizerState.B =>
{ {
switch(ch) switch(ch)
{ {
'r' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Br, tokenStr, ch, location), 'r' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Br, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
} }
}, },
TokenizerState.Br => TokenizerState.Br =>
{ {
switch(ch) switch(ch)
{ {
'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Bre, tokenStr, ch, location), 'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Bre, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
} }
}, },
TokenizerState.Bre => TokenizerState.Bre =>
{ {
switch(ch) switch(ch)
{ {
'a' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Brea, tokenStr, ch, location), 'a' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Brea, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
} }
}, },
TokenizerState.Brea => TokenizerState.Brea =>
{ {
switch(ch) switch(ch)
{ {
'k' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Break, tokenStr, ch, location), 'k' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Break, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
} }
}, },
TokenizerState.Break => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Break, allocator, location), TokenizerState.Break => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Break, allocator, region),
TokenizerState.G => TokenizerState.G =>
{ {
switch(ch) switch(ch)
{ {
'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Go, tokenStr, ch, location), 'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Go, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
} }
}, },
TokenizerState.Go => TokenizerState.Go =>
{ {
switch(ch) switch(ch)
{ {
't' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Got, tokenStr, ch, location), 't' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Got, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
} }
}, },
TokenizerState.Got => TokenizerState.Got =>
{ {
switch(ch) switch(ch)
{ {
'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Goto, tokenStr, ch, location), 'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Goto, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
} }
}, },
TokenizerState.Goto => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Goto, allocator, location), TokenizerState.Goto => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Goto, allocator, region),
TokenizerState.R => TokenizerState.R =>
{ {
switch(ch) switch(ch)
{ {
'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Re, tokenStr, ch, location), 'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Re, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
} }
}, },
TokenizerState.Re => TokenizerState.Re =>
{ {
switch(ch) switch(ch)
{ {
't' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Ret, tokenStr, ch, location), 't' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Ret, tokenStr, ch, region),
'p' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Rep, tokenStr, ch, location), 'p' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Rep, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
} }
}, },
TokenizerState.Ret => TokenizerState.Ret =>
{ {
switch(ch) switch(ch)
{ {
'u' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Retu, tokenStr, ch, location), 'u' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Retu, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
} }
}, },
TokenizerState.Retu => TokenizerState.Retu =>
{ {
switch(ch) switch(ch)
{ {
'r' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Retur, tokenStr, ch, location), 'r' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Retur, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
} }
}, },
TokenizerState.Retur => TokenizerState.Retur =>
{ {
switch(ch) switch(ch)
{ {
'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Return, tokenStr, ch, location), 'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Return, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
} }
}, },
TokenizerState.Return => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Return, allocator, location), TokenizerState.Return => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Return, allocator, region),
TokenizerState.Rep => TokenizerState.Rep =>
{ {
switch(ch) switch(ch)
{ {
'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Repe, tokenStr, ch, location), 'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Repe, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
} }
}, },
TokenizerState.Repe => TokenizerState.Repe =>
{ {
switch(ch) switch(ch)
{ {
'a' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Repea, tokenStr, ch, location), 'a' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Repea, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
} }
}, },
TokenizerState.Repea => TokenizerState.Repea =>
{ {
switch(ch) switch(ch)
{ {
't' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Repeat, tokenStr, ch, location), 't' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Repeat, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
} }
}, },
TokenizerState.Repeat => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Repeat, allocator, location), TokenizerState.Repeat => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Repeat, allocator, region),
TokenizerState.N => TokenizerState.N =>
{ {
switch(ch) switch(ch)
{ {
'i' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Ni, tokenStr, ch, location), 'i' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Ni, tokenStr, ch, region),
'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.No, tokenStr, ch, location), 'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.No, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
} }
}, },
TokenizerState.No => TokenizerState.No =>
{ {
switch(ch) switch(ch)
{ {
't' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Not, tokenStr, ch, location), 't' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Not, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
} }
}, },
TokenizerState.Not => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Not, allocator, location), TokenizerState.Not => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Not, allocator, region),
TokenizerState.Ni => TokenizerState.Ni =>
{ {
switch(ch) switch(ch)
{ {
'l' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Nil, tokenStr, ch, location), 'l' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Nil, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
} }
}, },
TokenizerState.Nil => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Nil, allocator, location), TokenizerState.Nil => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Nil, allocator, region),
TokenizerState.T => TokenizerState.T =>
{ {
switch(ch) switch(ch)
{ {
'h' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Th, tokenStr, ch, location), 'h' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Th, tokenStr, ch, region),
'r' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Tr, tokenStr, ch, location), 'r' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Tr, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
} }
}, },
TokenizerState.Th => TokenizerState.Th =>
{ {
switch(ch) switch(ch)
{ {
'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.The, tokenStr, ch, location), 'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.The, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
} }
}, },
TokenizerState.The => TokenizerState.The =>
{ {
switch(ch) switch(ch)
{ {
'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Then, tokenStr, ch, location), 'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Then, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
} }
}, },
TokenizerState.Then => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Then, allocator, location), TokenizerState.Then => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Then, allocator, region),
TokenizerState.Tr => TokenizerState.Tr =>
{ {
switch(ch) switch(ch)
{ {
'u' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Tru, tokenStr, ch, location), 'u' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Tru, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
} }
}, },
TokenizerState.Tru => TokenizerState.Tru =>
{ {
switch(ch) switch(ch)
{ {
'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.True, tokenStr, ch, location), 'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.True, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
} }
}, },
TokenizerState.True => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.True, allocator, location), TokenizerState.True => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.True, allocator, region),
TokenizerState.E => TokenizerState.E =>
{ {
switch(ch) switch(ch)
{ {
'l' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.El, tokenStr, ch, location), 'l' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.El, tokenStr, ch, region),
'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.En, tokenStr, ch, location), 'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.En, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
} }
}, },
TokenizerState.En => TokenizerState.En =>
{ {
switch(ch) switch(ch)
{ {
'd' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.End, tokenStr, ch, location), 'd' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.End, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
} }
}, },
TokenizerState.End => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.End, allocator, location), TokenizerState.End => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.End, allocator, region),
TokenizerState.El => TokenizerState.El =>
{ {
switch(ch) switch(ch)
{ {
's' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Els, tokenStr, ch, location), 's' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Els, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
} }
}, },
TokenizerState.Els => TokenizerState.Els =>
{ {
switch(ch) switch(ch)
{ {
'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Else, tokenStr, ch, location), 'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Else, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location), else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
} }
}, },
TokenizerState.Else => TokenizerState.Else =>
{ {
switch(ch) switch(ch)
{ {
'i' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Elsei, tokenStr, ch, location), 'i' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Elsei, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Else, allocator, region),
}
},
TokenizerState.Elsei =>
{
switch(ch)
{
'f' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Elseif, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
}
},
TokenizerState.Elseif => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Elseif, allocator, region),
TokenizerState.O =>
{
switch(ch)
{
'r' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Or, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
}
},
TokenizerState.Or => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Or, allocator, region),
TokenizerState.D =>
{
switch(ch)
{
'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Do, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
}
},
TokenizerState.Do => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Do, allocator, region),
TokenizerState.I =>
{
switch(ch)
{
'f' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.If, tokenStr, ch, region),
'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.In, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
}
},
TokenizerState.In => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.In, allocator, region),
TokenizerState.If => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.If, allocator, region),
TokenizerState.F =>
{
switch(ch)
{
'a' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Fa, tokenStr, ch, region),
'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Fo, tokenStr, ch, region),
'u' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Fu, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
}
},
TokenizerState.Fu =>
{
switch(ch)
{
'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Fun, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
}
},
TokenizerState.Fun =>
{
switch(ch)
{
'c' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Func, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
}
},
TokenizerState.Func =>
{
switch(ch)
{
't' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Funct, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
}
},
TokenizerState.Funct =>
{
switch(ch)
{
'i' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Functi, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
}
},
TokenizerState.Functi =>
{
switch(ch)
{
'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Functio, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
}
},
TokenizerState.Functio =>
{
switch(ch)
{
'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Function, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
}
},
TokenizerState.Function => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Function, allocator, region),
TokenizerState.Fa =>
{
switch(ch)
{
'l' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Fal, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
}
},
TokenizerState.Fal =>
{
switch(ch)
{
's' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Fals, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
}
},
TokenizerState.Fals =>
{
switch(ch)
{
'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.False, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
}
},
TokenizerState.False => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.False, allocator, region),
TokenizerState.Fo =>
{
switch(ch)
{
'r' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.For, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
}
},
TokenizerState.For => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.For, allocator, region),
TokenizerState.L =>
{
switch(ch)
{
'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Lo, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
}
},
TokenizerState.Lo =>
{
switch(ch)
{
'c' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Loc, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
}
},
TokenizerState.Loc =>
{
switch(ch)
{
'a' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Loca, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
}
},
TokenizerState.Loca =>
{
switch(ch)
{
'l' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Local, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
}
},
TokenizerState.Local => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Local, allocator, region),
TokenizerState.U =>
{
switch(ch)
{
'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Un, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
}
},
TokenizerState.Un =>
{
switch(ch)
{
't' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Unt, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
}
},
TokenizerState.Unt =>
{
switch(ch)
{
'i' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Unti, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
}
},
TokenizerState.Unti =>
{
switch(ch)
{
'l' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Until, tokenStr, ch, region),
else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, region),
}
},
TokenizerState.Until => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Until, allocator, region),
else =>
{
std.debug.print("{}\n", .{state.*});
@@ -1215,24 +1206,24 @@ pub fn tokenize(fileContent: []u8, allocator: std.mem.Allocator) ![]Token
defer tokenStr.deinit();
var tokenNumeral: ?types.Numeral = null;
var longBracketLevel: u32 = 0;
var region = CodeRegion { .start = null, .length = 0 };
while(index < fileContent.len)
{
const ch = fileContent[index];
try tokenizeChar(&state, ch, &lastIndex, &index, &tokenType, &tokenStr, &tokenNumeral, &tokens, &longBracketLevel, &region, allocator);
if(region.start != null and region.start.?.col == 0 and region.start.?.line == 0)
{
region.start = calculatePoint(fileContent, index);
}
index += 1;
}
return tokens.toOwnedSlice();
}
fn calculatePoint(fileContent: []u8, index: usize) CodeLocation
{
var ret = CodeLocation { .col = 1, .line = 1 };
for(0..index) |i|
{
ret.col += 1;
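
The keyword states above form a hand-rolled trie: each partial-keyword state either advances on the single expected character or falls back to reading a plain Name, so "formula" is never mistaken for "for". A minimal test sketch of that fallback; hypothetical in that it assumes Token exposes a tokenType field and that whitespace separates the two tokens:

test "keyword prefixes fall back to Name"
{
    // The arena owns the token strings, so no per-token cleanup is needed.
    var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
    defer arena.deinit();
    var source = "for formula".*;
    const tokens = try tokenize(&source, arena.allocator());
    // "for" is the keyword; "formula" merely starts like one.
    try std.testing.expectEqual(TokenType.For, tokens[0].tokenType);
    try std.testing.expectEqual(TokenType.Name, tokens[1].tokenType);
}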
src/treewalker.zig (new file, 111 lines)
@@ -0,0 +1,111 @@
const std = @import("std");
const parser = @import("parser.zig");
const types = @import("types.zig");
pub fn interpret(root: parser.ChunkNode, allocator: std.mem.Allocator) !void
{
var _ENV = types.Table { .items = std.AutoArrayHashMap(types.Value, types.Value).init(allocator) };
try walkChunk(root, &_ENV, allocator);
}
fn walkChunk(node: parser.ChunkNode, environment: *types.Table, allocator: std.mem.Allocator) !void
{
try walkBlock(node.block, environment, allocator);
}
fn walkBlock(node: parser.BlockNode, environment: *types.Table, allocator: std.mem.Allocator) !void
{
for(node.stats.items) |stat|
{
try walkStat(stat, environment, allocator);
}
if(node.retstat) |retstat|
{
try walkRetstat(retstat, environment, allocator);
}
}
fn walkStat(node: parser.StatNode, environment: *types.Table, allocator: std.mem.Allocator) !void
{
switch(node)
{
.Assignment => |assignmentNode|
{
return try walkAssignmentNode(assignmentNode, environment, allocator);
},
else =>
{
std.debug.print("{any}\n", .{node});
return error.NotImplemented;
}
}
}
fn walkRetstat(node: parser.RetstatNode, environment: *types.Table, allocator: std.mem.Allocator) !void
{
_ = node;
_ = environment;
_ = allocator;
return error.NotImplemented;
}
fn walkAssignmentNode(node: parser.AssignmentNode, environment: *types.Table, allocator: std.mem.Allocator) !void
{
const results = try walkExplist(node.rhs, environment, allocator);
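// Scaffolding: the walked right-hand values and the target index stay
// unused until non-indexed assignment targets are implemented.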
var i: usize = 0;
_ = results;
_ = i;
for(node.lhs.vars.items) |variable|
{
switch(variable)
{
.Indexed => |indexedNode|
{
_ = indexedNode;
return error.NotImplemented;
},
else => return error.NotImplemented,
}
}
}
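// Walks every expression of an explist into a slice of values.
// The caller owns the returned slice and frees it with `allocator`.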
fn walkExplist(node: parser.ExplistNode, environment: *types.Table, allocator: std.mem.Allocator) ![]types.Value
{
var results = std.ArrayList(types.Value).init(allocator);
for(node.exps.items) |exp|
{
// walkExp takes a variadic flag; explists walked here are not inside a
// variadic function yet, so pass false.
try results.append(try walkExp(exp, environment, allocator, false));
}
return results.toOwnedSlice();
}
fn walkExp(node: parser.ExpNode, environment: *types.Table, allocator: std.mem.Allocator, isVariadicFunction: bool) !types.Value
{
// Unused until more prongs are implemented; discard them before the
// switch, since every prong below returns and nothing may follow it.
_ = environment;
_ = allocator;
switch(node)
{
.Nil => return types.Value.Nil,
.False => return types.Value { .Bool = false },
.True => return types.Value { .Bool = true },
.Numeral => |numeral| return types.Value { .Numeral = numeral },
.LiteralString => |string| return types.Value { .String = string },
.Varargs =>
{
if(isVariadicFunction)
{
return error.NotImplemented;
}
else
{
return error.UseVarargsOutsideVariadicFunction;
}
},
else =>
{
std.debug.print("{}\n", .{node});
return error.NotImplemented;
}
}
}
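
A sketch of how the literal prongs of walkExp behave once the variadic argument is threaded through. This is a hypothetical test; it lives in this file so it can reach the private walkExp, and it relies on the void ExpNode variants matched above:

test "literal expressions walk to values"
{
    var env = types.Table { .items = std.AutoArrayHashMap(types.Value, types.Value).init(std.testing.allocator) };
    defer env.items.deinit();
    // Literal expressions never touch the environment and never allocate.
    const nil = try walkExp(parser.ExpNode.Nil, &env, std.testing.allocator, false);
    try std.testing.expect(nil == .Nil);
    const boolean = try walkExp(parser.ExpNode.True, &env, std.testing.allocator, false);
    try std.testing.expect(boolean == .Bool and boolean.Bool);
}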
src/types.zig
@@ -1,5 +1,41 @@
const std = @import("std");
pub const Numeral = union(enum)
{
Integer: i64,
Float: f64,
};
pub const Table = struct
{
items: std.AutoArrayHashMap(Value, Value),
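// NOTE: the auto-hash context likely compares String keys by slice identity
// rather than by content, so Lua's string-key semantics will probably need
// a custom hash/eql context here eventually.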
pub fn insert(self: *Table, key: Value, value: Value) !void
{
try self.items.put(key, value);
}
pub fn get(self: *const Table, key: Value) !Value
{
return self.items.get(key) orelse Value.Nil;
}
};
pub const Value = union(enum)
{
Nil,
Bool: bool,
Numeral: Numeral,
String: []u8,
Table: *Table,
};
pub const CodeRegion = struct
{
start: ?CodeLocation,
length: usize,
};
pub const CodeLocation = struct
{
line: usize,
col: usize,
};
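
For reference, a small sketch of constructing each kind of Value; the one subtlety is that the String variant wants a mutable byte slice, so a string literal needs a mutable copy first:

test "constructing values"
{
    var text = "hi".*;
    const values = [_]Value
    {
        Value.Nil,
        Value { .Bool = true },
        Value { .Numeral = Numeral { .Integer = 42 } },
        Value { .String = &text },
    };
    try std.testing.expect(values[0] == .Nil);
    try std.testing.expect(values[3] == .String);
}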