Implement parsing and token locations

parent 721383a043
commit b00a99ab6a
build.zig

@@ -28,7 +28,10 @@ pub fn build(b: *std.Build) void {
         .source_file = .{ .path = "src/types.zig" }
     }));
     exe.addModule("tokenizer", b.addModule("tokenizer", .{
-        .source_file = .{ .path = "src/tokenizer.zig" }
+        .source_file = .{ .path = "src/tokenizer.zig" },
+    }));
+    exe.addModule("parser", b.addModule("parser", .{
+        .source_file = .{ .path = "src/parser.zig" },
     }));
 
     // This declares intent for the executable to be installed into the
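A note on the module wiring (the details below are my reading of the 0.11-era Zig build API, not something the diff states): `b.addModule` creates a named module rooted at the given source file, and `exe.addModule` makes it importable from the executable by name rather than by relative path. A minimal sketch of what the registration above enables:

    // Hypothetical consumer code, assuming the "parser" module registered above:
    const parser = @import("parser"); // resolves to src/parser.zig
    // parser.parse(...) is then available without a relative path

Interestingly, main.zig below switches to importing by file path (`@import("parser.zig")`), which works without any build.zig registration.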
src/main.zig (17 changed lines)
@@ -1,5 +1,6 @@
 const std = @import("std");
-const tokenize = @import("tokenizer").tokenize;
+const tokenize = @import("tokenizer.zig").tokenize;
+const parse = @import("parser.zig").parse;
 
 pub fn main() !void
 {
@@ -14,16 +15,6 @@ pub fn main() !void
     const content = try file.readToEndAlloc(allocator, 13000);
     defer allocator.free(content);
     const tokens = try tokenize(content, allocator);
-    //std.debug.print("tokens: {any}", .{tokens});
-    for(tokens) |token|
-    {
-        switch(token.tokenData)
-        {
-            .string => |*data| std.debug.print("string: {s} {*}\n", .{data.*, data.ptr}),
-            .numeral => |*data| std.debug.print("numeral: {any} {*}\n", .{data.*, data}),
-            .none => |*data| std.debug.print("none {*}\n", .{data})
-        }
-    }
     defer
     {
         var i: usize = 0;
@@ -41,4 +32,8 @@ pub fn main() !void
         }
         allocator.free(tokens);
     }
+    var parserAllocator = std.heap.ArenaAllocator.init(std.heap.page_allocator);
+    defer parserAllocator.deinit();
+    const root = try parse(tokens, &parserAllocator);
+    root.dump(0);
 }
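The arena allocator is a natural fit for AST construction: the parser allocates many small nodes and lists whose lifetimes all end together, so a single `deinit` on the arena releases the whole tree and no per-node cleanup is needed. The pattern, reduced to its core (same names as above):

    var parserAllocator = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer parserAllocator.deinit(); // frees every node parse() allocated, in one call
    const root = try parse(tokens, &parserAllocator);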
src/parser.zig (new file, 2418 lines)

@@ -0,0 +1,2418 @@
+const Token = @import("tokenizer.zig").Token;
+const TokenType = @import("tokenizer.zig").TokenType;
+const std = @import("std");
+const types = @import("types.zig");
+
+pub const ChunkNode = struct
+{
+    block: BlockNode,
+
+    pub fn dump(self: *const ChunkNode, indent: usize) void
+    {
+        std.debug.print("ChunkNode:\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("{{\n", .{});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("block: ", .{});
+        self.block.dump(indent + 1);
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("}}\n", .{});
+    }
+};
+
+pub const BlockNode = struct
+{
+    stats: std.ArrayList(StatNode),
+    retstat: ?RetstatNode,
+
+    fn dump(self: *const BlockNode, indent: usize) void
+    {
+        std.debug.print("BlockNode:\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("{{\n", .{});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("stats:\n", .{});
+        for (0..indent + 1) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("[\n", .{});
+        for(self.stats.items) |stat|
+        {
+            for (0..indent + 2) |_|
+            {
+                std.debug.print("\t", .{});
+            }
+            dumpStatNode(stat, indent + 2);
+        }
+        for (0..indent + 1) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("]\n", .{});
+        for (0..indent + 1) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("retstat: ", .{});
+        if(self.retstat == null)
+        {
+            std.debug.print("null\n", .{});
+        }
+        else
+        {
+            self.retstat.?.dump(indent + 1);
+        }
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("}}\n", .{});
+    }
+};
+const RetstatNode = struct
+{
+    values: ?ExplistNode,
+    fn dump(self: *const RetstatNode, indent: usize) void
+    {
+        std.debug.print("Retstat Node:\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("{{\n", .{});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("values: ", .{});
+        if(self.values == null)
+        {
+            std.debug.print("null\n", .{});
+        }
+        else
+        {
+            self.values.?.dump(indent + 1);
+        }
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("}}\n", .{});
+    }
+};
+
+const StatNode = union(enum)
+{
+    Semicolon,
+    Assignment: AssignmentNode,
+    Functioncall: FunctioncallNode,
+    Label: []u8,
+    Break,
+    Goto: []u8,
+    Do: BlockNode,
+    While: WhileNode,
+    Repeat: RepeatNode,
+    If: IfNode,
+    ForNumerical: ForNumericalNode,
+    ForGeneric: ForGenericNode,
+    Function: FunctionNode,
+    LocalFunction: LocalFunctionNode,
+    Local: LocalNode,
+};
+fn dumpStatNode(stat: StatNode, indent: usize) void
+{
+    switch(stat)
+    {
+        .Semicolon => std.debug.print("Semicolon\n", .{}),
+        .Assignment => |*node| node.dump(indent),
+        .Functioncall => |*node| node.dump(indent),
+        .Label => |*label| std.debug.print("Label: '{s}'\n", .{label.*}),
+        .Break => std.debug.print("Break\n", .{}),
+        .Goto => |*label| std.debug.print("Goto: '{s}'\n", .{label.*}),
+        .Do => |*node| node.dump(indent),
+        .While => |*node| node.dump(indent),
+        .Repeat => |*node| node.dump(indent),
+        .If => |*node| node.dump(indent),
+        .ForNumerical => |*node| node.dump(indent),
+        .ForGeneric => |*node| node.dump(indent),
+        .Function => |*node| node.dump(indent),
+        .LocalFunction => |*node| node.dump(indent),
+        .Local => |*node| node.dump(indent),
+    }
+}
+
+const AssignmentNode = struct
+{
+    lhs: VarlistNode, rhs: ExplistNode,
+    fn dump(self: *const AssignmentNode, indent: usize) void
+    {
+        std.debug.print("Assignment Node:\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("{{\n", .{});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("lhs: ", .{});
+        self.lhs.dump(indent + 1);
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("rhs: ", .{});
+        self.rhs.dump(indent + 1);
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("}}\n", .{});
+    }
+};
+const WhileNode = struct
+{
+    condition: ExpNode,
+    body: BlockNode,
+    fn dump(self: *const WhileNode, indent: usize) void
+    {
+        std.debug.print("While Node:\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("{{\n", .{});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("condition: ", .{});
+        dumpExpNode(self.condition, indent + 1);
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("body: ", .{});
+        self.body.dump(indent + 1);
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("}}\n", .{});
+    }
+};
+const RepeatNode = struct
+{
+    condition: ExpNode,
+    body: BlockNode,
+    fn dump(self: *const RepeatNode, indent: usize) void
+    {
+        std.debug.print("Repeat Node:\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("{{\n", .{});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("condition: ", .{});
+        dumpExpNode(self.condition, indent + 1);
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("body: ", .{});
+        self.body.dump(indent + 1);
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("}}\n", .{});
+    }
+};
+const IfNode = struct
+{
+    condition: ExpNode,
+    body: BlockNode,
+    elseifs: std.ArrayList(ElseifNode),
+    else_: ?BlockNode,
+    fn dump(self: *const IfNode, indent: usize) void
+    {
+        std.debug.print("If Node:\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("{{\n", .{});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("condition: ", .{});
+        dumpExpNode(self.condition, indent + 1);
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("body: ", .{});
+        self.body.dump(indent + 1);
+        for (0..indent + 1) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("elseifs:\n", .{});
+        for (0..indent + 1) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("[\n", .{});
+        for(self.elseifs.items) |elseif|
+        {
+            for (0..indent + 2) |_|
+            {
+                std.debug.print("\t", .{});
+            }
+            elseif.dump(indent + 2);
+        }
+        for (0..indent + 1) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("]\n", .{});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("else: ", .{});
+        if(self.else_ == null)
+        {
+            std.debug.print("null\n", .{});
+        }
+        else
+        {
+            self.else_.?.dump(indent + 1);
+        }
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("}}\n", .{});
+    }
+};
+const ForNumericalNode = struct
+{
+    variable: []u8,
+    start: ExpNode,
+    end: ExpNode,
+    change: ?ExpNode,
+    body: BlockNode,
+    fn dump(self: *const ForNumericalNode, indent: usize) void
+    {
+        std.debug.print("For Numerical Node:\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("{{\n", .{});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("variable: '{s}'\n", .{self.variable});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("start: ", .{});
+        dumpExpNode(self.start, indent + 1);
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("end: ", .{});
+        dumpExpNode(self.end, indent + 1);
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("change: ", .{});
+        if(self.change == null)
+        {
+            std.debug.print("null\n", .{});
+        }
+        else
+        {
+            dumpExpNode(self.change.?, indent + 1);
+        }
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("body: ", .{});
+        self.body.dump(indent + 1);
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("}}\n", .{});
+    }
+};
+const ForGenericNode = struct
+{
+    vars: std.ArrayList([]u8),
+    exps: ExplistNode,
+    body: BlockNode,
+    fn dump(self: *const ForGenericNode, indent: usize) void
+    {
+        std.debug.print("For Generic Node:\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("{{\n", .{});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("vars:\n", .{});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("[\n", .{});
+        for(self.vars.items) |v|
+        {
+            for (0..(indent + 2)) |_|
+            {
+                std.debug.print("\t", .{});
+            }
+            std.debug.print("'{s}'\n", .{v});
+        }
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("]\n", .{});
+        std.debug.print("exps: ", .{});
+        self.exps.dump(indent + 1);
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("body: ", .{});
+        self.body.dump(indent + 1);
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("}}\n", .{});
+    }
+};
+const FunctionNode = struct
+{
+    name: FuncnameNode,
+    body: FuncbodyNode,
+    fn dump(self: *const FunctionNode, indent: usize) void
+    {
+        std.debug.print("Function Node:\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("{{\n", .{});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("name: ", .{});
+        self.name.dump(indent + 1);
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("body: ", .{});
+        self.body.dump(indent + 1);
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("}}\n", .{});
+    }
+};
+const LocalFunctionNode = struct
+{
+    name: []u8,
+    body: FuncbodyNode,
+    fn dump(self: *const LocalFunctionNode, indent: usize) void
+    {
+        std.debug.print("Local Function Node:\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("{{\n", .{});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("name: '{s}'\n", .{self.name});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("body: ", .{});
+        self.body.dump(indent + 1);
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("}}\n", .{});
+    }
+
+};
+const LocalNode = struct
+{
+    attnames: AttnamelistNode,
+    values: ?ExplistNode,
+    fn dump(self: *const LocalNode, indent: usize) void
+    {
+        std.debug.print("Local Node:\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("{{\n", .{});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("attnames: ", .{});
+        self.attnames.dump(indent + 1);
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("values: ", .{});
+        if(self.values == null)
+        {
+            std.debug.print("null\n", .{});
+        }
+        else
+        {
+            self.values.?.dump(indent + 1);
+        }
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("}}\n", .{});
+    }
+};
+const FunctioncallNode = struct
+{
+    function: SuffixexpNode,
+    objectArg: ?[]u8,
+    args: ArgsNode,
+    fn dump(self: *const FunctioncallNode, indent: usize) void
+    {
+        std.debug.print("Functioncall Node:\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("{{\n", .{});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("function: ", .{});
+        dumpSuffixExpNode(self.function, indent + 1);
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("object arg: ", .{});
+        if(self.objectArg == null)
+        {
+            std.debug.print("null\n", .{});
+        }
+        else
+        {
+            std.debug.print("'{s}'\n", .{self.objectArg.?});
+        }
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("args: ", .{});
+        dumpArgsNode(self.args, indent + 1);
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("}}\n", .{});
+    }
+};
+const VarlistNode = struct
+{
+    vars: std.ArrayList(VarNode),
+    fn dump(self: *const VarlistNode, indent: usize) void
+    {
+        std.debug.print("Varlist Node:\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("{{\n", .{});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("vars:\n", .{});
+        for (0..indent + 1) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("[\n", .{});
+        for(self.vars.items) |item|
+        {
+            for (0..indent + 2) |_|
+            {
+                std.debug.print("\t", .{});
+            }
+            dumpVarNode(item, indent + 2);
+        }
+        for (0..indent + 1) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("]\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("}}\n", .{});
+    }
+};
+const ExplistNode = struct
+{
+    exps: std.ArrayList(ExpNode),
+    fn dump(self: *const ExplistNode, indent: usize) void
+    {
+        std.debug.print("Explist Node:\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("{{\n", .{});
+        for (0..indent + 1) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("exps:\n", .{});
+        for (0..indent + 1) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("[\n", .{});
+        for(self.exps.items) |item|
+        {
+            for (0..indent + 2) |_|
+            {
+                std.debug.print("\t", .{});
+            }
+            dumpExpNode(item, indent + 2);
+        }
+        for (0..indent + 1) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("]\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("}}\n", .{});
+    }
+};
+const ExpNode = union(enum)
+{
+    Nil,
+    False,
+    True,
+    Numeral: types.Numeral,
+    LiteralString: []u8,
+    Varargs,
+    Functiondef: FuncbodyNode,
+    Suffixexp: *SuffixexpNode,
+    Tableconstructor: TableconstructorNode,
+    Unop: UnopNode,
+    Binop: *BinopNode,
+};
+fn dumpExpNode(expNode: ExpNode, indent: usize) void
+{
+    switch(expNode)
+    {
+        .Nil => std.debug.print("Nil\n", .{}),
+        .False => std.debug.print("False\n", .{}),
+        .True => std.debug.print("True\n", .{}),
+        .Numeral => |*numeral| std.debug.print("{}\n", .{numeral.*}),
+        .LiteralString => |*string| std.debug.print("LiteralString: '{s}'\n", .{string.*}),
+        .Varargs => std.debug.print("Varargs", .{}),
+        .Functiondef => |*node| node.dump(indent),
+        .Suffixexp => |*node| dumpSuffixExpNode(node.*.*, indent),
+        .Tableconstructor => |*node| node.dump(indent),
+        .Unop => |*node| node.dump(indent),
+        .Binop => |*node| node.*.dump(indent),
+    }
+}
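ExpNode keeps its Suffixexp and Binop payloads behind pointers because SuffixexpNode and BinopNode in turn contain ExpNode fields; without that indirection the union's size would be infinite and Zig would reject the type. The same constraint in a reduced, hypothetical form:

    // Hypothetical reduced example, not part of the commit:
    const Expr = union(enum)
    {
        Literal: i64,
        Negate: *Expr, // a direct `Negate: Expr` field would give Expr infinite size
    };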
+const UnopNode = struct
+{
+    unopType: UnopType,
+    exp: *ExpNode,
+    fn dump(self: *const UnopNode, indent: usize) void
+    {
+        std.debug.print("Unop Node:\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("{{\n", .{});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("unop type: {}\n", .{self.unopType});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("exp: ", .{});
+        dumpExpNode(self.exp.*, indent + 1);
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("}}\n", .{});
+    }
+};
+const BinopNode = struct
+{
+    lhs: ExpNode,
+    op: BinopType,
+    rhs: ExpNode,
+    fn dump(self: *const BinopNode, indent: usize) void
+    {
+        std.debug.print("Binop Node:\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("{{\n", .{});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("lhs: ", .{});
+        dumpExpNode(self.lhs, indent + 1);
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("op: {}\n", .{self.op});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("rhs: ", .{});
+        dumpExpNode(self.rhs, indent + 1);
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("}}\n", .{});
+    }
+};
+const ElseifNode = struct
+{
+    condition: ExpNode,
+    body: BlockNode,
+    fn dump(self: *const ElseifNode, indent: usize) void
+    {
+        std.debug.print("Elseif Node:\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("{{\n", .{});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("condition: ", .{});
+        dumpExpNode(self.condition, indent + 1);
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("body: ", .{});
+        self.body.dump(indent + 1);
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("}}\n", .{});
+    }
+};
+const FuncnameNode = struct
+{
+    name: []u8,
+    dottedNames: std.ArrayList([]u8),
+    firstArg: ?[]u8,
+    fn dump(self: *const FuncnameNode, indent: usize) void
+    {
+        std.debug.print("Funcname Node:\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("{{\n", .{});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("name: '{s}'\n", .{self.name});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("dottedNames:\n", .{});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("[\n", .{});
+        for(self.dottedNames.items) |dottedName|
+        {
+            for (0..(indent + 2)) |_|
+            {
+                std.debug.print("\t", .{});
+            }
+            std.debug.print("'{s}'\n", .{dottedName});
+        }
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("]\n", .{});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("firstArg: ", .{});
+        if(self.firstArg == null)
+        {
+            std.debug.print("null\n", .{});
+        }
+        else
+        {
+            std.debug.print("'{s}'\n", .{self.firstArg.?});
+        }
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("}}\n", .{});
+    }
+
+};
+const FuncbodyNode = struct
+{
+    pars: ?ParlistNode,
+    body: BlockNode,
+
+    fn dump(self: *const FuncbodyNode, indent: usize) void
+    {
+        std.debug.print("Funcbody Node:\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("{{\n", .{});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("pars: ", .{});
+        if(self.pars == null)
+        {
+            std.debug.print("null\n", .{});
+        }
+        else
+        {
+            self.pars.?.dump(indent + 1);
+        }
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        self.body.dump(indent + 1);
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("}}\n", .{});
+    }
+};
+const AttnamelistNode = struct
+{
+    attnames: std.ArrayList(AttnameNode),
+    fn dump(self: *const AttnamelistNode, indent: usize) void
+    {
+        std.debug.print("Attnamelist Node:\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("{{\n", .{});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("attNames:\n", .{});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("[\n", .{});
+        for(self.attnames.items) |attNames|
+        {
+            for (0..(indent + 2)) |_|
+            {
+                std.debug.print("\t", .{});
+            }
+            attNames.dump(indent + 2);
+        }
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("]\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("}}\n", .{});
+    }
+};
+const AttnameNode = struct
+{
+    name: []u8,
+    attribute: ?[]u8,
+    fn dump(self: *const AttnameNode, indent: usize) void
+    {
+        std.debug.print("Attname Node:\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("{{\n", .{});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("name: '{s}'\n", .{self.name});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("attribute: ", .{});
+        if(self.attribute == null)
+        {
+            std.debug.print("null\n", .{});
+        }
+        else
+        {
+            std.debug.print("'{s}'\n", .{self.attribute.?});
+        }
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("}}\n", .{});
+    }
+};
+const SuffixexpNode = union(enum)
+{
+    Normal: NormalSuffixNode,
+    Functioncall: *FunctioncallNode,
+};
+fn dumpSuffixExpNode(suffixexpNode: SuffixexpNode, indent: usize) void
+{
+    switch(suffixexpNode)
+    {
+        .Normal => |*node| node.dump(indent),
+        .Functioncall => |*node| node.*.dump(indent),
+    }
+}
+const ArgsNode = union(enum)
+{
+    Bracketed: ?ExplistNode,
+    Tableconstructor: TableconstructorNode,
+    Literal: []u8,
+};
+fn dumpArgsNode(argsNode: ArgsNode, indent: usize) void
+{
+    switch(argsNode)
+    {
+        .Bracketed => |*name|
+        {
+            if(name.* == null)
+            {
+                std.debug.print("null\n", .{});
+            }
+            else
+            {
+                name.*.?.dump(indent);
+            }
+        },
+        .Tableconstructor => |*node| node.dump(indent),
+        .Literal => |*string| std.debug.print("Literal: '{s}'\n", .{string.*}),
+    }
+}
+const VarNode = union(enum)
+{
+    Name: []u8,
+    Indexed: IndexedVarNode,
+    Member: MemberVarNode,
+};
+fn dumpVarNode(varNode: VarNode, indent: usize) void
+{
+    switch(varNode)
+    {
+        .Name => |*name| std.debug.print("Name: '{s}'\n", .{name.*}),
+        .Indexed => |*node| node.dump(indent),
+        .Member => |*node| node.dump(indent),
+    }
+}
+const IndexedVarNode = struct
+{
+    value: SuffixexpNode,
+    index: ExpNode,
+    fn dump(self: *const IndexedVarNode, indent: usize) void
+    {
+        std.debug.print("Indexed Var Node:\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("{{\n", .{});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("value: ", .{});
+        dumpSuffixExpNode(self.value, indent + 1);
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("index: ", .{});
+        dumpExpNode(self.index, indent + 1);
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("}}\n", .{});
+    }
+};
+const MemberVarNode = struct
+{
+    value: SuffixexpNode,
+    name: []u8,
+    fn dump(self: *const MemberVarNode, indent: usize) void
+    {
+        std.debug.print("Member Var Node:\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("{{\n", .{});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("value: ", .{});
+        dumpSuffixExpNode(self.value, indent + 1);
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("name: '{s}'\n", .{self.name});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("}}\n", .{});
+    }
+
+};
+const TableconstructorNode = struct
+{
+    exps: ?FieldlistNode,
+    fn dump(self: *const TableconstructorNode, indent: usize) void
+    {
+        std.debug.print("Tableconstructor Node:\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("{{\n", .{});
+        for (0..indent + 1) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("exps: ", .{});
+        if(self.exps == null)
+        {
+            std.debug.print("null\n", .{});
+        }
+        else
+        {
+            self.exps.?.dump(indent + 1);
+        }
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("}}\n", .{});
+    }
+};
+const UnopType = enum
+{
+    Minus, LogicalNot, Length, BinaryNot,
+};
+const BinopType = enum
+{
+    LogicalOr,
+    LocicalAnd,
+    Lt, Gt, LtEquals, GtEquals, NotEquals, Equals,
+    BinaryOr,
+    BinaryNot,
+    BinaryAnd,
+    Shl, Shr,
+    Concat,
+    Add, Sub,
+    Mul, Div, IntDiv, Mod,
+    Exp,
+};
+const ParlistNode = struct
+{
+    names: std.ArrayList([]u8),
+    hasVarargs: bool,
+    fn dump(self: *const ParlistNode, indent: usize) void
+    {
+        std.debug.print("Parlist Node:\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("{{\n", .{});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("names:\n", .{});
+        for (0..indent + 1) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("[\n", .{});
+        for(self.names.items) |name|
+        {
+            for (0..indent + 2) |_|
+            {
+                std.debug.print("\t", .{});
+            }
+            std.debug.print("'{s}'\n", .{name});
+        }
+        for (0..indent + 1) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("]\n", .{});
+        for (0..indent + 1) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("has Varargs: {}\n", .{self.hasVarargs});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("}}\n", .{});
+    }
+};
+const NormalSuffixNode = struct
+{
+    firstPart: SuffixexpFirstPart,
+    suffixes: std.ArrayList(SuffixexpSuffix),
+    fn dump(self: *const NormalSuffixNode, indent: usize) void
+    {
+        std.debug.print("Normal Suffix Node:\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("{{\n", .{});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("First Part: ", .{});
+        dumpSuffixExpFirstPart(self.firstPart, indent + 1);
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("Suffixes:\n", .{});
+        for (0..indent + 1) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("[\n", .{});
+        for(self.suffixes.items) |suffix|
+        {
+            for (0..(indent + 2)) |_|
+            {
+                std.debug.print("\t", .{});
+            }
+            dumpSuffixSuffix(suffix, indent + 2);
+        }
+        for (0..indent + 1) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("]\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("}}\n", .{});
+    }
+};
+const SuffixexpFirstPart = union(enum)
+{
+    Name: []u8,
+    BracketedExpr: ExpNode,
+};
+fn dumpSuffixExpFirstPart(suffixexpFirstPart: SuffixexpFirstPart, indent: usize) void
+{
+    switch(suffixexpFirstPart)
+    {
+        .Name => |*name| std.debug.print("Name: '{s}'\n", .{name.*}),
+        .BracketedExpr => |*node| dumpExpNode(node.*, indent),
+    }
+}
+const SuffixexpSuffix = union(enum)
+{
+    Dot: []u8,
+    Indexed: ExpNode,
+    Args: ArgsNode,
+    ArgsFirstArg: ArgsFirstArgNode,
+};
+fn dumpSuffixSuffix(suffixexpSuffix: SuffixexpSuffix, indent: usize) void
+{
+    switch(suffixexpSuffix)
+    {
+        .Dot => |*name| std.debug.print("Dot: '{s}'\n", .{name.*}),
+        .Indexed => |*node| dumpExpNode(node.*, indent),
+        .Args => |*node| dumpArgsNode(node.*, indent),
+        .ArgsFirstArg => |*node| node.dump(indent),
+    }
+}
+
+const ArgsFirstArgNode = struct
+{
+    name: []u8,
+    rest: ArgsNode,
+    fn dump(self: *const ArgsFirstArgNode, indent: usize) void
+    {
+        std.debug.print("Args First Arg Node:\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("{{\n", .{});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("name: '{s}'\n", .{self.name});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("rest: ", .{});
+        dumpArgsNode(self.rest, indent + 1);
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("}}\n", .{});
+    }
+
+};
+const FieldlistNode = struct
+{
+    exps: std.ArrayList(FieldNode),
+    fn dump(self: *const FieldlistNode, indent: usize) void
+    {
+        std.debug.print("Fieldlist Node:\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("{{\n", .{});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("exps: ", .{});
+        for (0..indent + 1) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("[\n", .{});
+        for(self.exps.items) |exp|
+        {
+            for (0..(indent + 2)) |_|
+            {
+                std.debug.print("\t", .{});
+            }
+            dumpFieldNode(exp, indent + 2);
+        }
+        for (0..indent + 1) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("]\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("}}\n", .{});
+    }
+};
+const FieldNode = union(enum)
+{
+    IndexedAssignment: IndexedAssignmentNode,
+    Assignment: FieldAssignmentNode,
+    Exp: ExpNode,
+};
+const FieldAssignmentNode = struct
+{
+    lhs: []u8,
+    rhs: ExpNode,
+    fn dump(self: *const FieldAssignmentNode, indent: usize) void
+    {
+        std.debug.print("Field Assignment Node:\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("{{\n", .{});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("lhs: {s}\n", .{self.lhs});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("rhs: ", .{});
+        dumpExpNode(self.rhs, indent + 1);
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("}}\n", .{});
+    }
+};
+fn dumpFieldNode(fieldNode: FieldNode, indent: usize) void
+{
+    switch(fieldNode)
+    {
+        .IndexedAssignment => |*node| node.dump(indent),
+        .Assignment => |*node| node.dump(indent),
+        .Exp => |*node| dumpExpNode(node.*, indent),
+    }
+}
+const IndexedAssignmentNode = struct
+{
+    index: ExpNode,
+    rhs: ExpNode,
+    fn dump(self: *const IndexedAssignmentNode, indent: usize) void
+    {
+        std.debug.print("Indexed Assignment Node:\n", .{});
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("{{\n", .{});
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("index: ", .{});
+        dumpExpNode(self.index, indent + 1);
+        for (0..(indent + 1)) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("rhs: ", .{});
+        dumpExpNode(self.rhs, indent + 1);
+        for (0..indent) |_|
+        {
+            std.debug.print("\t", .{});
+        }
+        std.debug.print("}}\n", .{});
+    }
+};
+
+pub fn parse(tokens: []Token, allocator: *std.heap.ArenaAllocator) !ChunkNode
+{
+    var i: usize = 0;
+    const maybeParsedChunk = parseChunk(tokens, &i, allocator) catch |err|
+    {
+        std.debug.print("{any}: data: {any}, type: {any}\n", .{tokens[i].location, tokens[i].tokenData, tokens[i].tokenType});
+        return err;
+    };
+    return maybeParsedChunk;
+}
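This entry point is where the commit's two halves meet: when any parsing step fails, `parse` prints the location, data, and type of the token it stopped on, then re-raises the error. A caller therefore only needs to propagate the error; a hedged usage sketch (`source` and `gpa` are placeholder names, the calls mirror src/main.zig):

    const tokens = try tokenize(source, gpa);
    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer arena.deinit();
    const root = parse(tokens, &arena) catch |err|
    {
        // the failing token's location was already printed by parse()
        return err;
    };
    root.dump(0);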
+
+const ParserError = error
+{
+    NotImplemented,
+    ReachedEOFParsing,
+    ReachedEOFExpectedNameForGoto,
+    ExpectedNameForGoto,
+    MissingEndForDoBlock,
+    MissingDoAfterWhileCondition,
+    MissingEndForWhileBody,
+    ExpectedUntilAfterRepeatBody,
+    ExpectedThenAfterIfCondition,
+    ReachedEOFAfterIfBody,
+    ExpectedThenAfterElseifCondition,
+    ReachedEOFAfterElseifs,
+    ExpectedEndClosingIf,
+    ExpectedNameAfterFor,
+    ExpectedCommaAfterForEqStartValue,
+    ReachedEOFAfterForEqEndValue,
+    ExpectedDoAfterForEqHead,
+    ExpectedAnotherNameInForInNamelist,
+    ReachedEOFAfterNameInForInNamelist,
+    ExpectedInAfterForInNamelist,
+    ExpectedDoAfterForInExplist,
+    ExpectedEndAfterForInBody,
+    ExpectedEndAfterForEqBody,
+    UnexpectedTokenAfterFirstNameInFor,
+    ReachedEOFInLocal,
+    ExpectedLocalFunctionName,
+    ExpectedNameAfterDoubleColonInLabelDeclaration,
+    ExpectedDoubleColonAfterNameInLabelDeclaration,
+    ExpectedFunctioncall,
+    ExpectedEqAfterAssignmentVarList,
+    ReachedEOFInSuffixExp,
+    ExpectedRoundClosedClosingBracketedPrimaryExp,
+    UnexpectedTokenAsFirstPartOfSuffixExp,
+    ExpectedNameInDottedSuffixExp,
+    ExpectedSquareClosedClosingIndexedSuffixExp,
+    ExpectedNameInArgsFirstArgSuffixExp,
+    ExpectedDotOrIndexedSuffixWhenConvertingSuffixExpToVar,
+    ReachedEOFExpectedPrimaryExpression,
+    ReachedEOFInArgs,
+    ReachedEOFInBracketedArgs,
+    ExpectedRoundClosedClosingBracketedArgs,
+    UnexpectedTokenInArgs,
+    NoPrecedenceForOperator,
+    NoBinopTypeForOperator,
+    ExpectednameInAttribName,
+    ExpectedAttributeInAttrib,
+    ExpectedGtInAttrib,
+    ExpectedFuncname,
+    ExpectedNameInDottedFuncname,
+    ExpectedNameOfFirstArgInFuncname,
+    ExpectedRoundOpenStartingFuncbody,
+    ReachedEOFInFuncbodyParlist,
+    ExpectedRoundClosedClosingFuncbodyParlist,
+    ExpectedEndClosingFuncbody,
+    ReachedEOFInParlist,
+    ExpectedNameStartingParlist,
+    ReachedEOFInParlistNameList,
+    UnexpectedTokenInParlistNameList,
+    ExpectedReturnStartingRetstat,
+    ExpectedCurlyOpenOpeningTableconstructor,
+    ExpectedCurlyClosedClosingTableconstructor,
+    ReachedEOFInField,
+    ExpectedSquareClosedClosingIndexedField,
+    ExpectedEqualsInIndexedFieldExpression,
+    OutOfMemory,
+};
+
+fn parseChunk(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !ChunkNode
+{
+    return ChunkNode { .block = try parseBlock(tokens, i, allocator) };
+}
+fn parseBlock(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) ParserError!BlockNode
+{
+    var ret = BlockNode { .stats = std.ArrayList(StatNode).init(allocator.*.allocator()), .retstat = null };
+    while(i.* < tokens.len and
+        tokens[i.*].tokenType != TokenType.Return and
+        tokens[i.*].tokenType != TokenType.End and
+        tokens[i.*].tokenType != TokenType.Elseif and
+        tokens[i.*].tokenType != TokenType.Else
+    )
+    {
+        try ret.stats.append(try parseStat(tokens, i, allocator));
+    }
+    if(i.* < tokens.len and tokens[i.*].tokenType == TokenType.Return)
+    {
+        ret.retstat = try parseRetstat(tokens, i, allocator);
+    }
+    return ret;
+}
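parseBlock follows the block production of the Lua grammar: a block is a sequence of statements with an optional trailing return statement, so the loop runs parseStat until it reaches one of the tokens that can terminate a block (`return`, `end`, `elseif`, `else`). The relevant lines from the Lua reference manual's grammar:

    block ::= {stat} [retstat]
    retstat ::= return [explist] [';']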
|
fn parseStat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !StatNode
|
||||||
|
{
|
||||||
|
if(i.* >= tokens.len)
|
||||||
|
{
|
||||||
|
return error.ReachedEOFParsing;
|
||||||
|
}
|
||||||
|
switch(tokens[i.*].tokenType)
|
||||||
|
{
|
||||||
|
TokenType.Semicolon =>
|
||||||
|
{
|
||||||
|
i.* += 1;
|
||||||
|
return StatNode.Semicolon;
|
||||||
|
},
|
||||||
|
TokenType.Break =>
|
||||||
|
{
|
||||||
|
i.* += 1;
|
||||||
|
return StatNode.Break;
|
||||||
|
},
|
||||||
|
TokenType.Goto =>
|
||||||
|
{
|
||||||
|
i.* += 1;
|
||||||
|
if(i.* >= tokens.len)
|
||||||
|
{
|
||||||
|
return error.ReachedEOFExpectedNameForGoto;
|
||||||
|
}
|
||||||
|
if(tokens[i.*].tokenType == TokenType.Name)
|
||||||
|
{
|
||||||
|
return StatNode { .Goto = tokens[i.*].tokenData.string };
|
||||||
|
}
|
||||||
|
return error.ExpectedNameForGoto;
|
||||||
|
},
|
||||||
|
TokenType.Do =>
|
||||||
|
{
|
||||||
|
i.* += 1;
|
||||||
|
const body = try parseBlock(tokens, i, allocator);
|
||||||
|
if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.End)
|
||||||
|
{
|
||||||
|
return error.MissingEndForDoBlock;
|
||||||
|
}
|
||||||
|
i.* += 1;
|
||||||
|
return StatNode { .Do = body };
|
||||||
|
},
|
||||||
|
TokenType.While =>
|
||||||
|
{
|
||||||
|
i.* += 1;
|
||||||
|
const condition = try parseExp(tokens, i, allocator);
|
||||||
|
if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.Do)
|
||||||
|
{
|
||||||
|
return error.MissingDoAfterWhileCondition;
|
||||||
|
}
|
||||||
|
const body = try parseBlock(tokens, i, allocator);
|
||||||
|
if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.End)
|
||||||
|
{
|
||||||
|
return error.MissingEndForWhileBody;
|
||||||
|
}
|
||||||
|
i.* += 1;
|
||||||
|
return StatNode { .While = WhileNode { .body = body, .condition = condition } };
|
||||||
|
},
|
||||||
|
TokenType.Repeat =>
|
||||||
|
{
|
||||||
|
i.* += 1;
|
||||||
|
const body = try parseBlock(tokens, i, allocator);
|
||||||
|
if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.Until)
|
||||||
|
{
|
||||||
|
return error.ExpectedUntilAfterRepeatBody;
|
||||||
|
}
|
||||||
|
i.* += 1;
|
||||||
|
return StatNode { .Repeat = RepeatNode { .body = body, .condition = try parseExp(tokens, i, allocator) } };
|
||||||
|
},
|
||||||
|
TokenType.If =>
|
||||||
|
{
|
||||||
|
i.* += 1;
|
||||||
|
const condition = try parseExp(tokens, i, allocator);
|
||||||
|
if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.Then)
|
||||||
|
{
|
||||||
|
return error.ExpectedThenAfterIfCondition;
|
||||||
|
}
|
||||||
|
i.* += 1;
|
||||||
|
const body = try parseBlock(tokens, i, allocator);
|
||||||
|
if(i.* >= tokens.len)
|
||||||
|
{
|
||||||
|
return error.ReachedEOFAfterIfBody;
|
||||||
|
}
|
||||||
|
var ifNode = IfNode { .body = body, .condition = condition, .elseifs = std.ArrayList(ElseifNode).init(allocator.*.allocator()), .else_ = null};
|
||||||
|
while(tokens[i.*].tokenType == TokenType.Elseif)
|
||||||
|
{
|
||||||
|
i.* += 1;
|
||||||
|
const elseifCondition = try parseExp(tokens, i, allocator);
|
||||||
|
if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.Then)
|
||||||
|
{
|
||||||
|
return error.ExpectedThenAfterElseifCondition;
|
||||||
|
}
|
||||||
|
i.* += 1;
|
||||||
|
try ifNode.elseifs.append(ElseifNode { .body = try parseBlock(tokens, i, allocator), .condition = elseifCondition });
|
||||||
|
}
|
||||||
|
if(i.* >= tokens.len)
|
||||||
|
{
|
||||||
|
return error.ReachedEOFAfterElseifs;
|
||||||
|
}
|
||||||
|
if(tokens[i.*].tokenType == TokenType.Else)
|
||||||
|
{
|
||||||
|
i.* += 1;
|
||||||
|
ifNode.else_ = try parseBlock(tokens, i, allocator);
|
||||||
|
}
|
||||||
|
if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.End)
|
||||||
|
{
|
||||||
|
return error.ExpectedEndClosingIf;
|
||||||
|
}
|
||||||
|
i.* += 1;
|
||||||
|
return StatNode { .If = ifNode };
|
||||||
|
},
|
||||||
|
TokenType.For =>
|
||||||
|
{
|
||||||
|
i.* += 1;
|
||||||
|
if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.Name)
|
||||||
|
{
|
||||||
|
return error.ExpectedNameAfterFor;
|
||||||
|
}
|
||||||
|
const variable = tokens[i.*].tokenData.string;
|
||||||
|
i.* += 1;
|
||||||
|
switch(tokens[i.*].tokenType)
|
||||||
|
{
|
||||||
|
TokenType.Equals =>
|
||||||
|
{
|
||||||
|
i.* += 1;
|
||||||
|
const start = try parseExp(tokens, i, allocator);
|
||||||
|
if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.Comma)
|
||||||
|
{
|
||||||
|
return error.ExpectedCommaAfterForEqStartValue;
|
||||||
|
}
|
||||||
|
i.* += 1;
|
||||||
|
const end = try parseExp(tokens, i, allocator);
|
||||||
|
if(i.* >= tokens.len)
|
||||||
|
{
|
||||||
|
return error.ReachedEOFAfterForEqEndValue;
|
||||||
|
}
|
||||||
|
var change: ?ExpNode = null;
|
||||||
|
if(tokens[i.*].tokenType == TokenType.Comma)
|
||||||
|
{
|
||||||
|
i.* += 1;
|
||||||
|
change = try parseExp(tokens, i, allocator);
|
||||||
|
}
|
||||||
|
if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.Do)
|
||||||
|
{
|
||||||
|
return error.ExpectedDoAfterForEqHead;
|
||||||
|
}
|
||||||
|
i.* += 1;
|
                    const body = try parseBlock(tokens, i, allocator);
                    if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.End)
                    {
                        return error.ExpectedEndAfterForEqBody;
                    }
                    i.* += 1;
                    return StatNode { .ForNumerical = ForNumericalNode { .variable = variable, .start = start, .end = end, .change = change, .body = body } };
                },
                TokenType.Comma =>
                {
                    var names = std.ArrayList([]u8).init(allocator.allocator());
                    try names.append(variable);
                    while(tokens[i.*].tokenType == TokenType.Comma)
                    {
                        i.* += 1;
                        if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.Name)
                        {
                            return error.ExpectedAnotherNameInForInNamelist;
                        }
                        try names.append(tokens[i.*].tokenData.string);
                        i.* += 1;
                        if(i.* >= tokens.len)
                        {
                            return error.ReachedEOFAfterNameInForInNamelist;
                        }
                    }
                    if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.In)
                    {
                        return error.ExpectedInAfterForInNamelist;
                    }
                    i.* += 1;
                    const exps = try parseExplist(tokens, i, allocator);
                    if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.Do)
                    {
                        return error.ExpectedDoAfterForInExplist;
                    }
                    i.* += 1;
                    const body = try parseBlock(tokens, i, allocator);
                    if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.End)
                    {
                        return error.ExpectedEndAfterForInBody;
                    }
                    i.* += 1;
                    return StatNode { .ForGeneric = ForGenericNode { .vars = names, .exps = exps, .body = body } };
                },
                TokenType.In =>
                {
                    i.* += 1;
                    const exps = try parseExplist(tokens, i, allocator);
                    if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.Do)
                    {
                        return error.ExpectedDoAfterForInExplist;
                    }
                    i.* += 1;
                    const body = try parseBlock(tokens, i, allocator);
                    if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.End)
                    {
                        return error.ExpectedEndAfterForInBody;
                    }
                    i.* += 1;
                    var names = try std.ArrayList([]u8).initCapacity(allocator.allocator(), 1);
                    try names.insert(0, variable);
                    return StatNode { .ForGeneric = ForGenericNode { .vars = names, .exps = exps, .body = body } };
                },
                else => return error.UnexpectedTokenAfterFirstNameInFor,
            }
        },
        TokenType.Function =>
        {
            i.* += 1;
            const name = try parseFuncname(tokens, i, allocator);
            return StatNode { .Function = FunctionNode { .name = name, .body = try parseFuncbody(tokens, i, allocator) } };
        },
        TokenType.Local =>
        {
            i.* += 1;
            if(i.* >= tokens.len)
            {
                return error.ReachedEOFInLocal;
            }
            if(tokens[i.*].tokenType == TokenType.Function)
            {
                i.* += 1;
                if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.Name)
                {
                    return error.ExpectedLocalFunctionName;
                }
                const name = tokens[i.*].tokenData.string;
                i.* += 1;
                return StatNode { .LocalFunction = LocalFunctionNode { .name = name, .body = try parseFuncbody(tokens, i, allocator) } };
            }
            else
            {
                var ret = LocalNode { .attnames = try parseAttnamelist(tokens, i, allocator), .values = null };
                if(i.* < tokens.len and tokens[i.*].tokenType == TokenType.Equals)
                {
                    i.* += 1;
                    ret.values = try parseExplist(tokens, i, allocator);
                }
                return StatNode { .Local = ret };
            }
        },
        TokenType.ColonColon =>
        {
            i.* += 1;
            if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.Name)
            {
                return error.ExpectedNameAfterDoubleColonInLabelDeclaration;
            }
            const name = tokens[i.*].tokenData.string;
            i.* += 1;
            if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.ColonColon)
            {
                return error.ExpectedDoubleColonAfterNameInLabelDeclaration;
            }
            i.* += 1;
            return StatNode { .Label = name };
        },
        TokenType.Name, TokenType.RoundOpen =>
        {
            const suffixExp = try parseSuffixExp(tokens, i, allocator);
            if(i.* >= tokens.len)
            {
                switch(suffixExp)
                {
                    .Normal => return error.ExpectedFunctioncall,
                    .Functioncall => |functioncall| return StatNode { .Functioncall = functioncall.* },
                }
            }
            else
            {
                switch(tokens[i.*].tokenType)
                {
                    TokenType.Equals =>
                    {
                        i.* += 1;
                        var lhs = std.ArrayList(VarNode).init(allocator.allocator());
                        try lhs.append(try suffixExpToVar(suffixExp));
                        return StatNode { .Assignment = AssignmentNode { .lhs = VarlistNode { .vars = lhs }, .rhs = try parseExplist(tokens, i, allocator) } };
                    },
                    TokenType.Comma =>
                    {
                        var varlistNode = VarlistNode { .vars = std.ArrayList(VarNode).init(allocator.allocator()) };
                        try varlistNode.vars.append(try suffixExpToVar(suffixExp));
                        // Bounds check added: parseVar may consume the last token.
                        while(i.* < tokens.len and tokens[i.*].tokenType == TokenType.Comma)
                        {
                            i.* += 1;
                            try varlistNode.vars.append(try parseVar(tokens, i, allocator));
                        }
                        if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.Equals)
                        {
                            return error.ExpectedEqAfterAssignmentVarList;
                        }
                        i.* += 1;
                        return StatNode { .Assignment = AssignmentNode { .lhs = varlistNode, .rhs = try parseExplist(tokens, i, allocator) } };
                    },
                    else =>
                    {
                        switch(suffixExp)
                        {
                            .Normal => return error.ExpectedFunctioncall,
                            .Functioncall => |functioncall| return StatNode { .Functioncall = functioncall.* },
                        }
                    }
                }
            }
        },
        else =>
        {
            std.debug.print("{}\n", .{tokens[i.*]});
            return error.NotImplemented;
        }
    }
}
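// retstat ::= return [explist] [';']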
fn parseRetstat(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !RetstatNode
{
    if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.Return)
    {
        return error.ExpectedReturnStartingRetstat;
    }
    i.* += 1;
    if(i.* >= tokens.len or
        tokens[i.*].tokenType == TokenType.Semicolon or tokens[i.*].tokenType == TokenType.Else or tokens[i.*].tokenType == TokenType.Elseif or tokens[i.*].tokenType == TokenType.End)
    {
        if(i.* < tokens.len and tokens[i.*].tokenType == TokenType.Semicolon)
        {
            i.* += 1;
        }
        return RetstatNode { .values = null };
    }
    const ret = RetstatNode { .values = try parseExplist(tokens, i, allocator) };
    if(i.* < tokens.len and tokens[i.*].tokenType == TokenType.Semicolon)
    {
        i.* += 1;
    }
    return ret;
}
fn parseExp(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) ParserError!ExpNode
{
    const lhs = try parseExpPrimary(tokens, i, allocator);
    return parseExpPrecedence(tokens, i, allocator, lhs, 0);
}
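// exp ::= nil | false | true | Numeral | LiteralString | '...' | functiondef
//         | prefixexp | tableconstructor | exp binop exp | unop exp
// parseExpPrimary below covers every alternative except "exp binop exp",
// which parseExpPrecedence folds in afterwards.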
fn parseExpPrimary(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !ExpNode
{
    if(i.* >= tokens.len)
    {
        return error.ReachedEOFExpectedPrimaryExpression;
    }
    switch(tokens[i.*].tokenType)
    {
        TokenType.Nil =>
        {
            i.* += 1;
            return ExpNode.Nil;
        },
        TokenType.True =>
        {
            i.* += 1;
            return ExpNode.True;
        },
        TokenType.False =>
        {
            i.* += 1;
            return ExpNode.False;
        },
        TokenType.Numeral =>
        {
            const numeral = tokens[i.*].tokenData.numeral;
            i.* += 1;
            return ExpNode { .Numeral = numeral };
        },
        TokenType.StringLiteral =>
        {
            const string = tokens[i.*].tokenData.string;
            i.* += 1;
            return ExpNode { .LiteralString = string };
        },
        TokenType.DotDotDot =>
        {
            i.* += 1;
            return ExpNode.Varargs;
        },
        TokenType.Function =>
        {
            i.* += 1;
            return ExpNode { .Functiondef = try parseFuncbody(tokens, i, allocator) };
        },
        TokenType.CurlyOpen => return ExpNode { .Tableconstructor = try parseTableconstructor(tokens, i, allocator) },
        TokenType.Minus =>
        {
            i.* += 1;
            const unop = try allocator.allocator().create(ExpNode);
            unop.* = try parseExp(tokens, i, allocator);
            return ExpNode { .Unop = UnopNode { .unopType = UnopType.Minus, .exp = unop } };
        },
        TokenType.Hash =>
        {
            i.* += 1;
            const unop = try allocator.allocator().create(ExpNode);
            unop.* = try parseExp(tokens, i, allocator);
            return ExpNode { .Unop = UnopNode { .unopType = UnopType.Length, .exp = unop } };
        },
        TokenType.Not =>
        {
            i.* += 1;
            const unop = try allocator.allocator().create(ExpNode);
            unop.* = try parseExp(tokens, i, allocator);
            return ExpNode { .Unop = UnopNode { .unopType = UnopType.LogicalNot, .exp = unop } };
        },
        TokenType.Tilde =>
        {
            i.* += 1;
            const unop = try allocator.allocator().create(ExpNode);
            unop.* = try parseExp(tokens, i, allocator);
            return ExpNode { .Unop = UnopNode { .unopType = UnopType.BinaryNot, .exp = unop } };
        },
        else =>
        {
            const suffixexp = try allocator.allocator().create(SuffixexpNode);
            suffixexp.* = try parseSuffixExp(tokens, i, allocator);
            return ExpNode { .Suffixexp = suffixexp };
        }
    }
}
fn parseExpPrecedence(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator, lhs: ExpNode, minPrecedence: u8) !ExpNode
{
    var currentLhs = lhs;
    while(i.* < tokens.len and isBinop(tokens[i.*]))
    {
        const precedence = try getPrecedence(tokens[i.*]);
        if(precedence < minPrecedence)
        {
            break;
        }
        const op = try getBinopType(tokens[i.*]);
        i.* += 1;
        var rhs = try parseExpPrimary(tokens, i, allocator);
        while(i.* < tokens.len and isBinop(tokens[i.*]) and
            (try getPrecedence(tokens[i.*]) > precedence or
            (try getPrecedence(tokens[i.*]) == precedence and isRightAssociative(tokens[i.*]))))
        {
            const associativityBoost: u8 = if(try getPrecedence(tokens[i.*]) == precedence) 0 else 1;
            rhs = try parseExpPrecedence(tokens, i, allocator, rhs, precedence + associativityBoost);
        }
        const binop = try allocator.allocator().create(BinopNode);
        binop.* = BinopNode { .lhs = currentLhs, .op = op, .rhs = rhs };
        currentLhs = ExpNode { .Binop = binop };
    }
    return currentLhs;
}
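// Illustration (not from the grammar): this is precedence climbing. For
// "1 + 2 * 3", '+' has precedence 18 and '*' 20, so after reading the
// primary "2" the inner loop sees the stronger '*' and recurses, giving
// 1 + (2 * 3). For the right-associative '..' and '^' the zero
// associativityBoost lets an operator of equal precedence recurse too,
// so "2 ^ 3 ^ 4" becomes 2 ^ (3 ^ 4).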
fn isRightAssociative(token: Token) bool
{
    return token.tokenType == TokenType.DotDot or token.tokenType == TokenType.Caret;
}
fn getBinopType(token: Token) !BinopType
{
    return switch(token.tokenType)
    {
        TokenType.Or => BinopType.LogicalOr,
        TokenType.And => BinopType.LocicalAnd,
        TokenType.Lt => BinopType.Lt,
        TokenType.Gt => BinopType.Gt,
        TokenType.LtEquals => BinopType.LtEquals,
        TokenType.GtEquals => BinopType.GtEquals,
        TokenType.LtLt => BinopType.Shl,
        TokenType.GtGt => BinopType.Shr,
        TokenType.TildeEquals => BinopType.NotEquals,
        TokenType.EqualsEquals => BinopType.Equals,
        TokenType.Pipe => BinopType.BinaryOr,
        TokenType.Tilde => BinopType.BinaryNot,
        TokenType.Ampersand => BinopType.BinaryAnd,
        TokenType.DotDot => BinopType.Concat,
        TokenType.Plus => BinopType.Add,
        TokenType.Minus => BinopType.Sub,
        TokenType.Star => BinopType.Mul,
        TokenType.Slash => BinopType.Div,
        TokenType.SlashSlash => BinopType.IntDiv,
        TokenType.Percent => BinopType.Mod,
        TokenType.Caret => BinopType.Exp,
        else => error.NoBinopTypeForOperator,
    };
}
fn getPrecedence(token: Token) !u8
{
    return switch(token.tokenType)
    {
        TokenType.Or => 2,
        TokenType.And => 4,
        TokenType.Lt, TokenType.Gt, TokenType.LtEquals, TokenType.GtEquals, TokenType.TildeEquals, TokenType.EqualsEquals => 6,
        TokenType.Pipe => 8,
        TokenType.Tilde => 10,
        TokenType.Ampersand => 12,
        TokenType.LtLt, TokenType.GtGt => 14,
        TokenType.DotDot => 16,
        TokenType.Plus, TokenType.Minus => 18,
        TokenType.Star, TokenType.Slash, TokenType.SlashSlash, TokenType.Percent => 20,
        TokenType.Caret => 22,
        else => error.NoPrecedenceForOperator,
    };
}
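// The ordering above mirrors the operator table in the Lua manual; the
// levels are spaced two apart, presumably so the +1 associativityBoost in
// parseExpPrecedence never collides with a neighbouring level.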
fn isBinop(token: Token) bool
{
    return switch(token.tokenType)
    {
        TokenType.Or, TokenType.And, TokenType.Lt, TokenType.Gt, TokenType.LtEquals, TokenType.GtEquals, TokenType.TildeEquals, TokenType.EqualsEquals,
        TokenType.Pipe, TokenType.Tilde, TokenType.Ampersand, TokenType.LtLt, TokenType.GtGt, TokenType.DotDot, TokenType.Plus, TokenType.Minus,
        TokenType.Star, TokenType.Slash, TokenType.SlashSlash, TokenType.Percent, TokenType.Caret => true,
        else => false
    };
}
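// explist ::= exp {',' exp}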
fn parseExplist(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !ExplistNode
{
    var ret = ExplistNode { .exps = std.ArrayList(ExpNode).init(allocator.allocator()) };
    try ret.exps.append(try parseExp(tokens, i, allocator));
    while(i.* < tokens.len and tokens[i.*].tokenType == TokenType.Comma)
    {
        i.* += 1;
        try ret.exps.append(try parseExp(tokens, i, allocator));
    }
    return ret;
}
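// funcname ::= Name {'.' Name} [':' Name]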
fn parseFuncname(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !FuncnameNode
{
    if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.Name)
    {
        return error.ExpectedFuncname;
    }
    var ret = FuncnameNode { .name = tokens[i.*].tokenData.string, .dottedNames = std.ArrayList([]u8).init(allocator.allocator()), .firstArg = null };
    i.* += 1;
    while(i.* < tokens.len and tokens[i.*].tokenType == TokenType.Dot)
    {
        i.* += 1;
        if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.Name)
        {
            return error.ExpectedNameInDottedFuncname;
        }
        try ret.dottedNames.append(tokens[i.*].tokenData.string);
        // Consume the name so the loop does not stall on it.
        i.* += 1;
    }
    if(i.* < tokens.len and tokens[i.*].tokenType == TokenType.Colon)
    {
        i.* += 1;
        if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.Name)
        {
            return error.ExpectedNameOfFirstArgInFuncname;
        }
        ret.firstArg = tokens[i.*].tokenData.string;
        i.* += 1;
    }
    return ret;
}
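// funcbody ::= '(' [parlist] ')' block end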
fn parseFuncbody(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !FuncbodyNode
{
    if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.RoundOpen)
    {
        return error.ExpectedRoundOpenStartingFuncbody;
    }
    i.* += 1;
    if(i.* >= tokens.len)
    {
        return error.ReachedEOFInFuncbodyParlist;
    }
    var pars: ?ParlistNode = null;
    if(tokens[i.*].tokenType == TokenType.RoundClosed)
    {
        i.* += 1;
    }
    else
    {
        pars = try parseParlist(tokens, i, allocator);
        if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.RoundClosed)
        {
            return error.ExpectedRoundClosedClosingFuncbodyParlist;
        }
        i.* += 1;
    }
    const ret = FuncbodyNode { .body = try parseBlock(tokens, i, allocator), .pars = pars };
    if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.End)
    {
        return error.ExpectedEndClosingFuncbody;
    }
    i.* += 1;
    return ret;
}
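// parlist ::= namelist [',' '...'] | '...'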
fn parseParlist(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !ParlistNode
{
    if(i.* >= tokens.len)
    {
        return error.ReachedEOFInParlist;
    }
    if(tokens[i.*].tokenType == TokenType.DotDotDot)
    {
        i.* += 1;
        return ParlistNode { .names = std.ArrayList([]u8).init(allocator.allocator()), .hasVarargs = true };
    }
    if(tokens[i.*].tokenType != TokenType.Name)
    {
        return error.ExpectedNameStartingParlist;
    }
    var ret = ParlistNode { .names = std.ArrayList([]u8).init(allocator.allocator()), .hasVarargs = false };
    try ret.names.append(tokens[i.*].tokenData.string);
    i.* += 1;
    while(i.* < tokens.len and tokens[i.*].tokenType == TokenType.Comma)
    {
        i.* += 1;
        if(i.* >= tokens.len)
        {
            return error.ReachedEOFInParlistNameList;
        }
        switch(tokens[i.*].tokenType)
        {
            TokenType.Name =>
            {
                try ret.names.append(tokens[i.*].tokenData.string);
                i.* += 1;
            },
            TokenType.DotDotDot =>
            {
                i.* += 1;
                ret.hasVarargs = true;
                break;
            },
            else => return error.UnexpectedTokenInParlistNameList,
        }
    }
    return ret;
}
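// attnamelist ::= Name attrib {',' Name attrib}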
fn parseAttnamelist(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !AttnamelistNode
{
    var ret = AttnamelistNode { .attnames = std.ArrayList(AttnameNode).init(allocator.allocator()) };
    try ret.attnames.append(try parseAttname(tokens, i));
    while(i.* < tokens.len and tokens[i.*].tokenType == TokenType.Comma)
    {
        i.* += 1;
        try ret.attnames.append(try parseAttname(tokens, i));
    }
    return ret;
}
fn parseSuffixExp(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !SuffixexpNode
{
    // primaryexp { '.' 'Name' | '[' exp ']' | ':' 'Name' args | args }
    if(i.* >= tokens.len)
    {
        return error.ReachedEOFInSuffixExp;
    }
    const firstPart = try switch(tokens[i.*].tokenType)
    {
        TokenType.Name =>
        nameBlock: {
            const name = tokens[i.*].tokenData.string;
            i.* += 1;
            break :nameBlock SuffixexpFirstPart { .Name = name };
        },
        TokenType.RoundOpen =>
        roundOpenBlock: {
            i.* += 1;
            const ret = SuffixexpFirstPart { .BracketedExpr = try parseExp(tokens, i, allocator) };
            if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.RoundClosed)
            {
                return error.ExpectedRoundClosedClosingBracketedPrimaryExp;
            }
            i.* += 1;
            break :roundOpenBlock ret;
        },
        else => error.UnexpectedTokenAsFirstPartOfSuffixExp,
    };
    var suffixes = std.ArrayList(SuffixexpSuffix).init(allocator.allocator());
    while(i.* < tokens.len)
    {
        switch(tokens[i.*].tokenType)
        {
            TokenType.Dot =>
            {
                i.* += 1;
                if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.Name)
                {
                    return error.ExpectedNameInDottedSuffixExp;
                }
                const name = tokens[i.*].tokenData.string;
                i.* += 1;
                try suffixes.append(SuffixexpSuffix { .Dot = name });
            },
            TokenType.SquareOpen =>
            {
                i.* += 1;
                try suffixes.append(SuffixexpSuffix { .Indexed = try parseExp(tokens, i, allocator) });
                if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.SquareClosed)
                {
                    return error.ExpectedSquareClosedClosingIndexedSuffixExp;
                }
                i.* += 1;
            },
            TokenType.Colon =>
            {
                i.* += 1;
                if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.Name)
                {
                    return error.ExpectedNameInArgsFirstArgSuffixExp;
                }
                const name = tokens[i.*].tokenData.string;
                i.* += 1;
                try suffixes.append(SuffixexpSuffix { .ArgsFirstArg = ArgsFirstArgNode { .name = name, .rest = try parseArgs(tokens, i, allocator) } });
            },
            TokenType.RoundOpen, TokenType.CurlyOpen, TokenType.StringLiteral =>
            {
                try suffixes.append(SuffixexpSuffix { .Args = try parseArgs(tokens, i, allocator) });
            },
            else => break,
        }
    }
    const last = suffixes.getLastOrNull();
    if(last != null)
    {
        switch(last.?)
        {
            SuffixexpSuffix.Args => |*args|
            {
                _ = suffixes.pop();
                const functioncall = try allocator.allocator().create(FunctioncallNode);
                functioncall.* = FunctioncallNode
                {
                    .function = SuffixexpNode { .Normal = NormalSuffixNode { .firstPart = firstPart, .suffixes = suffixes } },
                    .args = args.*,
                    .objectArg = null,
                };
                return SuffixexpNode
                {
                    .Functioncall = functioncall,
                };
            },
            SuffixexpSuffix.ArgsFirstArg => |*node|
            {
                _ = suffixes.pop();
                const functioncall = try allocator.allocator().create(FunctioncallNode);
                functioncall.* = FunctioncallNode
                {
                    .function = SuffixexpNode { .Normal = NormalSuffixNode { .firstPart = firstPart, .suffixes = suffixes } },
                    .args = node.rest,
                    .objectArg = node.name,
                };
                return SuffixexpNode
                {
                    .Functioncall = functioncall,
                };
            },
            else => {}
        }
    }
    return SuffixexpNode { .Normal = NormalSuffixNode { .firstPart = firstPart, .suffixes = suffixes } };
}
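// Example (illustrative): for "t.f(x)" the suffixes are [Dot "f", Args (x)];
// the trailing Args suffix is popped and wrapped into a FunctioncallNode whose
// callee is the remaining "t.f". For "obj:m(x)" the ArgsFirstArg suffix
// additionally carries "m" as objectArg, modelling the method-call sugar.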
fn parseVar(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !VarNode
{
    return suffixExpToVar(try parseSuffixExp(tokens, i, allocator));
}
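// attrib ::= ['<' Name '>']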
fn parseAttname(tokens: []Token, i: *usize) !AttnameNode
{
    if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.Name)
    {
        return error.ExpectedNameInAttribName;
    }
    const name = tokens[i.*].tokenData.string;
    i.* += 1;
    var ret = AttnameNode { .name = name, .attribute = null };
    if(i.* < tokens.len and tokens[i.*].tokenType == TokenType.Lt)
    {
        i.* += 1;
        if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.Name)
        {
            return error.ExpectedAttributeInAttrib;
        }
        // Read the attribute from the name token between '<' and '>'.
        ret.attribute = tokens[i.*].tokenData.string;
        i.* += 1;
        if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.Gt)
        {
            return error.ExpectedGtInAttrib;
        }
        i.* += 1;
    }
    return ret;
}
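// args ::= '(' [explist] ')' | tableconstructor | LiteralString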
fn parseArgs(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !ArgsNode
{
    if(i.* >= tokens.len)
    {
        return error.ReachedEOFInArgs;
    }
    switch(tokens[i.*].tokenType)
    {
        TokenType.RoundOpen =>
        {
            i.* += 1;
            if(i.* >= tokens.len)
            {
                return error.ReachedEOFInBracketedArgs;
            }
            if(tokens[i.*].tokenType == TokenType.RoundClosed)
            {
                i.* += 1;
                return ArgsNode { .Bracketed = null };
            }
            const exps = try parseExplist(tokens, i, allocator);
            if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.RoundClosed)
            {
                return error.ExpectedRoundClosedClosingBracketedArgs;
            }
            i.* += 1;
            return ArgsNode { .Bracketed = exps };
        },
        TokenType.CurlyOpen => return ArgsNode { .Tableconstructor = try parseTableconstructor(tokens, i, allocator) },
        TokenType.StringLiteral =>
        {
            const value = tokens[i.*].tokenData.string;
            i.* += 1;
            return ArgsNode { .Literal = value };
        },
        else => return error.UnexpectedTokenInArgs,
    }
}
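// tableconstructor ::= '{' [fieldlist] '}'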
fn parseTableconstructor(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !TableconstructorNode
{
    if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.CurlyOpen)
    {
        return error.ExpectedCurlyOpenOpeningTableconstructor;
    }
    i.* += 1;
    if(i.* < tokens.len and tokens[i.*].tokenType == TokenType.CurlyClosed)
    {
        i.* += 1;
        return TableconstructorNode { .exps = null };
    }
    const ret = TableconstructorNode { .exps = try parseFieldlist(tokens, i, allocator) };
    if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.CurlyClosed)
    {
        return error.ExpectedCurlyClosedClosingTableconstructor;
    }
    i.* += 1;
    return ret;
}
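// fieldlist ::= field {fieldsep field} [fieldsep]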
fn parseFieldlist(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !FieldlistNode
{
    var ret = FieldlistNode { .exps = std.ArrayList(FieldNode).init(allocator.allocator()) };
    try ret.exps.append(try parseField(tokens, i, allocator));
    while(i.* < tokens.len and isFieldsep(tokens[i.*]))
    {
        i.* += 1;
        // A fieldsep may also trail the last field, so stop if the table ends here.
        if(i.* < tokens.len and tokens[i.*].tokenType == TokenType.CurlyClosed)
        {
            break;
        }
        try ret.exps.append(try parseField(tokens, i, allocator));
    }
    return ret;
}
fn isFieldsep(token: Token) bool
{
    return token.tokenType == TokenType.Comma or token.tokenType == TokenType.Semicolon;
}
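// field ::= '[' exp ']' '=' exp | Name '=' exp | exp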
fn parseField(tokens: []Token, i: *usize, allocator: *std.heap.ArenaAllocator) !FieldNode
{
    if(i.* >= tokens.len)
    {
        return error.ReachedEOFInField;
    }
    switch(tokens[i.*].tokenType)
    {
        TokenType.SquareOpen =>
        {
            i.* += 1;
            const index = try parseExp(tokens, i, allocator);
            if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.SquareClosed)
            {
                return error.ExpectedSquareClosedClosingIndexedField;
            }
            i.* += 1;
            if(i.* >= tokens.len or tokens[i.*].tokenType != TokenType.Equals)
            {
                return error.ExpectedEqualsInIndexedFieldExpression;
            }
            i.* += 1;
            return FieldNode { .IndexedAssignment = IndexedAssignmentNode { .index = index, .rhs = try parseExp(tokens, i, allocator) } };
        },
        TokenType.Name =>
        {
            if(i.* + 1 < tokens.len and tokens[i.* + 1].tokenType == TokenType.Equals)
            {
                const name = tokens[i.*].tokenData.string;
                i.* += 2;
                return FieldNode { .Assignment = FieldAssignmentNode { .lhs = name, .rhs = try parseExp(tokens, i, allocator) } };
            }
            return FieldNode { .Exp = try parseExp(tokens, i, allocator) };
        },
        else => return FieldNode { .Exp = try parseExp(tokens, i, allocator) },
    }
}
fn suffixExpToVar(suffixexp: SuffixexpNode) !VarNode
{
    // Only a Normal suffixexp can be an assignment target; a call like "f()" cannot.
    if(suffixexp != .Normal)
    {
        return error.ExpectedDotOrIndexedSuffixWhenConvertingSuffixExpToVar;
    }
    var exp = suffixexp.Normal;
    if(exp.suffixes.items.len == 0)
    {
        return VarNode { .Name = exp.firstPart.Name };
    }
    const last = exp.suffixes.pop();
    return switch(last)
    {
        SuffixexpSuffix.Dot => |*name| VarNode { .Member = MemberVarNode { .name = name.*, .value = SuffixexpNode { .Normal = exp } } },
        SuffixexpSuffix.Indexed => |*index| VarNode { .Indexed = IndexedVarNode { .index = index.*, .value = SuffixexpNode { .Normal = exp } } },
        else => error.ExpectedDotOrIndexedSuffixWhenConvertingSuffixExpToVar,
    };
}
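// Example (illustrative): for the assignment target "a.b.c" the suffixexp has
// firstPart Name "a" and suffixes [Dot "b", Dot "c"]; the trailing Dot "c" is
// popped into a MemberVarNode whose value is the remaining suffixexp "a.b".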
|
@ -1,7 +1,7 @@
|
|||||||
const types = @import("types.zig");
|
const types = @import("types.zig");
|
||||||
const std = @import("std");
|
const std = @import("std");
|
||||||
|
|
||||||
const TokenType = enum
|
pub const TokenType = enum
|
||||||
{
|
{
|
||||||
Name,
|
Name,
|
||||||
And, Break, Do, Else, Elseif, End,
|
And, Break, Do, Else, Elseif, End,
|
||||||
@ -25,10 +25,22 @@ const TokenData = union(enum)
|
|||||||
none,
|
none,
|
||||||
};
|
};
|
||||||
|
|
||||||
const Token = struct
|
pub const Token = struct
|
||||||
{
|
{
|
||||||
tokenType: TokenType,
|
tokenType: TokenType,
|
||||||
tokenData: TokenData,
|
tokenData: TokenData,
|
||||||
|
location: Location,
|
||||||
|
};
|
||||||
|
|
||||||
|
pub const Location = struct
|
||||||
|
{
|
||||||
|
start: ?Point,
|
||||||
|
length: usize,
|
||||||
|
};
|
||||||
|
pub const Point = struct
|
||||||
|
{
|
||||||
|
line: usize,
|
||||||
|
col: usize,
|
||||||
};
|
};
|
||||||
|
|
||||||
const TokenizerState = enum
|
const TokenizerState = enum
|
||||||
@ -60,30 +72,44 @@ const TokenizerState = enum
|
|||||||
Function,
|
Function,
|
||||||
};
|
};
|
||||||
|
|
||||||
fn tokenizeUpdateIndexAndState(lastIndex: *?usize, index: ?usize, state: *TokenizerState, newState: TokenizerState) void
|
fn tokenizeUpdateIndexAndState(lastIndex: *?usize, index: ?usize, state: *TokenizerState, newState: TokenizerState, location: *Location) void
|
||||||
{
|
{
|
||||||
lastIndex.* = index;
|
lastIndex.* = index;
|
||||||
state.* = newState;
|
state.* = newState;
|
||||||
|
if(index == null)
|
||||||
|
{
|
||||||
|
location.*.start = null;
|
||||||
|
location.*.length = 0;
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
if(location.*.start == null)
|
||||||
|
{
|
||||||
|
// TODO: There is no line/col info here and plumbing it to here would be pain.
|
||||||
|
location.*.start = Point { .col = 0, .line = 0 };
|
||||||
|
}
|
||||||
|
location.*.length += 1;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
fn tokenizeTerminalBase(lastIndex: *?usize, index: ?usize, tokenType: *?TokenType, state: *TokenizerState, newTokenType: ?TokenType, newState: TokenizerState) void
|
fn tokenizeTerminalBase(lastIndex: *?usize, index: ?usize, tokenType: *?TokenType, state: *TokenizerState, newTokenType: ?TokenType, newState: TokenizerState, location: *Location) void
|
||||||
{
|
{
|
||||||
tokenizeUpdateIndexAndState(lastIndex, index, state, newState);
|
tokenizeUpdateIndexAndState(lastIndex, index, state, newState, location);
|
||||||
tokenType.* = newTokenType;
|
tokenType.* = newTokenType;
|
||||||
}
|
}
|
||||||
fn tokenizeTerminalStr(lastIndex: *?usize, index: usize, tokenType: *?TokenType, state: *TokenizerState, newTokenType: ?TokenType, newState: TokenizerState, tokenStr: *std.ArrayList(u8), ch: u8) !void
|
fn tokenizeTerminalStr(lastIndex: *?usize, index: usize, tokenType: *?TokenType, state: *TokenizerState, newTokenType: ?TokenType, newState: TokenizerState, tokenStr: *std.ArrayList(u8), ch: u8, location: *Location) !void
|
||||||
{
|
{
|
||||||
tokenizeTerminalBase(lastIndex, index, tokenType, state, newTokenType, newState);
|
tokenizeTerminalBase(lastIndex, index, tokenType, state, newTokenType, newState, location);
|
||||||
try tokenStr.append(ch);
|
try tokenStr.append(ch);
|
||||||
}
|
}
|
||||||
fn tokenizeTerminalIntNum(lastIndex: *?usize, index: usize, tokenType: *?TokenType, state: *TokenizerState, newTokenType: TokenType, newState: TokenizerState, tokenNumeral: *?types.Numeral, ch: u8) !void
|
fn tokenizeTerminalIntNum(lastIndex: *?usize, index: usize, tokenType: *?TokenType, state: *TokenizerState, newTokenType: TokenType, newState: TokenizerState, tokenNumeral: *?types.Numeral, ch: u8, location: *Location) !void
|
||||||
{
|
{
|
||||||
tokenizeTerminalBase(lastIndex, index, tokenType, state, newTokenType, newState);
|
tokenizeTerminalBase(lastIndex, index, tokenType, state, newTokenType, newState, location);
|
||||||
if(!std.ascii.isDigit(ch))
|
if(!std.ascii.isDigit(ch))
|
||||||
{
|
{
|
||||||
return error.NoDigit;
|
return error.NoDigit;
|
||||||
}
|
}
|
||||||
const digitValue = @as(i64, ch - '0');
|
const digitValue = @as(i64, ch - '0');
|
||||||
if(tokenNumeral.* != null)
|
if(tokenNumeral.* == null)
|
||||||
{
|
{
|
||||||
tokenNumeral.* = types.Numeral { .Integer = digitValue };
|
tokenNumeral.* = types.Numeral { .Integer = digitValue };
|
||||||
}
|
}
|
||||||
@ -96,16 +122,16 @@ fn tokenizeTerminalIntNum(lastIndex: *?usize, index: usize, tokenType: *?TokenTy
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
fn tokenizeTerminalNoToken(lastIndex: *?usize, index: usize, state: *TokenizerState, newState: TokenizerState, tokenStr: *std.ArrayList(u8), ch: u8) !void
|
fn tokenizeTerminalNoToken(lastIndex: *?usize, index: usize, state: *TokenizerState, newState: TokenizerState, tokenStr: *std.ArrayList(u8), ch: u8, location: *Location) !void
|
||||||
{
|
{
|
||||||
tokenizeUpdateIndexAndState(lastIndex, index, state, newState);
|
tokenizeUpdateIndexAndState(lastIndex, index, state, newState, location);
|
||||||
try tokenStr.*.append(ch);
|
try tokenStr.*.append(ch);
|
||||||
}
|
}
|
||||||
fn tokenizeBacktrack(lastIndex: *?usize, index: *usize, tokens: *std.ArrayList(Token), tokenType: *?TokenType, tokenStr: *std.ArrayList(u8), tokenNumeral: *?types.Numeral, state: *TokenizerState, allocator: std.mem.Allocator) !void
|
fn tokenizeBacktrack(lastIndex: *?usize, index: *usize, tokens: *std.ArrayList(Token), tokenType: *?TokenType, tokenStr: *std.ArrayList(u8), tokenNumeral: *?types.Numeral, state: *TokenizerState, allocator: std.mem.Allocator, location: *Location) !void
|
||||||
{
|
{
|
||||||
try tokenizeBacktrackCustomToken(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, tokenType.*.?, allocator);
|
try tokenizeBacktrackCustomToken(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, tokenType.*.?, allocator, location);
|
||||||
}
|
}
|
||||||
fn tokenizeBacktrackCustomToken(lastIndex: *?usize, index: *usize, tokens: *std.ArrayList(Token), tokenType: *?TokenType, tokenStr: *std.ArrayList(u8), tokenNumeral: *?types.Numeral, state: *TokenizerState, newTokenType: TokenType, allocator: std.mem.Allocator) !void
|
fn tokenizeBacktrackCustomToken(lastIndex: *?usize, index: *usize, tokens: *std.ArrayList(Token), tokenType: *?TokenType, tokenStr: *std.ArrayList(u8), tokenNumeral: *?types.Numeral, state: *TokenizerState, newTokenType: TokenType, allocator: std.mem.Allocator, location: *Location) !void
|
||||||
{
|
{
|
||||||
if(lastIndex.* == null or tokenType.* == null)
|
if(lastIndex.* == null or tokenType.* == null)
|
||||||
{
|
{
|
||||||
@ -115,31 +141,32 @@ fn tokenizeBacktrackCustomToken(lastIndex: *?usize, index: *usize, tokens: *std.
|
|||||||
{
|
{
|
||||||
const content = try allocator.alloc(u8, tokenStr.*.items.len);
|
const content = try allocator.alloc(u8, tokenStr.*.items.len);
|
||||||
@memcpy(content, tokenStr.*.items);
|
@memcpy(content, tokenStr.*.items);
|
||||||
try tokens.append(Token { .tokenType = newTokenType, .tokenData = TokenData { .string = content } });
|
try tokens.append(Token { .tokenType = newTokenType, .tokenData = TokenData { .string = content }, .location = location.* });
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
try tokens.append(Token { .tokenType = newTokenType, .tokenData = if(tokenType.*.? == TokenType.Numeral) TokenData { .numeral = tokenNumeral.*.? }
|
try tokens.append(Token { .tokenType = newTokenType, .location = location.*, .tokenData = if(tokenType.*.? == TokenType.Numeral) TokenData { .numeral = tokenNumeral.*.? }
|
||||||
else TokenData.none
|
else TokenData.none
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
tokenNumeral.* = null;
|
tokenNumeral.* = null;
|
||||||
index.* = lastIndex.*.?;
|
index.* = lastIndex.*.?;
|
||||||
tokenStr.*.clearAndFree();
|
tokenStr.*.clearAndFree();
|
||||||
tokenizeTerminalBase(lastIndex, null, tokenType, state, null, TokenizerState.Start);
|
// location is reset in tokenizeTerminalBase since null is passed as index
|
||||||
|
tokenizeTerminalBase(lastIndex, null, tokenType, state, null, TokenizerState.Start, location);
|
||||||
}
|
}
|
||||||
fn tokenizeAlphanumericNonstart(lastIndex: *?usize, index: *usize, tokens: *std.ArrayList(Token), tokenType: *?TokenType, tokenStr: *std.ArrayList(u8), tokenNumeral: *?types.Numeral, state: *TokenizerState, ch: u8, newTokenType: TokenType, allocator: std.mem.Allocator) !void
|
fn tokenizeAlphanumericNonstart(lastIndex: *?usize, index: *usize, tokens: *std.ArrayList(Token), tokenType: *?TokenType, tokenStr: *std.ArrayList(u8), tokenNumeral: *?types.Numeral, state: *TokenizerState, ch: u8, newTokenType: TokenType, allocator: std.mem.Allocator, location: *Location) !void
|
||||||
{
|
{
|
||||||
if(std.ascii.isAlphanumeric(ch) or ch == '_')
|
if(std.ascii.isAlphanumeric(ch) or ch == '_')
|
||||||
{
|
{
|
||||||
try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.Name, tokenStr, ch);
|
try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.Name, tokenStr, ch, location);
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
try tokenizeBacktrackCustomToken(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, newTokenType, allocator);
|
try tokenizeBacktrackCustomToken(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, newTokenType, allocator, location);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usize, tokenType: *?TokenType, tokenStr: *std.ArrayList(u8), tokenNumeral: *?types.Numeral, tokens: *std.ArrayList(Token), longBracketLevel: *u32, allocator: std.mem.Allocator) !void
|
fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usize, tokenType: *?TokenType, tokenStr: *std.ArrayList(u8), tokenNumeral: *?types.Numeral, tokens: *std.ArrayList(Token), longBracketLevel: *u32, location: *Location, allocator: std.mem.Allocator) !void
|
||||||
{
|
{
|
||||||
switch(state.*)
|
switch(state.*)
|
||||||
{
|
{
|
||||||
@ -147,44 +174,44 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
|
|||||||
{
|
{
|
||||||
switch(ch)
|
switch(ch)
|
||||||
{
|
{
|
||||||
'-' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Minus, TokenizerState.Minus),
|
'-' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Minus, TokenizerState.Minus, location),
|
||||||
',' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Comma, TokenizerState.Comma),
|
',' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Comma, TokenizerState.Comma, location),
|
||||||
'=' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Equals, TokenizerState.Equals),
|
'=' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Equals, TokenizerState.Equals, location),
|
||||||
'(' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.RoundOpen, TokenizerState.RoundOpen),
|
'(' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.RoundOpen, TokenizerState.RoundOpen, location),
|
||||||
')' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.RoundClosed, TokenizerState.RoundClosed),
|
')' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.RoundClosed, TokenizerState.RoundClosed, location),
|
||||||
'.' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Dot, TokenizerState.Dot),
|
'.' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Dot, TokenizerState.Dot, location),
|
||||||
':' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Colon, TokenizerState.Colon),
|
':' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Colon, TokenizerState.Colon, location),
|
||||||
'{' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.CurlyOpen, TokenizerState.CurlyOpen),
|
'{' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.CurlyOpen, TokenizerState.CurlyOpen, location),
|
||||||
'}' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.CurlyClosed, TokenizerState.CurlyClosed),
|
'}' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.CurlyClosed, TokenizerState.CurlyClosed, location),
|
||||||
'[' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.SquareOpen, TokenizerState.SquareOpen),
|
'[' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.SquareOpen, TokenizerState.SquareOpen, location),
|
||||||
']' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.SquareClosed, TokenizerState.SquareClosed),
|
']' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.SquareClosed, TokenizerState.SquareClosed, location),
|
||||||
'+' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Plus, TokenizerState.Plus),
|
'+' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Plus, TokenizerState.Plus, location),
|
||||||
'~' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Tilde, TokenizerState.Tilde),
|
'~' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Tilde, TokenizerState.Tilde, location),
|
||||||
'>' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Gt, TokenizerState.Gt),
|
'>' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Gt, TokenizerState.Gt, location),
|
||||||
'<' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Lt, TokenizerState.Lt),
|
'<' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Lt, TokenizerState.Lt, location),
|
||||||
'#' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Hash, TokenizerState.Hash),
|
'#' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Hash, TokenizerState.Hash, location),
|
||||||
'|' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Pipe, TokenizerState.Pipe),
|
'|' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Pipe, TokenizerState.Pipe, location),
|
||||||
'&' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Ampersand, TokenizerState.Ampersand),
|
'&' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Ampersand, TokenizerState.Ampersand, location),
|
||||||
'%' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Percent, TokenizerState.Percent),
|
'%' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Percent, TokenizerState.Percent, location),
|
||||||
'*' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Star, TokenizerState.Star),
|
'*' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Star, TokenizerState.Star, location),
|
||||||
'/' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Slash, TokenizerState.Slash),
|
'/' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Slash, TokenizerState.Slash, location),
|
||||||
';' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Semicolon, TokenizerState.Semicolon),
|
';' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Semicolon, TokenizerState.Semicolon, location),
|
||||||
'^' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Caret, TokenizerState.Caret),
|
'^' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.Caret, TokenizerState.Caret, location),
|
||||||
'a' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.A, tokenStr, ch),
|
'a' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.A, tokenStr, ch, location),
|
||||||
'b' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.B, tokenStr, ch),
|
'b' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.B, tokenStr, ch, location),
|
||||||
'd' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.D, tokenStr, ch),
|
'd' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.D, tokenStr, ch, location),
|
||||||
'e' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.E, tokenStr, ch),
|
'e' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.E, tokenStr, ch, location),
|
||||||
'f' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.F, tokenStr, ch),
|
'f' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.F, tokenStr, ch, location),
|
||||||
'i' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.I, tokenStr, ch),
|
'i' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.I, tokenStr, ch, location),
|
||||||
'g' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.G, tokenStr, ch),
|
'g' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.G, tokenStr, ch, location),
|
||||||
'l' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.L, tokenStr, ch),
|
'l' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.L, tokenStr, ch, location),
|
||||||
'n' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.N, tokenStr, ch),
|
'n' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.N, tokenStr, ch, location),
|
||||||
'o' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.O, tokenStr, ch),
|
'o' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.O, tokenStr, ch, location),
|
||||||
'r' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.R, tokenStr, ch),
|
'r' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.R, tokenStr, ch, location),
|
||||||
't' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.T, tokenStr, ch),
|
't' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.T, tokenStr, ch, location),
|
||||||
'u' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.U, tokenStr, ch),
|
'u' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.U, tokenStr, ch, location),
|
||||||
'w' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.W, tokenStr, ch),
|
'w' => try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.W, tokenStr, ch, location),
|
||||||
'0' => try tokenizeTerminalIntNum(lastIndex, index.*, tokenType, state, TokenType.Numeral, TokenizerState.Zero, tokenNumeral, ch),
|
'0' => try tokenizeTerminalIntNum(lastIndex, index.*, tokenType, state, TokenType.Numeral, TokenizerState.Zero, tokenNumeral, ch, location),
|
||||||
'"' =>
|
'"' =>
|
||||||
{
|
{
|
||||||
tokenType.* = null;
|
tokenType.* = null;
|
||||||
@ -203,11 +230,11 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
|
|||||||
}
|
}
|
||||||
else if(std.ascii.isAlphabetic(ch) or ch == '_')
|
else if(std.ascii.isAlphabetic(ch) or ch == '_')
|
||||||
{
|
{
|
||||||
try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.Name, tokenStr, ch);
|
try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Name, TokenizerState.Name, tokenStr, ch, location);
|
||||||
}
|
}
|
||||||
else if(std.ascii.isDigit(ch))
|
else if(std.ascii.isDigit(ch))
|
||||||
{
|
{
|
||||||
try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Numeral, TokenizerState.Name, tokenStr, ch);
|
try tokenizeTerminalStr(lastIndex, index.*, tokenType, state, TokenType.Numeral, TokenizerState.Name, tokenStr, ch, location);
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
@ -222,7 +249,7 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
|
|||||||
switch(ch)
|
switch(ch)
|
||||||
{
|
{
|
||||||
'\\' => state.* = TokenizerState.QuoteBackslash,
|
'\\' => state.* = TokenizerState.QuoteBackslash,
|
||||||
'"' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.StringLiteral, TokenizerState.String),
|
'"' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.StringLiteral, TokenizerState.String, location),
|
||||||
else => try tokenStr.*.append(ch),
|
else => try tokenStr.*.append(ch),
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@ -292,7 +319,7 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
|
|||||||
switch(ch)
|
switch(ch)
|
||||||
{
|
{
|
||||||
'\\' => state.* = TokenizerState.QuoteBackslash,
|
'\\' => state.* = TokenizerState.QuoteBackslash,
|
||||||
'"' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.StringLiteral, TokenizerState.String),
|
'"' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.StringLiteral, TokenizerState.String, location),
|
||||||
else =>
|
else =>
|
||||||
{
|
{
|
||||||
if(!std.ascii.isWhitespace(ch))
|
if(!std.ascii.isWhitespace(ch))
|
||||||
@ -313,7 +340,7 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
|
|||||||
switch(ch)
|
switch(ch)
|
||||||
{
|
{
|
||||||
'\\' => state.* = TokenizerState.SingleQuoteBackslash,
|
'\\' => state.* = TokenizerState.SingleQuoteBackslash,
|
||||||
'\'' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.StringLiteral, TokenizerState.String),
|
'\'' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.StringLiteral, TokenizerState.String, location),
|
||||||
else => try tokenStr.append(ch),
|
else => try tokenStr.append(ch),
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@ -383,7 +410,7 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
|
|||||||
switch(ch)
|
switch(ch)
|
||||||
{
|
{
|
||||||
'\\' => state.* = TokenizerState.SingleQuoteBackslash,
|
'\\' => state.* = TokenizerState.SingleQuoteBackslash,
|
||||||
'\'' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.StringLiteral, TokenizerState.String),
|
'\'' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.StringLiteral, TokenizerState.String, location),
|
||||||
else =>
|
else =>
|
||||||
{
|
{
|
||||||
if(!std.ascii.isWhitespace(ch))
|
if(!std.ascii.isWhitespace(ch))
|
||||||
@ -399,8 +426,8 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
TokenizerState.String => try tokenizeBacktrackCustomToken(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, TokenType.StringLiteral, allocator),
|
TokenizerState.String => try tokenizeBacktrackCustomToken(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, TokenType.StringLiteral, allocator, location),
|
||||||
TokenizerState.Name => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
|
TokenizerState.Name => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
|
||||||
TokenizerState.Zero =>
|
TokenizerState.Zero =>
|
||||||
{
|
{
|
||||||
switch(ch)
|
switch(ch)
|
||||||
@ -430,7 +457,7 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
|
|||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator);
|
try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -452,7 +479,7 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
|
|||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator);
|
try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
TokenizerState.HexNumber =>
|
TokenizerState.HexNumber =>
|
||||||
@ -482,7 +509,7 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
|
|||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator);
|
try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -516,7 +543,7 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
|
|||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator);
|
try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location);
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
@ -529,79 +556,79 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
|
|||||||
TokenizerState.SquareClosed, TokenizerState.Pipe, TokenizerState.Ampersand,
|
TokenizerState.SquareClosed, TokenizerState.Pipe, TokenizerState.Ampersand,
|
||||||
TokenizerState.Percent, TokenizerState.Star, TokenizerState.Semicolon,
|
TokenizerState.Percent, TokenizerState.Star, TokenizerState.Semicolon,
|
||||||
TokenizerState.Caret, TokenizerState.DotDotDot, TokenizerState.GtGt,
|
TokenizerState.Caret, TokenizerState.DotDotDot, TokenizerState.GtGt,
|
||||||
TokenizerState.LtLt, TokenizerState.SlashSlash => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator),
|
TokenizerState.LtLt, TokenizerState.SlashSlash => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location),
|
||||||
TokenizerState.Tilde =>
|
TokenizerState.Tilde =>
|
||||||
{
|
{
|
||||||
switch(ch)
|
switch(ch)
|
||||||
{
|
{
|
||||||
-'=' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.TildeEquals, TokenizerState.TildeEquals),
+'=' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.TildeEquals, TokenizerState.TildeEquals, location),
-else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator),
+else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location),
}
},
TokenizerState.Gt =>
{
switch (ch)
{
-'>' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.GtGt, TokenizerState.GtGt),
+'>' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.GtGt, TokenizerState.GtGt, location),
-'=' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.GtEquals, TokenizerState.GtEquals),
+'=' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.GtEquals, TokenizerState.GtEquals, location),
-else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator),
+else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location),
}
},
TokenizerState.Lt =>
{
switch(ch)
{
-'<' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.LtLt, TokenizerState.LtLt),
+'<' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.LtLt, TokenizerState.LtLt, location),
-'=' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.LtEquals, TokenizerState.LtEquals),
+'=' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.LtEquals, TokenizerState.LtEquals, location),
-else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator),
+else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location),
}
},
TokenizerState.Slash =>
{
switch(ch)
{
-'/' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.SlashSlash, TokenizerState.SlashSlash),
+'/' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.SlashSlash, TokenizerState.SlashSlash, location),
-else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator),
+else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location),
}
},
TokenizerState.Dot =>
{
switch(ch)
{
-'.' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.DotDot, TokenizerState.DotDot),
+'.' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.DotDot, TokenizerState.DotDot, location),
-else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator),
+else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location),
}
},
TokenizerState.DotDot =>
{
switch(ch)
{
-'.' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.DotDotDot, TokenizerState.DotDotDot),
+'.' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.DotDotDot, TokenizerState.DotDotDot, location),
-else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator),
+else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location),
}
},
TokenizerState.Colon =>
{
switch(ch)
{
-':' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.ColonColon, TokenizerState.ColonColon),
+':' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.ColonColon, TokenizerState.ColonColon, location),
-else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator),
+else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location),
}
},
TokenizerState.Equals =>
{
switch(ch)
{
-'=' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.EqualsEquals, TokenizerState.EqualsEquals),
+'=' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, TokenType.EqualsEquals, TokenizerState.EqualsEquals, location),
-else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator),
+else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location),
}
},
TokenizerState.Minus =>
{
switch(ch)
{
-'-' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, null, TokenizerState.SmallCommentStart),
+'-' => tokenizeTerminalBase(lastIndex, index.*, tokenType, state, null, TokenizerState.SmallCommentStart, location),
-else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator),
+else => try tokenizeBacktrack(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, allocator, location),
}
},
TokenizerState.SmallCommentStart =>
@@ -689,486 +716,486 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
{
switch(ch)
{
-'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.An, tokenStr, ch),
+'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.An, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.An =>
{
switch(ch)
{
-'d' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.And, tokenStr, ch),
+'d' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.And, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
-TokenizerState.And => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.And, allocator),
+TokenizerState.And => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.And, allocator, location),
TokenizerState.W =>
{
switch(ch)
{
-'h' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Wh, tokenStr, ch),
+'h' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Wh, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.Wh =>
{
switch(ch)
{
-'i' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Whi, tokenStr, ch),
+'i' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Whi, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.Whi =>
{
switch(ch)
{
-'l' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Whil, tokenStr, ch),
+'l' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Whil, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.Whil =>
{
switch(ch)
{
-'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.While, tokenStr, ch),
+'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.While, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
-TokenizerState.While => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.While, allocator),
+TokenizerState.While => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.While, allocator, location),
TokenizerState.B =>
{
switch(ch)
{
-'r' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Br, tokenStr, ch),
+'r' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Br, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.Br =>
{
switch(ch)
{
-'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Bre, tokenStr, ch),
+'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Bre, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.Bre =>
{
switch(ch)
{
-'a' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Brea, tokenStr, ch),
+'a' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Brea, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.Brea =>
{
switch(ch)
{
-'k' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Break, tokenStr, ch),
+'k' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Break, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
-TokenizerState.Break => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Break, allocator),
+TokenizerState.Break => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Break, allocator, location),
TokenizerState.G =>
{
switch(ch)
{
-'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Go, tokenStr, ch),
+'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Go, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.Go =>
{
switch(ch)
{
-'t' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Got, tokenStr, ch),
+'t' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Got, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.Got =>
{
switch(ch)
{
-'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Goto, tokenStr, ch),
+'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Goto, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
-TokenizerState.Goto => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Goto, allocator),
+TokenizerState.Goto => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Goto, allocator, location),
TokenizerState.R =>
{
switch(ch)
{
-'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Re, tokenStr, ch),
+'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Re, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.Re =>
{
switch(ch)
{
-'t' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Ret, tokenStr, ch),
+'t' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Ret, tokenStr, ch, location),
-'p' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Rep, tokenStr, ch),
+'p' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Rep, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.Ret =>
{
switch(ch)
{
-'u' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Retu, tokenStr, ch),
+'u' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Retu, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.Retu =>
{
switch(ch)
{
-'r' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Retur, tokenStr, ch),
+'r' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Retur, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.Retur =>
{
switch(ch)
{
-'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Return, tokenStr, ch),
+'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Return, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
-TokenizerState.Return => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Return, allocator),
+TokenizerState.Return => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Return, allocator, location),
TokenizerState.Rep =>
{
switch(ch)
{
-'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Repe, tokenStr, ch),
+'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Repe, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.Repe =>
{
switch(ch)
{
-'a' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Repea, tokenStr, ch),
+'a' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Repea, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.Repea =>
{
switch(ch)
{
-'t' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Repeat, tokenStr, ch),
+'t' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Repeat, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
-TokenizerState.Repeat => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Repeat, allocator),
+TokenizerState.Repeat => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Repeat, allocator, location),
TokenizerState.N =>
{
switch(ch)
{
-'i' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Ni, tokenStr, ch),
+'i' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Ni, tokenStr, ch, location),
-'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.No, tokenStr, ch),
+'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.No, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.No =>
{
switch(ch)
{
-'t' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Not, tokenStr, ch),
+'t' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Not, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
-TokenizerState.Not => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Not, allocator),
+TokenizerState.Not => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Not, allocator, location),
TokenizerState.Ni =>
{
switch(ch)
{
-'l' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Nil, tokenStr, ch),
+'l' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Nil, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
-TokenizerState.Nil => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Nil, allocator),
+TokenizerState.Nil => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Nil, allocator, location),
TokenizerState.T =>
{
switch(ch)
{
-'h' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Th, tokenStr, ch),
+'h' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Th, tokenStr, ch, location),
-'r' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Tr, tokenStr, ch),
+'r' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Tr, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.Th =>
{
switch(ch)
{
-'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.The, tokenStr, ch),
+'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.The, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.The =>
{
switch(ch)
{
-'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Then, tokenStr, ch),
+'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Then, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
-TokenizerState.Then => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Then, allocator),
+TokenizerState.Then => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Then, allocator, location),
TokenizerState.Tr =>
{
switch(ch)
{
-'u' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Tru, tokenStr, ch),
+'u' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Tru, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.Tru =>
{
switch(ch)
{
-'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.True, tokenStr, ch),
+'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.True, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
-TokenizerState.True => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.True, allocator),
+TokenizerState.True => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.True, allocator, location),
TokenizerState.E =>
{
switch(ch)
{
-'l' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.El, tokenStr, ch),
+'l' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.El, tokenStr, ch, location),
-'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.En, tokenStr, ch),
+'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.En, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.En =>
{
switch(ch)
{
-'d' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.End, tokenStr, ch),
+'d' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.End, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
-TokenizerState.End => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.End, allocator),
+TokenizerState.End => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.End, allocator, location),
TokenizerState.El =>
{
switch(ch)
{
-'s' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Els, tokenStr, ch),
+'s' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Els, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.Els =>
{
switch(ch)
{
-'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Else, tokenStr, ch),
+'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Else, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.Else =>
{
switch(ch)
{
-'i' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Elsei, tokenStr, ch),
+'i' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Elsei, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Else, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Else, allocator, location),
}
},
TokenizerState.Elsei =>
{
switch(ch)
{
-'f' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Elseif, tokenStr, ch),
+'f' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Elseif, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
-TokenizerState.Elseif => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Elseif, allocator),
+TokenizerState.Elseif => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Elseif, allocator, location),
TokenizerState.O =>
{
switch(ch)
{
-'r' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Or, tokenStr, ch),
+'r' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Or, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
-TokenizerState.Or => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Or, allocator),
+TokenizerState.Or => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Or, allocator, location),
TokenizerState.D =>
{
switch(ch)
{
-'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Do, tokenStr, ch),
+'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Do, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
-TokenizerState.Do => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Do, allocator),
+TokenizerState.Do => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Do, allocator, location),
TokenizerState.I =>
{
switch(ch)
{
-'f' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.If, tokenStr, ch),
+'f' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.If, tokenStr, ch, location),
-'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.In, tokenStr, ch),
+'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.In, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
-TokenizerState.In => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.In, allocator),
+TokenizerState.In => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.In, allocator, location),
-TokenizerState.If => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.If, allocator),
+TokenizerState.If => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.If, allocator, location),
TokenizerState.F =>
{
switch(ch)
{
-'a' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Fa, tokenStr, ch),
+'a' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Fa, tokenStr, ch, location),
-'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Fo, tokenStr, ch),
+'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Fo, tokenStr, ch, location),
-'u' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Fu, tokenStr, ch),
+'u' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Fu, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.Fu =>
{
switch(ch)
{
-'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Fun, tokenStr, ch),
+'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Fun, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.Fun =>
{
switch(ch)
{
-'c' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Func, tokenStr, ch),
+'c' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Func, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.Func =>
{
switch(ch)
{
-'t' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Funct, tokenStr, ch),
+'t' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Funct, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.Funct =>
{
switch(ch)
{
-'i' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Functi, tokenStr, ch),
+'i' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Functi, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.Functi =>
{
switch(ch)
{
-'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Functio, tokenStr, ch),
+'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Functio, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.Functio =>
{
switch(ch)
{
-'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Function, tokenStr, ch),
+'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Function, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
-TokenizerState.Function => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Function, allocator),
+TokenizerState.Function => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Function, allocator, location),
TokenizerState.Fa =>
{
switch(ch)
{
-'l' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Fal, tokenStr, ch),
+'l' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Fal, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.Fal =>
{
switch(ch)
{
-'s' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Fals, tokenStr, ch),
+'s' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Fals, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.Fals =>
{
switch(ch)
{
-'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.False, tokenStr, ch),
+'e' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.False, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
-TokenizerState.False => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.False, allocator),
+TokenizerState.False => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.False, allocator, location),
TokenizerState.Fo =>
{
switch(ch)
{
-'r' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.For, tokenStr, ch),
+'r' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.For, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
-TokenizerState.For => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.For, allocator),
+TokenizerState.For => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.For, allocator, location),
TokenizerState.L =>
{
switch(ch)
{
-'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Lo, tokenStr, ch),
+'o' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Lo, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.Lo =>
{
switch(ch)
{
-'c' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Loc, tokenStr, ch),
+'c' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Loc, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.Loc =>
{
switch(ch)
{
-'a' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Loca, tokenStr, ch),
+'a' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Loca, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.Loca =>
{
switch(ch)
{
-'l' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Local, tokenStr, ch),
+'l' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Local, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
-TokenizerState.Local => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Local, allocator),
+TokenizerState.Local => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Local, allocator, location),
TokenizerState.U =>
{
switch(ch)
{
-'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Un, tokenStr, ch),
+'n' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Un, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.Un =>
{
switch(ch)
{
-'t' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Unt, tokenStr, ch),
+'t' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Unt, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.Unt =>
{
switch(ch)
{
-'i' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Unti, tokenStr, ch),
+'i' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Unti, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
TokenizerState.Unti =>
{
switch(ch)
{
-'l' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Until, tokenStr, ch),
+'l' => try tokenizeTerminalNoToken(lastIndex, index.*, state, TokenizerState.Until, tokenStr, ch, location),
-else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator),
+else => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Name, allocator, location),
}
},
-TokenizerState.Until => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Until, allocator),
+TokenizerState.Until => try tokenizeAlphanumericNonstart(lastIndex, index, tokens, tokenType, tokenStr, tokenNumeral, state, ch, TokenType.Until, allocator, location),
else =>
{
std.debug.print("{}\n", . {state.*});
@@ -1177,7 +1204,7 @@ fn tokenizeChar(state: *TokenizerState, ch: u8, lastIndex: *?usize, index: *usiz
}
}

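Editor's note: every arm above gains the same trailing `location` argument, threading the token's source position through the keyword recognizer. The recognizer itself is a hand-rolled trie: one tokenizer state per accepted keyword prefix (`A` -> `An` -> `And`), with any non-matching character demoting the prefix to a plain `Name`. A minimal standalone sketch of that technique, for orientation only (`KwState` and `step` are illustrative names, not from this codebase):

const std = @import("std");

// One state per accepted keyword prefix; any other character falls back to Name.
const KwState = enum { Start, A, An, And, Name };

fn step(state: KwState, ch: u8) KwState {
    return switch (state) {
        .Start => if (ch == 'a') KwState.A else KwState.Name,
        .A => if (ch == 'n') KwState.An else KwState.Name,
        .An => if (ch == 'd') KwState.And else KwState.Name,
        // A further character after "and" (e.g. "anda") makes it an identifier.
        .And, .Name => .Name,
    };
}

pub fn main() void {
    for ([_][]const u8{ "and", "an", "anda" }) |word| {
        var state = KwState.Start;
        for (word) |ch| state = step(state, ch);
        std.debug.print("{s} -> {}\n", .{ word, state });
    }
}

In the commit, the same fallback is what the recurring `else => try tokenizeAlphanumericNonstart(..., TokenType.Name, ...)` arms implement, while a fully matched keyword state (e.g. `TokenizerState.Until`) emits its keyword token instead.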
-pub fn tokenize(file_content: []u8, allocator: std.mem.Allocator) ![]Token
+pub fn tokenize(fileContent: []u8, allocator: std.mem.Allocator) ![]Token
{
var tokens = std.ArrayList(Token).init(allocator);
var state: TokenizerState = TokenizerState.Start;
@@ -1188,12 +1215,32 @@ pub fn tokenize(file_content: []u8, allocator: std.mem.Allocator) ![]Token
defer tokenStr.deinit();
var tokenNumeral: ?types.Numeral = null;
var longBracketLevel: u32 = 0;
+var location = Location { .start = null, .length = 0 };

-while(index < file_content.len)
+while(index < fileContent.len)
{
-const ch = file_content[index];
+const ch = fileContent[index];
-try tokenizeChar(&state, ch, &lastIndex, &index, &tokenType, &tokenStr, &tokenNumeral, &tokens, &longBracketLevel, allocator);
+try tokenizeChar(&state, ch, &lastIndex, &index, &tokenType, &tokenStr, &tokenNumeral, &tokens, &longBracketLevel, &location, allocator);
+if(location.start != null and location.start.?.col == 0 and location.start.?.line == 0)
+{
+location.start = calculatePoint(fileContent, index);
+}
index += 1;
}
return tokens.toOwnedSlice();
}

+fn calculatePoint(fileContent: []u8, index: usize) Point
+{
+var ret = Point { .col = 1, .line = 1 };
+for(0..index) |i|
+{
+ret.col += 1;
+if(fileContent[i] == '\n')
+{
+ret.line += 1;
+ret.col = 1;
+}
+}
+return ret;
+}
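Editor's note: the new `calculatePoint` derives a 1-based line/column pair by rescanning the file prefix up to `index`, and the loop above appears to invoke it once per token start, trading speed for simplicity. A small self-checking restatement of its behaviour (assuming `Point` holds `line` and `col` fields, as the initializers in the commit suggest; the test and the `[]const u8` parameter are illustrative, not from the commit):

const std = @import("std");

const Point = struct { line: u32, col: u32 };

// Same logic as the committed calculatePoint; []const u8 so string literals work.
fn calculatePoint(fileContent: []const u8, index: usize) Point {
    var ret = Point{ .col = 1, .line = 1 };
    for (0..index) |i| {
        ret.col += 1;
        if (fileContent[i] == '\n') {
            ret.line += 1;
            ret.col = 1;
        }
    }
    return ret;
}

test "columns are 1-based and reset after a newline" {
    const src = "ab\ncd";
    try std.testing.expectEqual(Point{ .line = 1, .col = 1 }, calculatePoint(src, 0)); // 'a'
    try std.testing.expectEqual(Point{ .line = 1, .col = 3 }, calculatePoint(src, 2)); // the '\n'
    try std.testing.expectEqual(Point{ .line = 2, .col = 1 }, calculatePoint(src, 3)); // 'c'
}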
@@ -1 +1,8 @@
+a, b = 12, test(32, 4)
local t=(string.find(originalField.af,'m') and originalField.tableAction) or c.tableAction or originalField.tableAction or tableActionGeneric
+b = {["a"] = 23}
+for i=0, 10 do b[i] = 2^23 end
+print("asdf")
+function test(a, b)
+return 12 + a / b
+end