Implement parsing and token locations

0x4261756D
2023-09-21 18:30:50 +02:00
parent 721383a043
commit b00a99ab6a
5 changed files with 2721 additions and 251 deletions


@@ -1,5 +1,6 @@
 const std = @import("std");
-const tokenize = @import("tokenizer").tokenize;
+const tokenize = @import("tokenizer.zig").tokenize;
+const parse = @import("parser.zig").parse;
 pub fn main() !void
 {
@@ -14,16 +15,6 @@ pub fn main() !void
 const content = try file.readToEndAlloc(allocator, 13000);
 defer allocator.free(content);
 const tokens = try tokenize(content, allocator);
-//std.debug.print("tokens: {any}", .{tokens});
-for(tokens) |token|
-{
-switch(token.tokenData)
-{
-.string => |*data| std.debug.print("string: {s} {*}\n", .{data.*, data.ptr}),
-.numeral => |*data| std.debug.print("numeral: {any} {*}\n", .{data.*, data}),
-.none => |*data| std.debug.print("none {*}\n", .{data})
-}
-}
 defer
 {
 var i: usize = 0;
@@ -41,4 +32,8 @@ pub fn main() !void
 }
 allocator.free(tokens);
 }
+var parserAllocator = std.heap.ArenaAllocator.init(std.heap.page_allocator);
+defer parserAllocator.deinit();
+const root = try parse(tokens, &parserAllocator);
+root.dump(0);
 }
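The added lines hand the parser an arena allocator: every node allocated while building the tree is released by the single parserAllocator.deinit() in the defer, so the parser never frees nodes individually. Note that the diff passes &parserAllocator, a pointer to the std.heap.ArenaAllocator itself, rather than the std.mem.Allocator interface. Below is a minimal standalone sketch of the same arena pattern (assuming a Zig 0.11-era API; it does not use this repository's parse or dump):

const std = @import("std");

pub fn main() !void
{
    // All allocations go through the arena; a single deinit() releases
    // everything at once, which suits a tree of short-lived parser nodes.
    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer arena.deinit();
    const allocator = arena.allocator();

    // No matching free is needed for any individual allocation.
    const buf = try allocator.alloc(u8, 32);
    @memset(buf, 'a');
    std.debug.print("arena-backed buffer: {s}\n", .{buf});
}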