Add tokenizer
src/main.zig | 44 additions (new file)
@@ -0,0 +1,44 @@
const std = @import("std");
const tokenize = @import("tokenizer").tokenize;

pub fn main() !void
{
    // Leak-checking general-purpose allocator; the result of deinit() is discarded here.
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    const allocator = gpa.allocator();

    // The first command-line argument names the file to tokenize.
    const args = try std.process.argsAlloc(allocator);
    defer std.process.argsFree(allocator, args);
    const file = try std.fs.cwd().openFile(args[1], .{});
    defer file.close();
    // Read the whole file (up to 13000 bytes) into an allocated buffer.
    const content = try file.readToEndAlloc(allocator, 13000);
    defer allocator.free(content);
    const tokens = try tokenize(content, allocator);
    //std.debug.print("tokens: {any}", .{tokens});
    // Print every token along with the address of its payload.
    for(tokens) |token|
    {
        switch(token.tokenData)
        {
            .string => |*data| std.debug.print("string: {s} {*}\n", .{data.*, data.ptr}),
            .numeral => |*data| std.debug.print("numeral: {any} {*}\n", .{data.*, data}),
            .none => |*data| std.debug.print("none {*}\n", .{data})
        }
    }
    // Deferred cleanup: runs at scope exit, freeing each string payload
    // and then the token slice itself.
    defer
    {
        var i: usize = 0;
        while(i < tokens.len)
        {
            switch(tokens[i].tokenData)
            {
                .string => |*data|
                {
                    allocator.free(data.*);
                },
                else => {}
            }
            i += 1;
        }
        allocator.free(tokens);
    }
}
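
The tokenizer module referenced by @import("tokenizer") is not part of this commit. As a reading aid, here is a minimal sketch of the interface main.zig appears to assume; only tokenize(), the tokenData field, and the string/numeral/none tags are implied by the caller, while the type names Token and TokenData, the f64 numeral payload, the void none payload, and the stub body are guesses.

    // Hypothetical sketch of the tokenizer interface assumed by main.zig.
    // Every name and type not visible from the call sites above is an assumption.
    const std = @import("std");

    pub const Token = struct {
        tokenData: TokenData,
    };

    pub const TokenData = union(enum) {
        string: []u8, // heap-allocated; main.zig frees it with allocator.free
        numeral: f64, // assumed payload type; main.zig only prints it with {any}
        none: void,   // payload, if any, is unknown; void is assumed here
    };

    // Signature implied by the call site `try tokenize(content, allocator)`:
    // takes the file contents plus an allocator and returns a caller-owned slice.
    pub fn tokenize(content: []const u8, allocator: std.mem.Allocator) ![]Token {
        _ = content; // a real implementation would scan `content` and emit tokens
        return try allocator.alloc(Token, 0); // empty result keeps the stub compilable
    }

With a layout like this, the slice returned by tokenize() is owned by main(), which is why the deferred block above frees every .string payload before freeing the slice itself.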