Merge branch 'csharp' of https://gittea.dev/0x4261756D/luaaaaah into csharp

This commit is contained in:
0x4261756D 2024-03-19 20:29:23 +01:00
commit 5b3da8b4b7
3 changed files with 445 additions and 1545 deletions

71
LuaTypes.cs Normal file
View File

@ -0,0 +1,71 @@
using System.Text.Json.Serialization;
namespace luaaaaah;
[JsonDerivedType(typeof(Integer), typeDiscriminator: "int")]
[JsonDerivedType(typeof(Float), typeDiscriminator: "float")]
public interface INumeral
{
	/// <summary>Integer numeral (Lua's integer number subtype).</summary>
	public class Integer(int value) : INumeral
	{
		public int value = value;
		/// <summary>
		/// Raw (primitive) equality: true when both numerals denote the same
		/// mathematical value, regardless of their subtype.
		/// </summary>
		public bool RawEqual(INumeral other)
		{
			if(other is Integer integer)
			{
				return integer.value == value;
			}
			// Widen the int exactly to double before comparing; comparing int
			// against float directly would round the int to float precision
			// (e.g. 16777217 == 16777216f would be true).
			return ((Float)other).value == (double)value;
		}
		public override string ToString()
		{
			return $"Numeral Integer {value}";
		}
	}
	/// <summary>Float numeral (Lua's float number subtype).</summary>
	public class Float(float value) : INumeral
	{
		public float value = value;
		/// <summary>
		/// Raw (primitive) equality: true when both numerals denote the same
		/// mathematical value, regardless of their subtype.
		/// </summary>
		public bool RawEqual(INumeral other)
		{
			if(other is Float float_val)
			{
				return float_val.value == value;
			}
			// Widen the int exactly to double before comparing (see Integer.RawEqual).
			return (double)((Integer)other).value == value;
		}
		public override string ToString()
		{
			return $"Numeral Float {value}";
		}
	}
	/// <summary>Raw equality across both numeral subtypes, by mathematical value.</summary>
	public bool RawEqual(INumeral other);
}
/// <summary>A contiguous span of source code, from <c>start</c> to <c>end</c>.</summary>
class CodeRegion(CodeLocation start, CodeLocation end)
{
	public CodeLocation start = start;
	public CodeLocation end = end;

	/// <summary>Renders the region as "start-end" in CodeLocation's 1-based "line:col" format.</summary>
	public override string ToString() => $"{start}-{end}";
}
/// <summary>A single position in the source, stored zero-based.</summary>
class CodeLocation(int line, int col)
{
	public int line = line;
	public int col = col;

	/// <summary>Copy constructor.</summary>
	public CodeLocation(CodeLocation other) : this(other.line, other.col) { }

	/// <summary>Displays the zero-based position in the conventional 1-based "line:col" form.</summary>
	public override string ToString() => $"{line + 1}:{col + 1}";
}

300
Parser.cs
View File

@ -4,7 +4,7 @@ using System.Text.Json.Serialization;
namespace luaaaaah; namespace luaaaaah;
class Parser internal class Parser
{ {
public class ChunkNode(BlockNode block, CodeRegion startRegion, CodeRegion endRegion) public class ChunkNode(BlockNode block, CodeRegion startRegion, CodeRegion endRegion)
{ {
@ -32,95 +32,57 @@ class Parser
[JsonDerivedType(typeof(Function), typeDiscriminator: "st Function")] [JsonDerivedType(typeof(Function), typeDiscriminator: "st Function")]
[JsonDerivedType(typeof(LocalFunction), typeDiscriminator: "st LocalFunction")] [JsonDerivedType(typeof(LocalFunction), typeDiscriminator: "st LocalFunction")]
[JsonDerivedType(typeof(Local), typeDiscriminator: "st Local")] [JsonDerivedType(typeof(Local), typeDiscriminator: "st Local")]
public abstract class StatNode(CodeRegion startRegion, CodeRegion endRegion) public abstract class StatNode
{ {
public CodeRegion startRegion = startRegion, endRegion = endRegion; public class Semicolon(CodeRegion region) : StatNode
public class Semicolon(CodeRegion region) : StatNode(region, region) { }
public class Assignment(AssignmentNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
{ {
public AssignmentNode node = node; public CodeRegion region = region;
} }
public class Functioncall(FunctioncallNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion) public class Assignment(VarlistNode lhs, ExplistNode rhs, CodeRegion startRegion, CodeRegion endRegion) : StatNode
{
public FunctioncallNode node = node;
}
public class Label(string label, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
{
public string label = label;
}
public class Break(CodeRegion region) : StatNode(region, region) { }
public class Goto(string label, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
{
public string label = label;
}
public class Do(BlockNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
{
public BlockNode node = node;
}
public class While(WhileNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
{
public WhileNode node = node;
}
public class Repeat(RepeatNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
{
public RepeatNode node = node;
}
public class If(IfNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
{
public IfNode node = node;
}
public class ForNumerical(ForNumericalNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
{
public ForNumericalNode node = node;
}
public class ForGeneric(ForGenericNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
{
public ForGenericNode node = node;
}
public class Function(FunctionNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
{
public FunctionNode node = node;
}
public class LocalFunction(LocalFunctionNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
{
public LocalFunctionNode node = node;
}
public class Local(LocalNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
{
public LocalNode node = node;
}
}
public class RetstatNode(ExplistNode? values, CodeRegion startRegion, CodeRegion endRegion)
{
public ExplistNode? values = values;
public CodeRegion startRegion = startRegion, endRegion = endRegion;
}
public class AssignmentNode(VarlistNode lhs, ExplistNode rhs, CodeRegion startRegion, CodeRegion endRegion)
{ {
public VarlistNode lhs = lhs; public VarlistNode lhs = lhs;
public ExplistNode rhs = rhs; public ExplistNode rhs = rhs;
public CodeRegion startRegion = startRegion, endRegion = endRegion; public CodeRegion startRegion = startRegion, endRegion = endRegion;
} }
public class FunctioncallNode(SuffixexpNode function, string? objectArg, ArgsNode args, CodeRegion startRegion, CodeRegion endRegion) public class Functioncall(FunctioncallNode node) : StatNode
{ {
public SuffixexpNode function = function; public FunctioncallNode node = node;
public string? objectArg = objectArg;
public ArgsNode args = args;
public CodeRegion startRegion = startRegion, endRegion = endRegion;
} }
public class WhileNode(ExpNode condition, BlockNode body, CodeRegion startRegion, CodeRegion endRegion) public class Label(string label, CodeRegion startRegion, CodeRegion endRegion) : StatNode
{
public CodeRegion startRegion = startRegion;
public CodeRegion endRegion = endRegion;
public string label = label;
}
public class Break(CodeRegion region) : StatNode
{
public CodeRegion region = region;
}
public class Goto(string label, CodeRegion startRegion, CodeRegion endRegion) : StatNode
{
public CodeRegion startRegion = startRegion;
public CodeRegion endRegion = endRegion;
public string label = label;
}
public class Do(BlockNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode
{
public CodeRegion startRegion = startRegion;
public CodeRegion endRegion = endRegion;
public BlockNode node = node;
}
public class While(ExpNode condition, BlockNode body, CodeRegion startRegion, CodeRegion endRegion) : StatNode
{ {
public ExpNode condition = condition; public ExpNode condition = condition;
public BlockNode body = body; public BlockNode body = body;
public CodeRegion startRegion = startRegion, endRegion = endRegion; public CodeRegion startRegion = startRegion, endRegion = endRegion;
} }
public class RepeatNode(ExpNode condition, BlockNode body, CodeRegion startRegion, CodeRegion endRegion) public class Repeat(ExpNode condition, BlockNode body, CodeRegion startRegion, CodeRegion endRegion) : StatNode
{ {
public ExpNode condition = condition; public ExpNode condition = condition;
public BlockNode body = body; public BlockNode body = body;
public CodeRegion startRegion = startRegion, endRegion = endRegion; public CodeRegion startRegion = startRegion, endRegion = endRegion;
} }
public class IfNode(ExpNode condition, BlockNode body, List<ElseifNode> elseifs, CodeRegion startRegion, CodeRegion endRegion) public class If(ExpNode condition, BlockNode body, List<ElseifNode> elseifs, CodeRegion startRegion, CodeRegion endRegion) : StatNode
{ {
public ExpNode condition = condition; public ExpNode condition = condition;
public BlockNode body = body; public BlockNode body = body;
@ -128,7 +90,7 @@ class Parser
public BlockNode? else_; public BlockNode? else_;
public CodeRegion startRegion = startRegion, endRegion = endRegion; public CodeRegion startRegion = startRegion, endRegion = endRegion;
} }
public class ForNumericalNode(string variable, ExpNode start, ExpNode end, ExpNode? change, BlockNode body, CodeRegion startRegion, CodeRegion endRegion) public class ForNumerical(string variable, ExpNode start, ExpNode end, ExpNode? change, BlockNode body, CodeRegion startRegion, CodeRegion endRegion) : StatNode
{ {
public string variable = variable; public string variable = variable;
public ExpNode start = start; public ExpNode start = start;
@ -137,31 +99,48 @@ class Parser
public BlockNode body = body; public BlockNode body = body;
public CodeRegion startRegion = startRegion, endRegion = endRegion; public CodeRegion startRegion = startRegion, endRegion = endRegion;
} }
public class ForGenericNode(List<string> vars, ExplistNode exps, BlockNode body, CodeRegion startRegion, CodeRegion endRegion) public class ForGeneric(List<string> vars, ExplistNode exps, BlockNode body, CodeRegion startRegion, CodeRegion endRegion) : StatNode
{ {
public List<string> vars = vars; public List<string> vars = vars;
public ExplistNode exps = exps; public ExplistNode exps = exps;
public BlockNode body = body; public BlockNode body = body;
public CodeRegion startRegion = startRegion, endRegion = endRegion; public CodeRegion startRegion = startRegion, endRegion = endRegion;
} }
public class FunctionNode(FuncnameNode name, FuncbodyNode body, CodeRegion startRegion, CodeRegion endRegion) public class Function(FunctionNode node) : StatNode
{ {
public FuncnameNode name = name; public FunctionNode node = node;
public FuncbodyNode body = body;
public CodeRegion startRegion = startRegion, endRegion = endRegion;
} }
public class LocalFunctionNode(string name, FuncbodyNode body, CodeRegion startRegion, CodeRegion endRegion) public class LocalFunction(string name, FuncbodyNode body, CodeRegion startRegion, CodeRegion endRegion) : StatNode
{ {
public string name = name; public string name = name;
public FuncbodyNode body = body; public FuncbodyNode body = body;
public CodeRegion startRegion = startRegion, endRegion = endRegion; public CodeRegion startRegion = startRegion, endRegion = endRegion;
} }
public class LocalNode(AttnamelistNode attnames, ExplistNode? values, CodeRegion startRegion, CodeRegion endRegion) public class Local(AttnamelistNode attnames, ExplistNode? values, CodeRegion startRegion, CodeRegion endRegion) : StatNode
{ {
public AttnamelistNode attnames = attnames; public AttnamelistNode attnames = attnames;
public ExplistNode? values = values; public ExplistNode? values = values;
public CodeRegion startRegion = startRegion, endRegion = endRegion; public CodeRegion startRegion = startRegion, endRegion = endRegion;
} }
}
public class RetstatNode(ExplistNode? values, CodeRegion startRegion, CodeRegion endRegion)
{
public ExplistNode? values = values;
public CodeRegion startRegion = startRegion, endRegion = endRegion;
}
public class FunctioncallNode(SuffixexpNode function, string? objectArg, ArgsNode args)
{
public SuffixexpNode function = function;
public string? objectArg = objectArg;
public ArgsNode args = args;
public CodeRegion startRegion = function.startRegion, endRegion = function.endRegion;
}
public class FunctionNode(FuncnameNode name, FuncbodyNode body, CodeRegion startRegion, CodeRegion endRegion)
{
public FuncnameNode name = name;
public FuncbodyNode body = body;
public CodeRegion startRegion = startRegion, endRegion = endRegion;
}
public class ExplistNode(List<ExpNode> exps, CodeRegion startRegion, CodeRegion endRegion) public class ExplistNode(List<ExpNode> exps, CodeRegion startRegion, CodeRegion endRegion)
{ {
public List<ExpNode> exps = exps; public List<ExpNode> exps = exps;
@ -177,11 +156,12 @@ class Parser
public abstract class SuffixexpNode(CodeRegion startRegion, CodeRegion endRegion) public abstract class SuffixexpNode(CodeRegion startRegion, CodeRegion endRegion)
{ {
public CodeRegion startRegion = startRegion, endRegion = endRegion; public CodeRegion startRegion = startRegion, endRegion = endRegion;
public class Normal(NormalSuffixNode node, CodeRegion startRegion, CodeRegion endRegion) : SuffixexpNode(startRegion, endRegion) public class Normal(SuffixexpFirstPart firstPart, List<SuffixexpSuffix> suffixes, CodeRegion startRegion, CodeRegion endRegion) : SuffixexpNode(startRegion, endRegion)
{ {
public NormalSuffixNode node = node; public SuffixexpFirstPart firstPart = firstPart;
public List<SuffixexpSuffix> suffixes = suffixes;
} }
public class Functioncall(FunctioncallNode node, CodeRegion startRegion, CodeRegion endRegion) : SuffixexpNode(startRegion, endRegion) public class Functioncall(FunctioncallNode node) : SuffixexpNode(node.startRegion, node.endRegion)
{ {
public FunctioncallNode node = node; public FunctioncallNode node = node;
} }
@ -216,38 +196,53 @@ class Parser
[JsonDerivedType(typeof(Tableconstructor), typeDiscriminator: "e Tableconstructor")] [JsonDerivedType(typeof(Tableconstructor), typeDiscriminator: "e Tableconstructor")]
[JsonDerivedType(typeof(Unop), typeDiscriminator: "e Unop")] [JsonDerivedType(typeof(Unop), typeDiscriminator: "e Unop")]
[JsonDerivedType(typeof(Binop), typeDiscriminator: "e Binop")] [JsonDerivedType(typeof(Binop), typeDiscriminator: "e Binop")]
public abstract class ExpNode(CodeRegion startRegion, CodeRegion endRegion) public abstract class ExpNode
{ {
public CodeRegion startRegion = startRegion, endRegion = endRegion; public class Nil(CodeRegion region) : ExpNode
public class Nil(CodeRegion region) : ExpNode(region, region) { }
public class False(CodeRegion region) : ExpNode(region, region) { }
public class True(CodeRegion region) : ExpNode(region, region) { }
public class Numeral(INumeral value, CodeRegion region) : ExpNode(region, region)
{ {
public CodeRegion region = region;
}
public class False(CodeRegion region) : ExpNode
{
public CodeRegion region = region;
}
public class True(CodeRegion region) : ExpNode
{
public CodeRegion region = region;
}
public class Numeral(INumeral value, CodeRegion region) : ExpNode
{
public CodeRegion region = region;
public INumeral value = value; public INumeral value = value;
} }
public class LiteralString(string value, CodeRegion region) : ExpNode(region, region) public class LiteralString(string value, CodeRegion region) : ExpNode
{ {
public CodeRegion region = region;
public string value = value; public string value = value;
} }
public class Varargs(CodeRegion region) : ExpNode(region, region) { } public class Varargs(CodeRegion region) : ExpNode
public class Functiondef(FuncbodyNode node, CodeRegion startRegion, CodeRegion endRegion) : ExpNode(startRegion, endRegion)
{ {
public CodeRegion region = region;
}
public class Functiondef(FuncbodyNode node, CodeRegion startRegion, CodeRegion endRegion) : ExpNode
{
public CodeRegion startRegion = startRegion;
public CodeRegion endRegion = endRegion;
public FuncbodyNode node = node; public FuncbodyNode node = node;
} }
public class Suffixexp(SuffixexpNode node, CodeRegion startRegion, CodeRegion endRegion) : ExpNode(startRegion, endRegion) public class Suffixexp(SuffixexpNode node) : ExpNode
{ {
public SuffixexpNode node = node; public SuffixexpNode node = node;
} }
public class Tableconstructor(TableconstructorNode node, CodeRegion startRegion, CodeRegion endRegion) : ExpNode(startRegion, endRegion) public class Tableconstructor(TableconstructorNode node) : ExpNode
{ {
public TableconstructorNode node = node; public TableconstructorNode node = node;
} }
public class Unop(UnopNode node, CodeRegion startRegion, CodeRegion endRegion) : ExpNode(startRegion, endRegion) public class Unop(UnopNode node) : ExpNode
{ {
public UnopNode node = node; public UnopNode node = node;
} }
public class Binop(BinopNode node, CodeRegion startRegion, CodeRegion endRegion) : ExpNode(startRegion, endRegion) public class Binop(BinopNode node) : ExpNode
{ {
public BinopNode node = node; public BinopNode node = node;
} }
@ -295,12 +290,6 @@ class Parser
public MemberVarNode node = node; public MemberVarNode node = node;
} }
} }
public class NormalSuffixNode(SuffixexpFirstPart firstPart, List<SuffixexpSuffix> suffixes, CodeRegion startRegion, CodeRegion endRegion)
{
public SuffixexpFirstPart firstPart = firstPart;
public List<SuffixexpSuffix> suffixes = suffixes;
public CodeRegion startRegion = startRegion, endRegion = endRegion;
}
public class TableconstructorNode(FieldlistNode? exps, CodeRegion startRegion, CodeRegion endRegion) public class TableconstructorNode(FieldlistNode? exps, CodeRegion startRegion, CodeRegion endRegion)
{ {
public FieldlistNode? exps = exps; public FieldlistNode? exps = exps;
@ -394,7 +383,7 @@ class Parser
{ {
public ArgsNode node = node; public ArgsNode node = node;
} }
public class ArgsFirstArg(ArgsFirstArgNode node, CodeRegion startRegion, CodeRegion endRegion) : SuffixexpSuffix(startRegion, endRegion) public class ArgsFirstArg(ArgsFirstArgNode node) : SuffixexpSuffix(node.startRegion, node.endRegion)
{ {
public ArgsFirstArgNode node = node; public ArgsFirstArgNode node = node;
} }
@ -416,11 +405,11 @@ class Parser
public abstract class FieldNode(CodeRegion startRegion, CodeRegion endRegion) public abstract class FieldNode(CodeRegion startRegion, CodeRegion endRegion)
{ {
public CodeRegion startRegion = startRegion, endRegion = endRegion; public CodeRegion startRegion = startRegion, endRegion = endRegion;
public class IndexedAssignment(IndexedAssignmentNode node, CodeRegion startRegion, CodeRegion endRegion) : FieldNode(startRegion, endRegion) public class IndexedAssignment(IndexedAssignmentNode node) : FieldNode(node.startRegion, node.endRegion)
{ {
public IndexedAssignmentNode node = node; public IndexedAssignmentNode node = node;
} }
public class Assignment(FieldAssignmentNode node, CodeRegion startRegion, CodeRegion endRegion) : FieldNode(startRegion, endRegion) public class Assignment(FieldAssignmentNode node) : FieldNode(node.startRegion, node.endRegion)
{ {
public FieldAssignmentNode node = node; public FieldAssignmentNode node = node;
} }
@ -468,7 +457,7 @@ class Parser
{ {
stats.Add(ParseStat(tokens)); stats.Add(ParseStat(tokens));
} }
BlockNode ret = new(stats: stats, startRegion: startRegion, endRegion: (stats.Count == 0) ? startRegion : stats[^1].endRegion); BlockNode ret = new(stats: stats, startRegion: startRegion, endRegion: (stats.Count == 0 && index > 0) ? startRegion : tokens[index - 1].region);
if(index < tokens.Length && tokens[index].type == TokenType.Return) if(index < tokens.Length && tokens[index].type == TokenType.Return)
{ {
ret.retstat = ParseRetstat(tokens); ret.retstat = ParseRetstat(tokens);
@ -546,7 +535,7 @@ class Parser
} }
CodeRegion endRegion = tokens[index].region; CodeRegion endRegion = tokens[index].region;
index += 1; index += 1;
return new StatNode.While(new(condition: condition, body: body, startRegion: startRegion, endRegion: endRegion), startRegion: startRegion, endRegion: endRegion); return new StatNode.While(condition: condition, body: body, startRegion: startRegion, endRegion: endRegion);
} }
case TokenType.Repeat: case TokenType.Repeat:
{ {
@ -562,7 +551,7 @@ class Parser
} }
index += 1; index += 1;
ExpNode conditon = ParseExp(tokens); ExpNode conditon = ParseExp(tokens);
return new StatNode.Repeat(new(condition: conditon, body: body, startRegion: startRegion, endRegion: conditon.endRegion), startRegion: startRegion, endRegion: conditon.endRegion); return new StatNode.Repeat(condition: conditon, body: body, startRegion: startRegion, endRegion: tokens[index - 1].region);
} }
case TokenType.If: case TokenType.If:
{ {
@ -604,7 +593,7 @@ class Parser
{ {
throw new Exception($"Index {index} out of bounds of {tokens.Length}, expected `end` after else-ifs of if starting at {startRegion}"); throw new Exception($"Index {index} out of bounds of {tokens.Length}, expected `end` after else-ifs of if starting at {startRegion}");
} }
IfNode ret = new(condition: condition, body: body, elseifs: elseifs, startRegion: startRegion, endRegion: tokens[index - 1].region); StatNode.If ret = new(condition: condition, body: body, elseifs: elseifs, startRegion: startRegion, endRegion: tokens[index - 1].region);
if(tokens[index].type == TokenType.Else) if(tokens[index].type == TokenType.Else)
{ {
index += 1; index += 1;
@ -620,7 +609,7 @@ class Parser
} }
ret.endRegion = tokens[index].region; ret.endRegion = tokens[index].region;
index += 1; index += 1;
return new StatNode.If(node: ret, startRegion: startRegion, endRegion: ret.endRegion); return ret;
} }
case TokenType.For: case TokenType.For:
{ {
@ -683,7 +672,7 @@ class Parser
} }
CodeRegion endRegion = tokens[index].region; CodeRegion endRegion = tokens[index].region;
index += 1; index += 1;
return new StatNode.ForNumerical(new(variable: variable, start: start, end: end, change: change, body: body, startRegion: startRegion, endRegion: endRegion), startRegion: startRegion, endRegion: endRegion); return new StatNode.ForNumerical(variable: variable, start: start, end: end, change: change, body: body, startRegion: startRegion, endRegion: endRegion);
} }
case TokenType.Comma: case TokenType.Comma:
{ {
@ -736,7 +725,7 @@ class Parser
} }
CodeRegion endRegion = tokens[index].region; CodeRegion endRegion = tokens[index].region;
index += 1; index += 1;
return new StatNode.ForGeneric(new(vars: names, exps: exps, body: body, startRegion: startRegion, endRegion: endRegion), startRegion: startRegion, endRegion: endRegion); return new StatNode.ForGeneric(vars: names, exps: exps, body: body, startRegion: startRegion, endRegion: endRegion);
} }
case TokenType.In: case TokenType.In:
{ {
@ -762,7 +751,7 @@ class Parser
} }
CodeRegion endRegion = tokens[index].region; CodeRegion endRegion = tokens[index].region;
index += 1; index += 1;
return new StatNode.ForGeneric(new(vars: [variable], exps: exps, body: body, startRegion: startRegion, endRegion: endRegion), startRegion: startRegion, endRegion: endRegion); return new StatNode.ForGeneric(vars: [variable], exps: exps, body: body, startRegion: startRegion, endRegion: endRegion);
} }
default: default:
{ {
@ -775,7 +764,7 @@ class Parser
index += 1; index += 1;
FuncnameNode name = ParseFuncname(tokens); FuncnameNode name = ParseFuncname(tokens);
FuncbodyNode body = ParseFuncbody(tokens); FuncbodyNode body = ParseFuncbody(tokens);
return new StatNode.Function(new(name: name, body: body, startRegion: startRegion, endRegion: body.endRegion), startRegion: startRegion, endRegion: body.endRegion); return new StatNode.Function(new(name: name, body: body, startRegion: startRegion, endRegion: body.endRegion));
} }
case TokenType.Local: case TokenType.Local:
{ {
@ -798,19 +787,19 @@ class Parser
string name = ((Token.StringData)tokens[index].data!).data; string name = ((Token.StringData)tokens[index].data!).data;
index += 1; index += 1;
FuncbodyNode body = ParseFuncbody(tokens); FuncbodyNode body = ParseFuncbody(tokens);
return new StatNode.LocalFunction(new(name: name, body: body, startRegion: startRegion, endRegion: body.endRegion), startRegion: startRegion, endRegion: body.endRegion); return new StatNode.LocalFunction(name: name, body: body, startRegion: startRegion, endRegion: body.endRegion);
} }
else else
{ {
AttnamelistNode attnames = ParseAttnamelist(tokens); AttnamelistNode attnames = ParseAttnamelist(tokens);
LocalNode ret = new(attnames: attnames, values: null, startRegion: startRegion, endRegion: attnames.endRegion); StatNode.Local ret = new(attnames: attnames, values: null, startRegion: startRegion, endRegion: attnames.endRegion);
if(index < tokens.Length && tokens[index].type == TokenType.Equals) if(index < tokens.Length && tokens[index].type == TokenType.Equals)
{ {
index += 1; index += 1;
ret.values = ParseExplist(tokens); ret.values = ParseExplist(tokens);
ret.endRegion = ret.values.endRegion; ret.endRegion = ret.values.endRegion;
} }
return new StatNode.Local(ret, startRegion: startRegion, endRegion: ret.endRegion); return ret;
} }
} }
case TokenType.ColonColon: case TokenType.ColonColon:
@ -850,7 +839,7 @@ class Parser
} }
if(suffixExp is SuffixexpNode.Functioncall functioncall) if(suffixExp is SuffixexpNode.Functioncall functioncall)
{ {
return new StatNode.Functioncall(node: functioncall.node, startRegion: functioncall.startRegion, endRegion: functioncall.endRegion); return new StatNode.Functioncall(node: functioncall.node);
} }
} }
else else
@ -862,7 +851,7 @@ class Parser
index += 1; index += 1;
List<VarNode> lhs = [SuffixExpToVar(suffixExp)]; List<VarNode> lhs = [SuffixExpToVar(suffixExp)];
ExplistNode rhs = ParseExplist(tokens); ExplistNode rhs = ParseExplist(tokens);
return new StatNode.Assignment(new(lhs: new(vars: lhs, startRegion: startRegion, endRegion: suffixExp.endRegion), rhs: rhs, startRegion: startRegion, endRegion: rhs.endRegion), startRegion: startRegion, endRegion: rhs.endRegion); return new StatNode.Assignment(lhs: new(vars: lhs, startRegion: startRegion, endRegion: suffixExp.endRegion), rhs: rhs, startRegion: startRegion, endRegion: rhs.endRegion);
} }
case TokenType.Comma: case TokenType.Comma:
{ {
@ -883,7 +872,7 @@ class Parser
index += 1; index += 1;
VarlistNode varlistNode = new(vars: vars, startRegion: startRegion, endRegion: vars[^1].endRegion); VarlistNode varlistNode = new(vars: vars, startRegion: startRegion, endRegion: vars[^1].endRegion);
ExplistNode rhs = ParseExplist(tokens); ExplistNode rhs = ParseExplist(tokens);
return new StatNode.Assignment(new(lhs: varlistNode, rhs: rhs, startRegion: startRegion, endRegion: rhs.endRegion), startRegion: startRegion, endRegion: rhs.endRegion); return new StatNode.Assignment(lhs: varlistNode, rhs: rhs, startRegion: startRegion, endRegion: rhs.endRegion);
} }
} }
if(suffixExp is SuffixexpNode.Normal) if(suffixExp is SuffixexpNode.Normal)
@ -892,7 +881,7 @@ class Parser
} }
if(suffixExp is SuffixexpNode.Functioncall functioncall) if(suffixExp is SuffixexpNode.Functioncall functioncall)
{ {
return new StatNode.Functioncall(node: functioncall.node, startRegion: functioncall.startRegion, endRegion: functioncall.endRegion); return new StatNode.Functioncall(node: functioncall.node);
} }
} }
} }
@ -916,16 +905,16 @@ class Parser
{ {
throw new Exception($"Expected a normal suffix expression to convert to var at {suffixExp.startRegion}-{suffixExp.endRegion}"); throw new Exception($"Expected a normal suffix expression to convert to var at {suffixExp.startRegion}-{suffixExp.endRegion}");
} }
if(normal.node.suffixes.Count == 0) if(normal.suffixes.Count == 0)
{ {
if(normal.node.firstPart is not SuffixexpFirstPart.Name name) if(normal.firstPart is not SuffixexpFirstPart.Name name)
{ {
throw new Exception($"Expected a name as first part of suffix expression to convert to var at {normal.node.firstPart.startRegion}-{normal.node.firstPart.endRegion}"); throw new Exception($"Expected a name as first part of suffix expression to convert to var at {normal.firstPart.startRegion}-{normal.firstPart.endRegion}");
} }
return new VarNode.Name(name: name.name, startRegion: suffixExp.startRegion, endRegion: suffixExp.endRegion); return new VarNode.Name(name: name.name, startRegion: suffixExp.startRegion, endRegion: suffixExp.endRegion);
} }
SuffixexpSuffix last = normal.node.suffixes[^1]; SuffixexpSuffix last = normal.suffixes[^1];
_ = normal.node.suffixes.Remove(last); _ = normal.suffixes.Remove(last);
return last switch return last switch
{ {
SuffixexpSuffix.Dot dot => new VarNode.Member(node: new(name: dot.name, value: normal, startRegion: suffixExp.startRegion, endRegion: suffixExp.endRegion), startRegion: suffixExp.startRegion, endRegion: dot.endRegion), SuffixexpSuffix.Dot dot => new VarNode.Member(node: new(name: dot.name, value: normal, startRegion: suffixExp.startRegion, endRegion: suffixExp.endRegion), startRegion: suffixExp.startRegion, endRegion: dot.endRegion),
@ -999,7 +988,7 @@ class Parser
{ {
index += 1; index += 1;
ExpNode inner = ParseExp(tokens); ExpNode inner = ParseExp(tokens);
suffixes.Add(new SuffixexpSuffix.Indexed(node: inner, startRegion: suffixStartRegion, endRegion: inner.endRegion)); suffixes.Add(new SuffixexpSuffix.Indexed(node: inner, startRegion: suffixStartRegion, endRegion: tokens[index - 1].region));
if(index >= tokens.Length) if(index >= tokens.Length)
{ {
throw new Exception($"Index {index} out of bounds of {tokens.Length}, expected `]` to close indexed suffix of suffix-expression starting at {suffixStartRegion}"); throw new Exception($"Index {index} out of bounds of {tokens.Length}, expected `]` to close indexed suffix of suffix-expression starting at {suffixStartRegion}");
@ -1025,7 +1014,7 @@ class Parser
string name = ((Token.StringData)tokens[index].data!).data; string name = ((Token.StringData)tokens[index].data!).data;
index += 1; index += 1;
ArgsNode args = ParseArgs(tokens); ArgsNode args = ParseArgs(tokens);
suffixes.Add(new SuffixexpSuffix.ArgsFirstArg(new(name, rest: args, startRegion: suffixStartRegion, endRegion: args.endRegion), startRegion: suffixStartRegion, endRegion: args.endRegion)); suffixes.Add(new SuffixexpSuffix.ArgsFirstArg(new(name, rest: args, startRegion: suffixStartRegion, endRegion: args.endRegion)));
} }
break; break;
case TokenType.RoundOpen: case TokenType.RoundOpen:
@ -1051,33 +1040,17 @@ class Parser
{ {
SuffixexpSuffix.Args args => new SuffixexpNode.Functioncall( SuffixexpSuffix.Args args => new SuffixexpNode.Functioncall(
node: new( node: new(
function: new SuffixexpNode.Normal( function: new SuffixexpNode.Normal(firstPart, suffixes[..^1], startRegion, args.endRegion),
node: new NormalSuffixNode(firstPart, suffixes[..^1], startRegion, args.endRegion),
startRegion: startRegion,
endRegion: args.endRegion
),
args: args.node, args: args.node,
objectArg: null, objectArg: null
startRegion: startRegion, )
endRegion: args.endRegion
),
startRegion: startRegion,
endRegion: args.endRegion
), ),
SuffixexpSuffix.ArgsFirstArg node => new SuffixexpNode.Functioncall( SuffixexpSuffix.ArgsFirstArg node => new SuffixexpNode.Functioncall(
node: new( node: new(
function: new SuffixexpNode.Normal( function: new SuffixexpNode.Normal(firstPart: firstPart, suffixes: suffixes[..^1], startRegion, node.endRegion),
node: new NormalSuffixNode(firstPart: firstPart, suffixes: suffixes[..^1], startRegion, node.endRegion),
startRegion: startRegion,
endRegion: node.endRegion
),
objectArg: node.node.name, objectArg: node.node.name,
args: node.node.rest, args: node.node.rest
startRegion: startRegion, )
endRegion: node.endRegion
),
startRegion: startRegion,
endRegion: node.endRegion
), ),
_ => null, _ => null,
}; };
@ -1091,11 +1064,7 @@ class Parser
endRegion = firstPart.endRegion; endRegion = firstPart.endRegion;
} }
return new SuffixexpNode.Normal( return new SuffixexpNode.Normal(firstPart: firstPart, suffixes: suffixes, startRegion: startRegion, endRegion: endRegion);
node: new(firstPart: firstPart, suffixes: suffixes, startRegion: startRegion, endRegion: endRegion),
startRegion: startRegion,
endRegion: endRegion
);
} }
private ArgsNode ParseArgs(Token[] tokens) private ArgsNode ParseArgs(Token[] tokens)
@ -1231,7 +1200,7 @@ class Parser
} }
index += 1; index += 1;
ExpNode rhs = ParseExp(tokens); ExpNode rhs = ParseExp(tokens);
return new FieldNode.IndexedAssignment(node: new(index: indexNode, rhs: rhs, startRegion: startRegion, endRegion: rhs.endRegion), startRegion: startRegion, endRegion: rhs.endRegion); return new FieldNode.IndexedAssignment(node: new(index: indexNode, rhs: rhs, startRegion: startRegion, endRegion: tokens[index - 1].region));
} }
case TokenType.Name: case TokenType.Name:
{ {
@ -1240,15 +1209,15 @@ class Parser
string name = ((Token.StringData)tokens[index].data!).data; string name = ((Token.StringData)tokens[index].data!).data;
index += 2; index += 2;
ExpNode rhs = ParseExp(tokens); ExpNode rhs = ParseExp(tokens);
return new FieldNode.Assignment(node: new(lhs: name, rhs: rhs, startRegion: startRegion, endRegion: rhs.endRegion), startRegion: startRegion, endRegion: rhs.endRegion); return new FieldNode.Assignment(node: new(lhs: name, rhs: rhs, startRegion: startRegion, endRegion: tokens[index - 1].region));
} }
ExpNode exp = ParseExp(tokens); ExpNode exp = ParseExp(tokens);
return new FieldNode.Exp(node: exp, startRegion: startRegion, endRegion: exp.endRegion); return new FieldNode.Exp(node: exp, startRegion: startRegion, endRegion: tokens[index - 1].region);
} }
default: default:
{ {
ExpNode exp = ParseExp(tokens); ExpNode exp = ParseExp(tokens);
return new FieldNode.Exp(node: exp, startRegion: startRegion, endRegion: exp.endRegion); return new FieldNode.Exp(node: exp, startRegion: startRegion, endRegion: tokens[index - 1].region);
} }
} }
} }
@ -1448,13 +1417,14 @@ class Parser
private ExplistNode ParseExplist(Token[] tokens) private ExplistNode ParseExplist(Token[] tokens)
{ {
CodeRegion startRegion = tokens[index].region;
List<ExpNode> exps = [ParseExp(tokens)]; List<ExpNode> exps = [ParseExp(tokens)];
while(index < tokens.Length && tokens[index].type == TokenType.Comma) while(index < tokens.Length && tokens[index].type == TokenType.Comma)
{ {
index += 1; index += 1;
exps.Add(ParseExp(tokens)); exps.Add(ParseExp(tokens));
} }
return new ExplistNode(exps: exps, startRegion: exps[0].startRegion, endRegion: exps[^1].endRegion); return new ExplistNode(exps: exps, startRegion: startRegion, endRegion: tokens[index - 1].region);
} }
private ExpNode ParseExp(Token[] tokens) private ExpNode ParseExp(Token[] tokens)
@ -1482,7 +1452,7 @@ class Parser
int associativityBoost = (GetPrecedence(tokens[index]) == precedence) ? 0 : 1; int associativityBoost = (GetPrecedence(tokens[index]) == precedence) ? 0 : 1;
rhs = ParseExpPrecedence(tokens, lhs: rhs, minPrecedence: precedence + associativityBoost); rhs = ParseExpPrecedence(tokens, lhs: rhs, minPrecedence: precedence + associativityBoost);
} }
currentLhs = new ExpNode.Binop(node: new(lhs: currentLhs, type: op, rhs: rhs, startRegion: startRegion, endRegion: rhs.endRegion), startRegion: startRegion, endRegion: rhs.endRegion); currentLhs = new ExpNode.Binop(node: new(lhs: currentLhs, type: op, rhs: rhs, startRegion: startRegion, endRegion: tokens[index - 1].region));
} }
return currentLhs; return currentLhs;
} }
@ -1583,7 +1553,7 @@ class Parser
case TokenType.CurlyOpen: case TokenType.CurlyOpen:
{ {
TableconstructorNode inner = ParseTableconstructor(tokens); TableconstructorNode inner = ParseTableconstructor(tokens);
return new ExpNode.Tableconstructor(node: inner, startRegion: inner.startRegion, endRegion: inner.endRegion); return new ExpNode.Tableconstructor(node: inner);
} }
case TokenType.Function: case TokenType.Function:
{ {
@ -1595,30 +1565,30 @@ class Parser
{ {
index += 1; index += 1;
ExpNode unop = ParseExp(tokens); ExpNode unop = ParseExp(tokens);
return new ExpNode.Unop(node: new(type: UnopType.Minus, exp: unop, startRegion: startRegion, endRegion: unop.endRegion), startRegion: startRegion, endRegion: unop.endRegion); return new ExpNode.Unop(node: new(type: UnopType.Minus, exp: unop, startRegion: startRegion, endRegion: tokens[index - 1].region));
} }
case TokenType.Hash: case TokenType.Hash:
{ {
index += 1; index += 1;
ExpNode unop = ParseExp(tokens); ExpNode unop = ParseExp(tokens);
return new ExpNode.Unop(node: new(type: UnopType.Length, exp: unop, startRegion: startRegion, endRegion: unop.endRegion), startRegion: startRegion, endRegion: unop.endRegion); return new ExpNode.Unop(node: new(type: UnopType.Length, exp: unop, startRegion: startRegion, endRegion: tokens[index - 1].region));
} }
case TokenType.Not: case TokenType.Not:
{ {
index += 1; index += 1;
ExpNode unop = ParseExp(tokens); ExpNode unop = ParseExp(tokens);
return new ExpNode.Unop(node: new(type: UnopType.LogicalNot, exp: unop, startRegion: startRegion, endRegion: unop.endRegion), startRegion: startRegion, endRegion: unop.endRegion); return new ExpNode.Unop(node: new(type: UnopType.LogicalNot, exp: unop, startRegion: startRegion, endRegion: tokens[index - 1].region));
} }
case TokenType.Tilde: case TokenType.Tilde:
{ {
index += 1; index += 1;
ExpNode unop = ParseExp(tokens); ExpNode unop = ParseExp(tokens);
return new ExpNode.Unop(node: new(type: UnopType.BinaryNot, exp: unop, startRegion: startRegion, endRegion: unop.endRegion), startRegion: startRegion, endRegion: unop.endRegion); return new ExpNode.Unop(node: new(type: UnopType.BinaryNot, exp: unop, startRegion: startRegion, endRegion: tokens[index - 1].region));
} }
default: default:
{ {
SuffixexpNode suffixexp = ParseSuffixExp(tokens); SuffixexpNode suffixexp = ParseSuffixExp(tokens);
return new ExpNode.Suffixexp(node: suffixexp, startRegion: suffixexp.startRegion, endRegion: suffixexp.endRegion); return new ExpNode.Suffixexp(node: suffixexp);
} }
} }
} }

View File

@ -1,7 +1,6 @@
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.Text; using System.Text;
using System.Text.Json.Serialization;
namespace luaaaaah; namespace luaaaaah;
class Tokenizer class Tokenizer
@ -82,6 +81,62 @@ class Tokenizer
currentToken.region.end = new(currentLocation); currentToken.region.end = new(currentLocation);
} }
private void TokenizeTerminal(State newState, TokenType type)
{
lastIndex = index;
state = newState;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: type);
}
private void TokenizeTerminalName(State newState, char ch)
{
lastIndex = index;
state = newState;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
}
private void Backtrack(TokenType newType)
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = newType;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
}
private void BacktrackNoClear(TokenType newType)
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = newType;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
}
private void BacktrackNoTypeChange()
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
}
private void TokenizeChar(char ch) private void TokenizeChar(char ch)
{ {
switch(state) switch(state)
@ -91,263 +146,115 @@ class Tokenizer
switch(ch) switch(ch)
{ {
case '-': case '-':
{ TokenizeTerminal(State.Minus, TokenType.Minus);
lastIndex = index;
state = State.Minus;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Minus);
} /* tokenizeTerminalBase(TokenType.Minus, TokenizerState.Minus); */
break; break;
case ',': case ',':
{ TokenizeTerminal(State.Comma, TokenType.Comma);
lastIndex = index;
state = State.Comma;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Comma);
} /* tokenizeTerminalBase(TokenType.Comma, TokenizerState.Comma); */
break; break;
case '=': case '=':
{ TokenizeTerminal(State.Equals, TokenType.Equals);
lastIndex = index;
state = State.Equals;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Equals);
} /* tokenizeTerminalBase(TokenType.Equals, TokenizerState.Equals); */
break; break;
case '(': case '(':
{ TokenizeTerminal(State.RoundOpen, TokenType.RoundOpen);
lastIndex = index;
state = State.RoundOpen;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.RoundOpen);
} /* tokenizeTerminalBase(TokenType.RoundOpen, TokenizerState.RoundOpen); */
break; break;
case ')': case ')':
{ TokenizeTerminal(State.RoundClosed, TokenType.RoundClosed);
lastIndex = index;
state = State.RoundClosed;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.RoundClosed);
} /* tokenizeTerminalBase(TokenType.RoundClosed, TokenizerState.RoundClosed); */
break; break;
case '.': case '.':
{ TokenizeTerminal(State.Dot, TokenType.Dot);
lastIndex = index;
state = State.Dot;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Dot);
} /* tokenizeTerminalBase(TokenType.Dot, TokenizerState.Dot); */
break; break;
case ':': case ':':
{ TokenizeTerminal(State.Colon, TokenType.Colon);
lastIndex = index;
state = State.Colon;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Colon);
} /* tokenizeTerminalBase(TokenType.Colon, TokenizerState.Colon); */
break; break;
case '{': case '{':
{ TokenizeTerminal(State.CurlyOpen, TokenType.CurlyOpen);
lastIndex = index;
state = State.CurlyOpen;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.CurlyOpen);
} /* tokenizeTerminalBase(TokenType.CurlyOpen, TokenizerState.CurlyOpen); */
break; break;
case '}': case '}':
{ TokenizeTerminal(State.CurlyClosed, TokenType.CurlyClosed);
lastIndex = index;
state = State.CurlyClosed;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.CurlyClosed);
} /* tokenizeTerminalBase(TokenType.CurlyClosed, TokenizerState.CurlyClosed); */
break; break;
case '[': case '[':
{ TokenizeTerminal(State.SquareOpen, TokenType.SquareOpen);
lastIndex = index;
state = State.SquareOpen;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.SquareOpen);
} /* tokenizeTerminalBase(TokenType.SquareOpen, TokenizerState.SquareOpen); */
break; break;
case ']': case ']':
{ TokenizeTerminal(State.SquareClosed, TokenType.SquareClosed);
lastIndex = index;
state = State.SquareClosed;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.SquareClosed);
} /* tokenizeTerminalBase(TokenType.SquareClosed, TokenizerState.SquareClosed); */
break; break;
case '+': case '+':
{ TokenizeTerminal(State.Plus, TokenType.Plus);
lastIndex = index;
state = State.Plus;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Plus);
} /* tokenizeTerminalBase(TokenType.Plus, TokenizerState.Plus); */
break; break;
case '~': case '~':
{ TokenizeTerminal(State.Tilde, TokenType.Tilde);
lastIndex = index;
state = State.Tilde;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Tilde);
} /* tokenizeTerminalBase(TokenType.Tilde, TokenizerState.Tilde); */
break; break;
case '>': case '>':
{ TokenizeTerminal(State.Gt, TokenType.Gt);
lastIndex = index;
state = State.Gt;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Gt);
} /* tokenizeTerminalBase(TokenType.Gt, TokenizerState.Gt); */
break; break;
case '<': case '<':
{ TokenizeTerminal(State.Lt, TokenType.Lt);
lastIndex = index;
state = State.Lt;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Lt);
} /* tokenizeTerminalBase(TokenType.Lt, TokenizerState.Lt); */
break; break;
case '#': case '#':
{ TokenizeTerminal(State.Hash, TokenType.Hash);
lastIndex = index;
state = State.Hash;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Hash);
} /* tokenizeTerminalBase(TokenType.Hash, TokenizerState.Hash); */
break; break;
case '|': case '|':
{ TokenizeTerminal(State.Pipe, TokenType.Pipe);
lastIndex = index;
state = State.Pipe;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Pipe);
} /* tokenizeTerminalBase(TokenType.Pipe, TokenizerState.Pipe); */
break; break;
case '&': case '&':
{ TokenizeTerminal(State.Ampersand, TokenType.Ampersand);
lastIndex = index;
state = State.Ampersand;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Ampersand);
} /* tokenizeTerminalBase(TokenType.Ampersand, TokenizerState.Ampersand); */
break; break;
case '%': case '%':
{ TokenizeTerminal(State.Percent, TokenType.Percent);
lastIndex = index;
state = State.Percent;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Percent);
} /* tokenizeTerminalBase(TokenType.Percent, TokenizerState.Percent); */
break; break;
case '*': case '*':
{ TokenizeTerminal(State.Star, TokenType.Star);
lastIndex = index;
state = State.Star;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Star);
} /* tokenizeTerminalBase(TokenType.Star, TokenizerState.Star); */
break; break;
case '/': case '/':
{ TokenizeTerminal(State.Slash, TokenType.Slash);
lastIndex = index;
state = State.Slash;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Slash);
} /* tokenizeTerminalBase(TokenType.Slash, TokenizerState.Slash); */
break; break;
case ';': case ';':
{ TokenizeTerminal(State.Semicolon, TokenType.Semicolon);
lastIndex = index;
state = State.Semicolon;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Semicolon);
} /* tokenizeTerminalBase(TokenType.Semicolon, TokenizerState.Semicolon); */
break; break;
case '^': case '^':
{ TokenizeTerminal(State.Caret, TokenType.Caret);
lastIndex = index;
state = State.Caret;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Caret);
} /* tokenizeTerminalBase(TokenType.Caret, TokenizerState.Caret); */
break; break;
case 'a': case 'a':
{ TokenizeTerminalName(State.A, ch);
lastIndex = index;
state = State.A;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.A, tokenStr, ch); */
break; break;
case 'b': case 'b':
{ TokenizeTerminalName(State.B, ch);
lastIndex = index;
state = State.B;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.B, tokenStr, ch); */
break; break;
case 'd': case 'd':
{ TokenizeTerminalName(State.D, ch);
lastIndex = index;
state = State.D;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.D, tokenStr, ch); */
break; break;
case 'e': case 'e':
{ TokenizeTerminalName(State.E, ch);
lastIndex = index;
state = State.E;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.E, tokenStr, ch); */
break; break;
case 'f': case 'f':
{ TokenizeTerminalName(State.F, ch);
lastIndex = index;
state = State.F;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.F, tokenStr, ch); */
break; break;
case 'i': case 'i':
{ TokenizeTerminalName(State.I, ch);
lastIndex = index;
state = State.I;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.I, tokenStr, ch); */
break; break;
case 'g': case 'g':
{ TokenizeTerminalName(State.G, ch);
lastIndex = index;
state = State.G;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.G, tokenStr, ch); */
break; break;
case 'l': case 'l':
{ TokenizeTerminalName(State.L, ch);
lastIndex = index;
state = State.L;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.L, tokenStr, ch); */
break; break;
case 'n': case 'n':
{ TokenizeTerminalName(State.N, ch);
lastIndex = index;
state = State.N;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.N, tokenStr, ch); */
break; break;
case 'o': case 'o':
{ TokenizeTerminalName(State.O, ch);
lastIndex = index;
state = State.O;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.O, tokenStr, ch); */
break; break;
case 'r': case 'r':
{ TokenizeTerminalName(State.R, ch);
lastIndex = index;
state = State.R;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.R, tokenStr, ch); */
break; break;
case 't': case 't':
{ TokenizeTerminalName(State.T, ch);
lastIndex = index;
state = State.T;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.T, tokenStr, ch); */
break; break;
case 'u': case 'u':
{ TokenizeTerminalName(State.U, ch);
lastIndex = index;
state = State.U;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.U, tokenStr, ch); */
break; break;
case 'w': case 'w':
{ TokenizeTerminalName(State.W, ch);
lastIndex = index;
state = State.W;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.W, tokenStr, ch); */
break; break;
case '0': case '0':
{ {
@ -682,35 +589,22 @@ class Tokenizer
} }
break; break;
case 'z': case 'z':
{
state = State.SingleQuoteBackslashZ; state = State.SingleQuoteBackslashZ;
}
break; break;
case 'x': case 'x':
{
state = State.SingleQuoteBackslashX; state = State.SingleQuoteBackslashX;
}
break; break;
case 'u': case 'u':
{
state = State.SingleQuoteBackslashU; state = State.SingleQuoteBackslashU;
}
break; break;
default: throw new Exception($"Unknown escape sequence: \\{ch}"); default: throw new Exception($"Unknown escape sequence: \\{ch}");
} }
} }
break; break;
case State.SingleQuoteBackslashU: case State.SingleQuoteBackslashU:
{ state = ch == '{'
if(ch == '{') ? State.SingleQuoteBackslashUBracket
{ : throw new Exception($"Expected `{{` to continue \\u escape sequence at {currentLocation}, got {ch}");
state = State.SingleQuoteBackslashUBracket;
}
else
{
throw new Exception($"Expected `{{` to continue \\u escape sequence at {currentLocation}, got {ch}");
}
}
break; break;
case State.SingleQuoteBackslashUBracket: case State.SingleQuoteBackslashUBracket:
{ {
@ -850,18 +744,7 @@ class Tokenizer
break; break;
case State.String: case State.String:
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.StringLiteral);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.StringLiteral;
//currentToken.region.end = new(currentLocation);
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
break; break;
case State.Name: case State.Name:
@ -875,17 +758,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -911,16 +784,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoTypeChange();
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -964,16 +828,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoTypeChange();
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -996,16 +851,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoTypeChange();
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -1030,16 +876,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoTypeChange();
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -1057,16 +894,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoTypeChange();
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -1094,16 +922,7 @@ class Tokenizer
case State.ColonColon: case State.ColonColon:
case State.SlashSlash: case State.SlashSlash:
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoTypeChange();
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
break; break;
case State.Tilde: case State.Tilde:
@ -1116,16 +935,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoTypeChange();
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -1145,16 +955,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoTypeChange();
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -1174,16 +975,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoTypeChange();
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -1197,16 +989,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoTypeChange();
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -1220,16 +1003,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoTypeChange();
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -1244,16 +1018,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoTypeChange();
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -1267,16 +1032,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoTypeChange();
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -1290,16 +1046,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoTypeChange();
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -1313,16 +1060,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoTypeChange();
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -1417,16 +1155,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoTypeChange();
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -1499,17 +1228,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -1530,17 +1249,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -1555,18 +1264,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) Backtrack(TokenType.And);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.And;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -1587,17 +1285,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -1618,17 +1306,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -1649,17 +1327,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -1680,17 +1348,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -1705,18 +1363,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) Backtrack(TokenType.While);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.While;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -1737,17 +1384,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -1768,17 +1405,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -1799,17 +1426,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -1830,17 +1447,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -1855,18 +1462,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) Backtrack(TokenType.Break);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Break;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -1887,17 +1483,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -1918,17 +1504,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -1949,17 +1525,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -1974,18 +1540,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) Backtrack(TokenType.Goto);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Goto;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -2006,17 +1561,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -2043,17 +1588,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -2074,17 +1609,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -2105,17 +1630,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -2136,17 +1651,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -2161,18 +1666,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) Backtrack(TokenType.Return);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Return;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -2193,17 +1687,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -2224,17 +1708,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -2255,17 +1729,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -2280,18 +1744,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) Backtrack(TokenType.Repeat);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Repeat;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -2318,17 +1771,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -2349,17 +1792,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -2374,18 +1807,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) Backtrack(TokenType.Nil);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Nil;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -2406,17 +1828,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -2431,18 +1843,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) Backtrack(TokenType.Not);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Not;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -2469,17 +1870,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -2500,17 +1891,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -2531,17 +1912,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -2556,18 +1927,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) Backtrack(TokenType.Then);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Then;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -2588,17 +1948,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -2619,17 +1969,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -2644,18 +1984,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) Backtrack(TokenType.True);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.True;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -2682,17 +2011,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -2713,17 +2032,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -2744,17 +2053,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -2775,18 +2074,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) Backtrack(TokenType.Else);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Else;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -2807,17 +2095,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -2832,18 +2110,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) Backtrack(TokenType.Elseif);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Elseif;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -2864,17 +2131,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -2889,18 +2146,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) Backtrack(TokenType.End);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.End;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -2921,17 +2167,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -2946,18 +2182,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) Backtrack(TokenType.Or);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Or;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -2978,17 +2203,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -3003,18 +2218,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) Backtrack(TokenType.Do);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Do;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -3041,17 +2245,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -3066,18 +2260,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) Backtrack(TokenType.In);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.In;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -3092,18 +2275,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) Backtrack(TokenType.If);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.If;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -3136,17 +2308,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -3167,17 +2329,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -3198,17 +2350,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -3229,17 +2371,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -3260,17 +2392,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -3291,17 +2413,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -3322,17 +2434,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -3347,18 +2449,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) Backtrack(TokenType.Function);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Function;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -3379,17 +2470,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -3410,17 +2491,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -3441,17 +2512,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -3466,18 +2527,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) Backtrack(TokenType.False);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.False;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -3498,17 +2548,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -3523,18 +2563,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) Backtrack(TokenType.For);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.For;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -3555,17 +2584,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -3586,17 +2605,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -3617,17 +2626,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -3648,17 +2647,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -3673,18 +2662,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) Backtrack(TokenType.Local);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Local;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -3705,17 +2683,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -3736,17 +2704,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -3767,17 +2725,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -3798,17 +2746,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) BacktrackNoClear(TokenType.Name);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -3823,18 +2761,7 @@ class Tokenizer
} }
else else
{ {
if(currentToken == null || currentToken.type == null) Backtrack(TokenType.Until);
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Until;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
} }
} }
break; break;
@ -3875,31 +2802,7 @@ class Tokenizer
} }
} }
class CodeRegion(CodeLocation start, CodeLocation end) internal class Token(CodeRegion region, TokenType? type = null, Token.IData? data = null)
{
public CodeLocation start = start;
public CodeLocation end = end;
public override string ToString()
{
return $"{start}-{end}";
}
}
class CodeLocation(int line, int col)
{
public int line = line;
public int col = col;
public CodeLocation(CodeLocation other) : this(line: other.line, col: other.col) { }
public override string ToString()
{
return $"{line + 1}:{col + 1}";
}
}
class Token(CodeRegion region, TokenType? type = null, Token.IData? data = null)
{ {
public CodeRegion region = region; public CodeRegion region = region;
public IData? data = data; public IData? data = data;
@ -3938,47 +2841,3 @@ public enum TokenType
Numeral, Numeral,
StringLiteral, StringLiteral,
} }
[JsonDerivedType(typeof(Integer), typeDiscriminator: "int")]
[JsonDerivedType(typeof(Float), typeDiscriminator: "float")]
public interface INumeral
{
public class Integer(int value) : INumeral
{
public int value = value;
public bool RawEqual(INumeral other)
{
if(other is Integer integer)
{
return integer.value == value;
}
// TODO: Check if this is actually doing what is expected
return ((Float)other).value == value;
}
public override string ToString()
{
return $"Numeral Integer {value}";
}
}
public class Float(float value) : INumeral
{
public float value = value;
public bool RawEqual(INumeral other)
{
if(other is Float float_val)
{
return float_val.value == value;
}
// TODO: Check if this is actually doing what is expected
return ((Integer)other).value == value;
}
public override string ToString()
{
return $"Numeral Float {value}";
}
}
public bool RawEqual(INumeral other);
}