Merge branch 'csharp' of https://gittea.dev/0x4261756D/luaaaaah into csharp

0x4261756D 2024-03-19 20:29:23 +01:00
commit 5b3da8b4b7
3 changed files with 445 additions and 1545 deletions

LuaTypes.cs (new file, +71)

@@ -0,0 +1,71 @@
using System.Text.Json.Serialization;
namespace luaaaaah;
[JsonDerivedType(typeof(Integer), typeDiscriminator: "int")]
[JsonDerivedType(typeof(Float), typeDiscriminator: "float")]
public interface INumeral
{
public class Integer(int value) : INumeral
{
public int value = value;
public bool RawEqual(INumeral other)
{
if(other is Integer integer)
{
return integer.value == value;
}
// TODO: Check if this is actually doing what is expected
return ((Float)other).value == value;
}
public override string ToString()
{
return $"Numeral Integer {value}";
}
}
public class Float(float value) : INumeral
{
public float value = value;
public bool RawEqual(INumeral other)
{
if(other is Float float_val)
{
return float_val.value == value;
}
// TODO: Check if this is actually doing what is expected
return ((Integer)other).value == value;
}
public override string ToString()
{
return $"Numeral Float {value}";
}
}
public bool RawEqual(INumeral other);
}
class CodeRegion(CodeLocation start, CodeLocation end)
{
public CodeLocation start = start;
public CodeLocation end = end;
public override string ToString()
{
return $"{start}-{end}";
}
}
class CodeLocation(int line, int col)
{
public int line = line;
public int col = col;
public CodeLocation(CodeLocation other) : this(line: other.line, col: other.col) { }
public override string ToString()
{
return $"{line + 1}:{col + 1}";
}
}
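The two RawEqual implementations above compare across numeral variants by casting the other operand and leaning on C#'s implicit int-to-float conversion. A minimal standalone sketch of the resulting semantics (assumed caller code, not part of this commit):

using luaaaaah;

INumeral a = new INumeral.Integer(2);
INumeral b = new INumeral.Float(2.0f);
Console.WriteLine(a.RawEqual(b));                       // true: 2 == 2.0f after implicit widening
Console.WriteLine(b.RawEqual(new INumeral.Integer(3))); // false: 3 != 2.0f

That widening is also the likely subject of the inline TODOs: very large integers lose precision when converted to float, so distinct values can compare equal (an assumption about the authors' concern).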

Parser.cs (300 changed lines)

@@ -4,7 +4,7 @@ using System.Text.Json.Serialization;
namespace luaaaaah;
class Parser
internal class Parser
{
public class ChunkNode(BlockNode block, CodeRegion startRegion, CodeRegion endRegion)
{
@@ -32,95 +32,57 @@ class Parser
[JsonDerivedType(typeof(Function), typeDiscriminator: "st Function")]
[JsonDerivedType(typeof(LocalFunction), typeDiscriminator: "st LocalFunction")]
[JsonDerivedType(typeof(Local), typeDiscriminator: "st Local")]
public abstract class StatNode(CodeRegion startRegion, CodeRegion endRegion)
public abstract class StatNode
{
public CodeRegion startRegion = startRegion, endRegion = endRegion;
public class Semicolon(CodeRegion region) : StatNode(region, region) { }
public class Assignment(AssignmentNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
public class Semicolon(CodeRegion region) : StatNode
{
public AssignmentNode node = node;
public CodeRegion region = region;
}
public class Functioncall(FunctioncallNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
{
public FunctioncallNode node = node;
}
public class Label(string label, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
{
public string label = label;
}
public class Break(CodeRegion region) : StatNode(region, region) { }
public class Goto(string label, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
{
public string label = label;
}
public class Do(BlockNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
{
public BlockNode node = node;
}
public class While(WhileNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
{
public WhileNode node = node;
}
public class Repeat(RepeatNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
{
public RepeatNode node = node;
}
public class If(IfNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
{
public IfNode node = node;
}
public class ForNumerical(ForNumericalNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
{
public ForNumericalNode node = node;
}
public class ForGeneric(ForGenericNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
{
public ForGenericNode node = node;
}
public class Function(FunctionNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
{
public FunctionNode node = node;
}
public class LocalFunction(LocalFunctionNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
{
public LocalFunctionNode node = node;
}
public class Local(LocalNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
{
public LocalNode node = node;
}
}
public class RetstatNode(ExplistNode? values, CodeRegion startRegion, CodeRegion endRegion)
{
public ExplistNode? values = values;
public CodeRegion startRegion = startRegion, endRegion = endRegion;
}
public class AssignmentNode(VarlistNode lhs, ExplistNode rhs, CodeRegion startRegion, CodeRegion endRegion)
public class Assignment(VarlistNode lhs, ExplistNode rhs, CodeRegion startRegion, CodeRegion endRegion) : StatNode
{
public VarlistNode lhs = lhs;
public ExplistNode rhs = rhs;
public CodeRegion startRegion = startRegion, endRegion = endRegion;
}
public class FunctioncallNode(SuffixexpNode function, string? objectArg, ArgsNode args, CodeRegion startRegion, CodeRegion endRegion)
public class Functioncall(FunctioncallNode node) : StatNode
{
public SuffixexpNode function = function;
public string? objectArg = objectArg;
public ArgsNode args = args;
public CodeRegion startRegion = startRegion, endRegion = endRegion;
public FunctioncallNode node = node;
}
public class WhileNode(ExpNode condition, BlockNode body, CodeRegion startRegion, CodeRegion endRegion)
public class Label(string label, CodeRegion startRegion, CodeRegion endRegion) : StatNode
{
public CodeRegion startRegion = startRegion;
public CodeRegion endRegion = endRegion;
public string label = label;
}
public class Break(CodeRegion region) : StatNode
{
public CodeRegion region = region;
}
public class Goto(string label, CodeRegion startRegion, CodeRegion endRegion) : StatNode
{
public CodeRegion startRegion = startRegion;
public CodeRegion endRegion = endRegion;
public string label = label;
}
public class Do(BlockNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode
{
public CodeRegion startRegion = startRegion;
public CodeRegion endRegion = endRegion;
public BlockNode node = node;
}
public class While(ExpNode condition, BlockNode body, CodeRegion startRegion, CodeRegion endRegion) : StatNode
{
public ExpNode condition = condition;
public BlockNode body = body;
public CodeRegion startRegion = startRegion, endRegion = endRegion;
}
public class RepeatNode(ExpNode condition, BlockNode body, CodeRegion startRegion, CodeRegion endRegion)
public class Repeat(ExpNode condition, BlockNode body, CodeRegion startRegion, CodeRegion endRegion) : StatNode
{
public ExpNode condition = condition;
public BlockNode body = body;
public CodeRegion startRegion = startRegion, endRegion = endRegion;
}
public class IfNode(ExpNode condition, BlockNode body, List<ElseifNode> elseifs, CodeRegion startRegion, CodeRegion endRegion)
public class If(ExpNode condition, BlockNode body, List<ElseifNode> elseifs, CodeRegion startRegion, CodeRegion endRegion) : StatNode
{
public ExpNode condition = condition;
public BlockNode body = body;
@@ -128,7 +90,7 @@ class Parser
public BlockNode? else_;
public CodeRegion startRegion = startRegion, endRegion = endRegion;
}
public class ForNumericalNode(string variable, ExpNode start, ExpNode end, ExpNode? change, BlockNode body, CodeRegion startRegion, CodeRegion endRegion)
public class ForNumerical(string variable, ExpNode start, ExpNode end, ExpNode? change, BlockNode body, CodeRegion startRegion, CodeRegion endRegion) : StatNode
{
public string variable = variable;
public ExpNode start = start;
@@ -137,31 +99,48 @@ class Parser
public BlockNode body = body;
public CodeRegion startRegion = startRegion, endRegion = endRegion;
}
public class ForGenericNode(List<string> vars, ExplistNode exps, BlockNode body, CodeRegion startRegion, CodeRegion endRegion)
public class ForGeneric(List<string> vars, ExplistNode exps, BlockNode body, CodeRegion startRegion, CodeRegion endRegion) : StatNode
{
public List<string> vars = vars;
public ExplistNode exps = exps;
public BlockNode body = body;
public CodeRegion startRegion = startRegion, endRegion = endRegion;
}
public class FunctionNode(FuncnameNode name, FuncbodyNode body, CodeRegion startRegion, CodeRegion endRegion)
public class Function(FunctionNode node) : StatNode
{
public FuncnameNode name = name;
public FuncbodyNode body = body;
public CodeRegion startRegion = startRegion, endRegion = endRegion;
public FunctionNode node = node;
}
public class LocalFunctionNode(string name, FuncbodyNode body, CodeRegion startRegion, CodeRegion endRegion)
public class LocalFunction(string name, FuncbodyNode body, CodeRegion startRegion, CodeRegion endRegion) : StatNode
{
public string name = name;
public FuncbodyNode body = body;
public CodeRegion startRegion = startRegion, endRegion = endRegion;
}
public class LocalNode(AttnamelistNode attnames, ExplistNode? values, CodeRegion startRegion, CodeRegion endRegion)
public class Local(AttnamelistNode attnames, ExplistNode? values, CodeRegion startRegion, CodeRegion endRegion) : StatNode
{
public AttnamelistNode attnames = attnames;
public ExplistNode? values = values;
public CodeRegion startRegion = startRegion, endRegion = endRegion;
}
}
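With the wrapper pairs (AssignmentNode plus StatNode.Assignment, and so on) collapsed into direct StatNode subclasses, consumers can dispatch on the statement kind in a single pattern match. A hypothetical consumer sketch (not part of this commit):

static string Describe(Parser.StatNode stat) => stat switch
{
    Parser.StatNode.Assignment a => $"assignment spanning {a.startRegion}-{a.endRegion}",
    Parser.StatNode.While w => $"while loop spanning {w.startRegion}-{w.endRegion}",
    Parser.StatNode.Semicolon s => $"empty statement at {s.region}",
    _ => "other statement",
};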
public class RetstatNode(ExplistNode? values, CodeRegion startRegion, CodeRegion endRegion)
{
public ExplistNode? values = values;
public CodeRegion startRegion = startRegion, endRegion = endRegion;
}
public class FunctioncallNode(SuffixexpNode function, string? objectArg, ArgsNode args)
{
public SuffixexpNode function = function;
public string? objectArg = objectArg;
public ArgsNode args = args;
public CodeRegion startRegion = function.startRegion, endRegion = function.endRegion;
}
public class FunctionNode(FuncnameNode name, FuncbodyNode body, CodeRegion startRegion, CodeRegion endRegion)
{
public FuncnameNode name = name;
public FuncbodyNode body = body;
public CodeRegion startRegion = startRegion, endRegion = endRegion;
}
public class ExplistNode(List<ExpNode> exps, CodeRegion startRegion, CodeRegion endRegion)
{
public List<ExpNode> exps = exps;
@@ -177,11 +156,12 @@ class Parser
public abstract class SuffixexpNode(CodeRegion startRegion, CodeRegion endRegion)
{
public CodeRegion startRegion = startRegion, endRegion = endRegion;
public class Normal(NormalSuffixNode node, CodeRegion startRegion, CodeRegion endRegion) : SuffixexpNode(startRegion, endRegion)
public class Normal(SuffixexpFirstPart firstPart, List<SuffixexpSuffix> suffixes, CodeRegion startRegion, CodeRegion endRegion) : SuffixexpNode(startRegion, endRegion)
{
public NormalSuffixNode node = node;
public SuffixexpFirstPart firstPart = firstPart;
public List<SuffixexpSuffix> suffixes = suffixes;
}
public class Functioncall(FunctioncallNode node, CodeRegion startRegion, CodeRegion endRegion) : SuffixexpNode(startRegion, endRegion)
public class Functioncall(FunctioncallNode node) : SuffixexpNode(node.startRegion, node.endRegion)
{
public FunctioncallNode node = node;
}
@@ -216,38 +196,53 @@ class Parser
[JsonDerivedType(typeof(Tableconstructor), typeDiscriminator: "e Tableconstructor")]
[JsonDerivedType(typeof(Unop), typeDiscriminator: "e Unop")]
[JsonDerivedType(typeof(Binop), typeDiscriminator: "e Binop")]
public abstract class ExpNode(CodeRegion startRegion, CodeRegion endRegion)
public abstract class ExpNode
{
public CodeRegion startRegion = startRegion, endRegion = endRegion;
public class Nil(CodeRegion region) : ExpNode(region, region) { }
public class False(CodeRegion region) : ExpNode(region, region) { }
public class True(CodeRegion region) : ExpNode(region, region) { }
public class Numeral(INumeral value, CodeRegion region) : ExpNode(region, region)
public class Nil(CodeRegion region) : ExpNode
{
public CodeRegion region = region;
}
public class False(CodeRegion region) : ExpNode
{
public CodeRegion region = region;
}
public class True(CodeRegion region) : ExpNode
{
public CodeRegion region = region;
}
public class Numeral(INumeral value, CodeRegion region) : ExpNode
{
public CodeRegion region = region;
public INumeral value = value;
}
public class LiteralString(string value, CodeRegion region) : ExpNode(region, region)
public class LiteralString(string value, CodeRegion region) : ExpNode
{
public CodeRegion region = region;
public string value = value;
}
public class Varargs(CodeRegion region) : ExpNode(region, region) { }
public class Functiondef(FuncbodyNode node, CodeRegion startRegion, CodeRegion endRegion) : ExpNode(startRegion, endRegion)
public class Varargs(CodeRegion region) : ExpNode
{
public CodeRegion region = region;
}
public class Functiondef(FuncbodyNode node, CodeRegion startRegion, CodeRegion endRegion) : ExpNode
{
public CodeRegion startRegion = startRegion;
public CodeRegion endRegion = endRegion;
public FuncbodyNode node = node;
}
public class Suffixexp(SuffixexpNode node, CodeRegion startRegion, CodeRegion endRegion) : ExpNode(startRegion, endRegion)
public class Suffixexp(SuffixexpNode node) : ExpNode
{
public SuffixexpNode node = node;
}
public class Tableconstructor(TableconstructorNode node, CodeRegion startRegion, CodeRegion endRegion) : ExpNode(startRegion, endRegion)
public class Tableconstructor(TableconstructorNode node) : ExpNode
{
public TableconstructorNode node = node;
}
public class Unop(UnopNode node, CodeRegion startRegion, CodeRegion endRegion) : ExpNode(startRegion, endRegion)
public class Unop(UnopNode node) : ExpNode
{
public UnopNode node = node;
}
public class Binop(BinopNode node, CodeRegion startRegion, CodeRegion endRegion) : ExpNode(startRegion, endRegion)
public class Binop(BinopNode node) : ExpNode
{
public BinopNode node = node;
}
@@ -295,12 +290,6 @@ class Parser
public MemberVarNode node = node;
}
}
public class NormalSuffixNode(SuffixexpFirstPart firstPart, List<SuffixexpSuffix> suffixes, CodeRegion startRegion, CodeRegion endRegion)
{
public SuffixexpFirstPart firstPart = firstPart;
public List<SuffixexpSuffix> suffixes = suffixes;
public CodeRegion startRegion = startRegion, endRegion = endRegion;
}
public class TableconstructorNode(FieldlistNode? exps, CodeRegion startRegion, CodeRegion endRegion)
{
public FieldlistNode? exps = exps;
@@ -394,7 +383,7 @@ class Parser
{
public ArgsNode node = node;
}
public class ArgsFirstArg(ArgsFirstArgNode node, CodeRegion startRegion, CodeRegion endRegion) : SuffixexpSuffix(startRegion, endRegion)
public class ArgsFirstArg(ArgsFirstArgNode node) : SuffixexpSuffix(node.startRegion, node.endRegion)
{
public ArgsFirstArgNode node = node;
}
@@ -416,11 +405,11 @@ class Parser
public abstract class FieldNode(CodeRegion startRegion, CodeRegion endRegion)
{
public CodeRegion startRegion = startRegion, endRegion = endRegion;
public class IndexedAssignment(IndexedAssignmentNode node, CodeRegion startRegion, CodeRegion endRegion) : FieldNode(startRegion, endRegion)
public class IndexedAssignment(IndexedAssignmentNode node) : FieldNode(node.startRegion, node.endRegion)
{
public IndexedAssignmentNode node = node;
}
public class Assignment(FieldAssignmentNode node, CodeRegion startRegion, CodeRegion endRegion) : FieldNode(startRegion, endRegion)
public class Assignment(FieldAssignmentNode node) : FieldNode(node.startRegion, node.endRegion)
{
public FieldAssignmentNode node = node;
}
@@ -468,7 +457,7 @@ class Parser
{
stats.Add(ParseStat(tokens));
}
BlockNode ret = new(stats: stats, startRegion: startRegion, endRegion: (stats.Count == 0) ? startRegion : stats[^1].endRegion);
BlockNode ret = new(stats: stats, startRegion: startRegion, endRegion: (stats.Count == 0 && index > 0) ? startRegion : tokens[index - 1].region);
if(index < tokens.Length && tokens[index].type == TokenType.Return)
{
ret.retstat = ParseRetstat(tokens);
@@ -546,7 +535,7 @@ class Parser
}
CodeRegion endRegion = tokens[index].region;
index += 1;
return new StatNode.While(new(condition: condition, body: body, startRegion: startRegion, endRegion: endRegion), startRegion: startRegion, endRegion: endRegion);
return new StatNode.While(condition: condition, body: body, startRegion: startRegion, endRegion: endRegion);
}
case TokenType.Repeat:
{
@@ -562,7 +551,7 @@ class Parser
}
index += 1;
ExpNode condition = ParseExp(tokens);
return new StatNode.Repeat(new(condition: condition, body: body, startRegion: startRegion, endRegion: condition.endRegion), startRegion: startRegion, endRegion: condition.endRegion);
return new StatNode.Repeat(condition: condition, body: body, startRegion: startRegion, endRegion: tokens[index - 1].region);
}
case TokenType.If:
{
@@ -604,7 +593,7 @@ class Parser
{
throw new Exception($"Index {index} out of bounds of {tokens.Length}, expected `end` after else-ifs of if starting at {startRegion}");
}
IfNode ret = new(condition: condition, body: body, elseifs: elseifs, startRegion: startRegion, endRegion: tokens[index - 1].region);
StatNode.If ret = new(condition: condition, body: body, elseifs: elseifs, startRegion: startRegion, endRegion: tokens[index - 1].region);
if(tokens[index].type == TokenType.Else)
{
index += 1;
@@ -620,7 +609,7 @@ class Parser
}
ret.endRegion = tokens[index].region;
index += 1;
return new StatNode.If(node: ret, startRegion: startRegion, endRegion: ret.endRegion);
return ret;
}
case TokenType.For:
{
@@ -683,7 +672,7 @@ class Parser
}
CodeRegion endRegion = tokens[index].region;
index += 1;
return new StatNode.ForNumerical(new(variable: variable, start: start, end: end, change: change, body: body, startRegion: startRegion, endRegion: endRegion), startRegion: startRegion, endRegion: endRegion);
return new StatNode.ForNumerical(variable: variable, start: start, end: end, change: change, body: body, startRegion: startRegion, endRegion: endRegion);
}
case TokenType.Comma:
{
@@ -736,7 +725,7 @@ class Parser
}
CodeRegion endRegion = tokens[index].region;
index += 1;
return new StatNode.ForGeneric(new(vars: names, exps: exps, body: body, startRegion: startRegion, endRegion: endRegion), startRegion: startRegion, endRegion: endRegion);
return new StatNode.ForGeneric(vars: names, exps: exps, body: body, startRegion: startRegion, endRegion: endRegion);
}
case TokenType.In:
{
@@ -762,7 +751,7 @@ class Parser
}
CodeRegion endRegion = tokens[index].region;
index += 1;
return new StatNode.ForGeneric(new(vars: [variable], exps: exps, body: body, startRegion: startRegion, endRegion: endRegion), startRegion: startRegion, endRegion: endRegion);
return new StatNode.ForGeneric(vars: [variable], exps: exps, body: body, startRegion: startRegion, endRegion: endRegion);
}
default:
{
@@ -775,7 +764,7 @@ class Parser
index += 1;
FuncnameNode name = ParseFuncname(tokens);
FuncbodyNode body = ParseFuncbody(tokens);
return new StatNode.Function(new(name: name, body: body, startRegion: startRegion, endRegion: body.endRegion), startRegion: startRegion, endRegion: body.endRegion);
return new StatNode.Function(new(name: name, body: body, startRegion: startRegion, endRegion: body.endRegion));
}
case TokenType.Local:
{
@@ -798,19 +787,19 @@ class Parser
string name = ((Token.StringData)tokens[index].data!).data;
index += 1;
FuncbodyNode body = ParseFuncbody(tokens);
return new StatNode.LocalFunction(new(name: name, body: body, startRegion: startRegion, endRegion: body.endRegion), startRegion: startRegion, endRegion: body.endRegion);
return new StatNode.LocalFunction(name: name, body: body, startRegion: startRegion, endRegion: body.endRegion);
}
else
{
AttnamelistNode attnames = ParseAttnamelist(tokens);
LocalNode ret = new(attnames: attnames, values: null, startRegion: startRegion, endRegion: attnames.endRegion);
StatNode.Local ret = new(attnames: attnames, values: null, startRegion: startRegion, endRegion: attnames.endRegion);
if(index < tokens.Length && tokens[index].type == TokenType.Equals)
{
index += 1;
ret.values = ParseExplist(tokens);
ret.endRegion = ret.values.endRegion;
}
return new StatNode.Local(ret, startRegion: startRegion, endRegion: ret.endRegion);
return ret;
}
}
case TokenType.ColonColon:
@@ -850,7 +839,7 @@ class Parser
}
if(suffixExp is SuffixexpNode.Functioncall functioncall)
{
return new StatNode.Functioncall(node: functioncall.node, startRegion: functioncall.startRegion, endRegion: functioncall.endRegion);
return new StatNode.Functioncall(node: functioncall.node);
}
}
else
@@ -862,7 +851,7 @@ class Parser
index += 1;
List<VarNode> lhs = [SuffixExpToVar(suffixExp)];
ExplistNode rhs = ParseExplist(tokens);
return new StatNode.Assignment(new(lhs: new(vars: lhs, startRegion: startRegion, endRegion: suffixExp.endRegion), rhs: rhs, startRegion: startRegion, endRegion: rhs.endRegion), startRegion: startRegion, endRegion: rhs.endRegion);
return new StatNode.Assignment(lhs: new(vars: lhs, startRegion: startRegion, endRegion: suffixExp.endRegion), rhs: rhs, startRegion: startRegion, endRegion: rhs.endRegion);
}
case TokenType.Comma:
{
@@ -883,7 +872,7 @@ class Parser
index += 1;
VarlistNode varlistNode = new(vars: vars, startRegion: startRegion, endRegion: vars[^1].endRegion);
ExplistNode rhs = ParseExplist(tokens);
return new StatNode.Assignment(new(lhs: varlistNode, rhs: rhs, startRegion: startRegion, endRegion: rhs.endRegion), startRegion: startRegion, endRegion: rhs.endRegion);
return new StatNode.Assignment(lhs: varlistNode, rhs: rhs, startRegion: startRegion, endRegion: rhs.endRegion);
}
}
if(suffixExp is SuffixexpNode.Normal)
@@ -892,7 +881,7 @@ class Parser
}
if(suffixExp is SuffixexpNode.Functioncall functioncall)
{
return new StatNode.Functioncall(node: functioncall.node, startRegion: functioncall.startRegion, endRegion: functioncall.endRegion);
return new StatNode.Functioncall(node: functioncall.node);
}
}
}
@@ -916,16 +905,16 @@ class Parser
{
throw new Exception($"Expected a normal suffix expression to convert to var at {suffixExp.startRegion}-{suffixExp.endRegion}");
}
if(normal.node.suffixes.Count == 0)
if(normal.suffixes.Count == 0)
{
if(normal.node.firstPart is not SuffixexpFirstPart.Name name)
if(normal.firstPart is not SuffixexpFirstPart.Name name)
{
throw new Exception($"Expected a name as first part of suffix expression to convert to var at {normal.node.firstPart.startRegion}-{normal.node.firstPart.endRegion}");
throw new Exception($"Expected a name as first part of suffix expression to convert to var at {normal.firstPart.startRegion}-{normal.firstPart.endRegion}");
}
return new VarNode.Name(name: name.name, startRegion: suffixExp.startRegion, endRegion: suffixExp.endRegion);
}
SuffixexpSuffix last = normal.node.suffixes[^1];
_ = normal.node.suffixes.Remove(last);
SuffixexpSuffix last = normal.suffixes[^1];
_ = normal.suffixes.Remove(last);
return last switch
{
SuffixexpSuffix.Dot dot => new VarNode.Member(node: new(name: dot.name, value: normal, startRegion: suffixExp.startRegion, endRegion: suffixExp.endRegion), startRegion: suffixExp.startRegion, endRegion: dot.endRegion),
@@ -999,7 +988,7 @@ class Parser
{
index += 1;
ExpNode inner = ParseExp(tokens);
suffixes.Add(new SuffixexpSuffix.Indexed(node: inner, startRegion: suffixStartRegion, endRegion: inner.endRegion));
suffixes.Add(new SuffixexpSuffix.Indexed(node: inner, startRegion: suffixStartRegion, endRegion: tokens[index - 1].region));
if(index >= tokens.Length)
{
throw new Exception($"Index {index} out of bounds of {tokens.Length}, expected `]` to close indexed suffix of suffix-expression starting at {suffixStartRegion}");
@@ -1025,7 +1014,7 @@ class Parser
string name = ((Token.StringData)tokens[index].data!).data;
index += 1;
ArgsNode args = ParseArgs(tokens);
suffixes.Add(new SuffixexpSuffix.ArgsFirstArg(new(name, rest: args, startRegion: suffixStartRegion, endRegion: args.endRegion), startRegion: suffixStartRegion, endRegion: args.endRegion));
suffixes.Add(new SuffixexpSuffix.ArgsFirstArg(new(name, rest: args, startRegion: suffixStartRegion, endRegion: args.endRegion)));
}
break;
case TokenType.RoundOpen:
@@ -1051,33 +1040,17 @@ class Parser
{
SuffixexpSuffix.Args args => new SuffixexpNode.Functioncall(
node: new(
function: new SuffixexpNode.Normal(
node: new NormalSuffixNode(firstPart, suffixes[..^1], startRegion, args.endRegion),
startRegion: startRegion,
endRegion: args.endRegion
),
function: new SuffixexpNode.Normal(firstPart, suffixes[..^1], startRegion, args.endRegion),
args: args.node,
objectArg: null,
startRegion: startRegion,
endRegion: args.endRegion
),
startRegion: startRegion,
endRegion: args.endRegion
objectArg: null
)
),
SuffixexpSuffix.ArgsFirstArg node => new SuffixexpNode.Functioncall(
node: new(
function: new SuffixexpNode.Normal(
node: new NormalSuffixNode(firstPart: firstPart, suffixes: suffixes[..^1], startRegion, node.endRegion),
startRegion: startRegion,
endRegion: node.endRegion
),
function: new SuffixexpNode.Normal(firstPart: firstPart, suffixes: suffixes[..^1], startRegion, node.endRegion),
objectArg: node.node.name,
args: node.node.rest,
startRegion: startRegion,
endRegion: node.endRegion
),
startRegion: startRegion,
endRegion: node.endRegion
args: node.node.rest
)
),
_ => null,
};
@@ -1091,11 +1064,7 @@ class Parser
endRegion = firstPart.endRegion;
}
return new SuffixexpNode.Normal(
node: new(firstPart: firstPart, suffixes: suffixes, startRegion: startRegion, endRegion: endRegion),
startRegion: startRegion,
endRegion: endRegion
);
return new SuffixexpNode.Normal(firstPart: firstPart, suffixes: suffixes, startRegion: startRegion, endRegion: endRegion);
}
private ArgsNode ParseArgs(Token[] tokens)
@@ -1231,7 +1200,7 @@ class Parser
}
index += 1;
ExpNode rhs = ParseExp(tokens);
return new FieldNode.IndexedAssignment(node: new(index: indexNode, rhs: rhs, startRegion: startRegion, endRegion: rhs.endRegion), startRegion: startRegion, endRegion: rhs.endRegion);
return new FieldNode.IndexedAssignment(node: new(index: indexNode, rhs: rhs, startRegion: startRegion, endRegion: tokens[index - 1].region));
}
case TokenType.Name:
{
@@ -1240,15 +1209,15 @@ class Parser
string name = ((Token.StringData)tokens[index].data!).data;
index += 2;
ExpNode rhs = ParseExp(tokens);
return new FieldNode.Assignment(node: new(lhs: name, rhs: rhs, startRegion: startRegion, endRegion: rhs.endRegion), startRegion: startRegion, endRegion: rhs.endRegion);
return new FieldNode.Assignment(node: new(lhs: name, rhs: rhs, startRegion: startRegion, endRegion: tokens[index - 1].region));
}
ExpNode exp = ParseExp(tokens);
return new FieldNode.Exp(node: exp, startRegion: startRegion, endRegion: exp.endRegion);
return new FieldNode.Exp(node: exp, startRegion: startRegion, endRegion: tokens[index - 1].region);
}
default:
{
ExpNode exp = ParseExp(tokens);
return new FieldNode.Exp(node: exp, startRegion: startRegion, endRegion: exp.endRegion);
return new FieldNode.Exp(node: exp, startRegion: startRegion, endRegion: tokens[index - 1].region);
}
}
}
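A convention running through this commit: once a production has consumed its final token, index points one past it, so tokens[index - 1].region is the region of the last token actually consumed. Using that for endRegion, instead of the endRegion of the last sub-node, keeps regions accurate when a trailing terminal (`]`, `}`, `end`, a separator) extends past the inner expression. Sketched as a hypothetical helper inside Parser (not in the diff):

// Assumes the parser's invariant that index is one past the last consumed token.
private CodeRegion LastConsumedRegion(Token[] tokens) => tokens[index - 1].region;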
@@ -1448,13 +1417,14 @@ class Parser
private ExplistNode ParseExplist(Token[] tokens)
{
CodeRegion startRegion = tokens[index].region;
List<ExpNode> exps = [ParseExp(tokens)];
while(index < tokens.Length && tokens[index].type == TokenType.Comma)
{
index += 1;
exps.Add(ParseExp(tokens));
}
return new ExplistNode(exps: exps, startRegion: exps[0].startRegion, endRegion: exps[^1].endRegion);
return new ExplistNode(exps: exps, startRegion: startRegion, endRegion: tokens[index - 1].region);
}
private ExpNode ParseExp(Token[] tokens)
@@ -1482,7 +1452,7 @@ class Parser
int associativityBoost = (GetPrecedence(tokens[index]) == precedence) ? 0 : 1;
rhs = ParseExpPrecedence(tokens, lhs: rhs, minPrecedence: precedence + associativityBoost);
}
currentLhs = new ExpNode.Binop(node: new(lhs: currentLhs, type: op, rhs: rhs, startRegion: startRegion, endRegion: rhs.endRegion), startRegion: startRegion, endRegion: rhs.endRegion);
currentLhs = new ExpNode.Binop(node: new(lhs: currentLhs, type: op, rhs: rhs, startRegion: startRegion, endRegion: tokens[index - 1].region));
}
return currentLhs;
}
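The associativityBoost above is the associativity knob of a precedence-climbing parser: recursing at the same precedence level groups a run of equal-precedence operators to the right, while recursing at precedence + 1 groups them to the left. The two groupings it selects between, illustrated with plain C# arithmetic (Lua's `^` is right-associative; this snippet is illustrative, not part of the commit):

Console.WriteLine(Math.Pow(2, Math.Pow(3, 2))); // right-associative reading: 2^(3^2) = 512
Console.WriteLine(Math.Pow(Math.Pow(2, 3), 2)); // left-associative reading:  (2^3)^2 = 64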
@@ -1583,7 +1553,7 @@ class Parser
case TokenType.CurlyOpen:
{
TableconstructorNode inner = ParseTableconstructor(tokens);
return new ExpNode.Tableconstructor(node: inner, startRegion: inner.startRegion, endRegion: inner.endRegion);
return new ExpNode.Tableconstructor(node: inner);
}
case TokenType.Function:
{
@@ -1595,30 +1565,30 @@ class Parser
{
index += 1;
ExpNode unop = ParseExp(tokens);
return new ExpNode.Unop(node: new(type: UnopType.Minus, exp: unop, startRegion: startRegion, endRegion: unop.endRegion), startRegion: startRegion, endRegion: unop.endRegion);
return new ExpNode.Unop(node: new(type: UnopType.Minus, exp: unop, startRegion: startRegion, endRegion: tokens[index - 1].region));
}
case TokenType.Hash:
{
index += 1;
ExpNode unop = ParseExp(tokens);
return new ExpNode.Unop(node: new(type: UnopType.Length, exp: unop, startRegion: startRegion, endRegion: unop.endRegion), startRegion: startRegion, endRegion: unop.endRegion);
return new ExpNode.Unop(node: new(type: UnopType.Length, exp: unop, startRegion: startRegion, endRegion: tokens[index - 1].region));
}
case TokenType.Not:
{
index += 1;
ExpNode unop = ParseExp(tokens);
return new ExpNode.Unop(node: new(type: UnopType.LogicalNot, exp: unop, startRegion: startRegion, endRegion: unop.endRegion), startRegion: startRegion, endRegion: unop.endRegion);
return new ExpNode.Unop(node: new(type: UnopType.LogicalNot, exp: unop, startRegion: startRegion, endRegion: tokens[index - 1].region));
}
case TokenType.Tilde:
{
index += 1;
ExpNode unop = ParseExp(tokens);
return new ExpNode.Unop(node: new(type: UnopType.BinaryNot, exp: unop, startRegion: startRegion, endRegion: unop.endRegion), startRegion: startRegion, endRegion: unop.endRegion);
return new ExpNode.Unop(node: new(type: UnopType.BinaryNot, exp: unop, startRegion: startRegion, endRegion: tokens[index - 1].region));
}
default:
{
SuffixexpNode suffixexp = ParseSuffixExp(tokens);
return new ExpNode.Suffixexp(node: suffixexp, startRegion: suffixexp.startRegion, endRegion: suffixexp.endRegion);
return new ExpNode.Suffixexp(node: suffixexp);
}
}
}

Tokenizer.cs

@@ -1,7 +1,6 @@
using System;
using System.Collections.Generic;
using System.Text;
using System.Text.Json.Serialization;
namespace luaaaaah;
class Tokenizer
@@ -82,6 +81,62 @@ class Tokenizer
currentToken.region.end = new(currentLocation);
}
private void TokenizeTerminal(State newState, TokenType type)
{
lastIndex = index;
state = newState;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: type);
}
private void TokenizeTerminalName(State newState, char ch)
{
lastIndex = index;
state = newState;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
}
private void Backtrack(TokenType newType)
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = newType;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
}
private void BacktrackNoClear(TokenType newType)
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = newType;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
}
private void BacktrackNoTypeChange()
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
}
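These three helpers factor out the rewind step the state machine performs whenever a speculative multi-character match ends: finalize currentToken, move currentLocation back to the token's recorded end, jump index back to the saved lastIndex so the unconsumed input is re-scanned, and return to State.Start. The shared core, as a simplified standalone sketch (the real versions additionally differ in whether they overwrite the token type and clear its data):

class RewindSketch
{
    int index;          // position of the character being examined
    int? lastIndex;     // saved position to resume scanning from

    void Backtrack()
    {
        index = lastIndex!.Value; // re-scan from the saved position...
        lastIndex = null;         // ...and drop the speculation marker
    }
}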
private void TokenizeChar(char ch)
{
switch(state)
@@ -91,263 +146,115 @@ class Tokenizer
switch(ch)
{
case '-':
{
lastIndex = index;
state = State.Minus;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Minus);
} /* tokenizeTerminalBase(TokenType.Minus, TokenizerState.Minus); */
TokenizeTerminal(State.Minus, TokenType.Minus);
break;
case ',':
{
lastIndex = index;
state = State.Comma;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Comma);
} /* tokenizeTerminalBase(TokenType.Comma, TokenizerState.Comma); */
TokenizeTerminal(State.Comma, TokenType.Comma);
break;
case '=':
{
lastIndex = index;
state = State.Equals;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Equals);
} /* tokenizeTerminalBase(TokenType.Equals, TokenizerState.Equals); */
TokenizeTerminal(State.Equals, TokenType.Equals);
break;
case '(':
{
lastIndex = index;
state = State.RoundOpen;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.RoundOpen);
} /* tokenizeTerminalBase(TokenType.RoundOpen, TokenizerState.RoundOpen); */
TokenizeTerminal(State.RoundOpen, TokenType.RoundOpen);
break;
case ')':
{
lastIndex = index;
state = State.RoundClosed;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.RoundClosed);
} /* tokenizeTerminalBase(TokenType.RoundClosed, TokenizerState.RoundClosed); */
TokenizeTerminal(State.RoundClosed, TokenType.RoundClosed);
break;
case '.':
{
lastIndex = index;
state = State.Dot;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Dot);
} /* tokenizeTerminalBase(TokenType.Dot, TokenizerState.Dot); */
TokenizeTerminal(State.Dot, TokenType.Dot);
break;
case ':':
{
lastIndex = index;
state = State.Colon;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Colon);
} /* tokenizeTerminalBase(TokenType.Colon, TokenizerState.Colon); */
TokenizeTerminal(State.Colon, TokenType.Colon);
break;
case '{':
{
lastIndex = index;
state = State.CurlyOpen;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.CurlyOpen);
} /* tokenizeTerminalBase(TokenType.CurlyOpen, TokenizerState.CurlyOpen); */
TokenizeTerminal(State.CurlyOpen, TokenType.CurlyOpen);
break;
case '}':
{
lastIndex = index;
state = State.CurlyClosed;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.CurlyClosed);
} /* tokenizeTerminalBase(TokenType.CurlyClosed, TokenizerState.CurlyClosed); */
TokenizeTerminal(State.CurlyClosed, TokenType.CurlyClosed);
break;
case '[':
{
lastIndex = index;
state = State.SquareOpen;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.SquareOpen);
} /* tokenizeTerminalBase(TokenType.SquareOpen, TokenizerState.SquareOpen); */
TokenizeTerminal(State.SquareOpen, TokenType.SquareOpen);
break;
case ']':
{
lastIndex = index;
state = State.SquareClosed;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.SquareClosed);
} /* tokenizeTerminalBase(TokenType.SquareClosed, TokenizerState.SquareClosed); */
TokenizeTerminal(State.SquareClosed, TokenType.SquareClosed);
break;
case '+':
{
lastIndex = index;
state = State.Plus;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Plus);
} /* tokenizeTerminalBase(TokenType.Plus, TokenizerState.Plus); */
TokenizeTerminal(State.Plus, TokenType.Plus);
break;
case '~':
{
lastIndex = index;
state = State.Tilde;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Tilde);
} /* tokenizeTerminalBase(TokenType.Tilde, TokenizerState.Tilde); */
TokenizeTerminal(State.Tilde, TokenType.Tilde);
break;
case '>':
{
lastIndex = index;
state = State.Gt;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Gt);
} /* tokenizeTerminalBase(TokenType.Gt, TokenizerState.Gt); */
TokenizeTerminal(State.Gt, TokenType.Gt);
break;
case '<':
{
lastIndex = index;
state = State.Lt;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Lt);
} /* tokenizeTerminalBase(TokenType.Lt, TokenizerState.Lt); */
TokenizeTerminal(State.Lt, TokenType.Lt);
break;
case '#':
{
lastIndex = index;
state = State.Hash;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Hash);
} /* tokenizeTerminalBase(TokenType.Hash, TokenizerState.Hash); */
TokenizeTerminal(State.Hash, TokenType.Hash);
break;
case '|':
{
lastIndex = index;
state = State.Pipe;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Pipe);
} /* tokenizeTerminalBase(TokenType.Pipe, TokenizerState.Pipe); */
TokenizeTerminal(State.Pipe, TokenType.Pipe);
break;
case '&':
{
lastIndex = index;
state = State.Ampersand;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Ampersand);
} /* tokenizeTerminalBase(TokenType.Ampersand, TokenizerState.Ampersand); */
TokenizeTerminal(State.Ampersand, TokenType.Ampersand);
break;
case '%':
{
lastIndex = index;
state = State.Percent;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Percent);
} /* tokenizeTerminalBase(TokenType.Percent, TokenizerState.Percent); */
TokenizeTerminal(State.Percent, TokenType.Percent);
break;
case '*':
{
lastIndex = index;
state = State.Star;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Star);
} /* tokenizeTerminalBase(TokenType.Star, TokenizerState.Star); */
TokenizeTerminal(State.Star, TokenType.Star);
break;
case '/':
{
lastIndex = index;
state = State.Slash;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Slash);
} /* tokenizeTerminalBase(TokenType.Slash, TokenizerState.Slash); */
TokenizeTerminal(State.Slash, TokenType.Slash);
break;
case ';':
{
lastIndex = index;
state = State.Semicolon;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Semicolon);
} /* tokenizeTerminalBase(TokenType.Semicolon, TokenizerState.Semicolon); */
TokenizeTerminal(State.Semicolon, TokenType.Semicolon);
break;
case '^':
{
lastIndex = index;
state = State.Caret;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Caret);
} /* tokenizeTerminalBase(TokenType.Caret, TokenizerState.Caret); */
TokenizeTerminal(State.Caret, TokenType.Caret);
break;
case 'a':
{
lastIndex = index;
state = State.A;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.A, tokenStr, ch); */
TokenizeTerminalName(State.A, ch);
break;
case 'b':
{
lastIndex = index;
state = State.B;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.B, tokenStr, ch); */
TokenizeTerminalName(State.B, ch);
break;
case 'd':
{
lastIndex = index;
state = State.D;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.D, tokenStr, ch); */
TokenizeTerminalName(State.D, ch);
break;
case 'e':
{
lastIndex = index;
state = State.E;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.E, tokenStr, ch); */
TokenizeTerminalName(State.E, ch);
break;
case 'f':
{
lastIndex = index;
state = State.F;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.F, tokenStr, ch); */
TokenizeTerminalName(State.F, ch);
break;
case 'i':
{
lastIndex = index;
state = State.I;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.I, tokenStr, ch); */
TokenizeTerminalName(State.I, ch);
break;
case 'g':
{
lastIndex = index;
state = State.G;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.G, tokenStr, ch); */
TokenizeTerminalName(State.G, ch);
break;
case 'l':
{
lastIndex = index;
state = State.L;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.L, tokenStr, ch); */
TokenizeTerminalName(State.L, ch);
break;
case 'n':
{
lastIndex = index;
state = State.N;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.N, tokenStr, ch); */
TokenizeTerminalName(State.N, ch);
break;
case 'o':
{
lastIndex = index;
state = State.O;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.O, tokenStr, ch); */
TokenizeTerminalName(State.O, ch);
break;
case 'r':
{
lastIndex = index;
state = State.R;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.R, tokenStr, ch); */
TokenizeTerminalName(State.R, ch);
break;
case 't':
{
lastIndex = index;
state = State.T;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.T, tokenStr, ch); */
TokenizeTerminalName(State.T, ch);
break;
case 'u':
{
lastIndex = index;
state = State.U;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.U, tokenStr, ch); */
TokenizeTerminalName(State.U, ch);
break;
case 'w':
{
lastIndex = index;
state = State.W;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.W, tokenStr, ch); */
TokenizeTerminalName(State.W, ch);
break;
case '0':
{
@@ -682,35 +589,22 @@ class Tokenizer
}
break;
case 'z':
{
state = State.SingleQuoteBackslashZ;
}
break;
case 'x':
{
state = State.SingleQuoteBackslashX;
}
break;
case 'u':
{
state = State.SingleQuoteBackslashU;
}
break;
default: throw new Exception($"Unknown escape sequence: \\{ch}");
}
}
break;
case State.SingleQuoteBackslashU:
{
if(ch == '{')
{
state = State.SingleQuoteBackslashUBracket;
}
else
{
throw new Exception($"Expected `{{` to continue \\u escape sequence at {currentLocation}, got {ch}");
}
}
state = ch == '{'
? State.SingleQuoteBackslashUBracket
: throw new Exception($"Expected `{{` to continue \\u escape sequence at {currentLocation}, got {ch}");
break;
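Aside on the refactor above: a throw expression is valid on an arm of the conditional operator (C# 7 and later), which is what lets the if/else collapse into a single assignment. A generic sketch of the pattern:

static int RequireBrace(char ch) => ch == '{' ? 1 : throw new Exception($"got {ch}");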
case State.SingleQuoteBackslashUBracket:
{
@@ -850,18 +744,7 @@ class Tokenizer
break;
case State.String:
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.StringLiteral;
//currentToken.region.end = new(currentLocation);
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.StringLiteral);
}
break;
case State.Name:
@@ -875,17 +758,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -911,16 +784,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@@ -964,16 +828,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@@ -996,16 +851,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@@ -1030,16 +876,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@@ -1057,16 +894,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@@ -1094,16 +922,7 @@ class Tokenizer
case State.ColonColon:
case State.SlashSlash:
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
break;
case State.Tilde:
@@ -1116,16 +935,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@@ -1145,16 +955,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@@ -1174,16 +975,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@@ -1197,16 +989,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@@ -1220,16 +1003,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@@ -1244,16 +1018,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@@ -1267,16 +1032,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@@ -1290,16 +1046,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@@ -1313,16 +1060,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@@ -1417,16 +1155,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@@ -1499,17 +1228,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -1530,17 +1249,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -1555,18 +1264,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.And;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.And);
}
}
break;
@@ -1587,17 +1285,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -1618,17 +1306,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -1649,17 +1327,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -1680,17 +1348,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -1705,18 +1363,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.While;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.While);
}
}
break;
@@ -1737,17 +1384,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -1768,17 +1405,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -1799,17 +1426,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -1830,17 +1447,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -1855,18 +1462,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Break;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Break);
}
}
break;
@@ -1887,17 +1483,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -1918,17 +1504,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -1949,17 +1525,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -1974,18 +1540,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Goto;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Goto);
}
}
break;
@@ -2006,17 +1561,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2043,17 +1588,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2074,17 +1609,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2105,17 +1630,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2136,17 +1651,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2161,18 +1666,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Return;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Return);
}
}
break;
@@ -2193,17 +1687,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2224,17 +1708,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2255,17 +1729,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2280,18 +1744,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Repeat;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Repeat);
}
}
break;
@@ -2318,17 +1771,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2349,17 +1792,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2374,18 +1807,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Nil;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Nil);
}
}
break;
@@ -2406,17 +1828,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2431,18 +1843,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Not;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Not);
}
}
break;
@@ -2469,17 +1870,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2500,17 +1891,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2531,17 +1912,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2556,18 +1927,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Then;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Then);
}
}
break;
@@ -2588,17 +1948,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2619,17 +1969,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2644,18 +1984,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.True;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.True);
}
}
break;
@@ -2682,17 +2011,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2713,17 +2032,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2744,17 +2053,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2775,18 +2074,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Else;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Else);
}
}
break;
@@ -2807,17 +2095,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2832,18 +2110,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Elseif;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Elseif);
}
}
break;
@@ -2864,17 +2131,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2889,18 +2146,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.End;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.End);
}
}
break;
@@ -2921,17 +2167,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2946,18 +2182,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Or;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Or);
}
}
break;
@@ -2978,17 +2203,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -3003,18 +2218,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Do;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Do);
}
}
break;
@@ -3041,17 +2245,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -3066,18 +2260,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.In;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.In);
}
}
break;
@@ -3092,18 +2275,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.If;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.If);
}
}
break;
@@ -3136,17 +2308,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -3167,17 +2329,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -3198,17 +2350,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -3229,17 +2371,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -3260,17 +2392,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -3291,17 +2413,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -3322,17 +2434,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -3347,18 +2449,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Function;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Function);
}
}
break;
@@ -3379,17 +2470,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -3410,17 +2491,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -3441,17 +2512,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -3466,18 +2527,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.False;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.False);
}
}
break;
@@ -3498,17 +2548,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -3523,18 +2563,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.For;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.For);
}
}
break;
@@ -3555,17 +2584,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -3586,17 +2605,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -3617,17 +2626,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -3648,17 +2647,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -3673,18 +2662,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Local;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Local);
}
}
break;
@@ -3705,17 +2683,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -3736,17 +2704,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -3767,17 +2725,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -3798,17 +2746,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -3823,18 +2761,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Until;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Until);
}
}
break;
@@ -3875,31 +2802,7 @@ class Tokenizer
}
}
class CodeRegion(CodeLocation start, CodeLocation end)
{
public CodeLocation start = start;
public CodeLocation end = end;
public override string ToString()
{
return $"{start}-{end}";
}
}
class CodeLocation(int line, int col)
{
public int line = line;
public int col = col;
public CodeLocation(CodeLocation other) : this(line: other.line, col: other.col) { }
public override string ToString()
{
return $"{line + 1}:{col + 1}";
}
}
class Token(CodeRegion region, TokenType? type = null, Token.IData? data = null)
internal class Token(CodeRegion region, TokenType? type = null, Token.IData? data = null)
{
public CodeRegion region = region;
public IData? data = data;
@@ -3938,47 +2841,3 @@ public enum TokenType
Numeral,
StringLiteral,
}
[JsonDerivedType(typeof(Integer), typeDiscriminator: "int")]
[JsonDerivedType(typeof(Float), typeDiscriminator: "float")]
public interface INumeral
{
public class Integer(int value) : INumeral
{
public int value = value;
public bool RawEqual(INumeral other)
{
if(other is Integer integer)
{
return integer.value == value;
}
// TODO: Check if this is actually doing what is expected
return ((Float)other).value == value;
}
public override string ToString()
{
return $"Numeral Integer {value}";
}
}
public class Float(float value) : INumeral
{
public float value = value;
public bool RawEqual(INumeral other)
{
if(other is Float float_val)
{
return float_val.value == value;
}
// TODO: Check if this is actually doing what is expected
return ((Integer)other).value == value;
}
public override string ToString()
{
return $"Numeral Float {value}";
}
}
public bool RawEqual(INumeral other);
}
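The TODO in RawEqual flags a real subtlety: in C#, comparing an int to a float implicitly converts the int to float, which rounds once the magnitude exceeds 2^24, whereas Lua's own equality compares the mathematical values. A minimal sketch illustrating the concern (not part of this commit):

	using luaaaaah;

	class RawEqualDemo
	{
		static void Main()
		{
			// 16777217 (2^24 + 1) is not representable as a float; the
			// implicit int-to-float conversion in RawEqual rounds it
			// to 16777216f before comparing.
			var i = new INumeral.Integer(16777217);
			var f = new INumeral.Float(16777216f);
			System.Console.WriteLine(i.RawEqual(f)); // True, though the values differ
			System.Console.WriteLine(f.RawEqual(i)); // True, for the same reason
		}
	}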