From 40c744119ce8ab3b67dc81644cb7a9c1c3fd746f Mon Sep 17 00:00:00 2001
From: 0x4261756D <38735823+0x4261756D@users.noreply.github.com>
Date: Wed, 28 Feb 2024 17:02:23 +0100
Subject: [PATCH 1/5] Make regions not part of superclass StatNode

Since the majority of subclasses already have a node covering the same
region, the regions can be omitted from the superclass.
---
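[Editor's note] The commit drops the startRegion/endRegion parameters from the
abstract StatNode: subclasses that wrap a payload node (Assignment, While,
Repeat, ForNumerical, ForGeneric, Function, LocalFunction, Functioncall) can
recover the regions from that node, while subclasses without such a payload
(Semicolon, Label, Break, Goto, Do, If, Local) keep explicit region fields.
A minimal, self-contained sketch of the pattern follows; Region, InnerNode,
Wrapper and Leaf are invented names, not the project's real types:

	// Hypothetical illustration of the refactoring; the C# 12
	// primary-constructor syntax matches what the patch uses.
	public record Region(int Line, int Col);

	public class InnerNode(Region startRegion, Region endRegion)
	{
		public Region startRegion = startRegion, endRegion = endRegion;
	}

	public abstract class Node
	{
		// A wrapping subclass no longer passes regions to the base class;
		// they already live on the node it wraps.
		public class Wrapper(InnerNode node) : Node
		{
			public InnerNode node = node;
		}

		// A subclass without a payload stores its own region instead.
		public class Leaf(Region region) : Node
		{
			public Region region = region;
		}
	}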
 Parser.cs | 73 +++++++++++++++++++++++++++++++++----------------------
 1 file changed, 44 insertions(+), 29 deletions(-)

diff --git a/Parser.cs b/Parser.cs
index 3113a2e..683f9b7 100644
--- a/Parser.cs
+++ b/Parser.cs
@@ -32,61 +32,76 @@ class Parser
 	[JsonDerivedType(typeof(Function), typeDiscriminator: "st Function")]
 	[JsonDerivedType(typeof(LocalFunction), typeDiscriminator: "st LocalFunction")]
 	[JsonDerivedType(typeof(Local), typeDiscriminator: "st Local")]
-	public abstract class StatNode(CodeRegion startRegion, CodeRegion endRegion)
+	public abstract class StatNode
 	{
-		public CodeRegion startRegion = startRegion, endRegion = endRegion;
-		public class Semicolon(CodeRegion region) : StatNode(region, region) { }
-		public class Assignment(AssignmentNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
+		public class Semicolon(CodeRegion region) : StatNode
+		{
+			public CodeRegion region = region;
+		}
+		public class Assignment(AssignmentNode node) : StatNode
 		{
 			public AssignmentNode node = node;
 		}
-		public class Functioncall(FunctioncallNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
+		public class Functioncall(FunctioncallNode node) : StatNode
 		{
 			public FunctioncallNode node = node;
 		}
-		public class Label(string label, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
+		public class Label(string label, CodeRegion startRegion, CodeRegion endRegion) : StatNode
 		{
+			public CodeRegion startRegion = startRegion;
+			public CodeRegion endRegion = endRegion;
 			public string label = label;
 		}
-		public class Break(CodeRegion region) : StatNode(region, region) { }
-		public class Goto(string label, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
+		public class Break(CodeRegion region) : StatNode
 		{
+			public CodeRegion region = region;
+		}
+		public class Goto(string label, CodeRegion startRegion, CodeRegion endRegion) : StatNode
+		{
+			public CodeRegion startRegion = startRegion;
+			public CodeRegion endRegion = endRegion;
 			public string label = label;
 		}
-		public class Do(BlockNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
+		public class Do(BlockNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode
 		{
+			public CodeRegion startRegion = startRegion;
+			public CodeRegion endRegion = endRegion;
 			public BlockNode node = node;
 		}
-		public class While(WhileNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
+		public class While(WhileNode node) : StatNode
 		{
 			public WhileNode node = node;
 		}
-		public class Repeat(RepeatNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
+		public class Repeat(RepeatNode node) : StatNode
 		{
 			public RepeatNode node = node;
 		}
-		public class If(IfNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
+		public class If(IfNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode
 		{
+			public CodeRegion startRegion = startRegion;
+			public CodeRegion endRegion = endRegion;
 			public IfNode node = node;
 		}
-		public class ForNumerical(ForNumericalNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
+		public class ForNumerical(ForNumericalNode node) : StatNode
 		{
 			public ForNumericalNode node = node;
 		}
-		public class ForGeneric(ForGenericNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
+		public class ForGeneric(ForGenericNode node) : StatNode
 		{
 			public ForGenericNode node = node;
 		}
-		public class Function(FunctionNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
+		public class Function(FunctionNode node) : StatNode
 		{
 			public FunctionNode node = node;
 		}
-		public class LocalFunction(LocalFunctionNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
+		public class LocalFunction(LocalFunctionNode node) : StatNode
 		{
 			public LocalFunctionNode node = node;
 		}
-		public class Local(LocalNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode(startRegion, endRegion)
+		public class Local(LocalNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode
 		{
+			public CodeRegion startRegion = startRegion;
+			public CodeRegion endRegion = endRegion;
 			public LocalNode node = node;
 		}
 	}
@@ -468,7 +483,7 @@ class Parser
 		{
 			stats.Add(ParseStat(tokens));
 		}
-		BlockNode ret = new(stats: stats, startRegion: startRegion, endRegion: (stats.Count == 0) ? startRegion : stats[^1].endRegion);
+		BlockNode ret = new(stats: stats, startRegion: startRegion, endRegion: (stats.Count == 0 && index > 0) ? startRegion : tokens[index - 1].region);
 		if(index < tokens.Length && tokens[index].type == TokenType.Return)
 		{
 			ret.retstat = ParseRetstat(tokens);
@@ -546,7 +561,7 @@ class Parser
 				}
 				CodeRegion endRegion = tokens[index].region;
 				index += 1;
-				return new StatNode.While(new(condition: condition, body: body, startRegion: startRegion, endRegion: endRegion), startRegion: startRegion, endRegion: endRegion);
+				return new StatNode.While(new(condition: condition, body: body, startRegion: startRegion, endRegion: endRegion));
 			}
 			case TokenType.Repeat:
 			{
@@ -562,7 +577,7 @@ class Parser
 				}
 				index += 1;
 				ExpNode conditon = ParseExp(tokens);
-				return new StatNode.Repeat(new(condition: conditon, body: body, startRegion: startRegion, endRegion: conditon.endRegion), startRegion: startRegion, endRegion: conditon.endRegion);
+				return new StatNode.Repeat(new(condition: conditon, body: body, startRegion: startRegion, endRegion: conditon.endRegion));
 			}
 			case TokenType.If:
 			{
@@ -683,7 +698,7 @@ class Parser
 				}
 				CodeRegion endRegion = tokens[index].region;
 				index += 1;
-				return new StatNode.ForNumerical(new(variable: variable, start: start, end: end, change: change, body: body, startRegion: startRegion, endRegion: endRegion), startRegion: startRegion, endRegion: endRegion);
+				return new StatNode.ForNumerical(new(variable: variable, start: start, end: end, change: change, body: body, startRegion: startRegion, endRegion: endRegion));
 			}
 			case TokenType.Comma:
 			{
@@ -736,7 +751,7 @@ class Parser
 				}
 				CodeRegion endRegion = tokens[index].region;
 				index += 1;
-				return new StatNode.ForGeneric(new(vars: names, exps: exps, body: body, startRegion: startRegion, endRegion: endRegion), startRegion: startRegion, endRegion: endRegion);
+				return new StatNode.ForGeneric(new(vars: names, exps: exps, body: body, startRegion: startRegion, endRegion: endRegion));
 			}
 			case TokenType.In:
 			{
@@ -762,7 +777,7 @@ class Parser
 				}
 				CodeRegion endRegion = tokens[index].region;
 				index += 1;
-				return new StatNode.ForGeneric(new(vars: [variable], exps: exps, body: body, startRegion: startRegion, endRegion: endRegion), startRegion: startRegion, endRegion: endRegion);
+				return new StatNode.ForGeneric(new(vars: [variable], exps: exps, body: body, startRegion: startRegion, endRegion: endRegion));
 			}
 			default:
 			{
@@ -775,7 +790,7 @@ class Parser
 			index += 1;
 			FuncnameNode name = ParseFuncname(tokens);
 			FuncbodyNode body = ParseFuncbody(tokens);
-			return new StatNode.Function(new(name: name, body: body, startRegion: startRegion, endRegion: body.endRegion), startRegion: startRegion, endRegion: body.endRegion);
+			return new StatNode.Function(new(name: name, body: body, startRegion: startRegion, endRegion: body.endRegion));
 		}
 		case TokenType.Local:
 		{
@@ -798,7 +813,7 @@ class Parser
 				string name = ((Token.StringData)tokens[index].data!).data;
 				index += 1;
 				FuncbodyNode body = ParseFuncbody(tokens);
-				return new StatNode.LocalFunction(new(name: name, body: body, startRegion: startRegion, endRegion: body.endRegion), startRegion: startRegion, endRegion: body.endRegion);
+				return new StatNode.LocalFunction(new(name: name, body: body, startRegion: startRegion, endRegion: body.endRegion));
 			}
 			else
 			{
@@ -850,7 +865,7 @@ class Parser
 			}
 			if(suffixExp is SuffixexpNode.Functioncall functioncall)
 			{
-				return new StatNode.Functioncall(node: functioncall.node, startRegion: functioncall.startRegion, endRegion: functioncall.endRegion);
+				return new StatNode.Functioncall(node: functioncall.node);
 			}
 		}
 		else
@@ -862,7 +877,7 @@ class Parser
 				index += 1;
 				List lhs = [SuffixExpToVar(suffixExp)];
 				ExplistNode rhs = ParseExplist(tokens);
-				return new StatNode.Assignment(new(lhs: new(vars: lhs, startRegion: startRegion, endRegion: suffixExp.endRegion), rhs: rhs, startRegion: startRegion, endRegion: rhs.endRegion), startRegion: startRegion, endRegion: rhs.endRegion);
+				return new StatNode.Assignment(new(lhs: new(vars: lhs, startRegion: startRegion, endRegion: suffixExp.endRegion), rhs: rhs, startRegion: startRegion, endRegion: rhs.endRegion));
 			}
 			case TokenType.Comma:
 			{
@@ -883,7 +898,7 @@ class Parser
 				index += 1;
 				VarlistNode varlistNode = new(vars: vars, startRegion: startRegion, endRegion: vars[^1].endRegion);
 				ExplistNode rhs = ParseExplist(tokens);
-				return new StatNode.Assignment(new(lhs: varlistNode, rhs: rhs, startRegion: startRegion, endRegion: rhs.endRegion), startRegion: startRegion, endRegion: rhs.endRegion);
+				return new StatNode.Assignment(new(lhs: varlistNode, rhs: rhs, startRegion: startRegion, endRegion: rhs.endRegion));
 			}
 		}
 		if(suffixExp is SuffixexpNode.Normal)
 		{
@@ -892,7 +907,7 @@ class Parser
 		}
 		if(suffixExp is SuffixexpNode.Functioncall functioncall)
 		{
-			return new StatNode.Functioncall(node: functioncall.node, startRegion: functioncall.startRegion, endRegion: functioncall.endRegion);
+			return new StatNode.Functioncall(node: functioncall.node);
 		}
 	}
 }

From 28f110e2c4e98b2f45bde5c81ab0347a2a85818c Mon Sep 17 00:00:00 2001
From: 0x4261756D <38735823+0x4261756D@users.noreply.github.com>
Date: Wed, 28 Feb 2024 17:41:36 +0100
Subject: [PATCH 2/5] Further cut down the number of unnecessary code regions

---
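[Editor's note] Two devices carry this commit, sketched below with invented
names (Parent, Child, Normal) rather than the parser's real types: a wrapper
class forwards its child's regions to the base-class constructor instead of
taking two extra parameters, and end regions are now read from the last
consumed token (tokens[index - 1].region) rather than from sub-expressions.

	// Hypothetical, self-contained illustration of the forwarding pattern.
	public class Region(int line, int col)
	{
		public int line = line, col = col;
	}

	public class Child(Region start, Region end)
	{
		public Region startRegion = start, endRegion = end;
	}

	public abstract class Parent(Region start, Region end)
	{
		public Region startRegion = start, endRegion = end;

		// The child already knows its own extent, so the wrapper forwards
		// it instead of duplicating the two region parameters.
		public class Normal(Child node) : Parent(node.startRegion, node.endRegion)
		{
			public Child node = node;
		}
	}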
 Parser.cs | 118 +++++++++++++++++++++++++++--------------------------
 1 file changed, 59 insertions(+), 59 deletions(-)

diff --git a/Parser.cs b/Parser.cs
index 683f9b7..d551dcc 100644
--- a/Parser.cs
+++ b/Parser.cs
@@ -4,7 +4,7 @@
 using System.Text.Json.Serialization;
 
 namespace luaaaaah;
 
-class Parser
+internal class Parser
 {
 	public class ChunkNode(BlockNode block, CodeRegion startRegion, CodeRegion endRegion)
 	{
@@ -116,12 +116,12 @@ class Parser
 		public ExplistNode rhs = rhs;
 		public CodeRegion startRegion = startRegion, endRegion = endRegion;
 	}
-	public class FunctioncallNode(SuffixexpNode function, string? objectArg, ArgsNode args, CodeRegion startRegion, CodeRegion endRegion)
+	public class FunctioncallNode(SuffixexpNode function, string? objectArg, ArgsNode args)
 	{
 		public SuffixexpNode function = function;
 		public string? objectArg = objectArg;
 		public ArgsNode args = args;
-		public CodeRegion startRegion = startRegion, endRegion = endRegion;
+		public CodeRegion startRegion = function.startRegion, endRegion = function.endRegion;
 	}
 	public class WhileNode(ExpNode condition, BlockNode body, CodeRegion startRegion, CodeRegion endRegion)
 	{
@@ -192,11 +192,11 @@ class Parser
 	public abstract class SuffixexpNode(CodeRegion startRegion, CodeRegion endRegion)
 	{
 		public CodeRegion startRegion = startRegion, endRegion = endRegion;
-		public class Normal(NormalSuffixNode node, CodeRegion startRegion, CodeRegion endRegion) : SuffixexpNode(startRegion, endRegion)
+		public class Normal(NormalSuffixNode node) : SuffixexpNode(node.startRegion, node.endRegion)
 		{
 			public NormalSuffixNode node = node;
 		}
-		public class Functioncall(FunctioncallNode node, CodeRegion startRegion, CodeRegion endRegion) : SuffixexpNode(startRegion, endRegion)
+		public class Functioncall(FunctioncallNode node) : SuffixexpNode(node.startRegion, node.endRegion)
 		{
 			public FunctioncallNode node = node;
 		}
@@ -231,38 +231,53 @@ class Parser
 	[JsonDerivedType(typeof(Tableconstructor), typeDiscriminator: "e Tableconstructor")]
 	[JsonDerivedType(typeof(Unop), typeDiscriminator: "e Unop")]
 	[JsonDerivedType(typeof(Binop), typeDiscriminator: "e Binop")]
-	public abstract class ExpNode(CodeRegion startRegion, CodeRegion endRegion)
+	public abstract class ExpNode
 	{
-		public CodeRegion startRegion = startRegion, endRegion = endRegion;
-		public class Nil(CodeRegion region) : ExpNode(region, region) { }
-		public class False(CodeRegion region) : ExpNode(region, region) { }
-		public class True(CodeRegion region) : ExpNode(region, region) { }
-		public class Numeral(INumeral value, CodeRegion region) : ExpNode(region, region)
+		public class Nil(CodeRegion region) : ExpNode
 		{
+			public CodeRegion region = region;
+		}
+		public class False(CodeRegion region) : ExpNode
+		{
+			public CodeRegion region = region;
+		}
+		public class True(CodeRegion region) : ExpNode
+		{
+			public CodeRegion region = region;
+		}
+		public class Numeral(INumeral value, CodeRegion region) : ExpNode
+		{
+			public CodeRegion region = region;
 			public INumeral value = value;
 		}
-		public class LiteralString(string value, CodeRegion region) : ExpNode(region, region)
+		public class LiteralString(string value, CodeRegion region) : ExpNode
 		{
+			public CodeRegion region = region;
 			public string value = value;
 		}
-		public class Varargs(CodeRegion region) : ExpNode(region, region) { }
-		public class Functiondef(FuncbodyNode node, CodeRegion startRegion, CodeRegion endRegion) : ExpNode(startRegion, endRegion)
+		public class Varargs(CodeRegion region) : ExpNode
 		{
+			public CodeRegion region = region;
+		}
+		public class Functiondef(FuncbodyNode node, CodeRegion startRegion, CodeRegion endRegion) : ExpNode
+		{
+			public CodeRegion startRegion = startRegion;
+			public CodeRegion endRegion = endRegion;
 			public FuncbodyNode node = node;
 		}
-		public class Suffixexp(SuffixexpNode node, CodeRegion startRegion, CodeRegion endRegion) : ExpNode(startRegion, endRegion)
+		public class Suffixexp(SuffixexpNode node) : ExpNode
 		{
 			public SuffixexpNode node = node;
 		}
-		public class Tableconstructor(TableconstructorNode node, CodeRegion startRegion, CodeRegion endRegion) : ExpNode(startRegion, endRegion)
+		public class Tableconstructor(TableconstructorNode node) : ExpNode
 		{
 			public TableconstructorNode node = node;
 		}
-		public class Unop(UnopNode node, CodeRegion startRegion, CodeRegion endRegion) : ExpNode(startRegion, endRegion)
+		public class Unop(UnopNode node) : ExpNode
 		{
 			public UnopNode node = node;
 		}
-		public class Binop(BinopNode node, CodeRegion startRegion, CodeRegion endRegion) : ExpNode(startRegion, endRegion)
+		public class Binop(BinopNode node) : ExpNode
 		{
 			public BinopNode node = node;
 		}
@@ -409,7 +424,7 @@ class Parser
 		{
 			public ArgsNode node = node;
 		}
-		public class ArgsFirstArg(ArgsFirstArgNode node, CodeRegion startRegion, CodeRegion endRegion) : SuffixexpSuffix(startRegion, endRegion)
+		public class ArgsFirstArg(ArgsFirstArgNode node) : SuffixexpSuffix(node.startRegion, node.endRegion)
 		{
 			public ArgsFirstArgNode node = node;
 		}
@@ -431,11 +446,11 @@ class Parser
 	public abstract class FieldNode(CodeRegion startRegion, CodeRegion endRegion)
 	{
 		public CodeRegion startRegion = startRegion, endRegion = endRegion;
-		public class IndexedAssignment(IndexedAssignmentNode node, CodeRegion startRegion, CodeRegion endRegion) : FieldNode(startRegion, endRegion)
+		public class IndexedAssignment(IndexedAssignmentNode node) : FieldNode(node.startRegion, node.endRegion)
 		{
 			public IndexedAssignmentNode node = node;
 		}
-		public class Assignment(FieldAssignmentNode node, CodeRegion startRegion, CodeRegion endRegion) : FieldNode(startRegion, endRegion)
+		public class Assignment(FieldAssignmentNode node) : FieldNode(node.startRegion, node.endRegion)
 		{
 			public FieldAssignmentNode node = node;
 		}
@@ -577,7 +592,7 @@ class Parser
 		}
 		index += 1;
 		ExpNode conditon = ParseExp(tokens);
-		return new StatNode.Repeat(new(condition: conditon, body: body, startRegion: startRegion, endRegion: conditon.endRegion));
+		return new StatNode.Repeat(new(condition: conditon, body: body, startRegion: startRegion, endRegion: tokens[index - 1].region));
 	}
 	case TokenType.If:
 	{
@@ -1014,7 +1029,7 @@ class Parser
 	{
 		index += 1;
 		ExpNode inner = ParseExp(tokens);
-		suffixes.Add(new SuffixexpSuffix.Indexed(node: inner, startRegion: suffixStartRegion, endRegion: inner.endRegion));
+		suffixes.Add(new SuffixexpSuffix.Indexed(node: inner, startRegion: suffixStartRegion, endRegion: tokens[index - 1].region));
 		if(index >= tokens.Length)
 		{
 			throw new Exception($"Index {index} out of bounds of {tokens.Length}, expected `]` to close indexed suffix of suffix-expression starting at {suffixStartRegion}");
 		}
@@ -1040,7 +1055,7 @@ class Parser
 		string name = ((Token.StringData)tokens[index].data!).data;
 		index += 1;
 		ArgsNode args = ParseArgs(tokens);
-		suffixes.Add(new SuffixexpSuffix.ArgsFirstArg(new(name, rest: args, startRegion: suffixStartRegion, endRegion: args.endRegion), startRegion: suffixStartRegion, endRegion: args.endRegion));
+		suffixes.Add(new SuffixexpSuffix.ArgsFirstArg(new(name, rest: args, startRegion: suffixStartRegion, endRegion: args.endRegion)));
 	}
 	break;
 	case TokenType.RoundOpen:
@@ -1067,32 +1082,20 @@ class Parser
 		SuffixexpSuffix.Args args => new SuffixexpNode.Functioncall(
 			node: new(
 				function: new SuffixexpNode.Normal(
-					node: new NormalSuffixNode(firstPart, suffixes[..^1], startRegion, args.endRegion),
-					startRegion: startRegion,
-					endRegion: args.endRegion
+					node: new NormalSuffixNode(firstPart, suffixes[..^1], startRegion, args.endRegion)
 				),
 				args: args.node,
-				objectArg: null,
-				startRegion: startRegion,
-				endRegion: args.endRegion
-			),
-			startRegion: startRegion,
-			endRegion: args.endRegion
+				objectArg: null
+			)
 		),
 		SuffixexpSuffix.ArgsFirstArg node => new SuffixexpNode.Functioncall(
 			node: new(
 				function: new SuffixexpNode.Normal(
-					node: new NormalSuffixNode(firstPart: firstPart, suffixes: suffixes[..^1], startRegion, node.endRegion),
-					startRegion: startRegion,
-					endRegion: node.endRegion
+					node: new NormalSuffixNode(firstPart: firstPart, suffixes: suffixes[..^1], startRegion, node.endRegion)
 				),
 				objectArg: node.node.name,
-				args: node.node.rest,
-				startRegion: startRegion,
-				endRegion: node.endRegion
-			),
-			startRegion: startRegion,
-			endRegion: node.endRegion
+				args: node.node.rest
+			)
 		),
 		_ => null,
 	};
@@ -1106,11 +1109,7 @@ class Parser
 		endRegion = firstPart.endRegion;
 	}
 
-	return new SuffixexpNode.Normal(
-		node: new(firstPart: firstPart, suffixes: suffixes, startRegion: startRegion, endRegion: endRegion),
-		startRegion: startRegion,
-		endRegion: endRegion
-	);
+	return new SuffixexpNode.Normal(node: new(firstPart: firstPart, suffixes: suffixes, startRegion: startRegion, endRegion: endRegion));
 }

 private ArgsNode ParseArgs(Token[] tokens)
@@ -1246,7 +1245,7 @@ class Parser
 		}
 		index += 1;
 		ExpNode rhs = ParseExp(tokens);
-		return new FieldNode.IndexedAssignment(node: new(index: indexNode, rhs: rhs, startRegion: startRegion, endRegion: rhs.endRegion), startRegion: startRegion, endRegion: rhs.endRegion);
+		return new FieldNode.IndexedAssignment(node: new(index: indexNode, rhs: rhs, startRegion: startRegion, endRegion: tokens[index - 1].region));
 	}
 	case TokenType.Name:
 	{
@@ -1255,15 +1254,15 @@ class Parser
 		string name = ((Token.StringData)tokens[index].data!).data;
 		index += 2;
 		ExpNode rhs = ParseExp(tokens);
-		return new FieldNode.Assignment(node: new(lhs: name, rhs: rhs, startRegion: startRegion, endRegion: rhs.endRegion), startRegion: startRegion, endRegion: rhs.endRegion);
+		return new FieldNode.Assignment(node: new(lhs: name, rhs: rhs, startRegion: startRegion, endRegion: tokens[index - 1].region));
 	}
 	ExpNode exp = ParseExp(tokens);
-	return new FieldNode.Exp(node: exp, startRegion: startRegion, endRegion: exp.endRegion);
+	return new FieldNode.Exp(node: exp, startRegion: startRegion, endRegion: tokens[index - 1].region);
 }
 default:
 {
 	ExpNode exp = ParseExp(tokens);
-	return new FieldNode.Exp(node: exp, startRegion: startRegion, endRegion: exp.endRegion);
+	return new FieldNode.Exp(node: exp, startRegion: startRegion, endRegion: tokens[index - 1].region);
 }
@@ -1463,13 +1462,14 @@ class Parser

 private ExplistNode ParseExplist(Token[] tokens)
 {
+	CodeRegion startRegion = tokens[index].region;
 	List exps = [ParseExp(tokens)];
 	while(index < tokens.Length && tokens[index].type == TokenType.Comma)
 	{
 		index += 1;
 		exps.Add(ParseExp(tokens));
 	}
-	return new ExplistNode(exps: exps, startRegion: exps[0].startRegion, endRegion: exps[^1].endRegion);
+	return new ExplistNode(exps: exps, startRegion: startRegion, endRegion: tokens[index - 1].region);
 }

 private ExpNode ParseExp(Token[] tokens)
@@ -1497,7 +1497,7 @@ class Parser
 		int associativityBoost = (GetPrecedence(tokens[index]) == precedence) ? 0 : 1;
 		rhs = ParseExpPrecedence(tokens, lhs: rhs, minPrecedence: precedence + associativityBoost);
 	}
-	currentLhs = new ExpNode.Binop(node: new(lhs: currentLhs, type: op, rhs: rhs, startRegion: startRegion, endRegion: rhs.endRegion), startRegion: startRegion, endRegion: rhs.endRegion);
+	currentLhs = new ExpNode.Binop(node: new(lhs: currentLhs, type: op, rhs: rhs, startRegion: startRegion, endRegion: tokens[index - 1].region));
 }
 return currentLhs;
@@ -1598,7 +1598,7 @@ class Parser
 	case TokenType.CurlyOpen:
 	{
 		TableconstructorNode inner = ParseTableconstructor(tokens);
-		return new ExpNode.Tableconstructor(node: inner, startRegion: inner.startRegion, endRegion: inner.endRegion);
+		return new ExpNode.Tableconstructor(node: inner);
 	}
 	case TokenType.Function:
 	{
@@ -1610,30 +1610,30 @@ class Parser
 	{
 		index += 1;
 		ExpNode unop = ParseExp(tokens);
-		return new ExpNode.Unop(node: new(type: UnopType.Minus, exp: unop, startRegion: startRegion, endRegion: unop.endRegion), startRegion: startRegion, endRegion: unop.endRegion);
+		return new ExpNode.Unop(node: new(type: UnopType.Minus, exp: unop, startRegion: startRegion, endRegion: tokens[index - 1].region));
 	}
 	case TokenType.Hash:
 	{
 		index += 1;
 		ExpNode unop = ParseExp(tokens);
-		return new ExpNode.Unop(node: new(type: UnopType.Length, exp: unop, startRegion: startRegion, endRegion: unop.endRegion), startRegion: startRegion, endRegion: unop.endRegion);
+		return new ExpNode.Unop(node: new(type: UnopType.Length, exp: unop, startRegion: startRegion, endRegion: tokens[index - 1].region));
 	}
 	case TokenType.Not:
 	{
 		index += 1;
 		ExpNode unop = ParseExp(tokens);
-		return new ExpNode.Unop(node: new(type: UnopType.LogicalNot, exp: unop, startRegion: startRegion, endRegion: unop.endRegion), startRegion: startRegion, endRegion: unop.endRegion);
+		return new ExpNode.Unop(node: new(type: UnopType.LogicalNot, exp: unop, startRegion: startRegion, endRegion: tokens[index - 1].region));
 	}
 	case TokenType.Tilde:
 	{
 		index += 1;
 		ExpNode unop = ParseExp(tokens);
-		return new ExpNode.Unop(node: new(type: UnopType.BinaryNot, exp: unop, startRegion: startRegion, endRegion: unop.endRegion), startRegion: startRegion, endRegion: unop.endRegion);
+		return new ExpNode.Unop(node: new(type: UnopType.BinaryNot, exp: unop, startRegion: startRegion, endRegion: tokens[index - 1].region));
 	}
 	default:
 	{
 		SuffixexpNode suffixexp = ParseSuffixExp(tokens);
-		return new ExpNode.Suffixexp(node: suffixexp, startRegion: suffixexp.startRegion, endRegion: suffixexp.endRegion);
+		return new ExpNode.Suffixexp(node: suffixexp);
 	}
 }

From 0c93d45dbd55f427929fd1c558024895b7a02613 Mon Sep 17 00:00:00 2001
From: 0x4261756D <38735823+0x4261756D@users.noreply.github.com>
Date: Wed, 28 Feb 2024 18:22:14 +0100
Subject: [PATCH 3/5] Deduplicate some tokenizer code

---
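[Editor's note] Dozens of arms of the tokenizer's Start-state switch differed
only in the successor state and the token type they started, so the commit
collapses them into two parameterized helpers, TokenizeTerminal and
TokenizeTerminalName. The same shape in miniature, with made-up names and a
trimmed-down state machine (the real tokenizer also tracks regions, data and
a backtracking index):

	using System;

	enum State { Start, Minus, Comma }
	enum TokenType { Minus, Comma }

	class MiniTokenizer
	{
		private State state = State.Start;

		// One helper replaces N identical switch arms that differed only in
		// the successor state and the emitted token type.
		private void TokenizeTerminal(State newState, TokenType type)
		{
			state = newState;
			Console.WriteLine($"started {type} token, state -> {newState}");
		}

		public void Consume(char ch)
		{
			switch(ch)
			{
				case '-': TokenizeTerminal(State.Minus, TokenType.Minus); break;
				case ',': TokenizeTerminal(State.Comma, TokenType.Comma); break;
				default: throw new Exception($"unexpected character {ch}");
			}
		}
	}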
 LuaTypes.cs  |  47 +++++++
 Tokenizer.cs | 356 +++++++++++++--------------------------------------
 2 files changed, 135 insertions(+), 268 deletions(-)
 create mode 100644 LuaTypes.cs

diff --git a/LuaTypes.cs b/LuaTypes.cs
new file mode 100644
index 0000000..49f479d
--- /dev/null
+++ b/LuaTypes.cs
@@ -0,0 +1,47 @@
+using System.Text.Json.Serialization;
+
+namespace luaaaaah;
+
+[JsonDerivedType(typeof(Integer), typeDiscriminator: "int")]
+[JsonDerivedType(typeof(Float), typeDiscriminator: "float")]
+public interface INumeral
+{
+	public class Integer(int value) : INumeral
+	{
+		public int value = value;
+
+		public bool RawEqual(INumeral other)
+		{
+			if(other is Integer integer)
+			{
+				return integer.value == value;
+			}
+			// TODO: Check if this is actually doing what is expected
+			return ((Float)other).value == value;
+		}
+		public override string ToString()
+		{
+			return $"Numeral Integer {value}";
+		}
+	}
+	public class Float(float value) : INumeral
+	{
+		public float value = value;
+
+		public bool RawEqual(INumeral other)
+		{
+			if(other is Float float_val)
+			{
+				return float_val.value == value;
+			}
+			// TODO: Check if this is actually doing what is expected
+			return ((Integer)other).value == value;
+		}
+		public override string ToString()
+		{
+			return $"Numeral Float {value}";
+		}
+	}
+
+	public bool RawEqual(INumeral other);
+}
diff --git a/Tokenizer.cs b/Tokenizer.cs
index 562bf91..cdec0a5 100644
--- a/Tokenizer.cs
+++ b/Tokenizer.cs
@@ -1,7 +1,6 @@
 using System;
 using System.Collections.Generic;
 using System.Text;
-using System.Text.Json.Serialization;
 
 namespace luaaaaah;
 class Tokenizer
@@ -82,6 +81,19 @@ class Tokenizer
 		currentToken.region.end = new(currentLocation);
 	}
 
+	private void TokenizeTerminal(State newState, TokenType type)
+	{
+		lastIndex = index;
+		state = newState;
+		currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: type);
+	}
+	private void TokenizeTerminalName(State newState, char ch)
+	{
+		lastIndex = index;
+		state = newState;
+		currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
+	}
+
 	private void TokenizeChar(char ch)
 	{
 		switch(state)
@@ -91,264 +103,116 @@ class Tokenizer
 			case State.Start:
 			{
 				switch(ch)
 				{
 					case '-':
-					{
-						lastIndex = index;
-						state = State.Minus;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Minus);
-					} /* tokenizeTerminalBase(TokenType.Minus, TokenizerState.Minus); */
-					break;
+						TokenizeTerminal(State.Minus, TokenType.Minus);
+						break;
 					case ',':
-					{
-						lastIndex = index;
-						state = State.Comma;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Comma);
-					} /* tokenizeTerminalBase(TokenType.Comma, TokenizerState.Comma); */
-					break;
+						TokenizeTerminal(State.Comma, TokenType.Comma);
+						break;
 					case '=':
-					{
-						lastIndex = index;
-						state = State.Equals;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Equals);
-					} /* tokenizeTerminalBase(TokenType.Equals, TokenizerState.Equals); */
-					break;
+						TokenizeTerminal(State.Equals, TokenType.Equals);
+						break;
 					case '(':
-					{
-						lastIndex = index;
-						state = State.RoundOpen;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.RoundOpen);
-					} /* tokenizeTerminalBase(TokenType.RoundOpen, TokenizerState.RoundOpen); */
-					break;
+						TokenizeTerminal(State.RoundOpen, TokenType.RoundOpen);
+						break;
 					case ')':
-					{
-						lastIndex = index;
-						state = State.RoundClosed;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.RoundClosed);
-					} /* tokenizeTerminalBase(TokenType.RoundClosed, TokenizerState.RoundClosed); */
-					break;
+						TokenizeTerminal(State.RoundClosed, TokenType.RoundClosed);
+						break;
 					case '.':
-					{
-						lastIndex = index;
-						state = State.Dot;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Dot);
-					} /* tokenizeTerminalBase(TokenType.Dot, TokenizerState.Dot); */
-					break;
+						TokenizeTerminal(State.Dot, TokenType.Dot);
+						break;
 					case ':':
-					{
-						lastIndex = index;
-						state = State.Colon;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Colon);
-					} /* tokenizeTerminalBase(TokenType.Colon, TokenizerState.Colon); */
-					break;
+						TokenizeTerminal(State.Colon, TokenType.Colon);
+						break;
 					case '{':
-					{
-						lastIndex = index;
-						state = State.CurlyOpen;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.CurlyOpen);
-					} /* tokenizeTerminalBase(TokenType.CurlyOpen, TokenizerState.CurlyOpen); */
-					break;
+						TokenizeTerminal(State.CurlyOpen, TokenType.CurlyOpen);
+						break;
 					case '}':
-					{
-						lastIndex = index;
-						state = State.CurlyClosed;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.CurlyClosed);
-					} /* tokenizeTerminalBase(TokenType.CurlyClosed, TokenizerState.CurlyClosed); */
-					break;
+						TokenizeTerminal(State.CurlyClosed, TokenType.CurlyClosed);
+						break;
 					case '[':
-					{
-						lastIndex = index;
-						state = State.SquareOpen;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.SquareOpen);
-					} /* tokenizeTerminalBase(TokenType.SquareOpen, TokenizerState.SquareOpen); */
-					break;
+						TokenizeTerminal(State.SquareOpen, TokenType.SquareOpen);
+						break;
 					case ']':
-					{
-						lastIndex = index;
-						state = State.SquareClosed;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.SquareClosed);
-					} /* tokenizeTerminalBase(TokenType.SquareClosed, TokenizerState.SquareClosed); */
-					break;
+						TokenizeTerminal(State.SquareClosed, TokenType.SquareClosed);
+						break;
					case '+':
-					{
-						lastIndex = index;
-						state = State.Plus;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Plus);
-					} /* tokenizeTerminalBase(TokenType.Plus, TokenizerState.Plus); */
-					break;
+						TokenizeTerminal(State.Plus, TokenType.Plus);
+						break;
 					case '~':
-					{
-						lastIndex = index;
-						state = State.Tilde;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Tilde);
-					} /* tokenizeTerminalBase(TokenType.Tilde, TokenizerState.Tilde); */
-					break;
+						TokenizeTerminal(State.Tilde, TokenType.Tilde);
+						break;
 					case '>':
-					{
-						lastIndex = index;
-						state = State.Gt;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Gt);
-					} /* tokenizeTerminalBase(TokenType.Gt, TokenizerState.Gt); */
-					break;
+						TokenizeTerminal(State.Gt, TokenType.Gt);
+						break;
 					case '<':
-					{
-						lastIndex = index;
-						state = State.Lt;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Lt);
-					} /* tokenizeTerminalBase(TokenType.Lt, TokenizerState.Lt); */
-					break;
+						TokenizeTerminal(State.Lt, TokenType.Lt);
+						break;
 					case '#':
-					{
-						lastIndex = index;
-						state = State.Hash;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Hash);
-					} /* tokenizeTerminalBase(TokenType.Hash, TokenizerState.Hash); */
-					break;
+						TokenizeTerminal(State.Hash, TokenType.Hash);
+						break;
 					case '|':
-					{
-						lastIndex = index;
-						state = State.Pipe;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Pipe);
-					} /* tokenizeTerminalBase(TokenType.Pipe, TokenizerState.Pipe); */
-					break;
+						TokenizeTerminal(State.Pipe, TokenType.Pipe);
+						break;
 					case '&':
-					{
-						lastIndex = index;
-						state = State.Ampersand;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Ampersand);
-					} /* tokenizeTerminalBase(TokenType.Ampersand, TokenizerState.Ampersand); */
-					break;
+						TokenizeTerminal(State.Ampersand, TokenType.Ampersand);
+						break;
 					case '%':
-					{
-						lastIndex = index;
-						state = State.Percent;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Percent);
-					} /* tokenizeTerminalBase(TokenType.Percent, TokenizerState.Percent); */
-					break;
+						TokenizeTerminal(State.Percent, TokenType.Percent);
+						break;
 					case '*':
-					{
-						lastIndex = index;
-						state = State.Star;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Star);
-					} /* tokenizeTerminalBase(TokenType.Star, TokenizerState.Star); */
-					break;
+						TokenizeTerminal(State.Star, TokenType.Star);
+						break;
 					case '/':
-					{
-						lastIndex = index;
-						state = State.Slash;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Slash);
-					} /* tokenizeTerminalBase(TokenType.Slash, TokenizerState.Slash); */
-					break;
+						TokenizeTerminal(State.Slash, TokenType.Slash);
+						break;
 					case ';':
-					{
-						lastIndex = index;
-						state = State.Semicolon;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Semicolon);
-					} /* tokenizeTerminalBase(TokenType.Semicolon, TokenizerState.Semicolon); */
-					break;
+						TokenizeTerminal(State.Semicolon, TokenType.Semicolon);
+						break;
 					case '^':
-					{
-						lastIndex = index;
-						state = State.Caret;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Caret);
-					} /* tokenizeTerminalBase(TokenType.Caret, TokenizerState.Caret); */
-					break;
+						TokenizeTerminal(State.Caret, TokenType.Caret);
+						break;
 					case 'a':
-					{
-						lastIndex = index;
-						state = State.A;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
-					} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.A, tokenStr, ch); */
-					break;
+						TokenizeTerminalName(State.A, ch);
+						break;
 					case 'b':
-					{
-						lastIndex = index;
-						state = State.B;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
-					} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.B, tokenStr, ch); */
-					break;
+						TokenizeTerminalName(State.B, ch);
+						break;
 					case 'd':
-					{
-						lastIndex = index;
-						state = State.D;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
-					} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.D, tokenStr, ch); */
-					break;
+						TokenizeTerminalName(State.D, ch);
+						break;
 					case 'e':
-					{
-						lastIndex = index;
-						state = State.E;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
-					} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.E, tokenStr, ch); */
-					break;
+						TokenizeTerminalName(State.E, ch);
+						break;
 					case 'f':
-					{
-						lastIndex = index;
-						state = State.F;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
-					} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.F, tokenStr, ch); */
-					break;
+						TokenizeTerminalName(State.F, ch);
+						break;
 					case 'i':
-					{
-						lastIndex = index;
-						state = State.I;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
-					} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.I, tokenStr, ch); */
-					break;
+						TokenizeTerminalName(State.I, ch);
+						break;
 					case 'g':
-					{
-						lastIndex = index;
-						state = State.G;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
-					} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.G, tokenStr, ch); */
-					break;
+						TokenizeTerminalName(State.G, ch);
+						break;
 					case 'l':
-					{
-						lastIndex = index;
-						state = State.L;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
-					} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.L, tokenStr, ch); */
-					break;
+						TokenizeTerminalName(State.L, ch);
+						break;
 					case 'n':
-					{
-						lastIndex = index;
-						state = State.N;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
-					} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.N, tokenStr, ch); */
-					break;
+						TokenizeTerminalName(State.N, ch);
+						break;
 					case 'o':
-					{
-						lastIndex = index;
-						state = State.O;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
-					} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.O, tokenStr, ch); */
-					break;
+						TokenizeTerminalName(State.O, ch);
+						break;
 					case 'r':
-					{
-						lastIndex = index;
-						state = State.R;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
-					} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.R, tokenStr, ch); */
-					break;
+						TokenizeTerminalName(State.R, ch);
+						break;
 					case 't':
-					{
-						lastIndex = index;
-						state = State.T;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
-					} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.T, tokenStr, ch); */
-					break;
+						TokenizeTerminalName(State.T, ch);
+						break;
 					case 'u':
-					{
-						lastIndex = index;
-						state = State.U;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
-					} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.U, tokenStr, ch); */
-					break;
+						TokenizeTerminalName(State.U, ch);
+						break;
 					case 'w':
-					{
-						lastIndex = index;
-						state = State.W;
-						currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
-					} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.W, tokenStr, ch); */
-					break;
+						TokenizeTerminalName(State.W, ch);
+						break;
 					case '0':
 					{
 						lastIndex = index;
@@ -3930,48 +3794,4 @@ public enum TokenType
 	Semicolon, Colon, Comma,
 	Dot, DotDot, DotDotDot,
 	Numeral,
 	StringLiteral,
-}
-
-[JsonDerivedType(typeof(Integer), typeDiscriminator: "int")]
-[JsonDerivedType(typeof(Float), typeDiscriminator: "float")]
-public interface INumeral
-{
-	public class Integer(int value) : INumeral
-	{
-		public int value = value;
-
-		public bool RawEqual(INumeral other)
-		{
-			if(other is Integer integer)
-			{
-				return integer.value == value;
-			}
-			// TODO: Check if this is actually doing what is expected
-			return ((Float)other).value == value;
-		}
-		public override string ToString()
-		{
-			return $"Numeral Integer {value}";
-		}
-	}
-	public class Float(float value) : INumeral
-	{
-		public float value = value;
-
-		public bool RawEqual(INumeral other)
-		{
-			if(other is Float float_val)
-			{
-				return float_val.value == value;
-			}
-			// TODO: Check if this is actually doing what is expected
-			return ((Integer)other).value == value;
-		}
-		public override string ToString()
-		{
-			return $"Numeral Float {value}";
-		}
-	}
-
-	public bool RawEqual(INumeral other);
-}
+}
\ No newline at end of file

From 51390b24d351d3439e945d074699a55a401ca1f8 Mon Sep 17 00:00:00 2001
From: 0x4261756D <38735823+0x4261756D@users.noreply.github.com>
Date: Wed, 28 Feb 2024 18:55:31 +0100
Subject: [PATCH 4/5] More deduplication in the tokenizer

---
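[Editor's note] This commit extracts the tokenizer's repeated longest-match
epilogue ("finish the current token, rewind to the rejected character, restart
from State.Start") into three helpers that differ only in how the token is
fixed up: Backtrack sets a new type and clears the data (a keyword was
recognized), BacktrackNoClear sets the type but keeps the data (a name or
string literal ended), and BacktrackNoTypeChange keeps the tentative type (an
operator ended). A compressed model with invented types; the real helpers also
rewind the read index via lastIndex and reset the location:

	using System;

	class Token
	{
		public string? type;
		public string? data;
	}

	class MiniLexer
	{
		private Token? currentToken;

		// Keyword recognized: retag the token and drop the accumulated name data.
		private void Backtrack(string newType)
		{
			if(currentToken == null || currentToken.type == null)
			{
				throw new Exception("lexer error: no open token");
			}
			currentToken.type = newType;
			currentToken.data = null;
			Emit();
		}

		// Name or string finished: retag, but keep the accumulated data.
		private void BacktrackNoClear(string newType)
		{
			if(currentToken == null || currentToken.type == null)
			{
				throw new Exception("lexer error: no open token");
			}
			currentToken.type = newType;
			Emit();
		}

		// Operator finished: the tentative type was already correct.
		private void BacktrackNoTypeChange()
		{
			if(currentToken == null || currentToken.type == null)
			{
				throw new Exception("lexer error: no open token");
			}
			Emit();
		}

		private void Emit()
		{
			Console.WriteLine($"token {currentToken!.type}");
			currentToken = null; // the rejected character is re-read from the start state
		}
	}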
Exception($"Expected `{{` to continue \\u escape sequence at {currentLocation}, got {ch}"); - } - } - break; + state = ch == '{' + ? State.SingleQuoteBackslashUBracket + : throw new Exception($"Expected `{{` to continue \\u escape sequence at {currentLocation}, got {ch}"); + break; case State.SingleQuoteBackslashUBracket: { if(char.IsAsciiHexDigit(ch)) @@ -715,18 +745,7 @@ class Tokenizer break; case State.String: { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.StringLiteral; - //currentToken.region.end = new(currentLocation); - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.StringLiteral); } break; case State.Name: @@ -740,17 +759,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -776,16 +785,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoTypeChange(); } } break; @@ -825,16 +825,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoTypeChange(); } } break; @@ -853,16 +844,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoTypeChange(); } } break; @@ -887,16 +869,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoTypeChange(); } } break; @@ -914,16 +887,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoTypeChange(); } } break; @@ -951,16 +915,7 @@ class Tokenizer case State.ColonColon: case State.SlashSlash: { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - 
lastIndex = null; - state = State.Start; + BacktrackNoTypeChange(); } break; case State.Tilde: @@ -973,16 +928,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoTypeChange(); } } break; @@ -1002,16 +948,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoTypeChange(); } } break; @@ -1031,16 +968,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoTypeChange(); } } break; @@ -1054,16 +982,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoTypeChange(); } } break; @@ -1077,16 +996,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoTypeChange(); } } break; @@ -1101,16 +1011,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoTypeChange(); } } break; @@ -1124,16 +1025,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoTypeChange(); } } break; @@ -1147,16 +1039,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoTypeChange(); } } break; @@ -1170,16 +1053,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoTypeChange(); } } break; @@ -1274,16 +1148,7 @@ 
class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoTypeChange(); } } break; @@ -1356,17 +1221,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -1387,17 +1242,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -1412,18 +1257,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.And; - currentToken.data = null; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + Backtrack(TokenType.And); } } break; @@ -1444,17 +1278,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -1475,17 +1299,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -1506,17 +1320,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -1537,17 +1341,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -1562,18 +1356,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - 
currentToken.type = TokenType.While; - currentToken.data = null; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + Backtrack(TokenType.While); } } break; @@ -1594,17 +1377,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -1625,17 +1398,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -1656,17 +1419,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -1687,17 +1440,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -1712,18 +1455,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Break; - currentToken.data = null; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + Backtrack(TokenType.Break); } } break; @@ -1744,17 +1476,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -1775,17 +1497,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -1806,17 +1518,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = 
new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -1831,18 +1533,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Goto; - currentToken.data = null; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + Backtrack(TokenType.Goto); } } break; @@ -1863,17 +1554,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -1900,17 +1581,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -1931,17 +1602,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -1962,17 +1623,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -1993,17 +1644,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -2018,18 +1659,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Return; - currentToken.data = null; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + Backtrack(TokenType.Return); } } break; @@ -2050,17 +1680,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = 
null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -2081,17 +1701,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -2112,17 +1722,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -2137,18 +1737,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Repeat; - currentToken.data = null; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + Backtrack(TokenType.Repeat); } } break; @@ -2175,17 +1764,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -2206,17 +1785,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -2231,18 +1800,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Nil; - currentToken.data = null; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + Backtrack(TokenType.Nil); } } break; @@ -2263,17 +1821,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -2288,18 +1836,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Not; - currentToken.data = null; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = 
null; - state = State.Start; + Backtrack(TokenType.Not); } } break; @@ -2326,17 +1863,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -2357,17 +1884,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -2388,17 +1905,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -2413,18 +1920,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Then; - currentToken.data = null; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + Backtrack(TokenType.Then); } } break; @@ -2445,17 +1941,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -2476,17 +1962,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -2501,18 +1977,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.True; - currentToken.data = null; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + Backtrack(TokenType.True); } } break; @@ -2539,17 +2004,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -2570,17 
+2025,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -2601,17 +2046,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -2632,18 +2067,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Else; - currentToken.data = null; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + Backtrack(TokenType.Else); } } break; @@ -2664,17 +2088,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -2689,18 +2103,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Elseif; - currentToken.data = null; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + Backtrack(TokenType.Elseif); } } break; @@ -2721,17 +2124,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -2746,18 +2139,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.End; - currentToken.data = null; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + Backtrack(TokenType.End); } } break; @@ -2778,17 +2160,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -2803,18 +2175,7 @@ class Tokenizer } else { - if(currentToken == 
null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Or; - currentToken.data = null; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + Backtrack(TokenType.Or); } } break; @@ -2835,17 +2196,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -2860,18 +2211,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Do; - currentToken.data = null; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + Backtrack(TokenType.Do); } } break; @@ -2898,17 +2238,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -2923,18 +2253,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.In; - currentToken.data = null; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + Backtrack(TokenType.In); } } break; @@ -2949,18 +2268,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.If; - currentToken.data = null; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + Backtrack(TokenType.If); } } break; @@ -2993,17 +2301,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -3024,17 +2322,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -3055,17 +2343,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new 
Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -3086,17 +2364,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -3117,17 +2385,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -3148,17 +2406,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -3179,17 +2427,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -3204,18 +2442,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Function; - currentToken.data = null; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + Backtrack(TokenType.Function); } } break; @@ -3236,17 +2463,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -3267,17 +2484,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -3298,17 +2505,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - 
currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -3323,18 +2520,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.False; - currentToken.data = null; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + Backtrack(TokenType.False); } } break; @@ -3355,17 +2541,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -3380,18 +2556,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.For; - currentToken.data = null; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + Backtrack(TokenType.For); } } break; @@ -3412,17 +2577,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -3443,17 +2598,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -3474,17 +2619,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -3505,17 +2640,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -3530,18 +2655,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Local; - currentToken.data = null; - currentLocation = new(currentToken.region.end); - 
tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + Backtrack(TokenType.Local); } } break; @@ -3562,17 +2676,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -3593,17 +2697,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -3624,17 +2718,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -3655,17 +2739,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Name; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + BacktrackNoClear(TokenType.Name); } } break; @@ -3680,18 +2754,7 @@ class Tokenizer } else { - if(currentToken == null || currentToken.type == null) - { - throw new Exception($"Lexer error at {currentLocation}"); - } - currentToken.type = TokenType.Until; - currentToken.data = null; - currentLocation = new(currentToken.region.end); - tokens.Add(currentToken); - currentToken = null; - index = lastIndex!.Value; - lastIndex = null; - state = State.Start; + Backtrack(TokenType.Until); } } break; @@ -3732,31 +2795,7 @@ class Tokenizer } } -class CodeRegion(CodeLocation start, CodeLocation end) -{ - public CodeLocation start = start; - public CodeLocation end = end; - - public override string ToString() - { - return $"{start}-{end}"; - } -} - -class CodeLocation(int line, int col) -{ - public int line = line; - public int col = col; - - public CodeLocation(CodeLocation other) : this(line: other.line, col: other.col) { } - - public override string ToString() - { - return $"{line + 1}:{col + 1}"; - } -} - -class Token(CodeRegion region, TokenType? type = null, Token.IData? data = null) +internal class Token(CodeRegion region, TokenType? type = null, Token.IData? data = null) { public CodeRegion region = region; public IData? 
data = data; From 820fc7a82e4ee979fd48981b9c67ec8c6beeff7c Mon Sep 17 00:00:00 2001 From: 0x4261756D <38735823+0x4261756D@users.noreply.github.com> Date: Wed, 28 Feb 2024 19:13:17 +0100 Subject: [PATCH 5/5] Remove some unnecessary nesting in the parser --- Parser.cs | 167 ++++++++++++++++++++---------------------------------- 1 file changed, 61 insertions(+), 106 deletions(-) diff --git a/Parser.cs b/Parser.cs index d551dcc..a8c2c43 100644 --- a/Parser.cs +++ b/Parser.cs @@ -38,9 +38,11 @@ internal class Parser { public CodeRegion region = region; } - public class Assignment(AssignmentNode node) : StatNode + public class Assignment(VarlistNode lhs, ExplistNode rhs, CodeRegion startRegion, CodeRegion endRegion) : StatNode { - public AssignmentNode node = node; + public VarlistNode lhs = lhs; + public ExplistNode rhs = rhs; + public CodeRegion startRegion = startRegion, endRegion = endRegion; } public class Functioncall(FunctioncallNode node) : StatNode { @@ -68,41 +70,57 @@ internal class Parser public CodeRegion endRegion = endRegion; public BlockNode node = node; } - public class While(WhileNode node) : StatNode + public class While(ExpNode condition, BlockNode body, CodeRegion startRegion, CodeRegion endRegion) : StatNode { - public WhileNode node = node; + public ExpNode condition = condition; + public BlockNode body = body; + public CodeRegion startRegion = startRegion, endRegion = endRegion; } - public class Repeat(RepeatNode node) : StatNode + public class Repeat(ExpNode condition, BlockNode body, CodeRegion startRegion, CodeRegion endRegion) : StatNode { - public RepeatNode node = node; + public ExpNode condition = condition; + public BlockNode body = body; + public CodeRegion startRegion = startRegion, endRegion = endRegion; } - public class If(IfNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode + public class If(ExpNode condition, BlockNode body, List elseifs, CodeRegion startRegion, CodeRegion endRegion) : StatNode { - public CodeRegion startRegion = startRegion; - public CodeRegion endRegion = endRegion; - public IfNode node = node; + public ExpNode condition = condition; + public BlockNode body = body; + public List elseifs = elseifs; + public BlockNode? else_; + public CodeRegion startRegion = startRegion, endRegion = endRegion; } - public class ForNumerical(ForNumericalNode node) : StatNode + public class ForNumerical(string variable, ExpNode start, ExpNode end, ExpNode? change, BlockNode body, CodeRegion startRegion, CodeRegion endRegion) : StatNode { - public ForNumericalNode node = node; + public string variable = variable; + public ExpNode start = start; + public ExpNode end = end; + public ExpNode? 
change = change; + public BlockNode body = body; + public CodeRegion startRegion = startRegion, endRegion = endRegion; } - public class ForGeneric(ForGenericNode node) : StatNode + public class ForGeneric(List vars, ExplistNode exps, BlockNode body, CodeRegion startRegion, CodeRegion endRegion) : StatNode { - public ForGenericNode node = node; + public List vars = vars; + public ExplistNode exps = exps; + public BlockNode body = body; + public CodeRegion startRegion = startRegion, endRegion = endRegion; } public class Function(FunctionNode node) : StatNode { public FunctionNode node = node; } - public class LocalFunction(LocalFunctionNode node) : StatNode + public class LocalFunction(string name, FuncbodyNode body, CodeRegion startRegion, CodeRegion endRegion) : StatNode { - public LocalFunctionNode node = node; + public string name = name; + public FuncbodyNode body = body; + public CodeRegion startRegion = startRegion, endRegion = endRegion; } - public class Local(LocalNode node, CodeRegion startRegion, CodeRegion endRegion) : StatNode + public class Local(AttnamelistNode attnames, ExplistNode? values, CodeRegion startRegion, CodeRegion endRegion) : StatNode { - public CodeRegion startRegion = startRegion; - public CodeRegion endRegion = endRegion; - public LocalNode node = node; + public AttnamelistNode attnames = attnames; + public ExplistNode? values = values; + public CodeRegion startRegion = startRegion, endRegion = endRegion; } } public class RetstatNode(ExplistNode? values, CodeRegion startRegion, CodeRegion endRegion) @@ -110,12 +128,6 @@ internal class Parser public ExplistNode? values = values; public CodeRegion startRegion = startRegion, endRegion = endRegion; } - public class AssignmentNode(VarlistNode lhs, ExplistNode rhs, CodeRegion startRegion, CodeRegion endRegion) - { - public VarlistNode lhs = lhs; - public ExplistNode rhs = rhs; - public CodeRegion startRegion = startRegion, endRegion = endRegion; - } public class FunctioncallNode(SuffixexpNode function, string? objectArg, ArgsNode args) { public SuffixexpNode function = function; @@ -123,60 +135,12 @@ internal class Parser public ArgsNode args = args; public CodeRegion startRegion = function.startRegion, endRegion = function.endRegion; } - public class WhileNode(ExpNode condition, BlockNode body, CodeRegion startRegion, CodeRegion endRegion) - { - public ExpNode condition = condition; - public BlockNode body = body; - public CodeRegion startRegion = startRegion, endRegion = endRegion; - } - public class RepeatNode(ExpNode condition, BlockNode body, CodeRegion startRegion, CodeRegion endRegion) - { - public ExpNode condition = condition; - public BlockNode body = body; - public CodeRegion startRegion = startRegion, endRegion = endRegion; - } - public class IfNode(ExpNode condition, BlockNode body, List elseifs, CodeRegion startRegion, CodeRegion endRegion) - { - public ExpNode condition = condition; - public BlockNode body = body; - public List elseifs = elseifs; - public BlockNode? else_; - public CodeRegion startRegion = startRegion, endRegion = endRegion; - } - public class ForNumericalNode(string variable, ExpNode start, ExpNode end, ExpNode? change, BlockNode body, CodeRegion startRegion, CodeRegion endRegion) - { - public string variable = variable; - public ExpNode start = start; - public ExpNode end = end; - public ExpNode? 
change = change; - public BlockNode body = body; - public CodeRegion startRegion = startRegion, endRegion = endRegion; - } - public class ForGenericNode(List vars, ExplistNode exps, BlockNode body, CodeRegion startRegion, CodeRegion endRegion) - { - public List vars = vars; - public ExplistNode exps = exps; - public BlockNode body = body; - public CodeRegion startRegion = startRegion, endRegion = endRegion; - } public class FunctionNode(FuncnameNode name, FuncbodyNode body, CodeRegion startRegion, CodeRegion endRegion) { public FuncnameNode name = name; public FuncbodyNode body = body; public CodeRegion startRegion = startRegion, endRegion = endRegion; } - public class LocalFunctionNode(string name, FuncbodyNode body, CodeRegion startRegion, CodeRegion endRegion) - { - public string name = name; - public FuncbodyNode body = body; - public CodeRegion startRegion = startRegion, endRegion = endRegion; - } - public class LocalNode(AttnamelistNode attnames, ExplistNode? values, CodeRegion startRegion, CodeRegion endRegion) - { - public AttnamelistNode attnames = attnames; - public ExplistNode? values = values; - public CodeRegion startRegion = startRegion, endRegion = endRegion; - } public class ExplistNode(List exps, CodeRegion startRegion, CodeRegion endRegion) { public List exps = exps; @@ -192,9 +156,10 @@ internal class Parser public abstract class SuffixexpNode(CodeRegion startRegion, CodeRegion endRegion) { public CodeRegion startRegion = startRegion, endRegion = endRegion; - public class Normal(NormalSuffixNode node) : SuffixexpNode(node.startRegion, node.endRegion) + public class Normal(SuffixexpFirstPart firstPart, List suffixes, CodeRegion startRegion, CodeRegion endRegion) : SuffixexpNode(startRegion, endRegion) { - public NormalSuffixNode node = node; + public SuffixexpFirstPart firstPart = firstPart; + public List suffixes = suffixes; } public class Functioncall(FunctioncallNode node) : SuffixexpNode(node.startRegion, node.endRegion) { @@ -325,12 +290,6 @@ internal class Parser public MemberVarNode node = node; } } - public class NormalSuffixNode(SuffixexpFirstPart firstPart, List suffixes, CodeRegion startRegion, CodeRegion endRegion) - { - public SuffixexpFirstPart firstPart = firstPart; - public List suffixes = suffixes; - public CodeRegion startRegion = startRegion, endRegion = endRegion; - } public class TableconstructorNode(FieldlistNode? exps, CodeRegion startRegion, CodeRegion endRegion) { public FieldlistNode? 
exps = exps; @@ -576,7 +535,7 @@ internal class Parser } CodeRegion endRegion = tokens[index].region; index += 1; - return new StatNode.While(new(condition: condition, body: body, startRegion: startRegion, endRegion: endRegion)); + return new StatNode.While(condition: condition, body: body, startRegion: startRegion, endRegion: endRegion); } case TokenType.Repeat: { @@ -592,7 +551,7 @@ internal class Parser } index += 1; ExpNode conditon = ParseExp(tokens); - return new StatNode.Repeat(new(condition: conditon, body: body, startRegion: startRegion, endRegion: tokens[index - 1].region)); + return new StatNode.Repeat(condition: conditon, body: body, startRegion: startRegion, endRegion: tokens[index - 1].region); } case TokenType.If: { @@ -634,7 +593,7 @@ internal class Parser { throw new Exception($"Index {index} out of bounds of {tokens.Length}, expected `end` after else-ifs of if starting at {startRegion}"); } - IfNode ret = new(condition: condition, body: body, elseifs: elseifs, startRegion: startRegion, endRegion: tokens[index - 1].region); + StatNode.If ret = new(condition: condition, body: body, elseifs: elseifs, startRegion: startRegion, endRegion: tokens[index - 1].region); if(tokens[index].type == TokenType.Else) { index += 1; @@ -650,7 +609,7 @@ internal class Parser } ret.endRegion = tokens[index].region; index += 1; - return new StatNode.If(node: ret, startRegion: startRegion, endRegion: ret.endRegion); + return ret; } case TokenType.For: { @@ -713,7 +672,7 @@ internal class Parser } CodeRegion endRegion = tokens[index].region; index += 1; - return new StatNode.ForNumerical(new(variable: variable, start: start, end: end, change: change, body: body, startRegion: startRegion, endRegion: endRegion)); + return new StatNode.ForNumerical(variable: variable, start: start, end: end, change: change, body: body, startRegion: startRegion, endRegion: endRegion); } case TokenType.Comma: { @@ -766,7 +725,7 @@ internal class Parser } CodeRegion endRegion = tokens[index].region; index += 1; - return new StatNode.ForGeneric(new(vars: names, exps: exps, body: body, startRegion: startRegion, endRegion: endRegion)); + return new StatNode.ForGeneric(vars: names, exps: exps, body: body, startRegion: startRegion, endRegion: endRegion); } case TokenType.In: { @@ -792,7 +751,7 @@ internal class Parser } CodeRegion endRegion = tokens[index].region; index += 1; - return new StatNode.ForGeneric(new(vars: [variable], exps: exps, body: body, startRegion: startRegion, endRegion: endRegion)); + return new StatNode.ForGeneric(vars: [variable], exps: exps, body: body, startRegion: startRegion, endRegion: endRegion); } default: { @@ -828,19 +787,19 @@ internal class Parser string name = ((Token.StringData)tokens[index].data!).data; index += 1; FuncbodyNode body = ParseFuncbody(tokens); - return new StatNode.LocalFunction(new(name: name, body: body, startRegion: startRegion, endRegion: body.endRegion)); + return new StatNode.LocalFunction(name: name, body: body, startRegion: startRegion, endRegion: body.endRegion); } else { AttnamelistNode attnames = ParseAttnamelist(tokens); - LocalNode ret = new(attnames: attnames, values: null, startRegion: startRegion, endRegion: attnames.endRegion); + StatNode.Local ret = new(attnames: attnames, values: null, startRegion: startRegion, endRegion: attnames.endRegion); if(index < tokens.Length && tokens[index].type == TokenType.Equals) { index += 1; ret.values = ParseExplist(tokens); ret.endRegion = ret.values.endRegion; } - return new StatNode.Local(ret, startRegion: startRegion, 
endRegion: ret.endRegion); + return ret; } } case TokenType.ColonColon: @@ -892,7 +851,7 @@ internal class Parser index += 1; List lhs = [SuffixExpToVar(suffixExp)]; ExplistNode rhs = ParseExplist(tokens); - return new StatNode.Assignment(new(lhs: new(vars: lhs, startRegion: startRegion, endRegion: suffixExp.endRegion), rhs: rhs, startRegion: startRegion, endRegion: rhs.endRegion)); + return new StatNode.Assignment(lhs: new(vars: lhs, startRegion: startRegion, endRegion: suffixExp.endRegion), rhs: rhs, startRegion: startRegion, endRegion: rhs.endRegion); } case TokenType.Comma: { @@ -913,7 +872,7 @@ internal class Parser index += 1; VarlistNode varlistNode = new(vars: vars, startRegion: startRegion, endRegion: vars[^1].endRegion); ExplistNode rhs = ParseExplist(tokens); - return new StatNode.Assignment(new(lhs: varlistNode, rhs: rhs, startRegion: startRegion, endRegion: rhs.endRegion)); + return new StatNode.Assignment(lhs: varlistNode, rhs: rhs, startRegion: startRegion, endRegion: rhs.endRegion); } } if(suffixExp is SuffixexpNode.Normal) @@ -946,16 +905,16 @@ internal class Parser { throw new Exception($"Expected a normal suffix expression to convert to var at {suffixExp.startRegion}-{suffixExp.endRegion}"); } - if(normal.node.suffixes.Count == 0) + if(normal.suffixes.Count == 0) { - if(normal.node.firstPart is not SuffixexpFirstPart.Name name) + if(normal.firstPart is not SuffixexpFirstPart.Name name) { - throw new Exception($"Expected a name as first part of suffix expression to convert to var at {normal.node.firstPart.startRegion}-{normal.node.firstPart.endRegion}"); + throw new Exception($"Expected a name as first part of suffix expression to convert to var at {normal.firstPart.startRegion}-{normal.firstPart.endRegion}"); } return new VarNode.Name(name: name.name, startRegion: suffixExp.startRegion, endRegion: suffixExp.endRegion); } - SuffixexpSuffix last = normal.node.suffixes[^1]; - _ = normal.node.suffixes.Remove(last); + SuffixexpSuffix last = normal.suffixes[^1]; + _ = normal.suffixes.Remove(last); return last switch { SuffixexpSuffix.Dot dot => new VarNode.Member(node: new(name: dot.name, value: normal, startRegion: suffixExp.startRegion, endRegion: suffixExp.endRegion), startRegion: suffixExp.startRegion, endRegion: dot.endRegion), @@ -1081,18 +1040,14 @@ internal class Parser { SuffixexpSuffix.Args args => new SuffixexpNode.Functioncall( node: new( - function: new SuffixexpNode.Normal( - node: new NormalSuffixNode(firstPart, suffixes[..^1], startRegion, args.endRegion) - ), + function: new SuffixexpNode.Normal(firstPart, suffixes[..^1], startRegion, args.endRegion), args: args.node, objectArg: null ) ), SuffixexpSuffix.ArgsFirstArg node => new SuffixexpNode.Functioncall( node: new( - function: new SuffixexpNode.Normal( - node: new NormalSuffixNode(firstPart: firstPart, suffixes: suffixes[..^1], startRegion, node.endRegion) - ), + function: new SuffixexpNode.Normal(firstPart: firstPart, suffixes: suffixes[..^1], startRegion, node.endRegion), objectArg: node.node.name, args: node.node.rest ) @@ -1109,7 +1064,7 @@ internal class Parser endRegion = firstPart.endRegion; } - return new SuffixexpNode.Normal(node: new(firstPart: firstPart, suffixes: suffixes, startRegion: startRegion, endRegion: endRegion)); + return new SuffixexpNode.Normal(firstPart: firstPart, suffixes: suffixes, startRegion: startRegion, endRegion: endRegion); } private ArgsNode ParseArgs(Token[] tokens)