Implement missing parser methods
This commit is contained in:
parent b7f725afee
commit 27f2917670
733 Parser.cs
@@ -862,46 +862,759 @@ class Parser
	private VarNode ParseVar(Token[] tokens)
	{
		return SuffixExpToVar(ParseSuffixExp(tokens));
	}

	private static VarNode SuffixExpToVar(SuffixexpNode suffixExp)
	{
		if(suffixExp is not SuffixexpNode.Normal normal)
		{
			throw new Exception($"Expected a normal suffix expression to convert to var at {suffixExp.startRegion}-{suffixExp.endRegion}");
		}
		if(normal.node.suffixes.Count == 0)
		{
			if(normal.node.firstPart is not SuffixexpFirstPart.Name name)
			{
				throw new Exception($"Expected a name as first part of suffix expression to convert to var at {normal.node.firstPart.startRegion}-{normal.node.firstPart.endRegion}");
			}
			return new VarNode.Name(name: name.name, startRegion: suffixExp.startRegion, endRegion: suffixExp.endRegion);
		}
		SuffixexpSuffix last = normal.node.suffixes[^1];
		return last switch
		{
			SuffixexpSuffix.Dot dot => new VarNode.Member(node: new(name: dot.name, value: normal, startRegion: suffixExp.startRegion, endRegion: suffixExp.endRegion), startRegion: suffixExp.startRegion, endRegion: dot.endRegion),
			SuffixexpSuffix.Indexed indexed => new VarNode.Indexed(node: new(index: indexed.node, value: normal, startRegion: suffixExp.startRegion, endRegion: suffixExp.endRegion), startRegion: suffixExp.startRegion, endRegion: indexed.endRegion),
			_ => throw new Exception($"Expected dot or indexed suffix expression to convert to var at {last.startRegion}-{last.endRegion}")
		};
	}

	private SuffixexpNode ParseSuffixExp(Token[] tokens)
	{
		// primaryexp { '.' 'Name' | '[' exp ']' | ':' 'Name' args | args }
		if(index >= tokens.Length)
		{
			throw new Exception($"Index {index} out of bounds of {tokens.Length}");
		}
		CodeRegion startRegion = tokens[index].region;
		SuffixexpFirstPart firstPart;
		switch(tokens[index].type)
		{
			case TokenType.Name:
			{
				string name = ((Token.StringData)tokens[index].data!).data;
				index += 1;
				firstPart = new SuffixexpFirstPart.Name(name, startRegion, startRegion);
			}
			break;
			case TokenType.RoundOpen:
			{
				index += 1;
				ExpNode inner = ParseExp(tokens);
				if(index >= tokens.Length)
				{
					throw new Exception($"Index {index} out of bounds of {tokens.Length}, expected `)` to close bracketed expression starting at {startRegion}");
				}
				if(tokens[index].type != TokenType.RoundClosed)
				{
					throw new Exception($"{tokens[index].region}: Expected `)` to close bracketed expression at {startRegion}, got {tokens[index].type}");
				}
				firstPart = new SuffixexpFirstPart.BracketedExp(node: inner, startRegion: startRegion, endRegion: tokens[index].region);
				index += 1;
			}
			break;
			default:
				throw new Exception($"{startRegion}: Expected either `(` or name as first part of suffix-expression, got {tokens[index].type}");
		}
		List<SuffixexpSuffix> suffixes = [];
		bool shouldContinue = true;
		while(shouldContinue && index < tokens.Length)
		{
			CodeRegion suffixStartRegion = tokens[index].region;
			switch(tokens[index].type)
			{
				case TokenType.Dot:
				{
					index += 1;
					if(index >= tokens.Length)
					{
						throw new Exception($"Index {index} out of bounds of {tokens.Length}, expected name in dotted suffix of suffix expression starting at {startRegion}");
					}
					if(tokens[index].type != TokenType.Name)
					{
						throw new Exception($"{tokens[index].region}: Expected name in dotted suffix of suffix expression at {startRegion}, got {tokens[index].type}");
					}
					CodeRegion suffixEndRegion = tokens[index].region;
					string name = ((Token.StringData)tokens[index].data!).data;
					index += 1;
					suffixes.Add(new SuffixexpSuffix.Dot(name, startRegion: suffixStartRegion, endRegion: suffixEndRegion));
				}
				break;
				case TokenType.SquareOpen:
				{
					index += 1;
					ExpNode inner = ParseExp(tokens);
					suffixes.Add(new SuffixexpSuffix.Indexed(node: inner, startRegion: suffixStartRegion, endRegion: inner.endRegion));
					if(index >= tokens.Length)
					{
						throw new Exception($"Index {index} out of bounds of {tokens.Length}, expected `]` to close indexed suffix of suffix-expression starting at {suffixStartRegion}");
					}
					if(tokens[index].type != TokenType.SquareClosed)
					{
						throw new Exception($"{tokens[index].region}: Expected `]` to close indexed suffix of suffix-expression at {suffixStartRegion}, got {tokens[index].type}");
					}
					index += 1;
				}
				break;
				case TokenType.Colon:
				{
					index += 1;
					if(index >= tokens.Length)
					{
						throw new Exception($"Index {index} out of bounds of {tokens.Length}, expected name as first arg after `:` in args suffix in suffix-expression starting at {suffixStartRegion}");
					}
					if(tokens[index].type != TokenType.Name)
					{
						throw new Exception($"{tokens[index].region}: Expected name as first arg after `:` in args suffix in suffix-expression at {suffixStartRegion}, got {tokens[index].type}");
					}
					string name = ((Token.StringData)tokens[index].data!).data;
					index += 1;
					ArgsNode args = ParseArgs(tokens);
					suffixes.Add(new SuffixexpSuffix.ArgsFirstArg(new(name, rest: args, startRegion: suffixStartRegion, endRegion: args.endRegion), startRegion: suffixStartRegion, endRegion: args.endRegion));
				}
				break;
				case TokenType.RoundOpen:
				case TokenType.CurlyOpen:
				case TokenType.StringLiteral:
				{
					ArgsNode args = ParseArgs(tokens);
					suffixes.Add(new SuffixexpSuffix.Args(node: args, startRegion: suffixStartRegion, endRegion: args.endRegion));
				}
				break;
				default:
				{
					shouldContinue = false;
				}
				break;
			}
		}
		CodeRegion endRegion;
		if(suffixes.Count > 0)
		{
			endRegion = suffixes[^1].endRegion;
			SuffixexpNode? ret = suffixes[^1] switch
			{
				SuffixexpSuffix.Args args => new SuffixexpNode.Functioncall(
					node: new(
						function: new SuffixexpNode.Normal(
							node: new NormalSuffixNode(firstPart, suffixes[..^1], startRegion, args.endRegion),
							startRegion: startRegion,
							endRegion: args.endRegion
						),
						args: args.node,
						objectArg: null,
						startRegion: startRegion,
						endRegion: args.endRegion
					),
					startRegion: startRegion,
					endRegion: args.endRegion
				),
				SuffixexpSuffix.ArgsFirstArg node => new SuffixexpNode.Functioncall(
					node: new(
						function: new SuffixexpNode.Normal(
							node: new NormalSuffixNode(firstPart: firstPart, suffixes: suffixes[..^1], startRegion, node.endRegion),
							startRegion: startRegion,
							endRegion: node.endRegion
						),
						objectArg: node.node.name,
						args: node.node.rest,
						startRegion: startRegion,
						endRegion: node.endRegion
					),
					startRegion: startRegion,
					endRegion: node.endRegion
				),
				_ => null,
			};
			if(ret is not null)
			{
				return ret;
			}
		}
		else
		{
			endRegion = firstPart.endRegion;
		}

		return new SuffixexpNode.Normal(
			node: new(firstPart: firstPart, suffixes: suffixes, startRegion: startRegion, endRegion: endRegion),
			startRegion: startRegion,
			endRegion: endRegion
		);
	}

	private ArgsNode ParseArgs(Token[] tokens)
	{
		if(index >= tokens.Length)
		{
			throw new Exception($"Index {index} out of bounds of {tokens.Length}, expected `(`, `{{` or string to start args");
		}
		CodeRegion startRegion = tokens[index].region;
		switch(tokens[index].type)
		{
			case TokenType.RoundOpen:
			{
				index += 1;
				if(index >= tokens.Length)
				{
					throw new Exception($"Index {index} out of bounds of {tokens.Length}, expected explist or `)` to continue args starting at {startRegion}");
				}
				if(tokens[index].type == TokenType.RoundClosed)
				{
					CodeRegion endRegion = tokens[index].region;
					index += 1;
					return new ArgsNode.Bracketed(null, startRegion: startRegion, endRegion: endRegion);
				}
				ExplistNode exps = ParseExplist(tokens);
				if(index >= tokens.Length)
				{
					throw new Exception($"Index {index} out of bounds of {tokens.Length}, expected `)` to close args starting at {startRegion}");
				}
				if(tokens[index].type != TokenType.RoundClosed)
				{
					throw new Exception($"{tokens[index].region}: Expected `)` to close args starting at {startRegion}, got {tokens[index].type}");
				}
				index += 1;
				return new ArgsNode.Bracketed(node: exps, startRegion: startRegion, endRegion: exps.endRegion);
			}
			case TokenType.CurlyOpen:
			{
				TableconstructorNode node = ParseTableconstructor(tokens);
				return new ArgsNode.Tableconstructor(node: node, startRegion: startRegion, endRegion: node.endRegion);
			}
			case TokenType.StringLiteral:
			{
				string value = ((Token.StringData)tokens[index].data!).data;
				index += 1;
				return new ArgsNode.Literal(name: value, startRegion: startRegion, endRegion: startRegion);
			}
			default:
				throw new Exception($"{tokens[index].region}: Expected `(`, `{{` or string to start args, got {tokens[index].type}");
		}
	}

	private TableconstructorNode ParseTableconstructor(Token[] tokens)
	{
		if(index >= tokens.Length)
		{
			throw new Exception($"Index {index} out of bounds of {tokens.Length}, expected `{{` to start tableconstructor");
		}
		CodeRegion startRegion = tokens[index].region;
		if(tokens[index].type != TokenType.CurlyOpen)
		{
			throw new Exception($"{startRegion}: Expected `{{` to start tableconstructor, got {tokens[index].type}");
		}
		index += 1;
		if(index < tokens.Length && tokens[index].type == TokenType.CurlyClosed)
		{
			CodeRegion emptyEndRegion = tokens[index].region;
			index += 1;
			return new TableconstructorNode(exps: null, startRegion: startRegion, endRegion: emptyEndRegion);
		}
		FieldlistNode fields = ParseFieldlist(tokens);
		if(index >= tokens.Length)
		{
			throw new Exception($"Index {index} out of bounds of {tokens.Length}, expected `}}` to close tableconstructor starting at {startRegion}");
		}
		if(tokens[index].type != TokenType.CurlyClosed)
		{
			throw new Exception($"{tokens[index].region}: Expected `}}` to close tableconstructor starting at {startRegion}, got {tokens[index].type}");
		}
		CodeRegion endRegion = tokens[index].region;
		index += 1;
		return new TableconstructorNode(exps: fields, startRegion: startRegion, endRegion: endRegion);
	}

	private FieldlistNode ParseFieldlist(Token[] tokens)
	{
		List<FieldNode> fields = [ParseField(tokens)];
		while(index < tokens.Length && IsFieldsep(tokens[index]))
		{
			index += 1;
			// A fieldsep may also trail the last field, so stop here if the table ends
			if(index >= tokens.Length || tokens[index].type == TokenType.CurlyClosed)
			{
				break;
			}
			fields.Add(ParseField(tokens));
		}
		// NOTE: Since at least 1 field is parsed the list accesses are safe
		return new FieldlistNode(exps: fields, startRegion: fields[0].startRegion, endRegion: fields[^1].endRegion);
	}

	private static bool IsFieldsep(Token token) => token.type is TokenType.Comma or TokenType.Semicolon;

	private FieldNode ParseField(Token[] tokens)
	{
		if(index >= tokens.Length)
		{
			throw new Exception($"Index {index} out of bounds of {tokens.Length}, expected `[` or name to start field");
		}
		CodeRegion startRegion = tokens[index].region;
		switch(tokens[index].type)
		{
			case TokenType.SquareOpen:
			{
				index += 1;
				ExpNode indexNode = ParseExp(tokens);
				if(index >= tokens.Length)
				{
					throw new Exception($"Index {index} out of bounds of {tokens.Length}, expected `]` to close indexed field in indexed field assignment starting at {startRegion}");
				}
				if(tokens[index].type != TokenType.SquareClosed)
				{
					throw new Exception($"{tokens[index].region}: Expected `]` to close indexed field in indexed field assignment starting at {startRegion}, got {tokens[index].type}");
				}
				index += 1;
				if(index >= tokens.Length)
				{
					throw new Exception($"Index {index} out of bounds of {tokens.Length}, expected `=` to continue indexed field assignment starting at {startRegion}");
				}
				if(tokens[index].type != TokenType.Equals)
				{
					throw new Exception($"{tokens[index].region}: Expected `=` to continue indexed field assignment starting at {startRegion}, got {tokens[index].type}");
				}
				index += 1;
				ExpNode rhs = ParseExp(tokens);
				return new FieldNode.IndexedAssignment(node: new(index: indexNode, rhs: rhs, startRegion: startRegion, endRegion: rhs.endRegion), startRegion: startRegion, endRegion: rhs.endRegion);
			}
			case TokenType.Name:
			{
				if(index + 1 < tokens.Length && tokens[index + 1].type == TokenType.Equals)
				{
					string name = ((Token.StringData)tokens[index].data!).data;
					index += 2;
					ExpNode rhs = ParseExp(tokens);
					return new FieldNode.Assignment(node: new(lhs: name, rhs: rhs, startRegion: startRegion, endRegion: rhs.endRegion), startRegion: startRegion, endRegion: rhs.endRegion);
				}
				ExpNode exp = ParseExp(tokens);
				return new FieldNode.Exp(node: exp, startRegion: startRegion, endRegion: exp.endRegion);
			}
			default:
			{
				ExpNode exp = ParseExp(tokens);
				return new FieldNode.Exp(node: exp, startRegion: startRegion, endRegion: exp.endRegion);
			}
		}
	}

	private AttnamelistNode ParseAttnamelist(Token[] tokens)
	{
		List<AttnameNode> attnames = [ParseAttname(tokens)];
		while(index < tokens.Length && tokens[index].type == TokenType.Comma)
		{
			index += 1;
			attnames.Add(ParseAttname(tokens));
		}
		// NOTE: Since at least 1 attname is parsed the list accesses are safe
		return new(attnames: attnames, startRegion: attnames[0].startRegion, endRegion: attnames[^1].endRegion);
	}

	private AttnameNode ParseAttname(Token[] tokens)
	{
		if(index >= tokens.Length)
		{
			throw new Exception($"Index {index} out of bounds of {tokens.Length}, expected name to start attname");
		}
		CodeRegion startRegion = tokens[index].region;
		if(tokens[index].type != TokenType.Name)
		{
			throw new Exception($"{tokens[index].region}: Expected name to start attname at {startRegion}, got {tokens[index].type}");
		}
		string name = ((Token.StringData)tokens[index].data!).data;
		index += 1;
		if(index < tokens.Length && tokens[index].type == TokenType.Lt)
		{
			index += 1;
			if(index >= tokens.Length)
			{
				throw new Exception($"Index {index} out of bounds of {tokens.Length}, expected attribute name of attname starting at {startRegion}");
			}
			if(tokens[index].type != TokenType.Name)
			{
				throw new Exception($"{tokens[index].region}: Expected attribute name of attname at {startRegion}, got {tokens[index].type}");
			}
			string attribute = ((Token.StringData)tokens[index].data!).data;
			index += 1;
			if(index >= tokens.Length)
			{
				throw new Exception($"Index {index} out of bounds of {tokens.Length}, expected `>` to close attribute of attname starting at {startRegion}");
			}
			if(tokens[index].type != TokenType.Gt)
			{
				throw new Exception($"{tokens[index].region}: Expected `>` to close attribute of attname starting at {startRegion}, got {tokens[index].type}");
			}
			CodeRegion endRegion = tokens[index].region;
			index += 1;
			return new AttnameNode(name: name, attribute: attribute, startRegion: startRegion, endRegion: endRegion);
		}
		return new AttnameNode(name: name, attribute: null, startRegion: startRegion, endRegion: startRegion);
	}

	private FuncbodyNode ParseFuncbody(Token[] tokens)
	{
		if(index >= tokens.Length)
		{
			throw new Exception($"Index {index} out of bounds of {tokens.Length}, expected `(` to start funcbody");
		}
		CodeRegion startRegion = tokens[index].region;
		if(tokens[index].type != TokenType.RoundOpen)
		{
			throw new Exception($"{tokens[index].region}: Expected `(` to start funcbody at {startRegion}, got {tokens[index].type}");
		}
		index += 1;
		ParlistNode? pars;
		if(index < tokens.Length && tokens[index].type == TokenType.RoundClosed)
		{
			index += 1;
			pars = null;
		}
		else
		{
			pars = ParseParlist(tokens);
			if(index >= tokens.Length)
			{
				throw new Exception($"Index {index} out of bounds of {tokens.Length}, expected `)` to close parlist of funcbody starting at {startRegion}");
			}
			if(tokens[index].type != TokenType.RoundClosed)
			{
				throw new Exception($"{tokens[index].region}: Expected `)` to close parlist of funcbody at {startRegion}, got {tokens[index].type}");
			}
			index += 1;
		}
		BlockNode body = ParseBlock(tokens);
		if(index >= tokens.Length)
		{
			throw new Exception($"Index {index} out of bounds of {tokens.Length}, expected `end` to close funcbody starting at {startRegion}");
		}
		if(tokens[index].type != TokenType.End)
		{
			throw new Exception($"{tokens[index].region}: Expected `end` to close funcbody starting at {startRegion}, got {tokens[index].type}");
		}
		CodeRegion endRegion = tokens[index].region;
		index += 1;
		return new FuncbodyNode(pars: pars, body: body, startRegion: startRegion, endRegion: endRegion);
	}

	private ParlistNode ParseParlist(Token[] tokens)
	{
		if(index >= tokens.Length)
		{
			throw new Exception($"Index {index} out of bounds of {tokens.Length}, expected `...` or name to start parlist");
		}
		CodeRegion startRegion = tokens[index].region;
		if(tokens[index].type == TokenType.DotDotDot)
		{
			index += 1;
			return new ParlistNode(names: [], hasVarargs: true, startRegion: startRegion, endRegion: startRegion);
		}
		if(tokens[index].type != TokenType.Name)
		{
			throw new Exception($"{startRegion}: Expected `...` or name to start parlist, got {tokens[index].type}");
		}
		List<string> names = [((Token.StringData)tokens[index].data!).data];
		index += 1;
		while(index < tokens.Length && tokens[index].type == TokenType.Comma)
		{
			index += 1;
			if(index >= tokens.Length)
			{
				throw new Exception($"Index {index} out of bounds of {tokens.Length}, expected `...` or name to continue parlist starting at {startRegion}");
			}
			switch(tokens[index].type)
			{
				case TokenType.Name:
				{
					names.Add(((Token.StringData)tokens[index].data!).data);
					index += 1;
				}
				break;
				case TokenType.DotDotDot:
				{
					CodeRegion endRegion = tokens[index].region;
					index += 1;
					return new ParlistNode(names: names, hasVarargs: true, startRegion: startRegion, endRegion: endRegion);
				}
				default:
				{
					throw new Exception($"{tokens[index].region}: Expected `...` or name to continue parlist starting at {startRegion}, got {tokens[index].type}");
				}
			}
		}
		return new ParlistNode(names: names, hasVarargs: false, startRegion: startRegion, endRegion: tokens[index - 1].region);
	}

	private FuncnameNode ParseFuncname(Token[] tokens)
	{
		if(index >= tokens.Length)
		{
			throw new Exception($"Index {index} out of bounds of {tokens.Length}, expected name to start funcname");
		}
		CodeRegion startRegion = tokens[index].region;
		if(tokens[index].type != TokenType.Name)
		{
			throw new Exception($"{startRegion}: Expected name to start funcname, got {tokens[index].type}");
		}
		string name = ((Token.StringData)tokens[index].data!).data;
		index += 1;
		List<string> dottedNames = [];
		while(index < tokens.Length && tokens[index].type == TokenType.Dot)
		{
			index += 1;
			if(index >= tokens.Length)
			{
				throw new Exception($"Index {index} out of bounds of {tokens.Length}, expected name in dotted funcname starting at {startRegion}");
			}
			if(tokens[index].type != TokenType.Name)
			{
				throw new Exception($"{tokens[index].region}: Expected name in dotted funcname starting at {startRegion}, got {tokens[index].type}");
			}
			dottedNames.Add(((Token.StringData)tokens[index].data!).data);
			index += 1;
		}
		if(index < tokens.Length && tokens[index].type == TokenType.Colon)
		{
			index += 1;
			if(index >= tokens.Length)
			{
				throw new Exception($"Index {index} out of bounds of {tokens.Length}, expected name as first arg name after `:` in funcname starting at {startRegion}");
			}
			if(tokens[index].type != TokenType.Name)
			{
				throw new Exception($"{tokens[index].region}: Expected name as first arg name after `:` in funcname starting at {startRegion}, got {tokens[index].type}");
			}
			string firstArg = ((Token.StringData)tokens[index].data!).data;
			CodeRegion endRegion = tokens[index].region;
			index += 1;
			return new FuncnameNode(name: name, dottedNames: dottedNames, firstArg: firstArg, startRegion: startRegion, endRegion: endRegion);
		}
		return new FuncnameNode(name: name, dottedNames: dottedNames, firstArg: null, startRegion: startRegion, endRegion: tokens[index - 1].region);
	}

	private ExplistNode ParseExplist(Token[] tokens)
	{
		List<ExpNode> exps = [ParseExp(tokens)];
		while(index < tokens.Length && tokens[index].type == TokenType.Comma)
		{
			index += 1;
			exps.Add(ParseExp(tokens));
		}
		return new ExplistNode(exps: exps, startRegion: exps[0].startRegion, endRegion: exps[^1].endRegion);
	}

	private ExpNode ParseExp(Token[] tokens)
	{
		ExpNode lhs = ParseExpPrimary(tokens);
		return ParseExpPrecedence(tokens, lhs, 0);
	}

	private ExpNode ParseExpPrecedence(Token[] tokens, ExpNode lhs, int minPrecedence)
	{
		ExpNode currentLhs = lhs;
		while(index < tokens.Length && IsBinop(tokens[index]))
		{
			CodeRegion startRegion = tokens[index].region;
			int precedence = GetPrecedence(tokens[index]);
			if(precedence < minPrecedence)
			{
				break;
			}
			BinopType op = GetBinopType(tokens[index]);
			index += 1;
			ExpNode rhs = ParseExpPrimary(tokens);
			while(index < tokens.Length && IsBinop(tokens[index]) && (GetPrecedence(tokens[index]) > precedence || (GetPrecedence(tokens[index]) == precedence && IsRightAssociative(tokens[index]))))
			{
				int associativityBoost = (GetPrecedence(tokens[index]) == precedence) ? 0 : 1;
				rhs = ParseExpPrecedence(tokens, lhs: rhs, minPrecedence: precedence + associativityBoost);
			}
			currentLhs = new ExpNode.Binop(node: new(lhs: currentLhs, type: op, rhs: rhs, startRegion: startRegion, endRegion: rhs.endRegion), startRegion: startRegion, endRegion: rhs.endRegion);
		}
		return currentLhs;
	}

	private static bool IsRightAssociative(Token token) => token.type is TokenType.DotDot or TokenType.Caret;

	private static BinopType GetBinopType(Token token) => token.type switch
	{
		TokenType.Or => BinopType.LogicalOr,
		TokenType.And => BinopType.LogicalAnd,
		TokenType.Lt => BinopType.Lt,
		TokenType.Gt => BinopType.Gt,
		TokenType.LtEquals => BinopType.LtEquals,
		TokenType.GtEquals => BinopType.GtEquals,
		TokenType.LtLt => BinopType.Shl,
		TokenType.GtGt => BinopType.Shr,
		TokenType.TildeEquals => BinopType.NotEquals,
		TokenType.EqualsEquals => BinopType.Equals,
		TokenType.Pipe => BinopType.BinaryOr,
		TokenType.Tilde => BinopType.BinaryNot,
		TokenType.Ampersand => BinopType.BinaryAnd,
		TokenType.DotDot => BinopType.Concat,
		TokenType.Plus => BinopType.Add,
		TokenType.Minus => BinopType.Sub,
		TokenType.Star => BinopType.Mul,
		TokenType.Slash => BinopType.Div,
		TokenType.SlashSlash => BinopType.IntDiv,
		TokenType.Percent => BinopType.Mod,
		TokenType.Caret => BinopType.Exp,
		_ => throw new Exception($"{token.region}: Expected binary operator with precedence, got {token.type}"),
	};

	private static int GetPrecedence(Token token) => token.type switch
	{
		TokenType.Or => 2,
		TokenType.And => 4,
		TokenType.Lt or TokenType.Gt or TokenType.LtEquals or TokenType.GtEquals or TokenType.TildeEquals or TokenType.EqualsEquals => 6,
		TokenType.Pipe => 8,
		TokenType.Tilde => 10,
		TokenType.Ampersand => 12,
		TokenType.LtLt or TokenType.GtGt => 14,
		TokenType.DotDot => 16,
		TokenType.Plus or TokenType.Minus => 18,
		TokenType.Star or TokenType.Slash or TokenType.SlashSlash or TokenType.Percent => 20,
		TokenType.Caret => 22,
		_ => throw new Exception($"{token.region}: Expected binary operator with precedence, got {token.type}"),
	};

	private static bool IsBinop(Token token) => token.type switch
	{
		TokenType.Or or TokenType.And or TokenType.Lt or TokenType.Gt or TokenType.LtEquals or TokenType.GtEquals or TokenType.TildeEquals or TokenType.EqualsEquals or
		TokenType.Pipe or TokenType.Tilde or TokenType.Ampersand or TokenType.LtLt or TokenType.GtGt or TokenType.DotDot or TokenType.Plus or TokenType.Minus or
		TokenType.Star or TokenType.Slash or TokenType.SlashSlash or TokenType.Percent or TokenType.Caret => true,
		_ => false
	};

	private ExpNode ParseExpPrimary(Token[] tokens)
	{
		if(index >= tokens.Length)
		{
			throw new Exception($"Index {index} out of bounds of {tokens.Length}, expected primary expression (`nil`, `true`, `false`, numeral, string, `...`, `function`, `{{`, `#`, `not`, `~`)");
		}
		CodeRegion startRegion = tokens[index].region;
		switch(tokens[index].type)
		{
			case TokenType.Nil:
			{
				index += 1;
				return new ExpNode.Nil(region: startRegion);
			}
			case TokenType.True:
			{
				index += 1;
				return new ExpNode.True(region: startRegion);
			}
			case TokenType.False:
			{
				index += 1;
				return new ExpNode.False(region: startRegion);
			}
			case TokenType.Numeral:
			{
				INumeral numeral = ((Token.NumeralData)tokens[index].data!).numeral;
				index += 1;
				return new ExpNode.Numeral(value: numeral, region: startRegion);
			}
			case TokenType.StringLiteral:
			{
				string value = ((Token.StringData)tokens[index].data!).data;
				index += 1;
				return new ExpNode.LiteralString(value: value, region: startRegion);
			}
			case TokenType.DotDotDot:
			{
				index += 1;
				return new ExpNode.Varargs(region: startRegion);
			}
			case TokenType.CurlyOpen:
			{
				TableconstructorNode inner = ParseTableconstructor(tokens);
				return new ExpNode.Tableconstructor(node: inner, startRegion: inner.startRegion, endRegion: inner.endRegion);
			}
			case TokenType.Function:
			{
				index += 1;
				FuncbodyNode body = ParseFuncbody(tokens);
				return new ExpNode.Functiondef(node: body, startRegion: startRegion, endRegion: body.endRegion);
			}
			case TokenType.Minus:
			{
				index += 1;
				ExpNode unop = ParseExp(tokens);
				return new ExpNode.Unop(node: new(type: UnopType.Minus, exp: unop, startRegion: startRegion, endRegion: unop.endRegion), startRegion: startRegion, endRegion: unop.endRegion);
			}
			case TokenType.Hash:
			{
				index += 1;
				ExpNode unop = ParseExp(tokens);
				return new ExpNode.Unop(node: new(type: UnopType.Length, exp: unop, startRegion: startRegion, endRegion: unop.endRegion), startRegion: startRegion, endRegion: unop.endRegion);
			}
			case TokenType.Not:
			{
				index += 1;
				ExpNode unop = ParseExp(tokens);
				return new ExpNode.Unop(node: new(type: UnopType.LogicalNot, exp: unop, startRegion: startRegion, endRegion: unop.endRegion), startRegion: startRegion, endRegion: unop.endRegion);
			}
			case TokenType.Tilde:
			{
				index += 1;
				ExpNode unop = ParseExp(tokens);
				return new ExpNode.Unop(node: new(type: UnopType.BinaryNot, exp: unop, startRegion: startRegion, endRegion: unop.endRegion), startRegion: startRegion, endRegion: unop.endRegion);
			}
			default:
			{
				SuffixexpNode suffixexp = ParseSuffixExp(tokens);
				return new ExpNode.Suffixexp(node: suffixexp, startRegion: suffixexp.startRegion, endRegion: suffixexp.endRegion);
			}
		}
	}

	private RetstatNode ParseRetstat(Token[] tokens)
	{
		if(index >= tokens.Length)
		{
			throw new Exception($"Index {index} out of bounds of {tokens.Length}, expected `return` to start retstat");
		}
		CodeRegion startRegion = tokens[index].region;
		if(tokens[index].type != TokenType.Return)
		{
			throw new Exception($"{startRegion}: Expected `return` to start retstat, got {tokens[index].type}");
		}
		index += 1;
		if(index >= tokens.Length)
		{
			return new RetstatNode(values: null, startRegion: startRegion, endRegion: startRegion);
		}
		if(tokens[index].type is TokenType.Semicolon or TokenType.Else or TokenType.Elseif or TokenType.End)
		{
			CodeRegion emptyEndRegion;
			if(tokens[index].type == TokenType.Semicolon)
			{
				emptyEndRegion = tokens[index].region;
				index += 1;
			}
			else
			{
				emptyEndRegion = startRegion;
			}
			return new RetstatNode(values: null, startRegion: startRegion, endRegion: emptyEndRegion);
		}
		ExplistNode values = ParseExplist(tokens);
		CodeRegion endRegion;
		if(index < tokens.Length && tokens[index].type == TokenType.Semicolon)
		{
			endRegion = tokens[index].region;
			index += 1;
		}
		else
		{
			endRegion = values.endRegion;
		}
		return new RetstatNode(values: values, startRegion: startRegion, endRegion: endRegion);
	}
}
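
The suffix-expression machinery is easiest to read from the back: ParseSuffixExp collects a first part (a name or a bracketed expression) plus a flat list of suffixes, and the kind of the last suffix decides what the whole chain becomes. A trailing argument suffix turns it into a Functioncall node, while a trailing `.Name` or `[exp]` leaves a normal suffix-expression that SuffixExpToVar can convert into an assignable var. A rough standalone sketch of that classification, using simplified stand-in types that are not part of Parser.cs:

// Illustrative only: mirrors the switch at the end of ParseSuffixExp and the one
// in SuffixExpToVar, with a simplified stand-in for SuffixexpSuffix.
using System;

static class SuffixDemo
{
	enum Suffix { Dot, Indexed, Args, ArgsFirstArg }

	static string Classify(Suffix[] suffixes)
	{
		if(suffixes.Length == 0)
		{
			return "plain name or bracketed expression";
		}
		return suffixes[^1] switch
		{
			Suffix.Args or Suffix.ArgsFirstArg => "function call",            // e.g. f(x), t.f(x), obj:m(x)
			Suffix.Dot or Suffix.Indexed => "assignable var (member/index)",  // e.g. t.a, t[i], t.a.b[i]
			_ => "unreachable",
		};
	}

	static void Main()
	{
		// a.b[i]:f(x)  =>  Dot, Indexed, ArgsFirstArg  =>  function call
		Console.WriteLine(Classify(new[] { Suffix.Dot, Suffix.Indexed, Suffix.ArgsFirstArg }));
		// t.a[i]       =>  Dot, Indexed                =>  assignable var
		Console.WriteLine(Classify(new[] { Suffix.Dot, Suffix.Indexed }));
	}
}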
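
ParseExpPrecedence is a precedence-climbing loop over the table in GetPrecedence, which spaces Lua's operator levels out in steps of two so that the +1 associativity boost never collides with the next level: left-associative operators re-enter with precedence + 1, while the right-associative `..` and `^` re-enter with precedence + 0. A rough standalone illustration of the same climbing shape, using string tokens and made-up names rather than anything from Parser.cs:

// Illustrative only: a minimal precedence climber over string tokens that mirrors
// the shape of ParseExpPrecedence, IsRightAssociative and GetPrecedence above.
using System;
using System.Collections.Generic;

static class ClimbDemo
{
	static readonly Dictionary<string, int> Precedence = new()
	{
		[".."] = 16, ["+"] = 18, ["-"] = 18, ["*"] = 20, ["/"] = 20, ["^"] = 22,
	};

	static bool IsRightAssociative(string op) => op is ".." or "^";

	static int index;

	static string ParseExp(string[] tokens)
	{
		// Operands are single tokens here; the real parser calls ParseExpPrimary instead.
		string lhs = tokens[index];
		index += 1;
		return Climb(tokens, lhs, 0);
	}

	static string Climb(string[] tokens, string lhs, int minPrecedence)
	{
		while(index < tokens.Length && Precedence.TryGetValue(tokens[index], out int prec) && prec >= minPrecedence)
		{
			string op = tokens[index];
			index += 1;
			string rhs = tokens[index];
			index += 1;
			while(index < tokens.Length && Precedence.TryGetValue(tokens[index], out int next)
				&& (next > prec || (next == prec && IsRightAssociative(tokens[index]))))
			{
				// Same associativity boost as above: +1 for tighter operators, +0 for right-associative ones
				rhs = Climb(tokens, rhs, prec + (next == prec ? 0 : 1));
			}
			lhs = $"({lhs} {op} {rhs})";
		}
		return lhs;
	}

	static void Main()
	{
		// Prints ((1 + (2 * 3)) + 4): * binds tighter, + groups to the left
		Console.WriteLine(ParseExp(new[] { "1", "+", "2", "*", "3", "+", "4" }));
		index = 0;
		// Prints (1 .. (2 .. 3)): .. groups to the right
		Console.WriteLine(ParseExp(new[] { "1", "..", "2", "..", "3" }));
	}
}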