Merge branch 'csharp' of https://gittea.dev/0x4261756D/luaaaaah into csharp
Tokenizer.cs: 1611 changed lines
@@ -1,7 +1,6 @@
using System;
using System.Collections.Generic;
using System.Text;
using System.Text.Json.Serialization;

namespace luaaaaah;
class Tokenizer
@@ -82,6 +81,62 @@ class Tokenizer
currentToken.region.end = new(currentLocation);
}

private void TokenizeTerminal(State newState, TokenType type)
{
lastIndex = index;
state = newState;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: type);
}
private void TokenizeTerminalName(State newState, char ch)
{
lastIndex = index;
state = newState;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
}

private void Backtrack(TokenType newType)
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = newType;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
}
private void BacktrackNoClear(TokenType newType)
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = newType;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
}
private void BacktrackNoTypeChange()
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
}
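// Note: the helpers above factor out the two patterns that previously appeared
// inline in every branch of TokenizeChar. TokenizeTerminal and TokenizeTerminalName
// start a new token at the current location; the three Backtrack variants finalize
// the current token, rewind the read index to lastIndex, and return to State.Start.
// Backtrack(type) also clears the token's data (keywords), BacktrackNoClear(type)
// sets the type but keeps the data (names and literals), and BacktrackNoTypeChange()
// leaves the token exactly as built.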

private void TokenizeChar(char ch)
{
switch(state)
@@ -91,264 +146,116 @@ class Tokenizer
switch(ch)
{
case '-':
{
lastIndex = index;
state = State.Minus;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Minus);
} /* tokenizeTerminalBase(TokenType.Minus, TokenizerState.Minus); */
break;
TokenizeTerminal(State.Minus, TokenType.Minus);
break;
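// Note: this hunk repeats one rewrite for every single-character token: the old
// inline block (ending in the commented-out tokenizeTerminalBase/tokenizeTerminalStr
// call carried over from the previous implementation) is replaced by a single
// TokenizeTerminal or TokenizeTerminalName call. For example, the '-' case above
// now reads, in full:
//
//     case '-':
//         TokenizeTerminal(State.Minus, TokenType.Minus);
//         break;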
case ',':
{
lastIndex = index;
state = State.Comma;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Comma);
} /* tokenizeTerminalBase(TokenType.Comma, TokenizerState.Comma); */
break;
TokenizeTerminal(State.Comma, TokenType.Comma);
break;
case '=':
{
lastIndex = index;
state = State.Equals;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Equals);
} /* tokenizeTerminalBase(TokenType.Equals, TokenizerState.Equals); */
break;
TokenizeTerminal(State.Equals, TokenType.Equals);
break;
case '(':
{
lastIndex = index;
state = State.RoundOpen;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.RoundOpen);
} /* tokenizeTerminalBase(TokenType.RoundOpen, TokenizerState.RoundOpen); */
break;
TokenizeTerminal(State.RoundOpen, TokenType.RoundOpen);
break;
case ')':
{
lastIndex = index;
state = State.RoundClosed;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.RoundClosed);
} /* tokenizeTerminalBase(TokenType.RoundClosed, TokenizerState.RoundClosed); */
break;
TokenizeTerminal(State.RoundClosed, TokenType.RoundClosed);
break;
case '.':
{
lastIndex = index;
state = State.Dot;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Dot);
} /* tokenizeTerminalBase(TokenType.Dot, TokenizerState.Dot); */
break;
TokenizeTerminal(State.Dot, TokenType.Dot);
break;
case ':':
{
lastIndex = index;
state = State.Colon;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Colon);
} /* tokenizeTerminalBase(TokenType.Colon, TokenizerState.Colon); */
break;
TokenizeTerminal(State.Colon, TokenType.Colon);
break;
case '{':
{
lastIndex = index;
state = State.CurlyOpen;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.CurlyOpen);
} /* tokenizeTerminalBase(TokenType.CurlyOpen, TokenizerState.CurlyOpen); */
break;
TokenizeTerminal(State.CurlyOpen, TokenType.CurlyOpen);
break;
case '}':
{
lastIndex = index;
state = State.CurlyClosed;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.CurlyClosed);
} /* tokenizeTerminalBase(TokenType.CurlyClosed, TokenizerState.CurlyClosed); */
break;
TokenizeTerminal(State.CurlyClosed, TokenType.CurlyClosed);
break;
case '[':
{
lastIndex = index;
state = State.SquareOpen;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.SquareOpen);
} /* tokenizeTerminalBase(TokenType.SquareOpen, TokenizerState.SquareOpen); */
break;
TokenizeTerminal(State.SquareOpen, TokenType.SquareOpen);
break;
case ']':
{
lastIndex = index;
state = State.SquareClosed;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.SquareClosed);
} /* tokenizeTerminalBase(TokenType.SquareClosed, TokenizerState.SquareClosed); */
break;
TokenizeTerminal(State.SquareClosed, TokenType.SquareClosed);
break;
case '+':
{
lastIndex = index;
state = State.Plus;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Plus);
} /* tokenizeTerminalBase(TokenType.Plus, TokenizerState.Plus); */
break;
TokenizeTerminal(State.Plus, TokenType.Plus);
break;
case '~':
{
lastIndex = index;
state = State.Tilde;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Tilde);
} /* tokenizeTerminalBase(TokenType.Tilde, TokenizerState.Tilde); */
break;
TokenizeTerminal(State.Tilde, TokenType.Tilde);
break;
case '>':
{
lastIndex = index;
state = State.Gt;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Gt);
} /* tokenizeTerminalBase(TokenType.Gt, TokenizerState.Gt); */
break;
TokenizeTerminal(State.Gt, TokenType.Gt);
break;
case '<':
{
lastIndex = index;
state = State.Lt;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Lt);
} /* tokenizeTerminalBase(TokenType.Lt, TokenizerState.Lt); */
break;
TokenizeTerminal(State.Lt, TokenType.Lt);
break;
case '#':
{
lastIndex = index;
state = State.Hash;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Hash);
} /* tokenizeTerminalBase(TokenType.Hash, TokenizerState.Hash); */
break;
TokenizeTerminal(State.Hash, TokenType.Hash);
break;
case '|':
{
lastIndex = index;
state = State.Pipe;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Pipe);
} /* tokenizeTerminalBase(TokenType.Pipe, TokenizerState.Pipe); */
break;
TokenizeTerminal(State.Pipe, TokenType.Pipe);
break;
case '&':
{
lastIndex = index;
state = State.Ampersand;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Ampersand);
} /* tokenizeTerminalBase(TokenType.Ampersand, TokenizerState.Ampersand); */
break;
TokenizeTerminal(State.Ampersand, TokenType.Ampersand);
break;
case '%':
{
lastIndex = index;
state = State.Percent;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Percent);
} /* tokenizeTerminalBase(TokenType.Percent, TokenizerState.Percent); */
break;
TokenizeTerminal(State.Percent, TokenType.Percent);
break;
case '*':
{
lastIndex = index;
state = State.Star;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Star);
} /* tokenizeTerminalBase(TokenType.Star, TokenizerState.Star); */
break;
TokenizeTerminal(State.Star, TokenType.Star);
break;
case '/':
{
lastIndex = index;
state = State.Slash;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Slash);
} /* tokenizeTerminalBase(TokenType.Slash, TokenizerState.Slash); */
break;
TokenizeTerminal(State.Slash, TokenType.Slash);
break;
case ';':
{
lastIndex = index;
state = State.Semicolon;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Semicolon);
} /* tokenizeTerminalBase(TokenType.Semicolon, TokenizerState.Semicolon); */
break;
TokenizeTerminal(State.Semicolon, TokenType.Semicolon);
break;
case '^':
{
lastIndex = index;
state = State.Caret;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Caret);
} /* tokenizeTerminalBase(TokenType.Caret, TokenizerState.Caret); */
break;
TokenizeTerminal(State.Caret, TokenType.Caret);
break;
case 'a':
{
lastIndex = index;
state = State.A;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.A, tokenStr, ch); */
break;
TokenizeTerminalName(State.A, ch);
break;
case 'b':
{
lastIndex = index;
state = State.B;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.B, tokenStr, ch); */
break;
TokenizeTerminalName(State.B, ch);
break;
case 'd':
{
lastIndex = index;
state = State.D;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.D, tokenStr, ch); */
break;
TokenizeTerminalName(State.D, ch);
break;
case 'e':
{
lastIndex = index;
state = State.E;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.E, tokenStr, ch); */
break;
TokenizeTerminalName(State.E, ch);
break;
case 'f':
{
lastIndex = index;
state = State.F;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.F, tokenStr, ch); */
break;
TokenizeTerminalName(State.F, ch);
break;
case 'i':
{
lastIndex = index;
state = State.I;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.I, tokenStr, ch); */
break;
TokenizeTerminalName(State.I, ch);
break;
case 'g':
{
lastIndex = index;
state = State.G;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.G, tokenStr, ch); */
break;
TokenizeTerminalName(State.G, ch);
break;
case 'l':
{
lastIndex = index;
state = State.L;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.L, tokenStr, ch); */
break;
TokenizeTerminalName(State.L, ch);
break;
case 'n':
{
lastIndex = index;
state = State.N;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.N, tokenStr, ch); */
break;
TokenizeTerminalName(State.N, ch);
break;
case 'o':
{
lastIndex = index;
state = State.O;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.O, tokenStr, ch); */
break;
TokenizeTerminalName(State.O, ch);
break;
case 'r':
{
lastIndex = index;
state = State.R;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.R, tokenStr, ch); */
break;
TokenizeTerminalName(State.R, ch);
break;
case 't':
{
lastIndex = index;
state = State.T;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.T, tokenStr, ch); */
break;
TokenizeTerminalName(State.T, ch);
break;
case 'u':
{
lastIndex = index;
state = State.U;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.U, tokenStr, ch); */
break;
TokenizeTerminalName(State.U, ch);
break;
case 'w':
{
lastIndex = index;
state = State.W;
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.W, tokenStr, ch); */
break;
TokenizeTerminalName(State.W, ch);
break;
case '0':
{
lastIndex = index;
@@ -682,36 +589,23 @@ class Tokenizer
}
break;
case 'z':
{
state = State.SingleQuoteBackslashZ;
}
break;
break;
case 'x':
{
state = State.SingleQuoteBackslashX;
}
break;
break;
case 'u':
{
state = State.SingleQuoteBackslashU;
}
break;
break;
default: throw new Exception($"Unknown escape sequence: \\{ch}");
}
}
break;
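// Note: the doubled break lines in the escape cases above are an artifact of the
// diff view presenting the removed and added forms of each case together without
// +/- markers; the final code has a single break per case, not unreachable code.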
case State.SingleQuoteBackslashU:
{
if(ch == '{')
{
state = State.SingleQuoteBackslashUBracket;
}
else
{
throw new Exception($"Expected `{{` to continue \\u escape sequence at {currentLocation}, got {ch}");
}
}
break;
state = ch == '{'
? State.SingleQuoteBackslashUBracket
: throw new Exception($"Expected `{{` to continue \\u escape sequence at {currentLocation}, got {ch}");
break;
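// Note: the if/else above is collapsed into a single conditional assignment; the
// error path uses a C# throw expression, so an invalid \u escape still raises
// before state is assigned.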
case State.SingleQuoteBackslashUBracket:
{
if(char.IsAsciiHexDigit(ch))
@@ -850,18 +744,7 @@ class Tokenizer
break;
case State.String:
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.StringLiteral;
//currentToken.region.end = new(currentLocation);
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.StringLiteral);
}
break;
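// Note: from this point on, every state's fallback branch swaps the same rewind
// epilogue for one helper call: BacktrackNoClear for names and string literals,
// Backtrack for keywords (And, While, Break, Goto, Return, Repeat, Nil, Not, Then,
// True, Else, Elseif, End, Or, Do, In, If, Function), and BacktrackNoTypeChange
// where the token type is already final.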
case State.Name:
@@ -875,17 +758,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -911,16 +784,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@@ -964,16 +828,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@@ -996,16 +851,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@@ -1030,16 +876,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@@ -1057,16 +894,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@@ -1094,16 +922,7 @@ class Tokenizer
case State.ColonColon:
case State.SlashSlash:
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
break;
case State.Tilde:
@@ -1116,16 +935,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@@ -1145,16 +955,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@@ -1174,16 +975,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@@ -1197,16 +989,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@@ -1220,16 +1003,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@@ -1244,16 +1018,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@@ -1267,16 +1032,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@@ -1290,16 +1046,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@@ -1313,16 +1060,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@@ -1417,16 +1155,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@@ -1499,17 +1228,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -1530,17 +1249,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -1555,18 +1264,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.And;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.And);
}
}
break;
@@ -1587,17 +1285,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -1618,17 +1306,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -1649,17 +1327,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -1680,17 +1348,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -1705,18 +1363,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.While;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.While);
}
}
break;
@@ -1737,17 +1384,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -1768,17 +1405,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -1799,17 +1426,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -1830,17 +1447,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -1855,18 +1462,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Break;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Break);
}
}
break;
@@ -1887,17 +1483,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -1918,17 +1504,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -1949,17 +1525,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -1974,18 +1540,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Goto;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Goto);
}
}
break;
@@ -2006,17 +1561,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2043,17 +1588,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2074,17 +1609,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2105,17 +1630,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2136,17 +1651,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2161,18 +1666,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Return;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Return);
}
}
break;
@@ -2193,17 +1687,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2224,17 +1708,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2255,17 +1729,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2280,18 +1744,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Repeat;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Repeat);
}
}
break;
@@ -2318,17 +1771,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2349,17 +1792,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2374,18 +1807,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Nil;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Nil);
}
}
break;
@@ -2406,17 +1828,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2431,18 +1843,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Not;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Not);
}
}
break;
@@ -2469,17 +1870,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2500,17 +1891,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2531,17 +1912,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2556,18 +1927,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Then;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Then);
}
}
break;
@@ -2588,17 +1948,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2619,17 +1969,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2644,18 +1984,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.True;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.True);
}
}
break;
@@ -2682,17 +2011,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2713,17 +2032,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2744,17 +2053,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2775,18 +2074,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Else;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Else);
}
}
break;
@@ -2807,17 +2095,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2832,18 +2110,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Elseif;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Elseif);
}
}
break;
@@ -2864,17 +2131,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2889,18 +2146,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.End;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.End);
}
}
break;
@@ -2921,17 +2167,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -2946,18 +2182,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Or;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Or);
}
}
break;
@@ -2978,17 +2203,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -3003,18 +2218,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Do;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Do);
}
}
break;
@@ -3041,17 +2245,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@@ -3066,18 +2260,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.In;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.In);
}
}
break;
@@ -3092,18 +2275,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.If;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.If);
}
}
break;
@ -3136,17 +2308,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3167,17 +2329,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3198,17 +2350,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3229,17 +2371,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3260,17 +2392,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3291,17 +2413,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3322,17 +2434,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3347,18 +2449,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Function;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Function);
}
}
break;
@ -3379,17 +2470,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3410,17 +2491,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3441,17 +2512,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3466,18 +2527,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.False;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.False);
}
}
break;
@ -3498,17 +2548,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3523,18 +2563,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.For;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.For);
}
}
break;
@ -3555,17 +2584,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3586,17 +2605,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3617,17 +2626,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3648,17 +2647,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3673,18 +2662,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Local;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Local);
}
}
break;
@ -3705,17 +2683,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3736,17 +2704,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3767,17 +2725,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3798,17 +2746,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3823,18 +2761,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Until;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Until);
}
}
break;
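// Editorial sketch, not part of the commit: every keyword hunk above replaces
// the same inlined fallback. As the removed lines show, a completed keyword
// match ends in Backtrack(TokenType.X), which retags the token and nulls its
// data, while a keyword prefix that grew into an identifier ends in
// BacktrackNoClear(TokenType.Name), keeping the accumulated StringData.
// A hypothetical round trip (the entry-point name is assumed; it does not
// appear in this diff):
//
//   var tokens = new Tokenizer().Tokenize("do dot");
//   // expected: a Do token with no data, then a Name token carrying "dot"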
@ -3875,31 +2802,7 @@ class Tokenizer
}
}

class CodeRegion(CodeLocation start, CodeLocation end)
{
public CodeLocation start = start;
public CodeLocation end = end;

public override string ToString()
{
return $"{start}-{end}";
}
}

class CodeLocation(int line, int col)
{
public int line = line;
public int col = col;

public CodeLocation(CodeLocation other) : this(line: other.line, col: other.col) { }

public override string ToString()
{
return $"{line + 1}:{col + 1}";
}
}

class Token(CodeRegion region, TokenType? type = null, Token.IData? data = null)
internal class Token(CodeRegion region, TokenType? type = null, Token.IData? data = null)
{
public CodeRegion region = region;
public IData? data = data;
@ -3937,48 +2840,4 @@ public enum TokenType
Semicolon, Colon, Comma, Dot, DotDot, DotDotDot,
Numeral,
StringLiteral,
}

[JsonDerivedType(typeof(Integer), typeDiscriminator: "int")]
[JsonDerivedType(typeof(Float), typeDiscriminator: "float")]
public interface INumeral
{
public class Integer(int value) : INumeral
{
public int value = value;

public bool RawEqual(INumeral other)
{
if(other is Integer integer)
{
return integer.value == value;
}
// TODO: Check if this is actually doing what is expected
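// Added note on the TODO above (standard C# semantics): the int operand in
// the comparison below is implicitly widened to float, so equality is exact
// only while the value fits in float's 24-bit mantissa; for example,
// 16777217 compares equal to 16777216f after widening.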
return ((Float)other).value == value;
}
public override string ToString()
{
return $"Numeral Integer {value}";
}
}
public class Float(float value) : INumeral
{
public float value = value;

public bool RawEqual(INumeral other)
{
if(other is Float float_val)
{
return float_val.value == value;
}
// TODO: Check if this is actually doing what is expected
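// Added note on the TODO above: the same implicit widening applies here in
// reverse, the Integer's int value converts to float before the comparison,
// so large integers can spuriously compare equal to nearby floats.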
return ((Integer)other).value == value;
}
public override string ToString()
{
return $"Numeral Float {value}";
}
}

public bool RawEqual(INumeral other);
}
}