using System;
using System.Collections.Generic;
using System.Text.Json.Serialization;

namespace luaaaaah;

class Tokenizer
{
    // Tokens produced so far.
    private readonly List<Token> tokens = [];
    // Current state of the character-driven state machine.
    private State state = State.Start;
    // Index of the last character consumed into the token being built; used to rewind
    // and re-process the character that ended it.
    int? lastIndex;
    // Index of the character currently being processed.
    int index;
    // Number of '=' signs counted in an opening / closing long bracket ([==[ ... ]==]).
    int openingLongBracketLevel;
    int closingLongBracketLevel;
    // Token currently being built, if any.
    Token? currentToken;
    // Line/column of the character currently being processed.
    CodeLocation currentLocation = new(line: 0, col: 0);

    public Token[] Tokenize(string content)
    {
        // Skip a leading shebang line (e.g. "#!/usr/bin/env lua").
        if(content.StartsWith("#!"))
        {
            content = content[content.IndexOf('\n')..];
        }
        while(index < content.Length)
        {
            TokenizeChar(content[index]);
            if(content[index] == '\n')
            {
                currentLocation.line += 1;
                currentLocation.col = 0;
            }
            else
            {
                currentLocation.col += 1;
            }
            index += 1;
        }
        // Feed one trailing newline so a token still being built at end of input gets flushed.
        TokenizeChar('\n');
        return [.. tokens];
    }
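
    // Usage sketch (illustrative only; the file name and printed members are assumptions,
    // relying on the Token/CodeLocation types defined elsewhere in this file):
    //
    //     string source = System.IO.File.ReadAllText("script.lua");
    //     Token[] tokens = new Tokenizer().Tokenize(source);
    //     foreach(Token token in tokens)
    //     {
    //         Console.WriteLine($"{token.type} at {token.region.start.line}:{token.region.start.col}");
    //     }
    //
    // A Tokenizer instance keeps per-run state (index, tokens, currentLocation), so use a
    // fresh instance for each source string.
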
    private void AppendDataChar(char ch)
    {
        // Append a character to the current token's string payload, creating the payload on first use.
        if((Token.StringData?)currentToken!.data == null)
        {
            currentToken!.data = new Token.StringData($"{ch}");
        }
        else
        {
            ((Token.StringData?)currentToken!.data!).data += ch;
        }
        currentToken.region.end = new(currentLocation);
    }

    private void AppendDataInt(char ch)
    {
        // Append a decimal digit to the current token's integer payload, creating the payload on first use.
        if((Token.NumeralData?)currentToken!.data == null)
        {
            currentToken!.data = new Token.NumeralData(new INumeral.Integer(ch - '0'));
        }
        else
        {
            ((INumeral.Integer)((Token.NumeralData?)currentToken!.data!).numeral).value *= 10;
            ((INumeral.Integer)((Token.NumeralData?)currentToken!.data!).numeral).value += ch - '0';
        }
        currentToken.region.end = new(currentLocation);
    }

    private void AppendDataIntHex(char ch)
    {
        // Append a hexadecimal digit to the current token's integer payload, creating the payload on first use.
        int v = char.IsAsciiDigit(ch) ? ch - '0' : 10 + char.ToLower(ch) - 'a';
        if((Token.NumeralData?)currentToken!.data == null)
        {
            currentToken!.data = new Token.NumeralData(new INumeral.Integer(v));
        }
        else
        {
            ((INumeral.Integer)((Token.NumeralData?)currentToken!.data!).numeral).value *= 16;
            ((INumeral.Integer)((Token.NumeralData?)currentToken!.data!).numeral).value += v;
        }
        currentToken.region.end = new(currentLocation);
    }

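    // TokenizeChar implements one large character-driven state machine. The convention used
    // throughout: while a token is still growing, lastIndex records the index of the last
    // character that belongs to it; when a character arrives that no longer fits, the finished
    // token is added to the list, index is rewound to lastIndex, and the main loop's
    // index += 1 causes the terminating character to be re-processed from State.Start.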
private void TokenizeChar(char ch)
|
|
{
|
|
switch(state)
|
|
{
|
|
case State.Start:
|
|
{
|
|
switch(ch)
|
|
{
|
|
case '-':
|
|
{
|
|
lastIndex = index;
|
|
state = State.Minus;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Minus);
|
|
} /* tokenizeTerminalBase(TokenType.Minus, TokenizerState.Minus); */
|
|
break;
|
|
case ',':
|
|
{
|
|
lastIndex = index;
|
|
state = State.Comma;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Comma);
|
|
} /* tokenizeTerminalBase(TokenType.Comma, TokenizerState.Comma); */
|
|
break;
|
|
case '=':
|
|
{
|
|
lastIndex = index;
|
|
state = State.Equals;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Equals);
|
|
} /* tokenizeTerminalBase(TokenType.Equals, TokenizerState.Equals); */
|
|
break;
|
|
case '(':
|
|
{
|
|
lastIndex = index;
|
|
state = State.RoundOpen;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.RoundOpen);
|
|
} /* tokenizeTerminalBase(TokenType.RoundOpen, TokenizerState.RoundOpen); */
|
|
break;
|
|
case ')':
|
|
{
|
|
lastIndex = index;
|
|
state = State.RoundClosed;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.RoundClosed);
|
|
} /* tokenizeTerminalBase(TokenType.RoundClosed, TokenizerState.RoundClosed); */
|
|
break;
|
|
case '.':
|
|
{
|
|
lastIndex = index;
|
|
state = State.Dot;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Dot);
|
|
} /* tokenizeTerminalBase(TokenType.Dot, TokenizerState.Dot); */
|
|
break;
|
|
case ':':
|
|
{
|
|
lastIndex = index;
|
|
state = State.Colon;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Colon);
|
|
} /* tokenizeTerminalBase(TokenType.Colon, TokenizerState.Colon); */
|
|
break;
|
|
case '{':
|
|
{
|
|
lastIndex = index;
|
|
state = State.CurlyOpen;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.CurlyOpen);
|
|
} /* tokenizeTerminalBase(TokenType.CurlyOpen, TokenizerState.CurlyOpen); */
|
|
break;
|
|
case '}':
|
|
{
|
|
lastIndex = index;
|
|
state = State.CurlyClosed;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.CurlyClosed);
|
|
} /* tokenizeTerminalBase(TokenType.CurlyClosed, TokenizerState.CurlyClosed); */
|
|
break;
|
|
case '[':
|
|
{
|
|
lastIndex = index;
|
|
state = State.SquareOpen;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.SquareOpen);
|
|
} /* tokenizeTerminalBase(TokenType.SquareOpen, TokenizerState.SquareOpen); */
|
|
break;
|
|
case ']':
|
|
{
|
|
lastIndex = index;
|
|
state = State.SquareClosed;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.SquareClosed);
|
|
} /* tokenizeTerminalBase(TokenType.SquareClosed, TokenizerState.SquareClosed); */
|
|
break;
|
|
case '+':
|
|
{
|
|
lastIndex = index;
|
|
state = State.Plus;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Plus);
|
|
} /* tokenizeTerminalBase(TokenType.Plus, TokenizerState.Plus); */
|
|
break;
|
|
case '~':
|
|
{
|
|
lastIndex = index;
|
|
state = State.Tilde;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Tilde);
|
|
} /* tokenizeTerminalBase(TokenType.Tilde, TokenizerState.Tilde); */
|
|
break;
|
|
case '>':
|
|
{
|
|
lastIndex = index;
|
|
state = State.Gt;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Gt);
|
|
} /* tokenizeTerminalBase(TokenType.Gt, TokenizerState.Gt); */
|
|
break;
|
|
case '<':
|
|
{
|
|
lastIndex = index;
|
|
state = State.Lt;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Lt);
|
|
} /* tokenizeTerminalBase(TokenType.Lt, TokenizerState.Lt); */
|
|
break;
|
|
case '#':
|
|
{
|
|
lastIndex = index;
|
|
state = State.Hash;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Hash);
|
|
} /* tokenizeTerminalBase(TokenType.Hash, TokenizerState.Hash); */
|
|
break;
|
|
case '|':
|
|
{
|
|
lastIndex = index;
|
|
state = State.Pipe;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Pipe);
|
|
} /* tokenizeTerminalBase(TokenType.Pipe, TokenizerState.Pipe); */
|
|
break;
|
|
case '&':
|
|
{
|
|
lastIndex = index;
|
|
state = State.Ampersand;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Ampersand);
|
|
} /* tokenizeTerminalBase(TokenType.Ampersand, TokenizerState.Ampersand); */
|
|
break;
|
|
case '%':
|
|
{
|
|
lastIndex = index;
|
|
state = State.Percent;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Percent);
|
|
} /* tokenizeTerminalBase(TokenType.Percent, TokenizerState.Percent); */
|
|
break;
|
|
case '*':
|
|
{
|
|
lastIndex = index;
|
|
state = State.Star;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Star);
|
|
} /* tokenizeTerminalBase(TokenType.Star, TokenizerState.Star); */
|
|
break;
|
|
case '/':
|
|
{
|
|
lastIndex = index;
|
|
state = State.Slash;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Slash);
|
|
} /* tokenizeTerminalBase(TokenType.Slash, TokenizerState.Slash); */
|
|
break;
|
|
case ';':
|
|
{
|
|
lastIndex = index;
|
|
state = State.Semicolon;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Semicolon);
|
|
} /* tokenizeTerminalBase(TokenType.Semicolon, TokenizerState.Semicolon); */
|
|
break;
|
|
case '^':
|
|
{
|
|
lastIndex = index;
|
|
state = State.Caret;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Caret);
|
|
} /* tokenizeTerminalBase(TokenType.Caret, TokenizerState.Caret); */
|
|
break;
|
|
case 'a':
|
|
{
|
|
lastIndex = index;
|
|
state = State.A;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
|
|
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.A, tokenStr, ch); */
|
|
break;
|
|
case 'b':
|
|
{
|
|
lastIndex = index;
|
|
state = State.B;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
|
|
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.B, tokenStr, ch); */
|
|
break;
|
|
case 'd':
|
|
{
|
|
lastIndex = index;
|
|
state = State.D;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
|
|
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.D, tokenStr, ch); */
|
|
break;
|
|
case 'e':
|
|
{
|
|
lastIndex = index;
|
|
state = State.E;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
|
|
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.E, tokenStr, ch); */
|
|
break;
|
|
case 'f':
|
|
{
|
|
lastIndex = index;
|
|
state = State.F;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
|
|
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.F, tokenStr, ch); */
|
|
break;
|
|
case 'i':
|
|
{
|
|
lastIndex = index;
|
|
state = State.I;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
|
|
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.I, tokenStr, ch); */
|
|
break;
|
|
case 'g':
|
|
{
|
|
lastIndex = index;
|
|
state = State.G;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
|
|
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.G, tokenStr, ch); */
|
|
break;
|
|
case 'l':
|
|
{
|
|
lastIndex = index;
|
|
state = State.L;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
|
|
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.L, tokenStr, ch); */
|
|
break;
|
|
case 'n':
|
|
{
|
|
lastIndex = index;
|
|
state = State.N;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
|
|
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.N, tokenStr, ch); */
|
|
break;
|
|
case 'o':
|
|
{
|
|
lastIndex = index;
|
|
state = State.O;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
|
|
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.O, tokenStr, ch); */
|
|
break;
|
|
case 'r':
|
|
{
|
|
lastIndex = index;
|
|
state = State.R;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
|
|
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.R, tokenStr, ch); */
|
|
break;
|
|
case 't':
|
|
{
|
|
lastIndex = index;
|
|
state = State.T;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
|
|
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.T, tokenStr, ch); */
|
|
break;
|
|
case 'u':
|
|
{
|
|
lastIndex = index;
|
|
state = State.U;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
|
|
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.U, tokenStr, ch); */
|
|
break;
|
|
case 'w':
|
|
{
|
|
lastIndex = index;
|
|
state = State.W;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
|
|
} /* tokenizeTerminalStr(TokenType.Name, TokenizerState.W, tokenStr, ch); */
|
|
break;
|
|
case '0':
|
|
{
|
|
lastIndex = index;
|
|
state = State.Zero;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Numeral, data: new Token.NumeralData(new INumeral.Integer(0)));
|
|
} /* tokenizeTerminalIntNum(TokenType.Numeral, TokenizerState.Zero, tokenNumeral, ch); */
|
|
break;
|
|
case '"':
|
|
{
|
|
currentToken = null;
|
|
state = State.Quote;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.StringLiteral);
|
|
}
|
|
break;
|
|
case '\'':
|
|
{
|
|
currentToken = null;
|
|
state = State.SingleQuote;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.StringLiteral);
|
|
}
|
|
break;
|
|
default:
|
|
{
|
|
if(char.IsWhiteSpace(ch)) { }
|
|
else if(char.IsAsciiLetter(ch) || ch == '_')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Name;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
|
|
}
|
|
else if(char.IsDigit(ch))
|
|
{
|
|
lastIndex = index;
|
|
state = State.Integer;
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Numeral, data: new Token.NumeralData(new INumeral.Integer(ch - '0')));
|
|
}
|
|
else
|
|
{
|
|
throw new NotImplementedException(ch.ToString());
|
|
}
|
|
}
|
|
break;
|
|
}
|
|
}
|
|
break;
|
|
case State.Quote:
|
|
{
|
|
if(ch == '\\')
|
|
{
|
|
state = State.QuoteBackslash;
|
|
}
|
|
else if(ch == '"')
|
|
{
|
|
lastIndex = index;
|
|
state = State.String;
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.StringLiteral);
|
|
}
|
|
else
|
|
{
|
|
currentToken.type = TokenType.StringLiteral;
|
|
currentToken.region.end = new(currentLocation);
|
|
}
|
|
}
|
|
else
|
|
{
|
|
AppendDataChar(ch);
|
|
}
|
|
}
|
|
break;
|
|
case State.QuoteBackslash:
|
|
{
|
|
switch(ch)
|
|
{
|
|
case 'a':
|
|
{
|
|
AppendDataChar('\u0007');
|
|
state = State.Quote;
|
|
}
|
|
break;
|
|
case 'b':
|
|
{
|
|
AppendDataChar('\u0008');
|
|
state = State.Quote;
|
|
}
|
|
break;
|
|
case 't':
|
|
{
|
|
AppendDataChar('\t');
|
|
state = State.Quote;
|
|
}
|
|
break;
|
|
case 'n':
|
|
case '\n':
|
|
{
|
|
AppendDataChar('\n');
|
|
state = State.Quote;
|
|
}
|
|
break;
|
|
case 'v':
|
|
{
|
|
AppendDataChar('\u000b');
|
|
state = State.Quote;
|
|
}
|
|
break;
|
|
case 'f':
|
|
{
|
|
AppendDataChar('\u000c');
|
|
state = State.Quote;
|
|
}
|
|
break;
|
|
case 'r':
|
|
{
|
|
AppendDataChar('\r');
|
|
state = State.Quote;
|
|
}
|
|
break;
|
|
case '\\':
|
|
{
|
|
AppendDataChar('\\');
|
|
state = State.Quote;
|
|
}
|
|
break;
|
|
case '"':
|
|
{
|
|
AppendDataChar('"');
|
|
state = State.Quote;
|
|
}
|
|
break;
|
|
case '\'':
|
|
{
|
|
AppendDataChar('\'');
|
|
state = State.Quote;
|
|
}
|
|
break;
|
|
case 'z':
|
|
{
|
|
state = State.QuoteBackslashZ;
|
|
}
|
|
break;
|
|
default: throw new Exception($"Unknown escape sequence: \\{ch}");
|
|
}
|
|
}
|
|
break;
|
|
case State.QuoteBackslashZ:
|
|
{
|
|
if(ch == '\\')
|
|
{
|
|
state = State.QuoteBackslash;
|
|
}
|
|
else if(ch == '"')
|
|
{
|
|
lastIndex = index;
|
|
state = State.String;
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.StringLiteral);
|
|
}
|
|
else
|
|
{
|
|
currentToken.type = TokenType.StringLiteral;
|
|
currentToken.region.end = new(currentLocation);
|
|
currentToken.data = new Token.StringData("");
|
|
}
|
|
}
|
|
else if(!char.IsWhiteSpace(ch))
|
|
{
|
|
AppendDataChar(ch);
|
|
state = State.Quote;
|
|
}
|
|
else
|
|
{
|
|
// Noop, https://www.lua.org/manual/5.4/manual.html#3.1:
|
|
// "The escape sequence '\z' skips the following span of whitespace characters, including line breaks;"
|
|
}
|
|
}
|
|
break;
|
|
case State.SingleQuote:
|
|
{
|
|
if(ch == '\\')
|
|
{
|
|
state = State.SingleQuoteBackslash;
|
|
}
|
|
else if(ch == '\'')
|
|
{
|
|
lastIndex = index;
|
|
state = State.String;
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.StringLiteral);
|
|
}
|
|
else
|
|
{
|
|
currentToken.type = TokenType.StringLiteral;
|
|
currentToken.region.end = new(currentLocation);
|
|
currentToken.data = new Token.StringData("");
|
|
}
|
|
}
|
|
else
|
|
{
|
|
AppendDataChar(ch);
|
|
}
|
|
}
|
|
break;
|
|
case State.SingleQuoteBackslash:
|
|
{
|
|
switch(ch)
|
|
{
|
|
case 'a':
|
|
{
|
|
AppendDataChar('\u0007');
|
|
state = State.SingleQuote;
|
|
}
|
|
break;
|
|
case 'b':
|
|
{
|
|
AppendDataChar('\u0008');
|
|
state = State.SingleQuote;
|
|
}
|
|
break;
|
|
case 't':
|
|
{
|
|
AppendDataChar('\t');
|
|
state = State.SingleQuote;
|
|
}
|
|
break;
|
|
case 'n':
|
|
case '\n':
|
|
{
|
|
AppendDataChar('\n');
|
|
state = State.SingleQuote;
|
|
}
|
|
break;
|
|
case 'v':
|
|
{
|
|
AppendDataChar('\u000b');
|
|
state = State.SingleQuote;
|
|
}
|
|
break;
|
|
case 'f':
|
|
{
|
|
AppendDataChar('\u000c');
|
|
state = State.SingleQuote;
|
|
}
|
|
break;
|
|
case 'r':
|
|
{
|
|
AppendDataChar('\r');
|
|
state = State.SingleQuote;
|
|
}
|
|
break;
|
|
case '\\':
|
|
{
|
|
AppendDataChar('\\');
|
|
state = State.SingleQuote;
|
|
}
|
|
break;
|
|
case '"':
|
|
{
|
|
AppendDataChar('"');
|
|
state = State.SingleQuote;
|
|
}
|
|
break;
|
|
case '\'':
|
|
{
|
|
AppendDataChar('\'');
|
|
state = State.SingleQuote;
|
|
}
|
|
break;
|
|
case 'z':
|
|
{
|
|
state = State.SingleQuoteBackslashZ;
|
|
}
|
|
break;
|
|
default: throw new Exception($"Unknown escape sequence: \\{ch}");
|
|
}
|
|
}
|
|
break;
|
|
case State.SingleQuoteBackslashZ:
|
|
{
|
|
if(ch == '\\')
|
|
{
|
|
state = State.SingleQuoteBackslash;
|
|
}
|
|
else if(ch == '\'')
|
|
{
|
|
lastIndex = index;
|
|
state = State.String;
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.StringLiteral);
|
|
}
|
|
else
|
|
{
|
|
currentToken.type = TokenType.StringLiteral;
|
|
currentToken.region.end = new(currentLocation);
|
|
}
|
|
}
|
|
else if(!char.IsWhiteSpace(ch))
|
|
{
|
|
AppendDataChar(ch);
|
|
state = State.SingleQuote;
|
|
}
|
|
else
|
|
{
|
|
// Noop, https://www.lua.org/manual/5.4/manual.html#3.1:
|
|
// "The escape sequence '\z' skips the following span of whitespace characters, including line breaks;"
|
|
}
|
|
}
|
|
break;
|
|
case State.String:
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentToken.type = TokenType.StringLiteral;
|
|
//currentToken.region.end = new(currentLocation);
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
break;
|
|
case State.Name:
|
|
{
|
|
if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Name;
|
|
currentToken!.type = TokenType.Name;
|
|
AppendDataChar(ch);
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentToken.type = TokenType.Name;
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.Zero:
|
|
{
|
|
if(ch is 'x' or 'X')
|
|
{
|
|
currentToken!.type = null;
|
|
state = State.HexNumberX;
|
|
}
|
|
else if(ch == '.')
|
|
{
|
|
state = State.Float;
|
|
currentToken!.type = null;
|
|
currentToken!.data = null;
|
|
AppendDataChar('0');
|
|
AppendDataChar('.');
|
|
}
|
|
else if(char.IsAsciiDigit(ch))
|
|
{
|
|
lastIndex = index;
|
|
AppendDataInt(ch);
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.Float:
|
|
{
|
|
if(char.IsAsciiDigit(ch))
|
|
{
|
|
lastIndex = index;
|
|
AppendDataChar(ch);
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentLocation = new(currentToken.region.end);
|
|
currentToken.type = TokenType.Numeral;
|
|
// Parse with the invariant culture so '.' is always accepted as the decimal separator.
currentToken.data = new Token.NumeralData(new INumeral.Float(float.Parse(((Token.StringData)currentToken.data!).data, System.Globalization.CultureInfo.InvariantCulture)));
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
|
|
}
|
|
}
|
|
break;
|
|
case State.HexNumberX:
|
|
{
|
|
if(char.IsAsciiHexDigit(ch))
|
|
{
|
|
lastIndex = index;
|
|
currentToken!.type = TokenType.Numeral;
|
|
AppendDataIntHex(ch);
|
|
state = State.HexNumber;
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.HexNumber:
|
|
{
|
|
if(ch == 'p')
|
|
{
|
|
currentToken!.type = null;
|
|
state = State.HexExpNumber;
|
|
}
|
|
else if(char.IsAsciiHexDigit(ch))
|
|
{
|
|
lastIndex = index;
|
|
currentToken!.type = TokenType.Numeral;
|
|
AppendDataIntHex(ch);
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.Integer:
|
|
{
|
|
if(ch == 'e')
|
|
{
|
|
currentToken!.type = null;
|
|
state = State.ExpNumber;
|
|
}
|
|
else if(ch == '.')
|
|
{
|
|
currentToken!.type = null;
|
|
currentToken.data = new Token.StringData($"{((INumeral.Integer)((Token.NumeralData)currentToken!.data!).numeral).value}.");
|
|
state = State.Float;
|
|
}
|
|
else if(char.IsAsciiDigit(ch))
|
|
{
|
|
lastIndex = index;
|
|
currentToken!.type = TokenType.Numeral;
|
|
AppendDataInt(ch);
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.SquareOpen:
|
|
{
|
|
if(ch == '[')
|
|
{
|
|
currentToken = new Token(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.StringLiteral);
|
|
state = State.StringWithLongBracket;
|
|
}
|
|
else if(ch == '=')
|
|
{
|
|
openingLongBracketLevel = 1;
|
|
state = State.StringStartLongBracket;
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.Comma:
|
|
case State.RoundOpen:
|
|
case State.RoundClosed:
|
|
case State.CurlyOpen:
|
|
case State.CurlyClosed:
|
|
case State.Plus:
|
|
case State.TildeEquals:
|
|
case State.EqualsEquals:
|
|
case State.Hash:
|
|
case State.GtEquals:
|
|
case State.LtEquals:
|
|
case State.SquareClosed:
|
|
case State.Pipe:
|
|
case State.Ampersand:
|
|
case State.Percent:
|
|
case State.Star:
|
|
case State.Semicolon:
|
|
case State.Caret:
|
|
case State.DotDotDot:
|
|
case State.GtGt:
|
|
case State.LtLt:
|
|
case State.ColonColon:
|
|
case State.SlashSlash:
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
break;
|
|
case State.Tilde:
|
|
{
|
|
if(ch == '=')
|
|
{
|
|
lastIndex = index;
|
|
state = State.TildeEquals;
|
|
currentToken!.type = TokenType.TildeEquals;
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.Gt:
|
|
{
|
|
if(ch == '=')
|
|
{
|
|
lastIndex = index;
|
|
state = State.GtEquals;
|
|
currentToken!.type = TokenType.GtEquals;
|
|
}
|
|
else if(ch == '>')
|
|
{
|
|
lastIndex = index;
|
|
state = State.GtGt;
|
|
currentToken!.type = TokenType.GtGt;
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.Lt:
|
|
{
|
|
if(ch == '=')
|
|
{
|
|
lastIndex = index;
|
|
state = State.LtEquals;
|
|
currentToken!.type = TokenType.LtEquals;
|
|
}
|
|
else if(ch == '<')
|
|
{
|
|
lastIndex = index;
|
|
state = State.LtLt;
|
|
currentToken!.type = TokenType.LtLt;
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.Slash:
|
|
{
|
|
if(ch == '/')
|
|
{
|
|
lastIndex = index;
|
|
state = State.SlashSlash;
|
|
currentToken!.type = TokenType.SlashSlash;
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.Dot:
|
|
{
|
|
if(ch == '.')
|
|
{
|
|
lastIndex = index;
|
|
state = State.DotDot;
|
|
currentToken!.type = TokenType.DotDot;
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
|
|
case State.DotDot:
|
|
{
|
|
if(ch == '.')
|
|
{
|
|
lastIndex = index;
|
|
state = State.DotDotDot;
|
|
currentToken!.type = TokenType.DotDotDot;
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.Colon:
|
|
{
|
|
if(ch == ':')
|
|
{
|
|
lastIndex = index;
|
|
state = State.ColonColon;
|
|
currentToken!.type = TokenType.ColonColon;
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.Equals:
|
|
{
|
|
if(ch == '=')
|
|
{
|
|
lastIndex = index;
|
|
state = State.EqualsEquals;
|
|
currentToken!.type = TokenType.EqualsEquals;
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.Minus:
|
|
{
|
|
if(ch == '-')
|
|
{
|
|
lastIndex = index;
|
|
state = State.SmallCommentStart;
|
|
currentToken = null;
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.SmallCommentStart:
|
|
{
|
|
if(ch == '[')
|
|
{
|
|
state = State.BigCommentStartLongBracket;
|
|
}
|
|
else if(ch == '\n')
|
|
{
|
|
state = State.Start;
|
|
lastIndex = null;
|
|
}
|
|
else
|
|
{
|
|
state = State.SmallComment;
|
|
}
|
|
}
|
|
break;
|
|
case State.SmallComment:
|
|
{
|
|
if(ch == '\n')
|
|
{
|
|
state = State.Start;
|
|
lastIndex = null;
|
|
}
|
|
}
|
|
break;
|
|
case State.BigCommentStartLongBracket:
|
|
{
|
|
if(ch == '=')
|
|
{
|
|
openingLongBracketLevel += 1;
|
|
}
|
|
else if(ch == '[')
|
|
{
|
|
state = State.BigComment;
|
|
}
|
|
else if(ch == '\n')
|
|
{
|
|
state = State.Start;
|
|
}
|
|
else
|
|
{
|
|
state = State.SmallComment;
|
|
}
|
|
}
|
|
break;
|
|
case State.BigComment:
|
|
{
|
|
if(ch == ']')
|
|
{
|
|
state = State.BigCommentEndLongBracket;
|
|
closingLongBracketLevel = 0;
|
|
}
|
|
}
|
|
break;
|
|
case State.BigCommentEndLongBracket:
|
|
{
|
|
if(ch == '=')
|
|
{
|
|
closingLongBracketLevel += 1;
|
|
if(openingLongBracketLevel < closingLongBracketLevel)
|
|
{
|
|
state = State.BigComment;
|
|
}
|
|
}
|
|
else if(ch == ']' && openingLongBracketLevel == closingLongBracketLevel)
|
|
{
|
|
state = State.Start;
|
|
openingLongBracketLevel = 0;
|
|
closingLongBracketLevel = 0;
|
|
}
|
|
else
|
|
{
|
|
closingLongBracketLevel = 0;
|
|
}
|
|
}
|
|
break;
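// Long brackets: a level-n opening bracket is '[' followed by n '=' signs and another '['
// (e.g. [[ ... ]] at level 0, [==[ ... ]==] at level 2), and it is only closed by ']',
// the same number of '=' signs, and another ']'. openingLongBracketLevel and
// closingLongBracketLevel count those '=' signs; the cases above handle long comments
// (--[[ ... ]]) and the string cases below reuse the same matching scheme.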
|
|
case State.StringStartLongBracket:
|
|
{
|
|
if(ch == '=')
|
|
{
|
|
openingLongBracketLevel += 1;
|
|
|
|
}
|
|
else if(ch == '[')
|
|
{
|
|
state = State.StringWithLongBracket;
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.StringWithLongBracket:
|
|
{
|
|
if(ch == ']')
|
|
{
|
|
state = State.StringEndLongBracket;
|
|
closingLongBracketLevel = 0;
|
|
}
|
|
else
|
|
{
|
|
AppendDataChar(ch);
|
|
}
|
|
}
|
|
break;
|
|
case State.StringEndLongBracket:
|
|
{
|
|
if(ch == '=')
|
|
{
|
|
closingLongBracketLevel += 1;
|
|
if(openingLongBracketLevel < closingLongBracketLevel)
|
|
{
|
|
state = State.StringWithLongBracket;
|
|
}
|
|
AppendDataChar(ch);
|
|
}
|
|
else if(ch == ']' && openingLongBracketLevel == closingLongBracketLevel)
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
if((Token.StringData?)currentToken.data == null)
|
|
{
|
|
currentToken.data = new Token.StringData("");
|
|
}
|
|
currentToken.type = TokenType.StringLiteral;
|
|
((Token.StringData)currentToken.data).data = ((Token.StringData)currentToken.data).data.Remove(((Token.StringData)currentToken.data).data.Length - closingLongBracketLevel);
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
openingLongBracketLevel = 0;
|
|
closingLongBracketLevel = 0;
|
|
}
|
|
else
|
|
{
|
|
closingLongBracketLevel = 0;
|
|
AppendDataChar(ch);
|
|
}
|
|
}
|
|
break;
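// The states below (A, An, And, B, Br, Bre, Brea, Break, ...) recognize Lua keywords one
// character at a time, forming a hand-rolled trie: as long as the input keeps matching a
// keyword, the state advances; any other identifier character falls back to the generic
// Name state, and a non-identifier character emits either the keyword token or a Name.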
|
|
case State.A:
|
|
{
|
|
if(ch == 'n')
|
|
{
|
|
lastIndex = index;
|
|
state = State.An;
|
|
AppendDataChar(ch);
|
|
}
|
|
else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Name;
|
|
currentToken!.type = TokenType.Name;
|
|
AppendDataChar(ch);
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentToken.type = TokenType.Name;
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.An:
|
|
{
|
|
if(ch == 'd')
|
|
{
|
|
lastIndex = index;
|
|
state = State.And;
|
|
AppendDataChar(ch);
|
|
}
|
|
else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Name;
|
|
currentToken!.type = TokenType.Name;
|
|
AppendDataChar(ch);
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentToken.type = TokenType.Name;
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.And:
|
|
{
|
|
if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Name;
|
|
currentToken!.type = TokenType.Name;
|
|
AppendDataChar(ch);
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentToken.type = TokenType.And;
|
|
currentToken.data = null;
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.W:
|
|
{
|
|
if(ch == 'h')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Wh;
|
|
AppendDataChar(ch);
|
|
}
|
|
else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Name;
|
|
currentToken!.type = TokenType.Name;
|
|
AppendDataChar(ch);
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentToken.type = TokenType.Name;
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.Wh:
|
|
{
|
|
if(ch == 'i')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Whi;
|
|
AppendDataChar(ch);
|
|
}
|
|
else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Name;
|
|
currentToken!.type = TokenType.Name;
|
|
AppendDataChar(ch);
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentToken.type = TokenType.Name;
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.Whi:
|
|
{
|
|
if(ch == 'l')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Whil;
|
|
AppendDataChar(ch);
|
|
}
|
|
else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Name;
|
|
currentToken!.type = TokenType.Name;
|
|
AppendDataChar(ch);
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentToken.type = TokenType.Name;
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.Whil:
|
|
{
|
|
if(ch == 'e')
|
|
{
|
|
lastIndex = index;
|
|
state = State.While;
|
|
AppendDataChar(ch);
|
|
}
|
|
else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Name;
|
|
currentToken!.type = TokenType.Name;
|
|
AppendDataChar(ch);
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentToken.type = TokenType.Name;
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.While:
|
|
{
|
|
if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Name;
|
|
currentToken!.type = TokenType.Name;
|
|
AppendDataChar(ch);
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentToken.type = TokenType.While;
|
|
currentToken.data = null;
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.B:
|
|
{
|
|
if(ch == 'r')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Br;
|
|
AppendDataChar(ch);
|
|
}
|
|
else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Name;
|
|
currentToken!.type = TokenType.Name;
|
|
AppendDataChar(ch);
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentToken.type = TokenType.Name;
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.Br:
|
|
{
|
|
if(ch == 'e')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Bre;
|
|
AppendDataChar(ch);
|
|
}
|
|
else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Name;
|
|
currentToken!.type = TokenType.Name;
|
|
AppendDataChar(ch);
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentToken.type = TokenType.Name;
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.Bre:
|
|
{
|
|
if(ch == 'a')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Brea;
|
|
AppendDataChar(ch);
|
|
}
|
|
else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Name;
|
|
currentToken!.type = TokenType.Name;
|
|
AppendDataChar(ch);
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentToken.type = TokenType.Name;
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.Brea:
|
|
{
|
|
if(ch == 'k')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Break;
|
|
AppendDataChar(ch);
|
|
}
|
|
else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Name;
|
|
currentToken!.type = TokenType.Name;
|
|
AppendDataChar(ch);
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentToken.type = TokenType.Name;
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.Break:
|
|
{
|
|
if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Name;
|
|
currentToken!.type = TokenType.Name;
|
|
AppendDataChar(ch);
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentToken.type = TokenType.Break;
|
|
currentToken.data = null;
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.G:
|
|
{
|
|
if(ch == 'o')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Go;
|
|
AppendDataChar(ch);
|
|
}
|
|
else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Name;
|
|
currentToken!.type = TokenType.Name;
|
|
AppendDataChar(ch);
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentToken.type = TokenType.Name;
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.Go:
|
|
{
|
|
if(ch == 't')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Got;
|
|
AppendDataChar(ch);
|
|
}
|
|
else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Name;
|
|
currentToken!.type = TokenType.Name;
|
|
AppendDataChar(ch);
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentToken.type = TokenType.Name;
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.Got:
|
|
{
|
|
if(ch == 'o')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Goto;
|
|
AppendDataChar(ch);
|
|
}
|
|
else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Name;
|
|
currentToken!.type = TokenType.Name;
|
|
AppendDataChar(ch);
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentToken.type = TokenType.Name;
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.Goto:
|
|
{
|
|
if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Name;
|
|
currentToken!.type = TokenType.Name;
|
|
AppendDataChar(ch);
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentToken.type = TokenType.Goto;
|
|
currentToken.data = null;
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.R:
|
|
{
|
|
if(ch == 'e')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Re;
|
|
AppendDataChar(ch);
|
|
}
|
|
else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Name;
|
|
currentToken!.type = TokenType.Name;
|
|
AppendDataChar(ch);
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentToken.type = TokenType.Name;
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.Re:
|
|
{
|
|
if(ch == 't')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Ret;
|
|
AppendDataChar(ch);
|
|
}
|
|
else if(ch == 'p')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Rep;
|
|
AppendDataChar(ch);
|
|
}
|
|
else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Name;
|
|
currentToken!.type = TokenType.Name;
|
|
AppendDataChar(ch);
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentToken.type = TokenType.Name;
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.Ret:
|
|
{
|
|
if(ch == 'u')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Retu;
|
|
AppendDataChar(ch);
|
|
}
|
|
else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Name;
|
|
currentToken!.type = TokenType.Name;
|
|
AppendDataChar(ch);
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentToken.type = TokenType.Name;
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.Retu:
|
|
{
|
|
if(ch == 'r')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Retur;
|
|
AppendDataChar(ch);
|
|
}
|
|
else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Name;
|
|
currentToken!.type = TokenType.Name;
|
|
AppendDataChar(ch);
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentToken.type = TokenType.Name;
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.Retur:
|
|
{
|
|
if(ch == 'n')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Return;
|
|
AppendDataChar(ch);
|
|
}
|
|
else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Name;
|
|
currentToken!.type = TokenType.Name;
|
|
AppendDataChar(ch);
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentToken.type = TokenType.Name;
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.Return:
|
|
{
|
|
if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Name;
|
|
currentToken!.type = TokenType.Name;
|
|
AppendDataChar(ch);
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentToken.type = TokenType.Return;
|
|
currentToken.data = null;
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.Rep:
|
|
{
|
|
if(ch == 'e')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Repe;
|
|
AppendDataChar(ch);
|
|
}
|
|
else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Name;
|
|
currentToken!.type = TokenType.Name;
|
|
AppendDataChar(ch);
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentToken.type = TokenType.Name;
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.Repe:
|
|
{
|
|
if(ch == 'a')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Repea;
|
|
AppendDataChar(ch);
|
|
}
|
|
else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Name;
|
|
currentToken!.type = TokenType.Name;
|
|
AppendDataChar(ch);
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentToken.type = TokenType.Name;
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.Repea:
|
|
{
|
|
if(ch == 't')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Repeat;
|
|
AppendDataChar(ch);
|
|
}
|
|
else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Name;
|
|
currentToken!.type = TokenType.Name;
|
|
AppendDataChar(ch);
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentToken.type = TokenType.Name;
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.Repeat:
|
|
{
|
|
if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Name;
|
|
currentToken!.type = TokenType.Name;
|
|
AppendDataChar(ch);
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentToken.type = TokenType.Repeat;
|
|
currentToken.data = null;
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.N:
|
|
{
|
|
if(ch == 'i')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Ni;
|
|
AppendDataChar(ch);
|
|
}
|
|
else if(ch == 'o')
|
|
{
|
|
lastIndex = index;
|
|
state = State.No;
|
|
AppendDataChar(ch);
|
|
}
|
|
else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Name;
|
|
currentToken!.type = TokenType.Name;
|
|
AppendDataChar(ch);
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentToken.type = TokenType.Name;
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
|
|
case State.Ni:
|
|
{
|
|
if(ch == 'l')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Nil;
|
|
AppendDataChar(ch);
|
|
}
|
|
else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
|
|
{
|
|
lastIndex = index;
|
|
state = State.Name;
|
|
currentToken!.type = TokenType.Name;
|
|
AppendDataChar(ch);
|
|
}
|
|
else
|
|
{
|
|
if(currentToken == null || currentToken.type == null)
|
|
{
|
|
throw new Exception($"Lexer error at {currentLocation}");
|
|
}
|
|
currentToken.type = TokenType.Name;
|
|
currentLocation = new(currentToken.region.end);
|
|
tokens.Add(currentToken);
|
|
currentToken = null;
|
|
index = lastIndex!.Value;
|
|
lastIndex = null;
|
|
state = State.Start;
|
|
}
|
|
}
|
|
break;
			case State.Nil:
			{
				if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Nil;
					currentToken.data = null;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.No:
			{
				if(ch == 't')
				{
					lastIndex = index;
					state = State.Not;
					AppendDataChar(ch);
				}
				else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Name;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.Not:
			{
				if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Not;
					currentToken.data = null;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.T:
			{
				if(ch == 'h')
				{
					lastIndex = index;
					state = State.Th;
					AppendDataChar(ch);
				}
				else if(ch == 'r')
				{
					lastIndex = index;
					state = State.Tr;
					AppendDataChar(ch);
				}
				else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Name;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.Th:
			{
				if(ch == 'e')
				{
					lastIndex = index;
					state = State.The;
					AppendDataChar(ch);
				}
				else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Name;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.The:
			{
				if(ch == 'n')
				{
					lastIndex = index;
					state = State.Then;
					AppendDataChar(ch);
				}
				else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Name;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.Then:
			{
				if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Then;
					currentToken.data = null;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.Tr:
			{
				if(ch == 'u')
				{
					lastIndex = index;
					state = State.Tru;
					AppendDataChar(ch);
				}
				else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Name;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.Tru:
			{
				if(ch == 'e')
				{
					lastIndex = index;
					state = State.True;
					AppendDataChar(ch);
				}
				else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Name;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.True:
			{
				if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.True;
					currentToken.data = null;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.E:
			{
				if(ch == 'l')
				{
					lastIndex = index;
					state = State.El;
					AppendDataChar(ch);
				}
				else if(ch == 'n')
				{
					lastIndex = index;
					state = State.En;
					AppendDataChar(ch);
				}
				else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Name;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.El:
			{
				if(ch == 's')
				{
					lastIndex = index;
					state = State.Els;
					AppendDataChar(ch);
				}
				else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Name;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.Els:
			{
				if(ch == 'e')
				{
					lastIndex = index;
					state = State.Else;
					AppendDataChar(ch);
				}
				else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Name;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
break;
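			// "else" is both a complete keyword and a prefix of "elseif", so State.Else
			// only emits TokenType.Else when the next character is neither 'i' nor a
			// character that could extend an identifier.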
			case State.Else:
			{
				if(ch == 'i')
				{
					lastIndex = index;
					state = State.Elsei;
					AppendDataChar(ch);
				}
				else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Else;
					currentToken.data = null;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.Elsei:
			{
				if(ch == 'f')
				{
					lastIndex = index;
					state = State.Elseif;
					AppendDataChar(ch);
				}
				else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Name;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.Elseif:
			{
				if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Elseif;
					currentToken.data = null;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.En:
			{
				if(ch == 'd')
				{
					lastIndex = index;
					state = State.End;
					AppendDataChar(ch);
				}
				else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Name;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.End:
			{
				if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.End;
					currentToken.data = null;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.O:
			{
				if(ch == 'r')
				{
					lastIndex = index;
					state = State.Or;
					AppendDataChar(ch);
				}
				else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Name;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.Or:
			{
				if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Or;
					currentToken.data = null;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.D:
			{
				if(ch == 'o')
				{
					lastIndex = index;
					state = State.Do;
					AppendDataChar(ch);
				}
				else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Name;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.Do:
			{
				if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Do;
					currentToken.data = null;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.I:
			{
				if(ch == 'f')
				{
					lastIndex = index;
					state = State.If;
					AppendDataChar(ch);
				}
				else if(ch == 'n')
				{
					lastIndex = index;
					state = State.In;
					AppendDataChar(ch);
				}
				else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Name;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.In:
			{
				if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.In;
					currentToken.data = null;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.If:
			{
				if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.If;
					currentToken.data = null;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.F:
			{
				if(ch == 'u')
				{
					lastIndex = index;
					state = State.Fu;
					AppendDataChar(ch);
				}
				else if(ch == 'a')
				{
					lastIndex = index;
					state = State.Fa;
					AppendDataChar(ch);
				}
				else if(ch == 'o')
				{
					lastIndex = index;
					state = State.Fo;
					AppendDataChar(ch);
				}
				else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Name;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.Fu:
			{
				if(ch == 'n')
				{
					lastIndex = index;
					state = State.Fun;
					AppendDataChar(ch);
				}
				else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Name;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.Fun:
			{
				if(ch == 'c')
				{
					lastIndex = index;
					state = State.Func;
					AppendDataChar(ch);
				}
				else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Name;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.Func:
			{
				if(ch == 't')
				{
					lastIndex = index;
					state = State.Funct;
					AppendDataChar(ch);
				}
				else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Name;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.Funct:
			{
				if(ch == 'i')
				{
					lastIndex = index;
					state = State.Functi;
					AppendDataChar(ch);
				}
				else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Name;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.Functi:
			{
				if(ch == 'o')
				{
					lastIndex = index;
					state = State.Functio;
					AppendDataChar(ch);
				}
				else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Name;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.Functio:
			{
				if(ch == 'n')
				{
					lastIndex = index;
					state = State.Function;
					AppendDataChar(ch);
				}
				else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Name;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.Function:
			{
				if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Function;
					currentToken.data = null;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.Fa:
			{
				if(ch == 'l')
				{
					lastIndex = index;
					state = State.Fal;
					AppendDataChar(ch);
				}
				else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Name;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.Fal:
			{
				if(ch == 's')
				{
					lastIndex = index;
					state = State.Fals;
					AppendDataChar(ch);
				}
				else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Name;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.Fals:
			{
				if(ch == 'e')
				{
					lastIndex = index;
					state = State.False;
					AppendDataChar(ch);
				}
				else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Name;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.False:
			{
				if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.False;
					currentToken.data = null;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.Fo:
			{
				if(ch == 'r')
				{
					lastIndex = index;
					state = State.For;
					AppendDataChar(ch);
				}
				else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Name;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.For:
			{
				if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.For;
					currentToken.data = null;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.L:
			{
				if(ch == 'o')
				{
					lastIndex = index;
					state = State.Lo;
					AppendDataChar(ch);
				}
				else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Name;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.Lo:
			{
				if(ch == 'c')
				{
					lastIndex = index;
					state = State.Loc;
					AppendDataChar(ch);
				}
				else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Name;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.Loc:
			{
				if(ch == 'a')
				{
					lastIndex = index;
					state = State.Loca;
					AppendDataChar(ch);
				}
				else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Name;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.Loca:
			{
				if(ch == 'l')
				{
					lastIndex = index;
					state = State.Local;
					AppendDataChar(ch);
				}
				else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Name;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.Local:
			{
				if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Local;
					currentToken.data = null;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.U:
			{
				if(ch == 'n')
				{
					lastIndex = index;
					state = State.Un;
					AppendDataChar(ch);
				}
				else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Name;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.Un:
			{
				if(ch == 't')
				{
					lastIndex = index;
					state = State.Unt;
					AppendDataChar(ch);
				}
				else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Name;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.Unt:
			{
				if(ch == 'i')
				{
					lastIndex = index;
					state = State.Unti;
					AppendDataChar(ch);
				}
				else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Name;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.Unti:
			{
				if(ch == 'l')
				{
					lastIndex = index;
					state = State.Until;
					AppendDataChar(ch);
				}
				else if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Name;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
			break;
			case State.Until:
			{
				if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
				{
					lastIndex = index;
					state = State.Name;
					currentToken!.type = TokenType.Name;
					AppendDataChar(ch);
				}
				else
				{
					if(currentToken == null || currentToken.type == null)
					{
						throw new Exception($"Lexer error at {currentLocation}");
					}
					currentToken.type = TokenType.Until;
					currentToken.data = null;
					currentLocation = new(currentToken.region.end);
					tokens.Add(currentToken);
					currentToken = null;
					index = lastIndex!.Value;
					lastIndex = null;
					state = State.Start;
				}
			}
break;
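
			// Any state without an explicit case above ends up here.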
			default:
				throw new NotImplementedException(state.ToString());
		}
}
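
	// Tokenizer DFA states. Keyword states are grouped by how many characters of
	// the keyword have been read so far (e.g. F, Fu, Fun, ..., Function).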
	private enum State
	{
		Start,
		Quote, SingleQuote, Name, Integer, Float, Zero,
		A, B, D, E, F, G, I, L, N, O, R, T, U, W,
		Plus, Minus, Star, Slash, Percent, Caret, Hash,
		Ampersand, Tilde, Pipe, Lt, Gt, Equals, RoundOpen, RoundClosed, CurlyOpen, CurlyClosed, SquareOpen, SquareClosed, StringStartLongBracket, StringWithLongBracket, StringEndLongBracket,
		Colon, Semicolon, Comma, Dot,

		An, Br, Do, El, En, Fa, Fo, Fu, Go, If, In, Lo, Ni, No, Or, Re, Th, Tr, Un, Wh,
		LtLt, GtGt, SlashSlash, EqualsEquals, TildeEquals, LtEquals, GtEquals, ColonColon, DotDot,
		SmallCommentStart, QuoteBackslash, SingleQuoteBackslash, String, HexNumberX, ExpNumber,

		And, Bre, Els, End, Fal, For, Fun, Got, Loc, Nil, Not, Rep, Ret, The, Tru, Unt, Whi,
		DotDotDot, HexNumber, QuoteBackslashZ, SingleQuoteBackslashZ,
		SmallComment, BigComment, BigCommentStartLongBracket, BigCommentEndLongBracket,

		Brea, Else, Fals, Func, Goto, Loca, Repe, Retu, Then, True, Unti, Whil, HexExpNumber,

		Break, Elsei, False, Funct, Local, Repea, Retur, Until, While,

		Elseif, Functi, Repeat, Return,

		Functio,

		Function,
	}
}
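
// A source region (start/end) and a single source position. Lines and columns are
// stored zero-based; CodeLocation.ToString prints them one-based for error messages.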
class CodeRegion(CodeLocation start, CodeLocation end)
{
	public CodeLocation start = start;
	public CodeLocation end = end;

	public override string ToString()
	{
		return $"{start}-{end}";
	}
}

class CodeLocation(int line, int col)
{
	public int line = line;
	public int col = col;

	public CodeLocation(CodeLocation other) : this(line: other.line, col: other.col) { }

	public override string ToString()
	{
		return $"{line + 1}:{col + 1}";
	}
}
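
// A single token: its source region, its (possibly still provisional) type, and an
// optional payload (identifier/string text or a numeric value).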
class Token(CodeRegion region, TokenType? type = null, Token.IData? data = null)
{
	public CodeRegion region = region;
	public IData? data = data;
	public TokenType? type = type;

	public interface IData { }
	public class NumeralData(INumeral numeral) : IData
	{
		public INumeral numeral = numeral;
		public override string ToString()
		{
			return $"NumeralData {numeral}";
		}
	}
	public class StringData(string data) : IData
	{
		public string data = data;
		public override string ToString()
		{
			return $"StringData \"{data}\"";
		}
	}
}
public enum TokenType
{
	Name,
	And, Break, Do, Else, Elseif, End,
	False, For, Function, Goto, If, In,
	Local, Nil, Not, Or, Repeat, Return,
	Then, True, Until, While,
	Plus, Minus, Star, Slash, Percent, Caret, Hash,
	Ampersand, Tilde, Pipe, LtLt, GtGt, SlashSlash,
	EqualsEquals, TildeEquals, LtEquals, GtEquals, Lt, Gt, Equals,
	RoundOpen, RoundClosed, CurlyOpen, CurlyClosed, SquareOpen, SquareClosed, ColonColon,
	Semicolon, Colon, Comma, Dot, DotDot, DotDotDot,
	Numeral,
	StringLiteral,
}
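
// Numeric literal payloads. The JsonDerivedType attributes allow polymorphic
// System.Text.Json serialization with a type discriminator; RawEqual compares raw
// values across the Integer/Float divide.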
[JsonDerivedType(typeof(Integer), typeDiscriminator: "int")]
[JsonDerivedType(typeof(Float), typeDiscriminator: "float")]
public interface INumeral
{
	public class Integer(int value) : INumeral
	{
		public int value = value;

		public bool RawEqual(INumeral other)
		{
			if(other is Integer integer)
			{
				return integer.value == value;
			}
			// TODO: Check if this is actually doing what is expected
			return ((Float)other).value == value;
		}
		public override string ToString()
		{
			return $"Numeral Integer {value}";
		}
	}
	public class Float(float value) : INumeral
	{
		public float value = value;

		public bool RawEqual(INumeral other)
		{
			if(other is Float float_val)
			{
				return float_val.value == value;
			}
			// TODO: Check if this is actually doing what is expected
			return ((Integer)other).value == value;
		}
		public override string ToString()
		{
			return $"Numeral Float {value}";
		}
	}

	public bool RawEqual(INumeral other);
}
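
// Usage sketch (illustrative only; assumes the caller is compiled into the same
// project and that the input is well-formed Lua source):
//
//   var tokens = new Tokenizer().Tokenize("local answer = 42");
//   foreach(var token in tokens)
//   {
//       Console.WriteLine($"{token.type} {token.data} @ {token.region}");
//   }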