More deduplication in the tokenizer

0x4261756D 2024-02-28 18:55:31 +01:00
parent 0c93d45dbd
commit 51390b24d3
2 changed files with 171 additions and 1108 deletions
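The deduplication works by lifting a backtrack sequence that was pasted verbatim into dozens of switch arms of TokenizeChar into three shared helpers, which differ only in how much of the pending token they rewrite before emitting it: Backtrack sets a new token type and clears the token data (completed keywords such as `and`, `while`, or `return`), BacktrackNoClear sets the type but keeps the data (Name and StringLiteral tokens, whose accumulated text must survive), and BacktrackNoTypeChange leaves both alone (operator states such as SlashSlash or Tilde, whose type is already final). A condensed sketch of the shared tail, using the Tokenizer's own fields as they appear in the hunks below; the FinishBacktrack helper is a hypothetical further factoring, the commit itself keeps the three methods separate:

private void Backtrack(TokenType newType)
{
	currentToken!.type = newType;
	currentToken.data = null;      // keyword tokens carry no payload
	FinishBacktrack();
}
private void BacktrackNoClear(TokenType newType)
{
	currentToken!.type = newType;  // keep the accumulated StringData
	FinishBacktrack();
}
private void BacktrackNoTypeChange() => FinishBacktrack();
private void FinishBacktrack()     // hypothetical: not in the commit itself
{
	currentLocation = new(currentToken!.region.end); // resume after the emitted token
	tokens.Add(currentToken);
	currentToken = null;
	index = lastIndex!.Value;      // rewind input to the last token boundary
	lastIndex = null;
	state = State.Start;
}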

@ -45,3 +45,27 @@ public interface INumeral
public bool RawEqual(INumeral other);
}
class CodeRegion(CodeLocation start, CodeLocation end)
{
public CodeLocation start = start;
public CodeLocation end = end;
public override string ToString()
{
return $"{start}-{end}";
}
}
class CodeLocation(int line, int col)
{
public int line = line;
public int col = col;
public CodeLocation(CodeLocation other) : this(line: other.line, col: other.col) { }
public override string ToString()
{
return $"{line + 1}:{col + 1}";
}
}
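
CodeRegion and CodeLocation move into this file; the matching removal sits in the final hunk of the tokenizer file below. Locations are stored zero-based but printed one-based, and the copy constructor lets the backtrack helpers snapshot region.end instead of aliasing it. A minimal usage sketch of the classes as declared above:

var start = new CodeLocation(line: 0, col: 0);
var end = new CodeLocation(line: 0, col: 4);
var region = new CodeRegion(start, end);
Console.WriteLine(region);                 // prints "1:1-1:5", one-based for display
var resume = new CodeLocation(region.end); // independent copy, not an alias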

@ -94,6 +94,49 @@ class Tokenizer
currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
}
private void Backtrack(TokenType newType)
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = newType;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
}
private void BacktrackNoClear(TokenType newType)
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = newType;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
}
private void BacktrackNoTypeChange()
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
}
private void TokenizeChar(char ch)
{
switch(state)
@ -547,36 +590,23 @@ class Tokenizer
}
break;
case 'z':
{
state = State.SingleQuoteBackslashZ;
}
break;
case 'x':
{
state = State.SingleQuoteBackslashX;
}
break;
case 'u':
{
state = State.SingleQuoteBackslashU;
}
break;
default: throw new Exception($"Unknown escape sequence: \\{ch}");
}
}
break;
case State.SingleQuoteBackslashU:
{
if(ch == '{')
{
state = State.SingleQuoteBackslashUBracket;
}
else
{
throw new Exception($"Expected `{{` to continue \\u escape sequence at {currentLocation}, got {ch}");
}
}
break;
state = ch == '{'
? State.SingleQuoteBackslashUBracket
: throw new Exception($"Expected `{{` to continue \\u escape sequence at {currentLocation}, got {ch}");
break;
case State.SingleQuoteBackslashUBracket:
{
if(char.IsAsciiHexDigit(ch))
@ -715,18 +745,7 @@ class Tokenizer
break;
case State.String:
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.StringLiteral;
//currentToken.region.end = new(currentLocation);
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.StringLiteral);
}
break;
case State.Name:
@ -740,17 +759,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -776,16 +785,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@ -825,16 +825,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@ -853,16 +844,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@ -887,16 +869,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@ -914,16 +887,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@ -951,16 +915,7 @@ class Tokenizer
case State.ColonColon:
case State.SlashSlash:
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
break;
case State.Tilde:
@ -973,16 +928,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@ -1002,16 +948,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@ -1031,16 +968,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@ -1054,16 +982,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@ -1077,16 +996,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@ -1101,16 +1011,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@ -1124,16 +1025,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@ -1147,16 +1039,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@ -1170,16 +1053,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@ -1274,16 +1148,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoTypeChange();
}
}
break;
@ -1356,17 +1221,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -1387,17 +1242,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -1412,18 +1257,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.And;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.And);
}
}
break;
@ -1444,17 +1278,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -1475,17 +1299,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -1506,17 +1320,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -1537,17 +1341,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -1562,18 +1356,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.While;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.While);
}
}
break;
@ -1594,17 +1377,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -1625,17 +1398,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -1656,17 +1419,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -1687,17 +1440,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -1712,18 +1455,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Break;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Break);
}
}
break;
@ -1744,17 +1476,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -1775,17 +1497,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -1806,17 +1518,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -1831,18 +1533,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Goto;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Goto);
}
}
break;
@ -1863,17 +1554,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -1900,17 +1581,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -1931,17 +1602,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -1962,17 +1623,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -1993,17 +1644,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -2018,18 +1659,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Return;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Return);
}
}
break;
@ -2050,17 +1680,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -2081,17 +1701,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -2112,17 +1722,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -2137,18 +1737,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Repeat;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Repeat);
}
}
break;
@ -2175,17 +1764,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -2206,17 +1785,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -2231,18 +1800,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Nil;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Nil);
}
}
break;
@ -2263,17 +1821,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -2288,18 +1836,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Not;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Not);
}
}
break;
@ -2326,17 +1863,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -2357,17 +1884,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -2388,17 +1905,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -2413,18 +1920,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Then;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Then);
}
}
break;
@ -2445,17 +1941,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -2476,17 +1962,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -2501,18 +1977,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.True;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.True);
}
}
break;
@ -2539,17 +2004,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -2570,17 +2025,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -2601,17 +2046,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -2632,18 +2067,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Else;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Else);
}
}
break;
@ -2664,17 +2088,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -2689,18 +2103,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Elseif;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Elseif);
}
}
break;
@ -2721,17 +2124,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -2746,18 +2139,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.End;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.End);
}
}
break;
@ -2778,17 +2160,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -2803,18 +2175,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Or;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Or);
}
}
break;
@ -2835,17 +2196,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -2860,18 +2211,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Do;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Do);
}
}
break;
@ -2898,17 +2238,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -2923,18 +2253,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.In;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.In);
}
}
break;
@ -2949,18 +2268,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.If;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.If);
}
}
break;
@ -2993,17 +2301,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3024,17 +2322,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3055,17 +2343,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3086,17 +2364,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3117,17 +2385,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3148,17 +2406,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3179,17 +2427,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3204,18 +2442,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Function;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Function);
}
}
break;
@ -3236,17 +2463,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3267,17 +2484,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3298,17 +2505,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3323,18 +2520,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.False;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.False);
}
}
break;
@ -3355,17 +2541,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3380,18 +2556,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.For;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.For);
}
}
break;
@ -3412,17 +2577,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3443,17 +2598,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3474,17 +2619,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3505,17 +2640,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3530,18 +2655,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Local;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Local);
}
}
break;
@ -3562,17 +2676,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3593,17 +2697,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3624,17 +2718,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3655,17 +2739,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Name;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
BacktrackNoClear(TokenType.Name);
}
}
break;
@ -3680,18 +2754,7 @@ class Tokenizer
}
else
{
if(currentToken == null || currentToken.type == null)
{
throw new Exception($"Lexer error at {currentLocation}");
}
currentToken.type = TokenType.Until;
currentToken.data = null;
currentLocation = new(currentToken.region.end);
tokens.Add(currentToken);
currentToken = null;
index = lastIndex!.Value;
lastIndex = null;
state = State.Start;
Backtrack(TokenType.Until);
}
}
break;
@ -3732,31 +2795,7 @@ class Tokenizer
}
}
class CodeRegion(CodeLocation start, CodeLocation end)
{
public CodeLocation start = start;
public CodeLocation end = end;
public override string ToString()
{
return $"{start}-{end}";
}
}
class CodeLocation(int line, int col)
{
public int line = line;
public int col = col;
public CodeLocation(CodeLocation other) : this(line: other.line, col: other.col) { }
public override string ToString()
{
return $"{line + 1}:{col + 1}";
}
}
class Token(CodeRegion region, TokenType? type = null, Token.IData? data = null)
internal class Token(CodeRegion region, TokenType? type = null, Token.IData? data = null)
{
public CodeRegion region = region;
public IData? data = data;
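
Seen from the call sites, the pattern after this commit is uniform: a keyword state either keeps consuming identifier characters as a Name or backtracks with its keyword type. A representative reconstruction of one arm follows; the if-branch body is hypothetical, since the hunks above only show the else branches, and AppendToName stands in for the elided name-building code:

case State.And:                    // "and" consumed so far
	if(char.IsAsciiLetterOrDigit(ch) || ch == '_')
	{
		AppendToName(ch);          // hypothetical: keep building a Name token
		state = State.Name;
	}
	else
	{
		Backtrack(TokenType.And);  // emit the keyword, dropping its string data
	}
	break;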