diff --git a/LuaTypes.cs b/LuaTypes.cs
new file mode 100644
index 0000000..49f479d
--- /dev/null
+++ b/LuaTypes.cs
@@ -0,0 +1,47 @@
+using System.Text.Json.Serialization;
+
+namespace luaaaaah;
+
+[JsonDerivedType(typeof(Integer), typeDiscriminator: "int")]
+[JsonDerivedType(typeof(Float), typeDiscriminator: "float")]
+public interface INumeral
+{
+    public class Integer(int value) : INumeral
+    {
+        public int value = value;
+
+        public bool RawEqual(INumeral other)
+        {
+            if(other is Integer integer)
+            {
+                return integer.value == value;
+            }
+            // TODO: Check if this is actually doing what is expected
+            return ((Float)other).value == value;
+        }
+        public override string ToString()
+        {
+            return $"Numeral Integer {value}";
+        }
+    }
+    public class Float(float value) : INumeral
+    {
+        public float value = value;
+
+        public bool RawEqual(INumeral other)
+        {
+            if(other is Float float_val)
+            {
+                return float_val.value == value;
+            }
+            // TODO: Check if this is actually doing what is expected
+            return ((Integer)other).value == value;
+        }
+        public override string ToString()
+        {
+            return $"Numeral Float {value}";
+        }
+    }
+
+    public bool RawEqual(INumeral other);
+}
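
Editor's note, not part of the patch: the TODO inside RawEqual concerns the mixed Integer/Float branch, which compares an int field against a float field and therefore goes through C#'s implicit int-to-float conversion. The short standalone sketch below illustrates that behaviour, assuming the LuaTypes.cs definitions above are compiled into the same luaaaaah project; the RawEqualSketch class name is illustrative only.

using System;
using luaaaaah;

internal static class RawEqualSketch
{
    private static void Main()
    {
        var two = new INumeral.Integer(2);
        Console.WriteLine(two.RawEqual(new INumeral.Integer(2)));   // True: same-type branch
        Console.WriteLine(two.RawEqual(new INumeral.Float(2.0f)));  // True: 2 is widened to 2.0f

        // The widening is lossy above 2^24: 16777217 rounds to 16777216f,
        // so this mixed comparison reports True although the values differ.
        var big = new INumeral.Integer(16777217);
        Console.WriteLine(big.RawEqual(new INumeral.Float(16777216f))); // True
    }
}

That lossy rounding above 2^24 is the main thing to check when resolving the TODO.
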
diff --git a/Tokenizer.cs b/Tokenizer.cs
index 562bf91..cdec0a5 100644
--- a/Tokenizer.cs
+++ b/Tokenizer.cs
@@ -1,7 +1,6 @@
 using System;
 using System.Collections.Generic;
 using System.Text;
-using System.Text.Json.Serialization;
 
 namespace luaaaaah;
 class Tokenizer
@@ -82,6 +81,19 @@ class Tokenizer
         currentToken.region.end = new(currentLocation);
     }
 
+    private void TokenizeTerminal(State newState, TokenType type)
+    {
+        lastIndex = index;
+        state = newState;
+        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: type);
+    }
+    private void TokenizeTerminalName(State newState, char ch)
+    {
+        lastIndex = index;
+        state = newState;
+        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
+    }
+
     private void TokenizeChar(char ch)
     {
         switch(state)
@@ -91,264 +103,116 @@ class Tokenizer
                 switch(ch)
                 {
                     case '-':
-                    {
-                        lastIndex = index;
-                        state = State.Minus;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Minus);
-                    } /* tokenizeTerminalBase(TokenType.Minus, TokenizerState.Minus); */
-                    break;
+                        TokenizeTerminal(State.Minus, TokenType.Minus);
+                        break;
                     case ',':
-                    {
-                        lastIndex = index;
-                        state = State.Comma;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Comma);
-                    } /* tokenizeTerminalBase(TokenType.Comma, TokenizerState.Comma); */
-                    break;
+                        TokenizeTerminal(State.Comma, TokenType.Comma);
+                        break;
                     case '=':
-                    {
-                        lastIndex = index;
-                        state = State.Equals;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Equals);
-                    } /* tokenizeTerminalBase(TokenType.Equals, TokenizerState.Equals); */
-                    break;
+                        TokenizeTerminal(State.Equals, TokenType.Equals);
+                        break;
                    case '(':
-                    {
-                        lastIndex = index;
-                        state = State.RoundOpen;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.RoundOpen);
-                    } /* tokenizeTerminalBase(TokenType.RoundOpen, TokenizerState.RoundOpen); */
-                    break;
+                        TokenizeTerminal(State.RoundOpen, TokenType.RoundOpen);
+                        break;
                     case ')':
-                    {
-                        lastIndex = index;
-                        state = State.RoundClosed;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.RoundClosed);
-                    } /* tokenizeTerminalBase(TokenType.RoundClosed, TokenizerState.RoundClosed); */
-                    break;
+                        TokenizeTerminal(State.RoundClosed, TokenType.RoundClosed);
+                        break;
                     case '.':
-                    {
-                        lastIndex = index;
-                        state = State.Dot;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Dot);
-                    } /* tokenizeTerminalBase(TokenType.Dot, TokenizerState.Dot); */
-                    break;
+                        TokenizeTerminal(State.Dot, TokenType.Dot);
+                        break;
                     case ':':
-                    {
-                        lastIndex = index;
-                        state = State.Colon;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Colon);
-                    } /* tokenizeTerminalBase(TokenType.Colon, TokenizerState.Colon); */
-                    break;
+                        TokenizeTerminal(State.Colon, TokenType.Colon);
+                        break;
                     case '{':
-                    {
-                        lastIndex = index;
-                        state = State.CurlyOpen;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.CurlyOpen);
-                    } /* tokenizeTerminalBase(TokenType.CurlyOpen, TokenizerState.CurlyOpen); */
-                    break;
+                        TokenizeTerminal(State.CurlyOpen, TokenType.CurlyOpen);
+                        break;
                     case '}':
-                    {
-                        lastIndex = index;
-                        state = State.CurlyClosed;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.CurlyClosed);
-                    } /* tokenizeTerminalBase(TokenType.CurlyClosed, TokenizerState.CurlyClosed); */
-                    break;
+                        TokenizeTerminal(State.CurlyClosed, TokenType.CurlyClosed);
+                        break;
                     case '[':
-                    {
-                        lastIndex = index;
-                        state = State.SquareOpen;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.SquareOpen);
-                    } /* tokenizeTerminalBase(TokenType.SquareOpen, TokenizerState.SquareOpen); */
-                    break;
+                        TokenizeTerminal(State.SquareOpen, TokenType.SquareOpen);
+                        break;
                     case ']':
-                    {
-                        lastIndex = index;
-                        state = State.SquareClosed;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.SquareClosed);
-                    } /* tokenizeTerminalBase(TokenType.SquareClosed, TokenizerState.SquareClosed); */
-                    break;
+                        TokenizeTerminal(State.SquareClosed, TokenType.SquareClosed);
+                        break;
                     case '+':
-                    {
-                        lastIndex = index;
-                        state = State.Plus;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Plus);
-                    } /* tokenizeTerminalBase(TokenType.Plus, TokenizerState.Plus); */
-                    break;
+                        TokenizeTerminal(State.Plus, TokenType.Plus);
+                        break;
                     case '~':
-                    {
-                        lastIndex = index;
-                        state = State.Tilde;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Tilde);
-                    } /* tokenizeTerminalBase(TokenType.Tilde, TokenizerState.Tilde); */
-                    break;
+                        TokenizeTerminal(State.Tilde, TokenType.Tilde);
+                        break;
                     case '>':
-                    {
-                        lastIndex = index;
-                        state = State.Gt;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Gt);
-                    } /* tokenizeTerminalBase(TokenType.Gt, TokenizerState.Gt); */
-                    break;
+                        TokenizeTerminal(State.Gt, TokenType.Gt);
+                        break;
                     case '<':
-                    {
-                        lastIndex = index;
-                        state = State.Lt;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Lt);
-                    } /* tokenizeTerminalBase(TokenType.Lt, TokenizerState.Lt); */
-                    break;
+                        TokenizeTerminal(State.Lt, TokenType.Lt);
+                        break;
                     case '#':
-                    {
-                        lastIndex = index;
-                        state = State.Hash;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Hash);
-                    } /* tokenizeTerminalBase(TokenType.Hash, TokenizerState.Hash); */
-                    break;
+                        TokenizeTerminal(State.Hash, TokenType.Hash);
+                        break;
                     case '|':
-                    {
-                        lastIndex = index;
-                        state = State.Pipe;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Pipe);
-                    } /* tokenizeTerminalBase(TokenType.Pipe, TokenizerState.Pipe); */
-                    break;
+                        TokenizeTerminal(State.Pipe, TokenType.Pipe);
+                        break;
                     case '&':
-                    {
-                        lastIndex = index;
-                        state = State.Ampersand;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Ampersand);
-                    } /* tokenizeTerminalBase(TokenType.Ampersand, TokenizerState.Ampersand); */
-                    break;
+                        TokenizeTerminal(State.Ampersand, TokenType.Ampersand);
+                        break;
                     case '%':
-                    {
-                        lastIndex = index;
-                        state = State.Percent;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Percent);
-                    } /* tokenizeTerminalBase(TokenType.Percent, TokenizerState.Percent); */
-                    break;
+                        TokenizeTerminal(State.Percent, TokenType.Percent);
+                        break;
                     case '*':
-                    {
-                        lastIndex = index;
-                        state = State.Star;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Star);
-                    } /* tokenizeTerminalBase(TokenType.Star, TokenizerState.Star); */
-                    break;
+                        TokenizeTerminal(State.Star, TokenType.Star);
+                        break;
                     case '/':
-                    {
-                        lastIndex = index;
-                        state = State.Slash;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Slash);
-                    } /* tokenizeTerminalBase(TokenType.Slash, TokenizerState.Slash); */
-                    break;
+                        TokenizeTerminal(State.Slash, TokenType.Slash);
+                        break;
                     case ';':
-                    {
-                        lastIndex = index;
-                        state = State.Semicolon;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Semicolon);
-                    } /* tokenizeTerminalBase(TokenType.Semicolon, TokenizerState.Semicolon); */
-                    break;
+                        TokenizeTerminal(State.Semicolon, TokenType.Semicolon);
+                        break;
                     case '^':
-                    {
-                        lastIndex = index;
-                        state = State.Caret;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Caret);
-                    } /* tokenizeTerminalBase(TokenType.Caret, TokenizerState.Caret); */
-                    break;
+                        TokenizeTerminal(State.Caret, TokenType.Caret);
+                        break;
                     case 'a':
-                    {
-                        lastIndex = index;
-                        state = State.A;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
-                    } /* tokenizeTerminalStr(TokenType.Name, TokenizerState.A, tokenStr, ch); */
-                    break;
+                        TokenizeTerminalName(State.A, ch);
+                        break;
                     case 'b':
-                    {
-                        lastIndex = index;
-                        state = State.B;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
-                    } /* tokenizeTerminalStr(TokenType.Name, TokenizerState.B, tokenStr, ch); */
-                    break;
+                        TokenizeTerminalName(State.B, ch);
+                        break;
                     case 'd':
-                    {
-                        lastIndex = index;
-                        state = State.D;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
-                    } /* tokenizeTerminalStr(TokenType.Name, TokenizerState.D, tokenStr, ch); */
-                    break;
+                        TokenizeTerminalName(State.D, ch);
+                        break;
                     case 'e':
-                    {
-                        lastIndex = index;
-                        state = State.E;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
-                    } /* tokenizeTerminalStr(TokenType.Name, TokenizerState.E, tokenStr, ch); */
-                    break;
+                        TokenizeTerminalName(State.E, ch);
+                        break;
                     case 'f':
-                    {
-                        lastIndex = index;
-                        state = State.F;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
-                    } /* tokenizeTerminalStr(TokenType.Name, TokenizerState.F, tokenStr, ch); */
-                    break;
+                        TokenizeTerminalName(State.F, ch);
+                        break;
                     case 'i':
-                    {
-                        lastIndex = index;
-                        state = State.I;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
-                    } /* tokenizeTerminalStr(TokenType.Name, TokenizerState.I, tokenStr, ch); */
-                    break;
+                        TokenizeTerminalName(State.I, ch);
+                        break;
                     case 'g':
-                    {
-                        lastIndex = index;
-                        state = State.G;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
-                    } /* tokenizeTerminalStr(TokenType.Name, TokenizerState.G, tokenStr, ch); */
-                    break;
+                        TokenizeTerminalName(State.G, ch);
+                        break;
                     case 'l':
-                    {
-                        lastIndex = index;
-                        state = State.L;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
-                    } /* tokenizeTerminalStr(TokenType.Name, TokenizerState.L, tokenStr, ch); */
-                    break;
+                        TokenizeTerminalName(State.L, ch);
+                        break;
                     case 'n':
-                    {
-                        lastIndex = index;
-                        state = State.N;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
-                    } /* tokenizeTerminalStr(TokenType.Name, TokenizerState.N, tokenStr, ch); */
-                    break;
+                        TokenizeTerminalName(State.N, ch);
+                        break;
                     case 'o':
-                    {
-                        lastIndex = index;
-                        state = State.O;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
-                    } /* tokenizeTerminalStr(TokenType.Name, TokenizerState.O, tokenStr, ch); */
-                    break;
+                        TokenizeTerminalName(State.O, ch);
+                        break;
                     case 'r':
-                    {
-                        lastIndex = index;
-                        state = State.R;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
-                    } /* tokenizeTerminalStr(TokenType.Name, TokenizerState.R, tokenStr, ch); */
-                    break;
+                        TokenizeTerminalName(State.R, ch);
+                        break;
                     case 't':
-                    {
-                        lastIndex = index;
-                        state = State.T;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
-                    } /* tokenizeTerminalStr(TokenType.Name, TokenizerState.T, tokenStr, ch); */
-                    break;
+                        TokenizeTerminalName(State.T, ch);
+                        break;
                     case 'u':
-                    {
-                        lastIndex = index;
-                        state = State.U;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
-                    } /* tokenizeTerminalStr(TokenType.Name, TokenizerState.U, tokenStr, ch); */
-                    break;
+                        TokenizeTerminalName(State.U, ch);
+                        break;
                     case 'w':
-                    {
-                        lastIndex = index;
-                        state = State.W;
-                        currentToken = new(region: new(start: new(currentLocation), end: new(currentLocation)), type: TokenType.Name, data: new Token.StringData($"{ch}"));
-                    } /* tokenizeTerminalStr(TokenType.Name, TokenizerState.W, tokenStr, ch); */
-                    break;
+                        TokenizeTerminalName(State.W, ch);
+                        break;
                     case '0':
                     {
                         lastIndex = index;
@@ -3930,48 +3794,4 @@ public enum TokenType
     Semicolon, Colon, Comma, Dot, DotDot, DotDotDot, Numeral, StringLiteral,
-}
-
-[JsonDerivedType(typeof(Integer), typeDiscriminator: "int")]
-[JsonDerivedType(typeof(Float), typeDiscriminator: "float")]
-public interface INumeral
-{
-    public class Integer(int value) : INumeral
-    {
-        public int value = value;
-
-        public bool RawEqual(INumeral other)
-        {
-            if(other is Integer integer)
-            {
-                return integer.value == value;
-            }
-            // TODO: Check if this is actually doing what is expected
-            return ((Float)other).value == value;
-        }
-        public override string ToString()
-        {
-            return $"Numeral Integer {value}";
-        }
-    }
-    public class Float(float value) : INumeral
-    {
-        public float value = value;
-
-        public bool RawEqual(INumeral other)
-        {
-            if(other is Float float_val)
-            {
-                return float_val.value == value;
-            }
-            // TODO: Check if this is actually doing what is expected
-            return ((Integer)other).value == value;
-        }
-        public override string ToString()
-        {
-            return $"Numeral Float {value}";
-        }
-    }
-
-    public bool RawEqual(INumeral other);
-}
+}
\ No newline at end of file
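
Editor's note, not part of the patch: LuaTypes.cs keeps the System.Text.Json.Serialization using that Tokenizer.cs drops because the JsonDerivedType attributes exist to drive polymorphic JSON serialization of INumeral. Below is a minimal sketch of that use, assuming the types above; IncludeFields is needed because the numerals expose public fields rather than properties, the NumeralJsonSketch class name is illustrative only, and the JSON in the comment is the expected shape rather than verified output of this repository.

using System;
using System.Text.Json;
using luaaaaah;

internal static class NumeralJsonSketch
{
    private static void Main()
    {
        // Public fields such as value are skipped by System.Text.Json unless IncludeFields is set.
        var options = new JsonSerializerOptions { IncludeFields = true };

        INumeral n = new INumeral.Float(1.5f);
        // Serializing through the interface type activates the [JsonDerivedType] metadata,
        // which writes a "$type" discriminator ("int" or "float") next to the payload.
        string json = JsonSerializer.Serialize(n, options);
        Console.WriteLine(json); // expected shape: {"$type":"float","value":1.5}
    }
}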