Initial commit, tokenizer stands (barely)

0x4261756D 2023-06-06 02:16:29 +02:00
commit 294ecb7712
4 changed files with 2970 additions and 0 deletions

.gitignore (vendored, new file)

@@ -0,0 +1,2 @@
/target
.vscode/launch.json

Cargo.lock (generated, new file)

@@ -0,0 +1,7 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3

[[package]]
name = "luaaaaah"
version = "0.1.0"

Cargo.toml (new file)

@@ -0,0 +1,8 @@
[package]
name = "luaaaaah"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]

src/main.rs (new file)

@@ -0,0 +1,2953 @@
use std::{env, fs};
fn main()
{
let args: Vec<String> = env::args().collect();
let file_content = fs::read_to_string(&args[1]).expect("Could not read source file");
match compile(&file_content)
{
Ok(()) =>
{
println!("Done compiling");
}
Err(msg) => println!("ERROR: {}", msg)
}
}
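// For now compile() only lexes the file and prints the resulting token stream.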
fn compile(file_content: &String) -> Result<(), &'static str>
{
let tokens: Vec<Token> = tokenize(&file_content)?;
println!("{:?}", tokens);
return Ok(());
}
#[derive(Debug, Clone)]
enum Token
{
Name(String),
And, Break, Do, Else, Elseif, End,
False, For, Function, Goto, If, In,
Local, Nil, Not, Or, Repeat, Return,
Then, True, Until, While,
Plus, Minus, Star, Slash, Percent, Caret, Hash,
Ampersand, Tilde, Pipe, LtLt, GtGt, SlashSlash,
EqualsEquals, TildeEquals, LtEquals, GtEquals, Lt, Gt, Equals,
RoundOpen, RoundClosed, CurlyOpen, CurlyClosed, SquareOpen, SquareClosed, ColonColon,
Semicolon, Colon, Comma, Dot, DotDot, DotDotDot,
IntLiteral(String),
HexLiteral(String),
StringLiteral(String),
}
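// States of the hand-rolled DFA. The letter-sequence variants (A, An, And, ..., Function)
// spell out keyword prefixes so keywords and ordinary names can share a single pass;
// the remaining variants track symbols, numbers, strings and comments.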
#[derive(Debug, Clone, Copy, PartialEq)]
enum TokenizerState
{
Start,
Quote, Name, Number, Zero,
A, B, D, E, F, G, I, L, N, O, R, T, U, W,
Plus, Minus, Star, Slash, Percent, Caret, Hash,
Ampersand, Tilde, Pipe, Lt, Gt, Equals, RoundOpen, RoundClosed, CurlyOpen, CurlyClosed, SquareOpen, SquareClosed,
Colon, Semicolon, Comma, Dot,
An, Br, Do, El, En, Fa, Fo, Fu, Go, If, In, Lo, Ni, No, Or, Re, Th, Tr, Un, Wh,
LtLt, GtGt, SlashSlash, EqualsEquals, TildeEquals, LtEquals, GtEquals, ColonColon, DotDot,
SmallComment, QuoteBackslash, String, HexNumberX, ExpNumber,
And, Bre, Els, End, Fal, For, Fun, Got, Loc, Nil, Not, Rep, Ret, The, Tru, Unt, Whi,
DotDotDot, HexNumber,
BigComment,
Brea, Else, Fals, Func, Goto, Loca, Repe, Retu, Then, True, Unti, Whil, HexExpNumber,
Break, Elsei, False, Funct, Local, Repea, Retur, Until, While,
Elseif, Functi, Repeat, Return,
Functio,
Function,
}
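// Single-pass tokenizer. `last_index` remembers the last character consumed into the
// pending token; when a character arrives that cannot extend the token, the loop rewinds
// to `last_index`, pushes the token, and re-scans the terminating character from Start.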
fn tokenize(file_content: &String) -> Result<Vec<Token>, &'static str>
{
let mut tokens: Vec<Token> = Vec::new();
let mut state = TokenizerState::Start;
let char_vec: Vec<char> = file_content.chars().collect();
let mut last_index: i32 = -1;
let mut index = 0;
let mut token: Option<Token> = None;
let mut token_str: String = String::new();
while index < char_vec.len()
{
let ch = char_vec[index];
match state
{
TokenizerState::Start =>
{
match ch
{
'-' =>
{
last_index = index as i32;
token = Some(Token::Minus);
state = TokenizerState::Minus;
}
'a' =>
{
last_index = index as i32;
token = Some(Token::Name("a".to_string()));
token_str.push(ch);
state = TokenizerState::A;
}
'b' =>
{
last_index = index as i32;
token = Some(Token::Name("b".to_string()));
token_str.push(ch);
state = TokenizerState::B;
}
'd' =>
{
last_index = index as i32;
token = Some(Token::Name("d".to_string()));
token_str.push(ch);
state = TokenizerState::D;
}
'e' =>
{
last_index = index as i32;
token = Some(Token::Name("e".to_string()));
token_str.push(ch);
state = TokenizerState::E;
}
'f' =>
{
last_index = index as i32;
token = Some(Token::Name("f".to_string()));
token_str.push(ch);
state = TokenizerState::F;
}
'i' =>
{
last_index = index as i32;
token = Some(Token::Name("i".to_string()));
token_str.push(ch);
state = TokenizerState::I;
}
'g' =>
{
last_index = index as i32;
token = Some(Token::Name("g".to_string()));
token_str.push(ch);
state = TokenizerState::G;
}
'l' =>
{
last_index = index as i32;
token = Some(Token::Name("l".to_string()));
token_str.push(ch);
state = TokenizerState::L;
}
'n' =>
{
last_index = index as i32;
token = Some(Token::Name("n".to_string()));
token_str.push(ch);
state = TokenizerState::N;
}
'o' =>
{
last_index = index as i32;
token = Some(Token::Name("o".to_string()));
token_str.push(ch);
state = TokenizerState::O;
}
'r' =>
{
last_index = index as i32;
token = Some(Token::Name("r".to_string()));
token_str.push(ch);
state = TokenizerState::R;
}
't' =>
{
last_index = index as i32;
token = Some(Token::Name("t".to_string()));
token_str.push(ch);
state = TokenizerState::T;
}
'u' =>
{
last_index = index as i32;
token = Some(Token::Name("u".to_string()));
token_str.push(ch);
state = TokenizerState::U;
}
'w' =>
{
last_index = index as i32;
token = Some(Token::Name("w".to_string()));
token_str.push(ch);
state = TokenizerState::W;
}
',' =>
{
last_index = index as i32;
token = Some(Token::Comma);
state = TokenizerState::Comma;
}
'=' =>
{
last_index = index as i32;
token = Some(Token::Equals);
state = TokenizerState::Equals;
}
'(' =>
{
last_index = index as i32;
token = Some(Token::RoundOpen);
state = TokenizerState::RoundOpen;
}
')' =>
{
last_index = index as i32;
token = Some(Token::RoundClosed);
state = TokenizerState::RoundClosed;
}
'.' =>
{
last_index = index as i32;
token = Some(Token::Dot);
state = TokenizerState::Dot;
}
':' =>
{
last_index = index as i32;
token = Some(Token::Colon);
state = TokenizerState::Colon;
}
'{' =>
{
last_index = index as i32;
token = Some(Token::CurlyOpen);
state = TokenizerState::CurlyOpen;
}
'}' =>
{
last_index = index as i32;
token = Some(Token::CurlyClosed);
state = TokenizerState::CurlyClosed;
}
'+' =>
{
last_index = index as i32;
token = Some(Token::Plus);
state = TokenizerState::Plus;
}
'~' =>
{
last_index = index as i32;
token = Some(Token::Tilde);
state = TokenizerState::Tilde;
}
'>' =>
{
last_index = index as i32;
token = Some(Token::Gt);
state = TokenizerState::Gt;
}
'<' =>
{
last_index = index as i32;
token = Some(Token::Lt);
state = TokenizerState::Lt;
}
'#' =>
{
last_index = index as i32;
token = Some(Token::Hash);
state = TokenizerState::Hash;
}
'0' =>
{
last_index = index as i32;
token = Some(Token::IntLiteral("0".to_string()));
token_str.push(ch);
state = TokenizerState::Zero;
}
_ =>
{
if ch.is_whitespace() { }
else if ch.is_ascii_alphabetic()
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else if ch.is_ascii_digit()
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::IntLiteral(token_str.clone()));
state = TokenizerState::Number;
}
else
{
todo!("State {:?}, Char {}, {:?}", state, ch, tokens);
}
}
}
}
TokenizerState::Name =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.clone().unwrap());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
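// Numeric literals: Zero handles a leading '0' so '0x' can branch into the hex states;
// Number and HexNumber keep accumulating digits, while 'e'/'p' hand off to the exponent
// states (not handled yet, so they fall through to the catch-all todo!).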
TokenizerState::Zero =>
{
match ch
{
'x' =>
{
last_index = index as i32;
token_str.push(ch);
token = None;
state = TokenizerState::HexNumberX;
}
_ =>
{
if ch.is_ascii_digit()
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::IntLiteral(token_str.clone()));
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.clone().unwrap());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::HexNumberX =>
{
if ch.is_ascii_hexdigit()
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::HexLiteral(token_str.clone()));
state = TokenizerState::HexNumber;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.clone().unwrap());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
TokenizerState::HexNumber =>
{
match ch
{
'p' =>
{
last_index = index as i32;
token_str.push(ch);
token = None;
state = TokenizerState::HexExpNumber;
}
_ =>
{
if ch.is_ascii_hexdigit()
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::HexLiteral(token_str.clone()));
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.clone().unwrap());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Number =>
{
match ch
{
'e' =>
{
last_index = index as i32;
token_str.push(ch);
token = None;
state = TokenizerState::ExpNumber;
}
_ =>
{
if ch.is_ascii_digit()
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::IntLiteral(token_str.clone()));
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.clone().unwrap());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
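// States for symbols that are already complete: flush the recorded token and rewind so
// the terminating character is re-scanned.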
TokenizerState::Comma | TokenizerState::RoundOpen | TokenizerState::RoundClosed |
TokenizerState::CurlyOpen | TokenizerState::CurlyClosed | TokenizerState::Plus |
TokenizerState::TildeEquals | TokenizerState::EqualsEquals | TokenizerState::Hash |
TokenizerState::GtEquals | TokenizerState::LtEquals =>
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.clone().unwrap());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
TokenizerState::Tilde =>
{
match ch
{
'=' =>
{
last_index = index as i32;
token = Some(Token::TildeEquals);
state = TokenizerState::TildeEquals;
}
_ =>
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.clone().unwrap());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
TokenizerState::Gt =>
{
match ch
{
'>' =>
{
last_index = index as i32;
token = Some(Token::GtGt);
state = TokenizerState::GtGt;
}
'=' =>
{
last_index = index as i32;
token = Some(Token::GtEquals);
state = TokenizerState::GtEquals;
}
_ =>
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.clone().unwrap());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
TokenizerState::Lt =>
{
match ch
{
'>' =>
{
last_index = index as i32;
token = Some(Token::LtLt);
state = TokenizerState::LtLt;
}
'=' =>
{
last_index = index as i32;
token = Some(Token::LtEquals);
state = TokenizerState::LtEquals;
}
_ =>
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.clone().unwrap());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
TokenizerState::Dot =>
{
match ch
{
'.' =>
{
last_index = index as i32;
token = Some(Token::DotDot);
state = TokenizerState::DotDot;
}
_ =>
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.clone().unwrap());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
TokenizerState::Colon =>
{
match ch
{
':' =>
{
last_index = index as i32;
token = Some(Token::ColonColon);
state = TokenizerState::ColonColon;
}
_ =>
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.clone().unwrap());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
TokenizerState::Equals =>
{
match ch
{
'=' =>
{
last_index = index as i32;
token = Some(Token::EqualsEquals);
state = TokenizerState::EqualsEquals;
}
_ =>
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.clone().unwrap());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
TokenizerState::Minus =>
{
match ch
{
'-' =>
{
last_index = index as i32;
token = None;
state = TokenizerState::SmallComment;
}
_ =>
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.clone().unwrap());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
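// Inside a '--' comment: '[' switches to the long-comment state (not handled yet),
// a newline returns to Start, everything else is skipped.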
TokenizerState::SmallComment =>
{
match ch
{
'[' =>
{
last_index = index as i32;
token = None;
state = TokenizerState::BigComment;
}
'\n' =>
{
state = TokenizerState::Start;
last_index = -1;
}
_ =>
{
last_index = index as i32;
}
}
}
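// From here on: keyword-prefix states, advanced letter by letter. Any other identifier
// character drops back to the generic Name state; any terminator flushes either the
// finished keyword or the name collected so far.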
TokenizerState::A =>
{
match ch
{
'n' =>
{
last_index = index as i32;
token = Some(Token::Name("an".to_string()));
token_str.push(ch);
state = TokenizerState::An;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.clone().unwrap());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::An =>
{
match ch
{
'd' =>
{
last_index = index as i32;
token = Some(Token::Name("and".to_string()));
token_str.push(ch);
state = TokenizerState::And;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.unwrap().clone());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::And =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
token = None;
token_str.clear();
state = TokenizerState::Start;
tokens.push(Token::And);
}
}
TokenizerState::W =>
{
match ch
{
'h' =>
{
last_index = index as i32;
token = Some(Token::Name("wh".to_string()));
token_str.push(ch);
state = TokenizerState::Wh;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.clone().unwrap());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Wh =>
{
match ch
{
'i' =>
{
last_index = index as i32;
token = Some(Token::Name("whi".to_string()));
token_str.push(ch);
state = TokenizerState::Whi;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.unwrap().clone());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Whi =>
{
match ch
{
'l' =>
{
last_index = index as i32;
token = Some(Token::Name("whil".to_string()));
token_str.push(ch);
state = TokenizerState::Whil;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.unwrap().clone());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Whil =>
{
match ch
{
'e' =>
{
last_index = index as i32;
token = Some(Token::Name("while".to_string()));
token_str.push(ch);
state = TokenizerState::While;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.unwrap().clone());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::While =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
token = None;
token_str.clear();
state = TokenizerState::Start;
tokens.push(Token::While);
}
}
TokenizerState::B =>
{
match ch
{
'r' =>
{
last_index = index as i32;
token = Some(Token::Name("br".to_string()));
token_str.push(ch);
state = TokenizerState::Br;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.clone().unwrap());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Br =>
{
match ch
{
'e' =>
{
last_index = index as i32;
token = Some(Token::Name("bre".to_string()));
token_str.push(ch);
state = TokenizerState::Bre;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.unwrap().clone());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Bre =>
{
match ch
{
'a' =>
{
last_index = index as i32;
token = Some(Token::Name("brea".to_string()));
token_str.push(ch);
state = TokenizerState::Brea;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.unwrap().clone());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Brea =>
{
match ch
{
'k' =>
{
last_index = index as i32;
token = Some(Token::Name("break".to_string()));
token_str.push(ch);
state = TokenizerState::Break;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.unwrap().clone());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Break =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
token = None;
token_str.clear();
state = TokenizerState::Start;
tokens.push(Token::Break);
}
}
TokenizerState::G =>
{
match ch
{
'o' =>
{
last_index = index as i32;
token = Some(Token::Name("go".to_string()));
token_str.push(ch);
state = TokenizerState::Go;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.clone().unwrap());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Go =>
{
match ch
{
't' =>
{
last_index = index as i32;
token = Some(Token::Name("got".to_string()));
token_str.push(ch);
state = TokenizerState::Got;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.unwrap().clone());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Got =>
{
match ch
{
'o' =>
{
last_index = index as i32;
token = Some(Token::Name("goto".to_string()));
token_str.push(ch);
state = TokenizerState::Goto;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.unwrap().clone());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Goto =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
token = None;
token_str.clear();
state = TokenizerState::Start;
tokens.push(Token::Goto);
}
}
TokenizerState::R =>
{
match ch
{
'e' =>
{
last_index = index as i32;
token = Some(Token::Name("re".to_string()));
token_str.push(ch);
state = TokenizerState::Re;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.clone().unwrap());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Re =>
{
match ch
{
't' =>
{
last_index = index as i32;
token = Some(Token::Name("ret".to_string()));
token_str.push(ch);
state = TokenizerState::Ret;
}
'p' =>
{
last_index = index as i32;
token = Some(Token::Name("rep".to_string()));
token_str.push(ch);
state = TokenizerState::Rep;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.unwrap().clone());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Ret =>
{
match ch
{
'u' =>
{
last_index = index as i32;
token = Some(Token::Name("retu".to_string()));
token_str.push(ch);
state = TokenizerState::Retu;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.unwrap().clone());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Retu =>
{
match ch
{
'r' =>
{
last_index = index as i32;
token = Some(Token::Name("retur".to_string()));
token_str.push(ch);
state = TokenizerState::Retur;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.unwrap().clone());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Retur =>
{
match ch
{
'n' =>
{
last_index = index as i32;
token = Some(Token::Name("return".to_string()));
token_str.push(ch);
state = TokenizerState::Return;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.unwrap().clone());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Return =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
token = None;
token_str.clear();
state = TokenizerState::Start;
tokens.push(Token::Return);
}
}
TokenizerState::N =>
{
match ch
{
'i' =>
{
last_index = index as i32;
token = Some(Token::Name("ni".to_string()));
token_str.push(ch);
state = TokenizerState::Ni;
}
'o' =>
{
last_index = index as i32;
token = Some(Token::Name("no".to_string()));
token_str.push(ch);
state = TokenizerState::No;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.clone().unwrap());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::No =>
{
match ch
{
't' =>
{
last_index = index as i32;
token = Some(Token::Name("not".to_string()));
token_str.push(ch);
state = TokenizerState::Not;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.unwrap().clone());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Not =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
token = None;
token_str.clear();
state = TokenizerState::Start;
tokens.push(Token::Not);
}
}
TokenizerState::Ni =>
{
match ch
{
'l' =>
{
last_index = index as i32;
token = Some(Token::Name("nil".to_string()));
token_str.push(ch);
state = TokenizerState::Nil;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.unwrap().clone());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Nil =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
token = None;
token_str.clear();
state = TokenizerState::Start;
tokens.push(Token::Nil);
}
}
TokenizerState::T =>
{
match ch
{
'h' =>
{
last_index = index as i32;
token = Some(Token::Name("th".to_string()));
token_str.push(ch);
state = TokenizerState::Th;
}
'r' =>
{
last_index = index as i32;
token = Some(Token::Name("tr".to_string()));
token_str.push(ch);
state = TokenizerState::Tr;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.clone().unwrap());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Th =>
{
match ch
{
'e' =>
{
last_index = index as i32;
token = Some(Token::Name("the".to_string()));
token_str.push(ch);
state = TokenizerState::The;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.unwrap().clone());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::The =>
{
match ch
{
'n' =>
{
last_index = index as i32;
token = Some(Token::Name("then".to_string()));
token_str.push(ch);
state = TokenizerState::Then;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.unwrap().clone());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Then =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
token = None;
token_str.clear();
state = TokenizerState::Start;
tokens.push(Token::Then);
}
}
TokenizerState::Tr =>
{
match ch
{
'u' =>
{
last_index = index as i32;
token = Some(Token::Name("tru".to_string()));
token_str.push(ch);
state = TokenizerState::Tru;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.unwrap().clone());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Tru =>
{
match ch
{
'e' =>
{
last_index = index as i32;
token = Some(Token::Name("true".to_string()));
token_str.push(ch);
state = TokenizerState::True;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.unwrap().clone());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::True =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
token = None;
token_str.clear();
state = TokenizerState::Start;
tokens.push(Token::True);
}
}
TokenizerState::E =>
{
match ch
{
'l' =>
{
last_index = index as i32;
token = Some(Token::Name("el".to_string()));
token_str.push(ch);
state = TokenizerState::El;
}
'n' =>
{
last_index = index as i32;
token = Some(Token::Name("en".to_string()));
token_str.push(ch);
state = TokenizerState::En;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.clone().unwrap());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::En =>
{
match ch
{
'd' =>
{
last_index = index as i32;
token = Some(Token::Name("end".to_string()));
token_str.push(ch);
state = TokenizerState::End;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.unwrap().clone());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::End =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
token = None;
token_str.clear();
state = TokenizerState::Start;
tokens.push(Token::End);
}
}
TokenizerState::El =>
{
match ch
{
's' =>
{
last_index = index as i32;
token = Some(Token::Name("els".to_string()));
token_str.push(ch);
state = TokenizerState::Els;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.unwrap().clone());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Els =>
{
match ch
{
'e' =>
{
last_index = index as i32;
token = Some(Token::Name("else".to_string()));
token_str.push(ch);
state = TokenizerState::Else;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.unwrap().clone());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Else =>
{
match ch
{
'i' =>
{
last_index = index as i32;
token = Some(Token::Name("elsei".to_string()));
token_str.push(ch);
state = TokenizerState::Elsei;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
token = None;
token_str.clear();
state = TokenizerState::Start;
tokens.push(Token::Else);
}
}
}
}
TokenizerState::Elsei =>
{
match ch
{
'f' =>
{
last_index = index as i32;
token = Some(Token::Name("elseif".to_string()));
token_str.push(ch);
state = TokenizerState::Elseif;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.unwrap().clone());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Elseif =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
token = None;
token_str.clear();
state = TokenizerState::Start;
tokens.push(Token::Elseif);
}
}
TokenizerState::O =>
{
match ch
{
'r' =>
{
last_index = index as i32;
token = Some(Token::Name("or".to_string()));
token_str.push(ch);
state = TokenizerState::Or;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.clone().unwrap());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Or =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
token = None;
token_str.clear();
state = TokenizerState::Start;
tokens.push(Token::Or);
}
}
TokenizerState::D =>
{
match ch
{
'o' =>
{
last_index = index as i32;
token = Some(Token::Name("do".to_string()));
token_str.push(ch);
state = TokenizerState::Do;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.clone().unwrap());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Do =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
token = None;
token_str.clear();
state = TokenizerState::Start;
tokens.push(Token::Do);
}
}
TokenizerState::I =>
{
match ch
{
'f' =>
{
last_index = index as i32;
token = Some(Token::Name("if".to_string()));
token_str.push(ch);
state = TokenizerState::If;
}
'n' =>
{
last_index = index as i32;
token = Some(Token::Name("in".to_string()));
token_str.push(ch);
state = TokenizerState::In;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.clone().unwrap());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::In =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
token = None;
token_str.clear();
state = TokenizerState::Start;
tokens.push(Token::In);
}
}
TokenizerState::If =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
token = None;
token_str.clear();
state = TokenizerState::Start;
tokens.push(Token::If);
}
}
TokenizerState::F =>
{
match ch
{
'a' =>
{
last_index = index as i32;
token = Some(Token::Name("fa".to_string()));
token_str.push(ch);
state = TokenizerState::Fa;
}
'o' =>
{
last_index = index as i32;
token = Some(Token::Name("fo".to_string()));
token_str.push(ch);
state = TokenizerState::Fo;
}
'u' =>
{
last_index = index as i32;
token = Some(Token::Name("fu".to_string()));
token_str.push(ch);
state = TokenizerState::Fu;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.clone().unwrap());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Fu =>
{
match ch
{
'n' =>
{
last_index = index as i32;
token = Some(Token::Name("fun".to_string()));
token_str.push(ch);
state = TokenizerState::Fun;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.unwrap().clone());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Fun =>
{
match ch
{
'c' =>
{
last_index = index as i32;
token = Some(Token::Name("func".to_string()));
token_str.push(ch);
state = TokenizerState::Func;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.unwrap().clone());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Func =>
{
match ch
{
't' =>
{
last_index = index as i32;
token = Some(Token::Name("funct".to_string()));
token_str.push(ch);
state = TokenizerState::Funct;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.unwrap().clone());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Funct =>
{
match ch
{
'i' =>
{
last_index = index as i32;
token = Some(Token::Name("functi".to_string()));
token_str.push(ch);
state = TokenizerState::Functi;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.unwrap().clone());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Functi =>
{
match ch
{
'o' =>
{
last_index = index as i32;
token = Some(Token::Name("functio".to_string()));
token_str.push(ch);
state = TokenizerState::Functio;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.unwrap().clone());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Functio =>
{
match ch
{
'n' =>
{
last_index = index as i32;
token = Some(Token::Name("function".to_string()));
token_str.push(ch);
state = TokenizerState::Function;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.unwrap().clone());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Function =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
token = None;
token_str.clear();
state = TokenizerState::Start;
tokens.push(Token::Function);
}
}
TokenizerState::Fa =>
{
match ch
{
'l' =>
{
last_index = index as i32;
token = Some(Token::Name("fal".to_string()));
token_str.push(ch);
state = TokenizerState::Fal;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.unwrap().clone());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Fal =>
{
match ch
{
's' =>
{
last_index = index as i32;
token = Some(Token::Name("fals".to_string()));
token_str.push(ch);
state = TokenizerState::Fals;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.unwrap().clone());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Fals =>
{
match ch
{
'e' =>
{
last_index = index as i32;
token = Some(Token::Name("false".to_string()));
token_str.push(ch);
state = TokenizerState::False;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.unwrap().clone());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::False =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
token = None;
token_str.clear();
state = TokenizerState::Start;
tokens.push(Token::False);
}
}
TokenizerState::L =>
{
match ch
{
'o' =>
{
last_index = index as i32;
token = Some(Token::Name("lo".to_string()));
token_str.push(ch);
state = TokenizerState::Lo;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.clone().unwrap());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Lo =>
{
match ch
{
'c' =>
{
last_index = index as i32;
token = Some(Token::Name("loc".to_string()));
token_str.push(ch);
state = TokenizerState::Loc;
}
_ => todo!("State {:?}, Char {}", state, ch)
}
}
TokenizerState::Loc =>
{
match ch
{
'a' =>
{
last_index = index as i32;
token = Some(Token::Name("loca".to_string()));
token_str.push(ch);
state = TokenizerState::Loca;
}
_ =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
tokens.push(token.unwrap().clone());
token = None;
token_str.clear();
state = TokenizerState::Start;
}
}
}
}
TokenizerState::Loca =>
{
match ch
{
'l' =>
{
last_index = index as i32;
token = Some(Token::Name("local".to_string()));
token_str.push(ch);
state = TokenizerState::Local;
}
_ => todo!("State {:?}, Char {}", state, ch)
}
}
TokenizerState::Local =>
{
if ch.is_ascii_alphanumeric() || ch == '_'
{
last_index = index as i32;
token_str.push(ch);
token = Some(Token::Name(token_str.clone()));
state = TokenizerState::Name;
}
else
{
if last_index == -1 || token.is_none()
{
println!("{}|{}|{:?} | {:?}", last_index, index, token, tokens);
return Err("Lexerr");
}
index = last_index as usize;
last_index = -1;
token = None;
token_str.clear();
state = TokenizerState::Start;
tokens.push(Token::Local);
}
}
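// Any state not handled above (strings, long comments, exponents, several operators)
// still hits this catch-all.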
_ => todo!("State {:?} | {:?}", state, tokens)
}
index += 1;
}
return Ok(tokens);
}
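To close, a minimal smoke test, sketched here as an illustration rather than as part of the commit: it assumes it sits at the bottom of src/main.rs (so `tokenize` and `Token` are in scope) and only checks that a tiny snippet lexes, without asserting the full token stream.

#[cfg(test)]
mod tests
{
    use super::*;

    #[test]
    fn lexes_a_simple_assignment()
    {
        // The trailing space matters: the tokenizer only flushes a pending
        // token when it sees a terminating character, not at end of input.
        let source = String::from("local answer = 42 ");
        let tokens = tokenize(&source).expect("snippet should lex");
        // Expected: Local, Name("answer"), Equals, IntLiteral("42")
        assert_eq!(tokens.len(), 4);
        assert!(matches!(tokens[0], Token::Local));
        assert!(matches!(tokens[3], Token::IntLiteral(ref n) if n == "42"));
    }
}

Running `cargo test` with this module appended should exercise the keyword, name, symbol and integer paths of the state machine in one pass.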