Implement LL-like parser

This commit is contained in:
0x4261756D 2023-07-20 15:20:28 +02:00
parent c3e2565ff9
commit 0ca8a731a9
2 changed files with 667 additions and 32 deletions

View File

@ -44,12 +44,42 @@ pub enum StatNode
#[derive(Debug)]
pub struct RetstatNode
{
values: ExplistNode
values: Option<ExplistNode>
}
#[derive(Debug)]
pub struct ExpNode
pub enum ExpNode
{
Nil,
False,
True,
Numeral(f64),
LiteralString(String),
Varargs,
Functiondef(FuncbodyNode),
Suffixexp(Box<SuffixexpNode>),
Tableconstructor(TableconstructorNode),
Unop(UnopType, Box<ExpNode>),
Binop { lhs: Box<ExpNode>, op: BinopType, rhs: Box<ExpNode> }
}
// Unary operator kinds: '-' (negate), 'not', '#' (length), '~' (bitwise not).
#[derive(Debug)]
pub enum UnopType
{
Minus, LogicalNot, Length, BinaryNot,
}
// Binary operator kinds, listed from loosest- to tightest-binding
// (matching the precedence values in get_precedence).
// NOTE(review): `LocicalAnd` looks like a typo for `LogicalAnd`; renaming it
// would require touching every use site, so it is only flagged here.
#[derive(Debug)]
pub enum BinopType
{
LogicalOr,
LocicalAnd,
Lt, Gt, LtEquals, GtEquals, NotEquals, Equals,
BinaryOr,
// NOTE(review): used for the binary '~' operator (presumably xor) — confirm intent.
BinaryNot,
BinaryAnd,
Shl, Shr,
Concat,
Add, Sub,
Mul, Div, IntDiv, Mod,
Exp,
}
#[derive(Debug)]
pub struct ExplistNode
@ -57,6 +87,23 @@ pub struct ExplistNode
exps: Vec<ExpNode>
}
// Table constructor `{ ... }`; `exps` is None when the constructor is empty.
#[derive(Debug)]
pub struct TableconstructorNode
{
exps: Option<FieldlistNode>
}
// List of fields inside a table constructor.
#[derive(Debug)]
pub struct FieldlistNode
{
exps: Vec<FieldNode>
}
// One field of a table constructor.
#[derive(Debug)]
pub enum FieldNode
{
// `[index] = rhs`
IndexedAssignment { index: ExpNode, rhs: ExpNode },
// `lhs = rhs`
Assignment { lhs: String, rhs: ExpNode },
// positional value
Exp(ExpNode),
}
#[derive(Debug)]
pub struct VarlistNode
{
vars: Vec<VarNode>
@ -69,9 +116,11 @@ pub struct FunctioncallNode
args: ArgsNode,
}
#[derive(Debug)]
pub struct ArgsNode
pub enum ArgsNode
{
Bracketed(Option<ExplistNode>),
Tableconstructor(TableconstructorNode),
Literal(String),
}
#[derive(Debug)]
pub struct ElseifNode
@ -82,28 +131,59 @@ pub struct ElseifNode
// funcname ::= Name {'.' Name} [':' Name]
// `first_arg` holds the method name after ':' when present.
// (Removed a stray `}#[derive(Debug)]` remnant left over from a diff.)
#[derive(Debug)]
pub struct FuncnameNode
{
    name: String,
    dotted_names: Vec<String>,
    first_arg: Option<String>,
}
// Function parameter list: parameter names plus an optional trailing '...'.
#[derive(Debug)]
pub struct ParlistNode
{
names: Vec<String>,
has_varargs: bool,
}
// Function body: optional parameter list plus the block up to 'end'.
#[derive(Debug)]
pub struct FuncbodyNode
{
pars: Option<ParlistNode>,
body: BlockNode,
}
// Comma-separated list of attributed names (local declarations).
#[derive(Debug)]
pub struct AttnamelistNode
{
attnames: Vec<AttnameNode>
}
// Attributed name, e.g. `x <const>`; `attribute` is None when absent.
#[derive(Debug)]
pub struct AttnameNode
{
name: String,
attribute: Option<String>,
}
#[derive(Debug)]
pub enum VarNode
{
Name(String),
Indexed { value: PrefixexpNode, index: ExpNode },
Member { value: PrefixexpNode, name: String }
Indexed { value: SuffixexpNode, index: ExpNode },
Member { value: SuffixexpNode, name: String }
}
#[derive(Debug)]
pub enum PrefixexpNode
pub struct SuffixexpNode
{
first_part: SuffixexpFirstPart,
suffixes: Vec<SuffixexpSuffix>,
}
// First element of a suffix expression: a bare name or '(' exp ')'.
#[derive(Debug)]
pub enum SuffixexpFirstPart // a:b:test() => a:b.test(b) => a.b.test(a, b)
{
Name(String),
BracketedExpr(ExpNode),
}
// One trailing element of a suffix expression.
#[derive(Debug)]
pub enum SuffixexpSuffix
{
// `.Name`
Dot(String),
// `[exp]`
Indexed(ExpNode),
// call arguments
Args(ArgsNode),
// `:Name args` — method-style call; the String is the method name
ArgsFirstArg(String, ArgsNode),
}
fn parse_chunk(tokens: &Vec<Token>, i: &mut usize) -> Result<ChunkNode, &'static str>
{
@ -112,7 +192,8 @@ fn parse_chunk(tokens: &Vec<Token>, i: &mut usize) -> Result<ChunkNode, &'static
fn parse_block(tokens: &Vec<Token>, i: &mut usize) -> Result<BlockNode, &'static str>
{
let mut stats: Vec<StatNode> = Vec::new();
while *i < tokens.len() && tokens[*i] != Token::Return
while *i < tokens.len() && tokens[*i] != Token::Return && tokens[*i] != Token::End && tokens[*i] != Token::Elseif &&
tokens[*i] != Token::Else
{
stats.push(parse_stat(tokens, i)?);
}
@ -416,11 +497,11 @@ fn parse_stat(tokens: &Vec<Token>, i: &mut usize) -> Result<StatNode, &'static s
Token::Equals =>
{
*i += 1;
return Ok(StatNode::Assignment { lhs: VarlistNode { vars: Vec::from([suffix_expression_to_var(suffix_expression)]) }, rhs: parse_explist(tokens, i)? });
return Ok(StatNode::Assignment { lhs: VarlistNode { vars: Vec::from([suffix_expression_to_var(suffix_expression)?]) }, rhs: parse_explist(tokens, i)? });
}
Token::Comma =>
{
let mut vars = Vec::from([suffix_expression_to_var(suffix_expression)]);
let mut vars = Vec::from([suffix_expression_to_var(suffix_expression)?]);
while tokens[*i] == Token::Comma
{
*i += 1;
@ -435,16 +516,72 @@ fn parse_stat(tokens: &Vec<Token>, i: &mut usize) -> Result<StatNode, &'static s
}
_ =>
{
if let SuffixexpNode()
if suffix_expression.suffixes.is_empty()
{
println!("{:?} {} {:?}", tokens[*i], i, suffix_expression);
return Err("Expected function call but suffix is empty");
}
if let Some(SuffixexpSuffix::Args(_)) = suffix_expression.suffixes.last()
{
return Ok(StatNode::Functioncall(suffix_expression_to_functioncall(suffix_expression)?));
}
if let Some(SuffixexpSuffix::ArgsFirstArg(_, _)) = suffix_expression.suffixes.last()
{
return Ok(StatNode::Functioncall(suffix_expression_to_functioncall(suffix_expression)?));
}
else
{
println!("{:?} {} {:?}", tokens[*i], i, suffix_expression.suffixes.last());
return Err("Expected function call");
}
}
}
}
_ => Err("Unexpected token while parsing stat")
_ =>
{
println!("{:?} {:?} {:?}", tokens[*i - 2], tokens[*i - 1], tokens[*i]);
Err("Unexpected token while parsing stat")
}
}
}
fn suffix_expression_to_var(suffixexp: SuffixexpNode) -> VarNode
fn suffix_expression_to_functioncall(suffixexp: SuffixexpNode) -> Result<FunctioncallNode, &'static str>
{
todo!()
let mut new_suffixexp = suffixexp;
let last = new_suffixexp.suffixes.pop();
if let Some(SuffixexpSuffix::Args(args)) = last
{
return Ok(FunctioncallNode { function: new_suffixexp, object_arg: None, args });
}
if let Some(SuffixexpSuffix::ArgsFirstArg(first_arg, args)) = last
{
return Ok(FunctioncallNode { function: new_suffixexp, object_arg: Some(first_arg.clone()), args });
}
return Err("Cannot convert suffixexp to functioncall");
}
// Converts a parsed suffix expression into an assignment target (var).
// With no suffixes the first part must be a plain name; otherwise the final
// suffix must be a member access or an index.
// (Rewritten as a single `match`: the previous back-to-back `if let`s both
// consumed `last` by value, which is a use-after-move (E0382).)
fn suffix_expression_to_var(suffixexp: SuffixexpNode) -> Result<VarNode, &'static str>
{
    if suffixexp.suffixes.is_empty()
    {
        return if let SuffixexpFirstPart::Name(name) = suffixexp.first_part
        {
            Ok(VarNode::Name(name))
        }
        else
        {
            Err("Can only convert suffix exp without suffix to var if its first part is a name")
        };
    }
    let mut new_suffixexp = suffixexp;
    // Pop the trailing suffix; the remainder is the value being accessed.
    match new_suffixexp.suffixes.pop()
    {
        Some(SuffixexpSuffix::Dot(name)) => Ok(VarNode::Member { value: new_suffixexp, name }),
        Some(SuffixexpSuffix::Indexed(index)) => Ok(VarNode::Indexed { value: new_suffixexp, index }),
        _ => Err("Cannot convert suffixexp to var"),
    }
}
fn parse_var(tokens: &Vec<Token>, i: &mut usize) -> Result<VarNode, &'static str>
{
@ -452,36 +589,509 @@ fn parse_var(tokens: &Vec<Token>, i: &mut usize) -> Result<VarNode, &'static str
}
// args ::= '(' [explist] ')' | tableconstructor | LiteralString
// (Removed a stale `todo!()` remnant; fixed the bounds check `*i > tokens.len()`
// to `>=` which could index past the end; removed a debug println that indexed
// `tokens[*i]` when `*i` may already be past the end, causing a panic.)
fn parse_args(tokens: &Vec<Token>, i: &mut usize) -> Result<ArgsNode, &'static str>
{
    if *i >= tokens.len()
    {
        return Err("Reached end of tokens while parsing args");
    }
    match &tokens[*i]
    {
        Token::RoundOpen =>
        {
            *i += 1;
            if *i >= tokens.len()
            {
                return Err("Reached end of tokens while parsing bracketed args");
            }
            // Empty argument list: `()`
            if tokens[*i] == Token::RoundClosed
            {
                *i += 1;
                return Ok(ArgsNode::Bracketed(None));
            }
            let exps = parse_explist(tokens, i)?;
            if *i >= tokens.len() || tokens[*i] != Token::RoundClosed
            {
                return Err("Expected ')' to close bracketed args");
            }
            *i += 1;
            return Ok(ArgsNode::Bracketed(Some(exps)));
        }
        Token::CurlyOpen =>
        {
            return Ok(ArgsNode::Tableconstructor(parse_tableconstructor(tokens, i)?));
        }
        Token::StringLiteral(name) =>
        {
            *i += 1;
            return Ok(ArgsNode::Literal(name.clone()));
        }
        _ => return Err("Unexpected token while parsing args")
    }
}
// Parses a suffix expression:
// primaryexp { '.' 'Name' | '[' exp ']' | ':' 'Name' args | args }
// (Removed a stale `todo!()` remnant left over from a diff.)
fn parse_suffixexp(tokens: &Vec<Token>, i: &mut usize) -> Result<SuffixexpNode, &'static str>
{
    if *i >= tokens.len()
    {
        return Err("Reached end of tokens while parsing suffixexp");
    }
    // The first part is either a plain name or a parenthesised expression.
    let first_part = match &tokens[*i]
    {
        Token::Name(name) =>
        {
            *i += 1;
            SuffixexpFirstPart::Name(name.clone())
        },
        Token::RoundOpen =>
        {
            *i += 1;
            let ret = SuffixexpFirstPart::BracketedExpr(parse_exp(tokens, i)?);
            if *i >= tokens.len() || tokens[*i] != Token::RoundClosed
            {
                return Err("Expected ')' to close bracketed primary expression");
            }
            *i += 1;
            ret
        }
        _ => return Err("Unexpected token as first part of suffixexp")
    };
    // Greedily collect trailing suffixes until a non-suffix token appears.
    let mut suffixes = Vec::new();
    while *i < tokens.len()
    {
        match tokens[*i]
        {
            Token::Dot =>
            {
                *i += 1;
                if *i >= tokens.len()
                {
                    return Err("Reached end of tokens but expected name for dotted suffix expression");
                }
                if let Token::Name(name) = &tokens[*i]
                {
                    *i += 1;
                    suffixes.push(SuffixexpSuffix::Dot(name.clone()));
                }
                else
                {
                    return Err("Expected name for dotted suffix expression");
                }
            }
            Token::SquareOpen =>
            {
                *i += 1;
                suffixes.push(SuffixexpSuffix::Indexed(parse_exp(tokens, i)?));
                if *i >= tokens.len() || tokens[*i] != Token::SquareClosed
                {
                    return Err("Expected ']' to close indexed suffix expression");
                }
                *i += 1;
            }
            Token::Colon =>
            {
                // `:Name args` — method-style call.
                *i += 1;
                if *i >= tokens.len()
                {
                    return Err("Reached end of tokens but expected name for dotted suffix expression");
                }
                if let Token::Name(name) = &tokens[*i]
                {
                    *i += 1;
                    suffixes.push(SuffixexpSuffix::ArgsFirstArg(name.clone(), parse_args(tokens, i)?));
                }
                else
                {
                    return Err("Expected name for dotted suffix expression");
                }
            }
            Token::RoundOpen | Token::CurlyOpen | Token::StringLiteral(_) =>
            {
                // Any args form starts a plain call suffix.
                suffixes.push(SuffixexpSuffix::Args(parse_args(tokens, i)?));
            }
            _ => break,
        }
    }
    return Ok(SuffixexpNode { first_part, suffixes });
}
// retstat ::= 'return' [explist] [';']
// (Removed a stale `todo!` remnant left over from a diff.)
fn parse_retstat(tokens: &Vec<Token>, i: &mut usize) -> Result<RetstatNode, &'static str>
{
    if *i >= tokens.len() || tokens[*i] != Token::Return
    {
        return Err("Expected 'return' to start retstat");
    }
    *i += 1;
    // A valueless return: end of input, ';', or a block terminator follows.
    if *i >= tokens.len() || tokens[*i] == Token::Semicolon || tokens[*i] == Token::Else || tokens[*i] == Token::Elseif ||
        tokens[*i] == Token::End
    {
        if *i < tokens.len() && tokens[*i] == Token::Semicolon
        {
            *i += 1;
        }
        return Ok(RetstatNode { values: None });
    }
    let values = parse_explist(tokens, i)?;
    // Optional trailing ';'.
    if *i < tokens.len() && tokens[*i] == Token::Semicolon
    {
        *i += 1;
    }
    return Ok(RetstatNode { values: Some(values) });
}
// Parses a full expression via precedence climbing.
// (Removed a stale `todo!` remnant left over from a diff.)
fn parse_exp(tokens: &Vec<Token>, i: &mut usize) -> Result<ExpNode, &'static str>
{
    // Parse the leftmost operand, then fold in binary operators by precedence.
    let lhs = parse_exp_primary(tokens, i)?;
    return parse_exp_precedence(tokens, i, lhs, 0);
}
// Binding power of a binary operator token; higher binds tighter.
// Values are spaced by 2 to leave room for associativity adjustments in
// parse_exp_precedence.
fn get_precedence(token: &Token) -> Result<u8, &'static str>
{
    let precedence = match token
    {
        Token::Or => 2,
        Token::And => 4,
        Token::Lt | Token::Gt | Token::LtEquals | Token::GtEquals | Token::TildeEquals | Token::EqualsEquals => 6,
        Token::Pipe => 8,
        Token::Tilde => 10,
        Token::Ampersand => 12,
        Token::LtLt | Token::GtGt => 14,
        Token::DotDot => 16,
        Token::Plus | Token::Minus => 18,
        Token::Star | Token::Slash | Token::SlashSlash | Token::Percent => 20,
        Token::Caret => 22,
        _ => return Err("Tried to get precedence for unknown operator"),
    };
    Ok(precedence)
}
// Maps an operator token to its BinopType.
fn get_binop(token: &Token) -> Result<BinopType, &'static str>
{
    match token
    {
        Token::Or => Ok(BinopType::LogicalOr),
        Token::And => Ok(BinopType::LocicalAnd),
        Token::Lt => Ok(BinopType::Lt),
        // Fixed: previously mapped to BinopType::Lt, flipping every '>' comparison.
        Token::Gt => Ok(BinopType::Gt),
        Token::LtEquals => Ok(BinopType::LtEquals),
        Token::GtEquals => Ok(BinopType::GtEquals),
        Token::TildeEquals => Ok(BinopType::NotEquals),
        Token::EqualsEquals => Ok(BinopType::Equals),
        Token::Pipe => Ok(BinopType::BinaryOr),
        Token::Tilde => Ok(BinopType::BinaryNot),
        Token::Ampersand => Ok(BinopType::BinaryAnd),
        // Fixed: shift operators were missing even though is_binop and
        // get_precedence accept them, so any shift expression hit the error arm.
        Token::LtLt => Ok(BinopType::Shl),
        Token::GtGt => Ok(BinopType::Shr),
        Token::DotDot => Ok(BinopType::Concat),
        Token::Plus => Ok(BinopType::Add),
        Token::Minus => Ok(BinopType::Sub),
        Token::Star => Ok(BinopType::Mul),
        Token::Slash => Ok(BinopType::Div),
        Token::SlashSlash => Ok(BinopType::IntDiv),
        Token::Percent => Ok(BinopType::Mod),
        Token::Caret => Ok(BinopType::Exp),
        _ => Err("Tried to get binop type for unknown operator"),
    }
}
// True if `token` can appear as a binary operator inside an expression.
fn is_binop(token: &Token) -> bool
{
    matches!(token,
        Token::Or | Token::And |
        Token::Lt | Token::Gt | Token::LtEquals | Token::GtEquals | Token::TildeEquals | Token::EqualsEquals |
        Token::Pipe | Token::Tilde | Token::Ampersand |
        Token::LtLt | Token::GtGt |
        Token::DotDot |
        Token::Plus | Token::Minus |
        Token::Star | Token::Slash | Token::SlashSlash | Token::Percent |
        Token::Caret)
}
// Concatenation ('..') and exponentiation ('^') associate to the right.
fn is_right_associative(token: &Token) -> bool
{
    matches!(token, Token::DotDot | Token::Caret)
}
// Precedence-climbing loop: repeatedly consumes binary operators whose
// precedence is at least `min_precedence`, building a left-leaning tree
// and recursing for tighter-binding right-hand sides.
fn parse_exp_precedence(tokens: &Vec<Token>, i: &mut usize, lhs: ExpNode, min_precedence: u8) -> Result<ExpNode, &'static str>
{
let mut lhs = lhs;
while *i < tokens.len() && is_binop(&tokens[*i])
{
let precedence = get_precedence(&tokens[*i])?;
// Stop when the next operator binds more loosely than the caller allows.
if precedence < min_precedence
{
break;
}
let op = get_binop(&tokens[*i])?;
*i += 1;
let mut rhs = parse_exp_primary(tokens, i)?;
// Fold operators that bind tighter (or equally tight but right-associative)
// into rhs before combining it with lhs.
while *i < tokens.len() && is_binop(&tokens[*i]) && (get_precedence(&tokens[*i])? > precedence ||
(get_precedence(&tokens[*i])? == precedence && is_right_associative(&tokens[*i])))
{
// Equal precedence (right-associative case) keeps the same minimum;
// otherwise the recursion requires strictly higher precedence.
rhs = parse_exp_precedence(tokens, i, rhs, precedence + if precedence == get_precedence(&tokens[*i])? {0} else {1})?;
}
lhs = ExpNode::Binop { lhs: Box::new(lhs), op, rhs: Box::new(rhs) };
}
return Ok(lhs);
}
// Parses a primary (non-binary) expression: literals, varargs, a function
// definition, a table constructor, a unary operation, or a suffix expression.
fn parse_exp_primary(tokens: &Vec<Token>, i: &mut usize) -> Result<ExpNode, &'static str>
{
    if *i >= tokens.len()
    {
        return Err("Reached end of tokens but expected primary expression");
    }
    match &tokens[*i]
    {
        Token::Nil =>
        {
            *i += 1;
            Ok(ExpNode::Nil)
        },
        Token::True =>
        {
            *i += 1;
            Ok(ExpNode::True)
        },
        Token::False =>
        {
            *i += 1;
            Ok(ExpNode::False)
        },
        Token::Numeral(number_str) =>
        {
            *i += 1;
            Ok(ExpNode::Numeral(number_str.parse::<f64>().map_err(|_| "Could not parse number")?))
        },
        Token::StringLiteral(string) =>
        {
            *i += 1;
            Ok(ExpNode::LiteralString(string.clone()))
        },
        Token::DotDotDot =>
        {
            *i += 1;
            Ok(ExpNode::Varargs)
        },
        Token::Function =>
        {
            *i += 1;
            Ok(ExpNode::Functiondef(parse_funcbody(tokens, i)?))
        }
        Token::CurlyOpen => Ok(ExpNode::Tableconstructor(parse_tableconstructor(tokens, i)?)),
        // Unary operators: consume the operator token before parsing the
        // operand. (Fixed: previously the token was never consumed, so
        // parse_exp re-saw the same operator and recursed until stack overflow.)
        // NOTE(review): parsing the operand with full parse_exp gives unary
        // operators the loosest binding (`-a + b` parses as `-(a + b)`) —
        // confirm whether a dedicated unary precedence level is intended.
        Token::Minus =>
        {
            *i += 1;
            Ok(ExpNode::Unop(UnopType::Minus, Box::new(parse_exp(tokens, i)?)))
        }
        Token::Hash =>
        {
            *i += 1;
            Ok(ExpNode::Unop(UnopType::Length, Box::new(parse_exp(tokens, i)?)))
        }
        Token::Not =>
        {
            *i += 1;
            Ok(ExpNode::Unop(UnopType::LogicalNot, Box::new(parse_exp(tokens, i)?)))
        }
        Token::Tilde =>
        {
            *i += 1;
            Ok(ExpNode::Unop(UnopType::BinaryNot, Box::new(parse_exp(tokens, i)?)))
        }
        _ => Ok(ExpNode::Suffixexp(Box::new(parse_suffixexp(tokens, i)?))),
    }
}
// tableconstructor ::= '{' [fieldlist] '}' — not yet implemented; callers
// (parse_args, parse_exp_primary) will panic if a '{' is reached.
fn parse_tableconstructor(tokens: &Vec<Token>, i: &mut usize) -> Result<TableconstructorNode, &'static str>
{
todo!()
}
// explist ::= exp {',' exp} — at least one expression.
// (Removed a stale `todo!` remnant left over from a diff.)
fn parse_explist(tokens: &Vec<Token>, i: &mut usize) -> Result<ExplistNode, &'static str>
{
    let mut exps: Vec<ExpNode> = Vec::from([parse_exp(tokens, i)?]);
    while *i < tokens.len() && tokens[*i] == Token::Comma
    {
        *i += 1;
        exps.push(parse_exp(tokens, i)?);
    }
    return Ok(ExplistNode { exps });
}
// funcname ::= Name {'.' Name} [':' Name]
// (Removed a stale `todo!` remnant left over from a diff.)
fn parse_funcname(tokens: &Vec<Token>, i: &mut usize) -> Result<FuncnameNode, &'static str>
{
    if *i >= tokens.len()
    {
        return Err("Reached end of tokens while parsing funcname");
    }
    if let Token::Name(name) = &tokens[*i]
    {
        *i += 1;
        // Zero or more '.' Name segments.
        let mut dotted_names = Vec::new();
        while *i < tokens.len() && tokens[*i] == Token::Dot
        {
            *i += 1;
            if *i >= tokens.len()
            {
                return Err("Reached end of tokens while parsing dotted part of funcname");
            }
            if let Token::Name(dotted_name) = &tokens[*i]
            {
                *i += 1;
                dotted_names.push(dotted_name.clone());
            }
            else
            {
                return Err("Expected name in dotted funcname");
            }
        }
        // Optional ':' Name — method-definition form.
        let first_arg = if *i < tokens.len() && tokens[*i] == Token::Colon
        {
            *i += 1;
            if *i >= tokens.len()
            {
                return Err("Reached end of tokens while parsing funcname first arg");
            }
            if let Token::Name(arg_name) = &tokens[*i]
            {
                *i += 1;
                Some(arg_name.clone())
            }
            else
            {
                return Err("Expected name of first arg in funcname");
            }
        }
        else
        {
            None
        };
        return Ok(FuncnameNode { name: name.clone(), dotted_names, first_arg });
    }
    else
    {
        return Err("Expected func name");
    }
}
// funcbody ::= '(' [parlist] ')' block 'end'
// (Removed a stale `todo!` remnant, and a debug println that sliced
// `tokens[(*i - 10)..(*i + 10)]` — that slice panics whenever *i is within
// 10 tokens of either end of the stream.)
fn parse_funcbody(tokens: &Vec<Token>, i: &mut usize) -> Result<FuncbodyNode, &'static str>
{
    if *i >= tokens.len() || tokens[*i] != Token::RoundOpen
    {
        return Err("Expected '(' to start funcbody");
    }
    *i += 1;
    if *i >= tokens.len()
    {
        return Err("Reached end of tokens while parsing funcbody parlist");
    }
    // `()` means no parameter list at all.
    let pars = if tokens[*i] == Token::RoundClosed
    {
        *i += 1;
        None
    }
    else
    {
        let ret = Some(parse_parlist(tokens, i)?);
        if *i >= tokens.len() || tokens[*i] != Token::RoundClosed
        {
            return Err("Expected ')' to close funcbody parlist");
        }
        *i += 1;
        ret
    };
    let block = parse_block(tokens, i)?;
    if *i >= tokens.len() || tokens[*i] != Token::End
    {
        return Err("Expected 'end' to close funcbody");
    }
    *i += 1;
    return Ok(FuncbodyNode { pars, body: block });
}
// parlist ::= Name {',' Name} [',' '...'] | '...'
fn parse_parlist(tokens: &Vec<Token>, i: &mut usize) -> Result<ParlistNode, &'static str>
{
    if *i >= tokens.len()
    {
        return Err("Reached end of tokens while parsing parlist");
    }
    // A lone '...' is a pure-varargs parameter list.
    if tokens[*i] == Token::DotDotDot
    {
        *i += 1;
        return Ok(ParlistNode { names: Vec::new(), has_varargs: true });
    }
    // The list must start with a name.
    let mut names = Vec::new();
    match &tokens[*i]
    {
        Token::Name(name) =>
        {
            *i += 1;
            names.push(name.clone());
        }
        _ => return Err("Expected name to start parlist"),
    }
    // Further names, optionally terminated by '...'.
    let mut has_varargs = false;
    while *i < tokens.len() && tokens[*i] == Token::Comma
    {
        *i += 1;
        if *i >= tokens.len()
        {
            return Err("Reached end of tokens while parsing parlist name list");
        }
        match &tokens[*i]
        {
            Token::Name(name) =>
            {
                *i += 1;
                names.push(name.clone());
            }
            Token::DotDotDot =>
            {
                *i += 1;
                has_varargs = true;
                break;
            }
            _ => return Err("Unexpected token while parsing parlist name list"),
        }
    }
    return Ok(ParlistNode { names, has_varargs });
}
// attnamelist ::= attname {',' attname} — at least one attributed name.
// (Removed a stale `todo!` remnant left over from a diff.)
fn parse_attnamelist(tokens: &Vec<Token>, i: &mut usize) -> Result<AttnamelistNode, &'static str>
{
    let mut attnames: Vec<AttnameNode> = Vec::from([parse_attname(tokens, i)?]);
    while *i < tokens.len() && tokens[*i] == Token::Comma
    {
        *i += 1;
        attnames.push(parse_attname(tokens, i)?);
    }
    return Ok(AttnamelistNode { attnames });
}
// attname ::= Name ['<' Name '>'], e.g. `x <const>`.
fn parse_attname(tokens: &Vec<Token>, i: &mut usize) -> Result<AttnameNode, &'static str>
{
    if *i >= tokens.len()
    {
        return Err("Reached end of tokens but expected name for attrib name");
    }
    if let Token::Name(name) = &tokens[*i]
    {
        *i += 1;
        // Optional attribute in angle brackets.
        let attribute = if *i < tokens.len() && tokens[*i] == Token::Lt
        {
            *i += 1;
            if *i >= tokens.len()
            {
                return Err("Reached end of tokens but expected attribute");
            }
            if let Token::Name(attrib) = &tokens[*i]
            {
                *i += 1;
                if *i >= tokens.len() || tokens[*i] != Token::Gt
                {
                    return Err("Expected '>' to close attribute name");
                }
                // Fixed: consume the closing '>' — it was previously left in
                // the stream, desynchronizing the caller. Also fixed the
                // "Exptected" typo in the error message above.
                *i += 1;
                Some(attrib.clone())
            }
            else
            {
                return Err("Expected attribute in attrib name");
            }
        }
        else
        {
            None
        };
        return Ok(AttnameNode { name: name.clone(), attribute });
    }
    else
    {
        return Err("Expected name for attrib name");
    }
}
//===============================================================================================================================================
//===============================================================================================================================================

View File

@ -1,4 +1,4 @@
#[derive(Debug, Clone, PartialEq)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Token
{
Name(String),
@ -186,11 +186,11 @@ fn tokenize_char(state: &mut TokenizerState, ch: char, last_index: &mut i32, ind
if ch.is_whitespace() { }
else if ch.is_ascii_alphabetic() || ch == '_'
{
tokenize_terminal(last_index, *index, token, state, Some(Token::Name(token_str.clone())), TokenizerState::Name, token_str, ch);
tokenize_terminal(last_index, *index, token, state, Some(Token::Name(ch.to_string())), TokenizerState::Name, token_str, ch);
}
else if ch.is_numeric() && ch.is_ascii()
{
tokenize_terminal(last_index, *index, token, state, Some(Token::Numeral(token_str.clone())), TokenizerState::Number, token_str, ch);
tokenize_terminal(last_index, *index, token, state, Some(Token::Numeral(ch.to_string())), TokenizerState::Number, token_str, ch);
}
else
{
@ -1178,7 +1178,32 @@ pub fn tokenize(file_content: &String) -> Result<Vec<Token>, &'static str>
TokenizerState::End => tokenize_backtrack_custom_token(&mut last_index, &mut index, &mut tokens, &mut token, &mut token_str, &mut state, Token::End)?,
TokenizerState::And => tokenize_backtrack_custom_token(&mut last_index, &mut index, &mut tokens, &mut token, &mut token_str, &mut state, Token::And)?,
TokenizerState::Semicolon => tokenize_backtrack_custom_token(&mut last_index, &mut index, &mut tokens, &mut token, &mut token_str, &mut state, Token::Semicolon)?,
_ => todo!("state: {:?}", state),
TokenizerState::Number =>
{
if let Some(numeral_token) = token
{
if let Token::Numeral(_) = numeral_token
{
tokens.push(numeral_token);
}
else
{
return Err("In number state but current token is not a numeral")
}
}
else
{
return Err("In number state but no current token")
}
}
TokenizerState::Start =>
{
if token.is_some()
{
return Err("Finished tokenizing in the start state but the token was non-empty");
}
}
_ => todo!("state: {:?} {:?}", state, token),
}
return Ok(tokens);