Compare commits

...

3 Commits

Author SHA1 Message Date
0x4261756D
c3e2565ff9 Start work on a LL inspired parser since CYK is too slow 2023-07-19 12:57:37 +02:00
0x4261756D
e53734dcca Add terribly slow cyk parser 2023-07-12 23:25:20 +02:00
0x4261756D
5a2171b023 Add grammar 2023-07-11 17:38:26 +02:00
4 changed files with 1080 additions and 9 deletions

480
src/grammar.rs Normal file
View File

@ -0,0 +1,480 @@
use crate::tokenizer::Token;
/// One production of the grammar in Chomsky normal form:
/// either a nonterminal that matches a single terminal token, or a
/// nonterminal that expands to exactly two nonterminals.
/// The `u8` values are indices into `NONTERMINAL_NAMES`.
pub enum Rule
{
Terminal(u8, Token),
NonTerminal(u8, u8, u8)
}
/// Human-readable names of all grammar nonterminals; the array position is
/// the `u8` id used by `TERMINAL_RULES` and `NONTERMINAL_RULES`.
/// Naming scheme (generated by the normal-form transformation):
/// `x_non` wraps a single terminal (e.g. id 6 `">_non"` pairs with
/// `Token::Gt` in `TERMINAL_RULES`); `x_?` / `x_*` appear to come from
/// optional / repeated grammar parts, and `x__N` are numbered helper
/// nonterminals — TODO confirm against the grammar generator.
pub const NONTERMINAL_NAMES: [&str; 115] =
[
"stat__15",
"funcbody__50",
"fieldlist",
"fieldlist__1",
"namelist",
"parlist",
">_non",
"chunk",
",_non",
"field__29",
"stat__11",
"stat__45",
"stat__33",
"funcname",
"prefixexp__25",
";_?",
"stat__5",
"stat__38",
"else_non",
"stat__16",
"<_non",
"explist",
"stat_*",
"{_non",
"funcnamedotexpansion_*",
"stat__6",
"funcnamedotexpansion",
"moreattribs_*",
"field__28",
"retstat_?",
"stat__9",
"stat__37",
"field",
"local_non",
")_non",
"morevars_*",
"moreattribs__19",
"morevars",
"forthirdarg_?",
"::_non",
"stat",
"morefields_*",
"]_non",
"[_non",
"assign",
"field__31",
"stat__4",
"do_non",
"unop",
"elseifblocks__18",
"elseblock_?",
"label__21",
"funcbody__51",
"in_non",
"stat__34",
"exp",
"funcnamecolonexpansion_?",
"(_non",
"if_non",
"stat__7",
"attrib__20",
"elseifblocks__17",
"fieldsep_?",
"while_non",
"elseifblocks_*",
"binop",
"..._non",
"then_non",
"return_non",
"._non",
"stat__35",
"prefixexp",
"attnamelist",
"functioncall__26",
"elseifblocks",
"function_non",
"}_non",
"var__22",
"tableconstructor__53",
"varlist",
"S_0",
"Name_non",
"goto_non",
"parlistvarargs_?",
"elseif_non",
"for_non",
"stat__8",
"args",
"stat__41",
"stat__14",
"until_non",
"morenames_*",
"stat__39",
"stat__32",
"end_non",
"attnamelist__46",
"moreexps_*",
"stat__10",
"=_non",
":_non",
"stat__36",
"functioncall__27",
"funcname__48",
"attrib",
"repeat_non",
"args__49",
"var",
"var__23",
"stat__40",
"exp__0",
"stat__42",
"morefields",
"moreattribs",
"funcbody",
"retstat__47",
];
/// Terminal productions `A -> token`: each pair maps a nonterminal id
/// (index into `NONTERMINAL_NAMES`) to the token it matches. Tokens that
/// carry a payload (`Name`, `StringLiteral`, `Numeral`) are stored with an
/// empty placeholder string; `cyk` matches those by variant only, ignoring
/// the payload.
pub const TERMINAL_RULES: [(u8, Token); 125] =
[
(57, Token::RoundOpen),
(34, Token::RoundClosed),
(8, Token::Comma),
(66, Token::DotDotDot),
(69, Token::Dot),
(39, Token::ColonColon),
(99, Token::Colon),
(15, Token::Semicolon),
(20, Token::Lt),
(98, Token::Equals),
(6, Token::Gt),
(81, Token::Name(String::new())),
(80, Token::Return),
(80, Token::Semicolon),
(80, Token::Break),
(43, Token::SquareOpen),
(42, Token::SquareClosed),
(87, Token::StringLiteral(String::new())),
(105, Token::RoundClosed),
(72, Token::Name(String::new())),
(65, Token::Plus),
(65, Token::Minus),
(65, Token::Star),
(65, Token::Slash),
(65, Token::SlashSlash),
(65, Token::Caret),
(65, Token::Percent),
(65, Token::Ampersand),
(65, Token::Pipe),
(65, Token::GtGt),
(65, Token::LtLt),
(65, Token::DotDot),
(65, Token::Lt),
(65, Token::LtEquals),
(65, Token::Gt),
(65, Token::GtEquals),
(65, Token::EqualsEquals),
(65, Token::TildeEquals),
(65, Token::And),
(65, Token::Or),
(7, Token::Return),
(7, Token::Semicolon),
(7, Token::Break),
(47, Token::Do),
(18, Token::Else),
(50, Token::Else),
(84, Token::Elseif),
(49, Token::Then),
(94, Token::End),
(55, Token::Nil),
(55, Token::False),
(55, Token::True),
(55, Token::Numeral(String::new())),
(55, Token::StringLiteral(String::new())),
(55, Token::DotDotDot),
(55, Token::Name(String::new())),
(21, Token::Nil),
(21, Token::False),
(21, Token::True),
(21, Token::Numeral(String::new())),
(21, Token::StringLiteral(String::new())),
(21, Token::DotDotDot),
(21, Token::Name(String::new())),
(32, Token::Nil),
(32, Token::False),
(32, Token::True),
(32, Token::Numeral(String::new())),
(32, Token::StringLiteral(String::new())),
(32, Token::DotDotDot),
(32, Token::Name(String::new())),
(2, Token::Nil),
(2, Token::False),
(2, Token::True),
(2, Token::Numeral(String::new())),
(2, Token::StringLiteral(String::new())),
(2, Token::DotDotDot),
(2, Token::Name(String::new())),
(3, Token::Comma),
(3, Token::Semicolon),
(62, Token::Comma),
(62, Token::Semicolon),
(85, Token::For),
(13, Token::Name(String::new())),
(75, Token::Function),
(82, Token::Goto),
(58, Token::If),
(53, Token::In),
(33, Token::Local),
(36, Token::Name(String::new())),
(4, Token::Name(String::new())),
(5, Token::DotDotDot),
(5, Token::Name(String::new())),
(71, Token::Name(String::new())),
(104, Token::Repeat),
(29, Token::Return),
(114, Token::Semicolon),
(114, Token::Nil),
(114, Token::False),
(114, Token::True),
(114, Token::Numeral(String::new())),
(114, Token::StringLiteral(String::new())),
(114, Token::DotDotDot),
(114, Token::Name(String::new())),
(68, Token::Return),
(40, Token::Semicolon),
(40, Token::Break),
(22, Token::Semicolon),
(22, Token::Break),
(54, Token::End),
(70, Token::End),
(100, Token::End),
(11, Token::Name(String::new())),
(25, Token::End),
(78, Token::CurlyClosed),
(67, Token::Then),
(48, Token::Minus),
(48, Token::Not),
(48, Token::Hash),
(48, Token::Tilde),
(90, Token::Until),
(106, Token::Name(String::new())),
(79, Token::Name(String::new())),
(63, Token::While),
(23, Token::CurlyOpen),
(76, Token::CurlyClosed),
];
/// Binary productions `A -> B C` of the Chomsky-normal-form grammar.
/// Each triple is `(lhs, rhs_first, rhs_second)`; all three values are
/// indices into `NONTERMINAL_NAMES`.
pub const NONTERMINAL_RULES: [(u8, u8, u8); 219] =
[
(80, 22, 29),
(80, 68, 114),
(80, 40, 22),
(80, 82, 81),
(80, 79, 44),
(80, 47, 25),
(80, 63, 46),
(80, 104, 59),
(80, 85, 30),
(80, 75, 89),
(80, 33, 0),
(80, 58, 93),
(80, 85, 31),
(80, 33, 11),
(80, 71, 87),
(80, 71, 73),
(80, 39, 51),
(87, 57, 105),
(87, 23, 78),
(105, 21, 34),
(44, 98, 21),
(72, 81, 95),
(95, 103, 27),
(95, 20, 60),
(95, 112, 27),
(95, 8, 36),
(103, 20, 60),
(60, 81, 6),
(7, 22, 29),
(7, 68, 114),
(7, 40, 22),
(7, 82, 81),
(7, 79, 44),
(7, 47, 25),
(7, 63, 46),
(7, 104, 59),
(7, 85, 30),
(7, 75, 89),
(7, 33, 0),
(7, 58, 93),
(7, 85, 31),
(7, 33, 11),
(7, 71, 87),
(7, 71, 73),
(7, 39, 51),
(50, 18, 7),
(74, 84, 61),
(64, 74, 64),
(64, 84, 61),
(61, 55, 49),
(49, 67, 7),
(55, 48, 55),
(55, 55, 109),
(55, 75, 113),
(55, 23, 78),
(55, 57, 14),
(55, 71, 77),
(55, 71, 26),
(55, 71, 87),
(55, 71, 73),
(109, 65, 55),
(21, 55, 96),
(21, 48, 55),
(21, 55, 109),
(21, 75, 113),
(21, 23, 78),
(21, 57, 14),
(21, 71, 77),
(21, 71, 26),
(21, 71, 87),
(21, 71, 73),
(32, 43, 28),
(32, 81, 45),
(32, 48, 55),
(32, 55, 109),
(32, 75, 113),
(32, 23, 78),
(32, 57, 14),
(32, 71, 77),
(32, 71, 26),
(32, 71, 87),
(32, 71, 73),
(28, 55, 9),
(9, 42, 45),
(45, 98, 55),
(2, 32, 3),
(2, 43, 28),
(2, 81, 45),
(2, 48, 55),
(2, 55, 109),
(2, 75, 113),
(2, 23, 78),
(2, 57, 14),
(2, 71, 77),
(2, 71, 26),
(2, 71, 87),
(2, 71, 73),
(3, 41, 62),
(3, 111, 41),
(3, 62, 32),
(38, 8, 55),
(113, 57, 1),
(1, 5, 52),
(1, 34, 25),
(52, 34, 25),
(13, 81, 102),
(102, 24, 56),
(102, 26, 24),
(102, 69, 81),
(102, 99, 81),
(56, 99, 81),
(26, 69, 81),
(24, 26, 24),
(24, 69, 81),
(73, 99, 101),
(101, 81, 87),
(51, 81, 39),
(112, 8, 36),
(27, 112, 27),
(27, 8, 36),
(36, 81, 103),
(96, 38, 96),
(96, 8, 55),
(111, 62, 32),
(41, 111, 41),
(41, 62, 32),
(91, 37, 91),
(91, 8, 81),
(37, 8, 81),
(35, 37, 35),
(35, 8, 81),
(4, 81, 91),
(5, 4, 83),
(5, 81, 91),
(83, 8, 66),
(71, 57, 14),
(71, 71, 77),
(71, 71, 26),
(71, 71, 87),
(71, 71, 73),
(14, 55, 34),
(29, 68, 114),
(114, 21, 15),
(114, 55, 96),
(114, 48, 55),
(114, 55, 109),
(114, 75, 113),
(114, 23, 78),
(114, 57, 14),
(114, 71, 77),
(114, 71, 26),
(114, 71, 87),
(114, 71, 73),
(40, 82, 81),
(40, 79, 44),
(40, 47, 25),
(40, 63, 46),
(40, 104, 59),
(40, 85, 30),
(40, 75, 89),
(40, 33, 0),
(40, 58, 93),
(40, 85, 31),
(40, 33, 11),
(40, 71, 87),
(40, 71, 73),
(40, 39, 51),
(22, 40, 22),
(22, 82, 81),
(22, 79, 44),
(22, 47, 25),
(22, 63, 46),
(22, 104, 59),
(22, 85, 30),
(22, 75, 89),
(22, 33, 0),
(22, 58, 93),
(22, 85, 31),
(22, 33, 11),
(22, 71, 87),
(22, 71, 73),
(22, 39, 51),
(97, 53, 10),
(10, 21, 16),
(89, 13, 113),
(0, 75, 19),
(19, 81, 113),
(93, 55, 12),
(12, 67, 54),
(54, 7, 70),
(54, 64, 100),
(54, 50, 94),
(70, 64, 100),
(70, 50, 94),
(100, 50, 94),
(31, 81, 17),
(17, 98, 92),
(92, 55, 108),
(46, 55, 16),
(108, 8, 88),
(88, 55, 110),
(110, 38, 16),
(110, 47, 25),
(11, 72, 44),
(11, 81, 95),
(16, 47, 25),
(25, 7, 94),
(59, 7, 86),
(59, 90, 55),
(86, 90, 55),
(30, 4, 97),
(78, 2, 76),
(106, 71, 77),
(106, 71, 26),
(77, 43, 107),
(107, 55, 42),
(79, 106, 35),
(79, 71, 77),
(79, 71, 26),
];

View File

@ -1,5 +1,6 @@
pub mod tokenizer; pub mod tokenizer;
pub mod parser; pub mod parser;
pub mod grammar;
use std::{env, fs}; use std::{env, fs};

591
src/parser.rs Normal file
View File

@ -0,0 +1,591 @@
use crate::tokenizer::Token;
use crate::grammar::{NONTERMINAL_NAMES, NONTERMINAL_RULES, TERMINAL_RULES};
/// Entry point of the parser: turns a token stream into a `ChunkNode` AST,
/// or returns a static error message describing the first problem found.
pub fn parse(tokens: Vec<Token>) -> Result<ChunkNode, &'static str>
{
    own(tokens)
}
/// Hand-written recursive-descent ("own") parser backend: parses the whole
/// token stream as a chunk, starting at position 0.
fn own(tokens: Vec<Token>) -> Result<ChunkNode, &'static str>
{
    let mut position = 0;
    parse_chunk(&tokens, &mut position)
}
/// Root of the AST: a chunk is exactly one block.
#[derive(Debug)]
pub struct ChunkNode
{
block: BlockNode
}
/// A block: zero or more statements, optionally followed by a return
/// statement.
#[derive(Debug)]
pub struct BlockNode
{
stats: Vec<StatNode>,
retstat: Option<RetstatNode>
}
/// One Lua statement; one variant per `stat` production of the grammar.
#[derive(Debug)]
pub enum StatNode
{
Semicolon,
Assignment { lhs: VarlistNode, rhs: ExplistNode },
Functioncall(FunctioncallNode),
Label(String),
Break,
Goto(String),
Do(BlockNode),
While { condition: ExpNode, body: BlockNode },
Repeat { condition: ExpNode, body: BlockNode },
If { condition: ExpNode, body: BlockNode, elseifs: Vec<ElseifNode>, else_: Option<BlockNode> },
ForEq { var: String, start: ExpNode, end: ExpNode, change: Option<ExpNode>, body: BlockNode },
ForIn { vars: Vec<String>, exps: ExplistNode, body: BlockNode },
Function { name: FuncnameNode, body: FuncbodyNode },
LocalFunction { name: String, body: FuncbodyNode },
Local { attnames: AttnamelistNode, values: Option<ExplistNode> }
}
/// A return statement with its list of returned expressions.
#[derive(Debug)]
pub struct RetstatNode
{
values: ExplistNode
}
/// An expression. Still a placeholder — no fields yet (work in progress).
#[derive(Debug)]
pub struct ExpNode
{
}
/// A comma-separated list of expressions.
#[derive(Debug)]
pub struct ExplistNode
{
exps: Vec<ExpNode>
}
/// A comma-separated list of assignable variables (assignment left side).
#[derive(Debug)]
pub struct VarlistNode
{
vars: Vec<VarNode>
}
/// A function call: the called suffix expression, the method-call receiver
/// name after ':' if any, and the argument list.
#[derive(Debug)]
pub struct FunctioncallNode
{
function: SuffixexpNode,
object_arg: Option<String>,
args: ArgsNode,
}
/// Call arguments. Placeholder — no fields yet (work in progress).
#[derive(Debug)]
pub struct ArgsNode
{
}
/// One `elseif <condition> then <body>` arm of an if statement.
#[derive(Debug)]
pub struct ElseifNode
{
condition: ExpNode,
body: BlockNode,
}
/// A function name (`a.b.c:d`). Placeholder — no fields yet.
#[derive(Debug)]
pub struct FuncnameNode
{
}
/// A function body (parameters + block). Placeholder — no fields yet.
#[derive(Debug)]
pub struct FuncbodyNode
{
}
/// Local declaration name list with attributes. Placeholder — no fields yet.
#[derive(Debug)]
pub struct AttnamelistNode
{
}
/// An assignable variable: a plain name, an indexed access `v[e]`, or a
/// member access `v.name`.
#[derive(Debug)]
pub enum VarNode
{
Name(String),
Indexed { value: PrefixexpNode, index: ExpNode },
Member { value: PrefixexpNode, name: String }
}
/// A prefix expression. Placeholder — no variants yet (work in progress).
#[derive(Debug)]
pub enum PrefixexpNode
{
}
/// Parses a whole chunk starting at `*i`; a chunk is just one block.
fn parse_chunk(tokens: &Vec<Token>, i: &mut usize) -> Result<ChunkNode, &'static str>
{
    let block = parse_block(tokens, i)?;
    Ok(ChunkNode { block })
}
/// Parses a block: a sequence of statements optionally terminated by a
/// return statement.
///
/// Stops — without consuming the token — at end of input, at `return`
/// (handed to `parse_retstat`), or at any token that closes/continues an
/// enclosing construct (`end`, `else`, `elseif`, `until`); the caller is
/// responsible for consuming those closing tokens.
fn parse_block(tokens: &Vec<Token>, i: &mut usize) -> Result<BlockNode, &'static str>
{
    let mut stats: Vec<StatNode> = Vec::new();
    // Stop at block terminators here instead of letting parse_stat fail on
    // them: every caller (do/while/if/for/repeat) checks for its own closing
    // token right after parse_block returns.
    while *i < tokens.len()
        && !matches!(tokens[*i], Token::Return | Token::End | Token::Else | Token::Elseif | Token::Until)
    {
        stats.push(parse_stat(tokens, i)?);
    }
    let retstat =
        if *i < tokens.len() && tokens[*i] == Token::Return { Some(parse_retstat(tokens, i)?) }
        else { None };
    return Ok(BlockNode { stats, retstat });
}
/// Parses a single statement starting at `tokens[*i]`, advancing `*i` past
/// everything consumed.
///
/// Dispatches on the first token. `Name` / `(` starts either an assignment
/// or a function call; those are disambiguated by the token that follows the
/// parsed suffix expression (`=` / `,` mean assignment, anything else means
/// the suffix expression itself must be a call).
fn parse_stat(tokens: &Vec<Token>, i: &mut usize) -> Result<StatNode, &'static str>
{
    if *i >= tokens.len()
    {
        return Err("Reached end of file while parsing stat");
    }
    match tokens[*i]
    {
        Token::Semicolon =>
        {
            *i += 1;
            Ok(StatNode::Semicolon)
        }
        Token::Break =>
        {
            *i += 1;
            Ok(StatNode::Break)
        }
        Token::Goto =>
        {
            *i += 1;
            if *i >= tokens.len()
            {
                return Err("Reached end of stream but expected name for goto");
            }
            return if let Token::Name(name) = &tokens[*i]
            {
                *i += 1;
                Ok(StatNode::Goto(name.clone()))
            }
            else
            {
                Err("Expecting name for goto")
            };
        }
        Token::Do =>
        {
            *i += 1;
            let body = parse_block(tokens, i)?;
            if *i >= tokens.len() || tokens[*i] != Token::End
            {
                return Err("Missing 'end' for do block");
            }
            *i += 1;
            return Ok(StatNode::Do(body));
        }
        Token::While =>
        {
            *i += 1;
            let condition = parse_exp(tokens, i)?;
            if *i >= tokens.len() || tokens[*i] != Token::Do
            {
                return Err("Expected 'do' after while condition")
            }
            *i += 1;
            let body = parse_block(tokens, i)?;
            if *i >= tokens.len() || tokens[*i] != Token::End
            {
                return Err("Missing 'end' for do block");
            }
            *i += 1;
            return Ok(StatNode::While { condition, body });
        }
        Token::Repeat =>
        {
            *i += 1;
            let body = parse_block(tokens, i)?;
            if *i >= tokens.len() || tokens[*i] != Token::Until
            {
                return Err("Expected 'until' after repeat body");
            }
            *i += 1;
            return Ok(StatNode::Repeat { condition: parse_exp(tokens, i)?, body });
        }
        Token::If =>
        {
            *i += 1;
            let condition = parse_exp(tokens, i)?;
            if *i >= tokens.len() || tokens[*i] != Token::Then
            {
                return Err("Expected 'then' after if condition");
            }
            *i += 1;
            let body = parse_block(tokens, i)?;
            if *i >= tokens.len()
            {
                return Err("Reached end of tokens while parsing if");
            }
            let mut elseifs: Vec<ElseifNode> = Vec::new();
            while tokens[*i] == Token::Elseif
            {
                *i += 1;
                let elseif_condition = parse_exp(tokens, i)?;
                if *i >= tokens.len() || tokens[*i] != Token::Then
                {
                    return Err("Expected 'then' after elseif condition");
                }
                *i += 1;
                elseifs.push(ElseifNode { condition: elseif_condition, body: parse_block(tokens, i)? });
            }
            if *i >= tokens.len()
            {
                return Err("Reached end of tokens while parsing if");
            }
            let else_ = if tokens[*i] == Token::Else
            {
                *i += 1;
                Some(parse_block(tokens, i)?)
            }
            else
            {
                None
            };
            if *i >= tokens.len() || tokens[*i] != Token::End
            {
                return Err("Expected 'end' to close if");
            }
            *i += 1;
            return Ok(StatNode::If { condition, body, elseifs, else_ });
        }
        Token::For =>
        {
            *i += 1;
            if *i >= tokens.len()
            {
                return Err("Reached end of tokens while parsing for");
            }
            if let Token::Name(name) = &tokens[*i]
            {
                *i += 1;
                if *i >= tokens.len()
                {
                    return Err("Reached end of tokens while parsing for after first name");
                }
                match tokens[*i]
                {
                    // Numeric for: `for name = start, end [, change] do body end`
                    Token::Equals =>
                    {
                        *i += 1;
                        let start = parse_exp(tokens, i)?;
                        if *i >= tokens.len() || tokens[*i] != Token::Comma
                        {
                            return Err("Expected ',' after 'for eq' start value");
                        }
                        *i += 1;
                        let end = parse_exp(tokens, i)?;
                        if *i >= tokens.len()
                        {
                            return Err("Reached end of tokens after end value in 'for eq'");
                        }
                        let change = if tokens[*i] == Token::Comma
                        {
                            *i += 1;
                            Some(parse_exp(tokens, i)?)
                        }
                        else
                        {
                            None
                        };
                        if *i >= tokens.len() || tokens[*i] != Token::Do
                        {
                            return Err("Expected 'do' after 'for eq' head");
                        }
                        *i += 1;
                        let body = parse_block(tokens, i)?;
                        if *i >= tokens.len() || tokens[*i] != Token::End
                        {
                            return Err("Expected 'end' to close 'for eq'");
                        }
                        // Consume the 'end' like every other branch does, so
                        // it is not left in the stream for the caller.
                        *i += 1;
                        return Ok(StatNode::ForEq { var: name.clone(), start, end, change, body });
                    }
                    // Generic for: `for name {, name} in explist do body end`
                    Token::Comma =>
                    {
                        let mut names = Vec::from([name.clone()]);
                        while tokens[*i] == Token::Comma
                        {
                            *i += 1;
                            if *i >= tokens.len()
                            {
                                return Err("Reached end of tokens while parsing 'for in' namelist");
                            }
                            if let Token::Name(next_name) = &tokens[*i]
                            {
                                names.push(next_name.clone());
                            }
                            else
                            {
                                return Err("Expected another name in 'for in' namelist");
                            }
                            *i += 1;
                            if *i >= tokens.len()
                            {
                                return Err("Reached end of tokens while parsing 'for in' namelist after name");
                            }
                        }
                        if tokens[*i] != Token::In
                        {
                            return Err("Expected 'in' after 'for in' namelist");
                        }
                        *i += 1;
                        let exps = parse_explist(tokens, i)?;
                        if *i >= tokens.len() || tokens[*i] != Token::Do
                        {
                            return Err("Expected 'do' after 'for in' explist");
                        }
                        *i += 1;
                        let body = parse_block(tokens, i)?;
                        if *i >= tokens.len() || tokens[*i] != Token::End
                        {
                            return Err("Expected 'end' after 'for in' body");
                        }
                        *i += 1;
                        return Ok(StatNode::ForIn { vars: names, exps, body });
                    }
                    _ => Err("Unexpected token after first name in for")
                }
            }
            else
            {
                return Err("Expected name after 'for'");
            }
        }
        Token::Function =>
        {
            *i += 1;
            let funcname = parse_funcname(tokens, i)?;
            return Ok(StatNode::Function { name: funcname, body: parse_funcbody(tokens, i)? });
        }
        Token::Local =>
        {
            *i += 1;
            if *i >= tokens.len()
            {
                return Err("Reached end of tokens while parsing local");
            }
            if tokens[*i] == Token::Function
            {
                *i += 1;
                if *i >= tokens.len()
                {
                    return Err("Reached end of tokens while parsing local function");
                }
                if let Token::Name(name) = &tokens[*i]
                {
                    *i += 1;
                    return Ok(StatNode::LocalFunction { name: name.clone(), body: parse_funcbody(tokens, i)? });
                }
                else
                {
                    return Err("Expected local function name");
                }
            }
            // Plain local declaration, with optional '=' initializer list.
            let attnames = parse_attnamelist(tokens, i)?;
            let initials = if *i < tokens.len() && tokens[*i] == Token::Equals
            {
                *i += 1;
                Some(parse_explist(tokens, i)?)
            }
            else
            {
                None
            };
            return Ok(StatNode::Local { attnames, values: initials });
        }
        Token::ColonColon =>
        {
            *i += 1;
            if *i >= tokens.len()
            {
                return Err("Reached end of tokens while parsing label");
            }
            if let Token::Name(name) = &tokens[*i]
            {
                *i += 1;
                if *i >= tokens.len() || tokens[*i] != Token::ColonColon
                {
                    return Err("Expected '::' after name in label declaration");
                }
                *i += 1;
                return Ok(StatNode::Label(name.clone()));
            }
            else
            {
                return Err("Expected a name after '::' in label declaration")
            }
        }
        Token::Name(_) | Token::RoundOpen =>
        {
            // assignment or functioncall
            let suffix_expression = parse_suffixexp(tokens, i)?;
            if *i >= tokens.len()
            {
                // The suffix expression ended the stream, so the statement
                // can only be a function call.
                todo!("turn suffix expression into a function call statement")
            }
            match tokens[*i]
            {
                Token::Equals =>
                {
                    *i += 1;
                    return Ok(StatNode::Assignment { lhs: VarlistNode { vars: Vec::from([suffix_expression_to_var(suffix_expression)]) }, rhs: parse_explist(tokens, i)? });
                }
                Token::Comma =>
                {
                    let mut vars = Vec::from([suffix_expression_to_var(suffix_expression)]);
                    // Bounds check added so a trailing ',' at end of input
                    // errors instead of indexing out of bounds.
                    while *i < tokens.len() && tokens[*i] == Token::Comma
                    {
                        *i += 1;
                        vars.push(parse_var(tokens, i)?);
                    }
                    if *i >= tokens.len() || tokens[*i] != Token::Equals
                    {
                        return Err("Expected '=' for assignment");
                    }
                    *i += 1;
                    return Ok(StatNode::Assignment { lhs: VarlistNode { vars }, rhs: parse_explist(tokens, i)? });
                }
                _ =>
                {
                    // A bare suffix expression is only a legal statement when
                    // it is a function call; converting it needs
                    // SuffixexpNode's variants, which are not defined yet.
                    todo!("turn suffix expression into a function call statement")
                }
            }
        }
        _ => Err("Unexpected token while parsing stat")
    }
}
// Converts an already parsed suffix expression into an assignable var node
// (left-hand side of an assignment). Not implemented yet.
fn suffix_expression_to_var(suffixexp: SuffixexpNode) -> VarNode
{
todo!()
}
// Parses a single var (name / indexed / member access). Not implemented yet.
fn parse_var(tokens: &Vec<Token>, i: &mut usize) -> Result<VarNode, &'static str>
{
todo!()
}
// Parses a call argument list. Not implemented yet.
fn parse_args(tokens: &Vec<Token>, i: &mut usize) -> Result<ArgsNode, &'static str>
{
todo!()
}
fn parse_suffixexp(tokens: &Vec<Token>, i: &mut usize) -> Result<SuffixexpNode, &'static str>
{
// primaryexp { '.' 'Name' | '[' exp']' | ':' 'Name' args | args }
todo!()
}
// Parses a return statement. Not implemented yet; the todo! message prints
// the token it stopped on to aid debugging.
fn parse_retstat(tokens: &Vec<Token>, i: &mut usize) -> Result<RetstatNode, &'static str>
{
todo!("{:?}", tokens[*i])
}
// Parses an expression. Not implemented yet.
fn parse_exp(tokens: &Vec<Token>, i: &mut usize) -> Result<ExpNode, &'static str>
{
todo!("{:?}", tokens[*i])
}
// Parses a comma-separated expression list. Not implemented yet.
fn parse_explist(tokens: &Vec<Token>, i: &mut usize) -> Result<ExplistNode, &'static str>
{
todo!("{:?}", tokens[*i])
}
// Parses a function name (`a.b.c:d`). Not implemented yet.
fn parse_funcname(tokens: &Vec<Token>, i: &mut usize) -> Result<FuncnameNode, &'static str>
{
todo!("{:?}", tokens[*i])
}
// Parses a function body (parameter list + block + end). Not implemented yet.
fn parse_funcbody(tokens: &Vec<Token>, i: &mut usize) -> Result<FuncbodyNode, &'static str>
{
todo!("{:?}", tokens[*i])
}
// Parses a local declaration's name-and-attribute list. Not implemented yet.
fn parse_attnamelist(tokens: &Vec<Token>, i: &mut usize) -> Result<AttnamelistNode, &'static str>
{
todo!("{:?}", tokens[*i])
}
//===============================================================================================================================================
//===============================================================================================================================================
//===============================================================================================================================================
//===============================================================================================================================================
//===============================================================================================================================================
//===============================================================================================================================================
// Placeholder for a disambiguated CYK parse-tree node (result of
// `disambiguate`). No fields yet — work in progress.
#[derive(Debug, Clone, Copy)]
pub struct Node
{
}
// Placeholder for a possibly ambiguous CYK parse-tree node (result of
// `traverse_back`). No fields yet — work in progress.
#[derive(Debug, Clone, Copy)]
pub struct AmbiguousNode
{
}
/// Recognizes `tokens` with the CYK algorithm over the Chomsky-normal-form
/// grammar in `grammar.rs`.
///
/// Only membership is implemented so far: on success it prints a message
/// and hits `todo!()` (tree extraction via the commented-out back-pointer
/// table is not wired up yet); on failure it returns `Err`.
pub fn cyk(tokens: Vec<Token>) -> Result<ChunkNode, &'static str>
{
    let r = NONTERMINAL_NAMES.len();
    let n = tokens.len();
    // The final membership lookup indexes with `n - 1`, which would
    // underflow (and panic) for an empty input — reject it up front.
    if n == 0
    {
        return Err("Input is not part of the language");
    }
    // Flattened 3D index into `p`: (span length - 1, start position, nonterminal id).
    macro_rules! index {
        ($x:expr, $y:expr, $z:expr) => {
            ($x + $y * n + ($z as usize) * n * n)
        };
    }
    // p[index!(l - 1, s - 1, a)] == true iff nonterminal `a` derives the
    // substring of length `l` starting at (1-based) position `s`.
    let mut p = vec![false; n * n * r];
    //let mut back: Vec<Vec<(usize, u8, u8)>> = vec![Vec::new(); n * n * r];
    println!("{n}, {r}, {}", p.len());
    // Length-1 spans: apply the terminal rules. Payload-carrying tokens
    // (Name / StringLiteral / Numeral) are matched by variant only.
    for s in 0..n
    {
        for (index, token) in TERMINAL_RULES
        {
            if let Token::Name(_) = tokens[s]
            {
                if let Token::Name(_) = token
                {
                    p[index!(0, s, index)] = true
                }
            }
            else if let Token::StringLiteral(_) = tokens[s]
            {
                if let Token::StringLiteral(_) = token
                {
                    p[index!(0, s, index)] = true
                }
            }
            else if let Token::Numeral(_) = tokens[s]
            {
                if let Token::Numeral(_) = token
                {
                    p[index!(0, s, index)] = true
                }
            }
            else if token == tokens[s]
            {
                p[index!(0, s, index)] = true
            }
        }
    }
    println!("Done initializing");
    // Longer spans: for each length `l`, start `s` and split point `_p`,
    // apply every binary rule A -> B C (1-based indices, as in the classic
    // CYK formulation).
    for l in 2..=n
    {
        for s in 1..=(n - l + 1)
        {
            for _p in 1..=(l - 1)
            {
                for &(a, b, c) in &NONTERMINAL_RULES
                {
                    if p[index!(_p - 1, s - 1, b)] && p[index!(l - _p - 1, s + _p - 1, c)]
                    {
                        let index = index!(l - 1, s - 1, a);
                        p[index] = true;
                        /* if !back[index].contains(&(_p, b, c))
                        {
                            back[index].push((_p, b, c));
                        }*/
                    }
                }
            }
        }
        println!("{l}");
    }
    // The whole input is accepted iff the start symbol derives the full span.
    let start_index = NONTERMINAL_NAMES.iter().position(|x| x == &"S_0").expect("no start index found");
    if p[index!(n - 1, 0, start_index)]
    {
        println!("Is part of the language");
        todo!()
        //return Ok(disambiguate(traverse_back(back, tokens, n, 1, start_index)));
    }
    else
    {
        return Err("Input is not part of the language")
    }
}
// Reconstructs a (possibly ambiguous) parse tree from the CYK back-pointer
// table `back` for nonterminal `a` over the span of length `l` starting at
// `s`. Not implemented yet; `cyk` currently only answers membership and its
// back-pointer bookkeeping is commented out.
fn traverse_back(back: Vec<Vec<(usize, u8, u8)>>, tokens: Vec<Token>, l: usize, s: usize, a: usize) -> AmbiguousNode
{
todo!()
}
// Picks a single concrete tree out of an ambiguous parse result.
// Not implemented yet.
fn disambiguate(root: AmbiguousNode) -> Node
{
todo!()
}

View File

@ -1,4 +1,4 @@
#[derive(Debug, Clone)] #[derive(Debug, Clone, PartialEq)]
pub enum Token pub enum Token
{ {
Name(String), Name(String),
@ -11,8 +11,7 @@ pub enum Token
EqualsEquals, TildeEquals, LtEquals, GtEquals, Lt, Gt, Equals, EqualsEquals, TildeEquals, LtEquals, GtEquals, Lt, Gt, Equals,
RoundOpen, RoundClosed, CurlyOpen, CurlyClosed, SquareOpen, SquareClosed, ColonColon, RoundOpen, RoundClosed, CurlyOpen, CurlyClosed, SquareOpen, SquareClosed, ColonColon,
Semicolon, Colon, Comma, Dot, DotDot, DotDotDot, Semicolon, Colon, Comma, Dot, DotDot, DotDotDot,
IntLiteral(String), Numeral(String),
HexLiteral(String),
StringLiteral(String), StringLiteral(String),
} }
@ -171,7 +170,7 @@ fn tokenize_char(state: &mut TokenizerState, ch: char, last_index: &mut i32, ind
'/' => tokenize_terminal_no_str(last_index, *index, token, state, Some(Token::Slash), TokenizerState::Slash), '/' => tokenize_terminal_no_str(last_index, *index, token, state, Some(Token::Slash), TokenizerState::Slash),
';' => tokenize_terminal_no_str(last_index, *index, token, state, Some(Token::Semicolon), TokenizerState::Semicolon), ';' => tokenize_terminal_no_str(last_index, *index, token, state, Some(Token::Semicolon), TokenizerState::Semicolon),
'^' => tokenize_terminal_no_str(last_index, *index, token, state, Some(Token::Caret), TokenizerState::Caret), '^' => tokenize_terminal_no_str(last_index, *index, token, state, Some(Token::Caret), TokenizerState::Caret),
'0' => tokenize_terminal(last_index, *index, token, state, Some(Token::IntLiteral("0".to_string())), TokenizerState::Zero, token_str, ch), '0' => tokenize_terminal(last_index, *index, token, state, Some(Token::Numeral("0".to_string())), TokenizerState::Zero, token_str, ch),
'"' => '"' =>
{ {
*token = None; *token = None;
@ -191,7 +190,7 @@ fn tokenize_char(state: &mut TokenizerState, ch: char, last_index: &mut i32, ind
} }
else if ch.is_numeric() && ch.is_ascii() else if ch.is_numeric() && ch.is_ascii()
{ {
tokenize_terminal(last_index, *index, token, state, Some(Token::IntLiteral(token_str.clone())), TokenizerState::Number, token_str, ch); tokenize_terminal(last_index, *index, token, state, Some(Token::Numeral(token_str.clone())), TokenizerState::Number, token_str, ch);
} }
else else
{ {
@ -422,7 +421,7 @@ fn tokenize_char(state: &mut TokenizerState, ch: char, last_index: &mut i32, ind
{ {
*last_index = *index as i32; *last_index = *index as i32;
token_str.push(ch); token_str.push(ch);
*token = Some(Token::IntLiteral(token_str.clone())); *token = Some(Token::Numeral(token_str.clone()));
} }
else else
{ {
@ -441,7 +440,7 @@ fn tokenize_char(state: &mut TokenizerState, ch: char, last_index: &mut i32, ind
{ {
*last_index = *index as i32; *last_index = *index as i32;
token_str.push(ch); token_str.push(ch);
*token = Some(Token::HexLiteral(token_str.clone())); *token = Some(Token::Numeral(token_str.clone()));
*state = TokenizerState::HexNumber; *state = TokenizerState::HexNumber;
} }
else else
@ -469,7 +468,7 @@ fn tokenize_char(state: &mut TokenizerState, ch: char, last_index: &mut i32, ind
{ {
*last_index = *index as i32; *last_index = *index as i32;
token_str.push(ch); token_str.push(ch);
*token = Some(Token::HexLiteral(token_str.clone())); *token = Some(Token::Numeral(token_str.clone()));
} }
else else
{ {
@ -494,7 +493,7 @@ fn tokenize_char(state: &mut TokenizerState, ch: char, last_index: &mut i32, ind
{ {
*last_index = *index as i32; *last_index = *index as i32;
token_str.push(ch); token_str.push(ch);
*token = Some(Token::IntLiteral(token_str.clone())); *token = Some(Token::Numeral(token_str.clone()));
} }
else else
{ {