Implement importing

This commit is contained in:
0x4261756D 2023-01-23 05:28:13 +01:00
parent 4286fb4424
commit 476aa8bfb2
5 changed files with 92 additions and 6 deletions

View File

@ -3,6 +3,7 @@ use std::collections::HashMap;
use std::env;
use std::fs;
use std::iter::Peekable;
use std::path::PathBuf;
use std::process::Command;
use std::process::Stdio;
use std::process::exit;
@ -130,7 +131,7 @@ fn main()
let f = f.unwrap();
let file_content = fs::read_to_string(f.path()).unwrap().replace("\r\n", "\n");
println!("========NOW TESTING {:?}========", f.path());
match compile(&file_content, &intrinsics, interpret, run, debug)
match compile(&file_content, f.path().to_str().unwrap(), &intrinsics, interpret, run, debug)
{
Ok(maybe_msg) =>
{
@ -188,7 +189,7 @@ fn main()
"-c" | "--compile" =>
{
let file_content = fs::read_to_string(&args[2]).expect("Could not read the source file");
match compile(&file_content, &intrinsics, interpret, run, debug)
match compile(&file_content, &args[2], &intrinsics, interpret, run, debug)
{
Ok(maybe_msg) =>
{
@ -204,12 +205,14 @@ fn main()
}
}
fn compile(file_content: &String, intrinsics: &HashMap<&str, (Vec<Datatype>, Vec<Datatype>)>, interpret: bool, run: bool, debug: bool) -> Result<Option<String>, String>
fn compile(file_content: &String, file_path: &str, intrinsics: &HashMap<&str, (Vec<Datatype>, Vec<Datatype>)>, interpret: bool, run: bool, debug: bool) -> Result<Option<String>, String>
{
let mut tokens: Vec<Token> = tokenize(&file_content)?;
println!("---Done tokenizing, got {} tokens---", tokens.len());
let functions: Vec<Function> = extract_functions(&mut tokens, &intrinsics, debug)?;
let mut functions: Vec<Function> = extract_functions(&mut tokens, &intrinsics, debug)?;
println!("---Done extracting functions, got {} functions and reduced the token count to {}---", functions.len(), tokens.len());
resolve_imports(&mut tokens, &mut functions, file_path, &mut Vec::from([PathBuf::from(file_path)]), intrinsics, debug)?;
println!("---Done importing files---");
let mut arrays: Vec<Arr> = extract_arrays(&mut tokens, &intrinsics, &functions, debug)?;
println!("---Done extracting arrays, got {} arrays and reduced the token count to {}---", arrays.len(), tokens.len());
let operations = parse_until_delimiter(&mut tokens.iter().peekable(), &intrinsics, None, debug)?;
@ -290,6 +293,62 @@ fn compile(file_content: &String, intrinsics: &HashMap<&str, (Vec<Datatype>, Vec
return Ok(output);
}
/// Resolves `import` statements by splicing imported files into the compilation.
///
/// Scans `tokens` for a `Token::Import` followed by a string-literal path,
/// resolves that path relative to the importing file's directory, tokenizes
/// the imported file, extracts its functions into `functions`, and recurses so
/// that imports-of-imports are handled as well. `visited_paths` accumulates
/// canonicalized paths already processed, guaranteeing termination on import
/// cycles (exercised by the recursive_import*.qbl tests). On success, `tokens`
/// is rewritten with every import token (and its path literal) removed.
///
/// NOTE(review): the imported file's remaining top-level (non-function) tokens
/// are discarded — only its functions are merged in. Confirm this is intended.
///
/// Errors are returned as strings tagged with this source's line number for
/// unreadable or unresolvable import paths, and with the qbl source position
/// when an import keyword is not followed by a string literal.
fn resolve_imports(tokens: &mut Vec<Token>, functions: &mut Vec<Function>, file_path: &str, visited_paths: &mut Vec<PathBuf>, intrinsics: &HashMap<&str, (Vec<Datatype>, Vec<Datatype>)>, debug: bool) -> Result<(), String>
{
	let mut tokens_iter = tokens.iter();
	let mut new_tokens: Vec<Token> = Vec::new();
	while let Some(token) = tokens_iter.next()
	{
		if let Token::Import(line, col) = token
		{
			if let Some(Token::StringLit(import_path, _, _)) = tokens_iter.next()
			{
				// Import paths are relative to the directory of the importing file.
				match fs::canonicalize(format!("{}/{}", PathBuf::from(file_path).parent().unwrap_or(&PathBuf::from(".")).display(), import_path))
				{
					Ok(full_import_path) =>
					{
						if visited_paths.contains(&full_import_path)
						{
							// Already imported (possibly via a cycle); skip so recursion terminates.
							println!("--Already visited {}--", full_import_path.display());
						}
						else
						{
							visited_paths.push(full_import_path.clone());
							// Borrow instead of moving so the path stays usable for the recursive call below.
							let maybe_file_content = fs::read_to_string(&full_import_path);
							match maybe_file_content
							{
								Ok(file_content) =>
								{
									let mut import_tokens: Vec<Token> = tokenize(&file_content)?;
									// BUGFIX: report the imported file's token count
									// (was `tokens.len()`, the importer's count).
									println!("--Done tokenizing the imported file at {}:{}, got {} tokens--", line, col, import_tokens.len());
									let import_functions = extract_functions(&mut import_tokens, &intrinsics, debug)?;
									// BUGFIX: nested imports must resolve relative to the
									// imported file, not the file that started the chain
									// (was passing `file_path` unchanged).
									resolve_imports(&mut import_tokens, functions, full_import_path.to_str().unwrap_or(file_path), visited_paths, intrinsics, debug)?;
									println!("--Done extracting {} functions--", import_functions.len());
									functions.extend(import_functions);
									println!("--Now totalling {} functions--", functions.len());
								}
								Err(e) => return Err(format!("{}: {}", line!(), e.to_string()))
							}
						}
					}
					Err(e) => return Err(format!("{}: {} {}/{}", line!(), e.to_string(), file_path, import_path))
				}
			}
			else
			{
				return Err(format!("Expected an import location at {}:{}", line, col));
			}
		}
		else
		{
			new_tokens.push(token.clone());
		}
	}
	// Replace the original stream with the import-free token list.
	tokens.clear();
	tokens.extend_from_slice(&new_tokens);
	return Ok(());
}
struct AssemblyData
{
strings: String,
@ -432,7 +491,7 @@ fn generate_assembly_linux_x64_block(operations: &Vec<Operation>, functions: &Ve
}
Datatype::String =>
{
data.strings += format!("\tstr_{}_{}: db {}, {}, {}, {}, {}, {}, {}, {}, \"{}\", 0\n",
data.strings += format!("\tstr_{}_{}: db {}, {}, {}, {}, {}, {}, {}, {}, \"{}\", 0\n",
line, col,
value.len() % 256,
(value.len() >> 8) % 256,
@ -1589,7 +1648,7 @@ fn parse_until_delimiter(tokens_iter: &mut Peekable<std::slice::Iter<Token>>, in
}
Token::Import(line, col) =>
{
todo!()
return Err(format!("Unexpected import token at {}:{}, should have been resolved before, probably a compiler bug", line, col));
}
Token::Keyword(word, line, col) =>
{
@ -1758,6 +1817,10 @@ fn tokenize(text: &str) -> Result<Vec<Token>, String>
{
tokens.push(Token::BoolLit(word.clone(), line, col));
}
else if word == "import"
{
tokens.push(Token::Import(line, col));
}
else
{
tokens.push(Token::Keyword(word.clone(), line, col));

11
std.qbl Normal file
View File

@ -0,0 +1,11 @@
function bool => str boolToStr
{
if
{
"true"
}
else
{
"false"
}
}

4
tests/basic_import.qbl Normal file
View File

@ -0,0 +1,4 @@
import "../std.qbl"
true boolToStr println

View File

@ -0,0 +1,7 @@
import "recursive_import_2.qbl"
import "recursive_import.qbl"
function => test
{
}

View File

@ -0,0 +1 @@
import "recursive_import.qbl"