use crate::parser::Module;
-use crate::tokenizer::Token;
+use crate::tokenizer::{Token, tokenize};
use hylo_ir::*;
+use std::collections::HashSet;
+use std::path::PathBuf;
+
+// Lowers a numeric token into a stack push, converting the value to u64 with
+// `as`. NOTE(review): for the float variants (`NumF32`/`NumF64`) `as u64`
+// truncates toward zero rather than preserving the bit pattern — confirm
+// that is the intended encoding.
macro_rules! push_num {
($num:ident) => { IR::StackPush(*$num as u64) }
}
-pub fn generate(module: &Module) -> IRModule {
+/// Parses and generates IR for the module file named by `specifier`.
+///
+/// Returns `None` when the (canonicalized) path has already been imported,
+/// so every file is compiled at most once and import cycles terminate.
+fn import(specifier: &str, imported: &mut HashSet<PathBuf>) -> Option<ModuleWithImports> {
+    // TODO paths relative to the files, not just the invocation
+    let path = PathBuf::from(specifier).canonicalize().unwrap();
+    if imported.contains(&path) {
+        return None;
+    }
+    // Fix: record the path *before* recursing into generate_internal.
+    // Previously nothing ever inserted into `imported`, so the contains()
+    // guard above never fired — duplicate imports were compiled twice and
+    // mutually-importing files recursed forever.
+    imported.insert(path.clone());
+
+    let contents = std::fs::read_to_string(&path).unwrap();
+
+    Some(generate_internal(&Module::parse(tokenize(&contents)), imported))
+}
+
+/// Flattens a tree of modules into one `IRModule`.
+///
+/// Each import's data section is appended after everything gathered so far,
+/// and every `IR::StackPushString` index inside the import's text section is
+/// shifted by the running data length so it still addresses the same string
+/// after concatenation.
+fn collapse_module(mut module_w: ModuleWithImports) -> IRModule {
+    let mut module = module_w.module.take().unwrap();
+    let mut data = std::mem::take(&mut module.data);
+    // Offset to add to string indices of the next import's text section.
+    let mut prev_data_len = data.len();
+    let mut text = std::mem::take(&mut module.text);
+
+    module_w.imports.take().unwrap().into_iter().for_each(|imported| {
+        // Collapse the import's own subtree first (depth-first).
+        let mut ir_mod = collapse_module(imported);
+        let mut mod_data = std::mem::take(&mut ir_mod.data);
+        let mod_data_len = mod_data.len();
+        data.append(&mut mod_data);
+
+        // Rebase string references: they were relative to the import's data.
+        let mut mod_text = std::mem::take(&mut ir_mod.text).into_iter().map(|ir| {
+            if let IR::StackPushString(num) = ir {
+                IR::StackPushString(num + prev_data_len)
+            } else {
+                ir
+            }
+        }).collect::<Vec<_>>();
+        text.append(&mut mod_text);
+
+        prev_data_len += mod_data_len;
+    });
+
+    IRModule {
+        data,
+        text,
+    }
+}
+
+/// Entry point: compiles the file at `path` together with all of its
+/// transitive imports into a single flattened `IRModule`.
+pub fn compile(path: &str) -> IRModule {
+    // TODO remove unused words
+    collapse_module(import(path, &mut HashSet::new()).unwrap())
+}
+
+/// An `IRModule` paired with the modules it imports, before flattening.
+///
+/// Both fields are `Option` so `collapse_module` can move them out with
+/// `.take()`; they are always `Some` when freshly built by
+/// `generate_internal`.
+struct ModuleWithImports {
+    module: Option<IRModule>,
+    imports: Option<Vec<ModuleWithImports>>
+}
+
+/// Generates IR for `module`, recursively compiling any modules it imports.
+///
+/// `imported` is the set of canonical paths already visited; it is threaded
+/// through nested `import` calls so each file is compiled at most once.
+fn generate_internal(module: &Module, imported: &mut HashSet<PathBuf>) -> ModuleWithImports {
// Eventually these will end up being sections in assembly
let mut text = vec![];
let mut data = vec![];
+    let mut imports = vec![];
+
+    // True when the previous instruction was `import`, so the next string
+    // literal is a module specifier rather than data.
+    let mut last_was_import = false;
+
text.push(module.words.iter().map(|def| {
let mut body = def.instructions.iter().map(|inst| {
-match inst {
+        let mapped_ir = match inst {
Token::Word(word) => {
match *word {
"@" => IR::Load,
"-" => IR::SubtractU64,
"*" => IR::MultiplyU64,
"/" => IR::DivideU64,
+                    "import" => IR::Import,
// TODO num type specfic math like `+:i32`, etc.
_ => IR::Call(String::from(*word))
}
},
Token::String(text) => {
-data.push(IR::StringDef(String::from(*text)));
-IR::StackPushString(data.len() - 1)
+                if last_was_import {
+                    if let Some(module) = import(text, imported) {
+                        imports.push(module);
+                    }
+                    IR::ImportString // This will be elided later
+                } else {
+                    data.push(IR::StringDef(String::from(*text)));
+                    IR::StackPushString(data.len() - 1)
+                }
},
Token::NumU8(num) => push_num!(num),
Token::NumI8(num) => push_num!(num),
Token::NumI64(num) => push_num!(num),
Token::NumF32(num) => push_num!(num),
Token::NumF64(num) => push_num!(num),
-}
+        };
+        // Only a bare `import` word arms specifier handling for the next token.
+        last_was_import = matches!(mapped_ir, IR::Import);
+        mapped_ir
}).collect::<Vec<_>>();
let mut result = vec![IR::Label(def.name.to_string())];
+        // Fix: the closure previously ended on the `let` above, yielding `()`
+        // and discarding both the label and the word's body. Return the
+        // labelled body so `.flatten()` below has instructions to flatten.
+        result.append(&mut body);
+        result
}).flatten().collect::<Vec<_>>());
-IRModule {
-text: text.into_iter().flatten().collect::<Vec<_>>(),
-data
+    ModuleWithImports {
+        module: Some(IRModule {
+            text: text.into_iter().flatten().collect::<Vec<_>>(),
+            data,
+        }),
+        // Fix: return the collected imports instead of an empty Vec —
+        // otherwise every recursively compiled module was silently dropped
+        // and collapse_module never saw them.
+        imports: Some(imports),
}
}