//! Croissant Runtime — instruction parser
//! (crsn/crsn/src/asm/parse/parse_instr.rs)
use sexp::{Sexp, SourcePosition, Atom, atom_s};
use crate::asm::error::CrsnError;
use crate::asm::instr::{Flatten, InstrWithBranches};
use crate::asm::parse::arg_parser::TokenParser;
use crate::asm::parse::parse_cond::parse_cond_branch;
use crate::asm::parse::parse_routine::parse_routine;
use crate::asm::parse::ParserContext;
use crate::asm::parse::sexp_expect::{expect_list, expect_string_atom, expect_any_string_atom};
use crate::asm::patches::NextOrErr;
use crate::module::ParseRes;
use crate::asm::patches::ErrSetFile;
use super::parse_op::parse_op;
use std::path::{PathBuf};
use std::convert::TryFrom;
use crate::asm::read_source_file;
use crate::asm::instr::flatten::jumps_to_skips;
pub fn parse_instructions(items: impl Iterator<Item=Sexp>, pos: &SourcePosition, pcx: &ParserContext) -> Result<Box<dyn Flatten>, CrsnError> {
let mut parsed: Vec<Box<dyn Flatten>> = vec![];
'exprs: for expr in items {
let (tokens, listpos) = expect_list(expr, false)?;
let mut toki = tokens.into_iter();
let (mut name, namepos) = expect_string_atom(toki.next_or_err(listpos.clone(), "Expected instruction name token")?)?;
let parsing_expr = pcx.state.borrow().parsing_expr;
if parsing_expr {
if let Some(n) = name.strip_prefix('=') {
name = n.to_string();
}
}
if name == "include" {
// TODO move this to a separate function and get rid of err_file()
if parsing_expr {
return Err(CrsnError::Parse("Illegal syntax in const expression".into(), pos.clone()));
}
let (mut path, _namepos) = expect_any_string_atom(toki.next_or_err(listpos.clone(), "Expected file path or name to include")?)?;
// add extension if missing
let last_piece = path.split('/').rev().next().unwrap();
if !last_piece.contains('.') {
path.push_str(".csn");
}
trace!("*** include, raw path: {}", path);
let state = pcx.state.borrow();
let this_file = &state.files[state.active_file];
let new_file = PathBuf::try_from(path).unwrap();
let new_pb = if !new_file.is_absolute() {
let parent = this_file.parent().expect("file has parent");
let fixed = parent.join(&new_file);
trace!("Try to resolve: {}", fixed.display());
fixed.canonicalize()?
} else {
new_file.to_owned()
};
drop(state);
drop(new_file);
let (old_af, af) = {
let mut state = pcx.state.borrow_mut();
let old_af = state.active_file;
let af = state.files.len();
state.active_file = af;
state.files.push(new_pb.clone());
(old_af, af as u32)
};
let loaded = read_source_file(&new_pb)
.err_file(af)?;
let (items, _pos) =
expect_list(
sexp::parse(&loaded)
.map_err(|mut e| {
e.pos.file = af;
e
})?,
true)
.err_file(af)?;
let sub_parsed = parse_instructions(items.into_iter(), pos, pcx)
.err_file(af)?;
let mut flat = sub_parsed.flatten(&pcx.state.borrow().label_num)
.err_file(af)?;
flat.iter_mut().for_each(|op| {
op.pos.file = af as u32;
});
trace!("inner falt {:#?}", flat);
parsed.push(Box::new(flat));
{
let mut state = pcx.state.borrow_mut();
state.active_file = old_af;
}
continue;
}
if name == "proc" {
if parsing_expr {
return Err(CrsnError::Parse("Illegal syntax in const expression".into(), pos.clone()));
}
parsed.push(parse_routine(toki, pos, pcx)?);
continue;
}
// Let extensions parse custom syntax
let mut token_parser = TokenParser::new(toki.collect(), &listpos, pcx);
for p in pcx.parsers {
token_parser = match p.parse_syntax(pos, &name, token_parser) {
Ok(ParseRes::Parsed(op)) => {
parsed.push(op);
continue 'exprs;
},
Ok(ParseRes::ParsedNone) => {
continue 'exprs;
},
Ok(ParseRes::Unknown(to_reuse)) => {
if to_reuse.parsing_started() {
panic!("Module \"{}\" started parsing syntax, but returned Unknown!", p.name());
}
to_reuse
}
Err(err) => {
return Err(err);
}
}
}
// Get back the original iterator
let token_count = token_parser.len();
let toki : std::vec::IntoIter<Sexp> = token_parser.into_iter();
let branch_tokens = toki.clone().rev()
.take_while(|e| {
if let Sexp::List(ref items, _) = e {
if items.len() > 1 {
if let Sexp::Atom(Atom::S(ref kw), _) = items[0] {
if kw.ends_with('?') {
return true;
}
}
}
}
false
})
.collect::<Vec<_>>();
if parsing_expr && !branch_tokens.is_empty() {
return Err(CrsnError::Parse(format!("Conditional branches are not allowed in const expression: {:?}", branch_tokens).into(), pos.clone()));
}
let mut arg_parser = TokenParser::new(toki.take(token_count - branch_tokens.len()).collect(), &listpos, pcx);
trace!("Parse op: {}\nargs {:?}\nbranches {:?}", name, arg_parser, branch_tokens);
let branches = {
let mut branches = vec![];
for t in branch_tokens.into_iter().rev() {
branches.push(parse_cond_branch(t, pcx)?);
}
if branches.is_empty() {
None
} else {
Some(branches)
}
};
if parsing_expr {
// hackity hack for const expr eval
arg_parser.prepend(atom_s("=result"));
}
if let Some(op) = parse_op(name.as_str(), arg_parser, &namepos)? {
parsed.push(Box::new(InstrWithBranches {
op,
pos: namepos,
branches,
}));
}
}
Ok(Box::new(parsed))
}