|
|
|
use std::collections::HashMap;
|
|
|
|
use std::sync::atomic::AtomicU32;
|
|
|
|
|
|
|
|
use crate::asm::data::{Rd, SrcDisp};
|
|
|
|
use crate::asm::data::literal::{Label, Value};
|
|
|
|
use crate::asm::error::{AsmError, Error};
|
|
|
|
use crate::asm::instr::{Cond, OpWrapper, Instr, Op, Routine};
|
|
|
|
|
|
|
|
/// A trait for something that can turn into multiple instructions
pub trait Flatten {
    /// Expand `self` into a linear sequence of ops (real ops plus
    /// label / jump pseudo-ops to be lowered later).
    ///
    /// `label_num` is a shared counter used to mint unique labels
    /// (see `Label::unique`) so that nested expansions never collide.
    fn flatten(self, label_num: &AtomicU32) -> Result<Vec<OpWrapper>, Error>;
}
|
|
|
|
|
|
|
|
impl Flatten for Instr {
|
|
|
|
fn flatten(self, label_num: &AtomicU32) -> Result<Vec<OpWrapper>, Error> {
|
|
|
|
let mut ops = vec![self.op];
|
|
|
|
|
|
|
|
if let Some(branches) = self.branches {
|
|
|
|
let labels = HashMap::<Cond, u32>::new();
|
|
|
|
let branch_count = branches.len();
|
|
|
|
let end_lbl = Label::unique(label_num);
|
|
|
|
for (cnt, (cond, branch)) in branches.into_iter().enumerate() {
|
|
|
|
if labels.contains_key(&cond) {
|
|
|
|
return Err(Error::Asm(AsmError::ConditionalAlreadyUsed(cond)));
|
|
|
|
}
|
|
|
|
|
|
|
|
let next_lbl = if cnt == branch_count - 1 {
|
|
|
|
end_lbl.clone()
|
|
|
|
} else {
|
|
|
|
Label::unique(label_num)
|
|
|
|
};
|
|
|
|
ops.push(OpWrapper::JumpIf(!cond, next_lbl.clone()));
|
|
|
|
|
|
|
|
for branch_instr in branch {
|
|
|
|
ops.extend(branch_instr.flatten(label_num)?);
|
|
|
|
}
|
|
|
|
|
|
|
|
if cnt != branch_count - 1 {
|
|
|
|
ops.push(OpWrapper::Jump(end_lbl.clone()));
|
|
|
|
ops.push(OpWrapper::Label(next_lbl));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
ops.push(OpWrapper::Label(end_lbl));
|
|
|
|
}
|
|
|
|
|
|
|
|
Ok(ops)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl Flatten for Routine {
|
|
|
|
fn flatten(self, label_num: &AtomicU32) -> Result<Vec<OpWrapper>, Error> {
|
|
|
|
let mut ops = vec![
|
|
|
|
Op::Routine(self.name.clone()).into(),
|
|
|
|
];
|
|
|
|
|
|
|
|
for instr in self.body {
|
|
|
|
ops.extend(instr.flatten(label_num)?);
|
|
|
|
}
|
|
|
|
|
|
|
|
ops.push(Op::Barrier(Some(format!("Routine \"{}\" overrun", self.name).into())).into());
|
|
|
|
Ok(ops)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Convert jumps to relative skips
|
|
|
|
pub fn lower(ops: Vec<OpWrapper>) -> Result<Vec<Op>, Error> {
|
|
|
|
let mut label_positions = HashMap::<Label, usize>::new();
|
|
|
|
for (n, op) in ops.iter().enumerate() {
|
|
|
|
if let OpWrapper::Label(name) = op {
|
|
|
|
label_positions.insert(name.clone(), n - label_positions.len());
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
let mut cleaned = vec![];
|
|
|
|
let mut skipped = 0;
|
|
|
|
for (n, op) in ops.into_iter().enumerate() {
|
|
|
|
match op {
|
|
|
|
OpWrapper::Label(_) => {
|
|
|
|
skipped += 1;
|
|
|
|
}
|
|
|
|
OpWrapper::Jump(target) => {
|
|
|
|
if let Some(dest) = label_positions.get(&target) {
|
|
|
|
let skip = *dest as isize - n as isize + skipped;
|
|
|
|
cleaned.push(Op::Skip(Rd::new(SrcDisp::Immediate(skip as Value))));
|
|
|
|
} else {
|
|
|
|
return Err(Error::Asm(AsmError::LabelNotDefined(target)));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
OpWrapper::JumpIf(cond, target) => {
|
|
|
|
if let Some(dest) = label_positions.get(&target) {
|
|
|
|
let skip = *dest as isize - n as isize + skipped;
|
|
|
|
cleaned.push(Op::SkipIf(cond, Rd::new(SrcDisp::Immediate(skip as Value))));
|
|
|
|
} else {
|
|
|
|
return Err(Error::Asm(AsmError::LabelNotDefined(target)));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
OpWrapper::Op(op) => {
|
|
|
|
cleaned.push(op);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
Ok(cleaned)
|
|
|
|
}
|