//! LRU-based stack manager for stack-machine VMs.
//!
//! When live variables exceed the maximum stack depth, the manager automatically
//! spills the least-recently-used variable to RAM and reloads it when accessed.
//!
//! RAM addresses for spilled values start at a configurable base to avoid
//! collision with user-accessible RAM.
//!
//! Default values match Triton VM (depth=16, spill base=2^30) but can be
//! configured for other stack-machine targets via TerrainConfig.
/// First RAM address used for spilled values (2^30), placed high to stay
/// clear of user-accessible RAM.
const DEFAULT_SPILL_RAM_BASE: u64 = 1 << 30;
/// Operand stack depth of the default target (Triton VM has 16 slots).
const DEFAULT_MAX_STACK_DEPTH: u32 = 16;
/// Instruction formatting for stack spill/reload operations.
/// Allows the StackManager to emit target-specific instructions.
pub(crate) struct SpillFormatter {
/// Formats a "swap with the element at depth d" instruction; used to bring
/// a spill victim's elements to the stack top (and depth 1 for addressing).
pub(crate) fmt_swap: Box<dyn Fn(u32) -> String>,
/// Formats a "push immediate value" instruction; used for RAM addresses.
pub(crate) fmt_push: Box<dyn Fn(u64) -> String>,
/// Instruction that pops a single element off the operand stack.
pub(crate) fmt_pop1: String,
/// Instruction that writes one element to RAM.
pub(crate) fmt_write_mem1: String,
/// Instruction that reads one element from RAM.
pub(crate) fmt_read_mem1: String,
}
impl Default for SpillFormatter {
    /// Triton-style assembly mnemonics for spill/reload instructions.
    fn default() -> Self {
        Self {
            fmt_swap: Box::new(|depth| format!(" swap {}", depth)),
            fmt_push: Box::new(|value| format!(" push {}", value)),
            fmt_pop1: String::from(" pop 1"),
            fmt_write_mem1: String::from(" write_mem 1"),
            fmt_read_mem1: String::from(" read_mem 1"),
        }
    }
}
/// A variable tracked by the stack manager.
#[derive(Clone, Debug)]
pub(crate) struct ManagedVar {
/// Variable name; `None` marks an anonymous temporary (temporaries are
/// never selected for spilling).
pub(crate) name: Option<String>,
/// Total width in field elements (stack slots) this value occupies.
pub(crate) width: u32,
/// For array entries: the width of a single element.
pub(crate) elem_width: Option<u32>,
/// Where this variable currently lives.
pub(crate) location: VarLocation,
/// Monotonic access counter for LRU eviction.
pub(crate) last_access: u64,
}
/// Current residence of a managed variable: either live on the operand
/// stack or spilled to RAM.
#[derive(Clone, Debug, PartialEq)]
pub(crate) enum VarLocation {
/// On the operand stack (tracked by position in the on_stack vec).
Stack,
/// Spilled to RAM starting at this address.
Ram(u64),
}
/// Manages the operand stack and automatic RAM spilling.
pub(crate) struct StackManager {
/// Variables currently on the operand stack (bottom to top).
on_stack: Vec<ManagedVar>,
/// Variables spilled to RAM.
spilled: Vec<ManagedVar>,
/// Next RAM address for spilling. Monotonically increasing; never reused
/// within a compilation session.
next_spill_addr: u64,
/// Monotonic counter for LRU tracking.
access_counter: u64,
/// Instructions generated by spill/reload operations. Callers must drain
/// these and splice them into the output program.
pub(crate) side_effects: Vec<String>,
/// Maximum operand stack depth before spilling (from TerrainConfig).
max_stack_depth: u32,
/// Target-specific instruction formatter for spill/reload.
formatter: SpillFormatter,
}
impl Default for StackManager {
fn default() -> Self {
Self::new()
}
}
impl StackManager {
/// Stack manager with Triton VM defaults (depth 16, spill base 2^30).
pub(crate) fn new() -> Self {
    Self::with_config(DEFAULT_MAX_STACK_DEPTH, DEFAULT_SPILL_RAM_BASE)
}
/// Stack manager with a custom depth limit and spill base, using the
/// default (Triton-style) instruction formatter.
pub(crate) fn with_config(max_stack_depth: u32, spill_ram_base: u64) -> Self {
    Self::with_formatter(max_stack_depth, spill_ram_base, SpillFormatter::default())
}
/// Fully-configured stack manager: depth limit, spill base, and a
/// target-specific instruction formatter.
pub(crate) fn with_formatter(
    max_stack_depth: u32,
    spill_ram_base: u64,
    formatter: SpillFormatter,
) -> Self {
    Self {
        max_stack_depth,
        formatter,
        next_spill_addr: spill_ram_base,
        access_counter: 0,
        on_stack: Vec::new(),
        spilled: Vec::new(),
        side_effects: Vec::new(),
    }
}
/// Reset per-function state: forget all tracked variables and any pending
/// side-effect instructions.
pub(crate) fn clear(&mut self) {
    self.side_effects.clear();
    self.spilled.clear();
    self.on_stack.clear();
    // next_spill_addr is deliberately NOT reset: spill addresses keep
    // advancing so distinct functions never reuse the same RAM slots.
}
/// Total width (in field elements) of all values on the operand stack.
pub(crate) fn stack_depth(&self) -> u32 {
    self.on_stack.iter().fold(0, |total, v| total + v.width)
}
/// Reserve `n` contiguous RAM words for scratch use and return their base
/// address. The spill pointer is advanced past them, so these addresses
/// can never collide with future spills.
pub(crate) fn alloc_scratch(&mut self, n: u32) -> u64 {
    let base = self.next_spill_addr;
    self.next_spill_addr = base + u64::from(n);
    base
}
/// Number of entries (variables/temporaries, not field elements) on the
/// operand stack.
pub(crate) fn stack_len(&self) -> usize {
    self.on_stack.len()
}
/// Push an anonymous temporary of `width` field elements onto the stack.
/// If the stack would exceed the configured maximum depth, the LRU named
/// variable is spilled first.
pub(crate) fn push_temp(&mut self, width: u32) {
    self.push_entry(None, width);
}
/// Push a named variable of `width` field elements onto the stack,
/// spilling the LRU named variable first if necessary.
pub(crate) fn push_named(&mut self, name: &str, width: u32) {
    self.push_entry(Some(name.to_string()), width);
}
/// Shared implementation of push_temp/push_named. Zero-width values take
/// no stack space and are not tracked at all.
fn push_entry(&mut self, name: Option<String>, width: u32) {
    if width == 0 {
        return;
    }
    self.ensure_space(width);
    let last_access = self.tick();
    self.on_stack.push(ManagedVar {
        name,
        width,
        elem_width: None,
        location: VarLocation::Stack,
        last_access,
    });
}
/// Pop the top entry from the stack model.
/// NOTE(review): this updates only the model — no pop instructions are
/// emitted; presumably the caller emits the matching target pops. Confirm.
pub(crate) fn pop(&mut self) -> Option<ManagedVar> {
self.on_stack.pop()
}
/// Get a reference to the top entry (None if the model stack is empty).
pub(crate) fn last(&self) -> Option<&ManagedVar> {
self.on_stack.last()
}
/// Get a mutable reference to the top entry (None if the model stack is empty).
pub(crate) fn last_mut(&mut self) -> Option<&mut ManagedVar> {
self.on_stack.last_mut()
}
/// Return the depth (in field elements from the stack top) at which the
/// named variable starts, refreshing its LRU timestamp.
///
/// A spilled variable is reloaded onto the stack top first, so its depth
/// is 0. Unknown names also yield 0 — the caller is responsible for
/// diagnosing that error.
pub(crate) fn access_var(&mut self, name: &str) -> u32 {
    let ts = self.tick();
    // Scan from the stack top downward; depth is the total width of all
    // entries sitting above the match.
    if let Some(pos_from_top) = self
        .on_stack
        .iter()
        .rev()
        .position(|e| e.name.as_deref() == Some(name))
    {
        let idx = self.on_stack.len() - 1 - pos_from_top;
        let depth: u32 = self.on_stack[idx + 1..].iter().map(|e| e.width).sum();
        self.on_stack[idx].last_access = ts;
        return depth;
    }
    // Not on the stack: reload it from RAM if it was spilled.
    let spilled_idx = self
        .spilled
        .iter()
        .position(|v| v.name.as_deref() == Some(name));
    if let Some(idx) = spilled_idx {
        let var = self.spilled.remove(idx);
        self.reload_var(var);
    }
    // Reloaded to the stack top (depth 0), or not found at all.
    0
}
/// Locate a named variable, returning `(depth_from_top, width)` and
/// refreshing its LRU timestamp. A spilled variable is reloaded first
/// (depth 0). Returns `None` if the name is unknown.
pub(crate) fn find_var_depth_and_width(&mut self, name: &str) -> Option<(u32, u32)> {
    let ts = self.tick();
    if let Some(pos_from_top) = self
        .on_stack
        .iter()
        .rev()
        .position(|e| e.name.as_deref() == Some(name))
    {
        let idx = self.on_stack.len() - 1 - pos_from_top;
        let depth: u32 = self.on_stack[idx + 1..].iter().map(|e| e.width).sum();
        self.on_stack[idx].last_access = ts;
        return Some((depth, self.on_stack[idx].width));
    }
    // Fall back to the spill area; `?` bails out when the name is unknown.
    let idx = self
        .spilled
        .iter()
        .position(|v| v.name.as_deref() == Some(name))?;
    let var = self.spilled.remove(idx);
    let width = var.width;
    self.reload_var(var);
    Some((0, width))
}
/// Locate a named variable, returning `(depth_from_top, width, elem_width)`
/// and refreshing its LRU timestamp. `elem_width` defaults to 1 for
/// non-array variables. A spilled variable is reloaded first (depth 0);
/// unknown names return `None`.
pub(crate) fn find_var_with_elem_width(&mut self, name: &str) -> Option<(u32, u32, u32)> {
    let ts = self.tick();
    if let Some(pos_from_top) = self
        .on_stack
        .iter()
        .rev()
        .position(|e| e.name.as_deref() == Some(name))
    {
        let idx = self.on_stack.len() - 1 - pos_from_top;
        let depth: u32 = self.on_stack[idx + 1..].iter().map(|e| e.width).sum();
        let entry = &mut self.on_stack[idx];
        entry.last_access = ts;
        return Some((depth, entry.width, entry.elem_width.unwrap_or(1)));
    }
    // Fall back to the spill area; `?` bails out when the name is unknown.
    let idx = self
        .spilled
        .iter()
        .position(|v| v.name.as_deref() == Some(name))?;
    let var = self.spilled.remove(idx);
    let width = var.width;
    let ew = var.elem_width.unwrap_or(1);
    self.reload_var(var);
    Some((0, width, ew))
}
/// Take and return every pending spill/reload instruction, leaving the
/// side-effect buffer empty for subsequent operations.
pub(crate) fn drain_side_effects(&mut self) -> Vec<String> {
    std::mem::replace(&mut self.side_effects, Vec::new())
}
/// Spill every named variable on the operand stack to RAM.
/// Used before inline asm blocks to isolate the managed stack.
pub(crate) fn spill_all_named(&mut self) {
    // Repeatedly evict the LRU named variable until none remain; bail out
    // if spilling ever fails (defensive — spill_lru succeeds whenever a
    // named variable exists).
    while self.on_stack.iter().any(|v| v.name.is_some()) {
        if !self.spill_lru() {
            break;
        }
    }
}
/// Snapshot the (on-stack, spilled) variable model for later restoration,
/// e.g. around loop bodies.
pub(crate) fn save_state(&self) -> (Vec<ManagedVar>, Vec<ManagedVar>) {
    let stack = self.on_stack.clone();
    let spilled = self.spilled.clone();
    (stack, spilled)
}
/// Restore a snapshot previously produced by `save_state`.
pub(crate) fn restore_state(&mut self, state: (Vec<ManagedVar>, Vec<ManagedVar>)) {
    let (stack, spilled) = state;
    self.on_stack = stack;
    self.spilled = spilled;
}
// --- Internal ---
/// Advance and return the monotonic access counter used for LRU ordering.
fn tick(&mut self) -> u64 {
    let next = self.access_counter + 1;
    self.access_counter = next;
    next
}
/// Guarantee room for `width` more field elements on the operand stack,
/// evicting LRU named variables as needed. Stops early if only anonymous
/// temporaries remain (nothing left that can be spilled).
pub(crate) fn ensure_space(&mut self, width: u32) {
    loop {
        if self.stack_depth() + width <= self.max_stack_depth {
            return;
        }
        if !self.spill_lru() {
            return;
        }
    }
}
/// Spill the least-recently-used *named* variable to RAM, appending the
/// required target instructions to `side_effects`.
///
/// Anonymous temporaries are never spilled. Returns `true` if a variable
/// was spilled, `false` if no named variable is on the stack.
fn spill_lru(&mut self) -> bool {
    // Choose the named entry with the smallest last_access. Ties resolve
    // to the lowest index, matching a first-strictly-smaller linear scan.
    let best_idx = self
        .on_stack
        .iter()
        .enumerate()
        .filter(|(_, entry)| entry.name.is_some())
        .min_by_key(|(_, entry)| entry.last_access)
        .map(|(i, _)| i);
    let idx = match best_idx {
        Some(i) => i,
        None => return false,
    };
    let mut var = self.on_stack.remove(idx);
    let addr = self.next_spill_addr;
    self.next_spill_addr += var.width as u64;
    // After removal, the entries now at `idx..` are exactly those that sat
    // above the victim, so their total width is the victim's depth from
    // the top of the real operand stack.
    let depth_from_top: u32 = self.on_stack[idx..].iter().map(|e| e.width).sum();
    // Move each element of the variable to RAM: bring it to the top, push
    // its RAM address, and write it out.
    for i in 0..var.width {
        let elem_depth = depth_from_top + (var.width - 1 - i);
        let ram_addr = addr + i as u64;
        // `elem_depth < max_stack_depth` replaces the former
        // `elem_depth <= max_stack_depth - 1`, which underflowed (u32)
        // when max_stack_depth == 0; the two are equivalent otherwise.
        // NOTE(review): depths beyond the swappable range are silently
        // skipped, which would emit wrong code — presumably unreachable
        // because ensure_space keeps the stack within bounds; confirm.
        if elem_depth > 0 && elem_depth < self.max_stack_depth {
            self.side_effects
                .push((self.formatter.fmt_swap)(elem_depth));
        }
        self.side_effects.push((self.formatter.fmt_push)(ram_addr));
        self.side_effects.push((self.formatter.fmt_swap)(1));
        self.side_effects
            .push(self.formatter.fmt_write_mem1.clone());
        self.side_effects.push(self.formatter.fmt_pop1.clone());
    }
    var.location = VarLocation::Ram(addr);
    self.spilled.push(var);
    true
}
/// Reload a RAM-resident variable onto the top of the operand stack,
/// marking it stack-resident with a fresh LRU timestamp.
/// A variable already marked `Stack` is silently dropped (no-op).
fn reload_var(&mut self, mut var: ManagedVar) {
    let addr = match var.location {
        VarLocation::Ram(a) => a,
        VarLocation::Stack => return,
    };
    // Evict something else first if this variable would overflow the stack.
    self.ensure_space(var.width);
    // Fetch the elements in address order; the trailing pop presumably
    // discards the residual pointer the target's read_mem leaves on the
    // stack — confirm against the formatter's target semantics.
    for offset in 0..var.width {
        let ram_addr = addr + offset as u64;
        self.side_effects.push((self.formatter.fmt_push)(ram_addr));
        self.side_effects.push(self.formatter.fmt_read_mem1.clone());
        self.side_effects.push(self.formatter.fmt_pop1.clone());
    }
    var.location = VarLocation::Stack;
    var.last_access = self.tick();
    self.on_stack.push(var);
}
}
#[cfg(test)]
mod tests;
trident/src/ir/tir/stack/mod.rs
ฯ 0.0%
/// LRU-based stack manager for stack-machine VMs.
///
/// When live variables exceed the maximum stack depth, the manager automatically
/// spills the least-recently-used variable to RAM and reloads it when accessed.
///
/// RAM addresses for spilled values start at a configurable base to avoid
/// collision with user-accessible RAM.
///
/// Default values match Triton VM (depth=16, spill base=2^30) but can be
/// configured for other stack-machine targets via TerrainConfig.
const DEFAULT_SPILL_RAM_BASE: u64 = 1 << 30;
const DEFAULT_MAX_STACK_DEPTH: u32 = 16;
/// Instruction formatting for stack spill/reload operations.
/// Allows the StackManager to emit target-specific instructions.
pub
/// A variable tracked by the stack manager.
pub
pub
/// Manages the operand stack and automatic RAM spilling.
pub