//! StackLowering: consumes `Vec<TIROp>` and produces target assembly text.
//!
//! Each target implements `StackLowering` to control instruction selection
//! and control-flow structure. The speculative lowering wraps the classical
//! path with an optional neural v2 optimizer.

#[cfg(test)]
mod tests;
mod triton;

use super::neural::report::{
    BlockDecision, DecisionReason, OptimizerReport, OptimizerStatus, Winner,
};
use super::TIROp;
use crate::cost::scorer;

pub use triton::TritonLowering;

/// Lowers IR operations into target assembly lines.
/// Lowers IR operations into target assembly lines.
///
/// Each target backend implements this to control its own instruction
/// selection (see [`TritonLowering`]).
pub trait StackLowering {
    /// Convert a sequence of IR operations into assembly text lines.
    fn lower(&self, ops: &[TIROp]) -> Vec<String>;
}

/// Create a stack lowering backend for the given target name.
/// Create a stack lowering backend for the given target name.
///
/// `_target` is currently ignored: the Triton backend is the only
/// implementation, so it is returned unconditionally.
pub fn create_stack_lowering(_target: &str) -> Box<dyn StackLowering> {
    let backend = TritonLowering::new();
    Box::new(backend)
}

/// Create a speculative stack lowering that can accept neural v2 candidates.
///
/// The v2 neural model runs externally (beam search) and injects results
/// via `inject_neural_candidate`. This replaces the v1 inline MLP approach.
/// Create a speculative stack lowering that can accept neural v2 candidates.
///
/// The v2 neural model runs externally (beam search) and injects results
/// via `inject_neural_candidate`. This replaces the v1 inline MLP approach.
pub fn create_speculative_lowering(
    _target: &str,
    meta_generation: u64,
    meta_hash: String,
    meta_status: OptimizerStatus,
) -> SpeculativeLowering {
    // Start with an empty report; decisions and costs accumulate as
    // candidates are injected.
    let initial_report = OptimizerReport {
        status: meta_status,
        generation: meta_generation,
        weight_hash: meta_hash,
        decisions: Vec::new(),
        total_neural_cost: 0,
        total_classical_cost: 0,
    };
    SpeculativeLowering {
        classical: TritonLowering::new(),
        report: std::cell::RefCell::new(initial_report),
    }
}

/// Speculative lowering: classical path always runs, neural candidates injected externally.
/// Speculative lowering: classical path always runs, neural candidates injected externally.
pub struct SpeculativeLowering {
    // Baseline backend; `lower` always delegates to it.
    classical: TritonLowering,
    // RefCell so `inject_neural_candidate` can record decisions through
    // `&self`; single-threaded interior mutability (not a Mutex).
    report: std::cell::RefCell<OptimizerReport>,
}

impl SpeculativeLowering {
    /// Get the accumulated optimizer report (a clone of the current state).
    pub fn report(&self) -> OptimizerReport {
        self.report.borrow().clone()
    }

    /// Inject a neural v2 candidate result for a block.
    ///
    /// Called after beam search + validation. Scores `candidate_tasm` with
    /// the cost model and records the decision (neural win or classical win)
    /// in the report. An empty candidate records a classical win with
    /// `NoCandidate`.
    pub fn inject_neural_candidate(
        &self,
        block_id: &str,
        candidate_tasm: &[String],
        baseline_cost: u64,
    ) {
        let mut report = self.report.borrow_mut();

        if candidate_tasm.is_empty() {
            report.decisions.push(BlockDecision {
                block_id: block_id.to_string(),
                winner: Winner::Classical,
                winner_cost: baseline_cost,
                loser_cost: baseline_cost,
                reason: DecisionReason::NoCandidate,
            });
            return;
        }

        let candidate_profile = scorer::profile_tasm(
            &candidate_tasm
                .iter()
                .map(|s| s.as_str())
                .collect::<Vec<_>>(),
        );
        let candidate_cost = candidate_profile.cost();

        // Dummy profile used only for the cliff-jump shape comparison; the
        // real baseline cost is supplied by the caller. (Previously built
        // with `format!` — a useless allocation for a constant string.)
        let baseline_profile = scorer::profile_tasm_str("push 0\n");

        if candidate_cost < baseline_cost {
            let reason = if candidate_profile.is_cliff_jump(&baseline_profile) {
                DecisionReason::CliffJump
            } else {
                DecisionReason::StackScheduling
            };

            report.decisions.push(BlockDecision {
                block_id: block_id.to_string(),
                winner: Winner::Neural,
                winner_cost: candidate_cost,
                loser_cost: baseline_cost,
                reason,
            });
        } else {
            // NOTE(review): the loser here is the neural candidate, so
            // `loser_cost = candidate_cost` may be the intent; kept as
            // `baseline_cost` to preserve behavior — the candidate cost is
            // still carried in the `NeuralWorse` reason. TODO confirm with
            // report consumers.
            report.decisions.push(BlockDecision {
                block_id: block_id.to_string(),
                winner: Winner::Classical,
                winner_cost: baseline_cost,
                loser_cost: baseline_cost,
                reason: DecisionReason::NeuralWorse(candidate_cost),
            });
        }
    }
}

impl StackLowering for SpeculativeLowering {
    /// Delegate to the classical backend unconditionally.
    fn lower(&self, ops: &[TIROp]) -> Vec<String> {
        // Classical path always runs. Neural v2 candidates are injected
        // externally via inject_neural_candidate after beam search; they
        // never alter the output of this method.
        self.classical.lower(ops)
    }
}

/// Decode neural output codes to TASM instruction strings.
/// Each code maps to a basic TASM instruction.
///
/// Every entry must be in the verifier's ALLOWED list. Side-effect ops
/// (split, assert, write_io, divine, halt, assert_vector) are included โ€”
/// the verifier handles them via side-channel comparison. Ops that remain
/// remapped (hash, read_io, read_mem, etc.) use dummy values the verifier
/// can't meaningfully compare. The vocab size stays 64 to match the model
/// architecture and GPU shader.
/// Decode neural output codes to TASM instruction strings.
/// Each code maps to a basic TASM instruction.
///
/// Decoding stops at the first end-of-sequence code (0) or any code
/// outside the vocabulary.
///
/// Every entry must be in the verifier's ALLOWED list. Side-effect ops
/// (split, assert, write_io, divine, halt, assert_vector) are included —
/// the verifier handles them via side-channel comparison. Ops that remain
/// remapped (hash, read_io, read_mem, etc.) use dummy values the verifier
/// can't meaningfully compare. The vocab size stays 64 to match the model
/// architecture and GPU shader.
pub fn decode_output(codes: &[u64]) -> Vec<String> {
    const VOCAB: &[&str] = &[
        "",              // 0: end of sequence
        "push 0",        // 1
        "push 1",        // 2
        "push -1",       // 3
        "pop 1",         // 4
        "pop 2",         // 5
        "pop 3",         // 6
        "pop 4",         // 7
        "pop 5",         // 8
        "dup 0",         // 9
        "dup 1",         // 10
        "dup 2",         // 11
        "dup 3",         // 12
        "dup 4",         // 13
        "dup 5",         // 14
        "swap 1",        // 15
        "swap 2",        // 16
        "swap 3",        // 17
        "swap 4",        // 18
        "swap 5",        // 19
        "add",           // 20
        "mul",           // 21
        "eq",            // 22
        "lt",            // 23
        "and",           // 24
        "xor",           // 25
        "div_mod",       // 26  (was: invert)
        "split",         // 27
        "pop_count",     // 28
        "log_2_floor",   // 29
        "nop",           // 30  (was: hash)
        "assert",        // 31
        "dup 9",         // 32  (was: read_io 1)
        "write_io 1",    // 33
        "dup 11",        // 34  (was: read_mem 1)
        "dup 12",        // 35  (was: write_mem 1)
        "divine 1",      // 36
        "dup 14",        // 37  (was: sponge_init)
        "dup 15",        // 38  (was: sponge_absorb)
        "swap 10",       // 39  (was: sponge_squeeze)
        "swap 11",       // 40  (was: nop at 40)
        "swap 12",       // 41  (was: skiz)
        "swap 13",       // 42  (was: return)
        "halt",          // 43
        "swap 15",       // 44  (was: read_io 5)
        "write_io 5",    // 45
        "pick 2",        // 46  (was: read_mem 5)
        "pick 3",        // 47  (was: write_mem 5)
        "divine 5",      // 48
        "pick 5",        // 49  (was: pop_count, moved to 28)
        "place 1",       // 50  (was: log_2_floor, moved to 29)
        "place 2",       // 51  (was: merkle_step)
        "place 3",       // 52  (was: sponge_absorb_mem)
        "place 4",       // 53  (was: xb_mul)
        "place 5",       // 54  (was: x_invert)
        "push 2",        // 55  (was: xx_dot_step)
        "push 3",        // 56  (was: xb_dot_step)
        "assert_vector", // 57
        "dup 6",         // 58
        "dup 7",         // 59
        "swap 6",        // 60
        "swap 7",        // 61
        "swap 8",        // 62
        "swap 9",        // 63
    ];

    // Iterate lazily and cut off at the first terminator / invalid code,
    // mirroring the manual break-loop form.
    codes
        .iter()
        .map(|&code| code as usize)
        .take_while(|&idx| idx != 0 && idx < VOCAB.len())
        .map(|idx| VOCAB[idx].to_string())
        .collect()
}

/// Encode a single TASM instruction line to its VOCAB code (reverse of decode_output).
/// Returns None if the instruction is not in the vocabulary.
/// Encode a single TASM instruction line to its VOCAB code (reverse of decode_output).
/// Returns None if the instruction is not in the vocabulary.
///
/// Leading/trailing whitespace on `line` is ignored; index 0 (the
/// end-of-sequence marker) is never returned.
pub fn encode_tasm_line(line: &str) -> Option<u64> {
    const VOCAB: &[&str] = &[
        "",              // 0: end of sequence
        "push 0",        // 1
        "push 1",        // 2
        "push -1",       // 3
        "pop 1",         // 4
        "pop 2",         // 5
        "pop 3",         // 6
        "pop 4",         // 7
        "pop 5",         // 8
        "dup 0",         // 9
        "dup 1",         // 10
        "dup 2",         // 11
        "dup 3",         // 12
        "dup 4",         // 13
        "dup 5",         // 14
        "swap 1",        // 15
        "swap 2",        // 16
        "swap 3",        // 17
        "swap 4",        // 18
        "swap 5",        // 19
        "add",           // 20
        "mul",           // 21
        "eq",            // 22
        "lt",            // 23
        "and",           // 24
        "xor",           // 25
        "div_mod",       // 26
        "split",         // 27
        "pop_count",     // 28
        "log_2_floor",   // 29
        "nop",           // 30
        "assert",        // 31
        "dup 9",         // 32
        "write_io 1",    // 33
        "dup 11",        // 34
        "dup 12",        // 35
        "divine 1",      // 36
        "dup 14",        // 37
        "dup 15",        // 38
        "swap 10",       // 39
        "swap 11",       // 40
        "swap 12",       // 41
        "swap 13",       // 42
        "halt",          // 43
        "swap 15",       // 44
        "write_io 5",    // 45
        "pick 2",        // 46
        "pick 3",        // 47
        "divine 5",      // 48
        "pick 5",        // 49
        "place 1",       // 50
        "place 2",       // 51
        "place 3",       // 52
        "place 4",       // 53
        "place 5",       // 54
        "push 2",        // 55
        "push 3",        // 56
        "assert_vector", // 57
        "dup 6",         // 58
        "dup 7",         // 59
        "swap 6",        // 60
        "swap 7",        // 61
        "swap 8",        // 62
        "swap 9",        // 63
    ];

    let trimmed = line.trim();
    // `position` finds the first exact match; filtering out index 0
    // excludes the end-of-sequence entry (an empty line never encodes).
    VOCAB
        .iter()
        .position(|&entry| entry == trimmed)
        .filter(|&idx| idx != 0)
        .map(|idx| idx as u64)
}

/// Encode a sequence of TASM instruction lines to VOCAB codes.
/// Lines not in the vocabulary are skipped.
/// Encode a sequence of TASM instruction lines to VOCAB codes.
/// Lines not in the vocabulary are skipped.
pub fn encode_tasm_block(lines: &[String]) -> Vec<u64> {
    let mut codes = Vec::with_capacity(lines.len());
    for line in lines {
        if let Some(code) = encode_tasm_line(line) {
            codes.push(code);
        }
    }
    codes
}

Dimensions

trident/src/diagnostic/mod.rs
trident/src/ir/mod.rs
trident/src/deploy/mod.rs
trident/src/syntax/mod.rs
trident/src/api/mod.rs
nebu/rs/extension/mod.rs
optica/src/render/mod.rs
trident/src/config/mod.rs
trident/src/field/mod.rs
trident/src/cli/mod.rs
optica/src/parser/mod.rs
trident/src/neural/mod.rs
trident/src/cost/mod.rs
trident/src/typecheck/mod.rs
optica/src/server/mod.rs
trident/src/package/mod.rs
optica/src/scanner/mod.rs
optica/src/output/mod.rs
trident/src/verify/mod.rs
optica/src/graph/mod.rs
trident/src/ast/mod.rs
trident/src/lsp/mod.rs
trident/src/runtime/mod.rs
trident/src/gpu/mod.rs
optica/src/query/mod.rs
trident/src/lsp/semantic/mod.rs
trident/src/verify/equiv/mod.rs
trident/src/package/hash/mod.rs
trident/src/neural/training/mod.rs
trident/src/verify/synthesize/mod.rs
trident/src/ir/tir/mod.rs
rs/macros/src/addressed/mod.rs
trident/src/package/registry/mod.rs
rs/rsc/src/lints/mod.rs
trident/src/verify/report/mod.rs
trident/src/config/resolve/mod.rs
trident/src/verify/solve/mod.rs
rs/macros/src/registers/mod.rs
trident/src/verify/smt/mod.rs
rs/macros/src/cell/mod.rs
rs/core/src/fixed_point/mod.rs
trident/src/neural/data/mod.rs
rs/core/src/bounded/mod.rs
trident/src/lsp/util/mod.rs
trident/src/typecheck/tests/mod.rs
trident/src/neural/model/mod.rs
trident/src/cost/stack_verifier/mod.rs
trident/src/syntax/grammar/mod.rs
trident/src/package/manifest/mod.rs
trident/src/syntax/parser/mod.rs
trident/src/ir/kir/mod.rs
trident/src/neural/inference/mod.rs
trident/src/syntax/lexer/mod.rs
trident/src/cost/model/mod.rs
trident/src/ir/lir/mod.rs
trident/src/syntax/format/mod.rs
trident/src/config/scaffold/mod.rs
trident/src/verify/sym/mod.rs
trident/src/api/tests/mod.rs
trident/src/package/store/mod.rs
trident/src/ir/tree/mod.rs
trident/src/ir/kir/lower/mod.rs
trident/src/ir/lir/lower/mod.rs
trident/src/ir/tir/builder/mod.rs
trident/src/ir/tir/neural/mod.rs
trident/src/neural/data/tir_graph/mod.rs
trident/src/syntax/parser/tests/mod.rs
cw-cyber/packages/cyber-std/src/tokenfactory/mod.rs
trident/src/ir/tree/lower/mod.rs
trident/src/ir/tir/stack/mod.rs
cw-cyber/contracts/cybernet/src/tests/mod.rs
trident/src/ir/tir/optimize/mod.rs

Local Graph