diff --git a/Cargo.lock b/Cargo.lock index a4963f2..ef5e666 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -943,6 +943,34 @@ dependencies = [ "windows-sys 0.61.2", ] +[[package]] +name = "dap" +version = "0.4.1-alpha1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35c7fc89d334ab745ba679f94c7314c9b17ecdcd923c111df6206e9fd7729fa9" +dependencies = [ + "serde", + "serde_json", + "thiserror 1.0.69", +] + +[[package]] +name = "debugger-dap" +version = "0.1.0" +dependencies = [ + "blake2b_simd", + "dap", + "debugger-session", + "kaspa-consensus-core", + "kaspa-txscript", + "kaspa-txscript-errors", + "rand 0.8.5", + "secp256k1", + "serde", + "serde_json", + "silverscript-lang", +] + [[package]] name = "debugger-session" version = "0.1.0" diff --git a/Cargo.toml b/Cargo.toml index dfaa641..42de97c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,6 +3,7 @@ members = [ "silverscript-lang", "debugger/session", "debugger/cli", + "debugger/dap", ] exclude = ["tree-sitter"] resolver = "2" diff --git a/debugger/dap/Cargo.toml b/debugger/dap/Cargo.toml new file mode 100644 index 0000000..efbd0c2 --- /dev/null +++ b/debugger/dap/Cargo.toml @@ -0,0 +1,27 @@ +[package] +name = "debugger-dap" +version.workspace = true +edition.workspace = true +license.workspace = true +authors.workspace = true +repository.workspace = true +rust-version.workspace = true + +[[bin]] +name = "debugger-dap" +path = "src/main.rs" + +[dependencies] +dap = "0.4.1-alpha1" +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +debugger-session = { path = "../session" } +silverscript-lang = { path = "../../silverscript-lang" } +kaspa-consensus-core.workspace = true +kaspa-txscript.workspace = true +kaspa-txscript-errors.workspace = true +secp256k1.workspace = true +blake2b_simd.workspace = true +rand.workspace = true + +[dev-dependencies] diff --git a/debugger/dap/src/adapter.rs b/debugger/dap/src/adapter.rs new file mode 100644 index 0000000..85c0523 --- /dev/null 
+++ b/debugger/dap/src/adapter.rs @@ -0,0 +1,584 @@ +use std::collections::{HashMap, HashSet}; +use std::fs; +use std::path::{Path, PathBuf}; + +use dap::events::{Event, OutputEventBody, StoppedEventBody}; +use dap::prelude::{Command, Request, Response, ResponseBody}; +use dap::responses::{ + ContinueResponse, ScopesResponse, SetBreakpointsResponse, StackTraceResponse, ThreadsResponse, VariablesResponse, +}; +use dap::types::{ + Breakpoint, Capabilities, OutputEventCategory, Scope, ScopePresentationhint, Source, StackFrame, StoppedEventReason, Thread, + Variable, +}; +use debugger_session::session::{DebugSession, VariableOrigin}; +use debugger_session::{format_failure_report, format_value}; + +use crate::launch_config::LaunchConfig; +use crate::refs::{RefAllocator, RefTarget, ScopeKind}; +use crate::runtime_builder::{OwnedRuntime, build_launch}; + +const MAIN_THREAD_ID: i64 = 1; + +pub struct AdapterResult { + pub response: Response, + pub events: Vec, + pub should_exit: bool, +} + +pub struct DapAdapter { + runtime: Option, + refs: RefAllocator, + frame_sequence: i64, + configured: bool, +} + +struct Runtime { + runtime: OwnedRuntime, + source_path: PathBuf, + source_name: String, + stop_on_entry: bool, + no_debug: bool, + breakpoints_by_source: HashMap>, + frame_map: Vec, +} + +#[derive(Clone)] +struct FrameMeta { + frame_id: i64, + sequence: u32, + frame_token: u32, +} + +impl DapAdapter { + pub fn new() -> Self { + Self { runtime: None, refs: RefAllocator::new(), frame_sequence: 1, configured: false } + } + + pub fn handle_request(&mut self, req: Request) -> AdapterResult { + match self.handle_request_inner(req.clone()) { + Ok(result) => result, + Err(err) => { + AdapterResult { response: req.error(&format!("internal adapter error: {err}")), events: vec![], should_exit: false } + } + } + } + + fn handle_request_inner(&mut self, req: Request) -> Result { + match req.command.clone() { + Command::Initialize(_) => { + let capabilities = Capabilities { + 
supports_configuration_done_request: Some(true), + supports_step_in_targets_request: Some(false), + supports_function_breakpoints: Some(false), + supports_conditional_breakpoints: Some(false), + support_terminate_debuggee: Some(true), + supports_loaded_sources_request: Some(false), + supports_evaluate_for_hovers: Some(false), + ..Default::default() + }; + Ok(AdapterResult { + response: req.success(ResponseBody::Initialize(capabilities)), + events: vec![Event::Initialized], + should_exit: false, + }) + } + Command::Launch(args) => { + let launch = match LaunchConfig::from_launch_args(&args) { + Ok(cfg) => cfg, + Err(err) => { + return Ok(AdapterResult { + response: req.error(&err), + events: vec![self.output_stderr(err)], + should_exit: false, + }); + } + }; + + match build_runtime(launch) { + Ok(runtime) => { + self.runtime = Some(runtime); + self.configured = false; + Ok(AdapterResult { response: req.success(ResponseBody::Launch), events: vec![], should_exit: false }) + } + Err(err) => { + Ok(AdapterResult { response: req.error(&err), events: vec![self.output_stderr(err)], should_exit: false }) + } + } + } + Command::SetBreakpoints(args) => { + let runtime = self.runtime.as_mut().ok_or_else(|| "setBreakpoints before launch".to_string())?; + let requested_source_path = + args.source.path.as_deref().map(PathBuf::from).unwrap_or_else(|| runtime.source_path.clone()); + let source_key = canonical_source_key(&requested_source_path); + + if let Some(existing) = runtime.breakpoints_by_source.remove(&source_key) { + for line in existing { + runtime.runtime.session_mut().clear_breakpoint(line); + } + } + let requested_lines: Vec = if let Some(requested) = args.breakpoints { + Some(requested.into_iter().map(|source_bp| source_bp.line).collect::>()) + } else { + #[allow(deprecated)] + args.lines + } + .unwrap_or_default(); + + let runtime_source_key = canonical_source_key(&runtime.source_path); + if source_key != runtime_source_key { + // This adapter session executes one 
script file. Keep breakpoints + // from other files isolated and report them as unverified. + runtime.breakpoints_by_source.insert(source_key, HashSet::new()); + let breakpoints = requested_lines + .into_iter() + .map(|line| Breakpoint { verified: false, line: Some(line), ..Default::default() }) + .collect(); + return Ok(AdapterResult { + response: req.success(ResponseBody::SetBreakpoints(SetBreakpointsResponse { breakpoints })), + events: vec![], + should_exit: false, + }); + } + + let mut breakpoints = Vec::new(); + let mut resolved_for_source = HashSet::new(); + for line_value in requested_lines { + if line_value <= 0 { + breakpoints.push(Breakpoint { verified: false, line: Some(line_value), ..Default::default() }); + continue; + } + let line = line_value as u32; + let resolved = runtime.runtime.session_mut().add_breakpoint_resolved(line); + let verified = resolved.is_some(); + if let Some(actual_line) = resolved { + resolved_for_source.insert(actual_line); + } + breakpoints.push(Breakpoint { verified, line: Some(resolved.unwrap_or(line) as i64), ..Default::default() }); + } + + runtime.breakpoints_by_source.insert(source_key, resolved_for_source); + + Ok(AdapterResult { + response: req.success(ResponseBody::SetBreakpoints(SetBreakpointsResponse { breakpoints })), + events: vec![], + should_exit: false, + }) + } + Command::SetExceptionBreakpoints(_) => Ok(AdapterResult { + response: req.success(ResponseBody::SetExceptionBreakpoints(Default::default())), + events: vec![], + should_exit: false, + }), + Command::ConfigurationDone => { + let runtime = self.runtime.as_mut().ok_or_else(|| "configurationDone before launch".to_string())?; + self.configured = true; + + runtime + .runtime + .session_mut() + .run_to_first_executed_statement() + .map_err(|err| format!("failed to start session: {err}"))?; + + let events = if runtime.no_debug { + match runtime.runtime.session_mut().run_to_completion() { + Ok(()) => vec![self.output_stdout("Execution completed successfully."), 
Event::Terminated(None)], + Err(err) => { + let report = runtime.runtime.session().build_failure_report(&err); + let formatted = format_failure_report(&report, &format_value); + vec![self.output_stderr(formatted), Event::Terminated(None)] + } + } + } else if runtime.stop_on_entry { + vec![self.make_stopped_event(StoppedEventReason::Entry, None)] + } else { + match runtime.runtime.session_mut().continue_to_breakpoint() { + Ok(Some(_)) => vec![self.make_stopped_event(StoppedEventReason::Breakpoint, None)], + Ok(None) => vec![self.output_stdout("Execution completed successfully."), Event::Terminated(None)], + Err(err) => { + let report = runtime.runtime.session().build_failure_report(&err); + let formatted = format_failure_report(&report, &format_value); + if runtime.no_debug { + vec![self.output_stderr(formatted), Event::Terminated(None)] + } else { + vec![ + self.output_stderr(formatted.clone()), + self.make_stopped_event(StoppedEventReason::Exception, Some(formatted)), + ] + } + } + } + }; + + Ok(AdapterResult { response: req.success(ResponseBody::ConfigurationDone), events, should_exit: false }) + } + Command::Threads => Ok(AdapterResult { + response: req.success(ResponseBody::Threads(ThreadsResponse { + threads: vec![Thread { id: MAIN_THREAD_ID, name: "main".to_string() }], + })), + events: vec![], + should_exit: false, + }), + Command::StackTrace(_) => { + let (span, current_step, source, current_function_name, call_stack) = { + let runtime = self.runtime.as_ref().ok_or_else(|| "stackTrace before launch".to_string())?; + let span = runtime.runtime.session().current_span(); + let current_step = runtime.runtime.session().current_step(); + let source = Source { + name: Some(runtime.source_name.clone()), + path: Some(runtime.source_path.to_string_lossy().to_string()), + ..Default::default() + }; + let current_function_name = + runtime.runtime.session().current_function_name().unwrap_or_else(|| "".to_string()); + let call_stack = 
runtime.runtime.session().call_stack_with_spans(); + (span, current_step, source, current_function_name, call_stack) + }; + + let mut frames = Vec::new(); + let mut frame_map = Vec::new(); + + let current_line = span.map(|s| s.line as i64).unwrap_or(1); + let current_col = span.map(|s| s.col as i64).unwrap_or(1); + + let frame_id = self.next_frame_id(); + frames.push(StackFrame { + id: frame_id, + name: current_function_name, + source: Some(source.clone()), + line: current_line, + column: current_col, + ..Default::default() + }); + frame_map.push(FrameMeta { + frame_id, + sequence: current_step.as_ref().map(|step| step.sequence).unwrap_or(0), + frame_token: current_step.as_ref().map(|step| step.frame_id).unwrap_or(0), + }); + + for entry in call_stack.into_iter().rev() { + let id = self.next_frame_id(); + let frame_line = entry.call_site_span.map(|s| s.line as i64).unwrap_or(current_line); + let frame_col = entry.call_site_span.map(|s| s.col as i64).unwrap_or(current_col); + frames.push(StackFrame { + id, + name: entry.callee_name, + source: Some(source.clone()), + line: frame_line, + column: frame_col, + ..Default::default() + }); + frame_map.push(FrameMeta { frame_id: id, sequence: entry.sequence, frame_token: entry.frame_id }); + } + + if let Some(runtime_mut) = self.runtime.as_mut() { + runtime_mut.frame_map = frame_map; + } + + Ok(AdapterResult { + response: req.success(ResponseBody::StackTrace(StackTraceResponse { + total_frames: Some(frames.len() as i64), + stack_frames: frames, + })), + events: vec![], + should_exit: false, + }) + } + Command::Scopes(args) => { + let runtime = self.runtime.as_ref().ok_or_else(|| "scopes before launch".to_string())?; + let frame_meta = runtime + .frame_map + .iter() + .find(|frame| frame.frame_id == args.frame_id) + .cloned() + .unwrap_or(FrameMeta { frame_id: args.frame_id, sequence: 0, frame_token: 0 }); + + let variables_ref = self.refs.alloc(scope_target(ScopeKind::Variables, &frame_meta)); + let dstack_ref = 
self.refs.alloc(scope_target(ScopeKind::DataStack, &frame_meta)); + let astack_ref = self.refs.alloc(scope_target(ScopeKind::AltStack, &frame_meta)); + let scoped_vars = runtime + .runtime + .session() + .list_variables_at_sequence(frame_meta.sequence, frame_meta.frame_token) + .unwrap_or_default(); + let stack_snapshot = runtime.runtime.session().stack_snapshot(); + + let scopes = vec![ + Scope { + name: "Variables".to_string(), + presentation_hint: Some(ScopePresentationhint::Locals), + variables_reference: variables_ref, + named_variables: Some(scoped_vars.len() as i64), + expensive: false, + ..Default::default() + }, + Scope { + name: "Data Stack".to_string(), + presentation_hint: Some(ScopePresentationhint::Registers), + variables_reference: dstack_ref, + indexed_variables: Some(stack_snapshot.dstack.len() as i64), + expensive: false, + ..Default::default() + }, + Scope { + name: "Alt Stack".to_string(), + presentation_hint: Some(ScopePresentationhint::Registers), + variables_reference: astack_ref, + indexed_variables: Some(stack_snapshot.astack.len() as i64), + expensive: false, + ..Default::default() + }, + ]; + + Ok(AdapterResult { + response: req.success(ResponseBody::Scopes(ScopesResponse { scopes })), + events: vec![], + should_exit: false, + }) + } + Command::Variables(args) => { + let runtime = self.runtime.as_ref().ok_or_else(|| "variables before launch".to_string())?; + let target = self + .refs + .get(args.variables_reference) + .cloned() + .ok_or_else(|| format!("unknown variablesReference {}", args.variables_reference))?; + + let variables = match target { + RefTarget::Scope { kind: ScopeKind::Variables, sequence, frame_token } => { + let vars = runtime + .runtime + .session() + .list_variables_at_sequence(sequence, frame_token) + .map_err(|err| format!("variables unavailable: {err}"))?; + let mut bindings = vars; + bindings.sort_by_key(|item| { + let rank = match item.origin { + VariableOrigin::Param => 0, + VariableOrigin::Local => 1, + 
VariableOrigin::ContractField | VariableOrigin::ConstructorArg => 2, + VariableOrigin::Constant => 3, + }; + (rank, item.name.clone()) + }); + bindings + .into_iter() + .map(|item| Variable { + name: binding_name(&item), + value: format_value(&item.type_name, &item.value), + type_field: Some(item.type_name), + evaluate_name: Some(item.name), + variables_reference: 0, + ..Default::default() + }) + .collect::>() + } + RefTarget::Scope { kind: ScopeKind::DataStack, .. } => { + let snapshot = runtime.runtime.session().stack_snapshot(); + stack_scope_variables("dstack", &snapshot.dstack) + } + RefTarget::Scope { kind: ScopeKind::AltStack, .. } => { + let snapshot = runtime.runtime.session().stack_snapshot(); + stack_scope_variables("astack", &snapshot.astack) + } + }; + + Ok(AdapterResult { + response: req.success(ResponseBody::Variables(VariablesResponse { variables })), + events: vec![], + should_exit: false, + }) + } + Command::Next(_) => self.handle_step(req, StepKind::Next, |session| session.step_over()), + Command::StepIn(_) => self.handle_step(req, StepKind::StepIn, |session| session.step_into()), + Command::StepOut(_) => self.handle_step(req, StepKind::StepOut, |session| session.step_out()), + Command::Continue(_) => { + let runtime = self.runtime.as_mut().ok_or_else(|| "continue before launch".to_string())?; + let no_debug = runtime.no_debug; + let mut events = Vec::new(); + match runtime.runtime.session_mut().continue_to_breakpoint() { + Ok(Some(_)) => events.push(self.make_stopped_event(StoppedEventReason::Breakpoint, None)), + Ok(None) => { + events.push(self.output_stdout("Execution completed successfully.")); + events.push(Event::Terminated(None)); + } + Err(err) => { + let report = runtime.runtime.session().build_failure_report(&err); + let formatted = format_failure_report(&report, &format_value); + events.push(self.output_stderr(formatted.clone())); + if no_debug { + events.push(Event::Terminated(None)); + } else { + 
events.push(self.make_stopped_event(StoppedEventReason::Exception, Some(formatted))); + } + } + } + Ok(AdapterResult { + response: req.success(ResponseBody::Continue(ContinueResponse { all_threads_continued: Some(true) })), + events, + should_exit: false, + }) + } + Command::Disconnect(_) => { + self.runtime = None; + Ok(AdapterResult { response: req.success(ResponseBody::Disconnect), events: vec![], should_exit: true }) + } + _ => Ok(AdapterResult { response: req.error("unsupported request"), events: vec![], should_exit: false }), + } + } + + fn handle_step( + &mut self, + req: Request, + step_kind: StepKind, + mut step_fn: impl FnMut( + &mut DebugSession<'static, 'static>, + ) + -> Result>, kaspa_txscript_errors::TxScriptError>, + ) -> Result { + if !self.configured { + return Ok(AdapterResult { + response: req.error("cannot step before configurationDone"), + events: vec![], + should_exit: false, + }); + } + + let runtime = self.runtime.as_mut().ok_or_else(|| "step request before launch".to_string())?; + let mut events = Vec::new(); + let before_location = current_location_key(runtime.runtime.session()); + let mut step_result = step_fn(runtime.runtime.session_mut()); + + let mut guard = 0usize; + while matches!(step_result, Ok(Some(_))) && guard < 32 { + let after_location = current_location_key(runtime.runtime.session()); + if after_location != before_location { + break; + } + step_result = step_fn(runtime.runtime.session_mut()); + guard += 1; + } + + match step_result { + Ok(Some(_)) => events.push(self.make_stopped_event(StoppedEventReason::Step, None)), + Ok(None) => { + events.push(self.output_stdout("Execution completed successfully.")); + events.push(Event::Terminated(None)); + } + Err(err) => { + let report = runtime.runtime.session().build_failure_report(&err); + let formatted = format_failure_report(&report, &format_value); + events.push(self.output_stderr(formatted.clone())); + events.push(self.make_stopped_event(StoppedEventReason::Exception, 
Some(formatted))); + } + } + + let body = match step_kind { + StepKind::Next => ResponseBody::Next, + StepKind::StepIn => ResponseBody::StepIn, + StepKind::StepOut => ResponseBody::StepOut, + }; + + Ok(AdapterResult { response: req.success(body), events, should_exit: false }) + } + + fn make_stopped_event(&mut self, reason: StoppedEventReason, text: Option) -> Event { + self.refs.reset(); + Event::Stopped(StoppedEventBody { + reason, + description: None, + thread_id: Some(MAIN_THREAD_ID), + preserve_focus_hint: None, + text, + all_threads_stopped: Some(true), + hit_breakpoint_ids: None, + }) + } + + fn next_frame_id(&mut self) -> i64 { + let id = self.frame_sequence; + self.frame_sequence += 1; + id + } + + fn output_stderr(&self, msg: impl Into) -> Event { + Event::Output(OutputEventBody { + category: Some(OutputEventCategory::Stderr), + output: format!("{}\n", msg.into()), + ..Default::default() + }) + } + + fn output_stdout(&self, msg: impl Into) -> Event { + Event::Output(OutputEventBody { + category: Some(OutputEventCategory::Stdout), + output: format!("{}\n", msg.into()), + ..Default::default() + }) + } +} + +enum StepKind { + Next, + StepIn, + StepOut, +} + +fn current_location_key(session: &DebugSession<'static, 'static>) -> Option { + session.current_span().map(|span| span.line) +} + +fn canonical_source_key(path: &Path) -> String { + fs::canonicalize(path).unwrap_or_else(|_| path.to_path_buf()).to_string_lossy().to_string() +} + +fn scope_target(kind: ScopeKind, frame_meta: &FrameMeta) -> RefTarget { + RefTarget::Scope { kind, sequence: frame_meta.sequence, frame_token: frame_meta.frame_token } +} + +fn binding_name(variable: &debugger_session::session::Variable) -> String { + match variable.origin { + VariableOrigin::Param | VariableOrigin::Local | VariableOrigin::ContractField => variable.name.clone(), + VariableOrigin::ConstructorArg => format!("{} (ctor)", variable.name), + VariableOrigin::Constant => format!("{} (const)", variable.name), + } +} + +fn 
stack_scope_variables(scope_name: &str, items: &[String]) -> Vec { + if items.is_empty() { + return vec![Variable { + name: "(empty)".to_string(), + value: "".to_string(), + variables_reference: 0, + ..Default::default() + }]; + } + + items + .iter() + .enumerate() + .map(|(index, item)| Variable { + name: format!("{scope_name}[{index}]"), + value: stack_item_value(item), + variables_reference: 0, + ..Default::default() + }) + .collect() +} + +fn stack_item_value(item: &str) -> String { + if item.is_empty() { " (script 0 / false)".to_string() } else { format!("0x{item}") } +} + +fn build_runtime(config: LaunchConfig) -> Result { + let built = build_launch(config.resolve(None)?)?; + Ok(Runtime { + runtime: built.runtime, + source_path: built.source_path, + source_name: built.source_name, + stop_on_entry: built.stop_on_entry, + no_debug: built.no_debug, + breakpoints_by_source: HashMap::new(), + frame_map: Vec::new(), + }) +} diff --git a/debugger/dap/src/launch_config.rs b/debugger/dap/src/launch_config.rs new file mode 100644 index 0000000..0c9f97e --- /dev/null +++ b/debugger/dap/src/launch_config.rs @@ -0,0 +1,120 @@ +use std::collections::BTreeMap; +use std::path::{Path, PathBuf}; + +use dap::requests::LaunchRequestArguments; +use debugger_session::args::values_to_args; +use debugger_session::test_runner::{TestTxScenario, TestTxScenarioResolved, resolve_tx_scenario}; +use serde::Deserialize; +use serde_json::{Map, Value}; + +#[derive(Debug, Clone, Default, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct LaunchConfig { + pub script_path: Option, + pub function: Option, + pub constructor_args: Option, + pub args: Option, + pub tx: Option, + pub no_debug: Option, + pub stop_on_entry: Option, +} + +#[derive(Debug, Clone)] +pub struct ResolvedLaunchConfig { + pub script_path: PathBuf, + pub function: Option, + pub constructor_args: Option, + pub args: Option, + pub tx: Option, + pub no_debug: bool, + pub stop_on_entry: bool, +} + +#[derive(Debug, Clone, 
Deserialize)] +#[serde(untagged)] +pub enum ArgInput { + Values(Vec), + Named(BTreeMap), +} + +impl LaunchConfig { + pub fn from_launch_args(args: &LaunchRequestArguments) -> Result { + let mut launch_data = match args.additional_data.clone() { + Some(Value::Object(map)) => map, + Some(Value::Null) | None => Map::new(), + Some(_) => return Err("invalid launch config: expected launch arguments to deserialize into an object".to_string()), + }; + + if let Some(no_debug) = args.no_debug { + launch_data.insert("noDebug".to_string(), Value::Bool(no_debug)); + } + + Self::from_value(Value::Object(launch_data)) + } + + pub fn from_value(value: Value) -> Result { + let config: Self = serde_json::from_value(value).map_err(|err| format!("invalid launch config: {err}"))?; + + if config.script_path.is_none() { + return Err("launch config must include 'scriptPath'".to_string()); + } + + Ok(config) + } + + pub fn resolve(self, workspace_root: Option<&Path>) -> Result { + let script_path = self.resolve_script_path(workspace_root)?; + let tx = self.tx.map(resolve_tx_scenario).transpose()?; + + Ok(ResolvedLaunchConfig { + script_path, + function: self.function, + constructor_args: self.constructor_args, + args: self.args, + tx, + no_debug: self.no_debug.unwrap_or(false), + stop_on_entry: self.stop_on_entry.unwrap_or(!self.no_debug.unwrap_or(false)), + }) + } + + fn resolve_script_path(&self, workspace_root: Option<&Path>) -> Result { + let raw = self.script_path.as_deref().ok_or_else(|| "scriptPath is required".to_string())?; + canonicalize_with_workspace(raw, workspace_root) + } +} + +fn canonicalize_with_workspace(raw: &str, workspace_root: Option<&Path>) -> Result { + let candidate = PathBuf::from(raw); + let resolved = if candidate.is_absolute() { + candidate + } else if let Some(root) = workspace_root { + root.join(candidate) + } else { + std::env::current_dir().map_err(|err| format!("failed to resolve current_dir: {err}"))?.join(candidate) + }; + + 
std::fs::canonicalize(&resolved).map_err(|err| format!("failed to canonicalize '{}': {err}", resolved.display())) +} + +pub fn resolve_arg_input(input: Option<&ArgInput>, param_names: &[String], label: &str) -> Result, String> { + match input { + None => Ok(Vec::new()), + Some(ArgInput::Values(values)) => values_to_args(values), + Some(ArgInput::Named(named)) => { + let mut remaining = named.clone(); + let mut ordered = Vec::with_capacity(param_names.len()); + + for name in param_names { + let value = remaining.remove(name).ok_or_else(|| format!("{label} missing value for '{name}'"))?; + ordered.push(value); + } + + if !remaining.is_empty() { + let extras = remaining.keys().cloned().collect::>().join(", "); + return Err(format!("{label} has unknown name(s): {extras}")); + } + + values_to_args(&ordered) + } + } +} diff --git a/debugger/dap/src/main.rs b/debugger/dap/src/main.rs new file mode 100644 index 0000000..5c6fb71 --- /dev/null +++ b/debugger/dap/src/main.rs @@ -0,0 +1,74 @@ +use std::io::{BufReader, BufWriter}; + +use dap::prelude::Server; +use debugger_session::{format_failure_report, format_value}; +use serde_json::Value; + +mod adapter; +mod launch_config; +mod refs; +mod runtime_builder; + +use adapter::DapAdapter; +use launch_config::LaunchConfig; +use runtime_builder::build_launch; + +fn main() -> Result<(), Box> { + let mut args = std::env::args().skip(1); + if let Some(arg) = args.next() { + if arg == "--run-config-json" { + let raw = args.next().ok_or("--run-config-json requires a JSON argument")?; + return run_config_json(&raw); + } + } + + let input = BufReader::new(std::io::stdin()); + let output = BufWriter::new(std::io::stdout()); + let mut server = Server::new(input, output); + let mut adapter = DapAdapter::new(); + + loop { + let req = match server.poll_request() { + Ok(Some(req)) => req, + Ok(None) => break, + Err(err) => return Err(Box::new(err)), + }; + + let result = adapter.handle_request(req); + if let Err(err) = 
server.respond(result.response) { + return Err(Box::new(err)); + } + + for event in result.events { + if let Err(err) = server.send_event(event) { + return Err(Box::new(err)); + } + } + + if result.should_exit { + break; + } + } + Ok(()) +} + +fn run_config_json(raw: &str) -> Result<(), Box> { + let value: Value = serde_json::from_str(raw)?; + let launch = LaunchConfig::from_value(value)?; + let mut built = build_launch(launch.resolve(None)?)?; + let session = built.runtime.session_mut(); + + session.run_to_first_executed_statement()?; + match session.run_to_completion() { + Ok(()) => { + println!("Execution completed successfully."); + Ok(()) + } + Err(err) => { + let report = session.build_failure_report(&err); + let formatted = format_failure_report(&report, &format_value); + eprintln!("{formatted}"); + std::process::exit(1); + } + } +} diff --git a/debugger/dap/src/refs.rs b/debugger/dap/src/refs.rs new file mode 100644 index 0000000..a0f14dc --- /dev/null +++ b/debugger/dap/src/refs.rs @@ -0,0 +1,41 @@ +use std::collections::HashMap; + +#[derive(Debug, Clone)] +pub enum ScopeKind { + Variables, + DataStack, + AltStack, +} + +#[derive(Debug, Clone)] +pub enum RefTarget { + Scope { kind: ScopeKind, sequence: u32, frame_token: u32 }, +} + +#[derive(Debug, Default)] +pub struct RefAllocator { + next_id: i64, + refs: HashMap, +} + +impl RefAllocator { + pub fn new() -> Self { + Self { next_id: 1, refs: HashMap::new() } + } + + pub fn reset(&mut self) { + self.next_id = 1; + self.refs.clear(); + } + + pub fn alloc(&mut self, target: RefTarget) -> i64 { + let id = self.next_id; + self.next_id += 1; + self.refs.insert(id, target); + id + } + + pub fn get(&self, id: i64) -> Option<&RefTarget> { + self.refs.get(&id) + } +} diff --git a/debugger/dap/src/runtime_builder.rs b/debugger/dap/src/runtime_builder.rs new file mode 100644 index 0000000..ed1fa77 --- /dev/null +++ b/debugger/dap/src/runtime_builder.rs @@ -0,0 +1,1183 @@ +use std::collections::HashMap; +use std::fs; 
+use std::path::PathBuf; +use std::ptr::NonNull; + +use debugger_session::args::{parse_call_args, parse_call_args_with_prefix, parse_ctor_args, parse_hex_bytes, parse_state_value}; +use debugger_session::covenant::{CovenantBinding as DebugCovenantBinding, ResolvedCovenantCallTarget, resolve_covenant_call_target}; +use debugger_session::session::{DebugEngine, DebugSession, DebugValue, ShadowTxContext}; +use debugger_session::test_runner::{TestTxInputScenarioResolved, TestTxOutputScenarioResolved, TestTxScenarioResolved}; +use kaspa_consensus_core::Hash; +use kaspa_consensus_core::hashing::sighash::{SigHashReusedValuesUnsync, calc_schnorr_signature_hash}; +use kaspa_consensus_core::hashing::sighash_type::SIG_HASH_ALL; +use kaspa_consensus_core::tx::{ + CovenantBinding, PopulatedTransaction, ScriptPublicKey, Transaction, TransactionId, TransactionInput, TransactionOutpoint, + TransactionOutput, TxInputMass, UtxoEntry, VerifiableTransaction, +}; +use kaspa_txscript::caches::Cache; +use kaspa_txscript::covenants::CovenantsContext; +use kaspa_txscript::script_builder::ScriptBuilder; +use kaspa_txscript::{EngineCtx, EngineFlags, SigCacheKey, pay_to_script_hash_script}; +use secp256k1::{Keypair, Message, Secp256k1, SecretKey, rand::thread_rng}; +use serde_json::Value; +use silverscript_lang::ast::{ContractAst, Expr, ExprKind, StateFieldExpr, TypeBase, TypeRef, parse_contract_ast}; +use silverscript_lang::compiler::{CompileOptions, CompiledContract, compile_contract, compile_contract_ast}; + +use crate::launch_config::{ArgInput, ResolvedLaunchConfig, resolve_arg_input}; + +pub struct BuiltLaunch { + pub runtime: OwnedRuntime, + pub source_path: PathBuf, + pub source_name: String, + pub stop_on_entry: bool, + pub no_debug: bool, +} + +pub fn build_launch(mut config: ResolvedLaunchConfig) -> Result { + resolve_launch_identities(&mut config)?; + + let source_owned = fs::read_to_string(&config.script_path) + .map_err(|err| format!("failed to read source '{}': {err}", 
config.script_path.display()))?; + let source_box = source_owned.into_boxed_str(); + let source_ptr = Box::into_raw(source_box); + let source: &'static str = unsafe { &*source_ptr }; + + let parsed_contract = parse_contract_ast(source).map_err(|err| format!("parse error: {err}"))?; + let ctor_param_names = parsed_contract.params.iter().map(|param| param.name.clone()).collect::>(); + let mut raw_ctor_args = resolve_arg_input(config.constructor_args.as_ref(), &ctor_param_names, "constructor arguments")?; + let tx = config.tx.unwrap_or_else(default_tx_scenario); + if raw_ctor_args.is_empty() + && let Some(active_input_ctor_args) = tx.inputs.get(tx.active_input_index).and_then(|input| input.constructor_args.clone()) + { + raw_ctor_args = active_input_ctor_args; + } + let ctor_args = parse_ctor_args(&parsed_contract, &raw_ctor_args)?; + + let compile_opts = CompileOptions { record_debug_infos: true, ..Default::default() }; + let compiled = compile_contract(source, &ctor_args, compile_opts).map_err(|err| format!("compile error: {err}"))?; + let selected_name = resolve_entrypoint_name(&compiled.abi, config.function)?; + let selected_function = parsed_contract + .functions + .iter() + .find(|function| function.name == selected_name) + .ok_or_else(|| format!("function '{selected_name}' not found"))?; + let input_names = selected_function.params.iter().map(|param| param.name.clone()).collect::>(); + let raw_args = resolve_arg_input(config.args.as_ref(), &input_names, "function arguments")?; + + if tx.inputs.is_empty() { + return Err("tx.inputs must contain at least one input".to_string()); + } + if tx.active_input_index >= tx.inputs.len() { + return Err(format!("tx.active_input_index {} out of range for {} inputs", tx.active_input_index, tx.inputs.len())); + } + + let covenant_target = resolve_covenant_call_target(&parsed_contract, &compiled, &selected_name); + let covenant_binding = covenant_target.as_ref().map(|target| target.binding); + let enable_covenant_session_mode = 
covenant_target.is_some(); + + let mut ctor_script_cache = HashMap::, Vec>::new(); + let mut ctor_state_cache = HashMap::, DebugValue>::new(); + let mut explicit_state_cache = HashMap::::new(); + ctor_script_cache.insert(raw_ctor_args.clone(), compiled.script.clone()); + if !parsed_contract.fields.is_empty() { + let root_state = resolve_state_for_ctor_args(&parsed_contract, &raw_ctor_args, &mut ctor_state_cache)?; + ctor_state_cache.insert(raw_ctor_args.clone(), root_state); + } + + let mut input_prev_outpoints = Vec::with_capacity(tx.inputs.len()); + let mut input_sequences = Vec::with_capacity(tx.inputs.len()); + let mut input_sig_op_counts = Vec::with_capacity(tx.inputs.len()); + let mut explicit_input_sigs = Vec::with_capacity(tx.inputs.len()); + let mut utxo_specs = Vec::with_capacity(tx.inputs.len()); + let mut input_covenant_ids = Vec::with_capacity(tx.inputs.len()); + let mut input_covenant_states = Vec::with_capacity(tx.inputs.len()); + let mut input_redeem_scripts = Vec::with_capacity(tx.inputs.len()); + for (input_idx, input) in tx.inputs.iter().enumerate() { + let mut default_prev_txid = [0u8; 32]; + default_prev_txid.fill(input_idx as u8); + let prev_txid = if let Some(raw_txid) = input.prev_txid.as_deref() { + parse_txid32(raw_txid)? + } else { + TransactionId::from_bytes(default_prev_txid) + }; + + let input_ctor_raw = input.constructor_args.clone().unwrap_or_else(|| raw_ctor_args.clone()); + let input_covenant_state = if let Some(raw_state) = input.state.as_deref() { + Some(resolve_state_from_raw(&parsed_contract, raw_state, &mut explicit_state_cache)?) + } else if input.utxo_script_hex.is_none() || input.constructor_args.is_some() { + Some(resolve_state_for_ctor_args(&parsed_contract, &input_ctor_raw, &mut ctor_state_cache)?) 
+ } else { + None + }; + let redeem_script = if input.utxo_script_hex.is_none() { + if let Some(raw_state) = input.state.as_deref() { + Some(materialize_script_for_explicit_state(source, &parsed_contract, &input_ctor_raw, raw_state)?) + } else { + Some(compile_script_for_ctor_args(source, &parsed_contract, &input_ctor_raw, &mut ctor_script_cache)?) + } + } else { + None + }; + + let utxo_spk = if let Some(raw_script) = input.utxo_script_hex.as_deref() { + ScriptPublicKey::new(0, parse_hex_bytes(raw_script)?.into()) + } else { + let redeem = redeem_script + .as_ref() + .ok_or_else(|| "internal error: missing redeem script for tx input without utxo_script_hex".to_string())?; + pay_to_script_hash_script(redeem) + }; + + let covenant_id = input.covenant_id.as_deref().map(parse_hash32).transpose()?; + input_prev_outpoints.push(TransactionOutpoint { transaction_id: prev_txid, index: input.prev_index }); + input_sequences.push(input.sequence); + input_sig_op_counts.push(input.sig_op_count); + explicit_input_sigs.push(input.signature_script_hex.as_deref().map(parse_hex_bytes).transpose()?); + utxo_specs.push((input.utxo_value, utxo_spk, covenant_id)); + input_covenant_ids.push(covenant_id); + input_covenant_states.push(input_covenant_state); + input_redeem_scripts.push(redeem_script); + } + + let mut tx_outputs = Vec::with_capacity(tx.outputs.len()); + let mut output_covenant_ids = Vec::with_capacity(tx.outputs.len()); + let mut output_covenant_states = Vec::with_capacity(tx.outputs.len()); + for output in tx.outputs.iter() { + let output_ctor_raw = output.constructor_args.clone().unwrap_or_else(|| raw_ctor_args.clone()); + let output_state = if let Some(raw_state) = output.state.as_deref() { + Some(resolve_state_from_raw(&parsed_contract, raw_state, &mut explicit_state_cache)?) + } else if output.script_hex.is_none() || output.constructor_args.is_some() { + Some(resolve_state_for_ctor_args(&parsed_contract, &output_ctor_raw, &mut ctor_state_cache)?) 
+ } else { + None + }; + let script_public_key = if let Some(raw_script) = output.script_hex.as_deref() { + ScriptPublicKey::new(0, parse_hex_bytes(raw_script)?.into()) + } else if let Some(raw_pubkey) = output.p2pk_pubkey.as_deref() { + let pubkey_bytes = parse_hex_bytes(raw_pubkey)?; + ScriptPublicKey::new(0, build_p2pk_script(&pubkey_bytes).into()) + } else { + let output_script = if let Some(raw_state) = output.state.as_deref() { + materialize_script_for_explicit_state(source, &parsed_contract, &output_ctor_raw, raw_state)? + } else { + compile_script_for_ctor_args(source, &parsed_contract, &output_ctor_raw, &mut ctor_script_cache)? + }; + pay_to_script_hash_script(&output_script) + }; + + let covenant = output + .covenant_id + .as_deref() + .map(|raw| -> Result { + Ok(CovenantBinding { + authorizing_input: output.authorizing_input.unwrap_or(tx.active_input_index as u16), + covenant_id: parse_hash32(raw)?, + }) + }) + .transpose()?; + let output_covenant_id = covenant.as_ref().map(|binding| binding.covenant_id); + tx_outputs.push(TransactionOutput { value: output.value, script_public_key, covenant }); + output_covenant_ids.push(output_covenant_id); + output_covenant_states.push(output_state); + } + + let active_covenant_id = input_covenant_ids.get(tx.active_input_index).copied().flatten(); + let companion_leader_index = if covenant_target.as_ref().is_some_and(|target| target.binding == DebugCovenantBinding::Cov) { + active_covenant_id.and_then(|covenant_id| { + input_covenant_ids + .iter() + .enumerate() + .filter_map(|(index, input_covenant_id)| (*input_covenant_id == Some(covenant_id)).then_some(index)) + .min() + }) + } else { + None + }; + let active_authorized_output_states = tx + .outputs + .iter() + .zip(output_covenant_states.iter()) + .filter_map(|(output, output_state)| { + (output.authorizing_input.unwrap_or(tx.active_input_index as u16) == tx.active_input_index as u16) + .then_some(output_state.clone()) + }) + .collect::>>(); + let 
covenant_group_output_states = active_covenant_id.and_then(|covenant_id| { + output_covenant_ids + .iter() + .zip(output_covenant_states.iter()) + .filter_map(|(output_covenant_id, output_state)| { + (*output_covenant_id == Some(covenant_id)).then_some(output_state.clone()) + }) + .collect::>>() + }); + + let active_input_ctor_raw = tx.inputs[tx.active_input_index].constructor_args.clone().unwrap_or_else(|| raw_ctor_args.clone()); + let active_compiled = compile_contract_for_raw_ctor_args(source, &parsed_contract, &active_input_ctor_raw)?; + let active_is_cov_leader = companion_leader_index.map(|index| index == tx.active_input_index).unwrap_or(true); + let active_sigscript = if let Some(target) = covenant_target.as_ref() { + match target.binding { + DebugCovenantBinding::Auth => { + build_covenant_input_sigscript(&active_compiled, target, true, &raw_args, active_authorized_output_states.as_deref())? + } + DebugCovenantBinding::Cov => build_covenant_input_sigscript( + &active_compiled, + target, + active_is_cov_leader, + &raw_args, + covenant_group_output_states.as_deref(), + )?, + } + } else { + let active_raw_args = + resolve_auto_sign_args(&selected_name, &raw_args, source, &parsed_contract, &raw_ctor_args, &tx, &mut ctor_script_cache)?; + let typed_args = parse_call_args(&parsed_contract, &selected_name, &active_raw_args)?; + active_compiled.build_sig_script(&selected_name, typed_args).map_err(|err| format!("failed to build sigscript: {err}"))? + }; + + let mut tx_inputs = Vec::with_capacity(tx.inputs.len()); + for input_idx in 0..tx.inputs.len() { + let signature_script = if let Some(signature_script) = explicit_input_sigs[input_idx].clone() { + signature_script + } else if input_idx == tx.active_input_index { + if let Some(redeem) = input_redeem_scripts[input_idx].as_ref() { + combine_action_and_redeem(&active_sigscript, redeem)? 
+ } else { + active_sigscript.clone() + } + } else if let Some(target) = covenant_target.as_ref() + && target.binding == DebugCovenantBinding::Cov + && input_covenant_ids[input_idx] == active_covenant_id + && input_redeem_scripts[input_idx].is_some() + { + let is_leader = Some(input_idx) == companion_leader_index; + let input_ctor_raw = tx.inputs[input_idx].constructor_args.clone().unwrap_or_else(|| raw_ctor_args.clone()); + let input_compiled = compile_contract_for_raw_ctor_args(source, &parsed_contract, &input_ctor_raw)?; + let auto_action = build_covenant_input_sigscript( + &input_compiled, + target, + is_leader, + &raw_args, + covenant_group_output_states.as_deref(), + )?; + combine_action_and_redeem(&auto_action, input_redeem_scripts[input_idx].as_ref().expect("checked is_some above"))? + } else if let Some(redeem) = input_redeem_scripts[input_idx].as_ref() { + sigscript_push_script(redeem) + } else { + vec![] + }; + + tx_inputs.push(TransactionInput { + previous_outpoint: input_prev_outpoints[input_idx], + signature_script, + sequence: input_sequences[input_idx], + mass: TxInputMass::SigopCount(input_sig_op_counts[input_idx].into()), + }); + } + + let transaction = + Box::into_raw(Box::new(Transaction::new(tx.version, tx_inputs, tx_outputs, tx.lock_time, Default::default(), 0, vec![]))); + let transaction_ref = unsafe { &*transaction }; + let reused_values = Box::into_raw(Box::new(SigHashReusedValuesUnsync::new())); + let reused_values_ref = unsafe { &*reused_values }; + let utxos = utxo_specs + .into_iter() + .map(|(value, spk, covenant_id)| UtxoEntry::new(value, spk, 0, transaction_ref.is_coinbase(), covenant_id)) + .collect::>(); + let populated_tx = Box::into_raw(Box::new(PopulatedTransaction::new(transaction_ref, utxos))); + let populated_tx_ref = unsafe { &*populated_tx }; + let covenants_ctx = Box::into_raw(Box::new( + CovenantsContext::from_tx(populated_tx_ref).map_err(|err| format!("failed to build covenant context: {err}"))?, + )); + let 
covenants_ctx_ref = unsafe { &*covenants_ctx }; + let active_input = transaction_ref + .inputs + .get(tx.active_input_index) + .ok_or_else(|| format!("missing tx input at index {}", tx.active_input_index))?; + let active_utxo = populated_tx_ref + .utxo(tx.active_input_index) + .ok_or_else(|| format!("missing utxo entry for input {}", tx.active_input_index))?; + let active_covenant_input_state = input_covenant_states.get(tx.active_input_index).cloned().flatten(); + let active_lockscript = + input_redeem_scripts.get(tx.active_input_index).cloned().flatten().unwrap_or_else(|| compiled.script.clone()); + let covenant_input_states = active_utxo.covenant_id.and_then(|covenant_id| { + let mut values = Vec::new(); + for (input_covenant_id, covenant_input_state) in input_covenant_ids.iter().zip(input_covenant_states.iter()) { + if *input_covenant_id != Some(covenant_id) { + continue; + } + values.push(covenant_input_state.clone()?); + } + Some(values) + }); + let covenant_param_value = match covenant_binding { + Some(DebugCovenantBinding::Auth) => active_covenant_input_state.clone(), + Some(DebugCovenantBinding::Cov) => covenant_input_states.clone().map(DebugValue::Array), + None => None, + }; + + let cache_ptr = Box::into_raw(Box::new(Cache::new(10_000))); + let cache = unsafe { &*cache_ptr }; + let flags = EngineFlags { covenants_enabled: true, ..Default::default() }; + let ctx = EngineCtx::new(cache).with_reused(reused_values_ref).with_covenants_ctx(covenants_ctx_ref); + let engine = DebugEngine::from_transaction_input(populated_tx_ref, active_input, tx.active_input_index, active_utxo, ctx, flags); + let shadow_tx_context = ShadowTxContext { + tx: populated_tx_ref, + input: active_input, + input_index: tx.active_input_index, + utxo_entry: active_utxo, + covenants_ctx: covenants_ctx_ref, + }; + let mut session = DebugSession::full(&active_sigscript, &active_lockscript, source, compiled.debug_info.clone(), engine) + .map_err(|err| format!("failed to create debug session: 
{err}"))? + .with_shadow_tx_context(shadow_tx_context); + if enable_covenant_session_mode { + session = session.with_covenant_mode(covenant_param_value, covenant_target); + } + let runtime = OwnedRuntime { + session, + _backing: RuntimeBacking { + source: Some(unsafe { NonNull::new_unchecked(source_ptr) }), + cache: Some(unsafe { NonNull::new_unchecked(cache_ptr) }), + transaction: unsafe { NonNull::new_unchecked(transaction) }, + populated_tx: unsafe { NonNull::new_unchecked(populated_tx) }, + covenants_ctx: unsafe { NonNull::new_unchecked(covenants_ctx) }, + reused_values: unsafe { NonNull::new_unchecked(reused_values) }, + }, + }; + + let source_name = config + .script_path + .file_name() + .and_then(|name| name.to_str()) + .map(ToOwned::to_owned) + .unwrap_or_else(|| config.script_path.to_string_lossy().to_string()); + + Ok(BuiltLaunch { + runtime, + source_path: config.script_path, + source_name, + stop_on_entry: config.stop_on_entry, + no_debug: config.no_debug, + }) +} + +fn resolve_entrypoint_name( + abi: &[silverscript_lang::compiler::FunctionAbiEntry], + requested: Option, +) -> Result { + if let Some(function) = requested { + return Ok(function); + } + + match abi { + [] => Err("contract has no functions".to_string()), + [entry] => Ok(entry.name.clone()), + entries => { + let names = entries.iter().map(|entry| entry.name.as_str()).collect::>().join(", "); + Err(format!("launch config must include 'function' for multi-entrypoint contract (available: {names})")) + } + } +} + +fn default_tx_scenario() -> TestTxScenarioResolved { + TestTxScenarioResolved { + version: 1, + lock_time: 0, + active_input_index: 0, + inputs: vec![TestTxInputScenarioResolved { + prev_txid: None, + prev_index: 0, + sequence: 0, + sig_op_count: 100, + utxo_value: 5000, + covenant_id: None, + constructor_args: None, + state: None, + signature_script_hex: None, + utxo_script_hex: None, + }], + outputs: vec![TestTxOutputScenarioResolved { + value: 5000, + covenant_id: None, + 
authorizing_input: None, + constructor_args: None, + state: None, + script_hex: None, + p2pk_pubkey: None, + }], + } +} + +fn resolve_auto_sign_args( + function_name: &str, + raw_args: &[String], + source: &str, + parsed_contract: &ContractAst<'_>, + raw_ctor_args: &[String], + tx: &TestTxScenarioResolved, + ctor_script_cache: &mut HashMap, Vec>, +) -> Result, String> { + let mut resolved = raw_args.to_vec(); + let mut has_secret_sig = false; + let function = parsed_contract + .functions + .iter() + .find(|function| function.name == function_name) + .ok_or_else(|| format!("function '{function_name}' not found"))?; + + for (param, raw) in function.params.iter().zip(raw_args.iter()) { + let type_name = param.type_ref.type_name(); + if type_name != "sig" && type_name != "datasig" { + continue; + } + let bytes = parse_hex_bytes(raw)?; + if type_name == "sig" && bytes.len() == 32 { + has_secret_sig = true; + continue; + } + if type_name == "datasig" && bytes.len() == 32 { + return Err(format!( + "function argument '{}' uses a 32-byte secret key for datasig, but debugger launch only auto-signs 'sig' arguments", + param.name + )); + } + } + + if !has_secret_sig { + return Ok(resolved); + } + + let (signing_transaction, signing_utxos, signing_reused_values) = + build_signing_tx_parts(source, parsed_contract, raw_ctor_args, tx, ctor_script_cache)?; + let signing_populated = PopulatedTransaction::new(&signing_transaction, signing_utxos); + + for (index, param) in function.params.iter().enumerate() { + let type_name = param.type_ref.type_name(); + if type_name != "sig" { + continue; + } + let secret_bytes = parse_hex_bytes(&resolved[index])?; + if secret_bytes.len() != 32 { + continue; + } + resolved[index] = sign_tx_input(&secret_bytes, &signing_populated, tx.active_input_index, &signing_reused_values) + .map_err(|err| format!("failed to auto-sign argument '{}': {err}", param.name))?; + } + + Ok(resolved) +} + +#[derive(Debug, Clone)] +struct IdentityMaterial { + pubkey: 
String, + secret: String, + pkh: String, +} + +#[derive(Default)] +struct IdentityResolver { + cache: HashMap, +} + +impl IdentityResolver { + fn resolve_string(&mut self, raw: &str) -> Result { + let Some((index, field)) = parse_identity_token(raw)? else { + return Ok(raw.to_string()); + }; + let identity = self.cache.entry(index).or_insert_with(generate_identity_material); + Ok(match field { + IdentityField::Pubkey => identity.pubkey.clone(), + IdentityField::Secret => identity.secret.clone(), + IdentityField::Pkh => identity.pkh.clone(), + }) + } +} + +#[derive(Debug, Clone, Copy)] +enum IdentityField { + Pubkey, + Secret, + Pkh, +} + +fn resolve_launch_identities(config: &mut ResolvedLaunchConfig) -> Result<(), String> { + let mut resolver = IdentityResolver::default(); + + if let Some(input) = config.constructor_args.as_mut() { + resolve_arg_input_identities(input, &mut resolver)?; + } + if let Some(input) = config.args.as_mut() { + resolve_arg_input_identities(input, &mut resolver)?; + } + if let Some(tx) = config.tx.as_mut() { + resolve_tx_identities(tx, &mut resolver)?; + } + + Ok(()) +} + +fn resolve_arg_input_identities(input: &mut ArgInput, resolver: &mut IdentityResolver) -> Result<(), String> { + match input { + ArgInput::Values(values) => { + for value in values { + resolve_json_value_identities(value, resolver)?; + } + } + ArgInput::Named(named) => { + for value in named.values_mut() { + resolve_json_value_identities(value, resolver)?; + } + } + } + Ok(()) +} + +fn resolve_json_value_identities(value: &mut Value, resolver: &mut IdentityResolver) -> Result<(), String> { + match value { + Value::String(raw) => { + *raw = resolver.resolve_string(raw)?; + } + Value::Array(items) => { + for item in items { + resolve_json_value_identities(item, resolver)?; + } + } + Value::Object(entries) => { + for entry in entries.values_mut() { + resolve_json_value_identities(entry, resolver)?; + } + } + Value::Null | Value::Bool(_) | Value::Number(_) => {} + } + Ok(()) 
}

/// Expands identity tokens (e.g. `keypair1.pubkey`) inside every string field of
/// the tx scenario override: input txids, covenant ids, constructor args and
/// script hex, plus the corresponding output fields.
fn resolve_tx_identities(tx: &mut TestTxScenarioResolved, resolver: &mut IdentityResolver) -> Result<(), String> {
    for input in &mut tx.inputs {
        resolve_optional_string(&mut input.prev_txid, resolver)?;
        resolve_optional_string(&mut input.covenant_id, resolver)?;
        resolve_optional_strings(&mut input.constructor_args, resolver)?;
        resolve_optional_string(&mut input.signature_script_hex, resolver)?;
        resolve_optional_string(&mut input.utxo_script_hex, resolver)?;
    }

    for output in &mut tx.outputs {
        resolve_optional_string(&mut output.covenant_id, resolver)?;
        resolve_optional_strings(&mut output.constructor_args, resolver)?;
        resolve_optional_string(&mut output.script_hex, resolver)?;
        resolve_optional_string(&mut output.p2pk_pubkey, resolver)?;
    }

    Ok(())
}

/// Resolves identity tokens in an optional string in place; `None` is left untouched.
fn resolve_optional_string(raw: &mut Option, resolver: &mut IdentityResolver) -> Result<(), String> {
    if let Some(value) = raw.as_mut() {
        *value = resolver.resolve_string(value)?;
    }
    Ok(())
}

/// Resolves identity tokens in every element of an optional string list in place.
fn resolve_optional_strings(values: &mut Option>, resolver: &mut IdentityResolver) -> Result<(), String> {
    if let Some(entries) = values.as_mut() {
        for value in entries {
            *value = resolver.resolve_string(value)?;
        }
    }
    Ok(())
}

/// Parses an identity token of the form `keypairN.field` (alias `identityN.field`).
/// Returns `Ok(None)` for strings that are not identity tokens at all, and an
/// error for strings that start like a token but are malformed.
fn parse_identity_token(raw: &str) -> Result, String> {
    let trimmed = raw.trim();
    // Anything not starting with a token prefix is treated as a literal value.
    if !trimmed.starts_with("keypair") && !trimmed.starts_with("identity") {
        return Ok(None);
    }

    // A token must have a `.field` suffix.
    let Some((head, suffix)) = trimmed.split_once('.') else {
        return Err(format!("invalid identity token '{raw}'; expected keypair.pubkey, keypair.secret, or keypair.pkh"));
    };

    let index_raw = if let Some(value) = head.strip_prefix("keypair") {
        value
    } else if let Some(value) = head.strip_prefix("identity") {
        value
    } else {
        return Err(format!("invalid identity token '{raw}'; expected keypair.pubkey, keypair.secret, or keypair.pkh"));
    };

    // `keypair.pubkey` without an index is rejected; indices start at 1 (checked below).
    if index_raw.is_empty() {
        return Err(format!("invalid identity token '{raw}'; expected keypair.pubkey, 
keypair.secret, or keypair.pkh")); + } + + let index = index_raw + .parse::() + .map_err(|_| format!("invalid identity token '{raw}'; expected keypair.pubkey, keypair.secret, or keypair.pkh"))?; + if index == 0 { + return Err(format!("invalid identity token '{raw}'; keypair index must be >= 1")); + } + + let field = match suffix { + "pubkey" => IdentityField::Pubkey, + "secret" => IdentityField::Secret, + "pkh" => IdentityField::Pkh, + _ => { + return Err(format!("invalid identity token '{raw}'; expected keypair.pubkey, keypair.secret, or keypair.pkh")); + } + }; + + Ok(Some((index, field))) +} + +fn generate_identity_material() -> IdentityMaterial { + let secp = Secp256k1::new(); + let keypair = Keypair::new(&secp, &mut thread_rng()); + let (xonly, _parity) = keypair.x_only_public_key(); + let secret_bytes = keypair.secret_key().secret_bytes(); + let pubkey_bytes = xonly.serialize(); + let pkh = blake2b_simd::Params::new().hash_length(32).hash(&pubkey_bytes); + + IdentityMaterial { + pubkey: format!("0x{}", encode_hex(&pubkey_bytes)), + secret: format!("0x{}", encode_hex(&secret_bytes)), + pkh: format!("0x{}", encode_hex(pkh.as_bytes())), + } +} + +fn expr_to_debug_value(expr: &Expr<'_>) -> Result { + match &expr.kind { + ExprKind::Int(value) => Ok(DebugValue::Int(*value)), + ExprKind::Bool(value) => Ok(DebugValue::Bool(*value)), + ExprKind::Byte(value) => Ok(DebugValue::Bytes(vec![*value])), + ExprKind::String(value) => Ok(DebugValue::String(value.clone())), + ExprKind::Array(values) => { + if values.iter().all(|value| matches!(value.kind, ExprKind::Byte(_))) { + return Ok(DebugValue::Bytes( + values + .iter() + .map(|value| match value.kind { + ExprKind::Byte(byte) => byte, + _ => unreachable!("checked"), + }) + .collect(), + )); + } + Ok(DebugValue::Array(values.iter().map(expr_to_debug_value).collect::, _>>()?)) + } + ExprKind::StateObject(fields) => Ok(DebugValue::Object( + fields + .iter() + .map(|field| Ok((field.name.clone(), 
expr_to_debug_value(&field.expr)?))) + .collect::, String>>()?, + )), + other => Err(format!("unsupported resolved state expression in debugger: {other:?}")), + } +} + +fn debug_value_to_expr(value: &DebugValue) -> Option> { + Some(match value { + DebugValue::Int(value) => Expr::int(*value), + DebugValue::Bool(value) => Expr::new(ExprKind::Bool(*value), Default::default()), + DebugValue::Bytes(bytes) => Expr::new( + ExprKind::Array(bytes.iter().map(|byte| Expr::new(ExprKind::Byte(*byte), Default::default())).collect()), + Default::default(), + ), + DebugValue::String(value) => Expr::new(ExprKind::String(value.clone()), Default::default()), + DebugValue::Array(values) => { + Expr::new(ExprKind::Array(values.iter().map(debug_value_to_expr).collect::>>()?), Default::default()) + } + DebugValue::Object(fields) => Expr::new( + ExprKind::StateObject( + fields + .iter() + .map(|(name, value)| { + Some(StateFieldExpr { + name: name.clone(), + expr: debug_value_to_expr(value)?, + span: Default::default(), + name_span: Default::default(), + }) + }) + .collect::>>()?, + ), + Default::default(), + ), + DebugValue::Unknown(_) => return None, + }) +} + +fn is_state_type_ref(type_ref: &TypeRef) -> bool { + !type_ref.is_array() && matches!(&type_ref.base, TypeBase::Custom(name) if name == "State") +} + +fn is_state_array_type_ref(type_ref: &TypeRef) -> bool { + type_ref.is_array() && matches!(&type_ref.base, TypeBase::Custom(name) if name == "State") +} + +fn synthesized_covenant_prefix_args( + compiled: &CompiledContract<'_>, + entrypoint_name: &str, + target: &ResolvedCovenantCallTarget, + output_states: Option<&[DebugValue]>, +) -> Result>, String> { + if target.binding == DebugCovenantBinding::Cov && entrypoint_name.starts_with("__delegate_") { + return Ok(Vec::new()); + } + + let function = compiled + .ast + .functions + .iter() + .find(|function| function.name == entrypoint_name) + .ok_or_else(|| "generated covenant entrypoint not found".to_string())?; + let 
Some(first_param) = function.params.first() else { + return Ok(Vec::new()); + }; + + let states = + output_states.ok_or_else(|| "missing output states needed to synthesize covenant verification arguments".to_string())?; + if is_state_type_ref(&first_param.type_ref) { + if states.len() != 1 { + return Err(format!("expected exactly 1 output State for '{entrypoint_name}', got {}", states.len())); + } + return Ok(vec![debug_value_to_expr(&states[0]).ok_or_else(|| "failed to materialize synthesized output State".to_string())?]); + } + if is_state_array_type_ref(&first_param.type_ref) { + return Ok(vec![Expr::new( + ExprKind::Array( + states + .iter() + .map(debug_value_to_expr) + .collect::>>() + .ok_or_else(|| "failed to materialize synthesized output State[]".to_string())?, + ), + Default::default(), + )]); + } + + Ok(Vec::new()) +} + +fn build_covenant_input_sigscript<'i>( + compiled: &CompiledContract<'i>, + target: &ResolvedCovenantCallTarget, + is_leader: bool, + raw_args: &[String], + output_states: Option<&[DebugValue]>, +) -> Result, String> { + let entrypoint_name = target.generated_entrypoint_name_for(is_leader); + let typed_args = if target.binding == DebugCovenantBinding::Cov && !is_leader { + Vec::new() + } else { + let function = compiled + .ast + .functions + .iter() + .find(|function| function.name == entrypoint_name) + .ok_or_else(|| "generated covenant entrypoint not found".to_string())?; + if raw_args.len() == function.params.len() { + parse_call_args(&compiled.ast, &entrypoint_name, raw_args)? + } else { + let prefix_args = synthesized_covenant_prefix_args(compiled, &entrypoint_name, target, output_states)?; + parse_call_args_with_prefix(&compiled.ast, &entrypoint_name, prefix_args, raw_args)? 
+ } + }; + compiled.build_sig_script(&entrypoint_name, typed_args).map_err(|err| format!("failed to build covenant sigscript: {err}")) +} + +fn resolve_state_for_ctor_args( + parsed_contract: &ContractAst<'_>, + raw_ctor_args: &[String], + cache: &mut HashMap, DebugValue>, +) -> Result { + if let Some(value) = cache.get(raw_ctor_args) { + return Ok(value.clone()); + } + + let ctor_args = parse_ctor_args(parsed_contract, raw_ctor_args)?; + let state_fields = parsed_contract.resolve_contract_state_values(&ctor_args).map_err(|err| err.to_string())?; + let value = DebugValue::Object( + state_fields + .iter() + .map(|field| Ok((field.name.clone(), expr_to_debug_value(&field.value)?))) + .collect::, String>>()?, + ); + cache.insert(raw_ctor_args.to_vec(), value.clone()); + Ok(value) +} + +fn resolve_state_from_raw( + parsed_contract: &ContractAst<'_>, + raw_state: &str, + cache: &mut HashMap, +) -> Result { + if let Some(value) = cache.get(raw_state) { + return Ok(value.clone()); + } + + let expr = parse_state_value(parsed_contract, raw_state)?; + let value = expr_to_debug_value(&expr)?; + cache.insert(raw_state.to_string(), value.clone()); + Ok(value) +} + +fn materialize_script_for_explicit_state( + source: &str, + parsed_contract: &ContractAst<'_>, + raw_instance_args: &[String], + raw_state: &str, +) -> Result, String> { + let instance_args = parse_ctor_args(parsed_contract, raw_instance_args)?; + let state = parse_state_value(parsed_contract, raw_state)?; + let compile_opts = CompileOptions { record_debug_infos: true, ..Default::default() }; + let base_compiled = compile_contract(source, &instance_args, compile_opts).map_err(|err| format!("compile error: {err}"))?; + let materialized_contract = contract_with_explicit_state(parsed_contract, &state)?; + let materialized = + compile_contract_ast(&materialized_contract, &instance_args, compile_opts).map_err(|err| format!("compile error: {err}"))?; + + let base_start = base_compiled.state_layout.start; + let base_end = 
base_start + base_compiled.state_layout.len; + let materialized_start = materialized.state_layout.start; + let materialized_end = materialized_start + materialized.state_layout.len; + if base_compiled.state_layout.len != materialized.state_layout.len { + return Err("explicit state changes encoded script size; provide raw script_hex instead".to_string()); + } + if base_compiled.script.len() < base_end || materialized.script.len() < materialized_end { + return Err("state layout exceeds compiled script length".to_string()); + } + if base_compiled.script[..base_start] != materialized.script[..materialized_start] + || base_compiled.script[base_end..] != materialized.script[materialized_end..] + { + return Err("explicit state changed non-state bytecode; provide raw script_hex instead".to_string()); + } + + let mut script = base_compiled.script; + script[base_start..base_end].copy_from_slice(&materialized.script[materialized_start..materialized_end]); + Ok(script) +} + +fn contract_with_explicit_state<'i>(contract: &ContractAst<'i>, state: &Expr<'i>) -> Result, String> { + let ExprKind::StateObject(entries) = &state.kind else { + return Err("State value must be an object literal".to_string()); + }; + + let mut provided = entries.iter().map(|entry| (entry.name.as_str(), entry.expr.clone())).collect::>(); + if provided.len() != contract.fields.len() { + return Err("State value must include all contract fields exactly once".to_string()); + } + + let mut materialized = contract.clone(); + for field in &mut materialized.fields { + field.expr = provided.remove(field.name.as_str()).ok_or_else(|| format!("missing state field '{}'", field.name))?; + } + if let Some(extra) = provided.keys().next() { + return Err(format!("unknown state field '{}'", extra)); + } + Ok(materialized) +} + +fn compile_contract_for_raw_ctor_args<'i>( + source: &'i str, + parsed_contract: &ContractAst<'i>, + raw_ctor_args: &[String], +) -> Result, String> { + let ctor_args = 
parse_ctor_args(parsed_contract, raw_ctor_args)?;
    compile_contract(source, &ctor_args, CompileOptions { record_debug_infos: true, ..Default::default() })
        .map_err(|err| format!("compile error: {err}"))
}

/// Owns a `DebugSession` together with the leaked heap allocations that back its
/// `'static` borrows. `session` is declared before `_backing`, so Rust's
/// declaration-order field drop runs the session's destructor before the
/// backing memory is reclaimed.
pub struct OwnedRuntime {
    pub session: DebugSession<'static, 'static>,
    _backing: RuntimeBacking,
}

/// Raw pointers produced via `Box::into_raw` in `build_launch`; reclaimed exactly
/// once in `Drop` below. The `Option` fields may be absent when a launch did not
/// allocate them.
struct RuntimeBacking {
    source: Option>,
    cache: Option>>,
    transaction: NonNull,
    populated_tx: NonNull>,
    covenants_ctx: NonNull,
    reused_values: NonNull,
}

impl OwnedRuntime {
    /// Shared access to the underlying debug session.
    pub fn session(&self) -> &DebugSession<'static, 'static> {
        &self.session
    }

    /// Mutable access to the underlying debug session.
    pub fn session_mut(&mut self) -> &mut DebugSession<'static, 'static> {
        &mut self.session
    }
}

impl Drop for RuntimeBacking {
    fn drop(&mut self) {
        // SAFETY: every pointer here was created by `Box::into_raw` in
        // `build_launch` and is dropped exactly once, in this block. Dependents
        // are freed before their dependencies: `covenants_ctx` was built from
        // `populated_tx`, which in turn borrows `transaction`, so they are
        // reclaimed in that order.
        unsafe {
            drop(Box::from_raw(self.covenants_ctx.as_ptr()));
            drop(Box::from_raw(self.populated_tx.as_ptr()));
            drop(Box::from_raw(self.transaction.as_ptr()));
            drop(Box::from_raw(self.reused_values.as_ptr()));
            if let Some(cache) = self.cache.take() {
                drop(Box::from_raw(cache.as_ptr()));
            }
            if let Some(source) = self.source.take() {
                drop(Box::from_raw(source.as_ptr()));
            }
        }
    }
}

/// Compiles the contract script for the given raw constructor arguments,
/// memoizing the resulting script bytes keyed by the raw argument list.
fn compile_script_for_ctor_args(
    source: &str,
    parsed_contract: &ContractAst<'_>,
    raw_ctor_args: &[String],
    cache: &mut HashMap, Vec>,
) -> Result, String> {
    if let Some(script) = cache.get(raw_ctor_args) {
        return Ok(script.clone());
    }
    let ctor_args = parse_ctor_args(parsed_contract, raw_ctor_args)?;
    let compiled = compile_contract(source, &ctor_args, CompileOptions::default()).map_err(|err| format!("compile error: {err}"))?;
    cache.insert(raw_ctor_args.to_vec(), compiled.script.clone());
    Ok(compiled.script)
}

/// Builds an unsigned copy of the scenario transaction plus its UTXO entries and
/// fresh sighash scratch state; used by `resolve_auto_sign_args` to compute
/// signature hashes without empty signature scripts affecting the digest.
fn build_signing_tx_parts(
    source: &str,
    parsed_contract: &ContractAst<'_>,
    raw_ctor_args: &[String],
    tx: &TestTxScenarioResolved,
    ctor_script_cache: &mut HashMap, Vec>,
) -> Result<(Transaction, Vec, SigHashReusedValuesUnsync), String> {
    let mut tx_inputs =
Vec::with_capacity(tx.inputs.len()); + let mut utxo_specs = Vec::with_capacity(tx.inputs.len()); + let mut explicit_state_cache = HashMap::::new(); + + for (input_idx, input) in tx.inputs.iter().enumerate() { + let mut default_prev_txid = [0u8; 32]; + default_prev_txid.fill(input_idx as u8); + let prev_txid = if let Some(raw_txid) = input.prev_txid.as_deref() { + parse_txid32(raw_txid)? + } else { + TransactionId::from_bytes(default_prev_txid) + }; + let input_ctor_raw = input.constructor_args.clone().unwrap_or_else(|| raw_ctor_args.to_vec()); + let redeem_script = if input.utxo_script_hex.is_none() { + if let Some(raw_state) = input.state.as_deref() { + Some(materialize_script_for_explicit_state(source, parsed_contract, &input_ctor_raw, raw_state)?) + } else { + Some(compile_script_for_ctor_args(source, parsed_contract, &input_ctor_raw, ctor_script_cache)?) + } + } else { + None + }; + let utxo_spk = if let Some(raw_script) = input.utxo_script_hex.as_deref() { + ScriptPublicKey::new(0, parse_hex_bytes(raw_script)?.into()) + } else { + let redeem = redeem_script + .as_ref() + .ok_or_else(|| "internal error: missing redeem script for tx input without utxo_script_hex".to_string())?; + pay_to_script_hash_script(redeem) + }; + let covenant_id = input.covenant_id.as_deref().map(parse_hash32).transpose()?; + tx_inputs.push(TransactionInput { + previous_outpoint: TransactionOutpoint { transaction_id: prev_txid, index: input.prev_index }, + signature_script: vec![], + sequence: input.sequence, + mass: TxInputMass::SigopCount(input.sig_op_count.into()), + }); + utxo_specs.push((input.utxo_value, utxo_spk, covenant_id)); + if let Some(raw_state) = input.state.as_deref() { + let _ = resolve_state_from_raw(parsed_contract, raw_state, &mut explicit_state_cache)?; + } + } + + let mut tx_outputs = Vec::with_capacity(tx.outputs.len()); + for output in &tx.outputs { + let output_ctor_raw = output.constructor_args.clone().unwrap_or_else(|| raw_ctor_args.to_vec()); + let 
script_public_key = if let Some(raw_script) = output.script_hex.as_deref() {
            // An explicit raw script wins over every derived form.
            ScriptPublicKey::new(0, parse_hex_bytes(raw_script)?.into())
        } else if let Some(raw_pubkey) = output.p2pk_pubkey.as_deref() {
            let pubkey_bytes = parse_hex_bytes(raw_pubkey)?;
            ScriptPublicKey::new(0, build_p2pk_script(&pubkey_bytes).into())
        } else {
            // Derive a P2SH output from either an explicit state override or the
            // (cached) compile for this output's constructor arguments.
            let output_script = if let Some(raw_state) = output.state.as_deref() {
                materialize_script_for_explicit_state(source, parsed_contract, &output_ctor_raw, raw_state)?
            } else {
                compile_script_for_ctor_args(source, parsed_contract, &output_ctor_raw, ctor_script_cache)?
            };
            pay_to_script_hash_script(&output_script)
        };
        let covenant = output
            .covenant_id
            .as_deref()
            .map(|raw| -> Result<CovenantBinding, String> {
                Ok(CovenantBinding {
                    // Default to the active input when no explicit authorizer is set.
                    authorizing_input: output.authorizing_input.unwrap_or(tx.active_input_index as u16),
                    covenant_id: parse_hash32(raw)?,
                })
            })
            .transpose()?;
        tx_outputs.push(TransactionOutput { value: output.value, script_public_key, covenant });
    }

    let transaction = Transaction::new(tx.version, tx_inputs, tx_outputs, tx.lock_time, Default::default(), 0, vec![]);
    let utxos = utxo_specs
        .into_iter()
        .map(|(value, spk, covenant_id)| UtxoEntry::new(value, spk, 0, transaction.is_coinbase(), covenant_id))
        .collect::<Vec<_>>();
    Ok((transaction, utxos, SigHashReusedValuesUnsync::new()))
}

/// Decodes `raw` hex into exactly 32 bytes; `what` names the field in error text.
/// Shared by `parse_hash32` and `parse_txid32`, which previously duplicated this
/// length check and copy.
fn parse_bytes32(raw: &str, what: &str) -> Result<[u8; 32], String> {
    let bytes = parse_hex_bytes(raw)?;
    <[u8; 32]>::try_from(bytes.as_slice()).map_err(|_| format!("{what} expects 32 bytes, got {}", bytes.len()))
}

/// Parses a 32-byte hex string into a covenant `Hash`.
fn parse_hash32(raw: &str) -> Result<Hash, String> {
    Ok(Hash::from_bytes(parse_bytes32(raw, "hash")?))
}

/// Parses a 32-byte hex string into a `TransactionId`.
fn parse_txid32(raw: &str) -> Result<TransactionId, String> {
    Ok(TransactionId::from_bytes(parse_bytes32(raw, "txid")?))
}

/// Builds a pay-to-pubkey script: a data push of the pubkey bytes followed by OP_CHECKSIG.
fn build_p2pk_script(pubkey: &[u8]) -> Vec<u8> {
    ScriptBuilder::new()
        .add_data(pubkey)
+ .expect("push pubkey") + .add_op(kaspa_txscript::opcodes::codes::OpCheckSig) + .expect("add OpCheckSig") + .drain() +} + +fn sigscript_push_script(script: &[u8]) -> Vec { + ScriptBuilder::new().add_data(script).expect("push script data").drain() +} + +fn combine_action_and_redeem(action: &[u8], redeem_script: &[u8]) -> Result, String> { + let mut builder = ScriptBuilder::new(); + builder.add_ops(action).map_err(|err| err.to_string())?; + builder.add_data(redeem_script).map_err(|err| err.to_string())?; + Ok(builder.drain()) +} + +fn sign_tx_input( + secret_key_bytes: &[u8], + tx: &PopulatedTransaction<'_>, + input_index: usize, + reused_values: &SigHashReusedValuesUnsync, +) -> Result { + let secret_key = SecretKey::from_slice(secret_key_bytes).map_err(|err| format!("invalid secret key: {err}"))?; + let secp = Secp256k1::new(); + let keypair = Keypair::from_secret_key(&secp, &secret_key); + let sig_hash = calc_schnorr_signature_hash(tx, input_index, SIG_HASH_ALL, reused_values); + let msg = Message::from_digest_slice(sig_hash.as_bytes().as_slice()).map_err(|err| format!("invalid sighash digest: {err}"))?; + let sig = keypair.sign_schnorr(msg); + let mut signature = Vec::with_capacity(65); + signature.extend_from_slice(sig.as_ref().as_slice()); + signature.push(SIG_HASH_ALL.to_u8()); + Ok(format!("0x{}", encode_hex(&signature))) +} + +fn encode_hex(bytes: &[u8]) -> String { + let mut out = String::with_capacity(bytes.len() * 2); + for byte in bytes { + out.push(char::from_digit((byte >> 4) as u32, 16).unwrap()); + out.push(char::from_digit((byte & 0x0f) as u32, 16).unwrap()); + } + out +} + +#[cfg(test)] +mod tests { + use std::fs; + use std::path::PathBuf; + use std::time::{SystemTime, UNIX_EPOCH}; + + use crate::launch_config::ResolvedLaunchConfig; + use debugger_session::test_runner::{TestTxInputScenarioResolved, TestTxOutputScenarioResolved, TestTxScenarioResolved}; + + use super::build_launch; + + const SIMPLE_SCRIPT: &str = r#"pragma silverscript ^0.1.0; + 
+contract Simple() { + entrypoint function main() { + int a = 1; + require(a == 1); + } +} +"#; + + struct TempScript { + path: PathBuf, + } + + impl TempScript { + fn new(source: &str) -> Self { + let unique = SystemTime::now().duration_since(UNIX_EPOCH).map(|duration| duration.as_nanos()).unwrap_or_default(); + let path = std::env::temp_dir().join(format!("silverscript-runtime-builder-{unique}.sil")); + fs::write(&path, source).expect("failed to write temp script"); + Self { path } + } + } + + impl Drop for TempScript { + fn drop(&mut self) { + let _ = fs::remove_file(&self.path); + } + } + + #[test] + fn build_launch_rejects_invalid_tx_override() { + let script = TempScript::new(SIMPLE_SCRIPT); + let config = ResolvedLaunchConfig { + script_path: script.path.clone(), + function: Some("main".to_string()), + constructor_args: None, + args: None, + tx: Some(TestTxScenarioResolved { + version: 1, + lock_time: 0, + active_input_index: 1, + inputs: vec![TestTxInputScenarioResolved { + prev_txid: None, + prev_index: 0, + sequence: 0, + sig_op_count: 100, + utxo_value: 5000, + covenant_id: None, + constructor_args: None, + state: None, + signature_script_hex: None, + utxo_script_hex: None, + }], + outputs: vec![TestTxOutputScenarioResolved { + value: 5000, + covenant_id: None, + authorizing_input: None, + constructor_args: None, + state: None, + script_hex: None, + p2pk_pubkey: None, + }], + }), + no_debug: false, + stop_on_entry: true, + }; + + let err = match build_launch(config) { + Ok(_) => panic!("invalid tx override should fail"), + Err(err) => err, + }; + assert!(err.contains("active_input_index 1 out of range"), "unexpected error: {err}"); + } +} diff --git a/debugger/dap/tests/harness.rs b/debugger/dap/tests/harness.rs new file mode 100644 index 0000000..1945727 --- /dev/null +++ b/debugger/dap/tests/harness.rs @@ -0,0 +1,226 @@ +use std::io::{BufRead, BufReader, Read, Write}; +use std::path::PathBuf; +use std::process::{Child, ChildStdin, Command, Stdio}; +use 
std::sync::{Arc, Mutex, mpsc};
use std::thread;
use std::time::Duration;

use serde_json::{Value, json};

const MESSAGE_TIMEOUT: Duration = Duration::from_secs(30);

/// Minimal out-of-process DAP test client: spawns the adapter binary, frames
/// requests over its stdin, and drains stdout/stderr on background threads.
///
/// NOTE(review): generic parameters below were reconstructed from usage
/// (the extracted diff had them stripped) — confirm against the original file.
pub struct TestClient {
    child: Child,
    stdin: ChildStdin,
    messages: mpsc::Receiver<Value>,
    stderr_log: Arc<Mutex<String>>,
    seq: i64,
}

impl TestClient {
    /// Spawns the debugger-dap binary with piped stdio and starts the
    /// stdout-framing and stderr-capturing reader threads.
    pub fn spawn() -> Self {
        let binary = resolve_debugger_dap_binary();
        let mut child = Command::new(&binary)
            .stdin(Stdio::piped())
            .stdout(Stdio::piped())
            .stderr(Stdio::piped())
            .spawn()
            .unwrap_or_else(|err| panic!("failed to spawn debugger-dap binary at {:?}: {err}", binary));

        let stdin = child.stdin.take().expect("missing child stdin");
        let stdout = child.stdout.take().expect("missing child stdout");
        let stderr = child.stderr.take().expect("missing child stderr");
        let stderr_log = Arc::new(Mutex::new(String::new()));
        let stderr_sink = Arc::clone(&stderr_log);
        let (tx, rx) = mpsc::channel();

        thread::spawn(move || read_stdout_messages(stdout, tx));
        thread::spawn(move || capture_stderr(stderr, stderr_sink));

        Self { child, stdin, messages: rx, stderr_log, seq: 1 }
    }

    /// Sends one DAP request with an auto-incremented `seq`.
    pub fn send_request(&mut self, command: &str, arguments: Value) {
        let message = json!({
            "seq": self.seq,
            "type": "request",
            "command": command,
            "arguments": arguments,
        });
        self.seq += 1;
        self.write_message(&message);
    }

    /// Blocks for the next adapter message; panics (with captured stderr) on
    /// timeout or channel disconnect so test failures are diagnosable.
    pub fn read_message(&mut self) -> Value {
        match self.messages.recv_timeout(MESSAGE_TIMEOUT) {
            Ok(message) => message,
            Err(mpsc::RecvTimeoutError::Timeout) => {
                let stderr = self.stderr_snapshot();
                panic!("timed out waiting for DAP message after {:?}; stderr: {}", MESSAGE_TIMEOUT, stderr);
            }
            Err(mpsc::RecvTimeoutError::Disconnected) => {
                let stderr = self.stderr_snapshot();
                panic!("adapter closed message channel unexpectedly; stderr: {}", stderr);
            }
        }
    }

    /// Skips intervening events until the response for `command` arrives and
    /// asserts it reports success.
    pub fn expect_response_success(&mut self, command: &str) -> Value {
        loop {
            let msg = self.read_message();
            if msg.get("type") == Some(&Value::String("response".to_string())) {
                let actual = msg.get("command").and_then(|v| v.as_str()).unwrap_or_default();
                if actual == command {
                    let success = msg.get("success").and_then(|v| v.as_bool()).unwrap_or(false);
                    assert!(success, "expected successful response for {command}, got {msg:#}");
                    return msg;
                }
            }
        }
    }

    /// Skips messages until the named event arrives.
    pub fn expect_event(&mut self, event: &str) -> Value {
        loop {
            let msg = self.read_message();
            if msg.get("type") == Some(&Value::String("event".to_string())) {
                let actual = msg.get("event").and_then(|v| v.as_str()).unwrap_or_default();
                if actual == event {
                    return msg;
                }
            }
        }
    }

    /// Runs the canonical initialize → launch(stopOnEntry) → breakpoints →
    /// configurationDone handshake and returns the first `stopped` event.
    pub fn full_launch_sequence(&mut self, script_path: &str) -> Value {
        self.send_request(
            "initialize",
            json!({
                "adapterID": "silverscript",
                "pathFormat": "path",
                "linesStartAt1": true,
                "columnsStartAt1": true,
                "supportsVariableType": true,
                "supportsVariablePaging": false,
                "supportsRunInTerminalRequest": false
            }),
        );
        self.expect_response_success("initialize");
        self.expect_event("initialized");

        self.send_request(
            "launch",
            json!({
                "scriptPath": script_path,
                "stopOnEntry": true
            }),
        );
        self.expect_response_success("launch");

        self.send_request("setBreakpoints", json!({"source": {"path": script_path}, "breakpoints": []}));
        self.expect_response_success("setBreakpoints");

        self.send_request("setExceptionBreakpoints", json!({"filters": []}));
        self.expect_response_success("setExceptionBreakpoints");

        self.send_request("configurationDone", Value::Null);
        self.expect_response_success("configurationDone");
        self.expect_event("stopped")
    }

    /// Writes one `Content-Length`-framed DAP message to the adapter's stdin.
    fn write_message(&mut self, payload: &Value) {
        let encoded = serde_json::to_vec(payload).expect("failed to serialize request");
        let header = format!("Content-Length: {}\r\n\r\n", encoded.len());
        self.stdin.write_all(header.as_bytes()).expect("failed to write header");
        self.stdin.write_all(&encoded).expect("failed to write body");
        self.stdin.flush().expect("failed to flush request");
    }

    fn stderr_snapshot(&self) -> String {
        self.stderr_log.lock().map(|value| value.trim().to_string()).unwrap_or_else(|_| "".to_string())
    }
}

/// Reader-thread body: parses `Content-Length`-framed JSON messages from the
/// adapter's stdout and forwards each payload over `tx`. Returns (ending the
/// thread) on EOF, read error, or a closed channel.
fn read_stdout_messages(stdout: impl Read, tx: mpsc::Sender<Value>) {
    let mut stdout = BufReader::new(stdout);
    loop {
        let mut content_length: usize = 0;
        let mut raw_headers: Vec<String> = Vec::new();
        loop {
            let mut line = String::new();
            let bytes = match stdout.read_line(&mut line) {
                Ok(bytes) => bytes,
                Err(_) => return,
            };
            if bytes == 0 {
                return;
            }
            raw_headers.push(line.clone());
            if line.trim().is_empty() {
                // Blank line terminates the header section — but only once a
                // Content-Length was seen; leading blank lines are skipped.
                if content_length == 0 {
                    continue;
                }
                break;
            }
            if let Some(rest) = line.trim().strip_prefix("Content-Length: ") {
                content_length = rest.trim().parse::<usize>().expect("invalid Content-Length header");
            }
        }

        assert!(content_length > 0, "received DAP message with zero Content-Length; headers: {:?}", raw_headers);

        let mut body = vec![0u8; content_length];
        if stdout.read_exact(&mut body).is_err() {
            return;
        }

        let payload = serde_json::from_slice::<Value>(&body).expect("invalid JSON payload");
        if tx.send(payload).is_err() {
            return;
        }
    }
}

/// Reader-thread body: accumulates the child's entire stderr into `sink`
/// (stored once at EOF) for post-mortem diagnostics.
fn capture_stderr(stderr: impl Read, sink: Arc<Mutex<String>>) {
    let mut stderr = BufReader::new(stderr);
    let mut buffer = String::new();
    let _ = stderr.read_to_string(&mut buffer);
    if let Ok(mut stored) = sink.lock() {
        *stored = buffer;
    }
}

/// Locates the debugger-dap binary: first via Cargo's CARGO_BIN_EXE_* env
/// vars, then by probing the workspace target directory.
pub fn resolve_debugger_dap_binary() -> PathBuf {
    let env_candidates =
        ["CARGO_BIN_EXE_debugger-dap", "CARGO_BIN_EXE_debugger_dap"].iter().filter_map(|key| std::env::var_os(key).map(PathBuf::from));
    for candidate in env_candidates {
        if candidate.exists() {
            return candidate;
        }
    }

    let target_dir = std::env::var_os("CARGO_TARGET_DIR")
        .map(PathBuf::from)
        .unwrap_or_else(|| PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("../../target"));
    let exe = format!("debugger-dap{}", std::env::consts::EXE_SUFFIX);
    let profiles = if
cfg!(debug_assertions) { ["debug", "release"] } else { ["release", "debug"] };

    // Probe the likelier profile first (debug builds look in target/debug).
    for profile in profiles {
        let candidate = target_dir.join(profile).join(&exe);
        if candidate.exists() {
            return candidate;
        }
    }

    panic!(
        "could not locate debugger-dap binary via env vars or target dir {}; looked for {} in debug/release",
        target_dir.display(),
        exe
    );
}

impl Drop for TestClient {
    // Best-effort cleanup: kill and reap the adapter process even when a test
    // panicked mid-session.
    fn drop(&mut self) {
        let _ = self.child.kill();
        let _ = self.child.wait();
    }
}
diff --git a/debugger/dap/tests/test_launch.rs b/debugger/dap/tests/test_launch.rs
new file mode 100644
index 0000000..6b0201d
--- /dev/null
+++ b/debugger/dap/tests/test_launch.rs
mod harness;

use std::fs;
use std::path::PathBuf;
use std::time::{SystemTime, UNIX_EPOCH};

use harness::TestClient;
use serde_json::json;

// SilverScript fixture sources. Tests below reference 1-based line numbers
// inside these strings, so their line structure must not change.
const SIMPLE_SCRIPT: &str = r#"pragma silverscript ^0.1.0;

contract Simple() {
    entrypoint function main() {
        int a = 1;
        int b = 2;
        require(a + b == 3);
    }
}
"#;

const MULTIFUNCTION_IF_STATEMENTS_SCRIPT: &str = r#"pragma silverscript ^0.1.0;

contract MultiFunctionIfStatements(int x, int y) {
    entrypoint function transfer(int a, int b) {
        int d = a + b;
        d = d - a;
        if (d == x) {
            int c = d + b;
            d = a + c;
            require(c > d);
        } else {
            d = a;
        }
        d = d + a;
        require(d == y);
    }

    entrypoint function timeout(int b) {
        int d = b;
        d = d + 2;
        if (d == x) {
            int c = d + b;
            d = c + d;
            require(c > d);
        }
        d = b;
        require(d == y);
    }
}
"#;

const INLINE_CALL_BOUNCE_SCRIPT: &str = r#"pragma silverscript ^0.1.0;

contract InlineBounce() {
    function check_pair(int leftInput, int rightInput) {
        int left = leftInput + rightInput;
        int right = left * 2;
        require(right >= left);
    }

    entrypoint function main(int a, int b) {
        check_pair(a, b);
        require(a >= 0);
    }
}
"#;

const STACK_RENDER_SCRIPT: &str = r#"pragma silverscript ^0.1.0;

contract StackRender() {
    entrypoint function
main(bool flag) {
        require(!flag);
    }
}
"#;

const CHECKSIG_SCRIPT: &str = r#"pragma silverscript ^0.1.0;

contract CheckSig(pubkey pk) {
    entrypoint function main(sig s) {
        require(checkSig(s, pk));
    }
}
"#;

const P2PKH_SCRIPT: &str = r#"pragma silverscript ^0.1.0;

contract P2PKH(byte[32] pkh) {
    entrypoint function spend(pubkey pk, sig s) {
        require(blake2b(pk) == pkh);
        require(checkSig(s, pk));
    }
}
"#;

/// Temp-file fixture: writes a SilverScript source to a unique temp path and
/// removes it on drop (best effort).
struct TempScript {
    path: PathBuf,
}

impl TempScript {
    fn new(source: &str) -> Self {
        // pid + nanosecond timestamp keeps parallel test processes apart.
        let unique = SystemTime::now().duration_since(UNIX_EPOCH).map(|duration| duration.as_nanos()).unwrap_or_default();
        let file_name = format!("silverscript-dap-test-{}-{}.sil", std::process::id(), unique);
        let path = std::env::temp_dir().join(file_name);
        fs::write(&path, source).expect("failed to write temp script");
        Self { path }
    }

    fn path_str(&self) -> String {
        self.path.to_string_lossy().to_string()
    }
}

impl Drop for TempScript {
    fn drop(&mut self) {
        let _ = fs::remove_file(&self.path);
    }
}

/// Builds a lexically different but equivalent path (`parent/../parent/file`)
/// for path-normalization tests; falls back to the input when any component
/// is missing.
fn equivalent_path_variant(path: &str) -> String {
    let path_buf = PathBuf::from(path);
    let Some(parent) = path_buf.parent() else {
        return path.to_string();
    };
    let Some(parent_name) = parent.file_name() else {
        return path.to_string();
    };
    let Some(file_name) = path_buf.file_name() else {
        return path.to_string();
    };
    parent.join("..").join(parent_name).join(file_name).to_string_lossy().to_string()
}

#[test]
fn launch_stops_on_entry_and_disconnects() {
    let script = TempScript::new(SIMPLE_SCRIPT);
    let script_path = script.path_str();

    let mut client = TestClient::spawn();
    let stopped = client.full_launch_sequence(&script_path);

    let reason = stopped.get("body").and_then(|v| v.get("reason")).and_then(|v| v.as_str()).unwrap_or_default();
    assert_eq!(reason, "entry");

    client.send_request("threads", serde_json::Value::Null);
    let threads = client.expect_response_success("threads");
    let size = threads.get("body").and_then(|v| v.get("threads")).and_then(|v| v.as_array()).map(|arr| arr.len()).unwrap_or(0);
    assert!(size >= 1);

    client.send_request("disconnect", serde_json::json!({}));
    client.expect_response_success("disconnect");
}

#[test]
fn breakpoint_snaps_and_continue_stops() {
    let script = TempScript::new(SIMPLE_SCRIPT);
    let script_path = script.path_str();

    let mut client = TestClient::spawn();

    client.send_request(
        "initialize",
        json!({
            "adapterID": "silverscript",
            "pathFormat": "path",
            "linesStartAt1": true,
            "columnsStartAt1": true,
            "supportsVariableType": true,
            "supportsVariablePaging": false,
            "supportsRunInTerminalRequest": false
        }),
    );
    client.expect_response_success("initialize");
    client.expect_event("initialized");

    client.send_request(
        "launch",
        json!({
            "scriptPath": script_path,
            "stopOnEntry": true
        }),
    );
    client.expect_response_success("launch");

    // Line 2 is not executable and must snap forward; line 6 is executable.
    client.send_request(
        "setBreakpoints",
        json!({
            "source": {"path": script_path},
            "breakpoints": [{"line": 2}, {"line": 6}]
        }),
    );
    let set_bp = client.expect_response_success("setBreakpoints");
    let breakpoints = set_bp.get("body").and_then(|v| v.get("breakpoints")).and_then(|v| v.as_array()).cloned().unwrap_or_default();
    assert_eq!(breakpoints.len(), 2, "expected two breakpoint responses: {set_bp:#}");

    let first_verified = breakpoints.first().and_then(|v| v.get("verified")).and_then(|v| v.as_bool()).unwrap_or(false);
    assert!(first_verified, "first breakpoint should be verified: {set_bp:#}");

    let first_resolved = breakpoints.first().and_then(|v| v.get("line")).and_then(|v| v.as_i64()).unwrap_or_default();
    assert!(first_resolved >= 4, "expected first breakpoint to snap to executable line >= 4, got {first_resolved}");

    let second_resolved = breakpoints.get(1).and_then(|v| v.get("line")).and_then(|v| v.as_i64()).unwrap_or_default();
    assert_eq!(second_resolved, 6, "expected second breakpoint to stay on line 6: {set_bp:#}");

    client.send_request("setExceptionBreakpoints", json!({"filters": []}));
    client.expect_response_success("setExceptionBreakpoints");

    client.send_request("configurationDone", serde_json::Value::Null);
    client.expect_response_success("configurationDone");
    let entry_stop = client.expect_event("stopped");
    let entry_reason = entry_stop.get("body").and_then(|v| v.get("reason")).and_then(|v| v.as_str()).unwrap_or_default();
    assert_eq!(entry_reason, "entry");

    client.send_request("continue", json!({"threadId": 1}));
    client.expect_response_success("continue");

    let mut stopped_reason: Option<String> = None;
    let mut terminated_seen = false;
    for _ in 0..12 {
        let msg = client.read_message();
        if msg.get("type") == Some(&serde_json::Value::String("event".to_string())) {
            let event = msg.get("event").and_then(|v| v.as_str()).unwrap_or_default();
            if event == "stopped" {
                stopped_reason = msg.get("body").and_then(|v| v.get("reason")).and_then(|v| v.as_str()).map(|v| v.to_string());
                break;
            }
            if event == "terminated" {
                terminated_seen = true;
                break;
            }
        }
    }

    assert!(
        stopped_reason.as_deref() == Some("breakpoint"),
        "expected breakpoint stop after continue; stopped_reason={stopped_reason:?}, terminated_seen={terminated_seen}"
    );

    client.send_request("disconnect", json!({}));
    client.expect_response_success("disconnect");
}

#[test]
fn launch_auto_signs_sig_argument_from_secret_key() {
    let script = TempScript::new(CHECKSIG_SCRIPT);
    let script_path = script.path_str();

    let mut client = TestClient::spawn();
    client.send_request(
        "initialize",
        json!({
            "adapterID": "silverscript",
            "pathFormat": "path",
            "linesStartAt1": true,
            "columnsStartAt1": true,
            "supportsVariableType": true,
            "supportsVariablePaging": false,
            "supportsRunInTerminalRequest": false
        }),
    );
    client.expect_response_success("initialize");
    client.expect_event("initialized");

    client.send_request(
        "launch",
json!({ + "scriptPath": script_path, + "function": "main", + "constructorArgs": ["keypair1.pubkey"], + "args": ["keypair1.secret"], + "stopOnEntry": true + }), + ); + client.expect_response_success("launch"); + client.send_request("setBreakpoints", json!({"source": {"path": script_path}, "breakpoints": []})); + client.expect_response_success("setBreakpoints"); + client.send_request("setExceptionBreakpoints", json!({"filters": []})); + client.expect_response_success("setExceptionBreakpoints"); + client.send_request("configurationDone", serde_json::Value::Null); + client.expect_response_success("configurationDone"); + let entry_stop = client.expect_event("stopped"); + let entry_reason = entry_stop.get("body").and_then(|v| v.get("reason")).and_then(|v| v.as_str()).unwrap_or_default(); + assert_eq!(entry_reason, "entry"); + + client.send_request("continue", json!({"threadId": 1})); + client.expect_response_success("continue"); + + let mut terminated = false; + for _ in 0..8 { + let msg = client.read_message(); + if msg.get("type") != Some(&serde_json::Value::String("event".to_string())) { + continue; + } + if msg.get("event").and_then(|v| v.as_str()) == Some("terminated") { + terminated = true; + break; + } + if msg.get("event").and_then(|v| v.as_str()) == Some("stopped") { + panic!("expected successful termination, got stop event: {msg:#}"); + } + } + + assert!(terminated, "expected debug session to terminate successfully after auto-sign"); + + client.send_request("disconnect", json!({})); + client.expect_response_success("disconnect"); +} + +#[test] +fn launch_resolves_symbolic_pkh_tokens() { + let script = TempScript::new(P2PKH_SCRIPT); + let script_path = script.path_str(); + + let mut client = TestClient::spawn(); + client.send_request( + "initialize", + json!({ + "adapterID": "silverscript", + "pathFormat": "path", + "linesStartAt1": true, + "columnsStartAt1": true, + "supportsVariableType": true, + "supportsVariablePaging": false, + 
"supportsRunInTerminalRequest": false + }), + ); + client.expect_response_success("initialize"); + client.expect_event("initialized"); + + client.send_request( + "launch", + json!({ + "scriptPath": script_path, + "function": "spend", + "constructorArgs": ["keypair1.pkh"], + "args": ["keypair1.pubkey", "keypair1.secret"], + "stopOnEntry": true + }), + ); + client.expect_response_success("launch"); + client.send_request("setBreakpoints", json!({"source": {"path": script_path}, "breakpoints": []})); + client.expect_response_success("setBreakpoints"); + client.send_request("setExceptionBreakpoints", json!({"filters": []})); + client.expect_response_success("setExceptionBreakpoints"); + client.send_request("configurationDone", serde_json::Value::Null); + client.expect_response_success("configurationDone"); + let entry_stop = client.expect_event("stopped"); + let entry_reason = entry_stop.get("body").and_then(|v| v.get("reason")).and_then(|v| v.as_str()).unwrap_or_default(); + assert_eq!(entry_reason, "entry"); + + client.send_request("continue", json!({"threadId": 1})); + client.expect_response_success("continue"); + + let mut terminated = false; + for _ in 0..8 { + let msg = client.read_message(); + if msg.get("type") != Some(&serde_json::Value::String("event".to_string())) { + continue; + } + if msg.get("event").and_then(|v| v.as_str()) == Some("terminated") { + terminated = true; + break; + } + if msg.get("event").and_then(|v| v.as_str()) == Some("stopped") { + panic!("expected successful termination, got stop event: {msg:#}"); + } + } + + assert!(terminated, "expected debug session to terminate successfully after resolving keypair.pkh"); + + client.send_request("disconnect", json!({})); + client.expect_response_success("disconnect"); +} + +#[test] +fn continue_hits_breakpoint_in_second_entrypoint() { + let script = TempScript::new(MULTIFUNCTION_IF_STATEMENTS_SCRIPT); + let script_path = script.path_str(); + + let mut client = TestClient::spawn(); + + 
client.send_request( + "initialize", + json!({ + "adapterID": "silverscript", + "pathFormat": "path", + "linesStartAt1": true, + "columnsStartAt1": true, + "supportsVariableType": true, + "supportsVariablePaging": false, + "supportsRunInTerminalRequest": false + }), + ); + client.expect_response_success("initialize"); + client.expect_event("initialized"); + + client.send_request( + "launch", + json!({ + "scriptPath": script_path, + "function": "timeout", + "constructorArgs": ["100", "9"], + "args": ["9"], + "stopOnEntry": true + }), + ); + client.expect_response_success("launch"); + + client.send_request( + "setBreakpoints", + json!({ + "source": {"path": script_path}, + "breakpoints": [{"line": 26}] + }), + ); + let set_bp = client.expect_response_success("setBreakpoints"); + let breakpoints = set_bp.get("body").and_then(|v| v.get("breakpoints")).and_then(|v| v.as_array()).cloned().unwrap_or_default(); + assert_eq!(breakpoints.len(), 1, "expected one breakpoint response: {set_bp:#}"); + let verified = breakpoints.first().and_then(|v| v.get("verified")).and_then(|v| v.as_bool()).unwrap_or(false); + assert!(verified, "breakpoint should be verified: {set_bp:#}"); + + client.send_request("setExceptionBreakpoints", json!({"filters": []})); + client.expect_response_success("setExceptionBreakpoints"); + + client.send_request("configurationDone", serde_json::Value::Null); + client.expect_response_success("configurationDone"); + let entry_stop = client.expect_event("stopped"); + let entry_reason = entry_stop.get("body").and_then(|v| v.get("reason")).and_then(|v| v.as_str()).unwrap_or_default(); + assert_eq!(entry_reason, "entry"); + + client.send_request("continue", json!({"threadId": 1})); + client.expect_response_success("continue"); + + let mut stopped_reason: Option = None; + let mut stopped_line: Option = None; + let mut terminated_seen = false; + for _ in 0..16 { + let msg = client.read_message(); + if msg.get("type") == 
Some(&serde_json::Value::String("event".to_string())) { + let event = msg.get("event").and_then(|v| v.as_str()).unwrap_or_default(); + if event == "stopped" { + stopped_reason = msg.get("body").and_then(|v| v.get("reason")).and_then(|v| v.as_str()).map(|v| v.to_string()); + + client.send_request("stackTrace", json!({"threadId": 1})); + let stack = client.expect_response_success("stackTrace"); + stopped_line = stack + .get("body") + .and_then(|v| v.get("stackFrames")) + .and_then(|v| v.as_array()) + .and_then(|frames| frames.first()) + .and_then(|frame| frame.get("line")) + .and_then(|v| v.as_i64()); + break; + } + if event == "terminated" { + terminated_seen = true; + break; + } + } + } + + assert!( + stopped_reason.as_deref() == Some("breakpoint"), + "expected breakpoint stop after continue; stopped_reason={stopped_reason:?}, terminated_seen={terminated_seen}" + ); + assert!(stopped_line.is_some(), "expected stack frame line to be present when stopped"); + + client.send_request("disconnect", json!({})); + client.expect_response_success("disconnect"); +} + +#[test] +fn run_config_json_resolves_symbolic_identities() { + let script = TempScript::new(P2PKH_SCRIPT); + let config = json!({ + "scriptPath": script.path_str(), + "function": "spend", + "constructorArgs": ["keypair1.pkh"], + "args": ["keypair1.pubkey", "keypair1.secret"] + }); + + let output = std::process::Command::new(harness::resolve_debugger_dap_binary()) + .arg("--run-config-json") + .arg(config.to_string()) + .output() + .expect("failed to run debugger-dap --run-config-json"); + + assert!( + output.status.success(), + "run-config-json failed: stdout={}, stderr={}", + String::from_utf8_lossy(&output.stdout), + String::from_utf8_lossy(&output.stderr) + ); + assert!( + String::from_utf8_lossy(&output.stdout).contains("Execution completed successfully."), + "unexpected stdout: {}", + String::from_utf8_lossy(&output.stdout) + ); +} + +#[test] +fn run_config_json_accepts_identity_tokens() { + let script = 
TempScript::new(P2PKH_SCRIPT);
    let config = json!({
        "scriptPath": script.path_str(),
        "function": "spend",
        "constructorArgs": ["identity1.pkh"],
        "args": ["identity1.pubkey", "identity1.secret"]
    });

    let output = std::process::Command::new(harness::resolve_debugger_dap_binary())
        .arg("--run-config-json")
        .arg(config.to_string())
        .output()
        .expect("failed to run debugger-dap --run-config-json");

    assert!(
        output.status.success(),
        "identity token run-config-json failed: stdout={}, stderr={}",
        String::from_utf8_lossy(&output.stdout),
        String::from_utf8_lossy(&output.stderr)
    );
}

#[test]
fn run_config_json_executes_kcc20_flow_fixtures() {
    let fixture_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("../fixtures/kcc20-flow");
    let fixture_names = [
        "01-init-kcc20-minter-branch.json",
        "02-create-tokens-from-minter.json",
        "03-burn-tokens-from-minter.json",
        "04-transfer-created-tokens.json",
    ];

    for fixture_name in fixture_names {
        let fixture_path = fixture_dir.join(fixture_name);
        let raw = fs::read_to_string(&fixture_path).unwrap_or_else(|err| panic!("failed to read {}: {err}", fixture_path.display()));
        let mut config = serde_json::from_str::<serde_json::Value>(&raw).expect("fixture JSON parses");
        // Rewrite the fixture's scriptPath so it resolves from this checkout.
        config["scriptPath"] = serde_json::Value::String(
            PathBuf::from(env!("CARGO_MANIFEST_DIR"))
                .join("../../silverscript-lang/tests/examples/kcc20.sil")
                .to_string_lossy()
                .to_string(),
        );
        let output = std::process::Command::new(harness::resolve_debugger_dap_binary())
            .arg("--run-config-json")
            .arg(config.to_string())
            .output()
            .unwrap_or_else(|err| panic!("failed to run debugger-dap for {}: {err}", fixture_path.display()));

        assert!(
            output.status.success(),
            "KCC20 fixture {} failed: stdout={}, stderr={}",
            fixture_name,
            String::from_utf8_lossy(&output.stdout),
            String::from_utf8_lossy(&output.stderr)
        );
        assert!(
            String::from_utf8_lossy(&output.stdout).contains("Execution completed successfully."),
            "unexpected stdout for {}: {}",
            fixture_name,
            String::from_utf8_lossy(&output.stdout)
        );
    }
}

#[test]
fn run_config_json_rejects_invalid_identity_tokens() {
    let script = TempScript::new(CHECKSIG_SCRIPT);
    let config = json!({
        "scriptPath": script.path_str(),
        "function": "main",
        "constructorArgs": ["keypair1.pubkey"],
        "args": ["keypair1.invalid"]
    });

    let output = std::process::Command::new(harness::resolve_debugger_dap_binary())
        .arg("--run-config-json")
        .arg(config.to_string())
        .output()
        .expect("failed to run debugger-dap --run-config-json");

    assert!(
        !output.status.success(),
        "expected invalid identity token failure: stdout={}, stderr={}",
        String::from_utf8_lossy(&output.stdout),
        String::from_utf8_lossy(&output.stderr)
    );
    assert!(
        String::from_utf8_lossy(&output.stderr).contains("invalid identity token"),
        "unexpected stderr: {}",
        String::from_utf8_lossy(&output.stderr)
    );
}

#[test]
fn named_launch_arguments_select_breakpoint() {
    let script = TempScript::new(MULTIFUNCTION_IF_STATEMENTS_SCRIPT);
    let script_path = script.path_str();

    let mut client = TestClient::spawn();

    client.send_request(
        "initialize",
        json!({
            "adapterID": "silverscript",
            "pathFormat": "path",
            "linesStartAt1": true,
            "columnsStartAt1": true,
            "supportsVariableType": true,
            "supportsVariablePaging": false,
            "supportsRunInTerminalRequest": false
        }),
    );
    client.expect_response_success("initialize");
    client.expect_event("initialized");

    // Named (object-form) constructorArgs/args instead of positional arrays.
    client.send_request(
        "launch",
        json!({
            "scriptPath": script_path,
            "function": "timeout",
            "constructorArgs": {
                "x": 100,
                "y": 9
            },
            "args": {
                "b": 9
            },
            "stopOnEntry": true
        }),
    );
    client.expect_response_success("launch");

    client.send_request(
        "setBreakpoints",
        json!({
            "source": {"path": script_path},
            "breakpoints": [{"line": 24}]
        }),
    );
    client.expect_response_success("setBreakpoints");

    client.send_request("setExceptionBreakpoints", json!({"filters": []}));
    client.expect_response_success("setExceptionBreakpoints");

    client.send_request("configurationDone", serde_json::Value::Null);
    client.expect_response_success("configurationDone");
    let entry_stop = client.expect_event("stopped");
    let entry_reason = entry_stop.get("body").and_then(|v| v.get("reason")).and_then(|v| v.as_str()).unwrap_or_default();
    assert_eq!(entry_reason, "entry");

    client.send_request("continue", json!({"threadId": 1}));
    client.expect_response_success("continue");

    let mut stopped_line: Option<i64> = None;
    for _ in 0..16 {
        let msg = client.read_message();
        if msg.get("type") == Some(&serde_json::Value::String("event".to_string()))
            && msg.get("event").and_then(|v| v.as_str()) == Some("stopped")
        {
            client.send_request("stackTrace", json!({"threadId": 1}));
            let stack = client.expect_response_success("stackTrace");
            stopped_line = stack
                .get("body")
                .and_then(|v| v.get("stackFrames"))
                .and_then(|v| v.as_array())
                .and_then(|frames| frames.first())
                .and_then(|frame| frame.get("line"))
                .and_then(|v| v.as_i64());
            break;
        }
    }

    let stopped_line = stopped_line.expect("expected named launch-config breakpoint stop");
    assert!((18..=27).contains(&stopped_line), "expected breakpoint inside timeout entrypoint, got line {stopped_line}",);

    client.send_request("disconnect", json!({}));
    client.expect_response_success("disconnect");
}

#[test]
fn launch_without_stop_on_entry_still_stops_on_breakpoint() {
    let script = TempScript::new(SIMPLE_SCRIPT);
    let script_path = script.path_str();

    let mut client = TestClient::spawn();

    client.send_request(
        "initialize",
        json!({
            "adapterID": "silverscript",
            "pathFormat": "path",
            "linesStartAt1": true,
            "columnsStartAt1": true,
            "supportsVariableType": true,
            "supportsVariablePaging": false,
            "supportsRunInTerminalRequest": false
        }),
    );
client.expect_response_success("initialize"); + client.expect_event("initialized"); + + client.send_request( + "launch", + json!({ + "scriptPath": script_path, + "stopOnEntry": false + }), + ); + client.expect_response_success("launch"); + + client.send_request( + "setBreakpoints", + json!({ + "source": {"path": script_path}, + "breakpoints": [{"line": 6}] + }), + ); + let set_bp = client.expect_response_success("setBreakpoints"); + let breakpoint = set_bp + .get("body") + .and_then(|v| v.get("breakpoints")) + .and_then(|v| v.as_array()) + .and_then(|items| items.first()) + .cloned() + .expect("expected breakpoint response"); + assert_eq!(breakpoint.get("verified").and_then(|v| v.as_bool()), Some(true), "expected verified breakpoint: {set_bp:#}"); + + client.send_request("setExceptionBreakpoints", json!({"filters": []})); + client.expect_response_success("setExceptionBreakpoints"); + + client.send_request("configurationDone", serde_json::Value::Null); + client.expect_response_success("configurationDone"); + + let stopped = client.expect_event("stopped"); + let reason = stopped.get("body").and_then(|v| v.get("reason")).and_then(|v| v.as_str()).unwrap_or_default(); + assert_eq!(reason, "breakpoint"); + + client.send_request("stackTrace", json!({"threadId": 1})); + let stack = client.expect_response_success("stackTrace"); + let line = stack + .get("body") + .and_then(|v| v.get("stackFrames")) + .and_then(|v| v.as_array()) + .and_then(|frames| frames.first()) + .and_then(|frame| frame.get("line")) + .and_then(|v| v.as_i64()) + .expect("expected stopped stack frame"); + assert_eq!(line, 6); + + client.send_request("disconnect", json!({})); + client.expect_response_success("disconnect"); +} + +#[test] +fn no_debug_launch_ignores_breakpoints_and_does_not_stop_on_entry() { + let script = TempScript::new(SIMPLE_SCRIPT); + let script_path = script.path_str(); + + let mut client = TestClient::spawn(); + + client.send_request( + "initialize", + json!({ + "adapterID": 
"silverscript", + "pathFormat": "path", + "linesStartAt1": true, + "columnsStartAt1": true, + "supportsVariableType": true, + "supportsVariablePaging": false, + "supportsRunInTerminalRequest": false + }), + ); + client.expect_response_success("initialize"); + client.expect_event("initialized"); + + client.send_request( + "launch", + json!({ + "scriptPath": script_path, + "noDebug": true, + "stopOnEntry": true + }), + ); + client.expect_response_success("launch"); + + client.send_request( + "setBreakpoints", + json!({ + "source": {"path": script_path}, + "breakpoints": [{"line": 6}] + }), + ); + client.expect_response_success("setBreakpoints"); + + client.send_request("setExceptionBreakpoints", json!({"filters": []})); + client.expect_response_success("setExceptionBreakpoints"); + + client.send_request("configurationDone", serde_json::Value::Null); + client.expect_response_success("configurationDone"); + + let mut terminated = false; + for _ in 0..8 { + let msg = client.read_message(); + if msg.get("type") != Some(&serde_json::Value::String("event".to_string())) { + continue; + } + match msg.get("event").and_then(|v| v.as_str()) { + Some("terminated") => { + terminated = true; + break; + } + Some("stopped") => panic!("expected no-debug launch to terminate, got stop event: {msg:#}"), + _ => {} + } + } + + assert!(terminated, "expected no-debug launch to terminate without stop events"); + + client.send_request("disconnect", json!({})); + client.expect_response_success("disconnect"); +} + +#[test] +fn scopes_expose_variables_and_stacks() { + let script = TempScript::new( + r#"pragma silverscript ^0.1.0; + +contract ScopeTest(int threshold) { + entrypoint function main(int a, int b) { + int local = a + b; + require(local > threshold); + } +} +"#, + ); + let script_path = script.path_str(); + + let mut client = TestClient::spawn(); + client.send_request( + "initialize", + json!({ + "adapterID": "silverscript", + "pathFormat": "path", + "linesStartAt1": true, + 
"columnsStartAt1": true, + "supportsVariableType": true, + "supportsVariablePaging": false, + "supportsRunInTerminalRequest": false + }), + ); + client.expect_response_success("initialize"); + client.expect_event("initialized"); + + client.send_request( + "launch", + json!({ + "scriptPath": script_path, + "function": "main", + "constructorArgs": ["3"], + "args": ["5", "4"], + "stopOnEntry": true + }), + ); + client.expect_response_success("launch"); + + client.send_request("setBreakpoints", json!({"source": {"path": script_path}, "breakpoints": []})); + client.expect_response_success("setBreakpoints"); + client.send_request("setExceptionBreakpoints", json!({"filters": []})); + client.expect_response_success("setExceptionBreakpoints"); + client.send_request("configurationDone", serde_json::Value::Null); + client.expect_response_success("configurationDone"); + let entry_stop = client.expect_event("stopped"); + let entry_reason = entry_stop.get("body").and_then(|v| v.get("reason")).and_then(|v| v.as_str()).unwrap_or_default(); + assert_eq!(entry_reason, "entry"); + + client.send_request("next", json!({"threadId": 1})); + client.expect_response_success("next"); + let step_stop = client.expect_event("stopped"); + let step_reason = step_stop.get("body").and_then(|v| v.get("reason")).and_then(|v| v.as_str()).unwrap_or_default(); + assert_eq!(step_reason, "step"); + + client.send_request("stackTrace", json!({"threadId": 1})); + let stack = client.expect_response_success("stackTrace"); + let frame_id = stack + .get("body") + .and_then(|v| v.get("stackFrames")) + .and_then(|v| v.as_array()) + .and_then(|frames| frames.first()) + .and_then(|frame| frame.get("id")) + .and_then(|v| v.as_i64()) + .expect("expected stack frame id"); + + client.send_request("scopes", json!({"frameId": frame_id})); + let scopes = client.expect_response_success("scopes"); + let scope_entries = scopes.get("body").and_then(|v| v.get("scopes")).and_then(|v| v.as_array()).cloned().unwrap_or_default(); + 
let scope_names = scope_entries.iter().filter_map(|scope| scope.get("name").and_then(|value| value.as_str())).collect::<Vec<_>>();
+ assert!(scope_names.contains(&"Variables"));
+ assert!(scope_names.contains(&"Data Stack"));
+ assert!(scope_names.contains(&"Alt Stack"));
+
+ let variables_ref = scope_entries
+ .iter()
+ .find(|scope| scope.get("name").and_then(|value| value.as_str()) == Some("Variables"))
+ .and_then(|scope| scope.get("variablesReference"))
+ .and_then(|value| value.as_i64())
+ .expect("expected variables scope");
+ let dstack_ref = scope_entries
+ .iter()
+ .find(|scope| scope.get("name").and_then(|value| value.as_str()) == Some("Data Stack"))
+ .and_then(|scope| scope.get("variablesReference"))
+ .and_then(|value| value.as_i64())
+ .expect("expected data stack scope");
+ let astack_ref = scope_entries
+ .iter()
+ .find(|scope| scope.get("name").and_then(|value| value.as_str()) == Some("Alt Stack"))
+ .and_then(|scope| scope.get("variablesReference"))
+ .and_then(|value| value.as_i64())
+ .expect("expected alt stack scope");
+
+ client.send_request("variables", json!({"variablesReference": variables_ref}));
+ let variables = client.expect_response_success("variables");
+ let variable_names = variables
+ .get("body")
+ .and_then(|v| v.get("variables"))
+ .and_then(|v| v.as_array())
+ .cloned()
+ .unwrap_or_default()
+ .into_iter()
+ .filter_map(|item| item.get("name").and_then(|value| value.as_str()).map(ToOwned::to_owned))
+ .collect::<Vec<_>>();
+ assert_eq!(variable_names, vec!["a".to_string(), "b".to_string(), "local".to_string(), "threshold (ctor)".to_string()]);
+
+ client.send_request("variables", json!({"variablesReference": dstack_ref}));
+ let dstack = client.expect_response_success("variables");
+ let dstack_count =
+ dstack.get("body").and_then(|v| v.get("variables")).and_then(|v| v.as_array()).map(|items| items.len()).unwrap_or_default();
+ assert!(dstack_count >= 2, "expected parameters to be visible on the data stack");
+
+ 
client.send_request("variables", json!({"variablesReference": astack_ref})); + let astack = client.expect_response_success("variables"); + let astack_entries = astack.get("body").and_then(|v| v.get("variables")).and_then(|v| v.as_array()).cloned().unwrap_or_default(); + assert_eq!(astack_entries.len(), 1, "expected empty alt stack placeholder"); + assert_eq!(astack_entries.first().and_then(|entry| entry.get("name")).and_then(|value| value.as_str()), Some("(empty)")); + assert_eq!(astack_entries.first().and_then(|entry| entry.get("value")).and_then(|value| value.as_str()), Some("")); + + client.send_request("disconnect", json!({})); + client.expect_response_success("disconnect"); +} + +#[test] +fn data_stack_renders_empty_bytes_without_bare_hex_prefix() { + let script = TempScript::new(STACK_RENDER_SCRIPT); + let script_path = script.path_str(); + + let mut client = TestClient::spawn(); + client.send_request( + "initialize", + json!({ + "adapterID": "silverscript", + "pathFormat": "path", + "linesStartAt1": true, + "columnsStartAt1": true, + "supportsVariableType": true, + "supportsVariablePaging": false, + "supportsRunInTerminalRequest": false + }), + ); + client.expect_response_success("initialize"); + client.expect_event("initialized"); + + client.send_request( + "launch", + json!({ + "scriptPath": script_path, + "function": "main", + "constructorArgs": {}, + "args": { + "flag": false + }, + "stopOnEntry": true + }), + ); + client.expect_response_success("launch"); + + client.send_request("setBreakpoints", json!({"source": {"path": script_path}, "breakpoints": []})); + client.expect_response_success("setBreakpoints"); + client.send_request("setExceptionBreakpoints", json!({"filters": []})); + client.expect_response_success("setExceptionBreakpoints"); + client.send_request("configurationDone", serde_json::Value::Null); + client.expect_response_success("configurationDone"); + client.expect_event("stopped"); + + client.send_request("stackTrace", json!({"threadId": 
1}));
+ let stack = client.expect_response_success("stackTrace");
+ let frame_id = stack
+ .get("body")
+ .and_then(|v| v.get("stackFrames"))
+ .and_then(|v| v.as_array())
+ .and_then(|frames| frames.first())
+ .and_then(|frame| frame.get("id"))
+ .and_then(|v| v.as_i64())
+ .expect("expected stack frame id");
+
+ client.send_request("scopes", json!({"frameId": frame_id}));
+ let scopes = client.expect_response_success("scopes");
+ let dstack_ref = scopes
+ .get("body")
+ .and_then(|v| v.get("scopes"))
+ .and_then(|v| v.as_array())
+ .and_then(|entries| entries.iter().find(|scope| scope.get("name").and_then(|value| value.as_str()) == Some("Data Stack")))
+ .and_then(|scope| scope.get("variablesReference"))
+ .and_then(|value| value.as_i64())
+ .expect("expected data stack scope");
+
+ client.send_request("variables", json!({"variablesReference": dstack_ref}));
+ let dstack = client.expect_response_success("variables");
+ let values = dstack
+ .get("body")
+ .and_then(|v| v.get("variables"))
+ .and_then(|v| v.as_array())
+ .cloned()
+ .unwrap_or_default()
+ .into_iter()
+ .filter_map(|item| item.get("value").and_then(|value| value.as_str()).map(ToOwned::to_owned))
+ .collect::<Vec<_>>();
+ assert!(
+ values.iter().any(|value| value.starts_with("<empty")),
+ "expected empty bool stack item to describe empty bytes, got {values:?}",
+ );
+ assert!(values.iter().all(|value| value != "0x"), "unexpected bare hex prefix in stack values: {values:?}",);
+
+ client.send_request("disconnect", json!({}));
+ client.expect_response_success("disconnect");
+}
+
+#[test]
+fn continue_with_inline_call_and_callee_breakpoints_does_not_bounce_back_to_call_site() {
+ let script = TempScript::new(INLINE_CALL_BOUNCE_SCRIPT);
+ let script_path = script.path_str();
+
+ let mut client = TestClient::spawn();
+
+ client.send_request(
+ "initialize",
+ json!({
+ "adapterID": "silverscript",
+ "pathFormat": "path",
+ "linesStartAt1": true,
+ "columnsStartAt1": true,
+ "supportsVariableType": true,
+ 
"supportsVariablePaging": false,
+ "supportsRunInTerminalRequest": false
+ }),
+ );
+ client.expect_response_success("initialize");
+ client.expect_event("initialized");
+
+ client.send_request(
+ "launch",
+ json!({
+ "scriptPath": script_path,
+ "constructorArgs": [],
+ "function": "main",
+ "args": ["1", "2"],
+ "stopOnEntry": true
+ }),
+ );
+ client.expect_response_success("launch");
+
+ // Request one call-site breakpoint and one callee-body breakpoint.
+ client.send_request(
+ "setBreakpoints",
+ json!({
+ "source": {"path": script_path},
+ "breakpoints": [{"line": 11}, {"line": 5}]
+ }),
+ );
+ let set_bp = client.expect_response_success("setBreakpoints");
+ let breakpoints = set_bp.get("body").and_then(|v| v.get("breakpoints")).and_then(|v| v.as_array()).cloned().unwrap_or_default();
+ assert_eq!(breakpoints.len(), 2, "expected two breakpoints: {set_bp:#}");
+ let call_site_line = breakpoints.first().and_then(|v| v.get("line")).and_then(|v| v.as_i64()).unwrap_or_default();
+ let callee_line = breakpoints.get(1).and_then(|v| v.get("line")).and_then(|v| v.as_i64()).unwrap_or_default();
+
+ client.send_request("setExceptionBreakpoints", json!({"filters": []}));
+ client.expect_response_success("setExceptionBreakpoints");
+
+ client.send_request("configurationDone", serde_json::Value::Null);
+ client.expect_response_success("configurationDone");
+ let entry_stop = client.expect_event("stopped");
+ let entry_reason = entry_stop.get("body").and_then(|v| v.get("reason")).and_then(|v| v.as_str()).unwrap_or_default();
+ assert_eq!(entry_reason, "entry");
+
+ let continue_and_capture_line = |client: &mut TestClient| -> Option<i64> {
+ client.send_request("continue", json!({"threadId": 1}));
+ client.expect_response_success("continue");
+
+ for _ in 0..12 {
+ let msg = client.read_message();
+ if msg.get("type") == Some(&serde_json::Value::String("event".to_string())) {
+ let event = msg.get("event").and_then(|v| v.as_str()).unwrap_or_default();
+ if event == "terminated" 
{ + return None; + } + if event == "stopped" { + client.send_request("stackTrace", json!({"threadId": 1})); + let stack = client.expect_response_success("stackTrace"); + return stack + .get("body") + .and_then(|v| v.get("stackFrames")) + .and_then(|v| v.as_array()) + .and_then(|frames| frames.first()) + .and_then(|frame| frame.get("line")) + .and_then(|v| v.as_i64()); + } + } + } + None + }; + + let first = continue_and_capture_line(&mut client); + let second = continue_and_capture_line(&mut client); + let third = continue_and_capture_line(&mut client); + + // Regression check for the user-reported bounce pattern: + // call-site -> callee -> same call-site. + let bounced = first == Some(call_site_line) && second == Some(callee_line) && third == Some(call_site_line); + assert!( + !bounced, + "inline breakpoint bounce reproduced: first={first:?}, second={second:?}, third={third:?}, call_site_line={call_site_line}, callee_line={callee_line}" + ); + + client.send_request("disconnect", json!({})); + client.expect_response_success("disconnect"); +} + +#[test] +fn continue_after_clearing_breakpoints_with_path_variant_does_not_stop() { + let script = TempScript::new(INLINE_CALL_BOUNCE_SCRIPT); + let script_path = script.path_str(); + let variant_path = equivalent_path_variant(&script_path); + + let mut client = TestClient::spawn(); + + client.send_request( + "initialize", + json!({ + "adapterID": "silverscript", + "pathFormat": "path", + "linesStartAt1": true, + "columnsStartAt1": true, + "supportsVariableType": true, + "supportsVariablePaging": false, + "supportsRunInTerminalRequest": false + }), + ); + client.expect_response_success("initialize"); + client.expect_event("initialized"); + + client.send_request( + "launch", + json!({ + "scriptPath": script_path, + "constructorArgs": [], + "function": "main", + "args": ["1", "2"], + "stopOnEntry": true + }), + ); + client.expect_response_success("launch"); + + // First set breakpoints on canonical path. 
+ client.send_request(
+ "setBreakpoints",
+ json!({
+ "source": {"path": script_path},
+ "breakpoints": [{"line": 11}, {"line": 5}]
+ }),
+ );
+ let initial_set = client.expect_response_success("setBreakpoints");
+ let initial_breakpoints =
+ initial_set.get("body").and_then(|v| v.get("breakpoints")).and_then(|v| v.as_array()).cloned().unwrap_or_default();
+ assert_eq!(initial_breakpoints.len(), 2, "expected two breakpoint responses: {initial_set:#}");
+
+ // Then clear using an equivalent but differently formatted path.
+ client.send_request(
+ "setBreakpoints",
+ json!({
+ "source": {"path": variant_path},
+ "breakpoints": []
+ }),
+ );
+ client.expect_response_success("setBreakpoints");
+
+ client.send_request("setExceptionBreakpoints", json!({"filters": []}));
+ client.expect_response_success("setExceptionBreakpoints");
+
+ client.send_request("configurationDone", serde_json::Value::Null);
+ client.expect_response_success("configurationDone");
+ let entry_stop = client.expect_event("stopped");
+ let entry_reason = entry_stop.get("body").and_then(|v| v.get("reason")).and_then(|v| v.as_str()).unwrap_or_default();
+ assert_eq!(entry_reason, "entry");
+
+ client.send_request("continue", json!({"threadId": 1}));
+ client.expect_response_success("continue");
+
+ let mut stopped_reason: Option<String> = None;
+ let mut terminated_seen = false;
+ for _ in 0..16 {
+ let msg = client.read_message();
+ if msg.get("type") == Some(&serde_json::Value::String("event".to_string())) {
+ let event = msg.get("event").and_then(|v| v.as_str()).unwrap_or_default();
+ if event == "stopped" {
+ stopped_reason = msg.get("body").and_then(|v| v.get("reason")).and_then(|v| v.as_str()).map(|v| v.to_string());
+ break;
+ }
+ if event == "terminated" {
+ terminated_seen = true;
+ break;
+ }
+ }
+ }
+
+ assert!(
+ stopped_reason.is_none() && terminated_seen,
+ "expected termination after clearing breakpoints; stopped_reason={stopped_reason:?}, terminated_seen={terminated_seen}"
+ );
+
+ 
client.send_request("disconnect", json!({})); + client.expect_response_success("disconnect"); +} + +#[test] +fn breakpoints_for_launch_source_survive_other_source_updates() { + let launch_script = TempScript::new(INLINE_CALL_BOUNCE_SCRIPT); + let launch_path = launch_script.path_str(); + let other_script = TempScript::new(SIMPLE_SCRIPT); + let other_path = other_script.path_str(); + + let mut client = TestClient::spawn(); + + client.send_request( + "initialize", + json!({ + "adapterID": "silverscript", + "pathFormat": "path", + "linesStartAt1": true, + "columnsStartAt1": true, + "supportsVariableType": true, + "supportsVariablePaging": false, + "supportsRunInTerminalRequest": false + }), + ); + client.expect_response_success("initialize"); + client.expect_event("initialized"); + + client.send_request( + "launch", + json!({ + "scriptPath": launch_path, + "constructorArgs": [], + "function": "main", + "args": ["1", "2"], + "stopOnEntry": true + }), + ); + client.expect_response_success("launch"); + + // Set one breakpoint in the launched source (call-site line). + client.send_request( + "setBreakpoints", + json!({ + "source": {"path": launch_path}, + "breakpoints": [{"line": 5}] + }), + ); + let launch_set = client.expect_response_success("setBreakpoints"); + let launch_bp = launch_set.get("body").and_then(|v| v.get("breakpoints")).and_then(|v| v.as_array()).cloned().unwrap_or_default(); + assert_eq!(launch_bp.len(), 1, "expected one launch-source breakpoint response: {launch_set:#}"); + let launch_line = launch_bp.first().and_then(|v| v.get("line")).and_then(|v| v.as_i64()).unwrap_or_default(); + assert!(launch_line > 0, "launch breakpoint should resolve to executable line: {launch_set:#}"); + + // Simulate a client sending setBreakpoints for a different source. + // It should not clear or override launch-source breakpoints. 
+ client.send_request(
+ "setBreakpoints",
+ json!({
+ "source": {"path": other_path},
+ "breakpoints": [{"line": 5}]
+ }),
+ );
+ let other_set = client.expect_response_success("setBreakpoints");
+ let other_bp = other_set.get("body").and_then(|v| v.get("breakpoints")).and_then(|v| v.as_array()).cloned().unwrap_or_default();
+ assert_eq!(other_bp.len(), 1, "expected one foreign-source breakpoint response: {other_set:#}");
+ let other_verified = other_bp.first().and_then(|v| v.get("verified")).and_then(|v| v.as_bool()).unwrap_or(true);
+ assert!(!other_verified, "foreign-source breakpoint should be unverified: {other_set:#}");
+
+ client.send_request("setExceptionBreakpoints", json!({"filters": []}));
+ client.expect_response_success("setExceptionBreakpoints");
+
+ client.send_request("configurationDone", serde_json::Value::Null);
+ client.expect_response_success("configurationDone");
+ let entry_stop = client.expect_event("stopped");
+ let entry_reason = entry_stop.get("body").and_then(|v| v.get("reason")).and_then(|v| v.as_str()).unwrap_or_default();
+ assert_eq!(entry_reason, "entry");
+
+ client.send_request("continue", json!({"threadId": 1}));
+ client.expect_response_success("continue");
+
+ let mut stopped_line: Option<i64> = None;
+ let mut terminated_seen = false;
+ for _ in 0..16 {
+ let msg = client.read_message();
+ if msg.get("type") == Some(&serde_json::Value::String("event".to_string())) {
+ let event = msg.get("event").and_then(|v| v.as_str()).unwrap_or_default();
+ if event == "stopped" {
+ let reason = msg.get("body").and_then(|v| v.get("reason")).and_then(|v| v.as_str()).unwrap_or_default();
+ assert_eq!(reason, "breakpoint", "expected breakpoint stop event: {msg:#}");
+
+ client.send_request("stackTrace", json!({"threadId": 1}));
+ let stack = client.expect_response_success("stackTrace");
+ stopped_line = stack
+ .get("body")
+ .and_then(|v| v.get("stackFrames"))
+ .and_then(|v| v.as_array())
+ .and_then(|frames| frames.first())
+ .and_then(|frame| 
frame.get("line")) + .and_then(|v| v.as_i64()); + break; + } + if event == "terminated" { + terminated_seen = true; + break; + } + } + } + + assert!(!terminated_seen, "launch-source breakpoint should still be active after foreign-source update"); + assert_eq!(stopped_line, Some(launch_line), "expected stop on launch-source breakpoint line after foreign-source update"); + + client.send_request("disconnect", json!({})); + client.expect_response_success("disconnect"); +} diff --git a/debugger/fixtures/kcc20-flow/01-init-kcc20-minter-branch.json b/debugger/fixtures/kcc20-flow/01-init-kcc20-minter-branch.json new file mode 100644 index 0000000..4bf5508 --- /dev/null +++ b/debugger/fixtures/kcc20-flow/01-init-kcc20-minter-branch.json @@ -0,0 +1,71 @@ +{ + "name": "KCC20 flow 01 - initialize token minter branch", + "type": "silverscript", + "request": "launch", + "scriptPath": "silverscript-lang/tests/examples/kcc20.sil", + "function": "transfer", + "constructorArgs": [ + "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + 0, + 2, + true, + 2, + 2 + ], + "args": [ + [ + { + "ownerIdentifier": "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "identifierType": 2, + "amount": 0, + "isMinter": true + } + ], + [], + [0] + ], + "tx": { + "active_input_index": 0, + "inputs": [ + { + "utxo_value": 1000, + "covenant_id": "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "constructor_args": [ + "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + 0, + 2, + true, + 2, + 2 + ], + "state": { + "ownerIdentifier": "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "identifierType": 2, + "amount": 0, + "isMinter": true + } + } + ], + "outputs": [ + { + "value": 1000, + "covenant_id": "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "authorizing_input": 0, + "constructor_args": [ + "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + 0, + 2, + 
true, + 2, + 2 + ], + "state": { + "ownerIdentifier": "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "identifierType": 2, + "amount": 0, + "isMinter": true + } + } + ] + } +} diff --git a/debugger/fixtures/kcc20-flow/02-create-tokens-from-minter.json b/debugger/fixtures/kcc20-flow/02-create-tokens-from-minter.json new file mode 100644 index 0000000..3cd85b2 --- /dev/null +++ b/debugger/fixtures/kcc20-flow/02-create-tokens-from-minter.json @@ -0,0 +1,96 @@ +{ + "name": "KCC20 flow 02 - create tokens from minter branch", + "type": "silverscript", + "request": "launch", + "scriptPath": "silverscript-lang/tests/examples/kcc20.sil", + "function": "transfer", + "constructorArgs": [ + "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + 0, + 2, + true, + 2, + 2 + ], + "args": [ + [ + { + "ownerIdentifier": "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "identifierType": 2, + "amount": 900, + "isMinter": true + }, + { + "ownerIdentifier": "0x2222222222222222222222222222222222222222222222222222222222222222", + "identifierType": 2, + "amount": 100, + "isMinter": false + } + ], + [], + [0] + ], + "tx": { + "active_input_index": 0, + "inputs": [ + { + "utxo_value": 1000, + "covenant_id": "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "constructor_args": [ + "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + 0, + 2, + true, + 2, + 2 + ], + "state": { + "ownerIdentifier": "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "identifierType": 2, + "amount": 0, + "isMinter": true + } + } + ], + "outputs": [ + { + "value": 1000, + "covenant_id": "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "authorizing_input": 0, + "constructor_args": [ + "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + 900, + 2, + true, + 2, + 2 + ], + "state": { + "ownerIdentifier": 
"0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "identifierType": 2, + "amount": 900, + "isMinter": true + } + }, + { + "value": 1000, + "covenant_id": "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "authorizing_input": 0, + "constructor_args": [ + "0x2222222222222222222222222222222222222222222222222222222222222222", + 100, + 2, + false, + 2, + 2 + ], + "state": { + "ownerIdentifier": "0x2222222222222222222222222222222222222222222222222222222222222222", + "identifierType": 2, + "amount": 100, + "isMinter": false + } + } + ] + } +} diff --git a/debugger/fixtures/kcc20-flow/03-burn-tokens-from-minter.json b/debugger/fixtures/kcc20-flow/03-burn-tokens-from-minter.json new file mode 100644 index 0000000..099567a --- /dev/null +++ b/debugger/fixtures/kcc20-flow/03-burn-tokens-from-minter.json @@ -0,0 +1,71 @@ +{ + "name": "KCC20 flow 03 - burn tokens from minter branch", + "type": "silverscript", + "request": "launch", + "scriptPath": "silverscript-lang/tests/examples/kcc20.sil", + "function": "transfer", + "constructorArgs": [ + "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + 900, + 2, + true, + 2, + 2 + ], + "args": [ + [ + { + "ownerIdentifier": "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "identifierType": 2, + "amount": 500, + "isMinter": true + } + ], + [], + [0] + ], + "tx": { + "active_input_index": 0, + "inputs": [ + { + "utxo_value": 1000, + "covenant_id": "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "constructor_args": [ + "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + 900, + 2, + true, + 2, + 2 + ], + "state": { + "ownerIdentifier": "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "identifierType": 2, + "amount": 900, + "isMinter": true + } + } + ], + "outputs": [ + { + "value": 1000, + "covenant_id": "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + 
"authorizing_input": 0, + "constructor_args": [ + "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + 500, + 2, + true, + 2, + 2 + ], + "state": { + "ownerIdentifier": "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "identifierType": 2, + "amount": 500, + "isMinter": true + } + } + ] + } +} diff --git a/debugger/fixtures/kcc20-flow/04-transfer-created-tokens.json b/debugger/fixtures/kcc20-flow/04-transfer-created-tokens.json new file mode 100644 index 0000000..18fa311 --- /dev/null +++ b/debugger/fixtures/kcc20-flow/04-transfer-created-tokens.json @@ -0,0 +1,89 @@ +{ + "name": "KCC20 flow 04 - transfer created non-minter tokens", + "type": "silverscript", + "request": "launch", + "scriptPath": "silverscript-lang/tests/examples/kcc20.sil", + "function": "transfer", + "constructorArgs": [ + "0x2222222222222222222222222222222222222222222222222222222222222222", + 100, + 2, + false, + 2, + 2 + ], + "args": [ + [ + { + "ownerIdentifier": "0x3333333333333333333333333333333333333333333333333333333333333333", + "identifierType": 2, + "amount": 100, + "isMinter": false + } + ], + [], + [1] + ], + "tx": { + "active_input_index": 0, + "inputs": [ + { + "utxo_value": 1000, + "covenant_id": "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "constructor_args": [ + "0x2222222222222222222222222222222222222222222222222222222222222222", + 100, + 2, + false, + 2, + 2 + ], + "state": { + "ownerIdentifier": "0x2222222222222222222222222222222222222222222222222222222222222222", + "identifierType": 2, + "amount": 100, + "isMinter": false + } + }, + { + "utxo_value": 1000, + "covenant_id": "0x2222222222222222222222222222222222222222222222222222222222222222", + "constructor_args": [ + "0x2222222222222222222222222222222222222222222222222222222222222222", + 0, + 2, + true, + 2, + 2 + ], + "state": { + "ownerIdentifier": "0x2222222222222222222222222222222222222222222222222222222222222222", + "identifierType": 2, + "amount": 0, + 
"isMinter": true + } + } + ], + "outputs": [ + { + "value": 1000, + "covenant_id": "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "authorizing_input": 0, + "constructor_args": [ + "0x3333333333333333333333333333333333333333333333333333333333333333", + 100, + 2, + false, + 2, + 2 + ], + "state": { + "ownerIdentifier": "0x3333333333333333333333333333333333333333333333333333333333333333", + "identifierType": 2, + "amount": 100, + "isMinter": false + } + } + ] + } +} diff --git a/debugger/fixtures/kcc20-flow/README.md b/debugger/fixtures/kcc20-flow/README.md new file mode 100644 index 0000000..27c83f4 --- /dev/null +++ b/debugger/fixtures/kcc20-flow/README.md @@ -0,0 +1,14 @@ +# KCC20 Debug Flow Fixtures + +These launch JSON files define the first debugger target for the rebased DAP work. + +They intentionally focus on the `KCC20` token covenant in `silverscript-lang/tests/examples/kcc20.sil` and avoid pubkey signatures by using covenant-ID ownership. That makes the flow deterministic while still exercising covenant state, generated covenant entrypoints, `State[]` arguments, witness inputs, minting, burning, and non-minter transfer checks. + +Flow: + +1. `01-init-kcc20-minter-branch.json` initializes a zero-amount minter branch. +2. `02-create-tokens-from-minter.json` creates token supply from that minter branch. +3. `03-burn-tokens-from-minter.json` burns part of the minter branch supply. +4. `04-transfer-created-tokens.json` transfers a created non-minter token branch through a covenant-ID witness input. + +The final DAP goal is to launch each file, stop in source-level covenant code, inspect `prevStates`/`newStates`, continue to completion, and report success. 
diff --git a/debugger/session/src/args.rs b/debugger/session/src/args.rs index c0c594f..05d467d 100644 --- a/debugger/session/src/args.rs +++ b/debugger/session/src/args.rs @@ -315,6 +315,20 @@ pub fn parse_state_value(contract: &ContractAst<'_>, raw_state: &str) -> Result< parse_struct_arg(&entries, &declared_fields, &shapes) } +pub fn values_to_args(values: &[Value]) -> Result, String> { + values.iter().map(value_to_arg).collect() +} + +fn value_to_arg(value: &Value) -> Result { + match value { + Value::String(raw) => Ok(raw.clone()), + Value::Number(raw) => Ok(raw.to_string()), + Value::Bool(raw) => Ok(raw.to_string()), + Value::Null => Ok("null".to_string()), + Value::Array(_) | Value::Object(_) => serde_json::to_string(value).map_err(|err| format!("invalid arg value: {err}")), + } +} + #[cfg(test)] mod tests { use super::{parse_call_args, parse_ctor_args, parse_state_value}; diff --git a/docs/dap-layer-migration-plan.md b/docs/dap-layer-migration-plan.md new file mode 100644 index 0000000..be5d4b6 --- /dev/null +++ b/docs/dap-layer-migration-plan.md @@ -0,0 +1,105 @@ +# DAP Layer Migration Plan + +## Current Git State + +- Working branch: `dap-and-vsc-debugger` +- New base: `origin/covpp-reset2` at `efd4293` (`Fix State[] field access for array fields (#111)`) +- Backup branch before rebasing: `codex/dap-and-vsc-debugger-pre-covpp-reset2-rebase` +- Preserved pre-rebase dirty worktree: `stash@{0}` (`pre-rebase dap-vscode dirty worktree`) +- The rebased branch still contains the historical combined DAP and VS Code extension changes. The next cleanup step should split the PR surface before implementation continues. + +## Goal + +Prepare a first PR that reintroduces only the DAP layer on top of the covenant-aware debugger/session work from `covpp-reset2`. + +The VS Code extension should be a later PR that consumes the stable DAP binary/protocol behavior after the DAP layer has landed. 
+ +The concrete end-to-end target for this migration is debugging the full KCC20 token flow from an initialized minter branch, through token creation, token burn, and created-token transfer. The JSON launch fixtures for that flow live under `debugger/fixtures/kcc20-flow/`. + +## Scope For The DAP PR + +Keep: + +- `debugger/dap/**` +- Workspace wiring needed to build the DAP crate: + - `Cargo.toml` + - `Cargo.lock` +- Minimal shared debugger/session API changes that are required by the DAP layer and are not VS Code specific. +- DAP tests that exercise the adapter and launch/runtime behavior. + +Defer: + +- `extensions/vscode/**` +- Extension packaging, CodeLens, quick launch UI, webviews, and adapter bootstrap scripts. +- Any UX or editor-specific launch configuration migration. + +## Important Upstream Changes To Preserve + +The reset branch has deliberate covenant support inside the debugger/session layer. The DAP migration should reuse it instead of duplicating older launch/runtime logic. + +Relevant upstream pieces: + +- `debugger/session/src/covenant.rs` + - Resolves source covenant functions to generated entrypoints. + - Tracks auth/cov binding, verification/transition mode, generated names, and source binding metadata. +- `DebugSession::with_covenant_mode` + - Activates covenant display names and synthetic binding overlays. + - Preserves source-level stepping behavior by hiding generated covenant internals. +- `debugger/session/src/args.rs` + - Parses constructor/call args from the contract AST rather than ABI strings. + - Supports structured `State`, `State[]`, custom structs, fixed byte arrays, and explicit state values. +- `debugger/cli/src/main.rs` + - Is the current reference implementation for covenant launch behavior, including generated covenant entrypoint selection, synthesized prefix args, and explicit state materialization. + +## Migration Plan + +1. Split the branch surface. 
+ - Create or keep a DAP-only branch based on the current rebased `dap-and-vsc-debugger`. + - Remove VS Code extension commits/files from the DAP PR branch. + - Keep the pre-rebase backup branch and stash until both DAP and VS Code follow-up work are accounted for. + +2. Rebase-normalize the DAP crate against covenant-aware APIs. + - Replace older ABI-string argument parsing in `debugger/dap/src/runtime_builder.rs` with the contract-AST based parser used by the CLI. + - Resolve user-selected source covenant functions through `resolve_covenant_call_target`. + - Launch generated covenant entrypoints where appropriate, while displaying source covenant function names through `DebugSession`. + - Pass `with_covenant_mode(...)` and covenant state values into the session when debugging covenant flows. + +3. Align transaction and state setup with CLI behavior. + - Reuse the CLI’s covenant transaction semantics rather than adding a parallel DAP-only interpretation. + - Support `prev_state`/`prev_states`, generated leader/delegate entrypoints, and explicit state scripts consistently with the CLI/test runner. + - Keep DAP launch JSON as a transport format only; do not make it an alternate contract semantics layer. + +4. Rework DAP tests around the new base. + - Keep adapter protocol tests focused on DAP behavior: launch, breakpoints, stack trace, scopes, variables, stepping, and errors. + - Add covenant-oriented DAP launch cases only after the runtime path is using the upstream covenant session support. + - Avoid importing VS Code fixtures or extension behavior into DAP tests. + +5. Verify the DAP-only PR. + - `cargo fmt` + - `cargo check -p debugger-dap` + - `cargo test -p debugger-dap` + - Relevant `debugger-session` tests if shared session APIs are touched. + +## Known Cleanup Before Implementation + +- The rebased diff still includes `extensions/vscode/**`; those files must be removed from the DAP PR branch before opening the first PR. 
+- `debugger/dap/src/runtime_builder.rs` still reflects the older pre-reset launch path and must be reconciled with the covenant-aware CLI/session path. +- `debugger/session/src/args.rs` currently keeps a small `values_to_args` helper for the DAP launch config. If the DAP launch parser is refactored, either keep this as a shared utility or move it into DAP-local config parsing. + +## Resolved Build Blockers + +The initial post-rebase `cargo check -p debugger-dap` failures have been resolved: + +- `DebugSession::format_value` no longer exists as a session method; DAP formatting should use the current `debugger_session::format_value` helper pattern used by the CLI. +- `DebugSession::current_function_name` now returns `Option<String>`, so DAP stack frame naming needs to drop the old borrowed-string conversion. +- `parse_call_args` now takes `(&ContractAst, function_name, raw_args)` rather than ABI input type strings. +- `EngineFlags` gained `sigop_script_units`. +- `TestTxInputScenarioResolved` and `TestTxOutputScenarioResolved` gained `state`. +- `TransactionInput` uses `mass` rather than `sig_op_count`. +- `VariableOrigin` now includes `ContractField` and `ConstructorArg`, and DAP variable presentation must account for both. + +Verification now passes with: + +- `cargo check -p debugger-dap` +- `cargo test -p debugger-dap` +- all `debugger/fixtures/kcc20-flow/*.json` through `debugger-dap --run-config-json` diff --git a/docs/vscode-debugger-extension-redesign-plan.md b/docs/vscode-debugger-extension-redesign-plan.md new file mode 100644 index 0000000..5fc1266 --- /dev/null +++ b/docs/vscode-debugger-extension-redesign-plan.md @@ -0,0 +1,75 @@ +# VS Code Debugger Extension Redesign Plan + +## Positioning + +The VS Code extension should be rebuilt after the DAP layer lands. It should treat `debugger-dap` as the product boundary and avoid duplicating compiler, covenant, transaction, or state semantics in TypeScript. + +The extension PR should not be part of the DAP PR.
+ +## Product Goal + +Make it easy to debug real covenant flows, including KCC20, without asking users to hand-author large launch objects from memory. + +The initial extension success case should be: + +- open `kcc20.sil` +- choose a saved KCC20 fixture/run profile +- launch the DAP adapter +- stop in source-level covenant code +- inspect `prevStates`, `newStates`, constructor args, contract fields, locals, and stack scopes +- run to completion or failure with the same error report as the CLI/DAP layer + +## Design Direction + +1. Keep the adapter boring. + - Use the DAP binary directly. + - Do not embed a second debug adapter in TypeScript. + - Do not reimplement transaction/state construction in the extension. + +2. Make launch configuration file-first. + - Support opening and running JSON launch files like `debugger/fixtures/kcc20-flow/*.json`. + - Keep VS Code `launch.json` support, but do not make it the only workflow. + - Let users save named run profiles next to the contract or in a workspace debug folder. + +3. Build a covenant-aware run profile editor. + - Inspect the contract for constructor params and source covenant functions. + - Present structured `State` and `State[]` editors as JSON objects/arrays. + - Provide transaction input/output sections with covenant IDs, authorizing input, constructor args, and explicit state. + - Show generated entrypoint details only as advanced/debug information. + +4. Prefer validation over generation magic. + - Validate JSON shape before launch. + - Validate missing function, constructor arg count, active input index, and state object shape by asking the DAP/CLI validation path where possible. + - Surface errors in the VS Code UI without rewriting them. + +5. Keep KCC20 as the acceptance fixture. + - Ship or document the KCC20 flow fixtures as examples. + - Add extension tests that launch those profiles through the actual DAP binary. + +## Proposed Extension PR Slices + +1. Minimal adapter host. 
+ - Register the SilverScript debug type. + - Resolve or build `debugger-dap`. + - Launch existing JSON configs. + +2. Run profile explorer. + - Discover `*.debug.json` or selected fixture files. + - Provide run/debug buttons for saved profiles. + - Avoid custom webview UI initially unless native VS Code tree/detail views are insufficient. + +3. Covenant profile editor. + - Add a focused editor or webview only for editing structured tx/state JSON. + - Keep it backed by the same JSON file on disk. + +4. KCC20 workflow polish. + - Add commands for the four KCC20 flow profiles. + - Make failures navigable to source locations reported by DAP. + +## Non-Goals + +- No TypeScript implementation of covenant state materialization. +- No extension-specific transaction semantics. +- No bundled fork of the DAP protocol. +- No custom UI before the JSON profile workflow is stable. +