From 5fd4d1f96061877957183a935e9ae644bb1f632b Mon Sep 17 00:00:00 2001 From: libr Date: Thu, 19 Feb 2026 22:09:13 +0800 Subject: [PATCH 1/2] gas update --- crates/movy-replay/src/exec.rs | 4 ++-- crates/movy/src/sui/deploy.rs | 2 +- crates/movy/src/sui/fuzz.rs | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/movy-replay/src/exec.rs b/crates/movy-replay/src/exec.rs index 4c00390..ab9313d 100644 --- a/crates/movy-replay/src/exec.rs +++ b/crates/movy-replay/src/exec.rs @@ -256,7 +256,7 @@ where ) -> Result, MovyError> { let gas = self.db.get_move_object_info(gas.into())?.sui_reference(); let tx_kind = TransactionKind::ProgrammableTransaction(ptb.clone()); - let tx_data = TransactionData::new(tx_kind, sender, gas, 1_000_000_000, 1); + let tx_data = TransactionData::new(tx_kind, sender, gas, 100_000_000_000_000, 1); self.run_tx_trace(tx_data, epoch, epoch_ms, tracer) } @@ -274,7 +274,7 @@ where TypeTag::from_str("0x2::sui::SUI").unwrap().into(), MoveOwner::AddressOwner(sender.into()), gas_id.into(), - 10_000_000_000, + 100_000_000_000_000, )?; let gas_ref = self .db diff --git a/crates/movy/src/sui/deploy.rs b/crates/movy/src/sui/deploy.rs index 9d386ae..fd6be8c 100644 --- a/crates/movy/src/sui/deploy.rs +++ b/crates/movy/src/sui/deploy.rs @@ -50,7 +50,7 @@ impl SuiBuildDeployArgs { MoveTypeTag::from_str("0x2::sui::SUI").unwrap(), MoveOwner::AddressOwner(self.roles.deployer), gas_id.into(), - 100_000_000_000, + 1_000_000_000_000_000, )?; let testing_env = SuiTestingEnv::new(env.wrapped()); testing_env.mock_testing_std()?; diff --git a/crates/movy/src/sui/fuzz.rs b/crates/movy/src/sui/fuzz.rs index 17b74e9..2b1c5b1 100644 --- a/crates/movy/src/sui/fuzz.rs +++ b/crates/movy/src/sui/fuzz.rs @@ -195,7 +195,7 @@ impl SuiFuzzArgs { MoveTypeTag::from_str("0x2::sui::SUI").unwrap(), MoveOwner::AddressOwner(self.roles.deployer), gas_id.into(), - 1_000_000_000_000_000, )?; let testing_env = SuiTestingEnv::new(env.wrapped()); 
testing_env.mock_testing_std()?; From e9533e8d1f0e4831b9c4429ee0956fde7201cfae Mon Sep 17 00:00:00 2001 From: libr Date: Fri, 20 Feb 2026 17:28:40 +0800 Subject: [PATCH 2/2] feat: upgrade --- .gitignore | 3 +- crates/movy-replay/src/env.rs | 479 ++++++++++++++++++++++++++++++++- crates/movy-sui/src/compile.rs | 170 +++++++++++- 3 files changed, 638 insertions(+), 14 deletions(-) diff --git a/.gitignore b/.gitignore index fbf4ec1..14c71f9 100644 --- a/.gitignore +++ b/.gitignore @@ -5,4 +5,5 @@ build .vscode log *.log -flamegraph.svg \ No newline at end of file +flamegraph.svg +*.bcs \ No newline at end of file diff --git a/crates/movy-replay/src/env.rs b/crates/movy-replay/src/env.rs index 12f1a72..e80ac6c 100644 --- a/crates/movy-replay/src/env.rs +++ b/crates/movy-replay/src/env.rs @@ -8,7 +8,7 @@ use color_eyre::eyre::eyre; use itertools::Itertools; use move_core_types::account_address::AccountAddress; use movy_sui::{ - compile::SuiCompiledPackage, + compile::{SuiCompiledPackage, mock_module_address}, database::{cache::ObjectSuiStoreCommit, graphql::GraphQlDatabase}, rpc::graphql::{GraphQlClient, OwnerKind}, }; @@ -22,22 +22,50 @@ use sui_types::{ base_types::{ObjectID, SequenceNumber}, digests::TransactionDigest, effects::TransactionEffectsAPI, - move_package::MovePackage, + move_package::{MovePackage, UpgradeCap}, object::{Data, Object}, programmable_transaction_builder::ProgrammableTransactionBuilder, - storage::{BackingPackageStore, BackingStore, ObjectStore}, + storage::{BackingPackageStore, BackingStore, ObjectStore, WriteKind}, + transaction::{ + Argument, Command, ObjectArg, TransactionData, TransactionDataAPI, TransactionKind, + }, }; use crate::{ db::{ObjectStoreCachedStore, ObjectStoreInfo}, exec::SuiExecutor, - tracer::tree::TreeTracer, + tracer::{NopTracer, tree::TreeTracer}, }; pub struct SuiTestingEnv { db: T, } +#[derive(Debug, Clone)] +struct OnchainUpgradeStep { + chain_from: ObjectID, + chain_to: ObjectID, + source_tx: TransactionDigest, + 
source_checkpoint: u64, + modules: Vec>, + dependencies: Vec, + use_local_modules: bool, +} + +fn extract_upgrade_command( + tx_data: &TransactionData, +) -> Option<(Vec>, Vec, ObjectID)> { + let TransactionKind::ProgrammableTransaction(ptb) = tx_data.kind() else { + return None; + }; + for command in &ptb.commands { + if let Command::Upgrade(modules, deps, package, _ticket) = command { + return Some((modules.clone(), deps.clone(), *package)); + } + } + None +} + impl SuiTestingEnv { pub fn inner(&self) -> &T { &self.db @@ -190,6 +218,27 @@ impl< let package_names = compiled_result.package_names.clone(); let compiled_result = compiled_result.movy_mock()?; + let target_package_id = compiled_result.package_id; + let mut local_package_addresses = abi_result + .all_modules_iter() + .map(|m| ObjectID::from(*m.address())) + .collect::>(); + let original_package_id = local_package_addresses + .first() + .copied() + .ok_or_else(|| eyre!("{} has no root modules after compilation", path.display()))?; + let upgrade_mode = + target_package_id != ObjectID::ZERO && original_package_id != target_package_id; + if upgrade_mode { + tracing::info!( + "Detected package upgrade mode for {}: original-id={}, published-at={}", + path.display(), + original_package_id, + target_package_id + ); + local_package_addresses.remove(&original_package_id); + } + local_package_addresses.insert(target_package_id); // Deploy onchain deps or deps used by immediate dependencies let mut packages_to_deploy = abi_result @@ -202,12 +251,16 @@ impl< .map(|im| (*im.address()).into()) })) .collect::>(); + if upgrade_mode { + packages_to_deploy.insert(original_package_id); + } while let Some(dep) = packages_to_deploy.pop_last() { let dep = AccountAddress::from(dep); + let dep_obj: ObjectID = dep.into(); if dep != AccountAddress::ZERO - && dep != compiled_result.package_id.into() - && self.db.get_object(&dep.into()).is_none() + && !local_package_addresses.contains(&dep_obj) + && 
self.db.get_object(&dep_obj).is_none() { tracing::info!( "Dependency {} not found in our db for {}, trying to fetch it from onchain", @@ -230,8 +283,418 @@ impl< } let mut executor = SuiExecutor::new(self.db.clone())?; - let address = - executor.deploy_contract(epoch, epoch_ms, deployer.into(), gas, compiled_result)?; + let address = if upgrade_mode { + let (mut local_modules, local_dependencies) = compiled_result.into_deployment(); + let local_module_count_before_filter = local_modules.len(); + local_modules.retain(|module| ObjectID::from(*module.address()) == original_package_id); + if local_modules.is_empty() { + return Err(eyre!( + "no local modules remain for original-id {} after filtering upgrade payload for {}", + original_package_id, + path.display() + ) + .into()); + } + if local_modules.len() != local_module_count_before_filter { + tracing::warn!( + "Filtered local upgrade payload modules for {}: kept {} modules at original-id {}, dropped {} non-root modules", + path.display(), + local_modules.len(), + original_package_id, + local_module_count_before_filter - local_modules.len() + ); + } + for module in local_modules.iter_mut() { + mock_module_address(ObjectID::ZERO, module); + } + let mut local_module_bytes = Vec::with_capacity(local_modules.len()); + for module in local_modules { + let mut buf = vec![]; + module.serialize_with_version(module.version, &mut buf)?; + local_module_bytes.push(buf); + } + + let mut replay_steps: Vec = vec![]; + let mut chain_start_package_id = original_package_id; + if rpc.get_object(target_package_id.into()).await?.is_some() { + let mut replay_steps_rev: Vec = vec![]; + let mut walk_to = target_package_id; + let mut walk_done = false; + for hop in 0..64usize { + if walk_to == original_package_id { + chain_start_package_id = original_package_id; + walk_done = true; + break; + } + let current_pkg_object = rpc + .get_object(walk_to.into()) + .await? 
+ .ok_or_else(|| eyre!("onchain package {} not found", walk_to))?; + let walk_tx_digest = current_pkg_object.previous_transaction; + let walk_tx = rpc + .transaction(&walk_tx_digest.to_string()) + .await? + .ok_or_else(|| eyre!("tx {} not found", walk_tx_digest))?; + if let Some((step_modules, step_dependencies, package_arg)) = + extract_upgrade_command(&walk_tx.tx) + { + if package_arg == walk_to { + return Err(eyre!( + "invalid onchain upgrade chain at tx {}: from == to == {}", + walk_tx_digest, + walk_to + ) + .into()); + } + replay_steps_rev.push(OnchainUpgradeStep { + chain_from: package_arg, + chain_to: walk_to, + source_tx: walk_tx_digest, + source_checkpoint: walk_tx.checkpoint, + modules: step_modules, + dependencies: step_dependencies, + use_local_modules: false, + }); + walk_to = package_arg; + if hop == 63 && walk_to != original_package_id { + return Err(eyre!( + "upgrade chain from {} to {} exceeds 64 hops", + original_package_id, + target_package_id + ) + .into()); + } + } else { + tracing::info!( + "Stop walking upgrade chain at tx {} for package {} (no Command::Upgrade); treat {} as chain start", + walk_tx_digest, + walk_to, + walk_to + ); + chain_start_package_id = walk_to; + walk_done = true; + break; + } + } + if !walk_done && walk_to == original_package_id { + chain_start_package_id = original_package_id; + walk_done = true; + } + if !walk_done { + return Err(eyre!( + "upgrade chain walk for {} did not terminate cleanly (last package {})", + target_package_id, + walk_to + ) + .into()); + } + if chain_start_package_id != original_package_id { + tracing::warn!( + "Upgrade storage chain start {} differs from local original-id {} for {}", + chain_start_package_id, + original_package_id, + path.display() + ); + } + replay_steps_rev.reverse(); + replay_steps = replay_steps_rev; + } else { + tracing::warn!( + "published-at {} not found on chain, fallback to a single local upgrade step", + target_package_id + ); + } + + if replay_steps.is_empty() { + 
replay_steps.push(OnchainUpgradeStep { + chain_from: original_package_id, + chain_to: target_package_id, + source_tx: TransactionDigest::genesis_marker(), + source_checkpoint: 0, + modules: local_module_bytes.clone(), + dependencies: local_dependencies.clone(), + use_local_modules: true, + }); + } else { + let last = replay_steps + .last_mut() + .ok_or_else(|| eyre!("empty replay steps"))?; + last.modules = local_module_bytes.clone(); + last.dependencies = local_dependencies.clone(); + last.use_local_modules = true; + tracing::info!( + "Resolved {} upgrade replay steps from {} to {} (final step uses local modules)", + replay_steps.len(), + original_package_id, + target_package_id + ); + } + + if self.db.get_object(&chain_start_package_id).is_none() { + tracing::info!( + "Chain start package {} not in db, fetching from chain", + chain_start_package_id + ); + self.fetch_package_at_address(chain_start_package_id.into(), rpc) + .await?; + } + let chain_start_pkg = self.db.get_object(&chain_start_package_id).ok_or_else(|| { + eyre!( + "Chain start package {} is not present", + chain_start_package_id + ) + })?; + let publish_tx_digest = chain_start_pkg.previous_transaction; + tracing::info!( + "Resolving upgrade cap from chain start package {} publish tx {}", + chain_start_package_id, + publish_tx_digest + ); + let publish_tx = rpc + .transaction(&publish_tx_digest.to_string()) + .await? + .ok_or_else(|| eyre!("tx {} not found", publish_tx_digest))?; + + let mut resolved_cap: Option = None; + for (obj_ref, _owner, _kind) in publish_tx.effects.all_changed_objects() { + let Some(object) = rpc + .get_object_at_checkpoint(obj_ref.0, publish_tx.checkpoint) + .await? 
+ else { + continue; + }; + if !object.type_().is_some_and(|ty| ty.is_upgrade_cap()) { + continue; + } + let Some(move_obj) = object.data.try_as_move() else { + continue; + }; + let cap: UpgradeCap = bcs::from_bytes(move_obj.contents())?; + if cap.package.bytes == chain_start_package_id { + resolved_cap = Some(object); + break; + } + } + + let mut upgrade_cap_object = resolved_cap.ok_or_else(|| { + eyre!( + "No UpgradeCap found for chain start package {} in publish tx {}", + chain_start_package_id, + publish_tx_digest + ) + })?; + tracing::info!( + "Selected UpgradeCap {} for chain start package {}", + upgrade_cap_object.id(), + chain_start_package_id + ); + if upgrade_cap_object.get_single_owner() != Some(deployer.into()) { + tracing::info!( + "Hooking UpgradeCap {} owner from {:?} to {}", + upgrade_cap_object.id(), + upgrade_cap_object.owner(), + deployer + ); + upgrade_cap_object.transfer(deployer.into()); + self.db.commit_single_object(upgrade_cap_object.clone())?; + } + let cap_id = upgrade_cap_object.id(); + + let mut effective_package_id = original_package_id; + for (step_idx, step) in replay_steps.iter().enumerate() { + let step_num = step_idx + 1; + let mut current_cap_object = self.db.get_object(&cap_id).ok_or_else(|| { + eyre!( + "UpgradeCap {} missing before replay step {}/{}", + cap_id, + step_num, + replay_steps.len() + ) + })?; + if current_cap_object.get_single_owner() != Some(deployer.into()) { + tracing::info!( + "Hooking UpgradeCap {} owner from {:?} to {} before step {}/{}", + current_cap_object.id(), + current_cap_object.owner(), + deployer, + step_num, + replay_steps.len() + ); + current_cap_object.transfer(deployer.into()); + self.db.commit_single_object(current_cap_object.clone())?; + } + let current_cap_move = current_cap_object.data.try_as_move().ok_or_else(|| { + eyre!( + "UpgradeCap object {} is not a Move object", + current_cap_object.id() + ) + })?; + let current_cap: UpgradeCap = bcs::from_bytes(current_cap_move.contents())?; + 
tracing::info!( + "UpgradeCap snapshot before step {}/{}: cap_id={}, cap.package={}, cap.policy={}, cap.owner={:?}", + step_num, + replay_steps.len(), + current_cap_object.id(), + current_cap.package.bytes, + current_cap.policy, + current_cap_object.owner() + ); + if current_cap.package.bytes != step.chain_from { + return Err(eyre!( + "upgrade replay chain mismatch at step {}/{}: cap.package={} but expected chain_from={}", + step_num, + replay_steps.len(), + current_cap.package.bytes, + step.chain_from + ) + .into()); + } + + if self.db.get_object(¤t_cap.package.bytes).is_none() { + tracing::info!( + "Current package {} not in db, fetching from chain before step {}/{}", + current_cap.package.bytes, + step_num, + replay_steps.len() + ); + self.fetch_package_at_address(current_cap.package.bytes.into(), rpc) + .await?; + } + + let module_bytes = step.modules.clone(); + let dependencies = step.dependencies.clone(); + for dep in &dependencies { + if *dep != ObjectID::ZERO && self.db.get_object(dep).is_none() { + tracing::info!( + "Replay step {}/{} missing dependency {}, fetching from chain", + step_num, + replay_steps.len(), + dep + ); + self.fetch_package_at_address((*dep).into(), rpc).await?; + } + } + + let package_digest = MovePackage::compute_digest_for_modules_and_deps( + &module_bytes, + &dependencies, + true, + ); + let mut upgrade_digest = package_digest.to_vec(); + // Custom runtime extension: append desired storage package ID for upgrade target. 
+ upgrade_digest.extend_from_slice(&step.chain_to.to_vec()); + + let mut builder = ProgrammableTransactionBuilder::new(); + let cap_input = builder.obj(ObjectArg::ImmOrOwnedObject( + current_cap_object.compute_object_reference(), + ))?; + let policy_arg = builder.pure(current_cap.policy)?; + let digest_arg = builder.pure(upgrade_digest)?; + let upgrade_ticket = builder.programmable_move_call( + MoveAddress::two().into(), + Identifier::from_str("package").unwrap(), + Identifier::from_str("authorize_upgrade").unwrap(), + vec![], + vec![cap_input, policy_arg, digest_arg], + ); + let upgrade_receipt = builder.command(Command::Upgrade( + module_bytes, + dependencies, + current_cap.package.bytes, + upgrade_ticket, + )); + builder.programmable_move_call( + MoveAddress::two().into(), + Identifier::from_str("package").unwrap(), + Identifier::from_str("commit_upgrade").unwrap(), + vec![], + vec![Argument::Input(0), upgrade_receipt], + ); + + if step.use_local_modules { + tracing::info!( + "Running final local replay step {}/{}: {} -> {}", + step_num, + replay_steps.len(), + step.chain_from, + step.chain_to + ); + } else { + tracing::info!( + "Running onchain replay step {}/{} (tx {}, checkpoint {}): {} -> {}", + step_num, + replay_steps.len(), + step.source_tx, + step.source_checkpoint, + step.chain_from, + step.chain_to + ); + } + + let out = executor.run_ptb_with_gas::( + builder.finish(), + epoch, + epoch_ms, + deployer.into(), + gas, + None, + )?; + let effects = out.results.effects; + let store = out.results.store; + if !effects.status().is_ok() { + return Err(eyre!( + "fail to replay upgrade step {}/{} ({} -> {}) with {:?}", + step_num, + replay_steps.len(), + step.chain_from, + step.chain_to, + effects.status() + ) + .into()); + } + + let mut upgraded_package = None; + for t in effects.all_changed_objects() { + if matches!(&t.2, WriteKind::Create) + && let Some(object) = store.written.get(&t.0.0) + && object.is_package() + { + upgraded_package = Some(t.0.0); + } + } + 
let upgraded_package_id = upgraded_package.ok_or_else(|| { + eyre!( + "replay step {}/{} succeeds but no package object created", + step_num, + replay_steps.len() + ) + })?; + if upgraded_package_id != step.chain_to { + return Err(eyre!( + "replay step {}/{} storage id mismatch: expected {}, got {}", + step_num, + replay_steps.len(), + step.chain_to, + upgraded_package_id + ) + .into()); + } + self.db.commit_store(store, &effects)?; + effective_package_id = upgraded_package_id; + } + + if effective_package_id != target_package_id { + return Err(eyre!( + "upgrade replay ends at {}, expected published-at {}", + effective_package_id, + target_package_id + ) + .into()); + } + effective_package_id + } else { + executor.deploy_contract(epoch, epoch_ms, deployer.into(), gas, compiled_result)? + }; // In search of any deploy functions let mut abi = self.db.get_package_info(address.into())?.unwrap(); diff --git a/crates/movy-sui/src/compile.rs b/crates/movy-sui/src/compile.rs index ce04a6c..84d08f2 100644 --- a/crates/movy-sui/src/compile.rs +++ b/crates/movy-sui/src/compile.rs @@ -1,6 +1,12 @@ -use std::{collections::BTreeSet, fmt::Display, io::Write, path::Path}; +use std::{ + collections::BTreeSet, + fmt::Display, + io::Write, + path::{Path, PathBuf}, +}; use color_eyre::eyre::eyre; +use fastcrypto::hash::HashFunction; use itertools::Itertools; use move_binary_format::CompiledModule; use move_compiler::editions::Flavor; @@ -12,8 +18,153 @@ use movy_types::{ }; use serde::{Deserialize, Serialize}; use sui_move_build::{BuildConfig, CompiledPackage}; -use sui_types::base_types::ObjectID; -use tracing::{debug, trace}; +use sui_types::{base_types::ObjectID, crypto::DefaultHash}; +use tracing::{debug, trace, warn}; + +fn shared_move_install_dir() -> PathBuf { + if let Some(path) = std::env::var_os("MOVY_MOVE_INSTALL_DIR") { + return PathBuf::from(path); + } + std::env::temp_dir().join("movy-move-shared-build") +} + +fn compiled_package_cache_root(folder: &Path) -> PathBuf { + 
folder.join(".movy").join("cache") +} + +fn move_toml_cache_key(folder: &Path) -> Result { + let move_toml = folder.join("Move.toml"); + let content = std::fs::read(&move_toml) + .map_err(|e| eyre!("failed to read {}: {}", move_toml.display(), e))?; + let mut hasher = DefaultHash::default(); + hasher.update(content); + let digest = hasher.finalize().digest; + Ok(const_hex::encode(digest)) +} + +fn compiled_package_cache_file( + folder: &Path, + test_mode: bool, + with_unpublished: bool, + verify_deps: bool, +) -> Result { + let key = move_toml_cache_key(folder)?; + let mode = if test_mode { "test" } else { "non-test" }; + let unpublished = if with_unpublished { "u1" } else { "u0" }; + let verify = if verify_deps { "v1" } else { "v0" }; + Ok(compiled_package_cache_root(folder) + .join("compiled-packages") + .join(key) + .join(format!("{mode}-{unpublished}-{verify}.bcs"))) +} + +fn load_compiled_package_cache( + folder: &Path, + test_mode: bool, + with_unpublished: bool, + verify_deps: bool, +) -> Result, MovyError> { + let cache_file = compiled_package_cache_file(folder, test_mode, with_unpublished, verify_deps)?; + let bytes = match std::fs::read(&cache_file) { + Ok(bytes) => bytes, + Err(e) if e.kind() == std::io::ErrorKind::NotFound => { + tracing::info!( + "Compile disk cache miss: {} (test_mode={}, unpublished_deps={}, verify_deps={})", + folder.display(), + test_mode, + with_unpublished, + verify_deps + ); + return Ok(None); + } + Err(e) => { + warn!( + "Compile disk cache read failed for {}: {} (fallback to compile)", + cache_file.display(), + e + ); + return Ok(None); + } + }; + + match bcs::from_bytes::(&bytes) { + Ok(cached) => { + tracing::info!( + "Compile disk cache hit: {} (test_mode={}, unpublished_deps={}, verify_deps={})", + folder.display(), + test_mode, + with_unpublished, + verify_deps + ); + Ok(Some(cached)) + } + Err(e) => { + warn!( + "Compile disk cache decode failed for {}: {} (fallback to compile)", + cache_file.display(), + e + ); + if let 
Err(remove_err) = std::fs::remove_file(&cache_file) { + warn!( + "Failed to remove broken cache file {}: {}", + cache_file.display(), + remove_err + ); + } + Ok(None) + } + } +} + +fn store_compiled_package_cache( + folder: &Path, + test_mode: bool, + with_unpublished: bool, + verify_deps: bool, + compiled: &SuiCompiledPackage, +) { + let cache_file = + match compiled_package_cache_file(folder, test_mode, with_unpublished, verify_deps) { + Ok(path) => path, + Err(e) => { + warn!( + "Failed to build cache key for {}: {} (skip cache write)", + folder.display(), + e + ); + return; + } + }; + let Some(parent) = cache_file.parent() else { + return; + }; + if let Err(e) = std::fs::create_dir_all(parent) { + warn!( + "Failed to create compile cache dir {}: {}", + parent.display(), + e + ); + return; + } + let bytes = match bcs::to_bytes(compiled) { + Ok(bytes) => bytes, + Err(e) => { + warn!( + "Failed to encode compile cache for {}: {}", + folder.display(), + e + ); + return; + } + }; + if let Err(e) = std::fs::write(&cache_file, bytes) { + warn!( + "Failed to write compile cache {}: {}", + cache_file.display(), + e + ); + } +} pub fn build_package_resolved( folder: &Path, @@ -27,6 +178,7 @@ pub fn build_package_resolved( // Sui now tries to assign a few unique addresses for each unpublished modules (see `unique_hash` and `NamedAddress::Unpublished`) // We overrite this and request the behavior of the old version cfg.set_unpublished_deps_to_zero = true; + cfg.install_dir = Some(shared_move_install_dir()); let cfg = BuildConfig { config: cfg, @@ -179,6 +331,12 @@ impl SuiCompiledPackage { with_unpublished: bool, verify_deps: bool, ) -> Result { + if let Some(cached) = + load_compiled_package_cache(folder, test_mode, with_unpublished, verify_deps)? 
+ { + return Ok(cached); + } + let artifacts = build_package_resolved(folder, test_mode)?; debug!("artifacts dep: {:?}", artifacts.dependency_ids); debug!("published: {:?}", artifacts.dependency_ids.published); @@ -338,14 +496,16 @@ impl SuiCompiledPackage { deps.iter().map(|t| t.to_string()).join(","), published.iter().map(|t| t.to_string()).join(",") ); - Ok(SuiCompiledPackage { + let compiled = SuiCompiledPackage { package_id: (*root_address).into(), package_name, package_names, modules, dependencies: deps.into_iter().collect(), published_dependencies: published, - }) + }; + store_compiled_package_cache(folder, test_mode, with_unpublished, verify_deps, &compiled); + Ok(compiled) } pub fn build(