diff --git a/Cargo.lock b/Cargo.lock index 4405c591382ff..40867ab733f92 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -12418,11 +12418,13 @@ dependencies = [ "move-binary-format", "move-bytecode-source-map", "move-bytecode-verifier-meter", + "move-cli", "move-command-line-common", "move-compiler", "move-core-types", "move-ir-types", "move-package", + "move-symbol-pool", "move-vm-config", "move-vm-profiler", "msim", diff --git a/crates/sui-move-build/src/lib.rs b/crates/sui-move-build/src/lib.rs index 85585dd905dc8..34b9c8256713b 100644 --- a/crates/sui-move-build/src/lib.rs +++ b/crates/sui-move-build/src/lib.rs @@ -619,15 +619,12 @@ impl CompiledPackage { self.dependency_ids.published.values().cloned().collect() } - /// Tree-shake the package's dependencies to remove any that are not referenced in source code. - /// - /// This algorithm uses the set of root modules as the starting point to retrieve the - /// list of used packages that are immediate dependencies of these modules. Essentially, it - /// will remove any package that has no immediate module dependency to it. - /// - /// Then, it will recursively find all the transitive dependencies of the packages in the list - /// above and add them to the list of packages that need to be kept as dependencies. - pub fn tree_shake(&mut self, with_unpublished_deps: bool) -> Result<(), anyhow::Error> { + /// Find the map of packages that are immediate dependencies of the root modules, joined with + /// the set of bytecode dependencies. + pub fn find_immediate_deps_pkgs_to_keep( + &self, + with_unpublished_deps: bool, + ) -> Result<BTreeMap<Symbol, ObjectID>, anyhow::Error> { // Start from the root modules (or all modules if with_unpublished_deps is true as we // need to include modules with 0x0 address) let root_modules: Vec<_> = if with_unpublished_deps { @@ -644,7 +641,7 @@ impl CompiledPackage { }; // Find the immediate dependencies for each root module and store the package name - // in the used_immediate_packages set.
This basically prunes the packages that are not used + // in the pkgs_to_keep set. This basically prunes the packages that are not used // based on the modules information. let mut pkgs_to_keep: BTreeSet<Symbol> = BTreeSet::new(); let module_to_pkg_name: BTreeMap<_, _> = self .all_modules() .map(|m| (m.unit.module.self_id(), m.unit.package_name)) .collect(); - let mut used_immediate_packages: BTreeSet<Symbol> = BTreeSet::new(); for module in &root_modules { let immediate_deps = module.module.immediate_dependencies(); @@ -661,31 +657,24 @@ impl CompiledPackage { let Some(pkg_name) = pkg_name else { bail!("Expected a package name but it's None") }; - used_immediate_packages.insert(*pkg_name); + pkgs_to_keep.insert(*pkg_name); } } } - // Next, for each package from used_immediate_packages set, we need to find all the - // transitive dependencies. Those trans dependencies need to be included in the final list - // of package dependencies (note that the pkg itself will be added by the transitive deps - // function) - used_immediate_packages.iter().for_each(|pkg| { - self.dependency_graph - .add_transitive_dependencies(pkg, &mut pkgs_to_keep) - }); - // If a package depends on another published package that has only bytecode without source // code available, we need to include also that package as dep. pkgs_to_keep.extend(self.bytecode_deps.iter().map(|(name, _)| *name)); - // Finally, filter out packages that are not in the union above from the - // dependency_ids.published field and return the package ids. - self.dependency_ids + // Finally, filter out packages that are published and exist in the manifest at the + // compilation time but are not referenced in the source code.
+ Ok(self + .dependency_ids + .clone() .published - .retain(|pkg_name, _| pkgs_to_keep.contains(pkg_name)); - - Ok(()) + .into_iter() + .filter(|(pkg_name, _)| pkgs_to_keep.contains(pkg_name)) + .collect()) } } diff --git a/crates/sui-move/src/build.rs b/crates/sui-move/src/build.rs index 9c5ded8e4d077..2f5eddc077367 100644 --- a/crates/sui-move/src/build.rs +++ b/crates/sui-move/src/build.rs @@ -5,9 +5,8 @@ use crate::manage_package::resolve_lock_file_path; use clap::Parser; use move_cli::base; use move_package::BuildConfig as MoveBuildConfig; -use serde_json::json; use std::{fs, path::Path}; -use sui_move_build::{check_invalid_dependencies, check_unpublished_dependencies, BuildConfig}; +use sui_move_build::BuildConfig; const LAYOUTS_DIR: &str = "layouts"; const STRUCT_LAYOUTS_FILENAME: &str = "struct_layouts.yaml"; @@ -52,8 +51,6 @@ impl Build { Self::execute_internal( &rerooted_path, build_config, - self.with_unpublished_dependencies, - self.dump_bytecode_as_base64, self.generate_struct_layouts, self.chain_id.clone(), ) @@ -62,34 +59,16 @@ impl Build { pub fn execute_internal( rerooted_path: &Path, config: MoveBuildConfig, - with_unpublished_deps: bool, - dump_bytecode_as_base64: bool, generate_struct_layouts: bool, chain_id: Option<String>, ) -> anyhow::Result<()> { - let mut pkg = BuildConfig { + let pkg = BuildConfig { config, run_bytecode_verifier: true, print_diags_to_stderr: true, chain_id, } .build(rerooted_path)?; - if dump_bytecode_as_base64 { - check_invalid_dependencies(&pkg.dependency_ids.invalid)?; - if !with_unpublished_deps { - check_unpublished_dependencies(&pkg.dependency_ids.unpublished)?; - } - - pkg.tree_shake(with_unpublished_deps)?; - println!( - "{}", - json!({ - "modules": pkg.get_package_base64(with_unpublished_deps), - "dependencies": pkg.get_dependency_storage_package_ids(), - "digest": pkg.get_package_digest(with_unpublished_deps), - }) - ) - } if generate_struct_layouts { let layout_str =
serde_yaml::to_string(&pkg.generate_struct_layouts()).unwrap(); diff --git a/crates/sui/Cargo.toml b/crates/sui/Cargo.toml index 37fb4a02b7c18..4215949e13e91 100644 --- a/crates/sui/Cargo.toml +++ b/crates/sui/Cargo.toml @@ -98,6 +98,8 @@ move-vm-profiler.workspace = true move-vm-config.workspace = true move-ir-types.workspace = true move-command-line-common.workspace = true +move-cli.workspace = true +move-symbol-pool.workspace = true [target.'cfg(not(target_env = "msvc"))'.dependencies] jemalloc-ctl.workspace = true diff --git a/crates/sui/src/client_commands.rs b/crates/sui/src/client_commands.rs index 4a34db4fc4b42..7a883de745618 100644 --- a/crates/sui/src/client_commands.rs +++ b/crates/sui/src/client_commands.rs @@ -10,7 +10,7 @@ use crate::{ verifier_meter::{AccumulatingMeter, Accumulator}, }; use std::{ - collections::{btree_map::Entry, BTreeMap}, + collections::{btree_map::Entry, BTreeMap, BTreeSet}, fmt::{Debug, Display, Formatter, Write}, fs, path::{Path, PathBuf}, @@ -67,12 +67,12 @@ use sui_types::{ base_types::{ObjectID, SequenceNumber, SuiAddress}, crypto::{EmptySignInfo, SignatureScheme}, digests::TransactionDigest, - error::SuiError, + error::{SuiError, SuiObjectResponseError}, gas::GasCostSummary, gas_coin::GasCoin, message_envelope::Envelope, metrics::BytecodeVerifierMetrics, - move_package::UpgradeCap, + move_package::{MovePackage, UpgradeCap}, object::Owner, parse_sui_type_tag, signature::GenericSignature, @@ -94,6 +94,7 @@ use tabled::{ }, }; +use move_symbol_pool::Symbol; use sui_types::digests::ChainIdentifier; use tracing::{debug, info}; @@ -890,12 +891,9 @@ impl SuiClientCommands { let sender = context.try_get_object_owner(&opts.gas).await?; let sender = sender.unwrap_or(context.active_address()?); let client = context.get_client().await?; - let chain_id = client.read_api().get_chain_identifier().await.ok(); - let protocol_version = client - .read_api() - .get_protocol_config(None) - .await? 
- .protocol_version; + let read_api = client.read_api(); + let chain_id = read_api.get_chain_identifier().await.ok(); + let protocol_version = read_api.get_protocol_config(None).await?.protocol_version; let protocol_config = ProtocolConfig::get_for_version( protocol_version, match chain_id @@ -907,7 +905,7 @@ impl SuiClientCommands { }, ); - check_protocol_version_and_warn(&client).await?; + check_protocol_version_and_warn(read_api).await?; let package_path = package_path .canonicalize() @@ -934,7 +932,7 @@ impl SuiClientCommands { check_dep_verification_flags(skip_dependency_verification, verify_deps)?; let upgrade_result = upgrade_package( - client.read_api(), + read_api, build_config.clone(), &package_path, upgrade_capability, @@ -966,7 +964,7 @@ impl SuiClientCommands { if verify_compatibility { check_compatibility( - &client, + read_api, package_id, compiled_package, package_path, @@ -1039,9 +1037,10 @@ impl SuiClientCommands { let sender = context.try_get_object_owner(&opts.gas).await?; let sender = sender.unwrap_or(context.active_address()?); let client = context.get_client().await?; - let chain_id = client.read_api().get_chain_identifier().await.ok(); + let read_api = client.read_api(); + let chain_id = read_api.get_chain_identifier().await.ok(); - check_protocol_version_and_warn(&client).await?; + check_protocol_version_and_warn(read_api).await?; let package_path = package_path .canonicalize() @@ -1063,7 +1062,7 @@ impl SuiClientCommands { check_dep_verification_flags(skip_dependency_verification, verify_deps)?; let compile_result = compile_package( - client.read_api(), + read_api, build_config.clone(), &package_path, with_unpublished_dependencies, @@ -1121,6 +1120,8 @@ impl SuiClientCommands { package_path, build_config, } => { + let client = context.get_client().await?; + let read_api = client.read_api(); let protocol_version = protocol_version.map_or(ProtocolVersion::MAX, ProtocolVersion::new); let protocol_config = @@ -1148,7 +1149,9 @@ impl 
SuiClientCommands { (_, package_path) => { let package_path = package_path.unwrap_or_else(|| PathBuf::from(".")); - let package = compile_package_simple(build_config, &package_path, None)?; + let package = + compile_package_simple(read_api, build_config, &package_path, None) + .await?; let name = package .package .compiled_package_info @@ -1762,7 +1765,8 @@ fn check_dep_verification_flags( } } -fn compile_package_simple( +async fn compile_package_simple( + read_api: &ReadApi, build_config: MoveBuildConfig, package_path: &Path, chain_id: Option, @@ -1776,7 +1780,7 @@ fn compile_package_simple( let resolution_graph = config.resolution_graph(package_path, chain_id.clone())?; let mut compiled_package = build_from_resolution_graph(resolution_graph, false, false, chain_id)?; - compiled_package.tree_shake(false)?; + pkg_tree_shake(read_api, false, &mut compiled_package).await?; Ok(compiled_package) } @@ -1798,7 +1802,13 @@ pub(crate) async fn upgrade_package( skip_dependency_verification, ) .await?; - compiled_package.tree_shake(with_unpublished_dependencies)?; + + pkg_tree_shake( + read_api, + with_unpublished_dependencies, + &mut compiled_package, + ) + .await?; compiled_package.published_at.as_ref().map_err(|e| match e { PublishedAtError::NotPresent => { @@ -1882,7 +1892,14 @@ pub(crate) async fn compile_package( print_diags_to_stderr, chain_id, )?; - compiled_package.tree_shake(with_unpublished_dependencies)?; + + pkg_tree_shake( + read_api, + with_unpublished_dependencies, + &mut compiled_package, + ) + .await?; + let protocol_config = read_api.get_protocol_config(None).await?; // Check that the package's Move version is compatible with the chain's @@ -3076,8 +3093,8 @@ pub(crate) async fn prerender_clever_errors( } /// Warn the user if the CLI falls behind more than 2 protocol versions. 
-async fn check_protocol_version_and_warn(client: &SuiClient) -> Result<(), anyhow::Error> { - let protocol_cfg = client.read_api().get_protocol_config(None).await?; +async fn check_protocol_version_and_warn(read_api: &ReadApi) -> Result<(), anyhow::Error> { + let protocol_cfg = read_api.get_protocol_config(None).await?; let on_chain_protocol_version = protocol_cfg.protocol_version.as_u64(); let cli_protocol_version = ProtocolVersion::MAX.as_u64(); if (cli_protocol_version + 2) < on_chain_protocol_version { @@ -3098,3 +3115,91 @@ async fn check_protocol_version_and_warn(client: &SuiClient) -> Result<(), anyho Ok(()) } + +/// Fetch move packages based on the provided package IDs. +async fn fetch_move_packages( + read_api: &ReadApi, + package_ids: &[ObjectID], + pkg_id_to_name: &BTreeMap<&ObjectID, &Symbol>, +) -> Result<Vec<MovePackage>, anyhow::Error> { + let objects = read_api + .multi_get_object_with_options(package_ids.to_vec(), SuiObjectDataOptions::bcs_lossless()) + .await?; + + objects + .into_iter() + .map(|o| { + let o = o.into_object().map_err(|e| match e { + SuiObjectResponseError::NotExists { object_id } => { + anyhow!( + "Package {} with object ID {object_id} does not exist", + pkg_id_to_name.get(&object_id).unwrap() + ) + } + SuiObjectResponseError::Deleted { + object_id, + version, + digest, + } => { + anyhow!( + "Package {} with object ID {object_id} was deleted at version {version} \ + with digest {digest}", + pkg_id_to_name.get(&object_id).unwrap() + ) + } + _ => anyhow!("Cannot convert data into an object: {e}"), + })?; + + let Some(SuiRawData::Package(p)) = o.bcs else { + bail!( + "Expected package {} with object ID {} but got something else", + pkg_id_to_name.get(&o.object_id).unwrap(), + o.object_id + ); + }; + p.to_move_package(u64::MAX /* safe as this pkg comes from the network */) + .map_err(|e| anyhow!(e)) + }) + .collect() +} + +/// Filter out a package's dependencies which are not referenced in the source code.
+pub(crate) async fn pkg_tree_shake( + read_api: &ReadApi, + with_unpublished_dependencies: bool, + compiled_package: &mut CompiledPackage, +) -> Result<(), anyhow::Error> { + let pkgs = compiled_package.find_immediate_deps_pkgs_to_keep(with_unpublished_dependencies)?; + let pkg_ids = pkgs.values().cloned().collect::<Vec<_>>(); + let pkg_id_to_name = pkgs + .iter() + .map(|(name, id)| (id, name)) + .collect::<BTreeMap<_, _>>(); + + let pkg_name_to_orig_id: BTreeMap<_, _> = compiled_package + .package + .deps_compiled_units + .iter() + .map(|(pkg_name, module)| (pkg_name, ObjectID::from(module.unit.address.into_inner()))) + .collect(); + + let published_deps_packages = fetch_move_packages(read_api, &pkg_ids, &pkg_id_to_name).await?; + + let linkage_table_ids: BTreeSet<_> = published_deps_packages + .iter() + .flat_map(|pkg| pkg.linkage_table().keys()) + .collect(); + + compiled_package + .dependency_ids + .published + .retain(|pkg_name, id| { + linkage_table_ids.contains(id) + || pkgs.contains_key(pkg_name) + || pkg_name_to_orig_id + .get(pkg_name) + .is_some_and(|orig_id| pkg_ids.contains(orig_id)) + }); + + Ok(()) +} diff --git a/crates/sui/src/sui_commands.rs b/crates/sui/src/sui_commands.rs index 09a628a44d2b4..0cf861b002c84 100644 --- a/crates/sui/src/sui_commands.rs +++ b/crates/sui/src/sui_commands.rs @@ -1,7 +1,7 @@ // Copyright (c) Mysten Labs, Inc.
// SPDX-License-Identifier: Apache-2.0 -use crate::client_commands::SuiClientCommands; +use crate::client_commands::{pkg_tree_shake, SuiClientCommands}; use crate::console::start_console; use crate::fire_drill::{run_fire_drill, FireDrill}; use crate::genesis_ceremony::{run, Ceremony}; @@ -43,10 +43,15 @@ use sui_graphql_rpc::{ test_infra::cluster::start_graphql_server_with_fn_rpc, }; +use serde_json::json; use sui_keys::keypair_file::read_key; use sui_keys::keystore::{AccountKeystore, FileBasedKeystore, Keystore}; +use sui_move::manage_package::resolve_lock_file_path; use sui_move::{self, execute_move_command}; -use sui_move_build::SuiPackageHooks; +use sui_move_build::{ + check_invalid_dependencies, check_unpublished_dependencies, BuildConfig as SuiBuildConfig, + SuiPackageHooks, +}; use sui_sdk::sui_client_config::{SuiClientConfig, SuiEnv}; use sui_sdk::wallet_context::WalletContext; use sui_swarm::memory::Swarm; @@ -499,31 +504,67 @@ impl SuiCommand { SuiCommand::Move { package_path, build_config, - mut cmd, + cmd, config: client_config, } => { - match &mut cmd { + match cmd { sui_move::Command::Build(build) if build.dump_bytecode_as_base64 => { - if build.ignore_chain { - build.chain_id = None; + // `sui move build` does not ordinarily require a network connection. + // The exception is when --dump-bytecode-as-base64 is specified: In this + // case, we should resolve the correct addresses for the respective chain + // (e.g., testnet, mainnet) from the Move.lock under automated address management. + // In addition, tree shaking also requires a network as it needs to fetch + // on-chain linkage table of package dependencies. + let config = + client_config.unwrap_or(sui_config_dir()?.join(SUI_CLIENT_CONFIG)); + prompt_if_no_config(&config, false).await?; + let context = WalletContext::new(&config, None, None)?; + + let Ok(client) = context.get_client().await else { + bail!("`sui move build --dump-bytecode-as-base64` requires a connection to the network. 
Current active network is {} but failed to connect to it.", context.config.active_env.as_ref().unwrap()); + }; + let read_api = client.read_api(); + + if let Err(e) = client.check_api_version() { + eprintln!("{}", format!("[warning] {e}").yellow().bold()); + } + + let chain_id = if build.ignore_chain { + // for tests it's useful to ignore the chain id! + None } else { - // `sui move build` does not ordinarily require a network connection. - // The exception is when --dump-bytecode-as-base64 is specified: In this - // case, we should resolve the correct addresses for the respective chain - // (e.g., testnet, mainnet) from the Move.lock under automated address management. - let config = - client_config.unwrap_or(sui_config_dir()?.join(SUI_CLIENT_CONFIG)); - prompt_if_no_config(&config, false).await?; - let context = WalletContext::new(&config, None, None)?; - if let Ok(client) = context.get_client().await { - if let Err(e) = client.check_api_version() { - eprintln!("{}", format!("[warning] {e}").yellow().bold()); - } - } - let client = context.get_client().await?; - let chain_id = client.read_api().get_chain_identifier().await.ok(); - build.chain_id = chain_id.clone(); + read_api.get_chain_identifier().await.ok() + }; + + let rerooted_path = move_cli::base::reroot_path(package_path.as_deref())?; + let build_config = + resolve_lock_file_path(build_config, Some(&rerooted_path))?; + let mut pkg = SuiBuildConfig { + config: build_config, + run_bytecode_verifier: true, + print_diags_to_stderr: true, + chain_id, + } + .build(&rerooted_path)?; + + let with_unpublished_deps = build.with_unpublished_dependencies; + + check_invalid_dependencies(&pkg.dependency_ids.invalid)?; + if !with_unpublished_deps { + check_unpublished_dependencies(&pkg.dependency_ids.unpublished)?; } + + pkg_tree_shake(read_api, with_unpublished_deps, &mut pkg).await?; + + println!( + "{}", + json!({ + "modules": pkg.get_package_base64(with_unpublished_deps), + "dependencies": 
pkg.get_dependency_storage_package_ids(), + "digest": pkg.get_package_digest(with_unpublished_deps), + }) + ); + return Ok(()); } _ => (), }; diff --git a/crates/sui/src/upgrade_compatibility/mod.rs b/crates/sui/src/upgrade_compatibility/mod.rs index f9da6d38137bc..a2853adc5f6b7 100644 --- a/crates/sui/src/upgrade_compatibility/mod.rs +++ b/crates/sui/src/upgrade_compatibility/mod.rs @@ -45,7 +45,7 @@ use move_package::compilation::compiled_package::CompiledUnitWithSource; use sui_json_rpc_types::{SuiObjectDataOptions, SuiRawData}; use sui_move_build::CompiledPackage; use sui_protocol_config::ProtocolConfig; -use sui_sdk::SuiClient; +use sui_sdk::apis::ReadApi; use sui_types::move_package::UpgradePolicy; use sui_types::{base_types::ObjectID, execution_config_utils::to_binary_config}; @@ -654,15 +654,14 @@ upgrade_codes!( /// Check the upgrade compatibility of a new package with an existing on-chain package. pub(crate) async fn check_compatibility( - client: &SuiClient, + read_api: &ReadApi, package_id: ObjectID, new_package: CompiledPackage, package_path: PathBuf, upgrade_policy: u8, protocol_config: ProtocolConfig, ) -> Result<(), Error> { - let existing_obj_read = client - .read_api() + let existing_obj_read = read_api .get_object_with_options(package_id, SuiObjectDataOptions::new().with_bcs()) .await .context("Unable to get existing package")?; diff --git a/crates/sui/tests/cli_tests.rs b/crates/sui/tests/cli_tests.rs index 2cea7a7507104..1266054dc45a2 100644 --- a/crates/sui/tests/cli_tests.rs +++ b/crates/sui/tests/cli_tests.rs @@ -104,7 +104,9 @@ impl TreeShakingTest { let temp_dir = tempfile::Builder::new().prefix("tree_shaking").tempdir()?; std::fs::create_dir_all(temp_dir.path()).unwrap(); let tests_dir = PathBuf::from(TEST_DATA_DIR); + let framework_pkgs = PathBuf::from("../sui-framework/packages"); copy_dir_all(tests_dir, temp_dir.path())?; + copy_dir_all(framework_pkgs, temp_dir.path().join("system-packages"))?; Ok(Self { test_cluster, @@ -1877,7 
+1879,7 @@ async fn test_package_publish_nonexistent_dependency() -> Result<(), anyhow::Err let err = result.unwrap_err().to_string(); assert!( - err.contains("Dependency object does not exist or was deleted"), + err.contains("Package Nonexistent with object ID 0x0000000000000000000000000000000000000000000000000000000000abc123 does not exist"), "{}", err ); @@ -4330,7 +4332,7 @@ async fn test_tree_shaking_package_with_unused_dependency() -> Result<(), anyhow } #[sim_test] -async fn test_tree_shaking_package_with_transitive_dependencies() -> Result<(), anyhow::Error> { +async fn test_tree_shaking_package_with_transitive_dependencies1() -> Result<(), anyhow::Error> { let mut test = TreeShakingTest::new().await?; // Publish packages A and B @@ -4519,3 +4521,33 @@ async fn test_tree_shaking_package_deps_on_pkg_upgrade_1() -> Result<(), anyhow: Ok(()) } + +#[sim_test] +async fn test_tree_shaking_package_system_deps() -> Result<(), anyhow::Error> { + let mut test = TreeShakingTest::new().await?; + + // Publish package J and verify empty linkage table + let (package_j_id, _) = test.publish_package("J_system_deps", false).await?; + let move_pkg_j = fetch_move_packages(&test.client, vec![package_j_id]).await; + let linkage_table_j = move_pkg_j.first().unwrap().linkage_table(); + assert!( + linkage_table_j.is_empty(), + "Package J should have no dependencies" + ); + + // sui move build --dump-bytecode-as-base64 should also yield a json with no dependencies + let package_path = test.package_path("J_system_deps"); + let binary_path = env!("CARGO_BIN_EXE_sui"); + let cmd = std::process::Command::new(binary_path) + .arg("move") + .arg("build") + .arg("--dump-bytecode-as-base64") + .arg(package_path) + .output() + .expect("Failed to execute command"); + + let output = String::from_utf8_lossy(&cmd.stdout); + assert!(!output.contains("dependencies: []")); + + Ok(()) +} diff --git a/crates/sui/tests/data/tree_shaking/J_system_deps/.gitignore 
b/crates/sui/tests/data/tree_shaking/J_system_deps/.gitignore new file mode 100644 index 0000000000000..a007feab071f4 --- /dev/null +++ b/crates/sui/tests/data/tree_shaking/J_system_deps/.gitignore @@ -0,0 +1 @@ +build/* diff --git a/crates/sui/tests/data/tree_shaking/J_system_deps/Move.toml b/crates/sui/tests/data/tree_shaking/J_system_deps/Move.toml new file mode 100644 index 0000000000000..7cb2aee6c6f0c --- /dev/null +++ b/crates/sui/tests/data/tree_shaking/J_system_deps/Move.toml @@ -0,0 +1,11 @@ +[package] +name = "J_system_deps" +edition = "2024.beta" + +[dependencies] +# this is relative to the temp directory where this whole tree shaking dir is copied to +Sui = { local = "../../system-packages/sui-framework", override = true } + +[addresses] +j_system_deps = "0x0" + diff --git a/crates/sui/tests/data/tree_shaking/J_system_deps/sources/j_system_deps.move b/crates/sui/tests/data/tree_shaking/J_system_deps/sources/j_system_deps.move new file mode 100644 index 0000000000000..3d8f2c1560902 --- /dev/null +++ b/crates/sui/tests/data/tree_shaking/J_system_deps/sources/j_system_deps.move @@ -0,0 +1,10 @@ +// Copyright (c) Mysten Labs, Inc. +// SPDX-License-Identifier: Apache-2.0 + +module j_system_deps::j_system_deps { + public fun j_system_deps() { + let x = 1; + let y = 2; + let z = x + y; + } +}