-
Notifications
You must be signed in to change notification settings - Fork 11.4k
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
[Package management] add implicit system package dependencies (dvx-197) #21204
base: mdgeorge/build-config-refactors
Are you sure you want to change the base?
Changes from all commits
5d9ef0a
b2edeb5
5fb5898
af8f01b
a247208
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -34,15 +34,21 @@ use move_package::{ | |
}, | ||
package_hooks::{PackageHooks, PackageIdentifier}, | ||
resolution::{dependency_graph::DependencyGraph, resolution_graph::ResolvedGraph}, | ||
source_package::parsed_manifest::{Dependencies, PackageName}, | ||
BuildConfig as MoveBuildConfig, LintFlag, | ||
source_package::parsed_manifest::{ | ||
Dependencies, Dependency, DependencyKind, GitInfo, InternalDependency, PackageName, | ||
}, | ||
BuildConfig as MoveBuildConfig, | ||
}; | ||
use move_package::{ | ||
source_package::parsed_manifest::OnChainInfo, source_package::parsed_manifest::SourceManifest, | ||
}; | ||
use move_symbol_pool::Symbol; | ||
use serde_reflection::Registry; | ||
use sui_package_management::{resolve_published_id, PublishedAtError}; | ||
use sui_package_management::{ | ||
resolve_published_id, | ||
system_package_versions::{SystemPackagesVersion, SYSTEM_GIT_REPO}, | ||
PublishedAtError, | ||
}; | ||
use sui_protocol_config::{Chain, ProtocolConfig, ProtocolVersion}; | ||
use sui_types::{ | ||
base_types::ObjectID, | ||
|
@@ -110,18 +116,17 @@ pub struct BuildConfig { | |
impl BuildConfig { | ||
pub fn new_for_testing() -> Self { | ||
move_package::package_hooks::register_package_hooks(Box::new(SuiPackageHooks)); | ||
|
||
// Note: in the future, consider changing this to dependencies on the local system | ||
// packages: | ||
let implicit_dependencies = Dependencies::new(); | ||
let install_dir = tempfile::tempdir().unwrap().into_path(); | ||
|
||
let config = MoveBuildConfig { | ||
default_flavor: Some(move_compiler::editions::Flavor::Sui), | ||
implicit_dependencies, | ||
|
||
lock_file: Some(install_dir.join("Move.lock")), | ||
install_dir: Some(install_dir), | ||
lint_flag: LintFlag::LEVEL_NONE, | ||
silence_warnings: true, | ||
lint_flag: move_package::LintFlag::LEVEL_NONE, | ||
// TODO: in the future this should probably be changed to a set of local deps: | ||
implicit_dependencies: Dependencies::new(), | ||
..MoveBuildConfig::default() | ||
}; | ||
BuildConfig { | ||
|
@@ -276,9 +281,9 @@ pub fn build_from_resolution_graph( | |
|
||
// compile! | ||
let result = if print_diags_to_stderr { | ||
BuildConfig::compile_package(resolution_graph, &mut std::io::stderr()) | ||
BuildConfig::compile_package(&resolution_graph, &mut std::io::stderr()) | ||
} else { | ||
BuildConfig::compile_package(resolution_graph, &mut std::io::sink()) | ||
BuildConfig::compile_package(&resolution_graph, &mut std::io::sink()) | ||
}; | ||
|
||
let (package, fn_info) = result.map_err(|error| SuiError::ModuleBuildFailure { | ||
|
@@ -295,6 +300,7 @@ pub fn build_from_resolution_graph( | |
published_at, | ||
dependency_ids, | ||
bytecode_deps, | ||
dependency_graph: resolution_graph.graph, | ||
}) | ||
} | ||
|
||
|
@@ -638,15 +644,12 @@ impl CompiledPackage { | |
self.dependency_ids.published.values().cloned().collect() | ||
} | ||
|
||
/// Tree-shake the package's dependencies to remove any that are not referenced in source code. | ||
/// | ||
/// This algorithm uses the set of root modules as the starting point to retrieve the | ||
/// list of used packages that are immediate dependencies of these modules. Essentially, it | ||
/// will remove any package that has no immediate module dependency to it. | ||
/// | ||
/// Then, it will recursively find all the transitive dependencies of the packages in the list | ||
/// above and add them to the list of packages that need to be kept as dependencies. | ||
pub fn tree_shake(&mut self, with_unpublished_deps: bool) -> Result<(), anyhow::Error> { | ||
/// Find the map of packages that are immediate dependencies of the root modules, joined with | ||
/// the set of bytecode dependencies. | ||
pub fn find_immediate_deps_pkgs_to_keep( | ||
&self, | ||
with_unpublished_deps: bool, | ||
) -> Result<BTreeMap<Symbol, ObjectID>, anyhow::Error> { | ||
// Start from the root modules (or all modules if with_unpublished_deps is true as we | ||
// need to include modules with 0x0 address) | ||
let root_modules: Vec<_> = if with_unpublished_deps { | ||
|
@@ -663,15 +666,14 @@ impl CompiledPackage { | |
}; | ||
|
||
// Find the immediate dependencies for each root module and store the package name | ||
// in the used_immediate_packages set. This basically prunes the packages that are not used | ||
// in the pkgs_to_keep set. This basically prunes the packages that are not used | ||
// based on the modules information. | ||
let mut pkgs_to_keep: BTreeSet<Symbol> = BTreeSet::new(); | ||
let module_to_pkg_name: BTreeMap<_, _> = self | ||
.package | ||
.all_modules() | ||
.map(|m| (m.unit.module.self_id(), m.unit.package_name)) | ||
.collect(); | ||
let mut used_immediate_packages: BTreeSet<Symbol> = BTreeSet::new(); | ||
|
||
for module in &root_modules { | ||
let immediate_deps = module.module.immediate_dependencies(); | ||
|
@@ -680,34 +682,51 @@ impl CompiledPackage { | |
let Some(pkg_name) = pkg_name else { | ||
bail!("Expected a package name but it's None") | ||
}; | ||
used_immediate_packages.insert(*pkg_name); | ||
pkgs_to_keep.insert(*pkg_name); | ||
} | ||
} | ||
} | ||
|
||
// Next, for each package from used_immediate_packages set, we need to find all the | ||
// transitive dependencies. Those trans dependencies need to be included in the final list | ||
// of package dependencies (note that the pkg itself will be added by the transitive deps | ||
// function) | ||
used_immediate_packages.iter().for_each(|pkg| { | ||
self.dependency_graph | ||
.add_transitive_dependencies(pkg, &mut pkgs_to_keep) | ||
}); | ||
|
||
// If a package depends on another published package that has only bytecode without source | ||
// code available, we need to include also that package as dep. | ||
pkgs_to_keep.extend(self.bytecode_deps.iter().map(|(name, _)| *name)); | ||
|
||
// Finally, filter out packages that are not in the union above from the | ||
// dependency_ids.published field and return the package ids. | ||
self.dependency_ids | ||
// Finally, filter out packages that are published and exist in the manifest at the | ||
// compilation time but are not referenced in the source code. | ||
Ok(self | ||
.dependency_ids | ||
.clone() | ||
.published | ||
.retain(|pkg_name, _| pkgs_to_keep.contains(pkg_name)); | ||
|
||
Ok(()) | ||
.into_iter() | ||
.filter(|(pkg_name, _)| pkgs_to_keep.contains(pkg_name)) | ||
.collect()) | ||
} | ||
} | ||
|
||
/// Create a set of [Dependencies] from a [SystemPackagesVersion]; the dependencies are `override` git | ||
/// dependencies pinned to the specific revision given by the [SystemPackagesVersion] | ||
pub fn implicit_deps(packages: &SystemPackagesVersion) -> Dependencies { | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Does this include other system packages? I would have a preference not to include the deepbook package here (and possibly even the bridge package) as that seems maybe a bit too much. "it's a system package, deal with it" will make me sad, but is an acceptable answer here too... There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. But why would we leave those out? Or the better question, why do we want to treat some packages in one way and other packages in another way? There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. @amnn and I discussed this some (and maybe it was at the tooling meeting?) - the thing that differentiates these packages is that they get upgraded automatically on-chain, so that you /can't/ actually use older versions even if you want to. Arguably, the problem is that we made There is admittedly a bit of overhead for compiling them, but most of this overhead comes from downloading the source, and we're already downloading the source since they're colocated with other system deps. There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Yea, it just pains me to include that overhead just to prune it away later, but I get the reason for keeping them. Maybe the real answer here is to allow the compiler to lazily compile dependencies.... There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more.
That sounds like a cool thing, but I guess it would take a bit of effort. There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Yea, I think it's something I would chuck on the "someday, but not now" pile. In the meantime though it's fine :) |
||
packages | ||
.packages | ||
.iter() | ||
.map(|package| { | ||
( | ||
package.package_name.clone().into(), | ||
Dependency::Internal(InternalDependency { | ||
kind: DependencyKind::Git(GitInfo { | ||
git_url: SYSTEM_GIT_REPO.into(), | ||
git_rev: packages.git_revision.clone().into(), | ||
subdir: package.repo_path.clone().into(), | ||
}), | ||
subst: None, | ||
digest: None, | ||
dep_override: true, | ||
}), | ||
) | ||
}) | ||
.collect() | ||
} | ||
|
||
impl GetModule for CompiledPackage { | ||
type Error = anyhow::Error; | ||
// TODO: return ref here for better efficiency? Borrow checker + all_modules_map() make it hard to do this | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -5,9 +5,9 @@ use crate::manage_package::resolve_lock_file_path; | |
use clap::Parser; | ||
use move_cli::base; | ||
use move_package::BuildConfig as MoveBuildConfig; | ||
use serde_json::json; | ||
use std::{fs, path::Path}; | ||
use sui_move_build::{check_invalid_dependencies, check_unpublished_dependencies, BuildConfig}; | ||
use sui_move_build::{implicit_deps, BuildConfig}; | ||
use sui_package_management::system_package_versions::latest_system_packages; | ||
|
||
const LAYOUTS_DIR: &str = "layouts"; | ||
const STRUCT_LAYOUTS_FILENAME: &str = "struct_layouts.yaml"; | ||
|
@@ -52,44 +52,25 @@ impl Build { | |
Self::execute_internal( | ||
&rerooted_path, | ||
build_config, | ||
self.with_unpublished_dependencies, | ||
self.dump_bytecode_as_base64, | ||
self.generate_struct_layouts, | ||
self.chain_id.clone(), | ||
) | ||
} | ||
|
||
pub fn execute_internal( | ||
rerooted_path: &Path, | ||
config: MoveBuildConfig, | ||
with_unpublished_deps: bool, | ||
dump_bytecode_as_base64: bool, | ||
mut config: MoveBuildConfig, | ||
generate_struct_layouts: bool, | ||
chain_id: Option<String>, | ||
) -> anyhow::Result<()> { | ||
let mut pkg = BuildConfig { | ||
config.implicit_dependencies = implicit_deps(latest_system_packages()); | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. I am not sure I follow why we call the latest system packages here. Is it because the There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Yes, exactly There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Perhaps I'm not following the logic here fully (or maybe this is handled elsewhere?), but I'm somewhat worried about this and the behavior it introduces. My understanding of what this is doing (and please correct me if I'm wrong) is that this is setting the implicit dependencies to be the system packages at the time the Sui CLI (/binary) is built. However this then fixes the system packages that will be used implicitly to be frozen at build time of the CLI binary, so if someone is using an old(er) Sui CLI to test or publish they will be getting possibly stale dependencies unless they specify the dep explicitly or upgrade their CLI? There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Just a note, that if indeed the CLI is 2 or more protocol versions behind, it will warn and suggest to upgrade. There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. There's two cases in different places. For Because of backwards compatibility, I think the worst that will happen if the framework changes is you won't be able to use features that were introduced after your CLI was built - otherwise it should work fine (unless you specifically request source verification) There was a problem hiding this comment. 
Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Any thoughts @tzakian @mdgeorge4153 on having a flag that allows you to overwrite system packages, such that theoretically it would work as today: download the latest version from branch X? It feels a bit like an anti pattern, but possibly useful in some very narrow situations (e.g., CLI is too old, though the best solution is to update rather than force it..) There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Apologies for being such a stickler here and asking so many question, but I just want to make sure we get this part right, otherwise folks might either keep using explicit dependencies or get frustrated (or both...). And, causing ourselves lots of pain in the process. As far as local testing/running with the latest version of the framework at the time of compiling the CLI -- I've been convinced that that is totally fine (and prompting for/requiring an upgrade of the CLI, while slightly annoying possibly is both acceptable and probably the right thing to do)[1]. ✅ Now, for
This part seems a bit wonky to me IMO as this means that e.g., if I'm using testnet basically every 2-4 weeks I will be running an outdated framework when publishing unless I'm aggressively updating my Sui CLI I believe(?). We have tagged git branches for these networks that have the source code we need, so I'm wondering how hard it would be to try and go from "network == testnet => use branch
I would probably be against this at least for now, or at least it feels like a slippery slope to me in terms of what other command-line overrides of the [1] It's worth examining what this means for how quickly we roll out a new CLI when we rollout a testnet release and the dependency it adds there in terms of our ability to change/decouple these cycles in the future if we want to. This change commits us more heavily to a "CLI release per tesnet release" if we want folks to be able to use implicit dependencies in a smooth fashion I believe. This is probably fine, but just something worth considering/making sure we're OK with going forward as today they are somewhat coupled, but not as tightly coupled I believe. There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more.
No apologies, this is really important and I'm glad you're asking.
You will be using an outdated version, but you will only notice this if you're using a feature that was added in the latest version[2]. In that case, your local builds are going to fail anyway so you probably will want to have upgraded anyway. I think it would be possible to switch to the branches for special cases, but I'm not sure how much it buys (again modulo source verification).
I would also be against this. If you want to override system packages, you do it in your [2] This would be good to test and I'm not entirely sure how to do it There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more.
Note that I think these should go away once we roll out implicit deps, because they rely on us manually waiting for protocol upgrade and then bumping the version (so it's just a worse version of implicit dependencies).
To me it doesn't seem like the worst thing to support being able to override where the package resolver looks for system packages to some local repo -- that would be quite helpful for writing one off tests etc (as we often do). It is akin to setting system package dependencies explicitly to a local path. Back to the central question though:
Because of the relationship between system packages and native functions, I think this is actually desirable. With how things work today, people could wake up one morning to find that their build is broken for some incomprehensible reason, because the protocol bumped overnight, and the new framework uses a native function their CLI doesn't have a definition for (which their package doesn't even use). They would need to figure that out, and then upgrade their CLI to clear the error. With the new system, that can't happen -- the CLI will only run against frameworks it is aware of and therefore has all the necessary bindings for. The builder only needs to upgrade if they want to use a feature they don't have access to, when they want to start using that feature. |
||
let pkg = BuildConfig { | ||
config, | ||
run_bytecode_verifier: true, | ||
print_diags_to_stderr: true, | ||
chain_id, | ||
} | ||
.build(rerooted_path)?; | ||
if dump_bytecode_as_base64 { | ||
check_invalid_dependencies(&pkg.dependency_ids.invalid)?; | ||
if !with_unpublished_deps { | ||
check_unpublished_dependencies(&pkg.dependency_ids.unpublished)?; | ||
} | ||
|
||
pkg.tree_shake(with_unpublished_deps)?; | ||
println!( | ||
"{}", | ||
json!({ | ||
"modules": pkg.get_package_base64(with_unpublished_deps), | ||
"dependencies": pkg.get_dependency_storage_package_ids(), | ||
"digest": pkg.get_package_digest(with_unpublished_deps), | ||
}) | ||
) | ||
} | ||
|
||
if generate_struct_layouts { | ||
let layout_str = serde_yaml::to_string(&pkg.generate_struct_layouts()).unwrap(); | ||
|
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Not sure I understand this TODO comment.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
In future, we can make it so that all tests implicitly depend on the system packages that are part of this repo (at the moment, all tests explicitly state their system package dependencies, so it's moot).