From 8f6b4040815f1991f0e9546513a167402d1bb498 Mon Sep 17 00:00:00 2001 From: David Zbarsky Date: Mon, 25 Aug 2025 10:07:50 -0400 Subject: [PATCH 1/2] Remove workspace discovery --- crate_universe/extensions.bzl | 9 +- crate_universe/private/crates_repository.bzl | 9 +- crate_universe/private/splicing_utils.bzl | 7 + crate_universe/private/srcs.bzl | 1 - crate_universe/src/cli/splice.rs | 33 +- crate_universe/src/metadata.rs | 2 - .../src/metadata/workspace_discoverer.rs | 475 ------------------ crate_universe/src/splicing/splicer.rs | 123 +---- crate_universe/src/utils/starlark/label.rs | 139 +---- 9 files changed, 74 insertions(+), 724 deletions(-) delete mode 100644 crate_universe/src/metadata/workspace_discoverer.rs diff --git a/crate_universe/extensions.bzl b/crate_universe/extensions.bzl index 257122f7f5..860a3fab48 100644 --- a/crate_universe/extensions.bzl +++ b/crate_universe/extensions.bzl @@ -628,6 +628,9 @@ def _generate_hub_and_spokes( splicing_manifest = splicing_manifest, ) + # The workspace root when one is explicitly provided. + nonhermetic_root_bazel_workspace_dir = module_ctx.path(Label("@@//:MODULE.bazel")).dirname + # If re-pinning is enabled, gather additional inputs for the generator kwargs = dict() if repin: @@ -658,8 +661,10 @@ def _generate_hub_and_spokes( "metadata": splice_outputs.metadata, }) - # The workspace root when one is explicitly provided. - nonhermetic_root_bazel_workspace_dir = module_ctx.path(Label("@@//:MODULE.bazel")).dirname + for path_to_track in splice_outputs.extra_paths_to_track: + # We can only watch paths in our workspace. + if path_to_track.startswith(str(nonhermetic_root_bazel_workspace_dir)): + module_ctx.watch(path_to_track) paths_to_track_file = tag_path.get_child("paths_to_track.json") warnings_output_file = tag_path.get_child("warnings_output.json") diff --git a/crate_universe/private/crates_repository.bzl b/crate_universe/private/crates_repository.bzl index 63d1f8e4c1..659a4f2694 100644 --- a/crate_universe/private/crates_repository.bzl +++ b/crate_universe/private/crates_repository.bzl @@ -76,6 +76,8 @@ def _crates_repository_impl(repository_ctx): repin_instructions = repository_ctx.attr.repin_instructions, ) + nonhermetic_root_bazel_workspace_dir = repository_ctx.workspace_root + # If re-pinning is enabled, gather additional inputs for the generator kwargs = dict() if repin: @@ -92,6 +94,11 @@ def _crates_repository_impl(repository_ctx): repository_name = repository_ctx.name, ) + for path_to_track in splice_outputs.extra_paths_to_track: + # We can only watch paths in our workspace. 
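Both the module-extension hunk above and this repository-rule hunk apply the same prefix filter before watching spliced manifests. A minimal Starlark sketch of that shared pattern, assuming a hypothetical `_watch_workspace_paths` helper and a `ctx` that is either a `module_ctx` or a `repository_ctx`:

    def _watch_workspace_paths(ctx, workspace_root, paths):
        """Watch only the manifests that live inside the Bazel workspace."""
        root = str(workspace_root)
        for p in paths:
            # The list comes from a newline-joined file, so the final entry may
            # be empty; per the comment above, only in-workspace paths are watched.
            if p and p.startswith(root):
                ctx.watch(p)

Any change to a watched Cargo.toml then invalidates the extension or repository rule, so splicing is re-evaluated on the next fetch.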
+ if path_to_track.startswith(str(nonhermetic_root_bazel_workspace_dir)): + repository_ctx.watch(path_to_track) + kwargs.update({ "metadata": splice_outputs.metadata, }) @@ -109,7 +116,7 @@ def _crates_repository_impl(repository_ctx): lockfile_path = lockfiles.bazel, cargo_lockfile_path = lockfiles.cargo, repository_dir = repository_ctx.path("."), - nonhermetic_root_bazel_workspace_dir = repository_ctx.workspace_root, + nonhermetic_root_bazel_workspace_dir = nonhermetic_root_bazel_workspace_dir, paths_to_track_file = paths_to_track_file, warnings_output_file = warnings_output_file, skip_cargo_lockfile_overwrite = repository_ctx.attr.skip_cargo_lockfile_overwrite, diff --git a/crate_universe/private/splicing_utils.bzl b/crate_universe/private/splicing_utils.bzl index 49d1acd829..c0aac189d3 100644 --- a/crate_universe/private/splicing_utils.bzl +++ b/crate_universe/private/splicing_utils.bzl @@ -188,7 +188,14 @@ def splice_workspace_manifest( if not spliced_metadata.exists: fail("Metadata file does not exist: " + str(spliced_metadata)) + extra_paths_to_track_path = repository_ctx.path(output_dir.get_child("extra_paths_to_track")) + if extra_paths_to_track_path.exists: + extra_paths_to_track = repository_ctx.read(extra_paths_to_track_path).split("\n") + else: + extra_paths_to_track = [] + return struct( metadata = spliced_metadata, cargo_lock = spliced_lockfile, + extra_paths_to_track = extra_paths_to_track, ) diff --git a/crate_universe/private/srcs.bzl b/crate_universe/private/srcs.bzl index 53f9e7b8cd..bea5cc28f9 100644 --- a/crate_universe/private/srcs.bzl +++ b/crate_universe/private/srcs.bzl @@ -28,7 +28,6 @@ CARGO_BAZEL_SRCS = [ Label("//crate_universe:src/metadata/cargo_tree_rustc_wrapper.sh"), Label("//crate_universe:src/metadata/dependency.rs"), Label("//crate_universe:src/metadata/metadata_annotation.rs"), - Label("//crate_universe:src/metadata/workspace_discoverer.rs"), Label("//crate_universe:src/rendering.rs"), Label("//crate_universe:src/rendering/template_engine.rs"), Label("//crate_universe:src/rendering/templates/module_bzl.j2"), diff --git a/crate_universe/src/cli/splice.rs b/crate_universe/src/cli/splice.rs index c689fbcc82..f245b7ad07 100644 --- a/crate_universe/src/cli/splice.rs +++ b/crate_universe/src/cli/splice.rs @@ -5,13 +5,16 @@ use std::path::PathBuf; use anyhow::Context; use camino::Utf8PathBuf; use clap::Parser; +use itertools::Itertools; use crate::cli::Result; use crate::config::Config; use crate::metadata::{ write_metadata, Cargo, CargoUpdateRequest, Generator, MetadataGenerator, TreeResolver, }; -use crate::splicing::{generate_lockfile, Splicer, SplicingManifest, WorkspaceMetadata}; +use crate::splicing::{ + generate_lockfile, Splicer, SplicerKind, SplicingManifest, WorkspaceMetadata, +}; /// Command line options for the `splice` subcommand #[derive(Parser, Debug)] @@ -81,13 +84,14 @@ pub fn splice(opt: SpliceOptions) -> Result<()> { }; // Generate a splicer for creating a Cargo workspace manifest - let splicer = Splicer::new(splicing_dir, splicing_manifest)?; + let splicer = Splicer::new(splicing_dir.clone(), splicing_manifest)?; + let prepared_splicer = splicer.prepare()?; let cargo = Cargo::new(opt.cargo, opt.rustc.clone()); // Splice together the manifest - let manifest_path = splicer - .splice_workspace() + let manifest_path = prepared_splicer + .splice(&splicing_dir) .with_context(|| format!("Failed to splice workspace {}", opt.repository_name))?; // Generate a lockfile @@ -121,8 +125,8 @@ pub fn splice(opt: SpliceOptions) -> Result<()> { // Write 
metadata to the workspace for future reuse let (cargo_metadata, _) = Generator::new() - .with_cargo(cargo) - .with_rustc(opt.rustc) + .with_cargo(cargo.clone()) + .with_rustc(opt.rustc.clone()) .generate(manifest_path.as_path_buf()) .context("Failed to generate cargo metadata")?; @@ -147,5 +151,22 @@ pub fn splice(opt: SpliceOptions) -> Result<()> { std::fs::copy(cargo_lockfile_path, output_dir.join("Cargo.lock")) .context("Failed to copy lockfile")?; + if let SplicerKind::Workspace { path, .. } = prepared_splicer { + let metadata = cargo.metadata_command_with_options( + path.as_std_path(), + vec![String::from("--no-deps")], + )?.exec().with_context(|| { + format!( + "Error spawning cargo in child process to compute crate paths for workspace '{}'", + path + ) + })?; + let contents = metadata + .packages + .iter() + .map(|package| package.manifest_path.clone()) + .join("\n"); + std::fs::write(opt.output_dir.join("extra_paths_to_track"), contents)?; + } Ok(()) } diff --git a/crate_universe/src/metadata.rs b/crate_universe/src/metadata.rs index 7cb24b839c..26f0109122 100644 --- a/crate_universe/src/metadata.rs +++ b/crate_universe/src/metadata.rs @@ -4,7 +4,6 @@ mod cargo_bin; mod cargo_tree_resolver; mod dependency; mod metadata_annotation; -mod workspace_discoverer; use std::env; use std::fs; @@ -21,7 +20,6 @@ pub(crate) use self::cargo_bin::*; pub(crate) use self::cargo_tree_resolver::*; pub(crate) use self::dependency::*; pub(crate) use self::metadata_annotation::*; -pub(crate) use self::workspace_discoverer::*; // TODO: This should also return a set of [crate-index::IndexConfig]s for packages in metadata.packages /// A Trait for generating metadata (`cargo metadata` output and a lock file) from a Cargo manifest. diff --git a/crate_universe/src/metadata/workspace_discoverer.rs b/crate_universe/src/metadata/workspace_discoverer.rs deleted file mode 100644 index 8814f33b23..0000000000 --- a/crate_universe/src/metadata/workspace_discoverer.rs +++ /dev/null @@ -1,475 +0,0 @@ -use std::collections::{BTreeMap, BTreeSet}; - -use anyhow::{anyhow, bail, Context, Result}; -use camino::{Utf8Path, Utf8PathBuf}; -use cargo_toml::Manifest; - -/// A description of Cargo.toml files and how they are related in workspaces. -/// All `Utf8PathBuf` values are paths of Cargo.toml files. -#[derive(Debug, PartialEq)] -pub(crate) struct DiscoveredWorkspaces { - workspaces_to_members: BTreeMap>, - non_workspaces: BTreeSet, -} - -impl DiscoveredWorkspaces { - pub(crate) fn workspaces(&self) -> BTreeSet { - self.workspaces_to_members.keys().cloned().collect() - } - - pub(crate) fn all_workspaces_and_members(&self) -> BTreeSet { - self.workspaces_to_members - .keys() - .chain(self.workspaces_to_members.values().flatten()) - .cloned() - .collect() - } -} - -pub(crate) fn discover_workspaces( - cargo_toml_paths: BTreeSet, - known_manifests: &BTreeMap, -) -> Result { - let mut manifest_cache = ManifestCache { - cache: BTreeMap::new(), - known_manifests, - }; - discover_workspaces_with_cache(cargo_toml_paths, &mut manifest_cache) -} - -fn discover_workspaces_with_cache( - cargo_toml_paths: BTreeSet, - manifest_cache: &mut ManifestCache, -) -> Result { - let mut discovered_workspaces = DiscoveredWorkspaces { - workspaces_to_members: BTreeMap::new(), - non_workspaces: BTreeSet::new(), - }; - - // First pass: Discover workspace parents. 
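The splice.rs hunk above now derives the set of workspace member manifests from a `cargo metadata --no-deps` query against the spliced workspace, instead of the filesystem walk in the module being deleted here. A minimal standalone sketch of that query using the public cargo_metadata crate rather than the internal `Cargo` wrapper (the helper name is illustrative):

    use cargo_metadata::MetadataCommand;

    /// List the manifest path of every package in the workspace rooted at
    /// `workspace_manifest`, without resolving the full dependency graph.
    fn member_manifest_paths(workspace_manifest: &std::path::Path) -> anyhow::Result<Vec<String>> {
        let metadata = MetadataCommand::new()
            .manifest_path(workspace_manifest)
            .no_deps() // only workspace members are needed for extra_paths_to_track
            .exec()?;
        Ok(metadata
            .packages
            .iter()
            .map(|package| package.manifest_path.to_string())
            .collect())
    }

splice.rs joins these paths with newlines and writes them to `extra_paths_to_track`, which the .bzl changes above read back and watch.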
- for cargo_toml_path in &cargo_toml_paths { - if let Some(workspace_parent) = discover_workspace_parent(cargo_toml_path, manifest_cache) { - discovered_workspaces - .workspaces_to_members - .insert(workspace_parent, BTreeSet::new()); - } else { - discovered_workspaces - .non_workspaces - .insert(cargo_toml_path.clone()); - } - } - - // Second pass: Find all child manifests. - for workspace_path in discovered_workspaces - .workspaces_to_members - .keys() - .cloned() - .collect::>() - { - let workspace_manifest = manifest_cache.get(&workspace_path).unwrap(); - - let workspace_explicit_members = workspace_manifest - .workspace - .as_ref() - .and_then(|workspace| { - // Unfortunately cargo_toml doesn't preserve presence/absence of this field, so we infer empty means absent. - if workspace.members.is_empty() { - return None; - } - let members = workspace - .members - .iter() - .map(|member| glob::Pattern::new(member).map_err(anyhow::Error::from)) - .collect::>>(); - Some(members) - }) - .transpose()?; - - 'per_child: for entry in walkdir::WalkDir::new(workspace_path.parent().unwrap()) - .follow_links(false) - .follow_root_links(false) - .into_iter() - { - let entry = match entry { - Ok(entry) => entry, - Err(err) => { - if let Some(io_err) = err.io_error() { - if let Some(path) = err.path() { - if let Ok(symlink_metadata) = std::fs::symlink_metadata(path) { - if symlink_metadata.is_symlink() - && io_err.kind() == std::io::ErrorKind::NotFound - { - // Ignore dangling symlinks - continue; - } - } - } - } - return Err(err) - .context("Failed to walk filesystem finding workspace Cargo.toml files"); - } - }; - - if entry.file_name() != "Cargo.toml" { - continue; - } - - let child_path = Utf8Path::from_path(entry.path()) - .ok_or_else(|| anyhow!("Failed to parse {:?} as UTF-8", entry.path()))? 
- .to_path_buf(); - if child_path == workspace_path { - continue; - } - - let manifest = manifest_cache - .get(&child_path) - .ok_or_else(|| anyhow!("Failed to read manifest at {}", child_path))?; - - let mut actual_workspace_path = workspace_path.clone(); - if let Some(package) = manifest.package { - if let Some(explicit_workspace_path) = package.workspace { - let explicit_workspace_path = Utf8Path::from_path(&explicit_workspace_path) - .with_context(|| { - anyhow!( - "Failed to construct UTF-8 path from: {}", - explicit_workspace_path.display() - ) - })?; - actual_workspace_path = - child_path.parent().unwrap().join(explicit_workspace_path); - } - } - if !discovered_workspaces - .workspaces_to_members - .contains_key(&actual_workspace_path) - { - bail!("Found manifest at {} which is a member of the workspace at {} which isn't included in the crates_universe", child_path, actual_workspace_path); - } - - let dir_relative_to_workspace_dir = child_path - .parent() - .unwrap() - .strip_prefix(workspace_path.parent().unwrap()); - - if let Ok(dir_relative_to_workspace_dir) = dir_relative_to_workspace_dir { - use itertools::Itertools; - if workspace_manifest - .workspace - .as_ref() - .unwrap() - .exclude - .contains(&dir_relative_to_workspace_dir.components().join("/")) - { - continue 'per_child; - } - } - - if let (Ok(dir_relative_to_workspace_dir), Some(workspace_explicit_members)) = ( - dir_relative_to_workspace_dir, - workspace_explicit_members.as_ref(), - ) { - if workspace_explicit_members - .iter() - .any(|glob| glob.matches(dir_relative_to_workspace_dir.as_str())) - { - discovered_workspaces - .workspaces_to_members - .get_mut(&actual_workspace_path) - .unwrap() - .insert(child_path.clone()); - } - } else { - discovered_workspaces - .workspaces_to_members - .get_mut(&actual_workspace_path) - .unwrap() - .insert(child_path.clone()); - } - } - } - - for cargo_toml_path in cargo_toml_paths { - if !discovered_workspaces - .all_workspaces_and_members() - .contains(&cargo_toml_path) - { - discovered_workspaces.non_workspaces.insert(cargo_toml_path); - } - } - - Ok(discovered_workspaces) -} - -fn discover_workspace_parent( - cargo_toml_path: &Utf8PathBuf, - manifest_cache: &mut ManifestCache, -) -> Option { - for parent_dir in cargo_toml_path.ancestors().skip(1) { - let maybe_cargo_toml_path = parent_dir.join("Cargo.toml"); - let maybe_manifest = manifest_cache.get(&maybe_cargo_toml_path); - if let Some(manifest) = maybe_manifest { - if manifest.workspace.is_some() { - return Some(maybe_cargo_toml_path); - } - } - } - None -} - -struct ManifestCache<'a> { - cache: BTreeMap>, - known_manifests: &'a BTreeMap, -} - -impl ManifestCache<'_> { - fn get(&mut self, path: &Utf8PathBuf) -> Option { - if let Some(manifest) = self.known_manifests.get(path) { - return Some(manifest.clone()); - } - if let Some(maybe_manifest) = self.cache.get(path) { - return maybe_manifest.clone(); - } - let maybe_manifest = Manifest::from_path(path).ok(); - self.cache.insert(path.clone(), maybe_manifest.clone()); - maybe_manifest - } -} - -#[cfg(test)] -mod test { - use super::*; - use std::path::{Path, PathBuf}; - use std::sync::Mutex; - - // Both of these tests try to create the same symlink, so they can't run in parallel. 
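The walkdir- and glob-based discovery being deleted here is replaced, in splicer.rs further down, by a single ancestor walk per input manifest. A self-contained sketch of that idea, assuming camino and cargo_toml as above (the function name is illustrative; the patch's own version is `parent_workspace`):

    use camino::{Utf8Path, Utf8PathBuf};
    use cargo_toml::Manifest;

    /// Starting from a Cargo.toml (and including that file itself), return the
    /// nearest manifest that declares a `[workspace]` table, if any.
    fn nearest_workspace_manifest(cargo_toml: &Utf8Path) -> Option<Utf8PathBuf> {
        for dir in cargo_toml.ancestors().skip(1) {
            let candidate = dir.join("Cargo.toml");
            if let Ok(manifest) = Manifest::from_path(&candidate) {
                if manifest.workspace.is_some() {
                    return Some(candidate);
                }
            }
        }
        None
    }

Because the walk starts at the manifest's own directory, a Cargo.toml that itself declares `[workspace]` is treated as its own workspace root, matching the deleted `discover_workspace_parent`.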
- static FILESYSTEM_GUARD: Mutex<()> = Mutex::new(()); - - #[test] - fn test_discover() { - let _guard = FILESYSTEM_GUARD.lock().unwrap(); - let r = runfiles::Runfiles::create().unwrap(); - let root_dir = - runfiles::rlocation!(r, "rules_rust/crate_universe/test_data/workspace_examples") - .unwrap(); - let root_dir = Utf8PathBuf::from_path_buf(root_dir).unwrap(); - - let _manifest_symlink = DeleteOnDropDirSymlink::symlink( - Path::new("..").join("symlinked"), - root_dir.join("ws1").join("bazel-ws1").into_std_path_buf(), - ) - .unwrap(); - - let mut expected = ws1_discovered_workspaces(&root_dir); - - expected.workspaces_to_members.insert( - root_dir.join("ws2").join("Cargo.toml"), - BTreeSet::from([ - root_dir.join("ws2").join("ws2c1").join("Cargo.toml"), - root_dir - .join("ws2") - .join("ws2excluded") - .join("ws2included") - .join("Cargo.toml"), - ]), - ); - - expected.non_workspaces.extend([ - root_dir.join("non-ws").join("Cargo.toml"), - root_dir.join("ws2").join("ws2excluded").join("Cargo.toml"), - ]); - - let actual = discover_workspaces( - vec![ - root_dir.join("ws1").join("ws1c1").join("Cargo.toml"), - root_dir.join("ws2").join("Cargo.toml"), - root_dir.join("ws2").join("ws2excluded").join("Cargo.toml"), - root_dir.join("non-ws").join("Cargo.toml"), - ] - .into_iter() - .collect(), - &BTreeMap::new(), - ) - .unwrap(); - - assert_eq!(expected, actual); - } - - #[test] - fn test_ignore_bazel_root_symlink() { - let _guard = FILESYSTEM_GUARD.lock().unwrap(); - let r = runfiles::Runfiles::create().unwrap(); - let root_dir = - runfiles::rlocation!(r, "rules_rust/crate_universe/test_data/workspace_examples") - .unwrap(); - let root_dir = Utf8PathBuf::from_path_buf(root_dir).unwrap(); - - let _symlink1 = DeleteOnDropDirSymlink::symlink( - Path::new("..").join("symlinked"), - root_dir.join("ws1").join("bazel-ws1").into_std_path_buf(), - ) - .unwrap(); - - let _symlink2 = DeleteOnDropDirSymlink::symlink( - Path::new("..").join("symlinked"), - root_dir.join("ws1").join(".bazel").into_std_path_buf(), - ) - .unwrap(); - - let expected = ws1_discovered_workspaces(&root_dir); - - let actual = discover_workspaces( - vec![root_dir.join("ws1").join("ws1c1").join("Cargo.toml")] - .into_iter() - .collect(), - &BTreeMap::new(), - ) - .unwrap(); - - assert_eq!(expected, actual); - } - - #[test] - fn test_discover_ignores_dangling_symlinks() { - let _guard = FILESYSTEM_GUARD.lock().unwrap(); - let r = runfiles::Runfiles::create().unwrap(); - let root_dir = - runfiles::rlocation!(r, "rules_rust/crate_universe/test_data/workspace_examples") - .unwrap(); - let root_dir = Utf8PathBuf::from_path_buf(root_dir).unwrap(); - - let _dangling_symlink = DeleteOnDropDirSymlink::symlink( - non_existing_path(), - root_dir.join("ws1").join("dangling").into_std_path_buf(), - ) - .unwrap(); - - let expected = ws1_discovered_workspaces(&root_dir); - - let actual = discover_workspaces( - vec![root_dir.join("ws1").join("ws1c1").join("Cargo.toml")] - .into_iter() - .collect(), - &BTreeMap::new(), - ) - .unwrap(); - - assert_eq!(expected, actual); - } - - #[test] - fn test_discover_explicit_members() { - let r = runfiles::Runfiles::create().unwrap(); - let root_dir = - runfiles::rlocation!(r, "rules_rust/crate_universe/test_data/workspace_examples") - .unwrap(); - let root_dir = Utf8PathBuf::from_path_buf(root_dir).unwrap(); - - let mut workspaces_to_members = BTreeMap::>::new(); - - let mut includes_members = BTreeSet::new(); - let includes_root = root_dir.join("includes"); - for child in [ - vec!["explicit-child"], - 
vec!["glob-char1"], - vec!["glob-char2"], - vec!["glob-direct-children", "grandchild1"], - vec!["glob-direct-children", "grandchild2"], - vec!["glob-transitive-children", "level1", "anchor"], - vec!["glob-transitive-children", "level1", "level2", "anchor"], - vec![ - "glob-transitive-children", - "level1", - "level2", - "level3", - "anchor", - ], - ] { - let mut path = includes_root.clone(); - for path_part in child { - path.push(path_part); - } - path.push("Cargo.toml"); - includes_members.insert(path); - } - - workspaces_to_members.insert(includes_root.join("Cargo.toml"), includes_members); - let non_workspaces = BTreeSet::::new(); - - let expected = DiscoveredWorkspaces { - workspaces_to_members, - non_workspaces, - }; - - let actual = discover_workspaces( - vec![root_dir - .join("includes") - .join("explicit-child") - .join("Cargo.toml")] - .into_iter() - .collect(), - &BTreeMap::new(), - ) - .unwrap(); - - assert_eq!(expected, actual); - } - - fn ws1_discovered_workspaces(root_dir: &Utf8Path) -> DiscoveredWorkspaces { - let mut workspaces_to_members = BTreeMap::new(); - workspaces_to_members.insert( - root_dir.join("ws1").join("Cargo.toml"), - BTreeSet::from([ - root_dir.join("ws1").join("ws1c1").join("Cargo.toml"), - root_dir - .join("ws1") - .join("ws1c1") - .join("ws1c1c1") - .join("Cargo.toml"), - root_dir.join("ws1").join("ws1c2").join("Cargo.toml"), - ]), - ); - let non_workspaces = BTreeSet::new(); - - DiscoveredWorkspaces { - workspaces_to_members, - non_workspaces, - } - } - - struct DeleteOnDropDirSymlink(PathBuf); - - impl DeleteOnDropDirSymlink { - #[cfg(unix)] - fn symlink>(original: P, link: PathBuf) -> std::io::Result { - std::os::unix::fs::symlink(original, &link)?; - Ok(Self(link)) - } - - #[cfg(windows)] - fn symlink>(original: P, link: PathBuf) -> std::io::Result { - std::os::windows::fs::symlink_dir(original, &link)?; - Ok(Self(link)) - } - } - - impl Drop for DeleteOnDropDirSymlink { - #[cfg(unix)] - fn drop(&mut self) { - std::fs::remove_file(&self.0).expect("Failed to delete symlink"); - } - #[cfg(windows)] - fn drop(&mut self) { - std::fs::remove_dir(&self.0).expect("Failed to delete symlink"); - } - } - - #[cfg(unix)] - fn non_existing_path() -> PathBuf { - PathBuf::from("/doesnotexist") - } - - #[cfg(windows)] - fn non_existing_path() -> PathBuf { - PathBuf::from("Z:\\doesnotexist") - } -} diff --git a/crate_universe/src/splicing/splicer.rs b/crate_universe/src/splicing/splicer.rs index 9de96f0c5b..7825ea8ddd 100644 --- a/crate_universe/src/splicing/splicer.rs +++ b/crate_universe/src/splicing/splicer.rs @@ -1,6 +1,6 @@ //! 
Utility for creating valid Cargo workspaces -use std::collections::BTreeMap; +use std::collections::{BTreeMap, HashSet}; use std::fs; use std::path::Path; @@ -10,9 +10,7 @@ use cargo_toml::Manifest; use tracing::debug; use crate::config::CrateId; -use crate::metadata::discover_workspaces; use crate::splicing::{SplicedManifest, SplicingManifest}; -use crate::utils::starlark::Label; use crate::utils::symlink::{remove_symlink, symlink}; use super::{read_manifest, DirectPackageManifest, WorkspaceMetadata}; @@ -43,57 +41,38 @@ pub(crate) enum SplicerKind<'a> { /// A list of files or directories to ignore when when symlinking const IGNORE_LIST: &[&str] = &[".git", "bazel-*", ".svn"]; +fn parent_workspace(cargo_toml_path: &Utf8PathBuf) -> Option { + for dir in cargo_toml_path.ancestors() { + let maybe_cargo_toml_path = dir.join("Cargo.toml"); + let maybe_manifest = Manifest::from_path(&maybe_cargo_toml_path).ok(); + if let Some(manifest) = maybe_manifest { + if manifest.workspace.is_some() { + return Some(maybe_cargo_toml_path); + } + } + } + None +} + impl<'a> SplicerKind<'a> { pub(crate) fn new( manifests: &'a BTreeMap, splicing_manifest: &'a SplicingManifest, ) -> Result { - let workspaces = discover_workspaces(manifests.keys().cloned().collect(), manifests)?; - let workspace_roots = workspaces.workspaces(); + let workspace_roots: HashSet = + manifests.keys().filter_map(parent_workspace).collect(); if workspace_roots.len() > 1 { bail!("When splicing manifests, manifests are not allowed to from from different workspaces. Saw manifests which belong to the following workspaces: {}", workspace_roots.iter().map(|wr| wr.to_string()).collect::>().join(", ")); } - let all_workspace_and_member_paths = workspaces.all_workspaces_and_members(); - let mut missing_labels = Vec::new(); - let mut missing_paths = Vec::new(); - for manifest_path in &all_workspace_and_member_paths { - if !manifests.contains_key(manifest_path) { - if let Ok(label) = Label::from_absolute_path(manifest_path.as_path().as_std_path()) - { - missing_labels.push(label.to_string()); - } else { - missing_paths.push(manifest_path.to_string()); - } - } - } - if !missing_labels.is_empty() || !missing_paths.is_empty() { - bail!( - "Some manifests are not being tracked.{}{}", - if !missing_labels.is_empty() { - format!( - "\nPlease add the following labels to the `manifests` key:\n {}.", - missing_labels.join("\n ") - ) - } else { - String::new() - }, - if !missing_paths.is_empty() { - format!( - " Please add labels for the following paths to the `manifests` key:\n {}.", - missing_paths.join("\n ") - ) - } else { - String::new() - }, - ) - } - if let Some((path, manifest)) = workspace_roots .iter() .next() .and_then(|path| manifests.get_key_value(path)) { + if manifests.len() > 1 { + eprintln!("Only the workspace's Cargo.toml is required in the `manifests` attribute; the rest can be removed"); + } Ok(Self::Workspace { path, manifest, @@ -500,6 +479,10 @@ impl Splicer { pub(crate) fn splice_workspace(&self) -> Result { SplicerKind::new(&self.manifests, &self.splicing_manifest)?.splice(&self.workspace_dir) } + + pub(crate) fn prepare(&self) -> Result { + SplicerKind::new(&self.manifests, &self.splicing_manifest) + } } const DEFAULT_SPLICING_PACKAGE_NAME: &str = "direct-cargo-bazel-deps"; const DEFAULT_SPLICING_PACKAGE_VERSION: &str = "0.0.1"; @@ -642,6 +625,7 @@ mod test { use cargo_metadata::PackageId; use crate::splicing::Cargo; + use crate::utils::starlark::Label; /// Clone and compare two items after calling `.sort()` on them. macro_rules! 
assert_sort_eq { @@ -1046,65 +1030,6 @@ mod test { cargo_lock::Lockfile::load(workspace_root.as_ref().join("Cargo.lock")).unwrap(); } - #[test] - fn splice_workspace_report_missing_members() { - let (mut splicing_manifest, _cache_dir) = mock_splicing_manifest_with_workspace(); - - // Remove everything but the root manifest - splicing_manifest - .manifests - .retain(|_, label| *label == Label::from_str("//root_pkg:Cargo.toml").unwrap()); - assert_eq!(splicing_manifest.manifests.len(), 1); - - // Splice the workspace - let workspace_root = tempfile::tempdir().unwrap(); - let workspace_manifest = Splicer::new( - Utf8PathBuf::try_from(workspace_root.as_ref().to_path_buf()).unwrap(), - splicing_manifest, - ) - .unwrap() - .splice_workspace(); - - assert!(workspace_manifest.is_err()); - - // Ensure both the missing manifests are mentioned in the error string - let err_str = format!("{:?}", &workspace_manifest); - assert!( - err_str.contains("Some manifests are not being tracked") - && err_str.contains("//root_pkg/sub_pkg_a:Cargo.toml") - && err_str.contains("//root_pkg/sub_pkg_b:Cargo.toml") - ); - } - - #[test] - fn splice_workspace_report_missing_root() { - let (mut splicing_manifest, _cache_dir) = mock_splicing_manifest_with_workspace(); - - // Remove everything but the root manifest - splicing_manifest - .manifests - .retain(|_, label| *label != Label::from_str("//root_pkg:Cargo.toml").unwrap()); - assert_eq!(splicing_manifest.manifests.len(), 2); - - // Splice the workspace - let workspace_root = tempfile::tempdir().unwrap(); - let workspace_manifest = Splicer::new( - Utf8PathBuf::try_from(workspace_root.as_ref().to_path_buf()).unwrap(), - splicing_manifest, - ) - .unwrap() - .splice_workspace(); - - assert!(workspace_manifest.is_err()); - - // Ensure both the missing manifests are mentioned in the error string - let err_str = format!("{:?}", &workspace_manifest); - assert!( - err_str.contains("Some manifests are not being tracked") - && err_str.contains("//root_pkg:Cargo.toml") - ); - } - #[test] fn splice_workspace_report_external_workspace_members() { let (mut splicing_manifest, _cache_dir) = mock_splicing_manifest_with_workspace(); diff --git a/crate_universe/src/utils/starlark/label.rs b/crate_universe/src/utils/starlark/label.rs index 7d6c52dff2..f28b7a6bf5 100644 --- a/crate_universe/src/utils/starlark/label.rs +++ b/crate_universe/src/utils/starlark/label.rs @@ -1,8 +1,7 @@ use std::fmt::{self, Display}; -use std::path::Path; use std::str::FromStr; -use anyhow::{anyhow, bail, Context, Result}; +use anyhow::{bail, Context, Result}; use camino::Utf8Path; use once_cell::sync::OnceCell; use regex::Regex; @@ -186,79 +185,6 @@ impl Display for Label { } } -impl Label { - /// Generates a label appropriate for the passed Path by walking the filesystem to identify its - /// workspace and package. 
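With `from_absolute_path` deleted below, `Label` values in crate_universe are only ever produced by parsing label strings. A small sketch of the surviving entry point, modelled on assertions from this file's remaining tests and the deleted splicer tests above (the test name and sample label are illustrative):

    #[test]
    fn parse_label_from_string() {
        use std::str::FromStr;
        let label = Label::from_str("//root_pkg:Cargo.toml").unwrap();
        assert!(label.is_absolute());
        assert_eq!(label.package(), Some("root_pkg"));
        assert_eq!(label.target(), "Cargo.toml");
    }

The splicer hunks above dropped the caller shown earlier (the missing-manifest error), which is why the path-derived constructor and its tests can be removed here.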
- pub(crate) fn from_absolute_path(p: &Path) -> Result { - let mut workspace_root = None; - let mut package_root = None; - for ancestor in p.ancestors().skip(1) { - if package_root.is_none() - && (ancestor.join("BUILD").exists() || ancestor.join("BUILD.bazel").exists()) - { - package_root = Some(ancestor); - } - if workspace_root.is_none() - && (ancestor.join("WORKSPACE").exists() - || ancestor.join("WORKSPACE.bazel").exists() - || ancestor.join("MODULE.bazel").exists()) - { - workspace_root = Some(ancestor); - break; - } - } - match (workspace_root, package_root) { - (Some(workspace_root), Some(package_root)) => { - // These unwraps are safe by construction of the ancestors and prefix calls which set up these paths. - let target = p.strip_prefix(package_root).unwrap(); - let workspace_relative = p.strip_prefix(workspace_root).unwrap(); - let mut package_path = workspace_relative.to_path_buf(); - for _ in target.components() { - package_path.pop(); - } - - let package = if package_path.components().count() > 0 { - path_to_label_part(&package_path)? - } else { - String::new() - }; - let target = path_to_label_part(target)?; - - Ok(Label::Absolute { - repository: Repository::Local, - package, - target, - }) - } - (Some(_workspace_root), None) => { - bail!( - "Could not identify package for path {}. Maybe you need to add a BUILD.bazel file.", - p.display() - ); - } - _ => { - bail!("Could not identify workspace for path {}", p.display()); - } - } - } -} - -/// Converts a path to a forward-slash-delimited label-appropriate path string. -fn path_to_label_part(path: &Path) -> Result { - let components: Result, _> = path - .components() - .map(|c| { - c.as_os_str().to_str().ok_or_else(|| { - anyhow!( - "Found non-UTF8 component turning path into label: {}", - path.display() - ) - }) - }) - .collect(); - Ok(components?.join("/")) -} - impl Serialize for Label { fn serialize(&self, serializer: S) -> Result where @@ -302,8 +228,6 @@ impl Label { #[cfg(test)] mod test { use super::*; - use std::fs::{create_dir_all, File}; - use tempfile::tempdir; #[test] fn relative() { @@ -509,65 +433,4 @@ mod test { fn invalid_triple_at() { Label::from_str("@@@repo//pkg:target").unwrap_err(); } - - #[test] - fn from_absolute_path_exists() { - let dir = tempdir().unwrap(); - let workspace = dir.path().join("WORKSPACE.bazel"); - let build_file = dir.path().join("parent").join("child").join("BUILD.bazel"); - let subdir = dir.path().join("parent").join("child").join("grandchild"); - let actual_file = subdir.join("greatgrandchild"); - create_dir_all(subdir).unwrap(); - { - File::create(workspace).unwrap(); - File::create(build_file).unwrap(); - File::create(&actual_file).unwrap(); - } - let label = Label::from_absolute_path(&actual_file).unwrap(); - assert_eq!( - label.to_string(), - "//parent/child:grandchild/greatgrandchild" - ); - assert!(label.is_absolute()); - assert_eq!(label.repository(), Some(&Repository::Local)); - assert_eq!(label.package(), Some("parent/child")); - assert_eq!(label.target(), "grandchild/greatgrandchild"); - } - - #[test] - fn from_absolute_path_no_workspace() { - let dir = tempdir().unwrap(); - let build_file = dir.path().join("parent").join("child").join("BUILD.bazel"); - let subdir = dir.path().join("parent").join("child").join("grandchild"); - let actual_file = subdir.join("greatgrandchild"); - create_dir_all(subdir).unwrap(); - { - File::create(build_file).unwrap(); - File::create(&actual_file).unwrap(); - } - let err = Label::from_absolute_path(&actual_file) - .unwrap_err() - 
.to_string(); - assert!(err.contains("Could not identify workspace")); - assert!(err.contains(format!("{}", actual_file.display()).as_str())); - } - - #[test] - fn from_absolute_path_no_build_file() { - let dir = tempdir().unwrap(); - let workspace = dir.path().join("WORKSPACE.bazel"); - let subdir = dir.path().join("parent").join("child").join("grandchild"); - let actual_file = subdir.join("greatgrandchild"); - create_dir_all(subdir).unwrap(); - { - File::create(workspace).unwrap(); - File::create(&actual_file).unwrap(); - } - let err = Label::from_absolute_path(&actual_file) - .unwrap_err() - .to_string(); - assert!(err.contains("Could not identify package")); - assert!(err.contains("Maybe you need to add a BUILD.bazel file")); - assert!(err.contains(format!("{}", actual_file.display()).as_str())); - } } From f905c0483e53b3033145fa2eba435d0109b0317c Mon Sep 17 00:00:00 2001 From: David Zbarsky Date: Mon, 1 Sep 2025 13:27:10 -0400 Subject: [PATCH 2/2] Collapse Splicer and SplicerKind --- crate_universe/src/cli/splice.rs | 15 +-- crate_universe/src/cli/vendor.rs | 2 +- crate_universe/src/splicing/splicer.rs | 180 +++++++++++-------------- 3 files changed, 89 insertions(+), 108 deletions(-) diff --git a/crate_universe/src/cli/splice.rs b/crate_universe/src/cli/splice.rs index f245b7ad07..f08c639152 100644 --- a/crate_universe/src/cli/splice.rs +++ b/crate_universe/src/cli/splice.rs @@ -13,7 +13,7 @@ use crate::metadata::{ write_metadata, Cargo, CargoUpdateRequest, Generator, MetadataGenerator, TreeResolver, }; use crate::splicing::{ - generate_lockfile, Splicer, SplicerKind, SplicingManifest, WorkspaceMetadata, + generate_lockfile, Splicer, SplicingManifest, WorkspaceMetadata, }; /// Command line options for the `splice` subcommand @@ -83,15 +83,12 @@ pub fn splice(opt: SpliceOptions) -> Result<()> { } }; - // Generate a splicer for creating a Cargo workspace manifest - let splicer = Splicer::new(splicing_dir.clone(), splicing_manifest)?; - let prepared_splicer = splicer.prepare()?; - let cargo = Cargo::new(opt.cargo, opt.rustc.clone()); - // Splice together the manifest - let manifest_path = prepared_splicer - .splice(&splicing_dir) + // Generate a splicer for creating a Cargo workspace manifest + let splicer = Splicer::new(splicing_dir.clone(), splicing_manifest)?; + let manifest_path = splicer + .splice() .with_context(|| format!("Failed to splice workspace {}", opt.repository_name))?; // Generate a lockfile @@ -151,7 +148,7 @@ pub fn splice(opt: SpliceOptions) -> Result<()> { std::fs::copy(cargo_lockfile_path, output_dir.join("Cargo.lock")) .context("Failed to copy lockfile")?; - if let SplicerKind::Workspace { path, .. } = prepared_splicer { + if let Splicer::Workspace { path, .. 
} = splicer { let metadata = cargo.metadata_command_with_options( path.as_std_path(), vec![String::from("--no-deps")], diff --git a/crate_universe/src/cli/vendor.rs b/crate_universe/src/cli/vendor.rs index b2ebf71139..74e017664c 100644 --- a/crate_universe/src/cli/vendor.rs +++ b/crate_universe/src/cli/vendor.rs @@ -217,7 +217,7 @@ pub fn vendor(opt: VendorOptions) -> anyhow::Result<()> { // Splice together the manifest let manifest_path = splicer - .splice_workspace() + .splice() .context("Failed to splice workspace")?; // Gather a cargo lockfile diff --git a/crate_universe/src/splicing/splicer.rs b/crate_universe/src/splicing/splicer.rs index 7825ea8ddd..cd5fe8e1e1 100644 --- a/crate_universe/src/splicing/splicer.rs +++ b/crate_universe/src/splicing/splicer.rs @@ -15,29 +15,6 @@ use crate::utils::symlink::{remove_symlink, symlink}; use super::{read_manifest, DirectPackageManifest, WorkspaceMetadata}; -/// The core splicer implementation. Each style of Bazel workspace should be represented -/// here and a splicing implementation defined. -pub(crate) enum SplicerKind<'a> { - /// Splice a manifest which is represented by a Cargo workspace - Workspace { - path: &'a Utf8PathBuf, - manifest: &'a Manifest, - splicing_manifest: &'a SplicingManifest, - }, - /// Splice a manifest for a single package. This includes cases where - /// were defined directly in Bazel. - Package { - path: &'a Utf8PathBuf, - manifest: &'a Manifest, - splicing_manifest: &'a SplicingManifest, - }, - /// Splice a manifest from multiple disjoint Cargo manifests. - MultiPackage { - manifests: &'a BTreeMap, - splicing_manifest: &'a SplicingManifest, - }, -} - /// A list of files or directories to ignore when when symlinking const IGNORE_LIST: &[&str] = &[".git", "bazel-*", ".svn"]; @@ -54,39 +31,81 @@ fn parent_workspace(cargo_toml_path: &Utf8PathBuf) -> Option { None } -impl<'a> SplicerKind<'a> { +/// The core splicer implementation. Each style of Bazel workspace should be represented +/// here and a splicing implementation defined. +pub(crate) enum Splicer { + /// Splice a manifest which is represented by a Cargo workspace + Workspace { + workspace_dir: Utf8PathBuf, + path: Utf8PathBuf, + manifest: Manifest, + splicing_manifest: SplicingManifest, + }, + /// Splice a manifest for a single package. This includes cases where + /// were defined directly in Bazel. + Package { + workspace_dir: Utf8PathBuf, + path: Utf8PathBuf, + manifest: Manifest, + splicing_manifest: SplicingManifest, + }, + /// Splice a manifest from multiple disjoint Cargo manifests. + MultiPackage { + workspace_dir: Utf8PathBuf, + manifests: BTreeMap, + splicing_manifest: SplicingManifest, + }, +} + +impl Splicer { pub(crate) fn new( - manifests: &'a BTreeMap, - splicing_manifest: &'a SplicingManifest, + workspace_dir: Utf8PathBuf, + splicing_manifest: SplicingManifest, ) -> Result { + // Load all manifests (owned) + let manifests: BTreeMap = splicing_manifest + .manifests + .keys() + .map(|path| { + let m = read_manifest(path) + .with_context(|| format!("Failed to read manifest at {}", path))?; + Ok((path.clone(), m)) + }) + .collect::>()?; + + // Identify unique workspace roots among the provided manifests let workspace_roots: HashSet = manifests.keys().filter_map(parent_workspace).collect(); + if workspace_roots.len() > 1 { bail!("When splicing manifests, manifests are not allowed to from from different workspaces. 
Saw manifests which belong to the following workspaces: {}", workspace_roots.iter().map(|wr| wr.to_string()).collect::>().join(", ")); } if let Some((path, manifest)) = workspace_roots - .iter() + .into_iter() .next() - .and_then(|path| manifests.get_key_value(path)) + .and_then(|path| manifests.get_key_value(&path)) { if manifests.len() > 1 { eprintln!("Only the workspace's Cargo.toml is required in the `manifests` attribute; the rest can be removed"); } Ok(Self::Workspace { - path, - manifest, + workspace_dir, + path: path.clone(), + manifest: manifest.clone(), splicing_manifest, }) } else if manifests.len() == 1 { - let (path, manifest) = manifests.iter().last().unwrap(); + let (path, manifest) = manifests.into_iter().last().unwrap(); Ok(Self::Package { + workspace_dir, path, manifest, splicing_manifest, }) } else { Ok(Self::MultiPackage { + workspace_dir, manifests, splicing_manifest, }) @@ -95,19 +114,22 @@ impl<'a> SplicerKind<'a> { /// Performs splicing based on the current variant. #[tracing::instrument(skip_all)] - pub(crate) fn splice(&self, workspace_dir: &Utf8Path) -> Result { + pub(crate) fn splice(&self) -> Result { match self { - SplicerKind::Workspace { + Splicer::Workspace { + workspace_dir, path, manifest, splicing_manifest, } => Self::splice_workspace(workspace_dir, path, manifest, splicing_manifest), - SplicerKind::Package { + Splicer::Package { + workspace_dir, path, manifest, splicing_manifest, } => Self::splice_package(workspace_dir, path, manifest, splicing_manifest), - SplicerKind::MultiPackage { + Splicer::MultiPackage { + workspace_dir, manifests, splicing_manifest, } => Self::splice_multi_package(workspace_dir, manifests, splicing_manifest), @@ -118,9 +140,9 @@ impl<'a> SplicerKind<'a> { #[tracing::instrument(skip_all)] fn splice_workspace( workspace_dir: &Utf8Path, - path: &&Utf8PathBuf, - manifest: &&Manifest, - splicing_manifest: &&SplicingManifest, + path: &Utf8PathBuf, + manifest: &Manifest, + splicing_manifest: &SplicingManifest, ) -> Result { let mut manifest = (*manifest).clone(); let manifest_dir = path @@ -143,7 +165,7 @@ impl<'a> SplicerKind<'a> { } let root_manifest_path = workspace_dir.join("Cargo.toml"); - let member_manifests = BTreeMap::from([(*path, String::new())]); + let member_manifests = BTreeMap::from([(path, String::new())]); // Write the generated metadata to the manifest let workspace_metadata = WorkspaceMetadata::new(splicing_manifest, member_manifests)?; @@ -159,9 +181,9 @@ impl<'a> SplicerKind<'a> { #[tracing::instrument(skip_all)] fn splice_package( workspace_dir: &Utf8Path, - path: &&Utf8PathBuf, - manifest: &&Manifest, - splicing_manifest: &&SplicingManifest, + path: &Utf8PathBuf, + manifest: &Manifest, + splicing_manifest: &SplicingManifest, ) -> Result { let manifest_dir = path .parent() @@ -190,7 +212,7 @@ impl<'a> SplicerKind<'a> { } let root_manifest_path = workspace_dir.join("Cargo.toml"); - let member_manifests = BTreeMap::from([(*path, String::new())]); + let member_manifests = BTreeMap::from([(path, String::new())]); // Write the generated metadata to the manifest let workspace_metadata = WorkspaceMetadata::new(splicing_manifest, member_manifests)?; @@ -206,8 +228,8 @@ impl<'a> SplicerKind<'a> { #[tracing::instrument(skip_all)] fn splice_multi_package( workspace_dir: &Utf8Path, - manifests: &&BTreeMap, - splicing_manifest: &&SplicingManifest, + manifests: &BTreeMap, + splicing_manifest: &SplicingManifest, ) -> Result { let mut manifest = default_cargo_workspace_manifest(&splicing_manifest.resolver_version); @@ -446,44 
+468,6 @@ impl<'a> SplicerKind<'a> { } } -pub(crate) struct Splicer { - workspace_dir: Utf8PathBuf, - manifests: BTreeMap, - splicing_manifest: SplicingManifest, -} - -impl Splicer { - pub(crate) fn new( - workspace_dir: Utf8PathBuf, - splicing_manifest: SplicingManifest, - ) -> Result { - // Load all manifests - let manifests = splicing_manifest - .manifests - .keys() - .map(|path| { - let m = read_manifest(path) - .with_context(|| format!("Failed to read manifest at {}", path))?; - Ok((path.clone(), m)) - }) - .collect::>>()?; - - Ok(Self { - workspace_dir, - manifests, - splicing_manifest, - }) - } - - /// Build a new workspace root - pub(crate) fn splice_workspace(&self) -> Result { - SplicerKind::new(&self.manifests, &self.splicing_manifest)?.splice(&self.workspace_dir) - } - - pub(crate) fn prepare(&self) -> Result { - SplicerKind::new(&self.manifests, &self.splicing_manifest) - } -} const DEFAULT_SPLICING_PACKAGE_NAME: &str = "direct-cargo-bazel-deps"; const DEFAULT_SPLICING_PACKAGE_VERSION: &str = "0.0.1"; @@ -953,7 +937,7 @@ mod test { let workspace_manifest = Splicer::new(tempdir_utf8pathbuf(&workspace_root), splicing_manifest) .unwrap() - .splice_workspace() + .splice() .unwrap(); // Locate cargo @@ -996,7 +980,7 @@ mod test { let workspace_manifest = Splicer::new(tempdir_utf8pathbuf(&workspace_root), splicing_manifest) .unwrap() - .splice_workspace() + .splice() .unwrap(); // Locate cargo @@ -1086,7 +1070,7 @@ mod test { let workspace_manifest = Splicer::new(tempdir_utf8pathbuf(&workspace_root), splicing_manifest) .unwrap() - .splice_workspace(); + .splice(); assert!(workspace_manifest.is_err()); @@ -1123,7 +1107,7 @@ mod test { let workspace_manifest = Splicer::new(tempdir_utf8pathbuf(&workspace_root), splicing_manifest) .unwrap() - .splice_workspace() + .splice() .unwrap(); let metadata = generate_metadata(workspace_manifest.as_path_buf()); @@ -1148,7 +1132,7 @@ mod test { let workspace_manifest = Splicer::new(tempdir_utf8pathbuf(&workspace_root), splicing_manifest) .unwrap() - .splice_workspace() + .splice() .unwrap(); // Locate cargo @@ -1185,7 +1169,7 @@ mod test { let workspace_manifest = Splicer::new(tempdir_utf8pathbuf(&workspace_root), splicing_manifest) .unwrap() - .splice_workspace() + .splice() .unwrap(); // Check the default resolver version @@ -1235,7 +1219,7 @@ mod test { let workspace_manifest = Splicer::new(tempdir_utf8pathbuf(&workspace_root), splicing_manifest) .unwrap() - .splice_workspace() + .splice() .unwrap(); // Check the specified resolver version @@ -1295,7 +1279,7 @@ mod test { let workspace_manifest = Splicer::new(tempdir_utf8pathbuf(&workspace_root), splicing_manifest) .unwrap() - .splice_workspace() + .splice() .unwrap(); // Check the default resolver version @@ -1337,7 +1321,7 @@ mod test { let workspace_manifest = Splicer::new(tempdir_utf8pathbuf(&workspace_root), splicing_manifest) .unwrap() - .splice_workspace() + .splice() .unwrap(); // Ensure the patches match the expected value @@ -1400,7 +1384,7 @@ mod test { let workspace_manifest = Splicer::new(tempdir_utf8pathbuf(&workspace_root), splicing_manifest) .unwrap() - .splice_workspace() + .splice() .unwrap(); // Ensure the patches match the expected value @@ -1451,7 +1435,7 @@ mod test { let workspace_manifest = Splicer::new(tempdir_utf8pathbuf(&workspace_root), splicing_manifest) .unwrap() - .splice_workspace() + .splice() .unwrap(); // Ensure the patches match the expected value @@ -1493,7 +1477,7 @@ mod test { let workspace_root = tempfile::tempdir().unwrap(); let result = 
Splicer::new(tempdir_utf8pathbuf(&workspace_root), splicing_manifest) .unwrap() - .splice_workspace(); + .splice(); // Confirm conflicting patches have been detected assert!(result.is_err()); @@ -1515,7 +1499,7 @@ mod test { let workspace_root = tempfile::tempdir().unwrap(); Splicer::new(tempdir_utf8pathbuf(&workspace_root), splicing_manifest) .unwrap() - .splice_workspace() + .splice() .unwrap(); let cargo_config = workspace_root.as_ref().join(".cargo").join("config.toml"); @@ -1548,7 +1532,7 @@ mod test { let workspace_root = tempfile::tempdir().unwrap(); Splicer::new(tempdir_utf8pathbuf(&workspace_root), splicing_manifest) .unwrap() - .splice_workspace() + .splice() .unwrap(); let cargo_config = workspace_root.as_ref().join(".cargo").join("config.toml"); @@ -1575,7 +1559,7 @@ mod test { let workspace_root = tempdir_utf8pathbuf(&temp_dir).join("workspace_root"); let splicing_result = Splicer::new(workspace_root.clone(), splicing_manifest) .unwrap() - .splice_workspace(); + .splice(); // Ensure cargo config files in parent directories lead to errors assert!(splicing_result.is_err());
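Taken together, the two patches leave a single construct-then-splice flow, with the workspace variant also feeding the `--no-deps` metadata query from PATCH 1/2. A condensed sketch of the resulting call sites, using the crate's own `Splicer`, `SplicingManifest` and `SplicedManifest` types from the hunks above (the wrapper function and its error handling are illustrative):

    use camino::Utf8PathBuf;

    fn splice_and_report(
        splicing_dir: Utf8PathBuf,
        splicing_manifest: SplicingManifest,
    ) -> anyhow::Result<()> {
        // Construction now reads every manifest and classifies the splice in one step.
        let splicer = Splicer::new(splicing_dir, splicing_manifest)?;
        // `splice()` no longer takes the workspace directory; the enum owns it.
        let spliced_manifest = splicer.splice()?;
        // Only the workspace variant contributes extra manifests to track; single-
        // and multi-package splices have nothing beyond what the splicing manifest
        // already lists.
        if let Splicer::Workspace { path, .. } = &splicer {
            eprintln!("workspace root manifest: {path}");
        }
        eprintln!("spliced manifest: {}", spliced_manifest.as_path_buf());
        Ok(())
    }

This mirrors what splice.rs and vendor.rs do after the rename from `splice_workspace()` to `splice()`.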