Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

188 changes: 124 additions & 64 deletions cargo-nextest/src/dispatch.rs
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,9 @@ use nextest_runner::{
show_config::{ShowNextestVersion, ShowTestGroupSettings, ShowTestGroups, ShowTestGroupsMode},
signal::SignalHandlerKind,
target_runner::{PlatformRunner, TargetRunner},
test_filter::{FilterBound, RunIgnored, TestFilterBuilder, TestFilterPatterns},
test_filter::{
FilterBinaryMatch, FilterBound, RunIgnored, TestFilterBuilder, TestFilterPatterns,
},
test_output::CaptureStrategy,
write_str::WriteStr,
};
Expand Down Expand Up @@ -188,6 +190,7 @@ impl AppOpts {
cargo_options,
archive_file,
archive_format,
build_filter,
zstd_level,
} => {
let app = BaseApp::new(
Expand All @@ -198,6 +201,7 @@ impl AppOpts {
self.common.manifest_path,
output_writer,
)?;
let app = App::new(app, build_filter)?;
app.exec_archive(&archive_file, archive_format, zstd_level, output_writer)?;
Ok(0)
}
Expand Down Expand Up @@ -390,6 +394,9 @@ enum Command {
)]
archive_format: ArchiveFormatOpt,

#[clap(flatten)]
build_filter: TestBuildFilter,

/// Zstandard compression level (-7 to 22, higher is more compressed + slower)
#[arg(
long,
Expand Down Expand Up @@ -1519,69 +1526,6 @@ impl BaseApp {
})
}

/// Writes a test archive containing every built test binary to `output_file`.
///
/// Fails fast on an unrecognized archive format before doing any build work.
fn exec_archive(
    &self,
    output_file: &Utf8Path,
    format: ArchiveFormatOpt,
    zstd_level: i32,
    output_writer: &mut OutputWriter,
) -> Result<()> {
    // Do format detection first so we fail immediately.
    let format = format.to_archive_format(output_file)?;

    let binary_list = self.build_binary_list()?;
    let path_mapper = PathMapper::noop();
    let build_platforms = binary_list.rust_build_meta.build_platforms.clone();

    let pcx = ParseContext::new(self.graph());
    let (_, config) = self.load_config(&pcx)?;
    let profile = self
        .load_profile(&config)?
        .apply_build_platforms(&build_platforms);

    // Redact paths in output when requested (used by snapshot tests).
    let redactor = match should_redact() {
        true => Redactor::build_active(&binary_list.rust_build_meta)
            .with_path(output_file.to_path_buf(), "<archive-file>".to_owned())
            .build(),
        false => Redactor::noop(),
    };

    let mut reporter = ArchiveReporter::new(self.output.verbose, redactor.clone());
    let colorize = self
        .output
        .color
        .should_colorize(supports_color::Stream::Stderr);
    if colorize {
        reporter.colorize();
    }

    let mut writer = output_writer.stderr_writer();
    archive_to_file(
        profile,
        &binary_list,
        &self.cargo_metadata_json,
        &self.package_graph,
        // Note that path_mapper is currently a no-op -- we don't support reusing builds for
        // archive creation because it's too confusing.
        &path_mapper,
        format,
        zstd_level,
        output_file,
        |event| {
            reporter.report_event(event, &mut writer)?;
            writer.flush()
        },
        redactor.clone(),
    )
    .map_err(|err| ExpectedError::ArchiveCreateError {
        archive_file: output_file.to_owned(),
        err,
        redactor,
    })?;

    Ok(())
}

fn build_binary_list(&self) -> Result<Arc<BinaryList>> {
let binary_list = match self.reuse_build.binaries_metadata() {
Some(m) => m.binary_list.clone(),
Expand Down Expand Up @@ -1758,6 +1702,122 @@ impl App {
Ok(())
}

fn exec_archive(
&self,
output_file: &Utf8Path,
format: ArchiveFormatOpt,
zstd_level: i32,
output_writer: &mut OutputWriter,
) -> Result<()> {
// Do format detection first so we fail immediately.
let format = format.to_archive_format(output_file)?;
let binary_list = self.base.build_binary_list()?;
let path_mapper = PathMapper::noop();
let build_platforms = &binary_list.rust_build_meta.build_platforms;
let pcx = ParseContext::new(self.base.graph());
let (_, config) = self.base.load_config(&pcx)?;
let profile = self
.base
.load_profile(&config)?
.apply_build_platforms(build_platforms);
let redactor = if should_redact() {
Redactor::build_active(&binary_list.rust_build_meta)
.with_path(output_file.to_path_buf(), "<archive-file>".to_owned())
.build()
} else {
Redactor::noop()
};
let mut reporter = ArchiveReporter::new(self.base.output.verbose, redactor.clone());

if self
.base
.output
.color
.should_colorize(supports_color::Stream::Stderr)
{
reporter.colorize();
}

let rust_build_meta = binary_list.rust_build_meta.map_paths(&path_mapper);
let test_artifacts = RustTestArtifact::from_binary_list(
self.base.graph(),
binary_list.clone(),
&rust_build_meta,
&path_mapper,
self.build_filter.platform_filter.into(),
)?;

let filter_bound = {
if self.build_filter.ignore_default_filter {
FilterBound::All
} else {
FilterBound::DefaultSet
}
};

let filter_exprs = self.build_filtering_expressions(&pcx)?;
let test_filter_builder = self.build_filter.make_test_filter_builder(filter_exprs)?;
let ecx = profile.filterset_ecx();

// Apply filterset to `RustTestArtifact` list.
let test_artifacts: BTreeSet<_> = test_artifacts
.iter()
.filter(|test_artifact| {
matches!(
test_filter_builder.filter_binary_match(test_artifact, &ecx, filter_bound),
FilterBinaryMatch::Definite | FilterBinaryMatch::Possible
Comment on lines +1767 to +1768
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Rather than this I'd just reject test predicates entirely, returning a FiltersetParseErrors that contains all the errors (so you get a nice display). You could do it either at parsing time or as a pass afterwards (have a check_no_test_predicates method or similar).

)
})
.map(|test_artifact| &test_artifact.binary_id)
.collect();

let filtered_binaries: Vec<_> = binary_list
.rust_binaries
.iter()
.filter(|binary| test_artifacts.contains(&binary.id))
.cloned()
.collect();

if filtered_binaries.len() < binary_list.rust_binaries.len() {
info!(
"archiving {} of {} test binaries after filtering",
filtered_binaries.len(),
binary_list.rust_binaries.len()
);
}

let binary_list_to_archive = BinaryList {
rust_build_meta: binary_list.rust_build_meta.clone(),
rust_binaries: filtered_binaries,
};

let mut writer = output_writer.stderr_writer();
archive_to_file(
profile,
&binary_list_to_archive,
&self.base.cargo_metadata_json,
&self.base.package_graph,
// Note that path_mapper is currently a no-op -- we don't support reusing builds for
// archive creation because it's too confusing.
&path_mapper,
format,
zstd_level,
output_file,
|event| {
reporter.report_event(event, &mut writer)?;
writer.flush()
},
redactor.clone(),
)
.map_err(|err| ExpectedError::ArchiveCreateError {
archive_file: output_file.to_owned(),
err,
redactor,
})?;

Ok(())
}

fn exec_show_test_groups(
&self,
show_default: bool,
Expand Down
2 changes: 2 additions & 0 deletions integration-tests/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,8 @@ serde_json.workspace = true
sha2.workspace = true
shell-words.workspace = true
whoami.workspace = true
zstd.workspace = true
tar.workspace = true

[dev-dependencies]
camino-tempfile-ext.workspace = true
Expand Down
69 changes: 54 additions & 15 deletions integration-tests/tests/integration/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -842,6 +842,29 @@ archive.include = [
.expect_err("archive should have failed");
}

/// Checks that `cargo nextest archive` combined with `-E` build filters
/// prunes non-matching test binaries from the resulting archive.
///
/// NOTE(review): the expected file counts are brittle and vary per platform;
/// a more robust check would scan the archive for the expected binary names
/// and assert the others are absent -- TODO rewrite once counts stabilize.
#[test]
fn test_archive_with_build_filters() {
    set_env_vars();

    let filters = ["all()", "none()", "package(cdylib-example)"];
    let expected_file_counts = [27, 11, 12];

    for (&expected_file_count, filter) in expected_file_counts.iter().zip(filters) {
        let (_p1, archive_file) =
            create_archive_with_args("", false, "archive_build_filter", &["-E", filter], false)
                .expect("archive succeeded");
        // Decompress the zstd stream and walk the tar entries to count them.
        let file = File::open(archive_file).unwrap();
        let decoder = zstd::stream::read::Decoder::new(file).unwrap();
        let mut archive = tar::Archive::new(decoder);
        let paths = archive
            .entries()
            .unwrap()
            .map(|e| e.unwrap().path().unwrap().into_owned())
            .collect::<Vec<_>>();
        assert_eq!(paths.len(), expected_file_count);
    }
}

const APP_DATA_DIR: &str = "application-data";
// The default limit is 16, so anything at depth 17 (under d16) is excluded.
const DIR_TREE: &str = "application-data/d1/d2/d3/d4/d5/d6/d7/d8/d9/d10/d11/d12/d13/d14/d15/d16";
Expand All @@ -860,6 +883,16 @@ fn create_archive(
config_contents: &str,
make_uds: bool,
snapshot_name: &str,
) -> Result<(TempProject, Utf8PathBuf), CargoNextestOutput> {
create_archive_with_args(config_contents, make_uds, snapshot_name, &[], true)
}

fn create_archive_with_args(
config_contents: &str,
make_uds: bool,
snapshot_name: &str,
extra_args: &[&str],
check_output: bool,
) -> Result<(TempProject, Utf8PathBuf), CargoNextestOutput> {
let custom_target_dir = Utf8TempDir::new().unwrap();
let custom_target_path = custom_target_dir.path().join("target");
Expand Down Expand Up @@ -894,22 +927,26 @@ fn create_archive(

let archive_file = p.temp_root().join("my-archive.tar.zst");

let manifest_path = p.manifest_path();
let mut cli_args = vec![
"--manifest-path",
manifest_path.as_str(),
"archive",
"--archive-file",
archive_file.as_str(),
"--workspace",
"--target-dir",
p.target_dir().as_str(),
"--all-targets",
// Make cargo fully quiet since we're testing just nextest output below.
"--cargo-quiet",
"--cargo-quiet",
];
cli_args.extend(extra_args);

// Write the archive to the archive_file above.
let output = CargoNextestCli::for_test()
.args([
"--manifest-path",
p.manifest_path().as_str(),
"archive",
"--archive-file",
archive_file.as_str(),
"--workspace",
"--target-dir",
p.target_dir().as_str(),
"--all-targets",
// Make cargo fully quiet since we're testing just nextest output below.
"--cargo-quiet",
"--cargo-quiet",
])
.args(cli_args)
.env("__NEXTEST_REDACT", "1")
// Used for linked path testing. See comment in
// binary_list.rs:detect_linked_path.
Expand All @@ -923,7 +960,9 @@ fn create_archive(
UdsStatus::NotCreated => format!("{snapshot_name}_without_uds"),
};

insta::assert_snapshot!(snapshot_name, output.stderr_as_str());
if check_output {
insta::assert_snapshot!(snapshot_name, output.stderr_as_str());
}

// Remove the old source and target directories to ensure that any tests that refer to files within
// it fail.
Expand Down
Loading