
Commit 5ca1add

Use context instead of with_context
1 parent b66cad8 commit 5ca1add
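
Both helpers come from anyhow's `Context` trait, which cargo uses for error handling: `context` takes the message by value, while `with_context` takes a closure that only runs on the error path. For the `&'static str` messages touched in this commit there is nothing to build lazily, so the closure adds noise without saving work; `with_context` still earns its keep when the message must be formatted. A minimal sketch of the distinction (`read_config` and `parse` are hypothetical names, not cargo functions):

    use anyhow::{Context, Result};
    use std::path::Path;

    fn read_config(path: &Path) -> Result<String> {
        // Static message: pass the &str directly, no closure required.
        let contents = std::fs::read_to_string(path).context("failed to read config")?;

        // Dynamic message: defer the `format!` allocation until a failure
        // actually happens.
        parse(&contents).with_context(|| format!("invalid config in `{}`", path.display()))?;

        Ok(contents)
    }

    // Hypothetical stand-in for real validation.
    fn parse(_s: &str) -> Result<()> {
        Ok(())
    }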

18 files changed (+49, -50 lines)


src/cargo/core/compiler/build_runner/mod.rs

Lines changed: 4 additions & 4 deletions
@@ -100,8 +100,8 @@ impl<'a, 'gctx> BuildRunner<'a, 'gctx> {
         let jobserver = match bcx.gctx.jobserver_from_env() {
             Some(c) => c.clone(),
             None => {
-                let client = Client::new(bcx.jobs() as usize)
-                    .with_context(|| "failed to create jobserver")?;
+                let client =
+                    Client::new(bcx.jobs() as usize).context("failed to create jobserver")?;
                 client.acquire_raw()?;
                 client
             }
@@ -354,11 +354,11 @@ impl<'a, 'gctx> BuildRunner<'a, 'gctx> {
             .unwrap()
             .host
             .prepare()
-            .with_context(|| "couldn't prepare build directories")?;
+            .context("couldn't prepare build directories")?;
         for target in self.files.as_mut().unwrap().target.values_mut() {
             target
                 .prepare()
-                .with_context(|| "couldn't prepare build directories")?;
+                .context("couldn't prepare build directories")?;
         }

         let files = self.files.as_ref().unwrap();

src/cargo/core/compiler/custom_build.rs

Lines changed: 1 addition & 1 deletion
@@ -431,7 +431,7 @@ fn build_work(build_runner: &mut BuildRunner<'_, '_>, unit: &Unit) -> CargoResul
     // If we have an old build directory, then just move it into place,
     // otherwise create it!
     paths::create_dir_all(&script_out_dir)
-        .with_context(|| "failed to create script output directory for build command")?;
+        .context("failed to create script output directory for build command")?;

     // For all our native lib dependencies, pick up their metadata to pass
     // along to this custom build command. We're also careful to augment our

src/cargo/core/compiler/future_incompat.rs

Lines changed: 2 additions & 2 deletions
@@ -211,9 +211,9 @@ impl OnDiskReports {
         report_file
             .file()
             .read_to_string(&mut file_contents)
-            .with_context(|| "failed to read report")?;
+            .context("failed to read report")?;
         let on_disk_reports: OnDiskReports =
-            serde_json::from_str(&file_contents).with_context(|| "failed to load report")?;
+            serde_json::from_str(&file_contents).context("failed to load report")?;
         if on_disk_reports.version != ON_DISK_VERSION {
             bail!("unable to read reports; reports were saved from a future version of Cargo");
         }

src/cargo/core/compiler/job_queue/mod.rs

Lines changed: 2 additions & 2 deletions
@@ -513,7 +513,7 @@ impl<'gctx> JobQueue<'gctx> {
             .into_helper_thread(move |token| {
                 messages.push(Message::Token(token));
             })
-            .with_context(|| "failed to create helper thread for jobserver management")?;
+            .context("failed to create helper thread for jobserver management")?;

         // Create a helper thread to manage the diagnostics for rustfix if
         // necessary.
@@ -700,7 +700,7 @@ impl<'gctx> DrainState<'gctx> {
                     .push(FutureIncompatReportPackage { package_id, items });
             }
             Message::Token(acquired_token) => {
-                let token = acquired_token.with_context(|| "failed to acquire jobserver token")?;
+                let token = acquired_token.context("failed to acquire jobserver token")?;
                 self.tokens.push(token);
             }
         }

src/cargo/core/compiler/timings.rs

Lines changed: 1 addition & 1 deletion
@@ -299,7 +299,7 @@ impl<'gctx> Timings<'gctx> {
             .sort_unstable_by(|a, b| a.start.partial_cmp(&b.start).unwrap());
         if self.report_html {
             self.report_html(build_runner, error)
-                .with_context(|| "failed to save timing report")?;
+                .context("failed to save timing report")?;
         }
         Ok(())
     }

src/cargo/core/global_cache_tracker.rs

Lines changed: 2 additions & 2 deletions
@@ -543,7 +543,7 @@ impl GlobalCacheTracker {
     /// Deletes files from the global cache based on the given options.
     pub fn clean(&mut self, clean_ctx: &mut CleanContext<'_>, gc_opts: &GcOpts) -> CargoResult<()> {
         self.clean_inner(clean_ctx, gc_opts)
-            .with_context(|| "failed to clean entries from the global cache")
+            .context("failed to clean entries from the global cache")
     }

     #[tracing::instrument(skip_all)]
@@ -575,7 +575,7 @@ impl GlobalCacheTracker {
                 gc_opts.is_download_cache_size_set(),
                 &mut delete_paths,
             )
-            .with_context(|| "failed to sync tracking database")?
+            .context("failed to sync tracking database")?
         }
         if let Some(max_age) = gc_opts.max_index_age {
             let max_age = now - max_age.as_secs();

src/cargo/core/package.rs

Lines changed: 4 additions & 4 deletions
@@ -393,7 +393,7 @@ impl<'gctx> PackageSet<'gctx> {
         let multiplexing = gctx.http_config()?.multiplexing.unwrap_or(true);
         multi
             .pipelining(false, multiplexing)
-            .with_context(|| "failed to enable multiplexing/pipelining in curl")?;
+            .context("failed to enable multiplexing/pipelining in curl")?;

         // let's not flood crates.io with connections
         multi.set_max_host_connections(2)?;
@@ -681,7 +681,7 @@ impl<'a, 'gctx> Downloads<'a, 'gctx> {
             .ok_or_else(|| internal(format!("couldn't find source for `{}`", id)))?;
         let pkg = source
             .download(id)
-            .with_context(|| "unable to get packages from source")?;
+            .context("unable to get packages from source")?;
         let (url, descriptor, authorization) = match pkg {
             MaybePackage::Ready(pkg) => {
                 debug!("{} doesn't need a download", id);
@@ -951,7 +951,7 @@ impl<'a, 'gctx> Downloads<'a, 'gctx> {
             self.set
                 .multi
                 .perform()
-                .with_context(|| "failed to perform http requests")
+                .context("failed to perform http requests")
         })?;
         debug!(target: "network", "handles remaining: {}", n);
         let results = &mut self.results;
@@ -981,7 +981,7 @@ impl<'a, 'gctx> Downloads<'a, 'gctx> {
             self.set
                 .multi
                 .wait(&mut [], timeout)
-                .with_context(|| "failed to wait on curl `Multi`")?;
+                .context("failed to wait on curl `Multi`")?;
         }
     }
 }

src/cargo/ops/cargo_package.rs

Lines changed: 3 additions & 3 deletions
@@ -147,13 +147,13 @@ fn create_package(
         .status("Packaging", pkg.package_id().to_string())?;
     dst.file().set_len(0)?;
     let uncompressed_size = tar(ws, pkg, local_reg, ar_files, dst.file(), &filename)
-        .with_context(|| "failed to prepare local package for uploading")?;
+        .context("failed to prepare local package for uploading")?;

     dst.seek(SeekFrom::Start(0))?;
     let src_path = dst.path();
     let dst_path = dst.parent().join(&filename);
     fs::rename(&src_path, &dst_path)
-        .with_context(|| "failed to move temporary tarball into final location")?;
+        .context("failed to move temporary tarball into final location")?;

     let dst_metadata = dst
         .file()
@@ -331,7 +331,7 @@ pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult<Vec<Fi
     if opts.verify {
         for (pkg, opts, tarball) in &outputs {
             run_verify(ws, pkg, tarball, local_reg.as_ref(), opts)
-                .with_context(|| "failed to verify package tarball")?
+                .context("failed to verify package tarball")?
         }
     }

src/cargo/ops/common_for_install_and_uninstall.rs

Lines changed: 2 additions & 3 deletions
@@ -111,7 +111,7 @@ impl InstallTracker {
             if contents.is_empty() {
                 Ok(CrateListingV1::default())
             } else {
-                Ok(toml::from_str(&contents).with_context(|| "invalid TOML found for metadata")?)
+                Ok(toml::from_str(&contents).context("invalid TOML found for metadata")?)
             }
         })()
         .with_context(|| {
@@ -127,8 +127,7 @@ impl InstallTracker {
             let mut v2 = if contents.is_empty() {
                 CrateListingV2::default()
             } else {
-                serde_json::from_str(&contents)
-                    .with_context(|| "invalid JSON found for metadata")?
+                serde_json::from_str(&contents).context("invalid JSON found for metadata")?
             };
             v2.sync_v1(&v1);
             Ok(v2)
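
Note the `.with_context(|| {` left untouched at the end of the first hunk above: its closure body is elided in this diff, but the usual reason to keep the closure form is a formatted message, where deferring the `format!` keeps the allocation off the success path. A short sketch of the pattern the commit preserves (`load_tracker` and `tracker_path` are hypothetical names, not cargo's):

    use anyhow::{Context, Result};
    use std::path::Path;

    fn load_tracker(tracker_path: &Path) -> Result<String> {
        // The message needs `format!`, so the closure genuinely defers work.
        std::fs::read_to_string(tracker_path)
            .with_context(|| format!("failed to read `{}`", tracker_path.display()))
    }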

src/cargo/ops/vendor.rs

Lines changed: 6 additions & 6 deletions
@@ -32,7 +32,7 @@ pub fn vendor(ws: &Workspace<'_>, opts: &VendorOptions<'_>) -> CargoResult<()> {
     }
     let workspaces = extra_workspaces.iter().chain(Some(ws)).collect::<Vec<_>>();
     let _lock = gctx.acquire_package_cache_lock(CacheLockMode::MutateExclusive)?;
-    let vendor_config = sync(gctx, &workspaces, opts).with_context(|| "failed to sync")?;
+    let vendor_config = sync(gctx, &workspaces, opts).context("failed to sync")?;

     if gctx.shell().verbosity() != Verbosity::Quiet {
         if vendor_config.source.is_empty() {
@@ -113,11 +113,11 @@ fn sync(
     // crate to work with.
     for ws in workspaces {
         let (packages, resolve) =
-            ops::resolve_ws(ws, dry_run).with_context(|| "failed to load pkg lockfile")?;
+            ops::resolve_ws(ws, dry_run).context("failed to load pkg lockfile")?;

         packages
             .get_many(resolve.iter())
-            .with_context(|| "failed to download packages")?;
+            .context("failed to download packages")?;

         for pkg in resolve.iter() {
             // Don't delete actual source code!
@@ -145,11 +145,11 @@ fn sync(
     // tables about them.
     for ws in workspaces {
         let (packages, resolve) =
-            ops::resolve_ws(ws, dry_run).with_context(|| "failed to load pkg lockfile")?;
+            ops::resolve_ws(ws, dry_run).context("failed to load pkg lockfile")?;

         packages
             .get_many(resolve.iter())
-            .with_context(|| "failed to download packages")?;
+            .context("failed to download packages")?;

         for pkg in resolve.iter() {
             // No need to vendor path crates since they're already in the
@@ -161,7 +161,7 @@ fn sync(
             pkg,
             packages
                 .get_one(pkg)
-                .with_context(|| "failed to fetch package")?
+                .context("failed to fetch package")?
                 .clone(),
         );
