
Commit 4c280e2

Fix clippy lints (#270)
1 parent 8894556 commit 4c280e2

5 files changed: 56 additions & 68 deletions

src/github.rs

Lines changed: 3 additions & 4 deletions

@@ -120,7 +120,7 @@ pub async fn command_fetch_release_distributions(args: &ArgMatches) -> Result<()
 | ".github/workflows/linux.yml"
 | ".github/workflows/windows.yml"
 ) {
-workflow_names.insert(wf.id.clone(), wf.name);
+workflow_names.insert(wf.id, wf.name);

 Some(wf.id)
 } else {
@@ -263,7 +263,7 @@ pub async fn command_fetch_release_distributions(args: &ArgMatches) -> Result<()
 .to_string_lossy()
 );

-let dest_path = produce_install_only(&path)?;
+let dest_path = produce_install_only(path)?;

 println!(
 "releasing {}",
@@ -302,8 +302,7 @@ pub async fn command_upload_release_distributions(args: &ArgMatches) -> Result<(
 .expect("repo should be specified");
 let dry_run = args.get_flag("dry_run");

-let mut filenames = std::fs::read_dir(&dist_dir)?
-.into_iter()
+let mut filenames = std::fs::read_dir(dist_dir)?
 .map(|x| {
 let path = x?.path();
 let filename = path
src/json.rs

Lines changed: 1 addition & 1 deletion

@@ -108,7 +108,7 @@ impl PythonJsonMain {
 }

 pub fn parse_python_json(json_data: &[u8]) -> Result<PythonJsonMain> {
-let v: PythonJsonMain = serde_json::from_slice(&json_data)?;
+let v: PythonJsonMain = serde_json::from_slice(json_data)?;

 Ok(v)
 }
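
Same needless-borrow idea here: json_data is already &[u8] and serde_json::from_slice takes a byte slice, so the extra & only adds a second layer of reference. A self-contained sketch, assuming serde (with the derive feature) and serde_json are available; Payload is a placeholder struct, not the crate's PythonJsonMain.

    use serde::Deserialize;

    // Placeholder for whatever structure the JSON describes.
    #[derive(Deserialize, Debug)]
    struct Payload {
        python_major_minor_version: String,
    }

    fn parse(json_data: &[u8]) -> serde_json::Result<Payload> {
        // `serde_json::from_slice(&json_data)` also compiles via auto-deref,
        // but the borrow is redundant; pass the slice through as-is.
        serde_json::from_slice(json_data)
    }

    fn main() {
        let payload = parse(br#"{"python_major_minor_version": "3.10"}"#).unwrap();
        println!("{payload:?}");
    }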

src/macho.rs

Lines changed: 4 additions & 10 deletions

@@ -288,11 +288,8 @@ impl TbdMetadata {
 let stripped = symbols
 .iter()
 .filter_map(|x| {
-if let Some(stripped) = x.strip_prefix("R8289209$") {
-Some(stripped.to_string())
-} else {
-None
-}
+x.strip_prefix("R8289209$")
+.map(|stripped| stripped.to_string())
 })
 .collect::<Vec<_>>();

@@ -307,7 +304,7 @@ impl TbdMetadata {

 for (target, paths) in self.re_export_paths.iter_mut() {
 for path in paths.iter() {
-let tbd_path = root_path.join(tbd_relative_path(&path)?);
+let tbd_path = root_path.join(tbd_relative_path(path)?);
 let tbd_info = TbdMetadata::from_path(&tbd_path)?;

 if let Some(symbols) = tbd_info.symbols.get(target) {
@@ -409,10 +406,7 @@ impl IndexedSdks {

 let empty = BTreeSet::new();

-let target_symbols = tbd_info
-.symbols
-.get(&symbol_target.to_string())
-.unwrap_or(&empty);
+let target_symbols = tbd_info.symbols.get(symbol_target).unwrap_or(&empty);

 for (symbol, paths) in &symbols.symbols {
 if !target_symbols.contains(symbol) {
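
Two patterns show up in macho.rs: an if let Some(..) { Some(..) } else { None } that collapses into a single .map() (clippy's manual-map lint), and a String-keyed map being queried with an allocated .to_string() where a plain &str lookup works, because String implements Borrow<str>. A standalone sketch of both follows; the symbol prefix is copied from the diff, the map contents are invented.

    use std::collections::BTreeMap;

    fn main() {
        let symbols = vec!["R8289209$_open".to_string(), "_close".to_string()];

        // The whole if/else around strip_prefix collapses into Option::map.
        let stripped: Vec<String> = symbols
            .iter()
            .filter_map(|x| x.strip_prefix("R8289209$").map(|s| s.to_string()))
            .collect();
        assert_eq!(stripped, vec!["_open".to_string()]);

        // A String-keyed map can be queried with a &str; no need to allocate
        // with `.to_string()` just to call `.get()`.
        let mut per_target: BTreeMap<String, usize> = BTreeMap::new();
        per_target.insert("x86_64-macos".to_string(), 1);

        let target: &str = "x86_64-macos";
        // Before: per_target.get(&target.to_string())
        let count = per_target.get(target).copied().unwrap_or(0);
        println!("{count} symbol set(s) for {target}");
    }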

src/release.rs

Lines changed: 2 additions & 3 deletions

@@ -241,8 +241,7 @@ pub fn convert_to_install_only<W: Write>(reader: impl BufRead, writer: W) -> Res
 // increases the size of the archive and isn't needed in most cases.
 if path_bytes
 .windows(b"/libpython".len())
-.position(|x| x == b"/libpython")
-.is_some()
+.any(|x| x == b"/libpython")
 && path_bytes.ends_with(b".a")
 {
 continue;
@@ -303,7 +302,7 @@ pub fn produce_install_only(tar_zst_path: &Path) -> Result<PathBuf> {
 let install_only_name = install_only_name.replace(".tar.zst", ".tar.gz");

 let dest_path = tar_zst_path.with_file_name(install_only_name);
-std::fs::write(&dest_path, &gz_data)?;
+std::fs::write(&dest_path, gz_data)?;

 Ok(dest_path)
 }
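
The first hunk is clippy's search-is-some lint: asking position(..).is_some() over byte windows is just a membership test, which Iterator::any expresses directly. A standalone sketch of the same substring-of-bytes check; the example path is made up.

    fn main() {
        // A hypothetical archive member path, as raw bytes.
        let path_bytes: &[u8] = b"python/install/lib/libpython3.10.a";
        let needle = b"/libpython";

        // Before: .windows(needle.len()).position(|x| x == needle).is_some()
        let mentions_libpython = path_bytes.windows(needle.len()).any(|x| x == needle);
        let is_static_lib = mentions_libpython && path_bytes.ends_with(b".a");

        assert!(is_static_lib);
        println!("static libpython member: {is_static_lib}");
    }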

src/validation.rs

Lines changed: 46 additions & 50 deletions

@@ -792,7 +792,7 @@ impl ValidationContext {
 }
 }

-fn validate_elf<'data, Elf: FileHeader<Endian = Endianness>>(
+fn validate_elf<Elf: FileHeader<Endian = Endianness>>(
 context: &mut ValidationContext,
 json: &PythonJsonMain,
 target_triple: &str,
@@ -985,20 +985,18 @@ fn validate_elf<'data, Elf: FileHeader<Endian = Endianness>>(
 if let Some(version) = version_version {
 let parts: Vec<&str> = version.splitn(2, '_').collect();

-if parts.len() == 2 {
-if parts[0] == "GLIBC" {
-let v = version_compare::Version::from(parts[1])
-.expect("unable to parse version");
+if parts.len() == 2 && parts[0] == "GLIBC" {
+let v = version_compare::Version::from(parts[1])
+.expect("unable to parse version");

-if &v > wanted_glibc_max_version {
-context.errors.push(format!(
-"{} references too new glibc symbol {:?} ({} > {})",
-path.display(),
-name,
-v,
-wanted_glibc_max_version,
-));
-}
+if &v > wanted_glibc_max_version {
+context.errors.push(format!(
+"{} references too new glibc symbol {:?} ({} > {})",
+path.display(),
+name,
+v,
+wanted_glibc_max_version,
+));
 }
 }
 }
@@ -1026,12 +1024,12 @@ fn validate_elf<'data, Elf: FileHeader<Endian = Endianness>>(
 if let Some(filename) = path.file_name() {
 let filename = filename.to_string_lossy();

-if filename.starts_with("libpython") && filename.ends_with(".so.1.0") {
-if matches!(symbol.st_bind(), STB_GLOBAL | STB_WEAK)
-&& symbol.st_visibility() == STV_DEFAULT
-{
-context.libpython_exported_symbols.insert(name.to_string());
-}
+if filename.starts_with("libpython")
+&& filename.ends_with(".so.1.0")
+&& matches!(symbol.st_bind(), STB_GLOBAL | STB_WEAK)
+&& symbol.st_visibility() == STV_DEFAULT
+{
+context.libpython_exported_symbols.insert(name.to_string());
 }
 }
 }
@@ -1058,6 +1056,7 @@ fn parse_version_nibbles(v: u32) -> semver::Version {
 semver::Version::new(major as _, minor as _, patch as _)
 }

+#[allow(clippy::too_many_arguments)]
 fn validate_macho<Mach: MachHeader<Endian = Endianness>>(
 context: &mut ValidationContext,
 target_triple: &str,
@@ -1125,7 +1124,7 @@ fn validate_macho<Mach: MachHeader<Endian = Endianness>>(
 target_version = Some(parse_version_nibbles(v.version.get(endian)));
 }
 LoadCommandVariant::Dylib(command) => {
-let raw_string = load_command.string(endian, command.dylib.name.clone())?;
+let raw_string = load_command.string(endian, command.dylib.name)?;
 let lib = String::from_utf8(raw_string.to_vec())?;

 dylib_names.push(lib.clone());
@@ -1336,9 +1335,9 @@ fn validate_possible_object_file(
 json,
 triple,
 python_major_minor,
-path.as_ref(),
+path,
 header,
-&data,
+data,
 )?;
 }
 FileKind::Elf64 => {
@@ -1349,9 +1348,9 @@ fn validate_possible_object_file(
 json,
 triple,
 python_major_minor,
-path.as_ref(),
+path,
 header,
-&data,
+data,
 )?;
 }
 FileKind::MachO32 => {
@@ -1367,9 +1366,9 @@ fn validate_possible_object_file(
 json.apple_sdk_version
 .as_ref()
 .expect("apple_sdk_version should be set"),
-path.as_ref(),
+path,
 header,
-&data,
+data,
 )?;
 }
 FileKind::MachO64 => {
@@ -1385,9 +1384,9 @@ fn validate_possible_object_file(
 json.apple_sdk_version
 .as_ref()
 .expect("apple_sdk_version should be set"),
-path.as_ref(),
+path,
 header,
-&data,
+data,
 )?;
 }
 FileKind::MachOFat32 | FileKind::MachOFat64 => {
@@ -1399,11 +1398,11 @@ fn validate_possible_object_file(
 }
 FileKind::Pe32 => {
 let file = PeFile32::parse(data)?;
-validate_pe(&mut context, path.as_ref(), &file)?;
+validate_pe(&mut context, path, &file)?;
 }
 FileKind::Pe64 => {
 let file = PeFile64::parse(data)?;
-validate_pe(&mut context, path.as_ref(), &file)?;
+validate_pe(&mut context, path, &file)?;
 }
 _ => {}
 }
@@ -1431,7 +1430,7 @@ fn validate_extension_modules(
 return Ok(errors);
 }

-let mut wanted = BTreeSet::from_iter(GLOBAL_EXTENSIONS.iter().map(|x| *x));
+let mut wanted = BTreeSet::from_iter(GLOBAL_EXTENSIONS.iter().copied());

 match python_major_minor {
 "3.8" => {
@@ -1576,15 +1575,12 @@ fn validate_json(json: &PythonJsonMain, triple: &str, is_debug: bool) -> Result<
 .map(|x| x.as_str())
 .collect::<BTreeSet<_>>();

-errors.extend(
-validate_extension_modules(
-&json.python_major_minor_version,
-triple,
-json.crt_features.contains(&"static".to_string()),
-&have_extensions,
-)?
-.into_iter(),
-);
+errors.extend(validate_extension_modules(
+&json.python_major_minor_version,
+triple,
+json.crt_features.contains(&"static".to_string()),
+&have_extensions,
+)?);

 Ok(errors)
 }
@@ -1635,7 +1631,7 @@ fn validate_distribution(

 let is_static = triple.contains("unknown-linux-musl");

-let mut tf = crate::open_distribution_archive(&dist_path)?;
+let mut tf = crate::open_distribution_archive(dist_path)?;

 // First entry in archive should be python/PYTHON.json.
 let mut entries = tf.entries()?;
@@ -1701,7 +1697,7 @@ fn validate_distribution(
 context.merge(validate_possible_object_file(
 json.as_ref().unwrap(),
 python_major_minor,
-&triple,
+triple,
 &path,
 &data,
 )?);
@@ -1726,9 +1722,9 @@ fn validate_distribution(
 context.merge(validate_possible_object_file(
 json.as_ref().unwrap(),
 python_major_minor,
-&triple,
+triple,
 &member_path,
-&member_data,
+member_data,
 )?);
 }
 }
@@ -1841,9 +1837,7 @@ fn validate_distribution(

 // Ensure that some well known Python symbols are being exported from libpython.
 for symbol in PYTHON_EXPORTED_SYMBOLS {
-let exported = context
-.libpython_exported_symbols
-.contains(&symbol.to_string());
+let exported = context.libpython_exported_symbols.contains(*symbol);
 let wanted = !is_static;

 if exported != wanted {
@@ -1867,6 +1861,7 @@ fn validate_distribution(
 }
 }

+#[allow(clippy::if_same_then_else)]
 // Static builds never have shared library extension modules.
 let want_shared = if is_static {
 false
@@ -1904,6 +1899,7 @@ fn validate_distribution(

 let exported = context.libpython_exported_symbols.contains(&ext.init_fn);

+#[allow(clippy::needless_bool, clippy::if_same_then_else)]
 // Static distributions never export symbols.
 let wanted = if is_static {
 false
@@ -1996,7 +1992,7 @@ fn verify_distribution_behavior(dist_path: &Path) -> Result<Vec<String>> {
 tf.unpack(temp_dir.path())?;

 let python_json_path = temp_dir.path().join("python").join("PYTHON.json");
-let python_json_data = std::fs::read(&python_json_path)?;
+let python_json_data = std::fs::read(python_json_path)?;
 let python_json = parse_python_json(&python_json_data)?;

 let python_exe = temp_dir.path().join("python").join(python_json.python_exe);
@@ -2005,7 +2001,7 @@ fn verify_distribution_behavior(dist_path: &Path) -> Result<Vec<String>> {
 std::fs::write(&test_file, PYTHON_VERIFICATIONS.as_bytes())?;

 eprintln!(" running interpreter tests (output should follow)");
-let output = duct::cmd(&python_exe, &[test_file.display().to_string()])
+let output = duct::cmd(python_exe, [test_file.display().to_string()])
 .stdout_to_stderr()
 .unchecked()
 .env("TARGET_TRIPLE", &python_json.target_triple)
