diff --git a/src/github.rs b/src/github.rs
index f010acf2..705c4948 100644
--- a/src/github.rs
+++ b/src/github.rs
@@ -120,7 +120,7 @@ pub async fn command_fetch_release_distributions(args: &ArgMatches) -> Result<()
                 | ".github/workflows/linux.yml"
                 | ".github/workflows/windows.yml"
             ) {
-                workflow_names.insert(wf.id.clone(), wf.name);
+                workflow_names.insert(wf.id, wf.name);
 
                 Some(wf.id)
             } else {
@@ -263,7 +263,7 @@ pub async fn command_fetch_release_distributions(args: &ArgMatches) -> Result<()
                 .to_string_lossy()
         );
 
-        let dest_path = produce_install_only(&path)?;
+        let dest_path = produce_install_only(path)?;
 
         println!(
             "releasing {}",
@@ -302,8 +302,7 @@ pub async fn command_upload_release_distributions(args: &ArgMatches) -> Result<(
         .expect("repo should be specified");
     let dry_run = args.get_flag("dry_run");
 
-    let mut filenames = std::fs::read_dir(&dist_dir)?
-        .into_iter()
+    let mut filenames = std::fs::read_dir(dist_dir)?
         .map(|x| {
             let path = x?.path();
             let filename = path
diff --git a/src/json.rs b/src/json.rs
index 5b656cff..7c27f46b 100644
--- a/src/json.rs
+++ b/src/json.rs
@@ -108,7 +108,7 @@ impl PythonJsonMain {
 }
 
 pub fn parse_python_json(json_data: &[u8]) -> Result<PythonJsonMain> {
-    let v: PythonJsonMain = serde_json::from_slice(&json_data)?;
+    let v: PythonJsonMain = serde_json::from_slice(json_data)?;
 
     Ok(v)
 }
diff --git a/src/macho.rs b/src/macho.rs
index 1f9bb067..4717fb96 100644
--- a/src/macho.rs
+++ b/src/macho.rs
@@ -288,11 +288,8 @@ impl TbdMetadata {
         let stripped = symbols
             .iter()
             .filter_map(|x| {
-                if let Some(stripped) = x.strip_prefix("R8289209$") {
-                    Some(stripped.to_string())
-                } else {
-                    None
-                }
+                x.strip_prefix("R8289209$")
+                    .map(|stripped| stripped.to_string())
             })
             .collect::<BTreeSet<_>>();
 
@@ -307,7 +304,7 @@ impl TbdMetadata {
 
         for (target, paths) in self.re_export_paths.iter_mut() {
             for path in paths.iter() {
-                let tbd_path = root_path.join(tbd_relative_path(&path)?);
+                let tbd_path = root_path.join(tbd_relative_path(path)?);
                 let tbd_info = TbdMetadata::from_path(&tbd_path)?;
 
                 if let Some(symbols) = tbd_info.symbols.get(target) {
@@ -409,10 +406,7 @@ impl IndexedSdks {
 
         let empty = BTreeSet::new();
 
-        let target_symbols = tbd_info
-            .symbols
-            .get(&symbol_target.to_string())
-            .unwrap_or(&empty);
+        let target_symbols = tbd_info.symbols.get(symbol_target).unwrap_or(&empty);
 
         for (symbol, paths) in &symbols.symbols {
             if !target_symbols.contains(symbol) {
diff --git a/src/release.rs b/src/release.rs
index 2e2a6987..fa6c3387 100644
--- a/src/release.rs
+++ b/src/release.rs
@@ -241,8 +241,7 @@ pub fn convert_to_install_only<W: Write>(reader: impl BufRead, writer: W) -> Res
             // increases the size of the archive and isn't needed in most cases.
             if path_bytes
                 .windows(b"/libpython".len())
-                .position(|x| x == b"/libpython")
-                .is_some()
+                .any(|x| x == b"/libpython")
                 && path_bytes.ends_with(b".a")
             {
                 continue;
@@ -303,7 +302,7 @@ pub fn produce_install_only(tar_zst_path: &Path) -> Result<PathBuf> {
     let install_only_name = install_only_name.replace(".tar.zst", ".tar.gz");
 
     let dest_path = tar_zst_path.with_file_name(install_only_name);
-    std::fs::write(&dest_path, &gz_data)?;
+    std::fs::write(&dest_path, gz_data)?;
 
     Ok(dest_path)
 }
diff --git a/src/validation.rs b/src/validation.rs
index 292a3c56..dbf0537c 100644
--- a/src/validation.rs
+++ b/src/validation.rs
@@ -792,7 +792,7 @@ impl ValidationContext {
     }
 }
 
-fn validate_elf<'data, Elf: FileHeader<Endian = Endianness>>(
+fn validate_elf<Elf: FileHeader<Endian = Endianness>>(
     context: &mut ValidationContext,
     json: &PythonJsonMain,
     target_triple: &str,
@@ -985,20 +985,18 @@ fn validate_elf<'data, Elf: FileHeader<Endian = Endianness>>(
         if let Some(version) = version_version {
             let parts: Vec<&str> = version.splitn(2, '_').collect();
 
-            if parts.len() == 2 {
-                if parts[0] == "GLIBC" {
-                    let v = version_compare::Version::from(parts[1])
-                        .expect("unable to parse version");
+            if parts.len() == 2 && parts[0] == "GLIBC" {
+                let v = version_compare::Version::from(parts[1])
+                    .expect("unable to parse version");
 
-                    if &v > wanted_glibc_max_version {
-                        context.errors.push(format!(
-                            "{} references too new glibc symbol {:?} ({} > {})",
-                            path.display(),
-                            name,
-                            v,
-                            wanted_glibc_max_version,
-                        ));
-                    }
+                if &v > wanted_glibc_max_version {
+                    context.errors.push(format!(
+                        "{} references too new glibc symbol {:?} ({} > {})",
+                        path.display(),
+                        name,
+                        v,
+                        wanted_glibc_max_version,
+                    ));
                 }
             }
         }
@@ -1026,12 +1024,12 @@ fn validate_elf<'data, Elf: FileHeader<Endian = Endianness>>(
             if let Some(filename) = path.file_name() {
                 let filename = filename.to_string_lossy();
 
-                if filename.starts_with("libpython") && filename.ends_with(".so.1.0") {
-                    if matches!(symbol.st_bind(), STB_GLOBAL | STB_WEAK)
-                        && symbol.st_visibility() == STV_DEFAULT
-                    {
-                        context.libpython_exported_symbols.insert(name.to_string());
-                    }
+                if filename.starts_with("libpython")
+                    && filename.ends_with(".so.1.0")
+                    && matches!(symbol.st_bind(), STB_GLOBAL | STB_WEAK)
+                    && symbol.st_visibility() == STV_DEFAULT
+                {
+                    context.libpython_exported_symbols.insert(name.to_string());
                 }
             }
         }
@@ -1058,6 +1056,7 @@ fn parse_version_nibbles(v: u32) -> semver::Version {
     semver::Version::new(major as _, minor as _, patch as _)
 }
 
+#[allow(clippy::too_many_arguments)]
 fn validate_macho<Mach: MachHeader<Endian = Endianness>>(
     context: &mut ValidationContext,
     target_triple: &str,
@@ -1125,7 +1124,7 @@ fn validate_macho<Mach: MachHeader<Endian = Endianness>>(
                 target_version = Some(parse_version_nibbles(v.version.get(endian)));
             }
             LoadCommandVariant::Dylib(command) => {
-                let raw_string = load_command.string(endian, command.dylib.name.clone())?;
+                let raw_string = load_command.string(endian, command.dylib.name)?;
                 let lib = String::from_utf8(raw_string.to_vec())?;
 
                 dylib_names.push(lib.clone());
@@ -1336,9 +1335,9 @@ fn validate_possible_object_file(
                 json,
                 triple,
                 python_major_minor,
-                path.as_ref(),
+                path,
                 header,
-                &data,
+                data,
             )?;
         }
         FileKind::Elf64 => {
@@ -1349,9 +1348,9 @@ fn validate_possible_object_file(
                 json,
                 triple,
                 python_major_minor,
-                path.as_ref(),
+                path,
                 header,
-                &data,
+                data,
             )?;
         }
         FileKind::MachO32 => {
@@ -1367,9 +1366,9 @@ fn validate_possible_object_file(
                 json.apple_sdk_version
                     .as_ref()
                     .expect("apple_sdk_version should be set"),
-                path.as_ref(),
+                path,
                 header,
-                &data,
+                data,
             )?;
         }
         FileKind::MachO64 => {
@@ -1385,9 +1384,9 @@ fn validate_possible_object_file(
                 json.apple_sdk_version
                     .as_ref()
                     .expect("apple_sdk_version should be set"),
-                path.as_ref(),
+                path,
                 header,
-                &data,
+                data,
             )?;
         }
         FileKind::MachOFat32 | FileKind::MachOFat64 => {
@@ -1399,11 +1398,11 @@ fn validate_possible_object_file(
         }
         FileKind::Pe32 => {
             let file = PeFile32::parse(data)?;
-            validate_pe(&mut context, path.as_ref(), &file)?;
+            validate_pe(&mut context, path, &file)?;
         }
         FileKind::Pe64 => {
             let file = PeFile64::parse(data)?;
-            validate_pe(&mut context, path.as_ref(), &file)?;
+            validate_pe(&mut context, path, &file)?;
         }
         _ => {}
     }
@@ -1431,7 +1430,7 @@ fn validate_extension_modules(
         return Ok(errors);
     }
 
-    let mut wanted = BTreeSet::from_iter(GLOBAL_EXTENSIONS.iter().map(|x| *x));
+    let mut wanted = BTreeSet::from_iter(GLOBAL_EXTENSIONS.iter().copied());
 
     match python_major_minor {
         "3.8" => {
@@ -1576,15 +1575,12 @@ fn validate_json(json: &PythonJsonMain, triple: &str, is_debug: bool) -> Result<
         .map(|x| x.as_str())
         .collect::<BTreeSet<_>>();
 
-    errors.extend(
-        validate_extension_modules(
-            &json.python_major_minor_version,
-            triple,
-            json.crt_features.contains(&"static".to_string()),
-            &have_extensions,
-        )?
-        .into_iter(),
-    );
+    errors.extend(validate_extension_modules(
+        &json.python_major_minor_version,
+        triple,
+        json.crt_features.contains(&"static".to_string()),
+        &have_extensions,
+    )?);
 
     Ok(errors)
 }
@@ -1635,7 +1631,7 @@ fn validate_distribution(
 
     let is_static = triple.contains("unknown-linux-musl");
 
-    let mut tf = crate::open_distribution_archive(&dist_path)?;
+    let mut tf = crate::open_distribution_archive(dist_path)?;
 
     // First entry in archive should be python/PYTHON.json.
     let mut entries = tf.entries()?;
@@ -1701,7 +1697,7 @@ fn validate_distribution(
             context.merge(validate_possible_object_file(
                 json.as_ref().unwrap(),
                 python_major_minor,
-                &triple,
+                triple,
                 &path,
                 &data,
             )?);
@@ -1726,9 +1722,9 @@ fn validate_distribution(
                 context.merge(validate_possible_object_file(
                     json.as_ref().unwrap(),
                     python_major_minor,
-                    &triple,
+                    triple,
                     &member_path,
-                    &member_data,
+                    member_data,
                 )?);
             }
         }
@@ -1841,9 +1837,7 @@ fn validate_distribution(
     // Ensure that some well known Python symbols are being exported from libpython.
     for symbol in PYTHON_EXPORTED_SYMBOLS {
-        let exported = context
-            .libpython_exported_symbols
-            .contains(&symbol.to_string());
+        let exported = context.libpython_exported_symbols.contains(*symbol);
 
         let wanted = !is_static;
 
         if exported != wanted {
@@ -1867,6 +1861,7 @@ fn validate_distribution(
         }
     }
 
+    #[allow(clippy::if_same_then_else)]
     // Static builds never have shared library extension modules.
     let want_shared = if is_static {
         false
@@ -1914,10 +1909,8 @@ fn validate_distribution(
             } else if triple.contains("-windows-") {
                 false
             // Presence of a shared library extension implies no export.
-            } else if ext.shared_lib.is_some() {
-                false
             } else {
-                true
+                ext.shared_lib.is_none()
             };
 
             if exported != wanted {
@@ -1996,7 +1989,7 @@ fn verify_distribution_behavior(dist_path: &Path) -> Result<Vec<String>> {
     tf.unpack(temp_dir.path())?;
 
     let python_json_path = temp_dir.path().join("python").join("PYTHON.json");
-    let python_json_data = std::fs::read(&python_json_path)?;
+    let python_json_data = std::fs::read(python_json_path)?;
     let python_json = parse_python_json(&python_json_data)?;
 
     let python_exe = temp_dir.path().join("python").join(python_json.python_exe);
@@ -2005,7 +1998,7 @@ fn verify_distribution_behavior(dist_path: &Path) -> Result<Vec<String>> {
     std::fs::write(&test_file, PYTHON_VERIFICATIONS.as_bytes())?;
 
     eprintln!("  running interpreter tests (output should follow)");
-    let output = duct::cmd(&python_exe, &[test_file.display().to_string()])
+    let output = duct::cmd(python_exe, [test_file.display().to_string()])
         .stdout_to_stderr()
         .unchecked()
         .env("TARGET_TRIPLE", &python_json.target_triple)
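
Note: the hunks above are mechanical clippy cleanups — dropping needless borrows and `.clone()` calls, collapsing nested `if`s, and swapping hand-rolled patterns for the dedicated `Option`/iterator adapters (the kind of thing clippy's `manual_map`, `search_is_some`, and `map_clone` lints flag). A minimal, self-contained sketch of the three recurring rewrites; the symbol and path values are invented for illustration, and only the `R8289209$` prefix and `/libpython` needle come from the patch itself:

```rust
use std::collections::BTreeSet;

fn main() {
    // manual_map: `strip_prefix` already returns an Option, so `filter_map`
    // can chain it with `map` instead of an
    // `if let Some(..) { Some(..) } else { None }` block (as in src/macho.rs).
    let symbols = ["R8289209$_init", "R8289209$_start", "_unrelated"];
    let stripped: BTreeSet<String> = symbols
        .iter()
        .filter_map(|x| x.strip_prefix("R8289209$").map(|s| s.to_string()))
        .collect();
    assert_eq!(stripped.len(), 2);

    // search_is_some: `.any(..)` states the intent of `.position(..).is_some()`
    // directly and can stop at the first match (as in src/release.rs).
    let path_bytes = b"python/install/lib/libpython3.9.a".as_slice();
    assert!(path_bytes
        .windows(b"/libpython".len())
        .any(|w| w == b"/libpython"));

    // map_clone: `.copied()` replaces `.map(|x| *x)` when the items are Copy
    // (as in src/validation.rs).
    let wanted = BTreeSet::from_iter(["_PyObject_New", "PyList_New"].iter().copied());
    assert!(wanted.contains("PyList_New"));
}
```

None of these rewrites change behavior; they only remove redundant work (extra clones, a second pass over an `Option`) and make the remaining code state its intent directly.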