
Fix clippy lints
zanieb committed Jun 6, 2024
1 parent 9aa61da commit a6227fe
Showing 5 changed files with 56 additions and 71 deletions.
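Note: the diff below is a set of mechanical cleanups for warnings reported by Clippy, Rust's lint tool. Assuming a standard Cargo workspace (this page does not show the project's actual CI invocation), warnings of this kind are typically surfaced with: cargo clippy --workspace --all-targets -- -D warnings. Short hedged sketches of the individual lint patterns follow each file's diff.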
7 changes: 3 additions & 4 deletions src/github.rs
@@ -120,7 +120,7 @@ pub async fn command_fetch_release_distributions(args: &ArgMatches) -> Result<()
| ".github/workflows/linux.yml"
| ".github/workflows/windows.yml"
) {
- workflow_names.insert(wf.id.clone(), wf.name);
+ workflow_names.insert(wf.id, wf.name);

Some(wf.id)
} else {
@@ -263,7 +263,7 @@ pub async fn command_fetch_release_distributions(args: &ArgMatches) -> Result<()
.to_string_lossy()
);

- let dest_path = produce_install_only(&path)?;
+ let dest_path = produce_install_only(path)?;

println!(
"releasing {}",
@@ -302,8 +302,7 @@ pub async fn command_upload_release_distributions(args: &ArgMatches) -> Result<(
.expect("repo should be specified");
let dry_run = args.get_flag("dry_run");

- let mut filenames = std::fs::read_dir(&dist_dir)?
- .into_iter()
+ let mut filenames = std::fs::read_dir(dist_dir)?
.map(|x| {
let path = x?.path();
let filename = path
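Note: the changes above in src/github.rs look like fixes for clippy's clone_on_copy, needless_borrow, and useless_conversion lints. A minimal self-contained sketch of the first and last patterns, using a hypothetical WorkflowId type rather than this repository's code:

    use std::collections::HashMap;

    #[derive(Clone, Copy, PartialEq, Eq, Hash)]
    struct WorkflowId(u64);

    fn main() {
        let mut workflow_names = HashMap::new();
        let id = WorkflowId(42);

        // clippy::clone_on_copy: calling .clone() on a Copy type is redundant.
        // workflow_names.insert(id.clone(), "linux");   // before
        workflow_names.insert(id, "linux"); // after: the value is simply copied

        // clippy::useless_conversion: read_dir() already yields an iterator,
        // so the extra .into_iter() is a no-op.
        // let entries = std::fs::read_dir(".").unwrap().into_iter();   // before
        let entries = std::fs::read_dir(".").unwrap(); // after

        println!("{} workflows, {} dir entries", workflow_names.len(), entries.count());
    }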
2 changes: 1 addition & 1 deletion src/json.rs
@@ -108,7 +108,7 @@ impl PythonJsonMain {
}

pub fn parse_python_json(json_data: &[u8]) -> Result<PythonJsonMain> {
- let v: PythonJsonMain = serde_json::from_slice(&json_data)?;
+ let v: PythonJsonMain = serde_json::from_slice(json_data)?;

Ok(v)
}
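Note: the same needless_borrow shape as above — json_data is already a &[u8], so serde_json::from_slice(&json_data) passes a double reference that is immediately dereferenced again. A tiny hypothetical sketch of the pattern (not using serde):

    fn byte_len(data: &[u8]) -> usize {
        data.len()
    }

    fn main() {
        let json_data: &[u8] = b"{}";
        // clippy::needless_borrow: &json_data would be a &&[u8]; pass the slice directly.
        println!("{}", byte_len(json_data));
    }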
14 changes: 4 additions & 10 deletions src/macho.rs
@@ -288,11 +288,8 @@ impl TbdMetadata {
let stripped = symbols
.iter()
.filter_map(|x| {
- if let Some(stripped) = x.strip_prefix("R8289209$") {
- Some(stripped.to_string())
- } else {
- None
- }
+ x.strip_prefix("R8289209$")
+ .map(|stripped| stripped.to_string())
})
.collect::<Vec<_>>();

@@ -307,7 +304,7 @@ impl TbdMetadata {

for (target, paths) in self.re_export_paths.iter_mut() {
for path in paths.iter() {
- let tbd_path = root_path.join(tbd_relative_path(&path)?);
+ let tbd_path = root_path.join(tbd_relative_path(path)?);
let tbd_info = TbdMetadata::from_path(&tbd_path)?;

if let Some(symbols) = tbd_info.symbols.get(target) {
@@ -409,10 +406,7 @@ impl IndexedSdks {

let empty = BTreeSet::new();

- let target_symbols = tbd_info
- .symbols
- .get(&symbol_target.to_string())
- .unwrap_or(&empty);
+ let target_symbols = tbd_info.symbols.get(symbol_target).unwrap_or(&empty);

for (symbol, paths) in &symbols.symbols {
if !target_symbols.contains(symbol) {
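Note: the first hunk above in src/macho.rs replaces an if let Some(..) { Some(..) } else { None } body with strip_prefix(..).map(..), which is the rewrite clippy's manual_map lint suggests; the other two hunks drop a redundant borrow and an intermediate to_string() lookup key. A minimal sketch of the manual_map pattern with hypothetical symbol names:

    fn main() {
        let symbols = ["R8289209$_init", "_main"];

        // Before (would trigger clippy::manual_map):
        // let stripped: Vec<String> = symbols
        //     .iter()
        //     .filter_map(|x| {
        //         if let Some(s) = x.strip_prefix("R8289209$") {
        //             Some(s.to_string())
        //         } else {
        //             None
        //         }
        //     })
        //     .collect();

        // After: strip_prefix already returns an Option, so map over it directly.
        let stripped: Vec<String> = symbols
            .iter()
            .filter_map(|x| x.strip_prefix("R8289209$").map(|s| s.to_string()))
            .collect();

        assert_eq!(stripped, vec!["_init".to_string()]);
    }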
5 changes: 2 additions & 3 deletions src/release.rs
@@ -241,8 +241,7 @@ pub fn convert_to_install_only<W: Write>(reader: impl BufRead, writer: W) -> Res
// increases the size of the archive and isn't needed in most cases.
if path_bytes
.windows(b"/libpython".len())
- .position(|x| x == b"/libpython")
- .is_some()
+ .any(|x| x == b"/libpython")
&& path_bytes.ends_with(b".a")
{
continue;
@@ -303,7 +302,7 @@ pub fn produce_install_only(tar_zst_path: &Path) -> Result<PathBuf> {
let install_only_name = install_only_name.replace(".tar.zst", ".tar.gz");

let dest_path = tar_zst_path.with_file_name(install_only_name);
- std::fs::write(&dest_path, &gz_data)?;
+ std::fs::write(&dest_path, gz_data)?;

Ok(dest_path)
}
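Note: the .position(..).is_some() to .any(..) change above in src/release.rs matches clippy's search_is_some suggestion — when only existence matters, any() says so directly and can stop at the first hit without producing an index. A minimal sketch with a hypothetical path:

    fn main() {
        let path_bytes: &[u8] = b"python/install/lib/libpython3.12.a";

        // Before (would trigger clippy::search_is_some):
        // let is_static_libpython = path_bytes
        //     .windows(b"/libpython".len())
        //     .position(|x| x == b"/libpython")
        //     .is_some()
        //     && path_bytes.ends_with(b".a");

        // After: any() expresses "does any window match?" without the intermediate index.
        let is_static_libpython = path_bytes
            .windows(b"/libpython".len())
            .any(|x| x == b"/libpython")
            && path_bytes.ends_with(b".a");

        assert!(is_static_libpython);
    }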
99 changes: 46 additions & 53 deletions src/validation.rs
@@ -792,7 +792,7 @@ impl ValidationContext {
}
}

- fn validate_elf<'data, Elf: FileHeader<Endian = Endianness>>(
+ fn validate_elf<Elf: FileHeader<Endian = Endianness>>(
context: &mut ValidationContext,
json: &PythonJsonMain,
target_triple: &str,
@@ -985,20 +985,18 @@ fn validate_elf<'data, Elf: FileHeader<Endian = Endianness>>(
if let Some(version) = version_version {
let parts: Vec<&str> = version.splitn(2, '_').collect();

- if parts.len() == 2 {
- if parts[0] == "GLIBC" {
- let v = version_compare::Version::from(parts[1])
- .expect("unable to parse version");
+ if parts.len() == 2 && parts[0] == "GLIBC" {
+ let v = version_compare::Version::from(parts[1])
+ .expect("unable to parse version");

- if &v > wanted_glibc_max_version {
- context.errors.push(format!(
- "{} references too new glibc symbol {:?} ({} > {})",
- path.display(),
- name,
- v,
- wanted_glibc_max_version,
- ));
- }
+ if &v > wanted_glibc_max_version {
+ context.errors.push(format!(
+ "{} references too new glibc symbol {:?} ({} > {})",
+ path.display(),
+ name,
+ v,
+ wanted_glibc_max_version,
+ ));
+ }
}
}
@@ -1026,12 +1024,12 @@ fn validate_elf<'data, Elf: FileHeader<Endian = Endianness>>(
if let Some(filename) = path.file_name() {
let filename = filename.to_string_lossy();

if filename.starts_with("libpython") && filename.ends_with(".so.1.0") {
if matches!(symbol.st_bind(), STB_GLOBAL | STB_WEAK)
&& symbol.st_visibility() == STV_DEFAULT
{
context.libpython_exported_symbols.insert(name.to_string());
}
if filename.starts_with("libpython")
&& filename.ends_with(".so.1.0")
&& matches!(symbol.st_bind(), STB_GLOBAL | STB_WEAK)
&& symbol.st_visibility() == STV_DEFAULT
{
context.libpython_exported_symbols.insert(name.to_string());
}
}
}
@@ -1058,6 +1056,7 @@ fn parse_version_nibbles(v: u32) -> semver::Version {
semver::Version::new(major as _, minor as _, patch as _)
}

+ #[allow(clippy::too_many_arguments)]
fn validate_macho<Mach: MachHeader<Endian = Endianness>>(
context: &mut ValidationContext,
target_triple: &str,
@@ -1125,7 +1124,7 @@ fn validate_macho<Mach: MachHeader<Endian = Endianness>>(
target_version = Some(parse_version_nibbles(v.version.get(endian)));
}
LoadCommandVariant::Dylib(command) => {
- let raw_string = load_command.string(endian, command.dylib.name.clone())?;
+ let raw_string = load_command.string(endian, command.dylib.name)?;
let lib = String::from_utf8(raw_string.to_vec())?;

dylib_names.push(lib.clone());
@@ -1336,9 +1335,9 @@ fn validate_possible_object_file(
json,
triple,
python_major_minor,
- path.as_ref(),
+ path,
header,
- &data,
+ data,
)?;
}
FileKind::Elf64 => {
@@ -1349,9 +1348,9 @@ fn validate_possible_object_file(
json,
triple,
python_major_minor,
- path.as_ref(),
+ path,
header,
- &data,
+ data,
)?;
}
FileKind::MachO32 => {
@@ -1367,9 +1366,9 @@ fn validate_possible_object_file(
json.apple_sdk_version
.as_ref()
.expect("apple_sdk_version should be set"),
- path.as_ref(),
+ path,
header,
- &data,
+ data,
)?;
}
FileKind::MachO64 => {
@@ -1385,9 +1384,9 @@ fn validate_possible_object_file(
json.apple_sdk_version
.as_ref()
.expect("apple_sdk_version should be set"),
- path.as_ref(),
+ path,
header,
- &data,
+ data,
)?;
}
FileKind::MachOFat32 | FileKind::MachOFat64 => {
@@ -1399,11 +1398,11 @@ fn validate_possible_object_file(
}
FileKind::Pe32 => {
let file = PeFile32::parse(data)?;
- validate_pe(&mut context, path.as_ref(), &file)?;
+ validate_pe(&mut context, path, &file)?;
}
FileKind::Pe64 => {
let file = PeFile64::parse(data)?;
- validate_pe(&mut context, path.as_ref(), &file)?;
+ validate_pe(&mut context, path, &file)?;
}
_ => {}
}
@@ -1431,7 +1430,7 @@ fn validate_extension_modules(
return Ok(errors);
}

- let mut wanted = BTreeSet::from_iter(GLOBAL_EXTENSIONS.iter().map(|x| *x));
+ let mut wanted = BTreeSet::from_iter(GLOBAL_EXTENSIONS.iter().copied());

match python_major_minor {
"3.8" => {
@@ -1576,15 +1575,12 @@ fn validate_json(json: &PythonJsonMain, triple: &str, is_debug: bool) -> Result<
.map(|x| x.as_str())
.collect::<BTreeSet<_>>();

- errors.extend(
- validate_extension_modules(
- &json.python_major_minor_version,
- triple,
- json.crt_features.contains(&"static".to_string()),
- &have_extensions,
- )?
- .into_iter(),
- );
+ errors.extend(validate_extension_modules(
+ &json.python_major_minor_version,
+ triple,
+ json.crt_features.contains(&"static".to_string()),
+ &have_extensions,
+ )?);

Ok(errors)
}
@@ -1635,7 +1631,7 @@ fn validate_distribution(

let is_static = triple.contains("unknown-linux-musl");

- let mut tf = crate::open_distribution_archive(&dist_path)?;
+ let mut tf = crate::open_distribution_archive(dist_path)?;

// First entry in archive should be python/PYTHON.json.
let mut entries = tf.entries()?;
@@ -1701,7 +1697,7 @@ fn validate_distribution(
context.merge(validate_possible_object_file(
json.as_ref().unwrap(),
python_major_minor,
- &triple,
+ triple,
&path,
&data,
)?);
@@ -1726,9 +1722,9 @@ fn validate_distribution(
context.merge(validate_possible_object_file(
json.as_ref().unwrap(),
python_major_minor,
- &triple,
+ triple,
&member_path,
- &member_data,
+ member_data,
)?);
}
}
@@ -1841,9 +1837,7 @@ fn validate_distribution(

// Ensure that some well known Python symbols are being exported from libpython.
for symbol in PYTHON_EXPORTED_SYMBOLS {
- let exported = context
- .libpython_exported_symbols
- .contains(&symbol.to_string());
+ let exported = context.libpython_exported_symbols.contains(*symbol);
let wanted = !is_static;

if exported != wanted {
@@ -1867,6 +1861,7 @@ fn validate_distribution(
}
}

+ #[allow(clippy::if_same_then_else)]
// Static builds never have shared library extension modules.
let want_shared = if is_static {
false
@@ -1914,10 +1909,8 @@ fn validate_distribution(
} else if triple.contains("-windows-") {
false
// Presence of a shared library extension implies no export.
- } else if ext.shared_lib.is_some() {
- false
} else {
- true
+ ext.shared_lib.is_none()
};

if exported != wanted {
@@ -1996,7 +1989,7 @@ fn verify_distribution_behavior(dist_path: &Path) -> Result<Vec<String>> {
tf.unpack(temp_dir.path())?;

let python_json_path = temp_dir.path().join("python").join("PYTHON.json");
- let python_json_data = std::fs::read(&python_json_path)?;
+ let python_json_data = std::fs::read(python_json_path)?;
let python_json = parse_python_json(&python_json_data)?;

let python_exe = temp_dir.path().join("python").join(python_json.python_exe);
@@ -2005,7 +1998,7 @@ fn verify_distribution_behavior(dist_path: &Path) -> Result<Vec<String>> {
std::fs::write(&test_file, PYTHON_VERIFICATIONS.as_bytes())?;

eprintln!(" running interpreter tests (output should follow)");
- let output = duct::cmd(&python_exe, &[test_file.display().to_string()])
+ let output = duct::cmd(python_exe, [test_file.display().to_string()])
.stdout_to_stderr()
.unchecked()
.env("TARGET_TRIPLE", &python_json.target_triple)
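Note: the edits in src/validation.rs are the same families of cleanup at larger scale — nested ifs collapsed into a single condition (collapsible_if), .map(|x| *x) replaced by .copied() (map_clone), redundant borrows and to_string() allocations dropped, an unused 'data lifetime parameter removed, plus two explicit #[allow(..)] attributes for lints the code keeps tripping on purpose. A minimal sketch of the two iterator/if patterns, with hypothetical values:

    use std::collections::BTreeSet;

    const GLOBAL_EXTENSIONS: &[&str] = &["_abc", "_io"];

    fn main() {
        // clippy::map_clone: copying &&str items out of an iterator is what .copied() is for.
        // let wanted = BTreeSet::from_iter(GLOBAL_EXTENSIONS.iter().map(|x| *x));   // before
        let wanted: BTreeSet<&str> = BTreeSet::from_iter(GLOBAL_EXTENSIONS.iter().copied());

        let parts = ["GLIBC", "2.17"];

        // clippy::collapsible_if: merge nested conditions with &&.
        // if parts.len() == 2 {
        //     if parts[0] == "GLIBC" { /* ... */ }
        // }
        if parts.len() == 2 && parts[0] == "GLIBC" {
            println!("glibc {} with {} wanted extensions", parts[1], wanted.len());
        }
    }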
