Apply assorted ruff rules (RUF010 and RUF100) (pydicom#2101)
* Apply ruff rule RUF100

RUF100 Unused blanket `noqa` directive
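A minimal sketch of the rule (hypothetical lines, not taken from this diff): a blanket `noqa` on a line where no lint rule fires is itself reported as unused, and the fix is simply to delete the comment.

    count = 0  # noqa
    # RUF100: ruff emits no warning for this line, so the blanket directive is
    # unused; the fix applied throughout this commit is to drop it:
    count = 0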

* Apply ruff rule RUF010

RUF010 Use explicit conversion flag
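RUF010 flags redundant `str()`, `repr()`, and `ascii()` calls inside f-string replacement fields; ruff's suggested fix is the equivalent explicit conversion flag (`!s`, `!r`, `!a`). The hunks below drop the `str()` wrappers entirely, which gives the same output because `str` conversion is already the default. A short sketch of the pattern (the exception value is arbitrary):

    e = ValueError("unsupported element")
    f"index error: {str(e)}"   # flagged by RUF010
    f"index error: {e!s}"      # ruff's suggestion: explicit conversion flag
    f"index error: {e}"        # form used in this commit; same output here, str() is the default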

* Ignore ruff rules in generated files
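The generated dictionaries contain long machine-written lines, so instead of having the generator scripts write `# noqa` into their output (see the util/generate_dict changes below), the line-length rule E501 is now ignored per file in pyproject.toml for src/pydicom/_*_dict.py and src/pydicom/uid.py.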
DimitriPapadopoulos committed Jul 9, 2024
1 parent a09dd6d commit e5c2d27
Showing 18 changed files with 57 additions and 71 deletions.
6 changes: 3 additions & 3 deletions doc/conf.py
@@ -38,8 +38,8 @@
# directory, add these directories to sys.path here. If the directory
# is relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
sys.path.insert(0, os.path.abspath("../build_tools/sphinx")) # noqa
from github_link import make_linkcode_resolve # noqa: E402
sys.path.insert(0, os.path.abspath("../build_tools/sphinx"))
from github_link import make_linkcode_resolve

# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
@@ -317,7 +317,7 @@ def setup(app):
# The following is used by sphinx.ext.linkcode to provide links to github
linkcode_resolve = make_linkcode_resolve(
"pydicom",
"https://github.com/pydicom/pydicom/blob/{revision}/{package}/{path}#L{lineno}", # noqa
"https://github.com/pydicom/pydicom/blob/{revision}/{package}/{path}#L{lineno}",
)

doctest_global_setup = """
2 changes: 2 additions & 0 deletions pyproject.toml
@@ -128,7 +128,9 @@ max-statements = 108

[tool.ruff.lint.per-file-ignores]
"*/__init__.py" = ["F401"]
"src/pydicom/_*_dict.py" = ["E501"]
"src/pydicom/config.py" = ["PLW0602"]
"src/pydicom/uid.py" = ["E501"]
"src/pydicom/sr/_concepts_dict.py" = ["F601"]
"tests/*.py" = ["F401"]

4 changes: 2 additions & 2 deletions src/pydicom/cli/main.py
@@ -52,7 +52,7 @@ def eval_element(ds: Dataset, element: str) -> Any:
f"Data element '{element}' is not in the dataset"
)
except IndexError as e:
raise argparse.ArgumentTypeError(f"'{element}' has an index error: {str(e)}")
raise argparse.ArgumentTypeError(f"'{element}' has an index error: {e}")


def filespec_parts(filespec: str) -> tuple[str, str, str]:
@@ -160,7 +160,7 @@ def filespec_parser(filespec: str) -> list[tuple[Dataset, Any]]:
)
raise argparse.ArgumentTypeError(f"File '{filename}' not found{extra}")
except Exception as e:
raise argparse.ArgumentTypeError(f"Error reading '{filename}': {str(e)}")
raise argparse.ArgumentTypeError(f"Error reading '{filename}': {e}")

if not element:
return [(ds, None)]
2 changes: 1 addition & 1 deletion src/pydicom/data/retry.py
@@ -43,7 +43,7 @@ def f_retry(*args: Any, **kwargs: Any) -> Any:
try:
return f(*args, **kwargs)
except exc as e:
msg = f"{str(e)}: retrying in {mdelay} seconds..."
msg = f"{e}: retrying in {mdelay} seconds..."
if exc_msg:
msg += f" {exc_msg}"

4 changes: 2 additions & 2 deletions src/pydicom/dataelem.py
@@ -851,7 +851,7 @@ def DataElement_from_raw(
elif raw.tag.element == 0:
vr = VR_.UL
else:
msg = f"Unknown DICOM tag {str(raw.tag)}"
msg = f"Unknown DICOM tag {raw.tag}"
if config.settings.reading_validation_mode == config.RAISE:
raise KeyError(msg + " can't look up VR")

@@ -870,7 +870,7 @@ def DataElement_from_raw(
try:
value = convert_value(vr, raw, encoding)
except NotImplementedError as e:
raise NotImplementedError(f"{str(e)} in tag {raw.tag!r}")
raise NotImplementedError(f"{e} in tag {raw.tag!r}")
except BytesLengthException as e:
message = (
f"{e} This occurred while trying to parse {raw.tag} according "
12 changes: 5 additions & 7 deletions src/pydicom/dataset.py
@@ -2334,11 +2334,9 @@ def waveform_array(self, index: int) -> "numpy.ndarray":
return wave_handler.multiplex_array(self, index, as_raw=False)

# Format strings spec'd according to python string formatting options
# See https://docs.python.org/3/library/stdtypes.html#printf-style-string-formatting # noqa
# See https://docs.python.org/3/library/stdtypes.html#printf-style-string-formatting
default_element_format = "%(tag)s %(name)-35.35s %(VR)s: %(repval)s"
default_sequence_element_format = (
"%(tag)s %(name)-35.35s %(VR)s: %(repval)s" # noqa
)
default_sequence_element_format = "%(tag)s %(name)-35.35s %(VR)s: %(repval)s"

def formatted_lines(
self,
@@ -2443,7 +2441,7 @@ def _pretty_str(self, indent: int = 0, top_level_only: bool = False) -> str:
with tag_in_exception(elem.tag):
if elem.VR == VR_.SQ: # a sequence
strings.append(
f"{indent_str}{str(elem.tag)} {elem.name} "
f"{indent_str}{elem.tag} {elem.name} "
f"{len(elem.value)} item(s) ---- "
)
if not top_level_only:
@@ -3114,7 +3112,7 @@ def from_json(
def to_json_dict(
self,
bulk_data_threshold: int = 1024,
bulk_data_element_handler: Callable[[DataElement], str] | None = None, # noqa
bulk_data_element_handler: Callable[[DataElement], str] | None = None,
suppress_invalid_tags: bool = False,
) -> dict[str, Any]:
"""Return a dictionary representation of the :class:`Dataset`
@@ -3164,7 +3162,7 @@ def to_json_dict(
def to_json(
self,
bulk_data_threshold: int = 1024,
bulk_data_element_handler: Callable[[DataElement], str] | None = None, # noqa
bulk_data_element_handler: Callable[[DataElement], str] | None = None,
dump_handler: Callable[[dict[str, Any]], str] | None = None,
suppress_invalid_tags: bool = False,
) -> str:
2 changes: 1 addition & 1 deletion src/pydicom/fileset.py
@@ -524,7 +524,7 @@ def leaf_summary(node: "RecordNode", indent_char: str) -> list[str]:
for node in self:
indent = indent_char * node.depth
if node.children:
s.append(f"{indent}{str(node)}")
s.append(f"{indent}{node}")
# Summarise any leaves at the next level
for child in node.children:
if child.has_instance:
4 changes: 2 additions & 2 deletions src/pydicom/filewriter.py
@@ -270,7 +270,7 @@ def correct_ambiguous_vr_element(
_correct_ambiguous_vr_element(elem, ancestors, is_little_endian)
except AttributeError as e:
raise AttributeError(
f"Failed to resolve ambiguous VR for tag {elem.tag}: {str(e)}"
f"Failed to resolve ambiguous VR for tag {elem.tag}: {e}"
)

return elem
@@ -614,7 +614,7 @@ def write_data_element(
if not fp.is_implicit_VR and vr and len(vr) != 2:
msg = (
f"Cannot write ambiguous VR of '{vr}' for data element with "
f"tag {repr(elem.tag)}.\nSet the correct VR before "
f"tag {elem.tag!r}.\nSet the correct VR before "
f"writing, or use an implicit VR transfer syntax"
)
raise ValueError(msg)
2 changes: 1 addition & 1 deletion src/pydicom/tag.py
@@ -29,7 +29,7 @@ def tag_in_exception(tag: "BaseTag") -> Iterator[None]:
yield
except Exception as exc:
stack_trace = traceback.format_exc()
msg = f"With tag {tag} got exception: {str(exc)}\n{stack_trace}"
msg = f"With tag {tag} got exception: {exc}\n{stack_trace}"
raise type(exc)(msg) from exc


2 changes: 1 addition & 1 deletion src/pydicom/util/leanread.py
@@ -48,7 +48,7 @@ def __exit__(
) -> bool | None:
self.fobj.close()

return None # noqa: PLR1711
return None

def __iter__(self) -> Iterator[_ElementType]:
# Need the transfer_syntax later
10 changes: 5 additions & 5 deletions src/pydicom/valuerep.py
@@ -617,7 +617,7 @@ def __str__(self) -> str:
return super().__str__()

def __repr__(self) -> str:
return f'"{str(self)}"'
return f'"{self}"'


class DA(_DateTimeBase, datetime.date):
@@ -1075,7 +1075,7 @@ def __init__(
if not is_valid_ds(str(self)):
# This will catch nan and inf
raise ValueError(
f'Value "{str(self)}" is not valid for elements with a VR of DS'
f'Value "{self}" is not valid for elements with a VR of DS'
)

def __eq__(self, other: Any) -> Any:
@@ -1212,7 +1212,7 @@ def __init__(
warn_and_log(msg)
if not is_valid_ds(repr(self).strip("'")):
# This will catch nan and inf
msg = f'Value "{str(self)}" is not valid for elements with a VR of DS'
msg = f'Value "{self}" is not valid for elements with a VR of DS'
if validation_mode == config.RAISE:
raise ValueError(msg)
warn_and_log(msg)
@@ -1240,7 +1240,7 @@ def __str__(self) -> str:
def __repr__(self) -> str:
if self.auto_format and hasattr(self, "original_string"):
return f"'{self.original_string}'"
return f"'{str(self)}'"
return f"'{self}'"


# CHOOSE TYPE OF DS
@@ -1318,7 +1318,7 @@ def __init__(
elif isinstance(val, IS | ISfloat) and hasattr(val, "original_string"):
self.original_string = val.original_string
if validation_mode:
msg = f'Value "{str(self)}" is not valid for elements with a VR of IS'
msg = f'Value "{self}" is not valid for elements with a VR of IS'
if validation_mode == config.WARN:
warn_and_log(msg)
elif validation_mode == config.RAISE:
2 changes: 1 addition & 1 deletion tests/test_filereader.py
@@ -49,7 +49,7 @@

have_numpy = pydicom.config.have_numpy
if have_numpy:
import numpy # NOQA
import numpy

try:
import jpeg_ls
34 changes: 13 additions & 21 deletions tests/test_pillow_pixel_data.py
@@ -57,23 +57,15 @@
# FMT_BA_BV_SPX_PR_FRAMESF_PI
# JPGB: 1.2.840.10008.1.2.4.50 - JPEG Baseline (8-bit only)
JPGB_08_08_3_0_1F_YBR_FULL = get_testdata_file("SC_rgb_small_odd_jpeg.dcm")
JPGB_08_08_3_0_120F_YBR_FULL_422 = get_testdata_file(
"color3d_jpeg_baseline.dcm"
) # noqa
JPGB_08_08_3_0_120F_YBR_FULL_422 = get_testdata_file("color3d_jpeg_baseline.dcm")
# Different subsampling 411, 422, 444
JPGB_08_08_3_0_1F_YBR_FULL_422_411 = get_testdata_file(
"SC_rgb_dcmtk_+eb+cy+np.dcm"
) # noqa
JPGB_08_08_3_0_1F_YBR_FULL_422_422 = get_testdata_file(
"SC_rgb_dcmtk_+eb+cy+s2.dcm"
) # noqa
JPGB_08_08_3_0_1F_YBR_FULL_411 = get_testdata_file("SC_rgb_dcmtk_+eb+cy+n1.dcm") # noqa
JPGB_08_08_3_0_1F_YBR_FULL_422 = get_testdata_file("SC_rgb_dcmtk_+eb+cy+n2.dcm") # noqa
JPGB_08_08_3_0_1F_YBR_FULL_444 = get_testdata_file("SC_rgb_dcmtk_+eb+cy+s4.dcm") # noqa
JPGB_08_08_3_0_1F_YBR_FULL_422_411 = get_testdata_file("SC_rgb_dcmtk_+eb+cy+np.dcm")
JPGB_08_08_3_0_1F_YBR_FULL_422_422 = get_testdata_file("SC_rgb_dcmtk_+eb+cy+s2.dcm")
JPGB_08_08_3_0_1F_YBR_FULL_411 = get_testdata_file("SC_rgb_dcmtk_+eb+cy+n1.dcm")
JPGB_08_08_3_0_1F_YBR_FULL_422 = get_testdata_file("SC_rgb_dcmtk_+eb+cy+n2.dcm")
JPGB_08_08_3_0_1F_YBR_FULL_444 = get_testdata_file("SC_rgb_dcmtk_+eb+cy+s4.dcm")
JPGB_08_08_3_0_1F_RGB = get_testdata_file("SC_jpeg_no_color_transform.dcm")
JPGB_08_08_3_0_1F_RGB_APP14 = get_testdata_file(
"SC_jpeg_no_color_transform_2.dcm"
) # noqa
JPGB_08_08_3_0_1F_RGB_APP14 = get_testdata_file("SC_jpeg_no_color_transform_2.dcm")
# JPGE: 1.2.840.10008.1.2.4.51 - JPEG Extended (Process 2 and 4) (8 and 12-bit)
# No supported datasets available

@@ -224,27 +216,27 @@ def test_using_pillow_handler_raises(self):
(
JPGB_08_08_3_0_120F_YBR_FULL_422,
(JPGB, 8, 3, 0, 120, (120, 480, 640, 3), "uint8"),
), # noqa
),
(
JPGB_08_08_3_0_1F_YBR_FULL_422_411,
(JPGB, 8, 3, 0, 1, (100, 100, 3), "uint8"),
), # noqa
),
(
JPGB_08_08_3_0_1F_YBR_FULL_422_422,
(JPGB, 8, 3, 0, 1, (100, 100, 3), "uint8"),
), # noqa
),
(
JPGB_08_08_3_0_1F_YBR_FULL_411,
(JPGB, 8, 3, 0, 1, (100, 100, 3), "uint8"),
), # noqa
),
(
JPGB_08_08_3_0_1F_YBR_FULL_422,
(JPGB, 8, 3, 0, 1, (100, 100, 3), "uint8"),
), # noqa
),
(
JPGB_08_08_3_0_1F_YBR_FULL_444,
(JPGB, 8, 3, 0, 1, (100, 100, 3), "uint8"),
), # noqa
),
(JPGB_08_08_3_0_1F_RGB, (JPGB, 8, 3, 0, 1, (256, 256, 3), "uint8")),
(JPGB_08_08_3_0_1F_RGB_APP14, (JPGB, 8, 3, 0, 1, (256, 256, 3), "uint8")),
(J2KR_08_08_3_0_1F_YBR_ICT, (J2KR, 8, 3, 0, 1, (480, 640, 3), "uint8")),
30 changes: 12 additions & 18 deletions tests/test_pylibjpeg.py
@@ -104,19 +104,13 @@
# FMT_BA_BV_SPX_PR_FRAMESF_PI
# JPGB: 1.2.840.10008.1.2.4.50 - JPEG Baseline (8-bit only)
JPGB_08_08_3_0_1F_YBR_FULL = get_testdata_file("SC_rgb_small_odd_jpeg.dcm")
JPGB_08_08_3_0_120F_YBR_FULL_422 = get_testdata_file(
"color3d_jpeg_baseline.dcm"
) # noqa
JPGB_08_08_3_0_120F_YBR_FULL_422 = get_testdata_file("color3d_jpeg_baseline.dcm")
# Different subsampling 411, 422, 444
JPGB_08_08_3_0_1F_YBR_FULL_422_411 = get_testdata_file(
"SC_rgb_dcmtk_+eb+cy+np.dcm"
) # noqa
JPGB_08_08_3_0_1F_YBR_FULL_422_422 = get_testdata_file(
"SC_rgb_dcmtk_+eb+cy+s2.dcm"
) # noqa
JPGB_08_08_3_0_1F_YBR_FULL_411 = get_testdata_file("SC_rgb_dcmtk_+eb+cy+n1.dcm") # noqa
JPGB_08_08_3_0_1F_YBR_FULL_422 = get_testdata_file("SC_rgb_dcmtk_+eb+cy+n2.dcm") # noqa
JPGB_08_08_3_0_1F_YBR_FULL_444 = get_testdata_file("SC_rgb_dcmtk_+eb+cy+s4.dcm") # noqa
JPGB_08_08_3_0_1F_YBR_FULL_422_411 = get_testdata_file("SC_rgb_dcmtk_+eb+cy+np.dcm")
JPGB_08_08_3_0_1F_YBR_FULL_422_422 = get_testdata_file("SC_rgb_dcmtk_+eb+cy+s2.dcm")
JPGB_08_08_3_0_1F_YBR_FULL_411 = get_testdata_file("SC_rgb_dcmtk_+eb+cy+n1.dcm")
JPGB_08_08_3_0_1F_YBR_FULL_422 = get_testdata_file("SC_rgb_dcmtk_+eb+cy+n2.dcm")
JPGB_08_08_3_0_1F_YBR_FULL_444 = get_testdata_file("SC_rgb_dcmtk_+eb+cy+s4.dcm")
JPGB_08_08_3_0_1F_RGB = get_testdata_file("SC_rgb_dcmtk_+eb+cr.dcm")
# JPGE: 1.2.840.1.2.4.51 - JPEG Extended
JPGE_BAD = get_testdata_file("JPEG-lossy.dcm") # Bad JPEG file
@@ -134,27 +128,27 @@
(
JPGB_08_08_3_0_120F_YBR_FULL_422,
(JPGB, 8, 3, 0, 120, (120, 480, 640, 3), "uint8"),
), # noqa
),
(
JPGB_08_08_3_0_1F_YBR_FULL_422_411,
(JPGB, 8, 3, 0, 1, (100, 100, 3), "uint8"),
), # noqa
),
(
JPGB_08_08_3_0_1F_YBR_FULL_422_422,
(JPGB, 8, 3, 0, 1, (100, 100, 3), "uint8"),
), # noqa
),
(
JPGB_08_08_3_0_1F_YBR_FULL_411,
(JPGB, 8, 3, 0, 1, (100, 100, 3), "uint8"),
), # noqa
),
(
JPGB_08_08_3_0_1F_YBR_FULL_422,
(JPGB, 8, 3, 0, 1, (100, 100, 3), "uint8"),
), # noqa
),
(
JPGB_08_08_3_0_1F_YBR_FULL_444,
(JPGB, 8, 3, 0, 1, (100, 100, 3), "uint8"),
), # noqa
),
(JPGB_08_08_3_0_1F_RGB, (JPGB, 8, 3, 0, 1, (100, 100, 3), "uint8")),
(JPGE_16_12_1_0_1F_M2, (JPGE, 16, 1, 0, 1, (1024, 256), "uint16")),
(JPGL_08_08_1_0_1F, (JPGL, 8, 1, 0, 1, (768, 1024), "uint8")),
4 changes: 2 additions & 2 deletions util/generate_dict/generate_dicom_dict.py
@@ -82,8 +82,8 @@ def write_dict(fp, dict_name, attributes, tag_is_string):
entry_format = f"{{Tag}}: {tag_content}"

fp.write(f"\n{dict_name} = {{\n ")
fp.write(", # noqa\n ".join(entry_format.format(**attr) for attr in attributes))
fp.write(" # noqa\n}\n")
fp.write(",\n ".join(entry_format.format(**attr) for attr in attributes))
fp.write("\n}\n")


def parse_header(header_row):
2 changes: 1 addition & 1 deletion util/generate_dict/generate_private_dict.py
@@ -87,7 +87,7 @@ def write_dict(fp, dict_name, dict_entries):
vr, vm, name = dict_entries[owner][tag]
quote = '"' if "'" in name else "'"
fp.write(
f""" '{tag}': ('{vr}', '{vm}', {quote}{name}{quote}, ''), # noqa\n"""
f""" '{tag}': ('{vr}', '{vm}', {quote}{name}{quote}, ''),\n"""
)
fp.write(" },\n")
fp.write("}\n")
4 changes: 2 additions & 2 deletions util/generate_dict/generate_uid_dict.py
@@ -47,8 +47,8 @@ def write_dict(fp, dict_name, attributes):
)

fp.write(f"\n{dict_name} = {{\n ")
fp.write(", # noqa\n ".join(uid_entry.format(**attr) for attr in attributes))
fp.write(" # noqa\n}\n")
fp.write(",\n ".join(uid_entry.format(**attr) for attr in attributes))
fp.write("\n}\n")


def parse_row(column_names, row):
2 changes: 1 addition & 1 deletion util/generate_uids/generate_storage_sopclass_uids.py
@@ -27,7 +27,7 @@ def is_storage_class(attributes):

def uid_line(uid, keyword):
"""Return the UID class definition line to be written to the file."""
return f"{keyword} = UID('{uid}') # noqa\n"
return f"{keyword} = UID('{uid}')\n"


def update_uids(path: Path) -> None:
