Commit 3c75013

chore(deps): bump ruff from 0.7.3 to 0.8.4 in /requirements/lintrunner (#1982)
Parent: 9a4c4f5

14 files changed: +38 −43 lines

onnxscript/_thirdparty/asciichartpy.py (+2 −2)

@@ -198,8 +198,8 @@ def plot(series, *, bin_edges=None, cfg=None):
     height = cfg.get("height", interval)
     ratio = height / interval if interval > 0 else 1

-    min2 = int(floor(minimum * ratio))
-    max2 = int(ceil(maximum * ratio))
+    min2 = floor(minimum * ratio)
+    max2 = ceil(maximum * ratio)

     def clamp(n):
         return min(max(n, minimum), maximum)
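
The `int(...)` wrappers here were redundant: in Python 3, `math.floor` and `math.ceil` already return `int` for float inputs, which is presumably what the upgraded ruff flags. A minimal sketch of the behavior:

    # Python 3: math.floor/math.ceil return int for float arguments,
    # so int(floor(x)) just re-wraps an int in int().
    from math import ceil, floor

    assert isinstance(floor(2.7), int) and floor(2.7) == 2
    assert isinstance(ceil(2.3), int) and ceil(2.3) == 3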

onnxscript/converter.py (+8 −8)

@@ -1239,14 +1239,14 @@ def _translate_loop_stmt(self, loop_stmt: Union[ast.For, ast.While]) -> None:
                 if i != len(loop_stmt.body) - 1:
                     self.fail(s, "Instruction break must be the last one of the loop.")

-                _current_scope = self._current_scope()
-                if s.test.id not in _current_scope:
+                current_scope = self._current_scope()
+                if s.test.id not in current_scope:
                     self.fail(
                         loop_stmt,
                         f"Unable to find condition variable {s.test.id!r} in known "
-                        f"variables {list(_current_scope)!r}.",
+                        f"variables {list(current_scope)!r}.",
                     )
-                condition_name = _current_scope[s.test.id].value
+                condition_name = current_scope[s.test.id].value
                 operator_name = "Not"
                 continue
             self._translate_stmt(s)
@@ -1255,14 +1255,14 @@ def _translate_loop_stmt(self, loop_stmt: Union[ast.For, ast.While]) -> None:

         if cond_while is not None:
             # Loop while
-            _current_scope = self._current_scope()
-            if cond_while not in _current_scope:
+            current_scope = self._current_scope()
+            if cond_while not in current_scope:
                 self.fail(
                     loop_stmt,
                     f"Unable to find condition variable {cond_while!r} in known "
-                    f"variables {list(_current_scope)!r}.",
+                    f"variables {list(current_scope)!r}.",
                 )
-            o_cond_var = _current_scope[cond_while].value
+            o_cond_var = current_scope[cond_while].value

             self.emit(
                 [o_cond_out],
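
This rename (and the matching ones in evaluator.py, deduce_type_constraints.py, and _smollm_1layer.py below) lines up with RUF052, new in ruff 0.8, which flags underscore-prefixed "dummy" locals that are in fact read later. A minimal sketch of the pattern, using a hypothetical `compute()`:

    def compute():  # hypothetical helper, stands in for any real computation
        return 41

    def before():
        _result = compute()  # RUF052: the `_` prefix says "unused"...
        return _result + 1   # ...but the variable is read here

    def after():
        result = compute()   # renamed: no dummy prefix on a live variable
        return result + 1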

onnxscript/evaluator.py (+4 −4)

@@ -290,16 +290,16 @@ def eval_function(
         has_array = False
         for arg, param_schema in tagged_args:
             if param_schema.is_input:
-                adapted_arg, _has_array = _adapt_to_eager_mode(arg)
-                has_array = has_array or _has_array
+                adapted_arg, has_array_ = _adapt_to_eager_mode(arg)
+                has_array = has_array or has_array_
                 adapted_args.append(adapted_arg)
             else:
                 adapted_args.append(arg)

         for key, (arg, param_schema) in tagged_kwargs.items():
             if param_schema.is_input:
-                adapted_arg, _has_array = _adapt_to_eager_mode(arg)
-                has_array = has_array or _has_array
+                adapted_arg, has_array_ = _adapt_to_eager_mode(arg)
+                has_array = has_array or has_array_
                 adapted_kwargs[key] = adapted_arg
             else:
                 adapted_kwargs[key] = arg
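
Here the fix moves the underscore to the end (`has_array_`) instead of dropping it, presumably to avoid colliding with the `has_array` accumulator in the same scope; a single trailing underscore echoes the PEP 8 convention for sidestepping a name clash.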

onnxscript/function_libs/tools/torch_lib/deduce_type_constraints.py (+4 −4)

@@ -210,15 +210,15 @@ def type_constraints(self, signature_only: bool = True) -> OnnxFunctionTypeConst
         )

         # Rename type constraints to T0, T1, T2, ...
-        _seen_type_constraints: Set[TypeConstraint] = set()
+        seen_type_constraints: Set[TypeConstraint] = set()
         for type_constraint in (
             *input_type_constraints.values(),
             *output_type_constraints.values(),
             *intermediate_type_constraints.values(),
         ):
-            if type_constraint is not None and type_constraint not in _seen_type_constraints:
-                type_constraint.name = f"T{len(_seen_type_constraints)}"
-                _seen_type_constraints.add(type_constraint)
+            if type_constraint is not None and type_constraint not in seen_type_constraints:
+                type_constraint.name = f"T{len(seen_type_constraints)}"
+                seen_type_constraints.add(type_constraint)

         return OnnxFunctionTypeConstraints(
             input_type_constraints, output_type_constraints, intermediate_type_constraints

onnxscript/function_libs/tools/torch_lib/generate_aten_signatures.py (+1 −1)

@@ -283,7 +283,7 @@ def main(args: argparse.Namespace) -> None:
             functions[module_name] = {}
         op_name = get_op_name(func)
         if op_name in functions[module_name]:
-            logging.warning(
+            logging.warning(  # noqa: LOG015
                 "Duplicated function: %s, overload: %s", op_name, func.func.name.overload_name
             )
             continue
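
LOG015 is ruff's check against calling the root logger directly (`logging.warning(...)` rather than `logger.warning(...)` on a module-level logger); the `noqa` keeps the root-logger call in this one-off generator script. The conventional fix, applied in serde.py below, looks like this sketch (`report_duplicate` is a hypothetical function for illustration):

    import logging

    logger = logging.getLogger(__name__)  # module-level logger, not the root logger

    def report_duplicate(op_name: str) -> None:
        logger.warning("Duplicated function: %s", op_name)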

onnxscript/function_libs/tools/torch_lib/generate_prims_signatures.py (+2 −2)

@@ -258,7 +258,7 @@ def _get_func_schema_in_namespace(namespaces: List[_OpNamespace]) -> Dict[str, F
             # to "resize(Tensor a, SymInt[] shape) -> Tensor"
             if "!" in op_overload_packet.schema:
                 op_overload_packet.schema = re.sub(  # type: ignore[attr-defined]
-                    "[(][A-Za-z]![)]", "", op_overload_packet.schema
+                    r"[(][A-Za-z]![)]", "", op_overload_packet.schema
                 )

             # FIXME: remove below code if the issue below is fixed.
@@ -283,7 +283,7 @@ def main(args: argparse.Namespace) -> None:
         if module_name not in functions:
             functions[module_name] = {}
         if op_name in functions[module_name]:
-            logging.warning(
+            logging.warning(  # noqa: LOG015
                 "Duplicated function: %s, overload: %s",
                 op_name,
                 func_schema.name.overload_name,
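
The `r` prefix changes nothing here, since the pattern contains no backslash escapes; it likely satisfies a newer ruff preference for writing regex patterns as raw strings. The same one-character change appears in benchmark_helpers.py and benchmark_run.py below. For illustration:

    import re

    # With no backslashes in the pattern, raw and non-raw forms compile identically.
    plain = re.sub("[(][A-Za-z]![)]", "", "resize(a!)")
    raw = re.sub(r"[(][A-Za-z]![)]", "", "resize(a!)")
    assert plain == raw == "resize"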

onnxscript/ir/serde.py (+2 −2)

@@ -1071,7 +1071,7 @@ def format_name(value_name: str) -> str:

     for input in function.inputs:
         if not input.name:
-            logging.warning(
+            logger.warning(
                 "Function '%s': Value name not set for function input: %s",
                 function_qualified_name,
                 input,
@@ -1084,7 +1084,7 @@ def format_name(value_name: str) -> str:
     for node in function:
         for node_output in node.outputs:
             if not node_output.name:
-                logging.warning(
+                logger.warning(
                     "Function '%s': Value name not set for node output: %s",
                     function_qualified_name,
                     node_output,
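
Unlike the generator scripts above, serde.py already defines a module-level `logger`, so here the right fix is to use it rather than suppress the warning: `logging.warning` writes through the root logger and bypasses any handlers or levels configured on the module's own logger. This one is a small behavioral fix, not just a style change.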

onnxscript/rewriter/onnxruntime/xformers/_smollm_1layer.py (+4 −4)

@@ -71,7 +71,7 @@ def main_graph(
         val_191 = opset18.Transpose(slice_scatter, perm=[1, 0, 2, 3])
         slice_scatter_1 = opset18.Transpose(val_191, perm=[1, 0, 2, 3])
         unsqueeze_6 = opset18.Unsqueeze(input2, 1)
-        _to_copy_1 = opset18.Cast(unsqueeze_6, to=1)
+        to_copy_1 = opset18.Cast(unsqueeze_6, to=1)
         view_1 = opset18.Constant(
             value=make_tensor(
                 "value",
@@ -113,7 +113,7 @@ def main_graph(
                 ],
             )
         )
-        view_2 = opset18.Reshape(_to_copy_1, [1, 1, 10], allowzero=0)
+        view_2 = opset18.Reshape(to_copy_1, [1, 1, 10], allowzero=0)
         bmm = view_1 @ view_2
         view_3 = opset18.Reshape(bmm, [1, 32, 10], allowzero=0)
         transpose = opset18.Transpose(view_3, perm=[0, 2, 1])
@@ -199,8 +199,8 @@ def main_graph(
         mul_13 = model_norm_weight * mul_12
         t_7 = opset18.Transpose(lm_head_weight, perm=[1, 0])
         view_23 = mul_13 @ t_7
-        _to_copy_12 = opset18.Identity(view_23)
-        return _to_copy_12, add_3, transpose_3
+        to_copy_12 = opset18.Identity(view_23)
+        return to_copy_12, add_3, transpose_3

     model = main_graph.to_model_proto()
     return model

onnxscript/rewriter/pattern.py (+1 −1)

@@ -411,7 +411,7 @@ def clone(self, node_map: dict[NodePattern, NodePattern]) -> ValuePattern:
     def name(self) -> str | None:
         return self._name

-    def producer(self) -> None | NodePattern:
+    def producer(self) -> NodePattern | None:
         return None

     def uses(self) -> Sequence[tuple[NodePattern, int]]:
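
Reordering the union puts `None` last, the conventional spelling (`X | None`, matching the `Optional[X]` reading order); ruff 0.8 appears to enforce this ordering. The annotations are equivalent at runtime:

    from typing import Optional

    # Three spellings of the same type; `X | None` is the idiomatic order.
    assert (int | None) == (None | int) == Optional[int]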

onnxscript/tools/benchmark/benchmark_helpers.py (+1 −1)

@@ -108,7 +108,7 @@ def _cmd_line(script_name: str, **kwargs: dict[str, Any]) -> list[str]:


 def _extract_metrics(text: str) -> dict[str, str]:
-    reg = re.compile(":(.*?),(.*.?);")
+    reg = re.compile(r":(.*?),(.*.?);")
     res = reg.findall(text)
     if len(res) == 0:
         return {}

onnxscript/tools/benchmark/benchmark_run.py (+1 −1)

@@ -45,7 +45,7 @@ def _cmd_line(script_name: str, **kwargs: dict[str, str | int | float]) -> list[


 def _extract_metrics(text: str) -> dict[str, str]:
-    reg = re.compile(":(.*?),(.*.?);")
+    reg = re.compile(r":(.*?),(.*.?);")
     res = reg.findall(text)
     if len(res) == 0:
         return {}

pyproject.toml (+1 −0)

@@ -194,6 +194,7 @@ ignore = [
     "PYI041", # int | float is more clear
     "RUF022", # We don't need to sort __all__ for elements to be grouped
     "RUF031", # Parentheses for tuple in subscripts is more readable
+    "RUF052", # Variables with `_` prefix may not be dummy variables in all cases
     "SIM102", # Collapible if statements are not always more readable
     "SIM108", # We don't always encourage ternary operators
     "SIM114", # Don't always combine if branches for debugability

requirements/lintrunner/requirements.txt (+1 −1)

@@ -1,7 +1,7 @@
 # This file is auto updated by dependabot
 lintrunner-adapters>=0.8.0
 # RUFF, RUFF-FIX
-ruff==0.7.3
+ruff==0.8.4
 # MYPY
 mypy==1.10.1
 types-PyYAML==6.0.12.20240808
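
This pin is the actual dependabot change; every other file in the commit appears to be fallout from checks that are new or newly enabled in the ruff 0.8 line (RUF052, LOG015, and the raw-string and union-ordering preferences seen above).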

tests/function_libs/torch_lib/ops_test_data.py (+6 −12)

@@ -254,19 +254,16 @@ def _embedding_input_wrangler(
     args: list[Any], kwargs: dict[str, Any]
 ) -> tuple[list[Any], dict[str, Any]]:
     """Remove arguments not present in the aten op signature."""
-    if "max_norm" in kwargs:
-        del kwargs["max_norm"]
-    if "norm_type" in kwargs:
-        del kwargs["norm_type"]
+    kwargs.pop("max_norm", None)
+    kwargs.pop("norm_type", None)
     return args, kwargs


 def _empty_input_wrangler(
     args: list[Any], kwargs: dict[str, Any]
 ) -> tuple[list[Any], dict[str, Any]]:
     """Remove arguments not present in the aten op signature."""
-    if "requires_grad" in kwargs:
-        del kwargs["requires_grad"]
+    kwargs.pop("requires_grad", None)
     return args, kwargs


@@ -325,8 +322,7 @@ def _max_pool_input_wrangler(
     args: list[Any], kwargs: dict[str, Any]
 ) -> tuple[list[Any], dict[str, Any]]:
     # Remove return_indices argument because this op doesn't accept it
-    if "return_indices" in kwargs:
-        del kwargs["return_indices"]
+    kwargs.pop("return_indices", None)
     return args, kwargs


@@ -364,8 +360,7 @@ def _nll_loss_input_wrangler(
 def _nonzero_input_wrangler(
     args: list[Any], kwargs: dict[str, Any]
 ) -> tuple[list[Any], dict[str, Any]]:
-    if "as_tuple" in kwargs:
-        del kwargs["as_tuple"]
+    kwargs.pop("as_tuple", None)
     return args, kwargs


@@ -421,8 +416,7 @@ def _roll_input_wrangler(
 def _scalar_tensor_input_wrangler(
     args: list[Any], kwargs: dict[str, Any]
 ) -> tuple[list[Any], dict[str, Any]]:
-    if "requires_grad" in kwargs:
-        del kwargs["requires_grad"]
+    kwargs.pop("requires_grad", None)
     return args, kwargs