Skip to content

Commit 27302a4

Browse files
maggiemoss authored and pytorchmergebot committed
Fix error suppression syntax in onnx, jit, _dynamo (#166249)
Ensures pyrefly will only silence one specific error code. Checked with `pyrefly check` and `lintrunner`. Pull Request resolved: #166249. Approved by: https://github.com/oulgen
1 parent 507614b commit 27302a4

69 files changed

Lines changed: 303 additions & 303 deletions

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

torch/_dynamo/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,7 @@
5555
reset_code,
5656
)
5757

58-
# pyrefly: ignore # deprecated
58+
# pyrefly: ignore [deprecated]
5959
from .external_utils import is_compiling
6060
from .mutation_guard import GenerationTracker
6161
from .pgo import reset_code_state

torch/_dynamo/_trace_wrapped_higher_order_op.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -95,7 +95,7 @@ class ModIndex(torch.autograd.Function):
9595
generate_vmap_rule = True
9696

9797
@staticmethod
98-
# pyrefly: ignore # bad-override
98+
# pyrefly: ignore [bad-override]
9999
def forward(x: Tensor, indices: list[Tensor]) -> Tensor:
100100
return torch.ops.aten.index(x, indices)
101101

@@ -243,7 +243,7 @@ def _trace_wrapped_functionalized(ctx: Any, *args: Any, **kwargs: Any) -> Any:
243243

244244
def autograd_function_backward_rewritten(original_backward: Any) -> Any:
245245
def new_backward(ctx: Any, *grads: Any) -> Any:
246-
# pyrefly: ignore # bad-assignment
246+
# pyrefly: ignore [bad-assignment]
247247
grads = [g.contiguous() for g in grads]
248248
return original_backward(ctx, *grads)
249249

torch/_dynamo/aot_compile.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -81,7 +81,7 @@ def __post_init__(self) -> None:
8181
**import_sources,
8282
self._artifacts.backend_id: self._artifacts.compiled_fn,
8383
}
84-
# pyrefly: ignore # read-only
84+
# pyrefly: ignore [read-only]
8585
self.fn = types.FunctionType(
8686
self._artifacts.bytecode, f_globals, closure=self._artifacts.closure
8787
)

torch/_dynamo/backends/cudagraphs.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -206,7 +206,7 @@ def backward_cudagraphs(
206206
assert manager is not None
207207

208208
def fn(inputs: list[Any]) -> Any:
209-
# pyrefly: ignore # missing-attribute
209+
# pyrefly: ignore [missing-attribute]
210210
manager.set_to_running_backward()
211211
return aot_model(inputs)
212212

torch/_dynamo/backends/tvm.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -78,19 +78,19 @@ def tvm(
7878
opt_level = options.get("opt_level", 3)
7979

8080
if scheduler == "auto_scheduler":
81-
# pyrefly: ignore # import-error
81+
# pyrefly: ignore [import-error]
8282
from tvm import auto_scheduler
8383

8484
log_file = tempfile.NamedTemporaryFile()
8585

86-
# pyrefly: ignore # bad-argument-type
86+
# pyrefly: ignore [bad-argument-type]
8787
if not os.path.exists(log_file):
8888
tasks, task_weights = auto_scheduler.extract_tasks(
8989
mod["main"], params, target
9090
)
9191
if len(tasks) != 0:
9292
tuner = auto_scheduler.TaskScheduler(tasks, task_weights)
93-
# pyrefly: ignore # bad-argument-type
93+
# pyrefly: ignore [bad-argument-type]
9494
if not os.path.exists(log_file):
9595
assert trials > 0
9696
tune_option = auto_scheduler.TuningOptions(
@@ -101,9 +101,9 @@ def tvm(
101101
try:
102102
tuner.tune(tune_option)
103103
except Exception:
104-
# pyrefly: ignore # bad-argument-type
104+
# pyrefly: ignore [bad-argument-type]
105105
if os.path.exists(log_file):
106-
# pyrefly: ignore # bad-argument-type
106+
# pyrefly: ignore [bad-argument-type]
107107
os.unlink(log_file)
108108
raise
109109

@@ -113,7 +113,7 @@ def tvm(
113113
):
114114
lib = relay.build(mod, target=target, params=params)
115115
elif scheduler == "meta_schedule":
116-
# pyrefly: ignore # import-error
116+
# pyrefly: ignore [import-error]
117117
from tvm import meta_schedule as ms
118118

119119
with tempfile.TemporaryDirectory() as work_dir:

torch/_dynamo/bytecode_analysis.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,7 @@
3737
TERMINAL_OPCODES.add(dis.opmap["JUMP_FORWARD"])
3838
else:
3939
TERMINAL_OPCODES.add(dis.opmap["JUMP_ABSOLUTE"])
40-
# pyrefly: ignore # unsupported-operation
40+
# pyrefly: ignore [unsupported-operation]
4141
if (3, 12) <= sys.version_info < (3, 14):
4242
TERMINAL_OPCODES.add(dis.opmap["RETURN_CONST"])
4343
if sys.version_info >= (3, 13):

torch/_dynamo/bytecode_transformation.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -923,7 +923,7 @@ def devirtualize_jumps(instructions: list[Instruction]) -> None:
923923
inst.arg = abs(
924924
int(target.offset - inst.offset - instruction_size(inst))
925925
)
926-
# pyrefly: ignore # unsupported-operation
926+
# pyrefly: ignore [unsupported-operation]
927927
inst.arg //= 2
928928
inst.argval = target.offset
929929
inst.argrepr = f"to {target.offset}"
@@ -1375,7 +1375,7 @@ def update_offsets(instructions: Sequence[Instruction]) -> None:
13751375
offset = 0
13761376
for inst in instructions:
13771377
inst.offset = offset
1378-
# pyrefly: ignore # unsupported-operation
1378+
# pyrefly: ignore [unsupported-operation]
13791379
offset += instruction_size(inst)
13801380

13811381

torch/_dynamo/convert_frame.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -475,7 +475,7 @@ def profile_wrapper(*args: _P.args, **kwargs: _P.kwargs) -> _T:
475475
try:
476476
prof.enable()
477477
start_ts = time.time()
478-
# pyrefly: ignore # bad-argument-type
478+
# pyrefly: ignore [bad-argument-type]
479479
retval = prof.runcall(func, *args, **kwargs)
480480
profile_latency = time.time() - start_ts
481481
prof.disable()
@@ -995,7 +995,7 @@ def get_traced_fn(mod: Any) -> tuple[FunctionType, Optional[object]]:
995995
if isinstance(mod, torch.nn.Module):
996996
mod = mod.forward
997997
if hasattr(mod, "__self__"):
998-
# pyrefly: ignore # missing-attribute
998+
# pyrefly: ignore [missing-attribute]
999999
return mod.__func__, mod.__self__
10001000
elif inspect.isfunction(mod):
10011001
return mod, None
@@ -1140,7 +1140,7 @@ def fullgraph_compiler(
11401140
while cur_exn.__cause__ is not None:
11411141
cur_exn.__cause__.with_traceback(None)
11421142
cur_exn = cur_exn.__cause__
1143-
# pyrefly: ignore # invalid-inheritance
1143+
# pyrefly: ignore [invalid-inheritance]
11441144
raise e.with_traceback(None) from e.__cause__ # User compiler error
11451145

11461146
return CaptureOutput(
@@ -1164,7 +1164,7 @@ def compile_frame( # type: ignore[return]
11641164
frame_state: Optional[dict[str, Union[int, FrameStateSizeEntry]]] = None,
11651165
distributed_state: Optional[DistributedState] = None,
11661166
package: Optional[CompilePackage] = None,
1167-
# pyrefly: ignore # bad-return
1167+
# pyrefly: ignore [bad-return]
11681168
) -> DynamoOutput:
11691169
"""
11701170
A helper function taking a frame and backend, then return the generated bytecode

torch/_dynamo/create_parameter_op.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@
2020

2121
class TracableCreateParameter(torch.autograd.Function):
2222
@staticmethod
23-
# pyrefly: ignore # bad-override
23+
# pyrefly: ignore [bad-override]
2424
def forward(ctx: Any, tensor: Any, placeholder: Any) -> torch.nn.Parameter:
2525
assert not tensor.requires_grad
2626
return placeholder.set_(tensor)

torch/_dynamo/debug_utils.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -879,7 +879,7 @@ def gen_tensor(shape: torch._prims_common.ShapeType, dtype: torch.dtype) -> Tens
879879
data_type, shape_str = match.groups()
880880
shape = tuple(shape_str.split(","))
881881
dtype = dtype_map[data_type]
882-
# pyrefly: ignore # bad-argument-type
882+
# pyrefly: ignore [bad-argument-type]
883883
kwargs[param] = gen_tensor(shape, dtype)
884884

885885
match = re.search(sym_shape_regex, annotation)
@@ -893,7 +893,7 @@ def gen_tensor(shape: torch._prims_common.ShapeType, dtype: torch.dtype) -> Tens
893893
attr_name, data_type, shape_str, _ = match.groups()
894894
shape = tuple(shape_str.split(","))
895895
dtype = dtype_map[data_type]
896-
# pyrefly: ignore # bad-argument-type
896+
# pyrefly: ignore [bad-argument-type]
897897
setattr(container, attr_name, gen_tensor(shape, dtype))
898898

899899
return kwargs

0 commit comments

Comments
 (0)