
Commit 392acee

cyyever authored and pytorchmergebot committed
[6/N] Remove unused loop variables in tests (#166785)
This PR removes unused loop variables in tests.

Pull Request resolved: #166785
Approved by: https://github.com/Skylion007, https://github.com/mlazos
1 parent fee1ac9 commit 392acee

11 files changed: 15 additions & 15 deletions
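As a standalone sketch of the pattern applied across these files (the names below are illustrative, not taken from the diff): a loop variable that is never read inside the body is renamed to _, which satisfies lint rules such as bugbear's B007 without changing behavior.

# Minimal sketch of the cleanup; "work" is a placeholder, not a name from this PR.
def work() -> None:
    pass

# Before: "i" is never read, so linters flag it as unused.
for i in range(10):
    work()

# After: "_" marks the value as intentionally unused.
for _ in range(10):
    work()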


test/distributed/_composable/fsdp/test_fully_shard_state_dict.py

Lines changed: 2 additions & 2 deletions
@@ -438,8 +438,8 @@ def test_rank0_offload_full_state_dict(self):
         if self.rank == 0:
             self.assertEqual(len(full_sd), len(ref_full_sd))
             self.assertEqual(list(full_sd.keys()), list(ref_full_sd.keys()))
-            for (param_name, param), ref_param in zip(
-                full_sd.items(), ref_full_sd.values()
+            for param, ref_param in zip(
+                full_sd.values(), ref_full_sd.values(), strict=True
             ):
                 self.assertEqual(param.device, torch.device("cpu"))
                 self.assertEqual(param.device, ref_param.device)
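
A note on the behavioral part of this hunk: besides dropping the unused param_name, it adds strict=True, which on Python 3.10+ makes zip() raise ValueError when its iterables differ in length instead of silently truncating. A minimal standalone sketch, not taken from the test:

# zip(..., strict=True) raises on length mismatch (Python 3.10+).
pairs = list(zip([1, 2, 3], ["a", "b", "c"], strict=True))
assert pairs == [(1, "a"), (2, "b"), (3, "c")]

try:
    list(zip([1, 2], ["a", "b", "c"], strict=True))
except ValueError as exc:
    print(exc)  # e.g. "zip() argument 2 is longer than argument 1"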

test/distributed/fsdp/test_checkpoint_wrapper.py

Lines changed: 1 addition & 1 deletion
@@ -303,7 +303,7 @@ def check_fn(l):
         )

         inp = torch.randn(4, 10, requires_grad=True)
-        for i in range(6):
+        for _ in range(6):
             # Kwarg input
             loss = model(x=inp).sum()
             self.assertTrue(loss.requires_grad)

test/distributed/test_c10d_ops_nccl.py

Lines changed: 1 addition & 1 deletion
@@ -278,7 +278,7 @@ def test_alltoall_ops_with_cudafree_race(self):
             tmp.append(torch.rand(10 ** (3 + i), device=local_device))
         race_tensors.append(tmp)

-        for i in range(10):
+        for _ in range(10):
             race_tensors.pop()
             work = pg.alltoall_base(output, input, [], [], opts)
             # this triggers cudaFree

test/dynamo/test_subgraphs.py

Lines changed: 1 addition & 1 deletion
@@ -580,7 +580,7 @@ def fn(x, it):

     def test_enumerate_not_break_graph(self):
         def fn(a, b):
-            for i, x in enumerate(a.shape):
+            for _, x in enumerate(a.shape):
                 b = b + x
             for i, x in enumerate(b.shape, 8):
                 b = b + x * i
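
For context on why only the first loop changes: the second loop reads the index, and enumerate(b.shape, 8) starts counting at 8. A standalone sketch with a made-up shape:

shape = (4, 10)  # made-up stand-in for a.shape / b.shape

# Index unused: equivalent to iterating the tuple directly.
total = 0
for _, x in enumerate(shape):
    total += x
assert total == 14

# Index used: enumerate(shape, 8) yields (8, 4) then (9, 10).
weighted = 0
for i, x in enumerate(shape, 8):
    weighted += x * i
assert weighted == 4 * 8 + 10 * 9  # 122

Once the index is dropped, enumerate is arguably redundant (for x in a.shape: would behave the same); the diff keeps the minimal rename.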

test/functorch/test_control_flow.py

Lines changed: 1 addition & 1 deletion
@@ -742,7 +742,7 @@ def forward(self, pred_1, x_1):

     def test_cond_in_forloop(self):
         def for_loop_fake(x):
-            for i in range(3):
+            for _ in range(3):
                 x = x * x + 1
             return x

test/inductor/test_compile_worker.py

Lines changed: 2 additions & 2 deletions
@@ -104,7 +104,7 @@ def doit():

         t = Timer(0.1, doit)
         t.sleep_time = 0.1
-        for i in range(10):
+        for _ in range(10):
             t.record_call()
         self.assertTrue(done.wait(4))
         done.clear()
@@ -130,7 +130,7 @@ def doit():

         t = Timer(1, doit)
         t.sleep_time = 0.1
-        for i in range(400):
+        for _ in range(400):
             t.record_call()
         self.assertTrue(done.wait(4))
         t.quit()

test/jit/test_module_containers.py

Lines changed: 1 addition & 1 deletion
@@ -300,7 +300,7 @@ def forward(self, inputs):

         # note: unable to index moduledict with a string variable currently
         i = 0
-        for key in self.moduledict:
+        for _ in self.moduledict:
             i += 1
         assert i == len(self.moduledict), "iteration failing for ModuleDict"
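
For context on the loop above: iterating an nn.ModuleDict yields its keys, so when the key itself is unused only the count matters. A standalone sketch with made-up module names:

import torch.nn as nn

moduledict = nn.ModuleDict({"proj": nn.Linear(4, 4), "act": nn.ReLU()})

# Iterating a ModuleDict yields keys; here only the count is needed.
i = 0
for _ in moduledict:
    i += 1
assert i == len(moduledict)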

test/profiler/test_profiler.py

Lines changed: 1 addition & 1 deletion
@@ -2124,7 +2124,7 @@ def test_cpu_annotation_overlap(self):
                 adjust_profiler_step=True
             ),
         ) as prof:
-            for i in range(5):
+            for _ in range(5):
                 self._step_helper_func(prof)
         with TemporaryFileName(mode="w+") as fname:
             prof.export_chrome_trace(fname)

test/quantization/fx/test_quantize_fx.py

Lines changed: 2 additions & 2 deletions
@@ -9435,7 +9435,7 @@ def _test_model_impl(
             criterion = nn.CrossEntropyLoss()
             train_one_epoch(prepared, criterion, optimizer, [(input_value, output_value)], torch.device('cpu'), 1)
         else:
-            for i in range(10):
+            for _ in range(10):
                 prepared(input_value)

         # print('after observation root:', prepared.root)
@@ -9480,7 +9480,7 @@ def _test_model_impl(
             optimizer = torch.optim.SGD(qeager.parameters(), lr=0.0001)
             train_one_epoch(qeager, criterion, optimizer, [(input_value, output_value)], torch.device('cpu'), 1)
         else:
-            for i in range(10):
+            for _ in range(10):
                 qeager(input_value)

         # print('ref after observation:', qeager)

test/test_fx_experimental.py

Lines changed: 1 addition & 1 deletion
@@ -272,7 +272,7 @@ def forward(self, a, b, offset):
         x = self.bottom_layers(a)
         y = []
         c = []
-        for i in range(len(self.embedding_layers)):
+        for _ in range(len(self.embedding_layers)):
             temp = torch.randint(10, (8,))
             c.append(temp + b)
         for i in range(len(self.embedding_layers)):
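
As in the other hunks, the first loop ignores both index and element, so range(len(...)) serves only as a repeat count; the second loop (unchanged) still needs i. A standalone sketch with made-up layers:

layers = ["emb0", "emb1", "emb2"]  # made-up stand-in for embedding_layers

# Neither index nor element is used: the range is only a repeat count.
c = []
for _ in range(len(layers)):
    c.append("token")
assert c == ["token"] * len(layers)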
