From b22e6e947934e5a40cdffdc78b15737eaca67b27 Mon Sep 17 00:00:00 2001
From: Devmate Bot
Date: Thu, 20 Nov 2025 18:57:57 -0800
Subject: [PATCH] fbcode/pytorch/data/test/stateful_dataloader/test_dataloader.py (#1517)

Summary:
Rename unused loop variables to `_` in test_dataloader.py.

Pull Request resolved: https://github.com/meta-pytorch/data/pull/1517

Differential Revision: D87533222
---
 test/stateful_dataloader/test_dataloader.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/test/stateful_dataloader/test_dataloader.py b/test/stateful_dataloader/test_dataloader.py
index 17abd0dc5..61d311c25 100644
--- a/test/stateful_dataloader/test_dataloader.py
+++ b/test/stateful_dataloader/test_dataloader.py
@@ -211,14 +211,14 @@ def __len__(self):
         dataset = CustomDataset(self, x)
         dataset = random_split(dataset, [5])[0]
         data_loader = DataLoader(dataset)
-        for batch in data_loader:
+        for _ in data_loader:
             pass
 
         # fractional splitting
         dataset = CustomDataset(self, x)
         dataset = random_split(dataset, [1.0])[0]
         data_loader = DataLoader(dataset)
-        for batch in data_loader:
+        for _ in data_loader:
             pass
 
     def test_splits_reproducibility(self):
@@ -1052,7 +1052,7 @@ def __iter__(self):
             worker_info = torch.utils.data.get_worker_info()
             assert worker_info is not None
             worker_id = worker_info.id
-            for idx in range(self.length // worker_info.num_workers):
+            for _ in range(self.length // worker_info.num_workers):
                 yield worker_id
 
         def __len__(self):
@@ -1711,7 +1711,7 @@ def test_multi_epochs_reproducibility(self):
         dataset = TestMultiEpochDataset(batch_size * num_workers)
         dataloader = self._get_data_loader(dataset, batch_size=batch_size, shuffle=False, num_workers=num_workers)
 
-        for ind in range(num_epochs):
+        for _ in range(num_epochs):
             for batch_idx, sample in enumerate(dataloader):
                 self.assertEqual(sample.tolist(), [batch_idx % num_workers] * batch_size)
 
@@ -2532,7 +2532,7 @@ def _create_dp(buffer_size):
 
         # Same seeds
         dl_res = []
-        for epoch in range(2):
+        for _ in range(2):
             torch.manual_seed(123)
             dl_res.append(list(dl))
         self.assertEqual(dl_res[0], dl_res[1])
@@ -2734,7 +2734,7 @@ def test_dataset_not_reset(self):
             dataloader = self._get_data_loader(dataset, num_workers=2, pin_memory=pin_memory)
             dataset.start = 0
             for i in range(10):
-                for x in dataloader:
+                for _ in dataloader:
                     pass
                 # Changing the start value here doesn't have any effect in the dataset
                 # cached by the workers. since they are not recreated between epochs