diff --git a/test/test_transforms.py b/test/test_transforms.py
index 01c27a30a65..b49aeb59b5b 100644
--- a/test/test_transforms.py
+++ b/test/test_transforms.py
@@ -1614,8 +1614,8 @@ def test_augmix(fill, severity, mixture_width, chain_depth, all_ops, grayscale):
 def test_random_crop():
     height = random.randint(10, 32) * 2
     width = random.randint(10, 32) * 2
-    oheight = random.randint(5, (height - 2) / 2) * 2
-    owidth = random.randint(5, (width - 2) / 2) * 2
+    oheight = random.randint(5, (height - 2) // 2) * 2
+    owidth = random.randint(5, (width - 2) // 2) * 2
     img = torch.ones(3, height, width, dtype=torch.uint8)
     result = transforms.Compose(
         [
@@ -1664,8 +1664,8 @@ def test_random_crop():
 def test_center_crop():
     height = random.randint(10, 32) * 2
     width = random.randint(10, 32) * 2
-    oheight = random.randint(5, (height - 2) / 2) * 2
-    owidth = random.randint(5, (width - 2) / 2) * 2
+    oheight = random.randint(5, (height - 2) // 2) * 2
+    owidth = random.randint(5, (width - 2) // 2) * 2
 
     img = torch.ones(3, height, width, dtype=torch.uint8)
     oh1 = (height - oheight) // 2
diff --git a/test/test_transforms_video.py b/test/test_transforms_video.py
index 21594868f09..4ad57e6a98e 100644
--- a/test/test_transforms_video.py
+++ b/test/test_transforms_video.py
@@ -23,8 +23,8 @@ def test_random_crop_video(self):
         numFrames = random.randint(4, 128)
         height = random.randint(10, 32) * 2
         width = random.randint(10, 32) * 2
-        oheight = random.randint(5, (height - 2) / 2) * 2
-        owidth = random.randint(5, (width - 2) / 2) * 2
+        oheight = random.randint(5, (height - 2) // 2) * 2
+        owidth = random.randint(5, (width - 2) // 2) * 2
         clip = torch.randint(0, 256, (numFrames, height, width, 3), dtype=torch.uint8)
         result = Compose(
             [
@@ -41,8 +41,8 @@ def test_random_resized_crop_video(self):
         numFrames = random.randint(4, 128)
         height = random.randint(10, 32) * 2
         width = random.randint(10, 32) * 2
-        oheight = random.randint(5, (height - 2) / 2) * 2
-        owidth = random.randint(5, (width - 2) / 2) * 2
+        oheight = random.randint(5, (height - 2) // 2) * 2
+        owidth = random.randint(5, (width - 2) // 2) * 2
         clip = torch.randint(0, 256, (numFrames, height, width, 3), dtype=torch.uint8)
         result = Compose(
             [
@@ -59,8 +59,8 @@ def test_center_crop_video(self):
         numFrames = random.randint(4, 128)
         height = random.randint(10, 32) * 2
         width = random.randint(10, 32) * 2
-        oheight = random.randint(5, (height - 2) / 2) * 2
-        owidth = random.randint(5, (width - 2) / 2) * 2
+        oheight = random.randint(5, (height - 2) // 2) * 2
+        owidth = random.randint(5, (width - 2) // 2) * 2
 
         clip = torch.ones((numFrames, height, width, 3), dtype=torch.uint8) * 255
         oh1 = (height - oheight) // 2