[fbsync] Help make test dependent on randint compatible with python 3.12 (#8119)

Summary: Co-authored-by: Nicolas Hug <[email protected]>

Reviewed By: vmoens

Differential Revision: D55062798

fbshipit-source-id: e24a223a82eda5ce9ad5ea816d04ab2725f253ff
NicolasHug authored and facebook-github-bot committed Mar 20, 2024
1 parent e3255f4 commit e19cea7
Showing 2 changed files with 10 additions and 10 deletions.
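
All ten changed lines follow the same pattern: the upper bound handed to random.randint is computed with floor division (//) instead of true division (/), so it stays an int. On Python 3.12, random.randrange() (which randint() delegates to) no longer accepts float arguments, so the old expressions fail at runtime even though the float value is integral. A minimal sketch of the difference, using an illustrative height value rather than one drawn from the tests:

    import random

    height = 20  # illustrative even value; the tests draw it at random

    # Old form: (height - 2) / 2 is a float (here 9.0). Accepted on older
    # Python versions because the value is integral, but Python 3.12's
    # randrange() rejects non-integer bounds with a TypeError.
    # oheight = random.randint(5, (height - 2) / 2) * 2

    # New form: floor division keeps the bound an int, and for even height
    # it selects from exactly the same range as before.
    oheight = random.randint(5, (height - 2) // 2) * 2
    print(oheight)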
8 changes: 4 additions & 4 deletions test/test_transforms.py
@@ -1614,8 +1614,8 @@ def test_augmix(fill, severity, mixture_width, chain_depth, all_ops, grayscale):
 def test_random_crop():
     height = random.randint(10, 32) * 2
     width = random.randint(10, 32) * 2
-    oheight = random.randint(5, (height - 2) / 2) * 2
-    owidth = random.randint(5, (width - 2) / 2) * 2
+    oheight = random.randint(5, (height - 2) // 2) * 2
+    owidth = random.randint(5, (width - 2) // 2) * 2
     img = torch.ones(3, height, width, dtype=torch.uint8)
     result = transforms.Compose(
         [
@@ -1664,8 +1664,8 @@ def test_random_crop():
 def test_center_crop():
     height = random.randint(10, 32) * 2
     width = random.randint(10, 32) * 2
-    oheight = random.randint(5, (height - 2) / 2) * 2
-    owidth = random.randint(5, (width - 2) / 2) * 2
+    oheight = random.randint(5, (height - 2) // 2) * 2
+    owidth = random.randint(5, (width - 2) // 2) * 2

     img = torch.ones(3, height, width, dtype=torch.uint8)
     oh1 = (height - oheight) // 2
12 changes: 6 additions & 6 deletions test/test_transforms_video.py
@@ -23,8 +23,8 @@ def test_random_crop_video(self):
         numFrames = random.randint(4, 128)
         height = random.randint(10, 32) * 2
         width = random.randint(10, 32) * 2
-        oheight = random.randint(5, (height - 2) / 2) * 2
-        owidth = random.randint(5, (width - 2) / 2) * 2
+        oheight = random.randint(5, (height - 2) // 2) * 2
+        owidth = random.randint(5, (width - 2) // 2) * 2
         clip = torch.randint(0, 256, (numFrames, height, width, 3), dtype=torch.uint8)
         result = Compose(
             [
@@ -41,8 +41,8 @@ def test_random_resized_crop_video(self):
         numFrames = random.randint(4, 128)
         height = random.randint(10, 32) * 2
         width = random.randint(10, 32) * 2
-        oheight = random.randint(5, (height - 2) / 2) * 2
-        owidth = random.randint(5, (width - 2) / 2) * 2
+        oheight = random.randint(5, (height - 2) // 2) * 2
+        owidth = random.randint(5, (width - 2) // 2) * 2
         clip = torch.randint(0, 256, (numFrames, height, width, 3), dtype=torch.uint8)
         result = Compose(
             [
@@ -59,8 +59,8 @@ def test_center_crop_video(self):
         numFrames = random.randint(4, 128)
         height = random.randint(10, 32) * 2
         width = random.randint(10, 32) * 2
-        oheight = random.randint(5, (height - 2) / 2) * 2
-        owidth = random.randint(5, (width - 2) / 2) * 2
+        oheight = random.randint(5, (height - 2) // 2) * 2
+        owidth = random.randint(5, (width - 2) // 2) * 2

         clip = torch.ones((numFrames, height, width, 3), dtype=torch.uint8) * 255
         oh1 = (height - oheight) // 2
