SAM2 AMG load_fast fix (#1374)
cpuhrsch authored Dec 3, 2024
1 parent ebb9086 commit b7630f1
Showing 1 changed file with 6 additions and 6 deletions.
examples/sam2_amg_server/server.py (6 additions & 6 deletions)
@@ -588,6 +588,12 @@ def main(checkpoint_path,
     if load_fast != "":
         load_aot_fast(mask_generator, load_fast)
 
+    if furious:
+        set_furious(mask_generator)
+    # since autoquant is replicating what furious mode is doing, don't use these two together
+    elif use_autoquant:
+        set_autoquant(mask_generator)
+
     if save_fast != "":
         assert load_fast == "", "Can't save compiled models while loading them with --load-fast."
         assert not baseline, "--fast cannot be combined with baseline. code to be torch.compile(fullgraph=True) compatible."
@@ -598,12 +604,6 @@ def main(checkpoint_path,
         assert not baseline, "--fast cannot be combined with baseline. code to be torch.compile(fullgraph=True) compatible."
         set_fast(mask_generator, load_fast)
 
-    if furious:
-        set_furious(mask_generator)
-    # since autoquant is replicating what furious mode is doing, don't use these two together
-    elif use_autoquant:
-        set_autoquant(mask_generator)
-
     with open('dog.jpg', 'rb') as f:
         image_tensor = file_bytes_to_image_tensor(bytearray(f.read()))
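For readers skimming the diff: the change moves the furious/autoquant setup so that it runs before the save-fast and set-fast steps rather than after them. Below is a minimal sketch of the resulting order of operations in main() after this commit. The helper names (load_aot_fast, set_furious, set_autoquant, set_fast) and flag names come from the diff above; the function signature, the mask_generator construction, the "if fast:" guard, and the elided bodies are assumptions about surrounding code that the diff does not show.

# Condensed sketch of main() in examples/sam2_amg_server/server.py after this
# commit. Helper functions are defined in that file; the "..." bodies and the
# "if fast:" guard are assumptions, not part of the diff shown above.
def main(checkpoint_path, baseline=False, fast=False, furious=False,
         use_autoquant=False, save_fast="", load_fast=""):
    mask_generator = ...  # built from checkpoint_path earlier in main() (not shown)

    # Optionally load previously AOT-compiled artifacts.
    if load_fast != "":
        load_aot_fast(mask_generator, load_fast)

    # New position: apply furious mode or autoquant before anything is saved or
    # compiled, so the compiled model reflects these settings. The two are
    # mutually exclusive because autoquant replicates what furious mode does.
    if furious:
        set_furious(mask_generator)
    elif use_autoquant:
        set_autoquant(mask_generator)

    # Optionally save AOT-compiled artifacts (incompatible with --load-fast
    # and with --baseline).
    if save_fast != "":
        assert load_fast == "", "Can't save compiled models while loading them with --load-fast."
        assert not baseline, "--fast cannot be combined with baseline."
        ...

    # Enable fast (torch.compile) mode; before this commit, the furious/autoquant
    # block sat after this point.
    if fast:
        assert not baseline, "--fast cannot be combined with baseline."
        set_fast(mask_generator, load_fast)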
