diff --git a/tests/models/test_llama.py b/tests/models/test_llama.py
index e2f7a31b3..c0363cbb5 100644
--- a/tests/models/test_llama.py
+++ b/tests/models/test_llama.py
@@ -65,6 +65,7 @@ def test_llama2(device, opt):
     print(current_memory_pool("vcuda"))


+@pytest.mark.skip(reason='The transformers updated their modeling, skip until we update.')
 def test_model_architecture():
     import torch
     import hidet