Skip to content

Commit

Permalink
skip 8bit lora on h100, add 4bit lora on h100 to multi gpu tests
Browse files Browse the repository at this point in the history
  • Loading branch information
winglian committed Oct 30, 2024
1 parent c2b9994 commit 345ee54
Show file tree
Hide file tree
Showing 3 changed files with 85 additions and 9 deletions.
73 changes: 71 additions & 2 deletions tests/e2e/multigpu/test_llama.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@

from axolotl.utils.dict import DictDefault

from ..utils import with_temp_dir
from ..utils import is_hopper, with_temp_dir

LOG = logging.getLogger("axolotl.tests.e2e.multigpu")
os.environ["WANDB_DISABLED"] = "true"
Expand Down Expand Up @@ -144,6 +144,7 @@ def test_lora_ddp_packed(self, temp_dir):
]
)

@pytest.mark.skipif(is_hopper(), reason="h100 doesn't support 8-bit lora")
@with_temp_dir
def test_dpo_lora_ddp(self, temp_dir):
# pylint: disable=duplicate-code
Expand Down Expand Up @@ -186,7 +187,75 @@ def test_dpo_lora_ddp(self, temp_dir):
},
],
"num_epochs": 1,
"max_steps": 50,
"max_steps": 10,
"micro_batch_size": 4,
"gradient_accumulation_steps": 4,
"output_dir": temp_dir,
"warmup_steps": 0,
"learning_rate": 0.00001,
"optimizer": "adamw_8bit",
"lr_scheduler": "cosine",
"flash_attention": True,
}
)

# write cfg to yaml file
Path(temp_dir).mkdir(parents=True, exist_ok=True)
with open(Path(temp_dir) / "config.yaml", "w", encoding="utf-8") as fout:
fout.write(yaml.dump(cfg.to_dict(), Dumper=yaml.Dumper))

execute_subprocess_async(
[
"accelerate",
"launch",
"--num-processes",
"2",
"-m",
"axolotl.cli.train",
str(Path(temp_dir) / "config.yaml"),
]
)

@with_temp_dir
def test_dpo_qlora_ddp(self, temp_dir):
# pylint: disable=duplicate-code
cfg = DictDefault(
{
"base_model": "HuggingFaceTB/SmolLM-135M",
"sequence_len": 2048,
"sample_packing": False,
"eval_sample_packing": False,
"pad_to_sequence_len": True,
"load_in_4bit": True,
"adapter": "qlora",
"lora_r": 8,
"lora_alpha": 16,
"lora_dropout": 0.05,
"lora_target_linear": True,
"val_set_size": 0.05,
"special_tokens": {
"pad_token": "<|endoftext|>",
},
"rl": "dpo",
"chat_template": "chatml",
"datasets": [
{
"path": "fozziethebeat/alpaca_messages_2k_dpo_test",
"type": "chat_template.default",
"field_messages": "conversation",
"field_chosen": "chosen",
"field_rejected": "rejected",
"message_field_role": "role",
"message_field_content": "content",
"roles": {
"system": ["system"],
"user": ["user"],
"assistant": ["assistant"],
},
},
],
"num_epochs": 1,
"max_steps": 10,
"micro_batch_size": 4,
"gradient_accumulation_steps": 4,
"output_dir": temp_dir,
Expand Down
4 changes: 2 additions & 2 deletions tests/e2e/patched/test_4d_multipack_llama.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
from axolotl.utils.config import normalize_config
from axolotl.utils.dict import DictDefault

from ..utils import require_torch_2_1_1, with_temp_dir
from ..utils import require_torch_2_3_1, with_temp_dir

LOG = logging.getLogger("axolotl.tests.e2e")
os.environ["WANDB_DISABLED"] = "true"
Expand All @@ -24,7 +24,7 @@ class Test4dMultipackLlama(unittest.TestCase):
Test case for Llama models using 4d attention with multipack
"""

@require_torch_2_1_1
@require_torch_2_3_1
@with_temp_dir
def test_sdp_lora_packing(self, temp_dir):
# pylint: disable=duplicate-code
Expand Down
17 changes: 12 additions & 5 deletions tests/e2e/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,8 @@
from importlib.metadata import version
from pathlib import Path

import torch


def with_temp_dir(test_func):
@wraps(test_func)
Expand All @@ -35,13 +37,18 @@ def most_recent_subdir(path):
return subdir


def require_torch_2_3_1(test_case):
    """
    Decorator marking a test that requires torch >= 2.3.1.

    Skips the wrapped test case (via unittest.skipUnless) when the
    installed torch is older than 2.3.1.
    """

    def is_min_2_3_1():
        # Compare numeric release components. A plain string comparison
        # is lexicographic and breaks for versions like "2.10.0"
        # ("2.10.0" < "2.3.1" as strings). Strip any local segment
        # ("+cu121") and stop at the first non-numeric part
        # (e.g. "2.3.0a0") so int() never fails.
        torch_version = version("torch")
        numeric_parts = []
        for part in torch_version.split("+")[0].split("."):
            if not part.isdigit():
                break
            numeric_parts.append(int(part))
        return tuple(numeric_parts) >= (2, 3, 1)

    return unittest.skipUnless(is_min_2_3_1(), "test torch 2.3.1")(test_case)
def is_hopper():
    """
    Return True when the current CUDA device is NVIDIA Hopper
    (H100-class, compute capability 9.0), else False.
    """
    # get_device_capability() raises RuntimeError when no CUDA device is
    # available, which would crash test *collection* on CPU-only machines
    # just to evaluate a skip condition — guard and report "not Hopper".
    if not torch.cuda.is_available():
        return False
    compute_capability = torch.cuda.get_device_capability()
    return compute_capability == (9, 0)

0 comments on commit 345ee54

Please sign in to comment.