From 039e2a03700730f333471d130dbf1f4d7922b357 Mon Sep 17 00:00:00 2001
From: Wing Lian
Date: Wed, 15 May 2024 13:27:44 -0400
Subject: [PATCH] bump versions of deps (#1621)

* bump versions of deps

* bump transformers too

* fix xformers deps and include s3fs install
---
 docker/Dockerfile |  2 +-
 requirements.txt  | 16 ++++++++--------
 setup.py          | 17 ++++++++++-------
 3 files changed, 19 insertions(+), 16 deletions(-)

diff --git a/docker/Dockerfile b/docker/Dockerfile
index ae3fe89ae..416582bbe 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -11,7 +11,7 @@ ARG PYTORCH_VERSION="2.1.2"
 ENV PYTORCH_VERSION=$PYTORCH_VERSION
 
 RUN apt-get update && \
-    apt-get install -y --allow-change-held-packages vim curl nano libnccl2 libnccl-dev
+    apt-get install -y --allow-change-held-packages vim curl nano libnccl2 libnccl-dev s3fs
 
 WORKDIR /workspace
 
diff --git a/requirements.txt b/requirements.txt
index b15a28c90..6723b1798 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,22 +1,22 @@
 --extra-index-url https://huggingface.github.io/autogptq-index/whl/cu118/
 packaging==23.2
 peft==0.10.0
-transformers @ git+https://github.com/huggingface/transformers.git@43d17c18360ac9c3d3491389328e2fe55fe8f9ce
-tokenizers==0.15.0
-bitsandbytes==0.43.0
-accelerate==0.28.0
-deepspeed==0.13.1
+transformers==4.40.2
+tokenizers==0.19.1
+bitsandbytes==0.43.1
+accelerate==0.30.1
+deepspeed==0.14.2
 pydantic==2.6.3
 addict
 fire
 PyYAML>=6.0
 requests
-datasets==2.15.0
-flash-attn==2.5.5
+datasets==2.19.1
+flash-attn==2.5.8
 sentencepiece
 wandb
 einops
-xformers==0.0.22
+xformers==0.0.23.post1
 optimum==1.16.2
 hf_transfer
 colorama
diff --git a/setup.py b/setup.py
index fbca5a360..31a6d6716 100644
--- a/setup.py
+++ b/setup.py
@@ -30,7 +30,7 @@ def parse_requirements():
 
     try:
         if "Darwin" in platform.system():
-            _install_requires.pop(_install_requires.index("xformers==0.0.22"))
+            _install_requires.pop(_install_requires.index("xformers==0.0.23.post1"))
         else:
             torch_version = version("torch")
             _install_requires.append(f"torch=={torch_version}")
@@ -45,9 +45,12 @@ def parse_requirements():
             else:
                 raise ValueError("Invalid version format")
 
-            if (major, minor) >= (2, 1):
-                _install_requires.pop(_install_requires.index("xformers==0.0.22"))
-                _install_requires.append("xformers>=0.0.23")
+            if (major, minor) >= (2, 3):
+                _install_requires.pop(_install_requires.index("xformers==0.0.23.post1"))
+                _install_requires.append("xformers>=0.0.26.post1")
+            elif (major, minor) >= (2, 2):
+                _install_requires.pop(_install_requires.index("xformers==0.0.23.post1"))
+                _install_requires.append("xformers>=0.0.25.post1")
     except PackageNotFoundError:
         pass
 
@@ -68,13 +71,13 @@ def parse_requirements():
     dependency_links=dependency_links,
     extras_require={
         "flash-attn": [
-            "flash-attn==2.5.5",
+            "flash-attn==2.5.8",
         ],
         "fused-dense-lib": [
-            "fused-dense-lib @ git+https://github.com/Dao-AILab/flash-attention@v2.3.3#subdirectory=csrc/fused_dense_lib",
+            "fused-dense-lib @ git+https://github.com/Dao-AILab/flash-attention@v2.5.8#subdirectory=csrc/fused_dense_lib",
         ],
         "deepspeed": [
-            "deepspeed==0.13.1",
+            "deepspeed==0.14.2",
             "deepspeed-kernels",
         ],
         "mamba-ssm": [