Skip to content

Commit

Permalink
bump versions of deps (#1621)
Browse files Browse the repository at this point in the history
* bump versions of deps

* bump transformers too

* fix xformers deps and include s3fs install
  • Loading branch information
winglian authored May 15, 2024
1 parent 4fde300 commit 039e2a0
Show file tree
Hide file tree
Showing 3 changed files with 19 additions and 16 deletions.
2 changes: 1 addition & 1 deletion docker/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ ARG PYTORCH_VERSION="2.1.2"
ENV PYTORCH_VERSION=$PYTORCH_VERSION

RUN apt-get update && \
-apt-get install -y --allow-change-held-packages vim curl nano libnccl2 libnccl-dev
+apt-get install -y --allow-change-held-packages vim curl nano libnccl2 libnccl-dev s3fs

WORKDIR /workspace

Expand Down
16 changes: 8 additions & 8 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -1,22 +1,22 @@
--extra-index-url https://huggingface.github.io/autogptq-index/whl/cu118/
packaging==23.2
peft==0.10.0
-transformers @ git+https://github.com/huggingface/transformers.git@43d17c18360ac9c3d3491389328e2fe55fe8f9ce
-tokenizers==0.15.0
-bitsandbytes==0.43.0
-accelerate==0.28.0
-deepspeed==0.13.1
+transformers==4.40.2
+tokenizers==0.19.1
+bitsandbytes==0.43.1
+accelerate==0.30.1
+deepspeed==0.14.2
pydantic==2.6.3
addict
fire
PyYAML>=6.0
requests
-datasets==2.15.0
-flash-attn==2.5.5
+datasets==2.19.1
+flash-attn==2.5.8
sentencepiece
wandb
einops
-xformers==0.0.22
+xformers==0.0.23.post1
optimum==1.16.2
hf_transfer
colorama
Expand Down
17 changes: 10 additions & 7 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ def parse_requirements():

try:
if "Darwin" in platform.system():
-        _install_requires.pop(_install_requires.index("xformers==0.0.22"))
+        _install_requires.pop(_install_requires.index("xformers==0.0.23.post1"))
else:
torch_version = version("torch")
_install_requires.append(f"torch=={torch_version}")
Expand All @@ -45,9 +45,12 @@ def parse_requirements():
else:
raise ValueError("Invalid version format")

-            if (major, minor) >= (2, 1):
-                _install_requires.pop(_install_requires.index("xformers==0.0.22"))
-                _install_requires.append("xformers>=0.0.23")
+            if (major, minor) >= (2, 3):
+                _install_requires.pop(_install_requires.index("xformers==0.0.23.post1"))
+                _install_requires.append("xformers>=0.0.26.post1")
+            elif (major, minor) >= (2, 2):
+                _install_requires.pop(_install_requires.index("xformers==0.0.23.post1"))
+                _install_requires.append("xformers>=0.0.25.post1")
except PackageNotFoundError:
pass

Expand All @@ -68,13 +71,13 @@ def parse_requirements():
dependency_links=dependency_links,
extras_require={
"flash-attn": [
-            "flash-attn==2.5.5",
+            "flash-attn==2.5.8",
],
"fused-dense-lib": [
-            "fused-dense-lib @ git+https://github.com/Dao-AILab/flash-attention@v2.3.3#subdirectory=csrc/fused_dense_lib",
+            "fused-dense-lib @ git+https://github.com/Dao-AILab/flash-attention@v2.5.8#subdirectory=csrc/fused_dense_lib",
],
"deepspeed": [
-            "deepspeed==0.13.1",
+            "deepspeed==0.14.2",
"deepspeed-kernels",
],
"mamba-ssm": [
Expand Down

0 comments on commit 039e2a0

Please sign in to comment.