-accelerate==1.1.0
-bitsandbytes==0.45.0
+accelerate==1.4.0
+bitsandbytes==0.45.2
 ExifRead==3.0.0
 imagesize==1.4.1
-pillow==11.0.0
-pyparsing==3.2.0
-PySide6==6.8.1
-transformers==4.45.2
-
-# PyTorch
-# AutoGPTQ does not support PyTorch v2.3.
-torch==2.2.2; platform_system != "Windows"
-https://download.pytorch.org/whl/cu121/torch-2.2.2%2Bcu121-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
-https://download.pytorch.org/whl/cu121/torch-2.2.2%2Bcu121-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
+pillow==11.1.0
+pyparsing==3.2.1
+PySide6==6.8.2.1
+transformers==4.48.3
 
 # CogAgent
-timm==1.0.12
+timm==1.0.14
 
 # CogVLM
-einops==0.8.0
-protobuf==5.29.1
+einops==0.8.1
+protobuf==5.29.3
 sentencepiece==0.2.0
-# These versions of torchvision and xFormers are the latest versions compatible
-# with PyTorch v2.2.2.
-torchvision==0.17.2
-xformers==0.0.25.post1
+torchvision==0.21.0
+xformers==0.0.29.post3
 
 # InternLM-XComposer2
-auto-gptq==0.7.1; platform_system == "Linux" or platform_system == "Windows"
-# PyTorch versions prior to 2.3 do not support NumPy v2.
-numpy==1.26.4
+gptqmodel==1.9.0
+numpy==2.2.3
 
 # WD Tagger
-huggingface-hub==0.26.5
+huggingface-hub==0.29.1
 onnxruntime==1.20.1
 
+# PyTorch
+torch==2.6.0; platform_system != "Windows"
+https://download.pytorch.org/whl/cu124/torch-2.6.0%2Bcu124-cp312-cp312-win_amd64.whl; platform_system == "Windows" and python_version == "3.12"
+https://download.pytorch.org/whl/cu124/torch-2.6.0%2Bcu124-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
+
 # FlashAttention (Florence-2, Phi-3-Vision)
-flash-attn==2.6.3; platform_system == "Linux"
-https://github.com/bdashore3/flash-attention/releases/download/v2.6.3/flash_attn-2.6.3+cu123torch2.2.2cxx11abiFALSE-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
-https://github.com/bdashore3/flash-attention/releases/download/v2.6.3/flash_attn-2.6.3+cu123torch2.2.2cxx11abiFALSE-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
+flash-attn==2.7.4.post1; platform_system == "Linux"
+https://github.com/kingbri1/flash-attention/releases/download/v2.7.4.post1/flash_attn-2.7.4.post1+cu124torch2.6.0cxx11abiFALSE-cp312-cp312-win_amd64.whl; platform_system == "Windows" and python_version == "3.12"
+https://github.com/kingbri1/flash-attention/releases/download/v2.7.4.post1/flash_attn-2.7.4.post1+cu124torch2.6.0cxx11abiFALSE-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
+
+# Triton (CogVLM2)
+triton==3.2.0; platform_system == "Linux"
+https://github.com/woct0rdho/triton-windows/releases/download/v3.2.0-windows.post10/triton-3.2.0-cp312-cp312-win_amd64.whl; platform_system == "Windows" and python_version == "3.12"
+https://github.com/woct0rdho/triton-windows/releases/download/v3.2.0-windows.post10/triton-3.2.0-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"