Commit bfe8001

Add CogVLM2 support for Windows
Parent: 3303706

3 files changed: 14 additions & 7 deletions

requirements.txt

Lines changed: 10 additions & 5 deletions
@@ -7,11 +7,6 @@ pyparsing==3.2.1
 PySide6==6.8.2.1
 transformers==4.48.3
 
-# PyTorch
-torch==2.6.0; platform_system != "Windows"
-https://download.pytorch.org/whl/cu124/torch-2.6.0%2Bcu124-cp312-cp312-win_amd64.whl; platform_system == "Windows" and python_version == "3.12"
-https://download.pytorch.org/whl/cu124/torch-2.6.0%2Bcu124-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
-
 # CogAgent
 timm==1.0.14
 
@@ -30,7 +25,17 @@ numpy==2.2.3
 huggingface-hub==0.29.1
 onnxruntime==1.20.1
 
+# PyTorch
+torch==2.6.0; platform_system != "Windows"
+https://download.pytorch.org/whl/cu124/torch-2.6.0%2Bcu124-cp312-cp312-win_amd64.whl; platform_system == "Windows" and python_version == "3.12"
+https://download.pytorch.org/whl/cu124/torch-2.6.0%2Bcu124-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
+
 # FlashAttention (Florence-2, Phi-3-Vision)
 flash-attn==2.7.4.post1; platform_system == "Linux"
 https://github.com/kingbri1/flash-attention/releases/download/v2.7.4.post1/flash_attn-2.7.4.post1+cu124torch2.6.0cxx11abiFALSE-cp312-cp312-win_amd64.whl; platform_system == "Windows" and python_version == "3.12"
 https://github.com/kingbri1/flash-attention/releases/download/v2.7.4.post1/flash_attn-2.7.4.post1+cu124torch2.6.0cxx11abiFALSE-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
+
+# Triton (CogVLM2)
+triton==3.2.0; platform_system == "Linux"
+https://github.com/woct0rdho/triton-windows/releases/download/v3.2.0-windows.post10/triton-3.2.0-cp312-cp312-win_amd64.whl; platform_system == "Windows" and python_version == "3.12"
+https://github.com/woct0rdho/triton-windows/releases/download/v3.2.0-windows.post10/triton-3.2.0-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
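The platform selection above relies on PEP 508 environment markers: pip evaluates the expression after the semicolon against the running interpreter and installs a requirement only when it is true, so the plain `torch==2.6.0` pin applies everywhere except Windows, where a CUDA 12.4 wheel matching the Python version is fetched from the direct URL instead. A minimal sketch of how such a marker is evaluated, using the `packaging` library (the marker string below is illustrative, not taken from the commit):

# Sketch: evaluating a PEP 508 environment marker the way pip does.
# Requires the `packaging` package.
from packaging.markers import Marker

marker = Marker('platform_system == "Windows" and python_version == "3.12"')

# evaluate() substitutes values from the current interpreter, so this prints
# True only on Windows with Python 3.12.
print(marker.evaluate())

# An explicit environment can be passed to test other platforms.
print(marker.evaluate({'platform_system': 'Windows', 'python_version': '3.12'}))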

taggui-windows.spec

Lines changed: 2 additions & 0 deletions
@@ -3,6 +3,7 @@ from PyInstaller.utils.hooks import collect_data_files
 
 datas = [('clip-vit-base-patch32', 'clip-vit-base-patch32'),
          ('images/icon.ico', 'images')]
+datas += collect_data_files('triton')
 datas += collect_data_files('xformers')
 hiddenimports = [
     'timm.models.layers',
@@ -27,6 +28,7 @@ a = Analysis(
     cipher=block_cipher,
     noarchive=False,
     module_collection_mode={
+        'triton': 'py',
         'xformers': 'pyz+py',
     },
 )
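The spec change tells PyInstaller to bundle Triton's non-Python data files and to collect its modules as plain .py source (`module_collection_mode` 'py'), presumably because Triton's runtime compiler needs to read its own sources from disk. A rough sketch of what `collect_data_files` gathers, assuming an environment where triton is installed:

# Sketch: inspecting what PyInstaller would bundle for triton.
# Each entry is a (source_path_on_disk, destination_dir_inside_the_bundle) tuple.
from PyInstaller.utils.hooks import collect_data_files

triton_datas = collect_data_files('triton')
for src, dest in triton_datas[:5]:  # show a few entries
    print(src, '->', dest)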

taggui/auto_captioning/models/cogvlm2.py

Lines changed: 2 additions & 2 deletions
@@ -19,8 +19,8 @@ class Cogvlm2(AutoCaptioningModel):
     def get_additional_error_message(self) -> str | None:
         if not importlib.util.find_spec('triton'):
             return ('This model requires the `triton` package, which is only '
-                    'available on Linux. Therefore, this model cannot be run '
-                    'on this system.')
+                    'available for Linux and Windows. Therefore, this model '
+                    'cannot be run on this system.')
         is_4_bit_model = 'int4' in self.model_id
         if is_4_bit_model:
            if self.device_setting == CaptionDevice.CPU:
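The updated error message reflects that triton wheels now exist for both Linux and Windows. The guard itself uses `importlib.util.find_spec`, which checks whether a package is importable without actually importing it; a minimal, standalone illustration of that behavior:

# Sketch: find_spec returns a ModuleSpec when the package is installed,
# and None otherwise, without importing the package.
import importlib.util

print(importlib.util.find_spec('triton') is not None)  # True if triton is installed
print(importlib.util.find_spec('no_such_package'))     # None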
