Commit 8942e87

Merge pull request #195 from allenai/favyen/20250826-fix-helios
Fix a couple bugs with Helios
2 parents: 2e8f28c + 83e1eb1

2 files changed: +5 -4 lines changed

helios.Dockerfile

Lines changed: 4 additions & 3 deletions
@@ -6,8 +6,6 @@ RUN apt install -y libpq-dev ffmpeg libsm6 libxext6 git wget
 # Install rslearn and helios (need to be in local directory).
 COPY ./docker_build/rslearn /opt/rslearn
 COPY ./docker_build/helios /opt/helios
-COPY requirements.txt /opt/rslearn_projects/requirements.txt
-COPY ai2_requirements.txt /opt/rslearn_projects/ai2_requirements.txt

 # We also install terratorch so that we can use the same Docker image for TerraMind
 # experiments.
@@ -16,7 +14,10 @@ RUN pip install --no-cache-dir geobench==0.0.1

 RUN pip install --no-cache-dir --upgrade /opt/rslearn[extra]
 RUN pip install --no-cache-dir --upgrade /opt/helios
-RUN pip install --no-cache-dir -r /opt/rslearn_projects/requirements.txt -r /opt/rslearn_projects/ai2_requirements.txt
+
+COPY requirements-without-rslearn.txt /opt/rslearn_projects/requirements-without-rslearn.txt
+COPY requirements-extra.txt /opt/rslearn_projects/requirements-extra.txt
+RUN pip install --no-cache-dir -r /opt/rslearn_projects/requirements-without-rslearn.txt -r /opt/rslearn_projects/requirements-extra.txt

 # Copy rslearn_projects and install it too.
 COPY . /opt/rslearn_projects/
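
The requirements files are now copied and installed right before the project itself, and their names suggest they no longer pull in rslearn (which is installed from the local /opt/rslearn checkout above). A minimal smoke-test sketch for the built image, in Python; the distribution names rslearn and helios are taken from the Dockerfile, and whether they match the installed package metadata is an assumption:

# Hypothetical check to run inside the built image: confirm that rslearn and
# helios are importable as installed distributions and report their versions.
# Only the names come from the Dockerfile; this check is not part of the PR.
from importlib.metadata import PackageNotFoundError, version

for name in ("rslearn", "helios"):
    try:
        print(f"{name}: {version(name)}")
    except PackageNotFoundError:
        print(f"{name}: not installed")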

rslp/helios/model.py

Lines changed: 1 addition & 1 deletion
@@ -162,7 +162,7 @@ def forward(self, inputs: list[dict[str, Any]]) -> list[torch.Tensor]:
         # Currently we assume the provided model always returns a TokensAndMasks object.
         tokens_and_masks: TokensAndMasks = self.model(
             sample, always_pass_none_mask_to_transformer=True, **self.forward_kwargs
-        )[0]
+        )["tokens_and_masks"]

         # Apply temporal/modality pooling so we just have one feature per patch.
         features = []
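
The patched call site indexes the model output by the "tokens_and_masks" key instead of position 0. A minimal sketch of that access pattern with a stand-in model, assuming the wrapped Helios model returns a mapping of named outputs; only the key name comes from the diff, and the stand-in class and its fields are hypothetical:

# Stand-in illustration of the changed access pattern; not the real Helios model.
from dataclasses import dataclass

import torch


@dataclass
class TokensAndMasks:
    # Hypothetical fields; the real TokensAndMasks in helios may differ.
    tokens: torch.Tensor
    masks: torch.Tensor


class StandInHeliosModel(torch.nn.Module):
    def forward(self, sample, always_pass_none_mask_to_transformer=True, **kwargs):
        out = TokensAndMasks(tokens=torch.zeros(1, 4, 8), masks=torch.ones(1, 4))
        # Assumed return shape: a mapping of named outputs, so positional
        # indexing like [0] would raise KeyError; the fix selects by key.
        return {"tokens_and_masks": out}


model = StandInHeliosModel()
tokens_and_masks = model(sample=None)["tokens_and_masks"]
print(type(tokens_and_masks).__name__)  # -> TokensAndMasks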
