Commit: Fix hstu
xuzhao9 committed Nov 18, 2024
1 parent 597facf commit 8a19930
Showing 1 changed file with 3 additions and 3 deletions.

tritonbench/operators/ragged_attention/hstu.py
@@ -19,8 +19,8 @@
 _ragged_hstu_attn_fwd_persistent = (
     triton_ragged_hstu_attention._ragged_hstu_attn_fwd_persistent
 )
-_RaggedAttentionRelativeBiasFunction = (
-    triton_ragged_hstu_attention._RaggedAttentionRelativeBiasFunction
+RaggedAttentionRelativeBiasFunction = (
+    triton_ragged_hstu_attention.RaggedAttentionRelativeBiasFunction
 )

 @torch.fx.wrap
@@ -150,7 +150,7 @@ def forward(
             grid = (1216,)
             _ragged_hstu_attn_fwd_persistent[grid](**kwargs)
         else:
-            out = _RaggedAttentionRelativeBiasFunction.apply(
+            out = RaggedAttentionRelativeBiasFunction.apply(
                 self.max_seq_len,  # N
                 kwargs["alpha"],
                 q,
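The change drops the leading underscore from RaggedAttentionRelativeBiasFunction in both the module-level alias and the .apply() call site, tracking the upstream rename of that symbol in triton_ragged_hstu_attention. A minimal compatibility sketch (hypothetical, not part of this commit; the import path below is an assumption) that resolves whichever name the installed generative-recommenders version exports:

# Hypothetical compatibility shim, not part of this commit. The import
# path is an assumption; adjust it to the local generative-recommenders
# checkout.
from generative_recommenders.ops.triton import triton_ragged_hstu_attention

# Prefer the new public name; fall back to the old underscore-prefixed one.
RaggedAttentionRelativeBiasFunction = getattr(
    triton_ragged_hstu_attention,
    "RaggedAttentionRelativeBiasFunction",
    getattr(triton_ragged_hstu_attention, "_RaggedAttentionRelativeBiasFunction", None),
)
if RaggedAttentionRelativeBiasFunction is None:
    raise ImportError(
        "triton_ragged_hstu_attention exports neither the public nor the "
        "private RaggedAttentionRelativeBiasFunction"
    )

Either way, callers invoke it exactly as the else branch in the diff above does: out = RaggedAttentionRelativeBiasFunction.apply(self.max_seq_len, kwargs["alpha"], q, ...).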
