
Commit 021552f

RyanJDick authored and hipsterusername committed
Avoid unnecessary dtype conversions with rope encodings.
1 parent be73dbb commit 021552f

File tree

1 file changed (+2 −2 lines)


invokeai/backend/flux/math.py

+2 −2
@@ -24,12 +24,12 @@ def rope(pos: Tensor, dim: int, theta: int) -> Tensor:
     out = torch.einsum("...n,d->...nd", pos, omega)
     out = torch.stack([torch.cos(out), -torch.sin(out), torch.sin(out), torch.cos(out)], dim=-1)
     out = rearrange(out, "b n d (i j) -> b n d i j", i=2, j=2)
-    return out.float()
+    return out.to(dtype=pos.dtype, device=pos.device)


 def apply_rope(xq: Tensor, xk: Tensor, freqs_cis: Tensor) -> tuple[Tensor, Tensor]:
     xq_ = xq.view(*xq.shape[:-1], -1, 1, 2)
     xk_ = xk.view(*xk.shape[:-1], -1, 1, 2)
     xq_out = freqs_cis[..., 0] * xq_[..., 0] + freqs_cis[..., 1] * xq_[..., 1]
     xk_out = freqs_cis[..., 0] * xk_[..., 0] + freqs_cis[..., 1] * xk_[..., 1]
-    return xq_out.view(*xq.shape).type_as(xq), xk_out.view(*xk.shape).type_as(xk)
+    return xq_out.view(*xq.shape), xk_out.view(*xk.shape)
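Net effect: rope() now returns its table in the caller's dtype and device instead of always upcasting to float32, and apply_rope() drops the trailing .type_as() casts, which the commit message describes as unnecessary conversions. Below is a minimal, self-contained sketch (not the InvokeAI code itself) of that dtype flow; toy_rope, the bfloat16 example inputs, and the float32 intermediate math are illustrative assumptions.

# Sketch only: toy_rope is a stand-in for rope() in invokeai/backend/flux/math.py;
# the function name and example inputs below are assumptions for illustration.
import torch
from einops import rearrange
from torch import Tensor

def toy_rope(pos: Tensor, dim: int, theta: int = 10_000) -> Tensor:
    # Compute the rotary angles in float32 for accuracy.
    omega = 1.0 / (theta ** (torch.arange(0, dim, 2, device=pos.device).float() / dim))
    out = torch.einsum("...n,d->...nd", pos.float(), omega)
    out = torch.stack([torch.cos(out), -torch.sin(out), torch.sin(out), torch.cos(out)], dim=-1)
    out = rearrange(out, "b n d (i j) -> b n d i j", i=2, j=2)
    # The commit's change: hand the table back in the caller's dtype/device
    # rather than forcing float32 (the old `return out.float()`).
    return out.to(dtype=pos.dtype, device=pos.device)

pos = torch.arange(8).to(torch.bfloat16)[None, :]  # (batch, seq) positions in bf16
freqs = toy_rope(pos, dim=16)
assert freqs.dtype == torch.bfloat16  # previously this would always be torch.float32

With the table already in the working dtype, the products in apply_rope (freqs_cis[..., 0] * xq_[..., 0] + ...) stay in that dtype, which is why the trailing .type_as(xq) / .type_as(xk) calls could be removed.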
