from typing import Optional

import torch


def precompute_freqs_cis(
    dim: int, end: int, theta: float = 10000.0, scaling_factor: float = 1.0
) -> torch.Tensor:
    """Precompute the complex rotary frequencies used by RoPE.

    ``scaling_factor`` divides the positions, which matches the linear
    position-interpolation trick for stretching to longer context windows.
    """
    # Per-pair frequencies theta^(-2i/dim) for i = 0 .. dim // 2 - 1.
    freqs = 1.0 / (theta ** (torch.arange(0, dim, 2).float() / dim))
    # Positions 0 .. end - 1, optionally compressed by the scaling factor.
    t = torch.arange(end, device=freqs.device).float() / scaling_factor
    # (end, dim // 2) matrix of rotation angles.
    freqs = torch.outer(t, freqs).float()
    # Unit-magnitude complex numbers cos(angle) + i * sin(angle).
    return torch.polar(torch.ones_like(freqs), freqs)


def reshape_for_broadcast(freqs_cis: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
    """Reshape ``freqs_cis`` so it broadcasts against ``x`` along dims 0 and -1."""
    ndim = x.ndim
    assert ndim > 1
    assert freqs_cis.shape == (x.shape[0], x.shape[-1])
    # Keep the first and last dimensions of x; insert size-1 dims everywhere else.
    shape = [d if i == 0 or i == ndim - 1 else 1 for i, d in enumerate(x.shape)]
    return freqs_cis.view(*shape)


def apply_rotary_emb(
    xq: torch.Tensor,
    xk: torch.Tensor,
    freqs_cis: torch.Tensor,
    position_ids: Optional[torch.Tensor] = None,
) -> tuple[torch.Tensor, torch.Tensor]:
    """Apply rotary position embeddings to the query and key tensors.

    The shape checks below imply xq/xk of shape (seqlen, batch, n_heads,
    head_dim), freqs_cis of shape (seqlen, head_dim // 2) and, when given,
    position_ids of shape (batch, seqlen).
    """
    # View each head_dim as head_dim // 2 complex numbers (adjacent pairs).
    xq_ = torch.view_as_complex(xq.float().reshape(*xq.shape[:-1], -1, 2))
    xk_ = torch.view_as_complex(xk.float().reshape(*xk.shape[:-1], -1, 2))

    freqs_cis = freqs_cis.to(xq.device)
    if position_ids is None:
        # Implicit positions 0 .. seqlen - 1: broadcast freqs_cis over the
        # batch and head dimensions.
        freqs_cis = reshape_for_broadcast(freqs_cis, xq_)
    else:
        # Explicit positions: gather the matching frequencies, put the sequence
        # dimension first, and add a singleton head dimension.
        position_ids = position_ids.to(xq.device)
        assert position_ids.shape == (xq_.shape[1], xq_.shape[0])
        assert freqs_cis.shape[1] == xq_.shape[-1]
        freqs_cis = freqs_cis[position_ids].transpose(0, 1).unsqueeze(-2)

    # Complex multiplication rotates each pair; flatten back to head_dim.
    xq_out = torch.view_as_real(xq_ * freqs_cis).flatten(3)
    xk_out = torch.view_as_real(xk_ * freqs_cis).flatten(3)
    return xq_out.type_as(xq), xk_out.type_as(xk)
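

# Minimal usage sketch: the (seqlen, batch, n_heads, head_dim) layout below is
# inferred from the shape asserts above rather than stated anywhere, so treat
# the exact shapes as an assumption.
if __name__ == "__main__":
    seqlen, batch, n_heads, head_dim = 16, 2, 4, 8
    freqs_cis = precompute_freqs_cis(head_dim, seqlen)
    xq = torch.randn(seqlen, batch, n_heads, head_dim)
    xk = torch.randn(seqlen, batch, n_heads, head_dim)

    # Implicit positions 0 .. seqlen - 1.
    xq_rot, xk_rot = apply_rotary_emb(xq, xk, freqs_cis)
    assert xq_rot.shape == xq.shape and xk_rot.shape == xk.shape

    # Explicit per-sample positions of shape (batch, seqlen).
    position_ids = torch.arange(seqlen).expand(batch, seqlen)
    xq_rot, xk_rot = apply_rotary_emb(xq, xk, freqs_cis, position_ids)
    assert xq_rot.shape == xq.shape and xk_rot.shape == xk.shape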