vllm.v1.attention.backends.mamba_selectors

get_mamba_attn_backend

get_mamba_attn_backend(
    mamba_type: str,
) -> type[AttentionBackend]

Return the attention backend class registered for the given mamba_type string. Raises NotImplementedError for unsupported types.

Source code in vllm/v1/attention/backends/mamba_selectors.py
def get_mamba_attn_backend(mamba_type: str) -> type[AttentionBackend]:
    if mamba_type == "mamba1":
        return Mamba1AttentionBackend
    if mamba_type == "mamba2":
        return Mamba2AttentionBackend
    if mamba_type == "linear_attention":
        return LinearAttentionBackend

    raise NotImplementedError(f"Mamba Attention type {mamba_type} is not "
                              "supported yet.")