# mpt-125m / layers.py
# Uploaded by germanjke using huggingface_hub (commit 2f200a8).
# NOTE(review): these four header lines were scraped Hugging Face page chrome,
# not Python; converted to comments so the module parses.
from .layers import flash_attn_triton
from .attention import ATTN_CLASS_REGISTRY, MultiheadAttention, MultiQueryAttention, attn_bias_shape, build_alibi_bias, build_attn_bias, flash_attn_fn, scaled_multihead_dot_product_attention, triton_flash_attn_fn
from .blocks import MPTMLP, MPTBlock
from .norm import NORM_CLASS_REGISTRY, LPLayerNorm