RWKV-Runner/finetune/lora/v6/fla/layers/__init__.py

# -*- coding: utf-8 -*-
from .abc import ABCAttention
from .based import BasedLinearAttention
from .delta_net import DeltaNet
from .gla import GatedLinearAttention
from .hgrn import HGRNAttention
from .hgrn2 import HGRN2Attention
from .linear_attn import LinearAttention
from .multiscale_retention import MultiScaleRetention
from .rebased import ReBasedLinearAttention
from .rwkv6 import RWKV6Attention

__all__ = [
    'ABCAttention',
    'BasedLinearAttention',
    'DeltaNet',
    'GatedLinearAttention',
    'HGRNAttention',
    'HGRN2Attention',
    'LinearAttention',
    'MultiScaleRetention',
    'ReBasedLinearAttention',
    'RWKV6Attention'
]
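

# Usage sketch: a minimal example of driving one of the exported layers.
# The `hidden_size` keyword and the (batch, seq_len, hidden_size) tensor
# layout are assumptions based on common fla conventions; check the
# individual layer modules for the exact constructor and forward signatures.
def _usage_sketch():  # never called at import time; illustration only
    import torch

    layer = RWKV6Attention(hidden_size=512)  # `hidden_size` is an assumed kwarg
    x = torch.randn(2, 64, 512)              # (batch, seq_len, hidden_size)
    return layer(x)                          # return type may be a tensor or a tuple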