"""
LexiMind custom transformer models.

This package provides a from-scratch transformer implementation with:
- TransformerEncoder/TransformerDecoder
- MultiHeadAttention, FeedForward, PositionalEncoding
- Task heads: ClassificationHead, TokenClassificationHead, LMHead, ProjectionHead
- MultiTaskModel: a composable wrapper that pairs an encoder or decoder with task heads
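
Example (a minimal sketch; the class names come from this package, but the
import path and the constructor arguments are illustrative assumptions, not
the exact signatures):

    from leximind.models import (  # assumed top-level package path
        ClassificationHead,
        MultiTaskModel,
        TransformerEncoder,
    )

    # Placeholder hyperparameters; see each module for the real signatures.
    encoder = TransformerEncoder(vocab_size=30000, d_model=512, num_layers=6)
    head = ClassificationHead(d_model=512, num_classes=3)
    model = MultiTaskModel(encoder=encoder, heads={"classification": head})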
"""
from .attention import MultiHeadAttention
from .decoder import TransformerDecoder, TransformerDecoderLayer, create_causal_mask
from .encoder import TransformerEncoder, TransformerEncoderLayer
from .feedforward import FeedForward
from .heads import ClassificationHead, LMHead, ProjectionHead, TokenClassificationHead
from .multitask import MultiTaskModel
from .positional_encoding import PositionalEncoding

__all__ = [
"TransformerEncoder",
"TransformerEncoderLayer",
"TransformerDecoder",
"TransformerDecoderLayer",
"create_causal_mask",
"MultiHeadAttention",
"FeedForward",
"PositionalEncoding",
"ClassificationHead",
"TokenClassificationHead",
"LMHead",
"ProjectionHead",
"MultiTaskModel",
]