Coverage for transformer_lens/components/__init__.py: 100% (20 statements)
« prev ^ index » next — coverage.py v7.4.4, created at 2024-12-14 00:54 +0000
1"""Hooked Transformer Components.
3This module contains all the components (e.g. :class:`Attention`, :class:`MLP`, :class:`LayerNorm`)
4needed to create many different types of generative language models. They are used by
5:class:`transformer_lens.HookedTransformer`.
6"""
8# Independent classes
9from .abstract_attention import AbstractAttention
10from .layer_norm import LayerNorm
11from .layer_norm_pre import LayerNormPre
12from .pos_embed import PosEmbed
13from .rms_norm import RMSNorm
14from .rms_norm_pre import RMSNormPre
15from .token_typed_embed import TokenTypeEmbed
16from .unembed import Unembed
18# Only dependent on independent modules
19from .attention import Attention
20from .bert_mlm_head import BertMLMHead
21from .embed import Embed
22from .grouped_query_attention import GroupedQueryAttention
23from .mlps.gated_mlp import GatedMLP
24from .mlps.mlp import MLP
26# Interdependent modules
27from .bert_block import BertBlock
28from .bert_embed import BertEmbed
29from .mlps.moe import MoE
30from .transformer_block import TransformerBlock
31from .t5_attention import T5Attention
32from .t5_block import T5Block