Coverage for transformer_lens/components/__init__.py: 100%

22 statements  


1"""Hooked Transformer Components. 

2 

3This module contains all the components (e.g. :class:`Attention`, :class:`MLP`, :class:`LayerNorm`) 

4needed to create many different types of generative language models. They are used by 

5:class:`transformer_lens.HookedTransformer`. 

6""" 

7 

8# Independent classes 

9from .abstract_attention import AbstractAttention 

10from .layer_norm import LayerNorm 

11from .layer_norm_pre import LayerNormPre 

12from .pos_embed import PosEmbed 

13from .rms_norm import RMSNorm 

14from .rms_norm_pre import RMSNormPre 

15from .token_typed_embed import TokenTypeEmbed 

16from .unembed import Unembed 

17 

18# Only dependent on independent modules 

19from .attention import Attention 

20from .bert_mlm_head import BertMLMHead 

21from .bert_nsp_head import BertNSPHead 

22from .bert_pooler import BertPooler 

23from .embed import Embed 

24from .grouped_query_attention import GroupedQueryAttention 

25from .mlps.gated_mlp import GatedMLP 

26from .mlps.mlp import MLP 

27 

28# Interdependent modules 

29from .bert_block import BertBlock 

30from .bert_embed import BertEmbed 

31from .mlps.moe import MoE 

32from .transformer_block import TransformerBlock 

33from .t5_attention import T5Attention 

34from .t5_block import T5Block
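
Because this `__init__.py` re-exports every component at the package level, the classes above can be imported directly from `transformer_lens.components` and used standalone. A minimal sketch, assuming the documented `HookedTransformerConfig` interface (the specific config values below are illustrative, not from the source file):

import torch

from transformer_lens import HookedTransformerConfig
from transformer_lens.components import LayerNorm

# Minimal config; n_layers, d_model, n_ctx, and d_head are required fields,
# and act_fn is required for models with MLPs.
cfg = HookedTransformerConfig(
    n_layers=1,
    d_model=16,
    n_ctx=32,
    d_head=4,
    act_fn="relu",
)

# Instantiate one component on its own, outside a full HookedTransformer.
ln = LayerNorm(cfg)
x = torch.randn(2, 8, cfg.d_model)  # [batch, pos, d_model]
out = ln(x)
print(out.shape)  # torch.Size([2, 8, 16])

The three comment groups in the listing encode import order by dependency: classes with no intra-package dependencies come first, then classes that depend only on those, then mutually dependent modules such as `TransformerBlock`.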