This is an annotated PyTorch experiment to train a transformer model with Rotary Positional Embeddings (RoPE).
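RoPE injects position information by rotating each pair of query/key features through an angle proportional to the token's position, instead of adding positional vectors to the embeddings; dot products of rotated queries and keys then depend on relative offsets rather than absolute positions. Below is a minimal illustrative sketch of that rotation (it uses the paper's consecutive-pair convention; `rope_rotate` is a made-up helper for illustration, not labml_nn's API):

```python
import torch


def rope_rotate(x: torch.Tensor, base: float = 10_000.0) -> torch.Tensor:
    """Rotate consecutive feature pairs of `x` (shape `(seq_len, d)`, `d` even)."""
    seq_len, d = x.shape
    # Pair i gets frequency theta_i = base^(-2i/d)
    theta = base ** (-torch.arange(0, d, 2).float() / d)              # (d/2,)
    # Position m rotates pair i by angle m * theta_i
    angles = torch.arange(seq_len).float()[:, None] * theta[None, :]  # (seq_len, d/2)
    cos, sin = angles.cos(), angles.sin()
    x1, x2 = x[:, 0::2], x[:, 1::2]
    out = torch.empty_like(x)
    out[:, 0::2] = x1 * cos - x2 * sin
    out[:, 1::2] = x1 * sin + x2 * cos
    return out


# Scores between rotated queries and keys depend on relative position
q = rope_rotate(torch.randn(10, 8))
k = rope_rotate(torch.randn(10, 8))
scores = q @ k.T  # (10, 10) attention logits
```

labml_nn's `RotaryPEMultiHeadAttention`, used below, applies this kind of rotation to queries and keys inside attention. The experiment code follows, starting with the imports: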
```python
from labml import experiment
from labml.configs import option, calculate
from labml_nn.transformers import TransformerConfigs
from labml_nn.transformers.basic.autoregressive_experiment import AutoregressiveTransformer, Configs
```

Helper to create a `RotaryPEMultiHeadAttention` module:

```python
def _rotary_pe_mha(c: TransformerConfigs):
    from labml_nn.transformers.rope import RotaryPEMultiHeadAttention
    return RotaryPEMultiHeadAttention(c.n_heads, c.d_model, 1.)
```
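A quick, hypothetical smoke test of the module this helper constructs (assumes `labml_nn` and `torch` are installed; the `(seq_len, batch, d_model)` tensor layout and keyword-only `query`/`key`/`value` arguments follow labml_nn's `MultiHeadAttention` base class, and the final `1.` mirrors the call above, which I read as rotating all head features):

```python
import torch
from labml_nn.transformers.rope import RotaryPEMultiHeadAttention

# 16 heads on d_model=128 leaves 8 features per head (4 rotation pairs),
# matching the sizes this experiment configures in `main()` below.
mha = RotaryPEMultiHeadAttention(16, 128, 1.)
x = torch.randn(512, 4, 128)  # (seq_len, batch_size, d_model)
out = mha(query=x, key=x, value=x)
assert out.shape == (512, 4, 128)
```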
Configuration options: register `'rotary'` as a named option for the encoder, decoder, and decoder-memory attention modules of `TransformerConfigs`, so it can be selected with a string in `experiment.configs`.

```python
calculate(TransformerConfigs.encoder_attn, 'rotary', _rotary_pe_mha)
calculate(TransformerConfigs.decoder_attn, 'rotary', _rotary_pe_mha)
calculate(TransformerConfigs.decoder_mem_attn, 'rotary', _rotary_pe_mha)
```
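For readers new to labml's config system, here is a minimal, hypothetical sketch of the same registration pattern (`ToyConfigs`, `_double`, and `'double'` are made-up names; only `BaseConfigs` and `calculate` are real labml imports). It assumes, as the registrations above suggest, that a function whose single parameter is annotated with the configs class receives the whole configs object:

```python
from labml.configs import BaseConfigs, calculate


class ToyConfigs(BaseConfigs):
    size: int = 10
    doubled: int


def _double(c: ToyConfigs):
    return c.size * 2


# Selecting {'doubled': 'double'} in `experiment.configs` would invoke `_double`
calculate(ToyConfigs.doubled, 'double', _double)
```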
Create an autoregressive model and initialize weights. labml builds the model lazily, calling this function when the `'rotary_pe_transformer'` option is selected in the configurations (see `main()` below).

```python
@option(Configs.model, 'rotary_pe_transformer')
def _model(c: Configs):
    m = AutoregressiveTransformer(c.transformer.encoder,
                                  c.transformer.src_embed,
                                  c.transformer.generator).to(c.device)

    return m
```
Create and run the experiment:

```python
def main():
    # Create experiment
    experiment.create(name="rotary_pe_transformer", writers={'screen'})
    # Create configs
    conf = Configs()
    # Override configurations
    experiment.configs(conf, {
        # No fixed positional embeddings
        'transformer.src_embed': 'no_pos',
        'transformer.tgt_embed': 'no_pos',

        # Encoder with RoPE
        'transformer.encoder_attn': 'rotary',

        'model': 'rotary_pe_transformer',

        # Use character level tokenizer
        'tokenizer': 'character',
        # Prompt separator is blank
        'prompt_separator': '',
        # Starting prompt for sampling
        'prompt': 'It is ',
        # Use Tiny Shakespeare dataset
        'text': 'tiny_shakespeare',

        # Use a context size of 512
        'seq_len': 512,
        # Train for 32 epochs
        'epochs': 32,
        # Batch size
        'batch_size': 4,
        # Switch between training and validation 10 times per epoch
        'inner_iterations': 10,

        # Model size
        'd_model': 128,
        'transformer.ffn.d_ff': 512,
        'transformer.n_heads': 16,
        'transformer.dropout': 0.0,

        # Use Noam optimizer
        'optimizer.optimizer': 'Noam',
        'optimizer.learning_rate': 1.,

        'dataloader_shuffle_with_replacement': True
    })

    # Set models for saving and loading
    experiment.add_pytorch_models({'model': conf.model})

    # Start the experiment
    with experiment.start():
        # Run training
        conf.run()


if __name__ == '__main__':
    main()
```
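Assuming the standard labml_nn package layout (inferred from the imports above, not stated in the source), the script can be launched with `python -m labml_nn.transformers.rope.experiment`. It trains the character-level model on Tiny Shakespeare and, per the `prompt` configuration, samples text starting from "It is " as training progresses.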