This is an annotated PyTorch experiment to train a transformer model with Rotary Positional Embeddings (RoPE).
from labml import experiment
from labml.configs import option, calculate
from labml_nn.transformers import TransformerConfigs
from labml_nn.transformers.basic.autoregressive_experiment import AutoregressiveTransformer, Configs

Create a rotary PE multi-head attention module from the transformer configurations. The final argument (1.) is the fraction of features that rotary embeddings are applied to, so here all of them are rotated.

def _rotary_pe_mha(c: TransformerConfigs):
    from labml_nn.transformers.rope import RotaryPEMultiHeadAttention
    return RotaryPEMultiHeadAttention(c.n_heads, c.d_model, 1.)
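To make the rotation concrete, here is a minimal, self-contained sketch of the position-dependent rotation that rotary embeddings apply to query and key features. The pairing convention (feature j with feature j + d/2) and the base of 10,000 are illustrative assumptions; see labml_nn.transformers.rope for the actual RotaryPEMultiHeadAttention implementation.

import torch


def rope_rotate(x: torch.Tensor, base: float = 10_000.) -> torch.Tensor:
    # Illustrative sketch only; x has shape [seq_len, d] with d even.
    seq_len, d = x.shape
    # One frequency per feature pair: theta_i = base^(-2i/d)
    theta = base ** (-torch.arange(0, d, 2).float() / d)
    # Rotation angle for position m and pair i is m * theta_i
    angles = torch.arange(seq_len).float()[:, None] * theta[None, :]
    cos, sin = angles.cos(), angles.sin()
    # Pair feature j with feature j + d/2 (an assumed layout)
    x1, x2 = x[:, :d // 2], x[:, d // 2:]
    # Apply a 2D rotation to each pair by its position-dependent angle
    return torch.cat([x1 * cos - x2 * sin, x1 * sin + x2 * cos], dim=-1)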
Configuration options: register 'rotary' as a named option for the encoder attention, decoder attention, and decoder memory attention.

calculate(TransformerConfigs.encoder_attn, 'rotary', _rotary_pe_mha)
calculate(TransformerConfigs.decoder_attn, 'rotary', _rotary_pe_mha)
calculate(TransformerConfigs.decoder_mem_attn, 'rotary', _rotary_pe_mha)
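As a rough mental model of what these registrations achieve (a hypothetical sketch, not labml's actual labml.configs machinery): each option name maps to a builder function, so a configuration string such as 'rotary' later selects _rotary_pe_mha to construct the attention module.

from typing import Callable, Dict

# Hypothetical registry, for illustration only
_OPTION_BUILDERS: Dict[str, Callable] = {}


def register_option(name: str, builder: Callable) -> None:
    # Associate an option name with the function that builds the component
    _OPTION_BUILDERS[name] = builder


def build_option(name: str, config) -> object:
    # Look up the builder by its registered name and call it with the configs
    return _OPTION_BUILDERS[name](config)

With that picture in mind, setting 'transformer.encoder_attn': 'rotary' in main() below amounts to calling the registered builder when the encoder attention is constructed.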
Create an autoregressive model and initialize weights

@option(Configs.model, 'rotary_pe_transformer')
def _model(c: Configs):
    m = AutoregressiveTransformer(c.transformer.encoder,
                                  c.transformer.src_embed,
                                  c.transformer.generator).to(c.device)

    return m
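For intuition, a stripped-down, hypothetical analogue of what such an autoregressive wrapper does: embed the tokens, run them through a causally masked encoder, and project to vocabulary logits. The class name and the encoder's (x, mask) call signature are assumptions for illustration, not the actual AutoregressiveTransformer API.

import torch
from torch import nn


class TinyAutoregressiveModel(nn.Module):
    def __init__(self, n_tokens: int, d_model: int, encoder: nn.Module):
        super().__init__()
        # Token embedding, the transformer encoder, and the output projection
        self.src_embed = nn.Embedding(n_tokens, d_model)
        self.encoder = encoder
        self.generator = nn.Linear(d_model, n_tokens)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # x holds token ids of shape [seq_len, batch_size]
        seq_len = x.shape[0]
        # Causal mask so position i only attends to positions <= i
        mask = torch.tril(torch.ones(seq_len, seq_len, dtype=torch.bool, device=x.device))
        h = self.encoder(self.src_embed(x), mask)
        # Return logits over the vocabulary at every position
        return self.generator(h)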
def main():

Create experiment
    experiment.create(name="rotary_pe_transformer", writers={'screen'})

Create configs
    conf = Configs()

Override configurations
    experiment.configs(conf, {

No fixed positional embeddings
        'transformer.src_embed': 'no_pos',
        'transformer.tgt_embed': 'no_pos',

Encoder with RoPE
        'transformer.encoder_attn': 'rotary',

Use the rotary PE transformer model defined above
        'model': 'rotary_pe_transformer',

Use character level tokenizer
        'tokenizer': 'character',

Prompt separator is blank
        'prompt_separator': '',

Starting prompt for sampling
        'prompt': 'It is ',

Use Tiny Shakespeare dataset
        'text': 'tiny_shakespeare',

Use a context size of 512
        'seq_len': 512,

Train for 32 epochs
        'epochs': 32,

Batch size of 4
        'batch_size': 4,

Switch between training and validation 10 times per epoch
        'inner_iterations': 10,

Model size
        'd_model': 128,
        'transformer.ffn.d_ff': 512,
        'transformer.n_heads': 16,
        'transformer.dropout': 0.0,

Use Noam optimizer
        'optimizer.optimizer': 'Noam',
        'optimizer.learning_rate': 1.,

        'dataloader_shuffle_with_replacement': True
    })
Set models for saving and loading
    experiment.add_pytorch_models({'model': conf.model})

Start the experiment
    with experiment.start():

Run training
        conf.run()

if __name__ == '__main__':
    main()
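The configuration above selects the Noam optimizer with a learning-rate factor of 1. For reference, a minimal sketch of the standard Noam schedule from "Attention Is All You Need"; the 4000-step warmup here is an assumed illustrative value, not necessarily this experiment's default.

def noam_lr(step: int, d_model: int = 128, warmup: int = 4000, factor: float = 1.) -> float:
    # Learning rate grows linearly for `warmup` steps, then decays as step^-0.5,
    # scaled by d_model^-0.5 and the configured factor.
    step = max(step, 1)
    return factor * d_model ** -0.5 * min(step ** -0.5, step * warmup ** -1.5)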