from labml import experiment
from labml.configs import calculate
from labml_nn.experiments.arithmetic_dataset import ArithmeticAutoregression
from labml_nn.transformers import TransformerConfigs
from labml_nn.transformers.rope.experiment import Configs as RoPEConfigs


class Configs(RoPEConfigs, ArithmeticAutoregression):
    # Inherit the RoPE experiment configs and run them on the arithmetic
    # addition (autoregression) task.
    pass


def _rotary_value_pe_mha(c: TransformerConfigs):
    # Multi-head attention with rotary positional embeddings applied to the
    # values as well as to the queries and keys (RoPER).
    from labml_nn.transformers.rope.value_pe import RotaryValuePEMultiHeadAttention
    return RotaryValuePEMultiHeadAttention(c.n_heads, c.d_model, 1., 1.)
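
# Note on the two trailing `1.` arguments above: this sketch assumes they are the
# fractions of features that get the rotary rotation, for the queries/keys and for
# the values respectively (i.e. rotate everything here); check the
# `RotaryValuePEMultiHeadAttention` constructor if different fractions are needed.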


# Configuration options
calculate(TransformerConfigs.encoder_attn, 'rotary_value', _rotary_value_pe_mha)
calculate(TransformerConfigs.decoder_attn, 'rotary_value', _rotary_value_pe_mha)
calculate(TransformerConfigs.decoder_mem_attn, 'rotary_value', _rotary_value_pe_mha)
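
# The `calculate` registrations above make 'rotary_value' selectable by name for
# the encoder attention, decoder self-attention and decoder memory attention
# configs; when `experiment.configs` below picks this option, labml builds the
# attention modules by calling `_rotary_value_pe_mha`.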


def main():
    # Create experiment
    experiment.create(name="roper_addition", comment="rotary value 7", writers={'screen', 'labml'})
    # Create configs
    conf = Configs()
    # Override configurations
    experiment.configs(conf, {
        # Maximum number of digits in the numbers being added
        'max_digits': 7,

        # No fixed positional embeddings
        'transformer.src_embed': 'no_pos',
        'transformer.tgt_embed': 'no_pos',

        # Encoder attention with RoPER (rotary positional embeddings applied to values)
        'transformer.encoder_attn': 'rotary_value',
        # Encoder attention with plain RoPE (alternative):
        # 'transformer.encoder_attn': 'rotary',

        'model': 'rotary_pe_transformer',

        # Use a context size of 512
        'seq_len': 512,
        # Train for 20 epochs
        'epochs': 20,
        # Batch size of 16
        'batch_size': 16,

        # Model size
        'd_model': 128,
        'transformer.ffn.d_ff': 512,
        'transformer.n_heads': 4,
        'transformer.dropout': 0.0,
    })
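
    # The dotted keys above (e.g. 'transformer.ffn.d_ff', 'transformer.n_heads')
    # reach into the nested transformer configs of `Configs`, while plain keys
    # such as 'seq_len' and 'batch_size' are set on the experiment configs directly.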

    # Set models for saving and loading
    experiment.add_pytorch_models({'model': conf.model})

    # Start the experiment
    with experiment.start():
        # Run training
        conf.run()


if __name__ == '__main__':
    main()
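
# Running this module directly launches training; progress is logged to the
# terminal and to the labml app, per the 'screen' and 'labml' writers above.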