yizhongw committed
Commit 6b453df
1 Parent(s): f820fa6

Checkpoint first release.

added_tokens.json ADDED
@@ -0,0 +1,3 @@
+ {
+   "[PAD]": 32000
+ }
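
The single token added above is what pushes the vocabulary past LLaMA's 32,000 base entries, which is why config.json below records vocab_size 32001. A minimal sanity-check sketch; the local directory name "checkpoint" is an assumption, not part of this commit:

```python
import json

# Hypothetical local path to this repo's files; adjust to wherever it is cloned.
with open("checkpoint/added_tokens.json") as f:
    added = json.load(f)

# "[PAD]" takes the first id after the 32,000-token base vocabulary.
assert added["[PAD]"] == 32000
```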
config.json ADDED
@@ -0,0 +1,26 @@
+ {
+   "_name_or_path": "reward_model_llama_new_scgeme/checkpoint-5400",
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "hidden_act": "silu",
+   "hidden_size": 4096,
+   "initializer_range": 0.02,
+   "intermediate_size": 11008,
+   "max_position_embeddings": 4096,
+   "model_type": "llama",
+   "num_attention_heads": 32,
+   "num_hidden_layers": 32,
+   "num_key_value_heads": 32,
+   "pad_token_id": 0,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": null,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float32",
+   "transformers_version": "4.28.1",
+   "use_cache": true,
+   "vocab_size": 32001
+ }
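
This is a stock 7B-scale LLaMA configuration (32 layers, hidden size 4096, 32 attention heads) saved from a reward-model training run. A hedged loading sketch, assuming the files sit in a local "checkpoint" directory and a transformers version compatible with the recorded 4.28.1:

```python
import torch
from transformers import AutoConfig, AutoModelForCausalLM

config = AutoConfig.from_pretrained("checkpoint")
print(config.model_type, config.vocab_size)  # llama 32001

# The shards were saved in float32 (~27 GB total); casting to float16 on load
# roughly halves the memory footprint.
model = AutoModelForCausalLM.from_pretrained("checkpoint", torch_dtype=torch.float16)
```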
generation_config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "bos_token_id": 1,
+   "do_sample": true,
+   "eos_token_id": 2,
+   "max_length": 4096,
+   "pad_token_id": 0,
+   "temperature": 0.6,
+   "top_p": 0.9,
+   "transformers_version": "4.28.1"
+ }
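
These are the default sampling settings that model.generate() picks up from the checkpoint: nucleus sampling with temperature 0.6 and top_p 0.9, up to 4096 tokens. A usage sketch, assuming a model and tokenizer have been loaded from this checkpoint (the tokenizer files appear further down in this commit); the prompt is arbitrary:

```python
inputs = tokenizer("Describe this checkpoint:", return_tensors="pt")
# Without explicit sampling arguments, generate() falls back to
# generation_config.json (do_sample=True, temperature=0.6, top_p=0.9);
# max_new_tokens here is our own choice.
outputs = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```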
pytorch_model-00001-of-00003.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ef8f2e4fa1fd976b711e48fd3d3af9fab9fa19d8a6259af3caf1b5bd16234c6c
+ size 9878005970
pytorch_model-00002-of-00003.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5a9b6c535430b6cff55fb323f4b289beeb32f3f0cf7b650fd2899f15454e35b1
+ size 9894801014
pytorch_model-00003-of-00003.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ce66101de944ddcd48ed9cac10280cf0861ba4a4f68ff820ac89454eed56abc8
+ size 7181007033
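
The three .bin entries above (and tokenizer.model below) are Git LFS pointer files: the repo itself stores only a sha256 digest and a byte size, while the ~27 GB of weights live in LFS storage. A small sketch for verifying a downloaded shard against its pointer; the path is hypothetical:

```python
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file through sha256 so a 10 GB shard never sits in memory at once."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        while block := f.read(chunk_size):
            digest.update(block)
    return digest.hexdigest()

print(sha256_of("checkpoint/pytorch_model-00001-of-00003.bin"))
# Expected: ef8f2e4fa1fd976b711e48fd3d3af9fab9fa19d8a6259af3caf1b5bd16234c6c
```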
pytorch_model.bin.index.json ADDED
@@ -0,0 +1,330 @@
+ {
+   "metadata": {
+     "total_size": 26953703424
+   },
+   "weight_map": {
+     "lm_head.weight": "pytorch_model-00003-of-00003.bin",
+     "model.embed_tokens.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.0.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.0.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.0.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.0.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.0.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.0.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.0.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.0.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.0.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+     "model.layers.0.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.1.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.1.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.1.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.1.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.1.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.1.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.1.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.1.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.1.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+     "model.layers.1.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.10.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.10.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.10.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.10.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.10.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.10.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.10.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.10.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.10.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+     "model.layers.10.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.11.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.11.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.11.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.11.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.11.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.11.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.11.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.11.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.11.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+     "model.layers.11.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.12.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.12.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.12.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.12.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.12.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.12.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.12.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.12.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.12.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+     "model.layers.12.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.13.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.13.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.13.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.13.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.13.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.13.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.13.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.13.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.13.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+     "model.layers.13.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.14.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.14.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.14.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.14.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.14.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.14.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.14.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.14.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.14.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+     "model.layers.14.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.15.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.15.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.15.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.15.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.15.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.15.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.15.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.15.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.15.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+     "model.layers.15.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.16.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.16.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.16.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.16.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.16.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.16.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.16.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.16.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.16.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+     "model.layers.16.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.17.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.17.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.17.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.17.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.17.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.17.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.17.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.17.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.17.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+     "model.layers.17.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.18.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.18.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.18.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.18.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.18.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.18.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.18.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.18.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.18.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+     "model.layers.18.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.19.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.19.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.19.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.19.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.19.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.19.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.19.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.19.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.19.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+     "model.layers.19.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.2.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.2.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.2.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.2.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.2.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.2.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.2.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.2.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.2.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+     "model.layers.2.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.20.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.20.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.20.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.20.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.20.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.20.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.20.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.20.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.20.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+     "model.layers.20.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.21.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.21.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.21.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.21.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.21.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.21.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.21.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.21.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.21.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+     "model.layers.21.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.22.input_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.22.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.22.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.22.mlp.up_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.22.post_attention_layernorm.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.22.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.22.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.22.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.22.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+     "model.layers.22.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.23.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.23.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.23.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.23.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.23.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.23.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.23.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.23.self_attn.q_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.23.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00003.bin",
+     "model.layers.23.self_attn.v_proj.weight": "pytorch_model-00002-of-00003.bin",
+     "model.layers.24.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.24.mlp.down_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.24.mlp.gate_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.24.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.24.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.24.self_attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.24.self_attn.o_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.24.self_attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.24.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00003.bin",
+     "model.layers.24.self_attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.25.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.25.mlp.down_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.25.mlp.gate_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.25.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.25.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.25.self_attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.25.self_attn.o_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.25.self_attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.25.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00003.bin",
+     "model.layers.25.self_attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.26.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.26.mlp.down_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.26.mlp.gate_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.26.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.26.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.26.self_attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.26.self_attn.o_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.26.self_attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.26.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00003.bin",
+     "model.layers.26.self_attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.27.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.27.mlp.down_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.27.mlp.gate_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.27.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.27.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.27.self_attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.27.self_attn.o_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.27.self_attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.27.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00003.bin",
+     "model.layers.27.self_attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.28.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.28.mlp.down_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.28.mlp.gate_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.28.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.28.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.28.self_attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.28.self_attn.o_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.28.self_attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.28.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00003.bin",
+     "model.layers.28.self_attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.29.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.29.mlp.down_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.29.mlp.gate_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.29.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.29.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.29.self_attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.29.self_attn.o_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.29.self_attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.29.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00003.bin",
+     "model.layers.29.self_attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.3.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.3.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.3.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.3.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.3.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.3.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.3.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.3.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.3.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+     "model.layers.3.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.30.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.30.mlp.down_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.30.mlp.gate_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.30.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.30.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.30.self_attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.30.self_attn.o_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.30.self_attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.30.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00003.bin",
+     "model.layers.30.self_attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.31.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.31.mlp.down_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.31.mlp.gate_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.31.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.31.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.31.self_attn.k_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.31.self_attn.o_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.31.self_attn.q_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.31.self_attn.rotary_emb.inv_freq": "pytorch_model-00003-of-00003.bin",
+     "model.layers.31.self_attn.v_proj.weight": "pytorch_model-00003-of-00003.bin",
+     "model.layers.4.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.4.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.4.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.4.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.4.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.4.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.4.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.4.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.4.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+     "model.layers.4.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.5.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.5.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.5.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.5.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.5.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.5.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.5.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.5.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.5.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+     "model.layers.5.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.6.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.6.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.6.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.6.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.6.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.6.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.6.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.6.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.6.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+     "model.layers.6.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.7.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.7.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.7.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.7.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.7.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.7.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.7.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.7.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.7.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+     "model.layers.7.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.8.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.8.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.8.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.8.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.8.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.8.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.8.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.8.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.8.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+     "model.layers.8.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.9.input_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.9.mlp.down_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.9.mlp.gate_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.9.mlp.up_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.9.post_attention_layernorm.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.9.self_attn.k_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.9.self_attn.o_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.9.self_attn.q_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.layers.9.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
+     "model.layers.9.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
+     "model.norm.weight": "pytorch_model-00003-of-00003.bin"
+   }
+ }
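
When a checkpoint is sharded, transformers reads this index instead of a single pytorch_model.bin: weight_map names the shard that holds each tensor, and metadata.total_size records the summed parameter bytes. A short inspection sketch, using the same hypothetical "checkpoint" directory as above:

```python
import json
from collections import Counter

with open("checkpoint/pytorch_model.bin.index.json") as f:
    index = json.load(f)

# Tensors per shard file; note layers 11 and 23 straddle shard boundaries.
print(Counter(index["weight_map"].values()))
print(index["metadata"]["total_size"])  # 26953703424 bytes (~27 GB in float32)
```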
special_tokens_map.json ADDED
@@ -0,0 +1,6 @@
+ {
+   "bos_token": "<s>",
+   "eos_token": "</s>",
+   "pad_token": "[PAD]",
+   "unk_token": "<unk>"
+ }
tokenizer.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+ size 499723
tokenizer_config.json ADDED
@@ -0,0 +1,35 @@
+ {
+   "add_bos_token": true,
+   "add_eos_token": false,
+   "bos_token": {
+     "__type": "AddedToken",
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "clean_up_tokenization_spaces": false,
+   "eos_token": {
+     "__type": "AddedToken",
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "legacy": false,
+   "model_max_length": 840,
+   "pad_token": null,
+   "padding_side": "right",
+   "sp_model_kwargs": {},
+   "tokenizer_class": "LlamaTokenizer",
+   "unk_token": {
+     "__type": "AddedToken",
+     "content": "<unk>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
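
Note that pad_token is null here even though special_tokens_map.json sets it to "[PAD]" and added_tokens.json assigns that token id 32000; the padding token should only resolve once all three files are read together. A hedged check, again assuming a local "checkpoint" directory:

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("checkpoint")
# special_tokens_map.json supplies the pad token that tokenizer_config.json left null.
print(tokenizer.pad_token, tokenizer.pad_token_id)  # [PAD] 32000
print(tokenizer.model_max_length)                   # 840, as recorded above
```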
trainer_state.json ADDED
@@ -0,0 +1,1855 @@
+ {
+   "best_metric": null,
+   "best_model_checkpoint": null,
+   "epoch": 1.9993461915658712,
+   "global_step": 3058,
+   "is_hyper_param_search": false,
+   "is_local_process_zero": true,
+   "is_world_process_zero": true,
+   "log_history": [
+     {
+       "epoch": 0.01,
+       "learning_rate": 6.451612903225806e-06,
+       "loss": 0.2905,
+       "step": 10
+     },
+     {
+       "epoch": 0.01,
+       "learning_rate": 1.2903225806451613e-05,
+       "loss": 0.1863,
+       "step": 20
+     },
+     {
+       "epoch": 0.02,
+       "learning_rate": 1.935483870967742e-05,
+       "loss": 0.2482,
+       "step": 30
+     },
+     {
+       "epoch": 0.03,
+       "learning_rate": 1.999956375870988e-05,
+       "loss": 0.1986,
+       "step": 40
+     },
+     {
+       "epoch": 0.03,
+       "learning_rate": 1.999805581299163e-05,
+       "loss": 0.215,
+       "step": 50
+     },
+     {
+       "epoch": 0.04,
+       "learning_rate": 1.9995470939538076e-05,
+       "loss": 0.2792,
+       "step": 60
+     },
+     {
+       "epoch": 0.05,
+       "learning_rate": 1.999180941677554e-05,
+       "loss": 0.2919,
+       "step": 70
+     },
+     {
+       "epoch": 0.05,
+       "learning_rate": 1.998707163910024e-05,
+       "loss": 0.2196,
+       "step": 80
+     },
+     {
+       "epoch": 0.06,
+       "learning_rate": 1.9981258116835808e-05,
+       "loss": 0.3381,
+       "step": 90
+     },
+     {
+       "epoch": 0.07,
+       "learning_rate": 1.9974369476178333e-05,
+       "loss": 0.1835,
+       "step": 100
+     },
+     {
+       "epoch": 0.07,
+       "learning_rate": 1.9966406459128885e-05,
+       "loss": 0.2671,
+       "step": 110
+     },
+     {
+       "epoch": 0.08,
+       "learning_rate": 1.9957369923413616e-05,
+       "loss": 0.1615,
+       "step": 120
+     },
+     {
+       "epoch": 0.08,
+       "learning_rate": 1.9947260842391358e-05,
+       "loss": 0.1339,
+       "step": 130
+     },
+     {
+       "epoch": 0.09,
+       "learning_rate": 1.9936080304948787e-05,
+       "loss": 0.2017,
+       "step": 140
+     },
+     {
+       "epoch": 0.1,
+       "learning_rate": 1.992382951538313e-05,
+       "loss": 0.2972,
+       "step": 150
+     },
+     {
+       "epoch": 0.1,
+       "learning_rate": 1.9910509793272433e-05,
+       "loss": 0.1995,
+       "step": 160
+     },
+     {
+       "epoch": 0.11,
+       "learning_rate": 1.989612257333346e-05,
+       "loss": 0.2271,
+       "step": 170
+     },
+     {
+       "epoch": 0.12,
+       "learning_rate": 1.988066940526711e-05,
+       "loss": 0.1475,
+       "step": 180
+     },
+     {
+       "epoch": 0.12,
+       "learning_rate": 1.986415195359154e-05,
+       "loss": 0.4046,
+       "step": 190
+     },
+     {
+       "epoch": 0.13,
+       "learning_rate": 1.984657199746282e-05,
+       "loss": 0.2995,
+       "step": 200
+     },
+     {
+       "epoch": 0.14,
+       "learning_rate": 1.9827931430483342e-05,
+       "loss": 0.2134,
+       "step": 210
+     },
+     {
+       "epoch": 0.14,
+       "learning_rate": 1.9808232260497826e-05,
+       "loss": 0.3513,
+       "step": 220
+     },
+     {
+       "epoch": 0.15,
+       "learning_rate": 1.9787476609377055e-05,
+       "loss": 0.2049,
+       "step": 230
+     },
+     {
+       "epoch": 0.16,
+       "learning_rate": 1.976566671278932e-05,
+       "loss": 0.2787,
+       "step": 240
+     },
+     {
+       "epoch": 0.16,
+       "learning_rate": 1.974280491995961e-05,
+       "loss": 0.223,
+       "step": 250
+     },
+     {
+       "epoch": 0.17,
+       "learning_rate": 1.9718893693416562e-05,
+       "loss": 0.2959,
+       "step": 260
+     },
+     {
+       "epoch": 0.18,
+       "learning_rate": 1.969393560872722e-05,
+       "loss": 0.293,
+       "step": 270
+     },
+     {
+       "epoch": 0.18,
+       "learning_rate": 1.966793335421961e-05,
+       "loss": 0.2299,
+       "step": 280
+     },
+     {
+       "epoch": 0.19,
+       "learning_rate": 1.964088973069316e-05,
+       "loss": 0.2127,
+       "step": 290
+     },
+     {
+       "epoch": 0.2,
+       "learning_rate": 1.9612807651117034e-05,
+       "loss": 0.16,
+       "step": 300
+     },
+     {
+       "epoch": 0.2,
+       "learning_rate": 1.958369014031635e-05,
+       "loss": 0.2136,
+       "step": 310
+     },
+     {
+       "epoch": 0.21,
+       "learning_rate": 1.955354033464637e-05,
+       "loss": 0.2905,
+       "step": 320
+     },
+     {
+       "epoch": 0.22,
+       "learning_rate": 1.952236148165468e-05,
+       "loss": 0.2233,
+       "step": 330
+     },
+     {
+       "epoch": 0.22,
+       "learning_rate": 1.9490156939731347e-05,
+       "loss": 0.1722,
+       "step": 340
+     },
+     {
+       "epoch": 0.23,
+       "learning_rate": 1.9456930177747247e-05,
+       "loss": 0.197,
+       "step": 350
+     },
+     {
+       "epoch": 0.24,
+       "learning_rate": 1.942268477468034e-05,
+       "loss": 0.1626,
+       "step": 360
+     },
+     {
+       "epoch": 0.24,
+       "learning_rate": 1.938742441923022e-05,
+       "loss": 0.1259,
+       "step": 370
+     },
+     {
+       "epoch": 0.25,
+       "learning_rate": 1.935115290942077e-05,
+       "loss": 0.2693,
+       "step": 380
+     },
+     {
+       "epoch": 0.25,
+       "learning_rate": 1.931387415219106e-05,
+       "loss": 0.2404,
+       "step": 390
+     },
+     {
+       "epoch": 0.26,
+       "learning_rate": 1.9275592162974527e-05,
+       "loss": 0.1602,
+       "step": 400
+     },
+     {
+       "epoch": 0.27,
+       "learning_rate": 1.9236311065266448e-05,
+       "loss": 0.1392,
+       "step": 410
+     },
+     {
+       "epoch": 0.27,
+       "learning_rate": 1.9196035090179793e-05,
+       "loss": 0.1638,
+       "step": 420
+     },
+     {
+       "epoch": 0.28,
+       "learning_rate": 1.9154768575989466e-05,
+       "loss": 0.1532,
+       "step": 430
+     },
+     {
+       "epoch": 0.29,
+       "learning_rate": 1.9112515967665016e-05,
+       "loss": 0.2115,
+       "step": 440
+     },
+     {
+       "epoch": 0.29,
+       "learning_rate": 1.9069281816391868e-05,
+       "loss": 0.1884,
+       "step": 450
+     },
+     {
+       "epoch": 0.3,
+       "learning_rate": 1.9025070779081064e-05,
+       "loss": 0.1788,
+       "step": 460
+     },
+     {
+       "epoch": 0.31,
+       "learning_rate": 1.8979887617867684e-05,
+       "loss": 0.1243,
+       "step": 470
+     },
+     {
+       "epoch": 0.31,
+       "learning_rate": 1.8933737199597882e-05,
+       "loss": 0.2044,
+       "step": 480
+     },
+     {
+       "epoch": 0.32,
+       "learning_rate": 1.888662449530467e-05,
+       "loss": 0.1791,
+       "step": 490
+     },
+     {
+       "epoch": 0.33,
+       "learning_rate": 1.8838554579672443e-05,
+       "loss": 0.2833,
+       "step": 500
+     },
+     {
+       "epoch": 0.33,
+       "learning_rate": 1.8789532630490405e-05,
+       "loss": 0.16,
+       "step": 510
+     },
+     {
+       "epoch": 0.34,
+       "learning_rate": 1.873956392809482e-05,
+       "loss": 0.1645,
+       "step": 520
+     },
+     {
+       "epoch": 0.35,
+       "learning_rate": 1.868865385480027e-05,
+       "loss": 0.245,
+       "step": 530
+     },
+     {
+       "epoch": 0.35,
+       "learning_rate": 1.8636807894319876e-05,
+       "loss": 0.2011,
+       "step": 540
+     },
+     {
+       "epoch": 0.36,
+       "learning_rate": 1.8584031631174653e-05,
+       "loss": 0.1425,
+       "step": 550
+     },
+     {
+       "epoch": 0.37,
+       "learning_rate": 1.8530330750091978e-05,
+       "loss": 0.2352,
+       "step": 560
+     },
+     {
+       "epoch": 0.37,
+       "learning_rate": 1.8475711035393255e-05,
+       "loss": 0.1825,
+       "step": 570
+     },
+     {
+       "epoch": 0.38,
+       "learning_rate": 1.8420178370370872e-05,
+       "loss": 0.2032,
+       "step": 580
+     },
+     {
+       "epoch": 0.39,
+       "learning_rate": 1.8363738736654484e-05,
+       "loss": 0.2187,
+       "step": 590
+     },
+     {
+       "epoch": 0.39,
+       "learning_rate": 1.830639821356672e-05,
+       "loss": 0.1901,
+       "step": 600
+     },
+     {
+       "epoch": 0.4,
+       "learning_rate": 1.8248162977468346e-05,
+       "loss": 0.2359,
+       "step": 610
+     },
+     {
+       "epoch": 0.41,
+       "learning_rate": 1.818903930109299e-05,
+       "loss": 0.2114,
+       "step": 620
+     },
+     {
+       "epoch": 0.41,
+       "learning_rate": 1.8129033552871492e-05,
+       "loss": 0.2116,
+       "step": 630
+     },
+     {
+       "epoch": 0.42,
+       "learning_rate": 1.806815219624591e-05,
+       "loss": 0.2054,
+       "step": 640
+     },
+     {
+       "epoch": 0.42,
+       "learning_rate": 1.800640178897335e-05,
+       "loss": 0.2725,
+       "step": 650
+     },
+     {
+       "epoch": 0.43,
+       "learning_rate": 1.7943788982419586e-05,
+       "loss": 0.2232,
+       "step": 660
+     },
+     {
+       "epoch": 0.44,
+       "learning_rate": 1.788032052084263e-05,
+       "loss": 0.2528,
+       "step": 670
+     },
+     {
+       "epoch": 0.44,
+       "learning_rate": 1.781600324066626e-05,
+       "loss": 0.2436,
+       "step": 680
+     },
+     {
+       "epoch": 0.45,
+       "learning_rate": 1.7750844069743676e-05,
+       "loss": 0.1689,
+       "step": 690
+     },
+     {
+       "epoch": 0.46,
+       "learning_rate": 1.7684850026611248e-05,
+       "loss": 0.1595,
+       "step": 700
+     },
+     {
+       "epoch": 0.46,
+       "learning_rate": 1.7618028219732533e-05,
+       "loss": 0.1997,
+       "step": 710
+     },
+     {
+       "epoch": 0.47,
+       "learning_rate": 1.7550385846732606e-05,
+       "loss": 0.5174,
+       "step": 720
+     },
+     {
+       "epoch": 0.48,
+       "learning_rate": 1.748193019362275e-05,
+       "loss": 0.1299,
+       "step": 730
+     },
+     {
+       "epoch": 0.48,
+       "learning_rate": 1.741266863401569e-05,
+       "loss": 0.2168,
+       "step": 740
+     },
+     {
+       "epoch": 0.49,
+       "learning_rate": 1.734260862833132e-05,
+       "loss": 0.1283,
+       "step": 750
+     },
+     {
+       "epoch": 0.5,
+       "learning_rate": 1.7271757722993133e-05,
+       "loss": 0.4788,
+       "step": 760
+     },
+     {
+       "epoch": 0.5,
+       "learning_rate": 1.720012354961536e-05,
+       "loss": 0.1463,
+       "step": 770
+     },
+     {
+       "epoch": 0.51,
+       "learning_rate": 1.712771382418094e-05,
+       "loss": 0.1469,
+       "step": 780
+     },
+     {
+       "epoch": 0.52,
+       "learning_rate": 1.705453634621042e-05,
+       "loss": 0.2196,
+       "step": 790
+     },
+     {
+       "epoch": 0.52,
+       "learning_rate": 1.6980598997921793e-05,
+       "loss": 0.1376,
+       "step": 800
+     },
+     {
+       "epoch": 0.53,
+       "learning_rate": 1.6905909743381544e-05,
+       "loss": 0.2466,
+       "step": 810
+     },
+     {
+       "epoch": 0.54,
+       "learning_rate": 1.683047662764675e-05,
+       "loss": 0.2474,
+       "step": 820
+     },
+     {
+       "epoch": 0.54,
+       "learning_rate": 1.6754307775898547e-05,
+       "loss": 0.2719,
+       "step": 830
+     },
+     {
+       "epoch": 0.55,
+       "learning_rate": 1.6677411392566937e-05,
+       "loss": 0.2841,
+       "step": 840
+     },
+     {
+       "epoch": 0.56,
+       "learning_rate": 1.659979576044704e-05,
+       "loss": 0.1851,
+       "step": 850
+     },
+     {
+       "epoch": 0.56,
+       "learning_rate": 1.6521469239806943e-05,
+       "loss": 0.6387,
+       "step": 860
+     },
+     {
+       "epoch": 0.57,
+       "learning_rate": 1.6442440267487174e-05,
+       "loss": 0.2083,
+       "step": 870
+     },
+     {
+       "epoch": 0.58,
+       "learning_rate": 1.636271735599194e-05,
+       "loss": 0.1612,
+       "step": 880
+     },
+     {
+       "epoch": 0.58,
+       "learning_rate": 1.628230909257222e-05,
+       "loss": 0.2165,
+       "step": 890
+     },
+     {
+       "epoch": 0.59,
+       "learning_rate": 1.6201224138300787e-05,
+       "loss": 0.1853,
+       "step": 900
+     },
+     {
+       "epoch": 0.59,
+       "learning_rate": 1.6119471227139306e-05,
+       "loss": 0.2882,
+       "step": 910
+     },
+     {
+       "epoch": 0.6,
+       "learning_rate": 1.603705916499755e-05,
+       "loss": 0.1554,
+       "step": 920
+     },
+     {
+       "epoch": 0.61,
+       "learning_rate": 1.5953996828784912e-05,
+       "loss": 0.1088,
+       "step": 930
+     },
+     {
+       "epoch": 0.61,
+       "learning_rate": 1.5870293165454205e-05,
+       "loss": 0.1696,
+       "step": 940
+     },
+     {
+       "epoch": 0.62,
+       "learning_rate": 1.578595719103797e-05,
+       "loss": 0.225,
+       "step": 950
+     },
+     {
+       "epoch": 0.63,
+       "learning_rate": 1.5700997989677337e-05,
+       "loss": 0.2903,
+       "step": 960
+     },
+     {
+       "epoch": 0.63,
+       "learning_rate": 1.56154247126435e-05,
+       "loss": 0.1684,
+       "step": 970
+     },
+     {
+       "epoch": 0.64,
+       "learning_rate": 1.5529246577352056e-05,
+       "loss": 0.1553,
+       "step": 980
+     },
+     {
+       "epoch": 0.65,
+       "learning_rate": 1.544247286637011e-05,
+       "loss": 0.1534,
+       "step": 990
+     },
+     {
+       "epoch": 0.65,
+       "learning_rate": 1.5355112926416442e-05,
+       "loss": 0.1236,
+       "step": 1000
+     },
+     {
+       "epoch": 0.66,
+       "learning_rate": 1.526717616735474e-05,
+       "loss": 0.2146,
+       "step": 1010
+     },
+     {
+       "epoch": 0.67,
+       "learning_rate": 1.5178672061180008e-05,
+       "loss": 0.2008,
+       "step": 1020
+     },
+     {
+       "epoch": 0.67,
+       "learning_rate": 1.5089610140998332e-05,
+       "loss": 0.1754,
+       "step": 1030
+     },
+     {
+       "epoch": 0.68,
+       "learning_rate": 1.5000000000000002e-05,
+       "loss": 0.2003,
+       "step": 1040
+     },
+     {
+       "epoch": 0.69,
+       "learning_rate": 1.4909851290426215e-05,
+       "loss": 0.2021,
+       "step": 1050
+     },
+     {
+       "epoch": 0.69,
+       "learning_rate": 1.4819173722529392e-05,
+       "loss": 0.2995,
+       "step": 1060
+     },
+     {
+       "epoch": 0.7,
+       "learning_rate": 1.4727977063527248e-05,
+       "loss": 0.1397,
+       "step": 1070
+     },
+     {
+       "epoch": 0.71,
+       "learning_rate": 1.4636271136550723e-05,
+       "loss": 0.1469,
+       "step": 1080
+     },
+     {
+       "epoch": 0.71,
+       "learning_rate": 1.4544065819585911e-05,
+       "loss": 0.1932,
+       "step": 1090
+     },
+     {
+       "epoch": 0.72,
+       "learning_rate": 1.4451371044410057e-05,
+       "loss": 0.1776,
+       "step": 1100
+     },
+     {
+       "epoch": 0.73,
+       "learning_rate": 1.4358196795521755e-05,
+       "loss": 0.1927,
+       "step": 1110
+     },
+     {
+       "epoch": 0.73,
+       "learning_rate": 1.4264553109065511e-05,
+       "loss": 0.1359,
+       "step": 1120
+     },
+     {
+       "epoch": 0.74,
+       "learning_rate": 1.4170450071750686e-05,
+       "loss": 0.2743,
+       "step": 1130
+     },
+     {
+       "epoch": 0.75,
+       "learning_rate": 1.4075897819765033e-05,
+       "loss": 0.1994,
+       "step": 1140
+     },
+     {
+       "epoch": 0.75,
+       "learning_rate": 1.3980906537682876e-05,
+       "loss": 0.1961,
+       "step": 1150
+     },
+     {
+       "epoch": 0.76,
+       "learning_rate": 1.3885486457368118e-05,
+       "loss": 0.2461,
+       "step": 1160
+     },
+     {
+       "epoch": 0.76,
+       "learning_rate": 1.3789647856872102e-05,
+       "loss": 0.1861,
+       "step": 1170
+     },
+     {
+       "epoch": 0.77,
+       "learning_rate": 1.369340105932653e-05,
+       "loss": 0.1926,
+       "step": 1180
+     },
+     {
+       "epoch": 0.78,
+       "learning_rate": 1.359675643183154e-05,
+       "loss": 0.1551,
+       "step": 1190
+     },
+     {
+       "epoch": 0.78,
+       "learning_rate": 1.3499724384338999e-05,
+       "loss": 0.2548,
+       "step": 1200
+     },
+     {
+       "epoch": 0.79,
+       "learning_rate": 1.3402315368531226e-05,
+       "loss": 0.2113,
+       "step": 1210
+     },
+     {
+       "epoch": 0.8,
+       "learning_rate": 1.33045398766952e-05,
+       "loss": 0.1721,
+       "step": 1220
+     },
+     {
+       "epoch": 0.8,
+       "learning_rate": 1.3206408440592385e-05,
+       "loss": 0.1798,
+       "step": 1230
+     },
+     {
+       "epoch": 0.81,
+       "learning_rate": 1.3107931630324333e-05,
+       "loss": 0.1337,
+       "step": 1240
+     },
+     {
+       "epoch": 0.82,
+       "learning_rate": 1.3009120053194133e-05,
+       "loss": 0.2212,
+       "step": 1250
+     },
+     {
+       "epoch": 0.82,
+       "learning_rate": 1.2909984352563845e-05,
+       "loss": 0.2069,
+       "step": 1260
+     },
+     {
+       "epoch": 0.83,
+       "learning_rate": 1.2810535206708089e-05,
+       "loss": 0.2119,
+       "step": 1270
+     },
+     {
+       "epoch": 0.84,
+       "learning_rate": 1.2710783327663819e-05,
+       "loss": 0.1255,
+       "step": 1280
+     },
+     {
+       "epoch": 0.84,
+       "learning_rate": 1.2610739460076522e-05,
+       "loss": 0.1392,
+       "step": 1290
+     },
+     {
+       "epoch": 0.85,
+       "learning_rate": 1.2510414380042843e-05,
+       "loss": 0.2273,
+       "step": 1300
+     },
+     {
+       "epoch": 0.86,
+       "learning_rate": 1.2409818893949874e-05,
+       "loss": 0.202,
+       "step": 1310
+     },
+     {
+       "epoch": 0.86,
+       "learning_rate": 1.230896383731114e-05,
+       "loss": 0.1433,
+       "step": 1320
+     },
+     {
+       "epoch": 0.87,
+       "learning_rate": 1.2207860073599478e-05,
+       "loss": 0.1484,
+       "step": 1330
+     },
+     {
+       "epoch": 0.88,
+       "learning_rate": 1.2106518493076889e-05,
+       "loss": 0.1173,
+       "step": 1340
+     },
+     {
+       "epoch": 0.88,
+       "learning_rate": 1.2004950011621501e-05,
+       "loss": 0.1549,
+       "step": 1350
+     },
+     {
+       "epoch": 0.89,
+       "learning_rate": 1.1903165569551792e-05,
+       "loss": 0.1346,
+       "step": 1360
+     },
+     {
+       "epoch": 0.9,
+       "learning_rate": 1.1801176130448148e-05,
+       "loss": 0.1434,
+       "step": 1370
+     },
+     {
+       "epoch": 0.9,
+       "learning_rate": 1.1698992679971964e-05,
+       "loss": 0.169,
+       "step": 1380
+     },
+     {
+       "epoch": 0.91,
+       "learning_rate": 1.1596626224682306e-05,
+       "loss": 0.2079,
+       "step": 1390
+     },
+     {
+       "epoch": 0.92,
+       "learning_rate": 1.1494087790850375e-05,
+       "loss": 0.1593,
+       "step": 1400
+     },
+     {
+       "epoch": 0.92,
+       "learning_rate": 1.1391388423271818e-05,
+       "loss": 0.2082,
+       "step": 1410
+     },
+     {
+       "epoch": 0.93,
+       "learning_rate": 1.1288539184077057e-05,
+       "loss": 0.1683,
+       "step": 1420
+     },
+     {
+       "epoch": 0.93,
+       "learning_rate": 1.1185551151539737e-05,
+       "loss": 0.1701,
+       "step": 1430
+     },
+     {
+       "epoch": 0.94,
+       "learning_rate": 1.1082435418883457e-05,
+       "loss": 0.179,
+       "step": 1440
+     },
+     {
+       "epoch": 0.95,
+       "learning_rate": 1.097920309308686e-05,
+       "loss": 0.2179,
+       "step": 1450
+     },
+     {
+       "epoch": 0.95,
+       "learning_rate": 1.0875865293687281e-05,
+       "loss": 0.1826,
+       "step": 1460
+     },
+     {
+       "epoch": 0.96,
+       "learning_rate": 1.0772433151583001e-05,
+       "loss": 0.2002,
+       "step": 1470
+     },
+     {
+       "epoch": 0.97,
+       "learning_rate": 1.06689178078343e-05,
+       "loss": 0.1828,
+       "step": 1480
+     },
+     {
+       "epoch": 0.97,
+       "learning_rate": 1.056533041246343e-05,
+       "loss": 0.1393,
+       "step": 1490
+     },
+     {
+       "epoch": 0.98,
+       "learning_rate": 1.0461682123253578e-05,
+       "loss": 0.1597,
+       "step": 1500
+     },
+     {
+       "epoch": 0.99,
+       "learning_rate": 1.0357984104547038e-05,
+       "loss": 0.1738,
+       "step": 1510
+     },
+     {
+       "epoch": 0.99,
+       "learning_rate": 1.0254247526042656e-05,
+       "loss": 0.2535,
+       "step": 1520
+     },
+     {
+       "epoch": 1.0,
+       "learning_rate": 1.0150483561592687e-05,
+       "loss": 0.1162,
+       "step": 1530
+     },
+     {
+       "epoch": 1.01,
+       "learning_rate": 1.0046703387999242e-05,
+       "loss": 0.4422,
+       "step": 1540
+     },
+     {
+       "epoch": 1.01,
+       "learning_rate": 9.942918183810368e-06,
+       "loss": 0.1518,
+       "step": 1550
+     },
+     {
+       "epoch": 1.02,
+       "learning_rate": 9.839139128115978e-06,
+       "loss": 0.061,
+       "step": 1560
+     },
+     {
+       "epoch": 1.03,
+       "learning_rate": 9.735377399343714e-06,
+       "loss": 0.111,
+       "step": 1570
+     },
+     {
+       "epoch": 1.03,
+       "learning_rate": 9.631644174054862e-06,
+       "loss": 0.0424,
+       "step": 1580
+     },
+     {
+       "epoch": 1.04,
+       "learning_rate": 9.527950625740502e-06,
+       "loss": 0.035,
+       "step": 1590
+     },
+     {
+       "epoch": 1.05,
+       "learning_rate": 9.424307923617948e-06,
+       "loss": 0.0563,
+       "step": 1600
+     },
+     {
+       "epoch": 1.05,
+       "learning_rate": 9.320727231427688e-06,
+       "loss": 0.0894,
+       "step": 1610
+     },
+     {
+       "epoch": 1.06,
+       "learning_rate": 9.217219706230892e-06,
+       "loss": 0.0344,
+       "step": 1620
+     },
+     {
+       "epoch": 1.07,
+       "learning_rate": 9.113796497207637e-06,
+       "loss": 0.068,
+       "step": 1630
+     },
+     {
+       "epoch": 1.07,
+       "learning_rate": 9.010468744455994e-06,
+       "loss": 0.0403,
+       "step": 1640
+     },
+     {
+       "epoch": 1.08,
+       "learning_rate": 8.907247577792087e-06,
+       "loss": 0.035,
+       "step": 1650
+     },
+     {
+       "epoch": 1.09,
+       "learning_rate": 8.80414411555125e-06,
+       "loss": 0.071,
+       "step": 1660
+     },
+     {
+       "epoch": 1.09,
+       "learning_rate": 8.70116946339044e-06,
+       "loss": 0.0448,
+       "step": 1670
+     },
+     {
+       "epoch": 1.1,
+       "learning_rate": 8.598334713092002e-06,
+       "loss": 0.0634,
+       "step": 1680
+     },
+     {
+       "epoch": 1.1,
+       "learning_rate": 8.49565094136893e-06,
+       "loss": 0.058,
+       "step": 1690
+     },
+     {
+       "epoch": 1.11,
+       "learning_rate": 8.39312920867176e-06,
+       "loss": 0.0806,
+       "step": 1700
+     },
+     {
+       "epoch": 1.12,
+       "learning_rate": 8.290780557997192e-06,
+       "loss": 0.0375,
+       "step": 1710
+     },
+     {
+       "epoch": 1.12,
+       "learning_rate": 8.188616013698629e-06,
+       "loss": 0.0744,
+       "step": 1720
+     },
+     {
+       "epoch": 1.13,
+       "learning_rate": 8.086646580298686e-06,
+       "loss": 0.0941,
+       "step": 1730
+     },
+     {
+       "epoch": 1.14,
+       "learning_rate": 7.984883241303865e-06,
+       "loss": 0.0442,
+       "step": 1740
+     },
+     {
+       "epoch": 1.14,
+       "learning_rate": 7.883336958021463e-06,
+       "loss": 0.0398,
+       "step": 1750
+     },
+     {
+       "epoch": 1.15,
+       "learning_rate": 7.78201866837891e-06,
+       "loss": 0.048,
+       "step": 1760
+     },
+     {
+       "epoch": 1.16,
+       "learning_rate": 7.680939285745589e-06,
+       "loss": 0.0534,
+       "step": 1770
+     },
+     {
+       "epoch": 1.16,
+       "learning_rate": 7.58010969775732e-06,
+       "loss": 0.0267,
+       "step": 1780
+     },
+     {
+       "epoch": 1.17,
+       "learning_rate": 7.479540765143619e-06,
+       "loss": 0.0408,
+       "step": 1790
+     },
+     {
+       "epoch": 1.18,
+       "learning_rate": 7.379243320557841e-06,
+       "loss": 0.0536,
+       "step": 1800
+     },
+     {
+       "epoch": 1.18,
+       "learning_rate": 7.279228167410356e-06,
+       "loss": 0.03,
+       "step": 1810
+     },
+     {
+       "epoch": 1.19,
+       "learning_rate": 7.179506078704874e-06,
+       "loss": 0.04,
+       "step": 1820
+     },
+     {
+       "epoch": 1.2,
+       "learning_rate": 7.080087795878047e-06,
+       "loss": 0.0861,
+       "step": 1830
+     },
+     {
+       "epoch": 1.2,
+       "learning_rate": 6.980984027642468e-06,
+       "loss": 0.0528,
+       "step": 1840
+     },
+     {
+       "epoch": 1.21,
+       "learning_rate": 6.8822054488331876e-06,
+       "loss": 0.0319,
+       "step": 1850
+     },
+     {
+       "epoch": 1.22,
+       "learning_rate": 6.783762699257907e-06,
+       "loss": 0.0628,
+       "step": 1860
+     },
+     {
+       "epoch": 1.22,
+       "learning_rate": 6.685666382550901e-06,
+       "loss": 0.0377,
+       "step": 1870
+     },
+     {
+       "epoch": 1.23,
+       "learning_rate": 6.587927065030883e-06,
+       "loss": 0.0466,
+       "step": 1880
+     },
+     {
+       "epoch": 1.24,
+       "learning_rate": 6.490555274562853e-06,
+       "loss": 0.0481,
+       "step": 1890
+     },
+     {
+       "epoch": 1.24,
+       "learning_rate": 6.393561499424108e-06,
+       "loss": 0.0634,
+       "step": 1900
+     },
+     {
+       "epoch": 1.25,
+       "learning_rate": 6.296956187174505e-06,
+       "loss": 0.0676,
+       "step": 1910
+     },
+     {
+       "epoch": 1.26,
+       "learning_rate": 6.200749743531129e-06,
+       "loss": 0.0525,
+       "step": 1920
+     },
+     {
+       "epoch": 1.26,
+       "learning_rate": 6.104952531247437e-06,
+       "loss": 0.0784,
+       "step": 1930
+     },
+     {
+       "epoch": 1.27,
+       "learning_rate": 6.009574868997061e-06,
+       "loss": 0.05,
+       "step": 1940
+     },
+     {
+       "epoch": 1.27,
+       "learning_rate": 5.9146270302623355e-06,
+       "loss": 0.0371,
+       "step": 1950
+     },
+     {
+       "epoch": 1.28,
+       "learning_rate": 5.820119242227715e-06,
+       "loss": 0.1927,
+       "step": 1960
+     },
+     {
+       "epoch": 1.29,
+       "learning_rate": 5.726061684678153e-06,
+       "loss": 0.0825,
+       "step": 1970
+     },
+     {
+       "epoch": 1.29,
+       "learning_rate": 5.632464488902606e-06,
+       "loss": 0.0259,
+       "step": 1980
+     },
+     {
+       "epoch": 1.3,
+       "learning_rate": 5.539337736602748e-06,
+       "loss": 0.0442,
+       "step": 1990
+     },
+     {
+       "epoch": 1.31,
+       "learning_rate": 5.446691458807052e-06,
+       "loss": 0.0398,
+       "step": 2000
+     },
+     {
+       "epoch": 1.31,
+       "learning_rate": 5.354535634790283e-06,
+       "loss": 0.0491,
+       "step": 2010
+     },
1216
+ {
1217
+ "epoch": 1.32,
1218
+ "learning_rate": 5.262880190998612e-06,
1219
+ "loss": 0.0754,
1220
+ "step": 2020
1221
+ },
1222
+ {
1223
+ "epoch": 1.33,
1224
+ "learning_rate": 5.171734999980404e-06,
1225
+ "loss": 0.029,
1226
+ "step": 2030
1227
+ },
1228
+ {
1229
+ "epoch": 1.33,
1230
+ "learning_rate": 5.081109879322788e-06,
1231
+ "loss": 0.2079,
1232
+ "step": 2040
1233
+ },
1234
+ {
1235
+ "epoch": 1.34,
1236
+ "learning_rate": 4.991014590594199e-06,
1237
+ "loss": 0.066,
1238
+ "step": 2050
1239
+ },
1240
+ {
1241
+ "epoch": 1.35,
1242
+ "learning_rate": 4.901458838292891e-06,
1243
+ "loss": 0.0516,
1244
+ "step": 2060
1245
+ },
1246
+ {
1247
+ "epoch": 1.35,
1248
+ "learning_rate": 4.812452268801658e-06,
1249
+ "loss": 0.043,
1250
+ "step": 2070
1251
+ },
1252
+ {
1253
+ "epoch": 1.36,
1254
+ "learning_rate": 4.724004469348763e-06,
1255
+ "loss": 0.0311,
1256
+ "step": 2080
1257
+ },
1258
+ {
1259
+ "epoch": 1.37,
1260
+ "learning_rate": 4.63612496697528e-06,
1261
+ "loss": 0.0295,
1262
+ "step": 2090
1263
+ },
1264
+ {
1265
+ "epoch": 1.37,
1266
+ "learning_rate": 4.548823227508887e-06,
1267
+ "loss": 0.0641,
1268
+ "step": 2100
1269
+ },
1270
+ {
1271
+ "epoch": 1.38,
1272
+ "learning_rate": 4.462108654544282e-06,
1273
+ "loss": 0.052,
1274
+ "step": 2110
1275
+ },
1276
+ {
1277
+ "epoch": 1.39,
1278
+ "learning_rate": 4.375990588430261e-06,
1279
+ "loss": 0.0502,
1280
+ "step": 2120
1281
+ },
1282
+ {
1283
+ "epoch": 1.39,
1284
+ "learning_rate": 4.290478305263672e-06,
1285
+ "loss": 0.0765,
1286
+ "step": 2130
1287
+ },
1288
+ {
1289
+ "epoch": 1.4,
1290
+ "learning_rate": 4.205581015890214e-06,
1291
+ "loss": 0.0447,
1292
+ "step": 2140
1293
+ },
1294
+ {
1295
+ "epoch": 1.41,
1296
+ "learning_rate": 4.121307864912332e-06,
1297
+ "loss": 0.0721,
1298
+ "step": 2150
1299
+ },
1300
+ {
1301
+ "epoch": 1.41,
1302
+ "learning_rate": 4.037667929704197e-06,
1303
+ "loss": 0.0552,
1304
+ "step": 2160
1305
+ },
1306
+ {
1307
+ "epoch": 1.42,
1308
+ "learning_rate": 3.954670219433957e-06,
1309
+ "loss": 0.1431,
1310
+ "step": 2170
1311
+ },
1312
+ {
1313
+ "epoch": 1.43,
1314
+ "learning_rate": 3.8723236740933365e-06,
1315
+ "loss": 0.0592,
1316
+ "step": 2180
1317
+ },
1318
+ {
1319
+ "epoch": 1.43,
1320
+ "learning_rate": 3.7906371635346485e-06,
1321
+ "loss": 0.0428,
1322
+ "step": 2190
1323
+ },
1324
+ {
1325
+ "epoch": 1.44,
1326
+ "learning_rate": 3.7096194865154324e-06,
1327
+ "loss": 0.0487,
1328
+ "step": 2200
1329
+ },
1330
+ {
1331
+ "epoch": 1.44,
1332
+ "learning_rate": 3.629279369750657e-06,
1333
+ "loss": 0.0743,
1334
+ "step": 2210
1335
+ },
1336
+ {
1337
+ "epoch": 1.45,
1338
+ "learning_rate": 3.549625466972776e-06,
1339
+ "loss": 0.1101,
1340
+ "step": 2220
1341
+ },
1342
+ {
1343
+ "epoch": 1.46,
1344
+ "learning_rate": 3.4706663579995737e-06,
1345
+ "loss": 0.0394,
1346
+ "step": 2230
1347
+ },
1348
+ {
1349
+ "epoch": 1.46,
1350
+ "learning_rate": 3.3924105478100245e-06,
1351
+ "loss": 0.0389,
1352
+ "step": 2240
1353
+ },
1354
+ {
1355
+ "epoch": 1.47,
1356
+ "learning_rate": 3.314866465628167e-06,
1357
+ "loss": 0.0714,
1358
+ "step": 2250
1359
+ },
1360
+ {
1361
+ "epoch": 1.48,
1362
+ "learning_rate": 3.2380424640151742e-06,
1363
+ "loss": 0.0265,
1364
+ "step": 2260
1365
+ },
1366
+ {
1367
+ "epoch": 1.48,
1368
+ "learning_rate": 3.1619468179696745e-06,
1369
+ "loss": 0.0235,
1370
+ "step": 2270
1371
+ },
1372
+ {
1373
+ "epoch": 1.49,
1374
+ "learning_rate": 3.086587724036397e-06,
1375
+ "loss": 0.0216,
1376
+ "step": 2280
1377
+ },
1378
+ {
1379
+ "epoch": 1.5,
1380
+ "learning_rate": 3.0119732994233254e-06,
1381
+ "loss": 0.0661,
1382
+ "step": 2290
1383
+ },
1384
+ {
1385
+ "epoch": 1.5,
1386
+ "learning_rate": 2.9381115811273254e-06,
1387
+ "loss": 0.0427,
1388
+ "step": 2300
1389
+ },
1390
+ {
1391
+ "epoch": 1.51,
1392
+ "learning_rate": 2.8650105250684835e-06,
1393
+ "loss": 0.0399,
1394
+ "step": 2310
1395
+ },
1396
+ {
1397
+ "epoch": 1.52,
1398
+ "learning_rate": 2.7926780052331192e-06,
1399
+ "loss": 0.0667,
1400
+ "step": 2320
1401
+ },
1402
+ {
1403
+ "epoch": 1.52,
1404
+ "learning_rate": 2.7211218128256733e-06,
1405
+ "loss": 0.0465,
1406
+ "step": 2330
1407
+ },
1408
+ {
1409
+ "epoch": 1.53,
1410
+ "learning_rate": 2.6503496554294606e-06,
1411
+ "loss": 0.0335,
1412
+ "step": 2340
1413
+ },
1414
+ {
1415
+ "epoch": 1.54,
1416
+ "learning_rate": 2.5803691561764855e-06,
1417
+ "loss": 0.038,
1418
+ "step": 2350
1419
+ },
1420
+ {
1421
+ "epoch": 1.54,
1422
+ "learning_rate": 2.5111878529263e-06,
1423
+ "loss": 0.0473,
1424
+ "step": 2360
1425
+ },
1426
+ {
1427
+ "epoch": 1.55,
1428
+ "learning_rate": 2.4428131974540982e-06,
1429
+ "loss": 0.0653,
1430
+ "step": 2370
1431
+ },
1432
+ {
1433
+ "epoch": 1.56,
1434
+ "learning_rate": 2.3752525546480344e-06,
1435
+ "loss": 0.0351,
1436
+ "step": 2380
1437
+ },
1438
+ {
1439
+ "epoch": 1.56,
1440
+ "learning_rate": 2.3085132017159427e-06,
1441
+ "loss": 0.0225,
1442
+ "step": 2390
1443
+ },
1444
+ {
1445
+ "epoch": 1.57,
1446
+ "learning_rate": 2.2426023274014686e-06,
1447
+ "loss": 0.0689,
1448
+ "step": 2400
1449
+ },
1450
+ {
1451
+ "epoch": 1.58,
1452
+ "learning_rate": 2.177527031209747e-06,
1453
+ "loss": 0.0715,
1454
+ "step": 2410
1455
+ },
1456
+ {
1457
+ "epoch": 1.58,
1458
+ "learning_rate": 2.113294322642694e-06,
1459
+ "loss": 0.016,
1460
+ "step": 2420
1461
+ },
1462
+ {
1463
+ "epoch": 1.59,
1464
+ "learning_rate": 2.0499111204439793e-06,
1465
+ "loss": 0.0894,
1466
+ "step": 2430
1467
+ },
1468
+ {
1469
+ "epoch": 1.6,
1470
+ "learning_rate": 1.987384251853789e-06,
1471
+ "loss": 0.2198,
1472
+ "step": 2440
1473
+ },
1474
+ {
1475
+ "epoch": 1.6,
1476
+ "learning_rate": 1.9257204518734276e-06,
1477
+ "loss": 0.0503,
1478
+ "step": 2450
1479
+ },
1480
+ {
1481
+ "epoch": 1.61,
1482
+ "learning_rate": 1.8649263625398806e-06,
1483
+ "loss": 0.0374,
1484
+ "step": 2460
1485
+ },
1486
+ {
1487
+ "epoch": 1.61,
1488
+ "learning_rate": 1.8050085322103616e-06,
1489
+ "loss": 0.048,
1490
+ "step": 2470
1491
+ },
1492
+ {
1493
+ "epoch": 1.62,
1494
+ "learning_rate": 1.7459734148569818e-06,
1495
+ "loss": 0.0214,
1496
+ "step": 2480
1497
+ },
1498
+ {
1499
+ "epoch": 1.63,
1500
+ "learning_rate": 1.6878273693715474e-06,
1501
+ "loss": 0.0383,
1502
+ "step": 2490
1503
+ },
1504
+ {
1505
+ "epoch": 1.63,
1506
+ "learning_rate": 1.630576658880636e-06,
1507
+ "loss": 0.0333,
1508
+ "step": 2500
1509
+ },
1510
+ {
1511
+ "epoch": 1.64,
1512
+ "learning_rate": 1.5742274500709686e-06,
1513
+ "loss": 0.0332,
1514
+ "step": 2510
1515
+ },
1516
+ {
1517
+ "epoch": 1.65,
1518
+ "learning_rate": 1.5187858125251642e-06,
1519
+ "loss": 0.0442,
1520
+ "step": 2520
1521
+ },
1522
+ {
1523
+ "epoch": 1.65,
1524
+ "learning_rate": 1.4642577180679795e-06,
1525
+ "loss": 0.0333,
1526
+ "step": 2530
1527
+ },
1528
+ {
1529
+ "epoch": 1.66,
1530
+ "learning_rate": 1.4106490401230388e-06,
1531
+ "loss": 0.0441,
1532
+ "step": 2540
1533
+ },
1534
+ {
1535
+ "epoch": 1.67,
1536
+ "learning_rate": 1.3579655530802115e-06,
1537
+ "loss": 0.0521,
1538
+ "step": 2550
1539
+ },
1540
+ {
1541
+ "epoch": 1.67,
1542
+ "learning_rate": 1.3062129316736039e-06,
1543
+ "loss": 0.0296,
1544
+ "step": 2560
1545
+ },
1546
+ {
1547
+ "epoch": 1.68,
1548
+ "learning_rate": 1.2553967503703401e-06,
1549
+ "loss": 0.0174,
1550
+ "step": 2570
1551
+ },
1552
+ {
1553
+ "epoch": 1.69,
1554
+ "learning_rate": 1.2055224827700895e-06,
1555
+ "loss": 0.0334,
1556
+ "step": 2580
1557
+ },
1558
+ {
1559
+ "epoch": 1.69,
1560
+ "learning_rate": 1.1565955010155072e-06,
1561
+ "loss": 0.019,
1562
+ "step": 2590
1563
+ },
1564
+ {
1565
+ "epoch": 1.7,
1566
+ "learning_rate": 1.1086210752135628e-06,
1567
+ "loss": 0.0716,
1568
+ "step": 2600
1569
+ },
1570
+ {
1571
+ "epoch": 1.71,
1572
+ "learning_rate": 1.0616043728678915e-06,
1573
+ "loss": 0.0423,
1574
+ "step": 2610
1575
+ },
1576
+ {
1577
+ "epoch": 1.71,
1578
+ "learning_rate": 1.0155504583221708e-06,
1579
+ "loss": 0.0552,
1580
+ "step": 2620
1581
+ },
1582
+ {
1583
+ "epoch": 1.72,
1584
+ "learning_rate": 9.704642922146345e-07,
1585
+ "loss": 0.0365,
1586
+ "step": 2630
1587
+ },
1588
+ {
1589
+ "epoch": 1.73,
1590
+ "learning_rate": 9.263507309437314e-07,
1591
+ "loss": 0.0911,
1592
+ "step": 2640
1593
+ },
1594
+ {
1595
+ "epoch": 1.73,
1596
+ "learning_rate": 8.832145261450331e-07,
1597
+ "loss": 0.0219,
1598
+ "step": 2650
1599
+ },
1600
+ {
1601
+ "epoch": 1.74,
1602
+ "learning_rate": 8.410603241794113e-07,
1603
+ "loss": 0.0233,
1604
+ "step": 2660
1605
+ },
1606
+ {
1607
+ "epoch": 1.75,
1608
+ "learning_rate": 7.998926656325668e-07,
1609
+ "loss": 0.105,
1610
+ "step": 2670
1611
+ },
1612
+ {
1613
+ "epoch": 1.75,
1614
+ "learning_rate": 7.597159848259472e-07,
1615
+ "loss": 0.0742,
1616
+ "step": 2680
1617
+ },
1618
+ {
1619
+ "epoch": 1.76,
1620
+ "learning_rate": 7.205346093390964e-07,
1621
+ "loss": 0.0466,
1622
+ "step": 2690
1623
+ },
1624
+ {
1625
+ "epoch": 1.77,
1626
+ "learning_rate": 6.823527595435353e-07,
1627
+ "loss": 0.0266,
1628
+ "step": 2700
1629
+ },
1630
+ {
1631
+ "epoch": 1.77,
1632
+ "learning_rate": 6.451745481481508e-07,
1633
+ "loss": 0.0374,
1634
+ "step": 2710
1635
+ },
1636
+ {
1637
+ "epoch": 1.78,
1638
+ "learning_rate": 6.090039797562163e-07,
1639
+ "loss": 0.043,
1640
+ "step": 2720
1641
+ },
1642
+ {
1643
+ "epoch": 1.78,
1644
+ "learning_rate": 5.73844950434026e-07,
1645
+ "loss": 0.0452,
1646
+ "step": 2730
1647
+ },
1648
+ {
1649
+ "epoch": 1.79,
1650
+ "learning_rate": 5.397012472912521e-07,
1651
+ "loss": 0.0237,
1652
+ "step": 2740
1653
+ },
1654
+ {
1655
+ "epoch": 1.8,
1656
+ "learning_rate": 5.065765480730033e-07,
1657
+ "loss": 0.0297,
1658
+ "step": 2750
1659
+ },
1660
+ {
1661
+ "epoch": 1.8,
1662
+ "learning_rate": 4.744744207636942e-07,
1663
+ "loss": 0.039,
1664
+ "step": 2760
1665
+ },
1666
+ {
1667
+ "epoch": 1.81,
1668
+ "learning_rate": 4.4339832320272057e-07,
1669
+ "loss": 0.1598,
1670
+ "step": 2770
1671
+ },
1672
+ {
1673
+ "epoch": 1.82,
1674
+ "learning_rate": 4.1335160271200104e-07,
1675
+ "loss": 0.0168,
1676
+ "step": 2780
1677
+ },
1678
+ {
1679
+ "epoch": 1.82,
1680
+ "learning_rate": 3.843374957354262e-07,
1681
+ "loss": 0.0359,
1682
+ "step": 2790
1683
+ },
1684
+ {
1685
+ "epoch": 1.83,
1686
+ "learning_rate": 3.563591274902467e-07,
1687
+ "loss": 0.0674,
1688
+ "step": 2800
1689
+ },
1690
+ {
1691
+ "epoch": 1.84,
1692
+ "learning_rate": 3.2941951163045215e-07,
1693
+ "loss": 0.0769,
1694
+ "step": 2810
1695
+ },
1696
+ {
1697
+ "epoch": 1.84,
1698
+ "learning_rate": 3.0352154992214557e-07,
1699
+ "loss": 0.0542,
1700
+ "step": 2820
1701
+ },
1702
+ {
1703
+ "epoch": 1.85,
1704
+ "learning_rate": 2.786680319309998e-07,
1705
+ "loss": 0.0338,
1706
+ "step": 2830
1707
+ },
1708
+ {
1709
+ "epoch": 1.86,
1710
+ "learning_rate": 2.5486163472176693e-07,
1711
+ "loss": 0.1245,
1712
+ "step": 2840
1713
+ },
1714
+ {
1715
+ "epoch": 1.86,
1716
+ "learning_rate": 2.3210492256993677e-07,
1717
+ "loss": 0.0397,
1718
+ "step": 2850
1719
+ },
1720
+ {
1721
+ "epoch": 1.87,
1722
+ "learning_rate": 2.1040034668551646e-07,
1723
+ "loss": 0.0411,
1724
+ "step": 2860
1725
+ },
1726
+ {
1727
+ "epoch": 1.88,
1728
+ "learning_rate": 1.8975024494900963e-07,
1729
+ "loss": 0.0472,
1730
+ "step": 2870
1731
+ },
1732
+ {
1733
+ "epoch": 1.88,
1734
+ "learning_rate": 1.7015684165959113e-07,
1735
+ "loss": 0.03,
1736
+ "step": 2880
1737
+ },
1738
+ {
1739
+ "epoch": 1.89,
1740
+ "learning_rate": 1.5162224729552088e-07,
1741
+ "loss": 0.041,
1742
+ "step": 2890
1743
+ },
1744
+ {
1745
+ "epoch": 1.9,
1746
+ "learning_rate": 1.3414845828681466e-07,
1747
+ "loss": 0.034,
1748
+ "step": 2900
1749
+ },
1750
+ {
1751
+ "epoch": 1.9,
1752
+ "learning_rate": 1.1773735680020493e-07,
1753
+ "loss": 0.0426,
1754
+ "step": 2910
1755
+ },
1756
+ {
1757
+ "epoch": 1.91,
1758
+ "learning_rate": 1.0239071053639971e-07,
1759
+ "loss": 0.0581,
1760
+ "step": 2920
1761
+ },
1762
+ {
1763
+ "epoch": 1.92,
1764
+ "learning_rate": 8.811017253968379e-08,
1765
+ "loss": 0.0503,
1766
+ "step": 2930
1767
+ },
1768
+ {
1769
+ "epoch": 1.92,
1770
+ "learning_rate": 7.489728101985671e-08,
1771
+ "loss": 0.0549,
1772
+ "step": 2940
1773
+ },
1774
+ {
1775
+ "epoch": 1.93,
1776
+ "learning_rate": 6.275345918655307e-08,
1777
+ "loss": 0.0429,
1778
+ "step": 2950
1779
+ },
1780
+ {
1781
+ "epoch": 1.94,
1782
+ "learning_rate": 5.1680015095936234e-08,
1783
+ "loss": 0.028,
1784
+ "step": 2960
1785
+ },
1786
+ {
1787
+ "epoch": 1.94,
1788
+ "learning_rate": 4.167814150980887e-08,
1789
+ "loss": 0.0321,
1790
+ "step": 2970
1791
+ },
1792
+ {
1793
+ "epoch": 1.95,
1794
+ "learning_rate": 3.274891576713346e-08,
1795
+ "loss": 0.0646,
1796
+ "step": 2980
1797
+ },
1798
+ {
1799
+ "epoch": 1.95,
1800
+ "learning_rate": 2.489329966798626e-08,
1801
+ "loss": 0.0284,
1802
+ "step": 2990
1803
+ },
1804
+ {
1805
+ "epoch": 1.96,
1806
+ "learning_rate": 1.8112139369962367e-08,
1807
+ "loss": 0.0322,
1808
+ "step": 3000
1809
+ },
1810
+ {
1811
+ "epoch": 1.97,
1812
+ "learning_rate": 1.240616529702976e-08,
1813
+ "loss": 0.0388,
1814
+ "step": 3010
1815
+ },
1816
+ {
1817
+ "epoch": 1.97,
1818
+ "learning_rate": 7.775992060854442e-09,
1819
+ "loss": 0.0574,
1820
+ "step": 3020
1821
+ },
1822
+ {
1823
+ "epoch": 1.98,
1824
+ "learning_rate": 4.2221183945978515e-09,
1825
+ "loss": 0.0359,
1826
+ "step": 3030
1827
+ },
1828
+ {
1829
+ "epoch": 1.99,
1830
+ "learning_rate": 1.744927099193161e-09,
1831
+ "loss": 0.0459,
1832
+ "step": 3040
1833
+ },
1834
+ {
1835
+ "epoch": 1.99,
1836
+ "learning_rate": 3.446850021204817e-10,
1837
+ "loss": 0.0343,
1838
+ "step": 3050
1839
+ },
1840
+ {
1841
+ "epoch": 2.0,
1842
+ "step": 3058,
1843
+ "total_flos": 5.835951563016765e+17,
1844
+ "train_loss": 0.13229776309782418,
1845
+ "train_runtime": 69734.0297,
1846
+ "train_samples_per_second": 0.702,
1847
+ "train_steps_per_second": 0.044
1848
+ }
1849
+ ],
1850
+ "max_steps": 3058,
1851
+ "num_train_epochs": 2,
1852
+ "total_flos": 5.835951563016765e+17,
1853
+ "trial_name": null,
1854
+ "trial_params": null
1855
+ }
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ce51f679978ee2278d404b7c67db77e777500990decb065bce73dfbf92f582b8
+ size 3771