anindya64 committed
Commit 6d78926
1 Parent(s): a7cc709

Upload folder using huggingface_hub

config.json CHANGED
@@ -1,7 +1,7 @@
 {
-  "_name_or_path": "output_deepseek_v0.0.3/checkpoint-1400",
+  "_name_or_path": "output_deepseek_v0.0.2/checkpoint-2400",
   "architectures": [
-    "LlamaModel"
+    "LlamaForCausalLM"
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
generation_config.json ADDED
@@ -0,0 +1,6 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 32013,
+  "eos_token_id": 32021,
+  "transformers_version": "4.44.0"
+}
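
This new file carries the default decoding settings; "_from_model_config": true records that the values were copied out of the model config rather than set by hand. A short sketch of how generate() would pick them up, reusing the placeholder repo id and the model/tokenizer from the previous sketch:

    from transformers import GenerationConfig

    gen_cfg = GenerationConfig.from_pretrained("anindya64/REPO")
    print(gen_cfg.bos_token_id, gen_cfg.eos_token_id)  # 32013 32021
    # generate() ends a sequence once eos_token_id (32021) is produced
    output = model.generate(**tokenizer("def foo():", return_tensors="pt"),
                            generation_config=gen_cfg)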
model-00001-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:48a54d19c337625a865c6dde984bcbf248ffcbb38ff096071c99418610bace20
-size 4986378792
+oid sha256:34a7e0c12737c54f1c824032489551825c626a1643c93ff05baa82cd588d83ab
+size 4986380064
model-00002-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:66da5a74fe93ff87bb1d6d5f3f5bd32469fcd6d2a6875d60f3b1d6950d80e742
-size 135291512
+oid sha256:983686ed30eb67909631f0c5c3a97233409d5b38b9c32b06ab2913ff5293def3
+size 399532808
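
Both .safetensors diffs touch only Git LFS pointer files; the tensors themselves live in LFS storage and are addressed by the sha256 oid. Shard 2 grows from about 135 MB to about 400 MB, consistent with the lm_head weights the new architecture stores (the index below grows by the same ~264 MB in total_size). A hedged sketch of parsing such a pointer file:

    # Parse a Git LFS pointer file into its fields (version, oid, size).
    def parse_lfs_pointer(text: str) -> dict:
        fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
        return {"oid": fields["oid"], "size": int(fields["size"])}

    ptr = parse_lfs_pointer(
        "version https://git-lfs.github.com/spec/v1\n"
        "oid sha256:983686ed30eb67909631f0c5c3a97233409d5b38b9c32b06ab2913ff5293def3\n"
        "size 399532808\n"
    )
    assert ptr["size"] == 399532808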
model.safetensors.index.json CHANGED
@@ -1,225 +1,226 @@
 {
   "metadata": {
-    "total_size": 5121646592
+    "total_size": 5385887744
   },
   "weight_map": {
-    "embed_tokens.weight": "model-00001-of-00002.safetensors",
-    "layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.14.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.14.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.14.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.15.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.15.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.15.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.15.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.15.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.15.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.15.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.15.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.16.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.16.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.16.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.16.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.16.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.16.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.16.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.16.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.16.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.17.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.17.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.17.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.17.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.17.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.17.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.17.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.17.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.17.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.18.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.18.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.18.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.18.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.18.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.18.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.18.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.18.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.18.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.19.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.19.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.19.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.19.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.19.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.19.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.19.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.19.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.19.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.20.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.20.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.20.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.20.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.20.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.20.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.20.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.20.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.20.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.21.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.21.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.21.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.21.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.21.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.21.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.21.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.21.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.21.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.22.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.22.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.22.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.22.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.22.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.22.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.22.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.22.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.22.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.23.input_layernorm.weight": "model-00002-of-00002.safetensors",
-    "layers.23.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-    "layers.23.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-    "layers.23.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-    "layers.23.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-    "layers.23.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.23.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.23.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.23.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-    "layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-    "norm.weight": "model-00002-of-00002.safetensors"
+    "lm_head.weight": "model-00002-of-00002.safetensors",
+    "model.embed_tokens.weight": "model-00001-of-00002.safetensors",
+    "model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.14.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.14.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.14.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.15.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.15.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.15.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.15.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.15.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.15.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.15.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.15.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.16.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.16.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.16.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.16.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.16.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.16.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.16.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.16.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.16.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.17.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.17.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.17.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.17.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.17.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.17.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.17.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.17.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.17.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.18.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.18.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.18.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.18.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.18.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.18.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.18.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.18.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.18.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.19.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.19.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.19.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.19.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.19.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.19.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.19.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.19.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.19.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.20.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.20.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.20.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.20.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.20.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.20.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.20.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.20.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.20.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.21.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.21.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.21.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.21.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.21.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.21.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.21.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.21.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.21.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.22.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.22.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.22.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.22.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.22.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.22.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.22.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.22.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.22.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.23.input_layernorm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.23.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.23.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.23.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+    "model.layers.23.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+    "model.layers.23.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.23.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.23.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.23.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.norm.weight": "model-00002-of-00002.safetensors"
   }
 }
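
The index maps every tensor name to the shard file that stores it, and "total_size" is the byte count across all shards. Two real changes here: keys gain the "model." prefix because the state dict now comes from the LlamaForCausalLM wrapper rather than the bare LlamaModel, and "lm_head.weight" appears as a new entry in shard 2. (In both versions the shard boundary falls inside layer 23: its attention projections sit in shard 1 while its MLP and norms sit in shard 2.) A small sketch of using the index directly:

    # Resolve which shard file holds a given parameter, using the index above.
    import json

    with open("model.safetensors.index.json") as f:
        index = json.load(f)

    shard = index["weight_map"]["lm_head.weight"]  # "model-00002-of-00002.safetensors"
    total_bytes = index["metadata"]["total_size"]  # 5385887744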
optimizer.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:db84075a90fba35fd8d31611a65ce75095ea0bddea9d2781edeb239cf25a9d16
+size 10771909506
rng_state_0.pth ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e33c1a835a3506bc9c891b6d79847ac4178eb35f120b8d91a5656ec1b3016528
+size 14960
rng_state_1.pth ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d0de18c0c41258210fc5df6f484fa8ff2f740d8f622268d7d7f77acd7f4685c8
+size 14960
rng_state_2.pth ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9f08d210bbe20cfaf8c036275434f6d66d7847cfa2e495379e69d0a918685b80
+size 14960
scheduler.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d4bb6b0131250ccf76ecc2baec6b26f8f1c1973f6094ba98c71fe79aee9426eb
+size 1064
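
optimizer.pt, scheduler.pt, and the rng_state_*.pth files are the training state that transformers' Trainer writes next to the weights; the three per-rank RNG files suggest a three-process run. With them present, training can resume exactly where the checkpoint left off. A hedged sketch, assuming an already-configured Trainer instance named `trainer` and using the checkpoint path recorded in config.json for illustration:

    # `trainer` is a placeholder for a configured transformers.Trainer.
    trainer.train(resume_from_checkpoint="output_deepseek_v0.0.2/checkpoint-2400")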
trainer_state.json ADDED
@@ -0,0 +1,1023 @@
+{
+  "best_metric": null,
+  "best_model_checkpoint": null,
+  "epoch": 1.0213386832026263,
+  "eval_steps": 500,
+  "global_step": 1400,
+  "is_hyper_param_search": false,
+  "is_local_process_zero": true,
+  "is_world_process_zero": true,
+  "log_history": [
+    {
+      "epoch": 0.007295276308590188,
+      "grad_norm": 0.9992304444313049,
+      "learning_rate": 9.999671349822887e-06,
+      "loss": 0.087,
+      "step": 10
+    },
+    {
+      "epoch": 0.014590552617180376,
+      "grad_norm": 1.0456160306930542,
+      "learning_rate": 9.998685442495921e-06,
+      "loss": 0.0757,
+      "step": 20
+    },
+    {
+      "epoch": 0.021885828925770564,
+      "grad_norm": 1.014310598373413,
+      "learning_rate": 9.99704240762655e-06,
+      "loss": 0.0901,
+      "step": 30
+    },
+    {
+      "epoch": 0.029181105234360752,
+      "grad_norm": 0.9391605257987976,
+      "learning_rate": 9.994742461208251e-06,
+      "loss": 0.0811,
+      "step": 40
+    },
+    {
+      "epoch": 0.036476381542950936,
+      "grad_norm": 0.7982305884361267,
+      "learning_rate": 9.991785905592149e-06,
+      "loss": 0.0818,
+      "step": 50
+    },
+    {
+      "epoch": 0.04377165785154113,
+      "grad_norm": 0.9946795701980591,
+      "learning_rate": 9.988173129447251e-06,
+      "loss": 0.0905,
+      "step": 60
+    },
+    {
+      "epoch": 0.05106693416013131,
+      "grad_norm": 0.8562415242195129,
+      "learning_rate": 9.983904607709365e-06,
+      "loss": 0.0831,
+      "step": 70
+    },
+    {
+      "epoch": 0.058362210468721504,
+      "grad_norm": 1.054521918296814,
+      "learning_rate": 9.978980901518663e-06,
+      "loss": 0.083,
+      "step": 80
+    },
+    {
+      "epoch": 0.06565748677731169,
+      "grad_norm": 1.0189253091812134,
+      "learning_rate": 9.973402658145908e-06,
+      "loss": 0.0891,
+      "step": 90
+    },
+    {
+      "epoch": 0.07295276308590187,
+      "grad_norm": 0.7745731472969055,
+      "learning_rate": 9.96717061090737e-06,
+      "loss": 0.0846,
+      "step": 100
+    },
+    {
+      "epoch": 0.08024803939449207,
+      "grad_norm": 0.8214731812477112,
+      "learning_rate": 9.960285579068419e-06,
+      "loss": 0.0895,
+      "step": 110
+    },
+    {
+      "epoch": 0.08754331570308226,
+      "grad_norm": 0.9114850759506226,
+      "learning_rate": 9.95274846773583e-06,
+      "loss": 0.0862,
+      "step": 120
+    },
+    {
+      "epoch": 0.09483859201167244,
+      "grad_norm": 1.216098427772522,
+      "learning_rate": 9.944560267738792e-06,
+      "loss": 0.0896,
+      "step": 130
+    },
+    {
+      "epoch": 0.10213386832026262,
+      "grad_norm": 0.7587252855300903,
+      "learning_rate": 9.935722055498655e-06,
+      "loss": 0.0888,
+      "step": 140
+    },
+    {
+      "epoch": 0.10942914462885282,
+      "grad_norm": 0.8791028261184692,
+      "learning_rate": 9.92623499288743e-06,
+      "loss": 0.0832,
+      "step": 150
+    },
+    {
+      "epoch": 0.11672442093744301,
+      "grad_norm": 1.133716344833374,
+      "learning_rate": 9.916100327075038e-06,
+      "loss": 0.0854,
+      "step": 160
+    },
+    {
+      "epoch": 0.12401969724603319,
+      "grad_norm": 0.7774285674095154,
+      "learning_rate": 9.905319390365364e-06,
+      "loss": 0.0873,
+      "step": 170
+    },
+    {
+      "epoch": 0.13131497355462338,
+      "grad_norm": 0.7029784321784973,
+      "learning_rate": 9.893893600021112e-06,
+      "loss": 0.0786,
+      "step": 180
+    },
+    {
+      "epoch": 0.13861024986321357,
+      "grad_norm": 1.0894945859909058,
+      "learning_rate": 9.881824458077491e-06,
+      "loss": 0.089,
+      "step": 190
+    },
+    {
+      "epoch": 0.14590552617180375,
+      "grad_norm": 0.7053947448730469,
+      "learning_rate": 9.869113551144754e-06,
+      "loss": 0.0843,
+      "step": 200
+    },
+    {
+      "epoch": 0.15320080248039394,
+      "grad_norm": 0.9423328638076782,
+      "learning_rate": 9.85576255019963e-06,
+      "loss": 0.0887,
+      "step": 210
+    },
+    {
+      "epoch": 0.16049607878898414,
+      "grad_norm": 1.1778377294540405,
+      "learning_rate": 9.841773210365646e-06,
+      "loss": 0.0939,
+      "step": 220
+    },
+    {
+      "epoch": 0.1677913550975743,
+      "grad_norm": 0.8132289052009583,
+      "learning_rate": 9.82714737068241e-06,
+      "loss": 0.0812,
+      "step": 230
+    },
+    {
+      "epoch": 0.1750866314061645,
+      "grad_norm": 0.925722062587738,
+      "learning_rate": 9.811886953863841e-06,
+      "loss": 0.091,
+      "step": 240
+    },
+    {
+      "epoch": 0.1823819077147547,
+      "grad_norm": 0.8174653649330139,
+      "learning_rate": 9.795993966045418e-06,
+      "loss": 0.0868,
+      "step": 250
+    },
+    {
+      "epoch": 0.18967718402334488,
+      "grad_norm": 1.086427092552185,
+      "learning_rate": 9.779470496520442e-06,
+      "loss": 0.0882,
+      "step": 260
+    },
+    {
+      "epoch": 0.19697246033193508,
+      "grad_norm": 0.8431033492088318,
+      "learning_rate": 9.76231871746539e-06,
+      "loss": 0.0819,
+      "step": 270
+    },
+    {
+      "epoch": 0.20426773664052525,
+      "grad_norm": 0.9793428778648376,
+      "learning_rate": 9.744540883654348e-06,
+      "loss": 0.0838,
+      "step": 280
+    },
+    {
+      "epoch": 0.21156301294911545,
+      "grad_norm": 1.1424306631088257,
+      "learning_rate": 9.726139332162613e-06,
+      "loss": 0.0923,
+      "step": 290
+    },
+    {
+      "epoch": 0.21885828925770565,
+      "grad_norm": 0.8779186010360718,
+      "learning_rate": 9.707116482059447e-06,
+      "loss": 0.0909,
+      "step": 300
+    },
+    {
+      "epoch": 0.22615356556629582,
+      "grad_norm": 0.862575113773346,
+      "learning_rate": 9.68747483409007e-06,
+      "loss": 0.094,
+      "step": 310
+    },
+    {
+      "epoch": 0.23344884187488602,
+      "grad_norm": 0.8033697009086609,
+      "learning_rate": 9.667216970346916e-06,
+      "loss": 0.0821,
+      "step": 320
+    },
+    {
+      "epoch": 0.24074411818347619,
+      "grad_norm": 1.136168360710144,
+      "learning_rate": 9.646345553930187e-06,
+      "loss": 0.084,
+      "step": 330
+    },
+    {
+      "epoch": 0.24803939449206638,
+      "grad_norm": 1.227287769317627,
+      "learning_rate": 9.624863328597767e-06,
+      "loss": 0.0867,
+      "step": 340
+    },
+    {
+      "epoch": 0.2553346708006566,
+      "grad_norm": 0.8906837105751038,
+      "learning_rate": 9.602773118404518e-06,
+      "loss": 0.0845,
+      "step": 350
+    },
+    {
+      "epoch": 0.26262994710924675,
+      "grad_norm": 0.9557612538337708,
+      "learning_rate": 9.580077827331038e-06,
+      "loss": 0.0896,
+      "step": 360
+    },
+    {
+      "epoch": 0.2699252234178369,
+      "grad_norm": 0.8804728984832764,
+      "learning_rate": 9.556780438901899e-06,
+      "loss": 0.0799,
+      "step": 370
+    },
+    {
+      "epoch": 0.27722049972642715,
+      "grad_norm": 0.8825680017471313,
+      "learning_rate": 9.532884015793432e-06,
+      "loss": 0.0879,
+      "step": 380
+    },
+    {
+      "epoch": 0.2845157760350173,
+      "grad_norm": 1.216217041015625,
+      "learning_rate": 9.508391699431114e-06,
+      "loss": 0.0878,
+      "step": 390
+    },
+    {
+      "epoch": 0.2918110523436075,
+      "grad_norm": 1.0918773412704468,
+      "learning_rate": 9.48330670957659e-06,
+      "loss": 0.0842,
+      "step": 400
+    },
+    {
+      "epoch": 0.2991063286521977,
+      "grad_norm": 0.8797096014022827,
+      "learning_rate": 9.457632343904404e-06,
+      "loss": 0.075,
+      "step": 410
+    },
+    {
+      "epoch": 0.3064016049607879,
+      "grad_norm": 0.9638001322746277,
+      "learning_rate": 9.431371977568483e-06,
+      "loss": 0.0834,
+      "step": 420
+    },
+    {
+      "epoch": 0.31369688126937806,
+      "grad_norm": 0.7447642683982849,
+      "learning_rate": 9.404529062758447e-06,
+      "loss": 0.0909,
+      "step": 430
+    },
+    {
+      "epoch": 0.3209921575779683,
+      "grad_norm": 1.179291844367981,
+      "learning_rate": 9.377107128245782e-06,
+      "loss": 0.0832,
+      "step": 440
+    },
+    {
+      "epoch": 0.32828743388655846,
+      "grad_norm": 0.6798911094665527,
+      "learning_rate": 9.349109778919938e-06,
+      "loss": 0.0842,
+      "step": 450
+    },
+    {
+      "epoch": 0.3355827101951486,
+      "grad_norm": 0.9468401670455933,
+      "learning_rate": 9.32054069531444e-06,
+      "loss": 0.0937,
+      "step": 460
+    },
+    {
+      "epoch": 0.34287798650373885,
+      "grad_norm": 0.9600223898887634,
+      "learning_rate": 9.291403633123046e-06,
+      "loss": 0.0909,
+      "step": 470
+    },
+    {
+      "epoch": 0.350173262812329,
+      "grad_norm": 0.7957492470741272,
+      "learning_rate": 9.261702422706014e-06,
+      "loss": 0.0924,
+      "step": 480
+    },
+    {
+      "epoch": 0.3574685391209192,
+      "grad_norm": 0.9197902083396912,
+      "learning_rate": 9.231440968586572e-06,
+      "loss": 0.0852,
+      "step": 490
+    },
+    {
+      "execution_accuracy": 37.0,
+      "selected_difficulty": "challenging",
+      "step": 500
+    },
+    {
+      "epoch": 0.3647638154295094,
+      "grad_norm": 1.0779783725738525,
+      "learning_rate": 9.200623248937619e-06,
+      "loss": 0.0872,
+      "step": 500
+    },
+    {
+      "epoch": 0.3720590917380996,
+      "grad_norm": 1.0330190658569336,
+      "learning_rate": 9.169253315058764e-06,
+      "loss": 0.0884,
+      "step": 510
+    },
+    {
+      "epoch": 0.37935436804668976,
+      "grad_norm": 0.8854564428329468,
+      "learning_rate": 9.13733529084374e-06,
+      "loss": 0.0888,
+      "step": 520
+    },
+    {
+      "epoch": 0.38664964435527993,
+      "grad_norm": 0.7928789258003235,
+      "learning_rate": 9.104873372238269e-06,
+      "loss": 0.0858,
+      "step": 530
+    },
+    {
+      "epoch": 0.39394492066387016,
+      "grad_norm": 1.1074901819229126,
+      "learning_rate": 9.071871826688472e-06,
+      "loss": 0.0866,
+      "step": 540
+    },
+    {
+      "epoch": 0.40124019697246033,
+      "grad_norm": 0.9030365347862244,
+      "learning_rate": 9.038334992579863e-06,
+      "loss": 0.084,
+      "step": 550
+    },
+    {
+      "epoch": 0.4085354732810505,
+      "grad_norm": 0.9121033549308777,
+      "learning_rate": 9.004267278667032e-06,
+      "loss": 0.0841,
+      "step": 560
+    },
+    {
+      "epoch": 0.4158307495896407,
+      "grad_norm": 0.7888039350509644,
+      "learning_rate": 8.969673163494063e-06,
+      "loss": 0.0844,
+      "step": 570
+    },
+    {
+      "epoch": 0.4231260258982309,
+      "grad_norm": 0.8982441425323486,
+      "learning_rate": 8.934557194805787e-06,
+      "loss": 0.0881,
+      "step": 580
+    },
+    {
+      "epoch": 0.43042130220682107,
+      "grad_norm": 0.9907477498054504,
+      "learning_rate": 8.898923988949936e-06,
+      "loss": 0.0829,
+      "step": 590
+    },
+    {
+      "epoch": 0.4377165785154113,
+      "grad_norm": 0.7510169744491577,
+      "learning_rate": 8.862778230270276e-06,
+      "loss": 0.0812,
+      "step": 600
+    },
+    {
+      "epoch": 0.44501185482400146,
+      "grad_norm": 0.8274700045585632,
+      "learning_rate": 8.826124670490804e-06,
+      "loss": 0.0872,
+      "step": 610
+    },
+    {
+      "epoch": 0.45230713113259163,
+      "grad_norm": 1.118033766746521,
+      "learning_rate": 8.788968128091084e-06,
+      "loss": 0.0934,
+      "step": 620
+    },
+    {
+      "epoch": 0.45960240744118186,
+      "grad_norm": 0.7703680992126465,
+      "learning_rate": 8.751313487672815e-06,
+      "loss": 0.0891,
+      "step": 630
+    },
+    {
+      "epoch": 0.46689768374977203,
+      "grad_norm": 1.0380890369415283,
+      "learning_rate": 8.71316569931769e-06,
+      "loss": 0.0931,
+      "step": 640
+    },
+    {
+      "epoch": 0.4741929600583622,
+      "grad_norm": 0.9548109769821167,
+      "learning_rate": 8.674529777936674e-06,
+      "loss": 0.0816,
+      "step": 650
+    },
+    {
+      "epoch": 0.48148823636695237,
+      "grad_norm": 1.2412773370742798,
+      "learning_rate": 8.635410802610724e-06,
+      "loss": 0.0872,
+      "step": 660
+    },
+    {
+      "epoch": 0.4887835126755426,
+      "grad_norm": 0.8023186922073364,
+      "learning_rate": 8.595813915923113e-06,
+      "loss": 0.0857,
+      "step": 670
+    },
+    {
+      "epoch": 0.49607878898413277,
+      "grad_norm": 0.829302966594696,
+      "learning_rate": 8.555744323283364e-06,
+      "loss": 0.0932,
+      "step": 680
+    },
+    {
+      "epoch": 0.503374065292723,
+      "grad_norm": 1.0218778848648071,
+      "learning_rate": 8.515207292242969e-06,
+      "loss": 0.0791,
+      "step": 690
+    },
+    {
+      "epoch": 0.5106693416013132,
+      "grad_norm": 0.9110598564147949,
+      "learning_rate": 8.474208151802898e-06,
+      "loss": 0.0917,
+      "step": 700
+    },
+    {
+      "epoch": 0.5179646179099033,
+      "grad_norm": 0.8709658980369568,
+      "learning_rate": 8.432752291713058e-06,
+      "loss": 0.0805,
+      "step": 710
+    },
+    {
+      "epoch": 0.5252598942184935,
+      "grad_norm": 0.9375218749046326,
+      "learning_rate": 8.390845161763756e-06,
+      "loss": 0.0887,
+      "step": 720
+    },
+    {
+      "epoch": 0.5325551705270837,
+      "grad_norm": 0.819020688533783,
+      "learning_rate": 8.34849227106926e-06,
+      "loss": 0.0823,
+      "step": 730
+    },
+    {
+      "epoch": 0.5398504468356738,
+      "grad_norm": 0.7377147078514099,
+      "learning_rate": 8.305699187343586e-06,
+      "loss": 0.0867,
+      "step": 740
+    },
+    {
+      "epoch": 0.5471457231442641,
+      "grad_norm": 0.9633333086967468,
+      "learning_rate": 8.262471536168547e-06,
+      "loss": 0.0893,
+      "step": 750
+    },
+    {
+      "epoch": 0.5544409994528543,
+      "grad_norm": 0.894578218460083,
+      "learning_rate": 8.218815000254233e-06,
+      "loss": 0.0874,
+      "step": 760
+    },
+    {
+      "epoch": 0.5617362757614445,
+      "grad_norm": 0.9977262616157532,
+      "learning_rate": 8.174735318691946e-06,
+      "loss": 0.0822,
+      "step": 770
+    },
+    {
+      "epoch": 0.5690315520700346,
+      "grad_norm": 0.8501657247543335,
+      "learning_rate": 8.130238286199747e-06,
+      "loss": 0.0874,
+      "step": 780
+    },
+    {
+      "epoch": 0.5763268283786248,
+      "grad_norm": 0.7603849172592163,
+      "learning_rate": 8.085329752360683e-06,
+      "loss": 0.0784,
+      "step": 790
+    },
+    {
+      "epoch": 0.583622104687215,
+      "grad_norm": 0.8487511277198792,
+      "learning_rate": 8.04001562085379e-06,
+      "loss": 0.0878,
+      "step": 800
+    },
+    {
+      "epoch": 0.5909173809958053,
+      "grad_norm": 0.9253877401351929,
+      "learning_rate": 7.994301848678006e-06,
+      "loss": 0.0815,
+      "step": 810
+    },
+    {
+      "epoch": 0.5982126573043954,
+      "grad_norm": 0.9946874976158142,
+      "learning_rate": 7.948194445369065e-06,
+      "loss": 0.0901,
+      "step": 820
+    },
+    {
+      "epoch": 0.6055079336129856,
+      "grad_norm": 1.006040334701538,
+      "learning_rate": 7.901699472209467e-06,
+      "loss": 0.0792,
+      "step": 830
+    },
+    {
+      "epoch": 0.6128032099215758,
+      "grad_norm": 0.8797623515129089,
+      "learning_rate": 7.85482304143168e-06,
+      "loss": 0.0816,
+      "step": 840
+    },
+    {
+      "epoch": 0.620098486230166,
+      "grad_norm": 0.9626962542533875,
+      "learning_rate": 7.807571315414616e-06,
+      "loss": 0.08,
+      "step": 850
+    },
+    {
+      "epoch": 0.6273937625387561,
+      "grad_norm": 1.075498104095459,
+      "learning_rate": 7.759950505873523e-06,
+      "loss": 0.0802,
+      "step": 860
+    },
+    {
+      "epoch": 0.6346890388473464,
+      "grad_norm": 0.90218585729599,
+      "learning_rate": 7.711966873043396e-06,
+      "loss": 0.0824,
+      "step": 870
+    },
+    {
+      "epoch": 0.6419843151559366,
+      "grad_norm": 0.8692035675048828,
+      "learning_rate": 7.66362672485601e-06,
+      "loss": 0.0769,
+      "step": 880
+    },
+    {
+      "epoch": 0.6492795914645267,
+      "grad_norm": 1.062687635421753,
+      "learning_rate": 7.614936416110668e-06,
+      "loss": 0.0834,
+      "step": 890
+    },
+    {
+      "epoch": 0.6565748677731169,
+      "grad_norm": 0.9357954859733582,
+      "learning_rate": 7.565902347638806e-06,
+      "loss": 0.0833,
+      "step": 900
+    },
+    {
+      "epoch": 0.6638701440817071,
+      "grad_norm": 0.873466432094574,
+      "learning_rate": 7.5165309654625405e-06,
+      "loss": 0.0803,
+      "step": 910
+    },
+    {
+      "epoch": 0.6711654203902973,
+      "grad_norm": 0.8454645276069641,
+      "learning_rate": 7.466828759947271e-06,
+      "loss": 0.0793,
+      "step": 920
+    },
+    {
+      "epoch": 0.6784606966988874,
+      "grad_norm": 1.1247007846832275,
+      "learning_rate": 7.416802264948455e-06,
+      "loss": 0.0757,
+      "step": 930
+    },
+    {
+      "epoch": 0.6857559730074777,
+      "grad_norm": 0.9007195234298706,
+      "learning_rate": 7.366458056952668e-06,
+      "loss": 0.0855,
+      "step": 940
+    },
+    {
+      "epoch": 0.6930512493160679,
+      "grad_norm": 0.775996208190918,
+      "learning_rate": 7.315802754213062e-06,
+      "loss": 0.079,
+      "step": 950
+    },
+    {
+      "epoch": 0.700346525624658,
+      "grad_norm": 1.1148663759231567,
+      "learning_rate": 7.264843015879321e-06,
+      "loss": 0.0881,
+      "step": 960
+    },
+    {
+      "epoch": 0.7076418019332482,
+      "grad_norm": 0.8462682366371155,
+      "learning_rate": 7.213585541122261e-06,
+      "loss": 0.0848,
+      "step": 970
+    },
+    {
+      "epoch": 0.7149370782418384,
+      "grad_norm": 0.8557429313659668,
+      "learning_rate": 7.162037068253141e-06,
+      "loss": 0.0839,
+      "step": 980
+    },
+    {
+      "epoch": 0.7222323545504286,
+      "grad_norm": 0.8856000900268555,
+      "learning_rate": 7.110204373837857e-06,
+      "loss": 0.0804,
+      "step": 990
+    },
+    {
+      "execution_accuracy": 47.0,
+      "selected_difficulty": "challenging",
+      "step": 1000
+    },
+    {
+      "epoch": 0.7295276308590188,
+      "grad_norm": 0.7584331631660461,
+      "learning_rate": 7.058094271806091e-06,
+      "loss": 0.0818,
+      "step": 1000
+    },
+    {
+      "epoch": 0.736822907167609,
+      "grad_norm": 0.9286295175552368,
+      "learning_rate": 7.0057136125555456e-06,
+      "loss": 0.0766,
+      "step": 1010
+    },
+    {
+      "epoch": 0.7441181834761992,
+      "grad_norm": 1.1081056594848633,
+      "learning_rate": 6.953069282051397e-06,
+      "loss": 0.0835,
+      "step": 1020
+    },
+    {
+      "epoch": 0.7514134597847894,
+      "grad_norm": 0.9020804762840271,
+      "learning_rate": 6.900168200921065e-06,
+      "loss": 0.0791,
+      "step": 1030
+    },
+    {
+      "epoch": 0.7587087360933795,
+      "grad_norm": 1.0643606185913086,
+      "learning_rate": 6.84701732354442e-06,
+      "loss": 0.0866,
+      "step": 1040
+    },
+    {
+      "epoch": 0.7660040124019697,
+      "grad_norm": 0.9293026924133301,
+      "learning_rate": 6.79362363713957e-06,
+      "loss": 0.0828,
+      "step": 1050
+    },
+    {
+      "epoch": 0.7732992887105599,
+      "grad_norm": 0.9997085332870483,
+      "learning_rate": 6.7399941608443096e-06,
+      "loss": 0.0737,
+      "step": 1060
+    },
+    {
+      "epoch": 0.7805945650191501,
+      "grad_norm": 0.8557327389717102,
+      "learning_rate": 6.686135944793395e-06,
+      "loss": 0.0836,
+      "step": 1070
+    },
+    {
+      "epoch": 0.7878898413277403,
+      "grad_norm": 0.8752471208572388,
+      "learning_rate": 6.632056069191723e-06,
+      "loss": 0.0897,
+      "step": 1080
+    },
+    {
+      "epoch": 0.7951851176363305,
+      "grad_norm": 0.9777392148971558,
+      "learning_rate": 6.57776164338357e-06,
+      "loss": 0.0805,
+      "step": 1090
+    },
+    {
+      "epoch": 0.8024803939449207,
+      "grad_norm": 0.9013363122940063,
+      "learning_rate": 6.523259804918001e-06,
+      "loss": 0.0764,
+      "step": 1100
+    },
+    {
+      "epoch": 0.8097756702535108,
+      "grad_norm": 0.930316686630249,
+      "learning_rate": 6.4685577186105595e-06,
+      "loss": 0.0858,
+      "step": 1110
+    },
+    {
+      "epoch": 0.817070946562101,
+      "grad_norm": 1.2016055583953857,
+      "learning_rate": 6.413662575601391e-06,
+      "loss": 0.0809,
+      "step": 1120
+    },
+    {
+      "epoch": 0.8243662228706913,
+      "grad_norm": 0.8230682611465454,
+      "learning_rate": 6.358581592409881e-06,
+      "loss": 0.0771,
+      "step": 1130
+    },
+    {
+      "epoch": 0.8316614991792814,
+      "grad_norm": 0.9575796127319336,
+      "learning_rate": 6.303322009985984e-06,
+      "loss": 0.0893,
+      "step": 1140
+    },
+    {
+      "epoch": 0.8389567754878716,
821
+ "grad_norm": 0.7613864541053772,
822
+ "learning_rate": 6.247891092758319e-06,
823
+ "loss": 0.0802,
824
+ "step": 1150
825
+ },
826
+ {
827
+ "epoch": 0.8462520517964618,
828
+ "grad_norm": 0.8168739080429077,
829
+ "learning_rate": 6.1922961276791925e-06,
830
+ "loss": 0.0718,
831
+ "step": 1160
832
+ },
833
+ {
834
+ "epoch": 0.853547328105052,
835
+ "grad_norm": 0.719241201877594,
836
+ "learning_rate": 6.136544423266651e-06,
837
+ "loss": 0.073,
838
+ "step": 1170
839
+ },
840
+ {
841
+ "epoch": 0.8608426044136421,
842
+ "grad_norm": 1.0418319702148438,
843
+ "learning_rate": 6.08064330864371e-06,
844
+ "loss": 0.0816,
845
+ "step": 1180
846
+ },
847
+ {
848
+ "epoch": 0.8681378807222323,
849
+ "grad_norm": 0.7831118106842041,
850
+ "learning_rate": 6.024600132574855e-06,
851
+ "loss": 0.0775,
852
+ "step": 1190
853
+ },
854
+ {
855
+ "epoch": 0.8754331570308226,
856
+ "grad_norm": 0.9321058988571167,
857
+ "learning_rate": 5.968422262499983e-06,
858
+ "loss": 0.0777,
859
+ "step": 1200
860
+ },
861
+ {
862
+ "epoch": 0.8827284333394128,
863
+ "grad_norm": 0.9959325194358826,
864
+ "learning_rate": 5.912117083565874e-06,
865
+ "loss": 0.0743,
866
+ "step": 1210
867
+ },
868
+ {
869
+ "epoch": 0.8900237096480029,
870
+ "grad_norm": 0.7858604192733765,
871
+ "learning_rate": 5.85569199765534e-06,
872
+ "loss": 0.0742,
873
+ "step": 1220
874
+ },
875
+ {
876
+ "epoch": 0.8973189859565931,
877
+ "grad_norm": 1.0771974325180054,
878
+ "learning_rate": 5.799154422414174e-06,
879
+ "loss": 0.0784,
880
+ "step": 1230
881
+ },
882
+ {
883
+ "epoch": 0.9046142622651833,
884
+ "grad_norm": 1.0624542236328125,
885
+ "learning_rate": 5.7425117902760195e-06,
886
+ "loss": 0.0784,
887
+ "step": 1240
888
+ },
889
+ {
890
+ "epoch": 0.9119095385737734,
891
+ "grad_norm": 0.7207911014556885,
892
+ "learning_rate": 5.685771547485312e-06,
893
+ "loss": 0.0694,
894
+ "step": 1250
895
+ },
896
+ {
897
+ "epoch": 0.9192048148823637,
898
+ "grad_norm": 1.0225133895874023,
899
+ "learning_rate": 5.628941153118388e-06,
900
+ "loss": 0.0747,
901
+ "step": 1260
902
+ },
903
+ {
904
+ "epoch": 0.9265000911909539,
905
+ "grad_norm": 1.0003268718719482,
906
+ "learning_rate": 5.572028078102917e-06,
907
+ "loss": 0.077,
908
+ "step": 1270
909
+ },
910
+ {
911
+ "epoch": 0.9337953674995441,
912
+ "grad_norm": 0.9205290079116821,
913
+ "learning_rate": 5.515039804235772e-06,
914
+ "loss": 0.0764,
915
+ "step": 1280
916
+ },
917
+ {
918
+ "epoch": 0.9410906438081342,
919
+ "grad_norm": 1.0423191785812378,
920
+ "learning_rate": 5.457983823199475e-06,
921
+ "loss": 0.0801,
922
+ "step": 1290
923
+ },
924
+ {
925
+ "epoch": 0.9483859201167244,
926
+ "grad_norm": 0.8814120888710022,
927
+ "learning_rate": 5.400867635577335e-06,
928
+ "loss": 0.0801,
929
+ "step": 1300
930
+ },
931
+ {
932
+ "epoch": 0.9556811964253146,
933
+ "grad_norm": 0.9548910856246948,
934
+ "learning_rate": 5.343698749867421e-06,
935
+ "loss": 0.0802,
936
+ "step": 1310
937
+ },
938
+ {
939
+ "epoch": 0.9629764727339047,
940
+ "grad_norm": 0.7712908983230591,
941
+ "learning_rate": 5.2864846814955e-06,
942
+ "loss": 0.077,
943
+ "step": 1320
944
+ },
945
+ {
946
+ "epoch": 0.970271749042495,
947
+ "grad_norm": 0.9300876259803772,
948
+ "learning_rate": 5.229232951827054e-06,
949
+ "loss": 0.081,
950
+ "step": 1330
951
+ },
952
+ {
953
+ "epoch": 0.9775670253510852,
954
+ "grad_norm": 0.932421863079071,
955
+ "learning_rate": 5.17195108717852e-06,
956
+ "loss": 0.0841,
957
+ "step": 1340
958
+ },
959
+ {
960
+ "epoch": 0.9848623016596754,
961
+ "grad_norm": 0.9440054297447205,
962
+ "learning_rate": 5.114646617827884e-06,
963
+ "loss": 0.0714,
964
+ "step": 1350
965
+ },
966
+ {
967
+ "epoch": 0.9921575779682655,
968
+ "grad_norm": 0.8986610174179077,
969
+ "learning_rate": 5.057327077024745e-06,
970
+ "loss": 0.0781,
971
+ "step": 1360
972
+ },
973
+ {
974
+ "epoch": 0.9994528542768557,
975
+ "grad_norm": 0.9697067141532898,
976
+ "learning_rate": 5e-06,
977
+ "loss": 0.0863,
978
+ "step": 1370
979
+ },
980
+ {
981
+ "epoch": 1.006748130585446,
982
+ "grad_norm": 0.5880870223045349,
983
+ "learning_rate": 4.942672922975255e-06,
984
+ "loss": 0.0443,
985
+ "step": 1380
986
+ },
987
+ {
988
+ "epoch": 1.014043406894036,
989
+ "grad_norm": 0.8269129395484924,
990
+ "learning_rate": 4.8853533821721175e-06,
991
+ "loss": 0.0464,
992
+ "step": 1390
993
+ },
994
+ {
995
+ "epoch": 1.0213386832026263,
996
+ "grad_norm": 0.9354344606399536,
997
+ "learning_rate": 4.82804891282148e-06,
998
+ "loss": 0.0436,
999
+ "step": 1400
1000
+ }
1001
+ ],
1002
+ "logging_steps": 10,
1003
+ "max_steps": 2740,
1004
+ "num_input_tokens_seen": 0,
1005
+ "num_train_epochs": 2,
1006
+ "save_steps": 200,
1007
+ "stateful_callbacks": {
1008
+ "TrainerControl": {
1009
+ "args": {
1010
+ "should_epoch_stop": false,
1011
+ "should_evaluate": false,
1012
+ "should_log": false,
1013
+ "should_save": true,
1014
+ "should_training_stop": false
1015
+ },
1016
+ "attributes": {}
1017
+ }
1018
+ },
1019
+ "total_flos": 1.849931290145456e+18,
1020
+ "train_batch_size": 4,
1021
+ "trial_name": null,
1022
+ "trial_params": null
1023
+ }
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:86fddb185e4a273f80c38e16b89981e4876f4332fecde011f69e03e2cd9db509
+ size 5240