sundar-pichai committed
Commit a093d99
Parent(s): d3a8bb9
Upload 4 files
pytorch_model-00001-of-00003.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:ba766552efefdfbdef71a581ceffdb1a4335731dc09f72d250e25e03075917bc
+size 9948730542
pytorch_model-00002-of-00003.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:278e02d729f3daef3ab06f2bcce7100fb674994efad94ea762d08176d0cfc0a0
+size 9904167200
pytorch_model-00003-of-00003.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:d361ac55d036ab8e44140d5b2d2ae2702fe7de0746d29c4262b72d1c6ef68c41
+size 6178984969
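For reference, each of the .bin files above is a Git LFS pointer: a three-line text stub recording the SHA-256 digest (oid) and byte size of the actual shard stored in LFS. A minimal, illustrative Python sketch for checking a downloaded shard against its pointer follows; the parse_lfs_pointer/verify_shard helpers and the file paths are hypothetical, not part of this repository.

import hashlib
from pathlib import Path

def parse_lfs_pointer(pointer_path):
    """Read a Git LFS pointer file and return its (oid, size)."""
    fields = dict(
        line.split(" ", 1)
        for line in Path(pointer_path).read_text().splitlines()
        if " " in line
    )
    oid = fields["oid"].removeprefix("sha256:").strip()
    size = int(fields["size"])
    return oid, size

def verify_shard(pointer_path, shard_path):
    """Compare a downloaded shard's SHA-256 and byte size to its pointer."""
    expected_oid, expected_size = parse_lfs_pointer(pointer_path)
    h = hashlib.sha256()
    with open(shard_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    actual_size = Path(shard_path).stat().st_size
    return h.hexdigest() == expected_oid and actual_size == expected_size

# Hypothetical local paths; adjust to wherever the pointer and shard live.
print(verify_shard("pointers/pytorch_model-00001-of-00003.bin",
                   "blobs/pytorch_model-00001-of-00003.bin"))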
pytorch_model.bin.index.json CHANGED
@@ -246,9 +246,9 @@
 "model.layers.3.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00003.bin",
 "model.layers.3.self_attn.v_proj.weight": "pytorch_model-00001-of-00003.bin",
 "model.layers.30.input_layernorm.weight": "pytorch_model-00003-of-00003.bin",
-"model.layers.30.mlp.down_proj.weight": "pytorch_model-
+"model.layers.30.mlp.down_proj.weight": "pytorch_model-00002-of-00003.bin",
 "model.layers.30.mlp.gate_proj.weight": "pytorch_model-00002-of-00003.bin",
-"model.layers.30.mlp.up_proj.weight": "pytorch_model-
+"model.layers.30.mlp.up_proj.weight": "pytorch_model-00003-of-00003.bin",
 "model.layers.30.post_attention_layernorm.weight": "pytorch_model-00003-of-00003.bin",
 "model.layers.30.self_attn.k_proj.weight": "pytorch_model-00002-of-00003.bin",
 "model.layers.30.self_attn.o_proj.weight": "pytorch_model-00002-of-00003.bin",
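The index file's weight_map assigns each parameter name to the shard that stores its tensor, which is what this commit updates for model.layers.30.mlp.down_proj and model.layers.30.mlp.up_proj. Below is a minimal sketch of how a loader could use that mapping, assuming the shards sit next to the index file and each shard fits in CPU memory one at a time; load_sharded_state_dict and the checkpoint directory name are hypothetical.

import json
from pathlib import Path

import torch

def load_sharded_state_dict(checkpoint_dir):
    """Assemble a full state dict from pytorch_model.bin.index.json.

    The index's "weight_map" maps each parameter name to the shard file
    (e.g. pytorch_model-00002-of-00003.bin) that stores its tensor.
    """
    checkpoint_dir = Path(checkpoint_dir)
    index = json.loads(
        (checkpoint_dir / "pytorch_model.bin.index.json").read_text()
    )
    weight_map = index["weight_map"]

    state_dict = {}
    # Load each shard once, then pick out the parameters it owns.
    for shard_name in sorted(set(weight_map.values())):
        shard = torch.load(checkpoint_dir / shard_name, map_location="cpu")
        for param_name, owner in weight_map.items():
            if owner == shard_name:
                state_dict[param_name] = shard[param_name]
    return state_dict

# Hypothetical directory containing the index and the three shards above.
sd = load_sharded_state_dict("./llama-checkpoint")
print(sd["model.layers.30.mlp.down_proj.weight"].shape)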