valhalla committed
Commit 5b2e376
Parent: e7eb308

add flax model

Files changed (3)
  1. .gitattributes +1 -0
  2. config.json +5 -0
  3. flax_model.msgpack +3 -0
.gitattributes CHANGED
@@ -6,3 +6,4 @@
 *.tar.gz filter=lfs diff=lfs merge=lfs -text
 *.ot filter=lfs diff=lfs merge=lfs -text
 *.onnx filter=lfs diff=lfs merge=lfs -text
+*.msgpack filter=lfs diff=lfs merge=lfs -text
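For reference, the added line is exactly what running `git lfs track "*.msgpack"` in a clone writes to .gitattributes. A minimal sketch (assuming a local clone with git installed) that verifies the rule applies to the new weights file via `git check-attr`:

# A minimal sketch, assuming a local clone of this repo;
# `git check-attr` reports which attributes apply to a path.
import subprocess

out = subprocess.run(
    ["git", "check-attr", "filter", "--", "flax_model.msgpack"],
    capture_output=True, text=True, check=True,
).stdout
# Expected once *.msgpack is LFS-tracked:
#   flax_model.msgpack: filter: lfs
print(out.strip())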
config.json CHANGED
@@ -10,6 +10,7 @@
   "attention_dropout": 0.0,
   "bos_token_id": 0,
   "classif_dropout": 0.0,
+  "classifier_dropout": 0.0,
   "d_model": 1024,
   "decoder_attention_heads": 16,
   "decoder_ffn_dim": 4096,
@@ -27,6 +28,8 @@
     2
   ],
   "extra_pos_embeddings": 2,
+  "forced_eos_token_id": 2,
+  "gradient_checkpointing": false,
   "id2label": {
     "0": "LABEL_0",
     "1": "LABEL_1",
@@ -59,5 +62,7 @@
   "student_decoder_layers": null,
   "student_encoder_layers": null,
   "task_specific_params": {},
+  "transformers_version": "4.7.0.dev0",
+  "use_cache": true,
   "vocab_size": 50264
 }
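The new keys surface directly as attributes once the config is loaded with transformers. A minimal sketch, assuming the repo is hosted on the Hugging Face Hub; "user/model" below is a placeholder, not this repo's actual id:

# A minimal sketch; "user/model" is a placeholder repo id.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("user/model")  # placeholder

print(config.forced_eos_token_id)   # 2: generation is forced to end with token id 2
print(config.use_cache)             # True: decoder key/value caching during generation
print(config.transformers_version)  # "4.7.0.dev0": version that wrote this config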
flax_model.msgpack ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1b27661f9737ed209e969fa2f0cd36eba40248c3f528070ab911085e9bf07a02
+size 611133844
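What gets committed is only this git-lfs pointer (version, oid, size); the ~611 MB payload itself lives in LFS storage. A minimal sketch of loading the new Flax weights, assuming a BART-style seq2seq checkpoint (suggested by the d_model/decoder settings in config.json) and again using a placeholder repo id:

# A minimal sketch; assumes a seq2seq (BART-family) checkpoint and a
# placeholder repo id. Requires transformers with the flax extras installed.
from transformers import FlaxAutoModelForSeq2SeqLM

model = FlaxAutoModelForSeq2SeqLM.from_pretrained("user/model")  # placeholder
print(model.config.d_model)  # 1024, matching config.json above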