GinnM committed on
Commit 8c63008
1 Parent(s): 4f2aa84

Upload ProSSTForMaskedLM

Files changed (1)
config.json +6 -1
config.json CHANGED
@@ -1,7 +1,11 @@
 {
+  "architectures": [
+    "ProSSTForMaskedLM"
+  ],
   "attention_probs_dropout_prob": 0.1,
   "auto_map": {
-    "AutoConfig": "configuration_prosst.ProSSTConfig"
+    "AutoConfig": "configuration_prosst.ProSSTConfig",
+    "AutoModelForMaskedLM": "modeling_prosst.ProSSTForMaskedLM"
   },
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
@@ -33,6 +37,7 @@
   "scale_hidden": 1,
   "ss_vocab_size": 1027,
   "token_dropout": true,
+  "torch_dtype": "float32",
   "transformers_version": "4.38.2",
   "type_vocab_size": 0,
   "vocab_size": 25