Rocketknight1 (HF staff) committed
Commit
17aaf52
1 Parent(s): 18fae53

Update TF weights

Files changed (2):
  1. config.json +5 -1
  2. tf_model.h5 +1 -1
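
For context, a minimal sketch of how a TF weights update like this is typically produced with transformers; the exact conversion command is an assumption, not part of this commit. TFEsmForMaskedLM with from_pt=True converts the repo's PyTorch checkpoint on the fly:

# Hypothetical regeneration of tf_model.h5 from the PyTorch weights
# (assumes the PyTorch checkpoint in this repo is the source of truth).
from transformers import TFEsmForMaskedLM

model = TFEsmForMaskedLM.from_pretrained("facebook/esm2_t6_8M_UR50D", from_pt=True)
model.save_pretrained("esm2_t6_8M_UR50D-tf")  # writes tf_model.h5 next to config.json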
config.json CHANGED
@@ -1,15 +1,18 @@
 {
+  "_name_or_path": "/tmp/facebook/esm2_t6_8M_UR50D",
   "architectures": [
     "EsmForMaskedLM"
   ],
   "attention_probs_dropout_prob": 0.0,
   "classifier_dropout": null,
   "emb_layer_norm_before": false,
+  "esmfold_config": null,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.0,
   "hidden_size": 320,
   "initializer_range": 0.02,
   "intermediate_size": 1280,
+  "is_folding_model": false,
   "layer_norm_eps": 1e-05,
   "mask_token_id": 32,
   "max_position_embeddings": 1026,
@@ -20,7 +23,8 @@
   "position_embedding_type": "rotary",
   "token_dropout": true,
   "torch_dtype": "float32",
-  "transformers_version": "4.23.0.dev0",
+  "transformers_version": "4.25.0.dev0",
   "use_cache": true,
+  "vocab_list": null,
   "vocab_size": 33
 }
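
The added fields can be inspected after the update; a quick sketch (the model id is taken from the new "_name_or_path", which points at facebook/esm2_t6_8M_UR50D):

# Load the updated config and check the new ESM-specific fields.
from transformers import EsmConfig

config = EsmConfig.from_pretrained("facebook/esm2_t6_8M_UR50D")
print(config.is_folding_model)  # False: this is the masked-LM checkpoint
print(config.esmfold_config)    # None: only populated on ESMFold checkpoints
print(config.vocab_list)        # None here; the vocab comes from the tokenizer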
tf_model.h5 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:30978d2e98177ad9df3300ff26c39c980c1f20291956311219e65634b8c70ca6
+oid sha256:4e960ac198cf3da8e4464775b74737976755f432c8d475d9e7d1876404bca9d9
 size 30254224
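
Since only the LFS pointer changes here, a downloaded tf_model.h5 can be checked against the new oid; a minimal sketch (the local path is an assumption):

# Verify a local tf_model.h5 against the sha256 recorded in the LFS pointer.
import hashlib

expected = "4e960ac198cf3da8e4464775b74737976755f432c8d475d9e7d1876404bca9d9"
with open("tf_model.h5", "rb") as f:
    digest = hashlib.sha256(f.read()).hexdigest()
assert digest == expected, f"hash mismatch: {digest}"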