Aaditya1 committed
Commit a1e7163
1 Parent(s): d73ed40

Create config.json

Files changed (1)
config.json +67 -0
config.json ADDED
@@ -0,0 +1,67 @@
+ {
+   "activation_dropout": 0.1,
+   "activation_fn": "gelu",
+   "apply_graphormer_init": true,
+   "architectures": [
+     "GraphormerForGraphClassification"
+   ],
+   "attention_dropout": 0.1,
+   "bias": true,
+   "bos_token_id": 1,
+   "dropout": 0.0,
+   "edge_type": "multi_hop",
+   "embed_scale": null,
+   "embedding_dim": 768,
+   "encoder_normalize_before": true,
+   "eos_token_id": 2,
+   "export": false,
+   "ffn_embedding_dim": 768,
+   "freeze_embeddings": false,
+   "hidden_size": 768,
+   "id2label": {
+     "0": "a",
+     "1": "b",
+     "2": "c",
+     "3": "d",
+     "4": "e",
+     "5": "f",
+     "6": "g"
+   },
+   "init_fn": null,
+   "kdim": null,
+   "label2id": {
+     "a": 0,
+     "b": 1,
+     "c": 2,
+     "d": 3,
+     "e": 4,
+     "f": 5,
+     "g": 6
+   },
+   "layerdrop": 0.0,
+   "max_nodes": 512,
+   "model_type": "graphormer",
+   "multi_hop_max_dist": 5,
+   "no_token_positional_embeddings": false,
+   "num_atoms": 4608,
+   "num_attention_heads": 32,
+   "num_edge_dis": 128,
+   "num_edges": 1536,
+   "num_in_degree": 512,
+   "num_layers": 12,
+   "num_out_degree": 512,
+   "num_spatial": 512,
+   "num_trans_layers_to_freeze": 0,
+   "num_classes": 7,
+   "pad_token_id": 0,
+   "pre_layernorm": false,
+   "q_noise": 0.0,
+   "qn_block_size": 8,
+   "self_attention": true,
+   "share_input_output_embed": false,
+   "spatial_pos_max": 1024,
+   "torch_dtype": "float32",
+   "traceable": false,
+   "transformers_version": "4.26.0.dev0",
+   "vdim": null
+ }