{
  "architectures": [
    "LsrModel"
  ],
  "coref_dim": 20,
  "distance_size": 20,
  "dropout_emb": 0.2,
  "dropout_gcn": 0.4,
  "dropout_rate": 0.3,
  "finetune_emb": false,
  "hidden_dim": 120,
  "initializer_range": 0.02,
  "max_length": 512,
  "ner_dim": 20,
  "num_relations": 97,
  "reasoner_layer_sizes": [
    3,
    4
  ],
  "torch_dtype": "float32",
  "transformers_version": "4.9.2",
  "use_bert": false,
  "use_reasoning_block": true,
  "use_struct_att": false,
  "word_embedding_shape": [
    194784,
    100
  ]
}
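For reference, below is a minimal sketch of how a configuration file like this could be read and sanity-checked with plain Python before being wired into model code. The field names and the example values in the comments come from the listing above; the file path, function name, and the specific checks are illustrative assumptions, not part of the original model code.

```python
import json
from pathlib import Path

def load_lsr_config(path: str) -> dict:
    """Read the LSR config.json and sanity-check a few fields.

    The keys mirror the listing above; the checks themselves are
    illustrative, not taken from the original model code.
    """
    cfg = json.loads(Path(path).read_text())

    # The embedding matrix is vocab_size x embedding_dim (194784 x 100 above).
    vocab_size, emb_dim = cfg["word_embedding_shape"]
    assert vocab_size > 0 and emb_dim > 0

    # Hidden size and number of relation classes (97 here, i.e. a
    # DocRED-style label set including the no-relation class).
    hidden_dim = cfg["hidden_dim"]        # 120
    num_relations = cfg["num_relations"]  # 97

    # Dropout values must be valid probabilities.
    for key in ("dropout_emb", "dropout_gcn", "dropout_rate"):
        assert 0.0 <= cfg[key] < 1.0, f"{key} out of range"

    print(f"hidden_dim={hidden_dim}, num_relations={num_relations}, "
          f"embeddings={vocab_size}x{emb_dim}")
    return cfg

if __name__ == "__main__":
    # Hypothetical local path to the file shown above.
    load_lsr_config("config.json")
```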