hypewarner_petapter / config.json
{
  "_name_or_path": "xlm-roberta-large",
  "adapters": {
    "adapters": {
      "myadapter": "779363b34fd3bd94"
    },
    "config_map": {
      "779363b34fd3bd94": {
        "alpha": 16,
        "architecture": "lora",
        "attn_matrices": [
          "q",
          "v"
        ],
        "composition_mode": "add",
        "dropout": 0.0,
        "init_weights": "lora",
        "intermediate_lora": false,
        "leave_out": [],
        "output_lora": false,
        "r": 8,
        "selfattn_lora": true,
        "use_gating": false
      }
    },
    "fusion_config_map": {},
    "fusions": {}
  },
  "architectures": [
    "XLMRobertaAdapterModel"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 1024,
  "id2label": null,
  "initializer_range": 0.02,
  "intermediate_size": 4096,
  "label2id": null,
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "xlm-roberta",
  "num_attention_heads": 16,
  "num_hidden_layers": 24,
  "output_past": true,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "prediction_heads": {
    "default": {
      "activation_function": "gelu",
      "bias": true,
      "embedding_size": 1024,
      "head_type": "masked_lm",
      "label2id": null,
      "layer_norm": true,
      "layers": 2,
      "shift_labels": false,
      "vocab_size": 250002
    },
    "myadapter": {
      "head_type": "PEThead",
      "id2tokenid": {
        "0": [
          81793
        ],
        "1": [
          35943
        ]
      },
      "id2tokenid_values": [
        35943,
        81793
      ],
      "vocab_size": 250002
    }
  },
  "torch_dtype": "float32",
  "transformers_version": "4.43.4",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 250002
}
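
Note on the config above: what follows is a minimal sketch, assuming the adapters library (the successor of adapter-transformers, which writes the "adapters" block into config.json), of how the stored LoRA configuration maps onto code. The trained weights ship with this repository; the sketch only mirrors the listed hyperparameters and is not the author's training script. The "PEThead" prediction head is a custom, repo-specific head type not provided by the library; its "id2tokenid" field is a PET-style verbalizer mapping label ids to vocabulary token ids (0 -> 81793, 1 -> 35943), so it is only noted in a comment below.

from adapters import AutoAdapterModel, LoRAConfig

# Base model named in "_name_or_path".
model = AutoAdapterModel.from_pretrained("xlm-roberta-large")

# Mirrors config_map entry "779363b34fd3bd94": rank-8 additive LoRA on the
# self-attention query and value projections, LoRA-style init, alpha=16,
# no dropout, no gating, applied to all 24 layers (empty "leave_out").
lora_config = LoRAConfig(
    r=8,
    alpha=16,
    dropout=0.0,
    attn_matrices=["q", "v"],
    selfattn_lora=True,
    intermediate_lora=False,
    output_lora=False,
    composition_mode="add",
    init_weights="lora",
    use_gating=False,
)
model.add_adapter("myadapter", config=lora_config)

# Freezes the base XLM-R weights so only the LoRA parameters are trained.
model.train_adapter("myadapter")

# The "myadapter" head with head_type "PEThead" is custom to this repository;
# recreating it would require the author's own head implementation.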