alfiannajih
committed on
Commit
•
9915504
1
Parent(s):
7f13948
Update config.json
Browse files- config.json +8 -8
config.json
CHANGED
@@ -4,7 +4,7 @@
|
|
4 |
"GRetrieverModel"
|
5 |
],
|
6 |
"attention_bias": false,
|
7 |
-
"attention_dropout": 0
|
8 |
"auto_map": {
|
9 |
"AutoConfig": "alfiannajih/g-retriever--g_retriever_config.GRetrieverConfig",
|
10 |
"AutoModelForCausalLM": "alfiannajih/g-retriever--g_retriever_model.GRetrieverModel"
|
@@ -25,7 +25,7 @@
|
|
25 |
"dee4a24"
|
26 |
]
|
27 |
}
|
28 |
-
}
|
29 |
"impl": "g_retriever_pipeline.GRetrieverPipeline",
|
30 |
"pt": [
|
31 |
"AutoModelForCausalLM"
|
@@ -53,18 +53,18 @@
|
|
53 |
"num_key_value_heads": 8,
|
54 |
"pad_token_id": 128040,
|
55 |
"pretraining_tp": 1,
|
56 |
-
"rms_norm_eps":
|
57 |
"rope_scaling": {
|
58 |
-
"factor": 8
|
59 |
-
"high_freq_factor": 4
|
60 |
-
"low_freq_factor": 1
|
61 |
"original_max_position_embeddings": 8192,
|
62 |
"rope_type": "llama3"
|
63 |
},
|
64 |
-
"rope_theta": 500000
|
65 |
"tie_word_embeddings": false,
|
66 |
"torch_dtype": "bfloat16",
|
67 |
"transformers_version": "4.44.2",
|
68 |
"use_cache": true,
|
69 |
"vocab_size": 128256
|
70 |
-
}
|
|
|
4 |
"GRetrieverModel"
|
5 |
],
|
6 |
"attention_bias": false,
|
7 |
+
"attention_dropout": 0,
|
8 |
"auto_map": {
|
9 |
"AutoConfig": "alfiannajih/g-retriever--g_retriever_config.GRetrieverConfig",
|
10 |
"AutoModelForCausalLM": "alfiannajih/g-retriever--g_retriever_model.GRetrieverModel"
|
|
|
25 |
"dee4a24"
|
26 |
]
|
27 |
}
|
28 |
+
},
|
29 |
"impl": "g_retriever_pipeline.GRetrieverPipeline",
|
30 |
"pt": [
|
31 |
"AutoModelForCausalLM"
|
|
|
53 |
"num_key_value_heads": 8,
|
54 |
"pad_token_id": 128040,
|
55 |
"pretraining_tp": 1,
|
56 |
+
"rms_norm_eps": 0.00001,
|
57 |
"rope_scaling": {
|
58 |
+
"factor": 8,
|
59 |
+
"high_freq_factor": 4,
|
60 |
+
"low_freq_factor": 1,
|
61 |
"original_max_position_embeddings": 8192,
|
62 |
"rope_type": "llama3"
|
63 |
},
|
64 |
+
"rope_theta": 500000,
|
65 |
"tie_word_embeddings": false,
|
66 |
"torch_dtype": "bfloat16",
|
67 |
"transformers_version": "4.44.2",
|
68 |
"use_cache": true,
|
69 |
"vocab_size": 128256
|
70 |
+
}
|