LumenscopeAI
committed on
Commit • a1a07d7
1 Parent(s): 5efe647
Update config.json
config.json +19 -3
config.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "architectures": [
-    "
+    "BrainGPTForCausalLM"
   ],
   "attention_dropout": 0.0,
   "bos_token_id": 151643,
@@ -23,5 +23,21 @@
   "transformers_version": "4.38.1",
   "use_cache": true,
   "use_sliding_window": false,
-  "vocab_size": 151936
-}
+  "vocab_size": 151936,
+  "beta": 0.1,
+  "S_target": 0.1,
+  "V_target": -65.0,
+  "V_rest": -70.0,
+  "eta_theta": 0.01,
+  "eta_alpha": 0.01,
+  "eta_r": 0.01,
+  "lambda_T": 0.1,
+  "T_target": 10,
+  "C": 1.0,
+  "lambda_task": 1.0,
+  "lambda_stdp": 0.1,
+  "lambda_neuron": 0.1,
+  "lambda_time": 0.1,
+  "lambda_C": 0.1,
+  "lambda_reg": 0.01
+}
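The new keys are plain top-level entries in config.json, so they can be read with the standard json module. The sketch below is a minimal illustration, assuming the file has been downloaded locally as config.json; the grouping of keys into neuron constants, learning rates, and loss weights is only an interpretation of the names added by this commit, not something the repository defines.

    import json

    # Minimal sketch: load the updated config.json and pull out the keys
    # added in this commit. Paths and groupings below are assumptions for
    # illustration only.
    with open("config.json") as f:
        config = json.load(f)

    print(config["architectures"])  # ["BrainGPTForCausalLM"]

    # Neuron-dynamics constants added by the commit (assumed grouping)
    neuron_params = {k: config[k] for k in ("beta", "S_target", "V_target", "V_rest", "C", "T_target")}

    # Learning rates added by the commit (assumed grouping)
    learning_rates = {k: config[k] for k in ("eta_theta", "eta_alpha", "eta_r")}

    # Loss weights added by the commit (assumed grouping)
    loss_weights = {k: config[k] for k in (
        "lambda_task", "lambda_stdp", "lambda_neuron",
        "lambda_time", "lambda_C", "lambda_reg", "lambda_T",
    )}

    print(neuron_params)
    print(learning_rates)
    print(loss_weights)

Because these keys sit alongside the standard transformers fields (bos_token_id, vocab_size, and so on), a custom configuration or modeling class loaded with trust_remote_code would also receive them as extra keyword arguments; how the model consumes them is defined by the repository's own code, not shown here.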