haowei
committed on
Commit
•
cb6f926
1
Parent(s):
932302a
update
Browse files- config.json +4 -1
- roberta_modeling.py +1 -2
config.json
CHANGED
@@ -1,6 +1,9 @@
|
|
1 |
{
|
2 |
-
"_name_or_path": "/home/haowei/haowei/continual-post-training/agnews_unsup_roberta",
|
3 |
"adapter_mode": "parallel",
|
|
|
|
|
|
|
|
|
4 |
"adapter_task": 5,
|
5 |
"adapters": {
|
6 |
"adapters": {},
|
|
|
1 |
{
|
|
|
2 |
"adapter_mode": "parallel",
|
3 |
+
"auto_map": {
|
4 |
+
"AutoModelForSequenceClassification": "roberta_modeling.RobertaMaskForSequenceClassification",
|
5 |
+
"AutoModelForMaskedLM": "roberta_modeling.RobertaMaskForMaskedLM"
|
6 |
+
},
|
7 |
"adapter_task": 5,
|
8 |
"adapters": {
|
9 |
"adapters": {},
|
roberta_modeling.py
CHANGED
@@ -2191,5 +2191,4 @@ class RobertaMaskForSequenceClassification(RobertaMaskBasedModel, RobertaForSequ
|
|
2191 |
attn_adapter_size=config.attn_adapter_size,
|
2192 |
ffn_adapter_size=config.ffn_adapter_size,
|
2193 |
)
|
2194 |
-
self.roberta = add_roberta_adapters(self.roberta, adapter_config)
|
2195 |
-
|
|
|
2191 |
attn_adapter_size=config.attn_adapter_size,
|
2192 |
ffn_adapter_size=config.ffn_adapter_size,
|
2193 |
)
|
2194 |
+
self.roberta = add_roberta_adapters(self.roberta, adapter_config)
|
|