Results: {"exact_match": 73.7012987012987, "f1": 85.32812905326854, "jaccard": 0.8245064554898146, "wer": 0.32326105814251166}
"length train": 6159, "length test": 1540
Hyperparameters: n_epochs = 1, base_LM_model = "deepset/xlm-roberta-large-squad2", max_seq_length = 512, learning_rate = 3e-5, evaluation_strategy = "epoch", save_strategy = "epoch", warmup_ratio = 0.1, gradient_accumulation_steps = 8, weight_decay = 0.01, output_dir = './', test_size = 0.2, seed = 0, doc_stride = 128
Browse files
README.md
ADDED
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
---
|
2 |
+
language:
|
3 |
+
- fa
|
4 |
+
- en
|
5 |
+
metrics:
|
6 |
+
- exact_match
|
7 |
+
- wer
|
8 |
+
library_name: transformers
|
9 |
+
pipeline_tag: question-answering
|
10 |
+
---
|
11 |
+
---
|
12 |
+
language:
|
13 |
+
- fa
|
14 |
+
- en
|
15 |
+
metrics:
|
16 |
+
|
17 |
+
"length train": 6159,
|
18 |
+
"length test": 1540,
|
19 |
+
pipeline_tag: question-answering
|
20 |
+
Dataset: PersianQuAD -> https://github.com/BigData-IsfahanUni/PersianQuAD/tree/main/Dataset
|
21 |
+
---
|