---
language:
  - fa
  - en
metrics:
  - exact_match
  - wer
library_name: transformers
pipeline_tag: question-answering
---

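## Usage

The front matter tags this checkpoint as a `transformers` question-answering model, so it can be loaded with the standard pipeline. A minimal sketch; the model ID below is a hypothetical placeholder, since the card does not state this repository's name:

```python
# Minimal inference sketch. "alicore/<model-name>" is a hypothetical
# placeholder for this repository's actual model ID.
from transformers import pipeline

qa = pipeline("question-answering", model="alicore/<model-name>")

result = qa(
    question="پایتخت ایران کجاست؟",    # "What is the capital of Iran?"
    context="تهران پایتخت ایران است.",  # "Tehran is the capital of Iran."
)
print(result["answer"], result["score"])
```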
## Results

Scores on the held-out PersianQuAD test split (1,540 examples):

| Metric | Value |
| --- | --- |
| exact_match | 73.7013 |
| f1 | 85.3281 |
| jaccard | 0.8245 |
| wer | 0.3233 |
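The exact_match and f1 values follow the SQuAD convention (0–100 scale). A minimal sketch of how these metrics could be reproduced, assuming the Hugging Face `evaluate` library; the card's evaluation script is not published, so the token-level Jaccard definition below is an assumption:

```python
# Sketch of the reported metrics; the card's actual evaluation code is
# not published, so treat this as an illustration.
import evaluate

squad = evaluate.load("squad")  # SQuAD-style exact_match and f1
wer = evaluate.load("wer")      # word error rate over answer strings

def jaccard(prediction: str, reference: str) -> float:
    """Token-level Jaccard similarity between answer strings (assumed definition)."""
    a, b = set(prediction.split()), set(reference.split())
    return len(a & b) / len(a | b) if a | b else 1.0

predictions = [{"id": "1", "prediction_text": "تهران"}]
references = [{"id": "1", "answers": {"text": ["تهران"], "answer_start": [0]}}]

print(squad.compute(predictions=predictions, references=references))
print(wer.compute(predictions=["تهران"], references=["تهران"]))
print(jaccard("تهران", "تهران"))
```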

## Training hyperparameters

- base_LM_model: deepset/xlm-roberta-large-squad2
- n_epochs: 1
- max_seq_length: 512
- doc_stride: 128
- learning_rate: 3e-5
- warmup_ratio: 0.1
- gradient_accumulation_steps: 8
- weight_decay: 0.01
- evaluation_strategy: epoch
- save_strategy: epoch
- test_size: 0.2
- seed: 0
- output_dir: ./
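These settings map directly onto `transformers` `TrainingArguments`. A minimal fine-tuning sketch under that assumption; SQuAD-style preprocessing (tokenization with max_seq_length = 512 and doc_stride = 128, plus answer-span alignment) is elided, and `train_ds`/`eval_ds` are assumed to be already-tokenized datasets, e.g. built from the split sketched in the Dataset section below:

```python
# Fine-tuning sketch wiring the card's hyperparameters into Trainer.
# `train_ds` and `eval_ds` are assumed pre-tokenized QA datasets.
from transformers import (AutoModelForQuestionAnswering, AutoTokenizer,
                          Trainer, TrainingArguments)

model_name = "deepset/xlm-roberta-large-squad2"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForQuestionAnswering.from_pretrained(model_name)

args = TrainingArguments(
    output_dir="./",
    num_train_epochs=1,
    learning_rate=3e-5,
    warmup_ratio=0.1,
    gradient_accumulation_steps=8,
    weight_decay=0.01,
    evaluation_strategy="epoch",
    save_strategy="epoch",
    seed=0,
)

trainer = Trainer(
    model=model,
    args=args,
    train_dataset=train_ds,
    eval_dataset=eval_ds,
    tokenizer=tokenizer,
)
trainer.train()
```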

"length train": 6159, "length test": 1540, pipeline_tag: question-answering Dataset:PersianQuAd->https://github.com/BigData-IsfahanUni/PersianQuAD/tree/main/Dataset