salbatarni committed on
Commit 70afa55
1 Parent(s): d33767b

Training in progress, step 150

Files changed (4)
  1. README.md +137 -0
  2. config.json +32 -0
  3. model.safetensors +3 -0
  4. training_args.bin +3 -0
README.md ADDED
@@ -0,0 +1,137 @@
+ ---
+ base_model: aubmindlab/bert-base-arabertv02
+ tags:
+ - generated_from_trainer
+ model-index:
+ - name: arabert_cross_development_task1_fold4
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # arabert_cross_development_task1_fold4
+
+ This model is a fine-tuned version of [aubmindlab/bert-base-arabertv02](https://huggingface.co/aubmindlab/bert-base-arabertv02) on an unspecified dataset.
+ It achieves the following results on the evaluation set:
+ - Loss: 0.3891
+ - Qwk: 0.7487
+ - Mse: 0.3891
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 2e-05
+ - train_batch_size: 64
+ - eval_batch_size: 64
+ - seed: 42
+ - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
+ - lr_scheduler_type: linear
+ - num_epochs: 10
+
+ ### Training results
+
+ | Training Loss | Epoch | Step | Validation Loss | Qwk | Mse |
+ |:-------------:|:-----:|:----:|:---------------:|:------:|:------:|
+ | No log | 0.125 | 2 | 2.7046 | 0.0134 | 2.7046 |
+ | No log | 0.25 | 4 | 1.3006 | 0.1103 | 1.3006 |
+ | No log | 0.375 | 6 | 0.7355 | 0.3354 | 0.7355 |
+ | No log | 0.5 | 8 | 0.8932 | 0.3820 | 0.8932 |
+ | No log | 0.625 | 10 | 0.5839 | 0.4676 | 0.5839 |
+ | No log | 0.75 | 12 | 0.4634 | 0.5294 | 0.4634 |
+ | No log | 0.875 | 14 | 0.4578 | 0.5335 | 0.4578 |
+ | No log | 1.0 | 16 | 0.5111 | 0.5160 | 0.5111 |
+ | No log | 1.125 | 18 | 0.4822 | 0.6507 | 0.4822 |
+ | No log | 1.25 | 20 | 0.4309 | 0.6257 | 0.4309 |
+ | No log | 1.375 | 22 | 0.4637 | 0.6422 | 0.4637 |
+ | No log | 1.5 | 24 | 0.5888 | 0.7264 | 0.5888 |
+ | No log | 1.625 | 26 | 0.5751 | 0.7173 | 0.5751 |
+ | No log | 1.75 | 28 | 0.4309 | 0.6445 | 0.4309 |
+ | No log | 1.875 | 30 | 0.3977 | 0.6113 | 0.3977 |
+ | No log | 2.0 | 32 | 0.4095 | 0.6330 | 0.4095 |
+ | No log | 2.125 | 34 | 0.4861 | 0.7147 | 0.4861 |
+ | No log | 2.25 | 36 | 0.5155 | 0.7516 | 0.5155 |
+ | No log | 2.375 | 38 | 0.4703 | 0.7321 | 0.4703 |
+ | No log | 2.5 | 40 | 0.3861 | 0.6978 | 0.3861 |
+ | No log | 2.625 | 42 | 0.3964 | 0.7189 | 0.3964 |
+ | No log | 2.75 | 44 | 0.5105 | 0.7660 | 0.5105 |
+ | No log | 2.875 | 46 | 0.5630 | 0.7439 | 0.5630 |
+ | No log | 3.0 | 48 | 0.4666 | 0.7758 | 0.4666 |
+ | No log | 3.125 | 50 | 0.4033 | 0.7314 | 0.4033 |
+ | No log | 3.25 | 52 | 0.3886 | 0.7225 | 0.3886 |
+ | No log | 3.375 | 54 | 0.4264 | 0.7369 | 0.4264 |
+ | No log | 3.5 | 56 | 0.4681 | 0.7538 | 0.4681 |
+ | No log | 3.625 | 58 | 0.4255 | 0.7357 | 0.4255 |
+ | No log | 3.75 | 60 | 0.3784 | 0.7381 | 0.3784 |
+ | No log | 3.875 | 62 | 0.3835 | 0.7261 | 0.3835 |
+ | No log | 4.0 | 64 | 0.3863 | 0.7091 | 0.3863 |
+ | No log | 4.125 | 66 | 0.3964 | 0.7022 | 0.3964 |
+ | No log | 4.25 | 68 | 0.4674 | 0.7519 | 0.4674 |
+ | No log | 4.375 | 70 | 0.5670 | 0.7310 | 0.5670 |
+ | No log | 4.5 | 72 | 0.5082 | 0.7265 | 0.5082 |
+ | No log | 4.625 | 74 | 0.3989 | 0.7387 | 0.3989 |
+ | No log | 4.75 | 76 | 0.3568 | 0.7218 | 0.3568 |
+ | No log | 4.875 | 78 | 0.3670 | 0.7343 | 0.3670 |
+ | No log | 5.0 | 80 | 0.4147 | 0.7453 | 0.4147 |
+ | No log | 5.125 | 82 | 0.4613 | 0.7583 | 0.4613 |
+ | No log | 5.25 | 84 | 0.4365 | 0.7493 | 0.4365 |
+ | No log | 5.375 | 86 | 0.3787 | 0.7383 | 0.3787 |
+ | No log | 5.5 | 88 | 0.3637 | 0.7327 | 0.3637 |
+ | No log | 5.625 | 90 | 0.3896 | 0.7461 | 0.3896 |
+ | No log | 5.75 | 92 | 0.4827 | 0.7585 | 0.4827 |
+ | No log | 5.875 | 94 | 0.5207 | 0.7560 | 0.5207 |
+ | No log | 6.0 | 96 | 0.4771 | 0.7622 | 0.4771 |
+ | No log | 6.125 | 98 | 0.4131 | 0.7595 | 0.4131 |
+ | No log | 6.25 | 100 | 0.3861 | 0.7447 | 0.3861 |
+ | No log | 6.375 | 102 | 0.3770 | 0.7473 | 0.3770 |
+ | No log | 6.5 | 104 | 0.4030 | 0.7421 | 0.4030 |
+ | No log | 6.625 | 106 | 0.4334 | 0.7447 | 0.4334 |
+ | No log | 6.75 | 108 | 0.4677 | 0.7616 | 0.4677 |
+ | No log | 6.875 | 110 | 0.4931 | 0.7670 | 0.4931 |
+ | No log | 7.0 | 112 | 0.4703 | 0.7622 | 0.4703 |
+ | No log | 7.125 | 114 | 0.4736 | 0.7622 | 0.4736 |
+ | No log | 7.25 | 116 | 0.4565 | 0.7580 | 0.4565 |
+ | No log | 7.375 | 118 | 0.4114 | 0.7521 | 0.4114 |
+ | No log | 7.5 | 120 | 0.3925 | 0.7534 | 0.3925 |
+ | No log | 7.625 | 122 | 0.3937 | 0.7441 | 0.3937 |
+ | No log | 7.75 | 124 | 0.3906 | 0.7441 | 0.3906 |
+ | No log | 7.875 | 126 | 0.3958 | 0.7488 | 0.3958 |
+ | No log | 8.0 | 128 | 0.4005 | 0.7481 | 0.4005 |
+ | No log | 8.125 | 130 | 0.4095 | 0.7399 | 0.4095 |
+ | No log | 8.25 | 132 | 0.4044 | 0.7426 | 0.4044 |
+ | No log | 8.375 | 134 | 0.3885 | 0.7487 | 0.3885 |
+ | No log | 8.5 | 136 | 0.3727 | 0.7455 | 0.3727 |
+ | No log | 8.625 | 138 | 0.3751 | 0.7428 | 0.3751 |
+ | No log | 8.75 | 140 | 0.3851 | 0.7477 | 0.3851 |
+ | No log | 8.875 | 142 | 0.4024 | 0.7472 | 0.4024 |
+ | No log | 9.0 | 144 | 0.4118 | 0.7505 | 0.4118 |
+ | No log | 9.125 | 146 | 0.4171 | 0.7505 | 0.4171 |
+ | No log | 9.25 | 148 | 0.4179 | 0.7495 | 0.4179 |
+ | No log | 9.375 | 150 | 0.4096 | 0.7483 | 0.4096 |
+ | No log | 9.5 | 152 | 0.4015 | 0.7555 | 0.4015 |
+ | No log | 9.625 | 154 | 0.3949 | 0.7508 | 0.3949 |
+ | No log | 9.75 | 156 | 0.3905 | 0.7487 | 0.3905 |
+ | No log | 9.875 | 158 | 0.3894 | 0.7487 | 0.3894 |
+ | No log | 10.0 | 160 | 0.3891 | 0.7487 | 0.3891 |
+
+
+ ### Framework versions
+
+ - Transformers 4.44.0
+ - PyTorch 2.4.0
+ - Datasets 2.21.0
+ - Tokenizers 0.19.1
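
The table above reports quadratic weighted kappa (Qwk) and mean squared error (Mse) for a regression objective. Below is a minimal Python sketch of a metric function and `TrainingArguments` consistent with the hyperparameter list; the QWK rounding scheme, the `output_dir`, and the use of scikit-learn are assumptions, not the author's published training code.

```python
# A sketch only: metric names and hyperparameters are taken from the card above;
# everything else (rounding scheme, output_dir) is an assumption.
import numpy as np
from sklearn.metrics import cohen_kappa_score, mean_squared_error
from transformers import TrainingArguments


def compute_metrics(eval_pred):
    """Qwk and Mse as in the results table above."""
    predictions, labels = eval_pred
    predictions = predictions.squeeze(-1)  # regression head: one score per example
    mse = mean_squared_error(labels, predictions)
    # Cohen's kappa needs discrete categories, so round the continuous
    # outputs first (an assumed scheme; the card does not say how Qwk is binned).
    qwk = cohen_kappa_score(
        np.rint(labels).astype(int),
        np.rint(predictions).astype(int),
        weights="quadratic",
    )
    return {"qwk": qwk, "mse": mse}


# Exactly the listed hyperparameters; betas=(0.9, 0.999) and epsilon=1e-08
# are the transformers defaults, so they are left implicit.
training_args = TrainingArguments(
    output_dir="arabert_cross_development_task1_fold4",  # assumed
    learning_rate=2e-05,
    per_device_train_batch_size=64,
    per_device_eval_batch_size=64,
    seed=42,
    lr_scheduler_type="linear",
    num_train_epochs=10,
)
```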
config.json ADDED
@@ -0,0 +1,32 @@
+ {
+   "_name_or_path": "aubmindlab/bert-base-arabertv02",
+   "architectures": [
+     "BertForSequenceClassification"
+   ],
+   "attention_probs_dropout_prob": 0.1,
+   "classifier_dropout": null,
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.1,
+   "hidden_size": 768,
+   "id2label": {
+     "0": "LABEL_0"
+   },
+   "initializer_range": 0.02,
+   "intermediate_size": 3072,
+   "label2id": {
+     "LABEL_0": 0
+   },
+   "layer_norm_eps": 1e-12,
+   "max_position_embeddings": 512,
+   "model_type": "bert",
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "pad_token_id": 0,
+   "position_embedding_type": "absolute",
+   "problem_type": "regression",
+   "torch_dtype": "float32",
+   "transformers_version": "4.44.0",
+   "type_vocab_size": 2,
+   "use_cache": true,
+   "vocab_size": 64000
+ }
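
Per this config, the checkpoint is a `BertForSequenceClassification` model with a single output and `problem_type: "regression"`, so inference yields one continuous score per input rather than class probabilities. A minimal usage sketch follows; the repo id is a guess assembled from the committer name and model name above.

```python
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

repo_id = "salbatarni/arabert_cross_development_task1_fold4"  # assumed repo id
tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForSequenceClassification.from_pretrained(repo_id)

inputs = tokenizer("نص تجريبي للتقييم", return_tensors="pt")  # any Arabic text
with torch.no_grad():
    score = model(**inputs).logits.squeeze().item()  # single regression score
print(score)
```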
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:da091ec6a95e09d440b0cee1844b136eaf333e4cc0fd68fb58b471081cffee59
+ size 540799996
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4bcf25d6229cebb510b53eff92b75907d13e37c22d3fe116e90e6a8a79e2f46b
+ size 5240
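
`training_args.bin` is the pickled `TrainingArguments` object that the `Trainer` saves next to the checkpoint. A minimal sketch for inspecting it; `weights_only=False` mirrors how transformers itself reloads this file, and unpickling should only be done on files from a trusted source.

```python
import torch

# The file is a pickled TrainingArguments object, not a tensor checkpoint,
# so weights_only=False is needed; only unpickle trusted files.
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.num_train_epochs, args.seed)
```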