Model save
- README.md +68 -37
- config.json +9 -9
- model.safetensors +1 -1
- preprocessor_config.json +21 -8
- runs/Apr18_13-48-02_895abc48f543/events.out.tfevents.1713448213.895abc48f543.34.0 +3 -0
- training_args.bin +2 -2
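The listed files can be fetched individually from the Hub once this commit is pushed. A minimal sketch with `huggingface_hub`; the repo id below is a placeholder, since the excerpt does not name the repository:

```python
from huggingface_hub import hf_hub_download

# Placeholder repo id; substitute the actual namespace/name of this model repository.
REPO_ID = "your-username/vit-finetuned-imagefolder"

# Fetch the files touched by this commit. Large binaries (model.safetensors,
# training_args.bin, the TensorBoard event file) are stored via Git LFS and
# hf_hub_download resolves them to the real payload, not the pointer.
for filename in ["config.json", "preprocessor_config.json", "model.safetensors", "training_args.bin"]:
    local_path = hf_hub_download(repo_id=REPO_ID, filename=filename)
    print(filename, "->", local_path)
```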
README.md
CHANGED
@@ -22,7 +22,7 @@ model-index:
     metrics:
     - name: Accuracy
       type: accuracy
-      value: 0.
+      value: 0.7619047619047619
 ---
 
 <!-- This model card has been generated automatically according to the information the Trainer had access to. You
@@ -32,8 +32,8 @@ should probably proofread and complete it, then remove this comment. -->
 
 This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset.
 It achieves the following results on the evaluation set:
-- Loss:
-- Accuracy: 0.
+- Loss: 1.2147
+- Accuracy: 0.7619
 
 ## Model description
 
@@ -58,49 +58,80 @@ The following hyperparameters were used during training:
 - seed: 42
 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
 - lr_scheduler_type: linear
-- num_epochs:
+- num_epochs: 10
 - mixed_precision_training: Native AMP
 
 ### Training results
 
 | Training Loss | Epoch | Step | Validation Loss | Accuracy |
 |:-------------:|:-----:|:----:|:---------------:|:--------:|
-|
-|
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
+| 0.1624 | 0.16 | 100 | 1.0534 | 0.7638 |
+| 0.2926 | 0.32 | 200 | 1.3484 | 0.6867 |
+| 0.159 | 0.48 | 300 | 0.9484 | 0.7724 |
+| 0.2145 | 0.64 | 400 | 1.0014 | 0.7476 |
+| 0.1889 | 0.8 | 500 | 1.0321 | 0.7457 |
+| 0.3064 | 0.96 | 600 | 1.0795 | 0.7314 |
+| 0.2195 | 1.11 | 700 | 0.9886 | 0.7629 |
+| 0.2982 | 1.27 | 800 | 1.0292 | 0.7590 |
+| 0.2477 | 1.43 | 900 | 1.2391 | 0.7248 |
+| 0.3076 | 1.59 | 1000 | 1.1326 | 0.7324 |
+| 0.1863 | 1.75 | 1100 | 1.2596 | 0.7048 |
+| 0.2577 | 1.91 | 1200 | 1.0649 | 0.7610 |
+| 0.1491 | 2.07 | 1300 | 1.1044 | 0.7562 |
+| 0.2635 | 2.23 | 1400 | 1.1965 | 0.7448 |
+| 0.2597 | 2.39 | 1500 | 1.2241 | 0.7429 |
+| 0.2468 | 2.55 | 1600 | 1.1452 | 0.7390 |
+| 0.216 | 2.71 | 1700 | 1.2419 | 0.7276 |
+| 0.1971 | 2.87 | 1800 | 1.1883 | 0.7362 |
+| 0.2071 | 3.03 | 1900 | 1.4659 | 0.6952 |
+| 0.1535 | 3.18 | 2000 | 1.0239 | 0.7724 |
+| 0.1842 | 3.34 | 2100 | 1.1967 | 0.7390 |
+| 0.2087 | 3.5 | 2200 | 1.1403 | 0.7467 |
+| 0.1658 | 3.66 | 2300 | 1.2901 | 0.7343 |
+| 0.1159 | 3.82 | 2400 | 1.1826 | 0.7438 |
+| 0.1498 | 3.98 | 2500 | 1.2627 | 0.7419 |
+| 0.135 | 4.14 | 2600 | 1.1383 | 0.76 |
+| 0.1492 | 4.3 | 2700 | 1.2310 | 0.7343 |
+| 0.0982 | 4.46 | 2800 | 1.4144 | 0.7105 |
+| 0.1256 | 4.62 | 2900 | 1.3513 | 0.7171 |
+| 0.1544 | 4.78 | 3000 | 1.4280 | 0.7019 |
+| 0.0858 | 4.94 | 3100 | 1.2231 | 0.7429 |
+| 0.1049 | 5.1 | 3200 | 1.2775 | 0.7352 |
+| 0.1361 | 5.25 | 3300 | 1.2840 | 0.7429 |
+| 0.1505 | 5.41 | 3400 | 1.3373 | 0.7390 |
+| 0.1244 | 5.57 | 3500 | 1.2959 | 0.7438 |
+| 0.1114 | 5.73 | 3600 | 1.3181 | 0.7381 |
+| 0.0851 | 5.89 | 3700 | 1.3288 | 0.7457 |
+| 0.0799 | 6.05 | 3800 | 1.1859 | 0.76 |
+| 0.1331 | 6.21 | 3900 | 1.2544 | 0.7371 |
+| 0.121 | 6.37 | 4000 | 1.2186 | 0.7533 |
+| 0.1276 | 6.53 | 4100 | 1.2964 | 0.7324 |
+| 0.1194 | 6.69 | 4200 | 1.1907 | 0.7590 |
+| 0.1649 | 6.85 | 4300 | 1.4679 | 0.7105 |
+| 0.0558 | 7.01 | 4400 | 1.2028 | 0.7533 |
+| 0.0687 | 7.17 | 4500 | 1.3242 | 0.7381 |
+| 0.1419 | 7.32 | 4600 | 1.2328 | 0.76 |
+| 0.0901 | 7.48 | 4700 | 1.1861 | 0.7676 |
+| 0.1181 | 7.64 | 4800 | 1.4031 | 0.7352 |
+| 0.1272 | 7.8 | 4900 | 1.3608 | 0.7438 |
+| 0.0979 | 7.96 | 5000 | 1.3098 | 0.7495 |
+| 0.0805 | 8.12 | 5100 | 1.2445 | 0.7533 |
+| 0.0354 | 8.28 | 5200 | 1.2345 | 0.7581 |
+| 0.0499 | 8.44 | 5300 | 1.1776 | 0.7571 |
+| 0.1046 | 8.6 | 5400 | 1.1939 | 0.76 |
+| 0.0912 | 8.76 | 5500 | 1.2373 | 0.7486 |
+| 0.0589 | 8.92 | 5600 | 1.2165 | 0.7552 |
+| 0.0829 | 9.08 | 5700 | 1.2684 | 0.7505 |
+| 0.0897 | 9.24 | 5800 | 1.2467 | 0.7552 |
+| 0.1114 | 9.39 | 5900 | 1.2303 | 0.7571 |
+| 0.0712 | 9.55 | 6000 | 1.1997 | 0.7638 |
+| 0.0621 | 9.71 | 6100 | 1.2094 | 0.7629 |
+| 0.037 | 9.87 | 6200 | 1.2147 | 0.7619 |
 
 
 ### Framework versions
 
-- Transformers 4.
-- Pytorch 2.2
+- Transformers 4.39.3
+- Pytorch 2.1.2
 - Datasets 2.18.0
 - Tokenizers 0.15.2
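The hyperparameters recorded in the updated card map directly onto `TrainingArguments`. Below is a minimal sketch of that mapping; only the seed, Adam betas/epsilon, scheduler type, epoch count, and AMP flag come from the card, while the output directory, eval cadence, and logging cadence are assumptions for illustration (the results table logs an evaluation every 100 steps, which motivates the assumed `eval_steps`):

```python
from transformers import TrainingArguments, Trainer

# Values marked "from card" appear in the README diff above; everything else is assumed.
training_args = TrainingArguments(
    output_dir="vit-finetuned-imagefolder",  # assumed name
    num_train_epochs=10,                     # from card
    seed=42,                                 # from card
    lr_scheduler_type="linear",              # from card
    adam_beta1=0.9,                          # from card (Adam betas)
    adam_beta2=0.999,                        # from card
    adam_epsilon=1e-8,                       # from card
    fp16=True,                               # "Native AMP" in the card
    evaluation_strategy="steps",             # assumed from the 100-step eval table
    eval_steps=100,                          # assumed
    logging_steps=100,                       # assumed
)

# trainer = Trainer(model=model, args=training_args,
#                   train_dataset=train_ds, eval_dataset=eval_ds, ...)
```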
config.json
CHANGED
@@ -11,11 +11,11 @@
   "id2label": {
     "0": "\u0995\u09a5\u09be_\u09ac\u09b2\u09be",
     "1": "\u0995\u09ae\u09cd\u09aa\u09bf\u0989\u099f\u09be\u09b0_\u09ac\u09cd\u09af\u09ac\u09b9\u09be\u09b0_\u0995\u09b0\u09be",
-    "2": "\u0996\u09be\u0993\
+    "2": "\u0996\u09be\u0993\u09df\u09be",
     "3": "\u0996\u09c7\u09b2\u09be_\u0995\u09b0\u09be",
-    "4": "\u0998\u09c1\u09ae\u09be\u09a8\
-    "5": "\u09aa\
-    "6": "\u09aa\
+    "4": "\u0998\u09c1\u09ae\u09be\u09a8\u09cb",
+    "5": "\u09aa\u09be\u09a8_\u0995\u09b0\u09be",
+    "6": "\u09aa\u09dc\u09be",
     "7": "\u09b0\u09be\u09a8\u09cd\u09a8\u09be_\u0995\u09b0\u09be",
     "8": "\u09b2\u09c7\u0996\u09be",
     "9": "\u09b9\u09be\u0981\u099f\u09be"
@@ -26,11 +26,11 @@
   "label2id": {
     "\u0995\u09a5\u09be_\u09ac\u09b2\u09be": "0",
     "\u0995\u09ae\u09cd\u09aa\u09bf\u0989\u099f\u09be\u09b0_\u09ac\u09cd\u09af\u09ac\u09b9\u09be\u09b0_\u0995\u09b0\u09be": "1",
-    "\u0996\u09be\u0993\
+    "\u0996\u09be\u0993\u09df\u09be": "2",
     "\u0996\u09c7\u09b2\u09be_\u0995\u09b0\u09be": "3",
-    "\u0998\u09c1\u09ae\u09be\u09a8\
-    "\u09aa\
-    "\u09aa\
+    "\u0998\u09c1\u09ae\u09be\u09a8\u09cb": "4",
+    "\u09aa\u09be\u09a8_\u0995\u09b0\u09be": "5",
+    "\u09aa\u09dc\u09be": "6",
     "\u09b0\u09be\u09a8\u09cd\u09a8\u09be_\u0995\u09b0\u09be": "7",
     "\u09b2\u09c7\u0996\u09be": "8",
     "\u09b9\u09be\u0981\u099f\u09be": "9"
@@ -44,5 +44,5 @@
   "problem_type": "single_label_classification",
   "qkv_bias": true,
   "torch_dtype": "float32",
-  "transformers_version": "4.
+  "transformers_version": "4.39.3"
 }
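The corrected `id2label` entries are what the model uses to turn a predicted class index back into one of the ten Bengali activity labels. A minimal inference sketch, assuming a local copy of this repository and a hypothetical input image path:

```python
import torch
from PIL import Image
from transformers import AutoImageProcessor, AutoModelForImageClassification

checkpoint = "./"  # assumed: local clone of this repository (or its Hub repo id)
processor = AutoImageProcessor.from_pretrained(checkpoint)
model = AutoModelForImageClassification.from_pretrained(checkpoint)

image = Image.open("example.jpg")  # assumed input image
inputs = processor(images=image, return_tensors="pt")

with torch.no_grad():
    logits = model(**inputs).logits

predicted_id = logits.argmax(-1).item()
# id2label from config.json maps the class index to its Bengali label,
# e.g. index 9 -> "\u09b9\u09be\u0981\u099f\u09be" (হাঁটা).
print(model.config.id2label[predicted_id])
```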
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:01e55b79737132a953a13d3eaefeae96815fc4474b69ff4d319c0aafbaa5d922
 size 343248584
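As with all large files on the Hub, `model.safetensors` is tracked by Git LFS, so the committed blob is only a pointer whose `oid` is the SHA-256 of the actual weights and whose `size` is their byte count. A small sketch for checking a downloaded copy against the pointer above (the local path is an assumption):

```python
import hashlib
import os

EXPECTED_OID = "01e55b79737132a953a13d3eaefeae96815fc4474b69ff4d319c0aafbaa5d922"  # from the pointer
EXPECTED_SIZE = 343248584  # bytes, from the pointer

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file in 1 MiB chunks so large weights fit in constant memory."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

path = "model.safetensors"  # assumed: file already pulled via git lfs or hf_hub_download
assert os.path.getsize(path) == EXPECTED_SIZE, "size does not match the LFS pointer"
assert sha256_of(path) == EXPECTED_OID, "weights do not match the LFS pointer"
```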
preprocessor_config.json
CHANGED
@@ -1,20 +1,33 @@
 {
-  "
+  "_valid_processor_keys": [
+    "images",
+    "do_resize",
+    "size",
+    "resample",
+    "do_rescale",
+    "rescale_factor",
+    "do_normalize",
+    "image_mean",
+    "image_std",
+    "return_tensors",
+    "data_format",
+    "input_data_format"
+  ],
   "do_normalize": true,
   "do_rescale": true,
   "do_resize": true,
   "image_mean": [
-    0.
-    0.
-    0.
+    0.5,
+    0.5,
+    0.5
   ],
   "image_processor_type": "ViTFeatureExtractor",
   "image_std": [
-    0.
-    0.
-    0.
+    0.5,
+    0.5,
+    0.5
   ],
-  "resample":
+  "resample": 2,
   "rescale_factor": 0.00392156862745098,
   "size": {
     "height": 224,
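These preprocessing values describe the standard ViT pipeline: resize to 224 pixels with `resample = 2` (PIL bilinear), rescale pixel values by 1/255, then normalize each channel with mean and std 0.5. A minimal sketch reconstructing the same processor by hand with `ViTImageProcessor` (the current name for `ViTFeatureExtractor`); the width of 224 is an assumption, since the diff excerpt cuts off after `"height": 224`:

```python
from transformers import ViTImageProcessor

processor = ViTImageProcessor(
    do_resize=True,
    size={"height": 224, "width": 224},  # width assumed; the excerpt is truncated here
    resample=2,                          # PIL.Image.Resampling.BILINEAR
    do_rescale=True,
    rescale_factor=1 / 255,              # 0.00392156862745098 in the config
    do_normalize=True,
    image_mean=[0.5, 0.5, 0.5],
    image_std=[0.5, 0.5, 0.5],
)

# In practice the equivalent object comes straight from the saved config:
# processor = ViTImageProcessor.from_pretrained("path/to/this/repo")
```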
runs/Apr18_13-48-02_895abc48f543/events.out.tfevents.1713448213.895abc48f543.34.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:959290847a257dda4548edbc595ae781e6841f72c905bceaddd386edb7d460f9
+size 142302
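The added `events.out.tfevents…` file is the TensorBoard log the `Trainer` writes under `runs/`. A minimal sketch for reading its scalars offline with TensorBoard's `EventAccumulator`; the tag names below are assumptions based on the usual `Trainer` logging convention and should be checked against `Tags()`:

```python
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# Point at the run directory added in this commit.
acc = EventAccumulator("runs/Apr18_13-48-02_895abc48f543")
acc.Reload()

# List the scalar tags actually present in the log.
print(acc.Tags()["scalars"])

# Each scalar event carries (wall_time, step, value).
for event in acc.Scalars("eval/accuracy"):  # assumed tag name
    print(event.step, event.value)
```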
training_args.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:7b4a230fc8ad6f0d0dd95b728ff274dcf43b381ca09877b314b4325a07c81c50
+size 4920
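`training_args.bin` is the pickled `TrainingArguments` object saved alongside the model. A minimal sketch for inspecting it; because it is a full pickle rather than a tensor checkpoint, load it only from a trusted source, and pass `weights_only=False` on newer PyTorch releases:

```python
import torch

# training_args.bin is a pickled transformers.TrainingArguments instance.
args = torch.load("training_args.bin", weights_only=False)

print(type(args).__name__)     # TrainingArguments
print(args.num_train_epochs)   # 10, as in the card
print(args.lr_scheduler_type)  # linear scheduler, as in the card
print(args.seed)               # 42
```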