Model save
- README.md +29 -97
- config.json +0 -8
- model.safetensors +1 -1
- runs/Oct02_13-17-31_a0cc4a8e576f/events.out.tfevents.1727875288.a0cc4a8e576f.1530.0 +3 -0
- runs/Oct02_13-24-12_a0cc4a8e576f/events.out.tfevents.1727875462.a0cc4a8e576f.1530.1 +3 -0
- runs/Oct02_13-36-07_a0cc4a8e576f/events.out.tfevents.1727876177.a0cc4a8e576f.1530.2 +3 -0
- training_args.bin +1 -1
README.md
CHANGED
@@ -3,13 +3,27 @@ library_name: transformers
 license: apache-2.0
 base_model: google/vit-base-patch16-224-in21k
 tags:
-- image-classification
 - generated_from_trainer
+datasets:
+- imagefolder
 metrics:
 - accuracy
 model-index:
 - name: finetuned-fake-food
-  results:
+  results:
+  - task:
+      name: Image Classification
+      type: image-classification
+    dataset:
+      name: imagefolder
+      type: imagefolder
+      config: default
+      split: train
+      args: default
+    metrics:
+    - name: Accuracy
+      type: accuracy
+      value: 0.8234200743494424
 ---
 
 <!-- This model card has been generated automatically according to the information the Trainer had access to. You
@@ -17,10 +31,10 @@ should probably proofread and complete it, then remove this comment. -->
 
 # finetuned-fake-food
 
-This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the
+This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset.
 It achieves the following results on the evaluation set:
-- Loss: 0.
-- Accuracy: 0.
+- Loss: 0.3684
+- Accuracy: 0.8234
 
 ## Model description
 
@@ -40,112 +54,30 @@ More information needed
 
 The following hyperparameters were used during training:
 - learning_rate: 0.0002
-- train_batch_size:
+- train_batch_size: 16
 - eval_batch_size: 8
 - seed: 42
 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
 - lr_scheduler_type: linear
--
+- num_epochs: 4
 - mixed_precision_training: Native AMP
 
 ### Training results
 
 | Training Loss | Epoch | Step | Validation Loss | Accuracy |
 |:-------------:|:------:|:----:|:---------------:|:--------:|
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.
-| 0.5484 | 0.4042 | 800 | 0.3940 | 0.8264 |
-| 0.6263 | 0.4548 | 900 | 0.6219 | 0.7118 |
-| 0.5453 | 0.5053 | 1000 | 0.4548 | 0.7888 |
-| 0.5431 | 0.5558 | 1100 | 0.4210 | 0.8084 |
-| 0.5678 | 0.6064 | 1200 | 0.4946 | 0.8038 |
-| 0.3266 | 0.6569 | 1300 | 0.4538 | 0.8264 |
-| 0.4225 | 0.7074 | 1400 | 0.4366 | 0.8088 |
-| 0.32 | 0.7580 | 1500 | 0.5586 | 0.7884 |
-| 0.473 | 0.8085 | 1600 | 0.4805 | 0.7974 |
-| 0.4557 | 0.8590 | 1700 | 0.3707 | 0.8371 |
-| 0.408 | 0.9096 | 1800 | 0.4968 | 0.7999 |
-| 0.4979 | 0.9601 | 1900 | 0.4432 | 0.7898 |
-| 0.4115 | 1.0106 | 2000 | 0.3722 | 0.8392 |
-| 0.3421 | 1.0611 | 2100 | 0.5450 | 0.7401 |
-| 0.5165 | 1.1117 | 2200 | 0.4611 | 0.7988 |
-| 0.4066 | 1.1622 | 2300 | 0.3226 | 0.8725 |
-| 0.5085 | 1.2127 | 2400 | 0.5858 | 0.7762 |
-| 0.4814 | 1.2633 | 2500 | 0.3981 | 0.7766 |
-| 0.4554 | 1.3138 | 2600 | 0.5076 | 0.7816 |
-| 0.2816 | 1.3643 | 2700 | 0.4732 | 0.8127 |
-| 0.2516 | 1.4149 | 2800 | 0.4315 | 0.8074 |
-| 0.2903 | 1.4654 | 2900 | 0.3845 | 0.8557 |
-| 0.3493 | 1.5159 | 3000 | 0.4921 | 0.7977 |
-| 0.4251 | 1.5664 | 3100 | 0.3855 | 0.8231 |
-| 0.3356 | 1.6170 | 3200 | 0.4012 | 0.8328 |
-| 0.3597 | 1.6675 | 3300 | 0.3308 | 0.8496 |
-| 0.257 | 1.7180 | 3400 | 0.4104 | 0.8138 |
-| 0.3709 | 1.7686 | 3500 | 0.2769 | 0.8879 |
-| 0.3393 | 1.8191 | 3600 | 0.3412 | 0.8643 |
-| 0.4151 | 1.8696 | 3700 | 0.3078 | 0.8747 |
-| 0.3043 | 1.9202 | 3800 | 0.3424 | 0.8650 |
-| 0.3302 | 1.9707 | 3900 | 0.3513 | 0.8335 |
-| 0.4033 | 2.0212 | 4000 | 0.3371 | 0.8511 |
-| 0.3386 | 2.0718 | 4100 | 0.3402 | 0.8396 |
-| 0.3661 | 2.1223 | 4200 | 0.3277 | 0.8561 |
-| 0.2914 | 2.1728 | 4300 | 0.3065 | 0.8650 |
-| 0.4444 | 2.2233 | 4400 | 0.3207 | 0.8493 |
-| 0.2922 | 2.2739 | 4500 | 0.2968 | 0.8686 |
-| 0.3464 | 2.3244 | 4600 | 0.4151 | 0.8070 |
-| 0.2684 | 2.3749 | 4700 | 0.3810 | 0.8385 |
-| 0.3779 | 2.4255 | 4800 | 0.3368 | 0.8514 |
-| 0.4462 | 2.4760 | 4900 | 0.2677 | 0.8965 |
-| 0.3766 | 2.5265 | 5000 | 0.3732 | 0.8439 |
-| 0.4971 | 2.5771 | 5100 | 0.3266 | 0.8618 |
-| 0.3795 | 2.6276 | 5200 | 0.3380 | 0.8607 |
-| 0.4205 | 2.6781 | 5300 | 0.3436 | 0.8618 |
-| 0.3652 | 2.7287 | 5400 | 0.3483 | 0.8518 |
-| 0.3999 | 2.7792 | 5500 | 0.2603 | 0.8908 |
-| 0.2909 | 2.8297 | 5600 | 0.3080 | 0.8693 |
-| 0.3703 | 2.8802 | 5700 | 0.2950 | 0.8808 |
-| 0.4048 | 2.9308 | 5800 | 0.3191 | 0.8500 |
-| 0.3333 | 2.9813 | 5900 | 0.3773 | 0.8443 |
-| 0.2917 | 3.0318 | 6000 | 0.3731 | 0.8432 |
-| 0.4204 | 3.0824 | 6100 | 0.3783 | 0.8528 |
-| 0.3832 | 3.1329 | 6200 | 0.3009 | 0.8693 |
-| 0.32 | 3.1834 | 6300 | 0.3690 | 0.8367 |
-| 0.3761 | 3.2340 | 6400 | 0.3398 | 0.8392 |
-| 0.4041 | 3.2845 | 6500 | 0.2726 | 0.8761 |
-| 0.3373 | 3.3350 | 6600 | 0.3735 | 0.8285 |
-| 0.2869 | 3.3855 | 6700 | 0.2326 | 0.8987 |
-| 0.3381 | 3.4361 | 6800 | 0.2562 | 0.8933 |
-| 0.2193 | 3.4866 | 6900 | 0.2605 | 0.8912 |
-| 0.2685 | 3.5371 | 7000 | 0.2592 | 0.8822 |
-| 0.2867 | 3.5877 | 7100 | 0.3182 | 0.8636 |
-| 0.318 | 3.6382 | 7200 | 0.2988 | 0.8743 |
-| 0.3088 | 3.6887 | 7300 | 0.2870 | 0.8768 |
-| 0.3531 | 3.7393 | 7400 | 0.2924 | 0.8697 |
-| 0.2605 | 3.7898 | 7500 | 0.2942 | 0.8704 |
-| 0.419 | 3.8403 | 7600 | 0.3634 | 0.8485 |
-| 0.264 | 3.8909 | 7700 | 0.2996 | 0.8629 |
-| 0.2349 | 3.9414 | 7800 | 0.2417 | 0.8937 |
-| 0.2726 | 3.9919 | 7900 | 0.3228 | 0.8518 |
-| 0.3398 | 4.0424 | 8000 | 0.2684 | 0.8897 |
-| 0.1933 | 4.0930 | 8100 | 0.2657 | 0.8919 |
-| 0.435 | 4.1435 | 8200 | 0.2455 | 0.8972 |
-| 0.2373 | 4.1940 | 8300 | 0.2929 | 0.8690 |
-| 0.3151 | 4.2446 | 8400 | 0.2745 | 0.8761 |
-| 0.2258 | 4.2951 | 8500 | 0.2486 | 0.8922 |
-| 0.2592 | 4.3456 | 8600 | 0.2696 | 0.8801 |
-| 0.2301 | 4.3962 | 8700 | 0.2719 | 0.8811 |
-| 0.1388 | 4.4467 | 8800 | 0.2617 | 0.8879 |
-| 0.3242 | 4.4972 | 8900 | 0.2543 | 0.8915 |
-| 0.1693 | 4.5478 | 9000 | 0.2602 | 0.8879 |
+| 0.5282 | 0.5236 | 100 | 0.6420 | 0.6506 |
+| 0.5825 | 1.0471 | 200 | 0.4441 | 0.7937 |
+| 0.5148 | 1.5707 | 300 | 0.4870 | 0.7342 |
+| 0.5144 | 2.0942 | 400 | 0.6652 | 0.6580 |
+| 0.493 | 2.6178 | 500 | 0.4075 | 0.8141 |
+| 0.4884 | 3.1414 | 600 | 0.4667 | 0.7732 |
+| 0.3899 | 3.6649 | 700 | 0.3684 | 0.8234 |
 
 
 ### Framework versions
 
 - Transformers 4.44.2
 - Pytorch 2.4.1+cu121
+- Datasets 3.0.1
 - Tokenizers 0.19.1
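As a reference for the hyperparameters listed in the updated card, the snippet below is a minimal sketch of how they could be expressed with transformers.TrainingArguments. It is an illustration only, not the training script behind this commit; output_dir and the fp16 flag are assumptions.

```python
# Sketch only: the card's hyperparameters expressed as TrainingArguments.
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="finetuned-fake-food",   # placeholder output directory
    learning_rate=2e-4,                 # learning_rate: 0.0002
    per_device_train_batch_size=16,     # train_batch_size: 16
    per_device_eval_batch_size=8,       # eval_batch_size: 8
    seed=42,
    lr_scheduler_type="linear",         # Adam betas/epsilon from the card are the Trainer defaults
    num_train_epochs=4,
    fp16=True,                          # mixed_precision_training: Native AMP
)
```

Passed to a Trainer together with the ViT checkpoint and the imagefolder dataset, these arguments would match the configuration the card records, up to the details the card leaves out.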
config.json
CHANGED
@@ -8,17 +8,9 @@
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.0,
   "hidden_size": 768,
-  "id2label": {
-    "0": 0,
-    "1": 1
-  },
   "image_size": 224,
   "initializer_range": 0.02,
   "intermediate_size": 3072,
-  "label2id": {
-    "0": "0",
-    "1": "1"
-  },
   "layer_norm_eps": 1e-12,
   "model_type": "vit",
   "num_attention_heads": 12,
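The config.json change above drops the id2label / label2id mappings for the two classes ("0" and "1"). For context, the snippet below is a minimal sketch of how such mappings are normally supplied when the classification head is created; it is illustrative and not taken from this repository.

```python
# Sketch only: supplying the two-class label mappings ("0"/"1") that this
# commit removes from config.json when instantiating the classifier.
from transformers import ViTForImageClassification

model = ViTForImageClassification.from_pretrained(
    "google/vit-base-patch16-224-in21k",
    num_labels=2,
    id2label={0: "0", 1: "1"},
    label2id={"0": 0, "1": 1},
)
```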
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:2db04dca56c64c7c9b7d5cd25ca47c182f972392b6e7557f8ec072a0f5f98aac
 size 343223968
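model.safetensors is stored as a Git LFS pointer: the repository keeps only the spec version, the SHA-256 of the payload, and its size. The snippet below is a minimal sketch of checking a downloaded copy against this pointer, assuming the weights were saved locally as model.safetensors.

```python
# Sketch only: verify a downloaded file against its Git LFS pointer
# (the "oid sha256:..." and "size ..." lines above).
import hashlib
import os

path = "model.safetensors"  # placeholder local path
expected_oid = "2db04dca56c64c7c9b7d5cd25ca47c182f972392b6e7557f8ec072a0f5f98aac"
expected_size = 343223968

digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

assert os.path.getsize(path) == expected_size, "size does not match the LFS pointer"
assert digest.hexdigest() == expected_oid, "sha256 does not match the LFS pointer"
print("downloaded file matches the LFS pointer")
```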
runs/Oct02_13-17-31_a0cc4a8e576f/events.out.tfevents.1727875288.a0cc4a8e576f.1530.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1a717b8e3b7216f5e8dc18716df8cd65d129ad858da7b9dd1498381bee3502c2
+size 8519
runs/Oct02_13-24-12_a0cc4a8e576f/events.out.tfevents.1727875462.a0cc4a8e576f.1530.1
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:911758c02544fa5f943a4043d074945e7f2113ea06d707d04f7a18e11913519b
+size 21433
runs/Oct02_13-36-07_a0cc4a8e576f/events.out.tfevents.1727876177.a0cc4a8e576f.1530.2
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6c3cb9d398412e9b5bc522b27b516f5f2ac899d0ad2985736e312ef8ea885a18
+size 23377
training_args.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:d8fb02a0a2cba10384d13aa30d74774be5892cf68173be206e8a561269e8d9a9
 size 5176