dennisjooo committed
Commit f717422
1 Parent(s): dd4b127

End of training

Files changed (2)
  1. README.md +66 -93
  2. pytorch_model.bin +1 -1
README.md CHANGED
@@ -24,13 +24,13 @@ model-index:
    metrics:
    - name: Accuracy
      type: accuracy
-     value: 0.63125
+     value: 0.64375
    - name: Precision
      type: precision
-     value: 0.6580684399341683
+     value: 0.650616883116883
    - name: F1
      type: f1
-     value: 0.6375321878900636
+     value: 0.6344950707077283
 ---
 
 <!-- This model card has been generated automatically according to the information the Trainer had access to. You
@@ -40,10 +40,10 @@ should probably proofread and complete it, then remove this comment. -->
 
 This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset.
 It achieves the following results on the evaluation set:
- - Loss: 1.1145
- - Accuracy: 0.6312
- - Precision: 0.6581
- - F1: 0.6375
+ - Loss: 1.1553
+ - Accuracy: 0.6438
+ - Precision: 0.6506
+ - F1: 0.6345
 
 ## Model description
 
@@ -75,92 +75,65 @@ The following hyperparameters were used during training:
 
 | Training Loss | Epoch | Step | Validation Loss | Accuracy | Precision | F1 |
 |:-------------:|:-----:|:----:|:---------------:|:--------:|:---------:|:------:|
- | 2.0848 | 1.0 | 10 | 2.0806 | 0.1625 | 0.1527 | 0.1483 |
- | 2.0824 | 2.0 | 20 | 2.0784 | 0.1688 | 0.1556 | 0.1538 |
- | 2.0785 | 3.0 | 30 | 2.0748 | 0.175 | 0.1612 | 0.1606 |
- | 2.0709 | 4.0 | 40 | 2.0698 | 0.1812 | 0.1684 | 0.1661 |
- | 2.067 | 5.0 | 50 | 2.0635 | 0.1812 | 0.1787 | 0.1697 |
- | 2.0554 | 6.0 | 60 | 2.0553 | 0.2 | 0.1958 | 0.1893 |
- | 2.0461 | 7.0 | 70 | 2.0438 | 0.2313 | 0.2434 | 0.2272 |
- | 2.0263 | 8.0 | 80 | 2.0260 | 0.2437 | 0.2763 | 0.2472 |
- | 1.9963 | 9.0 | 90 | 1.9959 | 0.275 | 0.3073 | 0.2780 |
- | 1.9512 | 10.0 | 100 | 1.9435 | 0.3312 | 0.3481 | 0.3307 |
- | 1.8885 | 11.0 | 110 | 1.8610 | 0.4313 | 0.4426 | 0.4138 |
- | 1.7908 | 12.0 | 120 | 1.7604 | 0.4688 | 0.4485 | 0.4243 |
- | 1.6944 | 13.0 | 130 | 1.6677 | 0.4813 | 0.4369 | 0.4349 |
- | 1.6245 | 14.0 | 140 | 1.6105 | 0.4625 | 0.4071 | 0.4124 |
- | 1.5745 | 15.0 | 150 | 1.5671 | 0.5062 | 0.4551 | 0.4690 |
- | 1.5132 | 16.0 | 160 | 1.5169 | 0.4688 | 0.4481 | 0.4201 |
- | 1.471 | 17.0 | 170 | 1.4772 | 0.4813 | 0.4203 | 0.4404 |
- | 1.4272 | 18.0 | 180 | 1.4426 | 0.4938 | 0.4453 | 0.4496 |
- | 1.3896 | 19.0 | 190 | 1.4153 | 0.4813 | 0.4409 | 0.4370 |
- | 1.3347 | 20.0 | 200 | 1.3976 | 0.5062 | 0.4694 | 0.4662 |
- | 1.3145 | 21.0 | 210 | 1.3840 | 0.4813 | 0.4459 | 0.4366 |
- | 1.3319 | 22.0 | 220 | 1.3511 | 0.5062 | 0.4867 | 0.4655 |
- | 1.2438 | 23.0 | 230 | 1.3186 | 0.5312 | 0.5804 | 0.4945 |
- | 1.2202 | 24.0 | 240 | 1.3012 | 0.5375 | 0.5342 | 0.5023 |
- | 1.1838 | 25.0 | 250 | 1.2879 | 0.5563 | 0.6162 | 0.5295 |
- | 1.1448 | 26.0 | 260 | 1.2534 | 0.5687 | 0.5631 | 0.5456 |
- | 1.113 | 27.0 | 270 | 1.2398 | 0.55 | 0.5645 | 0.5359 |
- | 1.0862 | 28.0 | 280 | 1.2357 | 0.5437 | 0.6075 | 0.5143 |
- | 1.0837 | 29.0 | 290 | 1.2095 | 0.5687 | 0.5653 | 0.5471 |
- | 1.0609 | 30.0 | 300 | 1.2095 | 0.5437 | 0.5729 | 0.5393 |
- | 1.0112 | 31.0 | 310 | 1.1859 | 0.575 | 0.5989 | 0.5490 |
- | 0.9584 | 32.0 | 320 | 1.1683 | 0.5875 | 0.6019 | 0.5777 |
- | 0.941 | 33.0 | 330 | 1.1649 | 0.5938 | 0.6083 | 0.5875 |
- | 0.904 | 34.0 | 340 | 1.1896 | 0.5875 | 0.6078 | 0.5720 |
- | 0.921 | 35.0 | 350 | 1.1662 | 0.6062 | 0.6352 | 0.5975 |
- | 0.9026 | 36.0 | 360 | 1.1441 | 0.5875 | 0.5981 | 0.5841 |
- | 0.8217 | 37.0 | 370 | 1.1602 | 0.5813 | 0.6098 | 0.5779 |
- | 0.8292 | 38.0 | 380 | 1.2140 | 0.5437 | 0.5588 | 0.5258 |
- | 0.8017 | 39.0 | 390 | 1.1545 | 0.5563 | 0.5459 | 0.5294 |
- | 0.7787 | 40.0 | 400 | 1.1358 | 0.6062 | 0.6300 | 0.5948 |
- | 0.7473 | 41.0 | 410 | 1.1285 | 0.5813 | 0.5996 | 0.5779 |
- | 0.6941 | 42.0 | 420 | 1.1311 | 0.575 | 0.5982 | 0.5757 |
- | 0.7009 | 43.0 | 430 | 1.1296 | 0.6125 | 0.6371 | 0.6076 |
- | 0.6537 | 44.0 | 440 | 1.0996 | 0.5813 | 0.5866 | 0.5684 |
- | 0.6524 | 45.0 | 450 | 1.1477 | 0.5875 | 0.6077 | 0.5813 |
- | 0.674 | 46.0 | 460 | 1.1063 | 0.6188 | 0.6322 | 0.6127 |
- | 0.5999 | 47.0 | 470 | 1.1077 | 0.6 | 0.6035 | 0.5951 |
- | 0.6194 | 48.0 | 480 | 1.1249 | 0.5813 | 0.5936 | 0.5805 |
- | 0.595 | 49.0 | 490 | 1.1331 | 0.6 | 0.5955 | 0.5876 |
- | 0.5403 | 50.0 | 500 | 1.1577 | 0.5875 | 0.6010 | 0.5781 |
- | 0.5932 | 51.0 | 510 | 1.1352 | 0.5938 | 0.6214 | 0.5851 |
- | 0.621 | 52.0 | 520 | 1.0893 | 0.6062 | 0.6044 | 0.6007 |
- | 0.5157 | 53.0 | 530 | 1.1382 | 0.6125 | 0.6173 | 0.6075 |
- | 0.5318 | 54.0 | 540 | 1.1402 | 0.6 | 0.6158 | 0.5970 |
- | 0.4757 | 55.0 | 550 | 1.1668 | 0.5938 | 0.6096 | 0.5930 |
- | 0.4826 | 56.0 | 560 | 1.1506 | 0.6062 | 0.6367 | 0.6051 |
- | 0.5058 | 57.0 | 570 | 1.1857 | 0.5875 | 0.5873 | 0.5767 |
- | 0.4791 | 58.0 | 580 | 1.1618 | 0.5813 | 0.5670 | 0.5587 |
- | 0.4322 | 59.0 | 590 | 1.2007 | 0.5625 | 0.5628 | 0.5532 |
- | 0.442 | 60.0 | 600 | 1.1862 | 0.5875 | 0.5681 | 0.5560 |
- | 0.431 | 61.0 | 610 | 1.1145 | 0.6312 | 0.6581 | 0.6375 |
- | 0.4131 | 62.0 | 620 | 1.2081 | 0.575 | 0.5912 | 0.5705 |
- | 0.3911 | 63.0 | 630 | 1.1380 | 0.6062 | 0.6043 | 0.5988 |
- | 0.4281 | 64.0 | 640 | 1.1189 | 0.6188 | 0.6157 | 0.6138 |
- | 0.385 | 65.0 | 650 | 1.2177 | 0.5625 | 0.5888 | 0.5615 |
- | 0.398 | 66.0 | 660 | 1.2204 | 0.6 | 0.6321 | 0.6008 |
- | 0.4821 | 67.0 | 670 | 1.2037 | 0.5938 | 0.6065 | 0.5804 |
- | 0.4127 | 68.0 | 680 | 1.1473 | 0.6 | 0.6193 | 0.5996 |
- | 0.4062 | 69.0 | 690 | 1.2160 | 0.5938 | 0.5950 | 0.5806 |
- | 0.3906 | 70.0 | 700 | 1.1763 | 0.5938 | 0.6421 | 0.6034 |
- | 0.352 | 71.0 | 710 | 1.2355 | 0.5687 | 0.5836 | 0.5613 |
- | 0.3801 | 72.0 | 720 | 1.1623 | 0.5813 | 0.5800 | 0.5789 |
- | 0.333 | 73.0 | 730 | 1.1770 | 0.5875 | 0.5920 | 0.5851 |
- | 0.3562 | 74.0 | 740 | 1.2140 | 0.5875 | 0.6367 | 0.5917 |
- | 0.3403 | 75.0 | 750 | 1.1679 | 0.6 | 0.6209 | 0.6044 |
- | 0.3456 | 76.0 | 760 | 1.2496 | 0.5625 | 0.5465 | 0.5409 |
- | 0.3331 | 77.0 | 770 | 1.1975 | 0.575 | 0.6042 | 0.5759 |
- | 0.3408 | 78.0 | 780 | 1.2381 | 0.575 | 0.5606 | 0.5565 |
- | 0.2964 | 79.0 | 790 | 1.1792 | 0.6 | 0.6204 | 0.6009 |
- | 0.2833 | 80.0 | 800 | 1.1840 | 0.6 | 0.6059 | 0.5933 |
- | 0.2875 | 81.0 | 810 | 1.2024 | 0.5875 | 0.5920 | 0.5841 |
- | 0.327 | 82.0 | 820 | 1.2190 | 0.5813 | 0.5799 | 0.5728 |
- | 0.3027 | 83.0 | 830 | 1.2520 | 0.5813 | 0.5682 | 0.5704 |
- | 0.2731 | 84.0 | 840 | 1.2167 | 0.5875 | 0.6021 | 0.5847 |
- | 0.2821 | 85.0 | 850 | 1.2805 | 0.575 | 0.5659 | 0.5527 |
- | 0.3192 | 86.0 | 860 | 1.2453 | 0.5625 | 0.5585 | 0.5575 |
+ | 2.0799 | 1.0 | 10 | 2.0707 | 0.1313 | 0.1740 | 0.1156 |
+ | 2.0811 | 2.0 | 20 | 2.0681 | 0.1437 | 0.1617 | 0.1245 |
+ | 2.0709 | 3.0 | 30 | 2.0640 | 0.1562 | 0.1544 | 0.1330 |
+ | 2.0701 | 4.0 | 40 | 2.0590 | 0.1688 | 0.1463 | 0.1431 |
+ | 2.0639 | 5.0 | 50 | 2.0529 | 0.1812 | 0.1676 | 0.1613 |
+ | 2.0499 | 6.0 | 60 | 2.0439 | 0.2 | 0.2050 | 0.1871 |
+ | 2.0387 | 7.0 | 70 | 2.0322 | 0.25 | 0.2679 | 0.2373 |
+ | 2.0235 | 8.0 | 80 | 2.0141 | 0.3312 | 0.3638 | 0.3331 |
+ | 1.9933 | 9.0 | 90 | 1.9883 | 0.3375 | 0.3752 | 0.3392 |
+ | 1.9573 | 10.0 | 100 | 1.9473 | 0.3563 | 0.3940 | 0.3535 |
+ | 1.912 | 11.0 | 110 | 1.8863 | 0.3875 | 0.4352 | 0.3759 |
+ | 1.8306 | 12.0 | 120 | 1.8102 | 0.3875 | 0.4062 | 0.3586 |
+ | 1.7479 | 13.0 | 130 | 1.7158 | 0.4062 | 0.4056 | 0.3689 |
+ | 1.665 | 14.0 | 140 | 1.6250 | 0.475 | 0.4543 | 0.4248 |
+ | 1.6115 | 15.0 | 150 | 1.5597 | 0.4875 | 0.4646 | 0.4414 |
+ | 1.5716 | 16.0 | 160 | 1.5112 | 0.5125 | 0.4846 | 0.4575 |
+ | 1.5062 | 17.0 | 170 | 1.4672 | 0.525 | 0.4932 | 0.4925 |
+ | 1.4655 | 18.0 | 180 | 1.4262 | 0.5312 | 0.5018 | 0.4876 |
+ | 1.413 | 19.0 | 190 | 1.3851 | 0.575 | 0.5253 | 0.5317 |
+ | 1.3758 | 20.0 | 200 | 1.3421 | 0.5625 | 0.5900 | 0.5113 |
+ | 1.317 | 21.0 | 210 | 1.3156 | 0.55 | 0.5835 | 0.4996 |
+ | 1.291 | 22.0 | 220 | 1.2712 | 0.5938 | 0.6374 | 0.5601 |
+ | 1.2369 | 23.0 | 230 | 1.2697 | 0.5563 | 0.5681 | 0.5250 |
+ | 1.2139 | 24.0 | 240 | 1.2439 | 0.5625 | 0.5733 | 0.5417 |
+ | 1.1766 | 25.0 | 250 | 1.2228 | 0.5938 | 0.6099 | 0.5735 |
+ | 1.1483 | 26.0 | 260 | 1.2464 | 0.5625 | 0.6016 | 0.5508 |
+ | 1.1344 | 27.0 | 270 | 1.1877 | 0.5875 | 0.6142 | 0.5718 |
+ | 1.0898 | 28.0 | 280 | 1.1871 | 0.6 | 0.6481 | 0.5817 |
+ | 1.0515 | 29.0 | 290 | 1.1553 | 0.6438 | 0.6506 | 0.6345 |
+ | 1.0628 | 30.0 | 300 | 1.1603 | 0.575 | 0.6209 | 0.5727 |
+ | 1.0257 | 31.0 | 310 | 1.1326 | 0.6125 | 0.6312 | 0.6109 |
+ | 1.0048 | 32.0 | 320 | 1.1450 | 0.6125 | 0.6402 | 0.6079 |
+ | 0.9646 | 33.0 | 330 | 1.1250 | 0.6062 | 0.6161 | 0.6004 |
+ | 0.9231 | 34.0 | 340 | 1.1299 | 0.6 | 0.6183 | 0.5976 |
+ | 0.8944 | 35.0 | 350 | 1.1312 | 0.5938 | 0.5996 | 0.5885 |
+ | 0.9001 | 36.0 | 360 | 1.1293 | 0.625 | 0.6358 | 0.6220 |
+ | 0.8587 | 37.0 | 370 | 1.1415 | 0.6062 | 0.6122 | 0.6037 |
+ | 0.8708 | 38.0 | 380 | 1.1171 | 0.6062 | 0.6379 | 0.5985 |
+ | 0.843 | 39.0 | 390 | 1.1220 | 0.625 | 0.6658 | 0.6229 |
+ | 0.8038 | 40.0 | 400 | 1.1144 | 0.6188 | 0.6243 | 0.6153 |
+ | 0.7815 | 41.0 | 410 | 1.1538 | 0.575 | 0.6042 | 0.5679 |
+ | 0.7289 | 42.0 | 420 | 1.1125 | 0.6062 | 0.6218 | 0.6024 |
+ | 0.7255 | 43.0 | 430 | 1.1401 | 0.6 | 0.6307 | 0.5947 |
+ | 0.7182 | 44.0 | 440 | 1.1092 | 0.6 | 0.6121 | 0.5916 |
+ | 0.6533 | 45.0 | 450 | 1.1219 | 0.625 | 0.6448 | 0.6268 |
+ | 0.6658 | 46.0 | 460 | 1.1322 | 0.6125 | 0.6272 | 0.6135 |
+ | 0.6293 | 47.0 | 470 | 1.1306 | 0.6 | 0.6075 | 0.5980 |
+ | 0.6287 | 48.0 | 480 | 1.1227 | 0.6125 | 0.6210 | 0.6099 |
+ | 0.622 | 49.0 | 490 | 1.1441 | 0.5938 | 0.6154 | 0.5940 |
+ | 0.6004 | 50.0 | 500 | 1.1119 | 0.625 | 0.6267 | 0.6206 |
+ | 0.606 | 51.0 | 510 | 1.1301 | 0.5938 | 0.6146 | 0.5925 |
+ | 0.5924 | 52.0 | 520 | 1.1552 | 0.6062 | 0.6135 | 0.6022 |
+ | 0.5639 | 53.0 | 530 | 1.1956 | 0.5938 | 0.6411 | 0.5945 |
+ | 0.5434 | 54.0 | 540 | 1.1843 | 0.5813 | 0.5925 | 0.5765 |
+ | 0.5479 | 55.0 | 550 | 1.1529 | 0.6125 | 0.6247 | 0.6142 |
+ | 0.5227 | 56.0 | 560 | 1.1730 | 0.5687 | 0.5724 | 0.5628 |
+ | 0.5402 | 57.0 | 570 | 1.1919 | 0.6 | 0.6075 | 0.5954 |
+ | 0.4971 | 58.0 | 580 | 1.1761 | 0.5938 | 0.5984 | 0.5925 |
+ | 0.5004 | 59.0 | 590 | 1.2305 | 0.5687 | 0.5957 | 0.5645 |
 
 
 ### Framework versions
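For reference, the card above describes a google/vit-base-patch16-224-in21k classifier fine-tuned on an imagefolder dataset. A minimal inference sketch, assuming the standard transformers image-classification API; the Hub repo id below is a placeholder, since the exact model id is not shown in this commit:

```python
# Minimal inference sketch for a fine-tuned ViT image classifier.
# "dennisjooo/<model-id>" is a hypothetical placeholder; substitute the real repo id.
import torch
from PIL import Image
from transformers import AutoImageProcessor, AutoModelForImageClassification

repo_id = "dennisjooo/<model-id>"  # placeholder, not the actual repo id
processor = AutoImageProcessor.from_pretrained(repo_id)
model = AutoModelForImageClassification.from_pretrained(repo_id)

image = Image.open("example.jpg")  # any RGB input image
inputs = processor(images=image, return_tensors="pt")

with torch.no_grad():
    logits = model(**inputs).logits

predicted = logits.argmax(-1).item()
print(model.config.id2label[predicted])
```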
 
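The Accuracy, Precision, and F1 values updated in the model-index and in the per-epoch table are the kind of numbers a `compute_metrics` callback reports during `Trainer` evaluation. A rough sketch of such a callback, assuming scikit-learn and macro averaging (the card does not state which averaging was actually used):

```python
# Sketch of a Trainer-style compute_metrics callback producing
# accuracy / precision / F1 like the values in the table above.
# The "macro" averaging mode is an assumption, not taken from the card.
import numpy as np
from sklearn.metrics import accuracy_score, f1_score, precision_score


def compute_metrics(eval_pred):
    logits, labels = eval_pred
    predictions = np.argmax(logits, axis=-1)
    return {
        "accuracy": accuracy_score(labels, predictions),
        "precision": precision_score(labels, predictions, average="macro", zero_division=0),
        "f1": f1_score(labels, predictions, average="macro", zero_division=0),
    }
```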
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:df7b637f8087149bbbe14460436a84101d6f3de54f3c09fa93ade383dfcb1007
+ oid sha256:b734375980f6b5602b5502c50039640ff1f422bb8633da94bbb27e7cdb8643c1
 size 343287149
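Since pytorch_model.bin is stored through Git LFS, only the pointer changes here; the `oid sha256` is the SHA-256 of the weight file itself. A small sketch for checking a downloaded copy against the new pointer:

```python
# Verify a locally downloaded pytorch_model.bin against the LFS pointer above.
# The Git LFS oid is the SHA-256 hash of the full file contents.
import hashlib


def sha256_of(path, chunk_size=1 << 20):
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()


expected = "b734375980f6b5602b5502c50039640ff1f422bb8633da94bbb27e7cdb8643c1"
print(sha256_of("pytorch_model.bin") == expected)
```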