Toflamus committed
Commit 43a3be4
1 Parent(s): c0af7a5

Toflamus/GPT-2_para3M

README.md CHANGED
@@ -15,7 +15,7 @@ should probably proofread and complete it, then remove this comment. -->
 
 This model is a fine-tuned version of [gpt2](https://huggingface.co/gpt2) on an unknown dataset.
 It achieves the following results on the evaluation set:
- - Loss: 2.3235
+ - Loss: 2.3207
 
 ## Model description
 
@@ -42,20 +42,124 @@ The following hyperparameters were used during training:
 - total_train_batch_size: 256
 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
 - lr_scheduler_type: cosine
- - lr_scheduler_warmup_steps: 1000
+ - lr_scheduler_warmup_steps: 100
 - num_epochs: 1
 
 ### Training results
 
 | Training Loss | Epoch | Step | Validation Loss |
 |:-------------:|:-----:|:-----:|:---------------:|
- | 3.6451 | 0.47 | 5000 | 2.4368 |
- | 2.5113 | 0.94 | 10000 | 2.3235 |
+ | 9.6976 | 0.01 | 100 | 7.7754 |
+ | 6.488 | 0.02 | 200 | 5.7795 |
+ | 5.3705 | 0.03 | 300 | 4.8609 |
+ | 4.5632 | 0.04 | 400 | 4.2544 |
+ | 4.141 | 0.05 | 500 | 3.9425 |
+ | 3.902 | 0.06 | 600 | 3.7189 |
+ | 3.7074 | 0.07 | 700 | 3.5514 |
+ | 3.5716 | 0.08 | 800 | 3.4291 |
+ | 3.4695 | 0.08 | 900 | 3.3253 |
+ | 3.3847 | 0.09 | 1000 | 3.2311 |
+ | 3.2974 | 0.1 | 1100 | 3.1595 |
+ | 3.2318 | 0.11 | 1200 | 3.0909 |
+ | 3.1698 | 0.12 | 1300 | 3.0329 |
+ | 3.1258 | 0.13 | 1400 | 2.9879 |
+ | 3.0802 | 0.14 | 1500 | 2.9396 |
+ | 3.046 | 0.15 | 1600 | 2.9017 |
+ | 3.0047 | 0.16 | 1700 | 2.8652 |
+ | 2.9701 | 0.17 | 1800 | 2.8320 |
+ | 2.9425 | 0.18 | 1900 | 2.8048 |
+ | 2.9141 | 0.19 | 2000 | 2.7757 |
+ | 2.8896 | 0.2 | 2100 | 2.7515 |
+ | 2.8667 | 0.21 | 2200 | 2.7263 |
+ | 2.8443 | 0.22 | 2300 | 2.7066 |
+ | 2.8288 | 0.23 | 2400 | 2.6815 |
+ | 2.8044 | 0.24 | 2500 | 2.6620 |
+ | 2.7886 | 0.25 | 2600 | 2.6471 |
+ | 2.7732 | 0.25 | 2700 | 2.6283 |
+ | 2.7576 | 0.26 | 2800 | 2.6101 |
+ | 2.7479 | 0.27 | 2900 | 2.5978 |
+ | 2.7256 | 0.28 | 3000 | 2.5819 |
+ | 2.7179 | 0.29 | 3100 | 2.5688 |
+ | 2.707 | 0.3 | 3200 | 2.5595 |
+ | 2.6921 | 0.31 | 3300 | 2.5471 |
+ | 2.6809 | 0.32 | 3400 | 2.5329 |
+ | 2.6779 | 0.33 | 3500 | 2.5232 |
+ | 2.663 | 0.34 | 3600 | 2.5154 |
+ | 2.6554 | 0.35 | 3700 | 2.5030 |
+ | 2.6437 | 0.36 | 3800 | 2.4967 |
+ | 2.6346 | 0.37 | 3900 | 2.4859 |
+ | 2.6293 | 0.38 | 4000 | 2.4768 |
+ | 2.6221 | 0.39 | 4100 | 2.4709 |
+ | 2.6178 | 0.4 | 4200 | 2.4623 |
+ | 2.6076 | 0.41 | 4300 | 2.4586 |
+ | 2.6025 | 0.41 | 4400 | 2.4492 |
+ | 2.5907 | 0.42 | 4500 | 2.4409 |
+ | 2.5896 | 0.43 | 4600 | 2.4369 |
+ | 2.5816 | 0.44 | 4700 | 2.4316 |
+ | 2.5783 | 0.45 | 4800 | 2.4256 |
+ | 2.577 | 0.46 | 4900 | 2.4204 |
+ | 2.5685 | 0.47 | 5000 | 2.4150 |
+ | 2.567 | 0.48 | 5100 | 2.4093 |
+ | 2.5564 | 0.49 | 5200 | 2.4059 |
+ | 2.5556 | 0.5 | 5300 | 2.4012 |
+ | 2.5496 | 0.51 | 5400 | 2.3997 |
+ | 2.545 | 0.52 | 5500 | 2.3956 |
+ | 2.5473 | 0.53 | 5600 | 2.3905 |
+ | 2.5389 | 0.54 | 5700 | 2.3856 |
+ | 2.5373 | 0.55 | 5800 | 2.3818 |
+ | 2.5318 | 0.56 | 5900 | 2.3787 |
+ | 2.5313 | 0.57 | 6000 | 2.3751 |
+ | 2.5285 | 0.58 | 6100 | 2.3722 |
+ | 2.5318 | 0.58 | 6200 | 2.3687 |
+ | 2.5229 | 0.59 | 6300 | 2.3666 |
+ | 2.5194 | 0.6 | 6400 | 2.3632 |
+ | 2.5174 | 0.61 | 6500 | 2.3598 |
+ | 2.5169 | 0.62 | 6600 | 2.3567 |
+ | 2.511 | 0.63 | 6700 | 2.3552 |
+ | 2.5093 | 0.64 | 6800 | 2.3546 |
+ | 2.5114 | 0.65 | 6900 | 2.3528 |
+ | 2.5064 | 0.66 | 7000 | 2.3492 |
+ | 2.507 | 0.67 | 7100 | 2.3483 |
+ | 2.502 | 0.68 | 7200 | 2.3445 |
+ | 2.4964 | 0.69 | 7300 | 2.3448 |
+ | 2.4999 | 0.7 | 7400 | 2.3423 |
+ | 2.4961 | 0.71 | 7500 | 2.3407 |
+ | 2.489 | 0.72 | 7600 | 2.3386 |
+ | 2.4926 | 0.73 | 7700 | 2.3384 |
+ | 2.4919 | 0.74 | 7800 | 2.3365 |
+ | 2.491 | 0.74 | 7900 | 2.3349 |
+ | 2.4893 | 0.75 | 8000 | 2.3333 |
+ | 2.4909 | 0.76 | 8100 | 2.3318 |
+ | 2.4862 | 0.77 | 8200 | 2.3305 |
+ | 2.4884 | 0.78 | 8300 | 2.3299 |
+ | 2.49 | 0.79 | 8400 | 2.3280 |
+ | 2.4788 | 0.8 | 8500 | 2.3286 |
+ | 2.4865 | 0.81 | 8600 | 2.3272 |
+ | 2.4823 | 0.82 | 8700 | 2.3263 |
+ | 2.4844 | 0.83 | 8800 | 2.3255 |
+ | 2.4826 | 0.84 | 8900 | 2.3251 |
+ | 2.4844 | 0.85 | 9000 | 2.3243 |
+ | 2.4798 | 0.86 | 9100 | 2.3231 |
+ | 2.4864 | 0.87 | 9200 | 2.3231 |
+ | 2.4755 | 0.88 | 9300 | 2.3228 |
+ | 2.4735 | 0.89 | 9400 | 2.3228 |
+ | 2.4786 | 0.9 | 9500 | 2.3224 |
+ | 2.4791 | 0.91 | 9600 | 2.3222 |
+ | 2.4809 | 0.91 | 9700 | 2.3214 |
+ | 2.4778 | 0.92 | 9800 | 2.3213 |
+ | 2.4777 | 0.93 | 9900 | 2.3211 |
+ | 2.4798 | 0.94 | 10000 | 2.3209 |
+ | 2.4768 | 0.95 | 10100 | 2.3212 |
+ | 2.4808 | 0.96 | 10200 | 2.3209 |
+ | 2.4762 | 0.97 | 10300 | 2.3208 |
+ | 2.4778 | 0.98 | 10400 | 2.3208 |
+ | 2.4816 | 0.99 | 10500 | 2.3207 |
+ | 2.4728 | 1.0 | 10600 | 2.3207 |
 
 
 ### Framework versions
 
 - Transformers 4.32.0
- - Pytorch 2.0.1+cu118
+ - Pytorch 2.0.1+cu117
 - Datasets 2.14.4
- - Tokenizers 0.13.3
+ - Tokenizers 0.13.2
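For reference, here is a minimal sketch of how the hyperparameters listed above could be mapped onto `transformers.TrainingArguments`. Only the values visible in the diff come from the card; `output_dir`, `learning_rate`, and the per-device/accumulation split behind the total batch size of 256 are placeholders, not the author's actual settings.

```python
# Sketch only, not the author's training script. Values marked "placeholder"
# or "assumed" are NOT stated in this section of the model card.
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="gpt2_para3m_out",     # placeholder, not stated in the card
    learning_rate=5e-4,               # placeholder, not stated in this section
    per_device_train_batch_size=32,   # assumed split: 32 * 8 accumulation steps
    gradient_accumulation_steps=8,    #   = total_train_batch_size of 256
    num_train_epochs=1,
    lr_scheduler_type="cosine",
    warmup_steps=100,                 # updated value in this commit
    adam_beta1=0.9,
    adam_beta2=0.999,
    adam_epsilon=1e-8,
    evaluation_strategy="steps",      # the results table evaluates every 100 steps
    eval_steps=100,
    logging_steps=100,
)
```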
merges.txt CHANGED
@@ -1,4 +1,4 @@
- #version: 0.2
+ #version: 0.2 - Trained by `huggingface/tokenizers`
 Ġ t
 Ġ a
 h e
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:5fc3af29f0c6a6dc1b13b45295b1a11880de907d05abee6d6528717096e3d45d
+ oid sha256:14fff0e75573c30941f285deaa9e55953fbe83007725d41a1aee6a2b96c9a7fb
 size 14352341
special_tokens_map.json CHANGED
@@ -1,24 +1,6 @@
 {
-   "bos_token": {
-     "content": "<|endoftext|>",
-     "lstrip": false,
-     "normalized": true,
-     "rstrip": false,
-     "single_word": false
-   },
-   "eos_token": {
-     "content": "<|endoftext|>",
-     "lstrip": false,
-     "normalized": true,
-     "rstrip": false,
-     "single_word": false
-   },
+   "bos_token": "<|endoftext|>",
+   "eos_token": "<|endoftext|>",
    "pad_token": "<|endoftext|>",
-   "unk_token": {
-     "content": "<|endoftext|>",
-     "lstrip": false,
-     "normalized": true,
-     "rstrip": false,
-     "single_word": false
-   }
+   "unk_token": "<|endoftext|>"
 }
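The edit above collapses the verbose `AddedToken` entries into plain strings; both serializations resolve to the same `<|endoftext|>` special tokens once the tokenizer is loaded. A quick check, assuming the repo id from the commit header:

```python
from transformers import AutoTokenizer

# Whether special_tokens_map.json stores the tokens as plain strings or as
# AddedToken dicts, the loaded tokenizer exposes the same values.
tok = AutoTokenizer.from_pretrained("Toflamus/GPT-2_para3M")
print(tok.bos_token, tok.eos_token, tok.unk_token, tok.pad_token)
# expected: <|endoftext|> <|endoftext|> <|endoftext|> <|endoftext|>
```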
tokenizer.json CHANGED
@@ -9,7 +9,7 @@
   "single_word": false,
   "lstrip": false,
   "rstrip": false,
-   "normalized": true,
+   "normalized": false,
   "special": true
  }
 ],
@@ -39,7 +39,6 @@
  "continuing_subword_prefix": "",
  "end_of_word_suffix": "",
  "fuse_unk": false,
-  "byte_fallback": false,
  "vocab": {
   "!": 0,
   "\"": 1,
tokenizer_config.json CHANGED
@@ -1,33 +1,9 @@
 {
-   "add_bos_token": false,
    "add_prefix_space": false,
-   "bos_token": {
-     "__type": "AddedToken",
-     "content": "<|endoftext|>",
-     "lstrip": false,
-     "normalized": true,
-     "rstrip": false,
-     "single_word": false
-   },
+   "bos_token": "<|endoftext|>",
    "clean_up_tokenization_spaces": true,
-   "eos_token": {
-     "__type": "AddedToken",
-     "content": "<|endoftext|>",
-     "lstrip": false,
-     "normalized": true,
-     "rstrip": false,
-     "single_word": false
-   },
-   "errors": "replace",
-   "model_max_length": 2048,
-   "pad_token": null,
+   "eos_token": "<|endoftext|>",
+   "model_max_length": 1024,
    "tokenizer_class": "GPT2Tokenizer",
-   "unk_token": {
-     "__type": "AddedToken",
-     "content": "<|endoftext|>",
-     "lstrip": false,
-     "normalized": true,
-     "rstrip": false,
-     "single_word": false
-   }
+   "unk_token": "<|endoftext|>"
 }
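This hunk also lowers `model_max_length` from 2048 to 1024, matching the base gpt2 checkpoint's context size. A small sanity-check sketch, again assuming the repo id above; the truncation behaviour shown is standard `transformers` behaviour rather than anything stated in the commit:

```python
from transformers import AutoTokenizer

# Assumes the repo id from the commit header.
tok = AutoTokenizer.from_pretrained("Toflamus/GPT-2_para3M")
print(tok.model_max_length)  # expected: 1024 after this commit

# With truncation=True and no explicit max_length, encoding is capped at
# model_max_length, so very long inputs come back with at most 1024 ids.
ids = tok("word " * 5000, truncation=True)["input_ids"]
print(len(ids))
```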
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:835cfdd3a5058f6f5f6b5461aea15a84c02c1ac239e2e56b9b6469d0d491d142
+ oid sha256:1c363c8942fdca6aa050728393ab72a9533fe8c19f2604ac1b1f8330fce72295
 size 4027