Files changed (3)
  1. config.json +2 -3
  2. generation_config.json +2 -1
  3. special_tokens_map.json +3 -15
config.json CHANGED
@@ -1,12 +1,11 @@
 {
-  "_name_or_path": "meta-llama/Meta-Llama-3-70B-Instruct",
   "architectures": [
     "LlamaForCausalLM"
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
   "bos_token_id": 128000,
-  "eos_token_id": 128001,
+  "eos_token_id": 128009,
   "hidden_act": "silu",
   "hidden_size": 8192,
   "initializer_range": 0.02,
@@ -23,6 +22,6 @@
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
   "transformers_version": "4.40.0",
-  "use_cache": false,
+  "use_cache": true,
   "vocab_size": 128256
 }
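The substantive change is `eos_token_id`: 128001 is `<|end_of_text|>`, while 128009 is `<|eot_id|>`, the end-of-turn token the instruct chat template emits after each assistant reply, so without this fix generation tends to run past the end of the answer. The edit also re-enables `use_cache` and drops the informational `_name_or_path` field. A minimal sketch to verify the change after pulling this revision (the model id is taken from the removed `_name_or_path` line; the repo is gated, so access is assumed):

```python
from transformers import AutoConfig, AutoTokenizer

model_id = "meta-llama/Meta-Llama-3-70B-Instruct"  # from the removed _name_or_path
config = AutoConfig.from_pretrained(model_id)
tokenizer = AutoTokenizer.from_pretrained(model_id)

# After this change the config's EOS id is the end-of-turn token.
print(config.eos_token_id)                                    # 128009
print(tokenizer.convert_ids_to_tokens(config.eos_token_id))   # '<|eot_id|>'
```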
generation_config.json CHANGED
@@ -1,6 +1,7 @@
 {
   "_from_model_config": true,
   "bos_token_id": 128000,
-  "eos_token_id": 128001,
+  "eos_token_id": 128009,
+  "pad_token_id": 128009,
   "transformers_version": "4.40.0"
 }
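This mirrors the EOS fix in the generation defaults and additionally sets a `pad_token_id`, so padded or batched generation has a defined padding id. With both in place, `generate()` stops at `<|eot_id|>` without any explicit `eos_token_id=` argument. A hedged sketch of the expected behavior (assumes `accelerate` is installed for `device_map="auto"` and enough GPU memory for the 70B weights):

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "meta-llama/Meta-Llama-3-70B-Instruct"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id, torch_dtype=torch.bfloat16, device_map="auto"
)

messages = [{"role": "user", "content": "Write one sentence about llamas."}]
input_ids = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)

# eos_token_id / pad_token_id are read from generation_config.json,
# so generation now stops at <|eot_id|> by default.
output = model.generate(input_ids, max_new_tokens=64)
print(tokenizer.decode(output[0][input_ids.shape[-1]:], skip_special_tokens=True))
```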
special_tokens_map.json CHANGED
@@ -1,17 +1,5 @@
 {
-  "bos_token": {
-    "content": "<|begin_of_text|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "eos_token": {
-    "content": "<|eot_id|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
+  "bos_token": "<|begin_of_text|>",
+  "eos_token": "<|eot_id|>",
   "pad_token": "<|eot_id|>"
-}
+}
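This collapses the expanded token dictionaries to plain strings, matching the existing `pad_token` entry. Both forms are accepted when loading; the dropped `lstrip`/`rstrip`/`normalized`/`single_word` flags were all `false` here, so tokenizer behavior should be unchanged. A quick check that the specials still resolve as before (expected values are assumptions based on the Llama 3 vocabulary):

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("meta-llama/Meta-Llama-3-70B-Instruct")
print(tokenizer.bos_token, tokenizer.bos_token_id)  # <|begin_of_text|> 128000
print(tokenizer.eos_token, tokenizer.eos_token_id)  # <|eot_id|> 128009
print(tokenizer.pad_token, tokenizer.pad_token_id)  # <|eot_id|> 128009
```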