alex-chiu committed
Commit
2676935
1 Parent(s): 83ab962

Update config.json

Files changed (1):
  config.json  +14 -2
config.json CHANGED
@@ -18,16 +18,28 @@
   "num_hidden_layers": 62,
   "num_key_value_heads": 8,
   "pretraining_tp": 1,
+  "quantization_config": {
+    "bits": 4,
+    "checkpoint_format": "gptq",
+    "damp_percent": 0.01,
+    "desc_act": false,
+    "group_size": 128,
+    "model_file_base_name": null,
+    "model_name_or_path": null,
+    "quant_method": "gptq",
+    "static_groups": false,
+    "sym": true,
+    "true_sequential": true
+  },
   "rms_norm_eps": 1e-06,
   "rope_scaling": {
     "factor": 4.0,
-    "rope_type": "linear",
     "type": "linear"
   },
   "rope_theta": 100000,
   "tie_word_embeddings": false,
   "torch_dtype": "float16",
-  "transformers_version": "4.44.2",
+  "transformers_version": "4.41.2",
   "use_cache": true,
   "vocab_size": 32256
 }
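For context: this commit embeds the GPTQ quantization settings directly in config.json, which lets transformers detect the 4-bit GPTQ checkpoint at load time without extra arguments. Below is a minimal loading sketch; the repo id is a placeholder (an assumption, not taken from this commit), and actually dequantizing GPTQ weights additionally requires optimum plus a GPTQ backend such as auto-gptq, along with accelerate for device_map="auto".

```python
# Minimal sketch: loading a checkpoint whose config.json carries a
# "quantization_config" block like the one added in this commit.
# The repo id below is a placeholder, not taken from this commit.
# Requires optimum + a GPTQ backend (e.g. auto-gptq) and accelerate.
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "some-org/some-gptq-model"  # hypothetical repo id

tokenizer = AutoTokenizer.from_pretrained(repo_id)
# transformers reads the quantization_config (quant_method="gptq",
# bits=4, group_size=128, ...) from config.json automatically.
model = AutoModelForCausalLM.from_pretrained(repo_id, device_map="auto")
```

The transformers_version field only records the library version used to serialize the config; the change from 4.44.2 to 4.41.2 presumably reflects the environment in which the quantized checkpoint was produced.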