File size: 225 Bytes
{
  "architectures": [
    "LlamaForCausalLM"
  ],
  "model_type": "llama",
  "embedding_dim": 2,
  "hidden_size": 100,
  "output_size": 32768,
  "num_layers": 2,
  "activation_function": "tanh",
  "initializer_range": 0.02
}
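This is a Hugging Face-style model configuration for a small Llama-type causal LM (2 layers, hidden size 100, tanh activation). A minimal sketch of reading and sanity-checking it, assuming the JSON is saved as config.json in the current directory (the filename and path are assumptions, not stated in the file above):

import json

# Assumed filename: Hugging Face-style repositories conventionally store this
# configuration as config.json next to the model weights.
with open("config.json", "r", encoding="utf-8") as f:
    config = json.load(f)

# Sanity-check the fields shown above.
assert config["model_type"] == "llama"
assert "LlamaForCausalLM" in config["architectures"]

print(
    f"layers={config['num_layers']}, "
    f"hidden_size={config['hidden_size']}, "
    f"activation={config['activation_function']}"
)

Note that keys such as embedding_dim, num_layers, and activation_function are not the canonical field names used by transformers' LlamaConfig (which expects names like num_hidden_layers and hidden_act), so a loader built on that class would likely fall back to its defaults for those fields rather than the values shown here.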