Update config.json

config.json  +10 -5
@@ -21,13 +21,17 @@
   "num_experts": 64,
   "num_shared_experts": 1,
   "norm_topk_prob": true,
-  "num_attention_heads":
+  "num_attention_heads": 56,
   "num_experts_per_tok": 4,
   "num_hidden_layers": 88,
-  "num_key_value_heads":
+  "num_key_value_heads": 8,
   "pretraining_tp": 1,
-  "rms_norm_eps": 1e-
-  "rope_scaling":
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": {
+    "factor": 4.0,
+    "original_max_position_embeddings": 16384,
+    "type": "yarn"
+  },
   "rope_theta": 600000,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
@@ -36,9 +40,10 @@
   "use_bias": false,
   "use_qkv_bias": false,
   "vocab_size": 126464,
+  "output_router_logits": false,
   "embedding_dropout": 0.0,
   "norm_head": true,
   "norm_softmax": false,
   "output_dropout": 0.0,
-  "
+  "head_dim": 128
 }
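For reference, the added values are internally consistent: 56 attention heads divide evenly into 8 key/value heads (grouped-query attention with 7 query heads per KV head), and the YaRN rope_scaling block stretches the original 16384-token pre-training window by a factor of 4.0, i.e. to 65536 tokens. Below is a minimal sketch that checks this, assuming the updated config.json has been downloaded locally; the file path and the checks themselves are illustrative, not part of the commit.

import json

# Sketch: sanity-check the fields this commit fills in, assuming the
# updated config.json has been saved locally (the path is illustrative).
with open("config.json") as f:
    cfg = json.load(f)

# Grouped-query attention: query heads must divide evenly into KV heads.
assert cfg["num_attention_heads"] % cfg["num_key_value_heads"] == 0
group_size = cfg["num_attention_heads"] // cfg["num_key_value_heads"]
print(f"GQA group size: {group_size}")  # 56 / 8 = 7

# YaRN rope scaling: the usable context window is the original
# pre-training window multiplied by the scaling factor.
rs = cfg["rope_scaling"]
if rs is not None and rs.get("type") == "yarn":
    extended = int(rs["original_max_position_embeddings"] * rs["factor"])
    print(f"YaRN-extended context: {extended} tokens")  # 16384 * 4.0 = 65536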