{
  "att_groups": 4,
  "att_heads": 16,
  "att_query_groups": 8,
  "cross_att_type": "sqa",
  "dense_layer_dim": 1024,
  "embed_dim": 256,
  "ff_activation": "silu",
  "ff_dim": 96,
  "ff_dropout": 0.0,
  "final_stateless_layers_config": [
    "moe"
  ],
  "head_norm_type": "rms_norm",
  "moe_bias_mode": "global",
  "moe_grouped_gemm": true,
  "moe_shared_experts_bias_mode": "global",
  "moe_top_k": 10,
  "moe_use_cutlass_grouped_gemm": true,
  "moe_use_weighted_shared_experts": false,
  "num_experts": 384,
  "num_layers": 16,
  "num_shared_experts": 2,
  "rope_base": 100000,
  "router_amp": true,
  "self_att_type": "sqa",
  "seq_len": 8192,
  "shared_expert_dim": 192,
  "stateless_layers_config": [
    "dense",
    "moe"
  ],
  "stm_size": 1024,
  "use_attention_output_bias": false,
  "use_flash_attention": true,
  "use_gated": true,
  "use_gated_attention": true,
  "use_gated_cross_attention": false,
  "use_head_norm": true,
  "use_moe": true,
  "use_vectorized_moe": true,
  "vocab_size": 65536
}
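
For reference, a minimal sketch of loading and inspecting this configuration with Python's standard library, assuming the file is saved as config.json (the filename and the derived per-token expert count are illustrative assumptions, not part of the file itself):

import json

# Load the configuration shown above (the path is an assumption for illustration).
with open("config.json") as f:
    cfg = json.load(f)

# A few fields taken directly from the file.
print(cfg["embed_dim"])    # 256
print(cfg["num_layers"])   # 16
print(cfg["num_experts"])  # 384

# Illustrative derived quantity, assuming the usual MoE convention that
# moe_top_k routed experts plus the always-on shared experts are active per token.
active = cfg["moe_top_k"] + cfg["num_shared_experts"]  # 10 + 2 = 12
total = cfg["num_experts"] + cfg["num_shared_experts"]
print(f"{active} of {total} experts active per token")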