amd
/

Safetensors
llama
alignment-handbook
Generated from Trainer
Mingyuyang-1 committed on
Commit
e952c42
·
1 Parent(s): a1c0349

Update hybrid_config.json

Browse files
Files changed (1) hide show
  1. hybrid_config.json +0 -4
hybrid_config.json CHANGED
@@ -19,14 +19,10 @@
19
  "kv_lora_rank": 160,
20
  "q_lora_rank": 2048,
21
  "use_lora_layer_norm": false,
22
- "use_fixed_rank_for_first_and_last_block": true,
23
  "use_full_kv_head": false,
24
- "layer_rank_list": {},
25
  "qk_rope_head_dim": 64,
26
  "v_head_dim": 128,
27
  "qk_nope_head_dim": 64,
28
- "q_energy_ratio": null,
29
- "kv_energy_ratio": null,
30
  "qkv_rank_divisor": 8,
31
  "max_position_embeddings": 131072,
32
  "rope_theta": 500000.0,
 
19
  "kv_lora_rank": 160,
20
  "q_lora_rank": 2048,
21
  "use_lora_layer_norm": false,
 
22
  "use_full_kv_head": false,
 
23
  "qk_rope_head_dim": 64,
24
  "v_head_dim": 128,
25
  "qk_nope_head_dim": 64,
 
 
26
  "qkv_rank_divisor": 8,
27
  "max_position_embeddings": 131072,
28
  "rope_theta": 500000.0,