amd
/

Safetensors
llama
alignment-handbook
Generated from Trainer
Mingyuyang-1 committed on
Commit
369c8b3
·
1 Parent(s): 825bbe0

Update hybrid_config.json

Browse files
Files changed (1) hide show
  1. hybrid_config.json +0 -4
hybrid_config.json CHANGED
@@ -25,14 +25,10 @@
25
  "kv_lora_rank": 128,
26
  "q_lora_rank": 1536,
27
  "use_lora_layer_norm": false,
28
- "use_fixed_rank_for_first_and_last_block": true,
29
  "use_full_kv_head": false,
30
- "layer_rank_list": {},
31
  "qk_rope_head_dim": 64,
32
  "v_head_dim": 128,
33
  "qk_nope_head_dim": 64,
34
- "q_energy_ratio": null,
35
- "kv_energy_ratio": null,
36
  "qkv_rank_divisor": 8,
37
  "max_position_embeddings": 131072,
38
  "rope_theta": 500000.0,
 
25
  "kv_lora_rank": 128,
26
  "q_lora_rank": 1536,
27
  "use_lora_layer_norm": false,
 
28
  "use_full_kv_head": false,
 
29
  "qk_rope_head_dim": 64,
30
  "v_head_dim": 128,
31
  "qk_nope_head_dim": 64,
 
 
32
  "qkv_rank_divisor": 8,
33
  "max_position_embeddings": 131072,
34
  "rope_theta": 500000.0,