lora_config.json
{
  "r": 8,
  "lora_alpha": 16,
  "target_modules": ["q_proj", "v_proj"],
  "fan_in_fan_out": false,
  "lora_dropout": 0.05,
  "bias": "none",
  "task_type": "CAUSAL_LM",
  "inference_mode": false
}
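
A minimal sketch of how a config like this could be consumed with Hugging Face PEFT, assuming the JSON file sits in the working directory. The base checkpoint "facebook/opt-125m" is only a placeholder chosen because its attention layers expose q_proj and v_proj modules; substitute your own model.

# Sketch: load lora_config.json and apply it to a causal LM via PEFT.
# The base model name is an assumption, not part of this repository.
import json

from peft import LoraConfig, get_peft_model
from transformers import AutoModelForCausalLM

with open("lora_config.json") as f:
    cfg = json.load(f)

# Map the JSON fields onto PEFT's LoraConfig arguments.
lora_config = LoraConfig(
    r=cfg["r"],
    lora_alpha=cfg["lora_alpha"],
    target_modules=cfg["target_modules"],
    fan_in_fan_out=cfg["fan_in_fan_out"],
    lora_dropout=cfg["lora_dropout"],
    bias=cfg["bias"],
    task_type=cfg["task_type"],
    inference_mode=cfg["inference_mode"],
)

# Placeholder base model whose attention projections match target_modules.
base_model = AutoModelForCausalLM.from_pretrained("facebook/opt-125m")
model = get_peft_model(base_model, lora_config)
model.print_trainable_parameters()

With r=8 and lora_alpha=16, only the low-rank adapters injected into the q_proj and v_proj projections are trainable; the rest of the base model stays frozen, which print_trainable_parameters() makes visible.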