2 files changed: +13 -9 lines changed

@@ -38,14 +38,16 @@ lora:
   r: 32
   lora_alpha: 64
   lora_dropout: 0.1
-  target_modules:
-    - q_proj
-    - v_proj
-    - k_proj
-    - o_proj
-    - up_proj
-    - down_proj
-    - gate_proj
+  target_modules: "all-linear"
+  # to target specific modules
+  # target_modules:
+  #   - q_proj
+  #   - v_proj
+  #   - k_proj
+  #   - o_proj
+  #   - up_proj
+  #   - down_proj
+  #   - gate_proj
 
 # Training -------------------
 training:
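This hunk makes "all-linear" the default for target_modules: PEFT treats it as a special value that applies LoRA to every linear layer of the model rather than a fixed, hand-picked list (the output head, e.g. lm_head, is excluded from the match). A minimal sketch of the resulting behavior, assuming the trainer forwards this value to Hugging Face PEFT; the model name is only a placeholder:

from peft import LoraConfig, get_peft_model
from transformers import AutoModelForCausalLM

# Placeholder base model; any causal LM whose projections are nn.Linear works.
model = AutoModelForCausalLM.from_pretrained("facebook/opt-125m")

# Mirrors the YAML above. "all-linear" wraps every linear layer with a
# LoRA adapter instead of requiring an explicit q_proj/v_proj/... list.
lora_cfg = LoraConfig(
    r=32,
    lora_alpha=64,
    lora_dropout=0.1,
    target_modules="all-linear",
)

model = get_peft_model(model, lora_cfg)
model.print_trainable_parameters()  # only the adapter weights are trainable

Defaulting to "all-linear" trades a somewhat larger adapter parameter count for not having to know each architecture's module names in advance.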
@@ -125,7 +125,9 @@ class LoraConfig(BaseModel):
     lora_alpha: Optional[int] = Field(16, description="The alpha parameter for Lora scaling")
     bias: Optional[str] = Field("none", description="Bias type for Lora. Can be 'none', 'all' or 'lora_only'")
     lora_dropout: Optional[float] = Field(0.1, description="The dropout probability for Lora layers")
-    target_modules: Optional[List[str]] = Field(None, description="The names of the modules to apply Lora to")
+    target_modules: Optional[Union[List[str], Literal["all-linear"]]] = Field(
+        "all-linear", description="The names of the modules to apply Lora to"
+    )
     fan_in_fan_out: Optional[bool] = Field(
         False,
         description="Flag to indicate if the layer to replace stores weight like (fan_in, fan_out)",
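Note that the widened annotation relies on Union and Literal, so the file's typing imports must include them (the hunk above does not show that line). A self-contained sketch of just this field, assuming pydantic and Python 3.8+:

from typing import List, Literal, Optional, Union

from pydantic import BaseModel, Field

class LoraConfig(BaseModel):
    # "all-linear" becomes the default; an explicit module list still validates.
    target_modules: Optional[Union[List[str], Literal["all-linear"]]] = Field(
        "all-linear", description="The names of the modules to apply Lora to"
    )

LoraConfig()                                     # target_modules == "all-linear"
LoraConfig(target_modules=["q_proj", "v_proj"])  # explicit list form
LoraConfig(target_modules=None)                  # Optional still permits None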