
Commit 4b767df

config to use new "all-linear" feature for target_modules
1 parent f443e89 commit 4b767df

File tree

2 files changed: +13 −9 lines changed

llmtune/config.yml

Lines changed: 10 additions & 8 deletions
@@ -38,14 +38,16 @@ lora:
   r: 32
   lora_alpha: 64
   lora_dropout: 0.1
-  target_modules:
-    - q_proj
-    - v_proj
-    - k_proj
-    - o_proj
-    - up_proj
-    - down_proj
-    - gate_proj
+  target_modules: "all-linear"
+  # to target specific modules
+  # target_modules:
+  #   - q_proj
+  #   - v_proj
+  #   - k_proj
+  #   - o_proj
+  #   - up_proj
+  #   - down_proj
+  #   - gate_proj
 
 # Training -------------------
 training:
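
For context, a minimal sketch (not part of this commit) of how a config block like this maps onto a PEFT LoraConfig: since PEFT 0.8.0 the "all-linear" shortcut targets every linear layer except the output head, so the explicit q_proj/v_proj/... list becomes optional.

# Hedged sketch mirroring the YAML above; assumes peft >= 0.8.0 is installed.
from peft import LoraConfig

lora_config = LoraConfig(
    r=32,
    lora_alpha=64,
    lora_dropout=0.1,
    target_modules="all-linear",  # shortcut for all linear layers (output head excluded)
)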

llmtune/pydantic_models/config_model.py

Lines changed: 3 additions & 1 deletion
@@ -125,7 +125,9 @@ class LoraConfig(BaseModel):
     lora_alpha: Optional[int] = Field(16, description="The alpha parameter for Lora scaling")
     bias: Optional[str] = Field("none", description="Bias type for Lora. Can be 'none', 'all' or 'lora_only'")
     lora_dropout: Optional[float] = Field(0.1, description="The dropout probability for Lora layers")
-    target_modules: Optional[List[str]] = Field(None, description="The names of the modules to apply Lora to")
+    target_modules: Optional[Union[List[str], Literal["all-linear"]]] = Field(
+        "all-linear", description="The names of the modules to apply Lora to"
+    )
     fan_in_fan_out: Optional[bool] = Field(
         False,
         description="Flag to indicate if the layer to replace stores weight like (fan_in, fan_out)",
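
A hypothetical standalone check (not in the repo) showing that the updated field accepts both the "all-linear" literal and an explicit module list, with "all-linear" as the default:

# Self-contained pydantic sketch of the new field; LoraConfigSketch is a
# stand-in name, not the repo's class.
from typing import List, Literal, Optional, Union

from pydantic import BaseModel, Field


class LoraConfigSketch(BaseModel):
    target_modules: Optional[Union[List[str], Literal["all-linear"]]] = Field(
        "all-linear", description="The names of the modules to apply Lora to"
    )


print(LoraConfigSketch().target_modules)  # -> "all-linear" (default)
print(LoraConfigSketch(target_modules=["q_proj", "v_proj"]).target_modules)  # -> ["q_proj", "v_proj"]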

0 commit comments
