1 parent f759400 commit 2c83f44
src/together/resources/finetune.py
@@ -101,9 +101,10 @@ def create_finetune_request(
         raise ValueError(
             f"LoRA adapters are not supported for the selected model ({model_or_checkpoint})."
         )
-
-    if not 0 <= lora_dropout < 1.0:
-        raise ValueError("LoRA dropout must be in [0, 1) range.")
+
+    if lora_dropout is not None:
+        if not 0 <= lora_dropout < 1.0:
+            raise ValueError("LoRA dropout must be in [0, 1) range.")
 
     lora_r = lora_r if lora_r is not None else model_limits.lora_training.max_rank
     lora_alpha = lora_alpha if lora_alpha is not None else lora_r * 2
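
For context, a minimal standalone sketch of the validation behavior after this change. The helper name validate_lora_dropout is hypothetical and only mirrors the guarded check from the diff; it is not part of the together client's public API.

    from typing import Optional

    def validate_lora_dropout(lora_dropout: Optional[float]) -> None:
        # After this commit, a missing (None) dropout is accepted and the
        # range check only runs when a value was actually provided.
        if lora_dropout is not None:
            if not 0 <= lora_dropout < 1.0:
                raise ValueError("LoRA dropout must be in [0, 1) range.")

    # Before the guard, comparing None against 0 raised a TypeError;
    # now None simply skips the range check.
    validate_lora_dropout(None)    # accepted
    validate_lora_dropout(0.05)    # accepted
    # validate_lora_dropout(1.5)   # would raise ValueError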