2 changes: 1 addition & 1 deletion pyproject.toml
@@ -12,7 +12,7 @@ build-backend = "poetry.masonry.api"

[tool.poetry]
name = "together"
version = "1.4.6"
version = "1.4.7"
authors = [
"Together AI <[email protected]>"
]
16 changes: 16 additions & 0 deletions src/together/cli/api/finetune.py
@@ -71,12 +71,24 @@ def fine_tuning(ctx: click.Context) -> None:
)
@click.option("--batch-size", type=INT_WITH_MAX, default="max", help="Train batch size")
@click.option("--learning-rate", type=float, default=1e-5, help="Learning rate")
@click.option(
"--lr-scheduler-type",
type=click.Choice(["linear", "cosine"]),
default="linear",
help="Learning rate scheduler type",
)
@click.option(
"--min-lr-ratio",
type=float,
default=0.0,
help="The ratio of the final learning rate to the peak learning rate",
)
@click.option(
"--num-cycles",
type=float,
default=0.5,
help="Number of cycles for cosine learning rate scheduler.",
Review comment (Member):
Suggested change
- help="Number of cycles for cosine learning rate scheduler.",
+ help="Number of cycles for the cosine learning rate scheduler.",

Review comment (Contributor):
nit: Maybe also add what fractional values mean.

Review comment (Contributor, author):
Updated to "Number or fraction of cycles".
)
@click.option(
"--warmup-ratio",
type=float,
@@ -162,7 +174,9 @@ def create(
n_checkpoints: int,
batch_size: int | Literal["max"],
learning_rate: float,
lr_scheduler_type: Literal["linear", "cosine"],
min_lr_ratio: float,
num_cycles: float,
warmup_ratio: float,
max_grad_norm: float,
weight_decay: float,
@@ -194,7 +208,9 @@ def create(
n_checkpoints=n_checkpoints,
batch_size=batch_size,
learning_rate=learning_rate,
lr_scheduler_type=lr_scheduler_type,
min_lr_ratio=min_lr_ratio,
num_cycles=num_cycles,
warmup_ratio=warmup_ratio,
max_grad_norm=max_grad_norm,
weight_decay=weight_decay,
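To make the fractional-cycles question from the review thread above concrete, here is a minimal sketch of an HF-style cosine schedule with a minimum-LR floor. It is an illustration only, not the Together backend implementation, and cosine_lr, total_steps, and peak_lr are hypothetical names.

import math

def cosine_lr(
    step: int,
    total_steps: int,
    peak_lr: float,
    min_lr_ratio: float = 0.0,
    num_cycles: float = 0.5,
) -> float:
    # num_cycles counts full cosine periods over training:
    # 0.5 is a single decay from peak_lr down to peak_lr * min_lr_ratio,
    # 1.5 decays, rises back to the peak, then decays again.
    progress = step / max(1, total_steps)
    min_lr = peak_lr * min_lr_ratio
    cosine = 0.5 * (1.0 + math.cos(2.0 * math.pi * num_cycles * progress))
    return min_lr + (peak_lr - min_lr) * cosine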
34 changes: 30 additions & 4 deletions src/together/resources/finetune.py
@@ -23,6 +23,7 @@
TrainingType,
FinetuneLRScheduler,
FinetuneLinearLRSchedulerArgs,
FinetuneCosineLRSchedulerArgs,
TrainingMethodDPO,
TrainingMethodSFT,
FinetuneCheckpoint,
@@ -57,7 +58,9 @@ def createFinetuneRequest(
n_checkpoints: int | None = 1,
batch_size: int | Literal["max"] = "max",
learning_rate: float | None = 0.00001,
lr_scheduler_type: Literal["linear", "cosine"] = "linear",
min_lr_ratio: float = 0.0,
num_cycles: float = 0.5,
Review comment (Contributor):
Let's call it scheduler_num_cycles for clarity?

Review comment (Contributor, author):
The backend expects the field in the request to be num_cycles inside the scheduler args. Would just changing the CLI arg be ok?

Review comment (Contributor, author):
Changed the CLI arg to scheduler_num_cycles. Worth noting FinetuneCosineLRSchedulerArgs still has the field as num_cycles.
warmup_ratio: float = 0.0,
max_grad_norm: float = 1.0,
weight_decay: float = 0.0,
@@ -129,10 +132,21 @@ def createFinetuneRequest(
f"training_method must be one of {', '.join(AVAILABLE_TRAINING_METHODS)}"
)

lrScheduler = FinetuneLRScheduler(
lr_scheduler_type="linear",
lr_scheduler_args=FinetuneLinearLRSchedulerArgs(min_lr_ratio=min_lr_ratio),
)
if lr_scheduler_type == "cosine":
if num_cycles <= 0.0:
Review comment (Contributor):
nit: maybe also add some meaningful upper bound?

Review comment (Contributor, author):
Hard to know what a reasonable upper bound would be without knowing the number of steps, afaik. I think it makes sense to follow the HF implementation and let the cosine alias if the user inputs something unreasonably large for their job.
raise ValueError("Number of cycles should be greater than 0")

lrScheduler = FinetuneLRScheduler(
lr_scheduler_type="cosine",
lr_scheduler_args=FinetuneCosineLRSchedulerArgs(
min_lr_ratio=min_lr_ratio, num_cycles=num_cycles
),
)
else:
lrScheduler = FinetuneLRScheduler(
lr_scheduler_type="linear",
lr_scheduler_args=FinetuneLinearLRSchedulerArgs(min_lr_ratio=min_lr_ratio),
)

training_method_cls: TrainingMethodSFT | TrainingMethodDPO = TrainingMethodSFT()
if training_method == "dpo":
@@ -244,7 +258,9 @@ def create(
n_checkpoints: int | None = 1,
batch_size: int | Literal["max"] = "max",
learning_rate: float | None = 0.00001,
lr_scheduler_type: Literal["linear", "cosine"] = "linear",
min_lr_ratio: float = 0.0,
num_cycles: float = 0.5,
warmup_ratio: float = 0.0,
max_grad_norm: float = 1.0,
weight_decay: float = 0.0,
@@ -279,8 +295,10 @@ def create(
batch_size (int or "max"): Batch size for fine-tuning. Defaults to max.
learning_rate (float, optional): Learning rate multiplier to use for training
Defaults to 0.00001.
lr_scheduler_type (Literal["linear", "cosine"]): Learning rate scheduler type. Defaults to "linear".
min_lr_ratio (float, optional): Min learning rate ratio of the initial learning rate for
the learning rate scheduler. Defaults to 0.0.
num_cycles (float, optional): Number of cycles for cosine learning rate scheduler. Defaults to 0.5.
warmup_ratio (float, optional): Warmup ratio for learning rate scheduler.
max_grad_norm (float, optional): Max gradient norm. Defaults to 1.0, set to 0 to disable.
weight_decay (float, optional): Weight decay. Defaults to 0.0.
@@ -336,7 +354,9 @@ def create(
n_checkpoints=n_checkpoints,
batch_size=batch_size,
learning_rate=learning_rate,
lr_scheduler_type=lr_scheduler_type,
min_lr_ratio=min_lr_ratio,
num_cycles=num_cycles,
warmup_ratio=warmup_ratio,
max_grad_norm=max_grad_norm,
weight_decay=weight_decay,
@@ -617,7 +637,9 @@ async def create(
n_checkpoints: int | None = 1,
batch_size: int | Literal["max"] = "max",
learning_rate: float | None = 0.00001,
lr_scheduler_type: Literal["linear", "cosine"] = "linear",
min_lr_ratio: float = 0.0,
num_cycles: float = 0.5,
warmup_ratio: float = 0.0,
max_grad_norm: float = 1.0,
weight_decay: float = 0.0,
@@ -652,8 +674,10 @@ async def create(
batch_size (int, optional): Batch size for fine-tuning. Defaults to max.
learning_rate (float, optional): Learning rate multiplier to use for training
Defaults to 0.00001.
lr_scheduler_type (Literal["linear", "cosine"]): Learning rate scheduler type. Defaults to "linear".
min_lr_ratio (float, optional): Min learning rate ratio of the initial learning rate for
the learning rate scheduler. Defaults to 0.0.
num_cycles (float, optional): Number of cycles for cosine learning rate scheduler. Defaults to 0.5.
warmup_ratio (float, optional): Warmup ratio for learning rate scheduler.
max_grad_norm (float, optional): Max gradient norm. Defaults to 1.0, set to 0 to disable.
weight_decay (float, optional): Weight decay. Defaults to 0.0.
@@ -710,7 +734,9 @@ async def create(
n_checkpoints=n_checkpoints,
batch_size=batch_size,
learning_rate=learning_rate,
lr_scheduler_type=lr_scheduler_type,
min_lr_ratio=min_lr_ratio,
num_cycles=num_cycles,
warmup_ratio=warmup_ratio,
max_grad_norm=max_grad_norm,
weight_decay=weight_decay,
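For reference, a hypothetical call exercising the new parameters through the SDK could look like the sketch below. It assumes the fine-tuning resource is exposed as client.fine_tuning, a TOGETHER_API_KEY in the environment, and placeholder model and training-file IDs.

from together import Together

client = Together()

# Placeholder training file ID and model name; the scheduler arguments
# are the ones added in this PR.
job = client.fine_tuning.create(
    training_file="file-abc123",
    model="meta-llama/Meta-Llama-3.1-8B-Instruct-Reference",
    lr_scheduler_type="cosine",
    min_lr_ratio=0.1,
    num_cycles=0.5,
)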
2 changes: 2 additions & 0 deletions src/together/types/__init__.py
@@ -34,6 +34,7 @@
TrainingMethodDPO,
TrainingMethodSFT,
FinetuneCheckpoint,
FinetuneCosineLRSchedulerArgs,
FinetuneDownloadResult,
FinetuneLinearLRSchedulerArgs,
FinetuneList,
@@ -70,6 +71,7 @@
"FinetuneDownloadResult",
"FinetuneLRScheduler",
"FinetuneLinearLRSchedulerArgs",
"FinetuneCosineLRSchedulerArgs",
"FileRequest",
"FileResponse",
"FileList",
50 changes: 45 additions & 5 deletions src/together/types/finetune.py
@@ -1,9 +1,9 @@
from __future__ import annotations

from enum import Enum
-from typing import List, Literal
+from typing import List, Literal, Union

-from pydantic import StrictBool, Field, validator, field_validator
+from pydantic import StrictBool, Field, validator, field_validator, ValidationInfo

from together.types.abstract import BaseModel
from together.types.common import (
@@ -345,13 +345,53 @@ class FinetuneTrainingLimits(BaseModel):
lora_training: FinetuneLoraTrainingLimits | None = None


class FinetuneLinearLRSchedulerArgs(BaseModel):
min_lr_ratio: float | None = 0.0


class FinetuneCosineLRSchedulerArgs(BaseModel):
min_lr_ratio: float | None = 0.0
num_cycles: float | None = 0.5


LRSchedulerTypeToArgs = {
"linear": FinetuneLinearLRSchedulerArgs,
"cosine": FinetuneCosineLRSchedulerArgs,
}

FinetuneLRSchedulerArgs = Union[
FinetuneLinearLRSchedulerArgs, FinetuneCosineLRSchedulerArgs, None
]


class FinetuneLRScheduler(BaseModel):
lr_scheduler_type: str
-lr_scheduler_args: FinetuneLinearLRSchedulerArgs | None = None
+lr_scheduler_args: FinetuneLRSchedulerArgs | None = None

@field_validator("lr_scheduler_type")
@classmethod
def validate_scheduler_type(cls, v: str) -> str:
if v not in LRSchedulerTypeToArgs:
raise ValueError(
f"Scheduler type must be one of: {LRSchedulerTypeToArgs.keys()}"
)
return v

@field_validator("lr_scheduler_args")
@classmethod
def validate_scheduler_args(
cls, v: FinetuneLRSchedulerArgs, info: ValidationInfo
) -> FinetuneLRSchedulerArgs:
scheduler_type = str(info.data.get("lr_scheduler_type"))

if v is None:
return v

-class FinetuneLinearLRSchedulerArgs(BaseModel):
-min_lr_ratio: float | None = 0.0
expected_type = LRSchedulerTypeToArgs[scheduler_type]
if not isinstance(v, expected_type):
raise ValueError(f"Expected {expected_type}, got {type(v)}")

return v


class FinetuneCheckpoint(BaseModel):
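As a quick illustration of the validators on FinetuneLRScheduler above, the sketch below pairs a scheduler type with matching and mismatching args. This is an assumption-level example of how the new pydantic validation should behave, not a test from the PR.

from together.types import (
    FinetuneCosineLRSchedulerArgs,
    FinetuneLinearLRSchedulerArgs,
    FinetuneLRScheduler,
)

# Matching type and args pass validation.
scheduler = FinetuneLRScheduler(
    lr_scheduler_type="cosine",
    lr_scheduler_args=FinetuneCosineLRSchedulerArgs(min_lr_ratio=0.1, num_cycles=1.0),
)

# A mismatched pairing should be rejected by validate_scheduler_args;
# pydantic surfaces the ValueError as a ValidationError.
mismatched = FinetuneLRScheduler(
    lr_scheduler_type="cosine",
    lr_scheduler_args=FinetuneLinearLRSchedulerArgs(min_lr_ratio=0.1),
)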