3 changes: 0 additions & 3 deletions arctic_training/config/scheduler.py
@@ -31,9 +31,6 @@ class SchedulerConfig(BaseConfig):
     type: str = ""
     """ Scheduler factory type. Defaults to the `scheduler_factory_type` of the trainer. """
 
-    warmup_ratio: float = Field(default=0.1, ge=0.0, le=1.0)
-    """ The fraction of total training steps used for linear learning rate warmup. """
-
     learning_rate: Optional[float] = Field(default=None, alias="lr")
     """ The initial learning rate. Deprecated in favor of `optimizer.learning_rate`. """
 
6 changes: 5 additions & 1 deletion arctic_training/scheduler/hf_factory.py
@@ -15,6 +15,7 @@
 
 from typing import Any
 
+from pydantic import Field
 from transformers import get_scheduler
 
 from arctic_training.config.scheduler import SchedulerConfig
@@ -23,16 +24,19 @@
 
 
 class HFSchedulerConfig(SchedulerConfig):
     name: str = "linear"
+    warmup_ratio: float = Field(default=0.1, ge=0.0, le=1.0)
+    """ The fraction of total training steps used for linear learning rate warmup. """
 
 
 class HFSchedulerFactory(SchedulerFactory):
     name = "huggingface"
     config: HFSchedulerConfig
 
     def create_scheduler(self, optimizer: Any) -> Any:
+        num_warmup_steps = int(self.config.warmup_ratio * self.trainer.training_horizon)
         return get_scheduler(
             name=self.config.name,
             optimizer=optimizer,
-            num_warmup_steps=self.trainer.warmup_steps,
+            num_warmup_steps=num_warmup_steps,
             num_training_steps=self.trainer.training_horizon,
         )
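
Note: a minimal, self-contained sketch of the arithmetic `HFSchedulerFactory.create_scheduler` now performs, with assumed stand-in values (`training_horizon = 1000` plays the role of `self.trainer.training_horizon`; `warmup_ratio` uses the `HFSchedulerConfig` default — none of these numbers come from this diff):

import torch
from transformers import get_scheduler

model = torch.nn.Linear(4, 4)
optimizer = torch.optim.AdamW(model.parameters(), lr=1e-3)

training_horizon = 1000  # assumed; stands in for self.trainer.training_horizon
warmup_ratio = 0.1       # HFSchedulerConfig default
num_warmup_steps = int(warmup_ratio * training_horizon)  # -> 100 warmup steps

# Linear warmup to the peak LR over the first 100 steps, then linear decay to 0.
scheduler = get_scheduler(
    name="linear",
    optimizer=optimizer,
    num_warmup_steps=num_warmup_steps,
    num_training_steps=training_horizon,
)

Resolving the ratio at scheduler-build time keeps `warmup_ratio` local to the one factory that uses it, rather than on the shared base config.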
50 changes: 50 additions & 0 deletions arctic_training/scheduler/wsd_factory.py
@@ -0,0 +1,50 @@
+# Copyright 2025 Snowflake Inc.
+# SPDX-License-Identifier: Apache-2.0
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Any
+
+from torch.optim.lr_scheduler import LambdaLR
+from transformers import get_wsd_schedule
+
+from arctic_training.config.scheduler import SchedulerConfig
+from arctic_training.scheduler.factory import SchedulerFactory
+
+
+class WSDSchedulerConfig(SchedulerConfig):
+    """See: https://huggingface.co/docs/transformers/en/main_classes/optimizer_schedules#transformers.get_wsd_schedule"""
+
+    name: str = "wsd"
+    num_warmup_steps: int
+    num_decay_steps: int
+    min_lr_ratio: float = 0.0
+
+
+class WSDSchedulerFactory(SchedulerFactory):
+    name = "wsd"
+    config: WSDSchedulerConfig
+
+    def create_scheduler(self, optimizer: Any) -> LambdaLR:
+        num_stable_steps = (
+            self.trainer.training_horizon
+            - self.config.num_warmup_steps
+            - self.config.num_decay_steps
+        )
+        return get_wsd_schedule(
+            optimizer=optimizer,
+            num_warmup_steps=self.config.num_warmup_steps,
+            num_decay_steps=self.config.num_decay_steps,
+            num_stable_steps=num_stable_steps,
+            min_lr_ratio=self.config.min_lr_ratio,
+        )
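
A warmup-stable-decay (WSD) schedule ramps the learning rate up over `num_warmup_steps`, holds it constant through the stable phase, then decays it toward `min_lr_ratio` times the peak over `num_decay_steps`; `create_scheduler` derives the stable phase as whatever remains of the training horizon. A runnable sketch with assumed numbers (a 1000-step horizon and the values below are illustrative, not taken from this diff):

import torch
from transformers import get_wsd_schedule

model = torch.nn.Linear(4, 4)
optimizer = torch.optim.AdamW(model.parameters(), lr=1e-3)

# Assumed values for illustration only.
training_horizon = 1000
num_warmup_steps = 100
num_decay_steps = 200
num_stable_steps = training_horizon - num_warmup_steps - num_decay_steps  # 700

scheduler = get_wsd_schedule(
    optimizer=optimizer,
    num_warmup_steps=num_warmup_steps,
    num_stable_steps=num_stable_steps,
    num_decay_steps=num_decay_steps,
    min_lr_ratio=0.1,
)

for step in range(training_horizon):
    optimizer.step()
    scheduler.step()
    if step in (50, 500, 999):
        # roughly: mid-warmup ramp, full peak on the plateau, near the decayed floor
        print(f"step {step}: lr={scheduler.get_last_lr()[0]:.2e}")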
5 changes: 0 additions & 5 deletions arctic_training/trainer/trainer.py
@@ -235,11 +235,6 @@ def training_horizon(self) -> int:
             // self.config.gradient_accumulation_steps
         )
 
-    @property
-    def warmup_steps(self) -> int:
-        """Number of warmup steps."""
-        return int(self.config.scheduler.warmup_ratio * self.training_horizon)
-
     @callback_wrapper("loss")
     @abstractmethod
     def loss(self, batch: Dict[str, torch.Tensor]) -> torch.Tensor: