plugins/attention-and-distributed-packing/pyproject.toml (2 changes: 1 addition & 1 deletion)
@@ -23,7 +23,7 @@ classifiers=[
"Programming Language :: Python :: 3.11",
]

dependencies = ["numba", "trl"]
dependencies = ["numba", "trl>=0.19.1,<0.20.0"]

[tool.hatch.build.targets.wheel]
only-include = ["src/fms_acceleration_aadp"]
@@ -20,7 +20,9 @@
from fms_acceleration import AccelerationPlugin
from peft import LoraConfig
from transformers import DataCollatorForSeq2Seq, TrainingArguments
-from trl import DataCollatorForCompletionOnlyLM # pylint: disable=import-error
+from trl import ( # pylint: disable=import-error, no-name-in-module
+    DataCollatorForCompletionOnlyLM,
+)
import torch


@@ -187,7 +187,7 @@ def _cross_entropy_backward(
pass


-MAX_FUSED_SIZE: tl.constexpr = 65536 # 2**16
+MAX_FUSED_SIZE = tl.constexpr(65536) # 2**16

class Fast_CrossEntropyLoss(torch.autograd.Function):
@staticmethod
@@ -17,7 +17,7 @@
import torch
from .utils import calculate_settings

-ROPE_GROUP_SIZE: tl.constexpr = 4
+ROPE_GROUP_SIZE = tl.constexpr(4)

@triton.heuristics({"BACKWARD_PASS": lambda args: args["BACKWARD_PASS"],})
@triton.jit
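For context, a minimal sketch (not part of this PR) of how a module-level constant wrapped in tl.constexpr(...), like MAX_FUSED_SIZE or ROPE_GROUP_SIZE above, is typically read from inside a @triton.jit kernel. Recent Triton releases generally require globals referenced inside jitted functions to be tl.constexpr instances. The kernel, wrapper, and GROUP_SIZE names here are illustrative assumptions, not code from this repository.

import torch
import triton
import triton.language as tl

# Module-level compile-time constant; wrapping in tl.constexpr(...) lets the
# jitted kernel below read it as a global (same pattern as ROPE_GROUP_SIZE).
GROUP_SIZE = tl.constexpr(4)

@triton.jit
def scale_by_group_kernel(x_ptr, out_ptr, n_elements, BLOCK_SIZE: tl.constexpr):
    pid = tl.program_id(axis=0)
    offsets = pid * BLOCK_SIZE + tl.arange(0, BLOCK_SIZE)
    mask = offsets < n_elements
    x = tl.load(x_ptr + offsets, mask=mask)
    # GROUP_SIZE is folded in at compile time because it is a tl.constexpr.
    tl.store(out_ptr + offsets, x * GROUP_SIZE, mask=mask)

def scale_by_group(x: torch.Tensor) -> torch.Tensor:
    # Expects a CUDA tensor; block size must be a power of two for tl.arange.
    out = torch.empty_like(x)
    n = x.numel()
    block = min(1024, triton.next_power_of_2(n))
    grid = (triton.cdiv(n, block),)
    scale_by_group_kernel[grid](x, out, n, BLOCK_SIZE=block)
    return out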