Skip to content

Commit d3ff133

Browse files
authored
FEAT Add RandLoRA to PEFT (#2464)
Implements "RandLoRA: Full-rank parameter-efficient fine-tuning of large models", https://arxiv.org/abs/2502.00987.
1 parent 9fdb21e commit d3ff133

17 files changed

+2514
-6
lines changed

src/peft/__init__.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -87,6 +87,8 @@
8787
PromptEncoderReparameterizationType,
8888
PromptTuningConfig,
8989
PromptTuningInit,
90+
RandLoraConfig,
91+
RandLoraModel,
9092
TrainableTokensConfig,
9193
TrainableTokensModel,
9294
VBLoRAConfig,
@@ -178,6 +180,8 @@
178180
"PromptLearningConfig",
179181
"PromptTuningConfig",
180182
"PromptTuningInit",
183+
"RandLoraConfig",
184+
"RandLoraModel",
181185
"TaskType",
182186
"TrainableTokensConfig",
183187
"TrainableTokensModel",

src/peft/tuners/__init__.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -39,6 +39,7 @@
3939
from .poly import PolyConfig, PolyModel
4040
from .prefix_tuning import PrefixEncoder, PrefixTuningConfig
4141
from .prompt_tuning import PromptEmbedding, PromptTuningConfig, PromptTuningInit
42+
from .randlora import RandLoraConfig, RandLoraModel
4243
from .trainable_tokens import TrainableTokensConfig, TrainableTokensModel
4344
from .vblora import VBLoRAConfig, VBLoRAModel
4445
from .vera import VeraConfig, VeraModel
@@ -89,6 +90,8 @@
8990
"PromptEncoderReparameterizationType",
9091
"PromptTuningConfig",
9192
"PromptTuningInit",
93+
"RandLoraConfig",
94+
"RandLoraModel",
9295
"TrainableTokensConfig",
9396
"TrainableTokensModel",
9497
"VBLoRAConfig",
Lines changed: 40 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,40 @@
1+
# Copyright 2025-present the HuggingFace Inc. team.
2+
3+
#
4+
# Licensed under the Apache License, Version 2.0 (the "License");
5+
# you may not use this file except in compliance with the License.
6+
# You may obtain a copy of the License at
7+
#
8+
# http://www.apache.org/licenses/LICENSE-2.0
9+
#
10+
# Unless required by applicable law or agreed to in writing, software
11+
# distributed under the License is distributed on an "AS IS" BASIS,
12+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13+
# See the License for the specific language governing permissions and
14+
# limitations under the License.
15+
16+
from peft.import_utils import is_bnb_4bit_available, is_bnb_available
from peft.utils import register_peft_method

from .config import RandLoraConfig
from .layer import Linear, RandLoraLayer
from .model import RandLoraModel


# Public API of the randlora tuner package.
__all__ = ["Linear", "RandLoraConfig", "RandLoraLayer", "RandLoraModel"]

# Register RandLoRA with PEFT's method registry so it can be resolved by the
# string name "randlora"; "randlora_" is the parameter-name prefix used to
# identify this tuner's trainable weights.
register_peft_method(name="randlora", config_cls=RandLoraConfig, model_cls=RandLoraModel, prefix="randlora_")
27+
28+
29+
def __getattr__(name):
    """Module-level ``__getattr__`` (PEP 562) for lazy, optional exports.

    The bitsandbytes-backed layers are only importable when the corresponding
    bitsandbytes support is installed; checking availability lazily here avoids
    importing ``.bnb`` (and bitsandbytes itself) at package import time.
    """
    if name == "Linear8bitLt":
        # 8-bit quantized linear layer; requires bitsandbytes int8 support.
        if is_bnb_available():
            from .bnb import Linear8bitLt

            return Linear8bitLt
    elif name == "Linear4bit":
        # 4-bit quantized linear layer; requires bitsandbytes 4-bit support.
        if is_bnb_4bit_available():
            from .bnb import Linear4bit

            return Linear4bit

    raise AttributeError(f"module {__name__} has no attribute {name}")

0 commit comments

Comments
 (0)