Commit e6f78c8

Merge pull request #1320 from Zrealshadow/dev-postgresql-patch-8
Add config file for linear lora
2 parents 50ed1ab + 863c3b5 commit e6f78c8

File tree

1 file changed: +40 −0 lines changed
  • examples/singa_peft/src/singa_peft/tuners/linear_lora

Lines changed: 40 additions & 0 deletions
@@ -0,0 +1,40 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#

from typing import Optional
from singa_peft.peft_config import PeftConfig


class LinearLoraConfig(PeftConfig):
    """
    LinearLoraConfig: linear LoRA config class
    """
    def __init__(self, r: int = 8, lora_alpha: int = 1, lora_dropout: float = 0, target_layers: Optional[list[str]] = None):
        r"""
        Args:
            r: the rank in LoRA, which determines the size of the low-rank matrix, default 8
            lora_alpha: learning rate scaling factor, default 1
            lora_dropout: dropout ratio, default 0.
            target_layers: list of the layer names to replace with LoRA, for example ['linear1', 'linear2']
        """
        super().__init__(peft_type="linear_lora")
        self.r = r
        self.lora_alpha = lora_alpha
        self.lora_dropout = lora_dropout
        self.target_layers = target_layers
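
For orientation, a minimal usage sketch of the class added in this diff (not part of the commit): the import path is an assumption inferred from the file tree (examples/singa_peft/src/singa_peft/tuners/linear_lora), and the layer names are the hypothetical examples from the docstring above.

# Usage sketch, not part of this commit. Import path and layer names are assumptions.
from singa_peft.tuners.linear_lora import LinearLoraConfig

config = LinearLoraConfig(
    r=8,                # rank of the low-rank LoRA matrices
    lora_alpha=16,      # scaling factor applied to the LoRA update
    lora_dropout=0.1,   # dropout ratio on the LoRA branch
    target_layers=["linear1", "linear2"],  # layer names to replace with LoRA
)

# The constructor stores each argument as a plain instance attribute.
print(config.r, config.lora_alpha, config.lora_dropout, config.target_layers)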

0 commit comments
