|
13 | 13 | # See the License for the specific language governing permissions and |
14 | 14 | # limitations under the License. |
15 | 15 |
|
| 16 | +import json |
16 | 17 | import logging |
17 | 18 | import os |
18 | 19 | import sys |
19 | 20 | import tempfile |
20 | 21 |
|
21 | 22 | import safetensors |
22 | 23 |
|
| 24 | +from diffusers.loaders.lora_base import LORA_ADAPTER_METADATA_KEY |
| 25 | + |
23 | 26 |
|
24 | 27 | sys.path.append("..") |
25 | 28 | from test_examples_utils import ExamplesTestsAccelerate, run_command # noqa: E402 |
@@ -175,6 +178,49 @@ def test_dreambooth_lora_hidream_checkpointing_checkpoints_total_limit(self): |
175 | 178 | {"checkpoint-4", "checkpoint-6"}, |
176 | 179 | ) |
177 | 180 |
|
 | 181 | +    def test_dreambooth_lora_with_metadata(self):
 | 182 | +        # Use a `lora_alpha` that is different from `rank`.
 | 183 | +        lora_alpha = 8
 | 184 | +        rank = 4
 | 185 | +        with tempfile.TemporaryDirectory() as tmpdir:
 | 186 | +            test_args = f"""
 | 187 | +                {self.script_path}
 | 188 | +                --pretrained_model_name_or_path {self.pretrained_model_name_or_path}
 | 189 | +                --instance_data_dir {self.instance_data_dir}
 | 190 | +                --instance_prompt {self.instance_prompt}
 | 191 | +                --resolution 64
 | 192 | +                --train_batch_size 1
 | 193 | +                --gradient_accumulation_steps 1
 | 194 | +                --max_train_steps 2
 | 195 | +                --lora_alpha={lora_alpha}
 | 196 | +                --rank={rank}
 | 197 | +                --learning_rate 5.0e-04
 | 198 | +                --scale_lr
 | 199 | +                --lr_scheduler constant
 | 200 | +                --lr_warmup_steps 0
 | 201 | +                --output_dir {tmpdir}
 | 202 | +                """.split()
 | 203 | +
 | 204 | +            run_command(self._launch_args + test_args)
 | 205 | +            # save_pretrained smoke test
 | 206 | +            state_dict_file = os.path.join(tmpdir, "pytorch_lora_weights.safetensors")
 | 207 | +            self.assertTrue(os.path.isfile(state_dict_file))
 | 208 | +
 | 209 | +            # Check if the metadata was properly serialized.
 | 210 | +            with safetensors.torch.safe_open(state_dict_file, framework="pt", device="cpu") as f:
 | 211 | +                metadata = f.metadata() or {}
 | 212 | +
 | 213 | +            metadata.pop("format", None)
 | 214 | +            # Fail with a clear assertion (not a TypeError) if the key is absent.
 | 215 | +            self.assertIn(LORA_ADAPTER_METADATA_KEY, metadata)
 | 216 | +            raw = json.loads(metadata[LORA_ADAPTER_METADATA_KEY])
 | 217 | +
 | 218 | +            loaded_lora_alpha = raw["transformer.lora_alpha"]
 | 219 | +            self.assertEqual(loaded_lora_alpha, lora_alpha)
 | 220 | +            loaded_lora_rank = raw["transformer.r"]
 | 221 | +            self.assertEqual(loaded_lora_rank, rank)
 | 222 | +
178 | 224 |     def test_dreambooth_lora_hidream_checkpointing_checkpoints_total_limit_removes_multiple_checkpoints(self):
179 | 225 |         with tempfile.TemporaryDirectory() as tmpdir:
180 | 226 |             test_args = f"""
|
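For reference, here is a minimal standalone sketch of how the serialized adapter metadata could be read back outside the test suite. It mirrors the keys asserted in the test above; the file path is a placeholder, and the `safe_open`/`json` usage follows the test itself rather than any separate diffusers API:

```python
import json

from safetensors import safe_open

from diffusers.loaders.lora_base import LORA_ADAPTER_METADATA_KEY

# Placeholder path: point this at a LoRA checkpoint produced by the training script.
state_dict_file = "pytorch_lora_weights.safetensors"

# safetensors stores metadata as a flat str -> str mapping in the file header.
with safe_open(state_dict_file, framework="pt", device="cpu") as f:
    metadata = f.metadata() or {}

# The adapter config is JSON-serialized under a single well-known key.
raw = metadata.get(LORA_ADAPTER_METADATA_KEY)
adapter_config = json.loads(raw) if raw else {}

print(adapter_config.get("transformer.lora_alpha"))  # e.g. 8 in the test above
print(adapter_config.get("transformer.r"))  # e.g. 4 in the test above
```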