Commit 87314bb

refactor(mm): split big migration into 3
Split the big migration that did all of these things into 3:

- Migration 22: Remove unique constraint on base/name/type in models table
- Migration 23: Migrate configs to v6.8.0 schemas
- Migration 24: Normalize file storage
1 parent 387eb63 commit 87314bb
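
For context, each of the three resulting migrations is built by its own factory (this diff shows build_migration_22; the commit message implies matching factories for 23 and 24) and registered with the app's SQLite migrator at startup. A minimal sketch of that wiring, assuming the migrator exposes a register_migration method and that the migration_23/migration_24 modules mirror build_migration_22's (app_config, logger) signature; those are assumptions, not shown in this diff:

# Hypothetical wiring sketch; only build_migration_22 appears in this diff. The
# register_migration call and the migration_23/24 modules are assumed to follow
# the same pattern as the existing migrations.
from logging import Logger

from invokeai.app.services.config import InvokeAIAppConfig
from invokeai.app.services.shared.sqlite_migrator.migrations.migration_22 import build_migration_22
from invokeai.app.services.shared.sqlite_migrator.migrations.migration_23 import build_migration_23  # assumed
from invokeai.app.services.shared.sqlite_migrator.migrations.migration_24 import build_migration_24  # assumed


def register_split_migrations(migrator, app_config: InvokeAIAppConfig, logger: Logger) -> None:
    # Registered in order; each step is now independent, so the pure-SQL schema change (22)
    # no longer shares a migration with config rewrites (23) or on-disk file moves (24).
    migrator.register_migration(build_migration_22(app_config, logger))  # drop UNIQUE(name, base, type)
    migrator.register_migration(build_migration_23(app_config, logger))  # configs -> v6.8.0 schemas
    migrator.register_migration(build_migration_24(app_config, logger))  # normalize model file storage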

File tree

3 files changed: +407 -272 lines changed


invokeai/app/services/shared/sqlite_migrator/migrations/migration_22.py

Lines changed: 59 additions & 272 deletions
@@ -1,20 +1,8 @@
-import json
 import sqlite3
 from logging import Logger
-from pathlib import Path
-from typing import Any, NamedTuple
-
-from pydantic import ValidationError
 
 from invokeai.app.services.config import InvokeAIAppConfig
 from invokeai.app.services.shared.sqlite_migrator.sqlite_migrator_common import Migration
-from invokeai.backend.model_manager.configs.factory import AnyModelConfig, AnyModelConfigValidator
-from invokeai.backend.model_manager.taxonomy import BaseModelType, FluxVariantType, ModelType, SchedulerPredictionType
-
-
-class NormalizeResult(NamedTuple):
-    new_relative_path: str | None
-    rollback_ops: list[tuple[Path, Path]]
 
 
 class Migration22Callback:
@@ -24,271 +12,70 @@ def __init__(self, app_config: InvokeAIAppConfig, logger: Logger) -> None:
         self._models_dir = app_config.models_path.resolve()
 
     def __call__(self, cursor: sqlite3.Cursor) -> None:
-        # Grab all model records
-        cursor.execute("SELECT id, config FROM models;")
-        rows = cursor.fetchall()
-
-        for model_id, config_json in rows:
-            try:
-                # Migrate the config JSON to the latest schema
-                config = self._parse_and_migrate_config(config_json)
-            except ValidationError:
-                # This could happen if the config schema changed in a way that makes old configs invalid. Unlikely
-                # for users, more likely for devs testing out migration paths.
-                self._logger.warning("Skipping model %s: invalid config schema", model_id)
-                continue
-            except json.JSONDecodeError:
-                # This should never happen, as we use pydantic to serialize the config to JSON.
-                self._logger.warning("Skipping model %s: invalid config JSON", model_id)
-                continue
-
-            # We'll use a savepoint so we can roll back the database update if something goes wrong, and a simple
-            # rollback of file operations if needed.
-            cursor.execute("SAVEPOINT migrate_model")
-            try:
-                new_relative_path, rollback_ops = self._normalize_model_storage(
-                    key=config.key,
-                    path_value=config.path,
-                )
-            except Exception as err:
-                self._logger.error("Error normalizing model %s: %s", config.key, err)
-                cursor.execute("ROLLBACK TO SAVEPOINT migrate_model")
-                cursor.execute("RELEASE SAVEPOINT migrate_model")
-                continue
-
-            if new_relative_path is None:
-                cursor.execute("RELEASE SAVEPOINT migrate_model")
-                continue
-
-            config.path = new_relative_path
-            try:
-                cursor.execute(
-                    "UPDATE models SET config = ? WHERE id = ?;",
-                    (config.model_dump_json(), model_id),
-                )
-            except Exception as err:
-                self._logger.error("Database update failed for model %s: %s", config.key, err)
-                cursor.execute("ROLLBACK TO SAVEPOINT migrate_model")
-                cursor.execute("RELEASE SAVEPOINT migrate_model")
-                self._rollback_file_ops(rollback_ops)
-                raise
-
-            cursor.execute("RELEASE SAVEPOINT migrate_model")
-
-        self._prune_empty_directories()
-
-    def _parse_and_migrate_config(self, config_json: Any) -> AnyModelConfig:
-        config_dict: dict[str, Any] = json.loads(config_json)
-
-        # In v6.8.0 we made some improvements to the model taxonomy and the model config schemas. There are a changes
-        # we need to make to old configs to bring them up to date.
-
-        base = config_dict.get("base")
-        type = config_dict.get("type")
-        if base == BaseModelType.Flux.value and type == ModelType.Main.value:
-            # Prior to v6.8.0, we used an awkward combination of `config_path` and `variant` to distinguish between FLUX
-            # variants.
-            #
-            # `config_path` was set to one of:
-            # - flux-dev
-            # - flux-dev-fill
-            # - flux-schnell
-            #
-            # `variant` was set to ModelVariantType.Inpaint for FLUX Fill models and ModelVariantType.Normal for all other FLUX
-            # models.
-            #
-            # We now use the `variant` field to directly represent the FLUX variant type, and `config_path` is no longer used.
-
-            # Extract and remove `config_path` if present.
-            config_path = config_dict.pop("config_path", None)
-
-            match config_path:
-                case "flux-dev":
-                    config_dict["variant"] = FluxVariantType.Dev.value
-                case "flux-dev-fill":
-                    config_dict["variant"] = FluxVariantType.DevFill.value
-                case "flux-schnell":
-                    config_dict["variant"] = FluxVariantType.Schnell.value
-                case _:
-                    # Unknown config_path - default to Dev variant
-                    config_dict["variant"] = FluxVariantType.Dev.value
-
-        if (
-            base
-            in {
-                BaseModelType.StableDiffusion1.value,
-                BaseModelType.StableDiffusion2.value,
-                BaseModelType.StableDiffusionXL.value,
-                BaseModelType.StableDiffusionXLRefiner.value,
-            }
-            and type == "main"
-        ):
-            # Prior to v6.8.0, the prediction_type field was optional and would default to Epsilon if not present.
-            # We now make it explicit and always present. Use the existing value if present, otherwise default to
-            # Epsilon, matching the probe logic.
-            #
-            # It's only on SD1.x, SD2.x, and SDXL main models.
-            config_dict["prediction_type"] = config_dict.get("prediction_type", SchedulerPredictionType.Epsilon.value)
-
-        if type == ModelType.CLIPVision.value:
-            # Prior to v6.8.0, some CLIP Vision models were associated with a specific base model architecture:
-            # - CLIP-ViT-bigG-14-laion2B-39B-b160k is the image encoder for SDXL IP Adapter and was associated with SDXL
-            # - CLIP-ViT-H-14-laion2B-s32B-b79K is the image encoder for SD1.5 IP Adapter and was associated with SD1.5
-            #
-            # While this made some sense at the time, it is more correct and flexible to treat CLIP Vision models
-            # as independent of any specific base model architecture.
-            config_dict["base"] = BaseModelType.Any.value
-
-        migrated_config = AnyModelConfigValidator.validate_python(config_dict)
-        return migrated_config
-
-    def _normalize_model_storage(self, key: str, path_value: str) -> NormalizeResult:
-        models_dir = self._models_dir
-        stored_path = Path(path_value)
-
-        relative_path: Path | None
-        if stored_path.is_absolute():
-            # If the stored path is absolute, we need to check if it's inside the models directory, which means it is
-            # an Invoke-managed model. If it's outside, it is user-managed we leave it alone.
-            try:
-                relative_path = stored_path.resolve().relative_to(models_dir)
-            except ValueError:
-                self._logger.info("Leaving user-managed model %s at %s", key, stored_path)
-                return NormalizeResult(new_relative_path=None, rollback_ops=[])
-        else:
-            # Relative paths are always relative to the models directory and thus Invoke-managed.
-            relative_path = stored_path
-
-        # If the relative path is empty, assume something is wrong. Warn and skip.
-        if not relative_path.parts:
-            self._logger.warning("Skipping model %s: empty relative path", key)
-            return NormalizeResult(new_relative_path=None, rollback_ops=[])
-
-        # Sanity check: the path is relative. It should be present in the models directory.
-        absolute_path = (models_dir / relative_path).resolve()
-        if not absolute_path.exists():
-            self._logger.warning(
-                "Skipping model %s: expected model files at %s but nothing was found",
-                key,
-                absolute_path,
-            )
-            return NormalizeResult(new_relative_path=None, rollback_ops=[])
-
-        if relative_path.parts[0] == key:
-            # Already normalized. Still ensure the stored path is relative.
-            normalized_path = relative_path.as_posix()
-            # If the stored path is already the normalized path, no change is needed.
-            new_relative_path = normalized_path if stored_path.as_posix() != normalized_path else None
-            return NormalizeResult(new_relative_path=new_relative_path, rollback_ops=[])
-
-        # We'll store the file operations we perform so we can roll them back if needed.
-        rollback_ops: list[tuple[Path, Path]] = []
-
-        # Destination directory is models_dir/<key> - a flat directory structure.
-        destination_dir = models_dir / key
-
-        try:
-            if absolute_path.is_file():
-                destination_dir.mkdir(parents=True, exist_ok=True)
-                dest_file = destination_dir / absolute_path.name
-                # This really shouldn't happen.
-                if dest_file.exists():
-                    self._logger.warning(
-                        "Destination for model %s already exists at %s; skipping move",
-                        key,
-                        dest_file,
-                    )
-                    return NormalizeResult(new_relative_path=None, rollback_ops=[])
-
-                self._logger.info("Moving model file %s -> %s", absolute_path, dest_file)
-
-                # `Path.rename()` effectively moves the file or directory.
-                absolute_path.rename(dest_file)
-                rollback_ops.append((dest_file, absolute_path))
-
-                return NormalizeResult(
-                    new_relative_path=(Path(key) / dest_file.name).as_posix(),
-                    rollback_ops=rollback_ops,
-                )
-
-            if absolute_path.is_dir():
-                dest_path = destination_dir
-                # This really shouldn't happen.
-                if dest_path.exists():
-                    self._logger.warning(
-                        "Destination directory %s already exists for model %s; skipping",
-                        dest_path,
-                        key,
-                    )
-                    return NormalizeResult(new_relative_path=None, rollback_ops=[])
-
-                self._logger.info("Moving model directory %s -> %s", absolute_path, dest_path)
-
-                # `Path.rename()` effectively moves the file or directory.
-                absolute_path.rename(dest_path)
-                rollback_ops.append((dest_path, absolute_path))
-
-                return NormalizeResult(
-                    new_relative_path=Path(key).as_posix(),
-                    rollback_ops=rollback_ops,
-                )
-
-            # Maybe a broken symlink or something else weird?
-            self._logger.warning("Skipping model %s: path %s is neither a file nor directory", key, absolute_path)
-            return NormalizeResult(new_relative_path=None, rollback_ops=[])
-        except Exception:
-            self._rollback_file_ops(rollback_ops)
-            raise
-
-    def _rollback_file_ops(self, rollback_ops: list[tuple[Path, Path]]) -> None:
-        # This is a super-simple rollback that just reverses the move operations we performed.
-        for source, destination in reversed(rollback_ops):
-            try:
-                if source.exists():
-                    source.rename(destination)
-            except Exception as err:
-                self._logger.error("Failed to rollback move %s -> %s: %s", source, destination, err)
-
-    def _prune_empty_directories(self) -> None:
-        # These directories are system directories we want to keep even if empty. Technically, the app should not
-        # have any problems if these are removed, creating them as needed, but it's cleaner to just leave them alone.
-        keep_names = {"model_images", ".download_cache"}
-        keep_dirs = {self._models_dir / name for name in keep_names}
-        removed_dirs: set[Path] = set()
-
-        # Walk the models directory tree from the bottom up, removing empty directories. We sort by path length
-        # descending to ensure we visit children before parents.
-        for directory in sorted(self._models_dir.rglob("*"), key=lambda p: len(p.parts), reverse=True):
-            if not directory.is_dir():
-                continue
-            if directory == self._models_dir:
-                continue
-            if any(directory == keep or keep in directory.parents for keep in keep_dirs):
-                continue
-
-            try:
-                next(directory.iterdir())
-            except StopIteration:
-                try:
-                    directory.rmdir()
-                    removed_dirs.add(directory)
-                    self._logger.debug("Removed empty directory %s", directory)
-                except OSError:
-                    # Directory not empty (or some other error) - bail out.
-                    self._logger.warning("Failed to prune directory %s - not empty?", directory)
-                    continue
-            except OSError:
-                continue
-
-        self._logger.info("Pruned %d empty directories under %s", len(removed_dirs), self._models_dir)
+        self._logger.info("Removing UNIQUE(name, base, type) constraint from models table")
+
+        # Step 1: Rename the existing models table
+        cursor.execute("ALTER TABLE models RENAME TO models_old;")
+
+        # Step 2: Create the new models table without the UNIQUE(name, base, type) constraint
+        cursor.execute(
+            """--sql
+            CREATE TABLE models (
+                id TEXT NOT NULL PRIMARY KEY,
+                hash TEXT GENERATED ALWAYS as (json_extract(config, '$.hash')) VIRTUAL NOT NULL,
+                base TEXT GENERATED ALWAYS as (json_extract(config, '$.base')) VIRTUAL NOT NULL,
+                type TEXT GENERATED ALWAYS as (json_extract(config, '$.type')) VIRTUAL NOT NULL,
+                path TEXT GENERATED ALWAYS as (json_extract(config, '$.path')) VIRTUAL NOT NULL,
+                format TEXT GENERATED ALWAYS as (json_extract(config, '$.format')) VIRTUAL NOT NULL,
+                name TEXT GENERATED ALWAYS as (json_extract(config, '$.name')) VIRTUAL NOT NULL,
+                description TEXT GENERATED ALWAYS as (json_extract(config, '$.description')) VIRTUAL,
+                source TEXT GENERATED ALWAYS as (json_extract(config, '$.source')) VIRTUAL NOT NULL,
+                source_type TEXT GENERATED ALWAYS as (json_extract(config, '$.source_type')) VIRTUAL NOT NULL,
+                source_api_response TEXT GENERATED ALWAYS as (json_extract(config, '$.source_api_response')) VIRTUAL,
+                trigger_phrases TEXT GENERATED ALWAYS as (json_extract(config, '$.trigger_phrases')) VIRTUAL,
+                file_size INTEGER GENERATED ALWAYS as (json_extract(config, '$.file_size')) VIRTUAL NOT NULL,
+                -- Serialized JSON representation of the whole config object, which will contain additional fields from subclasses
+                config TEXT NOT NULL,
+                created_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')),
+                -- Updated via trigger
+                updated_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')),
+                -- Explicit unique constraint on path
+                UNIQUE(path)
+            );
+            """
+        )
+
+        # Step 3: Copy all data from the old table to the new table
+        cursor.execute("INSERT INTO models SELECT * FROM models_old;")
+
+        # Step 4: Drop the old table
+        cursor.execute("DROP TABLE models_old;")
+
+        # Step 5: Recreate indexes
+        cursor.execute("CREATE INDEX IF NOT EXISTS base_index ON models(base);")
+        cursor.execute("CREATE INDEX IF NOT EXISTS type_index ON models(type);")
+        cursor.execute("CREATE INDEX IF NOT EXISTS name_index ON models(name);")
+
+        # Step 6: Recreate the updated_at trigger
+        cursor.execute(
+            """--sql
+            CREATE TRIGGER models_updated_at
+            AFTER UPDATE
+            ON models FOR EACH ROW
+            BEGIN
+                UPDATE models SET updated_at = STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')
+                WHERE id = old.id;
+            END;
+            """
+        )
 
 
 def build_migration_22(app_config: InvokeAIAppConfig, logger: Logger) -> Migration:
     """Builds the migration object for migrating from version 21 to version 22.
 
-    This migration normalizes on-disk model storage so that each model lives within
-    a directory named by its key inside the Invoke-managed models directory, and
-    updates database records to reference the new relative paths.
+    This migration:
+    - Removes the UNIQUE constraint on the combination of (base, name, type) columns in the models table
+    - Adds an explicit UNIQUE constraint on the path column
     """
 
     return Migration(
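
The rebuilt models table keeps every metadata column as a VIRTUAL generated column extracted from the JSON config blob, and the only remaining hard constraint is UNIQUE(path). A standalone sqlite3 sketch, illustrative only and not InvokeAI code, with the table trimmed to a few columns, showing what that change means in practice:

# Illustrative only: trimmed-down version of the new models table.
# Requires SQLite 3.31+ (generated columns).
import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript(
    """
    CREATE TABLE models (
        id TEXT NOT NULL PRIMARY KEY,
        base TEXT GENERATED ALWAYS AS (json_extract(config, '$.base')) VIRTUAL NOT NULL,
        name TEXT GENERATED ALWAYS AS (json_extract(config, '$.name')) VIRTUAL NOT NULL,
        path TEXT GENERATED ALWAYS AS (json_extract(config, '$.path')) VIRTUAL NOT NULL,
        config TEXT NOT NULL,
        UNIQUE(path)
    );
    """
)

# Two records may now share the same base/name (and type, in the real table)...
conn.execute("INSERT INTO models (id, config) VALUES (?, ?)", ("a", '{"base": "flux", "name": "dev", "path": "a/model.safetensors"}'))
conn.execute("INSERT INTO models (id, config) VALUES (?, ?)", ("b", '{"base": "flux", "name": "dev", "path": "b/model.safetensors"}'))

# ...but not the same path: UNIQUE(path) rejects this insert.
try:
    conn.execute("INSERT INTO models (id, config) VALUES (?, ?)", ("c", '{"base": "flux", "name": "dev", "path": "a/model.safetensors"}'))
except sqlite3.IntegrityError as err:
    print("duplicate path rejected:", err)

# Generated columns read straight out of the JSON and are queryable like normal columns.
print(conn.execute("SELECT id, base, name, path FROM models ORDER BY id").fetchall())

This is the same trade the migration makes at full scale: duplicate model names under one base/type become legal, while two records can never point at the same file path.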
