Skip to content

Commit fab5df9

Browse files
authored
2.3.5 fixes to automatic updating and vae conversions (#3444)
# Minor fixes to the 2.3 branch This is a proposed `2.3.5.post2` to correct the updater problems in 2.3.5.post1 and make the transition to 3.0.0 easier. ## Updating fixed The invokeai-update script will now recognize when the user previously installed xformers and modify the pip install command so as to include xformers as an extra that needs to be updated. This will prevent the problems experienced during the upgrade to `2.3.5.post1` in which torch was updated but xformers wasn't. ## VAE autoconversion improved In addition to looking for instances in which a user has entered a VAE ckpt into the "vae" field directly, the model manager now also handles the case in which the user entered a ckpt (rather than a diffusers model) into the path field. These two cases now both work: ``` vae: models/ldm/stable-diffusion-1/vae-ft-mse-840000-ema-pruned.ckpt ``` and ``` vae: path: models/ldm/stable-diffusion-1/vae-ft-mse-840000-ema-pruned.ckpt ``` In addition, if a 32-bit checkpoint VAE is encountered and the user is using half precision, the VAE is now converted to 16 bits on the fly.
2 parents 0ce628b + 2e21e5b commit fab5df9

File tree

3 files changed

+25
-9
lines changed

3 files changed

+25
-9
lines changed

ldm/invoke/_version.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,3 @@
1-
__version__='2.3.5.post1'
1+
__version__='2.3.5.post2'
2+
23

ldm/invoke/config/invokeai_update.py

Lines changed: 15 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@
66
import platform
77
import psutil
88
import requests
9+
import pkg_resources
910
from rich import box, print
1011
from rich.console import Console, group
1112
from rich.panel import Panel
@@ -72,6 +73,15 @@ def text():
7273
)
7374
console.line()
7475

76+
def get_extras():
77+
extras = ''
78+
try:
79+
dist = pkg_resources.get_distribution('xformers')
80+
extras = '[xformers]'
81+
except pkg_resources.DistributionNotFound:
82+
pass
83+
return extras
84+
7585
def main():
7686
versions = get_versions()
7787
if invokeai_is_running():
@@ -94,13 +104,15 @@ def main():
94104
elif choice=='4':
95105
branch = Prompt.ask('Enter an InvokeAI branch name')
96106

107+
extras = get_extras()
108+
97109
print(f':crossed_fingers: Upgrading to [yellow]{tag if tag else release}[/yellow]')
98110
if release:
99-
cmd = f'pip install {INVOKE_AI_SRC}/{release}.zip --use-pep517 --upgrade'
111+
cmd = f"pip install 'invokeai{extras} @ {INVOKE_AI_SRC}/{release}.zip' --use-pep517 --upgrade"
100112
elif tag:
101-
cmd = f'pip install {INVOKE_AI_TAG}/{tag}.zip --use-pep517 --upgrade'
113+
cmd = f"pip install 'invokeai{extras} @ {INVOKE_AI_TAG}/{tag}.zip' --use-pep517 --upgrade"
102114
else:
103-
cmd = f'pip install {INVOKE_AI_BRANCH}/{branch}.zip --use-pep517 --upgrade'
115+
cmd = f"pip install 'invokeai{extras} @ {INVOKE_AI_BRANCH}/{branch}.zip' --use-pep517 --upgrade"
104116
print('')
105117
print('')
106118
if os.system(cmd)==0:

ldm/invoke/model_manager.py

Lines changed: 8 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1229,12 +1229,16 @@ def _scan_for_matching_file(
12291229
return vae_path
12301230

12311231
def _load_vae(self, vae_config) -> AutoencoderKL:
1232-
1232+
using_fp16 = self.precision == "float16"
1233+
dtype = torch.float16 if using_fp16 else torch.float32
1234+
12331235
# Handle the common case of a user shoving a VAE .ckpt into
12341236
# the vae field for a diffusers. We convert it into diffusers
12351237
# format and use it.
1236-
if type(vae_config) in [str,Path]:
1237-
return self.convert_vae(vae_config)
1238+
if isinstance(vae_config,(str,Path)):
1239+
return self.convert_vae(vae_config).to(dtype=dtype)
1240+
elif isinstance(vae_config,DictConfig) and (vae_path := vae_config.get('path')):
1241+
return self.convert_vae(vae_path).to(dtype=dtype)
12381242

12391243
vae_args = {}
12401244
try:
@@ -1243,7 +1247,6 @@ def _load_vae(self, vae_config) -> AutoencoderKL:
12431247
return None
12441248
if name_or_path is None:
12451249
return None
1246-
using_fp16 = self.precision == "float16"
12471250

12481251
vae_args.update(
12491252
cache_dir=global_cache_dir("hub"),
@@ -1285,7 +1288,7 @@ def _load_vae(self, vae_config) -> AutoencoderKL:
12851288

12861289
@staticmethod
12871290
def convert_vae(vae_path: Union[Path,str])->AutoencoderKL:
1288-
print(f" | A checkpoint VAE was detected. Converting to diffusers format.")
1291+
print(" | A checkpoint VAE was detected. Converting to diffusers format.")
12891292
vae_path = Path(Globals.root,vae_path).resolve()
12901293

12911294
from .ckpt_to_diffuser import (

0 commit comments

Comments
 (0)