 import invokeai.frontend.dist as frontend
 from ldm.generate import Generate
 from ldm.invoke.args import Args, APP_ID, APP_VERSION, calculate_init_img_hash
-from ldm.invoke.concepts_lib import HuggingFaceConceptsLibrary
+from ldm.invoke.concepts_lib import get_hf_concepts_lib
 from ldm.invoke.conditioning import (
     get_tokens_for_prompt_object,
     get_prompt_structure,
     split_weighted_subprompts,
-    get_tokenizer,
 )
 from ldm.invoke.generator.diffusers_pipeline import PipelineIntermediateState
 from ldm.invoke.generator.inpaint import infill_methods
 from ldm.invoke.globals import (
     Globals,
     global_converted_ckpts_dir,
     global_models_dir,
-    global_lora_models_dir,
 )
 from ldm.invoke.pngwriter import PngWriter, retrieve_metadata
 from compel.prompt_parser import Blend
 from ldm.invoke.merge_diffusers import merge_diffusion_models
+from ldm.modules.lora_manager import LoraManager

 # Loading Arguments
 opt = Args()
@@ -524,20 +523,12 @@ def merge_diffusers_models(model_merge_info: dict):
         @socketio.on("getLoraModels")
         def get_lora_models():
             try:
-                lora_path = global_lora_models_dir()
-                loras = []
-                for root, _, files in os.walk(lora_path):
-                    models = [
-                        Path(root, x)
-                        for x in files
-                        if Path(x).suffix in [".ckpt", ".pt", ".safetensors"]
-                    ]
-                    loras = loras + models
-
+                model = self.generate.model
+                lora_mgr = LoraManager(model)
+                loras = lora_mgr.list_compatible_loras()
                 found_loras = []
-                for lora in sorted(loras, key=lambda s: s.stem.lower()):
-                    location = str(lora.resolve()).replace("\\", "/")
-                    found_loras.append({"name": lora.stem, "location": location})
+                for lora in sorted(loras, key=str.casefold):
+                    found_loras.append({"name": lora, "location": str(loras[lora])})
                 socketio.emit("foundLoras", found_loras)
             except Exception as e:
                 self.handle_exceptions(e)
@@ -547,7 +538,7 @@ def get_ti_triggers():
             try:
                 local_triggers = self.generate.model.textual_inversion_manager.get_all_trigger_strings()
                 locals = [{'name': x} for x in sorted(local_triggers, key=str.casefold)]
-                concepts = HuggingFaceConceptsLibrary().list_concepts(minimum_likes=5)
+                concepts = get_hf_concepts_lib().list_concepts(minimum_likes=5)
                 concepts = [{'name': f'<{x}>'} for x in sorted(concepts, key=str.casefold) if f'<{x}>' not in local_triggers]
                 socketio.emit("foundTextualInversionTriggers", {'local_triggers': locals, 'huggingface_concepts': concepts})
             except Exception as e:
@@ -1314,7 +1305,7 @@ def image_done(image, seed, first_seed, attention_maps_image=None):
                 None
                 if type(parsed_prompt) is Blend
                 else get_tokens_for_prompt_object(
-                    get_tokenizer(self.generate.model), parsed_prompt
+                    self.generate.model.tokenizer, parsed_prompt
                 )
             )
             attention_maps_image_base64_url = (