@@ -32,6 +32,7 @@
 import llama_cpp.llama_types as llama_types
 import llama_cpp.llama_grammar as llama_grammar
 
+from ._ggml import GGMLLogLevel
 from ._logger import logger
 from ._utils import suppress_stdout_stderr, Singleton
 
@@ -2776,11 +2777,13 @@ class Llava15ChatHandler:
         "{% endif %}"
     )
 
-    def __init__(self, clip_model_path: str, verbose: bool = True):
+    def __init__(self, clip_model_path: str, use_gpu: bool = True, verbosity: GGMLLogLevel = GGMLLogLevel.GGML_LOG_LEVEL_DEBUG):
         import llama_cpp.llava_cpp as llava_cpp
 
         self.clip_model_path = clip_model_path
-        self.verbose = verbose
+        self.ctx_clip_params = self._llava_cpp.clip_context_params
+        self.ctx_clip_params.use_gpu = use_gpu
+        self.ctx_clip_params.ggml_log_level = verbosity
 
         self._llava_cpp = llava_cpp  # TODO: Fix
         self._exit_stack = ExitStack()
@@ -2792,25 +2795,22 @@ def __init__(self, clip_model_path: str, verbose: bool = True):
         if not os.path.exists(clip_model_path):
             raise ValueError(f"Clip model path does not exist: {clip_model_path}")
 
-        with suppress_stdout_stderr(disable=self.verbose):
-            clip_ctx = self._llava_cpp.clip_model_load(self.clip_model_path.encode(), 0)
+        clip_ctx = self._llava_cpp.clip_init(self.clip_model_path.encode(), self.ctx_clip_params)
 
-            if clip_ctx is None:
-                raise ValueError(f"Failed to load clip model: {clip_model_path}")
+        if clip_ctx is None:
+            raise ValueError(f"Failed to load clip model: {clip_model_path}")
 
-            self.clip_ctx = clip_ctx
+        self.clip_ctx = clip_ctx
 
-            def clip_free():
-                with suppress_stdout_stderr(disable=self.verbose):
-                    self._llava_cpp.clip_free(self.clip_ctx)
+        def clip_free():
+            self._llava_cpp.clip_free(self.clip_ctx)
 
-            self._exit_stack.callback(clip_free)
+        self._exit_stack.callback(clip_free)
 
         def last_image_embed_free():
-            with suppress_stdout_stderr(disable=self.verbose):
-                if self._last_image_embed is not None:
-                    self._llava_cpp.llava_image_embed_free(self._last_image_embed)
-                    self._last_image_embed = None
+            if self._last_image_embed is not None:
+                self._llava_cpp.llava_image_embed_free(self._last_image_embed)
+                self._last_image_embed = None
 
         self._exit_stack.callback(last_image_embed_free)
0 commit comments