File tree: graphrag/language_model/providers/litellm — 1 file changed, +11 −5 lines changed
lines changed Original file line number Diff line number Diff line change 3333def get_cache_key (
3434 model_config : "LanguageModelConfig" ,
3535 prefix : str ,
36+ messages : str | None = None ,
37+ input : str | None = None ,
3638 ** kwargs : Any ,
3739) -> str :
3840 """Generate a cache key based on the model configuration and input arguments.
@@ -54,12 +56,16 @@ def get_cache_key(
5456 "parameters" : _get_parameters (model_config , ** kwargs ),
5557 }
5658
57- if "messages" in kwargs :
58- cache_key ["messages" ] = kwargs ["messages" ]
59- elif "input" in kwargs :
60- cache_key ["input" ] = kwargs ["input" ]
59+ if messages is not None and input is not None :
60+ msg = "Only one of 'messages' or 'input' should be provided."
61+ raise ValueError (msg )
62+
63+ if messages is not None :
64+ cache_key ["messages" ] = messages
65+ elif input is not None :
66+ cache_key ["input" ] = input
6167 else :
62- msg = "Either 'messages' or 'input' must be provided in kwargs ."
68+ msg = "Either 'messages' or 'input' must be provided."
6369 raise ValueError (msg )
6470
6571 data_hash = _hash (json .dumps (cache_key , sort_keys = True ))
0 commit comments