Skip to content

Commit 9d15938

Browse files
committed
Update cache key.
1 parent f9a4766 commit 9d15938

File tree

1 file changed

+11
-5
lines changed

1 file changed

+11
-5
lines changed

graphrag/language_model/providers/litellm/get_cache_key.py

Lines changed: 11 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -33,6 +33,8 @@
3333
def get_cache_key(
3434
model_config: "LanguageModelConfig",
3535
prefix: str,
36+
messages: str | None = None,
37+
input: str | None = None,
3638
**kwargs: Any,
3739
) -> str:
3840
"""Generate a cache key based on the model configuration and input arguments.
@@ -54,12 +56,16 @@ def get_cache_key(
5456
"parameters": _get_parameters(model_config, **kwargs),
5557
}
5658

57-
if "messages" in kwargs:
58-
cache_key["messages"] = kwargs["messages"]
59-
elif "input" in kwargs:
60-
cache_key["input"] = kwargs["input"]
59+
if messages is not None and input is not None:
60+
msg = "Only one of 'messages' or 'input' should be provided."
61+
raise ValueError(msg)
62+
63+
if messages is not None:
64+
cache_key["messages"] = messages
65+
elif input is not None:
66+
cache_key["input"] = input
6167
else:
62-
msg = "Either 'messages' or 'input' must be provided in kwargs."
68+
msg = "Either 'messages' or 'input' must be provided."
6369
raise ValueError(msg)
6470

6571
data_hash = _hash(json.dumps(cache_key, sort_keys=True))

0 commit comments

Comments (0)