We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 24fc387 commit 5be0efa (Copy full SHA for 5be0efa)
llama_cpp/llama.py
@@ -33,12 +33,10 @@ def _find_key(
33
return k
34
return None
35
36
- def __getitem__(
37
- self, key: Sequence[llama_cpp.llama_token]
38
- ) -> Optional["LlamaState"]:
+ def __getitem__(self, key: Sequence[llama_cpp.llama_token]) -> "LlamaState":
39
_key = self._find_key(tuple(key))
40
if _key is None:
41
- return None
+ raise KeyError(f"Key not found: {key}")
42
return self.cache_state[_key]
43
44
def __contains__(self, key: Sequence[llama_cpp.llama_token]) -> bool:
0 commit comments