
Commit 5be0efa

Cache should raise KeyError when key is missing
1 parent 24fc387 commit 5be0efa

File tree

1 file changed: 2 additions, 4 deletions

llama_cpp/llama.py

Lines changed: 2 additions & 4 deletions
@@ -33,12 +33,10 @@ def _find_key(
                 return k
         return None
 
-    def __getitem__(
-        self, key: Sequence[llama_cpp.llama_token]
-    ) -> Optional["LlamaState"]:
+    def __getitem__(self, key: Sequence[llama_cpp.llama_token]) -> "LlamaState":
         _key = self._find_key(tuple(key))
         if _key is None:
-            return None
+            raise KeyError(f"Key not found: {key}")
         return self.cache_state[_key]
 
     def __contains__(self, key: Sequence[llama_cpp.llama_token]) -> bool:
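
Why raise KeyError instead of returning None? It makes the cache follow Python's mapping protocol, so callers can use the same idioms they would with a plain dict: an "in" check via __contains__, or try/except KeyError on lookup, with no ambiguity when None happens to be a legitimate cached value. Below is a minimal, self-contained sketch of that behavior; the TokenCache name and the exact-match _find_key are assumptions for illustration only (the real class in llama_cpp/llama.py stores LlamaState objects and its _find_key may match cached token sequences differently, e.g. by prefix).

from typing import Any, Dict, Optional, Sequence, Tuple

class TokenCache:
    """Hypothetical simplified stand-in for the cache in llama_cpp/llama.py."""

    def __init__(self) -> None:
        self.cache_state: Dict[Tuple[int, ...], Any] = {}

    def _find_key(self, key: Tuple[int, ...]) -> Optional[Tuple[int, ...]]:
        # Assumption: exact-match lookup; the upstream _find_key may instead
        # scan cached token sequences for a matching prefix.
        return key if key in self.cache_state else None

    def __getitem__(self, key: Sequence[int]) -> Any:
        _key = self._find_key(tuple(key))
        if _key is None:
            # Mapping protocol: signal a miss with KeyError, not None.
            raise KeyError(f"Key not found: {key}")
        return self.cache_state[_key]

    def __contains__(self, key: Sequence[int]) -> bool:
        return self._find_key(tuple(key)) is not None

    def __setitem__(self, key: Sequence[int], value: Any) -> None:
        self.cache_state[tuple(key)] = value

cache = TokenCache()
cache[[1, 2, 3]] = "state"

# Both dict idioms now work as expected:
if [1, 2, 3] in cache:
    print(cache[[1, 2, 3]])

try:
    cache[[9, 9, 9]]
except KeyError as exc:
    print("cache miss:", exc)

Note that the return annotation also tightens from Optional["LlamaState"] to "LlamaState": since a miss now raises, the method can only ever return a real state object.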
