@@ -62,20 +62,31 @@ def set_cache_and_teardown(request: FixtureRequest) -> Generator[None, None, None]:
         raise ValueError("Cache not set. This should never happen.")
 
 
-async def test_llm_caching() -> None:
+def test_llm_caching() -> None:
     prompt = "How are you?"
     response = "Test response"
     cached_response = "Cached test response"
     llm = FakeListLLM(responses=[response])
     if llm_cache := get_llm_cache():
-        # sync test
         llm_cache.update(
             prompt=prompt,
             llm_string=create_llm_string(llm),
             return_val=[Generation(text=cached_response)],
         )
         assert llm.invoke(prompt) == cached_response
-        # async test
+    else:
+        raise ValueError(
+            "The cache is not set. This should never happen, as the pytest fixture "
+            "`set_cache_and_teardown` always sets the cache."
+        )
+
+
+async def test_llm_caching_async() -> None:
+    prompt = "How are you?"
+    response = "Test response"
+    cached_response = "Cached test response"
+    llm = FakeListLLM(responses=[response])
+    if llm_cache := get_llm_cache():
         await llm_cache.aupdate(
             prompt=prompt,
             llm_string=create_llm_string(llm),
@@ -110,14 +121,13 @@ def test_old_sqlite_llm_caching() -> None:
         assert llm.invoke(prompt) == cached_response
 
 
-async def test_chat_model_caching() -> None:
+def test_chat_model_caching() -> None:
     prompt: List[BaseMessage] = [HumanMessage(content="How are you?")]
     response = "Test response"
     cached_response = "Cached test response"
     cached_message = AIMessage(content=cached_response)
     llm = FakeListChatModel(responses=[response])
     if llm_cache := get_llm_cache():
-        # sync test
         llm_cache.update(
             prompt=dumps(prompt),
             llm_string=llm._get_llm_string(),
@@ -126,8 +136,20 @@ async def test_chat_model_caching() -> None:
         result = llm.invoke(prompt)
         assert isinstance(result, AIMessage)
         assert result.content == cached_response
+    else:
+        raise ValueError(
+            "The cache is not set. This should never happen, as the pytest fixture "
+            "`set_cache_and_teardown` always sets the cache."
+        )
+
 
-        # async test
+async def test_chat_model_caching_async() -> None:
+    prompt: List[BaseMessage] = [HumanMessage(content="How are you?")]
+    response = "Test response"
+    cached_response = "Cached test response"
+    cached_message = AIMessage(content=cached_response)
+    llm = FakeListChatModel(responses=[response])
+    if llm_cache := get_llm_cache():
         await llm_cache.aupdate(
             prompt=dumps(prompt),
             llm_string=llm._get_llm_string(),
@@ -143,14 +165,13 @@ async def test_chat_model_caching() -> None:
         )
 
 
-async def test_chat_model_caching_params() -> None:
+def test_chat_model_caching_params() -> None:
     prompt: List[BaseMessage] = [HumanMessage(content="How are you?")]
     response = "Test response"
     cached_response = "Cached test response"
     cached_message = AIMessage(content=cached_response)
     llm = FakeListChatModel(responses=[response])
     if llm_cache := get_llm_cache():
-        # sync test
         llm_cache.update(
             prompt=dumps(prompt),
             llm_string=llm._get_llm_string(functions=[]),
@@ -162,8 +183,20 @@ async def test_chat_model_caching_params() -> None:
         assert result.content == cached_response
         assert isinstance(result_no_params, AIMessage)
         assert result_no_params.content == response
+    else:
+        raise ValueError(
+            "The cache is not set. This should never happen, as the pytest fixture "
+            "`set_cache_and_teardown` always sets the cache."
+        )
+
 
-        # async test
+async def test_chat_model_caching_params_async() -> None:
+    prompt: List[BaseMessage] = [HumanMessage(content="How are you?")]
+    response = "Test response"
+    cached_response = "Cached test response"
+    cached_message = AIMessage(content=cached_response)
+    llm = FakeListChatModel(responses=[response])
+    if llm_cache := get_llm_cache():
         await llm_cache.aupdate(
             prompt=dumps(prompt),
             llm_string=llm._get_llm_string(functions=[]),
@@ -182,13 +215,12 @@ async def test_chat_model_caching_params() -> None:
         )
 
 
-async def test_llm_cache_clear() -> None:
+def test_llm_cache_clear() -> None:
     prompt = "How are you?"
     expected_response = "Test response"
     cached_response = "Cached test response"
     llm = FakeListLLM(responses=[expected_response])
     if llm_cache := get_llm_cache():
-        # sync test
         llm_cache.update(
             prompt=prompt,
             llm_string=create_llm_string(llm),
@@ -197,8 +229,19 @@ async def test_llm_cache_clear() -> None:
         llm_cache.clear()
         response = llm.invoke(prompt)
         assert response == expected_response
+    else:
+        raise ValueError(
+            "The cache is not set. This should never happen, as the pytest fixture "
+            "`set_cache_and_teardown` always sets the cache."
+        )
+
 
-        # async test
+async def test_llm_cache_clear_async() -> None:
+    prompt = "How are you?"
+    expected_response = "Test response"
+    cached_response = "Cached test response"
+    llm = FakeListLLM(responses=[expected_response])
+    if llm_cache := get_llm_cache():
         await llm_cache.aupdate(
             prompt=prompt,
             llm_string=create_llm_string(llm),
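For context, every test in this diff depends on the autouse pytest fixture `set_cache_and_teardown` referenced in the first hunk header and in the new error messages. A minimal sketch of what such a fixture might look like follows; it is an illustration only, not part of this diff — the import paths, the `InMemoryCache` backend, and the lack of parametrization are assumptions.

# Hypothetical sketch of the autouse fixture the tests above rely on.
# Import paths and the InMemoryCache backend are assumptions, not taken from this diff.
from typing import Generator

import pytest
from pytest import FixtureRequest

from langchain_core.caches import InMemoryCache
from langchain_core.globals import get_llm_cache, set_llm_cache


@pytest.fixture(autouse=True)
def set_cache_and_teardown(request: FixtureRequest) -> Generator[None, None, None]:
    # `request` mirrors the signature shown in the diff; the real fixture
    # presumably uses it to parametrize over several cache backends.
    # Install a fresh cache before each test ...
    set_llm_cache(InMemoryCache())
    yield
    # ... and clear it afterwards so cached generations cannot leak between tests.
    if llm_cache := get_llm_cache():
        llm_cache.clear()
    else:
        raise ValueError("Cache not set. This should never happen.")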