Commit 93346bc

Add ruff rules for tryceratops (#559)
1 parent bdd1be2 commit 93346bc

File tree: 11 files changed (+37, -28 lines)


examples/evaluation/tru_shared.py

Lines changed: 7 additions & 5 deletions
@@ -129,7 +129,7 @@ def get_recorder(
             feedback_mode=feedback_mode,
         )
     else:
-        raise Exception(f"Unknown framework: {framework} specified for get_recorder()")
+        raise ValueError(f"Unknown framework: {framework} specified for get_recorder()")


 def get_azure_chat_model(
@@ -152,7 +152,7 @@ def get_azure_chat_model(
             temperature=temperature,
         )
     else:
-        raise Exception(f"Unknown framework: {framework} specified for getChatModel()")
+        raise ValueError(f"Unknown framework: {framework} specified for getChatModel()")


 def get_azure_embeddings_model(framework: Framework):
@@ -169,7 +169,7 @@ def get_azure_embeddings_model(framework: Framework):
             temperature=temperature,
         )
     else:
-        raise Exception(
+        raise ValueError(
             f"Unknown framework: {framework} specified for getEmbeddingsModel()"
         )

@@ -190,7 +190,7 @@ def get_astra_vector_store(framework: Framework, collection_name: str):
             embedding_dimension=1536,
         )
     else:
-        raise Exception(
+        raise ValueError(
             f"Unknown framework: {framework} specified for get_astra_vector_store()"
         )

@@ -201,7 +201,9 @@ def execute_query(framework: Framework, pipeline, query):
     elif framework == Framework.LLAMA_INDEX:
         pipeline.query(query)
     else:
-        raise Exception(f"Unknown framework: {framework} specified for execute_query()")
+        raise ValueError(
+            f"Unknown framework: {framework} specified for execute_query()"
+        )


 # runs the pipeline across all queries in all known datasets
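Every branch in tru_shared.py that rejects an unknown framework now raises ValueError instead of a bare Exception, in line with the tryceratops rule against raising the vanilla Exception class (TRY002). A minimal, hypothetical sketch of the pattern (the dispatch function below is made up, not the real get_recorder()):

# Hypothetical example: an unrecognized enum member is an invalid argument
# value, so ValueError is more precise than the catch-all Exception that
# ruff's TRY002 check flags.
from enum import Enum


class Framework(Enum):
    LANGCHAIN = "langchain"
    LLAMA_INDEX = "llama_index"


def recorder_label(framework: Framework) -> str:
    if framework == Framework.LANGCHAIN:
        return "langchain recorder"
    if framework == Framework.LLAMA_INDEX:
        return "llama_index recorder"
    raise ValueError(f"Unknown framework: {framework} specified for recorder_label()")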

examples/notebooks/conftest.py

Lines changed: 1 addition & 1 deletion
@@ -34,7 +34,7 @@ def try_delete_with_backoff(collection: str, sleep=1, max_tries=2):
     except Exception as e:
         max_tries -= 1
         if max_tries < 0:
-            raise e
+            raise

         logging.warning(f"An exception occurred deleting collection {collection}: {e}")
         time.sleep(sleep)
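This file and astradb_vector_store_handler.py further down make the same fix: re-raising with a bare raise instead of raise e, which keeps the original traceback (ruff's TRY201 rule). A rough sketch of the retry shape, with the surrounding loop assumed since the diff does not show it:

# Hypothetical sketch; only the except block mirrors the diff above.
import logging
import time


def try_delete_with_backoff(delete_fn, collection: str, sleep: float = 1, max_tries: int = 2):
    while True:
        try:
            delete_fn(collection)
        except Exception as e:
            max_tries -= 1
            if max_tries < 0:
                raise  # re-raise the active exception, traceback intact
            logging.warning(f"An exception occurred deleting collection {collection}: {e}")
            time.sleep(sleep)
        else:
            return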

libs/colbert/ragstack_colbert/cassandra_database.py

Lines changed: 12 additions & 8 deletions
@@ -20,6 +20,10 @@
 from .objects import Chunk, Vector


+class CassandraDatabaseError(Exception):
+    pass
+
+
 class CassandraDatabase(BaseDatabase):
     """
     An implementation of the BaseDatabase abstract base class using Cassandra as the
@@ -167,7 +171,7 @@ def add_chunks(self, chunks: List[Chunk]) -> List[Tuple[str, int]]:
                 success_chunks.append((doc_id, chunk_id))

         if len(failed_chunks) > 0:
-            raise Exception(
+            raise CassandraDatabaseError(
                 f"add failed for these chunks: {failed_chunks}. "
                 f"See error logs for more info."
             )
@@ -198,9 +202,9 @@ async def _limited_put(
                 await self._table.aput(
                     partition_id=doc_id, row_id=row_id, vector=vector
                 )
-                return doc_id, chunk_id, embedding_id, None
             except Exception as e:
                 return doc_id, chunk_id, embedding_id, e
+            return doc_id, chunk_id, embedding_id, None

     async def aadd_chunks(
         self, chunks: List[Chunk], concurrent_inserts: Optional[int] = 100
@@ -269,7 +273,7 @@ async def aadd_chunks(
                 failed_chunks.append((doc_id, chunk_id))

         if len(failed_chunks) > 0:
-            raise Exception(
+            raise CassandraDatabaseError(
                 f"add failed for these chunks: {failed_chunks}. "
                 f"See error logs for more info."
             )
@@ -293,12 +297,12 @@ def delete_chunks(self, doc_ids: List[str]) -> bool:
         for doc_id in doc_ids:
             try:
                 self._table.delete_partition(partition_id=doc_id)
-            except Exception as exp:
-                logging.error(f"issue on delete of document: {doc_id}: {exp}")
+            except Exception:
+                logging.exception(f"issue on delete of document: {doc_id}")
                 failed_docs.append(doc_id)

         if len(failed_docs) > 0:
-            raise Exception(
+            raise CassandraDatabaseError(
                 f"delete failed for these docs: {failed_docs}. "
                 f"See error logs for more info."
             )
@@ -313,9 +317,9 @@ async def _limited_delete(
         async with sem:
             try:
                 await self._table.adelete_partition(partition_id=doc_id)
-                return doc_id, None
             except Exception as e:
                 return doc_id, e
+            return doc_id, None

     async def adelete_chunks(
         self, doc_ids: List[str], concurrent_deletes: Optional[int] = 100
@@ -354,7 +358,7 @@ async def adelete_chunks(
             failed_docs.append(doc_id)

         if len(failed_docs) > 0:
-            raise Exception(
+            raise CassandraDatabaseError(
                 f"delete failed for these docs: {failed_docs}. "
                 f"See error logs for more info."
             )
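Three tryceratops-driven changes land in this file: a project-specific CassandraDatabaseError replaces bare Exception (TRY002), the success-path return moves out of the try body (TRY300), and logging.exception replaces logging.error so the traceback is recorded automatically (TRY400). A condensed, hypothetical sketch combining them; the table argument and helper names are stand-ins, not the real CassandraDatabase API:

import logging
from typing import List, Optional, Tuple


class CassandraDatabaseError(Exception):
    pass


def delete_one(table, doc_id: str) -> Tuple[str, Optional[Exception]]:
    try:
        table.delete_partition(partition_id=doc_id)
    except Exception as e:
        # logging.exception logs at ERROR level and appends the current traceback
        logging.exception(f"issue on delete of document: {doc_id}")
        return doc_id, e
    # success path sits after the try/except block (TRY300)
    return doc_id, None


def delete_many(table, doc_ids: List[str]) -> None:
    failed_docs = [doc_id for doc_id, err in (delete_one(table, d) for d in doc_ids) if err]
    if failed_docs:
        raise CassandraDatabaseError(
            f"delete failed for these docs: {failed_docs}. See error logs for more info."
        )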

libs/e2e-tests/e2e_tests/llama_index/test_astra.py

Lines changed: 1 addition & 1 deletion
@@ -136,7 +136,7 @@ def verify_document(document, expected_content, expected_metadata):
         # metadata is not returned by LlamaIndex
         # assert document.metadata == expected_metadata
     else:
-        raise Exception(
+        raise TypeError(
             "document is not of type NodeWithScore but of type " + str(type(document))
         )

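The failure here is about the object's type rather than its value, so TypeError is the conventional exception (ruff's TRY004 guidance); knowledge_graph.py below makes the same ValueError-to-TypeError switch. A small hypothetical illustration, since the real check inspects a LlamaIndex NodeWithScore:

def verify_payload(payload, expected_content: str) -> None:
    if not isinstance(payload, dict):
        # wrong type, not a wrong value -> TypeError
        raise TypeError("payload is not of type dict but of type " + str(type(payload)))
    assert payload.get("content") == expected_content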

libs/e2e-tests/e2e_tests/test_utils/astradb_vector_store_handler.py

Lines changed: 1 addition & 1 deletion
@@ -233,7 +233,7 @@ def try_delete_with_backoff(collection: str, sleep=1, max_tries=5):
     except Exception as e:
         max_tries -= 1
         if max_tries < 0:
-            raise e
+            raise

         logging.warning(f"An exception occurred deleting collection {collection}: {e}")
         time.sleep(sleep)

libs/e2e-tests/e2e_tests/test_utils/tracing.py

Lines changed: 2 additions & 2 deletions
@@ -14,9 +14,9 @@ def record_langsmith_sharelink(
         sharelink = LANGSMITH_CLIENT.share_run(run_id)
         record_property("langsmith_url", sharelink)
         logging.info(f"recorded langsmith link: {sharelink}")
-    except Exception as e:
+    except Exception:
         # runs may take a while to be discoverable
         if tries < 0:
-            raise e
+            raise
         time.sleep(5)
         record_langsmith_sharelink(run_id, record_property, tries - 1)

libs/knowledge-graph/ragstack_knowledge_graph/knowledge_graph.py

Lines changed: 1 addition & 1 deletion
@@ -226,7 +226,7 @@ def insert(
                 ),
             )
         else:
-            raise ValueError(f"Unsupported element type: {element}")
+            raise TypeError(f"Unsupported element type: {element}")

         # TODO: Support concurrent execution of these statements.
         self._session.execute(batch_statement)

libs/knowledge-graph/tests/conftest.py

Lines changed: 3 additions & 3 deletions
@@ -57,11 +57,11 @@ def db_session(cassandra_port: int) -> Session:
 def llm() -> BaseChatModel:
     try:
         from langchain_openai import ChatOpenAI
-
-        model = ChatOpenAI(model_name="gpt-4o", temperature=0.0)
-        return model
     except ValueError:
         pytest.skip("Unable to create OpenAI model")
+    else:
+        model = ChatOpenAI(model_name="gpt-4o", temperature=0.0)
+        return model


 class DataFixture:
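Moving the success-path statements into an else block (ruff's TRY300 / try-consider-else) narrows the try body to the one statement expected to raise. A generic, hypothetical fixture showing the shape; the optional dependency used here is made up and is not part of this repo:

import pytest


@pytest.fixture
def fast_json():
    try:
        import orjson  # the only statement we expect to fail
    except ImportError:
        pytest.skip("orjson is not installed")
    else:
        # runs only when the import succeeded; its own errors are not swallowed
        return orjson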

libs/knowledge-store/ragstack_knowledge_store/math.py

Lines changed: 5 additions & 5 deletions
@@ -26,11 +26,6 @@ def cosine_similarity(x: Matrix, y: Matrix) -> np.ndarray:
     )
     try:
         import simsimd as simd
-
-        x = np.array(x, dtype=np.float32)
-        y = np.array(y, dtype=np.float32)
-        z = 1 - np.array(simd.cdist(x, y, metric="cosine"))
-        return z
     except ImportError:
         logger.debug(
             "Unable to import simsimd, defaulting to NumPy implementation. If you want "
@@ -43,3 +38,8 @@ def cosine_similarity(x: Matrix, y: Matrix) -> np.ndarray:
     similarity = np.dot(x, y.T) / np.outer(x_norm, y_norm)
     similarity[np.isnan(similarity) | np.isinf(similarity)] = 0.0
     return similarity
+    else:
+        x = np.array(x, dtype=np.float32)
+        y = np.array(y, dtype=np.float32)
+        z = 1 - np.array(simd.cdist(x, y, metric="cosine"))
+        return z
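For reference, the NumPy fallback kept in the context lines computes row-wise cosine similarity as a dot product normalized by the outer product of the row norms; only the simsimd fast path moved into the else block. A self-contained sketch of that fallback, with the x_norm/y_norm lines assumed since the diff does not show them:

import numpy as np


def cosine_similarity_numpy(x, y) -> np.ndarray:
    x = np.asarray(x, dtype=np.float64)
    y = np.asarray(y, dtype=np.float64)
    x_norm = np.linalg.norm(x, axis=1)
    y_norm = np.linalg.norm(y, axis=1)
    similarity = np.dot(x, y.T) / np.outer(x_norm, y_norm)
    # zero-norm rows produce NaN/inf entries; zero them out as the original does
    similarity[np.isnan(similarity) | np.isinf(similarity)] = 0.0
    return similarity


# cosine_similarity_numpy([[1.0, 0.0]], [[1.0, 0.0], [0.0, 1.0]]) -> [[1.0, 0.0]]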

libs/llamaindex/tests/unit_tests/test_import.py

Lines changed: 1 addition & 1 deletion
@@ -14,7 +14,7 @@ def test_import():
 def check_no_import(fn: callable):
     try:
         fn()
-        raise Exception("Should have failed to import")
+        raise RuntimeError("Should have failed to import")
     except ImportError:
         pass

