
Commit 3f8af19

Add ruff rule for Error Messages (EM) (#646)
1 parent 7045d55 commit 3f8af19

File tree

27 files changed: +125 / -102 lines
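The change applied throughout this commit follows ruff's flake8-errmsg ("EM") rules, which flag string literals (EM101) and f-strings (EM102) passed directly to an exception constructor; the suggested fix is to bind the message to a variable first, so the message text is not repeated in the source line shown in the traceback. A minimal before/after sketch of the pattern (the function name and message below are illustrative, not taken from the repository):

# Before: flagged by EM102 (f-string passed straight to the exception).
def check_framework(framework: str) -> None:
    if framework not in ("langchain", "llama_index"):
        raise ValueError(f"Unknown framework: {framework}")

# After: assign the message to a variable, then raise it.
def check_framework_em(framework: str) -> None:
    if framework not in ("langchain", "llama_index"):
        msg = f"Unknown framework: {framework}"
        raise ValueError(msg)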

examples/evaluation/tru_shared.py

Lines changed: 10 additions & 11 deletions
@@ -129,7 +129,8 @@ def get_recorder(
             feedbacks=feedbacks,
             feedback_mode=feedback_mode,
         )
-    raise ValueError(f"Unknown framework: {framework} specified for get_recorder()")
+    msg = f"Unknown framework: {framework} specified for get_recorder()"
+    raise ValueError(msg)


 def get_azure_chat_model(
@@ -151,7 +152,8 @@ def get_azure_chat_model(
             model_version=model_version,
             temperature=temperature,
         )
-    raise ValueError(f"Unknown framework: {framework} specified for getChatModel()")
+    msg = f"Unknown framework: {framework} specified for getChatModel()"
+    raise ValueError(msg)


 def get_azure_embeddings_model(framework: Framework):
@@ -167,9 +169,8 @@ def get_azure_embeddings_model(framework: Framework):
             api_version="2023-05-15",
             temperature=temperature,
         )
-    raise ValueError(
-        f"Unknown framework: {framework} specified for getEmbeddingsModel()"
-    )
+    msg = f"Unknown framework: {framework} specified for getEmbeddingsModel()"
+    raise ValueError(msg)


 def get_astra_vector_store(framework: Framework, collection_name: str):
@@ -187,9 +188,8 @@ def get_astra_vector_store(framework: Framework, collection_name: str):
             token=os.getenv("ASTRA_DB_APPLICATION_TOKEN"),
             embedding_dimension=1536,
         )
-    raise ValueError(
-        f"Unknown framework: {framework} specified for get_astra_vector_store()"
-    )
+    msg = f"Unknown framework: {framework} specified for get_astra_vector_store()"
+    raise ValueError(msg)


 def execute_query(framework: Framework, pipeline, query) -> None:
@@ -198,9 +198,8 @@ def execute_query(framework: Framework, pipeline, query) -> None:
     elif framework == Framework.LLAMA_INDEX:
         pipeline.query(query)
     else:
-        raise ValueError(
-            f"Unknown framework: {framework} specified for execute_query()"
-        )
+        msg = f"Unknown framework: {framework} specified for execute_query()"
+        raise ValueError(msg)


 # runs the pipeline across all queries in all known datasets

examples/notebooks/advancedRAG.ipynb

Lines changed: 2 additions & 1 deletion
@@ -162,7 +162,8 @@
 "if uploaded:\n",
 " SAMPLEDATA = uploaded\n",
 "else:\n",
-" raise ValueError(\"Cannot proceed without Sample Data. Please re-run the cell.\")\n",
+" msg = \"Cannot proceed without Sample Data. Please re-run the cell.\"\n",
+" raise ValueError(msg)\n",
 "\n",
 "print(\"Please make sure to change your queries to match the contents of your file!\")"
 ]

examples/notebooks/conftest.py

Lines changed: 4 additions & 2 deletions
@@ -8,10 +8,12 @@

 def get_required_env(name) -> str:
     if name not in os.environ:
-        raise ValueError(f"Missing required environment variable: {name}")
+        msg = f"Missing required environment variable: {name}"
+        raise ValueError(msg)
     value = os.environ[name]
     if not value:
-        raise ValueError(f"Empty required environment variable: {name}")
+        msg = f"Empty required environment variable: {name}"
+        raise ValueError(msg)
     return value

examples/notebooks/langchain_evaluation.ipynb

Lines changed: 2 additions & 1 deletion
@@ -193,7 +193,8 @@
 "if uploaded:\n",
 " SAMPLEDATA = uploaded\n",
 "else:\n",
-" raise ValueError(\"Cannot proceed without Sample Data. Please re-run the cell.\")\n",
+" msg = \"Cannot proceed without Sample Data. Please re-run the cell.\"\n",
+" raise ValueError(msg)\n",
 "\n",
 "print(\"Please make sure to change your queries to match the contents of your file!\")"
 ]

libs/colbert/ragstack_colbert/cassandra_database.py

Lines changed: 12 additions & 8 deletions
@@ -45,10 +45,11 @@ class CassandraDatabase(BaseDatabase):
     _table: ClusteredMetadataVectorCassandraTable

     def __new__(cls) -> Self:  # noqa: D102
-        raise ValueError(
+        msg = (
             "This class cannot be instantiated directly. "
             "Please use the `from_astra()` or `from_session()` class methods."
         )
+        raise ValueError(msg)

     @classmethod
     def from_astra(
@@ -173,10 +174,11 @@ def add_chunks(self, chunks: list[Chunk]) -> list[tuple[str, int]]:
                 success_chunks.append((doc_id, chunk_id))

         if len(failed_chunks) > 0:
-            raise CassandraDatabaseError(
+            msg = (
                 f"add failed for these chunks: {failed_chunks}. "
                 f"See error logs for more info."
             )
+            raise CassandraDatabaseError(msg)

         return success_chunks

@@ -273,10 +275,11 @@ async def aadd_chunks(
                 failed_chunks.append((doc_id, chunk_id))

         if len(failed_chunks) > 0:
-            raise CassandraDatabaseError(
+            msg = (
                 f"add failed for these chunks: {failed_chunks}. "
                 f"See error logs for more info."
             )
+            raise CassandraDatabaseError(msg)

         return outputs

@@ -292,8 +295,9 @@ def delete_chunks(self, doc_ids: list[str]) -> bool:
                 failed_docs.append(doc_id)

         if len(failed_docs) > 0:
+            msg = "delete failed for these docs: %s. See error logs for more info."
             raise CassandraDatabaseError(
-                "delete failed for these docs: %s. See error logs for more info.",
+                msg,
                 failed_docs,
             )

@@ -340,10 +344,11 @@ async def adelete_chunks(
                 failed_docs.append(doc_id)

         if len(failed_docs) > 0:
-            raise CassandraDatabaseError(
+            msg = (
                 f"delete failed for these docs: {failed_docs}. "
                 f"See error logs for more info."
             )
+            raise CassandraDatabaseError(msg)

         return success

@@ -379,9 +384,8 @@ async def get_chunk_data(
         row = await self._table.aget(partition_id=doc_id, row_id=row_id)

         if row is None:
-            raise CassandraDatabaseError(
-                f"no chunk found for doc_id: {doc_id} chunk_id: {chunk_id}"
-            )
+            msg = f"no chunk found for doc_id: {doc_id} chunk_id: {chunk_id}"
+            raise CassandraDatabaseError(msg)

         if include_embedding is True:
             embedded_chunk = await self.get_chunk_embedding(

libs/colbert/ragstack_colbert/colbert_vector_store.py

Lines changed: 4 additions & 4 deletions
@@ -46,9 +46,8 @@ def __init__(

     def _validate_embedding_model(self) -> BaseEmbeddingModel:
         if self._embedding_model is None:
-            raise AttributeError(
-                "To use this method, `embedding_model` must be set on class creation."
-            )
+            msg = "To use this method, `embedding_model` must be set on class creation."
+            raise AttributeError(msg)
         return self._embedding_model

     def _build_chunks(
@@ -60,7 +59,8 @@ def _build_chunks(
         embedding_model = self._validate_embedding_model()

         if metadatas is not None and len(texts) != len(metadatas):
-            raise ValueError("Length of texts and metadatas must match.")
+            msg = "Length of texts and metadatas must match."
+            raise ValueError(msg)

         if doc_id is None:
             doc_id = str(uuid.uuid4())

libs/e2e-tests/e2e_tests/conftest.py

Lines changed: 4 additions & 2 deletions
@@ -55,7 +55,8 @@ def get_required_env(name) -> str:

 vector_database_type = os.environ.get("VECTOR_DATABASE_TYPE", "astradb")
 if vector_database_type not in ["astradb", "local-cassandra"]:
-    raise ValueError(f"Invalid VECTOR_DATABASE_TYPE: {vector_database_type}")
+    msg = f"Invalid VECTOR_DATABASE_TYPE: {vector_database_type}"
+    raise ValueError(msg)

 is_astra = vector_database_type == "astradb"

@@ -67,7 +68,8 @@ def get_vector_store_handler(
         return AstraDBVectorStoreHandler(implementation)
     if vector_database_type == "local-cassandra":
         return CassandraVectorStoreHandler(implementation)
-    raise ValueError("Invalid vector store implementation")
+    msg = "Invalid vector store implementation"
+    raise ValueError(msg)


 failed_report_lines = []

libs/e2e-tests/e2e_tests/langchain/test_compatibility_rag.py

Lines changed: 2 additions & 1 deletion
@@ -363,7 +363,8 @@ def _run_test(
             vector_store=vector_store, config=resolved_llm["nemo_config"]
         )
     else:
-        raise ValueError(f"Unknown test case: {test_case}")
+        msg = f"Unknown test case: {test_case}"
+        raise ValueError(msg)


 @pytest.fixture()

libs/knowledge-store/ragstack_knowledge_store/_mmr_helper.py

Lines changed: 3 additions & 2 deletions
@@ -131,11 +131,12 @@ def _pop_candidate(self, candidate_id: str) -> NDArray[np.float32]:
         """
         # Get the embedding for the id.
         index = self.candidate_id_to_index.pop(candidate_id)
-        if not self.candidates[index].id == candidate_id:
-            raise ValueError(
+        if self.candidates[index].id != candidate_id:
+            msg = (
                 "ID in self.candidate_id_to_index doesn't match the ID of the "
                 "corresponding index in self.candidates"
             )
+            raise ValueError(msg)
         embedding: NDArray[np.float32] = self.candidate_embeddings[index].copy()

         # Swap that index with the last index in the candidates and

libs/knowledge-store/ragstack_knowledge_store/_utils.py

Lines changed: 2 additions & 1 deletion
@@ -16,7 +16,8 @@
 # This is equivalent to `itertools.batched`, but that is only available in 3.12
 def batched(iterable: Iterable[T], n: int) -> Iterator[tuple[T, ...]]:
     if n < 1:
-        raise ValueError("n must be at least one")
+        msg = "n must be at least one"
+        raise ValueError(msg)
     it = iter(iterable)
     while batch := tuple(islice(it, n)):
         yield batch
