diff --git a/m_flow/adapters/graph/get_graph_adapter.py b/m_flow/adapters/graph/get_graph_adapter.py index 691b8ad..4aa560c 100644 --- a/m_flow/adapters/graph/get_graph_adapter.py +++ b/m_flow/adapters/graph/get_graph_adapter.py @@ -128,7 +128,7 @@ def _build_adapter( "neptune", "neptune_analytics", ] - raise EnvironmentError(f"Unknown graph provider '{graph_database_provider}'. Supported: {', '.join(known)}") + raise OSError(f"Unknown graph provider '{graph_database_provider}'. Supported: {', '.join(known)}") # --------------------------------------------------------------------------- @@ -138,7 +138,7 @@ def _build_adapter( def _require(val: Optional[str], label: str) -> None: if not val: - raise EnvironmentError(f"Missing required configuration: {label}") + raise OSError(f"Missing required configuration: {label}") def _validate_prefix(url: str, prefix: str) -> None: diff --git a/m_flow/adapters/vector/create_vector_engine.py b/m_flow/adapters/vector/create_vector_engine.py index 4d38808..84af906 100644 --- a/m_flow/adapters/vector/create_vector_engine.py +++ b/m_flow/adapters/vector/create_vector_engine.py @@ -108,7 +108,7 @@ def create_vector_engine( "Pinecone", "Milvus", ] - raise EnvironmentError(f"Unknown vector provider: {vector_db_provider}. Supported: {', '.join(known)}") + raise OSError(f"Unknown vector provider: {vector_db_provider}. Supported: {', '.join(known)}") def _create_pgvector_adapter(embedder, api_key: str): @@ -119,7 +119,7 @@ def _create_pgvector_adapter(embedder, api_key: str): required = [cfg.db_host, cfg.db_port, cfg.db_name, cfg.db_username, cfg.db_password] if not all(required): - raise EnvironmentError("Missing PGVector credentials") + raise OSError("Missing PGVector credentials") conn_str = f"postgresql+asyncpg://{cfg.db_username}:{cfg.db_password}@{cfg.db_host}:{cfg.db_port}/{cfg.db_name}" @@ -151,7 +151,7 @@ def _create_neptune_adapter(url: str, embedder): raise ImportError("langchain_aws not installed. Run: pip install langchain_aws") if not url: - raise EnvironmentError("Neptune endpoint URL required") + raise OSError("Neptune endpoint URL required") from m_flow.adapters.hybrid.neptune_analytics.NeptuneAnalyticsAdapter import ( NEPTUNE_ANALYTICS_ENDPOINT_URL, diff --git a/m_flow/context_global_variables.py b/m_flow/context_global_variables.py index 170d2d5..fa76162 100644 --- a/m_flow/context_global_variables.py +++ b/m_flow/context_global_variables.py @@ -51,10 +51,10 @@ def _require_handler_registered( kind: str, registry: dict, ) -> None: - """Raise ``EnvironmentError`` when *handler_name* is absent from *registry*.""" + """Raise ``OSError`` when *handler_name* is absent from *registry*.""" if handler_name in registry: return - raise EnvironmentError( + raise OSError( f"The {kind} dataset-database handler '{handler_name}' is not recognised. " f"Supported handlers: {list(registry)}. " f"Set {_ACL_ENV_KEY}=false to disable access-control mode." ) @@ -71,7 +71,7 @@ def _require_handler_matches_provider( expected = registry[handler_name]["handler_provider"] if expected == provider: return - raise EnvironmentError( + raise OSError( f"Mismatch: the {kind} handler '{handler_name}' expects provider " f"'{expected}' but '{provider}' is configured. " f"Set {_ACL_ENV_KEY}=false to disable access-control mode." ) diff --git a/m_flow/shared/files/utils/get_file_content_hash.py b/m_flow/shared/files/utils/get_file_content_hash.py index a121f25..677fc1a 100644 --- a/m_flow/shared/files/utils/get_file_content_hash.py +++ b/m_flow/shared/files/utils/get_file_content_hash.py @@ -37,7 +37,7 @@ async def get_file_content_hash(source: Union[str, BinaryIO]) -> str: async with backend.open(name, "rb") as fh: return _hex_digest_of(fh) return _hex_digest_of(source) - except IOError as err: + except OSError as err: raise FileContentHashingError( message=f"Hashing failed for {source!r}: {err}", ) from err