
Commit b276315

fix: Remove broad exception handling per user request
- Remove try/except block that was catching all exceptions in get_json_schema()
- Update test_inferred_schema_loader_handles_errors to verify errors now propagate
- This allows actual errors to surface for debugging instead of being silently suppressed
- Addresses PR comment from @aaronsteers

Co-Authored-By: AJ Steers <[email protected]>
1 parent 02050b8 commit b276315
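
As a rough illustration of the behavior change this commit describes, the sketch below contrasts the old and new control flow with stand-in code. The _FailingRetriever class, the get_json_schema_old/get_json_schema_new helpers, and the {"type": "object"} placeholder result are hypothetical; they only mimic the shape of the real method shown in the diff further down.

# Hypothetical stand-ins; not the real airbyte_cdk classes.
from typing import Any, Mapping


class _FailingRetriever:
    """Retriever whose read path raises, like the MagicMock used in the updated test."""

    def stream_slices(self):
        yield None

    def read_records(self, records_schema, stream_slice):
        raise Exception("API Error")


def get_json_schema_old(retriever) -> Mapping[str, Any]:
    # Pre-commit behavior: any retriever failure was swallowed and {} returned.
    try:
        for stream_slice in retriever.stream_slices():
            for _ in retriever.read_records(records_schema={}, stream_slice=stream_slice):
                pass
    except Exception:
        return {}
    return {"type": "object"}  # placeholder for the inferred schema


def get_json_schema_new(retriever) -> Mapping[str, Any]:
    # Post-commit behavior: the same failure now propagates to the caller.
    for stream_slice in retriever.stream_slices():
        for _ in retriever.read_records(records_schema={}, stream_slice=stream_slice):
            pass
    return {"type": "object"}  # placeholder for the inferred schema


if __name__ == "__main__":
    retriever = _FailingRetriever()
    print(get_json_schema_old(retriever))  # prints {} and hides the error
    try:
        get_json_schema_new(retriever)
    except Exception as exc:
        print(f"error now surfaces: {exc}")  # error now surfaces: API Error

Running the sketch prints {} for the old behavior and the propagated "API Error" message for the new one.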

File tree

2 files changed: +23 -27 lines changed


airbyte_cdk/sources/declarative/schema/inferred_schema_loader.py

Lines changed: 20 additions & 23 deletions
@@ -80,31 +80,28 @@ def get_json_schema(self) -> Mapping[str, Any]:
         schema_inferrer = SchemaInferrer()

         record_count = 0
-        try:
-            for stream_slice in self.retriever.stream_slices():
-                for record in self.retriever.read_records(
-                    records_schema={}, stream_slice=stream_slice
-                ):
-                    if record_count >= self.record_sample_size:
-                        break
-
-                    # Convert all Mapping-like and Sequence-like objects to plain Python types
-                    # This is necessary because genson doesn't handle custom implementations properly
-                    record = _to_builtin_types(record)
-
-                    airbyte_record = AirbyteRecordMessage(
-                        stream=self.stream_name,
-                        data=record,  # type: ignore[arg-type]
-                        emitted_at=0,
-                    )
-
-                    schema_inferrer.accumulate(airbyte_record)
-                    record_count += 1
-
+        for stream_slice in self.retriever.stream_slices():
+            for record in self.retriever.read_records(
+                records_schema={}, stream_slice=stream_slice
+            ):
                 if record_count >= self.record_sample_size:
                     break
-        except Exception:
-            return {}
+
+                # Convert all Mapping-like and Sequence-like objects to plain Python types
+                # This is necessary because genson doesn't handle custom implementations properly
+                record = _to_builtin_types(record)
+
+                airbyte_record = AirbyteRecordMessage(
+                    stream=self.stream_name,
+                    data=record,  # type: ignore[arg-type]
+                    emitted_at=0,
+                )
+
+                schema_inferrer.accumulate(airbyte_record)
+                record_count += 1
+
+            if record_count >= self.record_sample_size:
+                break

         inferred_schema: Optional[Mapping[str, Any]] = schema_inferrer.get_stream_schema(
             self.stream_name

unit_tests/sources/declarative/schema/test_inferred_schema_loader.py

Lines changed: 3 additions & 4 deletions
@@ -170,7 +170,7 @@ def test_inferred_schema_loader_respects_sample_size():


 def test_inferred_schema_loader_handles_errors():
-    """Test that InferredSchemaLoader handles errors gracefully."""
+    """Test that InferredSchemaLoader propagates errors from the retriever."""
     retriever = MagicMock()
     retriever.stream_slices.return_value = iter([None])
     retriever.read_records.side_effect = Exception("API Error")
@@ -185,9 +185,8 @@ def test_inferred_schema_loader_handles_errors():
         stream_name="users",
     )

-    schema = loader.get_json_schema()
-
-    assert schema == {}
+    with pytest.raises(Exception, match="API Error"):
+        loader.get_json_schema()


 def test_inferred_schema_loader_with_nested_objects():
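
For reference, a standalone sketch of the assertion pattern the updated test now relies on; the _boom helper and the test name below are hypothetical, and only the pytest.raises usage mirrors the diff above.

# Standalone illustration of pytest.raises with match; _boom is a hypothetical helper.
import pytest


def _boom() -> None:
    raise Exception("API Error")


def test_match_is_a_regex_search() -> None:
    # `match` is applied with re.search against the string form of the exception,
    # so the plain substring "API Error" is enough here.
    with pytest.raises(Exception, match="API Error"):
        _boom()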
