2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -36,7 +36,7 @@ repos:
- id: check-toml

- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.8.3
rev: v0.11.5
hooks:
# Run the linter with repo-defined settings
- id: ruff
6 changes: 3 additions & 3 deletions airbyte_cdk/connector_builder/main.py
@@ -78,9 +78,9 @@ def handle_connector_builder_request(
if command == "resolve_manifest":
return resolve_manifest(source)
elif command == "test_read":
assert (
catalog is not None
), "`test_read` requires a valid `ConfiguredAirbyteCatalog`, got None."
assert catalog is not None, (
"`test_read` requires a valid `ConfiguredAirbyteCatalog`, got None."
)
return read_stream(source, config, catalog, state, limits)
elif command == "full_resolve_manifest":
return full_resolve_manifest(source, limits)
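A note on the pattern repeated throughout this PR: the upgraded ruff formatter moves the parentheses from the assert condition to the assert message. The two layouts are equivalent at runtime; a minimal sketch (with a hypothetical catalog value, not taken from the PR) to confirm:

# Both layouts raise the same AssertionError; only the formatting differs.
catalog = None  # hypothetical value, chosen to trigger the assertion


def old_layout() -> None:
    assert (
        catalog is not None
    ), "`test_read` requires a valid `ConfiguredAirbyteCatalog`, got None."


def new_layout() -> None:
    assert catalog is not None, (
        "`test_read` requires a valid `ConfiguredAirbyteCatalog`, got None."
    )


for layout in (old_layout, new_layout):
    try:
        layout()
    except AssertionError as exc:
        print(str(exc))  # identical message from both layouts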
6 changes: 3 additions & 3 deletions airbyte_cdk/sources/concurrent_source/concurrent_source.py
@@ -49,9 +49,9 @@ def create(
too_many_generator = (
not is_single_threaded and initial_number_of_partitions_to_generate >= num_workers
)
assert (
not too_many_generator
), "It is required to have more workers than threads generating partitions"
assert not too_many_generator, (
"It is required to have more workers than threads generating partitions"
)
threadpool = ThreadPoolManager(
concurrent.futures.ThreadPoolExecutor(
max_workers=num_workers, thread_name_prefix="workerpool"
6 changes: 3 additions & 3 deletions airbyte_cdk/sources/file_based/file_based_source.py
@@ -282,9 +282,9 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]:
and hasattr(self, "_concurrency_level")
and self._concurrency_level is not None
):
assert (
state_manager is not None
), "No ConnectorStateManager was created, but it is required for incremental syncs. This is unexpected. Please contact Support."
assert state_manager is not None, (
"No ConnectorStateManager was created, but it is required for incremental syncs. This is unexpected. Please contact Support."
)

cursor = self.cursor_cls(
stream_config,
2 changes: 1 addition & 1 deletion airbyte_cdk/sources/file_based/file_types/avro_parser.py
@@ -154,7 +154,7 @@ def _convert_avro_type_to_json(
# For example: ^-?\d{1,5}(?:\.\d{1,3})?$ would accept 12345.123 and 123456.12345 would be rejected
return {
"type": "string",
"pattern": f"^-?\\d{{{1,max_whole_number_range}}}(?:\\.\\d{1,decimal_range})?$",
"pattern": f"^-?\\d{{{1, max_whole_number_range}}}(?:\\.\\d{1, decimal_range})?$",
}
elif "logicalType" in avro_field:
if avro_field["logicalType"] not in AVRO_LOGICAL_TYPE_TO_JSON:
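A side note on this hunk: the change is whitespace-only (the formatter adds a space after the comma inside the braces), and because the outer braces are escaped, the inner expression is evaluated as a Python tuple rather than emitted as a regex quantifier. A quick sketch with assumed values (max_whole_number_range=5, decimal_range=3, not from the PR) shows what the pattern expands to, identically before and after this diff:

# Assumed values, purely to illustrate the f-string expansion.
max_whole_number_range = 5
decimal_range = 3
pattern = f"^-?\\d{{{1, max_whole_number_range}}}(?:\\.\\d{1, decimal_range})?$"
print(pattern)
# ^-?\d{(1, 5)}(?:\.\d(1, 3))?$  -- the braces hold tuple reprs, not {1,5}-style quantifiers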
6 changes: 3 additions & 3 deletions airbyte_cdk/sources/file_based/stream/concurrent/adapters.py
@@ -284,9 +284,9 @@ def read(self) -> Iterable[Record]:
def to_slice(self) -> Optional[Mapping[str, Any]]:
if self._slice is None:
return None
assert (
len(self._slice["files"]) == 1
), f"Expected 1 file per partition but got {len(self._slice['files'])} for stream {self.stream_name()}"
assert len(self._slice["files"]) == 1, (
f"Expected 1 file per partition but got {len(self._slice['files'])} for stream {self.stream_name()}"
)
file = self._slice["files"][0]
return {"files": [file]}

17 changes: 8 additions & 9 deletions airbyte_cdk/sql/shared/sql_processor.py
@@ -326,9 +326,9 @@ def _ensure_schema_exists(

if DEBUG_MODE:
found_schemas = schemas_list
assert (
schema_name in found_schemas
), f"Schema {schema_name} was not created. Found: {found_schemas}"
assert schema_name in found_schemas, (
f"Schema {schema_name} was not created. Found: {found_schemas}"
)

def _quote_identifier(self, identifier: str) -> str:
"""Return the given identifier, quoted."""
@@ -617,10 +617,10 @@ def _append_temp_table_to_final_table(
self._execute_sql(
f"""
INSERT INTO {self._fully_qualified(final_table_name)} (
{f',{nl} '.join(columns)}
{f",{nl} ".join(columns)}
)
SELECT
{f',{nl} '.join(columns)}
{f",{nl} ".join(columns)}
FROM {self._fully_qualified(temp_table_name)}
""",
)
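The only change in this hunk is the quote style of the join separator; the rendered SQL is identical. A tiny sketch with hypothetical column names, in case the f-string-as-separator idiom is unfamiliar:

# Hypothetical column names; nl is a newline, as in the surrounding code.
nl = "\n"
columns = ["id", "name", "updated_at"]
print(f",{nl}    ".join(columns))
# id,
#     name,
#     updated_at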
@@ -645,8 +645,7 @@ def _swap_temp_table_with_final_table(
deletion_name = f"{final_table_name}_deleteme"
commands = "\n".join(
[
f"ALTER TABLE {self._fully_qualified(final_table_name)} RENAME "
f"TO {deletion_name};",
f"ALTER TABLE {self._fully_qualified(final_table_name)} RENAME TO {deletion_name};",
f"ALTER TABLE {self._fully_qualified(temp_table_name)} RENAME "
f"TO {final_table_name};",
f"DROP TABLE {self._fully_qualified(deletion_name)};",
@@ -686,10 +685,10 @@ def _merge_temp_table_to_final_table(
{set_clause}
WHEN NOT MATCHED THEN INSERT
(
{f',{nl} '.join(columns)}
{f",{nl} ".join(columns)}
)
VALUES (
tmp.{f',{nl} tmp.'.join(columns)}
tmp.{f",{nl} tmp.".join(columns)}
);
""",
)
40 changes: 20 additions & 20 deletions poetry.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion pyproject.toml
@@ -88,7 +88,7 @@ whenever = "^0.6.16"
freezegun = "*"
mypy = "*"
asyncio = "3.4.3"
ruff = "^0.7.2"
ruff = "^0.11.5"
pdoc = "^15.0.0"
poethepoet = "^0.24.2"
pyproject-flake8 = "^6.1.0"
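For anyone double-checking the dependency bump: Poetry's caret constraint keeps ruff below the next minor release, so later 0.11.x patches are accepted automatically. A rough PEP 440 equivalent, assuming the packaging library is available:

from packaging.specifiers import SpecifierSet

# Poetry's "^0.11.5" is approximately this range.
spec = SpecifierSet(">=0.11.5,<0.12.0")
print("0.11.9" in spec)  # True  - later 0.11.x patches satisfy the constraint
print("0.12.0" in spec)  # False - the next minor would need another bump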
18 changes: 9 additions & 9 deletions unit_tests/connector_builder/test_connector_builder_handler.py
@@ -124,7 +124,7 @@
"values": ["0", "1", "2", "3", "4", "5", "6", "7"],
"cursor_field": "item_id",
},
"" "requester": {
"requester": {
"path": "/v3/marketing/lists",
"authenticator": {
"type": "BearerAuthenticator",
@@ -175,7 +175,7 @@
"values": ["0", "1", "2", "3", "4", "5", "6", "7"],
"cursor_field": "item_id",
},
"" "requester": {
"requester": {
"path": "/v3/marketing/lists",
"authenticator": {
"type": "BearerAuthenticator",
@@ -348,7 +348,7 @@
"values": ["0", "1", "2", "3", "4", "5", "6", "7"],
"cursor_field": "item_id",
},
"" "requester": {
"requester": {
"path": "/v3/marketing/lists",
"authenticator": {"type": "OAuthAuthenticator", "api_token": "{{ config.apikey }}"},
"request_parameters": {"a_param": "10"},
@@ -1221,9 +1221,9 @@ def test_handle_read_external_requests(deployment_mode, url_base, expected_error
source, config, catalog, _A_PER_PARTITION_STATE, limits
).record.data
if expected_error:
assert (
len(output_data["logs"]) > 0
), "Expected at least one log message with the expected error"
assert len(output_data["logs"]) > 0, (
"Expected at least one log message with the expected error"
)
error_message = output_data["logs"][0]
assert error_message["level"] == "ERROR"
assert expected_error in error_message["stacktrace"]
@@ -1317,9 +1317,9 @@ def test_handle_read_external_oauth_request(deployment_mode, token_url, expected
source, config, catalog, _A_PER_PARTITION_STATE, limits
).record.data
if expected_error:
assert (
len(output_data["logs"]) > 0
), "Expected at least one log message with the expected error"
assert len(output_data["logs"]) > 0, (
"Expected at least one log message with the expected error"
)
error_message = output_data["logs"][0]
assert error_message["level"] == "ERROR"
assert expected_error in error_message["stacktrace"]
6 changes: 3 additions & 3 deletions unit_tests/destinations/test_destination.py
@@ -58,9 +58,9 @@ def test_successful_parse(
self, arg_list: List[str], expected_output: Mapping[str, Any], destination: Destination
):
parsed_args = vars(destination.parse_args(arg_list))
assert (
parsed_args == expected_output
), f"Expected parsing {arg_list} to return parsed args {expected_output} but instead found {parsed_args}"
assert parsed_args == expected_output, (
f"Expected parsing {arg_list} to return parsed args {expected_output} but instead found {parsed_args}"
)

@pytest.mark.parametrize(
("arg_list"),
@@ -100,9 +100,9 @@ def get_py_components_config_dict(

manifest_dict = yaml.safe_load(manifest_yml_path.read_text())
assert manifest_dict, "Failed to load the manifest file."
assert isinstance(
manifest_dict, Mapping
), f"Manifest file is type {type(manifest_dict).__name__}, not a mapping: {manifest_dict}"
assert isinstance(manifest_dict, Mapping), (
f"Manifest file is type {type(manifest_dict).__name__}, not a mapping: {manifest_dict}"
)

custom_py_code = custom_py_code_path.read_text()
combined_config_dict = {
@@ -189,7 +189,7 @@ def test_get_new_session_token(requests_mock):
)

session_token = get_new_session_token(
f'{config["instance_api_url"]}session',
f"{config['instance_api_url']}session",
config["username"],
config["password"],
config["session_token_response_key"],
@@ -363,9 +363,9 @@ def run_mocked_test(
request_count = len(
[req for req in m.request_history if unquote(req.url) == unquote(url)]
)
assert (
request_count == 1
), f"URL {url} was called {request_count} times, expected exactly once."
assert request_count == 1, (
f"URL {url} was called {request_count} times, expected exactly once."
)


def _run_read(
@@ -855,10 +855,11 @@ def run_incremental_parent_state_test(
expected_records_set = list(
{orjson.dumps(record): record for record in expected_records}.values()
)
assert (
sorted(cumulative_records_state_deduped, key=lambda x: x["id"])
== sorted(expected_records_set, key=lambda x: x["id"])
), f"Records mismatch with intermediate state {state}. Expected {expected_records}, got {cumulative_records_state_deduped}"
assert sorted(cumulative_records_state_deduped, key=lambda x: x["id"]) == sorted(
expected_records_set, key=lambda x: x["id"]
), (
f"Records mismatch with intermediate state {state}. Expected {expected_records}, got {cumulative_records_state_deduped}"
)

# Store the final state after each intermediate read
final_state_intermediate = [
@@ -869,9 +870,9 @@

# Assert that the final state matches the expected state for all runs
for i, final_state in enumerate(final_states):
assert (
final_state in expected_states
), f"Final state mismatch at run {i + 1}. Expected {expected_states}, got {final_state}"
assert final_state in expected_states, (
f"Final state mismatch at run {i + 1}. Expected {expected_states}, got {final_state}"
)


@pytest.mark.parametrize(
@@ -1300,8 +1301,7 @@ def test_incremental_parent_state(
{"id": 11, "post_id": 1, "updated_at": COMMENT_11_UPDATED_AT},
],
"next_page": (
"https://api.example.com/community/posts/1/comments"
"?per_page=100&page=2"
"https://api.example.com/community/posts/1/comments?per_page=100&page=2"
),
},
),
@@ -1346,8 +1346,7 @@
{
"comments": [{"id": 20, "post_id": 2, "updated_at": COMMENT_20_UPDATED_AT}],
"next_page": (
"https://api.example.com/community/posts/2/comments"
"?per_page=100&page=2"
"https://api.example.com/community/posts/2/comments?per_page=100&page=2"
),
},
),
@@ -3413,9 +3413,9 @@ def migrate(self, stream_state: Mapping[str, Any]) -> Mapping[str, Any]:
stream_state_migrations=[DummyStateMigration()],
)
assert cursor.state["lookback_window"] != 10, "State migration wasn't called"
assert (
cursor.state["lookback_window"] == 20
), "State migration was called, but actual state don't match expected"
assert cursor.state["lookback_window"] == 20, (
"State migration was called, but actual state don't match expected"
)


def test_create_concurrent_cursor_uses_min_max_datetime_format_if_defined():