diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index e9241ce07..6557bd241 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -36,7 +36,7 @@ repos:
       - id: check-toml
 
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.8.3
+    rev: v0.11.5
     hooks:
       # Run the linter with repo-defined settings
       - id: ruff
diff --git a/airbyte_cdk/connector_builder/main.py b/airbyte_cdk/connector_builder/main.py
index ad2d6650f..80cf4afa9 100644
--- a/airbyte_cdk/connector_builder/main.py
+++ b/airbyte_cdk/connector_builder/main.py
@@ -78,9 +78,9 @@ def handle_connector_builder_request(
     if command == "resolve_manifest":
         return resolve_manifest(source)
     elif command == "test_read":
-        assert (
-            catalog is not None
-        ), "`test_read` requires a valid `ConfiguredAirbyteCatalog`, got None."
+        assert catalog is not None, (
+            "`test_read` requires a valid `ConfiguredAirbyteCatalog`, got None."
+        )
         return read_stream(source, config, catalog, state, limits)
     elif command == "full_resolve_manifest":
         return full_resolve_manifest(source, limits)
diff --git a/airbyte_cdk/sources/concurrent_source/concurrent_source.py b/airbyte_cdk/sources/concurrent_source/concurrent_source.py
index bc7d97cdd..ffdee2dc1 100644
--- a/airbyte_cdk/sources/concurrent_source/concurrent_source.py
+++ b/airbyte_cdk/sources/concurrent_source/concurrent_source.py
@@ -49,9 +49,9 @@ def create(
         too_many_generator = (
             not is_single_threaded and initial_number_of_partitions_to_generate >= num_workers
         )
-        assert (
-            not too_many_generator
-        ), "It is required to have more workers than threads generating partitions"
+        assert not too_many_generator, (
+            "It is required to have more workers than threads generating partitions"
+        )
         threadpool = ThreadPoolManager(
             concurrent.futures.ThreadPoolExecutor(
                 max_workers=num_workers, thread_name_prefix="workerpool"
diff --git a/airbyte_cdk/sources/file_based/file_based_source.py b/airbyte_cdk/sources/file_based/file_based_source.py
index 17a7ee957..2d34fe5dc 100644
--- a/airbyte_cdk/sources/file_based/file_based_source.py
+++ b/airbyte_cdk/sources/file_based/file_based_source.py
@@ -282,9 +282,9 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]:
                     and hasattr(self, "_concurrency_level")
                     and self._concurrency_level is not None
                 ):
-                    assert (
-                        state_manager is not None
-                    ), "No ConnectorStateManager was created, but it is required for incremental syncs. This is unexpected. Please contact Support."
+                    assert state_manager is not None, (
+                        "No ConnectorStateManager was created, but it is required for incremental syncs. This is unexpected. Please contact Support."
+                    )
 
                     cursor = self.cursor_cls(
                         stream_config,
diff --git a/airbyte_cdk/sources/file_based/file_types/avro_parser.py b/airbyte_cdk/sources/file_based/file_types/avro_parser.py
index e1aa2c4cb..85e5afa22 100644
--- a/airbyte_cdk/sources/file_based/file_types/avro_parser.py
+++ b/airbyte_cdk/sources/file_based/file_types/avro_parser.py
@@ -154,7 +154,7 @@ def _convert_avro_type_to_json(
                 # For example: ^-?\d{1,5}(?:\.\d{1,3})?$ would accept 12345.123 and 123456.12345 would be rejected
                 return {
                     "type": "string",
-                    "pattern": f"^-?\\d{{{1,max_whole_number_range}}}(?:\\.\\d{1,decimal_range})?$",
+                    "pattern": f"^-?\\d{{{1, max_whole_number_range}}}(?:\\.\\d{1, decimal_range})?$",
                 }
             elif "logicalType" in avro_field:
                 if avro_field["logicalType"] not in AVRO_LOGICAL_TYPE_TO_JSON:
diff --git a/airbyte_cdk/sources/file_based/stream/concurrent/adapters.py b/airbyte_cdk/sources/file_based/stream/concurrent/adapters.py
index fb0efc82c..f02602d58 100644
--- a/airbyte_cdk/sources/file_based/stream/concurrent/adapters.py
+++ b/airbyte_cdk/sources/file_based/stream/concurrent/adapters.py
@@ -284,9 +284,9 @@ def read(self) -> Iterable[Record]:
     def to_slice(self) -> Optional[Mapping[str, Any]]:
         if self._slice is None:
             return None
-        assert (
-            len(self._slice["files"]) == 1
-        ), f"Expected 1 file per partition but got {len(self._slice['files'])} for stream {self.stream_name()}"
+        assert len(self._slice["files"]) == 1, (
+            f"Expected 1 file per partition but got {len(self._slice['files'])} for stream {self.stream_name()}"
+        )
         file = self._slice["files"][0]
         return {"files": [file]}
 
diff --git a/airbyte_cdk/sql/shared/sql_processor.py b/airbyte_cdk/sql/shared/sql_processor.py
index 5fd0a5e46..a53925206 100644
--- a/airbyte_cdk/sql/shared/sql_processor.py
+++ b/airbyte_cdk/sql/shared/sql_processor.py
@@ -326,9 +326,9 @@ def _ensure_schema_exists(
 
         if DEBUG_MODE:
             found_schemas = schemas_list
-            assert (
-                schema_name in found_schemas
-            ), f"Schema {schema_name} was not created. Found: {found_schemas}"
+            assert schema_name in found_schemas, (
+                f"Schema {schema_name} was not created. Found: {found_schemas}"
+            )
 
     def _quote_identifier(self, identifier: str) -> str:
         """Return the given identifier, quoted."""
@@ -617,10 +617,10 @@ def _append_temp_table_to_final_table(
         self._execute_sql(
             f"""
             INSERT INTO {self._fully_qualified(final_table_name)} (
-              {f',{nl} '.join(columns)}
+              {f",{nl} ".join(columns)}
             )
             SELECT
-              {f',{nl} '.join(columns)}
+              {f",{nl} ".join(columns)}
             FROM {self._fully_qualified(temp_table_name)}
             """,
         )
@@ -645,8 +645,7 @@ def _swap_temp_table_with_final_table(
         deletion_name = f"{final_table_name}_deleteme"
         commands = "\n".join(
             [
-                f"ALTER TABLE {self._fully_qualified(final_table_name)} RENAME "
-                f"TO {deletion_name};",
+                f"ALTER TABLE {self._fully_qualified(final_table_name)} RENAME TO {deletion_name};",
                 f"ALTER TABLE {self._fully_qualified(temp_table_name)} RENAME "
                 f"TO {final_table_name};",
                 f"DROP TABLE {self._fully_qualified(deletion_name)};",
@@ -686,10 +685,10 @@ def _merge_temp_table_to_final_table(
                 {set_clause}
             WHEN NOT MATCHED THEN INSERT (
-              {f',{nl} '.join(columns)}
+              {f",{nl} ".join(columns)}
             )
             VALUES (
-              tmp.{f',{nl} tmp.'.join(columns)}
+              tmp.{f",{nl} tmp.".join(columns)}
             );
             """,
         )
diff --git a/poetry.lock b/poetry.lock
index 992f7f8f8..d415ac2ce 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -4418,31 +4418,31 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"]
 
 [[package]]
 name = "ruff"
-version = "0.7.4"
+version = "0.11.5"
 description = "An extremely fast Python linter and code formatter, written in Rust."
 optional = false
 python-versions = ">=3.7"
 groups = ["dev"]
 markers = "python_version <= \"3.11\" or python_version >= \"3.12\""
 files = [
-    {file = "ruff-0.7.4-py3-none-linux_armv6l.whl", hash = "sha256:a4919925e7684a3f18e18243cd6bea7cfb8e968a6eaa8437971f681b7ec51478"},
-    {file = "ruff-0.7.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:cfb365c135b830778dda8c04fb7d4280ed0b984e1aec27f574445231e20d6c63"},
-    {file = "ruff-0.7.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:63a569b36bc66fbadec5beaa539dd81e0527cb258b94e29e0531ce41bacc1f20"},
-    {file = "ruff-0.7.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d06218747d361d06fd2fdac734e7fa92df36df93035db3dc2ad7aa9852cb109"},
-    {file = "ruff-0.7.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e0cea28d0944f74ebc33e9f934238f15c758841f9f5edd180b5315c203293452"},
-    {file = "ruff-0.7.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80094ecd4793c68b2571b128f91754d60f692d64bc0d7272ec9197fdd09bf9ea"},
-    {file = "ruff-0.7.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:997512325c6620d1c4c2b15db49ef59543ef9cd0f4aa8065ec2ae5103cedc7e7"},
-    {file = "ruff-0.7.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00b4cf3a6b5fad6d1a66e7574d78956bbd09abfd6c8a997798f01f5da3d46a05"},
-    {file = "ruff-0.7.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7dbdc7d8274e1422722933d1edddfdc65b4336abf0b16dfcb9dedd6e6a517d06"},
-    {file = "ruff-0.7.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e92dfb5f00eaedb1501b2f906ccabfd67b2355bdf117fea9719fc99ac2145bc"},
-    {file = "ruff-0.7.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3bd726099f277d735dc38900b6a8d6cf070f80828877941983a57bca1cd92172"},
-    {file = "ruff-0.7.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2e32829c429dd081ee5ba39aef436603e5b22335c3d3fff013cd585806a6486a"},
-    {file = "ruff-0.7.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:662a63b4971807623f6f90c1fb664613f67cc182dc4d991471c23c541fee62dd"},
-    {file = "ruff-0.7.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:876f5e09eaae3eb76814c1d3b68879891d6fde4824c015d48e7a7da4cf066a3a"},
-    {file = "ruff-0.7.4-py3-none-win32.whl", hash = "sha256:75c53f54904be42dd52a548728a5b572344b50d9b2873d13a3f8c5e3b91f5cac"},
-    {file = "ruff-0.7.4-py3-none-win_amd64.whl", hash = "sha256:745775c7b39f914238ed1f1b0bebed0b9155a17cd8bc0b08d3c87e4703b990d6"},
-    {file = "ruff-0.7.4-py3-none-win_arm64.whl", hash = "sha256:11bff065102c3ae9d3ea4dc9ecdfe5a5171349cdd0787c1fc64761212fc9cf1f"},
-    {file = "ruff-0.7.4.tar.gz", hash = "sha256:cd12e35031f5af6b9b93715d8c4f40360070b2041f81273d0527683d5708fce2"},
+    {file = "ruff-0.11.5-py3-none-linux_armv6l.whl", hash = "sha256:2561294e108eb648e50f210671cc56aee590fb6167b594144401532138c66c7b"},
+    {file = "ruff-0.11.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ac12884b9e005c12d0bd121f56ccf8033e1614f736f766c118ad60780882a077"},
+    {file = "ruff-0.11.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:4bfd80a6ec559a5eeb96c33f832418bf0fb96752de0539905cf7b0cc1d31d779"},
+    {file = "ruff-0.11.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0947c0a1afa75dcb5db4b34b070ec2bccee869d40e6cc8ab25aca11a7d527794"},
+    {file = "ruff-0.11.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ad871ff74b5ec9caa66cb725b85d4ef89b53f8170f47c3406e32ef040400b038"},
+    {file = "ruff-0.11.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6cf918390cfe46d240732d4d72fa6e18e528ca1f60e318a10835cf2fa3dc19f"},
+    {file = "ruff-0.11.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:56145ee1478582f61c08f21076dc59153310d606ad663acc00ea3ab5b2125f82"},
+    {file = "ruff-0.11.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e5f66f8f1e8c9fc594cbd66fbc5f246a8d91f916cb9667e80208663ec3728304"},
+    {file = "ruff-0.11.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80b4df4d335a80315ab9afc81ed1cff62be112bd165e162b5eed8ac55bfc8470"},
+    {file = "ruff-0.11.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3068befab73620b8a0cc2431bd46b3cd619bc17d6f7695a3e1bb166b652c382a"},
+    {file = "ruff-0.11.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:f5da2e710a9641828e09aa98b92c9ebbc60518fdf3921241326ca3e8f8e55b8b"},
+    {file = "ruff-0.11.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ef39f19cb8ec98cbc762344921e216f3857a06c47412030374fffd413fb8fd3a"},
+    {file = "ruff-0.11.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:b2a7cedf47244f431fd11aa5a7e2806dda2e0c365873bda7834e8f7d785ae159"},
+    {file = "ruff-0.11.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:81be52e7519f3d1a0beadcf8e974715b2dfc808ae8ec729ecfc79bddf8dbb783"},
+    {file = "ruff-0.11.5-py3-none-win32.whl", hash = "sha256:e268da7b40f56e3eca571508a7e567e794f9bfcc0f412c4b607931d3af9c4afe"},
+    {file = "ruff-0.11.5-py3-none-win_amd64.whl", hash = "sha256:6c6dc38af3cfe2863213ea25b6dc616d679205732dc0fb673356c2d69608f800"},
+    {file = "ruff-0.11.5-py3-none-win_arm64.whl", hash = "sha256:67e241b4314f4eacf14a601d586026a962f4002a475aa702c69980a38087aa4e"},
+    {file = "ruff-0.11.5.tar.gz", hash = "sha256:cae2e2439cb88853e421901ec040a758960b576126dab520fa08e9de431d1bef"},
 ]
 
 [[package]]
@@ -5507,4 +5507,4 @@ vector-db-based = ["cohere", "langchain", "openai", "tiktoken"]
 [metadata]
 lock-version = "2.1"
 python-versions = ">=3.10,<3.13"
-content-hash = "c8731f26643e07136e524d5e0d6e0f5c2229cf63d43bf5644de9f1cf8e565197"
+content-hash = "08cd7b4f8a9e5cc0fdf400cabafd6aaea7705e460422dbbccf212468d69eaea9"
diff --git a/pyproject.toml b/pyproject.toml
index 6a05b939b..bbfed7639 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -88,7 +88,7 @@ whenever = "^0.6.16"
 freezegun = "*"
 mypy = "*"
 asyncio = "3.4.3"
-ruff = "^0.7.2"
+ruff = "^0.11.5"
 pdoc = "^15.0.0"
 poethepoet = "^0.24.2"
 pyproject-flake8 = "^6.1.0"
diff --git a/unit_tests/connector_builder/test_connector_builder_handler.py b/unit_tests/connector_builder/test_connector_builder_handler.py
index 5c537811b..d98a49a8c 100644
--- a/unit_tests/connector_builder/test_connector_builder_handler.py
+++ b/unit_tests/connector_builder/test_connector_builder_handler.py
@@ -124,7 +124,7 @@
                 "values": ["0", "1", "2", "3", "4", "5", "6", "7"],
                 "cursor_field": "item_id",
             },
-            "" "requester": {
+            "requester": {
                 "path": "/v3/marketing/lists",
                 "authenticator": {
                     "type": "BearerAuthenticator",
@@ -175,7 +175,7 @@
                 "values": ["0", "1", "2", "3", "4", "5", "6", "7"],
                 "cursor_field": "item_id",
             },
-            "" "requester": {
+            "requester": {
                 "path": "/v3/marketing/lists",
                 "authenticator": {
                     "type": "BearerAuthenticator",
@@ -348,7 +348,7 @@
                 "values": ["0", "1", "2", "3", "4", "5", "6", "7"],
                 "cursor_field": "item_id",
             },
-            "" "requester": {
+            "requester": {
                 "path": "/v3/marketing/lists",
                 "authenticator": {"type": "OAuthAuthenticator", "api_token": "{{ config.apikey }}"},
                 "request_parameters": {"a_param": "10"},
@@ -1221,9 +1221,9 @@ def test_handle_read_external_requests(deployment_mode, url_base, expected_error
         source, config, catalog, _A_PER_PARTITION_STATE, limits
     ).record.data
     if expected_error:
-        assert (
-            len(output_data["logs"]) > 0
-        ), "Expected at least one log message with the expected error"
+        assert len(output_data["logs"]) > 0, (
+            "Expected at least one log message with the expected error"
+        )
         error_message = output_data["logs"][0]
         assert error_message["level"] == "ERROR"
         assert expected_error in error_message["stacktrace"]
@@ -1317,9 +1317,9 @@ def test_handle_read_external_oauth_request(deployment_mode, token_url, expected
         source, config, catalog, _A_PER_PARTITION_STATE, limits
    ).record.data
     if expected_error:
-        assert (
-            len(output_data["logs"]) > 0
-        ), "Expected at least one log message with the expected error"
+        assert len(output_data["logs"]) > 0, (
+            "Expected at least one log message with the expected error"
+        )
         error_message = output_data["logs"][0]
         assert error_message["level"] == "ERROR"
         assert expected_error in error_message["stacktrace"]
diff --git a/unit_tests/destinations/test_destination.py b/unit_tests/destinations/test_destination.py
index 14f52be15..1f8f6573f 100644
--- a/unit_tests/destinations/test_destination.py
+++ b/unit_tests/destinations/test_destination.py
@@ -58,9 +58,9 @@ def test_successful_parse(
         self, arg_list: List[str], expected_output: Mapping[str, Any], destination: Destination
     ):
         parsed_args = vars(destination.parse_args(arg_list))
-        assert (
-            parsed_args == expected_output
-        ), f"Expected parsing {arg_list} to return parsed args {expected_output} but instead found {parsed_args}"
+        assert parsed_args == expected_output, (
+            f"Expected parsing {arg_list} to return parsed args {expected_output} but instead found {parsed_args}"
+        )
 
     @pytest.mark.parametrize(
         ("arg_list"),
diff --git a/unit_tests/source_declarative_manifest/test_source_declarative_w_custom_components.py b/unit_tests/source_declarative_manifest/test_source_declarative_w_custom_components.py
index 40bb6d40b..521572bec 100644
--- a/unit_tests/source_declarative_manifest/test_source_declarative_w_custom_components.py
+++ b/unit_tests/source_declarative_manifest/test_source_declarative_w_custom_components.py
@@ -100,9 +100,9 @@ def get_py_components_config_dict(
 
     manifest_dict = yaml.safe_load(manifest_yml_path.read_text())
     assert manifest_dict, "Failed to load the manifest file."
-    assert isinstance(
-        manifest_dict, Mapping
-    ), f"Manifest file is type {type(manifest_dict).__name__}, not a mapping: {manifest_dict}"
+    assert isinstance(manifest_dict, Mapping), (
+        f"Manifest file is type {type(manifest_dict).__name__}, not a mapping: {manifest_dict}"
+    )
 
     custom_py_code = custom_py_code_path.read_text()
     combined_config_dict = {
diff --git a/unit_tests/sources/declarative/auth/test_session_token_auth.py b/unit_tests/sources/declarative/auth/test_session_token_auth.py
index 02de5b5b4..c4b15eb82 100644
--- a/unit_tests/sources/declarative/auth/test_session_token_auth.py
+++ b/unit_tests/sources/declarative/auth/test_session_token_auth.py
@@ -189,7 +189,7 @@ def test_get_new_session_token(requests_mock):
     )
 
     session_token = get_new_session_token(
-        f'{config["instance_api_url"]}session',
+        f"{config['instance_api_url']}session",
         config["username"],
         config["password"],
         config["session_token_response_key"],
diff --git a/unit_tests/sources/declarative/incremental/test_concurrent_perpartitioncursor.py b/unit_tests/sources/declarative/incremental/test_concurrent_perpartitioncursor.py
index 042a430aa..e23d03a4a 100644
--- a/unit_tests/sources/declarative/incremental/test_concurrent_perpartitioncursor.py
+++ b/unit_tests/sources/declarative/incremental/test_concurrent_perpartitioncursor.py
@@ -363,9 +363,9 @@ def run_mocked_test(
            request_count = len(
                 [req for req in m.request_history if unquote(req.url) == unquote(url)]
             )
-            assert (
-                request_count == 1
-            ), f"URL {url} was called {request_count} times, expected exactly once."
+            assert request_count == 1, (
+                f"URL {url} was called {request_count} times, expected exactly once."
+            )
 
 
 def _run_read(
@@ -855,10 +855,11 @@ def run_incremental_parent_state_test(
             expected_records_set = list(
                 {orjson.dumps(record): record for record in expected_records}.values()
             )
-            assert (
-                sorted(cumulative_records_state_deduped, key=lambda x: x["id"])
-                == sorted(expected_records_set, key=lambda x: x["id"])
-            ), f"Records mismatch with intermediate state {state}. Expected {expected_records}, got {cumulative_records_state_deduped}"
+            assert sorted(cumulative_records_state_deduped, key=lambda x: x["id"]) == sorted(
+                expected_records_set, key=lambda x: x["id"]
+            ), (
+                f"Records mismatch with intermediate state {state}. Expected {expected_records}, got {cumulative_records_state_deduped}"
+            )
 
             # Store the final state after each intermediate read
             final_state_intermediate = [
@@ -869,9 +870,9 @@
 
     # Assert that the final state matches the expected state for all runs
     for i, final_state in enumerate(final_states):
-        assert (
-            final_state in expected_states
-        ), f"Final state mismatch at run {i + 1}. Expected {expected_states}, got {final_state}"
+        assert final_state in expected_states, (
+            f"Final state mismatch at run {i + 1}. Expected {expected_states}, got {final_state}"
+        )
 
 
 @pytest.mark.parametrize(
@@ -1300,8 +1301,7 @@
                         {"id": 11, "post_id": 1, "updated_at": COMMENT_11_UPDATED_AT},
                     ],
                     "next_page": (
-                        "https://api.example.com/community/posts/1/comments"
-                        "?per_page=100&page=2"
+                        "https://api.example.com/community/posts/1/comments?per_page=100&page=2"
                     ),
                 },
             ),
@@ -1346,8 +1346,7 @@
                 {
                     "comments": [{"id": 20, "post_id": 2, "updated_at": COMMENT_20_UPDATED_AT}],
                     "next_page": (
-                        "https://api.example.com/community/posts/2/comments"
-                        "?per_page=100&page=2"
+                        "https://api.example.com/community/posts/2/comments?per_page=100&page=2"
                    ),
                 },
             ),
diff --git a/unit_tests/sources/declarative/parsers/test_model_to_component_factory.py b/unit_tests/sources/declarative/parsers/test_model_to_component_factory.py
index f628eeb3b..856106bfe 100644
--- a/unit_tests/sources/declarative/parsers/test_model_to_component_factory.py
+++ b/unit_tests/sources/declarative/parsers/test_model_to_component_factory.py
@@ -3413,9 +3413,9 @@ def migrate(self, stream_state: Mapping[str, Any]) -> Mapping[str, Any]:
         stream_state_migrations=[DummyStateMigration()],
     )
     assert cursor.state["lookback_window"] != 10, "State migration wasn't called"
-    assert (
-        cursor.state["lookback_window"] == 20
-    ), "State migration was called, but actual state don't match expected"
+    assert cursor.state["lookback_window"] == 20, (
+        "State migration was called, but actual state don't match expected"
+    )
 
 
 def test_create_concurrent_cursor_uses_min_max_datetime_format_if_defined():
diff --git a/unit_tests/sources/declarative/partition_routers/test_grouping_partition_router.py b/unit_tests/sources/declarative/partition_routers/test_grouping_partition_router.py
index a75a48966..9bea606e4 100644
--- a/unit_tests/sources/declarative/partition_routers/test_grouping_partition_router.py
+++ b/unit_tests/sources/declarative/partition_routers/test_grouping_partition_router.py
@@ -269,9 +269,9 @@ def __next__(self):
         cursor_slice={},
         extra_fields={"name": ["Board 0", "Board 1"], "owner": ["User0", "User1"]},
     )
-    assert (
-        controlled_iter.yield_count == 2
-    ), "Only 2 slices should be yielded to form the first group"
+    assert controlled_iter.yield_count == 2, (
+        "Only 2 slices should be yielded to form the first group"
+    )
 
     # Get the second slice
     second_slice = next(slices_iter)
@@ -280,9 +280,9 @@ def __next__(self):
         cursor_slice={},
         extra_fields={"name": ["Board 2", "Board 3"], "owner": ["User2", "User3"]},
     )
-    assert (
-        controlled_iter.yield_count == 4
-    ), "Only 4 slices should be yielded up to the second group"
+    assert controlled_iter.yield_count == 4, (
+        "Only 4 slices should be yielded up to the second group"
+    )
 
     # Exhaust the iterator
     remaining_slices = list(slices_iter)
@@ -293,9 +293,9 @@ def __next__(self):
             extra_fields={"name": ["Board 4"], "owner": ["User4"]},
         )
     ]
-    assert (
-        controlled_iter.yield_count == 5
-    ), "All 5 slices should be yielded after exhausting the iterator"
+    assert controlled_iter.yield_count == 5, (
+        "All 5 slices should be yielded after exhausting the iterator"
+    )
 
 
 def test_set_initial_state_delegation(mock_config, mock_underlying_router):
diff --git a/unit_tests/sources/declarative/partition_routers/test_parent_state_stream.py b/unit_tests/sources/declarative/partition_routers/test_parent_state_stream.py
index 2cc6080e9..4fbbd7355 100644
--- a/unit_tests/sources/declarative/partition_routers/test_parent_state_stream.py
+++ b/unit_tests/sources/declarative/partition_routers/test_parent_state_stream.py
@@ -338,10 +338,11 @@ def run_incremental_parent_state_test(
             expected_records_set = list(
                 {orjson.dumps(record): record for record in expected_records}.values()
             )
-            assert (
-                sorted(cumulative_records_state_deduped, key=lambda x: orjson.dumps(x))
-                == sorted(expected_records_set, key=lambda x: orjson.dumps(x))
-            ), f"Records mismatch with intermediate state {state}. Expected {expected_records}, got {cumulative_records_state_deduped}"
+            assert sorted(
+                cumulative_records_state_deduped, key=lambda x: orjson.dumps(x)
+            ) == sorted(expected_records_set, key=lambda x: orjson.dumps(x)), (
+                f"Records mismatch with intermediate state {state}. Expected {expected_records}, got {cumulative_records_state_deduped}"
+            )
 
             # Store the final state after each intermediate read
             final_state_intermediate = [
@@ -353,9 +354,9 @@
 
     # Assert that the final state matches the expected state for all runs
     for i, final_state in enumerate(final_states):
-        assert (
-            final_state in expected_states
-        ), f"Final state mismatch at run {i + 1}. Expected {expected_states}, got {final_state}"
+        assert final_state in expected_states, (
+            f"Final state mismatch at run {i + 1}. Expected {expected_states}, got {final_state}"
+        )
 
 
 @pytest.mark.parametrize(
diff --git a/unit_tests/sources/declarative/partition_routers/test_substream_partition_router.py b/unit_tests/sources/declarative/partition_routers/test_substream_partition_router.py
index 675e76a95..80c8f1e10 100644
--- a/unit_tests/sources/declarative/partition_routers/test_substream_partition_router.py
+++ b/unit_tests/sources/declarative/partition_routers/test_substream_partition_router.py
@@ -1200,13 +1200,13 @@ def test_substream_using_resumable_full_refresh_parent_stream_slices(use_increme
     # validate final state for closed substream slices
     final_state = substream_cursor_slicer.get_stream_state()
     if not use_incremental_dependency:
-        assert (
-            final_state["states"] == expected_substream_state["states"]
-        ), "State for substreams is not valid!"
+        assert final_state["states"] == expected_substream_state["states"], (
+            "State for substreams is not valid!"
+        )
     else:
-        assert (
-            final_state == expected_substream_state
-        ), "State for substreams with incremental dependency is not valid!"
+        assert final_state == expected_substream_state, (
+            "State for substreams with incremental dependency is not valid!"
+        )
 
 
 @pytest.mark.parametrize(
diff --git a/unit_tests/sources/declarative/test_concurrent_declarative_source.py b/unit_tests/sources/declarative/test_concurrent_declarative_source.py
index 4a043ac82..6af69eac5 100644
--- a/unit_tests/sources/declarative/test_concurrent_declarative_source.py
+++ b/unit_tests/sources/declarative/test_concurrent_declarative_source.py
@@ -1442,9 +1442,9 @@ def test_concurrent_declarative_source_runs_state_migrations_provided_in_manifes
         source_config=manifest, config=_CONFIG, catalog=_CATALOG, state=state
     )
     concurrent_streams, synchronous_streams = source._group_streams(_CONFIG)
-    assert (
-        concurrent_streams[0].cursor.state.get("state") != state_blob.__dict__
-    ), "State was not migrated."
+    assert concurrent_streams[0].cursor.state.get("state") != state_blob.__dict__, (
+        "State was not migrated."
+    )
     assert concurrent_streams[0].cursor.state.get("states") == [
         {"cursor": {"updated_at": "2024-08-21"}, "partition": {"type": "type_1"}},
         {"cursor": {"updated_at": "2024-08-21"}, "partition": {"type": "type_2"}},
diff --git a/unit_tests/sources/file_based/test_scenarios.py b/unit_tests/sources/file_based/test_scenarios.py
index 214dd0872..d70b7f4ef 100644
--- a/unit_tests/sources/file_based/test_scenarios.py
+++ b/unit_tests/sources/file_based/test_scenarios.py
@@ -122,9 +122,9 @@ def _verify_read_output(output: EntrypointOutput, scenario: TestScenario[Abstrac
 
     expected_states = list(filter(lambda e: "data" not in e, expected_records))
     states = list(filter(lambda r: r.state, records_and_state_messages))
-    assert (
-        len(states) > 0
-    ), "No state messages emitted. Successful syncs should emit at least one stream state."
+    assert len(states) > 0, (
+        "No state messages emitted. Successful syncs should emit at least one stream state."
+    )
     _verify_state_record_counts(sorted_records, states)
 
     if hasattr(scenario.source, "cursor_cls") and issubclass(
@@ -182,9 +182,9 @@ def _verify_analytics(
     expected_analytics: Optional[List[AirbyteAnalyticsTraceMessage]],
 ) -> None:
     if expected_analytics:
-        assert (
-            len(analytics) == len(expected_analytics)
-        ), f"Number of actual analytics messages ({len(analytics)}) did not match expected ({len(expected_analytics)})"
+        assert len(analytics) == len(expected_analytics), (
+            f"Number of actual analytics messages ({len(analytics)}) did not match expected ({len(expected_analytics)})"
+        )
         for actual, expected in zip(analytics, expected_analytics):
             actual_type, actual_value = actual.trace.analytics.type, actual.trace.analytics.value
             expected_type = expected.type
diff --git a/unit_tests/sources/streams/http/test_http.py b/unit_tests/sources/streams/http/test_http.py
index 9f6209866..f7ad9e47e 100644
--- a/unit_tests/sources/streams/http/test_http.py
+++ b/unit_tests/sources/streams/http/test_http.py
@@ -506,7 +506,7 @@ class CacheHttpStreamWithSlices(CacheHttpStream):
     paths = ["", "search"]
 
     def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str:
-        return f'{stream_slice["path"]}' if stream_slice else ""
+        return f"{stream_slice['path']}" if stream_slice else ""
 
     def stream_slices(self, **kwargs) -> Iterable[Optional[Mapping[str, Any]]]:
         for path in self.paths:
diff --git a/unit_tests/sources/streams/test_call_rate.py b/unit_tests/sources/streams/test_call_rate.py
index 853e2997e..b99905870 100644
--- a/unit_tests/sources/streams/test_call_rate.py
+++ b/unit_tests/sources/streams/test_call_rate.py
@@ -268,9 +268,9 @@ def test_limit_rate(self):
 
         with pytest.raises(CallRateLimitHit) as excinfo2:
             policy.try_acquire("call", weight=1), "call over limit"
-        assert (
-            excinfo2.value.time_to_wait < excinfo1.value.time_to_wait
-        ), "time to wait must decrease over time"
+        assert excinfo2.value.time_to_wait < excinfo1.value.time_to_wait, (
+            "time to wait must decrease over time"
+        )
 
     def test_limit_rate_support_custom_weight(self):
         """try_acquire must take into account provided weight and throw CallRateLimitHit when hit the limit."""
@@ -279,9 +279,9 @@ def test_limit_rate_support_custom_weight(self):
         policy.try_acquire("call", weight=2), "1st call with weight of 2"
         with pytest.raises(CallRateLimitHit) as excinfo:
             policy.try_acquire("call", weight=9), "2nd call, over limit since 2 + 9 = 11 > 10"
-        assert excinfo.value.time_to_wait.total_seconds() == pytest.approx(
-            60, 0.1
-        ), "should wait 1 minute before next call"
+        assert excinfo.value.time_to_wait.total_seconds() == pytest.approx(60, 0.1), (
+            "should wait 1 minute before next call"
+        )
 
     def test_multiple_limit_rates(self):
         """try_acquire must take into all call rates and apply stricter."""
diff --git a/unit_tests/sources/streams/test_stream_read.py b/unit_tests/sources/streams/test_stream_read.py
index ebe258ef2..ac11b7499 100644
--- a/unit_tests/sources/streams/test_stream_read.py
+++ b/unit_tests/sources/streams/test_stream_read.py
@@ -750,9 +750,9 @@ def test_configured_json_schema_with_invalid_properties():
     assert old_user_insights not in configured_json_schema_properties
     assert old_feature_info not in configured_json_schema_properties
     for stream_schema_property in stream_schema["properties"]:
-        assert (
-            stream_schema_property in configured_json_schema_properties
-        ), f"Stream schema property: {stream_schema_property} missing in configured schema"
+        assert stream_schema_property in configured_json_schema_properties, (
+            f"Stream schema property: {stream_schema_property} missing in configured schema"
+        )
         assert (
             stream_schema["properties"][stream_schema_property]
             == configured_json_schema_properties[stream_schema_property]
diff --git a/unit_tests/sources/test_config.py b/unit_tests/sources/test_config.py
index 23177c683..94d58540e 100644
--- a/unit_tests/sources/test_config.py
+++ b/unit_tests/sources/test_config.py
@@ -46,7 +46,7 @@ class TestBaseConfig:
             "name": {"title": "Name", "type": "string"},
             "selected_strategy": {
                 "const": "option1",
-                "title": "Selected " "Strategy",
+                "title": "Selected Strategy",
                 "type": "string",
                 "default": "option1",
             },
@@ -59,7 +59,7 @@ class TestBaseConfig:
             "properties": {
                 "selected_strategy": {
                     "const": "option2",
-                    "title": "Selected " "Strategy",
+                    "title": "Selected Strategy",
                     "type": "string",
                     "default": "option2",
                 },
diff --git a/unit_tests/test_entrypoint.py b/unit_tests/test_entrypoint.py
index e906e8b39..52d742c07 100644
--- a/unit_tests/test_entrypoint.py
+++ b/unit_tests/test_entrypoint.py
@@ -765,9 +765,9 @@ def test_handle_record_counts(
             assert message_count == expected_records_by_stream[stream_descriptor]
 
         if actual_message.type == Type.STATE:
-            assert isinstance(
-                actual_message.state.sourceStats.recordCount, float
-            ), "recordCount value should be expressed as a float"
+            assert isinstance(actual_message.state.sourceStats.recordCount, float), (
+                "recordCount value should be expressed as a float"
+            )
 
 
 def test_given_serialization_error_using_orjson_then_fallback_on_json(
diff --git a/unit_tests/test_exception_handler.py b/unit_tests/test_exception_handler.py
index 0edcf1247..95d0e9c3a 100644
--- a/unit_tests/test_exception_handler.py
+++ b/unit_tests/test_exception_handler.py
@@ -88,6 +88,6 @@ def test_uncaught_exception_handler():
     out_trace_message = AirbyteMessageSerializer.load(json.loads(trace_output))
     assert out_trace_message.trace.emitted_at > 0
     out_trace_message.trace.emitted_at = 0.0  # set a specific emitted_at value for testing
-    assert (
-        out_trace_message == expected_trace_message
-    ), "Trace message should be emitted in expected form"
+    assert out_trace_message == expected_trace_message, (
+        "Trace message should be emitted in expected form"
+    )
diff --git a/unit_tests/test_secure_logger.py b/unit_tests/test_secure_logger.py
index 0237091fe..757a069c7 100644
--- a/unit_tests/test_secure_logger.py
+++ b/unit_tests/test_secure_logger.py
@@ -203,12 +203,12 @@ def read(
         list(entrypoint.run(parsed_args))
     except Exception:
         sys.excepthook(*sys.exc_info())
-    assert (
-        I_AM_A_SECRET_VALUE not in capsys.readouterr().out
-    ), "Should have filtered non-secret value from exception trace message"
-    assert (
-        I_AM_A_SECRET_VALUE not in caplog.text
-    ), "Should have filtered secret value from exception log message"
+    assert I_AM_A_SECRET_VALUE not in capsys.readouterr().out, (
+        "Should have filtered non-secret value from exception trace message"
+    )
+    assert I_AM_A_SECRET_VALUE not in caplog.text, (
+        "Should have filtered secret value from exception log message"
+    )
 
 
 def test_non_airbyte_secrets_are_not_masked_on_uncaught_exceptions(mocker, caplog, capsys):
@@ -257,9 +257,9 @@ def read(
         list(entrypoint.run(parsed_args))
     except Exception:
         sys.excepthook(*sys.exc_info())
-    assert (
-        NOT_A_SECRET_VALUE in capsys.readouterr().out
-    ), "Should not have filtered non-secret value from exception trace message"
-    assert (
-        NOT_A_SECRET_VALUE in caplog.text
-    ), "Should not have filtered non-secret value from exception log message"
+    assert NOT_A_SECRET_VALUE in capsys.readouterr().out, (
+        "Should not have filtered non-secret value from exception trace message"
+    )
+    assert NOT_A_SECRET_VALUE in caplog.text, (
+        "Should not have filtered non-secret value from exception log message"
+    )
diff --git a/unit_tests/utils/test_secret_utils.py b/unit_tests/utils/test_secret_utils.py
index 73c93e670..d6f4f4563 100644
--- a/unit_tests/utils/test_secret_utils.py
+++ b/unit_tests/utils/test_secret_utils.py
@@ -150,9 +150,9 @@ def test_get_secret_paths(spec, expected):
     ],
 )
 def test_get_secrets(spec, config, expected):
-    assert (
-        get_secrets(spec, config) == expected
-    ), f"Expected the spec {spec} and config {config} to produce {expected}"
+    assert get_secrets(spec, config) == expected, (
+        f"Expected the spec {spec} and config {config} to produce {expected}"
+    )
 
 
 def test_secret_filtering():
diff --git a/unit_tests/utils/test_traced_exception.py b/unit_tests/utils/test_traced_exception.py
index 0e5b58439..21a44c646 100644
--- a/unit_tests/utils/test_traced_exception.py
+++ b/unit_tests/utils/test_traced_exception.py
@@ -76,7 +76,7 @@ def test_existing_exception_as_airbyte_message(raised_exception):
     assert airbyte_message.trace.error.internal_message == "an error has occurred"
     assert airbyte_message.trace.error.stack_trace.startswith("Traceback (most recent call last):")
     assert airbyte_message.trace.error.stack_trace.endswith(
-        'raise RuntimeError("an error has occurred")\n' "RuntimeError: an error has occurred\n"
+        'raise RuntimeError("an error has occurred")\nRuntimeError: an error has occurred\n'
     )