From 992c39cb937c057fbf1a89c094ad25a579e997c9 Mon Sep 17 00:00:00 2001
From: Ajay Singh
Date: Thu, 26 Dec 2024 10:03:11 -0800
Subject: [PATCH] add C4 rules and fix violations
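
Enable the flake8-comprehensions (C4) rule set in ruff and apply its
autofixes across the codebase. The rewrites are mechanical and
behavior-preserving:

- set(...) / dict(...) calls wrapping a comprehension or generator
  expression become set/dict comprehension literals
- comprehensions that only copy an iterable become plain list(...) /
  list(range(...)) calls
- map(lambda ...) arguments to any() / all() / set() become generator
  expressions or comprehensions

C408 is added to the ignore list so that bare dict()/list()/tuple()
calls are not rewritten as literals.

A representative before/after, taken from the services/profiling.py
hunk below:

    -        return set([file.name for file in self.critical_files])
    +        return {file.name for file in self.critical_files}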
---
 api/internal/commit/serializers.py            |  2 +-
 .../tests/views/test_account_viewset.py       |  2 +-
 codecov_auth/tests/test_admin.py              |  8 ++---
 codecov_auth/views/base.py                    |  4 +--
 core/signals.py                               |  2 +-
 graphql_api/dataloader/comparison.py          |  2 +-
 graphql_api/tests/test_owner_measurements.py  |  4 +--
 graphql_api/types/bundle_analysis/base.py     |  8 ++---
 .../plan_representation.py                    | 12 +++---
 ruff.toml                                     |  3 +-
 services/bundle_analysis.py                   |  2 +-
 services/profiling.py                         |  8 ++---
 timeseries/helpers.py                         |  2 +-
 upload/tests/views/test_upload_coverage.py    | 16 +++++-----
 upload/tests/views/test_uploads.py            | 32 +++++++++----------
 upload/views/empty_upload.py                  |  6 ++--
 webhook_handlers/views/github.py              | 24 +++++++-------
 17 files changed, 66 insertions(+), 71 deletions(-)

diff --git a/api/internal/commit/serializers.py b/api/internal/commit/serializers.py
index d866446d1f..a96801634a 100644
--- a/api/internal/commit/serializers.py
+++ b/api/internal/commit/serializers.py
@@ -41,7 +41,7 @@ def get_report(self, commit: Commit):
         for filename in report.files:
             file_report = report.get(filename)
             file_totals = CommitTotalsSerializer(
-                {key: val for key, val in zip(TOTALS_MAP, file_report.totals)}
+                dict(zip(TOTALS_MAP, file_report.totals))
             )
             files.append(
                 {
diff --git a/api/internal/tests/views/test_account_viewset.py b/api/internal/tests/views/test_account_viewset.py
index bd81cbe762..1aabeab654 100644
--- a/api/internal/tests/views/test_account_viewset.py
+++ b/api/internal/tests/views/test_account_viewset.py
@@ -960,7 +960,7 @@ def test_update_team_plan_must_fail_if_too_many_activated_users_during_trial(sel
         self.current_owner.plan = PlanName.BASIC_PLAN_NAME.value
         self.current_owner.plan_user_count = 1
         self.current_owner.trial_status = TrialStatus.ONGOING.value
-        self.current_owner.plan_activated_users = [i for i in range(11)]
+        self.current_owner.plan_activated_users = list(range(11))
         self.current_owner.save()
 
         desired_plans = [
diff --git a/codecov_auth/tests/test_admin.py b/codecov_auth/tests/test_admin.py
index 953752c907..55d24d2a9c 100644
--- a/codecov_auth/tests/test_admin.py
+++ b/codecov_auth/tests/test_admin.py
@@ -480,8 +480,8 @@ def test_stale_user_cleanup():
 
     # remove stale users with default > 90 days
     removed_users, affected_orgs = find_and_remove_stale_users(orgs)
-    assert removed_users == set([users[0].ownerid, users[2].ownerid, users[4].ownerid])
-    assert affected_orgs == set([orgs[0].ownerid, orgs[1].ownerid])
+    assert removed_users == {users[0].ownerid, users[2].ownerid, users[4].ownerid}
+    assert affected_orgs == {orgs[0].ownerid, orgs[1].ownerid}
 
     orgs = list(
         Owner.objects.filter(ownerid__in=[org.ownerid for org in orgs])
@@ -493,8 +493,8 @@ def test_stale_user_cleanup():
 
     # remove even more stale users
     removed_users, affected_orgs = find_and_remove_stale_users(orgs, timedelta(days=30))
-    assert removed_users == set([users[1].ownerid, users[3].ownerid])
-    assert affected_orgs == set([orgs[0].ownerid, orgs[1].ownerid])
+    assert removed_users == {users[1].ownerid, users[3].ownerid}
+    assert affected_orgs == {orgs[0].ownerid, orgs[1].ownerid}
 
     orgs = list(
         Owner.objects.filter(ownerid__in=[org.ownerid for org in orgs])
diff --git a/codecov_auth/views/base.py b/codecov_auth/views/base.py
index 22c255d5b6..49497e0572 100644
--- a/codecov_auth/views/base.py
+++ b/codecov_auth/views/base.py
@@ -302,14 +302,14 @@ def _check_enterprise_organizations_membership(self, user_dict, orgs):
         """Checks if a user belongs to the restricted organizations (or teams if GitHub) allowed in settings."""
         if settings.IS_ENTERPRISE and get_config(self.service, "organizations"):
             orgs_in_settings = set(get_config(self.service, "organizations"))
-            orgs_in_user = set(org["username"] for org in orgs)
+            orgs_in_user = {org["username"] for org in orgs}
             if not (orgs_in_settings & orgs_in_user):
                 raise PermissionDenied(
                     "You must be a member of an organization listed in the Codecov Enterprise setup."
                 )
             if get_config(self.service, "teams") and "teams" in user_dict:
                 teams_in_settings = set(get_config(self.service, "teams"))
-                teams_in_user = set([team["name"] for team in user_dict["teams"]])
+                teams_in_user = {team["name"] for team in user_dict["teams"]}
                 if not (teams_in_settings & teams_in_user):
                     raise PermissionDenied(
                         "You must be a member of an allowed team in your organization."
                     )
diff --git a/core/signals.py b/core/signals.py
index 37afc20d80..bddea7e96f 100644
--- a/core/signals.py
+++ b/core/signals.py
@@ -20,7 +20,7 @@ def update_repository(
     changes: Dict[str, Any] = instance.tracker.changed()
     tracked_fields: List[str] = ["name", "upload_token", "activated", "active"]
 
-    if created or any([field in changes for field in tracked_fields]):
+    if created or any(field in changes for field in tracked_fields):
         data = {
             "type": "repo",
             "sync": "one",
diff --git a/graphql_api/dataloader/comparison.py b/graphql_api/dataloader/comparison.py
index 7e607e0929..c1b1bc953b 100644
--- a/graphql_api/dataloader/comparison.py
+++ b/graphql_api/dataloader/comparison.py
@@ -42,7 +42,7 @@ def batch_queryset(self, keys):
 
     async def batch_load_fn(self, keys):
         # flat list of all commits involved in all comparisons
-        commitids = set(commitid for key in keys for commitid in key)
+        commitids = {commitid for key in keys for commitid in key}
 
         commit_loader = CommitLoader.loader(self.info, self.repository_id)
         commits = await commit_loader.load_many(commitids)
diff --git a/graphql_api/tests/test_owner_measurements.py b/graphql_api/tests/test_owner_measurements.py
index 105845074e..aae0fbe159 100644
--- a/graphql_api/tests/test_owner_measurements.py
+++ b/graphql_api/tests/test_owner_measurements.py
@@ -167,7 +167,7 @@ def test_repository_filtering_by_public_private(
         ]["measurements"]
         params = owner_coverage_measurements_with_fallback.call_args.args
         # Check that the call is using both private and public repos
-        assert set(params[1]) == set([self.repo1.pk, self.repo2.pk])
+        assert set(params[1]) == {self.repo1.pk, self.repo2.pk}
 
         query = f"""
             query Measurements {{
@@ -183,4 +183,4 @@ def test_repository_filtering_by_public_private(
         self.gql_request(query, owner=self.owner)["owner"]["measurements"]
         params = owner_coverage_measurements_with_fallback.call_args.args
         # Check that the call is using both private and public repos
-        assert set(params[1]) == set([self.repo1.pk, self.repo2.pk])
+        assert set(params[1]) == {self.repo1.pk, self.repo2.pk}
diff --git a/graphql_api/types/bundle_analysis/base.py b/graphql_api/types/bundle_analysis/base.py
index 2c4b29431e..dce5f1435e 100644
--- a/graphql_api/types/bundle_analysis/base.py
+++ b/graphql_api/types/bundle_analysis/base.py
@@ -307,9 +307,7 @@ def resolve_bundle_report_measurements(
 
     # All measureable names we need to fetch to compute the requested asset types
     if not asset_types:
-        measurables_to_fetch = [
-            item for item in list(BundleAnalysisMeasurementsAssetType)
-        ]
+        measurables_to_fetch = list(BundleAnalysisMeasurementsAssetType)
     elif ASSET_TYPE_UNKNOWN in asset_types:
         measurables_to_fetch = [
             BundleAnalysisMeasurementsAssetType.REPORT_SIZE,
@@ -332,9 +330,7 @@ def resolve_bundle_report_measurements(
 
     # All measureable name we need to return
     if not asset_types:
-        measurables_to_display = [
-            item for item in list(BundleAnalysisMeasurementsAssetType)
-        ]
+        measurables_to_display = list(BundleAnalysisMeasurementsAssetType)
     else:
         measurables_to_display = [
             BundleAnalysisMeasurementsAssetType[item]
diff --git a/graphql_api/types/plan_representation/plan_representation.py b/graphql_api/types/plan_representation/plan_representation.py
index 5c791fbe1c..8ef856265a 100644
--- a/graphql_api/types/plan_representation/plan_representation.py
+++ b/graphql_api/types/plan_representation/plan_representation.py
@@ -36,14 +36,12 @@ def resolve_base_unit_price(plan_data: PlanData, info) -> int:
 def resolve_benefits(plan_data: PlanData, info) -> List[str]:
     plan_service: PlanService = info.context["plan_service"]
     if plan_service.is_org_trialing:
-        benefits_with_pretrial_users = list(
-            map(
-                lambda benefit: benefit.replace(
-                    "Up to 1 user", f"Up to {plan_service.pretrial_users_count} users"
-                ),
-                plan_data["benefits"],
-            )
-        )
+        benefits_with_pretrial_users = [
+            benefit.replace(
+                "Up to 1 user", f"Up to {plan_service.pretrial_users_count} users"
+            )
+            for benefit in plan_data["benefits"]
+        ]
         return benefits_with_pretrial_users
 
     return plan_data["benefits"]
diff --git a/ruff.toml b/ruff.toml
index 30fa8079ec..00a1200de5 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -39,6 +39,7 @@ target-version = "py312"
 # https://docs.astral.sh/ruff/rules/
 select = [
     "ASYNC", # flake8-async - async checks
+    "C4", # flake8-comprehensions - list/set/dict/generator comprehensions
     "E", # pycodestyle - error rules
     "F", # pyflakes - general Python errors, undefined names
     "I", # isort - import sorting
@@ -47,7 +48,7 @@ select = [
     "PLE", # pylint - error rules
     "W", # pycodestyle - warning rules
 ]
-ignore = ["F405", "F403", "E501", "E712"]
+ignore = ["F405", "F403", "E501", "E712", "C408"]
 
 # Allow fix for all enabled rules (when `--fix`) is provided.
 # The preferred method (for now) w.r.t. fixable rules is to manually update the makefile
diff --git a/services/bundle_analysis.py b/services/bundle_analysis.py
index a1dd751618..e949e66026 100644
--- a/services/bundle_analysis.py
+++ b/services/bundle_analysis.py
@@ -240,7 +240,7 @@ def modules(self) -> List[ModuleReport]:
 
     @cached_property
     def module_extensions(self) -> List[str]:
-        return list(set([module.extension for module in self.modules]))
+        return list({module.extension for module in self.modules})
 
     @cached_property
     def routes(self) -> Optional[List[str]]:
diff --git a/services/profiling.py b/services/profiling.py
index a2e4b361f9..c37e09773a 100644
--- a/services/profiling.py
+++ b/services/profiling.py
@@ -96,9 +96,9 @@ def _get_critical_files_from_yaml(
             file
             for file in report.files
             if any(
-                map(
-                    lambda regex_patt: regex.match(regex_patt, file, timeout=2),
-                    compiled_files_paths,
+                (
+                    regex.match(regex_patt, file, timeout=2)
+                    for regex_patt in compiled_files_paths
                 )
             )
         ]
@@ -133,4 +133,4 @@ def critical_files(self) -> List[CriticalFile]:
 
     @cached_property
     def critical_filenames(self) -> set[str]:
-        return set([file.name for file in self.critical_files])
+        return {file.name for file in self.critical_files}
diff --git a/timeseries/helpers.py b/timeseries/helpers.py
index 1a2c7b9046..82d62c1c03 100644
--- a/timeseries/helpers.py
+++ b/timeseries/helpers.py
@@ -418,7 +418,7 @@ def owner_coverage_measurements_with_fallback(
     else:
         if settings.TIMESERIES_ENABLED:
             # we need to backfill some datasets
-            dataset_repo_ids = set(dataset.repository_id for dataset in datasets)
+            dataset_repo_ids = {dataset.repository_id for dataset in datasets}
             missing_dataset_repo_ids = set(repo_ids) - dataset_repo_ids
             created_datasets = Dataset.objects.bulk_create(
                 [
diff --git a/upload/tests/views/test_upload_coverage.py b/upload/tests/views/test_upload_coverage.py
index e9c9592133..0de8c68bd7 100644
--- a/upload/tests/views/test_upload_coverage.py
+++ b/upload/tests/views/test_upload_coverage.py
@@ -139,9 +139,9 @@ def test_upload_coverage_post(db, mocker):
     ).first()
     assert response.status_code == 201
     assert all(
-        map(
-            lambda x: x in response_json.keys(),
-            ["external_id", "created_at", "raw_upload_location", "url"],
+        (
+            x in response_json.keys()
+            for x in ["external_id", "created_at", "raw_upload_location", "url"]
         )
     )
     assert (
@@ -172,7 +172,7 @@ def test_upload_coverage_post(db, mocker):
     assert UploadFlagMembership.objects.filter(
         report_session_id=upload.id, flag_id=flag2.id
     ).exists()
-    assert [flag for flag in upload.flags.all()] == [flag1, flag2]
+    assert list(upload.flags.all()) == [flag1, flag2]
 
     archive_service = ArchiveService(repository)
     assert upload.storage_path == MinioEndpoints.raw_with_upload_id.get_path(
@@ -242,9 +242,9 @@ def test_upload_coverage_post_shelter(db, mocker):
     ).first()
     assert response.status_code == 201
     assert all(
-        map(
-            lambda x: x in response_json.keys(),
-            ["external_id", "created_at", "raw_upload_location", "url"],
+        (
+            x in response_json.keys()
+            for x in ["external_id", "created_at", "raw_upload_location", "url"]
         )
     )
     assert (
@@ -275,7 +275,7 @@ def test_upload_coverage_post_shelter(db, mocker):
     assert UploadFlagMembership.objects.filter(
         report_session_id=upload.id, flag_id=flag2.id
     ).exists()
-    assert [flag for flag in upload.flags.all()] == [flag1, flag2]
+    assert list(upload.flags.all()) == [flag1, flag2]
 
     assert upload.storage_path == "shelter/test/path.txt"
     presigned_put_mock.assert_called_with("archive", upload.storage_path, 10)
diff --git a/upload/tests/views/test_uploads.py b/upload/tests/views/test_uploads.py
index be31fa8d3d..ec88613991 100644
--- a/upload/tests/views/test_uploads.py
+++ b/upload/tests/views/test_uploads.py
@@ -243,9 +243,9 @@ def test_uploads_post(db, mocker, mock_redis):
     ).first()
     assert response.status_code == 201
     assert all(
-        map(
-            lambda x: x in response_json.keys(),
-            ["external_id", "created_at", "raw_upload_location", "url"],
+        (
+            x in response_json.keys()
+            for x in ["external_id", "created_at", "raw_upload_location", "url"]
         )
     )
     assert (
@@ -276,7 +276,7 @@ def test_uploads_post(db, mocker, mock_redis):
     assert UploadFlagMembership.objects.filter(
         report_session_id=upload.id, flag_id=flag2.id
     ).exists()
-    assert [flag for flag in upload.flags.all()] == [flag1, flag2]
+    assert list(upload.flags.all()) == [flag1, flag2]
 
     archive_service = ArchiveService(repository)
     assert upload.storage_path == MinioEndpoints.raw_with_upload_id.get_path(
@@ -357,9 +357,9 @@ def test_uploads_post_tokenless(db, mocker, mock_redis, private, branch, branch_
         state="started",
     ).first()
     assert all(
-        map(
-            lambda x: x in response_json.keys(),
-            ["external_id", "created_at", "raw_upload_location", "url"],
+        (
+            x in response_json.keys()
+            for x in ["external_id", "created_at", "raw_upload_location", "url"]
         )
     )
     assert (
@@ -390,7 +390,7 @@ def test_uploads_post_tokenless(db, mocker, mock_redis, private, branch, branch_
     assert UploadFlagMembership.objects.filter(
         report_session_id=upload.id, flag_id=flag2.id
     ).exists()
-    assert [flag for flag in upload.flags.all()] == [flag1, flag2]
+    assert list(upload.flags.all()) == [flag1, flag2]
 
     archive_service = ArchiveService(repository)
     assert upload.storage_path == MinioEndpoints.raw_with_upload_id.get_path(
@@ -509,9 +509,9 @@ def test_uploads_post_token_required_auth_check(
         state="started",
     ).first()
    assert all(
-        map(
-            lambda x: x in response_json.keys(),
-            ["external_id", "created_at", "raw_upload_location", "url"],
+        (
+            x in response_json.keys()
+            for x in ["external_id", "created_at", "raw_upload_location", "url"]
         )
     )
     assert (
@@ -542,7 +542,7 @@ def test_uploads_post_token_required_auth_check(
     assert UploadFlagMembership.objects.filter(
         report_session_id=upload.id, flag_id=flag2.id
     ).exists()
-    assert [flag for flag in upload.flags.all()] == [flag1, flag2]
+    assert list(upload.flags.all()) == [flag1, flag2]
 
     archive_service = ArchiveService(repository)
     assert upload.storage_path == MinioEndpoints.raw_with_upload_id.get_path(
@@ -643,9 +643,9 @@ def test_uploads_post_github_oidc_auth(
         state="started",
     ).first()
     assert all(
-        map(
-            lambda x: x in response_json.keys(),
-            ["external_id", "created_at", "raw_upload_location", "url"],
+        (
+            x in response_json.keys()
+            for x in ["external_id", "created_at", "raw_upload_location", "url"]
         )
     )
     assert (
@@ -676,7 +676,7 @@ def test_uploads_post_github_oidc_auth(
     assert UploadFlagMembership.objects.filter(
         report_session_id=upload.id, flag_id=flag2.id
     ).exists()
-    assert [flag for flag in upload.flags.all()] == [flag1, flag2]
+    assert list(upload.flags.all()) == [flag1, flag2]
 
     archive_service = ArchiveService(repository)
     assert upload.storage_path == MinioEndpoints.raw_with_upload_id.get_path(
diff --git a/upload/views/empty_upload.py b/upload/views/empty_upload.py
index 77a3e52e7f..d8bb9ea1f6 100644
--- a/upload/views/empty_upload.py
+++ b/upload/views/empty_upload.py
@@ -149,9 +149,9 @@ def post(self, request: HttpRequest, *args: Any, **kwargs: Any) -> Response:
             file
             for file in changed_files
             if any(
-                map(
-                    lambda regex_patt: regex.match(regex_patt, file, timeout=2),
-                    compiled_files_to_ignore,
+                (
+                    regex.match(regex_patt, file, timeout=2)
+                    for regex_patt in compiled_files_to_ignore
                 )
             )
         ]
diff --git a/webhook_handlers/views/github.py b/webhook_handlers/views/github.py
index 953faf5b7c..0334e0ae4e 100644
--- a/webhook_handlers/views/github.py
+++ b/webhook_handlers/views/github.py
@@ -433,12 +433,12 @@ def _handle_installation_repository_events(self, request, *args, **kwargs):
             ghapp_installation.repository_service_ids = None
         else:
             repo_list_to_save = set(ghapp_installation.repository_service_ids or [])
-            repositories_added_service_ids = set(
-                map(lambda obj: obj["id"], request.data.get("repositories_added", []))
-            )
-            repositories_removed_service_ids = set(
-                map(lambda obj: obj["id"], request.data.get("repositories_removed", []))
-            )
+            repositories_added_service_ids = {
+                obj["id"] for obj in request.data.get("repositories_added", [])
+            }
+            repositories_removed_service_ids = {
+                obj["id"] for obj in request.data.get("repositories_removed", [])
+            }
             repo_list_to_save = repo_list_to_save.union(
                 repositories_added_service_ids
             ).difference(repositories_removed_service_ids)
@@ -501,9 +501,9 @@ def _handle_installation_events(
         if affects_all_repositories:
             ghapp_installation.repository_service_ids = None
         else:
-            repositories_service_ids = list(
-                map(lambda obj: obj["id"], request.data.get("repositories", []))
-            )
+            repositories_service_ids = [
+                obj["id"] for obj in request.data.get("repositories", [])
+            ]
             ghapp_installation.repository_service_ids = repositories_service_ids
 
         if action in ["suspend", "unsuspend"]:
@@ -538,9 +538,9 @@ def _handle_installation_events(
             + request.data.get("repositories_added", [])
             + request.data.get("repositories_removed", [])
         )
-        repos_affected_clean = set(
-            map(lambda obj: (obj["id"], obj["node_id"]), repos_affected)
-        )
+        repos_affected_clean = {
+            (obj["id"], obj["node_id"]) for obj in repos_affected
+        }
 
         TaskService().refresh(
             ownerid=owner.ownerid,