From 86b19823fbb408d3c00ade15be241f46bbe64092 Mon Sep 17 00:00:00 2001 From: Andrew Ferrazzutti Date: Sun, 12 Oct 2025 21:37:20 -0400 Subject: [PATCH 1/6] Use type hinting generics in standard collections aka PEP 585, added in Python 3.9 --- build_rust.py | 4 +- contrib/graph/graph.py | 5 +- docker/configure_workers_and_start.py | 55 ++--- docker/start.py | 6 +- scripts-dev/build_debian_packages.py | 4 +- scripts-dev/check_locked_deps_have_sdists.py | 3 +- scripts-dev/check_pydantic_models.py | 28 +-- scripts-dev/check_schema_delta.py | 8 +- scripts-dev/federation_client.py | 8 +- scripts-dev/mypy_synapse_plugin.py | 10 +- scripts-dev/release.py | 4 +- scripts-dev/schema_versions.py | 6 +- synapse/__init__.py | 4 +- synapse/_scripts/generate_workers_map.py | 30 +-- synapse/_scripts/register_new_matrix_user.py | 6 +- synapse/_scripts/review_recent_signups.py | 11 +- synapse/_scripts/synapse_port_db.py | 51 ++-- synapse/api/auth/__init__.py | 6 +- synapse/api/auth/base.py | 6 +- synapse/api/auth/mas.py | 4 +- synapse/api/auth/msc3861_delegated.py | 14 +- synapse/api/errors.py | 16 +- synapse/api/filtering.py | 23 +- synapse/api/ratelimiting.py | 8 +- synapse/api/room_versions.py | 6 +- synapse/app/_base.py | 23 +- synapse/app/admin_cmd.py | 6 +- synapse/app/complement_fork_starter.py | 10 +- synapse/app/generic_worker.py | 5 +- synapse/app/homeserver.py | 10 +- synapse/app/phone_stats_home.py | 8 +- synapse/appservice/__init__.py | 16 +- synapse/appservice/api.py | 31 ++- synapse/appservice/scheduler.py | 26 +- synapse/config/__main__.py | 3 +- synapse/config/_base.py | 30 +-- synapse/config/_util.py | 8 +- synapse/config/api.py | 4 +- synapse/config/appservice.py | 10 +- synapse/config/cache.py | 6 +- synapse/config/cas.py | 4 +- synapse/config/database.py | 4 +- synapse/config/key.py | 14 +- synapse/config/logger.py | 4 +- synapse/config/modules.py | 4 +- synapse/config/oembed.py | 12 +- synapse/config/oidc.py | 8 +- synapse/config/password_auth_providers.py | 4 +- synapse/config/ratelimiting.py | 8 +- synapse/config/registration.py | 4 +- synapse/config/repository.py | 12 +- synapse/config/retention.py | 4 +- synapse/config/saml2.py | 6 +- synapse/config/server.py | 28 +-- synapse/config/spam_checker.py | 4 +- synapse/config/sso.py | 6 +- synapse/config/tls.py | 4 +- synapse/config/tracer.py | 6 +- synapse/config/user_types.py | 6 +- synapse/config/workers.py | 36 +-- synapse/crypto/event_signing.py | 10 +- synapse/crypto/keyring.py | 70 +++--- synapse/event_auth.py | 12 +- synapse/events/__init__.py | 30 +-- synapse/events/auto_accept_invites.py | 4 +- synapse/events/builder.py | 12 +- synapse/events/presence_router.py | 17 +- synapse/events/snapshot.py | 26 +- synapse/events/utils.py | 26 +- synapse/events/validator.py | 6 +- synapse/federation/federation_base.py | 4 +- synapse/federation/federation_client.py | 56 +++-- synapse/federation/federation_server.py | 84 ++++--- synapse/federation/persistence.py | 4 +- synapse/federation/send_queue.py | 33 ++- synapse/federation/sender/__init__.py | 13 +- .../sender/per_destination_queue.py | 26 +- .../federation/sender/transaction_manager.py | 6 +- synapse/federation/transport/client.py | 37 ++- .../federation/transport/server/__init__.py | 18 +- synapse/federation/transport/server/_base.py | 12 +- .../federation/transport/server/federation.py | 122 +++++----- synapse/federation/units.py | 12 +- synapse/handlers/account.py | 16 +- synapse/handlers/account_data.py | 6 +- synapse/handlers/account_validity.py | 6 +- synapse/handlers/admin.py | 
12 +- synapse/handlers/appservice.py | 55 ++--- synapse/handlers/auth.py | 99 ++++---- synapse/handlers/cas.py | 10 +- synapse/handlers/delayed_events.py | 10 +- synapse/handlers/device.py | 56 ++--- synapse/handlers/devicemessage.py | 8 +- synapse/handlers/directory.py | 4 +- synapse/handlers/e2e_keys.py | 76 +++--- synapse/handlers/e2e_room_keys.py | 6 +- synapse/handlers/event_auth.py | 4 +- synapse/handlers/events.py | 4 +- synapse/handlers/federation.py | 48 ++-- synapse/handlers/federation_event.py | 24 +- synapse/handlers/identity.py | 8 +- synapse/handlers/initial_sync.py | 8 +- synapse/handlers/jwt.py | 4 +- synapse/handlers/message.py | 50 ++-- synapse/handlers/oidc.py | 25 +- synapse/handlers/pagination.py | 14 +- synapse/handlers/presence.py | 101 ++++---- synapse/handlers/profile.py | 4 +- synapse/handlers/push_rules.py | 6 +- synapse/handlers/receipts.py | 14 +- synapse/handlers/register.py | 8 +- synapse/handlers/relations.py | 21 +- synapse/handlers/room.py | 59 +++-- synapse/handlers/room_list.py | 10 +- synapse/handlers/room_member.py | 72 +++--- synapse/handlers/room_member_worker.py | 14 +- synapse/handlers/room_summary.py | 32 +-- synapse/handlers/saml.py | 10 +- synapse/handlers/search.py | 34 +-- synapse/handlers/send_email.py | 4 +- synapse/handlers/sliding_sync/__init__.py | 32 +-- synapse/handlers/sliding_sync/extensions.py | 26 +- synapse/handlers/sliding_sync/room_lists.py | 108 ++++----- synapse/handlers/sso.py | 13 +- synapse/handlers/stats.py | 10 +- synapse/handlers/sync.py | 131 +++++----- synapse/handlers/typing.py | 24 +- synapse/handlers/ui_auth/checkers.py | 4 +- synapse/handlers/user_directory.py | 8 +- synapse/handlers/worker_lock.py | 15 +- synapse/http/additional_resource.py | 6 +- synapse/http/client.py | 21 +- .../federation/matrix_federation_agent.py | 4 +- synapse/http/federation/srv_resolver.py | 12 +- .../http/federation/well_known_resolver.py | 8 +- synapse/http/matrixfederationclient.py | 19 +- synapse/http/proxy.py | 8 +- synapse/http/proxyagent.py | 12 +- synapse/http/replicationagent.py | 6 +- synapse/http/request_metrics.py | 8 +- synapse/http/server.py | 15 +- synapse/http/servlet.py | 33 ++- synapse/http/site.py | 6 +- synapse/logging/context.py | 10 +- synapse/logging/formatter.py | 6 +- synapse/logging/opentracing.py | 37 ++- synapse/media/_base.py | 10 +- synapse/media/filepath.py | 10 +- synapse/media/media_repository.py | 30 +-- synapse/media/media_storage.py | 9 +- synapse/media/oembed.py | 10 +- synapse/media/preview_html.py | 29 +-- synapse/media/thumbnailer.py | 24 +- synapse/media/url_previewer.py | 4 +- synapse/metrics/__init__.py | 24 +- synapse/metrics/background_process_metrics.py | 6 +- synapse/module_api/__init__.py | 43 ++-- .../callbacks/account_validity_callbacks.py | 10 +- .../callbacks/media_repository_callbacks.py | 16 +- .../callbacks/ratelimit_callbacks.py | 4 +- .../callbacks/spamchecker_callbacks.py | 88 ++++--- .../third_party_event_rules_callbacks.py | 32 +-- synapse/notifier.py | 44 ++-- synapse/push/__init__.py | 4 +- synapse/push/bulk_push_rule_evaluator.py | 30 +-- synapse/push/clientformat.py | 14 +- synapse/push/emailpusher.py | 6 +- synapse/push/httppusher.py | 8 +- synapse/push/mailer.py | 24 +- synapse/push/presentable_names.py | 6 +- synapse/push/push_tools.py | 5 +- synapse/push/push_types.py | 8 +- synapse/push/pusher.py | 6 +- synapse/push/pusherpool.py | 4 +- synapse/replication/http/_base.py | 10 +- synapse/replication/http/account_data.py | 14 +- .../replication/http/deactivate_account.py | 4 
+- synapse/replication/http/delayed_events.py | 4 +- synapse/replication/http/devices.py | 20 +- synapse/replication/http/federation.py | 14 +- synapse/replication/http/login.py | 4 +- synapse/replication/http/membership.py | 16 +- synapse/replication/http/presence.py | 6 +- synapse/replication/http/push.py | 8 +- synapse/replication/http/register.py | 6 +- synapse/replication/http/send_events.py | 8 +- synapse/replication/http/state.py | 4 +- synapse/replication/http/streams.py | 4 +- synapse/replication/tcp/client.py | 10 +- synapse/replication/tcp/commands.py | 26 +- synapse/replication/tcp/handler.py | 28 +-- synapse/replication/tcp/protocol.py | 2 +- synapse/replication/tcp/redis.py | 8 +- synapse/replication/tcp/resource.py | 8 +- synapse/replication/tcp/streams/_base.py | 9 +- synapse/replication/tcp/streams/events.py | 16 +- synapse/replication/tcp/streams/federation.py | 6 +- synapse/rest/__init__.py | 8 +- synapse/rest/admin/__init__.py | 8 +- synapse/rest/admin/background_updates.py | 10 +- synapse/rest/admin/devices.py | 14 +- synapse/rest/admin/event_reports.py | 8 +- synapse/rest/admin/events.py | 4 +- synapse/rest/admin/experimental_features.py | 6 +- synapse/rest/admin/federation.py | 10 +- synapse/rest/admin/media.py | 28 +-- synapse/rest/admin/registration_tokens.py | 12 +- synapse/rest/admin/rooms.py | 40 ++-- synapse/rest/admin/scheduled_tasks.py | 4 +- synapse/rest/admin/server_notice_servlet.py | 8 +- synapse/rest/admin/statistics.py | 6 +- synapse/rest/admin/username_available.py | 4 +- synapse/rest/admin/users.py | 70 +++--- synapse/rest/client/_base.py | 6 +- synapse/rest/client/account.py | 32 +-- synapse/rest/client/account_data.py | 14 +- synapse/rest/client/account_validity.py | 4 +- synapse/rest/client/appservice_ping.py | 6 +- synapse/rest/client/auth_metadata.py | 6 +- synapse/rest/client/capabilities.py | 4 +- synapse/rest/client/delayed_events.py | 6 +- synapse/rest/client/devices.py | 28 +-- synapse/rest/client/directory.py | 20 +- synapse/rest/client/events.py | 8 +- synapse/rest/client/filter.py | 6 +- synapse/rest/client/initial_sync.py | 6 +- synapse/rest/client/keys.py | 22 +- synapse/rest/client/knock.py | 6 +- synapse/rest/client/login.py | 19 +- synapse/rest/client/login_token_request.py | 4 +- synapse/rest/client/logout.py | 6 +- synapse/rest/client/matrixrtc.py | 4 +- synapse/rest/client/mutual_rooms.py | 6 +- synapse/rest/client/notifications.py | 4 +- synapse/rest/client/openid.py | 4 +- synapse/rest/client/password_policy.py | 4 +- synapse/rest/client/presence.py | 6 +- synapse/rest/client/profile.py | 10 +- synapse/rest/client/push_rule.py | 18 +- synapse/rest/client/pusher.py | 6 +- synapse/rest/client/read_marker.py | 4 +- synapse/rest/client/receipts.py | 4 +- synapse/rest/client/register.py | 18 +- synapse/rest/client/relations.py | 6 +- synapse/rest/client/reporting.py | 8 +- synapse/rest/client/room.py | 84 +++---- synapse/rest/client/room_keys.py | 18 +- .../rest/client/room_upgrade_rest_servlet.py | 4 +- synapse/rest/client/sendtodevice.py | 6 +- synapse/rest/client/sync.py | 22 +- synapse/rest/client/tags.py | 8 +- synapse/rest/client/thirdparty.py | 14 +- synapse/rest/client/thread_subscriptions.py | 14 +- synapse/rest/client/transactions.py | 10 +- synapse/rest/client/user_directory.py | 4 +- synapse/rest/client/versions.py | 4 +- synapse/rest/client/voip.py | 4 +- synapse/rest/consent/consent_resource.py | 6 +- synapse/rest/key/v2/local_key_resource.py | 4 +- synapse/rest/key/v2/remote_key_resource.py | 16 +- 
synapse/rest/media/upload_resource.py | 6 +- .../synapse/client/federation_whitelist.py | 4 +- synapse/rest/synapse/client/jwks.py | 4 +- synapse/rest/synapse/client/password_reset.py | 6 +- synapse/rest/synapse/client/pick_username.py | 6 +- synapse/rest/synapse/client/rendezvous.py | 8 +- .../synapse/client/saml2/metadata_resource.py | 4 +- synapse/rest/synapse/mas/devices.py | 10 +- synapse/rest/synapse/mas/users.py | 18 +- synapse/rest/well_known.py | 4 +- synapse/server.py | 20 +- .../server_notices/consent_server_notices.py | 4 +- .../resource_limits_server_notices.py | 8 +- synapse/state/__init__.py | 25 +- synapse/state/v1.py | 30 +-- synapse/state/v2.py | 86 ++++--- synapse/storage/_base.py | 4 +- synapse/storage/background_updates.py | 18 +- synapse/storage/controllers/persist_events.py | 55 ++--- synapse/storage/controllers/purge_events.py | 5 +- synapse/storage/controllers/state.py | 28 +-- synapse/storage/controllers/stats.py | 4 +- synapse/storage/database.py | 197 ++++++++------- synapse/storage/databases/__init__.py | 6 +- synapse/storage/databases/main/__init__.py | 22 +- .../storage/databases/main/account_data.py | 46 ++-- synapse/storage/databases/main/appservice.py | 20 +- synapse/storage/databases/main/cache.py | 14 +- synapse/storage/databases/main/client_ips.py | 37 ++- .../storage/databases/main/delayed_events.py | 18 +- synapse/storage/databases/main/deviceinbox.py | 54 ++--- synapse/storage/databases/main/devices.py | 126 +++++----- synapse/storage/databases/main/directory.py | 6 +- .../storage/databases/main/e2e_room_keys.py | 27 +-- .../storage/databases/main/end_to_end_keys.py | 134 +++++------ .../databases/main/event_federation.py | 225 +++++++++--------- .../databases/main/event_push_actions.py | 66 +++-- synapse/storage/databases/main/events.py | 160 ++++++------- .../databases/main/events_bg_updates.py | 64 ++--- .../main/events_forward_extremities.py | 8 +- .../storage/databases/main/events_worker.py | 150 ++++++------ .../databases/main/experimental_features.py | 8 +- synapse/storage/databases/main/filtering.py | 4 +- synapse/storage/databases/main/keys.py | 20 +- synapse/storage/databases/main/lock.py | 14 +- .../databases/main/media_repository.py | 40 ++-- synapse/storage/databases/main/metrics.py | 26 +- .../databases/main/monthly_active_users.py | 20 +- synapse/storage/databases/main/presence.py | 27 +-- synapse/storage/databases/main/profile.py | 10 +- .../storage/databases/main/purge_events.py | 10 +- synapse/storage/databases/main/push_rule.py | 35 ++- synapse/storage/databases/main/pusher.py | 29 +-- synapse/storage/databases/main/receipts.py | 68 +++--- .../storage/databases/main/registration.py | 68 +++--- synapse/storage/databases/main/relations.py | 77 +++--- synapse/storage/databases/main/room.py | 98 ++++---- synapse/storage/databases/main/roommember.py | 109 ++++----- synapse/storage/databases/main/search.py | 35 ++- synapse/storage/databases/main/signatures.py | 6 +- .../storage/databases/main/sliding_sync.py | 18 +- synapse/storage/databases/main/state.py | 31 +-- .../storage/databases/main/state_deltas.py | 14 +- synapse/storage/databases/main/stats.py | 47 ++-- synapse/storage/databases/main/stream.py | 118 +++++---- synapse/storage/databases/main/tags.py | 20 +- .../storage/databases/main/task_scheduler.py | 18 +- .../databases/main/thread_subscriptions.py | 15 +- .../storage/databases/main/transactions.py | 42 ++-- synapse/storage/databases/main/ui_auth.py | 16 +- .../storage/databases/main/user_directory.py | 51 ++-- 
.../databases/main/user_erasure_store.py | 4 +- synapse/storage/databases/state/bg_updates.py | 17 +- synapse/storage/databases/state/deletion.py | 10 +- synapse/storage/databases/state/store.py | 38 ++- synapse/storage/engines/postgres.py | 6 +- synapse/storage/engines/sqlite.py | 4 +- synapse/storage/prepare_database.py | 8 +- synapse/storage/roommember.py | 4 +- .../storage/schema/main/delta/30/as_users.py | 6 +- synapse/storage/types.py | 28 +-- synapse/storage/util/id_generators.py | 43 ++-- .../util/partial_state_events_tracker.py | 8 +- synapse/storage/util/sequence.py | 8 +- synapse/streams/__init__.py | 4 +- synapse/streams/events.py | 4 +- synapse/types/__init__.py | 43 ++-- synapse/types/handlers/__init__.py | 8 +- synapse/types/handlers/sliding_sync.py | 24 +- synapse/types/rest/client/__init__.py | 42 ++-- synapse/types/state.py | 42 ++-- synapse/util/__init__.py | 6 +- synapse/util/async_helpers.py | 72 +++--- synapse/util/batching_queue.py | 10 +- synapse/util/caches/__init__.py | 6 +- synapse/util/caches/deferred_cache.py | 15 +- synapse/util/caches/descriptors.py | 13 +- synapse/util/caches/dictionary_cache.py | 15 +- synapse/util/caches/lrucache.py | 23 +- synapse/util/caches/response_cache.py | 3 +- synapse/util/caches/stream_change_cache.py | 14 +- synapse/util/caches/ttlcache.py | 6 +- synapse/util/clock.py | 6 +- synapse/util/daemonize.py | 4 +- synapse/util/distributor.py | 10 +- synapse/util/events.py | 6 +- synapse/util/gai_resolver.py | 11 +- synapse/util/httpresourcetree.py | 5 +- synapse/util/iterutils.py | 12 +- synapse/util/json.py | 3 +- synapse/util/linked_list.py | 6 +- synapse/util/manhole.py | 4 +- synapse/util/metrics.py | 8 +- synapse/util/module_loader.py | 4 +- synapse/util/patch_inline_callbacks.py | 6 +- synapse/util/ratelimitutils.py | 14 +- synapse/util/retryutils.py | 4 +- synapse/util/stringutils.py | 8 +- synapse/util/task_scheduler.py | 16 +- synapse/util/wheel_timer.py | 10 +- synapse/visibility.py | 27 +-- synmark/__main__.py | 4 +- tests/api/test_filtering.py | 11 +- tests/app/test_openid_listener.py | 5 +- tests/appservice/test_api.py | 6 +- tests/appservice/test_scheduler.py | 8 +- tests/config/utils.py | 3 +- tests/crypto/test_keyring.py | 30 +-- tests/events/test_auto_accept_invites.py | 8 +- tests/events/test_presence_router.py | 14 +- tests/events/test_utils.py | 4 +- tests/federation/test_federation_catch_up.py | 12 +- .../test_federation_out_of_band_membership.py | 8 +- tests/federation/test_federation_sender.py | 10 +- .../federation/transport/server/test__base.py | 9 +- tests/federation/transport/test_client.py | 4 +- tests/federation/transport/test_knocking.py | 4 +- tests/handlers/test_appservice.py | 9 +- tests/handlers/test_cas.py | 4 +- tests/handlers/test_directory.py | 8 +- tests/handlers/test_e2e_keys.py | 6 +- tests/handlers/test_message.py | 5 +- tests/handlers/test_oauth_delegation.py | 10 +- tests/handlers/test_oidc.py | 8 +- tests/handlers/test_password_providers.py | 10 +- tests/handlers/test_profile.py | 6 +- tests/handlers/test_receipts.py | 3 +- tests/handlers/test_register.py | 20 +- tests/handlers/test_room_list.py | 6 +- tests/handlers/test_room_summary.py | 24 +- tests/handlers/test_saml.py | 8 +- tests/handlers/test_send_email.py | 14 +- tests/handlers/test_sliding_sync.py | 16 +- tests/handlers/test_sso.py | 4 +- tests/handlers/test_stats.py | 8 +- tests/handlers/test_sync.py | 6 +- tests/handlers/test_typing.py | 9 +- tests/handlers/test_user_directory.py | 4 +- tests/http/__init__.py | 7 +- 
.../test_matrix_federation_agent.py | 4 +- tests/http/federation/test_srv_resolver.py | 28 +-- tests/http/server/_base.py | 20 +- tests/http/test_client.py | 6 +- tests/http/test_matrixfederationclient.py | 4 +- tests/http/test_proxy.py | 3 +- tests/http/test_proxyagent.py | 4 +- tests/http/test_servlet.py | 6 +- tests/logging/test_remote_handler.py | 3 +- tests/media/test_media_storage.py | 36 +-- tests/metrics/test_metrics.py | 10 +- tests/module_api/test_api.py | 4 +- tests/push/test_email.py | 8 +- tests/push/test_http.py | 10 +- tests/push/test_presentable_names.py | 16 +- tests/push/test_push_rule_evaluator.py | 10 +- tests/replication/_base.py | 12 +- tests/replication/http/test__base.py | 5 +- tests/replication/storage/test_events.py | 10 +- tests/replication/tcp/streams/test_events.py | 8 +- tests/replication/test_multi_media_repo.py | 6 +- tests/rest/admin/test_admin.py | 4 +- tests/rest/admin/test_event_reports.py | 3 +- tests/rest/admin/test_federation.py | 10 +- tests/rest/admin/test_jwks.py | 3 +- tests/rest/admin/test_media.py | 3 +- tests/rest/admin/test_room.py | 4 +- tests/rest/admin/test_scheduled_tasks.py | 8 +- tests/rest/admin/test_server_notice.py | 4 +- tests/rest/admin/test_statistics.py | 8 +- tests/rest/admin/test_user.py | 18 +- .../test_extension_thread_subscriptions.py | 6 +- .../sliding_sync/test_extension_to_device.py | 3 +- .../sliding_sync/test_rooms_timeline.py | 10 +- .../client/sliding_sync/test_sliding_sync.py | 8 +- tests/rest/client/test_account.py | 14 +- tests/rest/client/test_auth.py | 20 +- tests/rest/client/test_delayed_events.py | 3 +- tests/rest/client/test_login.py | 37 ++- tests/rest/client/test_media.py | 38 +-- tests/rest/client/test_notifications.py | 4 +- tests/rest/client/test_profile.py | 4 +- tests/rest/client/test_redactions.py | 6 +- tests/rest/client/test_register.py | 10 +- tests/rest/client/test_relations.py | 22 +- tests/rest/client/test_rendezvous.py | 3 +- tests/rest/client/test_retention.py | 4 +- tests/rest/client/test_rooms.py | 30 +-- tests/rest/client/test_sync.py | 3 +- tests/rest/client/test_third_party_rules.py | 16 +- tests/rest/client/test_transactions.py | 8 +- tests/rest/client/utils.py | 25 +- tests/rest/key/v2/test_remote_key_resource.py | 4 +- tests/rest/media/test_domain_blocking.py | 3 +- tests/rest/media/test_url_preview.py | 10 +- .../client/test_federation_whitelist.py | 3 +- tests/scripts/test_new_matrix_user.py | 14 +- tests/server.py | 26 +- .../test_resource_limits_server_notices.py | 3 +- tests/state/test_v2.py | 36 ++- tests/state/test_v21.py | 14 +- .../databases/main/test_end_to_end_keys.py | 4 +- .../databases/main/test_events_worker.py | 10 +- tests/storage/databases/main/test_receipts.py | 6 +- tests/storage/test__base.py | 6 +- tests/storage/test_account_data.py | 6 +- tests/storage/test_appservice.py | 10 +- tests/storage/test_background_update.py | 6 +- tests/storage/test_client_ips.py | 22 +- tests/storage/test_database.py | 4 +- tests/storage/test_devices.py | 6 +- tests/storage/test_event_chain.py | 14 +- tests/storage/test_event_federation.py | 51 ++-- tests/storage/test_event_push_actions.py | 4 +- tests/storage/test_events.py | 8 +- tests/storage/test_events_bg_updates.py | 5 +- tests/storage/test_id_generators.py | 8 +- tests/storage/test_monthly_active_users.py | 6 +- tests/storage/test_redaction.py | 6 +- tests/storage/test_rollback_worker.py | 3 +- tests/storage/test_room_search.py | 3 +- tests/storage/test_roommember.py | 8 +- tests/storage/test_sliding_sync_tables.py | 16 +- 
tests/storage/test_state.py | 8 +- tests/storage/test_stream.py | 5 +- tests/storage/test_user_directory.py | 22 +- .../util/test_partial_state_events_tracker.py | 12 +- tests/test_event_auth.py | 10 +- tests/test_mau.py | 4 +- tests/test_server.py | 14 +- tests/test_state.py | 42 ++-- tests/test_types.py | 3 +- tests/test_utils/__init__.py | 4 +- tests/test_utils/event_injection.py | 8 +- tests/test_utils/html_parsers.py | 10 +- tests/test_utils/oidc.py | 16 +- tests/unittest.py | 28 +-- tests/util/caches/test_deferred_cache.py | 5 +- tests/util/caches/test_descriptors.py | 17 +- tests/util/test_async_helpers.py | 8 +- tests/util/test_batching_queue.py | 5 +- tests/util/test_expiring_cache.py | 3 +- tests/util/test_itertools.py | 34 +-- tests/util/test_linearizer.py | 4 +- tests/util/test_lrucache.py | 9 +- tests/util/test_mutable_overlay_mapping.py | 3 +- tests/util/test_rwlock.py | 12 +- tests/util/test_task_scheduler.py | 14 +- tests/utils.py | 10 +- 528 files changed, 4493 insertions(+), 4928 deletions(-) diff --git a/build_rust.py b/build_rust.py index 5c796af4611..af7bd2fdc5f 100644 --- a/build_rust.py +++ b/build_rust.py @@ -2,13 +2,13 @@ import itertools import os -from typing import Any, Dict +from typing import Any from packaging.specifiers import SpecifierSet from setuptools_rust import Binding, RustExtension -def build(setup_kwargs: Dict[str, Any]) -> None: +def build(setup_kwargs: dict[str, Any]) -> None: original_project_dir = os.path.dirname(os.path.realpath(__file__)) cargo_toml_path = os.path.join(original_project_dir, "rust", "Cargo.toml") diff --git a/contrib/graph/graph.py b/contrib/graph/graph.py index 9d5f3c7f4f7..2898bb3448f 100644 --- a/contrib/graph/graph.py +++ b/contrib/graph/graph.py @@ -24,7 +24,6 @@ import html import json import urllib.request -from typing import List import pydot @@ -33,7 +32,7 @@ def make_name(pdu_id: str, origin: str) -> str: return f"{pdu_id}@{origin}" -def make_graph(pdus: List[dict], filename_prefix: str) -> None: +def make_graph(pdus: list[dict], filename_prefix: str) -> None: """ Generate a dot and SVG file for a graph of events in the room based on the topological ordering by querying a homeserver. @@ -127,7 +126,7 @@ def make_graph(pdus: List[dict], filename_prefix: str) -> None: graph.write_svg("%s.svg" % filename_prefix, prog="dot") -def get_pdus(host: str, room: str) -> List[dict]: +def get_pdus(host: str, room: str) -> list[dict]: transaction = json.loads( urllib.request.urlopen( f"http://{host}/_matrix/federation/v1/context/{room}/" diff --git a/docker/configure_workers_and_start.py b/docker/configure_workers_and_start.py index 6f25653bb74..2451d1f300d 100755 --- a/docker/configure_workers_and_start.py +++ b/docker/configure_workers_and_start.py @@ -65,13 +65,10 @@ from pathlib import Path from typing import ( Any, - Dict, - List, Mapping, MutableMapping, NoReturn, Optional, - Set, SupportsIndex, ) @@ -96,7 +93,7 @@ # Watching /_matrix/media and related needs a "media" listener # Stream Writers require "client" and "replication" listeners because they # have to attach by instance_map to the master process and have client endpoints. 
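# --- Illustrative sketch, not part of this patch: the PEP 585 rewrite pattern the
# commit applies throughout. On Python 3.9+ the builtin collection types accept
# subscripts directly, so typing.Dict/List/Set/Tuple become dict/list/set/tuple,
# while names with no builtin equivalent (Optional, Union, Callable, Mapping, ...)
# are still imported from typing. All names below are hypothetical, for illustration.
from typing import Optional

def count_tokens(lines: list[str]) -> dict[str, int]:
    # Previously this would have been annotated List[str] -> Dict[str, int].
    counts: dict[str, int] = {}
    for line in lines:
        for token in line.split():
            counts[token] = counts.get(token, 0) + 1
    return counts

def busiest(counts: dict[str, int]) -> Optional[tuple[str, int]]:
    # Optional and Union have no builtin spelling pre-3.10, so they stay in typing.
    if not counts:
        return None
    token = max(counts, key=counts.__getitem__)
    return token, counts[token]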
-WORKERS_CONFIG: Dict[str, Dict[str, Any]] = { +WORKERS_CONFIG: dict[str, dict[str, Any]] = { "pusher": { "app": "synapse.app.generic_worker", "listener_resources": [], @@ -408,7 +405,7 @@ def convert(src: str, dst: str, **template_vars: object) -> None: def add_worker_roles_to_shared_config( shared_config: dict, - worker_types_set: Set[str], + worker_types_set: set[str], worker_name: str, worker_port: int, ) -> None: @@ -471,9 +468,9 @@ def add_worker_roles_to_shared_config( def merge_worker_template_configs( - existing_dict: Optional[Dict[str, Any]], - to_be_merged_dict: Dict[str, Any], -) -> Dict[str, Any]: + existing_dict: Optional[dict[str, Any]], + to_be_merged_dict: dict[str, Any], +) -> dict[str, Any]: """When given an existing dict of worker template configuration consisting with both dicts and lists, merge new template data from WORKERS_CONFIG(or create) and return new dict. @@ -484,7 +481,7 @@ def merge_worker_template_configs( existing_dict. Returns: The newly merged together dict values. """ - new_dict: Dict[str, Any] = {} + new_dict: dict[str, Any] = {} if not existing_dict: # It doesn't exist yet, just use the new dict(but take a copy not a reference) new_dict = to_be_merged_dict.copy() @@ -509,8 +506,8 @@ def merge_worker_template_configs( def insert_worker_name_for_worker_config( - existing_dict: Dict[str, Any], worker_name: str -) -> Dict[str, Any]: + existing_dict: dict[str, Any], worker_name: str +) -> dict[str, Any]: """Insert a given worker name into the worker's configuration dict. Args: @@ -526,7 +523,7 @@ def insert_worker_name_for_worker_config( return dict_to_edit -def apply_requested_multiplier_for_worker(worker_types: List[str]) -> List[str]: +def apply_requested_multiplier_for_worker(worker_types: list[str]) -> list[str]: """ Apply multiplier(if found) by returning a new expanded list with some basic error checking. @@ -587,7 +584,7 @@ def is_sharding_allowed_for_worker_type(worker_type: str) -> bool: def split_and_strip_string( given_string: str, split_char: str, max_split: SupportsIndex = -1 -) -> List[str]: +) -> list[str]: """ Helper to split a string on split_char and strip whitespace from each end of each element. @@ -616,8 +613,8 @@ def generate_base_homeserver_config() -> None: def parse_worker_types( - requested_worker_types: List[str], -) -> Dict[str, Set[str]]: + requested_worker_types: list[str], +) -> dict[str, set[str]]: """Read the desired list of requested workers and prepare the data for use in generating worker config files while also checking for potential gotchas. @@ -633,14 +630,14 @@ def parse_worker_types( # A counter of worker_base_name -> int. Used for determining the name for a given # worker when generating its config file, as each worker's name is just # worker_base_name followed by instance number - worker_base_name_counter: Dict[str, int] = defaultdict(int) + worker_base_name_counter: dict[str, int] = defaultdict(int) # Similar to above, but more finely grained. This is used to determine we don't have # more than a single worker for cases where multiples would be bad(e.g. presence). - worker_type_shard_counter: Dict[str, int] = defaultdict(int) + worker_type_shard_counter: dict[str, int] = defaultdict(int) # The final result of all this processing - dict_to_return: Dict[str, Set[str]] = {} + dict_to_return: dict[str, set[str]] = {} # Handle any multipliers requested for given workers. 
multiple_processed_worker_types = apply_requested_multiplier_for_worker( @@ -684,7 +681,7 @@ def parse_worker_types( # Split the worker_type_string on "+", remove whitespace from ends then make # the list a set so it's deduplicated. - worker_types_set: Set[str] = set( + worker_types_set: set[str] = set( split_and_strip_string(worker_type_string, "+") ) @@ -743,7 +740,7 @@ def generate_worker_files( environ: Mapping[str, str], config_path: str, data_dir: str, - requested_worker_types: Dict[str, Set[str]], + requested_worker_types: dict[str, set[str]], ) -> None: """Read the desired workers(if any) that is passed in and generate shared homeserver, nginx and supervisord configs. @@ -764,7 +761,7 @@ def generate_worker_files( # First read the original config file and extract the listeners block. Then we'll # add another listener for replication. Later we'll write out the result to the # shared config file. - listeners: List[Any] + listeners: list[Any] if using_unix_sockets: listeners = [ { @@ -792,12 +789,12 @@ def generate_worker_files( # base shared worker jinja2 template. This config file will be passed to all # workers, included Synapse's main process. It is intended mainly for disabling # functionality when certain workers are spun up, and adding a replication listener. - shared_config: Dict[str, Any] = {"listeners": listeners} + shared_config: dict[str, Any] = {"listeners": listeners} # List of dicts that describe workers. # We pass this to the Supervisor template later to generate the appropriate # program blocks. - worker_descriptors: List[Dict[str, Any]] = [] + worker_descriptors: list[dict[str, Any]] = [] # Upstreams for load-balancing purposes. This dict takes the form of the worker # type to the ports of each worker. For example: @@ -805,14 +802,14 @@ def generate_worker_files( # worker_type: {1234, 1235, ...}} # } # and will be used to construct 'upstream' nginx directives. - nginx_upstreams: Dict[str, Set[int]] = {} + nginx_upstreams: dict[str, set[int]] = {} # A map of: {"endpoint": "upstream"}, where "upstream" is a str representing what # will be placed after the proxy_pass directive. The main benefit to representing # this data as a dict over a str is that we can easily deduplicate endpoints # across multiple instances of the same worker. The final rendering will be combined # with nginx_upstreams and placed in /etc/nginx/conf.d. - nginx_locations: Dict[str, str] = {} + nginx_locations: dict[str, str] = {} # Create the worker configuration directory if it doesn't already exist os.makedirs("/conf/workers", exist_ok=True) @@ -846,7 +843,7 @@ def generate_worker_files( # yaml config file for worker_name, worker_types_set in requested_worker_types.items(): # The collected and processed data will live here. 
- worker_config: Dict[str, Any] = {} + worker_config: dict[str, Any] = {} # Merge all worker config templates for this worker into a single config for worker_type in worker_types_set: @@ -1029,7 +1026,7 @@ def generate_worker_log_config( Returns: the path to the generated file """ # Check whether we should write worker logs to disk, in addition to the console - extra_log_template_args: Dict[str, Optional[str]] = {} + extra_log_template_args: dict[str, Optional[str]] = {} if environ.get("SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK"): extra_log_template_args["LOG_FILE_PATH"] = f"{data_dir}/logs/{worker_name}.log" @@ -1053,7 +1050,7 @@ def generate_worker_log_config( return log_config_filepath -def main(args: List[str], environ: MutableMapping[str, str]) -> None: +def main(args: list[str], environ: MutableMapping[str, str]) -> None: parser = ArgumentParser() parser.add_argument( "--generate-only", @@ -1087,7 +1084,7 @@ def main(args: List[str], environ: MutableMapping[str, str]) -> None: if not worker_types_env: # No workers, just the main process worker_types = [] - requested_worker_types: Dict[str, Any] = {} + requested_worker_types: dict[str, Any] = {} else: # Split type names by comma, ignoring whitespace. worker_types = split_and_strip_string(worker_types_env, ",") diff --git a/docker/start.py b/docker/start.py index 0be9976a0c1..daa041d4637 100755 --- a/docker/start.py +++ b/docker/start.py @@ -6,7 +6,7 @@ import platform import subprocess import sys -from typing import Any, Dict, List, Mapping, MutableMapping, NoReturn, Optional +from typing import Any, Mapping, MutableMapping, NoReturn, Optional import jinja2 @@ -69,7 +69,7 @@ def generate_config_from_template( ) # populate some params from data files (if they exist, else create new ones) - environ: Dict[str, Any] = dict(os_environ) + environ: dict[str, Any] = dict(os_environ) secrets = { "registration": "SYNAPSE_REGISTRATION_SHARED_SECRET", "macaroon": "SYNAPSE_MACAROON_SECRET_KEY", @@ -200,7 +200,7 @@ def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) -> subprocess.run(args, check=True) -def main(args: List[str], environ: MutableMapping[str, str]) -> None: +def main(args: list[str], environ: MutableMapping[str, str]) -> None: mode = args[1] if len(args) > 1 else "run" # if we were given an explicit user to switch to, do so diff --git a/scripts-dev/build_debian_packages.py b/scripts-dev/build_debian_packages.py index 6150dc15a32..f94c5a37fcd 100755 --- a/scripts-dev/build_debian_packages.py +++ b/scripts-dev/build_debian_packages.py @@ -18,7 +18,7 @@ import threading from concurrent.futures import ThreadPoolExecutor from types import FrameType -from typing import Collection, Optional, Sequence, Set +from typing import Collection, Optional, Sequence # These are expanded inside the dockerfile to be a fully qualified image name. # e.g. 
docker.io/library/debian:bullseye @@ -54,7 +54,7 @@ def __init__( ): self.redirect_stdout = redirect_stdout self._docker_build_args = tuple(docker_build_args or ()) - self.active_containers: Set[str] = set() + self.active_containers: set[str] = set() self._lock = threading.Lock() self._failed = False diff --git a/scripts-dev/check_locked_deps_have_sdists.py b/scripts-dev/check_locked_deps_have_sdists.py index cabe3b8de17..f035ecb644c 100755 --- a/scripts-dev/check_locked_deps_have_sdists.py +++ b/scripts-dev/check_locked_deps_have_sdists.py @@ -21,7 +21,6 @@ # import sys from pathlib import Path -from typing import Dict, List import tomli @@ -33,7 +32,7 @@ def main() -> None: # Poetry 1.3+ lockfile format: # There's a `files` inline table in each [[package]] - packages_to_assets: Dict[str, List[Dict[str, str]]] = { + packages_to_assets: dict[str, list[dict[str, str]]] = { package["name"]: package["files"] for package in lockfile_content["package"] } diff --git a/scripts-dev/check_pydantic_models.py b/scripts-dev/check_pydantic_models.py index 26a473a61b6..69c49e258db 100755 --- a/scripts-dev/check_pydantic_models.py +++ b/scripts-dev/check_pydantic_models.py @@ -47,11 +47,7 @@ from typing import ( Any, Callable, - Dict, Generator, - List, - Set, - Type, TypeVar, ) @@ -69,7 +65,7 @@ logger = logging.getLogger(__name__) -CONSTRAINED_TYPE_FACTORIES_WITH_STRICT_FLAG: List[Callable] = [ +CONSTRAINED_TYPE_FACTORIES_WITH_STRICT_FLAG: list[Callable] = [ constr, conbytes, conint, @@ -145,7 +141,7 @@ class PatchedBaseModel(PydanticBaseModel): """ @classmethod - def __init_subclass__(cls: Type[PydanticBaseModel], **kwargs: object): + def __init_subclass__(cls: type[PydanticBaseModel], **kwargs: object): for field in cls.__fields__.values(): # Note that field.type_ and field.outer_type are computed based on the # annotation type, see pydantic.fields.ModelField._type_analysis @@ -212,7 +208,7 @@ def lint() -> int: return os.EX_DATAERR if failures else os.EX_OK -def do_lint() -> Set[str]: +def do_lint() -> set[str]: """Try to import all of Synapse and see if we spot any Pydantic type coercions.""" failures = set() @@ -258,8 +254,8 @@ def run_test_snippet(source: str) -> None: # > Remember that at the module level, globals and locals are the same dictionary. # > If exec gets two separate objects as globals and locals, the code will be # > executed as if it were embedded in a class definition. 
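# --- Illustrative sketch, not part of this patch: a minimal demonstration of the
# exec() behaviour the comment above quotes from the docs. With one shared dict,
# the snippet runs as if at module level; with two separate dicts, it is scoped
# like a class body, so a nested function cannot see the top-level name.
shared: dict[str, object] = {}
exec("x = 1\ndef f():\n    return x\n", shared, shared)
assert shared["f"]() == 1  # shared namespace: f resolves x via its globals

separate_globals: dict[str, object] = {}
separate_locals: dict[str, object] = {}
exec("x = 1\ndef f():\n    return x\n", separate_globals, separate_locals)
try:
    separate_locals["f"]()
except NameError:
    pass  # x was bound in locals, but f() resolves names via globals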
- globals_: Dict[str, object] - locals_: Dict[str, object] + globals_: dict[str, object] + locals_: dict[str, object] globals_ = locals_ = {} exec(textwrap.dedent(source), globals_, locals_) @@ -394,10 +390,10 @@ class TestFieldTypeInspection(unittest.TestCase): ("bool"), ("Optional[str]",), ("Union[None, str]",), - ("List[str]",), - ("List[List[str]]",), - ("Dict[StrictStr, str]",), - ("Dict[str, StrictStr]",), + ("list[str]",), + ("list[list[str]]",), + ("dict[StrictStr, str]",), + ("dict[str, StrictStr]",), ("TypedDict('D', x=int)",), ] ) @@ -425,9 +421,9 @@ class C(BaseModel): ("constr(strict=True, min_length=10)",), ("Optional[StrictStr]",), ("Union[None, StrictStr]",), - ("List[StrictStr]",), - ("List[List[StrictStr]]",), - ("Dict[StrictStr, StrictStr]",), + ("list[StrictStr]",), + ("list[list[StrictStr]]",), + ("dict[StrictStr, StrictStr]",), ("TypedDict('D', x=StrictInt)",), ] ) diff --git a/scripts-dev/check_schema_delta.py b/scripts-dev/check_schema_delta.py index 454784c3ae9..7b2dec25d45 100755 --- a/scripts-dev/check_schema_delta.py +++ b/scripts-dev/check_schema_delta.py @@ -5,7 +5,7 @@ # Also checks that schema deltas do not try and create or drop indices. import re -from typing import Any, Dict, List +from typing import Any import click import git @@ -48,16 +48,16 @@ def main(force_colors: bool) -> None: r = repo.git.show(f"origin/{DEVELOP_BRANCH}:synapse/storage/schema/__init__.py") - locals: Dict[str, Any] = {} + locals: dict[str, Any] = {} exec(r, locals) current_schema_version = locals["SCHEMA_VERSION"] - diffs: List[git.Diff] = repo.remote().refs[DEVELOP_BRANCH].commit.diff(None) + diffs: list[git.Diff] = repo.remote().refs[DEVELOP_BRANCH].commit.diff(None) # Get the schema version of the local file to check against current schema on develop with open("synapse/storage/schema/__init__.py") as file: local_schema = file.read() - new_locals: Dict[str, Any] = {} + new_locals: dict[str, Any] = {} exec(local_schema, new_locals) local_schema_version = new_locals["SCHEMA_VERSION"] diff --git a/scripts-dev/federation_client.py b/scripts-dev/federation_client.py index fb879ef5555..db8655c1ced 100755 --- a/scripts-dev/federation_client.py +++ b/scripts-dev/federation_client.py @@ -43,7 +43,7 @@ import base64 import json import sys -from typing import Any, Dict, Mapping, Optional, Tuple, Union +from typing import Any, Mapping, Optional, Union from urllib import parse as urlparse import requests @@ -147,7 +147,7 @@ def request( s = requests.Session() s.mount("matrix-federation://", MatrixConnectionAdapter()) - headers: Dict[str, str] = { + headers: dict[str, str] = { "Authorization": authorization_headers[0], } @@ -303,7 +303,7 @@ def get_connection_with_tls_context( request: PreparedRequest, verify: Optional[Union[bool, str]], proxies: Optional[Mapping[str, str]] = None, - cert: Optional[Union[Tuple[str, str], str]] = None, + cert: Optional[Union[tuple[str, str], str]] = None, ) -> HTTPConnectionPool: # overrides the get_connection_with_tls_context() method in the base class parsed = urlparse.urlsplit(request.url) @@ -326,7 +326,7 @@ def get_connection_with_tls_context( ) @staticmethod - def _lookup(server_name: str) -> Tuple[str, int, str]: + def _lookup(server_name: str) -> tuple[str, int, str]: """ Do an SRV lookup on a server name and return the host:port to connect to Given the server_name (after any .well-known lookup), return the host, port and diff --git a/scripts-dev/mypy_synapse_plugin.py b/scripts-dev/mypy_synapse_plugin.py index 0b854cdba52..830c4ac4ab7 100644 --- 
a/scripts-dev/mypy_synapse_plugin.py +++ b/scripts-dev/mypy_synapse_plugin.py @@ -24,7 +24,7 @@ """ import enum -from typing import Callable, Mapping, Optional, Tuple, Type, Union +from typing import Callable, Mapping, Optional, Union import attr import mypy.types @@ -184,8 +184,8 @@ class ArgLocation: # Unbound at this point because we don't know the mypy version yet. # This is set in the `plugin(...)` function below. -MypyPydanticPluginClass: Type[Plugin] -MypyZopePluginClass: Type[Plugin] +MypyPydanticPluginClass: type[Plugin] +MypyZopePluginClass: type[Plugin] class SynapsePlugin(Plugin): @@ -795,7 +795,7 @@ def check_is_cacheable( def is_cacheable( rt: mypy.types.Type, signature: CallableType, verbose: bool -) -> Tuple[bool, Optional[str]]: +) -> tuple[bool, Optional[str]]: """ Check if a particular type is cacheable. @@ -905,7 +905,7 @@ def is_cacheable( return False, f"Don't know how to handle {type(rt).__qualname__} return type" -def plugin(version: str) -> Type[SynapsePlugin]: +def plugin(version: str) -> type[SynapsePlugin]: global MypyPydanticPluginClass, MypyZopePluginClass # This is the entry point of the plugin, and lets us deal with the fact # that the mypy plugin interface is *not* stable by looking at the version diff --git a/scripts-dev/release.py b/scripts-dev/release.py index a7e967116e3..a675e6c0a98 100755 --- a/scripts-dev/release.py +++ b/scripts-dev/release.py @@ -32,7 +32,7 @@ import urllib.request from os import path from tempfile import TemporaryDirectory -from typing import Any, List, Match, Optional, Union +from typing import Any, Match, Optional, Union import attr import click @@ -879,7 +879,7 @@ class VersionSection: start_line: int end_line: Optional[int] = None # Is none if it's the last entry - headings: List[VersionSection] = [] + headings: list[VersionSection] = [] for i, token in enumerate(tokens): # We look for level 1 headings (h1 tags). if token.type != "heading_open" or token.tag != "h1": diff --git a/scripts-dev/schema_versions.py b/scripts-dev/schema_versions.py index 5a79a433556..cec58e177fb 100755 --- a/scripts-dev/schema_versions.py +++ b/scripts-dev/schema_versions.py @@ -38,7 +38,7 @@ import json import sys from collections import defaultdict -from typing import Any, Dict, Iterator, Optional, Tuple +from typing import Any, Iterator, Optional import git from packaging import version @@ -57,7 +57,7 @@ OLDEST_SHOWN_VERSION = version.parse("v1.0") -def get_schema_versions(tag: git.Tag) -> Tuple[Optional[int], Optional[int]]: +def get_schema_versions(tag: git.Tag) -> tuple[Optional[int], Optional[int]]: """Get the schema and schema compat versions for a tag.""" schema_version = None schema_compat_version = None @@ -81,7 +81,7 @@ def get_schema_versions(tag: git.Tag) -> Tuple[Optional[int], Optional[int]]: # SCHEMA_COMPAT_VERSION is sometimes across multiple lines, the easiest # thing to do is exec the code. Luckily it has only ever existed in # a file which imports nothing else from Synapse.
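# --- Illustrative sketch, not part of this patch: PEP 585 also covers the builtin
# type itself, as in the mypy-plugin hunk above where typing.Type[Plugin] becomes
# plain type[Plugin]. The class here is a hypothetical stand-in.
class Widget:
    pass

def build(cls: type[Widget]) -> Widget:
    # A value annotated type[Widget] is the class object itself; calling it
    # constructs an instance (of Widget or a subclass).
    return cls()

widget = build(Widget)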
- locals: Dict[str, Any] = {} + locals: dict[str, Any] = {} exec(schema_file.data_stream.read().decode("utf-8"), {}, locals) schema_version = locals["SCHEMA_VERSION"] schema_compat_version = locals.get("SCHEMA_COMPAT_VERSION") diff --git a/synapse/__init__.py b/synapse/__init__.py index 3bd1b3307e5..d1c306b8f38 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -24,7 +24,7 @@ import os import sys -from typing import Any, Dict +from typing import Any from PIL import ImageFile @@ -70,7 +70,7 @@ from canonicaljson import register_preserialisation_callback from immutabledict import immutabledict - def _immutabledict_cb(d: immutabledict) -> Dict[str, Any]: + def _immutabledict_cb(d: immutabledict) -> dict[str, Any]: try: return d._dict except Exception: diff --git a/synapse/_scripts/generate_workers_map.py b/synapse/_scripts/generate_workers_map.py index f66c01040cc..e669f6902d7 100755 --- a/synapse/_scripts/generate_workers_map.py +++ b/synapse/_scripts/generate_workers_map.py @@ -25,7 +25,7 @@ import re from collections import defaultdict from dataclasses import dataclass -from typing import Dict, Iterable, Optional, Pattern, Set, Tuple +from typing import Iterable, Optional, Pattern import yaml @@ -81,7 +81,7 @@ class EnumerationResource(HttpServer): """ def __init__(self, is_worker: bool) -> None: - self.registrations: Dict[Tuple[str, str], EndpointDescription] = {} + self.registrations: dict[tuple[str, str], EndpointDescription] = {} self._is_worker = is_worker def register_paths( @@ -115,7 +115,7 @@ def register_paths( def get_registered_paths_for_hs( hs: HomeServer, -) -> Dict[Tuple[str, str], EndpointDescription]: +) -> dict[tuple[str, str], EndpointDescription]: """ Given a homeserver, get all registered endpoints and their descriptions. """ @@ -142,7 +142,7 @@ def get_registered_paths_for_hs( def get_registered_paths_for_default( worker_app: Optional[str], base_config: HomeServerConfig -) -> Dict[Tuple[str, str], EndpointDescription]: +) -> dict[tuple[str, str], EndpointDescription]: """ Given the name of a worker application and a base homeserver configuration, returns: @@ -168,9 +168,9 @@ def get_registered_paths_for_default( def elide_http_methods_if_unconflicting( - registrations: Dict[Tuple[str, str], EndpointDescription], - all_possible_registrations: Dict[Tuple[str, str], EndpointDescription], -) -> Dict[Tuple[str, str], EndpointDescription]: + registrations: dict[tuple[str, str], EndpointDescription], + all_possible_registrations: dict[tuple[str, str], EndpointDescription], +) -> dict[tuple[str, str], EndpointDescription]: """ Elides HTTP methods (by replacing them with `*`) if all possible registered methods can be handled by the worker whose registration map is `registrations`. @@ -180,13 +180,13 @@ def elide_http_methods_if_unconflicting( """ def paths_to_methods_dict( - methods_and_paths: Iterable[Tuple[str, str]], - ) -> Dict[str, Set[str]]: + methods_and_paths: Iterable[tuple[str, str]], + ) -> dict[str, set[str]]: """ Given (method, path) pairs, produces a dict from path to set of methods available at that path. 
""" - result: Dict[str, Set[str]] = {} + result: dict[str, set[str]] = {} for method, path in methods_and_paths: result.setdefault(path, set()).add(method) return result @@ -210,8 +210,8 @@ def paths_to_methods_dict( def simplify_path_regexes( - registrations: Dict[Tuple[str, str], EndpointDescription], -) -> Dict[Tuple[str, str], EndpointDescription]: + registrations: dict[tuple[str, str], EndpointDescription], +) -> dict[tuple[str, str], EndpointDescription]: """ Simplify all the path regexes for the dict of endpoint descriptions, so that we don't use the Python-specific regex extensions @@ -270,8 +270,8 @@ def main() -> None: # TODO SSO endpoints (pick_idp etc) NOT REGISTERED BY THIS SCRIPT - categories_to_methods_and_paths: Dict[ - Optional[str], Dict[Tuple[str, str], EndpointDescription] + categories_to_methods_and_paths: dict[ + Optional[str], dict[tuple[str, str], EndpointDescription] ] = defaultdict(dict) for (method, path), desc in elided_worker_paths.items(): @@ -283,7 +283,7 @@ def main() -> None: def print_category( category_name: Optional[str], - elided_worker_paths: Dict[Tuple[str, str], EndpointDescription], + elided_worker_paths: dict[tuple[str, str], EndpointDescription], ) -> None: """ Prints out a category, in documentation page style. diff --git a/synapse/_scripts/register_new_matrix_user.py b/synapse/_scripts/register_new_matrix_user.py index 4897fa94b02..3fe2f33e521 100644 --- a/synapse/_scripts/register_new_matrix_user.py +++ b/synapse/_scripts/register_new_matrix_user.py @@ -26,7 +26,7 @@ import hmac import logging import sys -from typing import Any, Callable, Dict, Optional +from typing import Any, Callable, Optional import requests import yaml @@ -262,7 +262,7 @@ def main() -> None: args = parser.parse_args() - config: Optional[Dict[str, Any]] = None + config: Optional[dict[str, Any]] = None if "config" in args and args.config: config = yaml.safe_load(args.config) @@ -350,7 +350,7 @@ def _read_file(file_path: Any, config_path: str) -> str: sys.exit(1) -def _find_client_listener(config: Dict[str, Any]) -> Optional[str]: +def _find_client_listener(config: dict[str, Any]) -> Optional[str]: # try to find a listener in the config. 
Returns a host:port pair for listener in config.get("listeners", []): if listener.get("type") != "http" or listener.get("tls", False): diff --git a/synapse/_scripts/review_recent_signups.py b/synapse/_scripts/review_recent_signups.py index 0ff7fae567d..d760a84bf25 100644 --- a/synapse/_scripts/review_recent_signups.py +++ b/synapse/_scripts/review_recent_signups.py @@ -23,7 +23,6 @@ import sys import time from datetime import datetime -from typing import List import attr @@ -50,15 +49,15 @@ class ReviewConfig(RootConfig): class UserInfo: user_id: str creation_ts: int - emails: List[str] = attr.Factory(list) - private_rooms: List[str] = attr.Factory(list) - public_rooms: List[str] = attr.Factory(list) - ips: List[str] = attr.Factory(list) + emails: list[str] = attr.Factory(list) + private_rooms: list[str] = attr.Factory(list) + public_rooms: list[str] = attr.Factory(list) + ips: list[str] = attr.Factory(list) def get_recent_users( txn: LoggingTransaction, since_ms: int, exclude_app_service: bool -) -> List[UserInfo]: +) -> list[UserInfo]: """Fetches recently registered users and some info on them.""" sql = """ diff --git a/synapse/_scripts/synapse_port_db.py b/synapse/_scripts/synapse_port_db.py index 3c79919fea4..e83c0de5a47 100755 --- a/synapse/_scripts/synapse_port_db.py +++ b/synapse/_scripts/synapse_port_db.py @@ -33,15 +33,10 @@ Any, Awaitable, Callable, - Dict, Generator, Iterable, - List, NoReturn, Optional, - Set, - Tuple, - Type, TypedDict, TypeVar, cast, @@ -244,7 +239,7 @@ # not the error then the script will show nothing outside of what's printed in the run # function. If both are defined, the script will print both the error and the stacktrace. end_error_exec_info: Optional[ - Tuple[Type[BaseException], BaseException, TracebackType] + tuple[type[BaseException], BaseException, TracebackType] ] = None R = TypeVar("R") @@ -281,8 +276,8 @@ class Store( def execute(self, f: Callable[..., R], *args: Any, **kwargs: Any) -> Awaitable[R]: return self.db_pool.runInteraction(f.__name__, f, *args, **kwargs) - def execute_sql(self, sql: str, *args: object) -> Awaitable[List[Tuple]]: - def r(txn: LoggingTransaction) -> List[Tuple]: + def execute_sql(self, sql: str, *args: object) -> Awaitable[list[tuple]]: + def r(txn: LoggingTransaction) -> list[tuple]: txn.execute(sql, args) return txn.fetchall() @@ -292,8 +287,8 @@ def insert_many_txn( self, txn: LoggingTransaction, table: str, - headers: List[str], - rows: List[Tuple], + headers: list[str], + rows: list[tuple], override_system_value: bool = False, ) -> None: sql = "INSERT INTO %s (%s) %s VALUES (%s)" % ( @@ -330,7 +325,7 @@ def __init__(self, config: HomeServerConfig): class Porter: def __init__( self, - sqlite_config: Dict[str, Any], + sqlite_config: dict[str, Any], progress: "Progress", batch_size: int, hs: HomeServer, @@ -340,7 +335,7 @@ def __init__( self.batch_size = batch_size self.hs = hs - async def setup_table(self, table: str) -> Tuple[str, int, int, int, int]: + async def setup_table(self, table: str) -> tuple[str, int, int, int, int]: if table in APPEND_ONLY_TABLES: # It's safe to just carry on inserting. 
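# --- Illustrative sketch, not part of this patch: the fetchall pattern used by
# Store.execute_sql above, restated with the new builtin generics. The stdlib
# sqlite3 module stands in for Synapse's LoggingTransaction here (a hypothetical
# substitution; the real method is async and runs inside runInteraction).
import sqlite3

def execute_sql(conn: sqlite3.Connection, sql: str, *args: object) -> list[tuple]:
    # cursor.fetchall() returns a list of tuples, matching the list[tuple]
    # annotation the patch switches to.
    return conn.execute(sql, args).fetchall()

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE t (x INTEGER)")
conn.execute("INSERT INTO t VALUES (1)")
assert execute_sql(conn, "SELECT x FROM t WHERE x >= ?", 0) == [(1,)]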
row = await self.postgres_store.db_pool.simple_select_one( @@ -403,10 +398,10 @@ def delete_all(txn: LoggingTransaction) -> None: return table, already_ported, total_to_port, forward_chunk, backward_chunk - async def get_table_constraints(self) -> Dict[str, Set[str]]: + async def get_table_constraints(self) -> dict[str, set[str]]: """Returns a map of tables that have foreign key constraints to tables they depend on.""" - def _get_constraints(txn: LoggingTransaction) -> Dict[str, Set[str]]: + def _get_constraints(txn: LoggingTransaction) -> dict[str, set[str]]: # We can pull the information about foreign key constraints out from # the postgres schema tables. sql = """ @@ -422,7 +417,7 @@ def _get_constraints(txn: LoggingTransaction) -> Dict[str, Set[str]]: """ txn.execute(sql) - results: Dict[str, Set[str]] = {} + results: dict[str, set[str]] = {} for table, foreign_table in txn: results.setdefault(table, set()).add(foreign_table) return results @@ -490,7 +485,7 @@ async def handle_table( def r( txn: LoggingTransaction, - ) -> Tuple[Optional[List[str]], List[Tuple], List[Tuple]]: + ) -> tuple[Optional[list[str]], list[tuple], list[tuple]]: forward_rows = [] backward_rows = [] if do_forward[0]: @@ -507,7 +502,7 @@ def r( if forward_rows or backward_rows: assert txn.description is not None - headers: Optional[List[str]] = [ + headers: Optional[list[str]] = [ column[0] for column in txn.description ] else: @@ -574,7 +569,7 @@ async def handle_search_table( while True: - def r(txn: LoggingTransaction) -> Tuple[List[str], List[Tuple]]: + def r(txn: LoggingTransaction) -> tuple[list[str], list[tuple]]: txn.execute(select, (forward_chunk, self.batch_size)) rows = txn.fetchall() assert txn.description is not None @@ -956,7 +951,7 @@ def alter_table(txn: LoggingTransaction) -> None: self.progress.set_state("Copying to postgres") constraints = await self.get_table_constraints() - tables_ported = set() # type: Set[str] + tables_ported = set() # type: set[str] while tables_to_port_info_map: # Pulls out all tables that are still to be ported and which @@ -995,8 +990,8 @@ def alter_table(txn: LoggingTransaction) -> None: reactor.stop() def _convert_rows( - self, table: str, headers: List[str], rows: List[Tuple] - ) -> List[Tuple]: + self, table: str, headers: list[str], rows: list[tuple] + ) -> list[tuple]: bool_col_names = BOOLEAN_COLUMNS.get(table, []) bool_cols = [i for i, h in enumerate(headers) if h in bool_col_names] @@ -1030,7 +1025,7 @@ def conv(j: int, col: object) -> object: return outrows - async def _setup_sent_transactions(self) -> Tuple[int, int, int]: + async def _setup_sent_transactions(self) -> tuple[int, int, int]: # Only save things from the last day yesterday = int(time.time() * 1000) - 86400000 @@ -1042,7 +1037,7 @@ async def _setup_sent_transactions(self) -> Tuple[int, int, int]: ")" ) - def r(txn: LoggingTransaction) -> Tuple[List[str], List[Tuple]]: + def r(txn: LoggingTransaction) -> tuple[list[str], list[tuple]]: txn.execute(select) rows = txn.fetchall() assert txn.description is not None @@ -1112,14 +1107,14 @@ async def _get_remaining_count_to_port( self, table: str, forward_chunk: int, backward_chunk: int ) -> int: frows = cast( - List[Tuple[int]], + list[tuple[int]], await self.sqlite_store.execute_sql( "SELECT count(*) FROM %s WHERE rowid >= ?" % (table,), forward_chunk ), ) brows = cast( - List[Tuple[int]], + list[tuple[int]], await self.sqlite_store.execute_sql( "SELECT count(*) FROM %s WHERE rowid <= ?" 
% (table,), backward_chunk ), @@ -1136,7 +1131,7 @@ async def _get_already_ported_count(self, table: str) -> int: async def _get_total_count_to_port( self, table: str, forward_chunk: int, backward_chunk: int - ) -> Tuple[int, int]: + ) -> tuple[int, int]: remaining, done = await make_deferred_yieldable( defer.gatherResults( [ @@ -1221,7 +1216,7 @@ def _setup_events_stream_seqs_set_pos(txn: LoggingTransaction) -> None: async def _setup_sequence( self, sequence_name: str, - stream_id_tables: Iterable[Tuple[str, str]], + stream_id_tables: Iterable[tuple[str, str]], ) -> None: """Set a sequence to the correct value.""" current_stream_ids = [] @@ -1331,7 +1326,7 @@ class Progress: """Used to report progress of the port""" def __init__(self) -> None: - self.tables: Dict[str, TableProgress] = {} + self.tables: dict[str, TableProgress] = {} self.start_time = int(time.time()) diff --git a/synapse/api/auth/__init__.py b/synapse/api/auth/__init__.py index d253938329b..cc0c0d46014 100644 --- a/synapse/api/auth/__init__.py +++ b/synapse/api/auth/__init__.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import TYPE_CHECKING, Optional, Protocol, Tuple +from typing import TYPE_CHECKING, Optional, Protocol from prometheus_client import Histogram @@ -51,7 +51,7 @@ async def check_user_in_room( room_id: str, requester: Requester, allow_departed_users: bool = False, - ) -> Tuple[str, Optional[str]]: + ) -> tuple[str, Optional[str]]: """Check if the user is in the room, or was at some point. Args: room_id: The room to check. @@ -190,7 +190,7 @@ def get_access_token_from_request(request: Request) -> str: async def check_user_in_room_or_world_readable( self, room_id: str, requester: Requester, allow_departed_users: bool = False - ) -> Tuple[str, Optional[str]]: + ) -> tuple[str, Optional[str]]: """Checks that the user is or was in the room or the room is world readable. If it isn't then an exception is raised. diff --git a/synapse/api/auth/base.py b/synapse/api/auth/base.py index 76c8c71628a..6488b348e0d 100644 --- a/synapse/api/auth/base.py +++ b/synapse/api/auth/base.py @@ -19,7 +19,7 @@ # # import logging -from typing import TYPE_CHECKING, Optional, Tuple +from typing import TYPE_CHECKING, Optional from netaddr import IPAddress @@ -64,7 +64,7 @@ async def check_user_in_room( room_id: str, requester: Requester, allow_departed_users: bool = False, - ) -> Tuple[str, Optional[str]]: + ) -> tuple[str, Optional[str]]: """Check if the user is in the room, or was at some point. Args: room_id: The room to check. @@ -114,7 +114,7 @@ async def check_user_in_room( @trace async def check_user_in_room_or_world_readable( self, room_id: str, requester: Requester, allow_departed_users: bool = False - ) -> Tuple[str, Optional[str]]: + ) -> tuple[str, Optional[str]]: """Checks that the user is or was in the room or the room is world readable. If it isn't then an exception is raised. diff --git a/synapse/api/auth/mas.py b/synapse/api/auth/mas.py index baa6b27336f..325d2641616 100644 --- a/synapse/api/auth/mas.py +++ b/synapse/api/auth/mas.py @@ -13,7 +13,7 @@ # # import logging -from typing import TYPE_CHECKING, Optional, Set +from typing import TYPE_CHECKING, Optional from urllib.parse import urlencode from synapse._pydantic_compat import ( @@ -369,7 +369,7 @@ async def get_user_by_access_token( # We only allow a single device_id in the scope, so we find them all in the # scope list, and raise if there are more than one. 
The OIDC server should be # the one enforcing valid scopes, so we raise a 500 if we find an invalid scope. - device_ids: Set[str] = set() + device_ids: set[str] = set() for tok in scope: if tok.startswith(UNSTABLE_SCOPE_MATRIX_DEVICE_PREFIX): device_ids.add(tok[len(UNSTABLE_SCOPE_MATRIX_DEVICE_PREFIX) :]) diff --git a/synapse/api/auth/msc3861_delegated.py b/synapse/api/auth/msc3861_delegated.py index b6adcc83dca..48b32aa04af 100644 --- a/synapse/api/auth/msc3861_delegated.py +++ b/synapse/api/auth/msc3861_delegated.py @@ -20,7 +20,7 @@ # import logging from dataclasses import dataclass -from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Set +from typing import TYPE_CHECKING, Any, Callable, Optional from urllib.parse import urlencode from authlib.oauth2 import ClientAuth @@ -70,7 +70,7 @@ SCOPE_SYNAPSE_ADMIN = "urn:synapse:admin:*" -def scope_to_list(scope: str) -> List[str]: +def scope_to_list(scope: str) -> list[str]: """Convert a scope string to a list of scope tokens""" return scope.strip().split(" ") @@ -96,7 +96,7 @@ def is_active(self, now_ms: int) -> bool: absolute_expiry_ms = expires_in * 1000 + self.retrieved_at_ms return now_ms < absolute_expiry_ms - def get_scope_list(self) -> List[str]: + def get_scope_list(self) -> list[str]: value = self._inner.get("scope") if not isinstance(value, str): return [] @@ -264,7 +264,7 @@ async def account_management_url(self) -> Optional[str]: logger.warning("Failed to load metadata:", exc_info=True) return None - async def auth_metadata(self) -> Dict[str, Any]: + async def auth_metadata(self) -> dict[str, Any]: """ Returns the auth metadata dict """ @@ -303,7 +303,7 @@ async def _introspect_token( # By default, we shouldn't cache the result unless we know it's valid cache_context.should_cache = False introspection_endpoint = await self._introspection_endpoint() - raw_headers: Dict[str, str] = { + raw_headers: dict[str, str] = { "Content-Type": "application/x-www-form-urlencoded", "Accept": "application/json", # Tell MAS that we support reading the device ID as an explicit @@ -520,7 +520,7 @@ async def get_user_by_access_token( raise InvalidClientTokenError("Token is not active") # Let's look at the scope - scope: List[str] = introspection_result.get_scope_list() + scope: list[str] = introspection_result.get_scope_list() # Determine type of user based on presence of particular scopes has_user_scope = ( @@ -575,7 +575,7 @@ async def get_user_by_access_token( # We only allow a single device_id in the scope, so we find them all in the # scope list, and raise if there are more than one. The OIDC server should be # the one enforcing valid scopes, so we raise a 500 if we find an invalid scope. 
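Before the final line of this hunk, an aside: the scope-to-device-ID extraction this comment describes is easy to run standalone. A minimal sketch; the prefix value below is an assumed placeholder, not necessarily the constant Synapse defines, while `scope_to_list` matches the helper shown later in this patch:

```python
# Sketch only: mirrors the loop in this hunk. The prefix value is assumed
# for illustration; Synapse defines the real constant elsewhere.
UNSTABLE_SCOPE_MATRIX_DEVICE_PREFIX = "urn:matrix:client:device:"

def scope_to_list(scope: str) -> list[str]:
    """Convert a scope string to a list of scope tokens."""
    return scope.strip().split(" ")

scope = scope_to_list("urn:matrix:client:api:* urn:matrix:client:device:ABCDEFGH")
device_ids: set[str] = set()
for tok in scope:
    if tok.startswith(UNSTABLE_SCOPE_MATRIX_DEVICE_PREFIX):
        device_ids.add(tok[len(UNSTABLE_SCOPE_MATRIX_DEVICE_PREFIX):])

assert device_ids == {"ABCDEFGH"}  # more than one entry would be a 500 here
```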
- device_ids: Set[str] = set() + device_ids: set[str] = set() for tok in scope: if tok.startswith(UNSTABLE_SCOPE_MATRIX_DEVICE_PREFIX): device_ids.add(tok[len(UNSTABLE_SCOPE_MATRIX_DEVICE_PREFIX) :]) diff --git a/synapse/api/errors.py b/synapse/api/errors.py index fb6721c0eea..f75b34ef69e 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -26,7 +26,7 @@ import typing from enum import Enum from http import HTTPStatus -from typing import Any, Dict, List, Optional, Union +from typing import Any, Optional, Union from twisted.web import http @@ -166,7 +166,7 @@ def __init__( self, code: Union[int, HTTPStatus], msg: str, - headers: Optional[Dict[str, str]] = None, + headers: Optional[dict[str, str]] = None, ): super().__init__("%d: %s" % (code, msg)) @@ -201,7 +201,7 @@ def __init__(self, location: bytes, http_code: int = http.FOUND): super().__init__(code=http_code, msg=msg) self.location = location - self.cookies: List[bytes] = [] + self.cookies: list[bytes] = [] class SynapseError(CodeMessageException): @@ -223,8 +223,8 @@ def __init__( code: int, msg: str, errcode: str = Codes.UNKNOWN, - additional_fields: Optional[Dict] = None, - headers: Optional[Dict[str, str]] = None, + additional_fields: Optional[dict] = None, + headers: Optional[dict[str, str]] = None, ): """Constructs a synapse error. @@ -236,7 +236,7 @@ def __init__( super().__init__(code, msg, headers) self.errcode = errcode if additional_fields is None: - self._additional_fields: Dict = {} + self._additional_fields: dict = {} else: self._additional_fields = dict(additional_fields) @@ -276,7 +276,7 @@ def __init__( code: int, msg: str, errcode: str = Codes.UNKNOWN, - additional_fields: Optional[Dict] = None, + additional_fields: Optional[dict] = None, ): super().__init__(code, msg, errcode, additional_fields) @@ -409,7 +409,7 @@ class OAuthInsufficientScopeError(SynapseError): def __init__( self, - required_scopes: List[str], + required_scopes: list[str], ): headers = { "WWW-Authenticate": 'Bearer error="insufficient_scope", scope="%s"' diff --git a/synapse/api/filtering.py b/synapse/api/filtering.py index 34dd12368a0..e31bec1a00b 100644 --- a/synapse/api/filtering.py +++ b/synapse/api/filtering.py @@ -26,12 +26,9 @@ Awaitable, Callable, Collection, - Dict, Iterable, - List, Mapping, Optional, - Set, TypeVar, Union, ) @@ -248,34 +245,34 @@ def unread_thread_notifications(self) -> bool: async def filter_presence( self, presence_states: Iterable[UserPresenceState] - ) -> List[UserPresenceState]: + ) -> list[UserPresenceState]: return await self._presence_filter.filter(presence_states) async def filter_global_account_data( self, events: Iterable[JsonDict] - ) -> List[JsonDict]: + ) -> list[JsonDict]: return await self._global_account_data_filter.filter(events) - async def filter_room_state(self, events: Iterable[EventBase]) -> List[EventBase]: + async def filter_room_state(self, events: Iterable[EventBase]) -> list[EventBase]: return await self._room_state_filter.filter( await self._room_filter.filter(events) ) async def filter_room_timeline( self, events: Iterable[EventBase] - ) -> List[EventBase]: + ) -> list[EventBase]: return await self._room_timeline_filter.filter( await self._room_filter.filter(events) ) - async def filter_room_ephemeral(self, events: Iterable[JsonDict]) -> List[JsonDict]: + async def filter_room_ephemeral(self, events: Iterable[JsonDict]) -> list[JsonDict]: return await self._room_ephemeral_filter.filter( await self._room_filter.filter(events) ) async def filter_room_account_data( self, events: 
Iterable[JsonDict] - ) -> List[JsonDict]: + ) -> list[JsonDict]: return await self._room_account_data_filter.filter( await self._room_filter.filter(events) ) @@ -440,7 +437,7 @@ def _check(self, event: FilterEvent) -> bool: return True - def _check_fields(self, field_matchers: Dict[str, Callable[[str], bool]]) -> bool: + def _check_fields(self, field_matchers: dict[str, Callable[[str], bool]]) -> bool: """Checks whether the filter matches the given event fields. Args: @@ -474,7 +471,7 @@ def _check_fields(self, field_matchers: Dict[str, Callable[[str], bool]]) -> boo # Otherwise, accept it. return True - def filter_rooms(self, room_ids: Iterable[str]) -> Set[str]: + def filter_rooms(self, room_ids: Iterable[str]) -> set[str]: """Apply the 'rooms' filter to a given list of rooms. Args: @@ -496,7 +493,7 @@ def filter_rooms(self, room_ids: Iterable[str]) -> Set[str]: async def _check_event_relations( self, events: Collection[FilterEvent] - ) -> List[FilterEvent]: + ) -> list[FilterEvent]: # The event IDs to check, mypy doesn't understand the isinstance check. event_ids = [event.event_id for event in events if isinstance(event, EventBase)] # type: ignore[attr-defined] event_ids_to_keep = set( @@ -511,7 +508,7 @@ async def _check_event_relations( if not isinstance(event, EventBase) or event.event_id in event_ids_to_keep ] - async def filter(self, events: Iterable[FilterEvent]) -> List[FilterEvent]: + async def filter(self, events: Iterable[FilterEvent]) -> list[FilterEvent]: result = [event for event in events if self._check(event)] if self.related_by_senders or self.related_by_rel_types: diff --git a/synapse/api/ratelimiting.py b/synapse/api/ratelimiting.py index 9d1c7801bc3..1a43bdff23a 100644 --- a/synapse/api/ratelimiting.py +++ b/synapse/api/ratelimiting.py @@ -20,7 +20,7 @@ # # -from typing import TYPE_CHECKING, Dict, Hashable, Optional, Tuple +from typing import TYPE_CHECKING, Hashable, Optional from synapse.api.errors import LimitExceededError from synapse.config.ratelimiting import RatelimitSettings @@ -92,7 +92,7 @@ def __init__( # * The number of tokens currently in the bucket, # * The time point when the bucket was last completely empty, and # * The rate_hz (leak rate) of this particular bucket. - self.actions: Dict[Hashable, Tuple[float, float, float]] = {} + self.actions: dict[Hashable, tuple[float, float, float]] = {} self.clock.looping_call(self._prune_message_counts, 60 * 1000) @@ -109,7 +109,7 @@ def _get_key( def _get_action_counts( self, key: Hashable, time_now_s: float - ) -> Tuple[float, float, float]: + ) -> tuple[float, float, float]: """Retrieve the action counts, with a fallback representing an empty bucket.""" return self.actions.get(key, (0.0, time_now_s, 0.0)) @@ -122,7 +122,7 @@ async def can_do_action( update: bool = True, n_actions: int = 1, _time_now_s: Optional[float] = None, - ) -> Tuple[bool, float]: + ) -> tuple[bool, float]: """Can the entity (e.g. user or IP address) perform the action? Checks if the user has ratelimiting disabled in the database by looking diff --git a/synapse/api/room_versions.py b/synapse/api/room_versions.py index 71ef5952c3c..b6e76379f18 100644 --- a/synapse/api/room_versions.py +++ b/synapse/api/room_versions.py @@ -18,7 +18,7 @@ # # -from typing import Callable, Dict, Optional, Tuple +from typing import Callable, Optional import attr @@ -109,7 +109,7 @@ class RoomVersion: # is not enough to mark it "supported": the push rule evaluator also needs to # support the flag. 
Unknown flags are ignored by the evaluator, making conditions # fail if used. - msc3931_push_features: Tuple[str, ...] # values from PushRuleRoomFlag + msc3931_push_features: tuple[str, ...] # values from PushRuleRoomFlag # MSC3757: Restricting who can overwrite a state event msc3757_enabled: bool # MSC4289: Creator power enabled @@ -476,7 +476,7 @@ class RoomVersions: ) -KNOWN_ROOM_VERSIONS: Dict[str, RoomVersion] = { +KNOWN_ROOM_VERSIONS: dict[str, RoomVersion] = { v.identifier: v for v in ( RoomVersions.V1, diff --git a/synapse/app/_base.py b/synapse/app/_base.py index a3e4b4ea4b6..0e97aadc647 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -34,11 +34,8 @@ Any, Awaitable, Callable, - Dict, - List, NoReturn, Optional, - Tuple, cast, ) from wsgiref.simple_server import WSGIServer @@ -99,8 +96,8 @@ logger = logging.getLogger(__name__) -_instance_id_to_sighup_callbacks_map: Dict[ - str, List[Tuple[Callable[..., None], Tuple[object, ...], Dict[str, object]]] +_instance_id_to_sighup_callbacks_map: dict[ + str, list[tuple[Callable[..., None], tuple[object, ...], dict[str, object]]] ] = {} """ Map from homeserver instance_id to a list of callbacks. @@ -177,7 +174,7 @@ def start_worker_reactor( def start_reactor( appname: str, soft_file_limit: int, - gc_thresholds: Optional[Tuple[int, int, int]], + gc_thresholds: Optional[tuple[int, int, int]], pid_file: Optional[str], daemonize: bool, print_pidfile: bool, @@ -310,7 +307,7 @@ async def wrapper() -> None: def listen_metrics( bind_addresses: StrCollection, port: int -) -> List[Tuple[WSGIServer, Thread]]: +) -> list[tuple[WSGIServer, Thread]]: """ Start Prometheus metrics server. @@ -331,7 +328,7 @@ def listen_metrics( from synapse.metrics import RegistryProxy - servers: List[Tuple[WSGIServer, Thread]] = [] + servers: list[tuple[WSGIServer, Thread]] = [] for host in bind_addresses: logger.info("Starting metrics listener on %s:%d", host, port) server, thread = start_http_server_prometheus( @@ -346,7 +343,7 @@ def listen_manhole( port: int, manhole_settings: ManholeConfig, manhole_globals: dict, -) -> List[Port]: +) -> list[Port]: # twisted.conch.manhole 21.1.0 uses "int_from_bytes", which produces a confusing # warning. It's fixed by https://github.com/twisted/twisted/pull/1522), so # suppress the warning for now. 
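A standalone sketch of the sighup-callback registry typed above. The map shape matches the `_instance_id_to_sighup_callbacks_map` annotation in the hunk; `register_sighup` and `fire_sighup` are hypothetical helpers for illustration:

```python
from typing import Callable

# instance_id -> [(callback, positional args, keyword args)], matching the
# annotation on _instance_id_to_sighup_callbacks_map above.
_sighup_callbacks: dict[
    str, list[tuple[Callable[..., None], tuple[object, ...], dict[str, object]]]
] = {}

def register_sighup(
    instance_id: str, cb: Callable[..., None], *args: object, **kwargs: object
) -> None:
    # Hypothetical helper: stash the callback together with its arguments.
    _sighup_callbacks.setdefault(instance_id, []).append((cb, args, kwargs))

def fire_sighup(instance_id: str) -> None:
    # On SIGHUP, replay every registered callback with its stored arguments.
    for cb, args, kwargs in _sighup_callbacks.get(instance_id, []):
        cb(*args, **kwargs)

register_sighup("hs1", print, "reloading TLS certificates")
fire_sighup("hs1")
```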
@@ -371,7 +368,7 @@ def listen_tcp( factory: ServerFactory, reactor: IReactorTCP = reactor, backlog: int = 50, -) -> List[Port]: +) -> list[Port]: """ Create a TCP socket for a port and several addresses @@ -396,7 +393,7 @@ def listen_unix( factory: ServerFactory, reactor: IReactorUNIX = reactor, backlog: int = 50, -) -> List[Port]: +) -> list[Port]: """ Create a UNIX socket for a given path and 'mode' permission @@ -420,7 +417,7 @@ def listen_http( max_request_body_size: int, context_factory: Optional[IOpenSSLContextFactory], reactor: ISynapseReactor = reactor, -) -> List[Port]: +) -> list[Port]: """ Args: listener_config: TODO @@ -490,7 +487,7 @@ def listen_ssl( context_factory: IOpenSSLContextFactory, reactor: IReactorSSL = reactor, backlog: int = 50, -) -> List[Port]: +) -> list[Port]: """ Create an TLS-over-TCP socket for a port and several addresses diff --git a/synapse/app/admin_cmd.py b/synapse/app/admin_cmd.py index bafeb46971e..b5b1edac0a6 100644 --- a/synapse/app/admin_cmd.py +++ b/synapse/app/admin_cmd.py @@ -24,7 +24,7 @@ import os import sys import tempfile -from typing import List, Mapping, Optional, Sequence, Tuple +from typing import Mapping, Optional, Sequence from twisted.internet import defer, task @@ -150,7 +150,7 @@ def __init__(self, user_id: str, directory: Optional[str] = None): if list(os.listdir(self.base_directory)): raise Exception("Directory must be empty") - def write_events(self, room_id: str, events: List[EventBase]) -> None: + def write_events(self, room_id: str, events: list[EventBase]) -> None: room_directory = os.path.join(self.base_directory, "rooms", room_id) os.makedirs(room_directory, exist_ok=True) events_file = os.path.join(room_directory, "events") @@ -255,7 +255,7 @@ def finished(self) -> str: return self.base_directory -def load_config(argv_options: List[str]) -> Tuple[HomeServerConfig, argparse.Namespace]: +def load_config(argv_options: list[str]) -> tuple[HomeServerConfig, argparse.Namespace]: parser = argparse.ArgumentParser(description="Synapse Admin Command") HomeServerConfig.add_arguments_to_parser(parser) diff --git a/synapse/app/complement_fork_starter.py b/synapse/app/complement_fork_starter.py index b981a7631b1..73e33d77a53 100644 --- a/synapse/app/complement_fork_starter.py +++ b/synapse/app/complement_fork_starter.py @@ -26,13 +26,13 @@ import signal import sys from types import FrameType -from typing import Any, Callable, Dict, List, Optional +from typing import Any, Callable, Optional from twisted.internet.main import installReactor # a list of the original signal handlers, before we installed our custom ones. # We restore these in our child processes. -_original_signal_handlers: Dict[int, Any] = {} +_original_signal_handlers: dict[int, Any] = {} class ProxiedReactor: @@ -72,7 +72,7 @@ def __getattr__(self, attr_name: str) -> Any: def _worker_entrypoint( - func: Callable[[], None], proxy_reactor: ProxiedReactor, args: List[str] + func: Callable[[], None], proxy_reactor: ProxiedReactor, args: list[str] ) -> None: """ Entrypoint for a forked worker process. @@ -128,7 +128,7 @@ def main() -> None: # Split up the subsequent arguments into each workers' arguments; # `--` is our delimiter of choice. 
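The `--`-delimited split that the comprehension below performs is easy to sanity-check in isolation; a sketch with a made-up command line:

```python
import itertools

# Made-up argv: two workers' arguments separated by "--".
ns_args = ["generic_worker", "-c", "w1.yaml", "--", "generic_worker", "-c", "w2.yaml"]

# groupby keys consecutive runs of tokens by "is this not the delimiter?";
# the `if cond` filter then drops the runs of "--" themselves.
args_by_worker: list[list[str]] = [
    list(args)
    for cond, args in itertools.groupby(ns_args, lambda ele: ele != "--")
    if cond and args
]
assert args_by_worker == [
    ["generic_worker", "-c", "w1.yaml"],
    ["generic_worker", "-c", "w2.yaml"],
]
```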
- args_by_worker: List[List[str]] = [ + args_by_worker: list[list[str]] = [ list(args) for cond, args in itertools.groupby(ns.args, lambda ele: ele != "--") if cond and args @@ -167,7 +167,7 @@ def main() -> None: update_proc.join() print("===== PREPARED DATABASE =====", file=sys.stderr) - processes: List[multiprocessing.Process] = [] + processes: list[multiprocessing.Process] = [] # Install signal handlers to propagate signals to all our children, so that they # shut down cleanly. This also inhibits our own exit, but that's good: we want to diff --git a/synapse/app/generic_worker.py b/synapse/app/generic_worker.py index 7518661265d..8f512c1577d 100644 --- a/synapse/app/generic_worker.py +++ b/synapse/app/generic_worker.py @@ -21,7 +21,6 @@ # import logging import sys -from typing import Dict, List from twisted.web.resource import Resource @@ -181,7 +180,7 @@ def _listen_http(self, listener_config: ListenerConfig) -> None: # We always include an admin resource that we populate with servlets as needed admin_resource = JsonResource(self, canonical_json=False) - resources: Dict[str, Resource] = { + resources: dict[str, Resource] = { # We always include a health resource. "/health": HealthResource(), "/_synapse/admin": admin_resource, @@ -314,7 +313,7 @@ def start_listening(self) -> None: self.get_replication_command_handler().start_replication(self) -def load_config(argv_options: List[str]) -> HomeServerConfig: +def load_config(argv_options: list[str]) -> HomeServerConfig: """ Parse the commandline and config files (does not generate config) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index b9ac86c2fc3..3f34630da5a 100644 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -22,7 +22,7 @@ import logging import os import sys -from typing import Dict, Iterable, List, Optional +from typing import Iterable, Optional from twisted.internet.tcp import Port from twisted.web.resource import EncodingResourceWrapper, Resource @@ -99,7 +99,7 @@ def _listener_http( site_tag = listener_config.get_site_tag() # We always include a health resource. 
- resources: Dict[str, Resource] = {"/health": HealthResource()} + resources: dict[str, Resource] = {"/health": HealthResource()} for res in listener_config.http_options.resources: for name in res.names: @@ -170,7 +170,7 @@ def _listener_http( def _configure_named_resource( self, name: str, compress: bool = False - ) -> Dict[str, Resource]: + ) -> dict[str, Resource]: """Build a resource map for a named resource Args: @@ -180,7 +180,7 @@ def _configure_named_resource( Returns: map from path to HTTP resource """ - resources: Dict[str, Resource] = {} + resources: dict[str, Resource] = {} if name == "client": client_resource: Resource = ClientRestResource(self) if compress: @@ -323,7 +323,7 @@ def start_background_tasks(self) -> None: self.get_datastores().main.db_pool.updates.start_doing_background_updates() -def load_or_generate_config(argv_options: List[str]) -> HomeServerConfig: +def load_or_generate_config(argv_options: list[str]) -> HomeServerConfig: """ Parse the commandline and config files diff --git a/synapse/app/phone_stats_home.py b/synapse/app/phone_stats_home.py index 4bbc33cba28..13a0e3db7cd 100644 --- a/synapse/app/phone_stats_home.py +++ b/synapse/app/phone_stats_home.py @@ -22,7 +22,7 @@ import math import resource import sys -from typing import TYPE_CHECKING, List, Mapping, Sized, Tuple +from typing import TYPE_CHECKING, Mapping, Sized from prometheus_client import Gauge @@ -54,7 +54,7 @@ # Contains the list of processes we will be monitoring # currently either 0 or 1 -_stats_process: List[Tuple[int, "resource.struct_rusage"]] = [] +_stats_process: list[tuple[int, "resource.struct_rusage"]] = [] # Gauges to expose monthly active user control metrics current_mau_gauge = Gauge( @@ -82,12 +82,12 @@ def phone_stats_home( hs: "HomeServer", stats: JsonDict, - stats_process: List[Tuple[int, "resource.struct_rusage"]] = _stats_process, + stats_process: list[tuple[int, "resource.struct_rusage"]] = _stats_process, ) -> "defer.Deferred[None]": async def _phone_stats_home( hs: "HomeServer", stats: JsonDict, - stats_process: List[Tuple[int, "resource.struct_rusage"]] = _stats_process, + stats_process: list[tuple[int, "resource.struct_rusage"]] = _stats_process, ) -> None: """Collect usage statistics and send them to the configured endpoint. 
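`phone_stats_home` above keeps a list of `(pid, rusage)` samples. A sketch of that shape using the stdlib `resource` module (Unix-only); the diff-against-registration step is an assumption about how such samples are typically consumed, not code copied from Synapse:

```python
import os
import resource

# (pid, usage-at-registration) pairs, matching _stats_process above.
_stats_process: list[tuple[int, "resource.struct_rusage"]] = []

# Register the current process once; later calls can diff against this sample.
_stats_process.append((os.getpid(), resource.getrusage(resource.RUSAGE_SELF)))

for pid, first_usage in _stats_process:
    current = resource.getrusage(resource.RUSAGE_SELF)
    cpu_seconds = current.ru_utime - first_usage.ru_utime
    print(pid, f"user CPU since registration: {cpu_seconds:.3f}s")
```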
diff --git a/synapse/appservice/__init__.py b/synapse/appservice/__init__.py index 1d0735ca1d6..e91fa3a624b 100644 --- a/synapse/appservice/__init__.py +++ b/synapse/appservice/__init__.py @@ -25,9 +25,7 @@ from enum import Enum from typing import ( TYPE_CHECKING, - Dict, Iterable, - List, Optional, Pattern, Sequence, @@ -59,11 +57,11 @@ # Type for the `device_one_time_keys_count` field in an appservice transaction # user ID -> {device ID -> {algorithm -> count}} -TransactionOneTimeKeysCount = Dict[str, Dict[str, Dict[str, int]]] +TransactionOneTimeKeysCount = dict[str, dict[str, dict[str, int]]] # Type for the `device_unused_fallback_key_types` field in an appservice transaction # user ID -> {device ID -> [algorithm]} -TransactionUnusedFallbackKeys = Dict[str, Dict[str, List[str]]] +TransactionUnusedFallbackKeys = dict[str, dict[str, list[str]]] class ApplicationServiceState(Enum): @@ -145,7 +143,7 @@ def __init__( def _check_namespaces( self, namespaces: Optional[JsonDict] - ) -> Dict[str, List[Namespace]]: + ) -> dict[str, list[Namespace]]: # Sanity check that it is of the form: # { # users: [ {regex: "[A-z]+.*", exclusive: true}, ...], @@ -155,7 +153,7 @@ def _check_namespaces( if namespaces is None: namespaces = {} - result: Dict[str, List[Namespace]] = {} + result: dict[str, list[Namespace]] = {} for ns in ApplicationService.NS_LIST: result[ns] = [] @@ -388,7 +386,7 @@ def is_exclusive_alias(self, alias: str) -> bool: def is_exclusive_room(self, room_id: str) -> bool: return self._is_exclusive(ApplicationService.NS_ROOMS, room_id) - def get_exclusive_user_regexes(self) -> List[Pattern[str]]: + def get_exclusive_user_regexes(self) -> list[Pattern[str]]: """Get the list of regexes used to determine if a user is exclusively registered by the AS """ @@ -417,8 +415,8 @@ def __init__( service: ApplicationService, id: int, events: Sequence[EventBase], - ephemeral: List[JsonMapping], - to_device_messages: List[JsonMapping], + ephemeral: list[JsonMapping], + to_device_messages: list[JsonMapping], one_time_keys_count: TransactionOneTimeKeysCount, unused_fallback_keys: TransactionUnusedFallbackKeys, device_list_summary: DeviceListUpdates, diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py index 55069cc5d38..f08a9219986 100644 --- a/synapse/appservice/api.py +++ b/synapse/appservice/api.py @@ -23,13 +23,10 @@ import urllib.parse from typing import ( TYPE_CHECKING, - Dict, Iterable, - List, Mapping, Optional, Sequence, - Tuple, TypeVar, Union, ) @@ -133,14 +130,14 @@ def __init__(self, hs: "HomeServer"): self.clock = hs.get_clock() self.config = hs.config.appservice - self.protocol_meta_cache: ResponseCache[Tuple[str, str]] = ResponseCache( + self.protocol_meta_cache: ResponseCache[tuple[str, str]] = ResponseCache( clock=hs.get_clock(), name="as_protocol_meta", server_name=self.server_name, timeout_ms=HOUR_IN_MS, ) - def _get_headers(self, service: "ApplicationService") -> Dict[bytes, List[bytes]]: + def _get_headers(self, service: "ApplicationService") -> dict[bytes, list[bytes]]: """This makes sure we have always the auth header and opentracing headers set.""" # This is also ensured before in the functions. 
However this is needed to please @@ -210,8 +207,8 @@ async def query_3pe( service: "ApplicationService", kind: str, protocol: str, - fields: Dict[bytes, List[bytes]], - ) -> List[JsonDict]: + fields: dict[bytes, list[bytes]], + ) -> list[JsonDict]: if kind == ThirdPartyEntityKind.USER: required_field = "userid" elif kind == ThirdPartyEntityKind.LOCATION: @@ -225,7 +222,7 @@ async def query_3pe( assert service.hs_token is not None try: - args: Mapping[bytes, Union[List[bytes], str]] = fields + args: Mapping[bytes, Union[list[bytes], str]] = fields if self.config.use_appservice_legacy_authorization: args = { **fields, @@ -320,8 +317,8 @@ async def push_bulk( self, service: "ApplicationService", events: Sequence[EventBase], - ephemeral: List[JsonMapping], - to_device_messages: List[JsonMapping], + ephemeral: list[JsonMapping], + to_device_messages: list[JsonMapping], one_time_keys_count: TransactionOneTimeKeysCount, unused_fallback_keys: TransactionUnusedFallbackKeys, device_list_summary: DeviceListUpdates, @@ -429,9 +426,9 @@ async def push_bulk( return False async def claim_client_keys( - self, service: "ApplicationService", query: List[Tuple[str, str, str, int]] - ) -> Tuple[ - Dict[str, Dict[str, Dict[str, JsonDict]]], List[Tuple[str, str, str, int]] + self, service: "ApplicationService", query: list[tuple[str, str, str, int]] + ) -> tuple[ + dict[str, dict[str, dict[str, JsonDict]]], list[tuple[str, str, str, int]] ]: """Claim one time keys from an application service. @@ -457,7 +454,7 @@ async def claim_client_keys( assert service.hs_token is not None # Create the expected payload shape. - body: Dict[str, Dict[str, List[str]]] = {} + body: dict[str, dict[str, list[str]]] = {} for user_id, device, algorithm, count in query: body.setdefault(user_id, {}).setdefault(device, []).extend( [algorithm] * count @@ -502,8 +499,8 @@ async def claim_client_keys( return response, missing async def query_keys( - self, service: "ApplicationService", query: Dict[str, List[str]] - ) -> Dict[str, Dict[str, Dict[str, JsonDict]]]: + self, service: "ApplicationService", query: dict[str, list[str]] + ) -> dict[str, dict[str, dict[str, JsonDict]]]: """Query the application service for keys. 
Note that any error (including a timeout) is treated as the application @@ -545,7 +542,7 @@ async def query_keys( def _serialize( self, service: "ApplicationService", events: Iterable[EventBase] - ) -> List[JsonDict]: + ) -> list[JsonDict]: time_now = self.clock.time_msec() return [ serialize_event( diff --git a/synapse/appservice/scheduler.py b/synapse/appservice/scheduler.py index b4de759b675..b5fab5f50d6 100644 --- a/synapse/appservice/scheduler.py +++ b/synapse/appservice/scheduler.py @@ -61,13 +61,9 @@ Awaitable, Callable, Collection, - Dict, Iterable, - List, Optional, Sequence, - Set, - Tuple, ) from twisted.internet.interfaces import IDelayedCall @@ -183,16 +179,16 @@ class _ServiceQueuer: def __init__(self, txn_ctrl: "_TransactionController", hs: "HomeServer"): # dict of {service_id: [events]} - self.queued_events: Dict[str, List[EventBase]] = {} + self.queued_events: dict[str, list[EventBase]] = {} # dict of {service_id: [events]} - self.queued_ephemeral: Dict[str, List[JsonMapping]] = {} + self.queued_ephemeral: dict[str, list[JsonMapping]] = {} # dict of {service_id: [to_device_message_json]} - self.queued_to_device_messages: Dict[str, List[JsonMapping]] = {} + self.queued_to_device_messages: dict[str, list[JsonMapping]] = {} # dict of {service_id: [device_list_summary]} - self.queued_device_list_summaries: Dict[str, List[DeviceListUpdates]] = {} + self.queued_device_list_summaries: dict[str, list[DeviceListUpdates]] = {} # the appservices which currently have a transaction in flight - self.requests_in_flight: Set[str] = set() + self.requests_in_flight: set[str] = set() self.txn_ctrl = txn_ctrl self._msc3202_transaction_extensions_enabled: bool = ( hs.config.experimental.msc3202_transaction_extensions @@ -302,7 +298,7 @@ async def _compute_msc3202_otk_counts_and_fallback_keys( events: Iterable[EventBase], ephemerals: Iterable[JsonMapping], to_device_messages: Iterable[JsonMapping], - ) -> Tuple[TransactionOneTimeKeysCount, TransactionUnusedFallbackKeys]: + ) -> tuple[TransactionOneTimeKeysCount, TransactionUnusedFallbackKeys]: """ Given a list of the events, ephemeral messages and to-device messages, - first computes a list of application services users that may have @@ -313,14 +309,14 @@ async def _compute_msc3202_otk_counts_and_fallback_keys( """ # Set of 'interesting' users who may have updates - users: Set[str] = set() + users: set[str] = set() # The sender is always included users.add(service.sender.to_string()) # All AS users that would receive the PDUs or EDUs sent to these rooms # are classed as 'interesting'. 
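An aside before this hunk continues: `claim_client_keys` above flattens its `(user, device, algorithm, count)` query into the nested payload with chained `setdefault` calls, exactly as in the diff. Run standalone with example IDs:

```python
# Example query: who needs how many one-time keys claimed.
query: list[tuple[str, str, str, int]] = [
    ("@alice:example.org", "DEV1", "signed_curve25519", 2),
    ("@alice:example.org", "DEV2", "signed_curve25519", 1),
]

# Same reshaping as in claim_client_keys: nest by user, then device, and
# repeat each algorithm `count` times.
body: dict[str, dict[str, list[str]]] = {}
for user_id, device, algorithm, count in query:
    body.setdefault(user_id, {}).setdefault(device, []).extend([algorithm] * count)

assert body == {
    "@alice:example.org": {
        "DEV1": ["signed_curve25519", "signed_curve25519"],
        "DEV2": ["signed_curve25519"],
    }
}
```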
- rooms_of_interesting_users: Set[str] = set() + rooms_of_interesting_users: set[str] = set() # PDUs rooms_of_interesting_users.update(event.room_id for event in events) # EDUs @@ -364,7 +360,7 @@ def __init__(self, hs: "HomeServer"): self.as_api = hs.get_application_service_api() # map from service id to recoverer instance - self.recoverers: Dict[str, "_Recoverer"] = {} + self.recoverers: dict[str, "_Recoverer"] = {} # for UTs self.RECOVERER_CLASS = _Recoverer @@ -373,8 +369,8 @@ async def send( self, service: ApplicationService, events: Sequence[EventBase], - ephemeral: Optional[List[JsonMapping]] = None, - to_device_messages: Optional[List[JsonMapping]] = None, + ephemeral: Optional[list[JsonMapping]] = None, + to_device_messages: Optional[list[JsonMapping]] = None, one_time_keys_count: Optional[TransactionOneTimeKeysCount] = None, unused_fallback_keys: Optional[TransactionUnusedFallbackKeys] = None, device_list_summary: Optional[DeviceListUpdates] = None, diff --git a/synapse/config/__main__.py b/synapse/config/__main__.py index ef9d36b507c..9169b062bf1 100644 --- a/synapse/config/__main__.py +++ b/synapse/config/__main__.py @@ -20,13 +20,12 @@ # # import sys -from typing import List from synapse.config._base import ConfigError from synapse.config.homeserver import HomeServerConfig -def main(args: List[str]) -> None: +def main(args: list[str]) -> None: action = args[1] if len(args) > 1 and args[1] == "read" else None # If we're reading a key in the config file, then `args[1]` will be `read` and `args[2]` # will be the key to read. diff --git a/synapse/config/_base.py b/synapse/config/_base.py index 5d0560e0f2e..ce06905390b 100644 --- a/synapse/config/_base.py +++ b/synapse/config/_base.py @@ -33,14 +33,10 @@ from typing import ( Any, ClassVar, - Dict, Iterable, Iterator, - List, MutableMapping, Optional, - Tuple, - Type, TypeVar, Union, ) @@ -321,9 +317,9 @@ def read_template(self, filename: str) -> jinja2.Template: def read_templates( self, - filenames: List[str], + filenames: list[str], custom_template_directories: Optional[Iterable[str]] = None, - ) -> List[jinja2.Template]: + ) -> list[jinja2.Template]: """Load a list of template files from disk using the given variables. This function will attempt to load the given templates from the default Synapse @@ -402,7 +398,7 @@ class RootConfig: class, lower-cased and with "Config" removed. """ - config_classes: List[Type[Config]] = [] + config_classes: list[type[Config]] = [] def __init__(self, config_files: StrSequence = ()): # Capture absolute paths here, so we can reload config after we daemonize. 
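A sketch of the per-service queueing pattern that `_ServiceQueuer`'s dicts above describe: one transaction in flight per appservice, queues drained only when idle. Strings stand in for `EventBase` objects and the helper names are hypothetical:

```python
# service_id -> pending events, as in _ServiceQueuer; strings stand in for
# EventBase objects here.
queued_events: dict[str, list[str]] = {}
requests_in_flight: set[str] = set()

def enqueue(service_id: str, event: str) -> None:
    queued_events.setdefault(service_id, []).append(event)

def start_txn_if_idle(service_id: str) -> list[str]:
    # Drain the queue only when no request is outstanding for this service.
    if service_id in requests_in_flight:
        return []
    batch, queued_events[service_id] = queued_events.get(service_id, []), []
    if batch:
        requests_in_flight.add(service_id)
    return batch

enqueue("irc_bridge", "$event1")
enqueue("irc_bridge", "$event2")
assert start_txn_if_idle("irc_bridge") == ["$event1", "$event2"]
assert start_txn_if_idle("irc_bridge") == []  # a transaction is now in flight
```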
@@ -471,7 +467,7 @@ def generate_config( generate_secrets: bool = False, report_stats: Optional[bool] = None, open_private_ports: bool = False, - listeners: Optional[List[dict]] = None, + listeners: Optional[list[dict]] = None, tls_certificate_path: Optional[str] = None, tls_private_key_path: Optional[str] = None, ) -> str: @@ -545,7 +541,7 @@ def generate_config( @classmethod def load_config( - cls: Type[TRootConfig], description: str, argv_options: List[str] + cls: type[TRootConfig], description: str, argv_options: list[str] ) -> TRootConfig: """Parse the commandline and config files @@ -605,8 +601,8 @@ def add_arguments_to_parser(cls, config_parser: argparse.ArgumentParser) -> None @classmethod def load_config_with_parser( - cls: Type[TRootConfig], parser: argparse.ArgumentParser, argv_options: List[str] - ) -> Tuple[TRootConfig, argparse.Namespace]: + cls: type[TRootConfig], parser: argparse.ArgumentParser, argv_options: list[str] + ) -> tuple[TRootConfig, argparse.Namespace]: """Parse the commandline and config files with the given parser Doesn't support config-file-generation: used by the worker apps. @@ -658,7 +654,7 @@ def load_config_with_parser( @classmethod def load_or_generate_config( - cls: Type[TRootConfig], description: str, argv_options: List[str] + cls: type[TRootConfig], description: str, argv_options: list[str] ) -> Optional[TRootConfig]: """Parse the commandline and config files @@ -858,7 +854,7 @@ def load_or_generate_config( def parse_config_dict( self, - config_dict: Dict[str, Any], + config_dict: dict[str, Any], config_dir_path: str, data_dir_path: str, allow_secrets_in_config: bool = True, @@ -883,7 +879,7 @@ def parse_config_dict( ) def generate_missing_files( - self, config_dict: Dict[str, Any], config_dir_path: str + self, config_dict: dict[str, Any], config_dir_path: str ) -> None: self.invoke_all("generate_files", config_dict, config_dir_path) @@ -930,7 +926,7 @@ def validate_config(self) -> None: """ -def read_config_files(config_files: Iterable[str]) -> Dict[str, Any]: +def read_config_files(config_files: Iterable[str]) -> dict[str, Any]: """Read the config files and shallowly merge them into a dict. Successive configurations are shallowly merged into ones provided earlier, @@ -964,7 +960,7 @@ def read_config_files(config_files: Iterable[str]) -> Dict[str, Any]: return specified_config -def find_config_files(search_paths: List[str]) -> List[str]: +def find_config_files(search_paths: list[str]) -> list[str]: """Finds config files using a list of search paths. If a path is a file then that file path is added to the list. If a search path is a directory then all the "*.yaml" files in that directory are added to the list in @@ -1018,7 +1014,7 @@ class ShardedWorkerHandlingConfig: below). 
""" - instances: List[str] + instances: list[str] def should_handle(self, instance_name: str, key: str) -> bool: """Whether this instance is responsible for handling the given key.""" diff --git a/synapse/config/_util.py b/synapse/config/_util.py index 731b60a8407..3e239c525e7 100644 --- a/synapse/config/_util.py +++ b/synapse/config/_util.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Any, Dict, Type, TypeVar +from typing import Any, TypeVar import jsonschema @@ -79,8 +79,8 @@ def json_error_to_config_error( def parse_and_validate_mapping( config: Any, - model_type: Type[Model], -) -> Dict[str, Model]: + model_type: type[Model], +) -> dict[str, Model]: """Parse `config` as a mapping from strings to a given `Model` type. Args: config: The configuration data to check @@ -93,7 +93,7 @@ def parse_and_validate_mapping( try: # type-ignore: mypy doesn't like constructing `Dict[str, model_type]` because # `model_type` is a runtime variable. Pydantic is fine with this. - instances = parse_obj_as(Dict[str, model_type], config) # type: ignore[valid-type] + instances = parse_obj_as(dict[str, model_type], config) # type: ignore[valid-type] except ValidationError as e: raise ConfigError(str(e)) from e return instances diff --git a/synapse/config/api.py b/synapse/config/api.py index 0bb99d42282..e32e03e55e1 100644 --- a/synapse/config/api.py +++ b/synapse/config/api.py @@ -20,7 +20,7 @@ # import logging -from typing import Any, Iterable, Optional, Tuple +from typing import Any, Iterable, Optional from synapse.api.constants import EventTypes from synapse.config._base import Config, ConfigError @@ -46,7 +46,7 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: def _get_prejoin_state_entries( self, config: JsonDict - ) -> Iterable[Tuple[str, Optional[str]]]: + ) -> Iterable[tuple[str, Optional[str]]]: """Get the event types and state keys to include in the prejoin state.""" room_prejoin_state_config = config.get("room_prejoin_state") or {} diff --git a/synapse/config/appservice.py b/synapse/config/appservice.py index 81dbd330cc4..b9ed1a702c3 100644 --- a/synapse/config/appservice.py +++ b/synapse/config/appservice.py @@ -21,7 +21,7 @@ # import logging -from typing import Any, Dict, List +from typing import Any from urllib import parse as urlparse import yaml @@ -61,13 +61,13 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: def load_appservices( - hostname: str, config_files: List[str] -) -> List[ApplicationService]: + hostname: str, config_files: list[str] +) -> list[ApplicationService]: """Returns a list of Application Services from the config files.""" # Dicts of value -> filename - seen_as_tokens: Dict[str, str] = {} - seen_ids: Dict[str, str] = {} + seen_as_tokens: dict[str, str] = {} + seen_ids: dict[str, str] = {} appservices = [] diff --git a/synapse/config/cache.py b/synapse/config/cache.py index 35a052b2547..e51efc3dbd6 100644 --- a/synapse/config/cache.py +++ b/synapse/config/cache.py @@ -23,7 +23,7 @@ import os import re import threading -from typing import Any, Callable, Dict, Mapping, Optional +from typing import Any, Callable, Mapping, Optional import attr @@ -38,7 +38,7 @@ _CACHE_PREFIX = "SYNAPSE_CACHE_FACTOR" # Map from canonicalised cache name to cache. 
-_CACHES: Dict[str, Callable[[float], None]] = {} +_CACHES: dict[str, Callable[[float], None]] = {} # a lock on the contents of _CACHES _CACHES_LOCK = threading.Lock() @@ -104,7 +104,7 @@ class CacheConfig(Config): _environ: Mapping[str, str] = os.environ event_cache_size: int - cache_factors: Dict[str, float] + cache_factors: dict[str, float] global_factor: float track_memory_usage: bool expiry_time_msec: Optional[int] diff --git a/synapse/config/cas.py b/synapse/config/cas.py index 60d66d7019b..e6e869bb167 100644 --- a/synapse/config/cas.py +++ b/synapse/config/cas.py @@ -20,7 +20,7 @@ # # -from typing import Any, List, Optional +from typing import Any, Optional from synapse.config.sso import SsoAttributeRequirement from synapse.types import JsonDict @@ -107,7 +107,7 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: def _parsed_required_attributes_def( required_attributes: Any, -) -> List[SsoAttributeRequirement]: +) -> list[SsoAttributeRequirement]: validate_config( REQUIRED_ATTRIBUTES_SCHEMA, required_attributes, diff --git a/synapse/config/database.py b/synapse/config/database.py index c4ca63a1fa3..8e9d2538207 100644 --- a/synapse/config/database.py +++ b/synapse/config/database.py @@ -22,7 +22,7 @@ import argparse import logging import os -from typing import Any, List +from typing import Any from synapse.config._base import Config, ConfigError from synapse.types import JsonDict @@ -83,7 +83,7 @@ class DatabaseConfig(Config): def __init__(self, *args: Any): super().__init__(*args) - self.databases: List[DatabaseConnectionConfig] = [] + self.databases: list[DatabaseConnectionConfig] = [] def read_config(self, config: JsonDict, **kwargs: Any) -> None: # We *experimentally* support specifying multiple databases via the diff --git a/synapse/config/key.py b/synapse/config/key.py index f78ff5114f6..3e832b49466 100644 --- a/synapse/config/key.py +++ b/synapse/config/key.py @@ -23,7 +23,7 @@ import hashlib import logging import os -from typing import TYPE_CHECKING, Any, Dict, Iterator, List, Optional +from typing import TYPE_CHECKING, Any, Iterator, Optional import attr import jsonschema @@ -110,7 +110,7 @@ class TrustedKeyServer: server_name: str # map from key id to key object, or None to disable signature verification. - verify_keys: Optional[Dict[str, VerifyKey]] = None + verify_keys: Optional[dict[str, VerifyKey]] = None class KeyConfig(Config): @@ -250,7 +250,7 @@ def generate_config_section( - server_name: "matrix.org" """ % locals() - def read_signing_keys(self, signing_key_path: str, name: str) -> List[SigningKey]: + def read_signing_keys(self, signing_key_path: str, name: str) -> list[SigningKey]: """Read the signing keys in the given path. 
Args: @@ -280,7 +280,7 @@ def read_signing_keys(self, signing_key_path: str, name: str) -> List[SigningKey def read_old_signing_keys( self, old_signing_keys: Optional[JsonDict] - ) -> Dict[str, "VerifyKeyWithExpiry"]: + ) -> dict[str, "VerifyKeyWithExpiry"]: if old_signing_keys is None: return {} keys = {} @@ -299,7 +299,7 @@ def read_old_signing_keys( ) return keys - def generate_files(self, config: Dict[str, Any], config_dir_path: str) -> None: + def generate_files(self, config: dict[str, Any], config_dir_path: str) -> None: if "signing_key" in config: return @@ -393,7 +393,7 @@ def _perspectives_to_key_servers(config: JsonDict) -> Iterator[JsonDict]: def _parse_key_servers( - key_servers: List[Any], federation_verify_certificates: bool + key_servers: list[Any], federation_verify_certificates: bool ) -> Iterator[TrustedKeyServer]: try: jsonschema.validate(key_servers, TRUSTED_KEY_SERVERS_SCHEMA) @@ -408,7 +408,7 @@ def _parse_key_servers( server_name = server["server_name"] result = TrustedKeyServer(server_name=server_name) - verify_keys: Optional[Dict[str, str]] = server.get("verify_keys") + verify_keys: Optional[dict[str, str]] = server.get("verify_keys") if verify_keys is not None: result.verify_keys = {} for key_id, key_base64 in verify_keys.items(): diff --git a/synapse/config/logger.py b/synapse/config/logger.py index 9dde4c4003f..8e355035a98 100644 --- a/synapse/config/logger.py +++ b/synapse/config/logger.py @@ -26,7 +26,7 @@ import sys import threading from string import Template -from typing import TYPE_CHECKING, Any, Dict, Optional +from typing import TYPE_CHECKING, Any, Optional import yaml from zope.interface import implementer @@ -186,7 +186,7 @@ def add_arguments(parser: argparse.ArgumentParser) -> None: help=argparse.SUPPRESS, ) - def generate_files(self, config: Dict[str, Any], config_dir_path: str) -> None: + def generate_files(self, config: dict[str, Any], config_dir_path: str) -> None: log_config = config.get("log_config") if log_config and not os.path.exists(log_config): log_file = self.abspath("homeserver.log") diff --git a/synapse/config/modules.py b/synapse/config/modules.py index 37dc26e130d..42bcffd1839 100644 --- a/synapse/config/modules.py +++ b/synapse/config/modules.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Any, Dict, List, Tuple +from typing import Any, Dict from synapse.config._base import Config, ConfigError from synapse.types import JsonDict @@ -29,7 +29,7 @@ class ModulesConfig(Config): section = "modules" def read_config(self, config: JsonDict, **kwargs: Any) -> None: - self.loaded_modules: List[Tuple[Any, Dict]] = [] + self.loaded_modules: list[tuple[Any, Dict]] = [] configured_modules = config.get("modules") or [] for i, module in enumerate(configured_modules): diff --git a/synapse/config/oembed.py b/synapse/config/oembed.py index 1b6c5210873..a4a192302c3 100644 --- a/synapse/config/oembed.py +++ b/synapse/config/oembed.py @@ -21,7 +21,7 @@ import importlib.resources as importlib_resources import json import re -from typing import Any, Dict, Iterable, List, Optional, Pattern +from typing import Any, Iterable, Optional, Pattern from urllib import parse as urlparse import attr @@ -37,9 +37,9 @@ class OEmbedEndpointConfig: # The API endpoint to fetch. api_endpoint: str # The patterns to match. - url_patterns: List[Pattern[str]] + url_patterns: list[Pattern[str]] # The supported formats. 
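An aside before the `formats` field the comment above introduces: the `type: ignore` in `parse_and_validate_mapping` earlier survives this PR because mypy still rejects a subscripted type built from a runtime variable, while pydantic accepts it. A sketch assuming the v1-style `parse_obj_as` that `synapse._pydantic_compat` re-exports, with a cut-down stand-in model:

```python
from pydantic import BaseModel, parse_obj_as  # v1-style API, assumed importable

class InstanceLocationConfig(BaseModel):
    # Cut-down stand-in for the real config model.
    host: str
    port: int

raw = {"worker1": {"host": "localhost", "port": 8034}}

# dict[str, Model] is a perfectly good runtime value for pydantic, even
# though mypy cannot verify `dict[str, model_type]` when model_type is a
# variable -- hence the `# type: ignore[valid-type]` kept in the diff.
instances = parse_obj_as(dict[str, InstanceLocationConfig], raw)
assert instances["worker1"].port == 8034
```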
- formats: Optional[List[str]] + formats: Optional[list[str]] class OembedConfig(Config): @@ -48,10 +48,10 @@ class OembedConfig(Config): section = "oembed" def read_config(self, config: JsonDict, **kwargs: Any) -> None: - oembed_config: Dict[str, Any] = config.get("oembed") or {} + oembed_config: dict[str, Any] = config.get("oembed") or {} # A list of patterns which will be used. - self.oembed_patterns: List[OEmbedEndpointConfig] = list( + self.oembed_patterns: list[OEmbedEndpointConfig] = list( self._parse_and_validate_providers(oembed_config) ) @@ -92,7 +92,7 @@ def _parse_and_validate_providers( ) def _parse_and_validate_provider( - self, providers: List[JsonDict], config_path: StrSequence + self, providers: list[JsonDict], config_path: StrSequence ) -> Iterable[OEmbedEndpointConfig]: # Ensure it is the proper form. validate_config( diff --git a/synapse/config/oidc.py b/synapse/config/oidc.py index 3ddf65a3e91..3179635220d 100644 --- a/synapse/config/oidc.py +++ b/synapse/config/oidc.py @@ -21,7 +21,7 @@ # from collections import Counter -from typing import Any, Collection, Iterable, List, Mapping, Optional, Tuple, Type +from typing import Any, Collection, Iterable, Mapping, Optional, Type import attr @@ -213,7 +213,7 @@ def _parse_oidc_provider_configs(config: JsonDict) -> Iterable["OidcProviderConf def _parse_oidc_config_dict( - oidc_config: JsonDict, config_path: Tuple[str, ...] + oidc_config: JsonDict, config_path: tuple[str, ...] ) -> "OidcProviderConfig": """Take the configuration dict and parse it into an OidcProviderConfig @@ -416,7 +416,7 @@ class OidcProviderConfig: # Valid values are 'auto', 'always', and 'never'. pkce_method: str - id_token_signing_alg_values_supported: Optional[List[str]] + id_token_signing_alg_values_supported: Optional[list[str]] """ List of the JWS signing algorithms (`alg` values) that are supported for signing the `id_token`. @@ -497,7 +497,7 @@ class OidcProviderConfig: user_mapping_provider_config: Any # required attributes to require in userinfo to allow login/registration - attribute_requirements: List[SsoAttributeRequirement] + attribute_requirements: list[SsoAttributeRequirement] # Whether automatic registrations are enabled in the ODIC flow. Defaults to True enable_registration: bool diff --git a/synapse/config/password_auth_providers.py b/synapse/config/password_auth_providers.py index b2b624aea27..e01fa9d07b7 100644 --- a/synapse/config/password_auth_providers.py +++ b/synapse/config/password_auth_providers.py @@ -19,7 +19,7 @@ # # -from typing import Any, List, Tuple, Type +from typing import Any, Type from synapse.types import JsonDict from synapse.util.module_loader import load_module @@ -56,7 +56,7 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: for backwards compatibility. 
""" - self.password_providers: List[Tuple[Type, Any]] = [] + self.password_providers: list[tuple[Type, Any]] = [] providers = [] # We want to be backwards compatible with the old `ldap_config` diff --git a/synapse/config/ratelimiting.py b/synapse/config/ratelimiting.py index b082daa8f75..be2f49f87c5 100644 --- a/synapse/config/ratelimiting.py +++ b/synapse/config/ratelimiting.py @@ -19,7 +19,7 @@ # # -from typing import Any, Dict, Optional, cast +from typing import Any, Optional, cast import attr @@ -37,9 +37,9 @@ class RatelimitSettings: @classmethod def parse( cls, - config: Dict[str, Any], + config: dict[str, Any], key: str, - defaults: Optional[Dict[str, float]] = None, + defaults: Optional[dict[str, float]] = None, ) -> "RatelimitSettings": """Parse config[key] as a new-style rate limiter config. @@ -62,7 +62,7 @@ def parse( # By this point we should have hit the rate limiter parameters. # We don't actually check this though! - rl_config = cast(Dict[str, float], rl_config) + rl_config = cast(dict[str, float], rl_config) return cls( key=key, diff --git a/synapse/config/registration.py b/synapse/config/registration.py index 283199aa11e..c0e7316bc36 100644 --- a/synapse/config/registration.py +++ b/synapse/config/registration.py @@ -20,7 +20,7 @@ # # import argparse -from typing import Any, Dict, Optional +from typing import Any, Optional from synapse.api.constants import RoomCreationPreset from synapse.config._base import Config, ConfigError, read_file @@ -266,7 +266,7 @@ def generate_config_section( else: return "" - def generate_files(self, config: Dict[str, Any], config_dir_path: str) -> None: + def generate_files(self, config: dict[str, Any], config_dir_path: str) -> None: # if 'registration_shared_secret_path' is specified, and the target file # does not exist, generate it. registration_shared_secret_path = config.get("registration_shared_secret_path") diff --git a/synapse/config/repository.py b/synapse/config/repository.py index e7d23740f9e..221130b0cd1 100644 --- a/synapse/config/repository.py +++ b/synapse/config/repository.py @@ -21,7 +21,7 @@ import logging import os -from typing import Any, Dict, List, Tuple +from typing import Any import attr @@ -80,8 +80,8 @@ class MediaStorageProviderConfig: def parse_thumbnail_requirements( - thumbnail_sizes: List[JsonDict], -) -> Dict[str, Tuple[ThumbnailRequirement, ...]]: + thumbnail_sizes: list[JsonDict], +) -> dict[str, tuple[ThumbnailRequirement, ...]]: """Takes a list of dictionaries with "width", "height", and "method" keys and creates a map from image media types to the thumbnail size, thumbnailing method, and thumbnail media type to precalculate @@ -92,7 +92,7 @@ def parse_thumbnail_requirements( Returns: Dictionary mapping from media type string to list of ThumbnailRequirement. """ - requirements: Dict[str, List[ThumbnailRequirement]] = {} + requirements: dict[str, list[ThumbnailRequirement]] = {} for size in thumbnail_sizes: width = size["width"] height = size["height"] @@ -206,7 +206,7 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: # # We don't create the storage providers here as not all workers need # them to be started. 
- self.media_storage_providers: List[tuple] = [] + self.media_storage_providers: list[tuple] = [] for i, provider_config in enumerate(storage_providers): # We special case the module "file_system" so as not to need to @@ -298,7 +298,7 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: self.enable_authenticated_media = config.get("enable_authenticated_media", True) - self.media_upload_limits: List[MediaUploadLimit] = [] + self.media_upload_limits: list[MediaUploadLimit] = [] for limit_config in config.get("media_upload_limits", []): time_period_ms = self.parse_duration(limit_config["time_period"]) max_bytes = self.parse_size(limit_config["max_size"]) diff --git a/synapse/config/retention.py b/synapse/config/retention.py index 7e329c7f42c..9d34f1e241d 100644 --- a/synapse/config/retention.py +++ b/synapse/config/retention.py @@ -20,7 +20,7 @@ # import logging -from typing import Any, List, Optional +from typing import Any, Optional import attr @@ -119,7 +119,7 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: " greater than 'allowed_lifetime_max'" ) - self.retention_purge_jobs: List[RetentionPurgeJob] = [] + self.retention_purge_jobs: list[RetentionPurgeJob] = [] for purge_job_config in retention_config.get("purge_jobs", []): interval_config = purge_job_config.get("interval") diff --git a/synapse/config/saml2.py b/synapse/config/saml2.py index 9d7ef94507a..b5ea4f09f62 100644 --- a/synapse/config/saml2.py +++ b/synapse/config/saml2.py @@ -20,7 +20,7 @@ # import logging -from typing import Any, List, Set +from typing import Any from synapse.config.sso import SsoAttributeRequirement from synapse.types import JsonDict @@ -191,7 +191,7 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: ) def _default_saml_config_dict( - self, required_attributes: Set[str], optional_attributes: Set[str] + self, required_attributes: set[str], optional_attributes: set[str] ) -> JsonDict: """Generate a configuration dictionary with required and optional attributes that will be needed to process new user registration @@ -239,7 +239,7 @@ def _default_saml_config_dict( def _parse_attribute_requirements_def( attribute_requirements: Any, -) -> List[SsoAttributeRequirement]: +) -> list[SsoAttributeRequirement]: validate_config( ATTRIBUTE_REQUIREMENTS_SCHEMA, attribute_requirements, diff --git a/synapse/config/server.py b/synapse/config/server.py index e15bceb2968..a486f16542c 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -25,7 +25,7 @@ import os.path import urllib.parse from textwrap import indent -from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, TypedDict, Union +from typing import Any, Iterable, List, Optional, TypedDict, Union from urllib.request import getproxies_environment import attr @@ -213,7 +213,7 @@ def generate_ip_set( @attr.s(frozen=True) class HttpResourceConfig: - names: List[str] = attr.ib( + names: list[str] = attr.ib( factory=list, validator=attr.validators.deep_iterable(attr.validators.in_(KNOWN_RESOURCES)), ) @@ -228,8 +228,8 @@ class HttpListenerConfig: """Object describing the http-specific parts of the config of a listener""" x_forwarded: bool = False - resources: List[HttpResourceConfig] = attr.Factory(list) - additional_resources: Dict[str, dict] = attr.Factory(dict) + resources: list[HttpResourceConfig] = attr.Factory(list) + additional_resources: dict[str, dict] = attr.Factory(dict) tag: Optional[str] = None request_id_header: Optional[str] = None @@ -239,7 +239,7 @@ class TCPListenerConfig: """Object 
describing the configuration of a single TCP listener.""" port: int = attr.ib(validator=attr.validators.instance_of(int)) - bind_addresses: List[str] = attr.ib(validator=attr.validators.instance_of(List)) + bind_addresses: list[str] = attr.ib(validator=attr.validators.instance_of(List)) type: str = attr.ib(validator=attr.validators.in_(KNOWN_LISTENER_TYPES)) tls: bool = False @@ -344,7 +344,7 @@ class ProxyConfig: """ Proxy server to use for HTTPS requests. """ - no_proxy_hosts: Optional[List[str]] + no_proxy_hosts: Optional[list[str]] """ List of hosts, IP addresses, or IP ranges in CIDR format which should not use the proxy. Synapse will directly connect to these hosts. @@ -864,11 +864,11 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: ) # Whitelist of domain names that given next_link parameters must have - next_link_domain_whitelist: Optional[List[str]] = config.get( + next_link_domain_whitelist: Optional[list[str]] = config.get( "next_link_domain_whitelist" ) - self.next_link_domain_whitelist: Optional[Set[str]] = None + self.next_link_domain_whitelist: Optional[set[str]] = None if next_link_domain_whitelist is not None: if not isinstance(next_link_domain_whitelist, list): raise ConfigError("'next_link_domain_whitelist' must be a list") @@ -892,7 +892,7 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: config.get("use_account_validity_in_account_status") or False ) - self.rooms_to_exclude_from_sync: List[str] = ( + self.rooms_to_exclude_from_sync: list[str] = ( config.get("exclude_rooms_from_sync") or [] ) @@ -927,7 +927,7 @@ def generate_config_section( data_dir_path: str, server_name: str, open_private_ports: bool, - listeners: Optional[List[dict]], + listeners: Optional[list[dict]], **kwargs: Any, ) -> str: _, bind_port = parse_and_validate_server_name(server_name) @@ -1028,7 +1028,7 @@ def add_arguments(parser: argparse.ArgumentParser) -> None: help="Turn on the twisted telnet manhole service on the given port.", ) - def read_gc_intervals(self, durations: Any) -> Optional[Tuple[float, float, float]]: + def read_gc_intervals(self, durations: Any) -> Optional[tuple[float, float, float]]: """Reads the three durations for the GC min interval option, returning seconds.""" if durations is None: return None @@ -1048,7 +1048,7 @@ def read_gc_intervals(self, durations: Any) -> Optional[Tuple[float, float, floa def is_threepid_reserved( - reserved_threepids: List[JsonDict], threepid: JsonDict + reserved_threepids: list[JsonDict], threepid: JsonDict ) -> bool: """Check the threepid against the reserved threepid config Args: @@ -1066,8 +1066,8 @@ def is_threepid_reserved( def read_gc_thresholds( - thresholds: Optional[List[Any]], -) -> Optional[Tuple[int, int, int]]: + thresholds: Optional[list[Any]], +) -> Optional[tuple[int, int, int]]: """Reads the three integer thresholds for garbage collection. Ensures that the thresholds are integers if thresholds are supplied. 
""" diff --git a/synapse/config/spam_checker.py b/synapse/config/spam_checker.py index 014c55d702e..0a8b3d3093e 100644 --- a/synapse/config/spam_checker.py +++ b/synapse/config/spam_checker.py @@ -19,7 +19,7 @@ # import logging -from typing import Any, Dict, List, Tuple +from typing import Any, Dict from synapse.config import ConfigError from synapse.types import JsonDict @@ -41,7 +41,7 @@ class SpamCheckerConfig(Config): section = "spamchecker" def read_config(self, config: JsonDict, **kwargs: Any) -> None: - self.spam_checkers: List[Tuple[Any, Dict]] = [] + self.spam_checkers: list[tuple[Any, Dict]] = [] spam_checkers = config.get("spam_checker") or [] if isinstance(spam_checkers, dict): diff --git a/synapse/config/sso.py b/synapse/config/sso.py index cf27a7ee13a..facb4185108 100644 --- a/synapse/config/sso.py +++ b/synapse/config/sso.py @@ -19,7 +19,7 @@ # # import logging -from typing import Any, Dict, List, Optional +from typing import Any, Optional import attr @@ -45,7 +45,7 @@ class SsoAttributeRequirement: attribute: str # If neither `value` nor `one_of` is given, the attribute must simply exist. value: Optional[str] = None - one_of: Optional[List[str]] = None + one_of: Optional[list[str]] = None JSON_SCHEMA = { "type": "object", @@ -64,7 +64,7 @@ class SSOConfig(Config): section = "sso" def read_config(self, config: JsonDict, **kwargs: Any) -> None: - sso_config: Dict[str, Any] = config.get("sso") or {} + sso_config: dict[str, Any] = config.get("sso") or {} # The sso-specific template_dir self.sso_template_dir = sso_config.get("template_dir") diff --git a/synapse/config/tls.py b/synapse/config/tls.py index a48d81fdc3d..d03a77d9d2a 100644 --- a/synapse/config/tls.py +++ b/synapse/config/tls.py @@ -20,7 +20,7 @@ # import logging -from typing import Any, List, Optional, Pattern +from typing import Any, Optional, Pattern from matrix_common.regex import glob_to_regex @@ -84,7 +84,7 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: fed_whitelist_entries = [] # Support globs (*) in whitelist values - self.federation_certificate_verification_whitelist: List[Pattern] = [] + self.federation_certificate_verification_whitelist: list[Pattern] = [] for entry in fed_whitelist_entries: try: entry_regex = glob_to_regex(entry.encode("ascii").decode("ascii")) diff --git a/synapse/config/tracer.py b/synapse/config/tracer.py index d31fd41082b..ccfeed4d07c 100644 --- a/synapse/config/tracer.py +++ b/synapse/config/tracer.py @@ -19,7 +19,7 @@ # # -from typing import Any, List, Set +from typing import Any from synapse.types import JsonDict from synapse.util.check_dependencies import check_requirements @@ -42,7 +42,7 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: {"sampler": {"type": "const", "param": 1}, "logging": False}, ) - self.force_tracing_for_users: Set[str] = set() + self.force_tracing_for_users: set[str] = set() if not self.opentracer_enabled: return @@ -51,7 +51,7 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: # The tracer is enabled so sanitize the config - self.opentracer_whitelist: List[str] = opentracing_config.get( + self.opentracer_whitelist: list[str] = opentracing_config.get( "homeserver_whitelist", [] ) if not isinstance(self.opentracer_whitelist, list): diff --git a/synapse/config/user_types.py b/synapse/config/user_types.py index 2d9c9f7afb1..dd64425d6cf 100644 --- a/synapse/config/user_types.py +++ b/synapse/config/user_types.py @@ -12,7 +12,7 @@ # . 
# -from typing import Any, List, Optional +from typing import Any, Optional from synapse.api.constants import UserTypes from synapse.types import JsonDict @@ -29,9 +29,9 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: self.default_user_type: Optional[str] = user_types.get( "default_user_type", None ) - self.extra_user_types: List[str] = user_types.get("extra_user_types", []) + self.extra_user_types: list[str] = user_types.get("extra_user_types", []) - all_user_types: List[str] = [] + all_user_types: list[str] = [] all_user_types.extend(UserTypes.ALL_BUILTIN_USER_TYPES) all_user_types.extend(self.extra_user_types) diff --git a/synapse/config/workers.py b/synapse/config/workers.py index 825ba784820..da7148b3a16 100644 --- a/synapse/config/workers.py +++ b/synapse/config/workers.py @@ -22,7 +22,7 @@ import argparse import logging -from typing import Any, Dict, List, Optional, Union +from typing import Any, Optional, Union import attr @@ -79,7 +79,7 @@ logger = logging.getLogger(__name__) -def _instance_to_list_converter(obj: Union[str, List[str]]) -> List[str]: +def _instance_to_list_converter(obj: Union[str, list[str]]) -> list[str]: """Helper for allowing parsing a string or list of strings to a config option expecting a list of strings. """ @@ -142,39 +142,39 @@ class WriterLocations: device_lists: The instances that write to the device list stream. """ - events: List[str] = attr.ib( + events: list[str] = attr.ib( default=[MAIN_PROCESS_INSTANCE_NAME], converter=_instance_to_list_converter, ) - typing: List[str] = attr.ib( + typing: list[str] = attr.ib( default=[MAIN_PROCESS_INSTANCE_NAME], converter=_instance_to_list_converter, ) - to_device: List[str] = attr.ib( + to_device: list[str] = attr.ib( default=[MAIN_PROCESS_INSTANCE_NAME], converter=_instance_to_list_converter, ) - account_data: List[str] = attr.ib( + account_data: list[str] = attr.ib( default=[MAIN_PROCESS_INSTANCE_NAME], converter=_instance_to_list_converter, ) - receipts: List[str] = attr.ib( + receipts: list[str] = attr.ib( default=[MAIN_PROCESS_INSTANCE_NAME], converter=_instance_to_list_converter, ) - presence: List[str] = attr.ib( + presence: list[str] = attr.ib( default=[MAIN_PROCESS_INSTANCE_NAME], converter=_instance_to_list_converter, ) - push_rules: List[str] = attr.ib( + push_rules: list[str] = attr.ib( default=[MAIN_PROCESS_INSTANCE_NAME], converter=_instance_to_list_converter, ) - device_lists: List[str] = attr.ib( + device_lists: list[str] = attr.ib( default=[MAIN_PROCESS_INSTANCE_NAME], converter=_instance_to_list_converter, ) - thread_subscriptions: List[str] = attr.ib( + thread_subscriptions: list[str] = attr.ib( default=["master"], converter=_instance_to_list_converter, ) @@ -190,8 +190,8 @@ class OutboundFederationRestrictedTo: locations: list of instance locations to connect to proxy via. """ - instances: Optional[List[str]] - locations: List[InstanceLocationConfig] = attr.Factory(list) + instances: Optional[list[str]] + locations: list[InstanceLocationConfig] = attr.Factory(list) def __contains__(self, instance: str) -> bool: # It feels a bit dirty to return `True` if `instances` is `None`, but it makes @@ -295,7 +295,7 @@ def read_config( # A map from instance name to host/port of their HTTP replication endpoint. # Check if the main process is declared. The main process itself doesn't need # this data as it would never have to talk to itself. 
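Note: the _instance_to_list_converter change above keeps the accept-a-string-or-a-list behaviour while moving to builtin generics. The idiom in isolation (the Writers class is illustrative, standing in for WriterLocations):

from typing import Union

import attr

def to_instance_list(obj: Union[str, list[str]]) -> list[str]:
    # Normalise a single instance name to a one-element list.
    return [obj] if isinstance(obj, str) else list(obj)

@attr.s(slots=True, auto_attribs=True)
class Writers:
    events: list[str] = attr.ib(default="master", converter=to_instance_list)

print(Writers().events)                  # ['master']
print(Writers(events="worker1").events)  # ['worker1']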
- instance_map: Dict[str, Any] = config.get("instance_map", {}) + instance_map: dict[str, Any] = config.get("instance_map", {}) if self.instance_name is not MAIN_PROCESS_INSTANCE_NAME: # TODO: The next 3 condition blocks can be deleted after some time has @@ -342,7 +342,7 @@ def read_config( ) # type-ignore: the expression `Union[A, B]` is not a Type[Union[A, B]] currently - self.instance_map: Dict[str, InstanceLocationConfig] = ( + self.instance_map: dict[str, InstanceLocationConfig] = ( parse_and_validate_mapping( instance_map, InstanceLocationConfig, # type: ignore[arg-type] @@ -481,7 +481,7 @@ def read_config( def _should_this_worker_perform_duty( self, - config: Dict[str, Any], + config: dict[str, Any], legacy_master_option_name: str, legacy_worker_app_name: str, new_option_name: str, @@ -574,11 +574,11 @@ def _should_this_worker_perform_duty( def _worker_names_performing_this_duty( self, - config: Dict[str, Any], + config: dict[str, Any], legacy_option_name: str, legacy_app_name: str, modern_instance_list_name: str, - ) -> List[str]: + ) -> list[str]: """ Retrieves the names of the workers handling a given duty, by either legacy option or instance list. diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py index c36398cec08..d13d5d04c3b 100644 --- a/synapse/crypto/event_signing.py +++ b/synapse/crypto/event_signing.py @@ -23,7 +23,7 @@ import collections.abc import hashlib import logging -from typing import Any, Callable, Dict, Tuple +from typing import Any, Callable from canonicaljson import encode_canonical_json from signedjson.sign import sign_json @@ -80,8 +80,8 @@ def check_event_content_hash( def compute_content_hash( - event_dict: Dict[str, Any], hash_algorithm: Hasher -) -> Tuple[str, bytes]: + event_dict: dict[str, Any], hash_algorithm: Hasher +) -> tuple[str, bytes]: """Compute the content hash of an event, which is the hash of the unredacted event. @@ -112,7 +112,7 @@ def compute_content_hash( def compute_event_reference_hash( event: EventBase, hash_algorithm: Hasher = hashlib.sha256 -) -> Tuple[str, bytes]: +) -> tuple[str, bytes]: """Computes the event reference hash. This is the hash of the redacted event. @@ -139,7 +139,7 @@ def compute_event_signature( event_dict: JsonDict, signature_name: str, signing_key: SigningKey, -) -> Dict[str, Dict[str, str]]: +) -> dict[str, dict[str, str]]: """Compute the signature of the event for the given name and key. Args: diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index 258bc29357e..e038dd54165 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -21,7 +21,7 @@ import abc import logging -from typing import TYPE_CHECKING, Callable, Dict, Iterable, List, Optional, Tuple +from typing import TYPE_CHECKING, Callable, Dict, Iterable, Optional import attr from signedjson.key import ( @@ -82,7 +82,7 @@ class VerifyJsonRequest: server_name: str get_json_object: Callable[[], JsonDict] minimum_valid_until_ts: int - key_ids: List[str] + key_ids: list[str] @staticmethod def from_json_object( @@ -141,7 +141,7 @@ class _FetchKeyRequest: server_name: str minimum_valid_until_ts: int - key_ids: List[str] + key_ids: list[str] class Keyring: @@ -156,7 +156,7 @@ def __init__( if key_fetchers is None: # Always fetch keys from the database. - mutable_key_fetchers: List[KeyFetcher] = [StoreKeyFetcher(hs)] + mutable_key_fetchers: list[KeyFetcher] = [StoreKeyFetcher(hs)] # Fetch keys from configured trusted key servers, if any exist. 
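Note: the keyring code below aggregates per-server results into nested dict[str, dict[str, ...]] maps via setdefault, and _inner_fetch_key_requests de-duplicates fetches by keeping the maximum minimum_valid_until_ts per (server, key ID) pair. That step in isolation (plain tuples stand in for _FetchKeyRequest):

def dedupe_key_requests(
    requests: list[tuple[str, list[str], int]],
) -> dict[str, dict[str, int]]:
    # Map server name -> key ID -> highest minimum_valid_until_ts requested.
    out: dict[str, dict[str, int]] = {}
    for server_name, key_ids, min_valid_until_ts in requests:
        by_server = out.setdefault(server_name, {})
        for key_id in key_ids:
            by_server[key_id] = max(by_server.get(key_id, 0), min_valid_until_ts)
    return out

print(dedupe_key_requests([
    ("hs1", ["ed25519:a"], 100),
    ("hs1", ["ed25519:a", "ed25519:b"], 50),
]))
# {'hs1': {'ed25519:a': 100, 'ed25519:b': 50}}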
key_servers = hs.config.key.key_servers if key_servers: @@ -169,7 +169,7 @@ def __init__( self._key_fetchers = key_fetchers self._fetch_keys_queue: BatchingQueue[ - _FetchKeyRequest, Dict[str, Dict[str, FetchKeyResult]] + _FetchKeyRequest, dict[str, dict[str, FetchKeyResult]] ] = BatchingQueue( name="keyring_server", hs=hs, @@ -182,7 +182,7 @@ def __init__( # build a FetchKeyResult for each of our own keys, to shortcircuit the # fetcher. - self._local_verify_keys: Dict[str, FetchKeyResult] = {} + self._local_verify_keys: dict[str, FetchKeyResult] = {} for key_id, key in hs.config.key.old_signing_keys.items(): self._local_verify_keys[key_id] = FetchKeyResult( verify_key=key, valid_until_ts=key.expired @@ -229,8 +229,8 @@ async def verify_json_for_server( return await self.process_request(request) def verify_json_objects_for_server( - self, server_and_json: Iterable[Tuple[str, dict, int]] - ) -> List["defer.Deferred[None]"]: + self, server_and_json: Iterable[tuple[str, dict, int]] + ) -> list["defer.Deferred[None]"]: """Bulk verifies signatures of json objects, bulk fetching keys as necessary. @@ -286,7 +286,7 @@ async def process_request(self, verify_request: VerifyJsonRequest) -> None: Codes.UNAUTHORIZED, ) - found_keys: Dict[str, FetchKeyResult] = {} + found_keys: dict[str, FetchKeyResult] = {} # If we are the originating server, short-circuit the key-fetch for any keys # we already have @@ -368,8 +368,8 @@ async def process_json( ) async def _inner_fetch_key_requests( - self, requests: List[_FetchKeyRequest] - ) -> Dict[str, Dict[str, FetchKeyResult]]: + self, requests: list[_FetchKeyRequest] + ) -> dict[str, dict[str, FetchKeyResult]]: """Processing function for the queue of `_FetchKeyRequest`. Takes a list of key fetch requests, de-duplicates them and then carries out @@ -387,7 +387,7 @@ async def _inner_fetch_key_requests( # First we need to deduplicate requests for the same key. We do this by # taking the *maximum* requested `minimum_valid_until_ts` for each pair # of server name/key ID. - server_to_key_to_ts: Dict[str, Dict[str, int]] = {} + server_to_key_to_ts: dict[str, dict[str, int]] = {} for request in requests: by_server = server_to_key_to_ts.setdefault(request.server_name, {}) for key_id in request.key_ids: @@ -412,7 +412,7 @@ async def _inner_fetch_key_requests( # We now convert the returned list of results into a map from server # name to key ID to FetchKeyResult, to return. - to_return: Dict[str, Dict[str, FetchKeyResult]] = {} + to_return: dict[str, dict[str, FetchKeyResult]] = {} for request, results in zip(deduped_requests, results_per_request): to_return_by_server = to_return.setdefault(request.server_name, {}) for key_id, key_result in results.items(): @@ -424,7 +424,7 @@ async def _inner_fetch_key_requests( async def _inner_fetch_key_request( self, verify_request: _FetchKeyRequest - ) -> Dict[str, FetchKeyResult]: + ) -> dict[str, FetchKeyResult]: """Attempt to fetch the given key by calling each key fetcher one by one. 
If a key is found, check whether its `valid_until_ts` attribute satisfies the @@ -445,7 +445,7 @@ async def _inner_fetch_key_request( """ logger.debug("Starting fetch for %s", verify_request) - found_keys: Dict[str, FetchKeyResult] = {} + found_keys: dict[str, FetchKeyResult] = {} missing_key_ids = set(verify_request.key_ids) for fetcher in self._key_fetchers: @@ -499,8 +499,8 @@ def shutdown(self) -> None: self._queue.shutdown() async def get_keys( - self, server_name: str, key_ids: List[str], minimum_valid_until_ts: int - ) -> Dict[str, FetchKeyResult]: + self, server_name: str, key_ids: list[str], minimum_valid_until_ts: int + ) -> dict[str, FetchKeyResult]: results = await self._queue.add_to_queue( _FetchKeyRequest( server_name=server_name, @@ -512,8 +512,8 @@ async def get_keys( @abc.abstractmethod async def _fetch_keys( - self, keys_to_fetch: List[_FetchKeyRequest] - ) -> Dict[str, Dict[str, FetchKeyResult]]: + self, keys_to_fetch: list[_FetchKeyRequest] + ) -> dict[str, dict[str, FetchKeyResult]]: pass @@ -526,8 +526,8 @@ def __init__(self, hs: "HomeServer"): self.store = hs.get_datastores().main async def _fetch_keys( - self, keys_to_fetch: List[_FetchKeyRequest] - ) -> Dict[str, Dict[str, FetchKeyResult]]: + self, keys_to_fetch: list[_FetchKeyRequest] + ) -> dict[str, dict[str, FetchKeyResult]]: key_ids_to_fetch = ( (queue_value.server_name, key_id) for queue_value in keys_to_fetch @@ -535,7 +535,7 @@ async def _fetch_keys( ) res = await self.store.get_server_keys_json(key_ids_to_fetch) - keys: Dict[str, Dict[str, FetchKeyResult]] = {} + keys: dict[str, dict[str, FetchKeyResult]] = {} for (server_name, key_id), key in res.items(): keys.setdefault(server_name, {})[key_id] = key return keys @@ -549,7 +549,7 @@ def __init__(self, hs: "HomeServer"): async def process_v2_response( self, from_server: str, response_json: JsonDict, time_added_ms: int - ) -> Dict[str, FetchKeyResult]: + ) -> dict[str, FetchKeyResult]: """Parse a 'Server Keys' structure from the result of a /key request This is used to parse either the entirety of the response from @@ -640,8 +640,8 @@ def __init__(self, hs: "HomeServer"): self.key_servers = hs.config.key.key_servers async def _fetch_keys( - self, keys_to_fetch: List[_FetchKeyRequest] - ) -> Dict[str, Dict[str, FetchKeyResult]]: + self, keys_to_fetch: list[_FetchKeyRequest] + ) -> dict[str, dict[str, FetchKeyResult]]: """see KeyFetcher._fetch_keys""" async def get_key(key_server: TrustedKeyServer) -> Dict: @@ -670,7 +670,7 @@ async def get_key(key_server: TrustedKeyServer) -> Dict: ).addErrback(unwrapFirstError) ) - union_of_keys: Dict[str, Dict[str, FetchKeyResult]] = {} + union_of_keys: dict[str, dict[str, FetchKeyResult]] = {} for result in results: for server_name, keys in result.items(): union_of_keys.setdefault(server_name, {}).update(keys) @@ -678,8 +678,8 @@ async def get_key(key_server: TrustedKeyServer) -> Dict: return union_of_keys async def get_server_verify_key_v2_indirect( - self, keys_to_fetch: List[_FetchKeyRequest], key_server: TrustedKeyServer - ) -> Dict[str, Dict[str, FetchKeyResult]]: + self, keys_to_fetch: list[_FetchKeyRequest], key_server: TrustedKeyServer + ) -> dict[str, dict[str, FetchKeyResult]]: """ Args: keys_to_fetch: @@ -731,8 +731,8 @@ async def get_server_verify_key_v2_indirect( "Response from notary server %s: %s", perspective_name, query_response ) - keys: Dict[str, Dict[str, FetchKeyResult]] = {} - added_keys: Dict[Tuple[str, str], FetchKeyResult] = {} + keys: dict[str, dict[str, FetchKeyResult]] = {} + added_keys: 
dict[tuple[str, str], FetchKeyResult] = {} time_now_ms = self.clock.time_msec() @@ -836,8 +836,8 @@ def __init__(self, hs: "HomeServer"): self.client = hs.get_federation_http_client() async def get_keys( - self, server_name: str, key_ids: List[str], minimum_valid_until_ts: int - ) -> Dict[str, FetchKeyResult]: + self, server_name: str, key_ids: list[str], minimum_valid_until_ts: int + ) -> dict[str, FetchKeyResult]: results = await self._queue.add_to_queue( _FetchKeyRequest( server_name=server_name, @@ -849,8 +849,8 @@ async def get_keys( return results.get(server_name, {}) async def _fetch_keys( - self, keys_to_fetch: List[_FetchKeyRequest] - ) -> Dict[str, Dict[str, FetchKeyResult]]: + self, keys_to_fetch: list[_FetchKeyRequest] + ) -> dict[str, dict[str, FetchKeyResult]]: """ Args: keys_to_fetch: @@ -879,7 +879,7 @@ async def get_keys(server_name: str) -> None: async def get_server_verify_keys_v2_direct( self, server_name: str - ) -> Dict[str, FetchKeyResult]: + ) -> dict[str, FetchKeyResult]: """ Args: diff --git a/synapse/event_auth.py b/synapse/event_auth.py index 64de3f7ef8a..5d927a925a1 100644 --- a/synapse/event_auth.py +++ b/synapse/event_auth.py @@ -26,15 +26,11 @@ from typing import ( Any, ChainMap, - Dict, Iterable, - List, Mapping, MutableMapping, Optional, Protocol, - Set, - Tuple, Union, cast, ) @@ -91,7 +87,7 @@ async def get_events( redact_behaviour: EventRedactBehaviour, get_prev_content: bool = False, allow_rejected: bool = False, - ) -> Dict[str, "EventBase"]: ... + ) -> dict[str, "EventBase"]: ... def validate_event_for_room_version(event: "EventBase") -> None: @@ -993,7 +989,7 @@ def _check_power_levels( user_level = get_user_power_level(event.user_id, auth_events) # Check other levels: - levels_to_check: List[Tuple[str, Optional[str]]] = [ + levels_to_check: list[tuple[str, Optional[str]]] = [ ("users_default", None), ("events_default", None), ("state_default", None), @@ -1191,7 +1187,7 @@ def _verify_third_party_invite( return False -def get_public_keys(invite_event: "EventBase") -> List[Dict[str, Any]]: +def get_public_keys(invite_event: "EventBase") -> list[dict[str, Any]]: public_keys = [] if "public_key" in invite_event.content: o = {"public_key": invite_event.content["public_key"]} @@ -1204,7 +1200,7 @@ def get_public_keys(invite_event: "EventBase") -> List[Dict[str, Any]]: def auth_types_for_event( room_version: RoomVersion, event: Union["EventBase", "EventBuilder"] -) -> Set[Tuple[str, str]]: +) -> set[tuple[str, str]]: """Given an event, return a list of (EventType, StateKey) that may be needed to auth the event. The returned list may be a superset of what would actually be required depending on the full state of the room. diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py index db387542806..a353076e0d1 100644 --- a/synapse/events/__init__.py +++ b/synapse/events/__init__.py @@ -25,14 +25,10 @@ from typing import ( TYPE_CHECKING, Any, - Dict, Generic, Iterable, - List, Literal, Optional, - Tuple, - Type, TypeVar, Union, overload, @@ -94,20 +90,20 @@ def __init__(self, key: str): def __get__( self, instance: Literal[None], - owner: Optional[Type[_DictPropertyInstance]] = None, + owner: Optional[type[_DictPropertyInstance]] = None, ) -> "DictProperty": ... @overload def __get__( self, instance: _DictPropertyInstance, - owner: Optional[Type[_DictPropertyInstance]] = None, + owner: Optional[type[_DictPropertyInstance]] = None, ) -> T: ... 
def __get__( self, instance: Optional[_DictPropertyInstance], - owner: Optional[Type[_DictPropertyInstance]] = None, + owner: Optional[type[_DictPropertyInstance]] = None, ) -> Union[T, "DictProperty"]: # if the property is accessed as a class property rather than an instance # property, return the property itself rather than the value @@ -160,20 +156,20 @@ def __init__(self, key: str, default: T): def __get__( self, instance: Literal[None], - owner: Optional[Type[_DictPropertyInstance]] = None, + owner: Optional[type[_DictPropertyInstance]] = None, ) -> "DefaultDictProperty": ... @overload def __get__( self, instance: _DictPropertyInstance, - owner: Optional[Type[_DictPropertyInstance]] = None, + owner: Optional[type[_DictPropertyInstance]] = None, ) -> T: ... def __get__( self, instance: Optional[_DictPropertyInstance], - owner: Optional[Type[_DictPropertyInstance]] = None, + owner: Optional[type[_DictPropertyInstance]] = None, ) -> Union[T, "DefaultDictProperty"]: if instance is None: return self @@ -192,7 +188,7 @@ def __init__( self, event_dict: JsonDict, room_version: RoomVersion, - signatures: Dict[str, Dict[str, str]], + signatures: dict[str, dict[str, str]], unsigned: JsonDict, internal_metadata_dict: JsonDict, rejected_reason: Optional[str], @@ -210,7 +206,7 @@ def __init__( depth: DictProperty[int] = DictProperty("depth") content: DictProperty[JsonDict] = DictProperty("content") - hashes: DictProperty[Dict[str, str]] = DictProperty("hashes") + hashes: DictProperty[dict[str, str]] = DictProperty("hashes") origin_server_ts: DictProperty[int] = DictProperty("origin_server_ts") sender: DictProperty[str] = DictProperty("sender") # TODO state_key should be Optional[str]. This is generally asserted in Synapse @@ -293,13 +289,13 @@ def __getitem__(self, field: str) -> Optional[Any]: def __contains__(self, field: str) -> bool: return field in self._dict - def items(self) -> List[Tuple[str, Optional[Any]]]: + def items(self) -> list[tuple[str, Optional[Any]]]: return list(self._dict.items()) def keys(self) -> Iterable[str]: return self._dict.keys() - def prev_event_ids(self) -> List[str]: + def prev_event_ids(self) -> list[str]: """Returns the list of prev event IDs. The order matches the order specified in the event, though there is no meaning to it. @@ -457,7 +453,7 @@ def event_id(self) -> str: def room_id(self) -> str: return self._dict["room_id"] - def prev_event_ids(self) -> List[str]: + def prev_event_ids(self) -> list[str]: """Returns the list of prev event IDs. The order matches the order specified in the event, though there is no meaning to it. @@ -558,7 +554,7 @@ def auth_event_ids(self) -> StrCollection: def _event_type_from_format_version( format_version: int, -) -> Type[Union[FrozenEvent, FrozenEventV2, FrozenEventV3]]: +) -> type[Union[FrozenEvent, FrozenEventV2, FrozenEventV3]]: """Returns the python type to use to construct an Event object for the given event format version. 
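Note: the DictProperty/DefaultDictProperty overloads above follow the standard typed-descriptor shape: accessed on the class (instance is None) the descriptor returns itself, otherwise it returns the wrapped value, with the owner parameter now written as type[...]. A minimal sketch of the same shape, without the @overload decorators (Field/Holder are illustrative):

from typing import Optional, Union

class Field:
    def __init__(self, key: str) -> None:
        self.key = key

    def __get__(
        self,
        instance: Optional["Holder"],
        owner: Optional[type["Holder"]] = None,
    ) -> Union[str, "Field"]:
        if instance is None:
            # Class-level access returns the descriptor itself.
            return self
        return instance.data[self.key]

class Holder:
    name = Field("name")

    def __init__(self) -> None:
        self.data = {"name": "synapse"}

print(Holder().name)  # 'synapse'
print(Holder.name)    # <Field instance>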
@@ -669,4 +665,4 @@ class StrippedStateEvent: type: str state_key: str sender: str - content: Dict[str, Any] + content: dict[str, Any] diff --git a/synapse/events/auto_accept_invites.py b/synapse/events/auto_accept_invites.py index 9e17edd2278..4c59f0dffeb 100644 --- a/synapse/events/auto_accept_invites.py +++ b/synapse/events/auto_accept_invites.py @@ -20,7 +20,7 @@ # import logging from http import HTTPStatus -from typing import Any, Dict, Tuple +from typing import Any from synapse.api.constants import AccountDataTypes, EventTypes, Membership from synapse.api.errors import SynapseError @@ -146,7 +146,7 @@ async def _mark_room_as_direct_message( # Be careful: we convert the outer frozendict into a dict here, # but the contents of the dict are still frozen (tuples in lieu of lists, # etc.) - dm_map: Dict[str, Tuple[str, ...]] = dict( + dm_map: dict[str, tuple[str, ...]] = dict( await self._api.account_data_manager.get_global( user_id, AccountDataTypes.DIRECT ) diff --git a/synapse/events/builder.py b/synapse/events/builder.py index 1c9f78c7ca6..a57303c999c 100644 --- a/synapse/events/builder.py +++ b/synapse/events/builder.py @@ -19,7 +19,7 @@ # # import logging -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, Optional, Union import attr from signedjson.types import SigningKey @@ -125,8 +125,8 @@ def is_mine_id(self, user_id: str) -> bool: async def build( self, - prev_event_ids: List[str], - auth_event_ids: Optional[List[str]], + prev_event_ids: list[str], + auth_event_ids: Optional[list[str]], depth: Optional[int] = None, ) -> EventBase: """Transform into a fully signed and hashed event @@ -205,8 +205,8 @@ async def build( format_version = self.room_version.event_format # The types of auth/prev events changes between event versions. - prev_events: Union[StrCollection, List[Tuple[str, Dict[str, str]]]] - auth_events: Union[List[str], List[Tuple[str, Dict[str, str]]]] + prev_events: Union[StrCollection, list[tuple[str, dict[str, str]]]] + auth_events: Union[list[str], list[tuple[str, dict[str, str]]]] if format_version == EventFormatVersions.ROOM_V1_V2: auth_events = await self._store.add_event_hashes(auth_event_ids) prev_events = await self._store.add_event_hashes(prev_event_ids) @@ -228,7 +228,7 @@ async def build( # the db) depth = min(depth, MAX_DEPTH) - event_dict: Dict[str, Any] = { + event_dict: dict[str, Any] = { "auth_events": auth_events, "prev_events": prev_events, "type": self.type, diff --git a/synapse/events/presence_router.py b/synapse/events/presence_router.py index 9713b141bce..006002d44ed 100644 --- a/synapse/events/presence_router.py +++ b/synapse/events/presence_router.py @@ -26,7 +26,6 @@ Callable, Dict, Iterable, - List, Optional, Set, TypeVar, @@ -44,10 +43,10 @@ from synapse.server import HomeServer GET_USERS_FOR_STATES_CALLBACK = Callable[ - [Iterable[UserPresenceState]], Awaitable[Dict[str, Set[UserPresenceState]]] + [Iterable[UserPresenceState]], Awaitable[dict[str, set[UserPresenceState]]] ] # This must either return a set of strings or the constant PresenceRouter.ALL_USERS. -GET_INTERESTED_USERS_CALLBACK = Callable[[str], Awaitable[Union[Set[str], str]]] +GET_INTERESTED_USERS_CALLBACK = Callable[[str], Awaitable[Union[set[str], str]]] logger = logging.getLogger(__name__) @@ -98,7 +97,7 @@ def run(*args: P.args, **kwargs: P.kwargs) -> Awaitable[R]: return run # Register the hooks through the module API. 
- hooks: Dict[str, Optional[Callable[..., Any]]] = { + hooks: dict[str, Optional[Callable[..., Any]]] = { hook: async_wrapper(getattr(presence_router, hook, None)) for hook in presence_router_methods } @@ -116,8 +115,8 @@ class PresenceRouter: def __init__(self, hs: "HomeServer"): # Initially there are no callbacks - self._get_users_for_states_callbacks: List[GET_USERS_FOR_STATES_CALLBACK] = [] - self._get_interested_users_callbacks: List[GET_INTERESTED_USERS_CALLBACK] = [] + self._get_users_for_states_callbacks: list[GET_USERS_FOR_STATES_CALLBACK] = [] + self._get_interested_users_callbacks: list[GET_INTERESTED_USERS_CALLBACK] = [] def register_presence_router_callbacks( self, @@ -143,7 +142,7 @@ def register_presence_router_callbacks( async def get_users_for_states( self, state_updates: Iterable[UserPresenceState], - ) -> Dict[str, Set[UserPresenceState]]: + ) -> dict[str, set[UserPresenceState]]: """ Given an iterable of user presence updates, determine where each one needs to go. @@ -161,7 +160,7 @@ async def get_users_for_states( # Don't include any extra destinations for presence updates return {} - users_for_states: Dict[str, Set[UserPresenceState]] = {} + users_for_states: dict[str, set[UserPresenceState]] = {} # run all the callbacks for get_users_for_states and combine the results for callback in self._get_users_for_states_callbacks: try: @@ -194,7 +193,7 @@ async def get_users_for_states( return users_for_states - async def get_interested_users(self, user_id: str) -> Union[Set[str], str]: + async def get_interested_users(self, user_id: str) -> Union[set[str], str]: """ Retrieve a list of users that `user_id` is interested in receiving the presence of. This will be in addition to those they share a room with. diff --git a/synapse/events/snapshot.py b/synapse/events/snapshot.py index 63551143d86..764d31ee66c 100644 --- a/synapse/events/snapshot.py +++ b/synapse/events/snapshot.py @@ -19,7 +19,7 @@ # # from abc import ABC, abstractmethod -from typing import TYPE_CHECKING, Dict, List, Optional, Tuple +from typing import TYPE_CHECKING, Optional import attr from immutabledict import immutabledict @@ -133,7 +133,7 @@ class EventContext(UnpersistedEventContextBase): """ _storage: "StorageControllers" - state_group_deltas: Dict[Tuple[int, int], StateMap[str]] + state_group_deltas: dict[tuple[int, int], StateMap[str]] rejected: Optional[str] = None _state_group: Optional[int] = None state_group_before_event: Optional[int] = None @@ -149,7 +149,7 @@ def with_state( state_group_before_event: Optional[int], state_delta_due_to_event: Optional[StateMap[str]], partial_state: bool, - state_group_deltas: Dict[Tuple[int, int], StateMap[str]], + state_group_deltas: dict[tuple[int, int], StateMap[str]], ) -> "EventContext": return EventContext( storage=storage, @@ -306,7 +306,7 @@ async def get_prev_state_ids( ) -EventPersistencePair = Tuple[EventBase, EventContext] +EventPersistencePair = tuple[EventBase, EventContext] """ The combination of an event to be persisted and its context. 
""" @@ -365,11 +365,11 @@ class UnpersistedEventContext(UnpersistedEventContextBase): @classmethod async def batch_persist_unpersisted_contexts( cls, - events_and_context: List[Tuple[EventBase, "UnpersistedEventContextBase"]], + events_and_context: list[tuple[EventBase, "UnpersistedEventContextBase"]], room_id: str, last_known_state_group: int, datastore: "StateGroupDataStore", - ) -> List[EventPersistencePair]: + ) -> list[EventPersistencePair]: """ Takes a list of events and their associated unpersisted contexts and persists the unpersisted contexts, returning a list of events and persisted contexts. @@ -472,7 +472,7 @@ async def persist(self, event: EventBase) -> EventContext: partial_state=self.partial_state, ) - def _build_state_group_deltas(self) -> Dict[Tuple[int, int], StateMap]: + def _build_state_group_deltas(self) -> dict[tuple[int, int], StateMap]: """ Collect deltas between the state groups associated with this context """ @@ -510,8 +510,8 @@ def _build_state_group_deltas(self) -> Dict[Tuple[int, int], StateMap]: def _encode_state_group_delta( - state_group_delta: Dict[Tuple[int, int], StateMap[str]], -) -> List[Tuple[int, int, Optional[List[Tuple[str, str, str]]]]]: + state_group_delta: dict[tuple[int, int], StateMap[str]], +) -> list[tuple[int, int, Optional[list[tuple[str, str, str]]]]]: if not state_group_delta: return [] @@ -523,8 +523,8 @@ def _encode_state_group_delta( def _decode_state_group_delta( - input: List[Tuple[int, int, List[Tuple[str, str, str]]]], -) -> Dict[Tuple[int, int], StateMap[str]]: + input: list[tuple[int, int, list[tuple[str, str, str]]]], +) -> dict[tuple[int, int], StateMap[str]]: if not input: return {} @@ -539,7 +539,7 @@ def _decode_state_group_delta( def _encode_state_dict( state_dict: Optional[StateMap[str]], -) -> Optional[List[Tuple[str, str, str]]]: +) -> Optional[list[tuple[str, str, str]]]: """Since dicts of (type, state_key) -> event_id cannot be serialized in JSON we need to convert them to a form that can. """ @@ -550,7 +550,7 @@ def _encode_state_dict( def _decode_state_dict( - input: Optional[List[Tuple[str, str, str]]], + input: Optional[list[tuple[str, str, str]]], ) -> Optional[StateMap[str]]: """Decodes a state dict encoded using `_encode_state_dict` above""" if input is None: diff --git a/synapse/events/utils.py b/synapse/events/utils.py index 942072cf84d..9fa251abd8a 100644 --- a/synapse/events/utils.py +++ b/synapse/events/utils.py @@ -27,8 +27,6 @@ Awaitable, Callable, Collection, - Dict, - List, Mapping, Match, MutableMapping, @@ -239,7 +237,7 @@ def add_fields(*fields: str) -> None: return allowed_fields -def _copy_field(src: JsonDict, dst: JsonDict, field: List[str]) -> None: +def _copy_field(src: JsonDict, dst: JsonDict, field: list[str]) -> None: """Copy the field in 'src' to 'dst'. For example, if src={"foo":{"bar":5}} and dst={}, and field=["foo","bar"] @@ -292,7 +290,7 @@ def _escape_slash(m: Match[str]) -> str: return m.group(0) -def _split_field(field: str) -> List[str]: +def _split_field(field: str) -> list[str]: """ Splits strings on unescaped dots and removes escaping. @@ -333,7 +331,7 @@ def _split_field(field: str) -> List[str]: return result -def only_fields(dictionary: JsonDict, fields: List[str]) -> JsonDict: +def only_fields(dictionary: JsonDict, fields: list[str]) -> JsonDict: """Return a new dict with only the fields in 'dictionary' which are present in 'fields'. @@ -419,7 +417,7 @@ class SerializeEventConfig: # the transaction_id in the unsigned section of the event. 
requester: Optional[Requester] = None # List of event fields to include. If empty, all fields will be returned. - only_event_fields: Optional[List[str]] = None + only_event_fields: Optional[list[str]] = None # Some events can have stripped room state stored in the `unsigned` field. # This is required for invite and knock functionality. If this option is # False, that state will be removed from the event before it is returned. @@ -573,7 +571,7 @@ class EventClientSerializer: def __init__(self, hs: "HomeServer") -> None: self._store = hs.get_datastores().main self._auth = hs.get_auth() - self._add_extra_fields_to_unsigned_client_event_callbacks: List[ + self._add_extra_fields_to_unsigned_client_event_callbacks: list[ ADD_EXTRA_FIELDS_TO_UNSIGNED_CLIENT_EVENT_CALLBACK ] = [] @@ -583,7 +581,7 @@ async def serialize_event( time_now: int, *, config: SerializeEventConfig = _DEFAULT_SERIALIZE_EVENT_CONFIG, - bundle_aggregations: Optional[Dict[str, "BundledAggregations"]] = None, + bundle_aggregations: Optional[dict[str, "BundledAggregations"]] = None, ) -> JsonDict: """Serializes a single event. @@ -641,7 +639,7 @@ async def _inject_bundled_aggregations( event: EventBase, time_now: int, config: SerializeEventConfig, - bundled_aggregations: Dict[str, "BundledAggregations"], + bundled_aggregations: dict[str, "BundledAggregations"], serialized_event: JsonDict, ) -> None: """Potentially injects bundled aggregations into the unsigned portion of the serialized event. @@ -718,8 +716,8 @@ async def serialize_events( time_now: int, *, config: SerializeEventConfig = _DEFAULT_SERIALIZE_EVENT_CONFIG, - bundle_aggregations: Optional[Dict[str, "BundledAggregations"]] = None, - ) -> List[JsonDict]: + bundle_aggregations: Optional[dict[str, "BundledAggregations"]] = None, + ) -> list[JsonDict]: """Serializes multiple events. Args: @@ -763,7 +761,7 @@ def register_add_extra_fields_to_unsigned_client_event_callback( def copy_and_fixup_power_levels_contents( old_power_levels: PowerLevelsContent, -) -> Dict[str, Union[int, Dict[str, int]]]: +) -> dict[str, Union[int, dict[str, int]]]: """Copy the content of a power_levels event, unfreezing immutabledicts along the way. We accept as input power level values which are strings, provided they represent an @@ -779,11 +777,11 @@ def copy_and_fixup_power_levels_contents( if not isinstance(old_power_levels, collections.abc.Mapping): raise TypeError("Not a valid power-levels content: %r" % (old_power_levels,)) - power_levels: Dict[str, Union[int, Dict[str, int]]] = {} + power_levels: dict[str, Union[int, dict[str, int]]] = {} for k, v in old_power_levels.items(): if isinstance(v, collections.abc.Mapping): - h: Dict[str, int] = {} + h: dict[str, int] = {} power_levels[k] = h for k1, v1 in v.items(): _copy_power_level_value_as_integer(v1, h, k1) diff --git a/synapse/events/validator.py b/synapse/events/validator.py index 4d9ba15829e..6fb52f82c17 100644 --- a/synapse/events/validator.py +++ b/synapse/events/validator.py @@ -19,7 +19,7 @@ # # import collections.abc -from typing import List, Type, Union, cast +from typing import Union, cast import jsonschema @@ -283,13 +283,13 @@ def _ensure_state_event(self, event: Union[EventBase, EventBuilder]) -> None: class Mentions(RequestBodyModel): - user_ids: List[StrictStr] = Field(default_factory=list) + user_ids: list[StrictStr] = Field(default_factory=list) room: StrictBool = False # This could return something newer than Draft 7, but that's the current "latest" # validator. 
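Note: copy_and_fixup_power_levels_contents above copies one level of nesting into fresh dicts while coercing stringified integers (the per-value work lives in _copy_power_level_value_as_integer). A condensed sketch of the same shape (error handling trimmed, names illustrative):

import collections.abc
from typing import Union

def copy_power_levels(
    old: collections.abc.Mapping,
) -> dict[str, Union[int, dict[str, int]]]:
    new: dict[str, Union[int, dict[str, int]]] = {}
    for k, v in old.items():
        if isinstance(v, collections.abc.Mapping):
            # Unfreeze nested mappings into plain dicts, coercing values to int.
            new[k] = {k1: int(v1) for k1, v1 in v.items()}
        else:
            new[k] = int(v)
    return new

print(copy_power_levels({"ban": "50", "users": {"@admin:hs": 100}}))
# {'ban': 50, 'users': {'@admin:hs': 100}}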
-def _create_validator(schema: JsonDict) -> Type[jsonschema.Draft7Validator]: +def _create_validator(schema: JsonDict) -> type[jsonschema.Draft7Validator]: validator = jsonschema.validators.validator_for(schema) # by default jsonschema does not consider a immutabledict to be an object so diff --git a/synapse/federation/federation_base.py b/synapse/federation/federation_base.py index a1c9c286ac7..13e445456a9 100644 --- a/synapse/federation/federation_base.py +++ b/synapse/federation/federation_base.py @@ -20,7 +20,7 @@ # # import logging -from typing import TYPE_CHECKING, Awaitable, Callable, List, Optional, Sequence +from typing import TYPE_CHECKING, Awaitable, Callable, Optional, Sequence from synapse.api.constants import MAX_DEPTH, EventContentFields, EventTypes, Membership from synapse.api.errors import Codes, SynapseError @@ -305,7 +305,7 @@ def _is_invite_via_3pid(event: EventBase) -> bool: def parse_events_from_pdu_json( pdus_json: Sequence[JsonDict], room_version: RoomVersion -) -> List[EventBase]: +) -> list[EventBase]: return [ event_from_pdu_json(pdu_json, room_version) for pdu_json in filter_pdus_for_valid_depth(pdus_json) diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index 8c91336dbc1..c149366395c 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -34,11 +34,9 @@ Container, Dict, Iterable, - List, Mapping, Optional, Sequence, - Tuple, TypeVar, Union, ) @@ -120,8 +118,8 @@ class SendJoinResult: event: EventBase # A string giving the server the event was sent to. origin: str - state: List[EventBase] - auth_chain: List[EventBase] + state: list[EventBase] + auth_chain: list[EventBase] # True if 'state' elides non-critical membership events partial_state: bool @@ -135,7 +133,7 @@ class FederationClient(FederationBase): def __init__(self, hs: "HomeServer"): super().__init__(hs) - self.pdu_destination_tried: Dict[str, Dict[str, int]] = {} + self.pdu_destination_tried: dict[str, dict[str, int]] = {} self._clock.looping_call(self._clear_tried_cache, 60 * 1000) self.state = hs.get_state_handler() self.transport_layer = hs.get_federation_transport_client() @@ -145,7 +143,7 @@ def __init__(self, hs: "HomeServer"): # Cache mapping `event_id` to a tuple of the event itself and the `pull_origin` # (which server we pulled the event from) - self._get_pdu_cache: ExpiringCache[str, Tuple[EventBase, str]] = ExpiringCache( + self._get_pdu_cache: ExpiringCache[str, tuple[EventBase, str]] = ExpiringCache( cache_name="get_pdu_cache", server_name=self.server_name, hs=self.hs, @@ -163,8 +161,8 @@ def __init__(self, hs: "HomeServer"): # It is a map of (room ID, suggested-only) -> the response of # get_room_hierarchy. self._get_room_hierarchy_cache: ExpiringCache[ - Tuple[str, bool], - Tuple[JsonDict, Sequence[JsonDict], Sequence[JsonDict], Sequence[str]], + tuple[str, bool], + tuple[JsonDict, Sequence[JsonDict], Sequence[JsonDict], Sequence[str]], ] = ExpiringCache( cache_name="get_room_hierarchy_cache", server_name=self.server_name, @@ -265,7 +263,7 @@ async def claim_client_keys( self, user: UserID, destination: str, - query: Dict[str, Dict[str, Dict[str, int]]], + query: dict[str, dict[str, dict[str, int]]], timeout: Optional[int], ) -> JsonDict: """Claims one-time keys for a device hosted on a remote server. @@ -285,8 +283,8 @@ async def claim_client_keys( # Convert the query with counts into a stable and unstable query and check # if attempting to claim more than 1 OTK. 
- content: Dict[str, Dict[str, str]] = {} - unstable_content: Dict[str, Dict[str, List[str]]] = {} + content: dict[str, dict[str, str]] = {} + unstable_content: dict[str, dict[str, list[str]]] = {} use_unstable = False for user_id, one_time_keys in query.items(): for device_id, algorithms in one_time_keys.items(): @@ -337,7 +335,7 @@ async def claim_client_keys( @tag_args async def backfill( self, dest: str, room_id: str, limit: int, extremities: Collection[str] - ) -> Optional[List[EventBase]]: + ) -> Optional[list[EventBase]]: """Requests some more historic PDUs for the given room from the given destination server. @@ -662,7 +660,7 @@ async def get_pdu( @tag_args async def get_room_state_ids( self, destination: str, room_id: str, event_id: str - ) -> Tuple[List[str], List[str]]: + ) -> tuple[list[str], list[str]]: """Calls the /state_ids endpoint to fetch the state at a particular point in the room, and the auth events for the given event @@ -711,7 +709,7 @@ async def get_room_state( room_id: str, event_id: str, room_version: RoomVersion, - ) -> Tuple[List[EventBase], List[EventBase]]: + ) -> tuple[list[EventBase], list[EventBase]]: """Calls the /state endpoint to fetch the state at a particular point in the room. @@ -772,7 +770,7 @@ async def _check_sigs_and_hash_for_pulled_events_and_fetch( origin: str, pdus: Collection[EventBase], room_version: RoomVersion, - ) -> List[EventBase]: + ) -> list[EventBase]: """ Checks the signatures and hashes of a list of pulled events we got from federation and records any signature failures as failed pull attempts. @@ -806,7 +804,7 @@ async def _check_sigs_and_hash_for_pulled_events_and_fetch( # We limit how many PDUs we check at once, as if we try to do hundreds # of thousands of PDUs at once we see large memory spikes. - valid_pdus: List[EventBase] = [] + valid_pdus: list[EventBase] = [] async def _record_failure_callback(event: EventBase, cause: str) -> None: await self.store.record_event_failed_pull_attempt( @@ -916,7 +914,7 @@ async def _check_sigs_and_hash_and_fetch_one( async def get_event_auth( self, destination: str, room_id: str, event_id: str - ) -> List[EventBase]: + ) -> list[EventBase]: res = await self.transport_layer.get_event_auth(destination, room_id, event_id) room_version = await self.store.get_room_version(room_id) @@ -1050,7 +1048,7 @@ async def make_membership_event( membership: str, content: dict, params: Optional[Mapping[str, Union[str, Iterable[str]]]], - ) -> Tuple[str, EventBase, RoomVersion]: + ) -> tuple[str, EventBase, RoomVersion]: """ Creates an m.room.member event, with context, without participating in the room. @@ -1092,7 +1090,7 @@ async def make_membership_event( % (membership, ",".join(valid_memberships)) ) - async def send_request(destination: str) -> Tuple[str, EventBase, RoomVersion]: + async def send_request(destination: str) -> tuple[str, EventBase, RoomVersion]: ret = await self.transport_layer.make_membership_event( destination, room_id, user_id, membership, params ) @@ -1237,7 +1235,7 @@ async def send_request(destination: str) -> SendJoinResult: # We now go and check the signatures and hashes for the event. Note # that we limit how many events we process at a time to keep the # memory overhead from exploding. 
- valid_pdus_map: Dict[str, EventBase] = {} + valid_pdus_map: dict[str, EventBase] = {} async def _execute(pdu: EventBase) -> None: valid_pdu = await self._check_sigs_and_hash_and_fetch_one( @@ -1507,7 +1505,7 @@ async def _do_send_leave(self, destination: str, pdu: EventBase) -> JsonDict: # content. return resp[1] - async def send_knock(self, destinations: List[str], pdu: EventBase) -> JsonDict: + async def send_knock(self, destinations: list[str], pdu: EventBase) -> JsonDict: """Attempts to send a knock event to a given list of servers. Iterates through the list until one attempt succeeds. @@ -1612,7 +1610,7 @@ async def get_missing_events( limit: int, min_depth: int, timeout: int, - ) -> List[EventBase]: + ) -> list[EventBase]: """Tries to fetch events we are missing. This is called when we receive an event without having received all of its ancestors. @@ -1718,7 +1716,7 @@ async def get_room_hierarchy( destinations: Iterable[str], room_id: str, suggested_only: bool, - ) -> Tuple[JsonDict, Sequence[JsonDict], Sequence[JsonDict], Sequence[str]]: + ) -> tuple[JsonDict, Sequence[JsonDict], Sequence[JsonDict], Sequence[str]]: """ Call other servers to get a hierarchy of the given room. @@ -1749,7 +1747,7 @@ async def get_room_hierarchy( async def send_request( destination: str, - ) -> Tuple[JsonDict, Sequence[JsonDict], Sequence[JsonDict], Sequence[str]]: + ) -> tuple[JsonDict, Sequence[JsonDict], Sequence[JsonDict], Sequence[str]]: try: res = await self.transport_layer.get_room_hierarchy( destination=destination, @@ -1924,8 +1922,8 @@ async def _timestamp_to_event_from_destination( raise InvalidResponseError(str(e)) async def get_account_status( - self, destination: str, user_ids: List[str] - ) -> Tuple[JsonDict, List[str]]: + self, destination: str, user_ids: list[str] + ) -> tuple[JsonDict, list[str]]: """Retrieves account statuses for a given list of users on a given remote homeserver. @@ -1991,8 +1989,8 @@ async def federation_download_media( download_ratelimiter: Ratelimiter, ip_address: str, ) -> Union[ - Tuple[int, Dict[bytes, List[bytes]], bytes], - Tuple[int, Dict[bytes, List[bytes]]], + tuple[int, dict[bytes, list[bytes]], bytes], + tuple[int, dict[bytes, list[bytes]]], ]: try: return await self.transport_layer.federation_download_media( @@ -2036,7 +2034,7 @@ async def download_media( max_timeout_ms: int, download_ratelimiter: Ratelimiter, ip_address: str, - ) -> Tuple[int, Dict[bytes, List[bytes]]]: + ) -> tuple[int, dict[bytes, list[bytes]]]: try: return await self.transport_layer.download_media_v3( destination, diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index e7da8fda0dd..2046808225f 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -28,10 +28,8 @@ Callable, Collection, Dict, - List, Mapping, Optional, - Tuple, Union, ) @@ -163,10 +161,10 @@ def __init__(self, hs: "HomeServer"): # origins that we are currently processing a transaction from. # a dict from origin to txn id. 
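Note: the federation server below keys ResponseCache entries by tuples such as (origin, transaction_id) so a retried transaction shares the original computation instead of re-running it. A minimal asyncio sketch of that de-duplication idea (MiniResponseCache is illustrative; Synapse's ResponseCache is Twisted-based and richer):

import asyncio
from typing import Awaitable, Callable

class MiniResponseCache:
    def __init__(self) -> None:
        self._pending: dict[tuple[str, str], asyncio.Task] = {}

    async def wrap(
        self, key: tuple[str, str], factory: Callable[[], Awaitable[str]]
    ) -> str:
        task = self._pending.get(key)
        if task is None:
            # First caller starts the work; concurrent retries await the same task.
            task = asyncio.create_task(factory())
            self._pending[key] = task
            task.add_done_callback(lambda _: self._pending.pop(key, None))
        return await task

async def main() -> None:
    cache = MiniResponseCache()

    async def handle() -> str:
        await asyncio.sleep(0.01)
        return "200 OK"

    a, b = await asyncio.gather(
        cache.wrap(("hs1", "txn1"), handle),
        cache.wrap(("hs1", "txn1"), handle),
    )
    print(a, b)  # handler ran once, both callers see "200 OK"

asyncio.run(main())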
- self._active_transactions: Dict[str, str] = {} + self._active_transactions: dict[str, str] = {} # We cache results for transaction with the same ID - self._transaction_resp_cache: ResponseCache[Tuple[str, str]] = ResponseCache( + self._transaction_resp_cache: ResponseCache[tuple[str, str]] = ResponseCache( clock=hs.get_clock(), name="fed_txn_handler", server_name=self.server_name, @@ -179,7 +177,7 @@ def __init__(self, hs: "HomeServer"): # We cache responses to state queries, as they take a while and often # come in waves. - self._state_resp_cache: ResponseCache[Tuple[str, Optional[str]]] = ( + self._state_resp_cache: ResponseCache[tuple[str, Optional[str]]] = ( ResponseCache( clock=hs.get_clock(), name="state_resp", @@ -187,7 +185,7 @@ def __init__(self, hs: "HomeServer"): timeout_ms=30000, ) ) - self._state_ids_resp_cache: ResponseCache[Tuple[str, str]] = ResponseCache( + self._state_ids_resp_cache: ResponseCache[tuple[str, str]] = ResponseCache( clock=hs.get_clock(), name="state_ids_resp", server_name=self.server_name, @@ -236,8 +234,8 @@ async def _handle_old_staged_events(self) -> None: await self._clock.sleep(random.uniform(0, 0.1)) async def on_backfill_request( - self, origin: str, room_id: str, versions: List[str], limit: int - ) -> Tuple[int, Dict[str, Any]]: + self, origin: str, room_id: str, versions: list[str], limit: int + ) -> tuple[int, dict[str, Any]]: async with self._server_linearizer.queue((origin, room_id)): origin_host, _ = parse_server_name(origin) await self.check_server_matches_acl(origin_host, room_id) @@ -252,7 +250,7 @@ async def on_backfill_request( async def on_timestamp_to_event_request( self, origin: str, room_id: str, timestamp: int, direction: Direction - ) -> Tuple[int, Dict[str, Any]]: + ) -> tuple[int, dict[str, Any]]: """When we receive a federated `/timestamp_to_event` request, handle all of the logic for validating and fetching the event. @@ -298,7 +296,7 @@ async def on_incoming_transaction( transaction_id: str, destination: str, transaction_data: JsonDict, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: # If we receive a transaction we should make sure that kick off handling # any old events in the staging area. if not self._started_handling_of_staged_events: @@ -365,7 +363,7 @@ async def on_incoming_transaction( async def _on_incoming_transaction_inner( self, origin: str, transaction: Transaction, request_time: int - ) -> Tuple[int, Dict[str, Any]]: + ) -> tuple[int, dict[str, Any]]: # CRITICAL SECTION: the first thing we must do (before awaiting) is # add an entry to _active_transactions. assert origin not in self._active_transactions @@ -381,7 +379,7 @@ async def _on_incoming_transaction_inner( async def _handle_incoming_transaction( self, origin: str, transaction: Transaction, request_time: int - ) -> Tuple[int, Dict[str, Any]]: + ) -> tuple[int, dict[str, Any]]: """Process an incoming transaction and return the HTTP response Args: @@ -429,7 +427,7 @@ async def _handle_incoming_transaction( async def _handle_pdus_in_txn( self, origin: str, transaction: Transaction, request_time: int - ) -> Dict[str, dict]: + ) -> dict[str, dict]: """Process the PDUs in a received transaction. 
Args: @@ -448,7 +446,7 @@ async def _handle_pdus_in_txn( origin_host, _ = parse_server_name(origin) - pdus_by_room: Dict[str, List[EventBase]] = {} + pdus_by_room: dict[str, list[EventBase]] = {} newest_pdu_ts = 0 @@ -601,7 +599,7 @@ async def _process_edu(edu_dict: JsonDict) -> None: async def on_room_state_request( self, origin: str, room_id: str, event_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await self._event_auth_handler.assert_host_in_room(room_id, origin) origin_host, _ = parse_server_name(origin) await self.check_server_matches_acl(origin_host, room_id) @@ -625,7 +623,7 @@ async def on_room_state_request( @tag_args async def on_state_ids_request( self, origin: str, room_id: str, event_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: if not event_id: raise NotImplementedError("Specify an event") @@ -653,7 +651,7 @@ async def _on_state_ids_request_compute( async def _on_context_state_request_compute( self, room_id: str, event_id: str - ) -> Dict[str, list]: + ) -> dict[str, list]: pdus: Collection[EventBase] event_ids = await self.handler.get_state_ids_for_pdu(room_id, event_id) pdus = await self.store.get_events_as_list(event_ids) @@ -669,7 +667,7 @@ async def _on_context_state_request_compute( async def on_pdu_request( self, origin: str, event_id: str - ) -> Tuple[int, Union[JsonDict, str]]: + ) -> tuple[int, Union[JsonDict, str]]: pdu = await self.handler.get_persisted_pdu(origin, event_id) if pdu: @@ -678,8 +676,8 @@ async def on_pdu_request( return 404, "" async def on_query_request( - self, query_type: str, args: Dict[str, str] - ) -> Tuple[int, Dict[str, Any]]: + self, query_type: str, args: dict[str, str] + ) -> tuple[int, dict[str, Any]]: received_queries_counter.labels( type=query_type, **{SERVER_NAME_LABEL: self.server_name}, @@ -688,8 +686,8 @@ async def on_query_request( return 200, resp async def on_make_join_request( - self, origin: str, room_id: str, user_id: str, supported_versions: List[str] - ) -> Dict[str, Any]: + self, origin: str, room_id: str, user_id: str, supported_versions: list[str] + ) -> dict[str, Any]: origin_host, _ = parse_server_name(origin) await self.check_server_matches_acl(origin_host, room_id) @@ -714,7 +712,7 @@ async def on_make_join_request( async def on_invite_request( self, origin: str, content: JsonDict, room_version_id: str - ) -> Dict[str, Any]: + ) -> dict[str, Any]: room_version = KNOWN_ROOM_VERSIONS.get(room_version_id) if not room_version: raise SynapseError( @@ -748,7 +746,7 @@ async def on_send_join_request( content: JsonDict, room_id: str, caller_supports_partial_state: bool = False, - ) -> Dict[str, Any]: + ) -> dict[str, Any]: set_tag( SynapseTags.SEND_JOIN_RESPONSE_IS_PARTIAL_STATE, caller_supports_partial_state, @@ -809,7 +807,7 @@ async def on_send_join_request( async def on_make_leave_request( self, origin: str, room_id: str, user_id: str - ) -> Dict[str, Any]: + ) -> dict[str, Any]: origin_host, _ = parse_server_name(origin) await self.check_server_matches_acl(origin_host, room_id) pdu = await self.handler.on_make_leave_request(origin, room_id, user_id) @@ -826,7 +824,7 @@ async def on_send_leave_request( return {} async def on_make_knock_request( - self, origin: str, room_id: str, user_id: str, supported_versions: List[str] + self, origin: str, room_id: str, user_id: str, supported_versions: list[str] ) -> JsonDict: """We've received a /make_knock/ request, so we create a partial knock event for the room and hand that back, along with the room version, to the knocking @@ -884,7 
+882,7 @@ async def on_send_knock_request( origin: str, content: JsonDict, room_id: str, - ) -> Dict[str, List[JsonDict]]: + ) -> dict[str, list[JsonDict]]: """ We have received a knock event for a room. Verify and send the event into the room on the knocking homeserver's behalf. Then reply with some stripped state from the @@ -1034,7 +1032,7 @@ async def _on_send_membership_event( async def on_event_auth( self, origin: str, room_id: str, event_id: str - ) -> Tuple[int, Dict[str, Any]]: + ) -> tuple[int, dict[str, Any]]: async with self._server_linearizer.queue((origin, room_id)): await self._event_auth_handler.assert_host_in_room(room_id, origin) origin_host, _ = parse_server_name(origin) @@ -1046,20 +1044,20 @@ async def on_event_auth( return 200, res async def on_query_client_keys( - self, origin: str, content: Dict[str, str] - ) -> Tuple[int, Dict[str, Any]]: + self, origin: str, content: dict[str, str] + ) -> tuple[int, dict[str, Any]]: return await self.on_query_request("client_keys", content) async def on_query_user_devices( self, origin: str, user_id: str - ) -> Tuple[int, Dict[str, Any]]: + ) -> tuple[int, dict[str, Any]]: keys = await self.device_handler.on_federation_query_user_devices(user_id) return 200, keys @trace async def on_claim_client_keys( - self, query: List[Tuple[str, str, str, int]], always_include_fallback_keys: bool - ) -> Dict[str, Any]: + self, query: list[tuple[str, str, str, int]], always_include_fallback_keys: bool + ) -> dict[str, Any]: if any( not self.hs.is_mine(UserID.from_string(user_id)) for user_id, _, _, _ in query @@ -1071,7 +1069,7 @@ async def on_claim_client_keys( query, always_include_fallback_keys=always_include_fallback_keys ) - json_result: Dict[str, Dict[str, Dict[str, JsonDict]]] = {} + json_result: dict[str, dict[str, dict[str, JsonDict]]] = {} for result in results: for user_id, device_keys in result.items(): for device_id, keys in device_keys.items(): @@ -1098,10 +1096,10 @@ async def on_get_missing_events( self, origin: str, room_id: str, - earliest_events: List[str], - latest_events: List[str], + earliest_events: list[str], + latest_events: list[str], limit: int, - ) -> Dict[str, list]: + ) -> dict[str, list]: async with self._server_linearizer.queue((origin, room_id)): origin_host, _ = parse_server_name(origin) await self.check_server_matches_acl(origin_host, room_id) @@ -1133,7 +1131,7 @@ async def on_openid_userinfo(self, token: str) -> Optional[str]: ts_now_ms = self._clock.time_msec() return await self.store.get_user_id_for_open_id_token(token, ts_now_ms) - def _transaction_dict_from_pdus(self, pdu_list: List[EventBase]) -> JsonDict: + def _transaction_dict_from_pdus(self, pdu_list: list[EventBase]) -> JsonDict: """Returns a new Transaction containing the given PDUs suitable for transmission. """ @@ -1208,7 +1206,7 @@ async def _handle_received_pdu(self, origin: str, pdu: EventBase) -> None: async def _get_next_nonspam_staged_event_for_room( self, room_id: str, room_version: RoomVersion - ) -> Optional[Tuple[str, EventBase]]: + ) -> Optional[tuple[str, EventBase]]: """Fetch the first non-spam event from staging queue. Args: @@ -1407,13 +1405,13 @@ def __init__(self, hs: "HomeServer"): # the case. 
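Note: _edu_type_to_instance (declared just below) maps an EDU type to the worker instances registered for it, and the registry picks one at random per incoming EDU. The routing choice in isolation (the "master" fall-back name is illustrative):

import random

def pick_edu_instance(
    edu_type: str, routes: dict[str, list[str]], default: str = "master"
) -> str:
    # Spread EDUs of one type across all registered instances; fall back to
    # the main process when no explicit route exists.
    instances = routes.get(edu_type)
    return random.choice(instances) if instances else default

routes = {"m.typing": ["worker1", "worker2"]}
print(pick_edu_instance("m.typing", routes))   # worker1 or worker2
print(pick_edu_instance("m.receipt", routes))  # master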
self._send_edu = ReplicationFederationSendEduRestServlet.make_client(hs) - self.edu_handlers: Dict[str, Callable[[str, dict], Awaitable[None]]] = {} - self.query_handlers: Dict[str, Callable[[dict], Awaitable[JsonDict]]] = {} + self.edu_handlers: dict[str, Callable[[str, dict], Awaitable[None]]] = {} + self.query_handlers: dict[str, Callable[[dict], Awaitable[JsonDict]]] = {} # Map from type to instance names that we should route EDU handling to. # We randomly choose one instance from the list to route to for each new # EDU received. - self._edu_type_to_instance: Dict[str, List[str]] = {} + self._edu_type_to_instance: dict[str, list[str]] = {} def register_edu_handler( self, edu_type: str, handler: Callable[[str, JsonDict], Awaitable[None]] @@ -1455,7 +1453,7 @@ def register_query_handler( self.query_handlers[query_type] = handler def register_instances_for_edu( - self, edu_type: str, instance_names: List[str] + self, edu_type: str, instance_names: list[str] ) -> None: """Register that the EDU handler is on multiple instances.""" self._edu_type_to_instance[edu_type] = instance_names diff --git a/synapse/federation/persistence.py b/synapse/federation/persistence.py index 8340b485031..56281304294 100644 --- a/synapse/federation/persistence.py +++ b/synapse/federation/persistence.py @@ -27,7 +27,7 @@ """ import logging -from typing import Optional, Tuple +from typing import Optional from synapse.federation.units import Transaction from synapse.storage.databases.main import DataStore @@ -44,7 +44,7 @@ def __init__(self, datastore: DataStore): async def have_responded( self, origin: str, transaction: Transaction - ) -> Optional[Tuple[int, JsonDict]]: + ) -> Optional[tuple[int, JsonDict]]: """Have we already responded to a transaction with the same id and origin? diff --git a/synapse/federation/send_queue.py b/synapse/federation/send_queue.py index 759df9836b9..634f7f3f0d6 100644 --- a/synapse/federation/send_queue.py +++ b/synapse/federation/send_queue.py @@ -40,14 +40,11 @@ from enum import Enum from typing import ( TYPE_CHECKING, - Dict, Hashable, Iterable, - List, Optional, Sized, Tuple, - Type, ) import attr @@ -77,7 +74,7 @@ class QueueNames(str, Enum): PRESENCE_DESTINATIONS = "presence_destinations" -queue_name_to_gauge_map: Dict[QueueNames, LaterGauge] = {} +queue_name_to_gauge_map: dict[QueueNames, LaterGauge] = {} for queue_name in QueueNames: queue_name_to_gauge_map[queue_name] = LaterGauge( @@ -100,23 +97,23 @@ def __init__(self, hs: "HomeServer"): # We may have multiple federation sender instances, so we need to track # their positions separately. self._sender_instances = hs.config.worker.federation_shard_config.instances - self._sender_positions: Dict[str, int] = {} + self._sender_positions: dict[str, int] = {} # Pending presence map user_id -> UserPresenceState - self.presence_map: Dict[str, UserPresenceState] = {} + self.presence_map: dict[str, UserPresenceState] = {} # Stores the destinations we need to explicitly send presence to about a # given user. 
# Stream position -> (user_id, destinations) - self.presence_destinations: SortedDict[int, Tuple[str, Iterable[str]]] = ( + self.presence_destinations: SortedDict[int, tuple[str, Iterable[str]]] = ( SortedDict() ) # (destination, key) -> EDU - self.keyed_edu: Dict[Tuple[str, tuple], Edu] = {} + self.keyed_edu: dict[tuple[str, tuple], Edu] = {} # stream position -> (destination, key) - self.keyed_edu_changed: SortedDict[int, Tuple[str, tuple]] = SortedDict() + self.keyed_edu_changed: SortedDict[int, tuple[str, tuple]] = SortedDict() self.edus: SortedDict[int, Edu] = SortedDict() @@ -295,7 +292,7 @@ def federation_ack(self, instance_name: str, token: int) -> None: async def get_replication_rows( self, instance_name: str, from_token: int, to_token: int, target_row_count: int - ) -> Tuple[List[Tuple[int, Tuple]], int, bool]: + ) -> tuple[list[tuple[int, Tuple]], int, bool]: """Get rows to be sent over federation between the two tokens Args: @@ -318,7 +315,7 @@ async def get_replication_rows( # list of tuple(int, BaseFederationRow), where the first is the position # of the federation stream. - rows: List[Tuple[int, BaseFederationRow]] = [] + rows: list[tuple[int, BaseFederationRow]] = [] # Fetch presence to send to destinations i = self.presence_destinations.bisect_right(from_token) @@ -413,7 +410,7 @@ def add_to_buffer(self, buff: "ParsedFederationStreamData") -> None: @attr.s(slots=True, frozen=True, auto_attribs=True) class PresenceDestinationsRow(BaseFederationRow): state: UserPresenceState - destinations: List[str] + destinations: list[str] TypeId = "pd" @@ -436,7 +433,7 @@ class KeyedEduRow(BaseFederationRow): typing EDUs clobber based on room_id. """ - key: Tuple[str, ...] # the edu key passed to send_edu + key: tuple[str, ...] # the edu key passed to send_edu edu: Edu TypeId = "k" @@ -471,7 +468,7 @@ def add_to_buffer(self, buff: "ParsedFederationStreamData") -> None: buff.edus.setdefault(self.edu.destination, []).append(self.edu) -_rowtypes: Tuple[Type[BaseFederationRow], ...] = ( +_rowtypes: tuple[type[BaseFederationRow], ...] = ( PresenceDestinationsRow, KeyedEduRow, EduRow, @@ -483,16 +480,16 @@ def add_to_buffer(self, buff: "ParsedFederationStreamData") -> None: @attr.s(slots=True, frozen=True, auto_attribs=True) class ParsedFederationStreamData: # list of tuples of UserPresenceState and destinations - presence_destinations: List[Tuple[UserPresenceState, List[str]]] + presence_destinations: list[tuple[UserPresenceState, list[str]]] # dict of destination -> { key -> Edu } - keyed_edus: Dict[str, Dict[Tuple[str, ...], Edu]] + keyed_edus: dict[str, dict[tuple[str, ...], Edu]] # dict of destination -> [Edu] - edus: Dict[str, List[Edu]] + edus: dict[str, list[Edu]] async def process_rows_for_federation( transaction_queue: FederationSender, - rows: List[FederationStream.FederationStreamRow], + rows: list[FederationStream.FederationStreamRow], ) -> None: """Parse a list of rows from the federation stream and put them in the transaction queue ready for sending to the relevant homeservers. 
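Note: get_replication_rows above walks SortedDicts of stream position -> payload, using bisect_right to find everything strictly after from_token and at or below to_token. A compact sketch with sortedcontainers (the same library the send queue uses):

from sortedcontainers import SortedDict

def rows_between(
    stream: SortedDict, from_token: int, to_token: int
) -> list[tuple[int, str]]:
    # Positions strictly greater than from_token, up to and including to_token.
    start = stream.bisect_right(from_token)
    rows: list[tuple[int, str]] = []
    for pos in stream.keys()[start:]:
        if pos > to_token:
            break
        rows.append((pos, stream[pos]))
    return rows

stream = SortedDict({1: "a", 5: "b", 9: "c"})
print(rows_between(stream, 1, 9))  # [(5, 'b'), (9, 'c')]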
diff --git a/synapse/federation/sender/__init__.py b/synapse/federation/sender/__init__.py index 4410ffc5c56..229ae647c01 100644 --- a/synapse/federation/sender/__init__.py +++ b/synapse/federation/sender/__init__.py @@ -135,13 +135,10 @@ from typing import ( TYPE_CHECKING, Collection, - Dict, Hashable, Iterable, - List, Literal, Optional, - Tuple, ) import attr @@ -312,7 +309,7 @@ def federation_ack(self, instance_name: str, token: int) -> None: @abc.abstractmethod async def get_replication_rows( self, instance_name: str, from_token: int, to_token: int, target_row_count: int - ) -> Tuple[List[Tuple[int, Tuple]], int, bool]: + ) -> tuple[list[tuple[int, tuple]], int, bool]: raise NotImplementedError() @@ -420,7 +417,7 @@ def __init__(self, hs: "HomeServer"): self._federation_shard_config = hs.config.worker.federation_shard_config # map from destination to PerDestinationQueue - self._per_destination_queues: Dict[str, PerDestinationQueue] = {} + self._per_destination_queues: dict[str, PerDestinationQueue] = {} transaction_queue_pending_destinations_gauge.register_hook( homeserver_instance_id=hs.get_instance_id(), @@ -724,7 +721,7 @@ async def handle_event(event: EventBase) -> None: **{SERVER_NAME_LABEL: self.server_name}, ).observe((now - ts) / 1000) - async def handle_room_events(events: List[EventBase]) -> None: + async def handle_room_events(events: list[EventBase]) -> None: logger.debug( "Handling %i events in room %s", len(events), events[0].room_id ) @@ -736,7 +733,7 @@ async def handle_room_events(events: List[EventBase]) -> None: for event in events: await handle_event(event) - events_by_room: Dict[str, List[EventBase]] = {} + events_by_room: dict[str, list[EventBase]] = {} for event_id in event_ids: # `event_entries` is unsorted, so we have to iterate over `event_ids` @@ -1124,7 +1121,7 @@ def federation_ack(self, instance_name: str, token: int) -> None: @staticmethod async def get_replication_rows( instance_name: str, from_token: int, to_token: int, target_row_count: int - ) -> Tuple[List[Tuple[int, Tuple]], int, bool]: + ) -> tuple[list[tuple[int, tuple]], int, bool]: # Dummy implementation for case where federation sender isn't offloaded # to a worker. return [], 0, False diff --git a/synapse/federation/sender/per_destination_queue.py b/synapse/federation/sender/per_destination_queue.py index 845af92facf..ecf4789d76a 100644 --- a/synapse/federation/sender/per_destination_queue.py +++ b/synapse/federation/sender/per_destination_queue.py @@ -23,7 +23,7 @@ import logging from collections import OrderedDict from types import TracebackType -from typing import TYPE_CHECKING, Dict, Hashable, Iterable, List, Optional, Tuple, Type +from typing import TYPE_CHECKING, Hashable, Iterable, Optional import attr from prometheus_client import Counter @@ -145,16 +145,16 @@ def __init__( self._last_successful_stream_ordering: Optional[int] = None # a queue of pending PDUs - self._pending_pdus: List[EventBase] = [] + self._pending_pdus: list[EventBase] = [] # XXX this is never actually used: see # https://github.com/matrix-org/synapse/issues/7549 - self._pending_edus: List[Edu] = [] + self._pending_edus: list[Edu] = [] # Pending EDUs by their "key". Keyed EDUs are EDUs that get clobbered # based on their key (e.g. 
typing events by room_id) # Map of (edu_type, key) -> Edu - self._pending_edus_keyed: Dict[Tuple[str, Hashable], Edu] = {} + self._pending_edus_keyed: dict[tuple[str, Hashable], Edu] = {} # Map of user_id -> UserPresenceState of pending presence to be sent to this # destination @@ -164,7 +164,7 @@ def __init__( # # Each receipt can only have a single receipt per # (room ID, receipt type, user ID, thread ID) tuple. - self._pending_receipt_edus: List[Dict[str, Dict[str, Dict[str, dict]]]] = [] + self._pending_receipt_edus: list[dict[str, dict[str, dict[str, dict]]]] = [] # stream_id of last successfully sent to-device message. # NB: may be a long or an int. @@ -340,7 +340,7 @@ def attempt_new_transaction(self) -> None: ) async def _transaction_transmission_loop(self) -> None: - pending_pdus: List[EventBase] = [] + pending_pdus: list[EventBase] = [] try: self.transmission_loop_running = True # This will throw if we wouldn't retry. We do this here so we fail @@ -665,12 +665,12 @@ def _get_receipt_edus(self, limit: int) -> Iterable[Edu]: if not self._pending_receipt_edus: self._rrs_pending_flush = False - def _pop_pending_edus(self, limit: int) -> List[Edu]: + def _pop_pending_edus(self, limit: int) -> list[Edu]: pending_edus = self._pending_edus pending_edus, self._pending_edus = pending_edus[:limit], pending_edus[limit:] return pending_edus - async def _get_device_update_edus(self, limit: int) -> Tuple[List[Edu], int]: + async def _get_device_update_edus(self, limit: int) -> tuple[list[Edu], int]: last_device_list = self._last_device_list_stream_id # Retrieve list of new device updates to send to the destination @@ -691,7 +691,7 @@ async def _get_device_update_edus(self, limit: int) -> Tuple[List[Edu], int]: return edus, now_stream_id - async def _get_to_device_message_edus(self, limit: int) -> Tuple[List[Edu], int]: + async def _get_to_device_message_edus(self, limit: int) -> tuple[list[Edu], int]: last_device_stream_id = self._last_device_stream_id to_device_stream_id = self._store.get_to_device_stream_token() contents, stream_id = await self._store.get_new_device_msgs_for_remote( @@ -745,9 +745,9 @@ class _TransactionQueueManager: _device_stream_id: Optional[int] = None _device_list_id: Optional[int] = None _last_stream_ordering: Optional[int] = None - _pdus: List[EventBase] = attr.Factory(list) + _pdus: list[EventBase] = attr.Factory(list) - async def __aenter__(self) -> Tuple[List[EventBase], List[Edu]]: + async def __aenter__(self) -> tuple[list[EventBase], list[Edu]]: # First we calculate the EDUs we want to send, if any. # There's a maximum number of EDUs that can be sent with a transaction, @@ -767,7 +767,7 @@ async def __aenter__(self) -> Tuple[List[EventBase], List[Edu]]: if self.queue._pending_presence: # Only send max 50 presence entries in the EDU, to bound the amount # of data we're sending. 
- presence_to_add: List[JsonDict] = [] + presence_to_add: list[JsonDict] = [] while ( self.queue._pending_presence and len(presence_to_add) < MAX_PRESENCE_STATES_PER_EDU @@ -845,7 +845,7 @@ async def __aenter__(self) -> Tuple[List[EventBase], List[Edu]]: async def __aexit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc: Optional[BaseException], tb: Optional[TracebackType], ) -> None: diff --git a/synapse/federation/sender/transaction_manager.py b/synapse/federation/sender/transaction_manager.py index f47c0114873..99aa05ebd6a 100644 --- a/synapse/federation/sender/transaction_manager.py +++ b/synapse/federation/sender/transaction_manager.py @@ -18,7 +18,7 @@ # # import logging -from typing import TYPE_CHECKING, List +from typing import TYPE_CHECKING from prometheus_client import Gauge @@ -82,8 +82,8 @@ def shutdown(self) -> None: async def send_new_transaction( self, destination: str, - pdus: List[EventBase], - edus: List[Edu], + pdus: list[EventBase], + edus: list[Edu], ) -> None: """ Args: diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py index 02e56e8e278..a180cd5a553 100644 --- a/synapse/federation/transport/client.py +++ b/synapse/federation/transport/client.py @@ -34,7 +34,6 @@ List, Mapping, Optional, - Tuple, Union, ) @@ -428,7 +427,7 @@ async def send_join_v2( omit_members: bool, ) -> "SendJoinResponse": path = _create_v2_path("/send_join/%s/%s", room_id, event_id) - query_params: Dict[str, str] = {} + query_params: dict[str, str] = {} # lazy-load state on join query_params["omit_members"] = "true" if omit_members else "false" @@ -442,7 +441,7 @@ async def send_join_v2( async def send_leave_v1( self, destination: str, room_id: str, event_id: str, content: JsonDict - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: path = _create_v1_path("/send_leave/%s/%s", room_id, event_id) return await self.client.put_json( @@ -508,7 +507,7 @@ async def send_knock_v1( async def send_invite_v1( self, destination: str, room_id: str, event_id: str, content: JsonDict - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: path = _create_v1_path("/invite/%s/%s", room_id, event_id) return await self.client.put_json( @@ -546,7 +545,7 @@ async def get_public_rooms( if search_filter: # this uses MSC2197 (Search Filtering over Federation) - data: Dict[str, Any] = {"include_all_networks": include_all_networks} + data: dict[str, Any] = {"include_all_networks": include_all_networks} if third_party_instance_id: data["third_party_instance_id"] = third_party_instance_id if limit: @@ -570,7 +569,7 @@ async def get_public_rooms( ) raise else: - args: Dict[str, Union[str, Iterable[str]]] = { + args: dict[str, Union[str, Iterable[str]]] = { "include_all_networks": "true" if include_all_networks else "false" } if third_party_instance_id: @@ -854,7 +853,7 @@ async def get_room_hierarchy_unstable( ) async def get_account_status( - self, destination: str, user_ids: List[str] + self, destination: str, user_ids: list[str] ) -> JsonDict: """ Args: @@ -878,7 +877,7 @@ async def download_media_r0( max_timeout_ms: int, download_ratelimiter: Ratelimiter, ip_address: str, - ) -> Tuple[int, Dict[bytes, List[bytes]]]: + ) -> tuple[int, dict[bytes, list[bytes]]]: path = f"/_matrix/media/r0/download/{destination}/{media_id}" return await self.client.get_file( destination, @@ -905,7 +904,7 @@ async def download_media_v3( max_timeout_ms: int, download_ratelimiter: Ratelimiter, ip_address: str, - ) -> Tuple[int, Dict[bytes, 
List[bytes]]]: + ) -> tuple[int, dict[bytes, list[bytes]]]: path = f"/_matrix/media/v3/download/{destination}/{media_id}" return await self.client.get_file( destination, @@ -936,7 +935,7 @@ async def federation_download_media( max_timeout_ms: int, download_ratelimiter: Ratelimiter, ip_address: str, - ) -> Tuple[int, Dict[bytes, List[bytes]], bytes]: + ) -> tuple[int, dict[bytes, list[bytes]], bytes]: path = f"/_matrix/federation/v1/media/download/{media_id}" return await self.client.federation_get_file( destination, @@ -993,9 +992,9 @@ class SendJoinResponse: """The parsed response of a `/send_join` request.""" # The list of auth events from the /send_join response. - auth_events: List[EventBase] + auth_events: list[EventBase] # The list of state from the /send_join response. - state: List[EventBase] + state: list[EventBase] # The raw join event from the /send_join response. event_dict: JsonDict # The parsed join event from the /send_join response. This will be None if @@ -1006,19 +1005,19 @@ class SendJoinResponse: members_omitted: bool = False # List of servers in the room - servers_in_room: Optional[List[str]] = None + servers_in_room: Optional[list[str]] = None @attr.s(slots=True, auto_attribs=True) class StateRequestResponse: """The parsed response of a `/state` request.""" - auth_events: List[EventBase] - state: List[EventBase] + auth_events: list[EventBase] + state: list[EventBase] @ijson.coroutine -def _event_parser(event_dict: JsonDict) -> Generator[None, Tuple[str, Any], None]: +def _event_parser(event_dict: JsonDict) -> Generator[None, tuple[str, Any], None]: """Helper function for use with `ijson.kvitems_coro` to parse key-value pairs to add them to a given dictionary. """ @@ -1030,7 +1029,7 @@ def _event_parser(event_dict: JsonDict) -> Generator[None, Tuple[str, Any], None @ijson.coroutine def _event_list_parser( - room_version: RoomVersion, events: List[EventBase] + room_version: RoomVersion, events: list[EventBase] ) -> Generator[None, JsonDict, None]: """Helper function for use with `ijson.items_coro` to parse an array of events and add them to the given list. @@ -1086,7 +1085,7 @@ class SendJoinParser(ByteParser[SendJoinResponse]): def __init__(self, room_version: RoomVersion, v1_api: bool): self._response = SendJoinResponse([], [], event_dict={}) self._room_version = room_version - self._coros: List[Generator[None, bytes, None]] = [] + self._coros: list[Generator[None, bytes, None]] = [] # The V1 API has the shape of `[200, {...}]`, which we handle by # prefixing with `item.*`. 
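The parser classes above hinge on a pattern that is easy to miss in a type-only diff: a primed generator that consumes values pushed into send(), which is what the Generator[None, tuple[str, Any], None] annotation on _event_parser describes. A dependency-free sketch of the same shape (ijson's @ijson.coroutine decorator does the priming in the real code):

    from typing import Any, Generator

    def kv_collector(out: dict) -> Generator[None, tuple[str, Any], None]:
        # Receives (key, value) pairs via send() and stores them in `out`.
        while True:
            key, value = yield
            out[key] = value

    event_dict: dict[str, Any] = {}
    parser = kv_collector(event_dict)
    next(parser)  # prime the generator so it can accept send()
    parser.send(("type", "m.room.member"))
    parser.send(("state_key", "@alice:example.org"))
    assert event_dict == {"type": "m.room.member", "state_key": "@alice:example.org"}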
@@ -1159,7 +1158,7 @@ class _StateParser(ByteParser[StateRequestResponse]): def __init__(self, room_version: RoomVersion): self._response = StateRequestResponse([], []) self._room_version = room_version - self._coros: List[Generator[None, bytes, None]] = [ + self._coros: list[Generator[None, bytes, None]] = [ ijson.items_coro( _event_list_parser(room_version, self._response.state), "pdus.item", diff --git a/synapse/federation/transport/server/__init__.py b/synapse/federation/transport/server/__init__.py index c4905e63ddf..d5f05f7290e 100644 --- a/synapse/federation/transport/server/__init__.py +++ b/synapse/federation/transport/server/__init__.py @@ -20,7 +20,7 @@ # # import logging -from typing import TYPE_CHECKING, Dict, Iterable, List, Literal, Optional, Tuple, Type +from typing import TYPE_CHECKING, Iterable, Literal, Optional from synapse.api.errors import FederationDeniedError, SynapseError from synapse.federation.transport.server._base import ( @@ -52,7 +52,7 @@ class TransportLayerServer(JsonResource): """Handles incoming federation HTTP requests""" - def __init__(self, hs: "HomeServer", servlet_groups: Optional[List[str]] = None): + def __init__(self, hs: "HomeServer", servlet_groups: Optional[list[str]] = None): """Initialize the TransportLayerServer Will by default register all servlets. For custom behaviour, pass in @@ -130,8 +130,8 @@ def __init__( self.allow_access = hs.config.server.allow_public_rooms_over_federation async def on_GET( - self, origin: str, content: Literal[None], query: Dict[bytes, List[bytes]] - ) -> Tuple[int, JsonDict]: + self, origin: str, content: Literal[None], query: dict[bytes, list[bytes]] + ) -> tuple[int, JsonDict]: if not self.allow_access: raise FederationDeniedError(origin) @@ -164,8 +164,8 @@ async def on_GET( return 200, data async def on_POST( - self, origin: str, content: JsonDict, query: Dict[bytes, List[bytes]] - ) -> Tuple[int, JsonDict]: + self, origin: str, content: JsonDict, query: dict[bytes, list[bytes]] + ) -> tuple[int, JsonDict]: # This implements MSC2197 (Search Filtering over Federation) if not self.allow_access: raise FederationDeniedError(origin) @@ -242,8 +242,8 @@ async def on_GET( self, origin: Optional[str], content: Literal[None], - query: Dict[bytes, List[bytes]], - ) -> Tuple[int, JsonDict]: + query: dict[bytes, list[bytes]], + ) -> tuple[int, JsonDict]: token = parse_string_from_args(query, "access_token") if token is None: return ( @@ -265,7 +265,7 @@ async def on_GET( return 200, {"sub": user_id} -SERVLET_GROUPS: Dict[str, Iterable[Type[BaseFederationServlet]]] = { +SERVLET_GROUPS: dict[str, Iterable[type[BaseFederationServlet]]] = { "federation": FEDERATION_SERVLET_CLASSES, "room_list": (PublicRoomList,), "openid": (OpenIdUserInfo,), diff --git a/synapse/federation/transport/server/_base.py b/synapse/federation/transport/server/_base.py index cba309635b7..146cbebb271 100644 --- a/synapse/federation/transport/server/_base.py +++ b/synapse/federation/transport/server/_base.py @@ -24,7 +24,7 @@ import re import time from http import HTTPStatus -from typing import TYPE_CHECKING, Any, Awaitable, Callable, Dict, Optional, Tuple, cast +from typing import TYPE_CHECKING, Any, Awaitable, Callable, Optional, cast from synapse.api.errors import Codes, FederationDeniedError, SynapseError from synapse.api.urls import FEDERATION_V1_PREFIX @@ -165,7 +165,7 @@ async def reset_retry_timings(self, origin: str) -> None: logger.exception("Error resetting retry timings on %s", origin) -def _parse_auth_header(header_bytes: bytes) -> 
Tuple[str, str, str, Optional[str]]: +def _parse_auth_header(header_bytes: bytes) -> tuple[str, str, str, Optional[str]]: """Parse an X-Matrix auth header Args: @@ -185,7 +185,7 @@ def _parse_auth_header(header_bytes: bytes) -> Tuple[str, str, str, Optional[str rf"{space_or_tab}*,{space_or_tab}*", re.split(r"^X-Matrix +", header_str, maxsplit=1)[1], ) - param_dict: Dict[str, str] = { + param_dict: dict[str, str] = { k.lower(): v for k, v in [param.split("=", maxsplit=1) for param in params] } @@ -252,7 +252,7 @@ class BaseFederationServlet: components as specified in the path match regexp. Returns: - Optional[Tuple[int, object]]: either (response code, response object) to + Optional[tuple[int, object]]: either (response code, response object) to return a JSON response, or None if the request has already been handled. Raises: @@ -282,14 +282,14 @@ def __init__( self.ratelimiter = ratelimiter self.server_name = server_name - def _wrap(self, func: Callable[..., Awaitable[Tuple[int, Any]]]) -> ServletCallback: + def _wrap(self, func: Callable[..., Awaitable[tuple[int, Any]]]) -> ServletCallback: authenticator = self.authenticator ratelimiter = self.ratelimiter @functools.wraps(func) async def new_func( request: SynapseRequest, *args: Any, **kwargs: str - ) -> Optional[Tuple[int, Any]]: + ) -> Optional[tuple[int, Any]]: """A callback which can be passed to HttpServer.RegisterPaths Args: diff --git a/synapse/federation/transport/server/federation.py b/synapse/federation/transport/server/federation.py index eb96ff27f9d..54c7dac1b71 100644 --- a/synapse/federation/transport/server/federation.py +++ b/synapse/federation/transport/server/federation.py @@ -22,14 +22,10 @@ from collections import Counter from typing import ( TYPE_CHECKING, - Dict, - List, Literal, Mapping, Optional, Sequence, - Tuple, - Type, Union, ) @@ -93,9 +89,9 @@ async def on_PUT( self, origin: str, content: JsonDict, - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], transaction_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: """Called on PUT /send// Args: @@ -158,9 +154,9 @@ async def on_GET( self, origin: str, content: Literal[None], - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], event_id: str, - ) -> Tuple[int, Union[JsonDict, str]]: + ) -> tuple[int, Union[JsonDict, str]]: return await self.handler.on_pdu_request(origin, event_id) @@ -173,9 +169,9 @@ async def on_GET( self, origin: str, content: Literal[None], - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], room_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: return await self.handler.on_room_state_request( origin, room_id, @@ -191,9 +187,9 @@ async def on_GET( self, origin: str, content: Literal[None], - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], room_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: return await self.handler.on_state_ids_request( origin, room_id, @@ -209,9 +205,9 @@ async def on_GET( self, origin: str, content: Literal[None], - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], room_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: versions = [x.decode("ascii") for x in query[b"v"]] limit = parse_integer_from_args(query, "limit", None) @@ -248,9 +244,9 @@ async def on_GET( self, origin: str, content: Literal[None], - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], room_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: timestamp = 
parse_integer_from_args(query, "ts", required=True) direction_str = parse_string_from_args( query, "dir", allowed_values=["f", "b"], required=True @@ -271,9 +267,9 @@ async def on_GET( self, origin: str, content: Literal[None], - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], query_type: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: args = {k.decode("utf8"): v[0].decode("utf-8") for k, v in query.items()} args["origin"] = origin return await self.handler.on_query_request(query_type, args) @@ -287,10 +283,10 @@ async def on_GET( self, origin: str, content: Literal[None], - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], room_id: str, user_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: """ Args: origin: The authenticated server_name of the calling server @@ -323,10 +319,10 @@ async def on_GET( self, origin: str, content: Literal[None], - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], room_id: str, user_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: result = await self.handler.on_make_leave_request(origin, room_id, user_id) return 200, result @@ -339,10 +335,10 @@ async def on_PUT( self, origin: str, content: JsonDict, - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], room_id: str, event_id: str, - ) -> Tuple[int, Tuple[int, JsonDict]]: + ) -> tuple[int, tuple[int, JsonDict]]: result = await self.handler.on_send_leave_request(origin, content, room_id) return 200, (200, result) @@ -357,10 +353,10 @@ async def on_PUT( self, origin: str, content: JsonDict, - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], room_id: str, event_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: result = await self.handler.on_send_leave_request(origin, content, room_id) return 200, result @@ -373,10 +369,10 @@ async def on_GET( self, origin: str, content: Literal[None], - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], room_id: str, user_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: # Retrieve the room versions the remote homeserver claims to support supported_versions = parse_strings_from_args( query, "ver", required=True, encoding="utf-8" @@ -396,10 +392,10 @@ async def on_PUT( self, origin: str, content: JsonDict, - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], room_id: str, event_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: result = await self.handler.on_send_knock_request(origin, content, room_id) return 200, result @@ -412,10 +408,10 @@ async def on_GET( self, origin: str, content: Literal[None], - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], room_id: str, event_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: return await self.handler.on_event_auth(origin, room_id, event_id) @@ -427,10 +423,10 @@ async def on_PUT( self, origin: str, content: JsonDict, - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], room_id: str, event_id: str, - ) -> Tuple[int, Tuple[int, JsonDict]]: + ) -> tuple[int, tuple[int, JsonDict]]: # TODO(paul): assert that event_id parsed from path actually # match those given in content result = await self.handler.on_send_join_request(origin, content, room_id) @@ -447,10 +443,10 @@ async def on_PUT( self, origin: str, content: JsonDict, - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], room_id: str, event_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, 
JsonDict]: # TODO(paul): assert that event_id parsed from path actually # match those given in content @@ -470,10 +466,10 @@ async def on_PUT( self, origin: str, content: JsonDict, - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], room_id: str, event_id: str, - ) -> Tuple[int, Tuple[int, JsonDict]]: + ) -> tuple[int, tuple[int, JsonDict]]: # We don't get a room version, so we have to assume its EITHER v1 or # v2. This is "fine" as the only difference between V1 and V2 is the # state resolution algorithm, and we don't use that for processing @@ -497,10 +493,10 @@ async def on_PUT( self, origin: str, content: JsonDict, - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], room_id: str, event_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: # TODO(paul): assert that room_id/event_id parsed from path actually # match those given in content @@ -535,9 +531,9 @@ async def on_PUT( self, origin: str, content: JsonDict, - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], room_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await self.handler.on_exchange_third_party_invite_request(content) return 200, {} @@ -547,8 +543,8 @@ class FederationClientKeysQueryServlet(BaseFederationServerServlet): CATEGORY = "Federation requests" async def on_POST( - self, origin: str, content: JsonDict, query: Dict[bytes, List[bytes]] - ) -> Tuple[int, JsonDict]: + self, origin: str, content: JsonDict, query: dict[bytes, list[bytes]] + ) -> tuple[int, JsonDict]: return await self.handler.on_query_client_keys(origin, content) @@ -560,9 +556,9 @@ async def on_GET( self, origin: str, content: Literal[None], - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], user_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: return await self.handler.on_query_user_devices(origin, user_id) @@ -571,10 +567,10 @@ class FederationClientKeysClaimServlet(BaseFederationServerServlet): CATEGORY = "Federation requests" async def on_POST( - self, origin: str, content: JsonDict, query: Dict[bytes, List[bytes]] - ) -> Tuple[int, JsonDict]: + self, origin: str, content: JsonDict, query: dict[bytes, list[bytes]] + ) -> tuple[int, JsonDict]: # Generate a count for each algorithm, which is hard-coded to 1. - key_query: List[Tuple[str, str, str, int]] = [] + key_query: list[tuple[str, str, str, int]] = [] for user_id, device_keys in content.get("one_time_keys", {}).items(): for device_id, algorithm in device_keys.items(): key_query.append((user_id, device_id, algorithm, 1)) @@ -597,10 +593,10 @@ class FederationUnstableClientKeysClaimServlet(BaseFederationServerServlet): CATEGORY = "Federation requests" async def on_POST( - self, origin: str, content: JsonDict, query: Dict[bytes, List[bytes]] - ) -> Tuple[int, JsonDict]: + self, origin: str, content: JsonDict, query: dict[bytes, list[bytes]] + ) -> tuple[int, JsonDict]: # Generate a count for each algorithm. 
- key_query: List[Tuple[str, str, str, int]] = [] + key_query: list[tuple[str, str, str, int]] = [] for user_id, device_keys in content.get("one_time_keys", {}).items(): for device_id, algorithms in device_keys.items(): counts = Counter(algorithms) @@ -621,9 +617,9 @@ async def on_POST( self, origin: str, content: JsonDict, - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], room_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: limit = int(content.get("limit", 10)) earliest_events = content.get("earliest_events", []) latest_events = content.get("latest_events", []) @@ -646,8 +642,8 @@ class On3pidBindServlet(BaseFederationServerServlet): REQUIRE_AUTH = False async def on_POST( - self, origin: Optional[str], content: JsonDict, query: Dict[bytes, List[bytes]] - ) -> Tuple[int, JsonDict]: + self, origin: Optional[str], content: JsonDict, query: dict[bytes, list[bytes]] + ) -> tuple[int, JsonDict]: if "invites" in content: last_exception = None for invite in content["invites"]: @@ -682,8 +678,8 @@ async def on_GET( self, origin: Optional[str], content: Literal[None], - query: Dict[bytes, List[bytes]], - ) -> Tuple[int, JsonDict]: + query: dict[bytes, list[bytes]], + ) -> tuple[int, JsonDict]: return ( 200, { @@ -715,7 +711,7 @@ async def on_GET( content: Literal[None], query: Mapping[bytes, Sequence[bytes]], room_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: suggested_only = parse_boolean_from_args(query, "suggested_only", default=False) return 200, await self.handler.get_federation_hierarchy( origin, room_id, suggested_only @@ -746,9 +742,9 @@ async def on_GET( self, origin: str, content: Literal[None], - query: Dict[bytes, List[bytes]], + query: dict[bytes, list[bytes]], room_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: is_public = await self._store.is_room_world_readable_or_publicly_joinable( room_id ) @@ -780,7 +776,7 @@ async def on_POST( content: JsonDict, query: Mapping[bytes, Sequence[bytes]], room_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: if "user_ids" not in content: raise SynapseError( 400, "Required parameter 'user_ids' is missing", Codes.MISSING_PARAM @@ -882,7 +878,7 @@ async def on_GET( self.media_repo.mark_recently_accessed(None, media_id) -FEDERATION_SERVLET_CLASSES: Tuple[Type[BaseFederationServlet], ...] = ( +FEDERATION_SERVLET_CLASSES: tuple[type[BaseFederationServlet], ...] 
= ( FederationSendServlet, FederationEventServlet, FederationStateV1Servlet, diff --git a/synapse/federation/units.py b/synapse/federation/units.py index 3bb5f824b7a..bff45bc2a9f 100644 --- a/synapse/federation/units.py +++ b/synapse/federation/units.py @@ -24,7 +24,7 @@ """ import logging -from typing import List, Optional, Sequence +from typing import Optional, Sequence import attr @@ -70,7 +70,7 @@ def strip_context(self) -> None: getattr(self, "content", {})["org.matrix.opentracing_context"] = "{}" -def _none_to_list(edus: Optional[List[JsonDict]]) -> List[JsonDict]: +def _none_to_list(edus: Optional[list[JsonDict]]) -> list[JsonDict]: if edus is None: return [] return edus @@ -98,8 +98,8 @@ class Transaction: origin: str destination: str origin_server_ts: int - pdus: List[JsonDict] = attr.ib(factory=list, converter=_none_to_list) - edus: List[JsonDict] = attr.ib(factory=list, converter=_none_to_list) + pdus: list[JsonDict] = attr.ib(factory=list, converter=_none_to_list) + edus: list[JsonDict] = attr.ib(factory=list, converter=_none_to_list) def get_dict(self) -> JsonDict: """A JSON-ready dictionary of valid keys which aren't internal.""" @@ -113,7 +113,7 @@ def get_dict(self) -> JsonDict: return result -def filter_pdus_for_valid_depth(pdus: Sequence[JsonDict]) -> List[JsonDict]: +def filter_pdus_for_valid_depth(pdus: Sequence[JsonDict]) -> list[JsonDict]: filtered_pdus = [] for pdu in pdus: # Drop PDUs that have a depth that is outside of the range allowed @@ -129,5 +129,5 @@ def filter_pdus_for_valid_depth(pdus: Sequence[JsonDict]) -> List[JsonDict]: def serialize_and_filter_pdus( pdus: Sequence[EventBase], time_now: Optional[int] = None -) -> List[JsonDict]: +) -> list[JsonDict]: return filter_pdus_for_valid_depth([pdu.get_pdu_json(time_now) for pdu in pdus]) diff --git a/synapse/handlers/account.py b/synapse/handlers/account.py index 37cc3d3ff56..855027f08e2 100644 --- a/synapse/handlers/account.py +++ b/synapse/handlers/account.py @@ -19,7 +19,7 @@ # # -from typing import TYPE_CHECKING, Dict, List, Tuple +from typing import TYPE_CHECKING from synapse.api.errors import Codes, SynapseError from synapse.types import JsonDict, UserID @@ -40,9 +40,9 @@ def __init__(self, hs: "HomeServer"): async def get_account_statuses( self, - user_ids: List[str], + user_ids: list[str], allow_remote: bool, - ) -> Tuple[JsonDict, List[str]]: + ) -> tuple[JsonDict, list[str]]: """Get account statuses for a list of user IDs. If one or more account(s) belong to remote homeservers, retrieve their status(es) @@ -63,7 +63,7 @@ async def get_account_statuses( """ statuses = {} failures = [] - remote_users: List[UserID] = [] + remote_users: list[UserID] = [] for raw_user_id in user_ids: try: @@ -127,8 +127,8 @@ async def _get_local_account_status(self, user_id: UserID) -> JsonDict: return status async def _get_remote_account_statuses( - self, remote_users: List[UserID] - ) -> Tuple[JsonDict, List[str]]: + self, remote_users: list[UserID] + ) -> tuple[JsonDict, list[str]]: """Send out federation requests to retrieve the statuses of remote accounts. Args: @@ -140,7 +140,7 @@ async def _get_remote_account_statuses( """ # Group remote users by destination, so we only send one request per remote # homeserver. - by_destination: Dict[str, List[str]] = {} + by_destination: dict[str, list[str]] = {} for user in remote_users: if user.domain not in by_destination: by_destination[user.domain] = [] @@ -149,7 +149,7 @@ async def _get_remote_account_statuses( # Retrieve the statuses and failures for remote accounts. 
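The Transaction class in units.py above pairs the new annotations with an attrs converter so that a None payload normalises to an empty list and the attribute type can stay list[JsonDict] rather than an Optional. A self-contained sketch of that pattern (plain dicts stand in for JsonDict; attrs is already a Synapse dependency):

    from typing import Optional

    import attr

    def _none_to_list(edus: Optional[list[dict]]) -> list[dict]:
        return [] if edus is None else edus

    @attr.s(slots=True, auto_attribs=True)
    class Txn:
        pdus: list[dict] = attr.ib(factory=list, converter=_none_to_list)

    assert Txn(pdus=None).pdus == []            # None is normalised away
    assert Txn().pdus == []                     # factory default
    assert Txn(pdus=[{"t": 1}]).pdus == [{"t": 1}]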
final_statuses: JsonDict = {} - final_failures: List[str] = [] + final_failures: list[str] = [] for destination, users in by_destination.items(): statuses, failures = await self._federation_client.get_account_status( destination, diff --git a/synapse/handlers/account_data.py b/synapse/handlers/account_data.py index 228132db485..44926128597 100644 --- a/synapse/handlers/account_data.py +++ b/synapse/handlers/account_data.py @@ -21,7 +21,7 @@ # import logging import random -from typing import TYPE_CHECKING, Awaitable, Callable, List, Optional, Tuple +from typing import TYPE_CHECKING, Awaitable, Callable, Optional from synapse.api.constants import AccountDataTypes from synapse.replication.http.account_data import ( @@ -67,7 +67,7 @@ def __init__(self, hs: "HomeServer"): self._remove_tag_client = ReplicationRemoveTagRestServlet.make_client(hs) self._account_data_writers = hs.config.worker.writers.account_data - self._on_account_data_updated_callbacks: List[ + self._on_account_data_updated_callbacks: list[ ON_ACCOUNT_DATA_UPDATED_CALLBACK ] = [] @@ -325,7 +325,7 @@ async def get_new_events( room_ids: StrCollection, is_guest: bool, explicit_room_id: Optional[str] = None, - ) -> Tuple[List[JsonDict], int]: + ) -> tuple[list[JsonDict], int]: user_id = user.to_string() last_stream_id = from_key diff --git a/synapse/handlers/account_validity.py b/synapse/handlers/account_validity.py index eed50ef69a7..a805de1f355 100644 --- a/synapse/handlers/account_validity.py +++ b/synapse/handlers/account_validity.py @@ -21,7 +21,7 @@ import email.mime.multipart import email.utils import logging -from typing import TYPE_CHECKING, List, Optional, Tuple +from typing import TYPE_CHECKING, Optional from synapse.api.errors import AuthError, StoreError, SynapseError from synapse.metrics.background_process_metrics import wrap_as_background_process @@ -222,7 +222,7 @@ async def _send_renewal_email(self, user_id: str, expiration_ts: int) -> None: await self.store.set_renewal_mail_status(user_id=user_id, email_sent=True) - async def _get_email_addresses_for_user(self, user_id: str) -> List[str]: + async def _get_email_addresses_for_user(self, user_id: str) -> list[str]: """Retrieve the list of email addresses attached to a user's account. Args: @@ -263,7 +263,7 @@ async def _get_renewal_token(self, user_id: str) -> str: attempts += 1 raise StoreError(500, "Couldn't generate a unique string as refresh string.") - async def renew_account(self, renewal_token: str) -> Tuple[bool, bool, int]: + async def renew_account(self, renewal_token: str) -> tuple[bool, bool, int]: """Renews the account attached to a given renewal token by pushing back the expiration date by the current validity period in the server's configuration. diff --git a/synapse/handlers/admin.py b/synapse/handlers/admin.py index e90d675b59c..3faaa4d2b3d 100644 --- a/synapse/handlers/admin.py +++ b/synapse/handlers/admin.py @@ -24,13 +24,9 @@ from typing import ( TYPE_CHECKING, Any, - Dict, - List, Mapping, Optional, Sequence, - Set, - Tuple, ) import attr @@ -218,7 +214,7 @@ async def export_user_data(self, user_id: str, writer: "ExfiltrationWriter") -> to_key = RoomStreamToken(stream=stream_ordering) # Events that we've processed in this room - written_events: Set[str] = set() + written_events: set[str] = set() # We need to track gaps in the events stream so that we can then # write out the state at those events. 
We do this by keeping track @@ -231,7 +227,7 @@ async def export_user_data(self, user_id: str, writer: "ExfiltrationWriter") -> # The reverse mapping to above, i.e. map from unseen event to events # that have the unseen event in their prev_events, i.e. the unseen # events "children". - unseen_to_child_events: Dict[str, Set[str]] = {} + unseen_to_child_events: dict[str, set[str]] = {} # We fetch events in the room the user could see by fetching *all* # events that we have and then filtering, this isn't the most @@ -412,7 +408,7 @@ async def start_redact_events( async def _redact_all_events( self, task: ScheduledTask - ) -> Tuple[TaskStatus, Optional[Mapping[str, Any]], Optional[str]]: + ) -> tuple[TaskStatus, Optional[Mapping[str, Any]], Optional[str]]: """ Task to redact all of a users events in the given rooms, tracking which, if any, events whose redaction failed @@ -518,7 +514,7 @@ class ExfiltrationWriter(metaclass=abc.ABCMeta): """Interface used to specify how to write exported data.""" @abc.abstractmethod - def write_events(self, room_id: str, events: List[EventBase]) -> None: + def write_events(self, room_id: str, events: list[EventBase]) -> None: """Write a batch of events for a room.""" raise NotImplementedError() diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index 6536d9fe510..5240178d805 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -22,12 +22,9 @@ from typing import ( TYPE_CHECKING, Collection, - Dict, Iterable, - List, Mapping, Optional, - Tuple, Union, ) @@ -143,7 +140,7 @@ async def _notify_interested_services(self, max_token: RoomStreamToken) -> None: event_to_received_ts.keys(), get_prev_content=True ) - events_by_room: Dict[str, List[EventBase]] = {} + events_by_room: dict[str, list[EventBase]] = {} for event in events: events_by_room.setdefault(event.room_id, []).append(event) @@ -341,7 +338,7 @@ def notify_interested_services_ephemeral( @wrap_as_background_process("notify_interested_services_ephemeral") async def _notify_interested_services_ephemeral( self, - services: List[ApplicationService], + services: list[ApplicationService], stream_key: StreamKeyType, new_token: Union[int, MultiWriterStreamToken], users: Collection[Union[str, UserID]], @@ -429,7 +426,7 @@ async def _notify_interested_services_ephemeral( async def _handle_typing( self, service: ApplicationService, new_token: int - ) -> List[JsonMapping]: + ) -> list[JsonMapping]: """ Return the typing events since the given stream token that the given application service should receive. @@ -464,7 +461,7 @@ async def _handle_typing( async def _handle_receipts( self, service: ApplicationService, new_token: MultiWriterStreamToken - ) -> List[JsonMapping]: + ) -> list[JsonMapping]: """ Return the latest read receipts that the given application service should receive. @@ -503,7 +500,7 @@ async def _handle_presence( service: ApplicationService, users: Collection[Union[str, UserID]], new_token: Optional[int], - ) -> List[JsonMapping]: + ) -> list[JsonMapping]: """ Return the latest presence updates that the given application service should receive. @@ -523,7 +520,7 @@ async def _handle_presence( A list of json dictionaries containing data derived from the presence events that should be sent to the given application service. 
""" - events: List[JsonMapping] = [] + events: list[JsonMapping] = [] presence_source = self.event_sources.sources.presence from_key = await self.store.get_type_stream_id_for_appservice( service, "presence" @@ -563,7 +560,7 @@ async def _get_to_device_messages( service: ApplicationService, new_token: int, users: Collection[Union[str, UserID]], - ) -> List[JsonDict]: + ) -> list[JsonDict]: """ Given an application service, determine which events it should receive from those between the last-recorded to-device message stream token for this @@ -585,7 +582,7 @@ async def _get_to_device_messages( ) # Filter out users that this appservice is not interested in - users_appservice_is_interested_in: List[str] = [] + users_appservice_is_interested_in: list[str] = [] for user in users: # FIXME: We should do this farther up the call stack. We currently repeat # this operation in _handle_presence. @@ -612,7 +609,7 @@ async def _get_to_device_messages( # # So we mangle this dict into a flat list of to-device messages with the relevant # user ID and device ID embedded inside each message dict. - message_payload: List[JsonDict] = [] + message_payload: list[JsonDict] = [] for ( user_id, device_id, @@ -761,8 +758,8 @@ async def query_room_alias_exists( return None async def query_3pe( - self, kind: str, protocol: str, fields: Dict[bytes, List[bytes]] - ) -> List[JsonDict]: + self, kind: str, protocol: str, fields: dict[bytes, list[bytes]] + ) -> list[JsonDict]: services = self._get_services_for_3pn(protocol) results = await make_deferred_yieldable( @@ -786,9 +783,9 @@ async def query_3pe( async def get_3pe_protocols( self, only_protocol: Optional[str] = None - ) -> Dict[str, JsonDict]: + ) -> dict[str, JsonDict]: services = self.store.get_app_services() - protocols: Dict[str, List[JsonDict]] = {} + protocols: dict[str, list[JsonDict]] = {} # Collect up all the individual protocol responses out of the ASes for s in services: @@ -804,7 +801,7 @@ async def get_3pe_protocols( if info is not None: protocols[p].append(info) - def _merge_instances(infos: List[JsonDict]) -> JsonDict: + def _merge_instances(infos: list[JsonDict]) -> JsonDict: # Merge the 'instances' lists of multiple results, but just take # the other fields from the first as they ought to be identical # copy the result so as not to corrupt the cached one @@ -822,7 +819,7 @@ def _merge_instances(infos: List[JsonDict]) -> JsonDict: async def _get_services_for_event( self, event: EventBase - ) -> List[ApplicationService]: + ) -> list[ApplicationService]: """Retrieve a list of application services interested in this event. 
Args: @@ -842,11 +839,11 @@ async def _get_services_for_event( return interested_list - def _get_services_for_user(self, user_id: str) -> List[ApplicationService]: + def _get_services_for_user(self, user_id: str) -> list[ApplicationService]: services = self.store.get_app_services() return [s for s in services if (s.is_interested_in_user(user_id))] - def _get_services_for_3pn(self, protocol: str) -> List[ApplicationService]: + def _get_services_for_3pn(self, protocol: str) -> list[ApplicationService]: services = self.store.get_app_services() return [s for s in services if s.is_interested_in_protocol(protocol)] @@ -872,9 +869,9 @@ async def _check_user_exists(self, user_id: str) -> bool: return True async def claim_e2e_one_time_keys( - self, query: Iterable[Tuple[str, str, str, int]] - ) -> Tuple[ - Dict[str, Dict[str, Dict[str, JsonDict]]], List[Tuple[str, str, str, int]] + self, query: Iterable[tuple[str, str, str, int]] + ) -> tuple[ + dict[str, dict[str, dict[str, JsonDict]]], list[tuple[str, str, str, int]] ]: """Claim one time keys from application services. @@ -896,7 +893,7 @@ async def claim_e2e_one_time_keys( services = self.store.get_app_services() # Partition the users by appservice. - query_by_appservice: Dict[str, List[Tuple[str, str, str, int]]] = {} + query_by_appservice: dict[str, list[tuple[str, str, str, int]]] = {} missing = [] for user_id, device, algorithm, count in query: if not self.store.get_if_app_services_interested_in_user(user_id): @@ -929,7 +926,7 @@ async def claim_e2e_one_time_keys( # Patch together the results -- they are all independent (since they # require exclusive control over the users, which is the outermost key). - claimed_keys: Dict[str, Dict[str, Dict[str, JsonDict]]] = {} + claimed_keys: dict[str, dict[str, dict[str, JsonDict]]] = {} for success, result in results: if success: claimed_keys.update(result[0]) @@ -938,8 +935,8 @@ async def claim_e2e_one_time_keys( return claimed_keys, missing async def query_keys( - self, query: Mapping[str, Optional[List[str]]] - ) -> Dict[str, Dict[str, Dict[str, JsonDict]]]: + self, query: Mapping[str, Optional[list[str]]] + ) -> dict[str, dict[str, dict[str, JsonDict]]]: """Query application services for device keys. Users which are exclusively owned by an application service are queried @@ -954,7 +951,7 @@ async def query_keys( services = self.store.get_app_services() # Partition the users by appservice. - query_by_appservice: Dict[str, Dict[str, List[str]]] = {} + query_by_appservice: dict[str, dict[str, list[str]]] = {} for user_id, device_ids in query.items(): if not self.store.get_if_app_services_interested_in_user(user_id): continue @@ -986,7 +983,7 @@ async def query_keys( # Patch together the results -- they are all independent (since they # require exclusive control over the users). They get returned as a single # dictionary. - key_queries: Dict[str, Dict[str, Dict[str, JsonDict]]] = {} + key_queries: dict[str, dict[str, dict[str, JsonDict]]] = {} for success, result in results: if success: key_queries.update(result) diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 2d1990cce5b..622783d78ea 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -33,7 +33,6 @@ Callable, Dict, Iterable, - List, Mapping, Optional, Tuple, @@ -102,7 +101,7 @@ def convert_client_dict_legacy_fields_to_identifier( submission: JsonDict, -) -> Dict[str, str]: +) -> dict[str, str]: """ Convert a legacy-formatted login submission to an identifier dict. 
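The appservice one-time-key claim path above leans on an invariant that justifies its nested dict[str, dict[str, dict[str, JsonDict]]] type: each user is exclusively owned by at most one appservice, so the per-appservice results are disjoint and can be composed with plain dict.update(). A toy sketch of that merge step (shapes only; the real results come from gathered HTTP calls):

    claimed_keys: dict[str, dict[str, dict[str, dict]]] = {}
    missing: list[tuple[str, str, str, int]] = []

    results = [
        (True, ({"@a:hs": {"DEV": {"alg:1": {"key": "x"}}}}, [])),
        (False, None),  # a failed appservice request contributes nothing
    ]

    for success, result in results:
        if success and result is not None:
            claimed_keys.update(result[0])  # disjoint by user, so no clobbering
            missing.extend(result[1])

    assert "@a:hs" in claimed_keys and missing == []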
@@ -154,7 +153,7 @@ def convert_client_dict_legacy_fields_to_identifier( return identifier -def login_id_phone_to_thirdparty(identifier: JsonDict) -> Dict[str, str]: +def login_id_phone_to_thirdparty(identifier: JsonDict) -> dict[str, str]: """ Convert a phone login identifier type to a generic threepid identifier. @@ -205,7 +204,7 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.auth_blocking = hs.get_auth_blocking() self.clock = hs.get_clock() - self.checkers: Dict[str, UserInteractiveAuthChecker] = {} + self.checkers: dict[str, UserInteractiveAuthChecker] = {} for auth_checker_class in INTERACTIVE_AUTH_CHECKERS: inst = auth_checker_class(hs) if inst.is_enabled(): @@ -280,7 +279,7 @@ def __init__(self, hs: "HomeServer"): # A mapping of user ID to extra attributes to include in the login # response. - self._extra_attributes: Dict[str, SsoLoginExtraAttributes] = {} + self._extra_attributes: dict[str, SsoLoginExtraAttributes] = {} self._auth_delegation_enabled = ( hs.config.mas.enabled or hs.config.experimental.msc3861.enabled @@ -290,10 +289,10 @@ async def validate_user_via_ui_auth( self, requester: Requester, request: SynapseRequest, - request_body: Dict[str, Any], + request_body: dict[str, Any], description: str, can_skip_ui_auth: bool = False, - ) -> Tuple[dict, Optional[str]]: + ) -> tuple[dict, Optional[str]]: """ Checks that the user is who they claim to be, via a UI auth. @@ -440,12 +439,12 @@ def get_enabled_auth_types(self) -> Iterable[str]: async def check_ui_auth( self, - flows: List[List[str]], + flows: list[list[str]], request: SynapseRequest, - clientdict: Dict[str, Any], + clientdict: dict[str, Any], description: str, get_new_session_data: Optional[Callable[[], JsonDict]] = None, - ) -> Tuple[dict, dict, str]: + ) -> tuple[dict, dict, str]: """ Takes a dictionary sent by the client in the login / registration protocol and handles the User-Interactive Auth flow. 
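For readers skimming the signature churn here: the flows parameter typed list[list[str]] is a list of alternative stage sequences, and UI auth completes once every stage of some flow has been satisfied. A reduced sketch of that check (the real method also tracks sessions, params, and error dicts):

    def flow_satisfied(flows: list[list[str]], completed: set[str]) -> bool:
        # True if the completed stages cover every stage of at least one flow.
        return any(all(stage in completed for stage in flow) for flow in flows)

    flows = [["m.login.password"], ["m.login.email.identity", "m.login.terms"]]
    assert flow_satisfied(flows, {"m.login.password"})
    assert not flow_satisfied(flows, {"m.login.terms"})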
@@ -579,7 +578,7 @@ async def check_ui_auth( ) # check auth type currently being presented - errordict: Dict[str, Any] = {} + errordict: dict[str, Any] = {} if "type" in authdict: login_type: str = authdict["type"] try: @@ -617,7 +616,7 @@ async def check_ui_auth( raise InteractiveAuthIncompleteError(session.session_id, ret) async def add_oob_auth( - self, stagetype: str, authdict: Dict[str, Any], clientip: str + self, stagetype: str, authdict: dict[str, Any], clientip: str ) -> None: """ Adds the result of out-of-band authentication into an existing auth @@ -641,7 +640,7 @@ async def add_oob_auth( authdict["session"], stagetype, result ) - def get_session_id(self, clientdict: Dict[str, Any]) -> Optional[str]: + def get_session_id(self, clientdict: dict[str, Any]) -> Optional[str]: """ Gets the session ID for a client given the client dictionary @@ -702,8 +701,8 @@ async def _expire_old_sessions(self) -> None: await self.store.delete_old_ui_auth_sessions(expiration_time) async def _check_auth_dict( - self, authdict: Dict[str, Any], clientip: str - ) -> Union[Dict[str, Any], str]: + self, authdict: dict[str, Any], clientip: str + ) -> Union[dict[str, Any], str]: """Attempt to validate the auth dict provided by a client Args: @@ -750,9 +749,9 @@ def _get_params_terms(self) -> dict: def _auth_dict_for_flows( self, - flows: List[List[str]], + flows: list[list[str]], session_id: str, - ) -> Dict[str, Any]: + ) -> dict[str, Any]: public_flows = [] for f in flows: public_flows.append(f) @@ -762,7 +761,7 @@ def _auth_dict_for_flows( LoginType.TERMS: self._get_params_terms, } - params: Dict[str, Any] = {} + params: dict[str, Any] = {} for f in public_flows: for stage in f: @@ -780,7 +779,7 @@ async def refresh_token( refresh_token: str, access_token_valid_until_ms: Optional[int], refresh_token_valid_until_ms: Optional[int], - ) -> Tuple[str, str, Optional[int]]: + ) -> tuple[str, str, Optional[int]]: """ Consumes a refresh token and generate both a new access token and a new refresh token from it. @@ -934,7 +933,7 @@ async def create_refresh_token_for_user_id( device_id: str, expiry_ts: Optional[int], ultimate_session_expiry_ts: Optional[int], - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """ Creates a new refresh token for the user with the given user ID. @@ -1067,7 +1066,7 @@ async def is_user_approved(self, user_id: str) -> bool: async def _find_user_id_and_pwd_hash( self, user_id: str - ) -> Optional[Tuple[str, str]]: + ) -> Optional[tuple[str, str]]: """Checks to see if a user with the given id exists. Will check case insensitively, but will return None if there are multiple inexact matches. 
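The refresh_token signature above returns tuple[str, str, Optional[int]]: a new access token, a new refresh token, and an optional expiry timestamp. A toy rotation showing just that shape (token generation and validation here are illustrative, not Synapse's):

    import secrets
    from typing import Optional

    def rotate(
        refresh_token: str, valid_until_ms: Optional[int]
    ) -> tuple[str, str, Optional[int]]:
        # A real implementation would validate the old token and revoke it.
        assert refresh_token
        return secrets.token_urlsafe(16), secrets.token_urlsafe(16), valid_until_ms

    access, refresh, expiry = rotate("syr_old_token", None)
    assert access != refresh and expiry is None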
@@ -1142,10 +1141,10 @@ def get_supported_login_types(self) -> Iterable[str]: async def validate_login( self, - login_submission: Dict[str, Any], + login_submission: dict[str, Any], ratelimit: bool = False, is_reauth: bool = False, - ) -> Tuple[str, Optional[Callable[["LoginResponse"], Awaitable[None]]]]: + ) -> tuple[str, Optional[Callable[["LoginResponse"], Awaitable[None]]]]: """Authenticates the user for the /login API Also used by the user-interactive auth flow to validate auth types which don't @@ -1300,8 +1299,8 @@ async def validate_login( async def _validate_userid_login( self, username: str, - login_submission: Dict[str, Any], - ) -> Tuple[str, Optional[Callable[["LoginResponse"], Awaitable[None]]]]: + login_submission: dict[str, Any], + ) -> tuple[str, Optional[Callable[["LoginResponse"], Awaitable[None]]]]: """Helper for validate_login Handles login, once we've mapped 3pids onto userids @@ -1390,7 +1389,7 @@ async def _validate_userid_login( async def check_password_provider_3pid( self, medium: str, address: str, password: str - ) -> Tuple[Optional[str], Optional[Callable[["LoginResponse"], Awaitable[None]]]]: + ) -> tuple[Optional[str], Optional[Callable[["LoginResponse"], Awaitable[None]]]]: """Check if a password provider is able to validate a thirdparty login Args: @@ -1891,7 +1890,7 @@ async def _sso_login_callback(self, login_result: "LoginResponse") -> None: extra_attributes = self._extra_attributes.get(login_result["user_id"]) if extra_attributes: - login_result_dict = cast(Dict[str, Any], login_result) + login_result_dict = cast(dict[str, Any], login_result) login_result_dict.update(extra_attributes.extra_attributes) def _expire_sso_extra_attributes(self) -> None: @@ -1952,7 +1951,7 @@ def async_wrapper(f: Optional[Callable]) -> Optional[Callable[..., Awaitable]]: async def wrapped_check_password( username: str, login_type: str, login_dict: JsonDict - ) -> Optional[Tuple[str, Optional[Callable]]]: + ) -> Optional[tuple[str, Optional[Callable]]]: # We've already made sure f is not None above, but mypy doesn't do well # across function boundaries so we need to tell it f is definitely not # None. @@ -1971,12 +1970,12 @@ async def wrapped_check_password( return wrapped_check_password # We need to wrap check_auth as in the old form it could return - # just a str, but now it must return Optional[Tuple[str, Optional[Callable]] + # just a str, but now it must return Optional[tuple[str, Optional[Callable]] if f.__name__ == "check_auth": async def wrapped_check_auth( username: str, login_type: str, login_dict: JsonDict - ) -> Optional[Tuple[str, Optional[Callable]]]: + ) -> Optional[tuple[str, Optional[Callable]]]: # We've already made sure f is not None above, but mypy doesn't do well # across function boundaries so we need to tell it f is definitely not # None. @@ -1992,12 +1991,12 @@ async def wrapped_check_auth( return wrapped_check_auth # We need to wrap check_3pid_auth as in the old form it could return - # just a str, but now it must return Optional[Tuple[str, Optional[Callable]] + # just a str, but now it must return Optional[tuple[str, Optional[Callable]] if f.__name__ == "check_3pid_auth": async def wrapped_check_3pid_auth( medium: str, address: str, password: str - ) -> Optional[Tuple[str, Optional[Callable]]]: + ) -> Optional[tuple[str, Optional[Callable]]]: # We've already made sure f is not None above, but mypy doesn't do well # across function boundaries so we need to tell it f is definitely not # None. 
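The wrapper shims above exist because legacy password providers could return a bare user_id string, whereas the modern contract is Optional[tuple[str, Optional[Callable]]]. (Incidentally, the touched comments spell that type with an unbalanced bracket, "Optional[tuple[str, Optional[Callable]]"; the missing "]" predates this patch.) The normalisation the shims perform boils down to the following simplified sketch; the real shims also close over the module callback:

    from typing import Awaitable, Callable, Optional

    ModernResult = Optional[tuple[str, Optional[Callable[..., Awaitable[None]]]]]

    def normalise(legacy_result: Optional[str]) -> ModernResult:
        # A bare user_id becomes (user_id, no-callback); None stays None.
        return (legacy_result, None) if legacy_result is not None else None

    assert normalise("@alice:example.org") == ("@alice:example.org", None)
    assert normalise(None) is None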
@@ -2065,14 +2064,14 @@ def run(*args: Tuple, **kwargs: Dict) -> Awaitable: CHECK_3PID_AUTH_CALLBACK = Callable[ [str, str, str], Awaitable[ - Optional[Tuple[str, Optional[Callable[["LoginResponse"], Awaitable[None]]]]] + Optional[tuple[str, Optional[Callable[["LoginResponse"], Awaitable[None]]]]] ], ] ON_LOGGED_OUT_CALLBACK = Callable[[str, Optional[str], str], Awaitable] CHECK_AUTH_CALLBACK = Callable[ [str, str, JsonDict], Awaitable[ - Optional[Tuple[str, Optional[Callable[["LoginResponse"], Awaitable[None]]]]] + Optional[tuple[str, Optional[Callable[["LoginResponse"], Awaitable[None]]]]] ], ] GET_USERNAME_FOR_REGISTRATION_CALLBACK = Callable[ @@ -2094,21 +2093,21 @@ class PasswordAuthProvider: def __init__(self) -> None: # lists of callbacks - self.check_3pid_auth_callbacks: List[CHECK_3PID_AUTH_CALLBACK] = [] - self.on_logged_out_callbacks: List[ON_LOGGED_OUT_CALLBACK] = [] - self.get_username_for_registration_callbacks: List[ + self.check_3pid_auth_callbacks: list[CHECK_3PID_AUTH_CALLBACK] = [] + self.on_logged_out_callbacks: list[ON_LOGGED_OUT_CALLBACK] = [] + self.get_username_for_registration_callbacks: list[ GET_USERNAME_FOR_REGISTRATION_CALLBACK ] = [] - self.get_displayname_for_registration_callbacks: List[ + self.get_displayname_for_registration_callbacks: list[ GET_DISPLAYNAME_FOR_REGISTRATION_CALLBACK ] = [] - self.is_3pid_allowed_callbacks: List[IS_3PID_ALLOWED_CALLBACK] = [] + self.is_3pid_allowed_callbacks: list[IS_3PID_ALLOWED_CALLBACK] = [] # Mapping from login type to login parameters - self._supported_login_types: Dict[str, Tuple[str, ...]] = {} + self._supported_login_types: dict[str, tuple[str, ...]] = {} # Mapping from login type to auth checker callbacks - self.auth_checker_callbacks: Dict[str, List[CHECK_AUTH_CALLBACK]] = {} + self.auth_checker_callbacks: dict[str, list[CHECK_AUTH_CALLBACK]] = {} def register_password_auth_provider_callbacks( self, @@ -2116,7 +2115,7 @@ def register_password_auth_provider_callbacks( on_logged_out: Optional[ON_LOGGED_OUT_CALLBACK] = None, is_3pid_allowed: Optional[IS_3PID_ALLOWED_CALLBACK] = None, auth_checkers: Optional[ - Dict[Tuple[str, Tuple[str, ...]], CHECK_AUTH_CALLBACK] + dict[tuple[str, tuple[str, ...]], CHECK_AUTH_CALLBACK] ] = None, get_username_for_registration: Optional[ GET_USERNAME_FOR_REGISTRATION_CALLBACK @@ -2193,7 +2192,7 @@ def get_supported_login_types(self) -> Mapping[str, Iterable[str]]: async def check_auth( self, username: str, login_type: str, login_dict: JsonDict - ) -> Optional[Tuple[str, Optional[Callable[["LoginResponse"], Awaitable[None]]]]]: + ) -> Optional[tuple[str, Optional[Callable[["LoginResponse"], Awaitable[None]]]]]: """Check if the user has presented valid login credentials Args: @@ -2231,7 +2230,7 @@ async def check_auth( if not isinstance(result, tuple) or len(result) != 2: logger.warning( # type: ignore[unreachable] "Wrong type returned by module API callback %s: %s, expected" - " Optional[Tuple[str, Optional[Callable]]]", + " Optional[tuple[str, Optional[Callable]]]", callback, result, ) @@ -2244,7 +2243,7 @@ async def check_auth( if not isinstance(str_result, str): logger.warning( # type: ignore[unreachable] "Wrong type returned by module API callback %s: %s, expected" - " Optional[Tuple[str, Optional[Callable]]]", + " Optional[tuple[str, Optional[Callable]]]", callback, result, ) @@ -2255,7 +2254,7 @@ async def check_auth( if not callable(callback_result): logger.warning( # type: ignore[unreachable] "Wrong type returned by module API callback %s: %s, expected" - " Optional[Tuple[str, 
Optional[Callable]]]", + " Optional[tuple[str, Optional[Callable]]]", callback, result, ) @@ -2270,7 +2269,7 @@ async def check_auth( async def check_3pid_auth( self, medium: str, address: str, password: str - ) -> Optional[Tuple[str, Optional[Callable[["LoginResponse"], Awaitable[None]]]]]: + ) -> Optional[tuple[str, Optional[Callable[["LoginResponse"], Awaitable[None]]]]]: # This function is able to return a deferred that either # resolves None, meaning authentication failure, or upon # success, to a str (which is the user_id) or a tuple of @@ -2294,7 +2293,7 @@ async def check_3pid_auth( if not isinstance(result, tuple) or len(result) != 2: logger.warning( # type: ignore[unreachable] "Wrong type returned by module API callback %s: %s, expected" - " Optional[Tuple[str, Optional[Callable]]]", + " Optional[tuple[str, Optional[Callable]]]", callback, result, ) @@ -2307,7 +2306,7 @@ async def check_3pid_auth( if not isinstance(str_result, str): logger.warning( # type: ignore[unreachable] "Wrong type returned by module API callback %s: %s, expected" - " Optional[Tuple[str, Optional[Callable]]]", + " Optional[tuple[str, Optional[Callable]]]", callback, result, ) @@ -2318,7 +2317,7 @@ async def check_3pid_auth( if not callable(callback_result): logger.warning( # type: ignore[unreachable] "Wrong type returned by module API callback %s: %s, expected" - " Optional[Tuple[str, Optional[Callable]]]", + " Optional[tuple[str, Optional[Callable]]]", callback, result, ) diff --git a/synapse/handlers/cas.py b/synapse/handlers/cas.py index fbe79c2e4c3..438dcf9f2c7 100644 --- a/synapse/handlers/cas.py +++ b/synapse/handlers/cas.py @@ -20,7 +20,7 @@ # import logging import urllib.parse -from typing import TYPE_CHECKING, Dict, List, Optional +from typing import TYPE_CHECKING, Optional from xml.etree import ElementTree as ET import attr @@ -54,7 +54,7 @@ def __str__(self) -> str: @attr.s(slots=True, frozen=True, auto_attribs=True) class CasResponse: username: str - attributes: Dict[str, List[Optional[str]]] + attributes: dict[str, list[Optional[str]]] class CasHandler: @@ -99,7 +99,7 @@ def __init__(self, hs: "HomeServer"): self._sso_handler.register_identity_provider(self) - def _build_service_param(self, args: Dict[str, str]) -> str: + def _build_service_param(self, args: dict[str, str]) -> str: """ Generates a value to use as the "service" parameter when redirecting or querying the CAS service. @@ -116,7 +116,7 @@ def _build_service_param(self, args: Dict[str, str]) -> str: ) async def _validate_ticket( - self, ticket: str, service_args: Dict[str, str] + self, ticket: str, service_args: dict[str, str] ) -> CasResponse: """ Validate a CAS ticket with the server, and return the parsed the response. @@ -186,7 +186,7 @@ def _parse_cas_response(self, cas_response_body: bytes) -> CasResponse: # Iterate through the nodes and pull out the user and any extra attributes. 
user = None - attributes: Dict[str, List[Optional[str]]] = {} + attributes: dict[str, list[Optional[str]]] = {} for child in root[0]: if child.tag.endswith("user"): user = child.text diff --git a/synapse/handlers/delayed_events.py b/synapse/handlers/delayed_events.py index 79dd3e84165..b89b7416e63 100644 --- a/synapse/handlers/delayed_events.py +++ b/synapse/handlers/delayed_events.py @@ -13,7 +13,7 @@ # import logging -from typing import TYPE_CHECKING, List, Optional, Set, Tuple +from typing import TYPE_CHECKING, Optional from twisted.internet.interfaces import IDelayedCall @@ -226,7 +226,7 @@ async def _unsafe_process_new_event(self) -> None: await self._store.update_delayed_events_stream_pos(max_pos) - async def _handle_state_deltas(self, deltas: List[StateDelta]) -> None: + async def _handle_state_deltas(self, deltas: list[StateDelta]) -> None: """ Process current state deltas to cancel other users' pending delayed events that target the same state. @@ -502,8 +502,8 @@ async def _send_on_timeout(self) -> None: await self._send_events(events) - async def _send_events(self, events: List[DelayedEventDetails]) -> None: - sent_state: Set[Tuple[RoomID, EventType, StateKey]] = set() + async def _send_events(self, events: list[DelayedEventDetails]) -> None: + sent_state: set[tuple[RoomID, EventType, StateKey]] = set() for event in events: if event.state_key is not None: state_info = (event.room_id, event.type, event.state_key) @@ -547,7 +547,7 @@ def _schedule_next_at(self, next_send_ts: Timestamp) -> None: else: self._next_delayed_event_call.reset(delay_sec) - async def get_all_for_user(self, requester: Requester) -> List[JsonDict]: + async def get_all_for_user(self, requester: Requester) -> list[JsonDict]: """Return all pending delayed events requested by the given user.""" await self._delayed_event_mgmt_ratelimiter.ratelimit( requester, diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index c6024597b74..f0558fc7375 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -25,13 +25,9 @@ from typing import ( TYPE_CHECKING, AbstractSet, - Dict, Iterable, - List, Mapping, Optional, - Set, - Tuple, cast, ) @@ -407,7 +403,7 @@ async def update_device(self, user_id: str, device_id: str, content: dict) -> No raise @trace - async def get_devices_by_user(self, user_id: str) -> List[JsonDict]: + async def get_devices_by_user(self, user_id: str) -> list[JsonDict]: """ Retrieve the given user's devices @@ -431,7 +427,7 @@ async def get_devices_by_user(self, user_id: str) -> List[JsonDict]: async def get_dehydrated_device( self, user_id: str - ) -> Optional[Tuple[str, JsonDict]]: + ) -> Optional[tuple[str, JsonDict]]: """Retrieve the information for a dehydrated device. Args: @@ -568,7 +564,7 @@ async def get_device_changes_in_shared_rooms( room_ids: StrCollection, from_token: StreamToken, now_token: Optional[StreamToken] = None, - ) -> Set[str]: + ) -> set[str]: """Get the set of users whose devices have changed who share a room with the given user. """ @@ -644,8 +640,8 @@ async def get_user_ids_changed( # Check for newly joined or left rooms. We need to make sure that we add # to newly joined in the case membership goes from join -> leave -> join # again. - newly_joined_rooms: Set[str] = set() - newly_left_rooms: Set[str] = set() + newly_joined_rooms: set[str] = set() + newly_left_rooms: set[str] = set() for change in membership_changes: # We check for changes in "joinedness", i.e. if the membership has # changed to or from JOIN. 
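# --- Editor's aside (not part of the patch) ---------------------------------
# The bare-builtin annotations above are real runtime objects under PEP 585:
# subscripting a builtin yields types.GenericAlias, which records the origin
# and arguments but cannot be used with isinstance().
import types

alias = set[str]
assert isinstance(alias, types.GenericAlias)
assert alias.__origin__ is set and alias.__args__ == (str,)
try:
    isinstance({"!room:example.org"}, set[str])
except TypeError:
    pass  # parameterized generics are rejected by isinstance()
# --- end aside --------------------------------------------------------------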
@@ -661,10 +657,10 @@ async def get_user_ids_changed( # the user is currently in. # List of membership changes per room - room_to_deltas: Dict[str, List[StateDelta]] = {} + room_to_deltas: dict[str, list[StateDelta]] = {} # The set of event IDs of membership events (so we can fetch their # associated membership). - memberships_to_fetch: Set[str] = set() + memberships_to_fetch: set[str] = set() # TODO: Only pull out membership events? state_changes = await self.store.get_current_state_deltas_for_rooms( @@ -695,8 +691,8 @@ async def get_user_ids_changed( # We now want to find any user that have newly joined/invited/knocked, # or newly left, similarly to above. - newly_joined_or_invited_or_knocked_users: Set[str] = set() - newly_left_users: Set[str] = set() + newly_joined_or_invited_or_knocked_users: set[str] = set() + newly_left_users: set[str] = set() for _, deltas in room_to_deltas.items(): for delta in deltas: # Get the prev/new memberships for the delta @@ -838,7 +834,7 @@ async def on_federation_query_user_devices(self, user_id: str) -> JsonDict: # Check if the application services have any results. if self._query_appservices_for_keys: # Query the appservice for all devices for this user. - query: Dict[str, Optional[List[str]]] = {user_id: None} + query: dict[str, Optional[list[str]]] = {user_id: None} # Query the appservices for any keys. appservice_results = await self._appservice_handler.query_keys(query) @@ -898,7 +894,7 @@ async def notify_device_update( async def notify_user_signature_update( self, from_user_id: str, - user_ids: List[str], + user_ids: list[str], ) -> None: """Notify a device writer that a user have made new signatures of other users. @@ -927,7 +923,7 @@ async def handle_new_device_update(self) -> None: async def _delete_device_messages( self, task: ScheduledTask, - ) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: + ) -> tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: """Scheduler task to delete device messages in batch of `DEVICE_MSGS_DELETE_BATCH_LIMIT`.""" assert task.params is not None user_id = task.params["user_id"] @@ -1051,7 +1047,7 @@ async def notify_device_update( await self.handle_new_device_update() async def notify_user_signature_update( - self, from_user_id: str, user_ids: List[str] + self, from_user_id: str, user_ids: list[str] ) -> None: """Notify a user that they have made new signatures of other users. @@ -1112,7 +1108,7 @@ async def _handle_new_device_update_async(self) -> None: # hosts we've already poked about for this update. This is so that we # don't poke the same remote server about the same update repeatedly. current_stream_id = None - hosts_already_sent_to: Set[str] = set() + hosts_already_sent_to: set[str] = set() try: stream_id, room_id = await self.store.get_device_change_last_converted_pos() @@ -1311,7 +1307,7 @@ async def handle_room_un_partial_stated(self, room_id: str) -> None: def _update_device_from_client_ips( - device: JsonDict, client_ips: Mapping[Tuple[str, str], DeviceLastConnectionInfo] + device: JsonDict, client_ips: Mapping[tuple[str, str], DeviceLastConnectionInfo] ) -> None: ip = client_ips.get((device["user_id"], device["device_id"])) device.update( @@ -1338,8 +1334,8 @@ def __init__(self, hs: "HomeServer"): async def multi_user_device_resync( self, - user_ids: List[str], - ) -> Dict[str, Optional[JsonMapping]]: + user_ids: list[str], + ) -> dict[str, Optional[JsonMapping]]: """ Like `user_device_resync` but operates on multiple users **from the same origin** at once. 
@@ -1365,7 +1361,7 @@ async def process_cross_signing_key_update( user_id: str, master_key: Optional[JsonDict], self_signing_key: Optional[JsonDict], - ) -> List[str]: + ) -> list[str]: """Process the given new master and self-signing key for the given remote user. Args: @@ -1455,14 +1451,14 @@ def __init__(self, hs: "HomeServer", device_handler: DeviceWriterHandler): ) # user_id -> list of updates waiting to be handled. - self._pending_updates: Dict[ - str, List[Tuple[str, str, Iterable[str], JsonDict]] + self._pending_updates: dict[ + str, list[tuple[str, str, Iterable[str], JsonDict]] ] = {} # Recently seen stream ids. We don't bother keeping these in the DB, # but they're useful to have them about to reduce the number of spurious # resyncs. - self._seen_updates: ExpiringCache[str, Set[str]] = ExpiringCache( + self._seen_updates: ExpiringCache[str, set[str]] = ExpiringCache( cache_name="device_update_edu", server_name=self.server_name, hs=self.hs, @@ -1619,12 +1615,12 @@ async def _handle_device_updates(self, user_id: str) -> None: ) async def _need_to_do_resync( - self, user_id: str, updates: Iterable[Tuple[str, str, Iterable[str], JsonDict]] + self, user_id: str, updates: Iterable[tuple[str, str, Iterable[str], JsonDict]] ) -> bool: """Given a list of updates for a user figure out if we need to do a full resync, or whether we have enough data that we can just apply the delta. """ - seen_updates: Set[str] = self._seen_updates.get(user_id, set()) + seen_updates: set[str] = self._seen_updates.get(user_id, set()) extremity = await self.store.get_device_list_last_stream_id_for_remote(user_id) @@ -1702,8 +1698,8 @@ async def _maybe_retry_device_resync(self) -> None: self._resync_retry_lock.release() async def multi_user_device_resync( - self, user_ids: List[str], mark_failed_as_stale: bool = True - ) -> Dict[str, Optional[JsonMapping]]: + self, user_ids: list[str], mark_failed_as_stale: bool = True + ) -> dict[str, Optional[JsonMapping]]: """ Like `user_device_resync` but operates on multiple users **from the same origin** at once. @@ -1739,7 +1735,7 @@ async def multi_user_device_resync( async def _user_device_resync_returning_failed( self, user_id: str - ) -> Tuple[Optional[JsonMapping], bool]: + ) -> tuple[Optional[JsonMapping], bool]: """Fetches all devices for a user and updates the device cache with them. Args: diff --git a/synapse/handlers/devicemessage.py b/synapse/handlers/devicemessage.py index 860e24d79d3..4dcdcc42fe6 100644 --- a/synapse/handlers/devicemessage.py +++ b/synapse/handlers/devicemessage.py @@ -21,7 +21,7 @@ import logging from http import HTTPStatus -from typing import TYPE_CHECKING, Any, Dict, Optional +from typing import TYPE_CHECKING, Any, Optional from synapse.api.constants import EduTypes, EventContentFields, ToDeviceEventTypes from synapse.api.errors import Codes, SynapseError @@ -158,7 +158,7 @@ async def _check_for_unknown_devices( self, message_type: str, sender_user_id: str, - by_device: Dict[str, Dict[str, Any]], + by_device: dict[str, dict[str, Any]], ) -> None: """Checks inbound device messages for unknown remote devices, and if found marks the remote cache for the user as stale. @@ -207,7 +207,7 @@ async def send_device_message( self, requester: Requester, message_type: str, - messages: Dict[str, Dict[str, JsonDict]], + messages: dict[str, dict[str, JsonDict]], ) -> None: """ Handle a request from a user to send to-device message(s). 
@@ -222,7 +222,7 @@ async def send_device_message( set_tag(SynapseTags.TO_DEVICE_TYPE, message_type) set_tag(SynapseTags.TO_DEVICE_SENDER, sender_user_id) local_messages = {} - remote_messages: Dict[str, Dict[str, Dict[str, JsonDict]]] = {} + remote_messages: dict[str, dict[str, dict[str, JsonDict]]] = {} for user_id, by_device in messages.items(): if not UserID.is_valid(user_id): logger.warning( diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py index 11284ccd0bc..865c32d19eb 100644 --- a/synapse/handlers/directory.py +++ b/synapse/handlers/directory.py @@ -21,7 +21,7 @@ import logging import string -from typing import TYPE_CHECKING, Iterable, List, Literal, Optional, Sequence +from typing import TYPE_CHECKING, Iterable, Literal, Optional, Sequence from synapse.api.constants import MAX_ALIAS_LENGTH, EventTypes from synapse.api.errors import ( @@ -108,7 +108,7 @@ async def create_association( requester: Requester, room_alias: RoomAlias, room_id: str, - servers: Optional[List[str]] = None, + servers: Optional[list[str]] = None, check_membership: bool = True, ) -> None: """Attempt to create a new alias diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py index b10472f1d24..85a150b71aa 100644 --- a/synapse/handlers/e2e_keys.py +++ b/synapse/handlers/e2e_keys.py @@ -20,7 +20,7 @@ # # import logging -from typing import TYPE_CHECKING, Dict, Iterable, List, Mapping, Optional, Tuple +from typing import TYPE_CHECKING, Iterable, Mapping, Optional import attr from canonicaljson import encode_canonical_json @@ -162,8 +162,8 @@ async def query_devices( async with self._query_devices_linearizer.queue((from_user_id, from_device_id)): async def filter_device_key_query( - query: Dict[str, List[str]], - ) -> Dict[str, List[str]]: + query: dict[str, list[str]], + ) -> dict[str, list[str]]: if not self.config.experimental.msc4263_limit_key_queries_to_users_who_share_rooms: # Only ignore invalid user IDs, which is the same behaviour as if # the user existed but had no keys. @@ -188,7 +188,7 @@ async def filter_device_key_query( if user_id in allowed_user_ids } - device_keys_query: Dict[str, List[str]] = await filter_device_key_query( + device_keys_query: dict[str, list[str]] = await filter_device_key_query( query_body.get("device_keys", {}) ) @@ -209,7 +209,7 @@ async def filter_device_key_query( # First get local devices. # A map of destination -> failure response. - failures: Dict[str, JsonDict] = {} + failures: dict[str, JsonDict] = {} results = {} if local_query: local_result = await self.query_local_devices(local_query) @@ -224,10 +224,10 @@ async def filter_device_key_query( # Now attempt to get any remote devices from our local cache. # A map of destination -> user ID -> device IDs. 
- remote_queries_not_in_cache: Dict[str, Dict[str, Iterable[str]]] = {} + remote_queries_not_in_cache: dict[str, dict[str, Iterable[str]]] = {} if remote_queries: user_ids = set() - user_and_device_ids: List[Tuple[str, str]] = [] + user_and_device_ids: list[tuple[str, str]] = [] for user_id, device_ids in remote_queries.items(): if device_ids: user_and_device_ids.extend( @@ -355,9 +355,9 @@ async def _query_devices_for_destination( self, results: JsonDict, cross_signing_keys: JsonDict, - failures: Dict[str, JsonDict], + failures: dict[str, JsonDict], destination: str, - destination_query: Dict[str, Iterable[str]], + destination_query: dict[str, Iterable[str]], timeout: int, ) -> None: """This is called when we are querying the device list of a user on @@ -480,7 +480,7 @@ async def _query_devices_for_destination( @cancellable async def get_cross_signing_keys_from_cache( self, query: Iterable[str], from_user_id: Optional[str] - ) -> Dict[str, Dict[str, JsonMapping]]: + ) -> dict[str, dict[str, JsonMapping]]: """Get cross-signing keys for users from the database Args: @@ -527,9 +527,9 @@ async def get_cross_signing_keys_from_cache( @cancellable async def query_local_devices( self, - query: Mapping[str, Optional[List[str]]], + query: Mapping[str, Optional[list[str]]], include_displaynames: bool = True, - ) -> Dict[str, Dict[str, dict]]: + ) -> dict[str, dict[str, dict]]: """Get E2E device keys for local users Args: @@ -542,9 +542,9 @@ async def query_local_devices( A map from user_id -> device_id -> device details """ set_tag("local_query", str(query)) - local_query: List[Tuple[str, Optional[str]]] = [] + local_query: list[tuple[str, Optional[str]]] = [] - result_dict: Dict[str, Dict[str, dict]] = {} + result_dict: dict[str, dict[str, dict]] = {} for user_id, device_ids in query.items(): # we use UserID.from_string to catch invalid user ids if not self.is_mine(UserID.from_string(user_id)): @@ -594,7 +594,7 @@ async def query_local_devices( return result_dict async def on_federation_query_client_keys( - self, query_body: Dict[str, Dict[str, Optional[List[str]]]] + self, query_body: dict[str, dict[str, Optional[list[str]]]] ) -> JsonDict: """Handle a device key query from a federated server: @@ -614,7 +614,7 @@ async def on_federation_query_client_keys( - self_signing_key: An optional dictionary of user ID -> self-signing key info. """ - device_keys_query: Dict[str, Optional[List[str]]] = query_body.get( + device_keys_query: dict[str, Optional[list[str]]] = query_body.get( "device_keys", {} ) if any( @@ -639,9 +639,9 @@ async def on_federation_query_client_keys( async def claim_local_one_time_keys( self, - local_query: List[Tuple[str, str, str, int]], + local_query: list[tuple[str, str, str, int]], always_include_fallback_keys: bool, - ) -> Iterable[Dict[str, Dict[str, Dict[str, JsonDict]]]]: + ) -> Iterable[dict[str, dict[str, dict[str, JsonDict]]]]: """Claim one time keys for local users. 1. Attempt to claim OTKs from the database. @@ -735,7 +735,7 @@ async def claim_local_one_time_keys( @trace async def claim_one_time_keys( self, - query: Dict[str, Dict[str, Dict[str, int]]], + query: dict[str, dict[str, dict[str, int]]], user: UserID, timeout: Optional[int], always_include_fallback_keys: bool, @@ -754,8 +754,8 @@ async def claim_one_time_keys( one_time_keys: chain of maps user ID -> device ID -> key ID -> key. failures: map from remote destination to a JsonDict describing the error. 
""" - local_query: List[Tuple[str, str, str, int]] = [] - remote_queries: Dict[str, Dict[str, Dict[str, Dict[str, int]]]] = {} + local_query: list[tuple[str, str, str, int]] = [] + remote_queries: dict[str, dict[str, dict[str, dict[str, int]]]] = {} for user_id, one_time_keys in query.items(): # we use UserID.from_string to catch invalid user ids @@ -775,7 +775,7 @@ async def claim_one_time_keys( ) # A map of user ID -> device ID -> key ID -> key. - json_result: Dict[str, Dict[str, Dict[str, JsonDict]]] = {} + json_result: dict[str, dict[str, dict[str, JsonDict]]] = {} for result in results: for user_id, device_keys in result.items(): for device_id, keys in device_keys.items(): @@ -785,7 +785,7 @@ async def claim_one_time_keys( ).update({key_id: key}) # Remote failures. - failures: Dict[str, JsonDict] = {} + failures: dict[str, JsonDict] = {} @trace async def claim_client_keys(destination: str) -> None: @@ -1131,7 +1131,7 @@ async def upload_signatures_for_device_keys( async def _process_self_signatures( self, user_id: str, signatures: JsonDict - ) -> Tuple[List["SignatureListItem"], Dict[str, Dict[str, dict]]]: + ) -> tuple[list["SignatureListItem"], dict[str, dict[str, dict]]]: """Process uploaded signatures of the user's own keys. Signatures of the user's own keys from this API come in two forms: @@ -1149,8 +1149,8 @@ async def _process_self_signatures( Raises: SynapseError: if the input is malformed """ - signature_list: List["SignatureListItem"] = [] - failures: Dict[str, Dict[str, JsonDict]] = {} + signature_list: list["SignatureListItem"] = [] + failures: dict[str, dict[str, JsonDict]] = {} if not signatures: return signature_list, failures @@ -1250,8 +1250,8 @@ def _check_master_key_signature( master_key_id: str, signed_master_key: JsonDict, stored_master_key: JsonMapping, - devices: Dict[str, Dict[str, JsonDict]], - ) -> List["SignatureListItem"]: + devices: dict[str, dict[str, JsonDict]], + ) -> list["SignatureListItem"]: """Check signatures of a user's master key made by their devices. Args: @@ -1294,8 +1294,8 @@ def _check_master_key_signature( return master_key_signature_list async def _process_other_signatures( - self, user_id: str, signatures: Dict[str, dict] - ) -> Tuple[List["SignatureListItem"], Dict[str, Dict[str, dict]]]: + self, user_id: str, signatures: dict[str, dict] + ) -> tuple[list["SignatureListItem"], dict[str, dict[str, dict]]]: """Process uploaded signatures of other users' keys. These will be the target user's master keys, signed by the uploading user's user-signing key. @@ -1311,8 +1311,8 @@ async def _process_other_signatures( Raises: SynapseError: if the input is malformed """ - signature_list: List["SignatureListItem"] = [] - failures: Dict[str, Dict[str, JsonDict]] = {} + signature_list: list["SignatureListItem"] = [] + failures: dict[str, dict[str, JsonDict]] = {} if not signatures: return signature_list, failures @@ -1396,7 +1396,7 @@ async def _process_other_signatures( async def _get_e2e_cross_signing_verify_key( self, user_id: str, key_type: str, from_user_id: Optional[str] = None - ) -> Tuple[JsonMapping, str, VerifyKey]: + ) -> tuple[JsonMapping, str, VerifyKey]: """Fetch locally or remotely query for a cross-signing public key. First, attempt to fetch the cross-signing public key from storage. 
@@ -1451,7 +1451,7 @@ async def _retrieve_cross_signing_keys_for_remote_user( self, user: UserID, desired_key_type: str, - ) -> Optional[Tuple[JsonMapping, str, VerifyKey]]: + ) -> Optional[tuple[JsonMapping, str, VerifyKey]]: """Queries cross-signing keys for a remote user and saves them to the database Only the key specified by `key_type` will be returned, while all retrieved keys @@ -1541,7 +1541,7 @@ async def _retrieve_cross_signing_keys_for_remote_user( return desired_key_data - async def check_cross_signing_setup(self, user_id: str) -> Tuple[bool, bool]: + async def check_cross_signing_setup(self, user_id: str) -> tuple[bool, bool]: """Checks if the user has cross-signing set up Args: @@ -1599,7 +1599,7 @@ async def has_different_keys(self, user_id: str, body: JsonDict) -> bool: async def _delete_old_one_time_keys_task( self, task: ScheduledTask - ) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: + ) -> tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: """Scheduler task to delete old one time keys. Until Synapse 1.119, Synapse used to issue one-time-keys in a random order, leading to the possibility @@ -1773,7 +1773,7 @@ def __init__(self, hs: "HomeServer"): ) # user_id -> list of updates waiting to be handled. - self._pending_updates: Dict[str, List[Tuple[JsonDict, JsonDict]]] = {} + self._pending_updates: dict[str, list[tuple[JsonDict, JsonDict]]] = {} async def incoming_signing_key_update( self, origin: str, edu_content: JsonDict @@ -1819,7 +1819,7 @@ async def _handle_signing_key_updates(self, user_id: str) -> None: # This can happen since we batch updates return - device_ids: List[str] = [] + device_ids: list[str] = [] logger.info("pending updates: %r", pending_updates) diff --git a/synapse/handlers/e2e_room_keys.py b/synapse/handlers/e2e_room_keys.py index 623fd33f13c..094b4bc27c9 100644 --- a/synapse/handlers/e2e_room_keys.py +++ b/synapse/handlers/e2e_room_keys.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Dict, Literal, Optional, cast +from typing import TYPE_CHECKING, Literal, Optional, cast from synapse.api.errors import ( Codes, @@ -65,8 +65,8 @@ async def get_room_keys( version: str, room_id: Optional[str] = None, session_id: Optional[str] = None, - ) -> Dict[ - Literal["rooms"], Dict[str, Dict[Literal["sessions"], Dict[str, RoomKey]]] + ) -> dict[ + Literal["rooms"], dict[str, dict[Literal["sessions"], dict[str, RoomKey]]] ]: """Bulk get the E2E room keys for a given backup, optionally filtered to a given room, or a given session. diff --git a/synapse/handlers/event_auth.py b/synapse/handlers/event_auth.py index 1f1f67dc0df..b2caca8ce7e 100644 --- a/synapse/handlers/event_auth.py +++ b/synapse/handlers/event_auth.py @@ -19,7 +19,7 @@ # # import logging -from typing import TYPE_CHECKING, List, Mapping, Optional, Union +from typing import TYPE_CHECKING, Mapping, Optional, Union from synapse import event_auth from synapse.api.constants import ( @@ -92,7 +92,7 @@ def compute_auth_events( event: Union[EventBase, EventBuilder], current_state_ids: StateMap[str], for_verification: bool = False, - ) -> List[str]: + ) -> list[str]: """Given an event and current state return the list of event IDs used to auth an event. 
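# --- Editor's aside (not part of the patch) ---------------------------------
# typing.List[str] and list[str] expose the same origin and arguments to
# introspection, which is why this mechanical rewrite is safe; PEP 585
# deprecates the typing aliases in favour of the builtins.
import typing

assert typing.get_origin(typing.List[str]) is typing.get_origin(list[str]) is list
assert typing.get_args(typing.List[str]) == typing.get_args(list[str]) == (str,)
# --- end aside --------------------------------------------------------------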
diff --git a/synapse/handlers/events.py b/synapse/handlers/events.py index 3f46032a43a..9522d5a6966 100644 --- a/synapse/handlers/events.py +++ b/synapse/handlers/events.py @@ -21,7 +21,7 @@ import logging import random -from typing import TYPE_CHECKING, Iterable, List, Optional +from typing import TYPE_CHECKING, Iterable, Optional from synapse.api.constants import EduTypes, EventTypes, Membership, PresenceState from synapse.api.errors import AuthError, SynapseError @@ -100,7 +100,7 @@ async def get_stream( # When the user joins a new room, or another user joins a currently # joined room, we need to send down presence for those users. - to_add: List[JsonDict] = [] + to_add: list[JsonDict] = [] for event in events: if not isinstance(event, EventBase): continue diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index adc20f4ad02..3eb1d166f88 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -30,12 +30,8 @@ from typing import ( TYPE_CHECKING, AbstractSet, - Dict, Iterable, - List, Optional, - Set, - Tuple, Union, ) @@ -168,12 +164,12 @@ def __init__(self, hs: "HomeServer"): # Tracks running partial state syncs by room ID. # Partial state syncs currently only run on the main process, so it's okay to # track them in-memory for now. - self._active_partial_state_syncs: Set[str] = set() + self._active_partial_state_syncs: set[str] = set() # Tracks partial state syncs we may want to restart. # A dictionary mapping room IDs to (initial destination, other destinations) # tuples. - self._partial_state_syncs_maybe_needing_restart: Dict[ - str, Tuple[Optional[str], AbstractSet[str]] + self._partial_state_syncs_maybe_needing_restart: dict[ + str, tuple[Optional[str], AbstractSet[str]] ] = {} # A lock guarding the partial state flag for rooms. # When the lock is held for a given room, no other concurrent code may @@ -272,7 +268,7 @@ async def _maybe_backfill_inner( # we now have a list of potential places to backpaginate from. We prefer to # start with the most recent (ie, max depth), so let's sort the list. - sorted_backfill_points: List[_BackfillPoint] = sorted( + sorted_backfill_points: list[_BackfillPoint] = sorted( backwards_extremities, key=lambda e: -int(e.depth), ) @@ -380,7 +376,7 @@ async def _maybe_backfill_inner( # there is it's often sufficiently long ago that clients would stop # attempting to paginate before backfill reached the visible history. - extremities_to_request: List[str] = [] + extremities_to_request: list[str] = [] for bp in sorted_backfill_points: if len(extremities_to_request) >= 5: break @@ -562,7 +558,7 @@ async def send_invite(self, target_host: str, event: EventBase) -> EventBase: return pdu - async def on_event_auth(self, event_id: str) -> List[EventBase]: + async def on_event_auth(self, event_id: str) -> list[EventBase]: event = await self.store.get_event(event_id) auth = await self.store.get_auth_chain( event.room_id, list(event.auth_event_ids()), include_given=True @@ -571,7 +567,7 @@ async def on_event_auth(self, event_id: str) -> List[EventBase]: async def do_invite_join( self, target_hosts: Iterable[str], room_id: str, joinee: str, content: JsonDict - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """Attempts to join the `joinee` to the room `room_id` via the servers contained in `target_hosts`. 
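# --- Editor's aside (hypothetical helper, not from the patch) ---------------
# Signature annotations are evaluated at definition time, so return types
# like the tuple[str, int] on do_invite_join above require Python 3.9+ but
# no `from __future__ import annotations`:
def join_result(hosts: list[str]) -> tuple[str, int]:
    # Stand-in returning an (event_id, stream_id) pair, the shape these
    # membership helpers document.
    return "$event:example.org", 42

assert join_result.__annotations__["hosts"] == list[str]
# --- end aside --------------------------------------------------------------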
@@ -807,11 +803,11 @@ async def do_invite_join( async def do_knock( self, - target_hosts: List[str], + target_hosts: list[str], room_id: str, knockee: str, content: JsonDict, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """Sends the knock to the remote server. This first triggers a make_knock request that returns a partial @@ -840,7 +836,7 @@ async def do_knock( # Ask the remote server to create a valid knock event for us. Once received, # we sign the event - params: Dict[str, Iterable[str]] = {"ver": supported_room_versions} + params: dict[str, Iterable[str]] = {"ver": supported_room_versions} origin, event, event_format_version = await self._make_and_verify_event( target_hosts, room_id, knockee, Membership.KNOCK, content, params=params ) @@ -889,7 +885,7 @@ async def do_knock( return event.event_id, stream_id async def _handle_queued_pdus( - self, room_queue: List[Tuple[EventBase, str]] + self, room_queue: list[tuple[EventBase, str]] ) -> None: """Process PDUs which got queued up while we were busy send_joining. @@ -1144,7 +1140,7 @@ async def on_invite_request( async def do_remotely_reject_invite( self, target_hosts: Iterable[str], room_id: str, user_id: str, content: JsonDict - ) -> Tuple[EventBase, int]: + ) -> tuple[EventBase, int]: origin, event, room_version = await self._make_and_verify_event( target_hosts, room_id, user_id, "leave", content=content ) @@ -1178,8 +1174,8 @@ async def _make_and_verify_event( user_id: str, membership: str, content: JsonDict, - params: Optional[Dict[str, Union[str, Iterable[str]]]] = None, - ) -> Tuple[str, EventBase, RoomVersion]: + params: Optional[dict[str, Union[str, Iterable[str]]]] = None, + ) -> tuple[str, EventBase, RoomVersion]: ( origin, event, @@ -1306,7 +1302,7 @@ async def on_make_knock_request( @trace @tag_args - async def get_state_ids_for_pdu(self, room_id: str, event_id: str) -> List[str]: + async def get_state_ids_for_pdu(self, room_id: str, event_id: str) -> list[str]: """Returns the state at the event. i.e. not including said event.""" event = await self.store.get_event(event_id, check_room_id=room_id) if event.internal_metadata.outlier: @@ -1339,8 +1335,8 @@ async def get_state_ids_for_pdu(self, room_id: str, event_id: str) -> List[str]: return list(state_map.values()) async def on_backfill_request( - self, origin: str, room_id: str, pdu_list: List[str], limit: int - ) -> List[EventBase]: + self, origin: str, room_id: str, pdu_list: list[str], limit: int + ) -> list[EventBase]: # We allow partially joined rooms since in this case we are filtering out # non-local events in `filter_events_for_server`. await self._event_auth_handler.assert_host_in_room(room_id, origin, True) @@ -1416,10 +1412,10 @@ async def on_get_missing_events( self, origin: str, room_id: str, - earliest_events: List[str], - latest_events: List[str], + earliest_events: list[str], + latest_events: list[str], limit: int, - ) -> List[EventBase]: + ) -> list[EventBase]: # We allow partially joined rooms since in this case we are filtering out # non-local events in `filter_events_for_server`. 
await self._event_auth_handler.assert_host_in_room(room_id, origin, True) @@ -1602,7 +1598,7 @@ async def add_display_name_to_third_party_invite( event_dict: JsonDict, event: EventBase, context: UnpersistedEventContextBase, - ) -> Tuple[EventBase, UnpersistedEventContextBase]: + ) -> tuple[EventBase, UnpersistedEventContextBase]: key = ( EventTypes.ThirdPartyInvite, event.content["third_party_invite"]["signed"]["token"], @@ -1758,7 +1754,7 @@ async def _check_key_revocation(self, public_key: str, url: str) -> None: raise AuthError(403, "Third party certificate was invalid") async def get_room_complexity( - self, remote_room_hosts: List[str], room_id: str + self, remote_room_hosts: list[str], room_id: str ) -> Optional[dict]: """ Fetch the complexity of a remote room over federation. diff --git a/synapse/handlers/federation_event.py b/synapse/handlers/federation_event.py index d6390b79c7b..32b603e9470 100644 --- a/synapse/handlers/federation_event.py +++ b/synapse/handlers/federation_event.py @@ -27,13 +27,9 @@ TYPE_CHECKING, Collection, Container, - Dict, Iterable, - List, Optional, Sequence, - Set, - Tuple, ) from prometheus_client import Counter, Histogram @@ -190,7 +186,7 @@ def __init__(self, hs: "HomeServer"): # For each room, a list of (pdu, origin) tuples. # TODO: replace this with something more elegant, probably based around the # federation event staging area. - self.room_queues: Dict[str, List[Tuple[EventBase, str]]] = {} + self.room_queues: dict[str, list[tuple[EventBase, str]]] = {} self._room_pdu_linearizer = Linearizer(name="fed_room_pdu", clock=self._clock) @@ -511,8 +507,8 @@ async def process_remote_join( self, origin: str, room_id: str, - auth_events: List[EventBase], - state: List[EventBase], + auth_events: list[EventBase], + state: list[EventBase], event: EventBase, room_version: RoomVersion, partial_state: bool, @@ -595,7 +591,7 @@ async def process_remote_join( ) missing_event_ids = prev_event_ids - seen_event_ids - state_maps_to_resolve: List[StateMap[str]] = [] + state_maps_to_resolve: list[StateMap[str]] = [] # Fetch the state after the prev events that we know about. 
state_maps_to_resolve.extend( @@ -755,7 +751,7 @@ async def backfill( @trace async def _get_missing_events_for_pdu( - self, origin: str, pdu: EventBase, prevs: Set[str], min_depth: int + self, origin: str, pdu: EventBase, prevs: set[str], min_depth: int ) -> None: """ Args: @@ -902,7 +898,7 @@ async def _process_pulled_events( [event.event_id for event in events] ) - new_events: List[EventBase] = [] + new_events: list[EventBase] = [] for event in events: event_id = event.event_id @@ -1186,7 +1182,7 @@ async def _compute_event_context_with_maybe_missing_prevs( partial_state = any(partial_state_flags.values()) # state_maps is a list of mappings from (type, state_key) to event_id - state_maps: List[StateMap[str]] = [] + state_maps: list[StateMap[str]] = [] # Ask the remote server for the states we don't # know about @@ -1647,7 +1643,7 @@ async def _get_events_and_persist( room_version = await self._store.get_room_version(room_id) - events: List[EventBase] = [] + events: list[EventBase] = [] async def get_event(event_id: str) -> None: with nested_logging_context(event_id): @@ -1753,7 +1749,7 @@ async def _auth_and_persist_outliers( ) auth_map.update(persisted_events) - events_and_contexts_to_persist: List[EventPersistencePair] = [] + events_and_contexts_to_persist: list[EventPersistencePair] = [] async def prep(event: EventBase) -> None: with nested_logging_context(suffix=event.event_id): @@ -2050,7 +2046,7 @@ async def _check_for_soft_fail( state_sets_d = await self._state_storage_controller.get_state_groups_ids( event.room_id, extrem_ids ) - state_sets: List[StateMap[str]] = list(state_sets_d.values()) + state_sets: list[StateMap[str]] = list(state_sets_d.values()) state_ids = await context.get_prev_state_ids() state_sets.append(state_ids) current_state_ids = ( diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py index be757201fc6..0f507b3317a 100644 --- a/synapse/handlers/identity.py +++ b/synapse/handlers/identity.py @@ -24,7 +24,7 @@ import logging import urllib.parse -from typing import TYPE_CHECKING, Awaitable, Callable, Dict, List, Optional, Tuple +from typing import TYPE_CHECKING, Awaitable, Callable, Optional import attr @@ -105,7 +105,7 @@ async def ratelimit_request_token_requests( ) async def threepid_from_creds( - self, id_server: str, creds: Dict[str, str] + self, id_server: str, creds: dict[str, str] ) -> Optional[JsonDict]: """ Retrieve and validate a threepid identifier from a "credentials" dictionary against a @@ -693,7 +693,7 @@ async def ask_id_server_for_third_party_invite( inviter_display_name: str, inviter_avatar_url: str, id_access_token: str, - ) -> Tuple[str, List[Dict[str, str]], Dict[str, str], str]: + ) -> tuple[str, list[dict[str, str]], dict[str, str], str]: """ Asks an identity server for a third party invite. @@ -779,7 +779,7 @@ async def ask_id_server_for_third_party_invite( return token, public_keys, fallback_public_key, display_name -def create_id_access_token_header(id_access_token: str) -> List[str]: +def create_id_access_token_header(id_access_token: str) -> list[str]: """Create an Authorization header for passing to SimpleHttpClient as the header value of an HTTP request. 
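# --- Editor's aside (illustrative; mirrors the documented intent) -----------
# Twisted-style header maps carry a list of raw values per header name, which
# is what the list[str] return type of create_id_access_token_header reflects:
def bearer_header_values(id_access_token: str) -> list[str]:
    return ["Bearer " + id_access_token]

headers: dict[str, list[str]] = {"Authorization": bearer_header_values("abc123")}
# --- end aside --------------------------------------------------------------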
diff --git a/synapse/handlers/initial_sync.py b/synapse/handlers/initial_sync.py index 75d64d2d50b..1c6f8bf53bf 100644 --- a/synapse/handlers/initial_sync.py +++ b/synapse/handlers/initial_sync.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, List, Optional, Tuple +from typing import TYPE_CHECKING, Optional from synapse.api.constants import ( AccountDataTypes, @@ -69,7 +69,7 @@ def __init__(self, hs: "HomeServer"): self.clock = hs.get_clock() self.validator = EventValidator() self.snapshot_cache: ResponseCache[ - Tuple[ + tuple[ str, Optional[StreamToken], Optional[StreamToken], @@ -451,7 +451,7 @@ async def _room_initial_sync_joined( presence_handler = self.hs.get_presence_handler() - async def get_presence() -> List[JsonDict]: + async def get_presence() -> list[JsonDict]: # If presence is disabled, return an empty list if not self.hs.config.server.presence_enabled: return [] @@ -468,7 +468,7 @@ async def get_presence() -> List[JsonDict]: for s in states ] - async def get_receipts() -> List[JsonMapping]: + async def get_receipts() -> list[JsonMapping]: receipts = await self.store.get_linearized_receipts_for_room( room_id, to_key=now_token.receipt_key ) diff --git a/synapse/handlers/jwt.py b/synapse/handlers/jwt.py index 400f3a59aa1..f1715f64959 100644 --- a/synapse/handlers/jwt.py +++ b/synapse/handlers/jwt.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import TYPE_CHECKING, Optional, Tuple +from typing import TYPE_CHECKING, Optional from authlib.jose import JsonWebToken, JWTClaims from authlib.jose.errors import BadSignatureError, InvalidClaimError, JoseError @@ -41,7 +41,7 @@ def __init__(self, hs: "HomeServer"): self.jwt_issuer = hs.config.jwt.jwt_issuer self.jwt_audiences = hs.config.jwt.jwt_audiences - def validate_login(self, login_submission: JsonDict) -> Tuple[str, Optional[str]]: + def validate_login(self, login_submission: JsonDict) -> tuple[str, Optional[str]]: """ Authenticates the user for the /login API diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index e874b600008..2ad1dbe73f3 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -22,7 +22,7 @@ import logging import random from http import HTTPStatus -from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Optional, Sequence, Tuple +from typing import TYPE_CHECKING, Any, Mapping, Optional, Sequence from canonicaljson import encode_canonical_json @@ -180,7 +180,7 @@ async def get_state_events( room_id: str, state_filter: Optional[StateFilter] = None, at_token: Optional[StreamToken] = None, - ) -> List[dict]: + ) -> list[dict]: """Retrieve all state events for a given room. If the user is joined to the room then return the current state. If the user has left the room return the state events from when they left. If an explicit @@ -538,7 +538,7 @@ def __init__(self, hs: "HomeServer"): # # map from room id to time-of-last-attempt. # - self._rooms_to_exclude_from_dummy_event_insertion: Dict[str, int] = {} + self._rooms_to_exclude_from_dummy_event_insertion: dict[str, int] = {} # The number of forward extremeities before a dummy event is sent. 
self._dummy_events_threshold = hs.config.server.dummy_events_threshold @@ -578,16 +578,16 @@ async def create_event( requester: Requester, event_dict: dict, txn_id: Optional[str] = None, - prev_event_ids: Optional[List[str]] = None, - auth_event_ids: Optional[List[str]] = None, - state_event_ids: Optional[List[str]] = None, + prev_event_ids: Optional[list[str]] = None, + auth_event_ids: Optional[list[str]] = None, + state_event_ids: Optional[list[str]] = None, require_consent: bool = True, outlier: bool = False, depth: Optional[int] = None, state_map: Optional[StateMap[str]] = None, for_batch: bool = False, current_state_group: Optional[int] = None, - ) -> Tuple[EventBase, UnpersistedEventContextBase]: + ) -> tuple[EventBase, UnpersistedEventContextBase]: """ Given a dict from a client, create a new event. If bool for_batch is true, will create an event using the prev_event_ids, and will create an event context for @@ -961,14 +961,14 @@ async def create_and_send_nonmember_event( self, requester: Requester, event_dict: dict, - prev_event_ids: Optional[List[str]] = None, - state_event_ids: Optional[List[str]] = None, + prev_event_ids: Optional[list[str]] = None, + state_event_ids: Optional[list[str]] = None, ratelimit: bool = True, txn_id: Optional[str] = None, ignore_shadow_ban: bool = False, outlier: bool = False, depth: Optional[int] = None, - ) -> Tuple[EventBase, int]: + ) -> tuple[EventBase, int]: """ Creates an event, then sends it. @@ -1098,14 +1098,14 @@ async def _create_and_send_nonmember_event_locked( self, requester: Requester, event_dict: dict, - prev_event_ids: Optional[List[str]] = None, - state_event_ids: Optional[List[str]] = None, + prev_event_ids: Optional[list[str]] = None, + state_event_ids: Optional[list[str]] = None, ratelimit: bool = True, txn_id: Optional[str] = None, ignore_shadow_ban: bool = False, outlier: bool = False, depth: Optional[int] = None, - ) -> Tuple[EventBase, int]: + ) -> tuple[EventBase, int]: room_id = event_dict["room_id"] # If we don't have any prev event IDs specified then we need to @@ -1220,14 +1220,14 @@ async def create_new_client_event( self, builder: EventBuilder, requester: Optional[Requester] = None, - prev_event_ids: Optional[List[str]] = None, - auth_event_ids: Optional[List[str]] = None, - state_event_ids: Optional[List[str]] = None, + prev_event_ids: Optional[list[str]] = None, + auth_event_ids: Optional[list[str]] = None, + state_event_ids: Optional[list[str]] = None, depth: Optional[int] = None, state_map: Optional[StateMap[str]] = None, for_batch: bool = False, current_state_group: Optional[int] = None, - ) -> Tuple[EventBase, UnpersistedEventContextBase]: + ) -> tuple[EventBase, UnpersistedEventContextBase]: """Create a new event for a local client. If bool for_batch is true, will create an event using the prev_event_ids, and will create an event context for the event using the parameters state_map and current_state_group, thus these parameters @@ -1471,9 +1471,9 @@ async def _validate_event_relation(self, event: EventBase) -> None: async def handle_new_client_event( self, requester: Requester, - events_and_context: List[EventPersistencePair], + events_and_context: list[EventPersistencePair], ratelimit: bool = True, - extra_users: Optional[List[UserID]] = None, + extra_users: Optional[list[UserID]] = None, ignore_shadow_ban: bool = False, ) -> EventBase: """Processes new events. 
Please note that if batch persisting events, an error in @@ -1683,9 +1683,9 @@ async def create_and_send_new_client_events( async def _persist_events( self, requester: Requester, - events_and_context: List[EventPersistencePair], + events_and_context: list[EventPersistencePair], ratelimit: bool = True, - extra_users: Optional[List[UserID]] = None, + extra_users: Optional[list[UserID]] = None, ) -> EventBase: """Actually persists new events. Should only be called by `handle_new_client_event`, and see its docstring for documentation of @@ -1769,7 +1769,7 @@ async def _persist_events( raise async def cache_joined_hosts_for_events( - self, events_and_context: List[EventPersistencePair] + self, events_and_context: list[EventPersistencePair] ) -> None: """Precalculate the joined hosts at each of the given events, when using Redis, so that external federation senders don't have to recalculate it themselves. @@ -1875,9 +1875,9 @@ async def _validate_canonical_alias( async def persist_and_notify_client_events( self, requester: Requester, - events_and_context: List[EventPersistencePair], + events_and_context: list[EventPersistencePair], ratelimit: bool = True, - extra_users: Optional[List[UserID]] = None, + extra_users: Optional[list[UserID]] = None, ) -> EventBase: """Called when we have fully built the events, have already calculated the push actions for the events, and checked auth. @@ -2285,7 +2285,7 @@ def _expire_rooms_to_exclude_from_dummy_event_insertion(self) -> None: async def _rebuild_event_after_third_party_rules( self, third_party_result: dict, original_event: EventBase - ) -> Tuple[EventBase, UnpersistedEventContextBase]: + ) -> tuple[EventBase, UnpersistedEventContextBase]: # the third_party_event_rules want to replace the event. # we do some basic checks, and then return the replacement event. diff --git a/synapse/handlers/oidc.py b/synapse/handlers/oidc.py index fc93c6b2a8e..d6ee091c091 100644 --- a/synapse/handlers/oidc.py +++ b/synapse/handlers/oidc.py @@ -26,11 +26,8 @@ from typing import ( TYPE_CHECKING, Any, - Dict, Generic, - List, Optional, - Type, TypedDict, TypeVar, Union, @@ -113,14 +110,14 @@ class Token(TypedDict): #: A JWK, as per RFC7517 sec 4. The type could be more precise than that, but #: there is no real point of doing this in our case. -JWK = Dict[str, str] +JWK = dict[str, str] C = TypeVar("C") #: A JWK Set, as per RFC7517 sec 5. 
class JWKS(TypedDict): - keys: List[JWK] + keys: list[JWK] class OidcHandler: @@ -134,7 +131,7 @@ def __init__(self, hs: "HomeServer"): assert provider_confs self._macaroon_generator = hs.get_macaroon_generator() - self._providers: Dict[str, "OidcProvider"] = { + self._providers: dict[str, "OidcProvider"] = { p.idp_id: OidcProvider(hs, self._macaroon_generator, p) for p in provider_confs } @@ -332,7 +329,7 @@ async def handle_backchannel_logout(self, request: SynapseRequest) -> None: # At this point we properly checked both claims types issuer: str = iss - audience: List[str] = aud + audience: list[str] = aud except (TypeError, KeyError): raise SynapseError(400, "Invalid issuer/audience in logout_token") @@ -758,7 +755,7 @@ async def _exchange_code(self, code: str, code_verifier: str) -> Token: """ metadata = await self.load_metadata() token_endpoint = metadata.get("token_endpoint") - raw_headers: Dict[str, str] = { + raw_headers: dict[str, str] = { "Content-Type": "application/x-www-form-urlencoded", "User-Agent": self._http_client.user_agent.decode("ascii"), "Accept": "application/json", @@ -902,9 +899,9 @@ async def _fetch_userinfo(self, token: Token) -> UserInfo: async def _verify_jwt( self, - alg_values: List[str], + alg_values: list[str], token: str, - claims_cls: Type[C], + claims_cls: type[C], claims_options: Optional[dict] = None, claims_params: Optional[dict] = None, ) -> C: @@ -1589,7 +1586,7 @@ class UserAttributeDict(TypedDict): confirm_localpart: bool display_name: Optional[str] picture: Optional[str] # may be omitted by older `OidcMappingProviders` - emails: List[str] + emails: list[str] class OidcMappingProvider(Generic[C]): @@ -1678,7 +1675,7 @@ class JinjaOidcMappingConfig: localpart_template: Optional[Template] display_name_template: Optional[Template] email_template: Optional[Template] - extra_attributes: Dict[str, Template] + extra_attributes: dict[str, Template] confirm_localpart: bool = False @@ -1778,7 +1775,7 @@ def render_template_field(template: Optional[Template]) -> Optional[str]: if display_name == "": display_name = None - emails: List[str] = [] + emails: list[str] = [] email = render_template_field(self._config.email_template) if email: emails.append(email) @@ -1794,7 +1791,7 @@ def render_template_field(template: Optional[Template]) -> Optional[str]: ) async def get_extra_attributes(self, userinfo: UserInfo, token: Token) -> JsonDict: - extras: Dict[str, str] = {} + extras: dict[str, str] = {} for key, template in self._config.extra_attributes.items(): try: extras[key] = template.render(user=userinfo).strip() diff --git a/synapse/handlers/pagination.py b/synapse/handlers/pagination.py index 02a67581e75..7274a512b0e 100644 --- a/synapse/handlers/pagination.py +++ b/synapse/handlers/pagination.py @@ -19,7 +19,7 @@ # # import logging -from typing import TYPE_CHECKING, List, Optional, Set, Tuple, cast +from typing import TYPE_CHECKING, Optional, cast from twisted.python.failure import Failure @@ -91,7 +91,7 @@ def __init__(self, hs: "HomeServer"): self.pagination_lock = ReadWriteLock() # IDs of rooms in which there currently an active purge *or delete* operation. 
- self._purges_in_progress_by_room: Set[str] = set() + self._purges_in_progress_by_room: set[str] = set() self._event_serializer = hs.get_event_client_serializer() self._retention_default_max_lifetime = ( @@ -279,7 +279,7 @@ async def start_purge_history( async def _purge_history( self, task: ScheduledTask, - ) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: + ) -> tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: """ Scheduler action to purge some history of a room. """ @@ -343,7 +343,7 @@ async def get_delete_task(self, delete_id: str) -> Optional[ScheduledTask]: async def get_delete_tasks_by_room( self, room_id: str, only_active: Optional[bool] = False - ) -> List[ScheduledTask]: + ) -> list[ScheduledTask]: """Get complete, failed or active delete tasks by room Args: @@ -363,7 +363,7 @@ async def get_delete_tasks_by_room( async def _purge_room( self, task: ScheduledTask, - ) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: + ) -> tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: """ Scheduler action to purge a room. """ @@ -523,7 +523,7 @@ async def get_messages( # We use a `Set` because there can be multiple events at a given depth # and we only care about looking at the unique continum of depths to # find gaps. - event_depths: Set[int] = {event.depth for event in events} + event_depths: set[int] = {event.depth for event in events} sorted_event_depths = sorted(event_depths) # Inspect the depths of the returned events to see if there are any gaps @@ -691,7 +691,7 @@ async def get_messages( async def _shutdown_and_purge_room( self, task: ScheduledTask, - ) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: + ) -> tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: """ Scheduler action to shutdown and purge a room. """ diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index 1610683066a..d8150a5857f 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -86,14 +86,9 @@ Callable, Collection, ContextManager, - Dict, Generator, Iterable, - List, Optional, - Set, - Tuple, - Type, ) from prometheus_client import Counter @@ -236,7 +231,7 @@ def __init__(self, hs: "HomeServer"): self._federation_queue = PresenceFederationQueue(hs, self) - self.VALID_PRESENCE: Tuple[str, ...] = ( + self.VALID_PRESENCE: tuple[str, ...] = ( PresenceState.ONLINE, PresenceState.UNAVAILABLE, PresenceState.OFFLINE, @@ -276,7 +271,7 @@ async def user_syncing( @abc.abstractmethod def get_currently_syncing_users_for_replication( self, - ) -> Iterable[Tuple[str, Optional[str]]]: + ) -> Iterable[tuple[str, Optional[str]]]: """Get an iterable of syncing users and devices on this worker, to send to the presence handler This is called when a replication connection is established. It should return @@ -293,7 +288,7 @@ async def get_state(self, target_user: UserID) -> UserPresenceState: async def get_states( self, target_user_ids: Iterable[str] - ) -> List[UserPresenceState]: + ) -> list[UserPresenceState]: """Get the presence state for users.""" updates_d = await self.current_state_for_users(target_user_ids) @@ -306,7 +301,7 @@ async def get_states( async def current_state_for_users( self, user_ids: Iterable[str] - ) -> Dict[str, UserPresenceState]: + ) -> dict[str, UserPresenceState]: """Get the current presence state for multiple users. 
Returns: @@ -417,7 +412,7 @@ def get_federation_queue(self) -> "PresenceFederationQueue": return self._federation_queue async def maybe_send_presence_to_interested_destinations( - self, states: List[UserPresenceState] + self, states: list[UserPresenceState] ) -> None: """If this instance is a federation sender, send the states to all destinations that are interested. Filters out any states for remote @@ -501,7 +496,7 @@ class _NullContextManager(ContextManager[None]): def __exit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> None: @@ -522,8 +517,8 @@ def __init__(self, hs: "HomeServer"): # The number of ongoing syncs on this process, by (user ID, device ID). # Empty if _presence_enabled is false. - self._user_device_to_num_current_syncs: Dict[ - Tuple[str, Optional[str]], int + self._user_device_to_num_current_syncs: dict[ + tuple[str, Optional[str]], int ] = {} self.notifier = hs.get_notifier() @@ -531,7 +526,7 @@ def __init__(self, hs: "HomeServer"): # (user_id, device_id) -> last_sync_ms. Lists the devices that have stopped # syncing but we haven't notified the presence writer of that yet - self._user_devices_going_offline: Dict[Tuple[str, Optional[str]], int] = {} + self._user_devices_going_offline: dict[tuple[str, Optional[str]], int] = {} self._bump_active_client = ReplicationBumpPresenceActiveTime.make_client(hs) self._set_state_client = ReplicationPresenceSetState.make_client(hs) @@ -645,7 +640,7 @@ def _user_syncing() -> Generator[None, None, None]: return _user_syncing() async def notify_from_replication( - self, states: List[UserPresenceState], stream_id: int + self, states: list[UserPresenceState], stream_id: int ) -> None: parties = await get_interested_parties(self.store, self.presence_router, states) room_ids_to_states, users_to_states = parties @@ -704,7 +699,7 @@ async def process_replication_rows( def get_currently_syncing_users_for_replication( self, - ) -> Iterable[Tuple[str, Optional[str]]]: + ) -> Iterable[tuple[str, Optional[str]]]: return [ user_id_device_id for user_id_device_id, count in self._user_device_to_num_current_syncs.items() @@ -790,8 +785,8 @@ def __init__(self, hs: "HomeServer"): ) # The per-device presence state, maps user to devices to per-device presence state. - self._user_to_device_to_current_state: Dict[ - str, Dict[Optional[str], UserDevicePresenceState] + self._user_to_device_to_current_state: dict[ + str, dict[Optional[str], UserDevicePresenceState] ] = {} now = self.clock.time_msec() @@ -833,7 +828,7 @@ def __init__(self, hs: "HomeServer"): # Set of users who have presence in the `user_to_current_state` that # have not yet been persisted - self.unpersisted_users_changes: Set[str] = set() + self.unpersisted_users_changes: set[str] = set() hs.register_async_shutdown_handler( phase="before", @@ -843,8 +838,8 @@ def __init__(self, hs: "HomeServer"): # Keeps track of the number of *ongoing* syncs on this process. While # this is non zero a user will never go offline. - self._user_device_to_num_current_syncs: Dict[ - Tuple[str, Optional[str]], int + self._user_device_to_num_current_syncs: dict[ + tuple[str, Optional[str]], int ] = {} # Keeps track of the number of *ongoing* syncs on other processes. @@ -857,10 +852,10 @@ def __init__(self, hs: "HomeServer"): # we assume that all the sync requests on that process have stopped. 
# Stored as a dict from process_id to set of (user_id, device_id), and # a dict of process_id to millisecond timestamp last updated. - self.external_process_to_current_syncs: Dict[ - str, Set[Tuple[str, Optional[str]]] + self.external_process_to_current_syncs: dict[ + str, set[tuple[str, Optional[str]]] ] = {} - self.external_process_last_updated_ms: Dict[str, int] = {} + self.external_process_last_updated_ms: dict[str, int] = {} self.external_sync_linearizer = Linearizer( name="external_sync_linearizer", clock=self.clock @@ -1151,7 +1146,7 @@ async def bump_presence_active_time( # Update the user state, this will always update last_active_ts and # might update the presence state. prev_state = await self.current_state_for_user(user_id) - new_fields: Dict[str, Any] = { + new_fields: dict[str, Any] = { "last_active_ts": now, "state": _combine_device_states(devices.values()), } @@ -1221,7 +1216,7 @@ def _user_syncing() -> Generator[None, None, None]: def get_currently_syncing_users_for_replication( self, - ) -> Iterable[Tuple[str, Optional[str]]]: + ) -> Iterable[tuple[str, Optional[str]]]: # since we are the process handling presence, there is nothing to do here. return [] @@ -1317,7 +1312,7 @@ async def update_external_syncs_clear(self, process_id: str) -> None: ) self.external_process_last_updated_ms.pop(process_id, None) - async def _persist_and_notify(self, states: List[UserPresenceState]) -> None: + async def _persist_and_notify(self, states: list[UserPresenceState]) -> None: """Persist states in the database, poke the notifier and send to interested remote servers """ @@ -1477,7 +1472,7 @@ async def is_visible(self, observed_user: UserID, observer_user: UserID) -> bool async def get_all_presence_updates( self, instance_name: str, last_id: int, current_id: int, limit: int - ) -> Tuple[List[Tuple[int, list]], int, bool]: + ) -> tuple[list[tuple[int, list]], int, bool]: """ Gets a list of presence update rows from between the given stream ids. Each row has: @@ -1562,7 +1557,7 @@ async def _unsafe_process(self) -> None: # We may get multiple deltas for different rooms, but we want to # handle them on a room by room basis, so we batch them up by # room. - deltas_by_room: Dict[str, List[StateDelta]] = {} + deltas_by_room: dict[str, list[StateDelta]] = {} for delta in deltas: deltas_by_room.setdefault(delta.room_id, []).append(delta) @@ -1576,7 +1571,7 @@ async def _unsafe_process(self) -> None: name="presence", **{SERVER_NAME_LABEL: self.server_name} ).set(max_pos) - async def _handle_state_delta(self, room_id: str, deltas: List[StateDelta]) -> None: + async def _handle_state_delta(self, room_id: str, deltas: list[StateDelta]) -> None: """Process current state deltas for the room to find new joins that need to be handled. """ @@ -1849,7 +1844,7 @@ async def get_new_events( explicit_room_id: Optional[str] = None, include_offline: bool = True, service: Optional[ApplicationService] = None, - ) -> Tuple[List[UserPresenceState], int]: + ) -> tuple[list[UserPresenceState], int]: # The process for getting presence events are: # 1. Get the rooms the user is in. # 2. Get the list of user in the rooms. @@ -2001,7 +1996,7 @@ async def _filter_all_presence_updates_for_user( user_id: str, include_offline: bool, from_key: Optional[int] = None, - ) -> List[UserPresenceState]: + ) -> list[UserPresenceState]: """ Computes the presence updates a user should receive. 
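# --- Editor's aside (hypothetical values, simplified value type) ------------
# The per-device presence maps in this file key on Optional[str]: a None
# device id covers callers that supply no device, alongside real device ids.
from typing import Optional

user_to_devices: dict[str, dict[Optional[str], str]] = {
    "@alice:example.org": {None: "online", "PHONE2": "unavailable"},
}
assert user_to_devices["@alice:example.org"][None] == "online"
# --- end aside --------------------------------------------------------------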
@@ -2058,7 +2053,7 @@ async def _filter_all_presence_updates_for_user( def _filter_offline_presence_state( self, presence_updates: Iterable[UserPresenceState] - ) -> List[UserPresenceState]: + ) -> list[UserPresenceState]: """Given an iterable containing user presence updates, return a list with any offline presence states removed. @@ -2079,12 +2074,12 @@ def get_current_key(self) -> int: def handle_timeouts( - user_states: List[UserPresenceState], + user_states: list[UserPresenceState], is_mine_fn: Callable[[str], bool], - syncing_user_devices: AbstractSet[Tuple[str, Optional[str]]], - user_to_devices: Dict[str, Dict[Optional[str], UserDevicePresenceState]], + syncing_user_devices: AbstractSet[tuple[str, Optional[str]]], + user_to_devices: dict[str, dict[Optional[str], UserDevicePresenceState]], now: int, -) -> List[UserPresenceState]: +) -> list[UserPresenceState]: """Checks the presence of users that have timed out and updates as appropriate. @@ -2120,8 +2115,8 @@ def handle_timeouts( def handle_timeout( state: UserPresenceState, is_mine: bool, - syncing_device_ids: AbstractSet[Tuple[str, Optional[str]]], - user_devices: Dict[Optional[str], UserDevicePresenceState], + syncing_device_ids: AbstractSet[tuple[str, Optional[str]]], + user_devices: dict[Optional[str], UserDevicePresenceState], now: int, ) -> Optional[UserPresenceState]: """Checks the presence of the user to see if any of the timers have elapsed @@ -2218,7 +2213,7 @@ def handle_update( wheel_timer: WheelTimer, now: int, persist: bool, -) -> Tuple[UserPresenceState, bool, bool]: +) -> tuple[UserPresenceState, bool, bool]: """Given a presence update: 1. Add any appropriate timers. 2. Check if we should notify anyone. @@ -2344,8 +2339,8 @@ def _combine_device_states( async def get_interested_parties( - store: DataStore, presence_router: PresenceRouter, states: List[UserPresenceState] -) -> Tuple[Dict[str, List[UserPresenceState]], Dict[str, List[UserPresenceState]]]: + store: DataStore, presence_router: PresenceRouter, states: list[UserPresenceState] +) -> tuple[dict[str, list[UserPresenceState]], dict[str, list[UserPresenceState]]]: """Given a list of states return which entities (rooms, users) are interested in the given states. @@ -2358,8 +2353,8 @@ async def get_interested_parties( A 2-tuple of `(room_ids_to_states, users_to_states)`, with each item being a dict of `entity_name` -> `[UserPresenceState]` """ - room_ids_to_states: Dict[str, List[UserPresenceState]] = {} - users_to_states: Dict[str, List[UserPresenceState]] = {} + room_ids_to_states: dict[str, list[UserPresenceState]] = {} + users_to_states: dict[str, list[UserPresenceState]] = {} for state in states: room_ids = await store.get_rooms_for_user(state.user_id) for room_id in room_ids: @@ -2382,8 +2377,8 @@ async def get_interested_parties( async def get_interested_remotes( store: DataStore, presence_router: PresenceRouter, - states: List[UserPresenceState], -) -> List[Tuple[StrCollection, Collection[UserPresenceState]]]: + states: list[UserPresenceState], +) -> list[tuple[StrCollection, Collection[UserPresenceState]]]: """Given a list of presence states figure out which remote servers should be sent which. @@ -2397,14 +2392,14 @@ async def get_interested_remotes( Returns: A map from destinations to presence states to send to that destination. 
""" - hosts_and_states: List[Tuple[StrCollection, Collection[UserPresenceState]]] = [] + hosts_and_states: list[tuple[StrCollection, Collection[UserPresenceState]]] = [] # First we look up the rooms each user is in (as well as any explicit # subscriptions), then for each distinct room we look up the remote # hosts in those rooms. for state in states: room_ids = await store.get_rooms_for_user(state.user_id) - hosts: Set[str] = set() + hosts: set[str] = set() for room_id in room_ids: room_hosts = await store.get_current_hosts_in_room(room_id) hosts.update(room_hosts) @@ -2473,12 +2468,12 @@ def __init__(self, hs: "HomeServer", presence_handler: BasePresenceHandler): # stream_id, destinations, user_ids)`. We don't store the full states # for efficiency, and remote workers will already have the full states # cached. - self._queue: List[Tuple[int, int, StrCollection, Set[str]]] = [] + self._queue: list[tuple[int, int, StrCollection, set[str]]] = [] self._next_id = 1 # Map from instance name to current token - self._current_tokens: Dict[str, int] = {} + self._current_tokens: dict[str, int] = {} if self._queue_presence_updates: self._clock.looping_call(self._clear_queue, self._CLEAR_ITEMS_EVERY_MS) @@ -2547,7 +2542,7 @@ async def get_replication_rows( from_token: int, upto_token: int, target_row_count: int, - ) -> Tuple[List[Tuple[int, Tuple[str, str]]], int, bool]: + ) -> tuple[list[tuple[int, tuple[str, str]]], int, bool]: """Get all the updates between the two tokens. We return rows in the form of `(destination, user_id)` to keep the size @@ -2583,7 +2578,7 @@ async def get_replication_rows( # handle the case where `from_token` stream ID has already been dropped. start_idx = max(from_token + 1 - self._next_id, -len(self._queue)) - to_send: List[Tuple[int, Tuple[str, str]]] = [] + to_send: list[tuple[int, tuple[str, str]]] = [] limited = False new_id = upto_token for _, stream_id, destinations, user_ids in self._queue[start_idx:]: @@ -2631,7 +2626,7 @@ async def process_replication_rows( if not self._federation: return - hosts_to_users: Dict[str, Set[str]] = {} + hosts_to_users: dict[str, set[str]] = {} for row in rows: hosts_to_users.setdefault(row.destination, set()).add(row.user_id) diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py index 9dda89d85bb..240a235a0e7 100644 --- a/synapse/handlers/profile.py +++ b/synapse/handlers/profile.py @@ -20,7 +20,7 @@ # import logging import random -from typing import TYPE_CHECKING, List, Optional, Union +from typing import TYPE_CHECKING, Optional, Union from synapse.api.constants import ProfileFields from synapse.api.errors import ( @@ -69,7 +69,7 @@ def __init__(self, hs: "HomeServer"): self.request_ratelimiter = hs.get_request_ratelimiter() self.max_avatar_size: Optional[int] = hs.config.server.max_avatar_size - self.allowed_avatar_mimetypes: Optional[List[str]] = ( + self.allowed_avatar_mimetypes: Optional[list[str]] = ( hs.config.server.allowed_avatar_mimetypes ) diff --git a/synapse/handlers/push_rules.py b/synapse/handlers/push_rules.py index 4ef6a04c515..643fa72f3ff 100644 --- a/synapse/handlers/push_rules.py +++ b/synapse/handlers/push_rules.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union +from typing import TYPE_CHECKING, Any, Optional, Union import attr @@ -127,7 +127,7 @@ def notify_user(self, user_id: str) -> None: async def push_rules_for_user( self, user: UserID - ) -> Dict[str, Dict[str, List[Dict[str, Any]]]]: + 
) -> dict[str, dict[str, list[dict[str, Any]]]]: """ Push rules aren't really account data, but get formatted as such for /sync. """ @@ -137,7 +137,7 @@ async def push_rules_for_user( return rules -def check_actions(actions: List[Union[str, JsonDict]]) -> None: +def check_actions(actions: list[Union[str, JsonDict]]) -> None: """Check if the given actions are spec compliant. Args: diff --git a/synapse/handlers/receipts.py b/synapse/handlers/receipts.py index c776654d12c..ad41113b5b3 100644 --- a/synapse/handlers/receipts.py +++ b/synapse/handlers/receipts.py @@ -19,7 +19,7 @@ # # import logging -from typing import TYPE_CHECKING, Iterable, List, Optional, Sequence, Tuple +from typing import TYPE_CHECKING, Iterable, Optional, Sequence from synapse.api.constants import EduTypes, ReceiptTypes from synapse.appservice import ApplicationService @@ -136,10 +136,10 @@ async def _received_remote_receipt(self, origin: str, content: JsonDict) -> None await self._handle_new_receipts(receipts) - async def _handle_new_receipts(self, receipts: List[ReadReceipt]) -> bool: + async def _handle_new_receipts(self, receipts: list[ReadReceipt]) -> bool: """Takes a list of receipts, stores them and informs the notifier.""" - receipts_persisted: List[ReadReceipt] = [] + receipts_persisted: list[ReadReceipt] = [] for receipt in receipts: stream_id = await self.store.insert_receipt( receipt.room_id, @@ -216,7 +216,7 @@ def __init__(self, hs: "HomeServer"): @staticmethod def filter_out_private_receipts( rooms: Sequence[JsonMapping], user_id: str - ) -> List[JsonMapping]: + ) -> list[JsonMapping]: """ Filters a list of serialized receipts (as returned by /sync and /initialSync) and removes private read receipts of other users. @@ -233,7 +233,7 @@ def filter_out_private_receipts( The same as rooms, but filtered. """ - result: List[JsonMapping] = [] + result: list[JsonMapping] = [] # Iterate through each room's receipt content. for room in rooms: @@ -287,7 +287,7 @@ async def get_new_events( is_guest: bool, explicit_room_id: Optional[str] = None, to_key: Optional[MultiWriterStreamToken] = None, - ) -> Tuple[List[JsonMapping], MultiWriterStreamToken]: + ) -> tuple[list[JsonMapping], MultiWriterStreamToken]: """ Find read receipts for given rooms (> `from_token` and <= `to_token`) """ @@ -313,7 +313,7 @@ async def get_new_events_as( from_key: MultiWriterStreamToken, to_key: MultiWriterStreamToken, service: ApplicationService, - ) -> Tuple[List[JsonMapping], MultiWriterStreamToken]: + ) -> tuple[list[JsonMapping], MultiWriterStreamToken]: """Returns a set of new read receipt events that an appservice may be interested in. 
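Several of the receipt helpers above now return `tuple[list[JsonMapping], MultiWriterStreamToken]` pairs. A hedged, self-contained sketch of that events-plus-token shape, using simplified stand-in types rather than the real storage API:

    from typing import Optional

    Receipt = dict[str, object]  # simplified stand-in for JsonMapping

    def receipts_since(
        rows: list[tuple[int, Receipt]], from_token: Optional[int]
    ) -> tuple[list[Receipt], int]:
        """Return receipts with stream_id > from_token, plus the new top token."""
        top = max((stream_id for stream_id, _ in rows), default=from_token or 0)
        fresh = [
            receipt
            for stream_id, receipt in rows
            if from_token is None or stream_id > from_token
        ]
        return fresh, top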
diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index c3ff0cfaf81..8b620a91bc9 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -26,9 +26,7 @@ from typing import ( TYPE_CHECKING, Iterable, - List, Optional, - Tuple, TypedDict, ) @@ -241,7 +239,7 @@ async def register_user( address: Optional[str] = None, bind_emails: Optional[Iterable[str]] = None, by_admin: bool = False, - user_agent_ips: Optional[List[Tuple[str, str]]] = None, + user_agent_ips: Optional[list[tuple[str, str]]] = None, auth_provider_id: Optional[str] = None, approved: bool = False, ) -> str: @@ -655,7 +653,7 @@ async def post_consent_actions(self, user_id: str) -> None: async def appservice_register( self, user_localpart: str, as_token: str - ) -> Tuple[str, ApplicationService]: + ) -> tuple[str, ApplicationService]: user = UserID(user_localpart, self.hs.hostname) user_id = user.to_string() service = self.store.get_app_service_by_token(as_token) @@ -780,7 +778,7 @@ async def register_device( auth_provider_id: Optional[str] = None, should_issue_refresh_token: bool = False, auth_provider_session_id: Optional[str] = None, - ) -> Tuple[str, str, Optional[int], Optional[str]]: + ) -> tuple[str, str, Optional[int], Optional[str]]: """Register a device for a user and generate an access token. The access token will be limited by the homeserver's session_lifetime config. diff --git a/synapse/handlers/relations.py b/synapse/handlers/relations.py index b1158ee77d5..217681f7c0f 100644 --- a/synapse/handlers/relations.py +++ b/synapse/handlers/relations.py @@ -23,10 +23,7 @@ from typing import ( TYPE_CHECKING, Collection, - Dict, - FrozenSet, Iterable, - List, Mapping, Optional, Sequence, @@ -212,7 +209,7 @@ async def redact_events_related_to( requester: Requester, event_id: str, initial_redaction_event: EventBase, - relation_types: List[str], + relation_types: list[str], ) -> None: """Redacts all events related to the given event ID with one of the given relation types. @@ -267,7 +264,7 @@ async def redact_events_related_to( ) async def get_references_for_events( - self, event_ids: Collection[str], ignored_users: FrozenSet[str] = frozenset() + self, event_ids: Collection[str], ignored_users: frozenset[str] = frozenset() ) -> Mapping[str, Sequence[_RelatedEvent]]: """Get a list of references to the given events. @@ -308,11 +305,11 @@ async def get_references_for_events( async def _get_threads_for_events( self, - events_by_id: Dict[str, EventBase], - relations_by_id: Dict[str, str], + events_by_id: dict[str, EventBase], + relations_by_id: dict[str, str], user_id: str, - ignored_users: FrozenSet[str], - ) -> Dict[str, _ThreadAggregation]: + ignored_users: frozenset[str], + ) -> dict[str, _ThreadAggregation]: """Get the bundled aggregations for threads for the requested events. Args: @@ -437,7 +434,7 @@ async def _get_threads_for_events( @trace async def get_bundled_aggregations( self, events: Iterable[EventBase], user_id: str - ) -> Dict[str, BundledAggregations]: + ) -> dict[str, BundledAggregations]: """Generate bundled aggregations for events. Args: @@ -456,7 +453,7 @@ async def get_bundled_aggregations( # De-duplicated events by ID to handle the same event requested multiple times. events_by_id = {} # A map of event ID to the relation in that event, if there is one. - relations_by_id: Dict[str, str] = {} + relations_by_id: dict[str, str] = {} for event in events: # State events do not get bundled aggregations. 
if event.is_state(): @@ -479,7 +476,7 @@ async def get_bundled_aggregations( events_by_id[event.event_id] = event # event ID -> bundled aggregation in non-serialized form. - results: Dict[str, BundledAggregations] = {} + results: dict[str, BundledAggregations] = {} # Fetch any ignored users of the requesting user. ignored_users = await self._main_store.ignored_users(user_id) diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index db6dc5efd04..f242accef18 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -33,10 +33,7 @@ Any, Awaitable, Callable, - Dict, - List, Optional, - Tuple, cast, ) @@ -112,11 +109,11 @@ @attr.s(slots=True, frozen=True, auto_attribs=True) class EventContext: - events_before: List[EventBase] + events_before: list[EventBase] event: EventBase - events_after: List[EventBase] - state: List[EventBase] - aggregations: Dict[str, BundledAggregations] + events_after: list[EventBase] + state: list[EventBase] + aggregations: dict[str, BundledAggregations] start: str end: str @@ -143,7 +140,7 @@ def __init__(self, hs: "HomeServer"): ) # Room state based off defined presets - self._presets_dict: Dict[str, Dict[str, Any]] = { + self._presets_dict: dict[str, dict[str, Any]] = { RoomCreationPreset.PRIVATE_CHAT: { "join_rules": JoinRules.INVITE, "history_visibility": HistoryVisibility.SHARED, @@ -184,7 +181,7 @@ def __init__(self, hs: "HomeServer"): # If a user tries to update the same room multiple times in quick # succession, only process the first attempt and return its result to # subsequent requests - self._upgrade_response_cache: ResponseCache[Tuple[str, str]] = ResponseCache( + self._upgrade_response_cache: ResponseCache[tuple[str, str]] = ResponseCache( clock=hs.get_clock(), name="room_upgrade", server_name=self.server_name, @@ -201,7 +198,7 @@ async def upgrade_room( requester: Requester, old_room_id: str, new_version: RoomVersion, - additional_creators: Optional[List[str]], + additional_creators: Optional[list[str]], auto_member: bool = False, ratelimit: bool = True, ) -> str: @@ -339,14 +336,14 @@ async def _upgrade_room( self, requester: Requester, old_room_id: str, - old_room: Tuple[bool, str, bool], + old_room: tuple[bool, str, bool], new_room_id: str, new_version: RoomVersion, tombstone_event: EventBase, tombstone_context: synapse.events.snapshot.EventContext, - additional_creators: Optional[List[str]], + additional_creators: Optional[list[str]], creation_event_with_context: Optional[ - Tuple[EventBase, synapse.events.snapshot.EventContext] + tuple[EventBase, synapse.events.snapshot.EventContext] ] = None, auto_member: bool = False, ) -> str: @@ -437,7 +434,7 @@ async def _update_upgraded_room_pls( old_room_id: str, new_room_id: str, old_room_state: StateMap[str], - additional_creators: Optional[List[str]], + additional_creators: Optional[list[str]], ) -> None: """Send updated power levels in both rooms after an upgrade @@ -529,7 +526,7 @@ def _calculate_upgraded_room_creation_content( old_room_create_event: EventBase, tombstone_event_id: Optional[str], new_room_version: RoomVersion, - additional_creators: Optional[List[str]], + additional_creators: Optional[list[str]], ) -> JsonDict: creation_content: JsonDict = { "room_version": new_room_version.identifier, @@ -561,9 +558,9 @@ async def clone_existing_room( new_room_id: str, new_room_version: RoomVersion, tombstone_event_id: str, - additional_creators: Optional[List[str]], + additional_creators: Optional[list[str]], creation_event_with_context: Optional[ - Tuple[EventBase, 
synapse.events.snapshot.EventContext] + tuple[EventBase, synapse.events.snapshot.EventContext] ] = None, auto_member: bool = False, ) -> None: @@ -600,7 +597,7 @@ async def clone_existing_room( initial_state: MutableStateMap = {} # Replicate relevant room events - types_to_copy: List[Tuple[str, Optional[str]]] = [ + types_to_copy: list[tuple[str, Optional[str]]] = [ (EventTypes.JoinRules, ""), (EventTypes.Name, ""), (EventTypes.Topic, ""), @@ -1044,7 +1041,7 @@ async def create_room( ratelimit: bool = True, creator_join_profile: Optional[JsonDict] = None, ignore_forced_encryption: bool = False, - ) -> Tuple[str, Optional[RoomAlias], int]: + ) -> tuple[str, Optional[RoomAlias], int]: """Creates a new room. Args: @@ -1394,7 +1391,7 @@ async def _generate_create_event_for_room_id( creation_content: JsonDict, is_public: bool, room_version: RoomVersion, - ) -> Tuple[EventBase, synapse.events.snapshot.EventContext]: + ) -> tuple[EventBase, synapse.events.snapshot.EventContext]: ( creation_event, new_unpersisted_context, @@ -1426,7 +1423,7 @@ async def _send_events_for_new_room( room_id: str, room_version: RoomVersion, room_config: JsonDict, - invite_list: List[str], + invite_list: list[str], initial_state: MutableStateMap, creation_content: JsonDict, room_alias: Optional[RoomAlias] = None, @@ -1434,9 +1431,9 @@ async def _send_events_for_new_room( creator_join_profile: Optional[JsonDict] = None, ignore_forced_encryption: bool = False, creation_event_with_context: Optional[ - Tuple[EventBase, synapse.events.snapshot.EventContext] + tuple[EventBase, synapse.events.snapshot.EventContext] ] = None, - ) -> Tuple[int, str, int]: + ) -> tuple[int, str, int]: """Sends the initial events into a new room. Sends the room creation, membership, and power level events into the room sequentially, then creates and batches up the rest of the events to persist as a batch to the DB. @@ -1485,7 +1482,7 @@ async def _send_events_for_new_room( depth = 1 # the most recently created event - prev_event: List[str] = [] + prev_event: list[str] = [] # a map of event types, state keys -> event_ids. We collect these mappings this as events are # created (but not persisted to the db) to determine state for future created events # (as this info can't be pulled from the db) @@ -1496,7 +1493,7 @@ async def create_event( content: JsonDict, for_batch: bool, **kwargs: Any, - ) -> Tuple[EventBase, synapse.events.snapshot.UnpersistedEventContextBase]: + ) -> tuple[EventBase, synapse.events.snapshot.UnpersistedEventContextBase]: """ Creates an event and associated event context. Args: @@ -1792,7 +1789,7 @@ def _validate_room_config( f"You cannot create an encrypted room. user_level ({room_admin_level}) < send_level ({encryption_level})", ) - def _room_preset_config(self, room_config: JsonDict) -> Tuple[str, dict]: + def _room_preset_config(self, room_config: JsonDict) -> tuple[str, dict]: # The spec says rooms should default to private visibility if # `visibility` is not specified. 
visibility = room_config.get("visibility", "private") @@ -1814,9 +1811,9 @@ def _room_preset_config(self, room_config: JsonDict) -> Tuple[str, dict]: def _remove_creators_from_pl_users_map( self, - users_map: Dict[str, int], + users_map: dict[str, int], creator: str, - additional_creators: Optional[List[str]], + additional_creators: Optional[list[str]], ) -> None: creators = [creator] if additional_creators: @@ -1916,7 +1913,7 @@ async def get_event_context( # The user is peeking if they aren't in the room already is_peeking = not is_user_in_room - async def filter_evts(events: List[EventBase]) -> List[EventBase]: + async def filter_evts(events: list[EventBase]) -> list[EventBase]: if use_admin_priviledge: return events return await filter_events_for_client( @@ -2021,7 +2018,7 @@ async def get_event_for_timestamp( room_id: str, timestamp: int, direction: Direction, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """Find the closest event to the given timestamp in the given direction. If we can't find an event locally or the event we have locally is next to a gap, it will ask other federated homeservers for an event. @@ -2172,7 +2169,7 @@ async def get_new_events( room_ids: StrCollection, is_guest: bool, explicit_room_id: Optional[str] = None, - ) -> Tuple[List[EventBase], RoomStreamToken]: + ) -> tuple[list[EventBase], RoomStreamToken]: # We just ignore the key for now. to_key = self.get_current_key() diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py index 9d4307fb078..97a5d07c7cb 100644 --- a/synapse/handlers/room_list.py +++ b/synapse/handlers/room_list.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Any, List, Optional, Tuple +from typing import TYPE_CHECKING, Any, Optional import attr import msgpack @@ -67,14 +67,14 @@ def __init__(self, hs: "HomeServer"): self.hs = hs self.enable_room_list_search = hs.config.roomdirectory.enable_room_list_search self.response_cache: ResponseCache[ - Tuple[Optional[int], Optional[str], Optional[ThirdPartyInstanceID]] + tuple[Optional[int], Optional[str], Optional[ThirdPartyInstanceID]] ] = ResponseCache( clock=hs.get_clock(), name="room_list", server_name=self.server_name, ) self.remote_response_cache: ResponseCache[ - Tuple[str, Optional[int], Optional[str], bool, Optional[str]] + tuple[str, Optional[int], Optional[str], bool, Optional[str]] ] = ResponseCache( clock=hs.get_clock(), name="remote_room_list", @@ -175,7 +175,7 @@ async def _get_public_room_list( if since_token: batch_token = RoomListNextBatch.from_token(since_token) - bounds: Optional[Tuple[int, str]] = ( + bounds: Optional[tuple[int, str]] = ( batch_token.last_joined_members, batch_token.last_room_id, ) @@ -226,7 +226,7 @@ def build_room_entry(room: LargestRoomStats) -> JsonDict: return {k: v for k, v in entry.items() if v is not None} # Build a list of up to `limit` entries. 
- room_entries: List[JsonDict] = [] + room_entries: list[JsonDict] = [] rooms_iterator = results if forwards else reversed(results) # Track the first and last 'considered' rooms so that we can provide correct diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index 2ab9b70f8c5..03cfc992605 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -23,7 +23,7 @@ import logging import random from http import HTTPStatus -from typing import TYPE_CHECKING, Iterable, List, Optional, Set, Tuple +from typing import TYPE_CHECKING, Iterable, Optional from synapse import types from synapse.api.constants import ( @@ -217,11 +217,11 @@ def _on_user_joined_room(self, event_id: str, room_id: str) -> None: async def _remote_join( self, requester: Requester, - remote_room_hosts: List[str], + remote_room_hosts: list[str], room_id: str, user: UserID, content: dict, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """Try and join a room that this server is not in Args: @@ -241,11 +241,11 @@ async def _remote_join( async def remote_knock( self, requester: Requester, - remote_room_hosts: List[str], + remote_room_hosts: list[str], room_id: str, user: UserID, content: dict, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """Try and knock on a room that this server is not in Args: @@ -263,7 +263,7 @@ async def remote_reject_invite( txn_id: Optional[str], requester: Requester, content: JsonDict, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """ Rejects an out-of-band invite we have received from a remote server @@ -286,7 +286,7 @@ async def remote_rescind_knock( txn_id: Optional[str], requester: Requester, content: JsonDict, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """Rescind a local knock made on a remote room. Args: @@ -396,8 +396,8 @@ async def _local_membership_update( target: UserID, room_id: str, membership: str, - prev_event_ids: Optional[List[str]] = None, - state_event_ids: Optional[List[str]] = None, + prev_event_ids: Optional[list[str]] = None, + state_event_ids: Optional[list[str]] = None, depth: Optional[int] = None, txn_id: Optional[str] = None, ratelimit: bool = True, @@ -405,7 +405,7 @@ async def _local_membership_update( require_consent: bool = True, outlier: bool = False, origin_server_ts: Optional[int] = None, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """ Internal membership update function to get an existing event or create and persist a new event for the new membership change. @@ -573,18 +573,18 @@ async def update_membership( room_id: str, action: str, txn_id: Optional[str] = None, - remote_room_hosts: Optional[List[str]] = None, + remote_room_hosts: Optional[list[str]] = None, third_party_signed: Optional[dict] = None, ratelimit: bool = True, content: Optional[dict] = None, new_room: bool = False, require_consent: bool = True, outlier: bool = False, - prev_event_ids: Optional[List[str]] = None, - state_event_ids: Optional[List[str]] = None, + prev_event_ids: Optional[list[str]] = None, + state_event_ids: Optional[list[str]] = None, depth: Optional[int] = None, origin_server_ts: Optional[int] = None, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """Update a user's membership in a room. 
Params: @@ -687,18 +687,18 @@ async def update_membership_locked( room_id: str, action: str, txn_id: Optional[str] = None, - remote_room_hosts: Optional[List[str]] = None, + remote_room_hosts: Optional[list[str]] = None, third_party_signed: Optional[dict] = None, ratelimit: bool = True, content: Optional[dict] = None, new_room: bool = False, require_consent: bool = True, outlier: bool = False, - prev_event_ids: Optional[List[str]] = None, - state_event_ids: Optional[List[str]] = None, + prev_event_ids: Optional[list[str]] = None, + state_event_ids: Optional[list[str]] = None, depth: Optional[int] = None, origin_server_ts: Optional[int] = None, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """Helper for update_membership. Assumes that the membership linearizer is already held for the room. @@ -1224,12 +1224,12 @@ async def _should_perform_remote_join( self, user_id: str, room_id: str, - remote_room_hosts: List[str], + remote_room_hosts: list[str], content: JsonDict, is_partial_state_room: bool, is_host_in_room: bool, partial_state_before_join: StateMap[str], - ) -> Tuple[bool, List[str]]: + ) -> tuple[bool, list[str]]: """ Check whether the server should do a remote join (as opposed to a local join) for a user. @@ -1565,7 +1565,7 @@ async def kick_guest_users(self, current_state: Iterable[EventBase]) -> None: async def lookup_room_alias( self, room_alias: RoomAlias - ) -> Tuple[RoomID, List[str]]: + ) -> tuple[RoomID, list[str]]: """ Get the room ID associated with a room alias. @@ -1612,9 +1612,9 @@ async def do_3pid_invite( requester: Requester, txn_id: Optional[str], id_access_token: str, - prev_event_ids: Optional[List[str]] = None, + prev_event_ids: Optional[list[str]] = None, depth: Optional[int] = None, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """Invite a 3PID to a room. Args: @@ -1726,9 +1726,9 @@ async def _make_and_store_3pid_invite( user: UserID, txn_id: Optional[str], id_access_token: str, - prev_event_ids: Optional[List[str]] = None, + prev_event_ids: Optional[list[str]] = None, depth: Optional[int] = None, - ) -> Tuple[EventBase, int]: + ) -> tuple[EventBase, int]: room_state = await self._storage_controllers.state.get_current_state( room_id, StateFilter.from_types( @@ -1863,7 +1863,7 @@ def __init__(self, hs: "HomeServer"): self.distributor.declare("user_left_room") async def _is_remote_room_too_complex( - self, room_id: str, remote_room_hosts: List[str] + self, room_id: str, remote_room_hosts: list[str] ) -> Optional[bool]: """ Check if complexity of a remote room is too great. 
@@ -1899,11 +1899,11 @@ async def _is_local_room_too_complex(self, room_id: str) -> bool: async def _remote_join( self, requester: Requester, - remote_room_hosts: List[str], + remote_room_hosts: list[str], room_id: str, user: UserID, content: dict, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """Implements RoomMemberHandler._remote_join""" # filter ourselves out of remote_room_hosts: do_invite_join ignores it # and if it is the only entry we'd like to return a 404 rather than a @@ -1980,7 +1980,7 @@ async def remote_reject_invite( txn_id: Optional[str], requester: Requester, content: JsonDict, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """ Rejects an out-of-band invite received from a remote user @@ -2017,7 +2017,7 @@ async def remote_rescind_knock( txn_id: Optional[str], requester: Requester, content: JsonDict, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """ Rescinds a local knock made on a remote room @@ -2046,7 +2046,7 @@ async def _generate_local_out_of_band_leave( txn_id: Optional[str], requester: Requester, content: JsonDict, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """Generate a local leave event for a room This can be called after we e.g fail to reject an invite via a remote server. @@ -2126,11 +2126,11 @@ async def _generate_local_out_of_band_leave( async def remote_knock( self, requester: Requester, - remote_room_hosts: List[str], + remote_room_hosts: list[str], room_id: str, user: UserID, content: dict, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """Sends a knock to a room. Attempts to do so via one remote out of a given list. Args: @@ -2270,7 +2270,7 @@ async def _unsafe_process(self) -> None: await self._store.update_room_forgetter_stream_pos(max_pos) - async def _handle_deltas(self, deltas: List[StateDelta]) -> None: + async def _handle_deltas(self, deltas: list[StateDelta]) -> None: """Called with the state deltas to process""" for delta in deltas: if delta.event_type != EventTypes.Member: @@ -2300,7 +2300,7 @@ async def _handle_deltas(self, deltas: List[StateDelta]) -> None: raise -def get_users_which_can_issue_invite(auth_events: StateMap[EventBase]) -> List[str]: +def get_users_which_can_issue_invite(auth_events: StateMap[EventBase]) -> list[str]: """ Return the list of users which can issue invites. @@ -2346,7 +2346,7 @@ def get_users_which_can_issue_invite(auth_events: StateMap[EventBase]) -> List[s return result -def get_servers_from_users(users: List[str]) -> Set[str]: +def get_servers_from_users(users: list[str]) -> set[str]: """ Resolve a list of users into their servers. 
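`get_servers_from_users`, retyped above to return `set[str]`, resolves a list of user IDs to the servers they belong to. A naive sketch of that resolution under the new annotations (the real code goes through proper UserID parsing; this simply splits on the first colon):

    def servers_of(users: list[str]) -> set[str]:
        servers: set[str] = set()
        for user in users:
            # "@localpart:server" -> keep the server part, skip malformed IDs
            _, _, server = user.partition(":")
            if server:
                servers.add(server)
        return servers

    assert servers_of(["@alice:example.org", "@bob:matrix.org"]) == {
        "example.org",
        "matrix.org",
    }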
diff --git a/synapse/handlers/room_member_worker.py b/synapse/handlers/room_member_worker.py index 0616a9864d9..0927c031f71 100644 --- a/synapse/handlers/room_member_worker.py +++ b/synapse/handlers/room_member_worker.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, List, Optional, Tuple +from typing import TYPE_CHECKING, Optional from synapse.handlers.room_member import NoKnownServersError, RoomMemberHandler from synapse.replication.http.membership import ( @@ -51,11 +51,11 @@ def __init__(self, hs: "HomeServer"): async def _remote_join( self, requester: Requester, - remote_room_hosts: List[str], + remote_room_hosts: list[str], room_id: str, user: UserID, content: dict, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """Implements RoomMemberHandler._remote_join""" if len(remote_room_hosts) == 0: raise NoKnownServersError() @@ -76,7 +76,7 @@ async def remote_reject_invite( txn_id: Optional[str], requester: Requester, content: dict, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """ Rejects an out-of-band invite received from a remote user @@ -96,7 +96,7 @@ async def remote_rescind_knock( txn_id: Optional[str], requester: Requester, content: JsonDict, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """ Rescinds a local knock made on a remote room @@ -121,11 +121,11 @@ async def remote_rescind_knock( async def remote_knock( self, requester: Requester, - remote_room_hosts: List[str], + remote_room_hosts: list[str], room_id: str, user: UserID, content: dict, - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """Sends a knock to a room. Implements RoomMemberHandler.remote_knock diff --git a/synapse/handlers/room_summary.py b/synapse/handlers/room_summary.py index 838fee6a303..a9482020561 100644 --- a/synapse/handlers/room_summary.py +++ b/synapse/handlers/room_summary.py @@ -22,7 +22,7 @@ import itertools import logging import re -from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Sequence, Set, Tuple +from typing import TYPE_CHECKING, Iterable, Optional, Sequence import attr @@ -83,9 +83,9 @@ class _PaginationSession: # The time the pagination session was created, in milliseconds. creation_time_ms: int # The queue of rooms which are still to process. - room_queue: List["_RoomQueueEntry"] + room_queue: list["_RoomQueueEntry"] # A set of rooms which have been processed. - processed_rooms: Set[str] + processed_rooms: set[str] class RoomSummaryHandler: @@ -112,14 +112,14 @@ def __init__(self, hs: "HomeServer"): # If a user tries to fetch the same page multiple times in quick succession, # only process the first attempt and return its result to subsequent requests. self._pagination_response_cache: ResponseCache[ - Tuple[ + tuple[ str, str, bool, Optional[int], Optional[int], Optional[str], - Optional[Tuple[str, ...]], + Optional[tuple[str, ...]], ] ] = ResponseCache( clock=hs.get_clock(), @@ -136,7 +136,7 @@ async def get_room_hierarchy( max_depth: Optional[int] = None, limit: Optional[int] = None, from_token: Optional[str] = None, - remote_room_hosts: Optional[Tuple[str, ...]] = None, + remote_room_hosts: Optional[tuple[str, ...]] = None, ) -> JsonDict: """ Implementation of the room hierarchy C-S API. 
@@ -196,7 +196,7 @@ async def _get_room_hierarchy( max_depth: Optional[int] = None, limit: Optional[int] = None, from_token: Optional[str] = None, - remote_room_hosts: Optional[Tuple[str, ...]] = None, + remote_room_hosts: Optional[tuple[str, ...]] = None, ) -> JsonDict: """See docstring for SpaceSummaryHandler.get_room_hierarchy.""" @@ -262,7 +262,7 @@ async def _get_room_hierarchy( # Rooms we have already processed. processed_rooms = set() - rooms_result: List[JsonDict] = [] + rooms_result: list[JsonDict] = [] # Cap the limit to a server-side maximum. if limit is None: @@ -286,12 +286,12 @@ async def _get_room_hierarchy( # federation. The rationale for caching these and *maybe* using them # is to prefer any information local to the homeserver before trusting # data received over federation. - children_room_entries: Dict[str, JsonDict] = {} + children_room_entries: dict[str, JsonDict] = {} # A set of room IDs which are children that did not have information # returned over federation and are known to be inaccessible to the # current server. We should not reach out over federation to try to # summarise these rooms. - inaccessible_children: Set[str] = set() + inaccessible_children: set[str] = set() # If the room is known locally, summarise it! is_in_room = await self._store.is_host_joined(room_id, self._server_name) @@ -418,8 +418,8 @@ async def get_federation_hierarchy( # Room is inaccessible to the requesting server. raise SynapseError(404, "Unknown room: %s" % (requested_room_id,)) - children_rooms_result: List[JsonDict] = [] - inaccessible_children: List[str] = [] + children_rooms_result: list[JsonDict] = [] + inaccessible_children: list[str] = [] # Iterate through each child and potentially add it, but not its children, # to the response. @@ -496,7 +496,7 @@ async def _summarize_local_room( # we only care about suggested children child_events = filter(_is_suggested_child_event, child_events) - stripped_events: List[JsonDict] = [ + stripped_events: list[JsonDict] = [ { "type": e.type, "state_key": e.state_key, @@ -510,7 +510,7 @@ async def _summarize_local_room( async def _summarize_remote_room_hierarchy( self, room: "_RoomQueueEntry", suggested_only: bool - ) -> Tuple[Optional["_RoomEntry"], Dict[str, JsonDict], Set[str]]: + ) -> tuple[Optional["_RoomEntry"], dict[str, JsonDict], set[str]]: """ Request room entries and a list of event entries for a given room by querying a remote server. @@ -835,7 +835,7 @@ async def get_room_summary( self, requester: Optional[str], room_id: str, - remote_room_hosts: Optional[List[str]] = None, + remote_room_hosts: Optional[list[str]] = None, ) -> JsonDict: """ Implementation of the room summary C-S API from MSC3266 @@ -995,7 +995,7 @@ def _is_suggested_child_event(edge_event: EventBase) -> bool: def _child_events_comparison_key( child: EventBase, -) -> Tuple[bool, Optional[str], int, str]: +) -> tuple[bool, Optional[str], int, str]: """ Generate a value for comparing two child events for ordering. 
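`_child_events_comparison_key` above now returns `tuple[bool, Optional[str], int, str]`; the shape works because tuples compare element-wise, and the leading bool guarantees the `Optional[str]` slot is only ever ordered between two non-None values. A small sketch of the same trick, with invented fields:

    from typing import Optional

    def sort_key(
        order: Optional[str], ts: int, room_id: str
    ) -> tuple[bool, Optional[str], int, str]:
        # Children with an explicit order sort first (False < True); when the
        # bools are equal, the order values are either both strings or both
        # None, so no None-vs-str comparison can occur.
        return (order is None, order, ts, room_id)

    children = [(None, 20, "!b:x"), ("aaa", 10, "!a:x")]
    children.sort(key=lambda c: sort_key(*c))
    # the explicitly ordered child ("aaa", ...) sorts first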
diff --git a/synapse/handlers/saml.py b/synapse/handlers/saml.py index 81bec7499c8..e91c25cabca 100644 --- a/synapse/handlers/saml.py +++ b/synapse/handlers/saml.py @@ -20,13 +20,13 @@ # import logging import re -from typing import TYPE_CHECKING, Callable, Dict, Optional, Set, Tuple +from typing import TYPE_CHECKING, Callable, Optional import attr + import saml2 import saml2.response from saml2.client import Saml2Client - from synapse.api.errors import SynapseError from synapse.config import ConfigError from synapse.handlers.sso import MappingException, UserAttributes @@ -90,7 +90,7 @@ def __init__(self, hs: "HomeServer"): self.idp_brand = hs.config.saml2.idp_brand # a map from saml session id to Saml2SessionData object - self._outstanding_requests_dict: Dict[str, Saml2SessionData] = {} + self._outstanding_requests_dict: dict[str, Saml2SessionData] = {} self._sso_handler = hs.get_sso_handler() self._sso_handler.register_identity_provider(self) @@ -393,7 +393,7 @@ def dot_replace_for_mxid(username: str) -> str: return username -MXID_MAPPER_MAP: Dict[str, Callable[[str], str]] = { +MXID_MAPPER_MAP: dict[str, Callable[[str], str]] = { "hexencode": map_username_to_mxid_localpart, "dotreplace": dot_replace_for_mxid, } @@ -509,7 +509,7 @@ def parse_config(config: dict) -> SamlConfig: return SamlConfig(mxid_source_attribute, mxid_mapper) @staticmethod - def get_saml_attributes(config: SamlConfig) -> Tuple[Set[str], Set[str]]: + def get_saml_attributes(config: SamlConfig) -> tuple[set[str], set[str]]: """Returns the required attributes of a SAML Args: diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py index 1a71135d5fa..8f39c6ec6bd 100644 --- a/synapse/handlers/search.py +++ b/synapse/handlers/search.py @@ -21,7 +21,7 @@ import itertools import logging -from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Set, Tuple +from typing import TYPE_CHECKING, Iterable, Optional import attr from unpaddedbase64 import decode_base64, encode_base64 @@ -46,13 +46,13 @@ class _SearchResult: # The count of results. count: int # A mapping of event ID to the rank of that event. - rank_map: Dict[str, int] + rank_map: dict[str, int] # A list of the resulting events. - allowed_events: List[EventBase] + allowed_events: list[EventBase] # A map of room ID to results. - room_groups: Dict[str, JsonDict] + room_groups: dict[str, JsonDict] # A set of event IDs to highlight. 
- highlights: Set[str] + highlights: set[str] class SearchHandler: @@ -230,11 +230,11 @@ async def _search( batch_group_key: Optional[str], batch_token: Optional[str], search_term: str, - keys: List[str], + keys: list[str], filter_dict: JsonDict, order_by: str, include_state: bool, - group_keys: List[str], + group_keys: list[str], event_context: Optional[bool], before_limit: Optional[int], after_limit: Optional[int], @@ -286,7 +286,7 @@ async def _search( # If doing a subset of all rooms search, check if any of the rooms # are from an upgraded room, and search their contents as well if search_filter.rooms: - historical_room_ids: List[str] = [] + historical_room_ids: list[str] = [] for room_id in search_filter.rooms: # Add any previous rooms to the search if they exist ids = await self.get_old_rooms_from_upgraded_room(room_id) @@ -307,7 +307,7 @@ async def _search( } } - sender_group: Optional[Dict[str, JsonDict]] + sender_group: Optional[dict[str, JsonDict]] if order_by == "rank": search_result, sender_group = await self._search_by_rank( @@ -442,7 +442,7 @@ async def _search_by_rank( search_term: str, keys: Iterable[str], search_filter: Filter, - ) -> Tuple[_SearchResult, Dict[str, JsonDict]]: + ) -> tuple[_SearchResult, dict[str, JsonDict]]: """ Performs a full text search for a user ordering by rank. @@ -461,9 +461,9 @@ async def _search_by_rank( """ rank_map = {} # event_id -> rank of event # Holds result of grouping by room, if applicable - room_groups: Dict[str, JsonDict] = {} + room_groups: dict[str, JsonDict] = {} # Holds result of grouping by sender, if applicable - sender_group: Dict[str, JsonDict] = {} + sender_group: dict[str, JsonDict] = {} search_result = await self.store.search_msgs(room_ids, search_term, keys) @@ -520,7 +520,7 @@ async def _search_by_recent( batch_group: Optional[str], batch_group_key: Optional[str], batch_token: Optional[str], - ) -> Tuple[_SearchResult, Optional[str]]: + ) -> tuple[_SearchResult, Optional[str]]: """ Performs a full text search for a user ordering by recent. @@ -542,14 +542,14 @@ async def _search_by_recent( """ rank_map = {} # event_id -> rank of event # Holds result of grouping by room, if applicable - room_groups: Dict[str, JsonDict] = {} + room_groups: dict[str, JsonDict] = {} # Holds the next_batch for the entire result set if one of those exists global_next_batch = None highlights = set() - room_events: List[EventBase] = [] + room_events: list[EventBase] = [] i = 0 pagination_token = batch_token @@ -632,11 +632,11 @@ async def _search_by_recent( async def _calculate_event_contexts( self, user: UserID, - allowed_events: List[EventBase], + allowed_events: list[EventBase], before_limit: int, after_limit: int, include_profile: bool, - ) -> Dict[str, JsonDict]: + ) -> dict[str, JsonDict]: """ Calculates the contextual events for any search results. 
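The search handler's `room_groups`/`sender_group` maps, retyped above as `dict[str, JsonDict]`, follow a bucketing pattern that PEP 585 lets us spell without any `typing` import. A minimal sketch of grouping results by room, with a simplified event type:

    def group_by_room(events: list[dict]) -> dict[str, list[dict]]:
        """Bucket search results per room with setdefault."""
        room_groups: dict[str, list[dict]] = {}
        for event in events:
            room_groups.setdefault(event["room_id"], []).append(event)
        return room_groups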
diff --git a/synapse/handlers/send_email.py b/synapse/handlers/send_email.py index 6469b182c8a..02fd48dbadd 100644 --- a/synapse/handlers/send_email.py +++ b/synapse/handlers/send_email.py @@ -24,7 +24,7 @@ from email.mime.multipart import MIMEMultipart from email.mime.text import MIMEText from io import BytesIO -from typing import TYPE_CHECKING, Dict, Optional +from typing import TYPE_CHECKING, Optional from twisted.internet.defer import Deferred from twisted.internet.endpoints import HostnameEndpoint @@ -136,7 +136,7 @@ async def send_email( app_name: str, html: str, text: str, - additional_headers: Optional[Dict[str, str]] = None, + additional_headers: Optional[dict[str, str]] = None, ) -> None: """Send a multipart email with the given information. diff --git a/synapse/handlers/sliding_sync/__init__.py b/synapse/handlers/sliding_sync/__init__.py index 255a041d0eb..cea4b857ee0 100644 --- a/synapse/handlers/sliding_sync/__init__.py +++ b/synapse/handlers/sliding_sync/__init__.py @@ -15,7 +15,7 @@ import itertools import logging from itertools import chain -from typing import TYPE_CHECKING, AbstractSet, Dict, List, Mapping, Optional, Set, Tuple +from typing import TYPE_CHECKING, AbstractSet, Mapping, Optional from prometheus_client import Histogram from typing_extensions import assert_never @@ -116,7 +116,7 @@ async def wait_for_sync_for_user( sync_config: SlidingSyncConfig, from_token: Optional[SlidingSyncStreamToken] = None, timeout_ms: int = 0, - ) -> Tuple[SlidingSyncResult, bool]: + ) -> tuple[SlidingSyncResult, bool]: """ Get the sync for a client if we have new data for it now. Otherwise wait for new data to arrive on the server. If the timeout expires, then @@ -262,7 +262,7 @@ async def current_sync_for_user( relevant_rooms_to_send_map = interested_rooms.relevant_rooms_to_send_map # Fetch room data - rooms: Dict[str, SlidingSyncResult.RoomResult] = {} + rooms: dict[str, SlidingSyncResult.RoomResult] = {} new_connection_state = previous_connection_state.get_mutable() @@ -490,7 +490,7 @@ async def get_current_state_deltas_for_room( room_membership_for_user_at_to_token: RoomsForUserType, from_token: RoomStreamToken, to_token: RoomStreamToken, - ) -> List[StateDelta]: + ) -> list[StateDelta]: """ Get the state deltas between two tokens taking into account the user's membership. If the user is LEAVE/BAN, we will only get the state deltas up to @@ -677,8 +677,8 @@ async def get_room_sync_data( # membership. Currently, we have to make all of these optional because # `invite`/`knock` rooms only have `stripped_state`. See # https://github.com/matrix-org/matrix-spec-proposals/pull/3575#discussion_r1653045932 - timeline_events: List[EventBase] = [] - bundled_aggregations: Optional[Dict[str, BundledAggregations]] = None + timeline_events: list[EventBase] = [] + bundled_aggregations: Optional[dict[str, BundledAggregations]] = None limited: Optional[bool] = None prev_batch_token: Optional[StreamToken] = None num_live: Optional[int] = None @@ -813,7 +813,7 @@ async def get_room_sync_data( # Figure out any stripped state events for invite/knocks. This allows the # potential joiner to identify the room. - stripped_state: List[JsonDict] = [] + stripped_state: list[JsonDict] = [] if room_membership_for_user_at_to_token.membership in ( Membership.INVITE, Membership.KNOCK, @@ -924,7 +924,7 @@ async def get_room_sync_data( # see https://github.com/matrix-org/matrix-spec/issues/380. 
This means that # clients won't be able to calculate the room name when necessary and just a # pitfall we have to deal with until that spec issue is resolved. - hero_user_ids: List[str] = [] + hero_user_ids: list[str] = [] # TODO: Should we also check for `EventTypes.CanonicalAlias` # (`m.room.canonical_alias`) as a fallback for the room name? see # https://github.com/matrix-org/matrix-spec-proposals/pull/3575#discussion_r1671260153 @@ -1036,7 +1036,7 @@ async def get_room_sync_data( ) required_state_filter = StateFilter.all() else: - required_state_types: List[Tuple[str, Optional[str]]] = [] + required_state_types: list[tuple[str, Optional[str]]] = [] num_wild_state_keys = 0 lazy_load_room_members = False num_others = 0 @@ -1057,7 +1057,7 @@ async def get_room_sync_data( lazy_load_room_members = True # Everyone in the timeline is relevant - timeline_membership: Set[str] = set() + timeline_membership: set[str] = set() if timeline_events is not None: for timeline_event in timeline_events: # Anyone who sent a message is relevant @@ -1219,7 +1219,7 @@ async def get_room_sync_data( room_avatar = avatar_event.content.get("url") # Assemble heroes: extract the info from the state we just fetched - heroes: List[SlidingSyncResult.RoomResult.StrippedHero] = [] + heroes: list[SlidingSyncResult.RoomResult.StrippedHero] = [] for hero_user_id in hero_user_ids: member_event = room_state.get((EventTypes.Member, hero_user_id)) if member_event is not None: @@ -1374,7 +1374,7 @@ async def _get_bump_stamp( self, room_id: str, to_token: StreamToken, - timeline: List[EventBase], + timeline: list[EventBase], check_outside_timeline: bool, ) -> Optional[int]: """Get a bump stamp for the room, if we have a bump event and it has @@ -1479,7 +1479,7 @@ def _required_state_changes( prev_required_state_map: Mapping[str, AbstractSet[str]], request_required_state_map: Mapping[str, AbstractSet[str]], state_deltas: StateMap[str], -) -> Tuple[Optional[Mapping[str, AbstractSet[str]]], StateFilter]: +) -> tuple[Optional[Mapping[str, AbstractSet[str]]], StateFilter]: """Calculates the changes between the required state room config from the previous requests compared with the current request. @@ -1524,15 +1524,15 @@ def _required_state_changes( # Contains updates to the required state map compared with the previous room # config. This has the same format as `RoomSyncConfig.required_state` - changes: Dict[str, AbstractSet[str]] = {} + changes: dict[str, AbstractSet[str]] = {} # The set of types/state keys that we need to fetch and return to the # client. Passed to `StateFilter.from_types(...)` - added: List[Tuple[str, Optional[str]]] = [] + added: list[tuple[str, Optional[str]]] = [] # Convert the list of state deltas to map from type to state_keys that have # changed. 
- changed_types_to_state_keys: Dict[str, Set[str]] = {} + changed_types_to_state_keys: dict[str, set[str]] = {} for event_type, state_key in state_deltas: changed_types_to_state_keys.setdefault(event_type, set()).add(state_key) diff --git a/synapse/handlers/sliding_sync/extensions.py b/synapse/handlers/sliding_sync/extensions.py index 25ee954b7fd..221af86f7db 100644 --- a/synapse/handlers/sliding_sync/extensions.py +++ b/synapse/handlers/sliding_sync/extensions.py @@ -18,12 +18,10 @@ TYPE_CHECKING, AbstractSet, ChainMap, - Dict, Mapping, MutableMapping, Optional, Sequence, - Set, cast, ) @@ -85,7 +83,7 @@ async def get_extensions_response( previous_connection_state: "PerConnectionState", new_connection_state: "MutablePerConnectionState", actual_lists: Mapping[str, SlidingSyncResult.SlidingWindowList], - actual_room_ids: Set[str], + actual_room_ids: set[str], actual_room_response_map: Mapping[str, SlidingSyncResult.RoomResult], to_token: StreamToken, from_token: Optional[SlidingSyncStreamToken], @@ -208,7 +206,7 @@ def find_relevant_room_ids_for_extension( requested_room_ids: Optional[StrCollection], actual_lists: Mapping[str, SlidingSyncResult.SlidingWindowList], actual_room_ids: AbstractSet[str], - ) -> Set[str]: + ) -> set[str]: """ Handle the reserved `lists`/`rooms` keys for extensions. Extensions should only return results for rooms in the Sliding Sync response. This matches up the @@ -231,7 +229,7 @@ def find_relevant_room_ids_for_extension( # We only want to include account data for rooms that are already in the sliding # sync response AND that were requested in the account data request. - relevant_room_ids: Set[str] = set() + relevant_room_ids: set[str] = set() # See what rooms from the room subscriptions we should get account data for if requested_room_ids is not None: @@ -406,7 +404,7 @@ async def get_account_data_extension_response( previous_connection_state: "PerConnectionState", new_connection_state: "MutablePerConnectionState", actual_lists: Mapping[str, SlidingSyncResult.SlidingWindowList], - actual_room_ids: Set[str], + actual_room_ids: set[str], account_data_request: SlidingSyncConfig.Extensions.AccountDataExtension, to_token: StreamToken, from_token: Optional[SlidingSyncStreamToken], @@ -481,7 +479,7 @@ async def get_account_data_extension_response( # down account data previously or not, so we split the relevant # rooms up into different collections based on status. live_rooms = set() - previously_rooms: Dict[str, int] = {} + previously_rooms: dict[str, int] = {} initial_rooms = set() for room_id in relevant_room_ids: @@ -638,7 +636,7 @@ async def get_receipts_extension_response( previous_connection_state: "PerConnectionState", new_connection_state: "MutablePerConnectionState", actual_lists: Mapping[str, SlidingSyncResult.SlidingWindowList], - actual_room_ids: Set[str], + actual_room_ids: set[str], actual_room_response_map: Mapping[str, SlidingSyncResult.RoomResult], receipts_request: SlidingSyncConfig.Extensions.ReceiptsExtension, to_token: StreamToken, @@ -671,13 +669,13 @@ async def get_receipts_extension_response( actual_room_ids=actual_room_ids, ) - room_id_to_receipt_map: Dict[str, JsonMapping] = {} + room_id_to_receipt_map: dict[str, JsonMapping] = {} if len(relevant_room_ids) > 0: # We need to handle the different cases depending on if we have sent # down receipts previously or not, so we split the relevant rooms # up into different collections based on status. 
live_rooms = set() - previously_rooms: Dict[str, MultiWriterStreamToken] = {} + previously_rooms: dict[str, MultiWriterStreamToken] = {} initial_rooms = set() for room_id in relevant_room_ids: @@ -842,7 +840,7 @@ async def get_typing_extension_response( self, sync_config: SlidingSyncConfig, actual_lists: Mapping[str, SlidingSyncResult.SlidingWindowList], - actual_room_ids: Set[str], + actual_room_ids: set[str], actual_room_response_map: Mapping[str, SlidingSyncResult.RoomResult], typing_request: SlidingSyncConfig.Extensions.TypingExtension, to_token: StreamToken, @@ -872,7 +870,7 @@ async def get_typing_extension_response( actual_room_ids=actual_room_ids, ) - room_id_to_typing_map: Dict[str, JsonMapping] = {} + room_id_to_typing_map: dict[str, JsonMapping] = {} if len(relevant_room_ids) > 0: # Note: We don't need to take connection tracking into account for typing # notifications because they'll get anything still relevant and hasn't timed @@ -942,8 +940,8 @@ async def get_thread_subscriptions_extension_response( if len(updates) == 0: return None - subscribed_threads: Dict[str, Dict[str, _ThreadSubscription]] = {} - unsubscribed_threads: Dict[str, Dict[str, _ThreadUnsubscription]] = {} + subscribed_threads: dict[str, dict[str, _ThreadSubscription]] = {} + unsubscribed_threads: dict[str, dict[str, _ThreadUnsubscription]] = {} for stream_id, room_id, thread_root_id, subscribed, automatic in updates: if subscribed: subscribed_threads.setdefault(room_id, {})[thread_root_id] = ( diff --git a/synapse/handlers/sliding_sync/room_lists.py b/synapse/handlers/sliding_sync/room_lists.py index 19116590f74..fc77fd3c651 100644 --- a/synapse/handlers/sliding_sync/room_lists.py +++ b/synapse/handlers/sliding_sync/room_lists.py @@ -18,14 +18,10 @@ from typing import ( TYPE_CHECKING, AbstractSet, - Dict, - List, Literal, Mapping, MutableMapping, Optional, - Set, - Tuple, Union, cast, ) @@ -113,7 +109,7 @@ class SlidingSyncInterestedRooms: lists: Mapping[str, SlidingSyncResult.SlidingWindowList] relevant_room_map: Mapping[str, RoomSyncConfig] relevant_rooms_to_send_map: Mapping[str, RoomSyncConfig] - all_rooms: Set[str] + all_rooms: set[str] room_membership_for_user_map: Mapping[str, RoomsForUserType] newly_joined_rooms: AbstractSet[str] @@ -231,12 +227,12 @@ async def _compute_interested_rooms_new_tables( user_id = sync_config.user.to_string() # Assemble sliding window lists - lists: Dict[str, SlidingSyncResult.SlidingWindowList] = {} + lists: dict[str, SlidingSyncResult.SlidingWindowList] = {} # Keep track of the rooms that we can display and need to fetch more info about - relevant_room_map: Dict[str, RoomSyncConfig] = {} + relevant_room_map: dict[str, RoomSyncConfig] = {} # The set of room IDs of all rooms that could appear in any list. These # include rooms that are outside the list ranges. - all_rooms: Set[str] = set() + all_rooms: set[str] = set() # Note: this won't include rooms the user has left themselves. We add back # `newly_left` rooms below. 
This is more efficient than fetching all rooms and @@ -472,7 +468,7 @@ async def _compute_interested_rooms_new_tables( all_rooms.update(filtered_sync_room_map) - ops: List[SlidingSyncResult.SlidingWindowList.Operation] = [] + ops: list[SlidingSyncResult.SlidingWindowList.Operation] = [] if list_config.ranges: # Optimization: If we are asking for the full range, we don't @@ -487,7 +483,7 @@ async def _compute_interested_rooms_new_tables( and list_config.ranges[0][1] >= len(filtered_sync_room_map) - 1 ): - sorted_room_info: List[RoomsForUserType] = list( + sorted_room_info: list[RoomsForUserType] = list( filtered_sync_room_map.values() ) else: @@ -496,7 +492,7 @@ async def _compute_interested_rooms_new_tables( # Cast is safe because RoomsForUserSlidingSync is part # of the `RoomsForUserType` union. Why can't it detect this? cast( - Dict[str, RoomsForUserType], filtered_sync_room_map + dict[str, RoomsForUserType], filtered_sync_room_map ), to_token, # We only need to sort the rooms up to the end @@ -506,7 +502,7 @@ async def _compute_interested_rooms_new_tables( ) for range in list_config.ranges: - room_ids_in_list: List[str] = [] + room_ids_in_list: list[str] = [] # We're going to loop through the sorted list of rooms starting # at the range start index and keep adding rooms until we fill @@ -639,12 +635,12 @@ async def _compute_interested_rooms_fallback( dm_room_ids = await self._get_dm_rooms_for_user(sync_config.user.to_string()) # Assemble sliding window lists - lists: Dict[str, SlidingSyncResult.SlidingWindowList] = {} + lists: dict[str, SlidingSyncResult.SlidingWindowList] = {} # Keep track of the rooms that we can display and need to fetch more info about - relevant_room_map: Dict[str, RoomSyncConfig] = {} + relevant_room_map: dict[str, RoomSyncConfig] = {} # The set of room IDs of all rooms that could appear in any list. These # include rooms that are outside the list ranges. - all_rooms: Set[str] = set() + all_rooms: set[str] = set() if sync_config.lists: with start_active_span("assemble_sliding_window_lists"): @@ -691,10 +687,10 @@ async def _compute_interested_rooms_fallback( filtered_sync_room_map, to_token ) - ops: List[SlidingSyncResult.SlidingWindowList.Operation] = [] + ops: list[SlidingSyncResult.SlidingWindowList.Operation] = [] if list_config.ranges: for range in list_config.ranges: - room_ids_in_list: List[str] = [] + room_ids_in_list: list[str] = [] # We're going to loop through the sorted list of rooms starting # at the range start index and keep adding rooms until we fill @@ -811,14 +807,14 @@ async def _filter_relevant_rooms_to_send( self, previous_connection_state: PerConnectionState, from_token: Optional[StreamToken], - relevant_room_map: Dict[str, RoomSyncConfig], - ) -> Dict[str, RoomSyncConfig]: + relevant_room_map: dict[str, RoomSyncConfig], + ) -> dict[str, RoomSyncConfig]: """Filters the `relevant_room_map` down to those rooms that may have updates we need to fetch and return.""" # Filtered subset of `relevant_room_map` for rooms that may have updates # (in the event stream) - relevant_rooms_to_send_map: Dict[str, RoomSyncConfig] = relevant_room_map + relevant_rooms_to_send_map: dict[str, RoomSyncConfig] = relevant_room_map if relevant_room_map: with start_active_span("filter_relevant_rooms_to_send"): if from_token: @@ -908,7 +904,7 @@ async def _get_rewind_changes_to_current_membership_to_token( # # First, we need to get the max stream_ordering of each event persister instance # that we queried events from. 
- instance_to_max_stream_ordering_map: Dict[str, int] = {} + instance_to_max_stream_ordering_map: dict[str, int] = {} for room_for_user in rooms_for_user.values(): instance_name = room_for_user.event_pos.instance_name stream_ordering = room_for_user.event_pos.stream @@ -966,12 +962,12 @@ async def _get_rewind_changes_to_current_membership_to_token( # Otherwise we're about to make changes to `rooms_for_user`, so we turn # it into a mutable dict. - changes: Dict[str, Optional[RoomsForUser]] = {} + changes: dict[str, Optional[RoomsForUser]] = {} # Assemble a list of the first membership event after the `to_token` so we can # step backward to the previous membership that would apply to the from/to # range. - first_membership_change_by_room_id_after_to_token: Dict[ + first_membership_change_by_room_id_after_to_token: dict[ str, CurrentStateDeltaMembership ] = {} for membership_change in current_state_delta_membership_changes_after_to_token: @@ -1033,7 +1029,7 @@ async def get_room_membership_for_user_at_to_token( user: UserID, to_token: StreamToken, from_token: Optional[StreamToken], - ) -> Tuple[Dict[str, RoomsForUserType], AbstractSet[str], AbstractSet[str]]: + ) -> tuple[dict[str, RoomsForUserType], AbstractSet[str], AbstractSet[str]]: """ Fetch room IDs that the user has had membership in (the full room list including long-lost left rooms that will be filtered, sorted, and sliced). @@ -1108,7 +1104,7 @@ async def get_room_membership_for_user_at_to_token( # Since we fetched the users room list at some point in time after the # tokens, we need to revert/rewind some membership changes to match the point in # time of the `to_token`. - rooms_for_user: Dict[str, RoomsForUserType] = { + rooms_for_user: dict[str, RoomsForUserType] = { room.room_id: room for room in room_for_user_list } changes = await self._get_rewind_changes_to_current_membership_to_token( @@ -1143,7 +1139,7 @@ async def _get_newly_joined_and_left_rooms( user_id: str, to_token: StreamToken, from_token: Optional[StreamToken], - ) -> Tuple[AbstractSet[str], Mapping[str, RoomsForUserStateReset]]: + ) -> tuple[AbstractSet[str], Mapping[str, RoomsForUserStateReset]]: """Fetch the sets of rooms that the user newly joined or left in the given token range. @@ -1163,8 +1159,8 @@ async def _get_newly_joined_and_left_rooms( need to check if a membership still exists in the room. """ - newly_joined_room_ids: Set[str] = set() - newly_left_room_map: Dict[str, RoomsForUserStateReset] = {} + newly_joined_room_ids: set[str] = set() + newly_left_room_map: dict[str, RoomsForUserStateReset] = {} if not from_token: return newly_joined_room_ids, newly_left_room_map @@ -1190,7 +1186,7 @@ async def _get_newly_joined_and_left_rooms_fallback( user_id: str, to_token: StreamToken, from_token: Optional[StreamToken], - ) -> Tuple[AbstractSet[str], Mapping[str, RoomsForUserStateReset]]: + ) -> tuple[AbstractSet[str], Mapping[str, RoomsForUserStateReset]]: """Fetch the sets of rooms that the user newly joined or left in the given token range. @@ -1209,8 +1205,8 @@ async def _get_newly_joined_and_left_rooms_fallback( was state reset out of the room. To actually check for a state reset, you need to check if a membership still exists in the room. 
""" - newly_joined_room_ids: Set[str] = set() - newly_left_room_map: Dict[str, RoomsForUserStateReset] = {} + newly_joined_room_ids: set[str] = set() + newly_left_room_map: dict[str, RoomsForUserStateReset] = {} # We need to figure out the # @@ -1232,20 +1228,20 @@ async def _get_newly_joined_and_left_rooms_fallback( # 1) Assemble a list of the last membership events in some given ranges. Someone # could have left and joined multiple times during the given range but we only # care about end-result so we grab the last one. - last_membership_change_by_room_id_in_from_to_range: Dict[ + last_membership_change_by_room_id_in_from_to_range: dict[ str, CurrentStateDeltaMembership ] = {} # We also want to assemble a list of the first membership events during the token # range so we can step backward to the previous membership that would apply to # before the token range to see if we have `newly_joined` the room. - first_membership_change_by_room_id_in_from_to_range: Dict[ + first_membership_change_by_room_id_in_from_to_range: dict[ str, CurrentStateDeltaMembership ] = {} # Keep track if the room has a non-join event in the token range so we can later # tell if it was a `newly_joined` room. If the last membership event in the # token range is a join and there is also some non-join in the range, we know # they `newly_joined`. - has_non_join_event_by_room_id_in_from_to_range: Dict[str, bool] = {} + has_non_join_event_by_room_id_in_from_to_range: dict[str, bool] = {} for ( membership_change ) in current_state_delta_membership_changes_in_from_to_range: @@ -1355,9 +1351,9 @@ async def _get_dm_rooms_for_user( async def filter_rooms_relevant_for_sync( self, user: UserID, - room_membership_for_user_map: Dict[str, RoomsForUserType], + room_membership_for_user_map: dict[str, RoomsForUserType], newly_left_room_ids: AbstractSet[str], - ) -> Dict[str, RoomsForUserType]: + ) -> dict[str, RoomsForUserType]: """ Filter room IDs that should/can be listed for this user in the sync response (the full room list that will be further filtered, sorted, and sliced). @@ -1402,7 +1398,7 @@ async def filter_rooms_relevant_for_sync( async def check_room_subscription_allowed_for_user( self, room_id: str, - room_membership_for_user_map: Dict[str, RoomsForUserType], + room_membership_for_user_map: dict[str, RoomsForUserType], to_token: StreamToken, ) -> Optional[RoomsForUserType]: """ @@ -1469,8 +1465,8 @@ async def check_room_subscription_allowed_for_user( async def _bulk_get_stripped_state_for_rooms_from_sync_room_map( self, room_ids: StrCollection, - sync_room_map: Dict[str, RoomsForUserType], - ) -> Dict[str, Optional[StateMap[StrippedStateEvent]]]: + sync_room_map: dict[str, RoomsForUserType], + ) -> dict[str, Optional[StateMap[StrippedStateEvent]]]: """ Fetch stripped state for a list of room IDs. Stripped state is only applicable to invite/knock rooms. Other rooms will have `None` as their @@ -1488,7 +1484,7 @@ async def _bulk_get_stripped_state_for_rooms_from_sync_room_map( Mapping from room_id to mapping of (type, state_key) to stripped state event. 
""" - room_id_to_stripped_state_map: Dict[ + room_id_to_stripped_state_map: dict[ str, Optional[StateMap[StrippedStateEvent]] ] = {} @@ -1500,7 +1496,7 @@ async def _bulk_get_stripped_state_for_rooms_from_sync_room_map( ] # Gather a list of event IDs we can grab stripped state from - invite_or_knock_event_ids: List[str] = [] + invite_or_knock_event_ids: list[str] = [] for room_id in room_ids_to_fetch: if sync_room_map[room_id].membership in ( Membership.INVITE, @@ -1565,10 +1561,10 @@ async def _bulk_get_partial_current_state_content_for_rooms( # `content.algorithm` from `EventTypes.RoomEncryption` "room_encryption", ], - room_ids: Set[str], - sync_room_map: Dict[str, RoomsForUserType], + room_ids: set[str], + sync_room_map: dict[str, RoomsForUserType], to_token: StreamToken, - room_id_to_stripped_state_map: Dict[ + room_id_to_stripped_state_map: dict[ str, Optional[StateMap[StrippedStateEvent]] ], ) -> Mapping[str, Union[Optional[str], StateSentinel]]: @@ -1593,7 +1589,7 @@ async def _bulk_get_partial_current_state_content_for_rooms( the given state event (event_type, ""), otherwise `None`. Rooms unknown to this server will return `ROOM_UNKNOWN_SENTINEL`. """ - room_id_to_content: Dict[str, Union[Optional[str], StateSentinel]] = {} + room_id_to_content: dict[str, Union[Optional[str], StateSentinel]] = {} # As a bulk shortcut, use the current state if the server is particpating in the # room (meaning we have current state). Ideally, for leave/ban rooms, we would @@ -1650,7 +1646,7 @@ async def _bulk_get_partial_current_state_content_for_rooms( # Update our `room_id_to_content` map based on the stripped state # (applies to invite/knock rooms) - rooms_ids_without_stripped_state: Set[str] = set() + rooms_ids_without_stripped_state: set[str] = set() for room_id in room_ids_without_results: stripped_state_map = room_id_to_stripped_state_map.get( room_id, Sentinel.UNSET_SENTINEL @@ -1730,12 +1726,12 @@ async def _bulk_get_partial_current_state_content_for_rooms( async def filter_rooms( self, user: UserID, - sync_room_map: Dict[str, RoomsForUserType], + sync_room_map: dict[str, RoomsForUserType], previous_connection_state: PerConnectionState, filters: SlidingSyncConfig.SlidingSyncList.Filters, to_token: StreamToken, dm_room_ids: AbstractSet[str], - ) -> Dict[str, RoomsForUserType]: + ) -> dict[str, RoomsForUserType]: """ Filter rooms based on the sync request. @@ -1753,7 +1749,7 @@ async def filter_rooms( """ user_id = user.to_string() - room_id_to_stripped_state_map: Dict[ + room_id_to_stripped_state_map: dict[ str, Optional[StateMap[StrippedStateEvent]] ] = {} @@ -1891,7 +1887,7 @@ async def filter_rooms( with start_active_span("filters.tags"): # Fetch the user tags for their rooms room_tags = await self.store.get_tags_for_user(user_id) - room_id_to_tag_name_set: Dict[str, Set[str]] = { + room_id_to_tag_name_set: dict[str, set[str]] = { room_id: set(tags.keys()) for room_id, tags in room_tags.items() } @@ -1947,7 +1943,7 @@ async def filter_rooms_using_tables( filters: SlidingSyncConfig.SlidingSyncList.Filters, to_token: StreamToken, dm_room_ids: AbstractSet[str], - ) -> Dict[str, RoomsForUserSlidingSync]: + ) -> dict[str, RoomsForUserSlidingSync]: """ Filter rooms based on the sync request. 
@@ -2059,7 +2055,7 @@ async def filter_rooms_using_tables( with start_active_span("filters.tags"): # Fetch the user tags for their rooms room_tags = await self.store.get_tags_for_user(user_id) - room_id_to_tag_name_set: Dict[str, Set[str]] = { + room_id_to_tag_name_set: dict[str, set[str]] = { room_id: set(tags.keys()) for room_id, tags in room_tags.items() } @@ -2109,10 +2105,10 @@ async def filter_rooms_using_tables( @trace async def sort_rooms( self, - sync_room_map: Dict[str, RoomsForUserType], + sync_room_map: dict[str, RoomsForUserType], to_token: StreamToken, limit: Optional[int] = None, - ) -> List[RoomsForUserType]: + ) -> list[RoomsForUserType]: """ Sort by `stream_ordering` of the last event that the user should see in the room. `stream_ordering` is unique so we get a stable sort. @@ -2133,11 +2129,11 @@ async def sort_rooms( # Assemble a map of room ID to the `stream_ordering` of the last activity that the # user should see in the room (<= `to_token`) - last_activity_in_room_map: Dict[str, int] = {} + last_activity_in_room_map: dict[str, int] = {} # Same as above, except for positions that we know are in the event # stream cache. - cached_positions: Dict[str, int] = {} + cached_positions: dict[str, int] = {} earliest_cache_position = ( self.store._events_stream_cache.get_earliest_known_position() diff --git a/synapse/handlers/sso.py b/synapse/handlers/sso.py index 735cfa0a0f8..641241287e7 100644 --- a/synapse/handlers/sso.py +++ b/synapse/handlers/sso.py @@ -27,14 +27,11 @@ Any, Awaitable, Callable, - Dict, Iterable, - List, Mapping, NoReturn, Optional, Protocol, - Set, ) from urllib.parse import urlencode @@ -227,10 +224,10 @@ def __init__(self, hs: "HomeServer"): self._mapping_lock = Linearizer(clock=hs.get_clock(), name="sso_user_mapping") # a map from session id to session data - self._username_mapping_sessions: Dict[str, UsernameMappingSession] = {} + self._username_mapping_sessions: dict[str, UsernameMappingSession] = {} # map from idp_id to SsoIdentityProvider - self._identity_providers: Dict[str, SsoIdentityProvider] = {} + self._identity_providers: dict[str, SsoIdentityProvider] = {} self._consent_at_registration = hs.config.consent.user_consent_at_registration @@ -999,7 +996,7 @@ async def handle_submit_username_request( session.use_avatar = use_avatar emails_from_idp = set(session.emails) - filtered_emails: Set[str] = set() + filtered_emails: set[str] = set() # we iterate through the list rather than just building a set conjunction, so # that we can log attempts to use unknown addresses @@ -1142,7 +1139,7 @@ def _expire_old_sessions(self) -> None: def check_required_attributes( self, request: SynapseRequest, - attributes: Mapping[str, List[Any]], + attributes: Mapping[str, list[Any]], attribute_requirements: Iterable[SsoAttributeRequirement], ) -> bool: """ @@ -1259,7 +1256,7 @@ def get_username_mapping_session_cookie_from_request(request: IRequest) -> str: def _check_attribute_requirement( - attributes: Mapping[str, List[Any]], req: SsoAttributeRequirement + attributes: Mapping[str, list[Any]], req: SsoAttributeRequirement ) -> bool: """Check if SSO attributes meet the proper requirements. 
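The hunks above and below all apply the same mechanical rewrite from PEP 585: since Python 3.9 the builtin collection types are subscriptable at runtime, so the typing aliases are redundant (and deprecated as of 3.9). A minimal sketch of the pattern, with hypothetical names:

    # Before: aliases imported from typing
    from typing import Dict, List
    user_rooms: Dict[str, List[str]] = {}

    # After: builtin generics, no imports needed for the collections themselves
    # (Optional, Union, etc. still come from typing)
    user_rooms: dict[str, list[str]] = {}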
diff --git a/synapse/handlers/stats.py b/synapse/handlers/stats.py index 5b4a2cc62dc..0804f72c472 100644 --- a/synapse/handlers/stats.py +++ b/synapse/handlers/stats.py @@ -25,10 +25,8 @@ TYPE_CHECKING, Any, Counter as CounterType, - Dict, Iterable, Optional, - Tuple, ) from synapse.api.constants import EventContentFields, EventTypes, Membership @@ -157,7 +155,7 @@ async def _unsafe_process(self) -> None: async def _handle_deltas( self, deltas: Iterable[StateDelta] - ) -> Tuple[Dict[str, CounterType[str]], Dict[str, CounterType[str]]]: + ) -> tuple[dict[str, CounterType[str]], dict[str, CounterType[str]]]: """Called with the state deltas to process Returns: @@ -165,10 +163,10 @@ async def _handle_deltas( mapping from room/user ID to changes in the various fields. """ - room_to_stats_deltas: Dict[str, CounterType[str]] = {} - user_to_stats_deltas: Dict[str, CounterType[str]] = {} + room_to_stats_deltas: dict[str, CounterType[str]] = {} + user_to_stats_deltas: dict[str, CounterType[str]] = {} - room_to_state_updates: Dict[str, Dict[str, Any]] = {} + room_to_state_updates: dict[str, dict[str, Any]] = {} for delta in deltas: logger.debug( diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 2a6652b585e..a19b75203b4 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -24,14 +24,9 @@ TYPE_CHECKING, AbstractSet, Any, - Dict, - FrozenSet, - List, Mapping, Optional, Sequence, - Set, - Tuple, ) import attr @@ -113,7 +108,7 @@ LAZY_LOADED_MEMBERS_CACHE_MAX_SIZE = 100 -SyncRequestKey = Tuple[Any, ...] +SyncRequestKey = tuple[Any, ...] @attr.s(slots=True, frozen=True, auto_attribs=True) @@ -132,7 +127,7 @@ class TimelineBatch: limited: bool # A mapping of event ID to the bundled aggregations for the above events. # This is only calculated if limited is true. - bundled_aggregations: Optional[Dict[str, BundledAggregations]] = None + bundled_aggregations: Optional[dict[str, BundledAggregations]] = None def __bool__(self) -> bool: """Make the result appear empty if there are no updates. This is used @@ -151,8 +146,8 @@ class JoinedSyncResult: room_id: str timeline: TimelineBatch state: StateMap[EventBase] - ephemeral: List[JsonDict] - account_data: List[JsonDict] + ephemeral: list[JsonDict] + account_data: list[JsonDict] unread_notifications: JsonDict unread_thread_notifications: JsonDict summary: Optional[JsonDict] @@ -174,7 +169,7 @@ class ArchivedSyncResult: room_id: str timeline: TimelineBatch state: StateMap[EventBase] - account_data: List[JsonDict] + account_data: list[JsonDict] def __bool__(self) -> bool: """Make the result appear empty if there are no updates. This is used @@ -209,11 +204,11 @@ class _RoomChanges: and left room IDs since last sync. 
""" - room_entries: List["RoomSyncResultBuilder"] - invited: List[InvitedSyncResult] - knocked: List[KnockedSyncResult] - newly_joined_rooms: List[str] - newly_left_rooms: List[str] + room_entries: list["RoomSyncResultBuilder"] + invited: list[InvitedSyncResult] + knocked: list[KnockedSyncResult] + newly_joined_rooms: list[str] + newly_left_rooms: list[str] @attr.s(slots=True, frozen=True, auto_attribs=True) @@ -236,16 +231,16 @@ class SyncResult: """ next_batch: StreamToken - presence: List[UserPresenceState] - account_data: List[JsonDict] - joined: List[JoinedSyncResult] - invited: List[InvitedSyncResult] - knocked: List[KnockedSyncResult] - archived: List[ArchivedSyncResult] - to_device: List[JsonDict] + presence: list[UserPresenceState] + account_data: list[JsonDict] + joined: list[JoinedSyncResult] + invited: list[InvitedSyncResult] + knocked: list[KnockedSyncResult] + archived: list[ArchivedSyncResult] + to_device: list[JsonDict] device_lists: DeviceListUpdates device_one_time_keys_count: JsonMapping - device_unused_fallback_key_types: List[str] + device_unused_fallback_key_types: list[str] def __bool__(self) -> bool: """Make the result appear empty if there are no updates. This is used @@ -267,7 +262,7 @@ def __bool__(self) -> bool: def empty( next_batch: StreamToken, device_one_time_keys_count: JsonMapping, - device_unused_fallback_key_types: List[str], + device_unused_fallback_key_types: list[str], ) -> "SyncResult": "Return a new empty result" return SyncResult( @@ -319,7 +314,7 @@ def __init__(self, hs: "HomeServer"): # ExpiringCache((User, Device)) -> LruCache(user_id => event_id) self.lazy_loaded_members_cache: ExpiringCache[ - Tuple[str, Optional[str]], LruCache[str, str] + tuple[str, Optional[str]], LruCache[str, str] ] = ExpiringCache( cache_name="lazy_loaded_members_cache", server_name=self.server_name, @@ -419,7 +414,7 @@ async def _wait_for_sync_for_user( ) device_id = sync_config.device_id one_time_keys_count: JsonMapping = {} - unused_fallback_key_types: List[str] = [] + unused_fallback_key_types: list[str] = [] if device_id: user_id = sync_config.user.to_string() # TODO: We should have a way to let clients differentiate between the states of: @@ -543,7 +538,7 @@ async def ephemeral_by_room( sync_result_builder: "SyncResultBuilder", now_token: StreamToken, since_token: Optional[StreamToken] = None, - ) -> Tuple[StreamToken, Dict[str, List[JsonDict]]]: + ) -> tuple[StreamToken, dict[str, list[JsonDict]]]: """Get the ephemeral events for each room the user is in Args: sync_result_builder @@ -610,7 +605,7 @@ async def _load_filtered_recents( sync_config: SyncConfig, upto_token: StreamToken, since_token: Optional[StreamToken] = None, - potential_recents: Optional[List[EventBase]] = None, + potential_recents: Optional[list[EventBase]] = None, newly_joined_room: bool = False, ) -> TimelineBatch: """Create a timeline batch for the room @@ -669,7 +664,7 @@ async def _load_filtered_recents( # We check if there are any state events, if there are then we pass # all current state events to the filter_events function. This is to # ensure that we always include current state in the timeline - current_state_ids: FrozenSet[str] = frozenset() + current_state_ids: frozenset[str] = frozenset() if any(e.is_state() for e in recents): # FIXME(faster_joins): We use the partial state here as # we don't want to block `/sync` on finishing a lazy join. 
@@ -968,7 +963,7 @@ async def compute_summary( return summary def get_lazy_loaded_members_cache( - self, cache_key: Tuple[str, Optional[str]] + self, cache_key: tuple[str, Optional[str]] ) -> LruCache[str, str]: cache: Optional[LruCache[str, str]] = self.lazy_loaded_members_cache.get( cache_key @@ -1029,11 +1024,11 @@ async def compute_state_delta( ): # The memberships needed for events in the timeline. # Only calculated when `lazy_load_members` is on. - members_to_fetch: Optional[Set[str]] = None + members_to_fetch: Optional[set[str]] = None # A dictionary mapping user IDs to the first event in the timeline sent by # them. Only calculated when `lazy_load_members` is on. - first_event_by_sender_map: Optional[Dict[str, EventBase]] = None + first_event_by_sender_map: Optional[dict[str, EventBase]] = None # The contribution to the room state from state events in the timeline. # Only contains the last event for any given state key. @@ -1159,7 +1154,7 @@ async def compute_state_delta( if t[0] == EventTypes.Member: cache.set(t[1], event_id) - state: Dict[str, EventBase] = {} + state: dict[str, EventBase] = {} if state_ids: state = await self.store.get_events(list(state_ids.values())) @@ -1177,7 +1172,7 @@ async def _compute_state_delta_for_full_sync( sync_config: SyncConfig, batch: TimelineBatch, end_token: StreamToken, - members_to_fetch: Optional[Set[str]], + members_to_fetch: Optional[set[str]], timeline_state: StateMap[str], joined: bool, ) -> StateMap[str]: @@ -1327,7 +1322,7 @@ async def _compute_state_delta_for_incremental_sync( batch: TimelineBatch, since_token: StreamToken, end_token: StreamToken, - members_to_fetch: Optional[Set[str]], + members_to_fetch: Optional[set[str]], timeline_state: StateMap[str], ) -> StateMap[str]: """Calculate the state events to be included in an incremental sync response. @@ -1562,7 +1557,7 @@ async def _find_missing_partial_state_memberships( # Identify memberships missing from `found_state_ids` and pick out the auth # events in which to look for them. - auth_event_ids: Set[str] = set() + auth_event_ids: set[str] = set() for member in members_to_fetch: if (EventTypes.Member, member) in found_state_ids: continue @@ -1765,7 +1760,7 @@ async def generate_sync_result( logger.debug("Fetching OTK data") device_id = sync_config.device_id one_time_keys_count: JsonMapping = {} - unused_fallback_key_types: List[str] = [] + unused_fallback_key_types: list[str] = [] if device_id: # TODO: We should have a way to let clients differentiate between the states of: # * no change in OTK count since the provided since token @@ -1855,7 +1850,7 @@ async def get_sync_result_builder( self.rooms_to_exclude_globally, ) - last_membership_change_by_room_id: Dict[str, EventBase] = {} + last_membership_change_by_room_id: dict[str, EventBase] = {} for event in membership_change_events: last_membership_change_by_room_id[event.room_id] = event @@ -1914,7 +1909,7 @@ async def get_sync_result_builder( # - are full-stated # - became fully-stated at some point during the sync period # (These rooms will have been omitted during a previous eager sync.) 
- forced_newly_joined_room_ids: Set[str] = set() + forced_newly_joined_room_ids: set[str] = set() if since_token and not sync_config.filter_collection.lazy_load_members(): un_partial_stated_rooms = ( await self.store.get_un_partial_stated_rooms_between( @@ -2123,7 +2118,7 @@ async def _generate_sync_entry_for_presence( async def _generate_sync_entry_for_rooms( self, sync_result_builder: "SyncResultBuilder" - ) -> Tuple[AbstractSet[str], AbstractSet[str]]: + ) -> tuple[AbstractSet[str], AbstractSet[str]]: """Generates the rooms portion of the sync response. Populates the `sync_result_builder` with the result. @@ -2172,7 +2167,7 @@ async def _generate_sync_entry_for_rooms( or sync_result_builder.sync_config.filter_collection.blocks_all_room_ephemeral() ) if block_all_room_ephemeral: - ephemeral_by_room: Dict[str, List[JsonDict]] = {} + ephemeral_by_room: dict[str, list[JsonDict]] = {} else: now_token, ephemeral_by_room = await self.ephemeral_by_room( sync_result_builder, @@ -2266,7 +2261,7 @@ async def _have_rooms_changed( async def _get_room_changes_for_incremental_sync( self, sync_result_builder: "SyncResultBuilder", - ignored_users: FrozenSet[str], + ignored_users: frozenset[str], ) -> _RoomChanges: """Determine the changes in rooms to report to the user. @@ -2297,17 +2292,17 @@ async def _get_room_changes_for_incremental_sync( assert since_token - mem_change_events_by_room_id: Dict[str, List[EventBase]] = {} + mem_change_events_by_room_id: dict[str, list[EventBase]] = {} for event in membership_change_events: mem_change_events_by_room_id.setdefault(event.room_id, []).append(event) - newly_joined_rooms: List[str] = list( + newly_joined_rooms: list[str] = list( sync_result_builder.forced_newly_joined_room_ids ) - newly_left_rooms: List[str] = [] - room_entries: List[RoomSyncResultBuilder] = [] - invited: List[InvitedSyncResult] = [] - knocked: List[KnockedSyncResult] = [] + newly_left_rooms: list[str] = [] + room_entries: list[RoomSyncResultBuilder] = [] + invited: list[InvitedSyncResult] = [] + knocked: list[KnockedSyncResult] = [] invite_config = await self.store.get_invite_config_for_user(user_id) for room_id, events in mem_change_events_by_room_id.items(): # The body of this loop will add this room to at least one of the five lists @@ -2444,7 +2439,7 @@ async def _get_room_changes_for_incremental_sync( # This is all screaming out for a refactor, as the logic here is # subtle and the moving parts numerous. if leave_event.internal_metadata.is_out_of_band_membership(): - batch_events: Optional[List[EventBase]] = [leave_event] + batch_events: Optional[list[EventBase]] = [leave_event] else: batch_events = None @@ -2526,7 +2521,7 @@ async def _get_room_changes_for_incremental_sync( async def _get_room_changes_for_initial_sync( self, sync_result_builder: "SyncResultBuilder", - ignored_users: FrozenSet[str], + ignored_users: frozenset[str], ) -> _RoomChanges: """Returns entries for all rooms for the user. 
@@ -2612,7 +2607,7 @@ async def _generate_room_entry( self, sync_result_builder: "SyncResultBuilder", room_builder: "RoomSyncResultBuilder", - ephemeral: List[JsonDict], + ephemeral: list[JsonDict], tags: Optional[Mapping[str, JsonMapping]], account_data: Mapping[str, JsonMapping], always_include: bool = False, @@ -2791,7 +2786,7 @@ async def _generate_room_entry( ) if room_builder.rtype == "joined": - unread_notifications: Dict[str, int] = {} + unread_notifications: dict[str, int] = {} room_sync = JoinedSyncResult( room_id=room_id, timeline=batch, @@ -2858,7 +2853,7 @@ async def _generate_room_entry( raise Exception("Unrecognized rtype: %r", room_builder.rtype) -def _action_has_highlight(actions: List[JsonDict]) -> bool: +def _action_has_highlight(actions: list[JsonDict]) -> bool: for action in actions: try: if action.get("set_tweak", None) == "highlight": @@ -3014,20 +3009,20 @@ class SyncResultBuilder: full_state: bool since_token: Optional[StreamToken] now_token: StreamToken - joined_room_ids: FrozenSet[str] - excluded_room_ids: FrozenSet[str] - forced_newly_joined_room_ids: FrozenSet[str] - membership_change_events: List[EventBase] - - presence: List[UserPresenceState] = attr.Factory(list) - account_data: List[JsonDict] = attr.Factory(list) - joined: List[JoinedSyncResult] = attr.Factory(list) - invited: List[InvitedSyncResult] = attr.Factory(list) - knocked: List[KnockedSyncResult] = attr.Factory(list) - archived: List[ArchivedSyncResult] = attr.Factory(list) - to_device: List[JsonDict] = attr.Factory(list) - - def calculate_user_changes(self) -> Tuple[AbstractSet[str], AbstractSet[str]]: + joined_room_ids: frozenset[str] + excluded_room_ids: frozenset[str] + forced_newly_joined_room_ids: frozenset[str] + membership_change_events: list[EventBase] + + presence: list[UserPresenceState] = attr.Factory(list) + account_data: list[JsonDict] = attr.Factory(list) + joined: list[JoinedSyncResult] = attr.Factory(list) + invited: list[InvitedSyncResult] = attr.Factory(list) + knocked: list[KnockedSyncResult] = attr.Factory(list) + archived: list[ArchivedSyncResult] = attr.Factory(list) + to_device: list[JsonDict] = attr.Factory(list) + + def calculate_user_changes(self) -> tuple[AbstractSet[str], AbstractSet[str]]: """Work out which other users have joined or left rooms we are joined to. This data is only useful for an incremental sync.
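A side note on the `attr.Factory(list)` defaults above: a literal `[]` default would be evaluated once and shared across every builder instance, so attrs' factory is used to create a fresh list per instance. A runnable sketch:

    import attr

    @attr.s(slots=True, auto_attribs=True)
    class Builder:
        # each Builder() gets its own list rather than a shared mutable default
        items: list[str] = attr.Factory(list)

    assert Builder().items is not Builder().items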
@@ -3105,7 +3100,7 @@ class RoomSyncResultBuilder: room_id: str rtype: str - events: Optional[List[EventBase]] + events: Optional[list[EventBase]] newly_joined: bool full_state: bool since_token: Optional[StreamToken] diff --git a/synapse/handlers/typing.py b/synapse/handlers/typing.py index 77c5b747c3f..17e43858c9a 100644 --- a/synapse/handlers/typing.py +++ b/synapse/handlers/typing.py @@ -20,7 +20,7 @@ # import logging import random -from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Set, Tuple +from typing import TYPE_CHECKING, Iterable, Optional import attr @@ -96,15 +96,15 @@ def __init__(self, hs: "HomeServer"): ) # map room IDs to serial numbers - self._room_serials: Dict[str, int] = {} + self._room_serials: dict[str, int] = {} # map room IDs to sets of users currently typing - self._room_typing: Dict[str, Set[str]] = {} + self._room_typing: dict[str, set[str]] = {} - self._member_last_federation_poke: Dict[RoomMember, int] = {} + self._member_last_federation_poke: dict[RoomMember, int] = {} self.wheel_timer: WheelTimer[RoomMember] = WheelTimer(bucket_size=5000) self._latest_room_serial = 0 - self._rooms_updated: Set[str] = set() + self._rooms_updated: set[str] = set() self.clock.looping_call(self._handle_timeouts, 5000) self.clock.looping_call(self._prune_old_typing, FORGET_TIMEOUT) @@ -195,7 +195,7 @@ async def _push_remote(self, member: RoomMember, typing: bool) -> None: logger.exception("Error pushing typing notif to remotes") def process_replication_rows( - self, token: int, rows: List[TypingStream.TypingStreamRow] + self, token: int, rows: list[TypingStream.TypingStreamRow] ) -> None: """Should be called whenever we receive updates for typing stream.""" @@ -226,7 +226,7 @@ def process_replication_rows( ) async def _send_changes_in_typing_to_remotes( - self, room_id: str, prev_typing: Set[str], now_typing: Set[str] + self, room_id: str, prev_typing: set[str], now_typing: set[str] ) -> None: """Process a change in typing of a room from replication, sending EDUs for any local users. @@ -280,7 +280,7 @@ def __init__(self, hs: "HomeServer"): hs.get_distributor().observe("user_left_room", self.user_left_room) # clock time we expect to stop - self._member_typing_until: Dict[RoomMember, int] = {} + self._member_typing_until: dict[RoomMember, int] = {} # caches which room_ids changed at which serials self._typing_stream_change_cache = StreamChangeCache( @@ -452,7 +452,7 @@ def _push_update_local(self, member: RoomMember, typing: bool) -> None: async def get_all_typing_updates( self, instance_name: str, last_id: int, current_id: int, limit: int - ) -> Tuple[List[Tuple[int, list]], int, bool]: + ) -> tuple[list[tuple[int, list]], int, bool]: """Get updates for typing replication stream. Args: @@ -504,7 +504,7 @@ async def get_all_typing_updates( return rows, current_id, limited def process_replication_rows( - self, token: int, rows: List[TypingStream.TypingStreamRow] + self, token: int, rows: list[TypingStream.TypingStreamRow] ) -> None: # The writing process should never get updates from replication. raise Exception("Typing writer instance got typing info over replication") @@ -531,7 +531,7 @@ def _make_event_for(self, room_id: str) -> JsonMapping: async def get_new_events_as( self, from_key: int, service: ApplicationService - ) -> Tuple[List[JsonMapping], int]: + ) -> tuple[list[JsonMapping], int]: """Returns a set of new typing events that an appservice may be interested in. 
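For orientation, a typing notification as it appears on the wire is an m.typing EDU (field names per the Matrix spec; the exact payload built by `_make_event_for` is assumed to match):

    {
        "type": "m.typing",
        "room_id": "!room:example.org",
        "content": {"user_ids": ["@alice:example.org", "@bob:example.org"]},
    }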
@@ -578,7 +578,7 @@ async def get_new_events( is_guest: bool, explicit_room_id: Optional[str] = None, to_key: Optional[int] = None, - ) -> Tuple[List[JsonMapping], int]: + ) -> tuple[list[JsonMapping], int]: """ Find typing notifications for given rooms (> `from_token` and <= `to_token`) """ diff --git a/synapse/handlers/ui_auth/checkers.py b/synapse/handlers/ui_auth/checkers.py index f3c295d9f2a..cbae33eaecd 100644 --- a/synapse/handlers/ui_auth/checkers.py +++ b/synapse/handlers/ui_auth/checkers.py @@ -21,7 +21,7 @@ import logging from abc import ABC, abstractmethod -from typing import TYPE_CHECKING, Any, ClassVar, Sequence, Type +from typing import TYPE_CHECKING, Any, ClassVar, Sequence from twisted.web.client import PartialDownloadError @@ -321,7 +321,7 @@ async def check_auth(self, authdict: dict, clientip: str) -> Any: ) -INTERACTIVE_AUTH_CHECKERS: Sequence[Type[UserInteractiveAuthChecker]] = [ +INTERACTIVE_AUTH_CHECKERS: Sequence[type[UserInteractiveAuthChecker]] = [ DummyAuthChecker, TermsAuthChecker, RecaptchaAuthChecker, diff --git a/synapse/handlers/user_directory.py b/synapse/handlers/user_directory.py index 28961f5925f..fd05aff4c85 100644 --- a/synapse/handlers/user_directory.py +++ b/synapse/handlers/user_directory.py @@ -21,7 +21,7 @@ import logging from http import HTTPStatus -from typing import TYPE_CHECKING, List, Optional, Set, Tuple +from typing import TYPE_CHECKING, Optional from twisted.internet.interfaces import IDelayedCall @@ -129,7 +129,7 @@ def __init__(self, hs: "HomeServer"): # Guard to ensure we only have one process for refreshing remote profiles # for the given servers. # Set of server names. - self._is_refreshing_remote_profiles_for_servers: Set[str] = set() + self._is_refreshing_remote_profiles_for_servers: set[str] = set() if self.update_user_directory: self.notifier.add_replication_callback(self.notify_new_event) @@ -270,7 +270,7 @@ async def _unsafe_process(self) -> None: await self.store.update_user_directory_stream_pos(max_pos) - async def _handle_deltas(self, deltas: List[StateDelta]) -> None: + async def _handle_deltas(self, deltas: list[StateDelta]) -> None: """Called with the state deltas to process""" for delta in deltas: logger.debug( @@ -466,7 +466,7 @@ async def _track_user_joined_room(self, room_id: str, joining_user_id: str) -> N or await self.store.should_include_local_user_in_dir(other) ) ] - updates_to_users_who_share_rooms: Set[Tuple[str, str]] = set() + updates_to_users_who_share_rooms: set[tuple[str, str]] = set() # First, if the joining user is our local user then we need an # update for every other user in the room. diff --git a/synapse/handlers/worker_lock.py b/synapse/handlers/worker_lock.py index ca1e2b166c3..af5498c560e 100644 --- a/synapse/handlers/worker_lock.py +++ b/synapse/handlers/worker_lock.py @@ -26,10 +26,7 @@ TYPE_CHECKING, AsyncContextManager, Collection, - Dict, Optional, - Tuple, - Type, Union, ) from weakref import WeakSet @@ -75,8 +72,8 @@ def __init__(self, hs: "HomeServer") -> None: # Map from lock name/key to set of `WaitingLock` that are active for # that lock. 
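(For context, callers take these locks through an async context manager; the lock name and key here are hypothetical:

    async with lock_handler.acquire_read_write_lock("purge_room", room_id, write=True):
        ...  # the protected critical section
)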
- self._locks: Dict[ - Tuple[str, str], WeakSet[Union[WaitingLock, WaitingMultiLock]] + self._locks: dict[ + tuple[str, str], WeakSet[Union[WaitingLock, WaitingMultiLock]] ] = {} self._clock.looping_call(self._cleanup_locks, 30_000) @@ -141,7 +138,7 @@ def acquire_read_write_lock( def acquire_multi_read_write_lock( self, - lock_names: Collection[Tuple[str, str]], + lock_names: Collection[tuple[str, str]], *, write: bool, ) -> "WaitingMultiLock": @@ -261,7 +258,7 @@ async def __aenter__(self) -> None: async def __aexit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc: Optional[BaseException], tb: Optional[TracebackType], ) -> Optional[bool]: @@ -289,7 +286,7 @@ def _get_next_retry_interval(self) -> float: @attr.s(auto_attribs=True, eq=False) class WaitingMultiLock: - lock_names: Collection[Tuple[str, str]] + lock_names: Collection[tuple[str, str]] write: bool @@ -341,7 +338,7 @@ async def __aenter__(self) -> None: async def __aexit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc: Optional[BaseException], tb: Optional[TracebackType], ) -> Optional[bool]: diff --git a/synapse/http/additional_resource.py b/synapse/http/additional_resource.py index 59eae841d58..1a17b8461f7 100644 --- a/synapse/http/additional_resource.py +++ b/synapse/http/additional_resource.py @@ -18,7 +18,7 @@ # # -from typing import TYPE_CHECKING, Any, Awaitable, Callable, Optional, Tuple +from typing import TYPE_CHECKING, Any, Awaitable, Callable, Optional from twisted.web.server import Request @@ -41,7 +41,7 @@ class AdditionalResource(DirectServeJsonResource): def __init__( self, hs: "HomeServer", - handler: Callable[[Request], Awaitable[Optional[Tuple[int, Any]]]], + handler: Callable[[Request], Awaitable[Optional[tuple[int, Any]]]], ): """Initialise AdditionalResource @@ -56,7 +56,7 @@ def __init__( super().__init__(clock=hs.get_clock()) self._handler = handler - async def _async_render(self, request: Request) -> Optional[Tuple[int, Any]]: + async def _async_render(self, request: Request) -> Optional[tuple[int, Any]]: # Cheekily pass the result straight through, so we don't need to worry # if its an awaitable or not. return await self._handler(request) diff --git a/synapse/http/client.py b/synapse/http/client.py index 370cdc3568b..ff1f7c7128f 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -27,12 +27,9 @@ Any, BinaryIO, Callable, - Dict, - List, Mapping, Optional, Protocol, - Tuple, Union, ) @@ -135,10 +132,10 @@ # the entries can either be Lists or bytes. 
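For example, all of the following are valid raw header values under the union defined next (hypothetical headers; keys and values may each be str or bytes):

    headers = {
        b"User-Agent": [b"Synapse"],
        "Accept": ("application/json",),
    }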
RawHeaderValue = Union[ StrSequence, - List[bytes], - List[Union[str, bytes]], - Tuple[bytes, ...], - Tuple[Union[str, bytes], ...], + list[bytes], + list[Union[str, bytes]], + tuple[bytes, ...], + tuple[Union[str, bytes], ...], ] @@ -205,7 +202,7 @@ def __init__( def resolveHostName( self, recv: IResolutionReceiver, hostname: str, portNumber: int = 0 ) -> IResolutionReceiver: - addresses: List[IAddress] = [] + addresses: list[IAddress] = [] def _callback() -> None: has_bad_ip = False @@ -349,7 +346,7 @@ class BaseHttpClient: def __init__( self, hs: "HomeServer", - treq_args: Optional[Dict[str, Any]] = None, + treq_args: Optional[dict[str, Any]] = None, ): self.hs = hs self.server_name = hs.hostname @@ -479,7 +476,7 @@ async def request( async def post_urlencoded_get_json( self, uri: str, - args: Optional[Mapping[str, Union[str, List[str]]]] = None, + args: Optional[Mapping[str, Union[str, list[str]]]] = None, headers: Optional[RawHeaders] = None, ) -> Any: """ @@ -707,7 +704,7 @@ async def get_file( max_size: Optional[int] = None, headers: Optional[RawHeaders] = None, is_allowed_content_type: Optional[Callable[[str], bool]] = None, - ) -> Tuple[int, Dict[bytes, List[bytes]], str, int]: + ) -> tuple[int, dict[bytes, list[bytes]], str, int]: """GETs a file from a given URL Args: url: The URL to GET @@ -815,7 +812,7 @@ class SimpleHttpClient(BaseHttpClient): def __init__( self, hs: "HomeServer", - treq_args: Optional[Dict[str, Any]] = None, + treq_args: Optional[dict[str, Any]] = None, ip_allowlist: Optional[IPSet] = None, ip_blocklist: Optional[IPSet] = None, use_proxy: bool = False, diff --git a/synapse/http/federation/matrix_federation_agent.py b/synapse/http/federation/matrix_federation_agent.py index 9d87514be00..f8482d9c482 100644 --- a/synapse/http/federation/matrix_federation_agent.py +++ b/synapse/http/federation/matrix_federation_agent.py @@ -19,7 +19,7 @@ # import logging import urllib.parse -from typing import Any, Generator, List, Optional +from typing import Any, Generator, Optional from urllib.request import ( # type: ignore[attr-defined] proxy_bypass_environment, ) @@ -413,7 +413,7 @@ async def _do_connect(self, protocol_factory: IProtocolFactory) -> IProtocol: # to try and if that doesn't work then we'll have an exception. raise Exception("Failed to resolve server %r" % (self._parsed_uri.netloc,)) - async def _resolve_server(self) -> List[Server]: + async def _resolve_server(self) -> list[Server]: """Resolves the server name to a list of hosts and ports to attempt to connect to. """ diff --git a/synapse/http/federation/srv_resolver.py b/synapse/http/federation/srv_resolver.py index 639bf309d67..76a51e48731 100644 --- a/synapse/http/federation/srv_resolver.py +++ b/synapse/http/federation/srv_resolver.py @@ -22,7 +22,7 @@ import logging import random import time -from typing import Any, Callable, Dict, List +from typing import Any, Callable import attr @@ -34,7 +34,7 @@ logger = logging.getLogger(__name__) -SERVER_CACHE: Dict[bytes, List["Server"]] = {} +SERVER_CACHE: dict[bytes, list["Server"]] = {} @attr.s(auto_attribs=True, slots=True, frozen=True) @@ -58,11 +58,11 @@ class Server: expires: int = 0 -def _sort_server_list(server_list: List[Server]) -> List[Server]: +def _sort_server_list(server_list: list[Server]) -> list[Server]: """Given a list of SRV records sort them into priority order and shuffle each priority with the given weight. 
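One plausible implementation of that per-priority weighted shuffle (a sketch in the spirit of RFC 2782, not necessarily what this function does internally):

    import random

    def weighted_order(group: list[Server]) -> list[Server]:
        # repeatedly draw from the remaining servers with probability
        # proportional to weight; +1 keeps zero-weight entries selectable
        pool, ordered = list(group), []
        while pool:
            pick = random.choices(pool, weights=[s.weight + 1 for s in pool])[0]
            pool.remove(pick)
            ordered.append(pick)
        return ordered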
""" - priority_map: Dict[int, List[Server]] = {} + priority_map: dict[int, list[Server]] = {} for server in server_list: priority_map.setdefault(server.priority, []).append(server) @@ -116,14 +116,14 @@ class SrvResolver: def __init__( self, dns_client: Any = client, - cache: Dict[bytes, List[Server]] = SERVER_CACHE, + cache: dict[bytes, list[Server]] = SERVER_CACHE, get_time: Callable[[], float] = time.time, ): self._dns_client = dns_client self._cache = cache self._get_time = get_time - async def resolve_service(self, service_name: bytes) -> List[Server]: + async def resolve_service(self, service_name: bytes) -> list[Server]: """Look up a SRV record Args: diff --git a/synapse/http/federation/well_known_resolver.py b/synapse/http/federation/well_known_resolver.py index 2f52abcc035..ac4d954c2c4 100644 --- a/synapse/http/federation/well_known_resolver.py +++ b/synapse/http/federation/well_known_resolver.py @@ -22,7 +22,7 @@ import random import time from io import BytesIO -from typing import Callable, Dict, Optional, Tuple +from typing import Callable, Optional import attr @@ -188,7 +188,7 @@ async def get_well_known(self, server_name: bytes) -> WellKnownLookupResult: return WellKnownLookupResult(delegated_server=result) - async def _fetch_well_known(self, server_name: bytes) -> Tuple[bytes, float]: + async def _fetch_well_known(self, server_name: bytes) -> tuple[bytes, float]: """Actually fetch and parse a .well-known, without checking the cache Args: @@ -251,7 +251,7 @@ async def _fetch_well_known(self, server_name: bytes) -> Tuple[bytes, float]: async def _make_well_known_request( self, server_name: bytes, retry: bool - ) -> Tuple[IResponse, bytes]: + ) -> tuple[IResponse, bytes]: """Make the well known request. This will retry the request if requested and it fails (with unable @@ -348,7 +348,7 @@ def _cache_period_from_headers( return None -def _parse_cache_control(headers: Headers) -> Dict[bytes, Optional[bytes]]: +def _parse_cache_control(headers: Headers) -> dict[bytes, Optional[bytes]]: cache_controls = {} cache_control_headers = headers.getRawHeaders(b"cache-control") or [] for hdr in cache_control_headers: diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 4d72c72d018..d0e47cf8dc8 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -31,13 +31,10 @@ Any, BinaryIO, Callable, - Dict, Generic, - List, Literal, Optional, TextIO, - Tuple, TypeVar, Union, cast, @@ -253,7 +250,7 @@ def _validate(v: Any) -> bool: return isinstance(v, dict) -class LegacyJsonSendParser(_BaseJsonParser[Tuple[int, JsonDict]]): +class LegacyJsonSendParser(_BaseJsonParser[tuple[int, JsonDict]]): """Ensure the legacy responses of /send_join & /send_leave are correct.""" def __init__(self) -> None: @@ -667,7 +664,7 @@ async def _send_request( ) # Inject the span into the headers - headers_dict: Dict[bytes, List[bytes]] = {} + headers_dict: dict[bytes, list[bytes]] = {} opentracing.inject_header_dict(headers_dict, request.destination) headers_dict[b"User-Agent"] = [self.version_string_bytes] @@ -913,7 +910,7 @@ def build_auth_headers( url_bytes: bytes, content: Optional[JsonDict] = None, destination_is: Optional[bytes] = None, - ) -> List[bytes]: + ) -> list[bytes]: """ Builds the Authorization headers for a federation request Args: @@ -1291,7 +1288,7 @@ async def get_json_with_headers( ignore_backoff: bool = False, try_trailing_slash_on_400: bool = False, parser: Literal[None] = None, - ) -> Tuple[JsonDict, Dict[bytes, 
List[bytes]]]: ... + ) -> tuple[JsonDict, dict[bytes, list[bytes]]]: ... @overload async def get_json_with_headers( @@ -1304,7 +1301,7 @@ async def get_json_with_headers( ignore_backoff: bool = ..., try_trailing_slash_on_400: bool = ..., parser: ByteParser[T] = ..., - ) -> Tuple[T, Dict[bytes, List[bytes]]]: ... + ) -> tuple[T, dict[bytes, list[bytes]]]: ... async def get_json_with_headers( self, @@ -1316,7 +1313,7 @@ async def get_json_with_headers( ignore_backoff: bool = False, try_trailing_slash_on_400: bool = False, parser: Optional[ByteParser[T]] = None, - ) -> Tuple[Union[JsonDict, T], Dict[bytes, List[bytes]]]: + ) -> tuple[Union[JsonDict, T], dict[bytes, list[bytes]]]: """GETs some json from the given host homeserver and path Args: @@ -1484,7 +1481,7 @@ async def get_file( retry_on_dns_fail: bool = True, ignore_backoff: bool = False, follow_redirects: bool = False, - ) -> Tuple[int, Dict[bytes, List[bytes]]]: + ) -> tuple[int, dict[bytes, list[bytes]]]: """GETs a file from a given homeserver Args: destination: The remote server to send the HTTP request to. @@ -1645,7 +1642,7 @@ async def federation_get_file( args: Optional[QueryParams] = None, retry_on_dns_fail: bool = True, ignore_backoff: bool = False, - ) -> Tuple[int, Dict[bytes, List[bytes]], bytes]: + ) -> tuple[int, dict[bytes, list[bytes]], bytes]: """GETs a file from a given homeserver over the federation /download endpoint Args: destination: The remote server to send the HTTP request to. diff --git a/synapse/http/proxy.py b/synapse/http/proxy.py index fa17432984a..583dd092bd9 100644 --- a/synapse/http/proxy.py +++ b/synapse/http/proxy.py @@ -22,7 +22,7 @@ import json import logging import urllib.parse -from typing import TYPE_CHECKING, Any, Optional, Set, Tuple, cast +from typing import TYPE_CHECKING, Any, Optional, cast from twisted.internet import protocol from twisted.internet.interfaces import ITCPTransport @@ -66,7 +66,7 @@ def parse_connection_header_value( connection_header_value: Optional[bytes], -) -> Set[str]: +) -> set[str]: """ Parse the `Connection` header to determine which headers we should not be copied over from the remote response. @@ -86,7 +86,7 @@ def parse_connection_header_value( The set of header names that should not be copied over from the remote response. The keys are lowercased. 
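Example (hypothetical header value, following the split/strip/lowercase logic shown below):

    parse_connection_header_value(b"close, X-Foo, X-Bar")
    # -> {"close", "x-foo", "x-bar"}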
""" - extra_headers_to_remove: Set[str] = set() + extra_headers_to_remove: set[str] = set() if connection_header_value: extra_headers_to_remove = { connection_option.decode("ascii").strip().lower() @@ -140,7 +140,7 @@ def _check_auth(self, request: Request) -> None: "Invalid Proxy-Authorization header.", Codes.UNAUTHORIZED ) - async def _async_render(self, request: "SynapseRequest") -> Tuple[int, Any]: + async def _async_render(self, request: "SynapseRequest") -> tuple[int, Any]: uri = urllib.parse.urlparse(request.uri) assert uri.scheme == b"matrix-federation" diff --git a/synapse/http/proxyagent.py b/synapse/http/proxyagent.py index ab413990c5b..67e04b18d9b 100644 --- a/synapse/http/proxyagent.py +++ b/synapse/http/proxyagent.py @@ -21,7 +21,7 @@ import logging import random import re -from typing import Any, Collection, Dict, List, Optional, Sequence, Tuple, Union, cast +from typing import Any, Collection, Optional, Sequence, Union, cast from urllib.parse import urlparse from urllib.request import ( # type: ignore[attr-defined] proxy_bypass_environment, @@ -139,7 +139,7 @@ def __init__( else: self.proxy_reactor = proxy_reactor - self._endpoint_kwargs: Dict[str, Any] = {} + self._endpoint_kwargs: dict[str, Any] = {} if connectTimeout is not None: self._endpoint_kwargs["timeout"] = connectTimeout if bindAddress is not None: @@ -182,7 +182,7 @@ def __init__( "`federation_proxy_credentials` are required when using `federation_proxy_locations`" ) - endpoints: List[IStreamClientEndpoint] = [] + endpoints: list[IStreamClientEndpoint] = [] for federation_proxy_location in federation_proxy_locations: endpoint: IStreamClientEndpoint if isinstance(federation_proxy_location, InstanceTcpLocationConfig): @@ -369,7 +369,7 @@ def http_proxy_endpoint( timeout: float = 30, bindAddress: Optional[Union[bytes, str, tuple[Union[bytes, str], int]]] = None, attemptDelay: Optional[float] = None, -) -> Tuple[Optional[IStreamClientEndpoint], Optional[ProxyCredentials]]: +) -> tuple[Optional[IStreamClientEndpoint], Optional[ProxyCredentials]]: """Parses an http proxy setting and returns an endpoint for the proxy Args: @@ -418,7 +418,7 @@ def http_proxy_endpoint( def parse_proxy( proxy: bytes, default_scheme: bytes = b"http", default_port: int = 1080 -) -> Tuple[bytes, bytes, int, Optional[ProxyCredentials]]: +) -> tuple[bytes, bytes, int, Optional[ProxyCredentials]]: """ Parse a proxy connection string. 
@@ -487,7 +487,7 @@ def connect( return run_in_background(self._do_connect, protocol_factory) async def _do_connect(self, protocol_factory: IProtocolFactory) -> IProtocol: - failures: List[Failure] = [] + failures: list[Failure] = [] for endpoint in random.sample(self._endpoints, k=len(self._endpoints)): try: return await endpoint.connect(protocol_factory) diff --git a/synapse/http/replicationagent.py b/synapse/http/replicationagent.py index d70575dbd5f..f4799bd1b27 100644 --- a/synapse/http/replicationagent.py +++ b/synapse/http/replicationagent.py @@ -20,7 +20,7 @@ # import logging -from typing import Dict, Optional +from typing import Optional from zope.interface import implementer @@ -60,7 +60,7 @@ class ReplicationEndpointFactory: def __init__( self, reactor: ISynapseReactor, - instance_map: Dict[str, InstanceLocationConfig], + instance_map: dict[str, InstanceLocationConfig], context_factory: IPolicyForHTTPS, ) -> None: self.reactor = reactor @@ -117,7 +117,7 @@ class ReplicationAgent(_AgentBase): def __init__( self, reactor: ISynapseReactor, - instance_map: Dict[str, InstanceLocationConfig], + instance_map: dict[str, InstanceLocationConfig], contextFactory: IPolicyForHTTPS, connectTimeout: Optional[float] = None, bindAddress: Optional[bytes] = None, diff --git a/synapse/http/request_metrics.py b/synapse/http/request_metrics.py index 83f52edb7c7..5cc8a2ebd89 100644 --- a/synapse/http/request_metrics.py +++ b/synapse/http/request_metrics.py @@ -22,7 +22,7 @@ import logging import threading import traceback -from typing import Dict, Mapping, Set, Tuple +from typing import Mapping from prometheus_client.core import Counter, Histogram @@ -133,13 +133,13 @@ labelnames=["method", "servlet", SERVER_NAME_LABEL], ) -_in_flight_requests: Set["RequestMetrics"] = set() +_in_flight_requests: set["RequestMetrics"] = set() # Protects the _in_flight_requests set from concurrent access _in_flight_requests_lock = threading.Lock() -def _get_in_flight_counts() -> Mapping[Tuple[str, ...], int]: +def _get_in_flight_counts() -> Mapping[tuple[str, ...], int]: """Returns a count of all in flight requests by (method, server_name)""" # Cast to a list to prevent it changing while the Prometheus # thread is collecting metrics @@ -152,7 +152,7 @@ def _get_in_flight_counts() -> Mapping[tuple[str, ...], int]: # Map from (method, name) -> int, the number of in flight requests of that # type. The key type is Tuple[str, str], but we leave the length unspecified # for compatibility with LaterGauge's annotations. - counts: Dict[Tuple[str, ...], int] = {} + counts: dict[tuple[str, ...], int] = {} for request_metric in request_metrics: key = ( request_metric.method, diff --git a/synapse/http/server.py b/synapse/http/server.py index d5af8758ac2..1f4728fba27 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -33,14 +33,11 @@ Any, Awaitable, Callable, - Dict, Iterable, Iterator, - List, Optional, Pattern, Protocol, - Tuple, Union, cast, ) @@ -267,7 +264,7 @@ async def wrapped_async_request_handler( # it is actually called with a SynapseRequest and a kwargs dict for the params, # but I can't figure out how to represent that.
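Concretely, a handler compatible with this calling convention might look like the following (a hypothetical sketch; the keyword parameters are the named groups from the path regex):

    async def on_GET(
        self, request: "SynapseRequest", room_id: str
    ) -> tuple[int, JsonDict]:
        return 200, {"room_id": room_id}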
ServletCallback = Callable[ - ..., Union[None, Awaitable[None], Tuple[int, Any], Awaitable[Tuple[int, Any]]] + ..., Union[None, Awaitable[None], tuple[int, Any], Awaitable[tuple[int, Any]]] ] @@ -354,7 +351,7 @@ async def _async_render_wrapper(self, request: "SynapseRequest") -> None: async def _async_render( self, request: "SynapseRequest" - ) -> Optional[Tuple[int, Any]]: + ) -> Optional[tuple[int, Any]]: """Delegates to `_async_render_` methods, or returns a 400 if no appropriate method exists. Can be overridden in sub classes for different routing. @@ -491,7 +488,7 @@ def __init__( self.clock = hs.get_clock() super().__init__(canonical_json, extract_context, clock=self.clock) # Map of path regex -> method -> callback. - self._routes: Dict[Pattern[str], Dict[bytes, _PathEntry]] = {} + self._routes: dict[Pattern[str], dict[bytes, _PathEntry]] = {} self.hs = hs def register_paths( @@ -527,7 +524,7 @@ def register_paths( def _get_handler_for_request( self, request: "SynapseRequest" - ) -> Tuple[ServletCallback, str, Dict[str, str]]: + ) -> tuple[ServletCallback, str, dict[str, str]]: """Finds a callback method to handle the given request. Returns: @@ -556,7 +553,7 @@ def _get_handler_for_request( # Huh. No one wanted to handle that? Fiiiiiine. raise UnrecognizedRequestError(code=404) - async def _async_render(self, request: "SynapseRequest") -> Tuple[int, Any]: + async def _async_render(self, request: "SynapseRequest") -> tuple[int, Any]: callback, servlet_classname, group_dict = self._get_handler_for_request(request) request.is_render_cancellable = is_function_cancellable(callback) @@ -758,7 +755,7 @@ def __init__( # Start producing if `registerProducer` was successful self.resumeProducing() - def _send_data(self, data: List[bytes]) -> None: + def _send_data(self, data: list[bytes]) -> None: """ Send a list of bytes as a chunk of a response. """ diff --git a/synapse/http/servlet.py b/synapse/http/servlet.py index 71e809b3f1c..66694e06079 100644 --- a/synapse/http/servlet.py +++ b/synapse/http/servlet.py @@ -27,13 +27,10 @@ from http import HTTPStatus from typing import ( TYPE_CHECKING, - List, Literal, Mapping, Optional, Sequence, - Tuple, - Type, TypeVar, overload, ) @@ -548,7 +545,7 @@ def parse_json_from_args( def parse_enum( request: Request, name: str, - E: Type[EnumT], + E: type[EnumT], default: EnumT, ) -> EnumT: ... @@ -557,7 +554,7 @@ def parse_enum( def parse_enum( request: Request, name: str, - E: Type[EnumT], + E: type[EnumT], *, required: Literal[True], ) -> EnumT: ... @@ -566,7 +563,7 @@ def parse_enum( def parse_enum( request: Request, name: str, - E: Type[EnumT], + E: type[EnumT], default: Optional[EnumT] = None, required: bool = False, ) -> Optional[EnumT]: @@ -637,18 +634,18 @@ def parse_strings_from_args( *, allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", -) -> Optional[List[str]]: ... +) -> Optional[list[str]]: ... @overload def parse_strings_from_args( args: Mapping[bytes, Sequence[bytes]], name: str, - default: List[str], + default: list[str], *, allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", -) -> List[str]: ... +) -> list[str]: ... @overload @@ -659,29 +656,29 @@ def parse_strings_from_args( required: Literal[True], allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", -) -> List[str]: ... +) -> list[str]: ... 
@overload def parse_strings_from_args( args: Mapping[bytes, Sequence[bytes]], name: str, - default: Optional[List[str]] = None, + default: Optional[list[str]] = None, *, required: bool = False, allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", -) -> Optional[List[str]]: ... +) -> Optional[list[str]]: ... def parse_strings_from_args( args: Mapping[bytes, Sequence[bytes]], name: str, - default: Optional[List[str]] = None, + default: Optional[list[str]] = None, required: bool = False, allowed_values: Optional[StrCollection] = None, encoding: str = "ascii", -) -> Optional[List[str]]: +) -> Optional[list[str]]: """ Parse a string parameter from the request query string list. @@ -892,7 +889,7 @@ def parse_json_object_from_request( Model = TypeVar("Model", bound=BaseModel) -def validate_json_object(content: JsonDict, model_type: Type[Model]) -> Model: +def validate_json_object(content: JsonDict, model_type: type[Model]) -> Model: """Validate a deserialized JSON object using the given pydantic model. Raises: @@ -922,7 +919,7 @@ def validate_json_object(content: JsonDict, model_type: Type[Model]) -> Model: def parse_and_validate_json_object_from_request( - request: Request, model_type: Type[Model] + request: Request, model_type: type[Model] ) -> Model: """Parse a JSON object from the body of a twisted HTTP request, then deserialise and validate using the given pydantic model. @@ -988,8 +985,8 @@ def __init__(self, hs: "HomeServer"): self.room_member_handler = hs.get_room_member_handler() async def resolve_room_id( - self, room_identifier: str, remote_room_hosts: Optional[List[str]] = None - ) -> Tuple[str, Optional[List[str]]]: + self, room_identifier: str, remote_room_hosts: Optional[list[str]] = None + ) -> tuple[str, Optional[list[str]]]: """ Resolve a room identifier to a room ID, if necessary. diff --git a/synapse/http/site.py b/synapse/http/site.py index cf31b64d80f..ccf6ff27f04 100644 --- a/synapse/http/site.py +++ b/synapse/http/site.py @@ -22,7 +22,7 @@ import logging import time from http import HTTPStatus -from typing import TYPE_CHECKING, Any, Generator, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, Generator, Optional, Union import attr from zope.interface import implementer @@ -266,7 +266,7 @@ def get_method(self) -> str: return self.method.decode("ascii") return method - def get_authenticated_entity(self) -> Tuple[Optional[str], Optional[str]]: + def get_authenticated_entity(self) -> tuple[Optional[str], Optional[str]]: """ Get the "authenticated" entity of the request, which might be the user performing the action, or a user being puppeted by a server admin. 
@@ -783,7 +783,7 @@ def __init__( self.access_logger = logging.getLogger(logger_name) self.server_version_string = server_version_string.encode("ascii") - self.connections: List[Protocol] = [] + self.connections: list[Protocol] = [] def buildProtocol(self, addr: IAddress) -> SynapseProtocol: protocol = SynapseProtocol( diff --git a/synapse/logging/context.py b/synapse/logging/context.py index 1b9c7703119..6a4425ff1de 100644 --- a/synapse/logging/context.py +++ b/synapse/logging/context.py @@ -41,8 +41,6 @@ Callable, Literal, Optional, - Tuple, - Type, TypeVar, Union, overload, @@ -393,7 +391,7 @@ def __enter__(self) -> "LoggingContext": def __exit__( self, - type: Optional[Type[BaseException]], + type: Optional[type[BaseException]], value: Optional[BaseException], traceback: Optional[TracebackType], ) -> None: @@ -498,13 +496,13 @@ def get_resource_usage(self) -> ContextResourceUsage: return res - def _get_cputime(self, current: "resource.struct_rusage") -> Tuple[float, float]: + def _get_cputime(self, current: "resource.struct_rusage") -> tuple[float, float]: """Get the cpu usage time between start() and the given rusage Args: rusage: the current resource usage - Returns: Tuple[float, float]: seconds in user mode, seconds in system mode + Returns: tuple[float, float]: seconds in user mode, seconds in system mode """ assert self.usage_start is not None @@ -672,7 +670,7 @@ def __enter__(self) -> None: def __exit__( self, - type: Optional[Type[BaseException]], + type: Optional[type[BaseException]], value: Optional[BaseException], traceback: Optional[TracebackType], ) -> None: diff --git a/synapse/logging/formatter.py b/synapse/logging/formatter.py index 228e5ed2784..e5d73a47a8f 100644 --- a/synapse/logging/formatter.py +++ b/synapse/logging/formatter.py @@ -23,7 +23,7 @@ import traceback from io import StringIO from types import TracebackType -from typing import Optional, Tuple, Type +from typing import Optional class LogFormatter(logging.Formatter): @@ -38,8 +38,8 @@ class LogFormatter(logging.Formatter): def formatException( self, - ei: Tuple[ - Optional[Type[BaseException]], + ei: tuple[ + Optional[type[BaseException]], Optional[BaseException], Optional[TracebackType], ], diff --git a/synapse/logging/opentracing.py b/synapse/logging/opentracing.py index 1c89a358dfc..fbb9971b328 100644 --- a/synapse/logging/opentracing.py +++ b/synapse/logging/opentracing.py @@ -183,13 +183,10 @@ def set_fates(clotho, lachesis, atropos, father="Zues", mother="Themis"): Callable, Collection, ContextManager, - Dict, Generator, Iterable, - List, Optional, Pattern, - Type, TypeVar, Union, cast, @@ -292,7 +289,7 @@ def report_span(self, span: "opentracing.Span") -> None: except Exception: logger.exception("Failed to report span") - RustReporter: Optional[Type[_WrappedRustReporter]] = _WrappedRustReporter + RustReporter: Optional[type[_WrappedRustReporter]] = _WrappedRustReporter except ImportError: RustReporter = None @@ -536,8 +533,8 @@ def whitelisted_homeserver(destination: str) -> bool: def start_active_span( operation_name: str, child_of: Optional[Union["opentracing.Span", "opentracing.SpanContext"]] = None, - references: Optional[List["opentracing.Reference"]] = None, - tags: Optional[Dict[str, str]] = None, + references: Optional[list["opentracing.Reference"]] = None, + tags: Optional[dict[str, str]] = None, start_time: Optional[float] = None, ignore_active_span: bool = False, finish_on_close: bool = True, @@ -577,7 +574,7 @@ def start_active_span_follows_from( operation_name: str, contexts: Collection, 
child_of: Optional[Union["opentracing.Span", "opentracing.SpanContext"]] = None, - tags: Optional[Dict[str, str]] = None, + tags: Optional[dict[str, str]] = None, start_time: Optional[float] = None, ignore_active_span: bool = False, *, @@ -631,10 +628,10 @@ def start_active_span_follows_from( def start_active_span_from_edu( - edu_content: Dict[str, Any], + edu_content: dict[str, Any], operation_name: str, - references: Optional[List["opentracing.Reference"]] = None, - tags: Optional[Dict[str, str]] = None, + references: Optional[list["opentracing.Reference"]] = None, + tags: Optional[dict[str, str]] = None, start_time: Optional[float] = None, ignore_active_span: bool = False, finish_on_close: bool = True, @@ -709,7 +706,7 @@ def set_tag(key: str, value: Union[str, bool, int, float]) -> None: @ensure_active_span("log") -def log_kv(key_values: Dict[str, Any], timestamp: Optional[float] = None) -> None: +def log_kv(key_values: dict[str, Any], timestamp: Optional[float] = None) -> None: """Log to the active span""" assert opentracing.tracer.active_span is not None opentracing.tracer.active_span.log_kv(key_values, timestamp) @@ -760,7 +757,7 @@ def is_context_forced_tracing( @ensure_active_span("inject the span into a header dict") def inject_header_dict( - headers: Dict[bytes, List[bytes]], + headers: dict[bytes, list[bytes]], destination: Optional[str] = None, check_destination: bool = True, ) -> None: @@ -792,7 +789,7 @@ def inject_header_dict( span = opentracing.tracer.active_span - carrier: Dict[str, str] = {} + carrier: dict[str, str] = {} assert span is not None opentracing.tracer.inject(span.context, opentracing.Format.HTTP_HEADERS, carrier) @@ -820,16 +817,16 @@ def inject_response_headers(response_headers: Headers) -> None: @ensure_active_span("inject the span into a header dict") -def inject_request_headers(headers: Dict[str, str]) -> None: +def inject_request_headers(headers: dict[str, str]) -> None: span = opentracing.tracer.active_span assert span is not None opentracing.tracer.inject(span.context, opentracing.Format.HTTP_HEADERS, headers) @ensure_active_span( - "get the active span context as a dict", ret=cast(Dict[str, str], {}) + "get the active span context as a dict", ret=cast(dict[str, str], {}) ) -def get_active_span_text_map(destination: Optional[str] = None) -> Dict[str, str]: +def get_active_span_text_map(destination: Optional[str] = None) -> dict[str, str]: """ Gets a span context as a dict. This can be used instead of manually injecting a span into an empty carrier. @@ -844,7 +841,7 @@ def get_active_span_text_map(destination: Optional[str] = None) -> Dict[str, str if destination and not whitelisted_homeserver(destination): return {} - carrier: Dict[str, str] = {} + carrier: dict[str, str] = {} assert opentracing.tracer.active_span is not None opentracing.tracer.inject( opentracing.tracer.active_span.context, opentracing.Format.TEXT_MAP, carrier @@ -859,7 +856,7 @@ def active_span_context_as_string() -> str: Returns: The active span context encoded as a string. """ - carrier: Dict[str, str] = {} + carrier: dict[str, str] = {} if opentracing: assert opentracing.tracer.active_span is not None opentracing.tracer.inject( @@ -888,12 +885,12 @@ def span_context_from_string(carrier: str) -> Optional["opentracing.SpanContext" Returns: The active span context decoded from a string. 
""" - payload: Dict[str, str] = json_decoder.decode(carrier) + payload: dict[str, str] = json_decoder.decode(carrier) return opentracing.tracer.extract(opentracing.Format.TEXT_MAP, payload) @only_if_tracing -def extract_text_map(carrier: Dict[str, str]) -> Optional["opentracing.SpanContext"]: +def extract_text_map(carrier: dict[str, str]) -> Optional["opentracing.SpanContext"]: """ Wrapper method for opentracing's tracer.extract for TEXT_MAP. Args: diff --git a/synapse/media/_base.py b/synapse/media/_base.py index d3a9a66f5a9..319ca662e23 100644 --- a/synapse/media/_base.py +++ b/synapse/media/_base.py @@ -29,12 +29,8 @@ TYPE_CHECKING, Awaitable, BinaryIO, - Dict, Generator, - List, Optional, - Tuple, - Type, ) import attr @@ -505,7 +501,7 @@ def __enter__(self) -> None: # noqa: B027 def __exit__( # noqa: B027 self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> None: @@ -570,7 +566,7 @@ def thumbnail_length(self) -> Optional[int]: return self.thumbnail.length -def get_filename_from_headers(headers: Dict[bytes, List[bytes]]) -> Optional[str]: +def get_filename_from_headers(headers: dict[bytes, list[bytes]]) -> Optional[str]: """ Get the filename of the downloaded file by inspecting the Content-Disposition HTTP header. @@ -618,7 +614,7 @@ def get_filename_from_headers(headers: Dict[bytes, List[bytes]]) -> Optional[str return upload_name -def _parse_header(line: bytes) -> Tuple[bytes, Dict[bytes, bytes]]: +def _parse_header(line: bytes) -> tuple[bytes, dict[bytes, bytes]]: """Parse a Content-type like header. Cargo-culted from `cgi`, but works on bytes rather than strings. diff --git a/synapse/media/filepath.py b/synapse/media/filepath.py index 3d7863e2fb5..76599716615 100644 --- a/synapse/media/filepath.py +++ b/synapse/media/filepath.py @@ -24,7 +24,7 @@ import os import re import string -from typing import Any, Callable, List, TypeVar, Union, cast +from typing import Any, Callable, TypeVar, Union, cast NEW_FORMAT_ID_RE = re.compile(r"^\d\d\d\d-\d\d-\d\d") @@ -46,7 +46,7 @@ def _wrapped(self: "MediaFilePaths", *args: Any, **kwargs: Any) -> str: GetPathMethod = TypeVar( - "GetPathMethod", bound=Union[Callable[..., str], Callable[..., List[str]]] + "GetPathMethod", bound=Union[Callable[..., str], Callable[..., list[str]]] ) @@ -73,7 +73,7 @@ def _wrap_with_jail_check_inner(func: GetPathMethod) -> GetPathMethod: @functools.wraps(func) def _wrapped( self: "MediaFilePaths", *args: Any, **kwargs: Any - ) -> Union[str, List[str]]: + ) -> Union[str, list[str]]: path_or_paths = func(self, *args, **kwargs) if isinstance(path_or_paths, list): @@ -303,7 +303,7 @@ def url_cache_filepath_rel(self, media_id: str) -> str: url_cache_filepath = _wrap_in_base_path(url_cache_filepath_rel) @_wrap_with_jail_check(relative=False) - def url_cache_filepath_dirs_to_delete(self, media_id: str) -> List[str]: + def url_cache_filepath_dirs_to_delete(self, media_id: str) -> list[str]: "The dirs to try and remove if we delete the media_id file" if NEW_FORMAT_ID_RE.match(media_id): return [ @@ -376,7 +376,7 @@ def url_cache_thumbnail_directory_rel(self, media_id: str) -> str: ) @_wrap_with_jail_check(relative=False) - def url_cache_thumbnail_dirs_to_delete(self, media_id: str) -> List[str]: + def url_cache_thumbnail_dirs_to_delete(self, media_id: str) -> list[str]: "The dirs to try and remove if we delete the media_id thumbnails" # Media id is of the form # E.g.: 2017-09-28-fsdRDt24DS234dsf diff --git 
a/synapse/media/media_repository.py b/synapse/media/media_repository.py index 238dc6cb2f3..eda14107673 100644 --- a/synapse/media/media_repository.py +++ b/synapse/media/media_repository.py @@ -24,7 +24,7 @@ import os import shutil from io import BytesIO -from typing import IO, TYPE_CHECKING, Dict, List, Optional, Set, Tuple +from typing import IO, TYPE_CHECKING, Optional import attr from matrix_common.types.mxc_uri import MXCUri @@ -109,8 +109,8 @@ def __init__(self, hs: "HomeServer"): self.remote_media_linearizer = Linearizer(name="media_remote", clock=self.clock) - self.recently_accessed_remotes: Set[Tuple[str, str]] = set() - self.recently_accessed_locals: Set[str] = set() + self.recently_accessed_remotes: set[tuple[str, str]] = set() + self.recently_accessed_locals: set[str] = set() self.federation_domain_whitelist = ( hs.config.federation.federation_domain_whitelist @@ -221,7 +221,7 @@ def mark_recently_accessed(self, server_name: Optional[str], media_id: str) -> N self.recently_accessed_locals.add(media_id) @trace - async def create_media_id(self, auth_user: UserID) -> Tuple[str, int]: + async def create_media_id(self, auth_user: UserID) -> tuple[str, int]: """Create and store a media ID for a local user and return the MXC URI and its expiration. @@ -242,7 +242,7 @@ async def create_media_id(self, auth_user: UserID) -> Tuple[str, int]: return f"mxc://{self.server_name}/{media_id}", now + self.unused_expiration_time @trace - async def reached_pending_media_limit(self, auth_user: UserID) -> Tuple[bool, int]: + async def reached_pending_media_limit(self, auth_user: UserID) -> tuple[bool, int]: """Check if the user is over the limit for pending media uploads. Args: @@ -696,7 +696,7 @@ async def _get_remote_media_impl( ip_address: str, use_federation_endpoint: bool, allow_authenticated: bool, - ) -> Tuple[Optional[Responder], RemoteMedia]: + ) -> tuple[Optional[Responder], RemoteMedia]: """Looks for media in local cache, if not there then attempt to download from remote server. @@ -1052,7 +1052,7 @@ async def _federation_download_remote_file( def _get_thumbnail_requirements( self, media_type: str - ) -> Tuple[ThumbnailRequirement, ...]: + ) -> tuple[ThumbnailRequirement, ...]: scpos = media_type.find(";") if scpos > 0: media_type = media_type[:scpos] @@ -1099,7 +1099,7 @@ async def generate_local_exact_thumbnail( t_method: str, t_type: str, url_cache: bool, - ) -> Optional[Tuple[str, FileInfo]]: + ) -> Optional[tuple[str, FileInfo]]: input_path = await self.media_storage.ensure_media_is_in_local_cache( FileInfo(None, media_id, url_cache=url_cache) ) @@ -1308,7 +1308,7 @@ async def _generate_thumbnails( # We deduplicate the thumbnail sizes by ignoring the cropped versions if # they have the same dimensions of a scaled one. 
- thumbnails: Dict[Tuple[int, int, str], str] = {} + thumbnails: dict[tuple[int, int, str], str] = {} for requirement in requirements: if requirement.method == "crop": thumbnails.setdefault( @@ -1461,7 +1461,7 @@ async def _apply_media_retention_rules(self) -> None: delete_protected_media=False, ) - async def delete_old_remote_media(self, before_ts: int) -> Dict[str, int]: + async def delete_old_remote_media(self, before_ts: int) -> dict[str, int]: old_media = await self.store.get_remote_media_ids( before_ts, include_quarantined_media=False ) @@ -1497,8 +1497,8 @@ async def delete_old_remote_media(self, before_ts: int) -> Dict[str, int]: return {"deleted": deleted} async def delete_local_media_ids( - self, media_ids: List[str] - ) -> Tuple[List[str], int]: + self, media_ids: list[str] + ) -> tuple[list[str], int]: """ Delete the given local or remote media ID from this server @@ -1516,7 +1516,7 @@ async def delete_old_local_media( keep_profiles: bool = True, delete_quarantined_media: bool = False, delete_protected_media: bool = False, - ) -> Tuple[List[str], int]: + ) -> tuple[list[str], int]: """ Delete local or remote media from this server by size and timestamp. Removes media files, any thumbnails and cached URLs. @@ -1543,8 +1543,8 @@ async def delete_old_local_media( return await self._remove_local_media_from_disk(old_media) async def _remove_local_media_from_disk( - self, media_ids: List[str] - ) -> Tuple[List[str], int]: + self, media_ids: list[str] + ) -> tuple[list[str], int]: """ Delete local or remote media from this server. Removes media files, any thumbnails and cached URLs. diff --git a/synapse/media/media_storage.py b/synapse/media/media_storage.py index 99d002a8df0..f6be9edf502 100644 --- a/synapse/media/media_storage.py +++ b/synapse/media/media_storage.py @@ -34,11 +34,8 @@ AsyncIterator, BinaryIO, Callable, - List, Optional, Sequence, - Tuple, - Type, Union, cast, ) @@ -205,7 +202,7 @@ async def write_to_file(self, source: IO, output: IO) -> None: @contextlib.asynccontextmanager async def store_into_file( self, file_info: FileInfo - ) -> AsyncIterator[Tuple[BinaryIO, str]]: + ) -> AsyncIterator[tuple[BinaryIO, str]]: """Async Context manager used to get a file like object to write into, as described by file_info. @@ -423,7 +420,7 @@ def write_to_consumer(self, consumer: IConsumer) -> Deferred: def __exit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> None: @@ -674,7 +671,7 @@ def __init__( self, name: bytes, value: Any, - params: Optional[List[Tuple[Any, Any]]] = None, + params: Optional[list[tuple[Any, Any]]] = None, ): self.name = name self.value = value diff --git a/synapse/media/oembed.py b/synapse/media/oembed.py index 45b481f229b..059d8ad1cf6 100644 --- a/synapse/media/oembed.py +++ b/synapse/media/oembed.py @@ -21,7 +21,7 @@ import html import logging import urllib.parse -from typing import TYPE_CHECKING, List, Optional, cast +from typing import TYPE_CHECKING, Optional, cast import attr @@ -118,7 +118,7 @@ def autodiscover_from_html(self, tree: "etree._Element") -> Optional[str]: # Search for link elements with the proper rel and type attributes. # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. 
for tag in cast( - List["etree._Element"], + list["etree._Element"], tree.xpath("//link[@rel='alternate'][@type='application/json+oembed']"), ): if "href" in tag.attrib: @@ -127,7 +127,7 @@ def autodiscover_from_html(self, tree: "etree._Element") -> Optional[str]: # Some providers (e.g. Flickr) use alternative instead of alternate. # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. for tag in cast( - List["etree._Element"], + list["etree._Element"], tree.xpath("//link[@rel='alternative'][@type='application/json+oembed']"), ): if "href" in tag.attrib: @@ -223,10 +223,10 @@ def parse_oembed_response(self, url: str, raw_body: bytes) -> OEmbedResult: return OEmbedResult(open_graph_response, author_name, cache_age) -def _fetch_urls(tree: "etree._Element", tag_name: str) -> List[str]: +def _fetch_urls(tree: "etree._Element", tag_name: str) -> list[str]: results = [] # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. - for tag in cast(List["etree._Element"], tree.xpath("//*/" + tag_name)): + for tag in cast(list["etree._Element"], tree.xpath("//*/" + tag_name)): if "src" in tag.attrib: results.append(cast(str, tag.attrib["src"])) return results diff --git a/synapse/media/preview_html.py b/synapse/media/preview_html.py index 38ae126a239..6a8e4791527 100644 --- a/synapse/media/preview_html.py +++ b/synapse/media/preview_html.py @@ -24,12 +24,9 @@ from typing import ( TYPE_CHECKING, Callable, - Dict, Generator, Iterable, - List, Optional, - Set, Union, cast, ) @@ -83,7 +80,7 @@ def _get_html_media_encodings( The character encoding of the body, as a string. """ # There's no point in returning an encoding more than once. - attempted_encodings: Set[str] = set() + attempted_encodings: set[str] = set() # Limit searches to the first 1kb, since it ought to be at the top. body_start = body[:1024] @@ -190,7 +187,7 @@ def _get_meta_tags( property: str, prefix: str, property_mapper: Optional[Callable[[str], Optional[str]]] = None, -) -> Dict[str, Optional[str]]: +) -> dict[str, Optional[str]]: """ Search for meta tags prefixed with a particular string. @@ -207,10 +204,10 @@ def _get_meta_tags( """ # This actually returns Dict[str, str], but the caller sets this as a variable # which is Dict[str, Optional[str]]. - results: Dict[str, Optional[str]] = {} + results: dict[str, Optional[str]] = {} # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. for tag in cast( - List["etree._Element"], + list["etree._Element"], tree.xpath( f"//*/meta[starts-with(@{property}, '{prefix}:')][@content][not(@content='')]" ), @@ -256,7 +253,7 @@ def _map_twitter_to_open_graph(key: str) -> Optional[str]: return "og" + key[7:] -def parse_html_to_open_graph(tree: "etree._Element") -> Dict[str, Optional[str]]: +def parse_html_to_open_graph(tree: "etree._Element") -> dict[str, Optional[str]]: """ Parse the HTML document into an Open Graph response. @@ -315,7 +312,7 @@ def parse_html_to_open_graph(tree: "etree._Element") -> Dict[str, Optional[str]] # Attempt to find a title from the title tag, or the biggest header on the page. # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. 
title = cast( - List["etree._ElementUnicodeResult"], + list["etree._ElementUnicodeResult"], tree.xpath("((//title)[1] | (//h1)[1] | (//h2)[1] | (//h3)[1])/text()"), ) if title: @@ -326,7 +323,7 @@ def parse_html_to_open_graph(tree: "etree._Element") -> Dict[str, Optional[str]] if "og:image" not in og: # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. meta_image = cast( - List["etree._ElementUnicodeResult"], + list["etree._ElementUnicodeResult"], tree.xpath( "//*/meta[translate(@itemprop, 'IMAGE', 'image')='image'][not(@content='')]/@content[1]" ), @@ -340,7 +337,7 @@ def parse_html_to_open_graph(tree: "etree._Element") -> Dict[str, Optional[str]] # # TODO: consider inlined CSS styles as well as width & height attribs images = cast( - List["etree._Element"], + list["etree._Element"], tree.xpath("//img[@src][number(@width)>10][number(@height)>10]"), ) images = sorted( @@ -352,7 +349,7 @@ def parse_html_to_open_graph(tree: "etree._Element") -> Dict[str, Optional[str]] # If no images were found, try to find *any* images. if not images: # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. - images = cast(List["etree._Element"], tree.xpath("//img[@src][1]")) + images = cast(list["etree._Element"], tree.xpath("//img[@src][1]")) if images: og["og:image"] = cast(str, images[0].attrib["src"]) @@ -360,7 +357,7 @@ def parse_html_to_open_graph(tree: "etree._Element") -> Dict[str, Optional[str]] else: # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. favicons = cast( - List["etree._ElementUnicodeResult"], + list["etree._ElementUnicodeResult"], tree.xpath("//link[@href][contains(@rel, 'icon')]/@href[1]"), ) if favicons: @@ -370,7 +367,7 @@ def parse_html_to_open_graph(tree: "etree._Element") -> Dict[str, Optional[str]] # Check the first meta description tag for content. # Cast: the type returned by xpath depends on the xpath expression: mypy can't deduce this. meta_description = cast( - List["etree._ElementUnicodeResult"], + list["etree._ElementUnicodeResult"], tree.xpath( "//*/meta[translate(@name, 'DESCRIPTION', 'description')='description'][not(@content='')]/@content[1]" ), @@ -443,7 +440,7 @@ def parse_html_description(tree: "etree._Element") -> Optional[str]: def _iterate_over_text( tree: Optional["etree._Element"], - tags_to_ignore: Set[object], + tags_to_ignore: set[object], stack_limit: int = 1024, ) -> Generator[str, None, None]: """Iterate over the tree returning text nodes in a depth first fashion, @@ -463,7 +460,7 @@ def _iterate_over_text( # This is a stack whose items are elements to iterate over *or* strings # to be returned. 
- elements: List[Union[str, "etree._Element"]] = [tree] + elements: list[Union[str, "etree._Element"]] = [tree] while elements: el = elements.pop() diff --git a/synapse/media/thumbnailer.py b/synapse/media/thumbnailer.py index 5d9afda3229..cc2fe7318b3 100644 --- a/synapse/media/thumbnailer.py +++ b/synapse/media/thumbnailer.py @@ -22,7 +22,7 @@ import logging from io import BytesIO from types import TracebackType -from typing import TYPE_CHECKING, List, Optional, Tuple, Type +from typing import TYPE_CHECKING, Optional from PIL import Image @@ -116,7 +116,7 @@ def __init__(self, input_path: str): logger.info("Error parsing image EXIF information: %s", e) @trace - def transpose(self) -> Tuple[int, int]: + def transpose(self) -> tuple[int, int]: """Transpose the image using its EXIF Orientation tag Returns: @@ -134,7 +134,7 @@ def transpose(self) -> Tuple[int, int]: self.image.info["exif"] = None return self.image.size - def aspect(self, max_width: int, max_height: int) -> Tuple[int, int]: + def aspect(self, max_width: int, max_height: int) -> tuple[int, int]: """Calculate the largest size that preserves aspect ratio which fits within the given rectangle:: @@ -246,7 +246,7 @@ def __enter__(self) -> "Thumbnailer": def __exit__( self, - type: Optional[Type[BaseException]], + type: Optional[type[BaseException]], value: Optional[BaseException], traceback: Optional[TracebackType], ) -> None: @@ -553,7 +553,7 @@ async def _select_and_respond_with_thumbnail( desired_height: int, desired_method: str, desired_type: str, - thumbnail_infos: List[ThumbnailInfo], + thumbnail_infos: list[ThumbnailInfo], media_id: str, file_id: str, url_cache: bool, @@ -719,7 +719,7 @@ def _select_thumbnail( desired_height: int, desired_method: str, desired_type: str, - thumbnail_infos: List[ThumbnailInfo], + thumbnail_infos: list[ThumbnailInfo], file_id: str, url_cache: bool, server_name: Optional[str], @@ -750,12 +750,12 @@ def _select_thumbnail( if desired_method == "crop": # Thumbnails that match equal or larger sizes of desired width/height. - crop_info_list: List[ - Tuple[int, int, int, bool, Optional[int], ThumbnailInfo] + crop_info_list: list[ + tuple[int, int, int, bool, Optional[int], ThumbnailInfo] ] = [] # Other thumbnails. - crop_info_list2: List[ - Tuple[int, int, int, bool, Optional[int], ThumbnailInfo] + crop_info_list2: list[ + tuple[int, int, int, bool, Optional[int], ThumbnailInfo] ] = [] for info in thumbnail_infos: # Skip thumbnails generated with different methods. @@ -801,9 +801,9 @@ def _select_thumbnail( thumbnail_info = min(crop_info_list2, key=lambda t: t[:-1])[-1] elif desired_method == "scale": # Thumbnails that match equal or larger sizes of desired width/height. - info_list: List[Tuple[int, bool, int, ThumbnailInfo]] = [] + info_list: list[tuple[int, bool, int, ThumbnailInfo]] = [] # Other thumbnails. - info_list2: List[Tuple[int, bool, int, ThumbnailInfo]] = [] + info_list2: list[tuple[int, bool, int, ThumbnailInfo]] = [] for info in thumbnail_infos: # Skip thumbnails generated with different methods. 
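Reviewer note: the `crop_info_list`/`info_list` hunks above lean on tuples ordering element-wise, picking the best thumbnail with `min(..., key=lambda t: t[:-1])[-1]`. A self-contained sketch of that tuple-prefix selection pattern, with invented data:

    # Each entry: (size delta, quality penalty, payload); the payload rides
    # along in the last slot and is never compared.
    candidates: list[tuple[int, bool, str]] = [
        (3, False, "a"),
        (1, True, "b"),
        (1, False, "c"),
    ]
    best = min(candidates, key=lambda t: t[:-1])[-1]
    print(best)  # "c" -- (1, False) sorts before (1, True) and (3, False)
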
diff --git a/synapse/media/url_previewer.py b/synapse/media/url_previewer.py index 1a82cc46e3e..2a63842fb70 100644 --- a/synapse/media/url_previewer.py +++ b/synapse/media/url_previewer.py @@ -28,7 +28,7 @@ import shutil import sys import traceback -from typing import TYPE_CHECKING, BinaryIO, Iterable, Optional, Tuple +from typing import TYPE_CHECKING, BinaryIO, Iterable, Optional from urllib.parse import urljoin, urlparse, urlsplit from urllib.request import urlopen @@ -705,7 +705,7 @@ async def _precache_image_url( async def _handle_oembed_response( self, url: str, media_info: MediaInfo, expiration_ms: int - ) -> Tuple[JsonDict, Optional[str], int]: + ) -> tuple[JsonDict, Optional[str], int]: """ Parse the downloaded oEmbed info. diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index 2ffb14070b7..def21ac942e 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -28,15 +28,11 @@ from importlib import metadata from typing import ( Callable, - Dict, Generic, Iterable, Mapping, Optional, Sequence, - Set, - Tuple, - Type, TypeVar, Union, cast, @@ -161,10 +157,10 @@ class LaterGauge(Collector): name: str desc: str labelnames: Optional[StrSequence] = attr.ib(hash=False) - _instance_id_to_hook_map: Dict[ + _instance_id_to_hook_map: dict[ Optional[str], # instance_id Callable[ - [], Union[Mapping[Tuple[str, ...], Union[int, float]], Union[int, float]] + [], Union[Mapping[tuple[str, ...], Union[int, float]], Union[int, float]] ], ] = attr.ib(factory=dict, hash=False) """ @@ -206,7 +202,7 @@ def register_hook( *, homeserver_instance_id: Optional[str], hook: Callable[ - [], Union[Mapping[Tuple[str, ...], Union[int, float]], Union[int, float]] + [], Union[Mapping[tuple[str, ...], Union[int, float]], Union[int, float]] ], ) -> None: """ @@ -260,7 +256,7 @@ def __attrs_post_init__(self) -> None: all_later_gauges_to_clean_up_on_shutdown[self.name] = self -all_later_gauges_to_clean_up_on_shutdown: Dict[str, LaterGauge] = {} +all_later_gauges_to_clean_up_on_shutdown: dict[str, LaterGauge] = {} """ Track all `LaterGauge` instances so we can remove any associated hooks during homeserver shutdown. @@ -302,15 +298,15 @@ def __init__( # Create a class which have the sub_metrics values as attributes, which # default to 0 on initialization. Used to pass to registered callbacks. - self._metrics_class: Type[MetricsEntry] = attr.make_class( + self._metrics_class: type[MetricsEntry] = attr.make_class( "_MetricsEntry", attrs={x: attr.ib(default=0) for x in sub_metrics}, slots=True, ) # Counts number of in flight blocks for a given set of label values - self._registrations: Dict[ - Tuple[str, ...], Set[Callable[[MetricsEntry], None]] + self._registrations: dict[ + tuple[str, ...], set[Callable[[MetricsEntry], None]] ] = {} # Protects access to _registrations @@ -320,7 +316,7 @@ def __init__( def register( self, - key: Tuple[str, ...], + key: tuple[str, ...], callback: Callable[[MetricsEntry], None], ) -> None: """Registers that we've entered a new block with labels `key`. 
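Reviewer note: `_registrations` above is a dict keyed by tuples of label values; nested builtin generics compose exactly like the old `typing` aliases did. A hedged sketch of that shape (names invented):

    from typing import Callable

    registrations: dict[tuple[str, ...], set[Callable[[], None]]] = {}

    # Register a callback under a variable-length label-value key.
    registrations.setdefault(("GET", "/sync"), set()).add(lambda: None)
    print(len(registrations[("GET", "/sync")]))  # 1
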
@@ -349,7 +345,7 @@ def register( def unregister( self, - key: Tuple[str, ...], + key: tuple[str, ...], callback: Callable[[MetricsEntry], None], ) -> None: """ @@ -424,7 +420,7 @@ def __init__( name: str, documentation: str, gsum_value: float, - buckets: Optional[Sequence[Tuple[str, float]]] = None, + buckets: Optional[Sequence[tuple[str, float]]] = None, labelnames: StrSequence = (), labelvalues: StrSequence = (), unit: str = "", diff --git a/synapse/metrics/background_process_metrics.py b/synapse/metrics/background_process_metrics.py index 05e84038acd..b457369211d 100644 --- a/synapse/metrics/background_process_metrics.py +++ b/synapse/metrics/background_process_metrics.py @@ -29,13 +29,11 @@ Awaitable, Callable, ContextManager, - Dict, Generator, Iterable, Optional, Protocol, Set, - Type, TypeVar, Union, ) @@ -134,7 +132,7 @@ # map from description to a counter, so that we can name our logcontexts # incrementally. (It actually duplicates _background_process_start_count, but # it's much simpler to do so than to try to combine them.) -_background_process_counts: Dict[str, int] = {} +_background_process_counts: dict[str, int] = {} # Set of all running background processes that became active active since the # last time metrics were scraped (i.e. background processes that performed some @@ -531,7 +529,7 @@ def start(self, rusage: "Optional[resource.struct_rusage]") -> None: def __exit__( self, - type: Optional[Type[BaseException]], + type: Optional[type[BaseException]], value: Optional[BaseException], traceback: Optional[TracebackType], ) -> None: diff --git a/synapse/module_api/__init__.py b/synapse/module_api/__init__.py index ea0887966ae..9287747ceaf 100644 --- a/synapse/module_api/__init__.py +++ b/synapse/module_api/__init__.py @@ -26,13 +26,10 @@ Awaitable, Callable, Collection, - Dict, Generator, Iterable, - List, Mapping, Optional, - Tuple, TypeVar, Union, ) @@ -559,7 +556,7 @@ def register_password_auth_provider_callbacks( check_3pid_auth: Optional[CHECK_3PID_AUTH_CALLBACK] = None, on_logged_out: Optional[ON_LOGGED_OUT_CALLBACK] = None, auth_checkers: Optional[ - Dict[Tuple[str, Tuple[str, ...]], CHECK_AUTH_CALLBACK] + dict[tuple[str, tuple[str, ...]], CHECK_AUTH_CALLBACK] ] = None, is_3pid_allowed: Optional[IS_3PID_ALLOWED_CALLBACK] = None, get_username_for_registration: Optional[ @@ -829,7 +826,7 @@ async def get_profile_for_user(self, localpart: str) -> ProfileInfo: user_id = UserID.from_string(f"@{localpart}:{server_name}") return await self._store.get_profileinfo(user_id) - async def get_threepids_for_user(self, user_id: str) -> List[Dict[str, str]]: + async def get_threepids_for_user(self, user_id: str) -> list[dict[str, str]]: """Look up the threepids (email addresses and phone numbers) associated with the given Matrix user ID. @@ -865,8 +862,8 @@ def register( self, localpart: str, displayname: Optional[str] = None, - emails: Optional[List[str]] = None, - ) -> Generator["defer.Deferred[Any]", Any, Tuple[str, str]]: + emails: Optional[list[str]] = None, + ) -> Generator["defer.Deferred[Any]", Any, tuple[str, str]]: """Registers a new user with given localpart and optional displayname, emails. Also returns an access token for the new user. @@ -896,7 +893,7 @@ def register_user( self, localpart: str, displayname: Optional[str] = None, - emails: Optional[List[str]] = None, + emails: Optional[list[str]] = None, admin: bool = False, ) -> "defer.Deferred[str]": """Registers a new user with given localpart and optional displayname, emails. 
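Reviewer note: `register` above keeps `Generator`'s three parameters (yield type, send type, return type); only the `Tuple` in the return slot changes. A runnable sketch of that annotation style, assuming a plain generator rather than Twisted's `defer.Deferred` (PEP 585 also covers the `collections.abc` generics used here):

    from collections.abc import Generator

    def register_sketch() -> Generator[str, None, tuple[str, str]]:
        yield "registering"  # progress value
        return ("@user:server", "access_token")  # invented return shape

    gen = register_sketch()
    try:
        while True:
            next(gen)
    except StopIteration as e:
        print(e.value)  # ('@user:server', 'access_token')
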
@@ -931,7 +928,7 @@ def register_device( user_id: str, device_id: Optional[str] = None, initial_display_name: Optional[str] = None, - ) -> "defer.Deferred[Tuple[str, str, Optional[int], Optional[str]]]": + ) -> "defer.Deferred[tuple[str, str, Optional[int], Optional[str]]]": """Register a device for a user and generate an access token. Added in Synapse v1.2.0. @@ -1085,7 +1082,7 @@ def register_cached_function(self, cached_func: CachedFunction) -> None: ) async def invalidate_cache( - self, cached_func: CachedFunction, keys: Tuple[Any, ...] + self, cached_func: CachedFunction, keys: tuple[Any, ...] ) -> None: """Invalidate a cache entry of a cached function across workers. The cached function needs to be registered on all workers first with `register_cached_function`. @@ -1138,7 +1135,7 @@ async def complete_sso_login_async( @defer.inlineCallbacks def get_state_events_in_room( - self, room_id: str, types: Iterable[Tuple[str, Optional[str]]] + self, room_id: str, types: Iterable[tuple[str, Optional[str]]] ) -> Generator[defer.Deferred, Any, Iterable[EventBase]]: """Gets current state events for the given room. @@ -1170,7 +1167,7 @@ async def update_room_membership( room_id: str, new_membership: str, content: Optional[JsonDict] = None, - remote_room_hosts: Optional[List[str]] = None, + remote_room_hosts: Optional[list[str]] = None, ) -> EventBase: """Updates the membership of a user to the given value. @@ -1346,7 +1343,7 @@ async def send_local_online_presence_to(self, users: Iterable[str]) -> None: ) async def set_presence_for_users( - self, users: Mapping[str, Tuple[str, Optional[str]]] + self, users: Mapping[str, tuple[str, Optional[str]]] ) -> None: """ Update the internal presence state of users. @@ -1490,7 +1487,7 @@ async def send_http_push_notification( content: JsonDict, tweaks: Optional[JsonMapping] = None, default_payload: Optional[JsonMapping] = None, - ) -> Dict[str, bool]: + ) -> dict[str, bool]: """Send an HTTP push notification that is forwarded to the registered push gateway for the specified user/device. @@ -1554,9 +1551,9 @@ async def send_mail( def read_templates( self, - filenames: List[str], + filenames: list[str], custom_template_directory: Optional[str] = None, - ) -> List[jinja2.Template]: + ) -> list[jinja2.Template]: """Read and load the content of the template files at the given location. By default, Synapse will look for these templates in its configured template directory, but another directory to search in can be provided. @@ -1595,7 +1592,7 @@ def is_mine(self, id: Union[str, DomainSpecificString]) -> bool: async def get_user_ip_and_agents( self, user_id: str, since_ts: int = 0 - ) -> List[UserIpAndAgent]: + ) -> list[UserIpAndAgent]: """ Return the list of user IPs and agents for a user. @@ -1638,7 +1635,7 @@ async def get_user_ip_and_agents( async def get_room_state( self, room_id: str, - event_filter: Optional[Iterable[Tuple[str, Optional[str]]]] = None, + event_filter: Optional[Iterable[tuple[str, Optional[str]]]] = None, ) -> StateMap[EventBase]: """Returns the current state of the given room. @@ -1803,7 +1800,7 @@ async def store_remote_3pid_association( await self._store.add_user_bound_threepid(user_id, medium, address, id_server) def check_push_rule_actions( - self, actions: List[Union[str, Dict[str, str]]] + self, actions: list[Union[str, dict[str, str]]] ) -> None: """Checks if the given push rule actions are valid according to the Matrix specification. 
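Reviewer note: `check_push_rule_actions` above accepts `list[Union[str, dict[str, str]]]`, i.e. a mix of bare action names and tweak objects. A rough sketch of a validator for that shape (not Synapse's real logic):

    from typing import Union

    def check_actions(actions: list[Union[str, dict[str, str]]]) -> None:
        for action in actions:
            if isinstance(action, str):
                continue  # e.g. "notify"
            if isinstance(action, dict) and "set_tweak" in action:
                continue  # e.g. {"set_tweak": "sound"}
            raise ValueError(f"invalid push rule action: {action!r}")

    check_actions(["notify", {"set_tweak": "sound"}])  # passes silently
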
@@ -1827,7 +1824,7 @@ async def set_push_rule_action( scope: str, kind: str, rule_id: str, - actions: List[Union[str, Dict[str, str]]], + actions: list[Union[str, dict[str, str]]], ) -> None: """Changes the actions of an existing push rule for the given user. @@ -1866,7 +1863,7 @@ async def set_push_rule_action( async def get_monthly_active_users_by_service( self, start_timestamp: Optional[int] = None, end_timestamp: Optional[int] = None - ) -> List[Tuple[str, str]]: + ) -> list[tuple[str, str]]: """Generates list of monthly active users and their services. Please see corresponding storage docstring for more details. @@ -1912,7 +1909,7 @@ async def get_canonical_room_alias(self, room_id: RoomID) -> Optional[RoomAlias] return RoomAlias.from_string(room_alias_str) return None - async def lookup_room_alias(self, room_alias: str) -> Tuple[str, List[str]]: + async def lookup_room_alias(self, room_alias: str) -> tuple[str, list[str]]: """ Get the room ID associated with a room alias. @@ -1942,7 +1939,7 @@ async def create_room( config: JsonDict, ratelimit: bool = True, creator_join_profile: Optional[JsonDict] = None, - ) -> Tuple[str, Optional[str]]: + ) -> tuple[str, Optional[str]]: """Creates a new room. Added in Synapse v1.65.0. diff --git a/synapse/module_api/callbacks/account_validity_callbacks.py b/synapse/module_api/callbacks/account_validity_callbacks.py index a989249280e..da01414d9ab 100644 --- a/synapse/module_api/callbacks/account_validity_callbacks.py +++ b/synapse/module_api/callbacks/account_validity_callbacks.py @@ -20,7 +20,7 @@ # import logging -from typing import Awaitable, Callable, List, Optional, Tuple +from typing import Awaitable, Callable, Optional from twisted.web.http import Request @@ -33,15 +33,15 @@ # Temporary hooks to allow for a transition from `/_matrix/client` endpoints # to `/_synapse/client/account_validity`. See `register_callbacks` below. 
ON_LEGACY_SEND_MAIL_CALLBACK = Callable[[str], Awaitable] -ON_LEGACY_RENEW_CALLBACK = Callable[[str], Awaitable[Tuple[bool, bool, int]]] +ON_LEGACY_RENEW_CALLBACK = Callable[[str], Awaitable[tuple[bool, bool, int]]] ON_LEGACY_ADMIN_REQUEST = Callable[[Request], Awaitable] class AccountValidityModuleApiCallbacks: def __init__(self) -> None: - self.is_user_expired_callbacks: List[IS_USER_EXPIRED_CALLBACK] = [] - self.on_user_registration_callbacks: List[ON_USER_REGISTRATION_CALLBACK] = [] - self.on_user_login_callbacks: List[ON_USER_LOGIN_CALLBACK] = [] + self.is_user_expired_callbacks: list[IS_USER_EXPIRED_CALLBACK] = [] + self.on_user_registration_callbacks: list[ON_USER_REGISTRATION_CALLBACK] = [] + self.on_user_login_callbacks: list[ON_USER_LOGIN_CALLBACK] = [] self.on_legacy_send_mail_callback: Optional[ON_LEGACY_SEND_MAIL_CALLBACK] = None self.on_legacy_renew_callback: Optional[ON_LEGACY_RENEW_CALLBACK] = None diff --git a/synapse/module_api/callbacks/media_repository_callbacks.py b/synapse/module_api/callbacks/media_repository_callbacks.py index 7d3aed9d666..7cb56e558ba 100644 --- a/synapse/module_api/callbacks/media_repository_callbacks.py +++ b/synapse/module_api/callbacks/media_repository_callbacks.py @@ -13,7 +13,7 @@ # import logging -from typing import TYPE_CHECKING, Awaitable, Callable, List, Optional +from typing import TYPE_CHECKING, Awaitable, Callable, Optional from synapse.config.repository import MediaUploadLimit from synapse.types import JsonDict @@ -30,7 +30,7 @@ IS_USER_ALLOWED_TO_UPLOAD_MEDIA_OF_SIZE_CALLBACK = Callable[[str, int], Awaitable[bool]] GET_MEDIA_UPLOAD_LIMITS_FOR_USER_CALLBACK = Callable[ - [str], Awaitable[Optional[List[MediaUploadLimit]]] + [str], Awaitable[Optional[list[MediaUploadLimit]]] ] ON_MEDIA_UPLOAD_LIMIT_EXCEEDED_CALLBACK = Callable[ @@ -42,16 +42,16 @@ class MediaRepositoryModuleApiCallbacks: def __init__(self, hs: "HomeServer") -> None: self.server_name = hs.hostname self.clock = hs.get_clock() - self._get_media_config_for_user_callbacks: List[ + self._get_media_config_for_user_callbacks: list[ GET_MEDIA_CONFIG_FOR_USER_CALLBACK ] = [] - self._is_user_allowed_to_upload_media_of_size_callbacks: List[ + self._is_user_allowed_to_upload_media_of_size_callbacks: list[ IS_USER_ALLOWED_TO_UPLOAD_MEDIA_OF_SIZE_CALLBACK ] = [] - self._get_media_upload_limits_for_user_callbacks: List[ + self._get_media_upload_limits_for_user_callbacks: list[ GET_MEDIA_UPLOAD_LIMITS_FOR_USER_CALLBACK ] = [] - self._on_media_upload_limit_exceeded_callbacks: List[ + self._on_media_upload_limit_exceeded_callbacks: list[ ON_MEDIA_UPLOAD_LIMIT_EXCEEDED_CALLBACK ] = [] @@ -117,7 +117,7 @@ async def is_user_allowed_to_upload_media_of_size( async def get_media_upload_limits_for_user( self, user_id: str - ) -> Optional[List[MediaUploadLimit]]: + ) -> Optional[list[MediaUploadLimit]]: """ Get the first non-None list of MediaUploadLimits for the user from the registered callbacks. If a list is returned it will be sorted in descending order of duration. 
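Reviewer note: the callbacks modules in this patch share a pattern: a `Callable[..., Awaitable[...]]` alias, a list of registered hooks, and a loop returning the first non-None result. A self-contained sketch under invented names (the real hooks additionally wrap each call in tracing and cancellation protection):

    import asyncio
    from typing import Awaitable, Callable, Optional

    LimitCallback = Callable[[str], Awaitable[Optional[list[int]]]]
    callbacks: list[LimitCallback] = []

    async def first_non_none(user_id: str) -> Optional[list[int]]:
        for cb in callbacks:
            res = await cb(user_id)
            if res is not None:  # [] is a valid "no limits" answer
                return res
        return None

    async def demo_callback(user_id: str) -> Optional[list[int]]:
        return [100]

    callbacks.append(demo_callback)
    print(asyncio.run(first_non_none("@u:s")))  # [100]
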
@@ -128,7 +128,7 @@ async def get_media_upload_limits_for_user( name=f"{callback.__module__}.{callback.__qualname__}", server_name=self.server_name, ): - res: Optional[List[MediaUploadLimit]] = await delay_cancellation( + res: Optional[list[MediaUploadLimit]] = await delay_cancellation( callback(user_id) ) if res is not None: # to allow [] to be returned meaning no limit diff --git a/synapse/module_api/callbacks/ratelimit_callbacks.py b/synapse/module_api/callbacks/ratelimit_callbacks.py index a580ea7d7c4..6afcda1216e 100644 --- a/synapse/module_api/callbacks/ratelimit_callbacks.py +++ b/synapse/module_api/callbacks/ratelimit_callbacks.py @@ -13,7 +13,7 @@ # import logging -from typing import TYPE_CHECKING, Awaitable, Callable, List, Optional +from typing import TYPE_CHECKING, Awaitable, Callable, Optional import attr @@ -45,7 +45,7 @@ class RatelimitModuleApiCallbacks: def __init__(self, hs: "HomeServer") -> None: self.server_name = hs.hostname self.clock = hs.get_clock() - self._get_ratelimit_override_for_user_callbacks: List[ + self._get_ratelimit_override_for_user_callbacks: list[ GET_RATELIMIT_OVERRIDE_FOR_USER_CALLBACK ] = [] diff --git a/synapse/module_api/callbacks/spamchecker_callbacks.py b/synapse/module_api/callbacks/spamchecker_callbacks.py index 428e733979c..4c331c42103 100644 --- a/synapse/module_api/callbacks/spamchecker_callbacks.py +++ b/synapse/module_api/callbacks/spamchecker_callbacks.py @@ -29,10 +29,8 @@ Awaitable, Callable, Collection, - List, Literal, Optional, - Tuple, Union, cast, ) @@ -63,7 +61,7 @@ # disappear without warning depending on the results of ongoing # experiments. # Use this to return additional information as part of an error. - Tuple[Codes, JsonDict], + tuple[Codes, JsonDict], # Deprecated bool, ] @@ -83,7 +81,7 @@ # disappear without warning depending on the results of ongoing # experiments. # Use this to return additional information as part of an error. - Tuple[Codes, JsonDict], + tuple[Codes, JsonDict], # Deprecated bool, ] @@ -99,7 +97,7 @@ # disappear without warning depending on the results of ongoing # experiments. # Use this to return additional information as part of an error. - Tuple[Codes, JsonDict], + tuple[Codes, JsonDict], # Deprecated bool, ] @@ -115,7 +113,7 @@ # disappear without warning depending on the results of ongoing # experiments. # Use this to return additional information as part of an error. - Tuple[Codes, JsonDict], + tuple[Codes, JsonDict], # Deprecated bool, ] @@ -131,7 +129,7 @@ # disappear without warning depending on the results of ongoing # experiments. # Use this to return additional information as part of an error. - Tuple[Codes, JsonDict], + tuple[Codes, JsonDict], # Deprecated bool, ] @@ -144,7 +142,7 @@ # disappear without warning depending on the results of ongoing # experiments. # Use this to return additional information as part of an error. - Tuple[Codes, JsonDict], + tuple[Codes, JsonDict], # Deprecated bool, ] @@ -167,7 +165,7 @@ # disappear without warning depending on the results of ongoing # experiments. # Use this to return additional information as part of an error. - Tuple[Codes, JsonDict], + tuple[Codes, JsonDict], # Deprecated bool, ] @@ -183,7 +181,7 @@ # disappear without warning depending on the results of ongoing # experiments. # Use this to return additional information as part of an error. - Tuple[Codes, JsonDict], + tuple[Codes, JsonDict], # Deprecated bool, ] @@ -199,7 +197,7 @@ # disappear without warning depending on the results of ongoing # experiments. 
# Use this to return additional information as part of an error. - Tuple[Codes, JsonDict], + tuple[Codes, JsonDict], ] ], ] @@ -211,7 +209,7 @@ [ Optional[dict], Optional[str], - Collection[Tuple[str, str]], + Collection[tuple[str, str]], ], Awaitable[RegistrationBehaviour], ] @@ -219,7 +217,7 @@ [ Optional[dict], Optional[str], - Collection[Tuple[str, str]], + Collection[tuple[str, str]], Optional[str], ], Awaitable[RegistrationBehaviour], @@ -234,7 +232,7 @@ # disappear without warning depending on the results of ongoing # experiments. # Use this to return additional information as part of an error. - Tuple[Codes, JsonDict], + tuple[Codes, JsonDict], # Deprecated bool, ] @@ -245,7 +243,7 @@ str, Optional[str], Optional[str], - Collection[Tuple[Optional[str], str]], + Collection[tuple[Optional[str], str]], Optional[str], ], Awaitable[ @@ -256,7 +254,7 @@ # disappear without warning depending on the results of ongoing # experiments. # Use this to return additional information as part of an error. - Tuple[Codes, JsonDict], + tuple[Codes, JsonDict], ] ], ] @@ -266,7 +264,7 @@ def load_legacy_spam_checkers(hs: "synapse.server.HomeServer") -> None: """Wrapper that loads spam checkers configured using the old configuration, and registers the spam checker hooks they implement. """ - spam_checkers: List[Any] = [] + spam_checkers: list[Any] = [] api = hs.get_module_api() for module, config in hs.config.spamchecker.spam_checkers: # Older spam checkers don't accept the `api` argument, so we @@ -312,7 +310,7 @@ def async_wrapper(f: Optional[Callable]) -> Optional[Callable[..., Awaitable]]: def wrapper( email_threepid: Optional[dict], username: Optional[str], - request_info: Collection[Tuple[str, str]], + request_info: Collection[tuple[str, str]], auth_provider_id: Optional[str], ) -> Union[Awaitable[RegistrationBehaviour], RegistrationBehaviour]: # Assertion required because mypy can't prove we won't @@ -359,36 +357,36 @@ def __init__(self, hs: "synapse.server.HomeServer") -> None: self.server_name = hs.hostname self.clock = hs.get_clock() - self._check_event_for_spam_callbacks: List[CHECK_EVENT_FOR_SPAM_CALLBACK] = [] - self._should_drop_federated_event_callbacks: List[ + self._check_event_for_spam_callbacks: list[CHECK_EVENT_FOR_SPAM_CALLBACK] = [] + self._should_drop_federated_event_callbacks: list[ SHOULD_DROP_FEDERATED_EVENT_CALLBACK ] = [] - self._user_may_join_room_callbacks: List[USER_MAY_JOIN_ROOM_CALLBACK] = [] - self._user_may_invite_callbacks: List[USER_MAY_INVITE_CALLBACK] = [] - self._federated_user_may_invite_callbacks: List[ + self._user_may_join_room_callbacks: list[USER_MAY_JOIN_ROOM_CALLBACK] = [] + self._user_may_invite_callbacks: list[USER_MAY_INVITE_CALLBACK] = [] + self._federated_user_may_invite_callbacks: list[ FEDERATED_USER_MAY_INVITE_CALLBACK ] = [] - self._user_may_send_3pid_invite_callbacks: List[ + self._user_may_send_3pid_invite_callbacks: list[ USER_MAY_SEND_3PID_INVITE_CALLBACK ] = [] - self._user_may_create_room_callbacks: List[USER_MAY_CREATE_ROOM_CALLBACK] = [] - self._user_may_send_state_event_callbacks: List[ + self._user_may_create_room_callbacks: list[USER_MAY_CREATE_ROOM_CALLBACK] = [] + self._user_may_send_state_event_callbacks: list[ USER_MAY_SEND_STATE_EVENT_CALLBACK ] = [] - self._user_may_create_room_alias_callbacks: List[ + self._user_may_create_room_alias_callbacks: list[ USER_MAY_CREATE_ROOM_ALIAS_CALLBACK ] = [] - self._user_may_publish_room_callbacks: List[USER_MAY_PUBLISH_ROOM_CALLBACK] = [] - self._check_username_for_spam_callbacks: List[ + 
self._user_may_publish_room_callbacks: list[USER_MAY_PUBLISH_ROOM_CALLBACK] = [] + self._check_username_for_spam_callbacks: list[ CHECK_USERNAME_FOR_SPAM_CALLBACK ] = [] - self._check_registration_for_spam_callbacks: List[ + self._check_registration_for_spam_callbacks: list[ CHECK_REGISTRATION_FOR_SPAM_CALLBACK ] = [] - self._check_media_file_for_spam_callbacks: List[ + self._check_media_file_for_spam_callbacks: list[ CHECK_MEDIA_FILE_FOR_SPAM_CALLBACK ] = [] - self._check_login_for_spam_callbacks: List[CHECK_LOGIN_FOR_SPAM_CALLBACK] = [] + self._check_login_for_spam_callbacks: list[CHECK_LOGIN_FOR_SPAM_CALLBACK] = [] def register_callbacks( self, @@ -471,7 +469,7 @@ def register_callbacks( @trace async def check_event_for_spam( self, event: "synapse.events.EventBase" - ) -> Union[Tuple[Codes, JsonDict], str]: + ) -> Union[tuple[Codes, JsonDict], str]: """Checks if a given event is considered "spammy" by this server. If the server considers an event spammy, then it will be rejected if @@ -561,7 +559,7 @@ async def should_drop_federated_event( async def user_may_join_room( self, user_id: str, room_id: str, is_invited: bool - ) -> Union[Tuple[Codes, JsonDict], Literal["NOT_SPAM"]]: + ) -> Union[tuple[Codes, JsonDict], Literal["NOT_SPAM"]]: """Checks if a given users is allowed to join a room. Not called when a user creates a room. @@ -605,7 +603,7 @@ async def user_may_join_room( async def user_may_invite( self, inviter_userid: str, invitee_userid: str, room_id: str - ) -> Union[Tuple[Codes, dict], Literal["NOT_SPAM"]]: + ) -> Union[tuple[Codes, dict], Literal["NOT_SPAM"]]: """Checks if a given user may send an invite Args: @@ -650,7 +648,7 @@ async def user_may_invite( async def federated_user_may_invite( self, event: "synapse.events.EventBase" - ) -> Union[Tuple[Codes, dict], Literal["NOT_SPAM"]]: + ) -> Union[tuple[Codes, dict], Literal["NOT_SPAM"]]: """Checks if a given user may send an invite Args: @@ -691,7 +689,7 @@ async def federated_user_may_invite( async def user_may_send_3pid_invite( self, inviter_userid: str, medium: str, address: str, room_id: str - ) -> Union[Tuple[Codes, dict], Literal["NOT_SPAM"]]: + ) -> Union[tuple[Codes, dict], Literal["NOT_SPAM"]]: """Checks if a given user may invite a given threepid into the room Note that if the threepid is already associated with a Matrix user ID, Synapse @@ -739,7 +737,7 @@ async def user_may_send_3pid_invite( async def user_may_create_room( self, userid: str, room_config: JsonDict - ) -> Union[Tuple[Codes, dict], Literal["NOT_SPAM"]]: + ) -> Union[tuple[Codes, dict], Literal["NOT_SPAM"]]: """Checks if a given user may create a room Args: @@ -805,7 +803,7 @@ async def user_may_send_state_event( event_type: str, state_key: str, content: JsonDict, - ) -> Union[Tuple[Codes, dict], Literal["NOT_SPAM"]]: + ) -> Union[tuple[Codes, dict], Literal["NOT_SPAM"]]: """Checks if a given user may create a room with a given visibility Args: user_id: The ID of the user attempting to create a room @@ -838,7 +836,7 @@ async def user_may_send_state_event( async def user_may_create_room_alias( self, userid: str, room_alias: RoomAlias - ) -> Union[Tuple[Codes, dict], Literal["NOT_SPAM"]]: + ) -> Union[tuple[Codes, dict], Literal["NOT_SPAM"]]: """Checks if a given user may create a room alias Args: @@ -876,7 +874,7 @@ async def user_may_create_room_alias( async def user_may_publish_room( self, userid: str, room_id: str - ) -> Union[Tuple[Codes, dict], Literal["NOT_SPAM"]]: + ) -> Union[tuple[Codes, dict], Literal["NOT_SPAM"]]: """Checks if a given user 
may publish a room to the directory Args: @@ -964,7 +962,7 @@ async def check_registration_for_spam( self, email_threepid: Optional[dict], username: Optional[str], - request_info: Collection[Tuple[str, str]], + request_info: Collection[tuple[str, str]], auth_provider_id: Optional[str] = None, ) -> RegistrationBehaviour: """Checks if we should allow the given registration request. @@ -1000,7 +998,7 @@ async def check_registration_for_spam( @trace async def check_media_file_for_spam( self, file_wrapper: ReadableFileWrapper, file_info: FileInfo - ) -> Union[Tuple[Codes, dict], Literal["NOT_SPAM"]]: + ) -> Union[tuple[Codes, dict], Literal["NOT_SPAM"]]: """Checks if a piece of newly uploaded media should be blocked. This will be called for local uploads, downloads of remote media, each @@ -1062,9 +1060,9 @@ async def check_login_for_spam( user_id: str, device_id: Optional[str], initial_display_name: Optional[str], - request_info: Collection[Tuple[Optional[str], str]], + request_info: Collection[tuple[Optional[str], str]], auth_provider_id: Optional[str] = None, - ) -> Union[Tuple[Codes, dict], Literal["NOT_SPAM"]]: + ) -> Union[tuple[Codes, dict], Literal["NOT_SPAM"]]: """Checks if we should allow the given registration request. Args: diff --git a/synapse/module_api/callbacks/third_party_event_rules_callbacks.py b/synapse/module_api/callbacks/third_party_event_rules_callbacks.py index 9f7a04372de..2b886cbabb0 100644 --- a/synapse/module_api/callbacks/third_party_event_rules_callbacks.py +++ b/synapse/module_api/callbacks/third_party_event_rules_callbacks.py @@ -19,7 +19,7 @@ # # import logging -from typing import TYPE_CHECKING, Any, Awaitable, Callable, List, Optional, Tuple +from typing import TYPE_CHECKING, Any, Awaitable, Callable, Optional from twisted.internet.defer import CancelledError @@ -37,7 +37,7 @@ CHECK_EVENT_ALLOWED_CALLBACK = Callable[ - [EventBase, StateMap[EventBase]], Awaitable[Tuple[bool, Optional[dict]]] + [EventBase, StateMap[EventBase]], Awaitable[tuple[bool, Optional[dict]]] ] ON_CREATE_ROOM_CALLBACK = Callable[[Requester, dict, bool], Awaitable] CHECK_THREEPID_CAN_BE_INVITED_CALLBACK = Callable[ @@ -93,7 +93,7 @@ def async_wrapper(f: Optional[Callable]) -> Optional[Callable[..., Awaitable]]: async def wrap_check_event_allowed( event: EventBase, state_events: StateMap[EventBase], - ) -> Tuple[bool, Optional[dict]]: + ) -> tuple[bool, Optional[dict]]: # Assertion required because mypy can't prove we won't change # `f` back to `None`. 
See # https://mypy.readthedocs.io/en/latest/common_issues.html#narrowing-and-inner-functions @@ -159,30 +159,30 @@ def __init__(self, hs: "HomeServer"): self.store = hs.get_datastores().main self._storage_controllers = hs.get_storage_controllers() - self._check_event_allowed_callbacks: List[CHECK_EVENT_ALLOWED_CALLBACK] = [] - self._on_create_room_callbacks: List[ON_CREATE_ROOM_CALLBACK] = [] - self._check_threepid_can_be_invited_callbacks: List[ + self._check_event_allowed_callbacks: list[CHECK_EVENT_ALLOWED_CALLBACK] = [] + self._on_create_room_callbacks: list[ON_CREATE_ROOM_CALLBACK] = [] + self._check_threepid_can_be_invited_callbacks: list[ CHECK_THREEPID_CAN_BE_INVITED_CALLBACK ] = [] - self._check_visibility_can_be_modified_callbacks: List[ + self._check_visibility_can_be_modified_callbacks: list[ CHECK_VISIBILITY_CAN_BE_MODIFIED_CALLBACK ] = [] - self._on_new_event_callbacks: List[ON_NEW_EVENT_CALLBACK] = [] - self._check_can_shutdown_room_callbacks: List[ + self._on_new_event_callbacks: list[ON_NEW_EVENT_CALLBACK] = [] + self._check_can_shutdown_room_callbacks: list[ CHECK_CAN_SHUTDOWN_ROOM_CALLBACK ] = [] - self._check_can_deactivate_user_callbacks: List[ + self._check_can_deactivate_user_callbacks: list[ CHECK_CAN_DEACTIVATE_USER_CALLBACK ] = [] - self._on_profile_update_callbacks: List[ON_PROFILE_UPDATE_CALLBACK] = [] - self._on_user_deactivation_status_changed_callbacks: List[ + self._on_profile_update_callbacks: list[ON_PROFILE_UPDATE_CALLBACK] = [] + self._on_user_deactivation_status_changed_callbacks: list[ ON_USER_DEACTIVATION_STATUS_CHANGED_CALLBACK ] = [] - self._on_threepid_bind_callbacks: List[ON_THREEPID_BIND_CALLBACK] = [] - self._on_add_user_third_party_identifier_callbacks: List[ + self._on_threepid_bind_callbacks: list[ON_THREEPID_BIND_CALLBACK] = [] + self._on_add_user_third_party_identifier_callbacks: list[ ON_ADD_USER_THIRD_PARTY_IDENTIFIER_CALLBACK ] = [] - self._on_remove_user_third_party_identifier_callbacks: List[ + self._on_remove_user_third_party_identifier_callbacks: list[ ON_REMOVE_USER_THIRD_PARTY_IDENTIFIER_CALLBACK ] = [] @@ -261,7 +261,7 @@ async def check_event_allowed( self, event: EventBase, context: UnpersistedEventContextBase, - ) -> Tuple[bool, Optional[dict]]: + ) -> tuple[bool, Optional[dict]]: """Check if a provided event should be allowed in the given context. The module can return: diff --git a/synapse/notifier.py b/synapse/notifier.py index 9169f50c4dd..4a75d07e37c 100644 --- a/synapse/notifier.py +++ b/synapse/notifier.py @@ -25,14 +25,10 @@ Awaitable, Callable, Collection, - Dict, Iterable, - List, Literal, Mapping, Optional, - Set, - Tuple, TypeVar, Union, overload, @@ -148,7 +144,7 @@ def __init__( self.last_notified_ms = time_now_ms # Set of listeners that we need to wake up when there has been a change. 
- self.listeners: Set[Deferred[StreamToken]] = set() + self.listeners: set[Deferred[StreamToken]] = set() def update_and_fetch_deferreds( self, @@ -215,7 +211,7 @@ def new_listener(self, token: StreamToken) -> "Deferred[StreamToken]": @attr.s(slots=True, frozen=True, auto_attribs=True) class EventStreamResult: - events: List[Union[JsonDict, EventBase]] + events: list[Union[JsonDict, EventBase]] start_token: StreamToken end_token: StreamToken @@ -244,25 +240,25 @@ class Notifier: UNUSED_STREAM_EXPIRY_MS = 10 * 60 * 1000 def __init__(self, hs: "HomeServer"): - self.user_to_user_stream: Dict[str, _NotifierUserStream] = {} - self.room_to_user_streams: Dict[str, Set[_NotifierUserStream]] = {} + self.user_to_user_stream: dict[str, _NotifierUserStream] = {} + self.room_to_user_streams: dict[str, set[_NotifierUserStream]] = {} self.hs = hs self.server_name = hs.hostname self._storage_controllers = hs.get_storage_controllers() self.event_sources = hs.get_event_sources() self.store = hs.get_datastores().main - self.pending_new_room_events: List[_PendingRoomEventEntry] = [] + self.pending_new_room_events: list[_PendingRoomEventEntry] = [] self._replication_notifier = hs.get_replication_notifier() - self._new_join_in_room_callbacks: List[Callable[[str, str], None]] = [] + self._new_join_in_room_callbacks: list[Callable[[str, str], None]] = [] self._federation_client = hs.get_federation_http_client() self._third_party_rules = hs.get_module_api_callbacks().third_party_event_rules # List of callbacks to be notified when a lock is released - self._lock_released_callback: List[Callable[[str, str, str], None]] = [] + self._lock_released_callback: list[Callable[[str, str, str], None]] = [] self.reactor = hs.get_reactor() self.clock = hs.get_clock() @@ -283,10 +279,10 @@ def __init__(self, hs: "HomeServer"): # when rendering the metrics page, which is likely once per minute at # most when scraping it. # - # Ideally, we'd use `Mapping[Tuple[str], int]` here but mypy doesn't like it. + # Ideally, we'd use `Mapping[tuple[str], int]` here but mypy doesn't like it. # This is close enough and better than a type ignore. 
- def count_listeners() -> Mapping[Tuple[str, ...], int]: - all_user_streams: Set[_NotifierUserStream] = set() + def count_listeners() -> Mapping[tuple[str, ...], int]: + all_user_streams: set[_NotifierUserStream] = set() for streams in list(self.room_to_user_streams.values()): all_user_streams |= streams @@ -338,7 +334,7 @@ def add_new_join_in_room_callback(self, cb: Callable[[str, str], None]) -> None: async def on_new_room_events( self, - events_and_pos: List[Tuple[EventBase, PersistedEventPosition]], + events_and_pos: list[tuple[EventBase, PersistedEventPosition]], max_room_stream_token: RoomStreamToken, extra_users: Optional[Collection[UserID]] = None, ) -> None: @@ -373,7 +369,7 @@ async def on_un_partial_stated_room( time_now_ms = self.clock.time_msec() current_token = self.event_sources.get_current_token() - listeners: List["Deferred[StreamToken]"] = [] + listeners: list["Deferred[StreamToken]"] = [] for user_stream in user_streams: try: listeners.extend( @@ -397,7 +393,7 @@ async def on_un_partial_stated_room( async def notify_new_room_events( self, - event_entries: List[Tuple[_PendingRoomEventEntry, str]], + event_entries: list[tuple[_PendingRoomEventEntry, str]], max_room_stream_token: RoomStreamToken, ) -> None: """Used by handlers to inform the notifier something has happened @@ -453,8 +449,8 @@ def _notify_pending_new_room_events( pending = self.pending_new_room_events self.pending_new_room_events = [] - users: Set[UserID] = set() - rooms: Set[str] = set() + users: set[UserID] = set() + rooms: set[str] = set() for entry in pending: if entry.event_pos.persisted_after(max_room_stream_token): @@ -560,7 +556,7 @@ def on_new_event( users = users or [] rooms = rooms or [] - user_streams: Set[_NotifierUserStream] = set() + user_streams: set[_NotifierUserStream] = set() log_kv( { @@ -593,7 +589,7 @@ def on_new_event( time_now_ms = self.clock.time_msec() current_token = self.event_sources.get_current_token() - listeners: List["Deferred[StreamToken]"] = [] + listeners: list["Deferred[StreamToken]"] = [] for user_stream in user_streams: try: listeners.extend( @@ -771,7 +767,7 @@ async def check_for_updates( # The events fetched from each source are a JsonDict, EventBase, or # UserPresenceState, but see below for UserPresenceState being # converted to JsonDict. - events: List[Union[JsonDict, EventBase]] = [] + events: list[Union[JsonDict, EventBase]] = [] end_token = from_token for keyname, source in self.event_sources.sources.get_sources(): @@ -871,7 +867,7 @@ async def wait_for_stream_token(self, stream_token: StreamToken) -> bool: async def _get_room_ids( self, user: UserID, explicit_room_id: Optional[str] - ) -> Tuple[StrCollection, bool]: + ) -> tuple[StrCollection, bool]: joined_room_ids = await self.store.get_rooms_for_user(user.to_string()) if explicit_room_id: if explicit_room_id in joined_room_ids: @@ -960,7 +956,7 @@ class ReplicationNotifier: This is separate from the notifier to avoid circular dependencies. """ - _replication_callbacks: List[Callable[[], None]] = attr.Factory(list) + _replication_callbacks: list[Callable[[], None]] = attr.Factory(list) def add_replication_callback(self, cb: Callable[[], None]) -> None: """Add a callback that will be called when some new data is available. 
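
The ReplicationNotifier hunk above combines a builtin generic with an attrs factory default. A minimal self-contained sketch of that pattern, with a hypothetical class name and assuming the attrs package is available:

    from typing import Callable

    import attr

    @attr.s(slots=True, auto_attribs=True)
    class CallbackRegistry:
        # list[...] replaces typing.List[...]; attr.Factory gives each
        # instance its own empty list rather than a shared default.
        _callbacks: list[Callable[[], None]] = attr.Factory(list)

        def add_callback(self, cb: Callable[[], None]) -> None:
            self._callbacks.append(cb)

        def notify(self) -> None:
            for cb in self._callbacks:
                cb()

Only the annotation spelling changes here; runtime behaviour is identical, which is what makes the conversion safe to apply mechanically across the tree.
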
diff --git a/synapse/push/__init__.py b/synapse/push/__init__.py index 7bc99bd7857..552af8e14aa 100644 --- a/synapse/push/__init__.py +++ b/synapse/push/__init__.py @@ -94,7 +94,7 @@ """ import abc -from typing import TYPE_CHECKING, Any, Dict, Optional +from typing import TYPE_CHECKING, Any, Optional import attr @@ -131,7 +131,7 @@ class PusherConfig: # while the "set_device_id_for_pushers" background update is running. access_token: Optional[int] - def as_dict(self) -> Dict[str, Any]: + def as_dict(self) -> dict[str, Any]: """Information that can be retrieved about a pusher after creation.""" return { "app_display_name": self.app_display_name, diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py index ea9169aef02..9fcd7fdc6e9 100644 --- a/synapse/push/bulk_push_rule_evaluator.py +++ b/synapse/push/bulk_push_rule_evaluator.py @@ -24,13 +24,9 @@ TYPE_CHECKING, Any, Collection, - Dict, - FrozenSet, - List, Mapping, Optional, Sequence, - Tuple, Union, cast, ) @@ -237,7 +233,7 @@ async def _get_power_levels_and_sender_level( event: EventBase, context: EventContext, event_id_to_event: Mapping[str, EventBase], - ) -> Tuple[dict, Optional[int]]: + ) -> tuple[dict, Optional[int]]: """ Given an event and an event context, get the power level event relevant to the event and the power level of the sender of the event. @@ -309,13 +305,13 @@ async def _get_power_levels_and_sender_level( async def _related_events( self, event: EventBase - ) -> Dict[str, Dict[str, JsonValue]]: + ) -> dict[str, dict[str, JsonValue]]: """Fetches the related events for 'event'. Sets the im.vector.is_falling_back key if the event is from a fallback relation Returns: Mapping of relation type to flattened events. """ - related_events: Dict[str, Dict[str, JsonValue]] = {} + related_events: dict[str, dict[str, JsonValue]] = {} if self._related_event_match_enabled: related_event_id = event.content.get("m.relates_to", {}).get("event_id") relation_type = event.content.get("m.relates_to", {}).get("rel_type") @@ -352,7 +348,7 @@ async def _related_events( return related_events async def action_for_events_by_user( - self, events_and_context: List[EventPersistencePair] + self, events_and_context: list[EventPersistencePair] ) -> None: """Given a list of events and their associated contexts, evaluate the push rules for each event, check if the message should increment the unread count, and @@ -394,7 +390,7 @@ async def _action_for_event_by_user( count_as_unread = _should_count_as_unread(event, context) rules_by_user = await self._get_rules_for_event(event) - actions_by_user: Dict[str, Collection[Union[Mapping, str]]] = {} + actions_by_user: dict[str, Collection[Union[Mapping, str]]] = {} # Gather a bunch of info in parallel. 
# @@ -409,7 +405,7 @@ async def _action_for_event_by_user( profiles, ) = await make_deferred_yieldable( cast( - "Deferred[Tuple[int, Tuple[dict, Optional[int]], Dict[str, Dict[str, JsonValue]], Mapping[str, ProfileInfo]]]", + "Deferred[tuple[int, tuple[dict, Optional[int]], dict[str, dict[str, JsonValue]], Mapping[str, ProfileInfo]]]", gather_results( ( run_in_background( # type: ignore[call-overload] @@ -481,7 +477,7 @@ async def _action_for_event_by_user( self.hs.config.experimental.msc4306_enabled, ) - msc4306_thread_subscribers: Optional[FrozenSet[str]] = None + msc4306_thread_subscribers: Optional[frozenset[str]] = None if self.hs.config.experimental.msc4306_enabled and thread_id != MAIN_TIMELINE: # pull out, in batch, all local subscribers to this thread # (in the common case, they will all be getting processed for push @@ -556,9 +552,9 @@ async def _action_for_event_by_user( ) -MemberMap = Dict[str, Optional[EventIdMembership]] -Rule = Dict[str, dict] -RulesByUser = Dict[str, List[Rule]] +MemberMap = dict[str, Optional[EventIdMembership]] +Rule = dict[str, dict] +RulesByUser = dict[str, list[Rule]] StateGroup = Union[object, int] @@ -572,9 +568,9 @@ def _is_simple_value(value: Any) -> bool: def _flatten_dict( d: Union[EventBase, Mapping[str, Any]], - prefix: Optional[List[str]] = None, - result: Optional[Dict[str, JsonValue]] = None, -) -> Dict[str, JsonValue]: + prefix: Optional[list[str]] = None, + result: Optional[dict[str, JsonValue]] = None, +) -> dict[str, JsonValue]: """ Given a JSON dictionary (or event) which might contain sub dictionaries, flatten it into a single layer dictionary by combining the keys & sub-keys. diff --git a/synapse/push/clientformat.py b/synapse/push/clientformat.py index 4f647491f1a..fd1758db9dd 100644 --- a/synapse/push/clientformat.py +++ b/synapse/push/clientformat.py @@ -20,7 +20,7 @@ # import copy -from typing import Any, Dict, List, Optional +from typing import Any, Optional from synapse.push.rulekinds import PRIORITY_CLASS_INVERSE_MAP, PRIORITY_CLASS_MAP from synapse.synapse_rust.push import FilteredPushRules, PushRule @@ -29,11 +29,11 @@ def format_push_rules_for_user( user: UserID, ruleslist: FilteredPushRules -) -> Dict[str, Dict[str, List[Dict[str, Any]]]]: +) -> dict[str, dict[str, list[dict[str, Any]]]]: """Converts a list of rawrules and a enabled map into nested dictionaries to match the Matrix client-server format for push rules""" - rules: Dict[str, Dict[str, List[Dict[str, Any]]]] = {"global": {}} + rules: dict[str, dict[str, list[dict[str, Any]]]] = {"global": {}} rules["global"] = _add_empty_priority_class_arrays(rules["global"]) @@ -70,7 +70,7 @@ def format_push_rules_for_user( return rules -def _convert_type_to_value(rule_or_cond: Dict[str, Any], user: UserID) -> None: +def _convert_type_to_value(rule_or_cond: dict[str, Any], user: UserID) -> None: for type_key in ("pattern", "value"): type_value = rule_or_cond.pop(f"{type_key}_type", None) if type_value == "user_id": @@ -79,14 +79,14 @@ def _convert_type_to_value(rule_or_cond: Dict[str, Any], user: UserID) -> None: rule_or_cond[type_key] = user.localpart -def _add_empty_priority_class_arrays(d: Dict[str, list]) -> Dict[str, list]: +def _add_empty_priority_class_arrays(d: dict[str, list]) -> dict[str, list]: for pc in PRIORITY_CLASS_MAP.keys(): d[pc] = [] return d -def _rule_to_template(rule: PushRule) -> Optional[Dict[str, Any]]: - templaterule: Dict[str, Any] +def _rule_to_template(rule: PushRule) -> Optional[dict[str, Any]]: + templaterule: dict[str, Any] unscoped_rule_id = 
_rule_id_from_namespaced(rule.rule_id) diff --git a/synapse/push/emailpusher.py b/synapse/push/emailpusher.py index 1484bc8fc01..83823c22843 100644 --- a/synapse/push/emailpusher.py +++ b/synapse/push/emailpusher.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Dict, List, Optional +from typing import TYPE_CHECKING, Optional from twisted.internet.error import AlreadyCalled, AlreadyCancelled from twisted.internet.interfaces import IDelayedCall @@ -71,7 +71,7 @@ def __init__(self, hs: "HomeServer", pusher_config: PusherConfig, mailer: Mailer self.store = self.hs.get_datastores().main self.email = pusher_config.pushkey self.timed_call: Optional[IDelayedCall] = None - self.throttle_params: Dict[str, ThrottleParams] = {} + self.throttle_params: dict[str, ThrottleParams] = {} self._inited = False self._is_processing = False @@ -324,7 +324,7 @@ async def sent_notif_update_throttle( ) async def send_notification( - self, push_actions: List[EmailPushAction], reason: EmailReason + self, push_actions: list[EmailPushAction], reason: EmailReason ) -> None: logger.info("Sending notif email for user %r", self.user_id) diff --git a/synapse/push/httppusher.py b/synapse/push/httppusher.py index 5cac5de8cb4..c6c19ab15a7 100644 --- a/synapse/push/httppusher.py +++ b/synapse/push/httppusher.py @@ -21,7 +21,7 @@ import logging import random import urllib.parse -from typing import TYPE_CHECKING, Dict, List, Optional, Union +from typing import TYPE_CHECKING, Dict, Optional, Union from prometheus_client import Counter @@ -68,7 +68,7 @@ ) -def tweaks_for_actions(actions: List[Union[str, Dict]]) -> JsonMapping: +def tweaks_for_actions(actions: list[Union[str, Dict]]) -> JsonMapping: """ Converts a list of actions into a `tweaks` dict (which can then be passed to the push gateway). @@ -396,7 +396,7 @@ async def dispatch_push( content: JsonDict, tweaks: Optional[JsonMapping] = None, default_payload: Optional[JsonMapping] = None, - ) -> Union[bool, List[str]]: + ) -> Union[bool, list[str]]: """Send a notification to the registered push gateway, with `content` being the content of the `notification` top property specified in the spec. Note that the `devices` property will be added with device-specific @@ -453,7 +453,7 @@ async def dispatch_push_event( event: EventBase, tweaks: JsonMapping, badge: int, - ) -> Union[bool, List[str]]: + ) -> Union[bool, list[str]]: """Send a notification to the registered push gateway by building it from an event. 
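
One detail worth noting in the httppusher.py hunk above: the patch rewrites only subscripted generics, so the bare, unparameterised Dict in tweaks_for_actions stays as-is and Dict remains in that file's typing import. A loose sketch of the signature (the body here is illustrative, not the real implementation):

    from typing import Dict, Union

    def tweaks_for_actions(actions: list[Union[str, Dict]]) -> dict[str, object]:
        # Subscripted List[...] became list[...]; the bare Dict is untouched.
        tweaks: dict[str, object] = {}
        for action in actions:
            if isinstance(action, dict) and "set_tweak" in action:
                tweaks[action["set_tweak"]] = action.get("value", True)
        return tweaks
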
diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py index d76cc8237ba..3dac61aed5b 100644 --- a/synapse/push/mailer.py +++ b/synapse/push/mailer.py @@ -21,7 +21,7 @@ import logging import urllib.parse -from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, TypeVar +from typing import TYPE_CHECKING, Iterable, Optional, TypeVar import bleach import jinja2 @@ -287,7 +287,7 @@ async def send_notification_mail( notif_events = await self.store.get_events([pa.event_id for pa in push_actions]) - notifs_by_room: Dict[str, List[EmailPushAction]] = {} + notifs_by_room: dict[str, list[EmailPushAction]] = {} for pa in push_actions: notifs_by_room.setdefault(pa.room_id, []).append(pa) @@ -317,7 +317,7 @@ async def _fetch_room_state(room_id: str) -> None: # actually sort our so-called rooms_in_order list, most recent room first rooms_in_order.sort(key=lambda r: -(notifs_by_room[r][-1].received_ts or 0)) - rooms: List[RoomVars] = [] + rooms: list[RoomVars] = [] for r in rooms_in_order: roomvars = await self._get_room_vars( @@ -417,7 +417,7 @@ async def _get_room_vars( room_id: str, user_id: str, notifs: Iterable[EmailPushAction], - notif_events: Dict[str, EventBase], + notif_events: dict[str, EventBase], room_state_ids: StateMap[str], ) -> RoomVars: """ @@ -665,9 +665,9 @@ def _add_image_message_vars( async def _make_summary_text_single_room( self, room_id: str, - notifs: List[EmailPushAction], + notifs: list[EmailPushAction], room_state_ids: StateMap[str], - notif_events: Dict[str, EventBase], + notif_events: dict[str, EventBase], user_id: str, ) -> str: """ @@ -781,9 +781,9 @@ async def _make_summary_text_single_room( async def _make_summary_text( self, - notifs_by_room: Dict[str, List[EmailPushAction]], - room_state_ids: Dict[str, StateMap[str]], - notif_events: Dict[str, EventBase], + notifs_by_room: dict[str, list[EmailPushAction]], + room_state_ids: dict[str, StateMap[str]], + notif_events: dict[str, EventBase], reason: EmailReason, ) -> str: """ @@ -814,9 +814,9 @@ async def _make_summary_text( async def _make_summary_text_from_member_events( self, room_id: str, - notifs: List[EmailPushAction], + notifs: list[EmailPushAction], room_state_ids: StateMap[str], - notif_events: Dict[str, EventBase], + notif_events: dict[str, EventBase], ) -> str: """ Make a summary text for the email when only a single room has notifications. 
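
The send_notification_mail hunk above annotates the familiar setdefault bucketing idiom with nested builtin generics. In isolation, with hypothetical element types standing in for EmailPushAction:

    def group_by_room(actions: list[tuple[str, str]]) -> dict[str, list[str]]:
        # dict[str, list[str]] replaces Dict[str, List[str]]; the grouping
        # logic itself is unchanged by the rewrite.
        notifs_by_room: dict[str, list[str]] = {}
        for room_id, event_id in actions:
            notifs_by_room.setdefault(room_id, []).append(event_id)
        return notifs_by_room
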
@@ -995,7 +995,7 @@ def safe_text(raw_text: str) -> Markup: ) -def deduped_ordered_list(it: Iterable[T]) -> List[T]: +def deduped_ordered_list(it: Iterable[T]) -> list[T]: seen = set() ret = [] for item in it: diff --git a/synapse/push/presentable_names.py b/synapse/push/presentable_names.py index 1faa57e9f5b..2f32e18b9ab 100644 --- a/synapse/push/presentable_names.py +++ b/synapse/push/presentable_names.py @@ -21,7 +21,7 @@ import logging import re -from typing import TYPE_CHECKING, Dict, Iterable, Optional +from typing import TYPE_CHECKING, Iterable, Optional from synapse.api.constants import EventTypes, Membership from synapse.events import EventBase @@ -205,8 +205,8 @@ def name_from_member_event(member_event: EventBase) -> str: return member_event.state_key -def _state_as_two_level_dict(state: StateMap[str]) -> Dict[str, Dict[str, str]]: - ret: Dict[str, Dict[str, str]] = {} +def _state_as_two_level_dict(state: StateMap[str]) -> dict[str, dict[str, str]]: + ret: dict[str, dict[str, str]] = {} for k, v in state.items(): ret.setdefault(k[0], {})[k[1]] = v return ret diff --git a/synapse/push/push_tools.py b/synapse/push/push_tools.py index 3f3e4a92343..8e2ff2bcb4b 100644 --- a/synapse/push/push_tools.py +++ b/synapse/push/push_tools.py @@ -18,7 +18,6 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Dict from synapse.api.constants import EventTypes, Membership from synapse.events import EventBase @@ -56,8 +55,8 @@ async def get_badge_count(store: DataStore, user_id: str, group_by_room: bool) - async def get_context_for_event( storage: StorageControllers, ev: EventBase, user_id: str -) -> Dict[str, str]: - ctx: Dict[str, str] = {} +) -> dict[str, str]: + ctx: dict[str, str] = {} if ev.internal_metadata.outlier: # We don't have state for outliers, so we can't compute the context diff --git a/synapse/push/push_types.py b/synapse/push/push_types.py index 57fa926a46d..e1678cd7173 100644 --- a/synapse/push/push_types.py +++ b/synapse/push/push_types.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import List, Optional, TypedDict +from typing import Optional, TypedDict class EmailReason(TypedDict, total=False): @@ -91,7 +91,7 @@ class NotifVars(TypedDict): link: str ts: Optional[int] - messages: List[MessageVars] + messages: list[MessageVars] class RoomVars(TypedDict): @@ -110,7 +110,7 @@ class RoomVars(TypedDict): title: Optional[str] hash: int invite: bool - notifs: List[NotifVars] + notifs: list[NotifVars] link: str avatar_url: Optional[str] @@ -137,5 +137,5 @@ class TemplateVars(TypedDict, total=False): user_display_name: str unsubscribe_link: str summary_text: str - rooms: List[RoomVars] + rooms: list[RoomVars] reason: EmailReason diff --git a/synapse/push/pusher.py b/synapse/push/pusher.py index 9a5dd7a9d4b..17238c95c07 100644 --- a/synapse/push/pusher.py +++ b/synapse/push/pusher.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Callable, Dict, Optional +from typing import TYPE_CHECKING, Callable, Optional from synapse.push import Pusher, PusherConfig from synapse.push.emailpusher import EmailPusher @@ -38,13 +38,13 @@ def __init__(self, hs: "HomeServer"): self.hs = hs self.config = hs.config - self.pusher_types: Dict[str, Callable[[HomeServer, PusherConfig], Pusher]] = { + self.pusher_types: dict[str, Callable[[HomeServer, PusherConfig], Pusher]] = { "http": HttpPusher } logger.info("email enable notifs: %r", hs.config.email.email_enable_notifs) if 
hs.config.email.email_enable_notifs: - self.mailers: Dict[str, Mailer] = {} + self.mailers: dict[str, Mailer] = {} self._notif_template_html = hs.config.email.email_notif_template_html self._notif_template_text = hs.config.email.email_notif_template_text diff --git a/synapse/push/pusherpool.py b/synapse/push/pusherpool.py index 977c55b6836..6b70de976ad 100644 --- a/synapse/push/pusherpool.py +++ b/synapse/push/pusherpool.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Dict, Iterable, Optional +from typing import TYPE_CHECKING, Iterable, Optional from prometheus_client import Gauge @@ -100,7 +100,7 @@ def __init__(self, hs: "HomeServer"): self._last_room_stream_id_seen = self.store.get_room_max_stream_ordering() # map from user id to app_id:pushkey to pusher - self.pushers: Dict[str, Dict[str, Pusher]] = {} + self.pushers: dict[str, dict[str, Pusher]] = {} self._account_validity_handler = hs.get_account_validity_handler() diff --git a/synapse/replication/http/_base.py b/synapse/replication/http/_base.py index 0850a99e0c2..d76b40cf39b 100644 --- a/synapse/replication/http/_base.py +++ b/synapse/replication/http/_base.py @@ -23,7 +23,7 @@ import re import urllib.parse from inspect import signature -from typing import TYPE_CHECKING, Any, Awaitable, Callable, ClassVar, Dict, List, Tuple +from typing import TYPE_CHECKING, Any, Awaitable, Callable, ClassVar from prometheus_client import Counter, Gauge @@ -112,7 +112,7 @@ class ReplicationEndpoint(metaclass=abc.ABCMeta): """ NAME: str = abc.abstractproperty() # type: ignore - PATH_ARGS: Tuple[str, ...] = abc.abstractproperty() # type: ignore + PATH_ARGS: tuple[str, ...] = abc.abstractproperty() # type: ignore METHOD = "POST" CACHE = True RETRY_ON_TIMEOUT = True @@ -187,7 +187,7 @@ async def _serialize_payload(**kwargs) -> JsonDict: @abc.abstractmethod async def _handle_request( self, request: Request, content: JsonDict, **kwargs: Any - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: """Handle incoming request. This is called with the request object and PATH_ARGS. @@ -292,7 +292,7 @@ async def send_request( "/".join(url_args), ) - headers: Dict[bytes, List[bytes]] = {} + headers: dict[bytes, list[bytes]] = {} # Add an authorization header, if configured. if replication_secret: headers[b"Authorization"] = [b"Bearer " + replication_secret] @@ -403,7 +403,7 @@ def register(self, http_server: HttpServer) -> None: async def _check_auth_and_handle( self, request: SynapseRequest, **kwargs: Any - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: """Called on new incoming requests when caching is enabled. Checks if there is a cached response for the request and returns that, otherwise calls `_handle_request` and caches its response. 
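
The _base.py hunks above all touch the same contract: a replication handler returns an HTTP status code paired with a JSON body. A simplified, self-contained sketch of that convention (a hypothetical endpoint; the real class also receives the incoming Request and is registered against an HttpServer):

    from typing import Any

    JsonDict = dict[str, Any]

    class EchoEndpoint:
        NAME = "echo"
        # tuple[str, ...] replaces typing.Tuple[str, ...]
        PATH_ARGS: tuple[str, ...] = ("user_id",)

        @staticmethod
        async def _serialize_payload(user_id: str, note: str) -> JsonDict:
            return {"note": note}

        async def _handle_request(self, content: JsonDict, user_id: str) -> tuple[int, JsonDict]:
            # A (status, body) pair, matching the tuple[int, JsonDict]
            # return annotation used throughout the hunks above.
            return 200, {"user_id": user_id, "note": content.get("note")}
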
diff --git a/synapse/replication/http/account_data.py b/synapse/replication/http/account_data.py index b6eac153ba8..560973b9162 100644 --- a/synapse/replication/http/account_data.py +++ b/synapse/replication/http/account_data.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from twisted.web.server import Request @@ -68,7 +68,7 @@ async def _serialize_payload( # type: ignore[override] async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, user_id: str, account_data_type: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: max_stream_id = await self.handler.add_account_data_for_user( user_id, account_data_type, content["content"] ) @@ -106,7 +106,7 @@ async def _serialize_payload( # type: ignore[override] async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, user_id: str, account_data_type: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: max_stream_id = await self.handler.remove_account_data_for_user( user_id, account_data_type ) @@ -153,7 +153,7 @@ async def _handle_request( # type: ignore[override] user_id: str, room_id: str, account_data_type: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: max_stream_id = await self.handler.add_account_data_to_room( user_id, room_id, account_data_type, content["content"] ) @@ -196,7 +196,7 @@ async def _handle_request( # type: ignore[override] user_id: str, room_id: str, account_data_type: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: max_stream_id = await self.handler.remove_account_data_for_room( user_id, room_id, account_data_type ) @@ -238,7 +238,7 @@ async def _serialize_payload( # type: ignore[override] async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, user_id: str, room_id: str, tag: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: max_stream_id = await self.handler.add_tag_to_room( user_id, room_id, tag, content["content"] ) @@ -276,7 +276,7 @@ async def _serialize_payload(user_id: str, room_id: str, tag: str) -> JsonDict: async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, user_id: str, room_id: str, tag: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: max_stream_id = await self.handler.remove_tag_from_room( user_id, room_id, diff --git a/synapse/replication/http/deactivate_account.py b/synapse/replication/http/deactivate_account.py index 89658350a5f..82df1e1322e 100644 --- a/synapse/replication/http/deactivate_account.py +++ b/synapse/replication/http/deactivate_account.py @@ -19,7 +19,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from twisted.web.server import Request @@ -69,7 +69,7 @@ async def _serialize_payload( # type: ignore[override] async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: by_admin = content["by_admin"] await self.deactivate_account_handler.notify_account_deactivated( user_id, by_admin=by_admin diff --git a/synapse/replication/http/delayed_events.py b/synapse/replication/http/delayed_events.py index 229022070c8..e448ac32bf7 100644 --- a/synapse/replication/http/delayed_events.py +++ b/synapse/replication/http/delayed_events.py @@ -13,7 +13,7 @@ # import logging -from typing import TYPE_CHECKING, Dict, Optional, Tuple +from typing import TYPE_CHECKING, 
Optional from twisted.web.server import Request @@ -52,7 +52,7 @@ async def _serialize_payload(next_send_ts: int) -> JsonDict: # type: ignore[ove async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict - ) -> Tuple[int, Dict[str, Optional[JsonMapping]]]: + ) -> tuple[int, dict[str, Optional[JsonMapping]]]: self.handler.on_added(int(content["next_send_ts"])) return 200, {} diff --git a/synapse/replication/http/devices.py b/synapse/replication/http/devices.py index 94981e22eba..2fadee8a063 100644 --- a/synapse/replication/http/devices.py +++ b/synapse/replication/http/devices.py @@ -19,7 +19,7 @@ # import logging -from typing import TYPE_CHECKING, Dict, List, Optional, Tuple +from typing import TYPE_CHECKING, Optional from twisted.web.server import Request @@ -59,13 +59,13 @@ def __init__(self, hs: "HomeServer"): @staticmethod async def _serialize_payload( # type: ignore[override] - user_id: str, device_ids: List[str] + user_id: str, device_ids: list[str] ) -> JsonDict: return {"device_ids": device_ids} async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: device_ids = content["device_ids"] span = active_span() @@ -102,12 +102,12 @@ def __init__(self, hs: "HomeServer"): self.clock = hs.get_clock() @staticmethod - async def _serialize_payload(from_user_id: str, user_ids: List[str]) -> JsonDict: # type: ignore[override] + async def _serialize_payload(from_user_id: str, user_ids: list[str]) -> JsonDict: # type: ignore[override] return {"user_ids": user_ids} async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, from_user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: user_ids = content["user_ids"] span = active_span() @@ -165,13 +165,13 @@ def __init__(self, hs: "HomeServer"): self.clock = hs.get_clock() @staticmethod - async def _serialize_payload(user_ids: List[str]) -> JsonDict: # type: ignore[override] + async def _serialize_payload(user_ids: list[str]) -> JsonDict: # type: ignore[override] return {"user_ids": user_ids} async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict - ) -> Tuple[int, Dict[str, Optional[JsonMapping]]]: - user_ids: List[str] = content["user_ids"] + ) -> tuple[int, dict[str, Optional[JsonMapping]]]: + user_ids: list[str] = content["user_ids"] logger.info("Resync for %r", user_ids) span = active_span() @@ -210,7 +210,7 @@ async def _serialize_payload() -> JsonDict: # type: ignore[override] async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await self.device_handler.handle_new_device_update() return 200, {} @@ -241,7 +241,7 @@ async def _serialize_payload(room_id: str) -> JsonDict: # type: ignore[override async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await self.device_handler.handle_room_un_partial_stated(room_id) return 200, {} diff --git a/synapse/replication/http/federation.py b/synapse/replication/http/federation.py index 1e302ef59fb..448a1f8a718 100644 --- a/synapse/replication/http/federation.py +++ b/synapse/replication/http/federation.py @@ -19,7 +19,7 @@ # import logging -from typing import TYPE_CHECKING, List, Tuple +from typing import TYPE_CHECKING from twisted.web.server import Request @@ -86,7 
+86,7 @@ def __init__(self, hs: "HomeServer"): async def _serialize_payload( # type: ignore[override] store: "DataStore", room_id: str, - event_and_contexts: List[EventPersistencePair], + event_and_contexts: list[EventPersistencePair], backfilled: bool, ) -> JsonDict: """ @@ -122,7 +122,7 @@ async def _serialize_payload( # type: ignore[override] async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: with Measure( self.clock, name="repl_fed_send_events_parse", server_name=self.server_name ): @@ -194,7 +194,7 @@ async def _serialize_payload( # type: ignore[override] async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, edu_type: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: origin = content["origin"] edu_content = content["content"] @@ -243,7 +243,7 @@ async def _serialize_payload(query_type: str, args: JsonDict) -> JsonDict: # ty async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, query_type: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: args = content["args"] args["origin"] = content["origin"] @@ -285,7 +285,7 @@ async def _serialize_payload(room_id: str) -> JsonDict: # type: ignore[override async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await self.store.clean_room_for_join(room_id) return 200, {} @@ -320,7 +320,7 @@ async def _serialize_payload(room_id: str, room_version: RoomVersion) -> JsonDic async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: room_version = KNOWN_ROOM_VERSIONS[content["room_version"]] await self.store.maybe_store_room_on_outlier_membership(room_id, room_version) return 200, {} diff --git a/synapse/replication/http/login.py b/synapse/replication/http/login.py index 8b5b7f755ae..0022e12eac9 100644 --- a/synapse/replication/http/login.py +++ b/synapse/replication/http/login.py @@ -19,7 +19,7 @@ # import logging -from typing import TYPE_CHECKING, Optional, Tuple, cast +from typing import TYPE_CHECKING, Optional, cast from twisted.web.server import Request @@ -79,7 +79,7 @@ async def _serialize_payload( # type: ignore[override] async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: device_id = content["device_id"] initial_display_name = content["initial_display_name"] is_guest = content["is_guest"] diff --git a/synapse/replication/http/membership.py b/synapse/replication/http/membership.py index fc66039b2f0..0e588037b6f 100644 --- a/synapse/replication/http/membership.py +++ b/synapse/replication/http/membership.py @@ -18,7 +18,7 @@ # # import logging -from typing import TYPE_CHECKING, List, Optional, Tuple +from typing import TYPE_CHECKING, Optional from twisted.web.server import Request @@ -63,7 +63,7 @@ async def _serialize_payload( # type: ignore[override] requester: Requester, room_id: str, user_id: str, - remote_room_hosts: List[str], + remote_room_hosts: list[str], content: JsonDict, ) -> JsonDict: """ @@ -85,7 +85,7 @@ async def _serialize_payload( # type: ignore[override] async def _handle_request( # type: ignore[override] self, request: SynapseRequest, content: JsonDict, room_id: str, user_id: str - ) -> Tuple[int, 
JsonDict]: + ) -> tuple[int, JsonDict]: remote_room_hosts = content["remote_room_hosts"] event_content = content["content"] @@ -130,7 +130,7 @@ async def _serialize_payload( # type: ignore[override] requester: Requester, room_id: str, user_id: str, - remote_room_hosts: List[str], + remote_room_hosts: list[str], content: JsonDict, ) -> JsonDict: """ @@ -149,7 +149,7 @@ async def _serialize_payload( # type: ignore[override] async def _handle_request( # type: ignore[override] self, request: SynapseRequest, content: JsonDict, room_id: str, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: remote_room_hosts = content["remote_room_hosts"] event_content = content["content"] @@ -215,7 +215,7 @@ async def _serialize_payload( # type: ignore[override] async def _handle_request( # type: ignore[override] self, request: SynapseRequest, content: JsonDict, invite_event_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: txn_id = content["txn_id"] event_content = content["content"] @@ -279,7 +279,7 @@ async def _serialize_payload( # type: ignore[override] async def _handle_request( # type: ignore[override] self, request: SynapseRequest, content: JsonDict, knock_event_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: txn_id = content["txn_id"] event_content = content["content"] @@ -343,7 +343,7 @@ async def _handle_request( # type: ignore[override] room_id: str, user_id: str, change: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: logger.info("user membership change: %s in %s", user_id, room_id) user = UserID.from_string(user_id) diff --git a/synapse/replication/http/presence.py b/synapse/replication/http/presence.py index 8a3f3b0e678..4a894b0221e 100644 --- a/synapse/replication/http/presence.py +++ b/synapse/replication/http/presence.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Optional, Tuple +from typing import TYPE_CHECKING, Optional from twisted.web.server import Request @@ -63,7 +63,7 @@ async def _serialize_payload(user_id: str, device_id: Optional[str]) -> JsonDict async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await self._presence_handler.bump_presence_active_time( UserID.from_string(user_id), content.get("device_id") ) @@ -116,7 +116,7 @@ async def _serialize_payload( # type: ignore[override] async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await self._presence_handler.set_state( UserID.from_string(user_id), content.get("device_id"), diff --git a/synapse/replication/http/push.py b/synapse/replication/http/push.py index 6e20a208b6c..905414b5ee0 100644 --- a/synapse/replication/http/push.py +++ b/synapse/replication/http/push.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from twisted.web.server import Request @@ -68,7 +68,7 @@ async def _serialize_payload(app_id: str, pushkey: str, user_id: str) -> JsonDic async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: app_id = content["app_id"] pushkey = content["pushkey"] @@ -110,7 +110,7 @@ async def _handle_request( # type: ignore[override] user_id: str, old_room_id: str, new_room_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await 
self._store.copy_push_rules_from_room_to_room_for_user( old_room_id, new_room_id, user_id ) @@ -144,7 +144,7 @@ async def _serialize_payload(user_id: str) -> JsonDict: # type: ignore[override async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await self._store.delete_all_pushers_for_user(user_id) return 200, {} diff --git a/synapse/replication/http/register.py b/synapse/replication/http/register.py index 27d3504c3c4..780fcc463a9 100644 --- a/synapse/replication/http/register.py +++ b/synapse/replication/http/register.py @@ -19,7 +19,7 @@ # import logging -from typing import TYPE_CHECKING, Optional, Tuple +from typing import TYPE_CHECKING, Optional from twisted.web.server import Request @@ -104,7 +104,7 @@ async def _serialize_payload( # type: ignore[override] async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await self.registration_handler.check_registration_ratelimit(content["address"]) # Always default admin users to approved (since it means they were created by @@ -156,7 +156,7 @@ async def _serialize_payload( # type: ignore[override] async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: auth_result = content["auth_result"] access_token = content["access_token"] diff --git a/synapse/replication/http/send_events.py b/synapse/replication/http/send_events.py index 6b1a5a99564..b020a0fe7c6 100644 --- a/synapse/replication/http/send_events.py +++ b/synapse/replication/http/send_events.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, List, Tuple +from typing import TYPE_CHECKING from twisted.web.server import Request @@ -85,11 +85,11 @@ def __init__(self, hs: "HomeServer"): @staticmethod async def _serialize_payload( # type: ignore[override] - events_and_context: List[EventPersistencePair], + events_and_context: list[EventPersistencePair], store: "DataStore", requester: Requester, ratelimit: bool, - extra_users: List[UserID], + extra_users: list[UserID], ) -> JsonDict: """ Args: @@ -122,7 +122,7 @@ async def _serialize_payload( # type: ignore[override] async def _handle_request( # type: ignore[override] self, request: Request, payload: JsonDict - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: with Measure( self.clock, name="repl_send_events_parse", server_name=self.server_name ): diff --git a/synapse/replication/http/state.py b/synapse/replication/http/state.py index 3ec4ca5de30..823d3300412 100644 --- a/synapse/replication/http/state.py +++ b/synapse/replication/http/state.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from twisted.web.server import Request @@ -65,7 +65,7 @@ async def _serialize_payload(room_id: str) -> JsonDict: # type: ignore[override async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: writer_instance = self._events_shard_config.get_instance(room_id) if writer_instance != self._instance_name: raise SynapseError( diff --git a/synapse/replication/http/streams.py b/synapse/replication/http/streams.py index 61f70d57905..42e78c976f9 100644 --- a/synapse/replication/http/streams.py +++ b/synapse/replication/http/streams.py @@ -20,7 
+20,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from twisted.web.server import Request @@ -79,7 +79,7 @@ async def _serialize_payload( # type: ignore[override] async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict, stream_name: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: stream = self.streams.get(stream_name) if stream is None: raise SynapseError(400, "Unknown stream") diff --git a/synapse/replication/tcp/client.py b/synapse/replication/tcp/client.py index f2561bc0c52..f9605407af4 100644 --- a/synapse/replication/tcp/client.py +++ b/synapse/replication/tcp/client.py @@ -21,7 +21,7 @@ """A replication client for use by synapse workers.""" import logging -from typing import TYPE_CHECKING, Dict, Iterable, Optional, Set, Tuple +from typing import TYPE_CHECKING, Iterable, Optional from sortedcontainers import SortedList @@ -95,8 +95,8 @@ def __init__(self, hs: "HomeServer"): # Map from stream and instance to list of deferreds waiting for the stream to # arrive at a particular position. The lists are sorted by stream position. - self._streams_to_waiters: Dict[ - Tuple[str, str], SortedList[Tuple[int, Deferred]] + self._streams_to_waiters: dict[ + tuple[str, str], SortedList[tuple[int, Deferred]] ] = {} async def on_rdata( @@ -113,7 +113,7 @@ async def on_rdata( token: stream token for this batch of rows rows: a list of Stream.ROW_TYPE objects as returned by Stream.parse_row. """ - all_room_ids: Set[str] = set() + all_room_ids: set[str] = set() if stream_name == DeviceListsStream.NAME: if any(not row.is_signature and not row.hosts_calculated for row in rows): # This only uses the minimum stream position on the device lists @@ -200,7 +200,7 @@ async def on_rdata( if row.data.rejected: continue - extra_users: Tuple[UserID, ...] = () + extra_users: tuple[UserID, ...] = () if row.data.type == EventTypes.Member and row.data.state_key: extra_users = (UserID.from_string(row.data.state_key),) diff --git a/synapse/replication/tcp/commands.py b/synapse/replication/tcp/commands.py index 8eec68c3ddc..f115cc4db91 100644 --- a/synapse/replication/tcp/commands.py +++ b/synapse/replication/tcp/commands.py @@ -26,7 +26,7 @@ import abc import logging -from typing import List, Optional, Tuple, Type, TypeVar +from typing import Optional, TypeVar from synapse.replication.tcp.streams._base import StreamRow from synapse.util.json import json_decoder, json_encoder @@ -49,7 +49,7 @@ class Command(metaclass=abc.ABCMeta): @classmethod @abc.abstractmethod - def from_line(cls: Type[T], line: str) -> T: + def from_line(cls: type[T], line: str) -> T: """Deserialises a line from the wire into this command. `line` does not include the command. 
""" @@ -88,7 +88,7 @@ def __init__(self, data: str): self.data = data @classmethod - def from_line(cls: Type[SC], line: str) -> SC: + def from_line(cls: type[SC], line: str) -> SC: return cls(line) def to_line(self) -> str: @@ -145,7 +145,7 @@ def __init__( self.row = row @classmethod - def from_line(cls: Type["RdataCommand"], line: str) -> "RdataCommand": + def from_line(cls: type["RdataCommand"], line: str) -> "RdataCommand": stream_name, instance_name, token, row_json = line.split(" ", 3) return cls( stream_name, @@ -204,7 +204,7 @@ def __init__( self.new_token = new_token @classmethod - def from_line(cls: Type["PositionCommand"], line: str) -> "PositionCommand": + def from_line(cls: type["PositionCommand"], line: str) -> "PositionCommand": stream_name, instance_name, prev_token, new_token = line.split(" ", 3) return cls(stream_name, instance_name, int(prev_token), int(new_token)) @@ -249,7 +249,7 @@ class ReplicateCommand(Command): REPLICATE """ - __slots__: List[str] = [] + __slots__: list[str] = [] NAME = "REPLICATE" @@ -257,7 +257,7 @@ def __init__(self) -> None: pass @classmethod - def from_line(cls: Type[T], line: str) -> T: + def from_line(cls: type[T], line: str) -> T: return cls() def to_line(self) -> str: @@ -299,7 +299,7 @@ def __init__( self.last_sync_ms = last_sync_ms @classmethod - def from_line(cls: Type["UserSyncCommand"], line: str) -> "UserSyncCommand": + def from_line(cls: type["UserSyncCommand"], line: str) -> "UserSyncCommand": device_id: Optional[str] instance_id, user_id, device_id, state, last_sync_ms = line.split(" ", 4) @@ -343,7 +343,7 @@ def __init__(self, instance_id: str): @classmethod def from_line( - cls: Type["ClearUserSyncsCommand"], line: str + cls: type["ClearUserSyncsCommand"], line: str ) -> "ClearUserSyncsCommand": return cls(line) @@ -373,7 +373,7 @@ def __init__(self, instance_name: str, token: int): @classmethod def from_line( - cls: Type["FederationAckCommand"], line: str + cls: type["FederationAckCommand"], line: str ) -> "FederationAckCommand": instance_name, token = line.split(" ") return cls(instance_name, int(token)) @@ -418,7 +418,7 @@ def __init__( self.last_seen = last_seen @classmethod - def from_line(cls: Type["UserIpCommand"], line: str) -> "UserIpCommand": + def from_line(cls: type["UserIpCommand"], line: str) -> "UserIpCommand": user_id, jsn = line.split(" ", 1) access_token, ip, user_agent, device_id, last_seen = json_decoder.decode(jsn) @@ -485,7 +485,7 @@ def __init__( self.lock_key = lock_key @classmethod - def from_line(cls: Type["LockReleasedCommand"], line: str) -> "LockReleasedCommand": + def from_line(cls: type["LockReleasedCommand"], line: str) -> "LockReleasedCommand": instance_name, lock_name, lock_key = json_decoder.decode(line) return cls(instance_name, lock_name, lock_key) @@ -505,7 +505,7 @@ class NewActiveTaskCommand(_SimpleCommand): NAME = "NEW_ACTIVE_TASK" -_COMMANDS: Tuple[Type[Command], ...] = ( +_COMMANDS: tuple[type[Command], ...] 
= ( ServerCommand, RdataCommand, PositionCommand, diff --git a/synapse/replication/tcp/handler.py b/synapse/replication/tcp/handler.py index 4d0d3d44abc..8ef3d0fcc20 100644 --- a/synapse/replication/tcp/handler.py +++ b/synapse/replication/tcp/handler.py @@ -25,13 +25,9 @@ Any, Awaitable, Deque, - Dict, Iterable, Iterator, - List, Optional, - Set, - Tuple, TypeVar, Union, ) @@ -120,7 +116,7 @@ # the type of the entries in _command_queues_by_stream _StreamCommandQueue = Deque[ - Tuple[Union[RdataCommand, PositionCommand], IReplicationConnection] + tuple[Union[RdataCommand, PositionCommand], IReplicationConnection] ] @@ -141,18 +137,18 @@ def __init__(self, hs: "HomeServer"): self._instance_name = hs.get_instance_name() # Additional Redis channel suffixes to subscribe to. - self._channels_to_subscribe_to: List[str] = [] + self._channels_to_subscribe_to: list[str] = [] self._is_presence_writer = ( hs.get_instance_name() in hs.config.worker.writers.presence ) - self._streams: Dict[str, Stream] = { + self._streams: dict[str, Stream] = { stream.NAME: stream(hs) for stream in STREAMS_MAP.values() } # List of streams that this instance is the source of - self._streams_to_replicate: List[Stream] = [] + self._streams_to_replicate: list[Stream] = [] for stream in self._streams.values(): if hs.config.redis.redis_enabled and stream.NAME == CachesStream.NAME: @@ -246,14 +242,14 @@ def __init__(self, hs: "HomeServer"): # Map of stream name to batched updates. See RdataCommand for info on # how batching works. - self._pending_batches: Dict[str, List[Any]] = {} + self._pending_batches: dict[str, list[Any]] = {} # The factory used to create connections. self._factory: Optional[ReconnectingClientFactory] = None # The currently connected connections. (The list of places we need to send # outgoing replication commands to.) - self._connections: List[IReplicationConnection] = [] + self._connections: list[IReplicationConnection] = [] tcp_resource_total_connections_gauge.register_hook( homeserver_instance_id=hs.get_instance_id(), @@ -264,7 +260,7 @@ def __init__(self, hs: "HomeServer"): # them in order in a separate background process. # the streams which are currently being processed by _unsafe_process_queue - self._processing_streams: Set[str] = set() + self._processing_streams: set[str] = set() # for each stream, a queue of commands that are awaiting processing, and the # connection that they arrived on. @@ -274,7 +270,7 @@ def __init__(self, hs: "HomeServer"): # For each connection, the incoming stream names that have received a POSITION # from that connection. 
- self._streams_by_connection: Dict[IReplicationConnection, Set[str]] = {} + self._streams_by_connection: dict[IReplicationConnection, set[str]] = {} tcp_command_queue_gauge.register_hook( homeserver_instance_id=hs.get_instance_id(), @@ -450,11 +446,11 @@ def start_replication(self, hs: "HomeServer") -> None: bindAddress=None, ) - def get_streams(self) -> Dict[str, Stream]: + def get_streams(self) -> dict[str, Stream]: """Get a map from stream name to all streams.""" return self._streams - def get_streams_to_replicate(self) -> List[Stream]: + def get_streams_to_replicate(self) -> list[Stream]: """Get a list of streams that this instances replicates.""" return self._streams_to_replicate @@ -902,8 +898,8 @@ def send_new_active_task(self, task_id: str) -> None: def _batch_updates( - updates: Iterable[Tuple[UpdateToken, UpdateRow]], -) -> Iterator[Tuple[UpdateToken, List[UpdateRow]]]: + updates: Iterable[tuple[UpdateToken, UpdateRow]], +) -> Iterator[tuple[UpdateToken, list[UpdateRow]]]: """Collect stream updates with the same token together Given a series of updates returned by Stream.get_updates_since(), collects diff --git a/synapse/replication/tcp/protocol.py b/synapse/replication/tcp/protocol.py index bcfc65c2c0d..f3a54ed3ffe 100644 --- a/synapse/replication/tcp/protocol.py +++ b/synapse/replication/tcp/protocol.py @@ -163,7 +163,7 @@ def __init__( self.conn_id = random_string(5) # To dedupe in case of name clashes. # List of pending commands to send once we've established the connection - self.pending_commands: List[Command] = [] + self.pending_commands: list[Command] = [] # The LoopingCall for sending pings. self._send_ping_loop: Optional[task.LoopingCall] = None diff --git a/synapse/replication/tcp/redis.py b/synapse/replication/tcp/redis.py index caffb2913ea..ef8a07ea695 100644 --- a/synapse/replication/tcp/redis.py +++ b/synapse/replication/tcp/redis.py @@ -21,7 +21,7 @@ import logging from inspect import isawaitable -from typing import TYPE_CHECKING, Any, Generic, List, Optional, Type, TypeVar, cast +from typing import TYPE_CHECKING, Any, Generic, Optional, Type, TypeVar, cast import attr from txredisapi import ( @@ -72,7 +72,7 @@ class ConstantProperty(Generic[T, V]): constant: V = attr.ib() - def __get__(self, obj: Optional[T], objtype: Optional[Type[T]] = None) -> V: + def __get__(self, obj: Optional[T], objtype: Optional[type[T]] = None) -> V: return self.constant def __set__(self, obj: Optional[T], value: V) -> None: @@ -111,7 +111,7 @@ class RedisSubscriber(SubscriberProtocol): hs: "HomeServer" synapse_handler: "ReplicationCommandHandler" synapse_stream_prefix: str - synapse_channel_names: List[str] + synapse_channel_names: list[str] synapse_outbound_redis_connection: ConnectionHandler def __init__(self, *args: Any, **kwargs: Any): @@ -381,7 +381,7 @@ def __init__( self, hs: "HomeServer", outbound_redis_connection: ConnectionHandler, - channel_names: List[str], + channel_names: list[str], ): super().__init__( hs, diff --git a/synapse/replication/tcp/resource.py b/synapse/replication/tcp/resource.py index ef72a0a5325..8df0a3853f4 100644 --- a/synapse/replication/tcp/resource.py +++ b/synapse/replication/tcp/resource.py @@ -22,7 +22,7 @@ import logging import random -from typing import TYPE_CHECKING, List, Optional, Tuple +from typing import TYPE_CHECKING, Optional from prometheus_client import Counter @@ -320,8 +320,8 @@ async def _run_notifier_loop(self) -> None: def _batch_updates( - updates: List[Tuple[Token, StreamRow]], -) -> List[Tuple[Optional[Token], StreamRow]]: + 
updates: list[tuple[Token, StreamRow]], +) -> list[tuple[Optional[Token], StreamRow]]: """Takes a list of updates of form [(token, row)] and sets the token to None for all rows where the next row has the same token. This is used to implement batching. @@ -337,7 +337,7 @@ def _batch_updates( if not updates: return [] - new_updates: List[Tuple[Optional[Token], StreamRow]] = [] + new_updates: list[tuple[Optional[Token], StreamRow]] = [] for i, update in enumerate(updates[:-1]): if update[0] == updates[i + 1][0]: new_updates.append((None, update[1])) diff --git a/synapse/replication/tcp/streams/_base.py b/synapse/replication/tcp/streams/_base.py index ec7e935d6a3..a6e016be278 100644 --- a/synapse/replication/tcp/streams/_base.py +++ b/synapse/replication/tcp/streams/_base.py @@ -26,7 +26,6 @@ Any, Awaitable, Callable, - List, Optional, Tuple, TypeVar, @@ -66,7 +65,7 @@ # * `new_last_token` is the new position in stream. # * `limited` is whether there are more updates to fetch. # -StreamUpdateResult = Tuple[List[Tuple[Token, StreamRow]], Token, bool] +StreamUpdateResult = tuple[list[tuple[Token, StreamRow]], Token, bool] # The type of an update_function for a stream # @@ -400,7 +399,7 @@ class TypingStreamRow: room_id: str # All the users that are 'typing' right now in the specified room. - user_ids: List[str] + user_ids: list[str] NAME = "typing" ROW_TYPE = TypingStreamRow @@ -410,7 +409,7 @@ def __init__(self, hs: "HomeServer"): # On the writer, query the typing handler typing_writer_handler = hs.get_typing_writer_handler() update_function: Callable[ - [str, int, int, int], Awaitable[Tuple[List[Tuple[int, Any]], int, bool]] + [str, int, int, int], Awaitable[tuple[list[tuple[int, Any]], int, bool]] ] = typing_writer_handler.get_all_typing_updates self.current_token_function = typing_writer_handler.get_current_token else: @@ -512,7 +511,7 @@ class CachesStreamRow: """ cache_func: str - keys: Optional[List[Any]] + keys: Optional[list[Any]] invalidation_ts: int NAME = "caches" diff --git a/synapse/replication/tcp/streams/events.py b/synapse/replication/tcp/streams/events.py index 05b55fb0338..42eaa0c0494 100644 --- a/synapse/replication/tcp/streams/events.py +++ b/synapse/replication/tcp/streams/events.py @@ -20,7 +20,7 @@ # import heapq from collections import defaultdict -from typing import TYPE_CHECKING, Iterable, Optional, Tuple, Type, TypeVar, cast +from typing import TYPE_CHECKING, Iterable, Optional, Tuple, TypeVar, cast import attr @@ -93,7 +93,7 @@ class BaseEventsStreamRow: TypeId: str @classmethod - def from_data(cls: Type[T], data: Iterable[Optional[str]]) -> T: + def from_data(cls: type[T], data: Iterable[Optional[str]]) -> T: """Parse the data from the replication stream into a row. By default we just call the constructor with the data list as arguments @@ -136,7 +136,7 @@ class EventsStreamAllStateRow(BaseEventsStreamRow): room_id: str -_EventRows: Tuple[Type[BaseEventsStreamRow], ...] = ( +_EventRows: tuple[type[BaseEventsStreamRow], ...] = ( EventsStreamEventRow, EventsStreamCurrentStateRow, EventsStreamAllStateRow, @@ -237,7 +237,7 @@ async def _update_function( # distinguish the row type). At the same time, we can limit the event_rows # to the max stream_id from state_rows. 
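
The events.py hunks below show the other half of the bare-alias rule: the subscripted outer Tuple[int, Tuple] becomes tuple[int, Tuple], while the unparameterised inner Tuple is kept, which is why Tuple survives in that file's typing import. Sketched in isolation, with a hypothetical filter standing in for the upper_limit check below:

    from typing import Iterable, Tuple

    def rows_upto(updates: Iterable[tuple[int, Tuple]], limit: int) -> list[tuple[int, Tuple]]:
        # Keep only rows at or below the given stream position.
        return [(stream_id, rest) for stream_id, rest in updates if stream_id <= limit]
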
- event_updates: Iterable[Tuple[int, Tuple]] = ( + event_updates: Iterable[tuple[int, Tuple]] = ( (stream_id, (EventsStreamEventRow.TypeId, rest)) for (stream_id, *rest) in event_rows if stream_id <= upper_limit @@ -254,20 +254,20 @@ async def _update_function( for room_id, stream_ids in state_updates_by_room.items() if len(stream_ids) >= _MAX_STATE_UPDATES_PER_ROOM ] - state_all_updates: Iterable[Tuple[int, Tuple]] = ( + state_all_updates: Iterable[tuple[int, Tuple]] = ( (max_stream_id, (EventsStreamAllStateRow.TypeId, (room_id,))) for (max_stream_id, room_id) in state_all_rows ) # Any remaining state updates are sent individually. state_all_rooms = {room_id for _, room_id in state_all_rows} - state_updates: Iterable[Tuple[int, Tuple]] = ( + state_updates: Iterable[tuple[int, Tuple]] = ( (stream_id, (EventsStreamCurrentStateRow.TypeId, rest)) for (stream_id, *rest) in state_rows if rest[0] not in state_all_rooms ) - ex_outliers_updates: Iterable[Tuple[int, Tuple]] = ( + ex_outliers_updates: Iterable[tuple[int, Tuple]] = ( (stream_id, (EventsStreamEventRow.TypeId, rest)) for (stream_id, *rest) in ex_outliers_rows ) @@ -282,6 +282,6 @@ async def _update_function( @classmethod def parse_row(cls, row: StreamRow) -> "EventsStreamRow": - (typ, data) = cast(Tuple[str, Iterable[Optional[str]]], row) + (typ, data) = cast(tuple[str, Iterable[Optional[str]]], row) event_stream_row_data = TypeToRow[typ].from_data(data) return EventsStreamRow(typ, event_stream_row_data) diff --git a/synapse/replication/tcp/streams/federation.py b/synapse/replication/tcp/streams/federation.py index 1c2ffe86b7b..c99e7203810 100644 --- a/synapse/replication/tcp/streams/federation.py +++ b/synapse/replication/tcp/streams/federation.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import TYPE_CHECKING, Any, Awaitable, Callable, List, Tuple +from typing import TYPE_CHECKING, Any, Awaitable, Callable import attr @@ -58,7 +58,7 @@ def __init__(self, hs: "HomeServer"): federation_sender.get_current_token ) update_function: Callable[ - [str, int, int, int], Awaitable[Tuple[List[Tuple[int, Any]], int, bool]] + [str, int, int, int], Awaitable[tuple[list[tuple[int, Any]], int, bool]] ] = federation_sender.get_replication_rows elif hs.should_send_federation(): @@ -88,5 +88,5 @@ def _stub_current_token(instance_name: str) -> int: @staticmethod async def _stub_update_function( instance_name: str, from_token: int, upto_token: int, limit: int - ) -> Tuple[list, int, bool]: + ) -> tuple[list, int, bool]: return [], upto_token, False diff --git a/synapse/rest/__init__.py b/synapse/rest/__init__.py index db3bd465425..ea0e47ded47 100644 --- a/synapse/rest/__init__.py +++ b/synapse/rest/__init__.py @@ -19,7 +19,7 @@ # # import logging -from typing import TYPE_CHECKING, Callable, Dict, Iterable, List, Optional, Tuple +from typing import TYPE_CHECKING, Callable, Iterable, Optional from synapse.http.server import HttpServer, JsonResource from synapse.rest import admin @@ -78,7 +78,7 @@ RegisterServletsFunc = Callable[["HomeServer", HttpServer], None] -CLIENT_SERVLET_FUNCTIONS: Tuple[RegisterServletsFunc, ...] = ( +CLIENT_SERVLET_FUNCTIONS: tuple[RegisterServletsFunc, ...] 
= ( versions.register_servlets, initial_sync.register_servlets, room.register_deprecated_servlets, @@ -128,7 +128,7 @@ thread_subscriptions.register_servlets, ) -SERVLET_GROUPS: Dict[str, Iterable[RegisterServletsFunc]] = { +SERVLET_GROUPS: dict[str, Iterable[RegisterServletsFunc]] = { "client": CLIENT_SERVLET_FUNCTIONS, } @@ -143,7 +143,7 @@ class ClientRestResource(JsonResource): * etc """ - def __init__(self, hs: "HomeServer", servlet_groups: Optional[List[str]] = None): + def __init__(self, hs: "HomeServer", servlet_groups: Optional[list[str]] = None): JsonResource.__init__(self, hs, canonical_json=False) if hs.config.media.can_load_media_repo: # This import is here to prevent a circular import failure diff --git a/synapse/rest/admin/__init__.py b/synapse/rest/admin/__init__.py index 0386f8a34b2..5e75dc4c009 100644 --- a/synapse/rest/admin/__init__.py +++ b/synapse/rest/admin/__init__.py @@ -35,7 +35,7 @@ import logging from http import HTTPStatus -from typing import TYPE_CHECKING, Optional, Tuple +from typing import TYPE_CHECKING, Optional from synapse.api.errors import Codes, NotFoundError, SynapseError from synapse.handlers.pagination import PURGE_HISTORY_ACTION_NAME @@ -137,7 +137,7 @@ class VersionServlet(RestServlet): def __init__(self, hs: "HomeServer"): self.res = {"server_version": SYNAPSE_VERSION} - def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: return HTTPStatus.OK, self.res @@ -153,7 +153,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, room_id: str, event_id: Optional[str] - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) body = parse_json_object_from_request(request, allow_empty_body=True) @@ -237,7 +237,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, purge_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) purge_task = await self.pagination_handler.get_delete_task(purge_id) diff --git a/synapse/rest/admin/background_updates.py b/synapse/rest/admin/background_updates.py index 6fba616d3aa..96190c416d4 100644 --- a/synapse/rest/admin/background_updates.py +++ b/synapse/rest/admin/background_updates.py @@ -20,7 +20,7 @@ # import logging from http import HTTPStatus -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.errors import SynapseError from synapse.http.servlet import ( @@ -47,7 +47,7 @@ def __init__(self, hs: "HomeServer"): self._auth = hs.get_auth() self._data_stores = hs.get_datastores() - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) # We need to check that all configured databases have updates enabled. 
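
The admin servlet hunks above and below all share one return convention: an on_GET/on_POST method (usually a coroutine) yields an HTTP status plus a JSON dict. A minimal hypothetical servlet showing the annotation after the rewrite:

    from http import HTTPStatus
    from typing import Any

    JsonDict = dict[str, Any]

    class VersionLikeServlet:
        def __init__(self, version: str) -> None:
            self.res: JsonDict = {"server_version": version}

        def on_GET(self) -> tuple[int, JsonDict]:
            # HTTPStatus is an int subclass, so it satisfies tuple[int, JsonDict].
            return HTTPStatus.OK, self.res
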
@@ -56,7 +56,7 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: return HTTPStatus.OK, {"enabled": enabled} - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) body = parse_json_object_from_request(request) @@ -88,7 +88,7 @@ def __init__(self, hs: "HomeServer"): self._auth = hs.get_auth() self._data_stores = hs.get_datastores() - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) # We need to check that all configured databases have updates enabled. @@ -121,7 +121,7 @@ def __init__(self, hs: "HomeServer"): self._auth = hs.get_auth() self._store = hs.get_datastores().main - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) body = parse_json_object_from_request(request) diff --git a/synapse/rest/admin/devices.py b/synapse/rest/admin/devices.py index c488bce58e1..c8e9242ce85 100644 --- a/synapse/rest/admin/devices.py +++ b/synapse/rest/admin/devices.py @@ -20,7 +20,7 @@ # import logging from http import HTTPStatus -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.errors import NotFoundError, SynapseError from synapse.http.servlet import ( @@ -56,7 +56,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, user_id: str, device_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) target_user = UserID.from_string(user_id) @@ -76,7 +76,7 @@ async def on_GET( async def on_DELETE( self, request: SynapseRequest, user_id: str, device_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) target_user = UserID.from_string(user_id) @@ -92,7 +92,7 @@ async def on_DELETE( async def on_PUT( self, request: SynapseRequest, user_id: str, device_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) target_user = UserID.from_string(user_id) @@ -128,7 +128,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) target_user = UserID.from_string(user_id) @@ -157,7 +157,7 @@ async def on_GET( async def on_POST( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: """Creates a new device for the user.""" await assert_requester_is_admin(self.auth, request) @@ -201,7 +201,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) target_user = UserID.from_string(user_id) diff --git a/synapse/rest/admin/event_reports.py b/synapse/rest/admin/event_reports.py index ff1abc0697c..5e8f85de7e2 100644 --- a/synapse/rest/admin/event_reports.py +++ b/synapse/rest/admin/event_reports.py @@ -21,7 +21,7 @@ import logging from http import HTTPStatus -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from 
synapse.api.constants import Direction from synapse.api.errors import Codes, NotFoundError, SynapseError @@ -65,7 +65,7 @@ def __init__(self, hs: "HomeServer"): self._auth = hs.get_auth() self._store = hs.get_datastores().main - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) start = parse_integer(request, "from", default=0) @@ -123,7 +123,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, report_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) message = ( @@ -149,7 +149,7 @@ async def on_GET( async def on_DELETE( self, request: SynapseRequest, report_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) message = ( diff --git a/synapse/rest/admin/events.py b/synapse/rest/admin/events.py index 61b347f8f44..1c39d5caf30 100644 --- a/synapse/rest/admin/events.py +++ b/synapse/rest/admin/events.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.errors import NotFoundError from synapse.events.utils import ( @@ -43,7 +43,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, event_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self._auth.get_user_by_req(request) await assert_user_is_admin(self._auth, requester) diff --git a/synapse/rest/admin/experimental_features.py b/synapse/rest/admin/experimental_features.py index 3d3015cef77..1b8d24615e6 100644 --- a/synapse/rest/admin/experimental_features.py +++ b/synapse/rest/admin/experimental_features.py @@ -22,7 +22,7 @@ from enum import Enum from http import HTTPStatus -from typing import TYPE_CHECKING, Dict, Tuple +from typing import TYPE_CHECKING, Dict from synapse.api.errors import SynapseError from synapse.http.servlet import RestServlet, parse_json_object_from_request @@ -74,7 +74,7 @@ async def on_GET( self, request: SynapseRequest, user_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: """ List which features are enabled for a given user """ @@ -99,7 +99,7 @@ async def on_GET( async def on_PUT( self, request: SynapseRequest, user_id: str - ) -> Tuple[HTTPStatus, Dict]: + ) -> tuple[HTTPStatus, Dict]: """ Enable or disable the provided features for the requester """ diff --git a/synapse/rest/admin/federation.py b/synapse/rest/admin/federation.py index d85a04b8252..e958ef9747c 100644 --- a/synapse/rest/admin/federation.py +++ b/synapse/rest/admin/federation.py @@ -20,7 +20,7 @@ # import logging from http import HTTPStatus -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.constants import Direction from synapse.api.errors import NotFoundError, SynapseError @@ -58,7 +58,7 @@ def __init__(self, hs: "HomeServer"): self._auth = hs.get_auth() self._store = hs.get_datastores().main - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) start = parse_integer(request, "from", default=0) @@ -115,7 +115,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, destination: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: 
await assert_requester_is_admin(self._auth, request) if not await self._store.is_destination_known(destination): @@ -175,7 +175,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, destination: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) if not await self._store.is_destination_known(destination): @@ -224,7 +224,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, destination: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) if not await self._store.is_destination_known(destination): diff --git a/synapse/rest/admin/media.py b/synapse/rest/admin/media.py index 8732c0bf9d3..cfdb314b1a9 100644 --- a/synapse/rest/admin/media.py +++ b/synapse/rest/admin/media.py @@ -20,7 +20,7 @@ # import logging from http import HTTPStatus -from typing import TYPE_CHECKING, Optional, Tuple +from typing import TYPE_CHECKING, Optional import attr @@ -67,7 +67,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, server_name: str, media_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) await assert_user_is_admin(self.auth, requester) @@ -134,7 +134,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) await assert_user_is_admin(self.auth, requester) @@ -161,7 +161,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) await assert_user_is_admin(self.auth, requester) @@ -190,7 +190,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, server_name: str, media_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) await assert_user_is_admin(self.auth, requester) @@ -219,7 +219,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, server_name: str, media_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) logger.info("Remove from quarantine media by ID: %s/%s", server_name, media_id) @@ -241,7 +241,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, media_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) logger.info("Protecting local media by ID: %s", media_id) @@ -263,7 +263,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, media_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) logger.info("Unprotecting local media by ID: %s", media_id) @@ -285,7 +285,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) local_mxcs, remote_mxcs = await self.store.get_media_mxcs_in_room(room_id) @@ -300,7 +300,7 @@ def __init__(self, hs: "HomeServer"): self.media_repository = hs.get_media_repository() self.auth = 
hs.get_auth() - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) before_ts = parse_integer(request, "before_ts", required=True) @@ -338,7 +338,7 @@ def __init__(self, hs: "HomeServer"): async def on_DELETE( self, request: SynapseRequest, server_name: str, media_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) if not self._is_mine_server_name(server_name): @@ -375,7 +375,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, server_name: Optional[str] = None - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) before_ts = parse_integer(request, "before_ts", required=True) @@ -433,7 +433,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: # This will always be set by the time Twisted calls us. assert request.args is not None @@ -477,7 +477,7 @@ async def on_GET( async def on_DELETE( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: # This will always be set by the time Twisted calls us. assert request.args is not None diff --git a/synapse/rest/admin/registration_tokens.py b/synapse/rest/admin/registration_tokens.py index bec23315900..ea266403a01 100644 --- a/synapse/rest/admin/registration_tokens.py +++ b/synapse/rest/admin/registration_tokens.py @@ -22,7 +22,7 @@ import logging import string from http import HTTPStatus -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.errors import Codes, NotFoundError, SynapseError from synapse.http.servlet import ( @@ -80,7 +80,7 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.store = hs.get_datastores().main - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) valid = parse_boolean(request, "valid") token_list = await self.store.get_registration_tokens(valid) @@ -133,7 +133,7 @@ def __init__(self, hs: "HomeServer"): self.allowed_chars = string.ascii_letters + string.digits + "._~-" self.allowed_chars_set = set(self.allowed_chars) - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) body = parse_json_object_from_request(request) @@ -282,7 +282,7 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.store = hs.get_datastores().main - async def on_GET(self, request: SynapseRequest, token: str) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest, token: str) -> tuple[int, JsonDict]: """Retrieve a registration token.""" await assert_requester_is_admin(self.auth, request) token_info = await self.store.get_one_registration_token(token) @@ -293,7 +293,7 @@ async def on_GET(self, request: SynapseRequest, token: str) -> Tuple[int, JsonDi return HTTPStatus.OK, token_info - async def on_PUT(self, request: SynapseRequest, token: str) -> Tuple[int, JsonDict]: + async def on_PUT(self, request: SynapseRequest, token: str) -> tuple[int, JsonDict]: """Update a registration token.""" await 
assert_requester_is_admin(self.auth, request) body = parse_json_object_from_request(request) @@ -348,7 +348,7 @@ async def on_PUT(self, request: SynapseRequest, token: str) -> Tuple[int, JsonDi async def on_DELETE( self, request: SynapseRequest, token: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: """Delete a registration token.""" await assert_requester_is_admin(self.auth, request) diff --git a/synapse/rest/admin/rooms.py b/synapse/rest/admin/rooms.py index 5bed89c2c46..216af29f9b0 100644 --- a/synapse/rest/admin/rooms.py +++ b/synapse/rest/admin/rooms.py @@ -20,7 +20,7 @@ # import logging from http import HTTPStatus -from typing import TYPE_CHECKING, List, Optional, Tuple, cast +from typing import TYPE_CHECKING, Optional, cast import attr from immutabledict import immutabledict @@ -88,7 +88,7 @@ def __init__(self, hs: "HomeServer"): async def on_DELETE( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self._auth.get_user_by_req(request) await assert_user_is_admin(self._auth, requester) @@ -167,7 +167,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) if not RoomID.is_valid(room_id): @@ -198,7 +198,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, delete_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) delete_task = await self._pagination_handler.get_delete_task(delete_id) @@ -224,7 +224,7 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.admin_handler = hs.get_admin_handler() - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) # Extract query parameters @@ -319,7 +319,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) ret = await self.store.get_room_with_stats(room_id) @@ -337,7 +337,7 @@ async def on_GET( async def on_DELETE( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: return await self._delete_room( request, room_id, @@ -353,7 +353,7 @@ async def _delete_room( auth: "Auth", room_shutdown_handler: "RoomShutdownHandler", pagination_handler: "PaginationHandler", - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await auth.get_user_by_req(request) await assert_user_is_admin(auth, requester) @@ -429,7 +429,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) room = await self.store.get_room(room_id) @@ -458,7 +458,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) room = await self.store.get_room(room_id) @@ -498,7 +498,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, room_identifier: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: # This will always 
be set by the time Twisted calls us. assert request.args is not None @@ -521,7 +521,7 @@ async def on_POST( # Get the room ID from the identifier. try: - remote_room_hosts: Optional[List[str]] = [ + remote_room_hosts: Optional[list[str]] = [ x.decode("ascii") for x in request.args[b"server_name"] ] except Exception: @@ -591,7 +591,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, room_identifier: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) await assert_user_is_admin(self.auth, requester) content = parse_json_object_from_request(request, allow_empty_body=True) @@ -756,7 +756,7 @@ def __init__(self, hs: "HomeServer"): async def on_DELETE( self, request: SynapseRequest, room_identifier: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) room_id, _ = await self.resolve_room_id(room_identifier) @@ -766,7 +766,7 @@ async def on_DELETE( async def on_GET( self, request: SynapseRequest, room_identifier: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) room_id, _ = await self.resolve_room_id(room_identifier) @@ -805,7 +805,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str, event_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=False) await assert_user_is_admin(self.auth, requester) @@ -871,7 +871,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) if not RoomID.is_valid(room_id): @@ -891,7 +891,7 @@ async def on_GET( async def on_PUT( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self._auth.get_user_by_req(request) await assert_user_is_admin(self._auth, requester) @@ -935,7 +935,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self._auth.get_user_by_req(request) await assert_user_is_admin(self._auth, requester) @@ -997,7 +997,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self._auth.get_user_by_req(request) await assert_user_is_admin(self._auth, requester) diff --git a/synapse/rest/admin/scheduled_tasks.py b/synapse/rest/admin/scheduled_tasks.py index 2ae13021b9e..41c402b4243 100644 --- a/synapse/rest/admin/scheduled_tasks.py +++ b/synapse/rest/admin/scheduled_tasks.py @@ -13,7 +13,7 @@ # # # -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.http.servlet import RestServlet, parse_integer, parse_string from synapse.http.site import SynapseRequest @@ -35,7 +35,7 @@ def __init__(self, hs: "HomeServer"): self._auth = hs.get_auth() self._store = hs.get_datastores().main - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) # extract query params diff --git a/synapse/rest/admin/server_notice_servlet.py b/synapse/rest/admin/server_notice_servlet.py index 
f3150e88d71..0be04c0f909 100644 --- a/synapse/rest/admin/server_notice_servlet.py +++ b/synapse/rest/admin/server_notice_servlet.py @@ -18,7 +18,7 @@ # # from http import HTTPStatus -from typing import TYPE_CHECKING, Optional, Tuple +from typing import TYPE_CHECKING, Optional from synapse.api.constants import EventTypes from synapse.api.errors import NotFoundError, SynapseError @@ -81,7 +81,7 @@ async def _do( request: SynapseRequest, requester: Requester, txn_id: Optional[str], - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_user_is_admin(self.auth, requester) body = parse_json_object_from_request(request) assert_params_in_dict(body, ("user_id", "content")) @@ -118,13 +118,13 @@ async def _do( async def on_POST( self, request: SynapseRequest, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) return await self._do(request, requester, None) async def on_PUT( self, request: SynapseRequest, txn_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) set_tag("txn_id", txn_id) return await self.txns.fetch_or_execute_request( diff --git a/synapse/rest/admin/statistics.py b/synapse/rest/admin/statistics.py index 0adc5b70059..3de1d4e9bdb 100644 --- a/synapse/rest/admin/statistics.py +++ b/synapse/rest/admin/statistics.py @@ -21,7 +21,7 @@ import logging from http import HTTPStatus -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.constants import Direction from synapse.api.errors import Codes, SynapseError @@ -48,7 +48,7 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.store = hs.get_datastores().main - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) order_by = parse_string( @@ -119,7 +119,7 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.stats_controller = hs.get_storage_controllers().stats - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) room_sizes = await self.stats_controller.get_room_db_size_estimate() diff --git a/synapse/rest/admin/username_available.py b/synapse/rest/admin/username_available.py index 2d642f7d6b3..fb0cee42dad 100644 --- a/synapse/rest/admin/username_available.py +++ b/synapse/rest/admin/username_available.py @@ -20,7 +20,7 @@ # import logging from http import HTTPStatus -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.http.servlet import RestServlet, parse_string from synapse.http.site import SynapseRequest @@ -50,7 +50,7 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.registration_handler = hs.get_registration_handler() - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) username = parse_string(request, "username", required=True) diff --git a/synapse/rest/admin/users.py b/synapse/rest/admin/users.py index 25a38dc4acb..26d33daa0b5 100644 --- a/synapse/rest/admin/users.py +++ b/synapse/rest/admin/users.py @@ -23,7 +23,7 @@ import logging import secrets from http import HTTPStatus -from typing import TYPE_CHECKING, Dict, List, Optional, 
Tuple, Union +from typing import TYPE_CHECKING, Optional, Union import attr @@ -113,7 +113,7 @@ def __init__(self, hs: "HomeServer"): hs.config.mas.enabled or hs.config.experimental.msc3861.enabled ) - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) start = parse_integer(request, "from", default=0) @@ -164,7 +164,7 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: direction = parse_enum(request, "dir", Direction, default=Direction.FORWARDS) # twisted.web.server.Request.args is incorrectly defined as Optional[Any] - args: Dict[bytes, List[bytes]] = request.args # type: ignore + args: dict[bytes, list[bytes]] = request.args # type: ignore not_user_types = parse_strings_from_args(args, "not_user_type") users, total = await self.store.get_users_paginate( @@ -256,7 +256,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonMapping]: + ) -> tuple[int, JsonMapping]: await assert_requester_is_admin(self.auth, request) target_user = UserID.from_string(user_id) @@ -271,7 +271,7 @@ async def on_GET( async def on_PUT( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonMapping]: + ) -> tuple[int, JsonMapping]: requester = await self.auth.get_user_by_req(request) await assert_user_is_admin(self.auth, requester) @@ -349,14 +349,14 @@ async def on_PUT( "'approved' parameter is not of type boolean", ) - # convert List[Dict[str, str]] into List[Tuple[str, str]] + # convert list[dict[str, str]] into list[tuple[str, str]] if external_ids is not None: new_external_ids = [ (external_id["auth_provider"], external_id["external_id"]) for external_id in external_ids ] - # convert List[Dict[str, str]] into Set[Tuple[str, str]] + # convert list[dict[str, str]] into set[tuple[str, str]] if threepids is not None: new_threepids = { (threepid["medium"], threepid["address"]) for threepid in threepids @@ -545,7 +545,7 @@ class UserRegisterServlet(RestServlet): def __init__(self, hs: "HomeServer"): self.auth_handler = hs.get_auth_handler() self.reactor = hs.get_reactor() - self.nonces: Dict[str, int] = {} + self.nonces: dict[str, int] = {} self.hs = hs self._all_user_types = hs.config.user_types.all_user_types @@ -559,7 +559,7 @@ def _clear_old_nonces(self) -> None: if now - v > self.NONCE_TIMEOUT: del self.nonces[k] - def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: """ Generate a new nonce.
""" @@ -569,7 +569,7 @@ def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: self.nonces[nonce] = int(self.reactor.seconds()) return HTTPStatus.OK, {"nonce": nonce} - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: self._clear_old_nonces() if not self.hs.config.registration.registration_shared_secret: @@ -730,7 +730,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonMapping]: + ) -> tuple[int, JsonMapping]: target_user = UserID.from_string(user_id) requester = await self.auth.get_user_by_req(request) @@ -756,7 +756,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, target_user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) await assert_user_is_admin(self.auth, requester) @@ -801,7 +801,7 @@ class PutBody(RequestBodyModel): async def on_PUT( self, request: SynapseRequest, target_user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) await assert_user_is_admin(self.auth, requester) @@ -828,7 +828,7 @@ def __init__(self, hs: "HomeServer"): ) self.auth = hs.get_auth() - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) if self.account_validity_module_callbacks.on_legacy_admin_request_callback: @@ -878,7 +878,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, target_user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: """Post request to allow an administrator reset password for a user. This needs user to have administrator access in Synapse. """ @@ -920,7 +920,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, target_user_id: str - ) -> Tuple[int, Optional[List[JsonDict]]]: + ) -> tuple[int, Optional[list[JsonDict]]]: """Get request to search user table for specific users according to search term. This needs user to have a administrator access in Synapse. 
@@ -989,7 +989,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) target_user = UserID.from_string(user_id) @@ -1006,7 +1006,7 @@ async def on_GET( async def on_PUT( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) await assert_user_is_admin(self.auth, requester) auth_user = requester.user @@ -1047,7 +1047,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) room_ids = await self.store.get_rooms_for_user(user_id) @@ -1079,7 +1079,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) if not self.is_mine(UserID.from_string(user_id)): @@ -1122,7 +1122,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) await assert_user_is_admin(self.auth, requester) auth_user = requester.user @@ -1190,7 +1190,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) if not self.is_mine_id(user_id): @@ -1204,7 +1204,7 @@ async def on_POST( async def on_DELETE( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) if not self.is_mine_id(user_id): @@ -1242,7 +1242,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) if not self.is_mine_id(user_id): @@ -1273,7 +1273,7 @@ async def on_GET( async def on_POST( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) if not self.is_mine_id(user_id): @@ -1321,7 +1321,7 @@ async def on_POST( async def on_DELETE( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self.auth, request) if not self.is_mine_id(user_id): @@ -1349,7 +1349,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) if not self._is_mine_id(user_id): @@ -1390,7 +1390,7 @@ async def on_POST( self, request: SynapseRequest, user_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) if user_id is None: @@ -1424,7 +1424,7 @@ async def on_GET( request: SynapseRequest, provider: str, external_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) user_id = await self._store.get_user_by_external_id(provider, external_id) @@ -1449,7 +1449,7 @@ async def on_GET( request: SynapseRequest, 
medium: str, address: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) user_id = await self._store.get_user_id_by_threepid(medium, address) @@ -1475,14 +1475,14 @@ def __init__(self, hs: "HomeServer"): self.admin_handler = hs.get_admin_handler() class PostBody(RequestBodyModel): - rooms: List[StrictStr] + rooms: list[StrictStr] reason: Optional[StrictStr] limit: Optional[StrictInt] use_admin: Optional[StrictBool] async def on_POST( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self._auth.get_user_by_req(request) await assert_user_is_admin(self._auth, requester) @@ -1531,7 +1531,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, redact_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) task = await self.admin_handler.get_redact_task(redact_id) @@ -1574,7 +1574,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) from_ts = parse_integer(request, "from_ts", required=True) @@ -1599,7 +1599,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await assert_requester_is_admin(self._auth, request) from_ts = parse_integer(request, "from_ts", required=True) diff --git a/synapse/rest/client/_base.py b/synapse/rest/client/_base.py index 6cf37869d89..fad7234718e 100644 --- a/synapse/rest/client/_base.py +++ b/synapse/rest/client/_base.py @@ -23,7 +23,7 @@ import logging import re -from typing import Any, Awaitable, Callable, Iterable, Pattern, Tuple, TypeVar, cast +from typing import Any, Awaitable, Callable, Iterable, Pattern, TypeVar, cast from synapse.api.errors import InteractiveAuthIncompleteError from synapse.api.urls import CLIENT_API_PREFIX @@ -86,7 +86,7 @@ def set_timeline_upper_limit(filter_json: JsonDict, filter_timeline_limit: int) ) -C = TypeVar("C", bound=Callable[..., Awaitable[Tuple[int, JsonDict]]]) +C = TypeVar("C", bound=Callable[..., Awaitable[tuple[int, JsonDict]]]) def interactive_auth_handler(orig: C) -> C: @@ -104,7 +104,7 @@ async def on_POST(self, request): await self.auth_handler.check_auth """ - async def wrapped(*args: Any, **kwargs: Any) -> Tuple[int, JsonDict]: + async def wrapped(*args: Any, **kwargs: Any) -> tuple[int, JsonDict]: try: return await orig(*args, **kwargs) except InteractiveAuthIncompleteError as e: diff --git a/synapse/rest/client/account.py b/synapse/rest/client/account.py index d9f0c169e80..8f2f54f7505 100644 --- a/synapse/rest/client/account.py +++ b/synapse/rest/client/account.py @@ -21,7 +21,7 @@ # import logging import random -from typing import TYPE_CHECKING, List, Literal, Optional, Tuple +from typing import TYPE_CHECKING, Literal, Optional from urllib.parse import urlparse import attr @@ -89,7 +89,7 @@ def __init__(self, hs: "HomeServer"): template_text=self.config.email.email_password_reset_template_text, ) - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: if not self.config.email.can_verify_email: logger.warning( "User password resets have been disabled due to lack of email config" @@ -169,7 +169,7 @@ class 
PostBody(RequestBodyModel): new_password: Optional[constr(max_length=512, strict=True)] = None @interactive_auth_handler - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: body = parse_and_validate_json_object_from_request(request, self.PostBody) # we do basic sanity checks here because the auth layer will store these @@ -296,7 +296,7 @@ class PostBody(RequestBodyModel): erase: StrictBool = False @interactive_auth_handler - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: body = parse_and_validate_json_object_from_request(request, self.PostBody) requester = await self.auth.get_user_by_req(request) @@ -341,7 +341,7 @@ def __init__(self, hs: "HomeServer"): template_text=self.config.email.email_add_threepid_template_text, ) - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: if not self.hs.config.registration.enable_3pid_changes: raise SynapseError( 400, "3PID changes are disabled on this server", Codes.FORBIDDEN @@ -418,7 +418,7 @@ def __init__(self, hs: "HomeServer"): self.store = self.hs.get_datastores().main self.identity_handler = hs.get_identity_handler() - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: body = parse_and_validate_json_object_from_request( request, MsisdnRequestTokenBody ) @@ -567,7 +567,7 @@ def __init__(self, hs: "HomeServer"): self.store = hs.get_datastores().main self.identity_handler = hs.get_identity_handler() - async def on_POST(self, request: Request) -> Tuple[int, JsonDict]: + async def on_POST(self, request: Request) -> tuple[int, JsonDict]: if not self.config.registration.account_threepid_delegate_msisdn: raise SynapseError( 400, @@ -601,7 +601,7 @@ def __init__(self, hs: "HomeServer"): self.auth_handler = hs.get_auth_handler() self.datastore = self.hs.get_datastores().main - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) threepids = await self.datastore.user_get_threepids(requester.user.to_string()) @@ -612,7 +612,7 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: # the endpoint is deprecated. (If you really want to, you could do this by reusing # ThreePidBindRestServelet.PostBody with an `alias_generator` to handle # `threePidCreds` versus `three_pid_creds`. 
- async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: if self.hs.config.mas.enabled or self.hs.config.experimental.msc3861.enabled: raise NotFoundError(errcode=Codes.UNRECOGNIZED) @@ -669,7 +669,7 @@ class PostBody(RequestBodyModel): sid: StrictStr @interactive_auth_handler - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: if not self.hs.config.registration.enable_3pid_changes: raise SynapseError( 400, "3PID changes are disabled on this server", Codes.FORBIDDEN @@ -718,7 +718,7 @@ class PostBody(RequestBodyModel): id_server: StrictStr sid: StrictStr - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: body = parse_and_validate_json_object_from_request(request, self.PostBody) requester = await self.auth.get_user_by_req(request) @@ -746,7 +746,7 @@ class PostBody(RequestBodyModel): id_server: Optional[StrictStr] = None medium: Literal["email", "msisdn"] - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: """Unbind the given 3pid from a specific identity server, or identity servers that are known to have this 3pid bound """ @@ -775,7 +775,7 @@ class PostBody(RequestBodyModel): id_server: Optional[StrictStr] = None medium: Literal["email", "msisdn"] - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: if not self.hs.config.registration.enable_3pid_changes: raise SynapseError( 400, "3PID changes are disabled on this server", Codes.FORBIDDEN @@ -859,7 +859,7 @@ def __init__(self, hs: "HomeServer"): super().__init__() self.auth = hs.get_auth() - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) response = { @@ -889,9 +889,9 @@ def __init__(self, hs: "HomeServer"): class PostBody(RequestBodyModel): # TODO: we could validate that each user id is an mxid here, and/or parse it # as a UserID - user_ids: List[StrictStr] + user_ids: list[StrictStr] - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: await self._auth.get_user_by_req(request) body = parse_and_validate_json_object_from_request(request, self.PostBody) diff --git a/synapse/rest/client/account_data.py b/synapse/rest/client/account_data.py index 734c9e992f5..0800c0f5b8a 100644 --- a/synapse/rest/client/account_data.py +++ b/synapse/rest/client/account_data.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Optional, Tuple +from typing import TYPE_CHECKING, Optional from synapse.api.constants import AccountDataTypes, ReceiptTypes from synapse.api.errors import AuthError, Codes, NotFoundError, SynapseError @@ -75,7 +75,7 @@ def __init__(self, hs: "HomeServer"): async def on_PUT( self, request: SynapseRequest, user_id: str, account_data_type: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) if user_id != requester.user.to_string(): raise AuthError(403, "Cannot add account data for other users.") @@ -101,7 
+101,7 @@ async def on_PUT( async def on_GET( self, request: SynapseRequest, user_id: str, account_data_type: str - ) -> Tuple[int, JsonMapping]: + ) -> tuple[int, JsonMapping]: requester = await self.auth.get_user_by_req(request) if user_id != requester.user.to_string(): raise AuthError(403, "Cannot get account data for other users.") @@ -152,7 +152,7 @@ async def on_DELETE( request: SynapseRequest, user_id: str, account_data_type: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) if user_id != requester.user.to_string(): raise AuthError(403, "Cannot delete account data for other users.") @@ -191,7 +191,7 @@ async def on_PUT( user_id: str, room_id: str, account_data_type: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) if user_id != requester.user.to_string(): raise AuthError(403, "Cannot add account data for other users.") @@ -230,7 +230,7 @@ async def on_GET( user_id: str, room_id: str, account_data_type: str, - ) -> Tuple[int, JsonMapping]: + ) -> tuple[int, JsonMapping]: requester = await self.auth.get_user_by_req(request) if user_id != requester.user.to_string(): raise AuthError(403, "Cannot get account data for other users.") @@ -288,7 +288,7 @@ async def on_DELETE( user_id: str, room_id: str, account_data_type: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) if user_id != requester.user.to_string(): raise AuthError(403, "Cannot delete account data for other users.") diff --git a/synapse/rest/client/account_validity.py b/synapse/rest/client/account_validity.py index ec7836b647f..1c605390548 100644 --- a/synapse/rest/client/account_validity.py +++ b/synapse/rest/client/account_validity.py @@ -19,7 +19,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from twisted.web.server import Request @@ -90,7 +90,7 @@ def __init__(self, hs: "HomeServer"): hs.config.account_validity.account_validity_renew_by_email_enabled ) - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_expired=True) user_id = requester.user.to_string() await self.account_activity_handler.send_renewal_email_to_user(user_id) diff --git a/synapse/rest/client/appservice_ping.py b/synapse/rest/client/appservice_ping.py index 1f9662a95a6..7e2ac15783a 100644 --- a/synapse/rest/client/appservice_ping.py +++ b/synapse/rest/client/appservice_ping.py @@ -22,7 +22,7 @@ import logging import time from http import HTTPStatus -from typing import TYPE_CHECKING, Any, Dict, Tuple +from typing import TYPE_CHECKING, Any from synapse.api.errors import ( CodeMessageException, @@ -58,7 +58,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, appservice_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) if not requester.app_service: @@ -97,7 +97,7 @@ async def on_POST( Codes.AS_PING_CONNECTION_TIMEOUT, ) except CodeMessageException as e: - additional_fields: Dict[str, Any] = {"status": e.code} + additional_fields: dict[str, Any] = {"status": e.code} if isinstance(e, HttpResponseException): try: additional_fields["body"] = e.response.decode("utf-8") diff --git a/synapse/rest/client/auth_metadata.py b/synapse/rest/client/auth_metadata.py index 
4b5d9974780..702f550906b 100644 --- a/synapse/rest/client/auth_metadata.py +++ b/synapse/rest/client/auth_metadata.py @@ -13,7 +13,7 @@ # limitations under the License. import logging import typing -from typing import Tuple, cast +from typing import cast from synapse.api.auth.mas import MasDelegatedAuth from synapse.api.errors import Codes, SynapseError @@ -48,7 +48,7 @@ def __init__(self, hs: "HomeServer"): self._config = hs.config self._auth = hs.get_auth() - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: if self._config.mas.enabled: assert isinstance(self._auth, MasDelegatedAuth) return 200, {"issuer": await self._auth.issuer()} @@ -93,7 +93,7 @@ def __init__(self, hs: "HomeServer"): self._config = hs.config self._auth = hs.get_auth() - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: if self._config.mas.enabled: assert isinstance(self._auth, MasDelegatedAuth) return 200, await self._auth.auth_metadata() diff --git a/synapse/rest/client/capabilities.py b/synapse/rest/client/capabilities.py index a279db1cc5c..baff999ab06 100644 --- a/synapse/rest/client/capabilities.py +++ b/synapse/rest/client/capabilities.py @@ -19,7 +19,7 @@ # import logging from http import HTTPStatus -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, MSC3244_CAPABILITIES from synapse.http.server import HttpServer @@ -48,7 +48,7 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.auth_handler = hs.get_auth_handler() - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: await self.auth.get_user_by_req(request, allow_guest=True) change_password = self.auth_handler.can_change_password() diff --git a/synapse/rest/client/delayed_events.py b/synapse/rest/client/delayed_events.py index 2dd5a60b2b1..80abacbc9d6 100644 --- a/synapse/rest/client/delayed_events.py +++ b/synapse/rest/client/delayed_events.py @@ -17,7 +17,7 @@ import logging from enum import Enum from http import HTTPStatus -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.errors import Codes, SynapseError from synapse.http.server import HttpServer @@ -52,7 +52,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, delay_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) body = parse_json_object_from_request(request) @@ -95,7 +95,7 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.delayed_events_handler = hs.get_delayed_events_handler() - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) # TODO: Support Pagination stream API ("from" query parameter) delayed_events = await self.delayed_events_handler.get_all_for_user(requester) diff --git a/synapse/rest/client/devices.py b/synapse/rest/client/devices.py index 0777abde7f6..9b5f8f851dc 100644 --- a/synapse/rest/client/devices.py +++ b/synapse/rest/client/devices.py @@ -22,7 +22,7 @@ import logging from http import HTTPStatus -from typing import TYPE_CHECKING, List, Optional, Tuple +from typing import 
TYPE_CHECKING, Optional from synapse._pydantic_compat import Extra, StrictStr from synapse.api import errors @@ -56,7 +56,7 @@ def __init__(self, hs: "HomeServer"): self.device_handler = hs.get_device_handler() self._msc3852_enabled = hs.config.experimental.msc3852_enabled - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) devices = await self.device_handler.get_devices_by_user( requester.user.to_string() @@ -95,10 +95,10 @@ def __init__(self, hs: "HomeServer"): class PostBody(RequestBodyModel): auth: Optional[AuthenticationData] - devices: List[StrictStr] + devices: list[StrictStr] @interactive_auth_handler - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) try: @@ -150,7 +150,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, device_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) device = await self.device_handler.get_device( requester.user.to_string(), device_id @@ -177,7 +177,7 @@ class DeleteBody(RequestBodyModel): @interactive_auth_handler async def on_DELETE( self, request: SynapseRequest, device_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) try: @@ -221,7 +221,7 @@ class PutBody(RequestBodyModel): async def on_PUT( self, request: SynapseRequest, device_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) body = parse_and_validate_json_object_from_request(request, self.PutBody) @@ -302,7 +302,7 @@ def __init__(self, hs: "HomeServer"): handler = hs.get_device_handler() self.device_handler = handler - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) dehydrated_device = await self.device_handler.get_dehydrated_device( requester.user.to_string() @@ -318,7 +318,7 @@ class PutBody(RequestBodyModel): device_data: DehydratedDeviceDataModel initial_device_display_name: Optional[StrictStr] - async def on_PUT(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_PUT(self, request: SynapseRequest) -> tuple[int, JsonDict]: submission = parse_and_validate_json_object_from_request(request, self.PutBody) requester = await self.auth.get_user_by_req(request) @@ -364,7 +364,7 @@ def __init__(self, hs: "HomeServer"): class PostBody(RequestBodyModel): device_id: StrictStr - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) submission = parse_and_validate_json_object_from_request(request, self.PostBody) @@ -395,7 +395,7 @@ class PostBody(RequestBodyModel): async def on_POST( self, request: SynapseRequest, device_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) next_batch = parse_and_validate_json_object_from_request( @@ -501,7 +501,7 @@ def __init__(self, hs: "HomeServer"): self.e2e_keys_handler = 
hs.get_e2e_keys_handler() self.device_handler = handler - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) dehydrated_device = await self.device_handler.get_dehydrated_device( @@ -515,7 +515,7 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: else: raise errors.NotFoundError("No dehydrated device available") - async def on_DELETE(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_DELETE(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) dehydrated_device = await self.device_handler.get_dehydrated_device( @@ -543,7 +543,7 @@ class PutBody(RequestBodyModel): class Config: extra = Extra.allow - async def on_PUT(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_PUT(self, request: SynapseRequest) -> tuple[int, JsonDict]: submission = parse_and_validate_json_object_from_request(request, self.PutBody) requester = await self.auth.get_user_by_req(request) user_id = requester.user.to_string() diff --git a/synapse/rest/client/directory.py b/synapse/rest/client/directory.py index 479f489623b..eccada67be9 100644 --- a/synapse/rest/client/directory.py +++ b/synapse/rest/client/directory.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, List, Literal, Optional, Tuple +from typing import TYPE_CHECKING, Literal, Optional from twisted.web.server import Request @@ -59,7 +59,7 @@ def __init__(self, hs: "HomeServer"): self.directory_handler = hs.get_directory_handler() self.auth = hs.get_auth() - async def on_GET(self, request: Request, room_alias: str) -> Tuple[int, JsonDict]: + async def on_GET(self, request: Request, room_alias: str) -> tuple[int, JsonDict]: if not RoomAlias.is_valid(room_alias): raise SynapseError(400, "Room alias invalid", errcode=Codes.INVALID_PARAM) room_alias_obj = RoomAlias.from_string(room_alias) @@ -72,11 +72,11 @@ class PutBody(RequestBodyModel): # TODO: get Pydantic to validate that this is a valid room id? 
room_id: StrictStr # `servers` is unspecced - servers: Optional[List[StrictStr]] = None + servers: Optional[list[StrictStr]] = None async def on_PUT( self, request: SynapseRequest, room_alias: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: if not RoomAlias.is_valid(room_alias): raise SynapseError(400, "Room alias invalid", errcode=Codes.INVALID_PARAM) room_alias_obj = RoomAlias.from_string(room_alias) @@ -103,7 +103,7 @@ async def on_PUT( async def on_DELETE( self, request: SynapseRequest, room_alias: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: if not RoomAlias.is_valid(room_alias): raise SynapseError(400, "Room alias invalid", errcode=Codes.INVALID_PARAM) room_alias_obj = RoomAlias.from_string(room_alias) @@ -141,7 +141,7 @@ def __init__(self, hs: "HomeServer"): self.directory_handler = hs.get_directory_handler() self.auth = hs.get_auth() - async def on_GET(self, request: Request, room_id: str) -> Tuple[int, JsonDict]: + async def on_GET(self, request: Request, room_id: str) -> tuple[int, JsonDict]: room = await self.store.get_room(room_id) if room is None: raise NotFoundError("Unknown room") @@ -153,7 +153,7 @@ class PutBody(RequestBodyModel): async def on_PUT( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) content = parse_and_validate_json_object_from_request(request, self.PutBody) @@ -181,13 +181,13 @@ class PutBody(RequestBodyModel): async def on_PUT( self, request: SynapseRequest, network_id: str, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: content = parse_and_validate_json_object_from_request(request, self.PutBody) return await self._edit(request, network_id, room_id, content.visibility) async def on_DELETE( self, request: SynapseRequest, network_id: str, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: return await self._edit(request, network_id, room_id, "private") async def _edit( @@ -196,7 +196,7 @@ async def _edit( network_id: str, room_id: str, visibility: Literal["public", "private"], - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) if not requester.app_service: raise AuthError( diff --git a/synapse/rest/client/events.py b/synapse/rest/client/events.py index ad23cc76ce0..082bacade6f 100644 --- a/synapse/rest/client/events.py +++ b/synapse/rest/client/events.py @@ -22,7 +22,7 @@ """This module contains REST servlets to do with event streaming, /events.""" import logging -from typing import TYPE_CHECKING, Dict, List, Tuple, Union +from typing import TYPE_CHECKING, Union from synapse.api.errors import SynapseError from synapse.events.utils import SerializeEventConfig @@ -51,9 +51,9 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.store = hs.get_datastores().main - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) - args: Dict[bytes, List[bytes]] = request.args # type: ignore + args: dict[bytes, list[bytes]] = request.args # type: ignore if requester.is_guest: if b"room_id" not in args: raise SynapseError(400, "Guest users must specify room_id param") @@ -96,7 +96,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, event_id: str - ) -> Tuple[int, Union[str, JsonDict]]: + ) -> tuple[int, Union[str, 
JsonDict]]: requester = await self.auth.get_user_by_req(request) event = await self.event_handler.get_event(requester.user, None, event_id) diff --git a/synapse/rest/client/filter.py b/synapse/rest/client/filter.py index f1e881975f5..cfe82e14735 100644 --- a/synapse/rest/client/filter.py +++ b/synapse/rest/client/filter.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.errors import AuthError, NotFoundError, StoreError, SynapseError from synapse.http.server import HttpServer @@ -48,7 +48,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, user_id: str, filter_id: str - ) -> Tuple[int, JsonMapping]: + ) -> tuple[int, JsonMapping]: target_user = UserID.from_string(user_id) requester = await self.auth.get_user_by_req(request) @@ -87,7 +87,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: target_user = UserID.from_string(user_id) requester = await self.auth.get_user_by_req(request) diff --git a/synapse/rest/client/initial_sync.py b/synapse/rest/client/initial_sync.py index a2c50f5d589..c20e007c5bd 100644 --- a/synapse/rest/client/initial_sync.py +++ b/synapse/rest/client/initial_sync.py @@ -19,7 +19,7 @@ # # -from typing import TYPE_CHECKING, Dict, List, Tuple +from typing import TYPE_CHECKING from synapse.http.server import HttpServer from synapse.http.servlet import RestServlet, parse_boolean @@ -43,9 +43,9 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.store = hs.get_datastores().main - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) - args: Dict[bytes, List[bytes]] = request.args # type: ignore + args: dict[bytes, list[bytes]] = request.args # type: ignore as_client_event = b"raw" not in args pagination_config = await PaginationConfig.from_request( self.store, request, default_limit=10 diff --git a/synapse/rest/client/keys.py b/synapse/rest/client/keys.py index 55922b97d42..ee8b5c8b5f1 100644 --- a/synapse/rest/client/keys.py +++ b/synapse/rest/client/keys.py @@ -24,7 +24,7 @@ import re from collections import Counter from http import HTTPStatus -from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, Mapping, Optional, Union from typing_extensions import Self @@ -129,7 +129,7 @@ class KeyUploadRequestBody(RequestBodyModel): """ class DeviceKeys(RequestBodyModel): - algorithms: List[StrictStr] + algorithms: list[StrictStr] """The encryption algorithms supported by this device.""" device_id: StrictStr @@ -225,7 +225,7 @@ def validate_one_time_keys(cls: Self, v: Any) -> Any: async def on_POST( self, request: SynapseRequest, device_id: Optional[str] - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) user_id = requester.user.to_string() @@ -343,7 +343,7 @@ def __init__(self, hs: "HomeServer"): self.e2e_keys_handler = hs.get_e2e_keys_handler() @cancellable - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) user_id = requester.user.to_string() device_id = requester.device_id @@ 
-388,7 +388,7 @@ def __init__(self, hs: "HomeServer"): self.store = hs.get_datastores().main @cancellable - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) from_token_string = parse_string(request, "from", required=True) @@ -442,13 +442,13 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.e2e_keys_handler = hs.get_e2e_keys_handler() - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) timeout = parse_integer(request, "timeout", 10 * 1000) body = parse_json_object_from_request(request) # Generate a count for each algorithm, which is hard-coded to 1. - query: Dict[str, Dict[str, Dict[str, int]]] = {} + query: dict[str, dict[str, dict[str, int]]] = {} for user_id, one_time_keys in body.get("one_time_keys", {}).items(): for device_id, algorithm in one_time_keys.items(): query.setdefault(user_id, {})[device_id] = {algorithm: 1} @@ -490,13 +490,13 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.e2e_keys_handler = hs.get_e2e_keys_handler() - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) timeout = parse_integer(request, "timeout", 10 * 1000) body = parse_json_object_from_request(request) # Generate a count for each algorithm. - query: Dict[str, Dict[str, Dict[str, int]]] = {} + query: dict[str, dict[str, dict[str, int]]] = {} for user_id, one_time_keys in body.get("one_time_keys", {}).items(): for device_id, algorithms in one_time_keys.items(): query.setdefault(user_id, {})[device_id] = Counter(algorithms) @@ -526,7 +526,7 @@ def __init__(self, hs: "HomeServer"): self.auth_handler = hs.get_auth_handler() @interactive_auth_handler - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) user_id = requester.user.to_string() body = parse_json_object_from_request(request) @@ -663,7 +663,7 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.e2e_keys_handler = hs.get_e2e_keys_handler() - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) user_id = requester.user.to_string() body = parse_json_object_from_request(request) diff --git a/synapse/rest/client/knock.py b/synapse/rest/client/knock.py index d7a17e1b352..5e96079b663 100644 --- a/synapse/rest/client/knock.py +++ b/synapse/rest/client/knock.py @@ -20,7 +20,7 @@ # # import logging -from typing import TYPE_CHECKING, Dict, List, Tuple +from typing import TYPE_CHECKING from synapse.api.constants import Membership from synapse.api.errors import SynapseError @@ -58,7 +58,7 @@ async def on_POST( self, request: SynapseRequest, room_identifier: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) content = parse_json_object_from_request(request) @@ -70,7 +70,7 @@ async def on_POST( room_id = room_identifier # 
twisted.web.server.Request.args is incorrectly defined as Optional[Any]
-        args: Dict[bytes, List[bytes]] = request.args  # type: ignore
+        args: dict[bytes, list[bytes]] = request.args  # type: ignore
         # Prefer via over server_name (deprecated with MSC4156)
         remote_room_hosts = parse_strings_from_args(args, "via", required=False)
         if remote_room_hosts is None:
diff --git a/synapse/rest/client/login.py b/synapse/rest/client/login.py
index 921232a3ea4..bba6944982b 100644
--- a/synapse/rest/client/login.py
+++ b/synapse/rest/client/login.py
@@ -26,10 +26,7 @@
     Any,
     Awaitable,
     Callable,
-    Dict,
-    List,
     Optional,
-    Tuple,
     TypedDict,
     Union,
 )
@@ -75,7 +72,7 @@ class LoginResponse(TypedDict, total=False):
     expires_in_ms: Optional[int]
     refresh_token: Optional[str]
     device_id: Optional[str]
-    well_known: Optional[Dict[str, Any]]
+    well_known: Optional[dict[str, Any]]
 
 
 class LoginRestServlet(RestServlet):
@@ -142,8 +139,8 @@ def __init__(self, hs: "HomeServer"):
         # counters are initialised for the auth_provider_ids.
         _load_sso_handlers(hs)
 
-    def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
-        flows: List[JsonDict] = []
+    def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]:
+        flows: list[JsonDict] = []
         if self.jwt_enabled:
             flows.append({"type": LoginRestServlet.JWT_TYPE})
 
@@ -178,7 +175,7 @@ def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
         # fall back to the fallback API if they don't understand one of the
         # login flow types returned.
         if support_login_token_flow:
-            tokenTypeFlow: Dict[str, Any] = {"type": LoginRestServlet.TOKEN_TYPE}
+            tokenTypeFlow: dict[str, Any] = {"type": LoginRestServlet.TOKEN_TYPE}
             # If the login token flow is enabled advertise the get_login_token flag.
             if self._get_login_token_enabled:
                 tokenTypeFlow["get_login_token"] = True
@@ -190,7 +187,7 @@ def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
 
         return 200, {"flows": flows}
 
-    async def on_POST(self, request: SynapseRequest) -> Tuple[int, LoginResponse]:
+    async def on_POST(self, request: SynapseRequest) -> tuple[int, LoginResponse]:
         login_submission = parse_json_object_from_request(request)
 
         # Check to see if the client requested a refresh token.
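(Editorial aside, not part of the patch: a minimal runnable sketch of the pattern these hunks apply, using hypothetical names. On Python 3.9+ the builtin collections are subscriptable in annotations, while Optional, Union, Callable and friends must still be imported from typing until the PEP 604/612 syntax arrives in later versions, which is why those imports survive the cleanup above.)

    # Hypothetical names, illustrating the rewrite; runnable on Python 3.9+.
    from typing import Any, Optional, TypedDict

    class ExampleLoginResponse(TypedDict, total=False):
        user_id: str
        well_known: Optional[dict[str, Any]]  # was Optional[Dict[str, Any]]

    def example_flows() -> tuple[int, dict[str, Any]]:  # was Tuple[int, JsonDict]
        flows: list[dict[str, Any]] = []  # was List[JsonDict]
        flows.append({"type": "m.login.password"})
        return 200, {"flows": flows}

    assert example_flows() == (200, {"flows": [{"type": "m.login.password"}]})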
@@ -602,7 +599,7 @@ def __init__(self, hs: "HomeServer"): ) self.refresh_token_lifetime = hs.config.registration.refresh_token_lifetime - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: refresh_submission = parse_json_object_from_request(request) assert_params_in_dict(refresh_submission, ["refresh_token"]) @@ -626,7 +623,7 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: token, access_valid_until_ms, refresh_valid_until_ms ) - response: Dict[str, Union[str, int]] = { + response: dict[str, Union[str, int]] = { "access_token": access_token, "refresh_token": refresh_token, } @@ -684,7 +681,7 @@ async def on_GET( finish_request(request) return - args: Dict[bytes, List[bytes]] = request.args # type: ignore + args: dict[bytes, list[bytes]] = request.args # type: ignore client_redirect_url = parse_bytes_from_args(args, "redirectUrl", required=True) sso_url = await self._sso_handler.handle_redirect_request( request, diff --git a/synapse/rest/client/login_token_request.py b/synapse/rest/client/login_token_request.py index a053db8e551..f455e9c0b7f 100644 --- a/synapse/rest/client/login_token_request.py +++ b/synapse/rest/client/login_token_request.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.ratelimiting import Ratelimiter from synapse.config.ratelimiting import RatelimitSettings @@ -89,7 +89,7 @@ def __init__(self, hs: "HomeServer"): ) @interactive_auth_handler - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) body = parse_json_object_from_request(request) diff --git a/synapse/rest/client/logout.py b/synapse/rest/client/logout.py index 39c62b9e267..d804552a4a7 100644 --- a/synapse/rest/client/logout.py +++ b/synapse/rest/client/logout.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.http.server import HttpServer from synapse.http.servlet import RestServlet @@ -43,7 +43,7 @@ def __init__(self, hs: "HomeServer"): self._auth_handler = hs.get_auth_handler() self._device_handler = hs.get_device_handler() - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req( request, allow_expired=True, allow_locked=True ) @@ -70,7 +70,7 @@ def __init__(self, hs: "HomeServer"): self._auth_handler = hs.get_auth_handler() self._device_handler = hs.get_device_handler() - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req( request, allow_expired=True, allow_locked=True ) diff --git a/synapse/rest/client/matrixrtc.py b/synapse/rest/client/matrixrtc.py index afe4d4fa83e..22f8498f2fa 100644 --- a/synapse/rest/client/matrixrtc.py +++ b/synapse/rest/client/matrixrtc.py @@ -15,7 +15,7 @@ # # -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.http.server import HttpServer from synapse.http.servlet import RestServlet @@ -37,7 +37,7 @@ def __init__(self, hs: "HomeServer"): self._auth = hs.get_auth() self._transports = hs.config.matrix_rtc.transports - async def on_GET(self, 
request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: # Require authentication for this endpoint. await self._auth.get_user_by_req(request) diff --git a/synapse/rest/client/mutual_rooms.py b/synapse/rest/client/mutual_rooms.py index abb1fab0a34..7d0570d0cb9 100644 --- a/synapse/rest/client/mutual_rooms.py +++ b/synapse/rest/client/mutual_rooms.py @@ -20,7 +20,7 @@ # import logging from http import HTTPStatus -from typing import TYPE_CHECKING, Dict, List, Tuple +from typing import TYPE_CHECKING from synapse.api.errors import Codes, SynapseError from synapse.http.server import HttpServer @@ -51,9 +51,9 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.store = hs.get_datastores().main - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: # twisted.web.server.Request.args is incorrectly defined as Optional[Any] - args: Dict[bytes, List[bytes]] = request.args # type: ignore + args: dict[bytes, list[bytes]] = request.args # type: ignore user_ids = parse_strings_from_args(args, "user_id", required=True) diff --git a/synapse/rest/client/notifications.py b/synapse/rest/client/notifications.py index 168ce50d3ff..2420e9fffbe 100644 --- a/synapse/rest/client/notifications.py +++ b/synapse/rest/client/notifications.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.constants import ReceiptTypes from synapse.events.utils import ( @@ -53,7 +53,7 @@ def __init__(self, hs: "HomeServer"): self.clock = hs.get_clock() self._event_serializer = hs.get_event_client_serializer() - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) user_id = requester.user.to_string() diff --git a/synapse/rest/client/openid.py b/synapse/rest/client/openid.py index a2c2faa1996..e624a48ce75 100644 --- a/synapse/rest/client/openid.py +++ b/synapse/rest/client/openid.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.errors import AuthError from synapse.http.server import HttpServer @@ -80,7 +80,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) if user_id != requester.user.to_string(): raise AuthError(403, "Cannot request tokens for other users.") diff --git a/synapse/rest/client/password_policy.py b/synapse/rest/client/password_policy.py index 7ec6dd34437..314c409fc2c 100644 --- a/synapse/rest/client/password_policy.py +++ b/synapse/rest/client/password_policy.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from twisted.web.server import Request @@ -46,7 +46,7 @@ def __init__(self, hs: "HomeServer"): self.policy = hs.config.auth.password_policy self.enabled = hs.config.auth.password_policy_enabled - def on_GET(self, request: Request) -> Tuple[int, JsonDict]: + def on_GET(self, request: Request) -> tuple[int, JsonDict]: if not self.enabled or not self.policy: return 200, {} diff --git a/synapse/rest/client/presence.py b/synapse/rest/client/presence.py index 104d54cd890..de3ffdaa0be 100644 --- 
a/synapse/rest/client/presence.py +++ b/synapse/rest/client/presence.py @@ -22,7 +22,7 @@ """This module contains REST servlets to do with presence: /presence/""" import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.errors import AuthError, Codes, LimitExceededError, SynapseError from synapse.api.ratelimiting import Ratelimiter @@ -60,7 +60,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) user = UserID.from_string(user_id) @@ -84,7 +84,7 @@ async def on_GET( async def on_PUT( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) user = UserID.from_string(user_id) diff --git a/synapse/rest/client/profile.py b/synapse/rest/client/profile.py index 8bc532c8117..7f3128cb617 100644 --- a/synapse/rest/client/profile.py +++ b/synapse/rest/client/profile.py @@ -23,7 +23,7 @@ import re from http import HTTPStatus -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.constants import ProfileFields from synapse.api.errors import Codes, SynapseError @@ -69,7 +69,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester_user = None if self.hs.config.server.require_auth_for_profile_requests: @@ -118,7 +118,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, user_id: str, field_name: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester_user = None if self.hs.config.server.require_auth_for_profile_requests: @@ -156,7 +156,7 @@ async def on_GET( async def on_PUT( self, request: SynapseRequest, user_id: str, field_name: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: if not UserID.is_valid(user_id): raise SynapseError( HTTPStatus.BAD_REQUEST, "Invalid user id", Codes.INVALID_PARAM @@ -221,7 +221,7 @@ async def on_PUT( async def on_DELETE( self, request: SynapseRequest, user_id: str, field_name: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: if not UserID.is_valid(user_id): raise SynapseError( HTTPStatus.BAD_REQUEST, "Invalid user id", Codes.INVALID_PARAM diff --git a/synapse/rest/client/push_rule.py b/synapse/rest/client/push_rule.py index c1939a9b577..0a9b83af953 100644 --- a/synapse/rest/client/push_rule.py +++ b/synapse/rest/client/push_rule.py @@ -20,7 +20,7 @@ # from http import HTTPStatus -from typing import TYPE_CHECKING, List, Tuple, Union +from typing import TYPE_CHECKING, Union from synapse.api.errors import ( Codes, @@ -67,7 +67,7 @@ def __init__(self, hs: "HomeServer"): self._push_rules_handler = hs.get_push_rules_handler() self._push_rule_linearizer = Linearizer(name="push_rules", clock=hs.get_clock()) - async def on_PUT(self, request: SynapseRequest, path: str) -> Tuple[int, JsonDict]: + async def on_PUT(self, request: SynapseRequest, path: str) -> tuple[int, JsonDict]: if not self._is_push_worker: raise Exception("Cannot handle PUT /push_rules on worker") @@ -79,7 +79,7 @@ async def on_PUT(self, request: SynapseRequest, path: str) -> Tuple[int, JsonDic async def handle_put( self, request: SynapseRequest, path: str, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: spec = _rule_spec_from_path(path.split("/")) try: 
priority_class = _priority_class_from_spec(spec) @@ -140,7 +140,7 @@ async def handle_put( async def on_DELETE( self, request: SynapseRequest, path: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: if not self._is_push_worker: raise Exception("Cannot handle DELETE /push_rules on worker") @@ -155,7 +155,7 @@ async def handle_delete( request: SynapseRequest, path: str, user_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: spec = _rule_spec_from_path(path.split("/")) namespaced_rule_id = f"global/{spec.template}/{spec.rule_id}" @@ -170,7 +170,7 @@ async def handle_delete( else: raise - async def on_GET(self, request: SynapseRequest, path: str) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest, path: str) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) requester.user.to_string() @@ -196,7 +196,7 @@ async def on_GET(self, request: SynapseRequest, path: str) -> Tuple[int, JsonDic raise UnrecognizedRequestError() -def _rule_spec_from_path(path: List[str]) -> RuleSpec: +def _rule_spec_from_path(path: list[str]) -> RuleSpec: """Turn a sequence of path components into a rule spec Args: @@ -240,7 +240,7 @@ def _rule_spec_from_path(path: List[str]) -> RuleSpec: def _rule_tuple_from_request_object( rule_template: str, rule_id: str, req_obj: JsonDict -) -> Tuple[List[JsonDict], List[Union[str, JsonDict]]]: +) -> tuple[list[JsonDict], list[Union[str, JsonDict]]]: if rule_template == "postcontent": # postcontent is from MSC4306, which says that clients # cannot create their own postcontent rules right now. @@ -279,7 +279,7 @@ def _rule_tuple_from_request_object( return conditions, actions -def _filter_ruleset_with_path(ruleset: JsonDict, path: List[str]) -> JsonDict: +def _filter_ruleset_with_path(ruleset: JsonDict, path: list[str]) -> JsonDict: if path == []: raise UnrecognizedRequestError( PushRuleRestServlet.SLIGHTLY_PEDANTIC_TRAILING_SLASH_ERROR diff --git a/synapse/rest/client/pusher.py b/synapse/rest/client/pusher.py index a455f95a263..66d7fec07e1 100644 --- a/synapse/rest/client/pusher.py +++ b/synapse/rest/client/pusher.py @@ -21,7 +21,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.errors import Codes, SynapseError from synapse.http.server import HttpServer @@ -52,7 +52,7 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self._store = hs.get_datastores().main - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) user_id = requester.user.to_string() @@ -85,7 +85,7 @@ def __init__(self, hs: "HomeServer"): self.pusher_pool = self.hs.get_pusherpool() self._store = hs.get_datastores().main - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) user_id = requester.user.to_string() diff --git a/synapse/rest/client/read_marker.py b/synapse/rest/client/read_marker.py index d3d3c7c41de..874e7487bf6 100644 --- a/synapse/rest/client/read_marker.py +++ b/synapse/rest/client/read_marker.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.constants import ReceiptTypes from synapse.http.server import HttpServer @@ -56,7 +56,7 @@ def __init__(self, hs: "HomeServer"): async def 
on_POST( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) await self.presence_handler.bump_presence_active_time( diff --git a/synapse/rest/client/receipts.py b/synapse/rest/client/receipts.py index 4bf93f485c9..d3a43537bb3 100644 --- a/synapse/rest/client/receipts.py +++ b/synapse/rest/client/receipts.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.constants import MAIN_TIMELINE, ReceiptTypes from synapse.api.errors import Codes, SynapseError @@ -59,7 +59,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, room_id: str, receipt_type: str, event_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) if not RoomID.is_valid(room_id) or not event_id.startswith(EventID.SIGIL): diff --git a/synapse/rest/client/register.py b/synapse/rest/client/register.py index b42006e4cee..145dc6f5699 100644 --- a/synapse/rest/client/register.py +++ b/synapse/rest/client/register.py @@ -21,7 +21,7 @@ # import logging import random -from typing import TYPE_CHECKING, List, Optional, Tuple +from typing import TYPE_CHECKING, Optional from twisted.web.server import Request @@ -100,7 +100,7 @@ def __init__(self, hs: "HomeServer"): template_text=self.config.email.email_already_in_use_template_text, ) - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: if not self.hs.config.email.can_verify_email: logger.warning( "Email registration has been disabled due to lack of email config" @@ -183,7 +183,7 @@ def __init__(self, hs: "HomeServer"): self.server_name = hs.hostname self.identity_handler = hs.get_identity_handler() - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: body = parse_json_object_from_request(request) assert_params_in_dict( @@ -352,7 +352,7 @@ def __init__(self, hs: "HomeServer"): hs.config.registration.inhibit_user_in_use_error ) - async def on_GET(self, request: Request) -> Tuple[int, JsonDict]: + async def on_GET(self, request: Request) -> tuple[int, JsonDict]: if not self.hs.config.registration.enable_registration: raise SynapseError( 403, "Registration has been disabled", errcode=Codes.FORBIDDEN @@ -402,7 +402,7 @@ def __init__(self, hs: "HomeServer"): cfg=hs.config.ratelimiting.rc_registration_token_validity, ) - async def on_GET(self, request: Request) -> Tuple[int, JsonDict]: + async def on_GET(self, request: Request) -> tuple[int, JsonDict]: await self.ratelimiter.ratelimit(None, (request.getClientAddress().host,)) if not self.hs.config.registration.enable_registration: @@ -453,7 +453,7 @@ def __init__(self, hs: "HomeServer"): ) @interactive_auth_handler - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: body = parse_json_object_from_request(request) client_addr = request.getClientAddress().host @@ -853,7 +853,7 @@ async def _create_registration_details( async def _do_guest_registration( self, params: JsonDict, address: Optional[str] = None - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: if not self.hs.config.registration.allow_guest_access: raise SynapseError(403, "Guest access is disabled") user_id = 
await self.registration_handler.register_user( @@ -913,7 +913,7 @@ def __init__(self, hs: "HomeServer"): self.ratelimiter = hs.get_registration_ratelimiter() @interactive_auth_handler - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: body = parse_json_object_from_request(request) client_addr = request.getClientAddress().host @@ -970,7 +970,7 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: def _calculate_registration_flows( config: HomeServerConfig, auth_handler: AuthHandler -) -> List[List[str]]: +) -> list[list[str]]: """Get a suitable flows list for registration Args: diff --git a/synapse/rest/client/relations.py b/synapse/rest/client/relations.py index 49943cf0c34..d6c74118168 100644 --- a/synapse/rest/client/relations.py +++ b/synapse/rest/client/relations.py @@ -20,7 +20,7 @@ import logging import re -from typing import TYPE_CHECKING, Optional, Tuple +from typing import TYPE_CHECKING, Optional from synapse.api.constants import Direction from synapse.handlers.relations import ThreadsListInclude @@ -63,7 +63,7 @@ async def on_GET( parent_id: str, relation_type: Optional[str] = None, event_type: Optional[str] = None, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) pagination_config = await PaginationConfig.from_request( @@ -105,7 +105,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) limit = parse_integer(request, "limit", default=5) diff --git a/synapse/rest/client/reporting.py b/synapse/rest/client/reporting.py index 81faf38a7f8..f11f6b7b771 100644 --- a/synapse/rest/client/reporting.py +++ b/synapse/rest/client/reporting.py @@ -21,7 +21,7 @@ import logging from http import HTTPStatus -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse._pydantic_compat import StrictStr from synapse.api.errors import AuthError, Codes, NotFoundError, SynapseError @@ -57,7 +57,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, room_id: str, event_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) user_id = requester.user.to_string() @@ -138,7 +138,7 @@ class PostBody(RequestBodyModel): async def on_POST( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) user_id = requester.user.to_string() @@ -191,7 +191,7 @@ class PostBody(RequestBodyModel): async def on_POST( self, request: SynapseRequest, target_user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) body = parse_and_validate_json_object_from_request(request, self.PostBody) diff --git a/synapse/rest/client/room.py b/synapse/rest/client/room.py index 1084139df0f..38e315d0e7c 100644 --- a/synapse/rest/client/room.py +++ b/synapse/rest/client/room.py @@ -25,7 +25,7 @@ import re from enum import Enum from http import HTTPStatus -from typing import TYPE_CHECKING, Awaitable, Dict, List, Optional, Tuple +from typing import TYPE_CHECKING, Awaitable, Optional from urllib import parse as urlparse from prometheus_client.core import Histogram @@ -166,20 +166,20 @@ def 
register(self, http_server: HttpServer) -> None: async def on_PUT( self, request: SynapseRequest, txn_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) set_tag("txn_id", txn_id) return await self.txns.fetch_or_execute_request( request, requester, self._do, request, requester ) - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) return await self._do(request, requester) async def _do( self, request: SynapseRequest, requester: Requester - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: room_id, _, _ = await self._room_creation_handler.create_room( requester, self.get_room_config(request) ) @@ -244,18 +244,18 @@ def register(self, http_server: HttpServer) -> None: @cancellable def on_GET_no_state_key( self, request: SynapseRequest, room_id: str, event_type: str - ) -> Awaitable[Tuple[int, JsonDict]]: + ) -> Awaitable[tuple[int, JsonDict]]: return self.on_GET(request, room_id, event_type, "") def on_PUT_no_state_key( self, request: SynapseRequest, room_id: str, event_type: str - ) -> Awaitable[Tuple[int, JsonDict]]: + ) -> Awaitable[tuple[int, JsonDict]]: return self.on_PUT(request, room_id, event_type, "") @cancellable async def on_GET( self, request: SynapseRequest, room_id: str, event_type: str, state_key: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) format = parse_string( request, "format", default="content", allowed_values=["content", "event"] @@ -295,7 +295,7 @@ async def on_PUT( event_type: str, state_key: str, txn_id: Optional[str] = None, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) if txn_id: @@ -408,7 +408,7 @@ async def _do( room_id: str, event_type: str, txn_id: Optional[str], - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: content = parse_json_object_from_request(request) origin_server_ts = None @@ -460,13 +460,13 @@ async def on_POST( request: SynapseRequest, room_id: str, event_type: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) return await self._do(request, requester, room_id, event_type, None) async def on_PUT( self, request: SynapseRequest, room_id: str, event_type: str, txn_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) set_tag("txn_id", txn_id) @@ -545,11 +545,11 @@ async def _do( requester: Requester, room_identifier: str, txn_id: Optional[str], - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: content = parse_json_object_from_request(request, allow_empty_body=True) # twisted.web.server.Request.args is incorrectly defined as Optional[Any] - args: Dict[bytes, List[bytes]] = request.args # type: ignore + args: dict[bytes, list[bytes]] = request.args # type: ignore # Prefer via over server_name (deprecated with MSC4156) remote_room_hosts = parse_strings_from_args(args, "via", required=False) if remote_room_hosts is None: @@ -578,13 +578,13 @@ async def on_POST( self, request: SynapseRequest, room_identifier: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) return await self._do(request, requester, 
room_identifier, None) async def on_PUT( self, request: SynapseRequest, room_identifier: str, txn_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) set_tag("txn_id", txn_id) @@ -603,7 +603,7 @@ def __init__(self, hs: "HomeServer"): self.hs = hs self.auth = hs.get_auth() - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: server = parse_string(request, "server") try: @@ -652,7 +652,7 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: return 200, data - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: await self.auth.get_user_by_req(request, allow_guest=True) server = parse_string(request, "server") @@ -726,7 +726,7 @@ def __init__(self, hs: "HomeServer"): @cancellable async def on_GET( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: # TODO support Pagination stream API (limit/tokens) requester = await self.auth.get_user_by_req(request, allow_guest=True) handler = self.message_handler @@ -780,7 +780,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) users_with_profile = await self.message_handler.get_joined_members( @@ -809,7 +809,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: processing_start_time = self.clock.time_msec() # Fire off and hope that we get a result by the end. 
# @@ -870,7 +870,7 @@ def __init__(self, hs: "HomeServer"): @cancellable async def on_GET( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, List[JsonDict]]: + ) -> tuple[int, list[JsonDict]]: requester = await self.auth.get_user_by_req(request, allow_guest=True) # Get all the current state for this room events = await self.message_handler.get_state_events( @@ -893,7 +893,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) pagination_config = await PaginationConfig.from_request( self.store, request, default_limit=10 @@ -925,7 +925,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str, event_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) include_unredacted_content = self.msc2815_enabled and ( @@ -1013,7 +1013,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str, event_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) limit = parse_integer(request, "limit", default=10) @@ -1072,20 +1072,20 @@ def register(self, http_server: HttpServer) -> None: PATTERNS = "/rooms/(?P[^/]*)/forget" register_txn_path(self, PATTERNS, http_server) - async def _do(self, requester: Requester, room_id: str) -> Tuple[int, JsonDict]: + async def _do(self, requester: Requester, room_id: str) -> tuple[int, JsonDict]: await self.room_member_handler.forget(user=requester.user, room_id=room_id) return 200, {} async def on_POST( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=False) return await self._do(requester, room_id) async def on_PUT( self, request: SynapseRequest, room_id: str, txn_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=False) set_tag("txn_id", txn_id) @@ -1119,7 +1119,7 @@ async def _do( room_id: str, membership_action: str, txn_id: Optional[str], - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: if requester.is_guest and membership_action not in { Membership.JOIN, Membership.LEAVE, @@ -1196,13 +1196,13 @@ async def on_POST( request: SynapseRequest, room_id: str, membership_action: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) return await self._do(request, requester, room_id, membership_action, None) async def on_PUT( self, request: SynapseRequest, room_id: str, membership_action: str, txn_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) set_tag("txn_id", txn_id) @@ -1242,7 +1242,7 @@ async def _do( room_id: str, event_id: str, txn_id: Optional[str], - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: content = parse_json_object_from_request(request) requester_suspended = await self._store.get_user_suspended_status( @@ -1328,13 +1328,13 @@ async def on_POST( request: SynapseRequest, room_id: str, event_id: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) return await self._do(request, 
requester, room_id, event_id, None) async def on_PUT( self, request: SynapseRequest, room_id: str, event_id: str, txn_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) set_tag("txn_id", txn_id) @@ -1363,7 +1363,7 @@ def __init__(self, hs: "HomeServer"): async def on_PUT( self, request: SynapseRequest, room_id: str, user_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) if not self._is_typing_writer: @@ -1419,7 +1419,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) alias_list = await self.directory_handler.get_aliases_for_room( @@ -1438,7 +1438,7 @@ def __init__(self, hs: "HomeServer"): self.search_handler = hs.get_search_handler() self.auth = hs.get_auth() - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) content = parse_json_object_from_request(request) @@ -1458,7 +1458,7 @@ def __init__(self, hs: "HomeServer"): self.store = hs.get_datastores().main self.auth = hs.get_auth() - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) room_ids = await self.store.get_rooms_for_user(requester.user.to_string()) @@ -1533,7 +1533,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self._auth.get_user_by_req(request) await self._auth.check_user_in_room_or_world_readable(room_id, requester) @@ -1566,7 +1566,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self._auth.get_user_by_req(request, allow_guest=True) max_depth = parse_integer(request, "max_depth") @@ -1575,7 +1575,7 @@ async def on_GET( # twisted.web.server.Request.args is incorrectly defined as Optional[Any] remote_room_hosts = None if self.msc4235_enabled: - args: Dict[bytes, List[bytes]] = request.args # type: ignore + args: dict[bytes, list[bytes]] = request.args # type: ignore via_param = parse_strings_from_args( args, "org.matrix.msc4235.via", required=False ) @@ -1614,7 +1614,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, room_identifier: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: try: requester = await self._auth.get_user_by_req(request, allow_guest=True) requester_user_id: Optional[str] = requester.user.to_string() @@ -1623,7 +1623,7 @@ async def on_GET( requester_user_id = None # twisted.web.server.Request.args is incorrectly defined as Optional[Any] - args: Dict[bytes, List[bytes]] = request.args # type: ignore + args: dict[bytes, list[bytes]] = request.args # type: ignore remote_room_hosts = parse_strings_from_args(args, "via", required=False) room_id, remote_room_hosts = await self.resolve_room_id( room_identifier, diff --git a/synapse/rest/client/room_keys.py b/synapse/rest/client/room_keys.py index 7be08ecb609..b2de591dc57 100644 --- a/synapse/rest/client/room_keys.py +++ 
b/synapse/rest/client/room_keys.py
@@ -19,7 +19,7 @@
 #
 
 import logging
-from typing import TYPE_CHECKING, Optional, Tuple, cast
+from typing import TYPE_CHECKING, Optional, cast
 
 from synapse.api.errors import Codes, NotFoundError, SynapseError
 from synapse.http.server import HttpServer
@@ -52,7 +52,7 @@ def __init__(self, hs: "HomeServer"):
 
     async def on_PUT(
         self, request: SynapseRequest, room_id: Optional[str], session_id: Optional[str]
-    ) -> Tuple[int, JsonDict]:
+    ) -> tuple[int, JsonDict]:
         """
         Uploads one or more encrypted E2E room keys for backup purposes.
         room_id: the ID of the room the keys are for (optional)
@@ -147,7 +147,7 @@ async def on_PUT(
 
     async def on_GET(
         self, request: SynapseRequest, room_id: Optional[str], session_id: Optional[str]
-    ) -> Tuple[int, JsonDict]:
+    ) -> tuple[int, JsonDict]:
         """
         Retrieves one or more encrypted E2E room keys for backup purposes.
         Symmetric with the PUT version of the API.
@@ -234,7 +234,7 @@ async def on_GET(
 
     async def on_DELETE(
         self, request: SynapseRequest, room_id: Optional[str], session_id: Optional[str]
-    ) -> Tuple[int, JsonDict]:
+    ) -> tuple[int, JsonDict]:
         """
         Deletes one or more encrypted E2E room keys for a user
         for backup purposes.
@@ -267,7 +267,7 @@ def __init__(self, hs: "HomeServer"):
         self.auth = hs.get_auth()
         self.e2e_room_keys_handler = hs.get_e2e_room_keys_handler()
 
-    async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
+    async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]:
         """
         Retrieve the version information about the most current backup version
         (if any)
@@ -293,7 +293,7 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
             raise SynapseError(404, "No backup found", Codes.NOT_FOUND)
         return 200, info
 
-    async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
+    async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]:
         """
         Create a new backup version for this user's room_keys with the given
         info. The version is allocated by the server and returned to the user
@@ -345,7 +345,7 @@ def __init__(self, hs: "HomeServer"):
 
     async def on_GET(
         self, request: SynapseRequest, version: str
-    ) -> Tuple[int, JsonDict]:
+    ) -> tuple[int, JsonDict]:
         """
         Retrieve the version information about a given version of the user's
         room_keys backup.
@@ -374,7 +374,7 @@ async def on_GET(
 
     async def on_DELETE(
         self, request: SynapseRequest, version: str
-    ) -> Tuple[int, JsonDict]:
+    ) -> tuple[int, JsonDict]:
         """
         Delete the information about a given version of the user's
         room_keys backup. Doesn't delete the actual room data.
@@ -391,7 +391,7 @@ async def on_DELETE(
 
     async def on_PUT(
         self, request: SynapseRequest, version: str
-    ) -> Tuple[int, JsonDict]:
+    ) -> tuple[int, JsonDict]:
         """
         Update the information about a given version of the user's
         room_keys backup.
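(Editorial aside, not part of the patch: a self-contained sketch with assumed names, showing that the rewritten handler signatures are behaviourally identical at runtime and that typing.cast accepts the builtin generic forms as well.)

    import asyncio
    from typing import Any, Optional, cast

    async def example_on_GET(
        room_id: Optional[str], session_id: Optional[str]
    ) -> tuple[int, dict[str, Any]]:  # was Tuple[int, JsonDict]
        body: dict[str, Any] = {"rooms": {}}
        if room_id is not None:
            # cast() takes the PEP 585 forms just as it took typing.Dict
            rooms = cast(dict[str, Any], body["rooms"])
            rooms[room_id] = {"sessions": {}} if session_id is None else {session_id: {}}
        return 200, body

    # Prints: (200, {'rooms': {'!room:example.org': {'sessions': {}}}})
    print(asyncio.run(example_on_GET("!room:example.org", None)))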
diff --git a/synapse/rest/client/room_upgrade_rest_servlet.py b/synapse/rest/client/room_upgrade_rest_servlet.py index a9717781b02..1c87b86ecb5 100644 --- a/synapse/rest/client/room_upgrade_rest_servlet.py +++ b/synapse/rest/client/room_upgrade_rest_servlet.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.errors import Codes, ShadowBanError, SynapseError from synapse.api.room_versions import KNOWN_ROOM_VERSIONS @@ -73,7 +73,7 @@ def __init__(self, hs: "HomeServer"): async def on_POST( self, request: SynapseRequest, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self._auth.get_user_by_req(request) content = parse_json_object_from_request(request) diff --git a/synapse/rest/client/sendtodevice.py b/synapse/rest/client/sendtodevice.py index 2a675145609..597cb1fecc3 100644 --- a/synapse/rest/client/sendtodevice.py +++ b/synapse/rest/client/sendtodevice.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.http import servlet from synapse.http.server import HttpServer @@ -53,7 +53,7 @@ def __init__(self, hs: "HomeServer"): async def on_PUT( self, request: SynapseRequest, message_type: str, txn_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request, allow_guest=True) set_tag("txn_id", txn_id) return await self.txns.fetch_or_execute_request( @@ -70,7 +70,7 @@ async def _put( request: SynapseRequest, requester: Requester, message_type: str, - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: content = parse_json_object_from_request(request) assert_params_in_dict(content, ("messages",)) diff --git a/synapse/rest/client/sync.py b/synapse/rest/client/sync.py index 01868de60bf..9c03eecea40 100644 --- a/synapse/rest/client/sync.py +++ b/synapse/rest/client/sync.py @@ -21,7 +21,7 @@ import itertools import logging from collections import defaultdict -from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, Mapping, Optional, Union import attr @@ -138,7 +138,7 @@ def __init__(self, hs: "HomeServer"): cfg=hs.config.ratelimiting.rc_presence_per_user, ) - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: # This will always be set by the time Twisted calls us. 
assert request.args is not None @@ -380,7 +380,7 @@ async def encode_response( return response @staticmethod - def encode_presence(events: List[UserPresenceState], time_now: int) -> JsonDict: + def encode_presence(events: list[UserPresenceState], time_now: int) -> JsonDict: return { "events": [ { @@ -398,7 +398,7 @@ def encode_presence(events: List[UserPresenceState], time_now: int) -> JsonDict: async def encode_joined( self, sync_config: SyncConfig, - rooms: List[JoinedSyncResult], + rooms: list[JoinedSyncResult], time_now: int, serialize_options: SerializeEventConfig, ) -> JsonDict: @@ -428,7 +428,7 @@ async def encode_joined( @trace_with_opname("sync.encode_invited") async def encode_invited( self, - rooms: List[InvitedSyncResult], + rooms: list[InvitedSyncResult], time_now: int, serialize_options: SerializeEventConfig, ) -> JsonDict: @@ -464,10 +464,10 @@ async def encode_invited( @trace_with_opname("sync.encode_knocked") async def encode_knocked( self, - rooms: List[KnockedSyncResult], + rooms: list[KnockedSyncResult], time_now: int, serialize_options: SerializeEventConfig, - ) -> Dict[str, Dict[str, Any]]: + ) -> dict[str, dict[str, Any]]: """ Encode the rooms we've knocked on in a sync result. @@ -517,7 +517,7 @@ async def encode_knocked( async def encode_archived( self, sync_config: SyncConfig, - rooms: List[ArchivedSyncResult], + rooms: list[ArchivedSyncResult], time_now: int, serialize_options: SerializeEventConfig, ) -> JsonDict: @@ -768,7 +768,7 @@ def __init__(self, hs: "HomeServer"): self.sliding_sync_handler = hs.get_sliding_sync_handler() self.event_serializer = hs.get_event_client_serializer() - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req_experimental_feature( request, allow_guest=True, feature=ExperimentalFeature.MSC3575 ) @@ -900,7 +900,7 @@ def encode_operation( async def encode_rooms( self, requester: Requester, - rooms: Dict[str, SlidingSyncResult.RoomResult], + rooms: dict[str, SlidingSyncResult.RoomResult], ) -> JsonDict: time_now = self.clock.time_msec() @@ -909,7 +909,7 @@ async def encode_rooms( requester=requester, ) - serialized_rooms: Dict[str, JsonDict] = {} + serialized_rooms: dict[str, JsonDict] = {} for room_id, room_result in rooms.items(): serialized_rooms[room_id] = { "notification_count": room_result.notification_count, diff --git a/synapse/rest/client/tags.py b/synapse/rest/client/tags.py index fb59efb11fd..5699ff35c78 100644 --- a/synapse/rest/client/tags.py +++ b/synapse/rest/client/tags.py @@ -21,7 +21,7 @@ import logging from http import HTTPStatus -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.errors import AuthError, Codes, SynapseError from synapse.http.server import HttpServer @@ -56,7 +56,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, user_id: str, room_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) if user_id != requester.user.to_string(): raise AuthError(403, "Cannot get tags for other users.") @@ -85,7 +85,7 @@ def __init__(self, hs: "HomeServer"): async def on_PUT( self, request: SynapseRequest, user_id: str, room_id: str, tag: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) if user_id != requester.user.to_string(): raise AuthError(403, "Cannot add tags for other 
users.") @@ -114,7 +114,7 @@ async def on_PUT( async def on_DELETE( self, request: SynapseRequest, user_id: str, room_id: str, tag: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req(request) if user_id != requester.user.to_string(): raise AuthError(403, "Cannot add tags for other users.") diff --git a/synapse/rest/client/thirdparty.py b/synapse/rest/client/thirdparty.py index f972591ebfb..c17335eb48c 100644 --- a/synapse/rest/client/thirdparty.py +++ b/synapse/rest/client/thirdparty.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Dict, List, Tuple +from typing import TYPE_CHECKING from synapse.api.constants import ThirdPartyEntityKind from synapse.http.server import HttpServer @@ -45,7 +45,7 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.appservice_handler = hs.get_application_service_handler() - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: await self.auth.get_user_by_req(request, allow_guest=True) protocols = await self.appservice_handler.get_3pe_protocols() @@ -63,7 +63,7 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, protocol: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await self.auth.get_user_by_req(request, allow_guest=True) protocols = await self.appservice_handler.get_3pe_protocols( @@ -86,10 +86,10 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, protocol: str - ) -> Tuple[int, List[JsonDict]]: + ) -> tuple[int, list[JsonDict]]: await self.auth.get_user_by_req(request, allow_guest=True) - fields: Dict[bytes, List[bytes]] = request.args # type: ignore[assignment] + fields: dict[bytes, list[bytes]] = request.args # type: ignore[assignment] fields.pop(b"access_token", None) results = await self.appservice_handler.query_3pe( @@ -110,10 +110,10 @@ def __init__(self, hs: "HomeServer"): async def on_GET( self, request: SynapseRequest, protocol: str - ) -> Tuple[int, List[JsonDict]]: + ) -> tuple[int, list[JsonDict]]: await self.auth.get_user_by_req(request, allow_guest=True) - fields: Dict[bytes, List[bytes]] = request.args # type: ignore[assignment] + fields: dict[bytes, list[bytes]] = request.args # type: ignore[assignment] fields.pop(b"access_token", None) results = await self.appservice_handler.query_3pe( diff --git a/synapse/rest/client/thread_subscriptions.py b/synapse/rest/client/thread_subscriptions.py index 039aba1721c..f879c7589c1 100644 --- a/synapse/rest/client/thread_subscriptions.py +++ b/synapse/rest/client/thread_subscriptions.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import TYPE_CHECKING, Dict, Optional, Tuple +from typing import TYPE_CHECKING, Optional import attr from typing_extensions import TypeAlias @@ -59,7 +59,7 @@ class PutBody(RequestBodyModel): async def on_GET( self, request: SynapseRequest, room_id: str, thread_root_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: RoomID.from_string(room_id) if not thread_root_id.startswith("$"): raise SynapseError( @@ -80,7 +80,7 @@ async def on_GET( async def on_PUT( self, request: SynapseRequest, room_id: str, thread_root_id: str - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: RoomID.from_string(room_id) if not thread_root_id.startswith("$"): raise SynapseError( @@ -101,7 +101,7 @@ async def on_PUT( async def on_DELETE( self, request: SynapseRequest, room_id: str, 
thread_root_id: str
-    ) -> Tuple[int, JsonDict]:
+    ) -> tuple[int, JsonDict]:
         RoomID.from_string(room_id)
         if not thread_root_id.startswith("$"):
             raise SynapseError(
@@ -134,7 +134,7 @@ def __init__(self, hs: "HomeServer"):
         self.is_mine = hs.is_mine
         self.store = hs.get_datastores().main
 
-    async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
+    async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]:
         requester = await self.auth.get_user_by_req(request)
 
         limit = min(
@@ -204,8 +204,8 @@ async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
             )
         )
 
-        subscribed_threads: Dict[str, Dict[str, JsonDict]] = {}
-        unsubscribed_threads: Dict[str, Dict[str, JsonDict]] = {}
+        subscribed_threads: dict[str, dict[str, JsonDict]] = {}
+        unsubscribed_threads: dict[str, dict[str, JsonDict]] = {}
         for stream_id, room_id, thread_root_id, subscribed, automatic in subscriptions:
             if subscribed:
                 subscribed_threads.setdefault(room_id, {})[thread_root_id] = (
diff --git a/synapse/rest/client/transactions.py b/synapse/rest/client/transactions.py
index 571ba2fa623..4b3656a5971 100644
--- a/synapse/rest/client/transactions.py
+++ b/synapse/rest/client/transactions.py
@@ -23,7 +23,7 @@
 to ensure idempotency when performing PUTs using the REST API."""
 
 import logging
-from typing import TYPE_CHECKING, Awaitable, Callable, Dict, Hashable, Tuple
+from typing import TYPE_CHECKING, Awaitable, Callable, Hashable
 
 from typing_extensions import ParamSpec
 
@@ -51,8 +51,8 @@ def __init__(self, hs: "HomeServer"):
         self.hs = hs
         self.clock = self.hs.get_clock()
         # $txn_key: (ObservableDeferred<(res_code, res_json_body)>, timestamp)
-        self.transactions: Dict[
-            Hashable, Tuple[ObservableDeferred[Tuple[int, JsonDict]], int]
+        self.transactions: dict[
+            Hashable, tuple[ObservableDeferred[tuple[int, JsonDict]], int]
         ] = {}
         # Try to clean entries every 30 mins. This means entries will exist
         # for at *LEAST* 30 mins, and at *MOST* 60 mins.
@@ -103,10 +103,10 @@ def fetch_or_execute_request(
         self,
         request: IRequest,
         requester: Requester,
-        fn: Callable[P, Awaitable[Tuple[int, JsonDict]]],
+        fn: Callable[P, Awaitable[tuple[int, JsonDict]]],
         *args: P.args,
         **kwargs: P.kwargs,
-    ) -> "Deferred[Tuple[int, JsonDict]]":
+    ) -> "Deferred[tuple[int, JsonDict]]":
         """Fetches the response for this transaction, or executes the given function
         to produce a response for this transaction.
 
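(Editorial aside, not part of the patch: the nested generic on self.transactions is the deepest conversion in this series. Below is a simplified, runnable sketch of the same idempotency-cache shape, with assumed names and none of the deferred or timeout machinery of the real cache in this file.)

    import time
    from typing import Any, Hashable

    class ExampleTransactionCache:
        def __init__(self) -> None:
            # was Dict[Hashable, Tuple[..., int]]
            self.transactions: dict[Hashable, tuple[tuple[int, dict[str, Any]], int]] = {}

        def fetch_or_execute(
            self, txn_key: Hashable, result: tuple[int, dict[str, Any]]
        ) -> tuple[int, dict[str, Any]]:
            # Return the cached response for a retried PUT, else store this one.
            if txn_key in self.transactions:
                return self.transactions[txn_key][0]
            self.transactions[txn_key] = (result, int(time.time() * 1000))
            return result

    cache = ExampleTransactionCache()
    first = cache.fetch_or_execute(("user", "txn1"), (200, {"event_id": "$abc"}))
    retry = cache.fetch_or_execute(("user", "txn1"), (200, {"event_id": "$def"}))
    assert retry == first  # retries of the same txn_id get the original response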
diff --git a/synapse/rest/client/user_directory.py b/synapse/rest/client/user_directory.py index 94fcb11c0ca..0f561c2e615 100644 --- a/synapse/rest/client/user_directory.py +++ b/synapse/rest/client/user_directory.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.errors import SynapseError from synapse.http.server import HttpServer @@ -46,7 +46,7 @@ def __init__(self, hs: "HomeServer"): self.auth = hs.get_auth() self.user_directory_handler = hs.get_user_directory_handler() - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonMapping]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonMapping]: """Searches for users in directory Returns: diff --git a/synapse/rest/client/versions.py b/synapse/rest/client/versions.py index 20395430d70..dee2cdb637b 100644 --- a/synapse/rest/client/versions.py +++ b/synapse/rest/client/versions.py @@ -23,7 +23,7 @@ import logging import re -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.api.constants import RoomCreationPreset from synapse.http.server import HttpServer @@ -62,7 +62,7 @@ def __init__(self, hs: "HomeServer"): in self.config.room.encryption_enabled_by_default_for_room_presets ) - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: msc3881_enabled = self.config.experimental.msc3881_enabled msc3575_enabled = self.config.experimental.msc3575_enabled diff --git a/synapse/rest/client/voip.py b/synapse/rest/client/voip.py index fbed3a3bae9..581829a790f 100644 --- a/synapse/rest/client/voip.py +++ b/synapse/rest/client/voip.py @@ -22,7 +22,7 @@ import base64 import hashlib import hmac -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.http.server import HttpServer from synapse.http.servlet import RestServlet @@ -43,7 +43,7 @@ def __init__(self, hs: "HomeServer"): self.hs = hs self.auth = hs.get_auth() - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: requester = await self.auth.get_user_by_req( request, self.hs.config.voip.turn_allow_guests ) diff --git a/synapse/rest/consent/consent_resource.py b/synapse/rest/consent/consent_resource.py index 3961f82894a..a1d2364bedb 100644 --- a/synapse/rest/consent/consent_resource.py +++ b/synapse/rest/consent/consent_resource.py @@ -23,7 +23,7 @@ from hashlib import sha256 from http import HTTPStatus from os import path -from typing import TYPE_CHECKING, Any, Dict, List +from typing import TYPE_CHECKING, Any import jinja2 from jinja2 import TemplateNotFound @@ -121,7 +121,7 @@ async def _async_render_GET(self, request: Request) -> None: has_consented = False public_version = username == "" if not public_version: - args: Dict[bytes, List[bytes]] = request.args # type: ignore + args: dict[bytes, list[bytes]] = request.args # type: ignore userhmac_bytes = parse_bytes_from_args(args, "h", required=True) self._check_hash(username, userhmac_bytes) @@ -154,7 +154,7 @@ async def _async_render_GET(self, request: Request) -> None: async def _async_render_POST(self, request: Request) -> None: version = parse_string(request, "v", required=True) username = parse_string(request, "u", required=True) - args: Dict[bytes, List[bytes]] = request.args # type: ignore + args: dict[bytes, list[bytes]] = request.args # type: ignore userhmac = 
parse_bytes_from_args(args, "h", required=True) self._check_hash(username, userhmac) diff --git a/synapse/rest/key/v2/local_key_resource.py b/synapse/rest/key/v2/local_key_resource.py index 608da25a6ce..f783acdb83c 100644 --- a/synapse/rest/key/v2/local_key_resource.py +++ b/synapse/rest/key/v2/local_key_resource.py @@ -21,7 +21,7 @@ import logging import re -from typing import TYPE_CHECKING, Optional, Tuple +from typing import TYPE_CHECKING, Optional from signedjson.sign import sign_json from unpaddedbase64 import encode_base64 @@ -108,7 +108,7 @@ def response_json_object(self) -> JsonDict: def on_GET( self, request: Request, key_id: Optional[str] = None - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: # Matrix 1.6 drops support for passing the key_id, this is incompatible # with earlier versions and is allowed in order to support both. # A warning is issued to help determine when it is safe to drop this. diff --git a/synapse/rest/key/v2/remote_key_resource.py b/synapse/rest/key/v2/remote_key_resource.py index 94c679b9e75..51cb0774969 100644 --- a/synapse/rest/key/v2/remote_key_resource.py +++ b/synapse/rest/key/v2/remote_key_resource.py @@ -21,7 +21,7 @@ import logging import re -from typing import TYPE_CHECKING, Dict, Mapping, Optional, Set, Tuple +from typing import TYPE_CHECKING, Mapping, Optional from signedjson.sign import sign_json @@ -113,7 +113,7 @@ class RemoteKey(RestServlet): CATEGORY = "Federation requests" class PostBody(RequestBodyModel): - server_keys: Dict[StrictStr, Dict[StrictStr, _KeyQueryCriteriaDataModel]] + server_keys: dict[StrictStr, dict[StrictStr, _KeyQueryCriteriaDataModel]] def __init__(self, hs: "HomeServer"): self.fetcher = ServerKeyFetcher(hs) @@ -144,7 +144,7 @@ def register(self, http_server: HttpServer) -> None: async def on_GET( self, request: Request, server: str, key_id: Optional[str] = None - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: if server and key_id: # Matrix 1.6 drops support for passing the key_id, this is incompatible # with earlier versions and is allowed in order to support both. @@ -168,7 +168,7 @@ async def on_GET( return 200, await self.query_keys(query, query_remote_on_cache_miss=True) - async def on_POST(self, request: Request) -> Tuple[int, JsonDict]: + async def on_POST(self, request: Request) -> tuple[int, JsonDict]: content = parse_and_validate_json_object_from_request(request, self.PostBody) query = content.server_keys @@ -177,12 +177,12 @@ async def on_POST(self, request: Request) -> Tuple[int, JsonDict]: async def query_keys( self, - query: Dict[str, Dict[str, _KeyQueryCriteriaDataModel]], + query: dict[str, dict[str, _KeyQueryCriteriaDataModel]], query_remote_on_cache_miss: bool = False, ) -> JsonDict: logger.info("Handling query for keys %r", query) - server_keys: Dict[Tuple[str, str], Optional[FetchKeyResultForRemote]] = {} + server_keys: dict[tuple[str, str], Optional[FetchKeyResultForRemote]] = {} for server_name, key_ids in query.items(): if key_ids: results: Mapping[ @@ -199,13 +199,13 @@ async def query_keys( ((server_name, key_id), res) for key_id, res in results.items() ) - json_results: Set[bytes] = set() + json_results: set[bytes] = set() time_now_ms = self.clock.time_msec() # Map server_name->key_id->int. Note that the value of the int is unused. # XXX: why don't we just use a set? - cache_misses: Dict[str, Dict[str, int]] = {} + cache_misses: dict[str, dict[str, int]] = {} for (server_name, key_id), key_result in server_keys.items(): if not query[server_name]: # all keys were requested. 
Just return what we have without worrying diff --git a/synapse/rest/media/upload_resource.py b/synapse/rest/media/upload_resource.py index 74d82805824..484749dbe6e 100644 --- a/synapse/rest/media/upload_resource.py +++ b/synapse/rest/media/upload_resource.py @@ -22,7 +22,7 @@ import logging import re -from typing import IO, TYPE_CHECKING, Dict, List, Optional, Tuple +from typing import IO, TYPE_CHECKING, Optional from synapse.api.errors import Codes, SynapseError from synapse.http.server import respond_with_json @@ -56,7 +56,7 @@ def __init__(self, hs: "HomeServer", media_repo: "MediaRepository"): async def _get_file_metadata( self, request: SynapseRequest, user_id: str - ) -> Tuple[int, Optional[str], str]: + ) -> tuple[int, Optional[str], str]: raw_content_length = request.getHeader("Content-Length") if raw_content_length is None: raise SynapseError(msg="Request must specify a Content-Length", code=400) @@ -78,7 +78,7 @@ async def _get_file_metadata( code=413, errcode=Codes.TOO_LARGE, ) - args: Dict[bytes, List[bytes]] = request.args # type: ignore + args: dict[bytes, list[bytes]] = request.args # type: ignore upload_name_bytes = parse_bytes_from_args(args, "filename") if upload_name_bytes: try: diff --git a/synapse/rest/synapse/client/federation_whitelist.py b/synapse/rest/synapse/client/federation_whitelist.py index f59daf8428a..0382fef1e2b 100644 --- a/synapse/rest/synapse/client/federation_whitelist.py +++ b/synapse/rest/synapse/client/federation_whitelist.py @@ -13,7 +13,7 @@ # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.http.server import DirectServeJsonResource from synapse.http.site import SynapseRequest @@ -50,7 +50,7 @@ def __init__(self, hs: "HomeServer"): self._auth = hs.get_auth() - async def _async_render_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def _async_render_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: await self._auth.get_user_by_req(request) whitelist = [] diff --git a/synapse/rest/synapse/client/jwks.py b/synapse/rest/synapse/client/jwks.py index e9a7c24e3b5..15ff6f47c19 100644 --- a/synapse/rest/synapse/client/jwks.py +++ b/synapse/rest/synapse/client/jwks.py @@ -19,7 +19,7 @@ # # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from synapse.http.server import DirectServeJsonResource from synapse.http.site import SynapseRequest @@ -73,5 +73,5 @@ def __init__(self, hs: "HomeServer"): "keys": keys, } - async def _async_render_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def _async_render_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: return 200, self.res diff --git a/synapse/rest/synapse/client/password_reset.py b/synapse/rest/synapse/client/password_reset.py index 377578ef8a9..1ccdf23da8f 100644 --- a/synapse/rest/synapse/client/password_reset.py +++ b/synapse/rest/synapse/client/password_reset.py @@ -19,7 +19,7 @@ # # import logging -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from twisted.web.server import Request @@ -65,7 +65,7 @@ def __init__(self, hs: "HomeServer"): # This resource should only be mounted if email validation is enabled assert hs.config.email.can_verify_email - async def _async_render_GET(self, request: Request) -> Tuple[int, bytes]: + async def _async_render_GET(self, request: Request) -> tuple[int, bytes]: sid = parse_string(request, "sid", required=True) token = parse_string(request, "token", required=True) client_secret = 
parse_string(request, "client_secret", required=True) @@ -83,7 +83,7 @@ async def _async_render_GET(self, request: Request) -> Tuple[int, bytes]: self._confirmation_email_template.render(**template_vars).encode("utf-8"), ) - async def _async_render_POST(self, request: Request) -> Tuple[int, bytes]: + async def _async_render_POST(self, request: Request) -> tuple[int, bytes]: sid = parse_string(request, "sid", required=True) token = parse_string(request, "token", required=True) client_secret = parse_string(request, "client_secret", required=True) diff --git a/synapse/rest/synapse/client/pick_username.py b/synapse/rest/synapse/client/pick_username.py index 1727bb63b7b..867ea1866d9 100644 --- a/synapse/rest/synapse/client/pick_username.py +++ b/synapse/rest/synapse/client/pick_username.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Generator, List, Tuple +from typing import TYPE_CHECKING, Generator from twisted.web.resource import Resource from twisted.web.server import Request @@ -65,7 +65,7 @@ def __init__(self, hs: "HomeServer"): super().__init__(clock=hs.get_clock()) self._sso_handler = hs.get_sso_handler() - async def _async_render_GET(self, request: Request) -> Tuple[int, JsonDict]: + async def _async_render_GET(self, request: Request) -> tuple[int, JsonDict]: localpart = parse_string(request, "username", required=True) session_id = get_username_mapping_session_cookie_from_request(request) @@ -138,7 +138,7 @@ async def _async_render_POST(self, request: SynapseRequest) -> None: use_avatar = parse_boolean(request, "use_avatar", default=False) try: - emails_to_use: List[str] = [ + emails_to_use: list[str] = [ val.decode("utf-8") for val in request.args.get(b"use_email", []) ] except ValueError: diff --git a/synapse/rest/synapse/client/rendezvous.py b/synapse/rest/synapse/client/rendezvous.py index 5278c355721..24c10dee822 100644 --- a/synapse/rest/synapse/client/rendezvous.py +++ b/synapse/rest/synapse/client/rendezvous.py @@ -14,7 +14,7 @@ # import logging -from typing import TYPE_CHECKING, List +from typing import TYPE_CHECKING from synapse.api.errors import UnrecognizedRequestError from synapse.http.server import DirectServeJsonResource @@ -34,7 +34,7 @@ def __init__(self, hs: "HomeServer") -> None: self._handler = hs.get_rendezvous_handler() async def _async_render_GET(self, request: SynapseRequest) -> None: - postpath: List[bytes] = request.postpath # type: ignore + postpath: list[bytes] = request.postpath # type: ignore if len(postpath) != 1: raise UnrecognizedRequestError() session_id = postpath[0].decode("ascii") @@ -42,7 +42,7 @@ async def _async_render_GET(self, request: SynapseRequest) -> None: self._handler.handle_get(request, session_id) def _async_render_PUT(self, request: SynapseRequest) -> None: - postpath: List[bytes] = request.postpath # type: ignore + postpath: list[bytes] = request.postpath # type: ignore if len(postpath) != 1: raise UnrecognizedRequestError() session_id = postpath[0].decode("ascii") @@ -50,7 +50,7 @@ def _async_render_PUT(self, request: SynapseRequest) -> None: self._handler.handle_put(request, session_id) def _async_render_DELETE(self, request: SynapseRequest) -> None: - postpath: List[bytes] = request.postpath # type: ignore + postpath: list[bytes] = request.postpath # type: ignore if len(postpath) != 1: raise UnrecognizedRequestError() session_id = postpath[0].decode("ascii") diff --git a/synapse/rest/synapse/client/saml2/metadata_resource.py b/synapse/rest/synapse/client/saml2/metadata_resource.py index 
bcd5195108e..e7ed96174f8 100644 --- a/synapse/rest/synapse/client/saml2/metadata_resource.py +++ b/synapse/rest/synapse/client/saml2/metadata_resource.py @@ -20,11 +20,11 @@ from typing import TYPE_CHECKING -import saml2.metadata - from twisted.web.resource import Resource from twisted.web.server import Request +import saml2.metadata + if TYPE_CHECKING: from synapse.server import HomeServer diff --git a/synapse/rest/synapse/mas/devices.py b/synapse/rest/synapse/mas/devices.py index 6cc11535906..654fed8c038 100644 --- a/synapse/rest/synapse/mas/devices.py +++ b/synapse/rest/synapse/mas/devices.py @@ -15,7 +15,7 @@ import logging from http import HTTPStatus -from typing import TYPE_CHECKING, Optional, Tuple +from typing import TYPE_CHECKING, Optional from synapse._pydantic_compat import StrictStr from synapse.api.errors import NotFoundError @@ -56,7 +56,7 @@ class PostBody(RequestBodyModel): async def _async_render_POST( self, request: "SynapseRequest" - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: self.assert_request_is_from_mas(request) body = parse_and_validate_json_object_from_request(request, self.PostBody) @@ -97,7 +97,7 @@ class PostBody(RequestBodyModel): async def _async_render_POST( self, request: "SynapseRequest" - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: self.assert_request_is_from_mas(request) body = parse_and_validate_json_object_from_request(request, self.PostBody) @@ -138,7 +138,7 @@ class PostBody(RequestBodyModel): async def _async_render_POST( self, request: "SynapseRequest" - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: self.assert_request_is_from_mas(request) body = parse_and_validate_json_object_from_request(request, self.PostBody) @@ -180,7 +180,7 @@ class PostBody(RequestBodyModel): async def _async_render_POST( self, request: "SynapseRequest" - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: self.assert_request_is_from_mas(request) body = parse_and_validate_json_object_from_request(request, self.PostBody) diff --git a/synapse/rest/synapse/mas/users.py b/synapse/rest/synapse/mas/users.py index 09aa13bebbc..a8028872700 100644 --- a/synapse/rest/synapse/mas/users.py +++ b/synapse/rest/synapse/mas/users.py @@ -15,7 +15,7 @@ import logging from http import HTTPStatus -from typing import TYPE_CHECKING, Any, Optional, Tuple, TypedDict +from typing import TYPE_CHECKING, Any, Optional, TypedDict from synapse._pydantic_compat import StrictBool, StrictStr, root_validator from synapse.api.errors import NotFoundError, SynapseError @@ -58,7 +58,7 @@ class Response(TypedDict): async def _async_render_GET( self, request: "SynapseRequest" - ) -> Tuple[int, Response]: + ) -> tuple[int, Response]: self.assert_request_is_from_mas(request) localpart = parse_string(request, "localpart", required=True) @@ -128,7 +128,7 @@ def validate_exclusive(cls, values: Any) -> Any: async def _async_render_POST( self, request: "SynapseRequest" - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: self.assert_request_is_from_mas(request) body = parse_and_validate_json_object_from_request(request, self.PostBody) @@ -239,7 +239,7 @@ def __init__(self, hs: "HomeServer"): async def _async_render_GET( self, request: "SynapseRequest" - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: self.assert_request_is_from_mas(request) localpart = parse_string(request, "localpart") if localpart is None: @@ -272,7 +272,7 @@ class PostBody(RequestBodyModel): async def _async_render_POST( self, request: "SynapseRequest" - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, 
JsonDict]: self.assert_request_is_from_mas(request) body = parse_and_validate_json_object_from_request(request, self.PostBody) @@ -312,7 +312,7 @@ class PostBody(RequestBodyModel): async def _async_render_POST( self, request: "SynapseRequest" - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: self.assert_request_is_from_mas(request) body = parse_and_validate_json_object_from_request(request, self.PostBody) @@ -350,7 +350,7 @@ class PostBody(RequestBodyModel): async def _async_render_POST( self, request: "SynapseRequest" - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: self.assert_request_is_from_mas(request) body = parse_and_validate_json_object_from_request(request, self.PostBody) @@ -394,7 +394,7 @@ class PostBody(RequestBodyModel): async def _async_render_POST( self, request: "SynapseRequest" - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: self.assert_request_is_from_mas(request) body = parse_and_validate_json_object_from_request(request, self.PostBody) @@ -440,7 +440,7 @@ class PostBody(RequestBodyModel): async def _async_render_POST( self, request: "SynapseRequest" - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: self.assert_request_is_from_mas(request) body = parse_and_validate_json_object_from_request(request, self.PostBody) diff --git a/synapse/rest/well_known.py b/synapse/rest/well_known.py index ae8c6a8fc0d..00965cfb82f 100644 --- a/synapse/rest/well_known.py +++ b/synapse/rest/well_known.py @@ -18,7 +18,7 @@ # # import logging -from typing import TYPE_CHECKING, Optional, Tuple +from typing import TYPE_CHECKING, Optional from twisted.web.resource import Resource from twisted.web.server import Request @@ -97,7 +97,7 @@ def __init__(self, hs: "HomeServer"): super().__init__(clock=hs.get_clock()) self._well_known_builder = WellKnownBuilder(hs) - async def _async_render_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def _async_render_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: r = await self._well_known_builder.get_well_known() if not r: raise NotFoundError(".well-known not available") diff --git a/synapse/server.py b/synapse/server.py index 1316249dda1..4b4ed2928db 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -34,11 +34,7 @@ Any, Awaitable, Callable, - Dict, - List, Optional, - Tuple, - Type, TypeVar, cast, ) @@ -277,7 +273,7 @@ class ShutdownInfo: func: Callable[..., Any] trigger_id: _SystemEventID - kwargs: Dict[str, object] + kwargs: dict[str, object] class HomeServer(metaclass=abc.ABCMeta): @@ -312,7 +308,7 @@ class HomeServer(metaclass=abc.ABCMeta): @property @abc.abstractmethod - def DATASTORE_CLASS(self) -> Type["SQLBaseStore"]: + def DATASTORE_CLASS(self) -> type["SQLBaseStore"]: # This is overridden in derived application classes # (such as synapse.app.homeserver.SynapseHomeServer) and gives the class to be # instantiated during setup() for future return by get_datastores() @@ -340,8 +336,8 @@ def __init__( # the key we use to sign events and requests self.signing_key = config.key.signing_key[0] self.config = config - self._listening_services: List[Port] = [] - self._metrics_listeners: List[Tuple[WSGIServer, Thread]] = [] + self._listening_services: list[Port] = [] + self._metrics_listeners: list[tuple[WSGIServer, Thread]] = [] self.start_time: Optional[int] = None self._instance_id = random_string(5) @@ -351,15 +347,15 @@ def __init__( self.datastores: Optional[Databases] = None - self._module_web_resources: Dict[str, Resource] = {} + self._module_web_resources: dict[str, Resource] = {} 
self._module_web_resources_consumed = False # This attribute is set by the free function `refresh_certificate`. self.tls_server_context_factory: Optional[IOpenSSLContextFactory] = None self._is_shutdown = False - self._async_shutdown_handlers: List[ShutdownInfo] = [] - self._sync_shutdown_handlers: List[ShutdownInfo] = [] + self._async_shutdown_handlers: list[ShutdownInfo] = [] + self._sync_shutdown_handlers: list[ShutdownInfo] = [] self._background_processes: set[defer.Deferred[Optional[Any]]] = set() def run_as_background_process( @@ -1105,7 +1101,7 @@ def get_replication_data_handler(self) -> ReplicationDataHandler: return ReplicationDataHandler(self) @cache_in_self - def get_replication_streams(self) -> Dict[str, Stream]: + def get_replication_streams(self) -> dict[str, Stream]: return {stream.NAME: stream(self) for stream in STREAMS_MAP.values()} @cache_in_self diff --git a/synapse/server_notices/consent_server_notices.py b/synapse/server_notices/consent_server_notices.py index d937a3034e9..99b362f5ff9 100644 --- a/synapse/server_notices/consent_server_notices.py +++ b/synapse/server_notices/consent_server_notices.py @@ -18,7 +18,7 @@ # # import logging -from typing import TYPE_CHECKING, Any, Set +from typing import TYPE_CHECKING, Any from synapse.api.errors import SynapseError from synapse.api.urls import ConsentURIBuilder @@ -40,7 +40,7 @@ def __init__(self, hs: "HomeServer"): self._server_notices_manager = hs.get_server_notices_manager() self._store = hs.get_datastores().main - self._users_in_progress: Set[str] = set() + self._users_in_progress: set[str] = set() self._current_consent_version = hs.config.consent.user_consent_version self._server_notice_content = ( diff --git a/synapse/server_notices/resource_limits_server_notices.py b/synapse/server_notices/resource_limits_server_notices.py index e88e8c9b453..493b8cb62b7 100644 --- a/synapse/server_notices/resource_limits_server_notices.py +++ b/synapse/server_notices/resource_limits_server_notices.py @@ -18,7 +18,7 @@ # # import logging -from typing import TYPE_CHECKING, List, Tuple +from typing import TYPE_CHECKING from synapse.api.constants import ( EventTypes, @@ -127,7 +127,7 @@ async def maybe_send_server_notice_to_user(self, user_id: str) -> None: logger.error("Error sending resource limits server notice: %s", e) async def _remove_limit_block_notification( - self, user_id: str, ref_events: List[str] + self, user_id: str, ref_events: list[str] ) -> None: """Utility method to remove limit block notifications from the server notices room. 
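Worth noting for hunks like the `ref_events: list[str]` change above: PEP 585 makes the builtin collections subscriptable at runtime from Python 3.9, so these annotations need neither `typing` imports nor `from __future__ import annotations`. A small self-contained illustration (the function and its return shape are made up for the example):

    def blocked_rooms(ref_events: list[str]) -> tuple[bool, list[str]]:
        # mirrors the (currently blocked?, referenced event IDs) shape used above
        return (len(ref_events) > 0, ref_events)

    # list[str] is a real runtime object (types.GenericAlias), not just a comment:
    assert blocked_rooms.__annotations__["ref_events"] == list[str]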
@@ -170,7 +170,7 @@ async def _apply_limit_block_notification( user_id, content, EventTypes.Pinned, "" ) - async def _is_room_currently_blocked(self, room_id: str) -> Tuple[bool, List[str]]: + async def _is_room_currently_blocked(self, room_id: str) -> tuple[bool, list[str]]: """ Determines if the room is currently blocked @@ -198,7 +198,7 @@ async def _is_room_currently_blocked(self, room_id: str) -> Tuple[bool, List[str # The user has yet to join the server notices room pass - referenced_events: List[str] = [] + referenced_events: list[str] = [] if pinned_state_event is not None: referenced_events = list(pinned_state_event.content.get("pinned", [])) diff --git a/synapse/state/__init__.py b/synapse/state/__init__.py index 394dc72fa69..5b861f55342 100644 --- a/synapse/state/__init__.py +++ b/synapse/state/__init__.py @@ -27,14 +27,9 @@ Awaitable, Callable, DefaultDict, - Dict, - FrozenSet, - List, Mapping, Optional, Sequence, - Set, - Tuple, ) import attr @@ -246,7 +241,7 @@ async def compute_state_after_events( async def get_current_user_ids_in_room( self, room_id: str, latest_event_ids: StrCollection - ) -> Set[str]: + ) -> set[str]: """ Get the users IDs who are currently in a room. @@ -271,7 +266,7 @@ async def get_current_user_ids_in_room( async def get_hosts_in_room_at_events( self, room_id: str, event_ids: StrCollection - ) -> FrozenSet[str]: + ) -> frozenset[str]: """Get the hosts that were in a room at the given event ids Args: @@ -647,7 +642,7 @@ def __init__(self, hs: "HomeServer"): ) # dict of set of event_ids -> _StateCacheEntry. - self._state_cache: ExpiringCache[FrozenSet[int], _StateCacheEntry] = ( + self._state_cache: ExpiringCache[frozenset[int], _StateCacheEntry] = ( ExpiringCache( cache_name="state_cache", server_name=self.server_name, @@ -676,7 +671,7 @@ async def resolve_state_groups( room_id: str, room_version: str, state_groups_ids: Mapping[int, StateMap[str]], - event_map: Optional[Dict[str, EventBase]], + event_map: Optional[dict[str, EventBase]], state_res_store: "StateResolutionStore", ) -> _StateCacheEntry: """Resolves conflicts between a set of state groups @@ -776,7 +771,7 @@ async def resolve_events_with_store( room_id: str, room_version: str, state_sets: Sequence[StateMap[str]], - event_map: Optional[Dict[str, EventBase]], + event_map: Optional[dict[str, EventBase]], state_res_store: "StateResolutionStore", ) -> StateMap[str]: """ @@ -884,7 +879,7 @@ def _report_biggest( items = self._state_res_metrics.items() # log the N biggest rooms - biggest: List[Tuple[str, _StateResMetrics]] = heapq.nlargest( + biggest: list[tuple[str, _StateResMetrics]] = heapq.nlargest( n_to_log, items, key=lambda i: extract_key(i[1]) ) metrics_logger.debug( @@ -975,7 +970,7 @@ class StateResolutionStore: def get_events( self, event_ids: StrCollection, allow_rejected: bool = False - ) -> Awaitable[Dict[str, EventBase]]: + ) -> Awaitable[dict[str, EventBase]]: """Get events from the database Args: @@ -996,9 +991,9 @@ def get_events( def get_auth_chain_difference( self, room_id: str, - state_sets: List[Set[str]], - conflicted_state: Optional[Set[str]], - additional_backwards_reachable_conflicted_events: Optional[Set[str]], + state_sets: list[set[str]], + conflicted_state: Optional[set[str]], + additional_backwards_reachable_conflicted_events: Optional[set[str]], ) -> Awaitable[StateDifference]: """ "Given sets of state events figure out the auth chain difference (as per state res v2 algorithm). 
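The `_state_cache` hunk above keys an `ExpiringCache` on `frozenset[int]`. The likely reason: a collection of state-group IDs has no meaningful order, and a cache key must be hashable, which `frozenset` is and `set` is not. A minimal sketch with a plain dict standing in for the cache:

    from typing import Optional

    _state_cache: dict[frozenset[int], str] = {}

    def lookup(state_groups: set[int]) -> Optional[str]:
        # freeze the (unordered) group IDs into a hashable key
        return _state_cache.get(frozenset(state_groups))

    _state_cache[frozenset({1, 2, 3})] = "resolved-state"
    assert lookup({3, 2, 1}) == "resolved-state"  # order-insensitive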
diff --git a/synapse/state/v1.py b/synapse/state/v1.py index a2e9eb0a42a..a2193472645 100644 --- a/synapse/state/v1.py +++ b/synapse/state/v1.py @@ -23,13 +23,9 @@ from typing import ( Awaitable, Callable, - Dict, Iterable, - List, Optional, Sequence, - Set, - Tuple, ) from synapse import event_auth @@ -49,8 +45,8 @@ async def resolve_events_with_store( room_id: str, room_version: RoomVersion, state_sets: Sequence[StateMap[str]], - event_map: Optional[Dict[str, EventBase]], - state_map_factory: Callable[[StrCollection], Awaitable[Dict[str, EventBase]]], + event_map: Optional[dict[str, EventBase]], + state_map_factory: Callable[[StrCollection], Awaitable[dict[str, EventBase]]], ) -> StateMap[str]: """ Args: @@ -145,7 +141,7 @@ async def resolve_events_with_store( def _seperate( state_sets: Iterable[StateMap[str]], -) -> Tuple[MutableStateMap[str], MutableStateMap[Set[str]]]: +) -> tuple[MutableStateMap[str], MutableStateMap[set[str]]]: """Takes the state_sets and figures out which keys are conflicted and which aren't. i.e., which have multiple different event_ids associated with them in different state sets. @@ -166,7 +162,7 @@ def _seperate( """ state_set_iterator = iter(state_sets) unconflicted_state = dict(next(state_set_iterator)) - conflicted_state: MutableStateMap[Set[str]] = {} + conflicted_state: MutableStateMap[set[str]] = {} for state_set in state_set_iterator: for key, value in state_set.items(): @@ -196,8 +192,8 @@ def _seperate( def _create_auth_events_from_maps( room_version: RoomVersion, unconflicted_state: StateMap[str], - conflicted_state: StateMap[Set[str]], - state_map: Dict[str, EventBase], + conflicted_state: StateMap[set[str]], + state_map: dict[str, EventBase], ) -> StateMap[str]: """ @@ -228,9 +224,9 @@ def _create_auth_events_from_maps( def _resolve_with_state( room_version: RoomVersion, unconflicted_state_ids: MutableStateMap[str], - conflicted_state_ids: StateMap[Set[str]], + conflicted_state_ids: StateMap[set[str]], auth_event_ids: StateMap[str], - state_map: Dict[str, EventBase], + state_map: dict[str, EventBase], ) -> MutableStateMap[str]: conflicted_state = {} for key, event_ids in conflicted_state_ids.items(): @@ -263,7 +259,7 @@ def _resolve_with_state( def _resolve_state_events( room_version: RoomVersion, - conflicted_state: StateMap[List[EventBase]], + conflicted_state: StateMap[list[EventBase]], auth_events: MutableStateMap[EventBase], ) -> StateMap[EventBase]: """This is where we actually decide which of the conflicted state to @@ -312,7 +308,7 @@ def _resolve_state_events( def _resolve_auth_events( - room_version: RoomVersion, events: List[EventBase], auth_events: StateMap[EventBase] + room_version: RoomVersion, events: list[EventBase], auth_events: StateMap[EventBase] ) -> EventBase: reverse = list(reversed(_ordered_events(events))) @@ -347,7 +343,7 @@ def _resolve_auth_events( def _resolve_normal_events( - events: List[EventBase], auth_events: StateMap[EventBase] + events: list[EventBase], auth_events: StateMap[EventBase] ) -> EventBase: for event in _ordered_events(events): try: @@ -365,8 +361,8 @@ def _resolve_normal_events( return event -def _ordered_events(events: Iterable[EventBase]) -> List[EventBase]: - def key_func(e: EventBase) -> Tuple[int, str]: +def _ordered_events(events: Iterable[EventBase]) -> list[EventBase]: + def key_func(e: EventBase) -> tuple[int, str]: # we have to use utf-8 rather than ascii here because it turns out we allow # people to send us events with non-ascii event IDs :/ return -int(e.depth), 
hashlib.sha1(e.event_id.encode("utf-8")).hexdigest() diff --git a/synapse/state/v2.py b/synapse/state/v2.py index 8bf67064342..683f0c1dcc2 100644 --- a/synapse/state/v2.py +++ b/synapse/state/v2.py @@ -25,16 +25,12 @@ Any, Awaitable, Callable, - Dict, Generator, Iterable, - List, Literal, Optional, Protocol, Sequence, - Set, - Tuple, overload, ) @@ -61,13 +57,13 @@ class StateResolutionStore(Protocol): # TestStateResolutionStore in tests. def get_events( self, event_ids: StrCollection, allow_rejected: bool = False - ) -> Awaitable[Dict[str, EventBase]]: ... + ) -> Awaitable[dict[str, EventBase]]: ... def get_auth_chain_difference( self, room_id: str, - state_sets: List[Set[str]], - conflicted_state: Optional[Set[str]], + state_sets: list[set[str]], + conflicted_state: Optional[set[str]], additional_backwards_reachable_conflicted_events: Optional[set[str]], ) -> Awaitable[StateDifference]: ... @@ -88,7 +84,7 @@ async def resolve_events_with_store( room_id: str, room_version: RoomVersion, state_sets: Sequence[StateMap[str]], - event_map: Optional[Dict[str, EventBase]], + event_map: Optional[dict[str, EventBase]], state_res_store: StateResolutionStore, ) -> StateMap[str]: """Resolves the state using the v2 state resolution algorithm @@ -128,7 +124,7 @@ async def resolve_events_with_store( logger.debug("%d conflicted state entries", len(conflicted_state)) logger.debug("Calculating auth chain difference") - conflicted_set: Optional[Set[str]] = None + conflicted_set: Optional[set[str]] = None if room_version.state_res == StateResolutionVersions.V2_1: # calculate the conflicted subgraph conflicted_set = set(itertools.chain.from_iterable(conflicted_state.values())) @@ -242,7 +238,7 @@ async def resolve_events_with_store( async def _get_power_level_for_sender( room_id: str, event_id: str, - event_map: Dict[str, EventBase], + event_map: dict[str, EventBase], state_res_store: StateResolutionStore, ) -> int: """Return the power level of the sender of the given event according to @@ -315,10 +311,10 @@ async def _get_power_level_for_sender( async def _get_auth_chain_difference( room_id: str, state_sets: Sequence[StateMap[str]], - unpersisted_events: Dict[str, EventBase], + unpersisted_events: dict[str, EventBase], state_res_store: StateResolutionStore, - conflicted_state: Optional[Set[str]], -) -> Set[str]: + conflicted_state: Optional[set[str]], +) -> set[str]: """Compare the auth chains of each state set and return the set of events that only appear in some, but not all of the auth chains. @@ -356,10 +352,10 @@ async def _get_auth_chain_difference( # event IDs if they appear in the `unpersisted_events`. This is the intersection of # the event's auth chain with the events in `unpersisted_events` *plus* their # auth event IDs. - events_to_auth_chain: Dict[str, Set[str]] = {} + events_to_auth_chain: dict[str, set[str]] = {} # remember the forward links when doing the graph traversal, we'll need it for v2.1 checks # This is a map from an event to the set of events that contain it as an auth event. - event_to_next_event: Dict[str, Set[str]] = {} + event_to_next_event: dict[str, set[str]] = {} for event in unpersisted_events.values(): chain = {event.event_id} events_to_auth_chain[event.event_id] = chain @@ -379,8 +375,8 @@ async def _get_auth_chain_difference( # # Note: If there are no `unpersisted_events` (which is the common case), we can do a # much simpler calculation. 
- additional_backwards_reachable_conflicted_events: Set[str] = set() - unpersisted_conflicted_events: Set[str] = set() + additional_backwards_reachable_conflicted_events: set[str] = set() + unpersisted_conflicted_events: set[str] = set() if unpersisted_events: # The list of state sets to pass to the store, where each state set is a set # of the event ids making up the state. This is similar to `state_sets`, @@ -388,17 +384,17 @@ async def _get_auth_chain_difference( # ((type, state_key)->event_id) mappings; and (b) we have stripped out # unpersisted events and replaced them with the persisted events in # their auth chain. - state_sets_ids: List[Set[str]] = [] + state_sets_ids: list[set[str]] = [] # For each state set, the unpersisted event IDs reachable (by their auth # chain) from the events in that set. - unpersisted_set_ids: List[Set[str]] = [] + unpersisted_set_ids: list[set[str]] = [] for state_set in state_sets: - set_ids: Set[str] = set() + set_ids: set[str] = set() state_sets_ids.append(set_ids) - unpersisted_ids: Set[str] = set() + unpersisted_ids: set[str] = set() unpersisted_set_ids.append(unpersisted_ids) for event_id in state_set.values(): @@ -479,7 +475,7 @@ async def _get_auth_chain_difference( # but NOT the backwards conflicted set. This mirrors what the DB layer does but in reverse: # we supplied events which are backwards reachable to the DB and now the DB is providing # forwards reachable events from the DB. - forwards_conflicted_set: Set[str] = set() + forwards_conflicted_set: set[str] = set() # we include unpersisted conflicted events here to process exclusive unpersisted subgraphs search_queue = subgraph_frontier.union(unpersisted_conflicted_events) while search_queue: @@ -490,7 +486,7 @@ async def _get_auth_chain_difference( # we've already calculated the backwards form as this is the auth chain for each # unpersisted conflicted event. - backwards_conflicted_set: Set[str] = set() + backwards_conflicted_set: set[str] = set() for uce in unpersisted_conflicted_events: backwards_conflicted_set.update(events_to_auth_chain.get(uce, [])) @@ -526,7 +522,7 @@ async def _get_auth_chain_difference( def _seperate( state_sets: Iterable[StateMap[str]], -) -> Tuple[StateMap[str], StateMap[Set[str]]]: +) -> tuple[StateMap[str], StateMap[set[str]]]: """Return the unconflicted and conflicted state. This is different than in the original algorithm, as this defines a key to be conflicted if one of the state sets doesn't have that key. @@ -550,7 +546,7 @@ def _seperate( conflicted_state[key] = event_ids # mypy doesn't understand that discarding None above means that conflicted - # state is StateMap[Set[str]], not StateMap[Set[Optional[Str]]]. + # state is StateMap[set[str]], not StateMap[set[Optional[Str]]]. 
return unconflicted_state, conflicted_state # type: ignore[return-value] @@ -579,12 +575,12 @@ def _is_power_event(event: EventBase) -> bool: async def _add_event_and_auth_chain_to_graph( - graph: Dict[str, Set[str]], + graph: dict[str, set[str]], room_id: str, event_id: str, - event_map: Dict[str, EventBase], + event_map: dict[str, EventBase], state_res_store: StateResolutionStore, - full_conflicted_set: Set[str], + full_conflicted_set: set[str], ) -> None: """Helper function for _reverse_topological_power_sort that add the event and its auth chain (that is in the auth diff) to the graph @@ -616,10 +612,10 @@ async def _reverse_topological_power_sort( clock: Clock, room_id: str, event_ids: Iterable[str], - event_map: Dict[str, EventBase], + event_map: dict[str, EventBase], state_res_store: StateResolutionStore, - full_conflicted_set: Set[str], -) -> List[str]: + full_conflicted_set: set[str], +) -> list[str]: """Returns a list of the event_ids sorted by reverse topological ordering, and then by power level and origin_server_ts @@ -635,7 +631,7 @@ async def _reverse_topological_power_sort( The sorted list """ - graph: Dict[str, Set[str]] = {} + graph: dict[str, set[str]] = {} for idx, event_id in enumerate(event_ids, start=1): await _add_event_and_auth_chain_to_graph( graph, room_id, event_id, event_map, state_res_store, full_conflicted_set @@ -658,7 +654,7 @@ async def _reverse_topological_power_sort( if idx % _AWAIT_AFTER_ITERATIONS == 0: await clock.sleep(0) - def _get_power_order(event_id: str) -> Tuple[int, int, str]: + def _get_power_order(event_id: str) -> tuple[int, int, str]: ev = event_map[event_id] pl = event_to_pl[event_id] @@ -675,9 +671,9 @@ async def _iterative_auth_checks( clock: Clock, room_id: str, room_version: RoomVersion, - event_ids: List[str], + event_ids: list[str], base_state: StateMap[str], - event_map: Dict[str, EventBase], + event_map: dict[str, EventBase], state_res_store: StateResolutionStore, ) -> MutableStateMap[str]: """Sequentially apply auth checks to each event in given list, updating the @@ -758,11 +754,11 @@ async def _iterative_auth_checks( async def _mainline_sort( clock: Clock, room_id: str, - event_ids: List[str], + event_ids: list[str], resolved_power_event_id: Optional[str], - event_map: Dict[str, EventBase], + event_map: dict[str, EventBase], state_res_store: StateResolutionStore, -) -> List[str]: +) -> list[str]: """Returns a sorted list of event_ids sorted by mainline ordering based on the given event resolved_power_event_id @@ -829,8 +825,8 @@ async def _mainline_sort( async def _get_mainline_depth_for_event( clock: Clock, event: EventBase, - mainline_map: Dict[str, int], - event_map: Dict[str, EventBase], + mainline_map: dict[str, int], + event_map: dict[str, EventBase], state_res_store: StateResolutionStore, ) -> int: """Get the mainline depths for the given event based on the mainline map @@ -880,7 +876,7 @@ async def _get_mainline_depth_for_event( async def _get_event( room_id: str, event_id: str, - event_map: Dict[str, EventBase], + event_map: dict[str, EventBase], state_res_store: StateResolutionStore, allow_none: Literal[False] = False, ) -> EventBase: ... @@ -890,7 +886,7 @@ async def _get_event( async def _get_event( room_id: str, event_id: str, - event_map: Dict[str, EventBase], + event_map: dict[str, EventBase], state_res_store: StateResolutionStore, allow_none: Literal[True], ) -> Optional[EventBase]: ... 
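The `_get_event` hunks above and below keep the `@overload`/`Literal` pattern intact while swapping `Dict` for `dict`: the `Literal` flag on `allow_none` lets mypy pick the non-optional return type when callers pass `allow_none=False`. The same pattern in a self-contained form, with a toy event store in place of `state_res_store`:

    from typing import Literal, Optional, overload

    _events: dict[str, str] = {"$a": "event-a"}

    @overload
    def get_event(event_id: str, allow_none: Literal[False] = False) -> str: ...
    @overload
    def get_event(event_id: str, allow_none: Literal[True]) -> Optional[str]: ...
    def get_event(event_id: str, allow_none: bool = False) -> Optional[str]:
        ev = _events.get(event_id)
        if ev is None and not allow_none:
            raise KeyError(event_id)
        return ev

    assert get_event("$a") == "event-a"
    assert get_event("$missing", allow_none=True) is None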
@@ -899,7 +895,7 @@ async def _get_event( async def _get_event( room_id: str, event_id: str, - event_map: Dict[str, EventBase], + event_map: dict[str, EventBase], state_res_store: StateResolutionStore, allow_none: bool = False, ) -> Optional[EventBase]: @@ -936,7 +932,7 @@ async def _get_event( def lexicographical_topological_sort( - graph: Dict[str, Set[str]], key: Callable[[str], Any] + graph: dict[str, set[str]], key: Callable[[str], Any] ) -> Generator[str, None, None]: """Performs a lexicographic reverse topological sort on the graph. @@ -960,7 +956,7 @@ def lexicographical_topological_sort( # outgoing edges, c.f. # https://en.wikipedia.org/wiki/Topological_sorting#Kahn's_algorithm outdegree_map = graph - reverse_graph: Dict[str, Set[str]] = {} + reverse_graph: dict[str, set[str]] = {} # Lists of nodes with zero out degree. Is actually a tuple of # `(key(node), node)` so that sorting does the right thing diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 1fddcc0799a..b6958ef06ba 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -21,7 +21,7 @@ # import logging from abc import ABCMeta -from typing import TYPE_CHECKING, Any, Collection, Dict, Iterable, Optional, Union +from typing import TYPE_CHECKING, Any, Collection, Iterable, Optional, Union from synapse.storage.database import ( DatabasePool, @@ -60,7 +60,7 @@ def __init__( self.database_engine = database.engine self.db_pool = database - self.external_cached_functions: Dict[str, CachedFunction] = {} + self.external_cached_functions: dict[str, CachedFunction] = {} def process_replication_rows( # noqa: B027 (no-op by design) self, diff --git a/synapse/storage/background_updates.py b/synapse/storage/background_updates.py index e3e793d5f59..ce213050a96 100644 --- a/synapse/storage/background_updates.py +++ b/synapse/storage/background_updates.py @@ -28,13 +28,9 @@ AsyncContextManager, Awaitable, Callable, - Dict, Iterable, - List, Optional, Sequence, - Tuple, - Type, cast, ) @@ -96,7 +92,7 @@ class ForeignKeyConstraint(Constraint): """ referenced_table: str - columns: Sequence[Tuple[str, str]] + columns: Sequence[tuple[str, str]] deferred: bool def make_check_clause(self, table: str) -> str: @@ -173,7 +169,7 @@ async def __aenter__(self) -> int: async def __aexit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc: Optional[BaseException], tb: Optional[TracebackType], ) -> None: @@ -260,8 +256,8 @@ def __init__(self, hs: "HomeServer", database: "DatabasePool"): self._default_batch_size_callback: Optional[DEFAULT_BATCH_SIZE_CALLBACK] = None self._min_batch_size_callback: Optional[MIN_BATCH_SIZE_CALLBACK] = None - self._background_update_performance: Dict[str, BackgroundUpdatePerformance] = {} - self._background_update_handlers: Dict[str, _BackgroundUpdateHandler] = {} + self._background_update_performance: dict[str, BackgroundUpdatePerformance] = {} + self._background_update_handlers: dict[str, _BackgroundUpdateHandler] = {} # TODO: all these bool flags make me feel icky---can we combine into a status # enum? 
self._all_done = False @@ -530,14 +526,14 @@ async def do_next_background_update(self, sleep: bool = True) -> bool: True if we have finished running all the background updates, otherwise False """ - def get_background_updates_txn(txn: Cursor) -> List[Tuple[str, Optional[str]]]: + def get_background_updates_txn(txn: Cursor) -> list[tuple[str, Optional[str]]]: txn.execute( """ SELECT update_name, depends_on FROM background_updates ORDER BY ordering, update_name """ ) - return cast(List[Tuple[str, Optional[str]]], txn.fetchall()) + return cast(list[tuple[str, Optional[str]]], txn.fetchall()) if not self._current_background_update: all_pending_updates = await self.db_pool.runInteraction( @@ -965,7 +961,7 @@ async def validate_constraint_and_delete_in_background( order_columns = ", ".join(unique_columns) where_clause = "" - args: List[Any] = [] + args: list[Any] = [] if parsed_progress.lower_bound: where_clause = f"""WHERE ({order_columns}) > ({", ".join("?" for _ in unique_columns)})""" args.extend(parsed_progress.lower_bound) diff --git a/synapse/storage/controllers/persist_events.py b/synapse/storage/controllers/persist_events.py index 646e2cf1151..e02dfe2c093 100644 --- a/synapse/storage/controllers/persist_events.py +++ b/synapse/storage/controllers/persist_events.py @@ -32,14 +32,11 @@ ClassVar, Collection, Deque, - Dict, Generator, Generic, Iterable, List, Optional, - Set, - Tuple, TypeVar, Union, ) @@ -143,7 +140,7 @@ class _PersistEventsTask: name: ClassVar[str] = "persist_event_batch" # used for opentracing - events_and_contexts: List[EventPersistencePair] + events_and_contexts: list[EventPersistencePair] backfilled: bool def try_merge(self, task: "_EventPersistQueueTask") -> bool: @@ -208,8 +205,8 @@ def __init__( """ self.server_name = server_name self.hs = hs - self._event_persist_queues: Dict[str, Deque[_EventPersistQueueItem]] = {} - self._currently_persisting_rooms: Set[str] = set() + self._event_persist_queues: dict[str, Deque[_EventPersistQueueItem]] = {} + self._currently_persisting_rooms: set[str] = set() self._per_item_callback = per_item_callback async def add_to_queue( @@ -365,7 +362,7 @@ async def _process_event_persist_queue_task( self, room_id: str, task: _EventPersistQueueTask, - ) -> Dict[str, str]: + ) -> dict[str, str]: """Callback for the _event_persist_queue Returns: @@ -394,7 +391,7 @@ async def persist_events( self, events_and_contexts: Iterable[EventPersistencePair], backfilled: bool = False, - ) -> Tuple[List[EventBase], RoomStreamToken]: + ) -> tuple[list[EventBase], RoomStreamToken]: """ Write events to the database Args: @@ -414,8 +411,8 @@ async def persist_events( PartialStateConflictError: if attempting to persist a partial state event in a room that has been un-partial stated. 
""" - event_ids: List[str] = [] - partitioned: Dict[str, List[EventPersistencePair]] = {} + event_ids: list[str] = [] + partitioned: dict[str, list[EventPersistencePair]] = {} for event, ctx in events_and_contexts: partitioned.setdefault(event.room_id, []).append((event, ctx)) event_ids.append(event.event_id) @@ -431,8 +428,8 @@ async def persist_events( set_tag(SynapseTags.FUNC_ARG_PREFIX + "backfilled", str(backfilled)) async def enqueue( - item: Tuple[str, List[EventPersistencePair]], - ) -> Dict[str, str]: + item: tuple[str, list[EventPersistencePair]], + ) -> dict[str, str]: room_id, evs_ctxs = item return await self._event_persist_queue.add_to_queue( room_id, @@ -447,7 +444,7 @@ async def enqueue( # # Since we use `yieldable_gather_results` we need to merge the returned list # of dicts into one. - replaced_events: Dict[str, str] = {} + replaced_events: dict[str, str] = {} for d in ret_vals: replaced_events.update(d) @@ -469,7 +466,7 @@ async def enqueue( @trace async def persist_event( self, event: EventBase, context: EventContext, backfilled: bool = False - ) -> Tuple[EventBase, PersistedEventPosition, RoomStreamToken]: + ) -> tuple[EventBase, PersistedEventPosition, RoomStreamToken]: """ Returns: The event, stream ordering of `event`, and the stream ordering of the @@ -573,7 +570,7 @@ async def _calculate_current_state(self, room_id: str) -> StateMap[str]: async def _persist_event_batch( self, room_id: str, task: _PersistEventsTask - ) -> Dict[str, str]: + ) -> dict[str, str]: """Callback for the _event_persist_queue Calculates the change to current state and forward extremities, and @@ -592,7 +589,7 @@ async def _persist_event_batch( events_and_contexts = task.events_and_contexts backfilled = task.backfilled - replaced_events: Dict[str, str] = {} + replaced_events: dict[str, str] = {} if not events_and_contexts: return replaced_events @@ -678,8 +675,8 @@ async def _persist_event_batch( return replaced_events async def _calculate_new_forward_extremities_and_state_delta( - self, room_id: str, ev_ctx_rm: List[EventPersistencePair] - ) -> Tuple[Optional[Set[str]], Optional[DeltaState]]: + self, room_id: str, ev_ctx_rm: list[EventPersistencePair] + ) -> tuple[Optional[set[str]], Optional[DeltaState]]: """Calculates the new forward extremities and state delta for a room given events to persist. @@ -803,9 +800,9 @@ async def _calculate_new_forward_extremities_and_state_delta( async def _calculate_new_extremities( self, room_id: str, - event_contexts: List[EventPersistencePair], + event_contexts: list[EventPersistencePair], latest_event_ids: AbstractSet[str], - ) -> Set[str]: + ) -> set[str]: """Calculates the new forward extremities for a room given events to persist. 
@@ -863,10 +860,10 @@ async def _calculate_new_extremities( async def _get_new_state_after_events( self, room_id: str, - events_context: List[EventPersistencePair], + events_context: list[EventPersistencePair], old_latest_event_ids: AbstractSet[str], - new_latest_event_ids: Set[str], - ) -> Tuple[Optional[StateMap[str]], Optional[StateMap[str]], Set[str]]: + new_latest_event_ids: set[str], + ) -> tuple[Optional[StateMap[str]], Optional[StateMap[str]], set[str]]: """Calculate the current state dict after adding some new events to a room @@ -1037,11 +1034,11 @@ async def _get_new_state_after_events( async def _prune_extremities( self, room_id: str, - new_latest_event_ids: Set[str], + new_latest_event_ids: set[str], resolved_state_group: int, - event_id_to_state_group: Dict[str, int], - events_context: List[EventPersistencePair], - ) -> Set[str]: + event_id_to_state_group: dict[str, int], + events_context: list[EventPersistencePair], + ) -> set[str]: """See if we can prune any of the extremities after calculating the resolved state. """ @@ -1108,7 +1105,7 @@ async def _prune_extremities( # as a first cut. events_to_check: Collection[EventBase] = [event] while events_to_check: - new_events: Set[str] = set() + new_events: set[str] = set() for event_to_check in events_to_check: if self.is_mine_id(event_to_check.sender): if event_to_check.type != EventTypes.Dummy: @@ -1177,7 +1174,7 @@ async def _calculate_state_delta( async def _is_server_still_joined( self, room_id: str, - ev_ctx_rm: List[EventPersistencePair], + ev_ctx_rm: list[EventPersistencePair], delta: DeltaState, ) -> bool: """Check if the server will still be joined after the given events have diff --git a/synapse/storage/controllers/purge_events.py b/synapse/storage/controllers/purge_events.py index ded9cb0567e..6606fdcc30f 100644 --- a/synapse/storage/controllers/purge_events.py +++ b/synapse/storage/controllers/purge_events.py @@ -26,7 +26,6 @@ Collection, Mapping, Optional, - Set, ) from synapse.logging.context import nested_logging_context @@ -99,7 +98,7 @@ async def purge_history( async def _find_unreferenced_groups( self, state_groups: Collection[int], - ) -> Set[int]: + ) -> set[int]: """Used when purging history to figure out which state groups can be deleted. @@ -316,7 +315,7 @@ async def _find_unreferenced_groups_for_background_deletion( self, last_checked_state_group: int, batch_size: int, - ) -> tuple[Set[int], int, bool]: + ) -> tuple[set[int], int, bool]: """Used when deleting unreferenced state groups in the background to figure out which state groups can be deleted. To avoid increased DB usage due to de-deltaing state groups, this returns only diff --git a/synapse/storage/controllers/state.py b/synapse/storage/controllers/state.py index 76978402b94..690a0dde2e3 100644 --- a/synapse/storage/controllers/state.py +++ b/synapse/storage/controllers/state.py @@ -25,13 +25,9 @@ AbstractSet, Callable, Collection, - Dict, - FrozenSet, Iterable, - List, Mapping, Optional, - Tuple, Union, ) @@ -95,7 +91,7 @@ def notify_room_un_partial_stated(self, room_id: str) -> None: @tag_args async def get_state_group_delta( self, state_group: int - ) -> Tuple[Optional[int], Optional[StateMap[str]]]: + ) -> tuple[Optional[int], Optional[StateMap[str]]]: """Given a state group try to return a previous group and a delta between the old and the new. 
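`StateMap[str]`, which the surrounding hunks use heavily, is (as far as these diffs show) a mapping from an (event type, state key) pair to some value type; under PEP 585 such an alias can be built from builtin generics directly. A hedged sketch, not necessarily Synapse's exact definition:

    from collections.abc import Mapping
    from typing import TypeVar

    T = TypeVar("T")
    StateKey = tuple[str, str]       # (event type, state key)
    StateMap = Mapping[StateKey, T]  # parameterize as e.g. StateMap[str]

    delta: StateMap[str] = {("m.room.member", "@alice:example.org"): "$event_id"}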
@@ -114,7 +110,7 @@ async def get_state_group_delta( @tag_args async def get_state_groups_ids( self, _room_id: str, event_ids: Collection[str], await_full_state: bool = True - ) -> Dict[int, MutableStateMap[str]]: + ) -> dict[int, MutableStateMap[str]]: """Get the event IDs of all the state for the state groups for the given events Args: @@ -164,7 +160,7 @@ async def get_state_ids_for_group( @tag_args async def get_state_groups( self, room_id: str, event_ids: Collection[str] - ) -> Dict[int, List[EventBase]]: + ) -> dict[int, list[EventBase]]: """Get the state groups for the given list of event_ids Args: @@ -200,8 +196,8 @@ async def get_state_groups( @trace @tag_args async def _get_state_groups_from_groups( - self, groups: List[int], state_filter: StateFilter - ) -> Dict[int, StateMap[str]]: + self, groups: list[int], state_filter: StateFilter + ) -> dict[int, StateMap[str]]: """Returns the state groups for a given set of groups, filtering on types of state events. @@ -222,7 +218,7 @@ async def _get_state_groups_from_groups( @tag_args async def get_state_for_events( self, event_ids: Collection[str], state_filter: Optional[StateFilter] = None - ) -> Dict[str, StateMap[EventBase]]: + ) -> dict[str, StateMap[EventBase]]: """Given a list of event_ids and type tuples, return a list of state dicts for each event. @@ -277,7 +273,7 @@ async def get_state_ids_for_events( event_ids: Collection[str], state_filter: Optional[StateFilter] = None, await_full_state: bool = True, - ) -> Dict[str, StateMap[str]]: + ) -> dict[str, StateMap[str]]: """ Get the room states after each of a list of events. @@ -505,7 +501,7 @@ async def get_state_at( @tag_args async def get_state_for_groups( self, groups: Iterable[int], state_filter: Optional[StateFilter] = None - ) -> Dict[int, MutableStateMap[str]]: + ) -> dict[int, MutableStateMap[str]]: """Gets the state at each of a list of state groups, optionally filtering by type/state_key @@ -671,7 +667,7 @@ async def get_server_acl_for_room( @tag_args async def get_current_state_deltas( self, prev_stream_id: int, max_stream_id: int - ) -> Tuple[int, List[StateDelta]]: + ) -> tuple[int, list[StateDelta]]: """Fetch a list of room state changes since the given stream id Args: @@ -745,7 +741,7 @@ async def get_current_hosts_in_room(self, room_id: str) -> AbstractSet[str]: @trace @tag_args - async def get_current_hosts_in_room_ordered(self, room_id: str) -> Tuple[str, ...]: + async def get_current_hosts_in_room_ordered(self, room_id: str) -> tuple[str, ...]: """Get current hosts in room based on current state. Blocks until we have full state for the given room. This only happens for rooms @@ -807,7 +803,7 @@ async def get_users_in_room_with_profiles( async def get_joined_hosts( self, room_id: str, state_entry: "_StateCacheEntry" - ) -> FrozenSet[str]: + ) -> frozenset[str]: state_group: Union[object, int] = state_entry.state_group if not state_group: # If state_group is None it means it has yet to be assigned a @@ -828,7 +824,7 @@ async def _get_joined_hosts( room_id: str, state_group: Union[object, int], state_entry: "_StateCacheEntry", - ) -> FrozenSet[str]: + ) -> frozenset[str]: # We don't use `state_group`, it's there so that we can cache based on # it. However, its important that its never None, since two # current_state's with a state_group of None are likely to be different. 
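The closing comment above explains that `state_group` is passed purely so results can be cached on it, and must never be None. A minimal sketch of that caching-on-a-surrogate-key pattern, using a plain dict instead of Synapse's cached-function machinery (names here are illustrative):

    from collections.abc import Callable

    _joined_hosts_cache: dict[int, frozenset[str]] = {}

    def get_joined_hosts(
        state_group: int, compute: Callable[[], frozenset[str]]
    ) -> frozenset[str]:
        # state_group uniquely identifies the room state, so it is a safe
        # cache key even though compute() itself never reads it
        if state_group not in _joined_hosts_cache:
            _joined_hosts_cache[state_group] = compute()
        return _joined_hosts_cache[state_group]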
diff --git a/synapse/storage/controllers/stats.py b/synapse/storage/controllers/stats.py index 9445a862403..18e27e08781 100644 --- a/synapse/storage/controllers/stats.py +++ b/synapse/storage/controllers/stats.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Collection, Counter, List, Tuple +from typing import TYPE_CHECKING, Collection, Counter from synapse.api.errors import SynapseError from synapse.storage.database import LoggingTransaction @@ -39,7 +39,7 @@ class StatsController: def __init__(self, hs: "HomeServer", stores: Databases): self.stores = stores - async def get_room_db_size_estimate(self) -> List[Tuple[str, int]]: + async def get_room_db_size_estimate(self) -> list[tuple[str, int]]: """Get an estimate of the largest rooms and how much database space they use, in bytes. diff --git a/synapse/storage/database.py b/synapse/storage/database.py index a4b2b26795c..795a036ff22 100644 --- a/synapse/storage/database.py +++ b/synapse/storage/database.py @@ -31,16 +31,13 @@ Awaitable, Callable, Collection, - Dict, Iterable, Iterator, - List, Literal, Mapping, Optional, Sequence, Tuple, - Type, TypeVar, cast, overload, @@ -218,9 +215,9 @@ def cursor( self, *, txn_name: Optional[str] = None, - after_callbacks: Optional[List["_CallbackListEntry"]] = None, - async_after_callbacks: Optional[List["_AsyncCallbackListEntry"]] = None, - exception_callbacks: Optional[List["_CallbackListEntry"]] = None, + after_callbacks: Optional[list["_CallbackListEntry"]] = None, + async_after_callbacks: Optional[list["_AsyncCallbackListEntry"]] = None, + exception_callbacks: Optional[list["_CallbackListEntry"]] = None, ) -> "LoggingTransaction": if not txn_name: txn_name = self.default_txn_name @@ -250,7 +247,7 @@ def __enter__(self) -> "LoggingDatabaseConnection": def __exit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc_value: Optional[BaseException], traceback: Optional[types.TracebackType], ) -> Optional[bool]: @@ -262,9 +259,9 @@ def __getattr__(self, name: str) -> Any: # The type of entry which goes on our after_callbacks and exception_callbacks lists. 
-_CallbackListEntry = Tuple[Callable[..., object], Tuple[object, ...], Dict[str, object]] -_AsyncCallbackListEntry = Tuple[ - Callable[..., Awaitable], Tuple[object, ...], Dict[str, object] +_CallbackListEntry = tuple[Callable[..., object], tuple[object, ...], dict[str, object]] +_AsyncCallbackListEntry = tuple[ + Callable[..., Awaitable], tuple[object, ...], dict[str, object] ] P = ParamSpec("P") @@ -311,9 +308,9 @@ def __init__( name: str, server_name: str, database_engine: BaseDatabaseEngine, - after_callbacks: Optional[List[_CallbackListEntry]] = None, - async_after_callbacks: Optional[List[_AsyncCallbackListEntry]] = None, - exception_callbacks: Optional[List[_CallbackListEntry]] = None, + after_callbacks: Optional[list[_CallbackListEntry]] = None, + async_after_callbacks: Optional[list[_AsyncCallbackListEntry]] = None, + exception_callbacks: Optional[list[_CallbackListEntry]] = None, ): self.txn = txn self.name = name @@ -386,10 +383,10 @@ def call_on_exception( def fetchone(self) -> Optional[Tuple]: return self.txn.fetchone() - def fetchmany(self, size: Optional[int] = None) -> List[Tuple]: + def fetchmany(self, size: Optional[int] = None) -> list[Tuple]: return self.txn.fetchmany(size=size) - def fetchall(self) -> List[Tuple]: + def fetchall(self) -> list[Tuple]: return self.txn.fetchall() def __iter__(self) -> Iterator[Tuple]: @@ -435,7 +432,7 @@ def execute_values( values: Iterable[Iterable[Any]], template: Optional[str] = None, fetch: bool = True, - ) -> List[Tuple]: + ) -> list[Tuple]: """Corresponds to psycopg2.extras.execute_values. Only available when using postgres. @@ -540,7 +537,7 @@ def __enter__(self) -> "LoggingTransaction": def __exit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc_value: Optional[BaseException], traceback: Optional[types.TracebackType], ) -> None: @@ -549,8 +546,8 @@ def __exit__( class PerformanceCounters: def __init__(self) -> None: - self.current_counters: Dict[str, Tuple[int, float]] = {} - self.previous_counters: Dict[str, Tuple[int, float]] = {} + self.current_counters: dict[str, tuple[int, float]] = {} + self.previous_counters: dict[str, tuple[int, float]] = {} def update(self, key: str, duration_secs: float) -> None: count, cum_time = self.current_counters.get(key, (0, 0.0)) @@ -616,7 +613,7 @@ def __init__( self._previous_loop_ts = 0.0 # Transaction counter: key is the twisted thread id, value is the current count - self._txn_counters: Dict[int, int] = defaultdict(int) + self._txn_counters: dict[int, int] = defaultdict(int) # TODO(paul): These can eventually be removed once the metrics code # is running in mainline, and we have some nice monitoring frontends @@ -666,7 +663,7 @@ async def _check_safe_to_upsert(self) -> None: If the background updates have not completed, wait 15 sec and check again. 
""" updates = cast( - List[Tuple[str]], + list[tuple[str]], await self.simple_select_list( "background_updates", keyvalues=None, @@ -717,9 +714,9 @@ def new_transaction( self, conn: LoggingDatabaseConnection, desc: str, - after_callbacks: List[_CallbackListEntry], - async_after_callbacks: List[_AsyncCallbackListEntry], - exception_callbacks: List[_CallbackListEntry], + after_callbacks: list[_CallbackListEntry], + async_after_callbacks: list[_AsyncCallbackListEntry], + exception_callbacks: list[_CallbackListEntry], func: Callable[Concatenate[LoggingTransaction, P], R], *args: P.args, **kwargs: P.kwargs, @@ -956,9 +953,9 @@ async def runInteraction( """ async def _runInteraction() -> R: - after_callbacks: List[_CallbackListEntry] = [] - async_after_callbacks: List[_AsyncCallbackListEntry] = [] - exception_callbacks: List[_CallbackListEntry] = [] + after_callbacks: list[_CallbackListEntry] = [] + async_after_callbacks: list[_AsyncCallbackListEntry] = [] + exception_callbacks: list[_CallbackListEntry] = [] if not current_context(): logger.warning("Starting db txn '%s' from sentinel context", desc) @@ -1105,7 +1102,7 @@ def inner_func(conn: _PoolConnection, *args: P.args, **kwargs: P.kwargs) -> R: self._db_pool.runWithConnection(inner_func, *args, **kwargs) ) - async def execute(self, desc: str, query: str, *args: Any) -> List[Tuple[Any, ...]]: + async def execute(self, desc: str, query: str, *args: Any) -> list[tuple[Any, ...]]: """Runs a single query for a result set. Args: @@ -1116,7 +1113,7 @@ async def execute(self, desc: str, query: str, *args: Any) -> List[Tuple[Any, .. The result of decoder(results) """ - def interaction(txn: LoggingTransaction) -> List[Tuple[Any, ...]]: + def interaction(txn: LoggingTransaction) -> list[tuple[Any, ...]]: txn.execute(query, args) return txn.fetchall() @@ -1128,7 +1125,7 @@ def interaction(txn: LoggingTransaction) -> List[Tuple[Any, ...]]: async def simple_insert( self, table: str, - values: Dict[str, Any], + values: dict[str, Any], desc: str = "simple_insert", ) -> None: """Executes an INSERT query on the named table. @@ -1142,7 +1139,7 @@ async def simple_insert( @staticmethod def simple_insert_txn( - txn: LoggingTransaction, table: str, values: Dict[str, Any] + txn: LoggingTransaction, table: str, values: dict[str, Any] ) -> None: keys, vals = zip(*values.items()) @@ -1158,9 +1155,9 @@ def simple_insert_txn( def simple_insert_returning_txn( txn: LoggingTransaction, table: str, - values: Dict[str, Any], + values: dict[str, Any], returning: StrCollection, - ) -> Tuple[Any, ...]: + ) -> tuple[Any, ...]: """Executes a `INSERT INTO... RETURNING...` statement (or equivalent for SQLite versions that don't support it). """ @@ -1261,9 +1258,9 @@ def simple_insert_many_txn( async def simple_upsert( self, table: str, - keyvalues: Dict[str, Any], - values: Dict[str, Any], - insertion_values: Optional[Dict[str, Any]] = None, + keyvalues: dict[str, Any], + values: dict[str, Any], + insertion_values: Optional[dict[str, Any]] = None, where_clause: Optional[str] = None, desc: str = "simple_upsert", ) -> bool: @@ -1463,7 +1460,7 @@ def _getwhere(key: str) -> str: return True # We didn't find any existing rows, so insert a new one - allvalues: Dict[str, Any] = {} + allvalues: dict[str, Any] = {} allvalues.update(keyvalues) allvalues.update(values) allvalues.update(insertion_values) @@ -1500,7 +1497,7 @@ def simple_upsert_txn_native_upsert( Returns True if a row was inserted or updated (i.e. 
if `values` is not empty then this always returns True) """ - allvalues: Dict[str, Any] = {} + allvalues: dict[str, Any] = {} allvalues.update(keyvalues) allvalues.update(insertion_values or {}) @@ -1694,7 +1691,7 @@ def simple_upsert_many_txn_native_upsert( value_values: A list of each row's value column values. Ignored if value_names is empty. """ - allnames: List[str] = [] + allnames: list[str] = [] allnames.extend(key_names) allnames.extend(value_names) @@ -1737,30 +1734,30 @@ def simple_upsert_many_txn_native_upsert( async def simple_select_one( self, table: str, - keyvalues: Dict[str, Any], + keyvalues: dict[str, Any], retcols: Collection[str], allow_none: Literal[False] = False, desc: str = "simple_select_one", - ) -> Tuple[Any, ...]: ... + ) -> tuple[Any, ...]: ... @overload async def simple_select_one( self, table: str, - keyvalues: Dict[str, Any], + keyvalues: dict[str, Any], retcols: Collection[str], allow_none: Literal[True] = True, desc: str = "simple_select_one", - ) -> Optional[Tuple[Any, ...]]: ... + ) -> Optional[tuple[Any, ...]]: ... async def simple_select_one( self, table: str, - keyvalues: Dict[str, Any], + keyvalues: dict[str, Any], retcols: Collection[str], allow_none: bool = False, desc: str = "simple_select_one", - ) -> Optional[Tuple[Any, ...]]: + ) -> Optional[tuple[Any, ...]]: """Executes a SELECT query on the named table, which is expected to return a single row, returning multiple columns from it. @@ -1786,7 +1783,7 @@ async def simple_select_one( async def simple_select_one_onecol( self, table: str, - keyvalues: Dict[str, Any], + keyvalues: dict[str, Any], retcol: str, allow_none: Literal[False] = False, desc: str = "simple_select_one_onecol", @@ -1796,7 +1793,7 @@ async def simple_select_one_onecol( async def simple_select_one_onecol( self, table: str, - keyvalues: Dict[str, Any], + keyvalues: dict[str, Any], retcol: str, allow_none: Literal[True] = True, desc: str = "simple_select_one_onecol", @@ -1805,7 +1802,7 @@ async def simple_select_one_onecol( async def simple_select_one_onecol( self, table: str, - keyvalues: Dict[str, Any], + keyvalues: dict[str, Any], retcol: str, allow_none: bool = False, desc: str = "simple_select_one_onecol", @@ -1837,7 +1834,7 @@ def simple_select_one_onecol_txn( cls, txn: LoggingTransaction, table: str, - keyvalues: Dict[str, Any], + keyvalues: dict[str, Any], retcol: str, allow_none: Literal[False] = False, ) -> Any: ... @@ -1848,7 +1845,7 @@ def simple_select_one_onecol_txn( cls, txn: LoggingTransaction, table: str, - keyvalues: Dict[str, Any], + keyvalues: dict[str, Any], retcol: str, allow_none: Literal[True] = True, ) -> Optional[Any]: ... 
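The `allow_none` overloads above are a recurring pattern in this file: a `Literal[False]` default selects an overload returning a bare tuple, while an explicit `allow_none=True` selects the `Optional[...]` one, so callers only handle None when they opted into it. A reduced, standalone sketch of the same trick:

    from typing import Any, Literal, Optional, overload

    @overload
    def one_row(row: Optional[tuple[Any, ...]], allow_none: Literal[False] = False) -> tuple[Any, ...]: ...

    @overload
    def one_row(row: Optional[tuple[Any, ...]], allow_none: Literal[True] = True) -> Optional[tuple[Any, ...]]: ...

    def one_row(row: Optional[tuple[Any, ...]], allow_none: bool = False) -> Optional[tuple[Any, ...]]:
        if row is None and not allow_none:
            raise RuntimeError("expected exactly one row")
        return row

    first = one_row(("a", 1))  # checkers infer tuple[Any, ...]: no None handling needed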
@@ -1858,7 +1855,7 @@ def simple_select_one_onecol_txn( cls, txn: LoggingTransaction, table: str, - keyvalues: Dict[str, Any], + keyvalues: dict[str, Any], retcol: str, allow_none: bool = False, ) -> Optional[Any]: @@ -1878,9 +1875,9 @@ def simple_select_one_onecol_txn( def simple_select_onecol_txn( txn: LoggingTransaction, table: str, - keyvalues: Dict[str, Any], + keyvalues: dict[str, Any], retcol: str, - ) -> List[Any]: + ) -> list[Any]: sql = ("SELECT %(retcol)s FROM %(table)s") % {"retcol": retcol, "table": table} if keyvalues: @@ -1894,10 +1891,10 @@ def simple_select_onecol_txn( async def simple_select_onecol( self, table: str, - keyvalues: Optional[Dict[str, Any]], + keyvalues: Optional[dict[str, Any]], retcol: str, desc: str = "simple_select_onecol", - ) -> List[Any]: + ) -> list[Any]: """Executes a SELECT query on the named table, which returns a list comprising of the values of the named column from the selected rows. @@ -1922,10 +1919,10 @@ async def simple_select_onecol( async def simple_select_list( self, table: str, - keyvalues: Optional[Dict[str, Any]], + keyvalues: Optional[dict[str, Any]], retcols: Collection[str], desc: str = "simple_select_list", - ) -> List[Tuple[Any, ...]]: + ) -> list[tuple[Any, ...]]: """Executes a SELECT query on the named table, which may return zero or more rows, returning the result as a list of tuples. @@ -1954,9 +1951,9 @@ def simple_select_list_txn( cls, txn: LoggingTransaction, table: str, - keyvalues: Optional[Dict[str, Any]], + keyvalues: Optional[dict[str, Any]], retcols: Iterable[str], - ) -> List[Tuple[Any, ...]]: + ) -> list[tuple[Any, ...]]: """Executes a SELECT query on the named table, which may return zero or more rows, returning the result as a list of tuples. @@ -1990,10 +1987,10 @@ async def simple_select_many_batch( column: str, iterable: Iterable[Any], retcols: Collection[str], - keyvalues: Optional[Dict[str, Any]] = None, + keyvalues: Optional[dict[str, Any]] = None, desc: str = "simple_select_many_batch", batch_size: int = 100, - ) -> List[Tuple[Any, ...]]: + ) -> list[tuple[Any, ...]]: """Executes a SELECT query on the named table, which may return zero or more rows. @@ -2013,7 +2010,7 @@ async def simple_select_many_batch( """ keyvalues = keyvalues or {} - results: List[Tuple[Any, ...]] = [] + results: list[tuple[Any, ...]] = [] for chunk in batch_iter(iterable, batch_size): rows = await self.runInteraction( @@ -2038,9 +2035,9 @@ def simple_select_many_txn( table: str, column: str, iterable: Collection[Any], - keyvalues: Dict[str, Any], + keyvalues: dict[str, Any], retcols: Iterable[str], - ) -> List[Tuple[Any, ...]]: + ) -> list[tuple[Any, ...]]: """Executes a SELECT query on the named table, which may return zero or more rows. 
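`simple_select_many_batch` above avoids unbounded SQL parameter lists by slicing the iterable with `batch_iter` and issuing one SELECT per chunk. A simplified stand-in for that chunking helper (the real one lives in synapse.util; this version only shows the shape):

    from collections.abc import Iterable, Iterator
    from itertools import islice
    from typing import TypeVar

    T = TypeVar("T")

    def batch_iter(iterable: Iterable[T], size: int) -> Iterator[tuple[T, ...]]:
        """Yield tuples of at most `size` items until the iterable is drained."""
        it = iter(iterable)
        while batch := tuple(islice(it, size)):
            yield batch

    results: list[int] = []
    for chunk in batch_iter(range(250), 100):   # yields 100, 100, then 50
        results.extend(chunk)                   # stand-in for one SELECT per chunk
    assert len(results) == 250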
@@ -2080,8 +2077,8 @@ def simple_select_many_txn( async def simple_update( self, table: str, - keyvalues: Dict[str, Any], - updatevalues: Dict[str, Any], + keyvalues: dict[str, Any], + updatevalues: dict[str, Any], desc: str, ) -> int: """ @@ -2217,8 +2214,8 @@ def simple_update_many_txn( async def simple_update_one( self, table: str, - keyvalues: Dict[str, Any], - updatevalues: Dict[str, Any], + keyvalues: dict[str, Any], + updatevalues: dict[str, Any], desc: str = "simple_update_one", ) -> None: """Executes an UPDATE query on the named table, setting new values for @@ -2244,8 +2241,8 @@ def simple_update_one_txn( cls, txn: LoggingTransaction, table: str, - keyvalues: Dict[str, Any], - updatevalues: Dict[str, Any], + keyvalues: dict[str, Any], + updatevalues: dict[str, Any], ) -> None: rowcount = cls.simple_update_txn(txn, table, keyvalues, updatevalues) @@ -2259,29 +2256,29 @@ def simple_update_one_txn( def simple_select_one_txn( txn: LoggingTransaction, table: str, - keyvalues: Dict[str, Any], + keyvalues: dict[str, Any], retcols: Collection[str], allow_none: Literal[False] = False, - ) -> Tuple[Any, ...]: ... + ) -> tuple[Any, ...]: ... @overload @staticmethod def simple_select_one_txn( txn: LoggingTransaction, table: str, - keyvalues: Dict[str, Any], + keyvalues: dict[str, Any], retcols: Collection[str], allow_none: Literal[True] = True, - ) -> Optional[Tuple[Any, ...]]: ... + ) -> Optional[tuple[Any, ...]]: ... @staticmethod def simple_select_one_txn( txn: LoggingTransaction, table: str, - keyvalues: Dict[str, Any], + keyvalues: dict[str, Any], retcols: Collection[str], allow_none: bool = False, - ) -> Optional[Tuple[Any, ...]]: + ) -> Optional[tuple[Any, ...]]: select_sql = "SELECT %s FROM %s" % (", ".join(retcols), table) if keyvalues: @@ -2302,7 +2299,7 @@ def simple_select_one_txn( return row async def simple_delete_one( - self, table: str, keyvalues: Dict[str, Any], desc: str = "simple_delete_one" + self, table: str, keyvalues: dict[str, Any], desc: str = "simple_delete_one" ) -> None: """Executes a DELETE query on the named table, expecting to delete a single row. @@ -2322,7 +2319,7 @@ async def simple_delete_one( @staticmethod def simple_delete_one_txn( - txn: LoggingTransaction, table: str, keyvalues: Dict[str, Any] + txn: LoggingTransaction, table: str, keyvalues: dict[str, Any] ) -> None: """Executes a DELETE query on the named table, expecting to delete a single row. @@ -2343,7 +2340,7 @@ def simple_delete_one_txn( raise StoreError(500, "More than one row matched (%s)" % (table,)) async def simple_delete( - self, table: str, keyvalues: Dict[str, Any], desc: str + self, table: str, keyvalues: dict[str, Any], desc: str ) -> int: """Executes a DELETE query on the named table. @@ -2363,7 +2360,7 @@ async def simple_delete( @staticmethod def simple_delete_txn( - txn: LoggingTransaction, table: str, keyvalues: Dict[str, Any] + txn: LoggingTransaction, table: str, keyvalues: dict[str, Any] ) -> int: """Executes a DELETE query on the named table. @@ -2389,7 +2386,7 @@ async def simple_delete_many( table: str, column: str, iterable: Collection[Any], - keyvalues: Dict[str, Any], + keyvalues: dict[str, Any], desc: str, ) -> int: """Executes a DELETE query on the named table. @@ -2423,7 +2420,7 @@ def simple_delete_many_txn( table: str, column: str, values: Collection[Any], - keyvalues: Dict[str, Any], + keyvalues: dict[str, Any], ) -> int: """Executes a DELETE query on the named table. 
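`simple_delete_one_txn` above enforces its exactly-one-row contract by checking the DB-API `rowcount` after the DELETE and raising `StoreError` otherwise. The shape of that check, sketched with plain exceptions standing in for StoreError:

    def delete_exactly_one(txn, table: str, keyvalues: dict[str, object]) -> None:
        clause = " AND ".join("%s = ?" % k for k in keyvalues)
        txn.execute("DELETE FROM %s WHERE %s" % (table, clause), list(keyvalues.values()))
        if txn.rowcount == 0:
            raise RuntimeError("No row to delete")  # StoreError(404, ...) in the store
        if txn.rowcount > 1:
            raise RuntimeError("More than one row matched (%s)" % (table,))  # StoreError(500, ...)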
@@ -2503,7 +2500,7 @@ def get_cache_dict( stream_column: str, max_value: int, limit: int = 100000, - ) -> Tuple[Dict[Any, int], int]: + ) -> tuple[dict[Any, int], int]: """Gets roughly the last N changes in the given stream table as a map from entity to the stream ID of the most recent change. @@ -2528,7 +2525,7 @@ def get_cache_dict( # The rows come out in reverse stream ID order, so we want to keep the # stream ID of the first row for each entity. - cache: Dict[Any, int] = {} + cache: dict[Any, int] = {} for row in txn: cache.setdefault(row[0], int(row[1])) @@ -2552,11 +2549,11 @@ def simple_select_list_paginate_txn( start: int, limit: int, retcols: Iterable[str], - filters: Optional[Dict[str, Any]] = None, - keyvalues: Optional[Dict[str, Any]] = None, - exclude_keyvalues: Optional[Dict[str, Any]] = None, + filters: Optional[dict[str, Any]] = None, + keyvalues: Optional[dict[str, Any]] = None, + exclude_keyvalues: Optional[dict[str, Any]] = None, order_direction: str = "ASC", - ) -> List[Tuple[Any, ...]]: + ) -> list[tuple[Any, ...]]: """ Executes a SELECT query on the named table with start and limit, of row numbers, which may return zero or number of rows from start to limit, @@ -2591,7 +2588,7 @@ def simple_select_list_paginate_txn( raise ValueError("order_direction must be one of 'ASC' or 'DESC'.") where_clause = "WHERE " if filters or keyvalues or exclude_keyvalues else "" - arg_list: List[Any] = [] + arg_list: list[Any] = [] if filters: where_clause += " AND ".join("%s LIKE ?" % (k,) for k in filters) arg_list += list(filters.values()) @@ -2621,7 +2618,7 @@ def make_in_list_sql_clause( iterable: Collection[Any], *, negative: bool = False, -) -> Tuple[str, list]: +) -> tuple[str, list]: """Returns an SQL clause that checks the given column is in the iterable. On SQLite this expands to `column IN (?, ?, ...)`, whereas on Postgres @@ -2661,24 +2658,24 @@ def make_in_list_sql_clause( @overload def make_tuple_in_list_sql_clause( database_engine: BaseDatabaseEngine, - columns: Tuple[str, str], - iterable: Collection[Tuple[Any, Any]], -) -> Tuple[str, list]: ... + columns: tuple[str, str], + iterable: Collection[tuple[Any, Any]], +) -> tuple[str, list]: ... @overload def make_tuple_in_list_sql_clause( database_engine: BaseDatabaseEngine, - columns: Tuple[str, str, str], - iterable: Collection[Tuple[Any, Any, Any]], -) -> Tuple[str, list]: ... + columns: tuple[str, str, str], + iterable: Collection[tuple[Any, Any, Any]], +) -> tuple[str, list]: ... def make_tuple_in_list_sql_clause( database_engine: BaseDatabaseEngine, - columns: Tuple[str, ...], - iterable: Collection[Tuple[Any, ...]], -) -> Tuple[str, list]: + columns: tuple[str, ...], + iterable: Collection[tuple[Any, ...]], +) -> tuple[str, list]: """Returns an SQL clause that checks the given tuple of columns is in the iterable. 
Args: @@ -2726,7 +2723,7 @@ def make_tuple_in_list_sql_clause( KV = TypeVar("KV") -def make_tuple_comparison_clause(keys: List[Tuple[str, KV]]) -> Tuple[str, List[KV]]: +def make_tuple_comparison_clause(keys: list[tuple[str, KV]]) -> tuple[str, list[KV]]: """Returns a tuple comparison SQL clause Builds a SQL clause that looks like "(a, b) > (?, ?)" diff --git a/synapse/storage/databases/__init__.py b/synapse/storage/databases/__init__.py index a4aba96686f..f145d21096a 100644 --- a/synapse/storage/databases/__init__.py +++ b/synapse/storage/databases/__init__.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Generic, List, Optional, Type, TypeVar +from typing import TYPE_CHECKING, Generic, Optional, TypeVar from synapse.metrics import SERVER_NAME_LABEL, LaterGauge from synapse.storage._base import SQLBaseStore @@ -61,13 +61,13 @@ class Databases(Generic[DataStoreT]): state_deletion """ - databases: List[DatabasePool] + databases: list[DatabasePool] main: "DataStore" # FIXME: https://github.com/matrix-org/synapse/issues/11165: actually an instance of `main_store_class` state: StateGroupDataStore persist_events: Optional[PersistEventsStore] state_deletion: StateDeletionDataStore - def __init__(self, main_store_class: Type[DataStoreT], hs: "HomeServer"): + def __init__(self, main_store_class: type[DataStoreT], hs: "HomeServer"): # Note we pass in the main store class here as workers use a different main # store. diff --git a/synapse/storage/databases/main/__init__.py b/synapse/storage/databases/main/__init__.py index 83b480adafe..9f23c1a4e04 100644 --- a/synapse/storage/databases/main/__init__.py +++ b/synapse/storage/databases/main/__init__.py @@ -20,7 +20,7 @@ # # import logging -from typing import TYPE_CHECKING, List, Optional, Tuple, Union, cast +from typing import TYPE_CHECKING, Optional, Union, cast import attr @@ -188,9 +188,9 @@ async def get_users_paginate( order_by: str = UserSortOrder.NAME.value, direction: Direction = Direction.FORWARDS, approved: bool = True, - not_user_types: Optional[List[str]] = None, + not_user_types: Optional[list[str]] = None, locked: bool = False, - ) -> Tuple[List[UserPaginateResponse], int]: + ) -> tuple[list[UserPaginateResponse], int]: """Function to retrieve a paginated list of users from users list. This will return a json list of users and the total number of users matching the filter criteria. @@ -216,7 +216,7 @@ async def get_users_paginate( def get_users_paginate_txn( txn: LoggingTransaction, - ) -> Tuple[List[UserPaginateResponse], int]: + ) -> tuple[list[UserPaginateResponse], int]: filters = [] args: list = [] @@ -311,7 +311,7 @@ def get_users_paginate_txn( """ sql = "SELECT COUNT(*) as total_users " + sql_base txn.execute(sql, args) - count = cast(Tuple[int], txn.fetchone())[0] + count = cast(tuple[int], txn.fetchone())[0] sql = f""" SELECT name, user_type, is_guest, admin, deactivated, shadow_banned, @@ -351,8 +351,8 @@ def get_users_paginate_txn( async def search_users( self, term: str - ) -> List[ - Tuple[str, Optional[str], Union[int, bool], Union[int, bool], Optional[str]] + ) -> list[ + tuple[str, Optional[str], Union[int, bool], Union[int, bool], Optional[str]] ]: """Function to search users list for one or more users with the matched term. 
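`make_tuple_comparison_clause` (top of this hunk) produces the "(a, b) > (?, ?)" form used for keyset pagination, for example when the devices-last-seen background update later in this patch walks `devices` ordered by `(user_id, device_id)`. A minimal version consistent with the signature shown in the diff:

    from typing import TypeVar

    KV = TypeVar("KV")

    def make_tuple_comparison_clause(keys: list[tuple[str, KV]]) -> tuple[str, list[KV]]:
        """Return an SQL clause like "(a,b) > (?,?)" plus its bind values."""
        clause = "(%s) > (%s)" % (
            ",".join(name for name, _ in keys),
            ",".join("?" for _ in keys),
        )
        return clause, [value for _, value in keys]

    clause, args = make_tuple_comparison_clause([("user_id", "@a:hs"), ("device_id", "D1")])
    assert clause == "(user_id,device_id) > (?,?)" and args == ["@a:hs", "D1"]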
@@ -366,8 +366,8 @@ async def search_users( def search_users( txn: LoggingTransaction, - ) -> List[ - Tuple[str, Optional[str], Union[int, bool], Union[int, bool], Optional[str]] + ) -> list[ + tuple[str, Optional[str], Union[int, bool], Union[int, bool], Optional[str]] ]: search_term = "%%" + term + "%%" @@ -379,8 +379,8 @@ def search_users( txn.execute(sql, (search_term,)) return cast( - List[ - Tuple[ + list[ + tuple[ str, Optional[str], Union[int, bool], diff --git a/synapse/storage/databases/main/account_data.py b/synapse/storage/databases/main/account_data.py index 16876e5461f..f1fb5fe1886 100644 --- a/synapse/storage/databases/main/account_data.py +++ b/synapse/storage/databases/main/account_data.py @@ -23,13 +23,9 @@ from typing import ( TYPE_CHECKING, Any, - Dict, - FrozenSet, Iterable, - List, Mapping, Optional, - Tuple, cast, ) @@ -140,7 +136,7 @@ async def get_global_account_data_for_user( def get_global_account_data_for_user( txn: LoggingTransaction, - ) -> Dict[str, JsonDict]: + ) -> dict[str, JsonDict]: # The 'content != '{}' condition below prevents us from using # `simple_select_list_txn` here, as it doesn't support conditions # other than 'equals'. @@ -185,7 +181,7 @@ async def get_room_account_data_for_user( def get_room_account_data_for_user_txn( txn: LoggingTransaction, - ) -> Dict[str, Dict[str, JsonMapping]]: + ) -> dict[str, dict[str, JsonMapping]]: # The 'content != '{}' condition below prevents us from using # `simple_select_list_txn` here, as it doesn't support conditions # other than 'equals'. @@ -202,7 +198,7 @@ def get_room_account_data_for_user_txn( txn.execute(sql, (user_id,)) - by_room: Dict[str, Dict[str, JsonMapping]] = {} + by_room: dict[str, dict[str, JsonMapping]] = {} for room_id, account_data_type, content in txn: room_data = by_room.setdefault(room_id, {}) @@ -281,9 +277,9 @@ async def get_account_data_for_room( def get_account_data_for_room_txn( txn: LoggingTransaction, - ) -> Dict[str, JsonMapping]: + ) -> dict[str, JsonMapping]: rows = cast( - List[Tuple[str, str]], + list[tuple[str, str]], self.db_pool.simple_select_list_txn( txn, table="room_account_data", @@ -338,7 +334,7 @@ def get_account_data_for_room_and_type_txn( async def get_updated_global_account_data( self, last_id: int, current_id: int, limit: int - ) -> List[Tuple[int, str, str]]: + ) -> list[tuple[int, str, str]]: """Get the global account_data that has changed, for the account_data stream Args: @@ -355,14 +351,14 @@ async def get_updated_global_account_data( def get_updated_global_account_data_txn( txn: LoggingTransaction, - ) -> List[Tuple[int, str, str]]: + ) -> list[tuple[int, str, str]]: sql = ( "SELECT stream_id, user_id, account_data_type" " FROM account_data WHERE ? < stream_id AND stream_id <= ?" " ORDER BY stream_id ASC LIMIT ?" 
) txn.execute(sql, (last_id, current_id, limit)) - return cast(List[Tuple[int, str, str]], txn.fetchall()) + return cast(list[tuple[int, str, str]], txn.fetchall()) return await self.db_pool.runInteraction( "get_updated_global_account_data", get_updated_global_account_data_txn @@ -370,7 +366,7 @@ def get_updated_global_account_data_txn( async def get_updated_room_account_data( self, last_id: int, current_id: int, limit: int - ) -> List[Tuple[int, str, str, str]]: + ) -> list[tuple[int, str, str, str]]: """Get the global account_data that has changed, for the account_data stream Args: @@ -387,14 +383,14 @@ async def get_updated_room_account_data( def get_updated_room_account_data_txn( txn: LoggingTransaction, - ) -> List[Tuple[int, str, str, str]]: + ) -> list[tuple[int, str, str, str]]: sql = ( "SELECT stream_id, user_id, room_id, account_data_type" " FROM room_account_data WHERE ? < stream_id AND stream_id <= ?" " ORDER BY stream_id ASC LIMIT ?" ) txn.execute(sql, (last_id, current_id, limit)) - return cast(List[Tuple[int, str, str, str]], txn.fetchall()) + return cast(list[tuple[int, str, str, str]], txn.fetchall()) return await self.db_pool.runInteraction( "get_updated_room_account_data", get_updated_room_account_data_txn @@ -402,7 +398,7 @@ def get_updated_room_account_data_txn( async def get_updated_global_account_data_for_user( self, user_id: str, stream_id: int - ) -> Dict[str, JsonMapping]: + ) -> dict[str, JsonMapping]: """Get all the global account_data that's changed for a user. Args: @@ -415,7 +411,7 @@ async def get_updated_global_account_data_for_user( def get_updated_global_account_data_for_user( txn: LoggingTransaction, - ) -> Dict[str, JsonMapping]: + ) -> dict[str, JsonMapping]: sql = """ SELECT account_data_type, content FROM account_data WHERE user_id = ? AND stream_id > ? @@ -437,7 +433,7 @@ def get_updated_global_account_data_for_user( async def get_updated_room_account_data_for_user( self, user_id: str, stream_id: int - ) -> Dict[str, Dict[str, JsonMapping]]: + ) -> dict[str, dict[str, JsonMapping]]: """Get all the room account_data that's changed for a user. Args: @@ -450,14 +446,14 @@ async def get_updated_room_account_data_for_user( def get_updated_room_account_data_for_user_txn( txn: LoggingTransaction, - ) -> Dict[str, Dict[str, JsonMapping]]: + ) -> dict[str, dict[str, JsonMapping]]: sql = """ SELECT room_id, account_data_type, content FROM room_account_data WHERE user_id = ? AND stream_id > ? """ txn.execute(sql, (user_id, stream_id)) - account_data_by_room: Dict[str, Dict[str, JsonMapping]] = {} + account_data_by_room: dict[str, dict[str, JsonMapping]] = {} for row in txn: room_account_data = account_data_by_room.setdefault(row[0], {}) room_account_data[row[1]] = db_to_json(row[2]) @@ -484,7 +480,7 @@ async def get_updated_room_account_data_for_user_for_room( room_id: str, from_stream_id: int, to_stream_id: int, - ) -> Dict[str, JsonMapping]: + ) -> dict[str, JsonMapping]: """Get the room account_data that's changed for a user in a room. (> `from_stream_id` and <= `to_stream_id`) @@ -501,14 +497,14 @@ async def get_updated_room_account_data_for_user_for_room( def get_updated_room_account_data_for_user_for_room_txn( txn: LoggingTransaction, - ) -> Dict[str, JsonMapping]: + ) -> dict[str, JsonMapping]: sql = """ SELECT account_data_type, content FROM room_account_data WHERE user_id = ? AND room_id = ? AND stream_id > ? AND stream_id <= ? 
""" txn.execute(sql, (user_id, room_id, from_stream_id, to_stream_id)) - room_account_data: Dict[str, JsonMapping] = {} + room_account_data: dict[str, JsonMapping] = {} for row in txn: room_account_data[row[0]] = db_to_json(row[1]) @@ -526,7 +522,7 @@ def get_updated_room_account_data_for_user_for_room_txn( ) @cached(max_entries=5000, iterable=True) - async def ignored_by(self, user_id: str) -> FrozenSet[str]: + async def ignored_by(self, user_id: str) -> frozenset[str]: """ Get users which ignore the given user. @@ -546,7 +542,7 @@ async def ignored_by(self, user_id: str) -> FrozenSet[str]: ) @cached(max_entries=5000, iterable=True) - async def ignored_users(self, user_id: str) -> FrozenSet[str]: + async def ignored_users(self, user_id: str) -> frozenset[str]: """ Get users which the given user ignores. diff --git a/synapse/storage/databases/main/appservice.py b/synapse/storage/databases/main/appservice.py index 97dbbb14935..7558672905f 100644 --- a/synapse/storage/databases/main/appservice.py +++ b/synapse/storage/databases/main/appservice.py @@ -20,7 +20,7 @@ # import logging import re -from typing import TYPE_CHECKING, List, Optional, Pattern, Sequence, Tuple, cast +from typing import TYPE_CHECKING, Optional, Pattern, Sequence, cast from synapse.appservice import ( ApplicationService, @@ -52,7 +52,7 @@ def _make_exclusive_regex( - services_cache: List[ApplicationService], + services_cache: list[ApplicationService], ) -> Optional[Pattern]: # We precompile a regex constructed from all the regexes that the AS's # have registered for exclusive users. @@ -93,7 +93,7 @@ def get_max_as_txn_id(txn: Cursor) -> int: txn.execute( "SELECT COALESCE(max(txn_id), 0) FROM application_services_txns" ) - return cast(Tuple[int], txn.fetchone())[0] + return cast(tuple[int], txn.fetchone())[0] self._as_txn_seq_gen = build_sequence_generator( db_conn, @@ -106,7 +106,7 @@ def get_max_as_txn_id(txn: Cursor) -> int: super().__init__(database, db_conn, hs) - def get_app_services(self) -> List[ApplicationService]: + def get_app_services(self) -> list[ApplicationService]: return self.services_cache def get_if_app_services_interested_in_user(self, user_id: str) -> bool: @@ -199,7 +199,7 @@ class ApplicationServiceTransactionWorkerStore( ): async def get_appservices_by_state( self, state: ApplicationServiceState - ) -> List[ApplicationService]: + ) -> list[ApplicationService]: """Get a list of application services based on their state. Args: @@ -208,7 +208,7 @@ async def get_appservices_by_state( A list of ApplicationServices, which may be empty. 
""" results = cast( - List[Tuple[str]], + list[tuple[str]], await self.db_pool.simple_select_list( table="application_services_state", keyvalues={"state": state.value}, @@ -273,8 +273,8 @@ async def create_appservice_txn( self, service: ApplicationService, events: Sequence[EventBase], - ephemeral: List[JsonMapping], - to_device_messages: List[JsonMapping], + ephemeral: list[JsonMapping], + to_device_messages: list[JsonMapping], one_time_keys_count: TransactionOneTimeKeysCount, unused_fallback_keys: TransactionUnusedFallbackKeys, device_list_summary: DeviceListUpdates, @@ -358,7 +358,7 @@ async def get_oldest_unsent_txn( def _get_oldest_unsent_txn( txn: LoggingTransaction, - ) -> Optional[Tuple[int, str]]: + ) -> Optional[tuple[int, str]]: # Monotonically increasing txn ids, so just select the smallest # one in the txns table (we delete them when they are sent) txn.execute( @@ -366,7 +366,7 @@ def _get_oldest_unsent_txn( " ORDER BY txn_id ASC LIMIT 1", (service.id,), ) - return cast(Optional[Tuple[int, str]], txn.fetchone()) + return cast(Optional[tuple[int, str]], txn.fetchone()) entry = await self.db_pool.runInteraction( "get_oldest_unsent_appservice_txn", _get_oldest_unsent_txn diff --git a/synapse/storage/databases/main/cache.py b/synapse/storage/databases/main/cache.py index 674c6b921ee..5a96510b138 100644 --- a/synapse/storage/databases/main/cache.py +++ b/synapse/storage/databases/main/cache.py @@ -23,7 +23,7 @@ import itertools import json import logging -from typing import TYPE_CHECKING, Any, Collection, Iterable, List, Optional, Tuple +from typing import TYPE_CHECKING, Any, Collection, Iterable, Optional from synapse.api.constants import EventTypes from synapse.config._base import Config @@ -145,7 +145,7 @@ def __init__( async def get_all_updated_caches( self, instance_name: str, last_id: int, current_id: int, limit: int - ) -> Tuple[List[Tuple[int, tuple]], int, bool]: + ) -> tuple[list[tuple[int, tuple]], int, bool]: """Get updates for caches replication stream. Args: @@ -172,7 +172,7 @@ async def get_all_updated_caches( def get_all_updated_caches_txn( txn: LoggingTransaction, - ) -> Tuple[List[Tuple[int, tuple]], int, bool]: + ) -> tuple[list[tuple[int, tuple]], int, bool]: # We purposefully don't bound by the current token, as we want to # send across cache invalidations as quickly as possible. Cache # invalidations are idempotent, so duplicates are fine. @@ -597,7 +597,7 @@ def _invalidate_caches_for_room(self, room_id: str) -> None: self._invalidate_state_caches_all(room_id) async def invalidate_cache_and_stream( - self, cache_name: str, keys: Tuple[Any, ...] + self, cache_name: str, keys: tuple[Any, ...] ) -> None: """Invalidates the cache and adds it to the cache stream so other workers will know to invalidate their caches. @@ -620,7 +620,7 @@ def _invalidate_cache_and_stream( self, txn: LoggingTransaction, cache_func: CachedFunction, - keys: Tuple[Any, ...], + keys: tuple[Any, ...], ) -> None: """Invalidates the cache and adds it to the cache stream so other workers will know to invalidate their caches. @@ -636,7 +636,7 @@ def _invalidate_cache_and_stream_bulk( self, txn: LoggingTransaction, cache_func: CachedFunction, - key_tuples: Collection[Tuple[Any, ...]], + key_tuples: Collection[tuple[Any, ...]], ) -> None: """A bulk version of _invalidate_cache_and_stream. 
@@ -759,7 +759,7 @@ def _send_invalidation_to_replication_bulk( self, txn: LoggingTransaction, cache_name: str, - key_tuples: Collection[Tuple[Any, ...]], + key_tuples: Collection[tuple[Any, ...]], ) -> None: """Announce the invalidation of multiple (but not all) cache entries. diff --git a/synapse/storage/databases/main/client_ips.py b/synapse/storage/databases/main/client_ips.py index dc6ab99a6c7..1033d85a40b 100644 --- a/synapse/storage/databases/main/client_ips.py +++ b/synapse/storage/databases/main/client_ips.py @@ -22,11 +22,8 @@ import logging from typing import ( TYPE_CHECKING, - Dict, - List, Mapping, Optional, - Tuple, TypedDict, Union, cast, @@ -190,7 +187,7 @@ def get_last_seen(txn: LoggingTransaction) -> Optional[int]: """, (begin_last_seen, batch_size), ) - row = cast(Optional[Tuple[int]], txn.fetchone()) + row = cast(Optional[tuple[int]], txn.fetchone()) if row: return row[0] else: @@ -222,7 +219,7 @@ def remove(txn: LoggingTransaction) -> None: # Define the search space, which requires handling the last batch in # a different way - args: Tuple[int, ...] + args: tuple[int, ...] if last: clause = "? <= last_seen" args = (begin_last_seen,) @@ -251,7 +248,7 @@ def remove(txn: LoggingTransaction) -> None: args, ) res = cast( - List[Tuple[str, str, str, Optional[str], str, int, int]], txn.fetchall() + list[tuple[str, str, str, Optional[str], str, int, int]], txn.fetchall() ) # We've got some duplicates @@ -361,7 +358,7 @@ def _devices_last_seen_update_txn(txn: LoggingTransaction) -> int: # we'll just end up updating the same device row multiple # times, which is fine. - where_args: List[Union[str, int]] + where_args: list[Union[str, int]] where_clause, where_args = make_tuple_comparison_clause( [("user_id", last_user_id), ("device_id", last_device_id)], ) @@ -383,7 +380,7 @@ def _devices_last_seen_update_txn(txn: LoggingTransaction) -> int: """ % {"where_clause": where_clause} txn.execute(sql, where_args + [batch_size]) - rows = cast(List[Tuple[int, str, str, str, str]], txn.fetchall()) + rows = cast(list[tuple[int, str, str, str, str]], txn.fetchall()) if not rows: return 0 @@ -434,7 +431,7 @@ def __init__( self.user_ips_max_age = hs.config.server.user_ips_max_age # (user_id, access_token, ip,) -> last_seen - self.client_ip_last_seen = LruCache[Tuple[str, str, str], int]( + self.client_ip_last_seen = LruCache[tuple[str, str, str], int]( cache_name="client_ip_last_seen", server_name=self.server_name, max_size=50000, @@ -449,8 +446,8 @@ def __init__( # tables. # (user_id, access_token, ip,) -> (user_agent, device_id, last_seen) - self._batch_row_update: Dict[ - Tuple[str, str, str], Tuple[str, Optional[str], int] + self._batch_row_update: dict[ + tuple[str, str, str], tuple[str, Optional[str], int] ] = {} self.clock.looping_call(self._update_client_ips_batch, 5 * 1000) @@ -504,7 +501,7 @@ def _prune_old_user_ips_txn(txn: LoggingTransaction) -> None: async def _get_last_client_ip_by_device_from_database( self, user_id: str, device_id: Optional[str] - ) -> Dict[Tuple[str, str], DeviceLastConnectionInfo]: + ) -> dict[tuple[str, str], DeviceLastConnectionInfo]: """For each device_id listed, give the user_ip it was last seen on. The result might be slightly out of date as client IPs are inserted in batches. 
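The client-IP hunks above show why reads from this table are documented as "slightly out of date": writes are coalesced in `_batch_row_update`, keyed by the `(user_id, access_token, ip)` tuple, and a looping call flushes the batch every five seconds. The core of that coalescing, as a reduced sketch:

    import time
    from typing import Optional

    # (user_id, access_token, ip) -> (user_agent, device_id, last_seen_ms)
    batch: dict[tuple[str, str, str], tuple[str, Optional[str], int]] = {}

    def saw_client_ip(user_id: str, token: str, ip: str, ua: str, device_id: Optional[str]) -> None:
        # Repeat hits on the same key overwrite the pending entry, so at most
        # one row per (user, token, ip) reaches the database per flush.
        batch[(user_id, token, ip)] = (ua, device_id, int(time.time() * 1000))

    def flush() -> None:
        pending = dict(batch)   # snapshot then clear, as _update_client_ips_batch does
        batch.clear()
        for key, value in pending.items():
            pass                # upsert into user_ips/devices, one txn for the whole batch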
@@ -522,7 +519,7 @@ async def _get_last_client_ip_by_device_from_database( keyvalues["device_id"] = device_id res = cast( - List[Tuple[str, Optional[str], Optional[str], str, Optional[int]]], + list[tuple[str, Optional[str], Optional[str], str, Optional[int]]], await self.db_pool.simple_select_list( table="devices", keyvalues=keyvalues, @@ -543,7 +540,7 @@ async def _get_last_client_ip_by_device_from_database( async def _get_user_ip_and_agents_from_database( self, user: UserID, since_ts: int = 0 - ) -> List[LastConnectionInfo]: + ) -> list[LastConnectionInfo]: """Fetch the IPs and user agents for a user since the given timestamp. The result might be slightly out of date as client IPs are inserted in batches. @@ -567,7 +564,7 @@ async def _get_user_ip_and_agents_from_database( """ user_id = user.to_string() - def get_recent(txn: LoggingTransaction) -> List[Tuple[str, str, str, int]]: + def get_recent(txn: LoggingTransaction) -> list[tuple[str, str, str, int]]: txn.execute( """ SELECT access_token, ip, user_agent, last_seen FROM user_ips @@ -577,7 +574,7 @@ def get_recent(txn: LoggingTransaction) -> List[Tuple[str, str, str, int]]: """, (since_ts, user_id), ) - return cast(List[Tuple[str, str, str, int]], txn.fetchall()) + return cast(list[tuple[str, str, str, int]], txn.fetchall()) rows = await self.db_pool.runInteraction( desc="get_user_ip_and_agents", func=get_recent @@ -673,7 +670,7 @@ async def _update_client_ips_batch(self) -> None: def _update_client_ips_batch_txn( self, txn: LoggingTransaction, - to_update: Mapping[Tuple[str, str, str], Tuple[str, Optional[str], int]], + to_update: Mapping[tuple[str, str, str], tuple[str, Optional[str], int]], ) -> None: assert self._update_on_this_worker, ( "This worker is not designated to update client IPs" @@ -719,7 +716,7 @@ def _update_client_ips_batch_txn( async def get_last_client_ip_by_device( self, user_id: str, device_id: Optional[str] - ) -> Dict[Tuple[str, str], DeviceLastConnectionInfo]: + ) -> dict[tuple[str, str], DeviceLastConnectionInfo]: """For each device_id listed, give the user_ip it was last seen on Args: @@ -759,7 +756,7 @@ async def get_last_client_ip_by_device( async def get_user_ip_and_agents( self, user: UserID, since_ts: int = 0 - ) -> List[LastConnectionInfo]: + ) -> list[LastConnectionInfo]: """Fetch the IPs and user agents for a user since the given timestamp. Args: @@ -786,7 +783,7 @@ async def get_user_ip_and_agents( # the result return rows_from_db - results: Dict[Tuple[str, str], LastConnectionInfo] = { + results: dict[tuple[str, str], LastConnectionInfo] = { (connection["access_token"], connection["ip"]): connection for connection in rows_from_db } diff --git a/synapse/storage/databases/main/delayed_events.py b/synapse/storage/databases/main/delayed_events.py index 78f55b983f0..33101327f50 100644 --- a/synapse/storage/databases/main/delayed_events.py +++ b/synapse/storage/databases/main/delayed_events.py @@ -13,7 +13,7 @@ # import logging -from typing import List, NewType, Optional, Tuple +from typing import NewType, Optional import attr @@ -93,7 +93,7 @@ async def add_delayed_event( origin_server_ts: Optional[int], content: JsonDict, delay: int, - ) -> Tuple[DelayID, Timestamp]: + ) -> tuple[DelayID, Timestamp]: """ Inserts a new delayed event in the DB. 
@@ -201,7 +201,7 @@ def _get_count_of_delayed_events(txn: LoggingTransaction) -> int: async def get_all_delayed_events_for_user( self, user_localpart: str, - ) -> List[JsonDict]: + ) -> list[JsonDict]: """Returns all pending delayed events owned by the given user.""" # TODO: Support Pagination stream API ("next_batch" field) rows = await self.db_pool.execute( @@ -236,8 +236,8 @@ async def get_all_delayed_events_for_user( async def process_timeout_delayed_events( self, current_ts: Timestamp - ) -> Tuple[ - List[DelayedEventDetails], + ) -> tuple[ + list[DelayedEventDetails], Optional[Timestamp], ]: """ @@ -250,8 +250,8 @@ async def process_timeout_delayed_events( def process_timeout_delayed_events_txn( txn: LoggingTransaction, - ) -> Tuple[ - List[DelayedEventDetails], + ) -> tuple[ + list[DelayedEventDetails], Optional[Timestamp], ]: sql_cols = ", ".join( @@ -322,7 +322,7 @@ async def process_target_delayed_event( *, delay_id: str, user_localpart: str, - ) -> Tuple[ + ) -> tuple[ EventDetails, Optional[Timestamp], ]: @@ -343,7 +343,7 @@ async def process_target_delayed_event( def process_target_delayed_event_txn( txn: LoggingTransaction, - ) -> Tuple[ + ) -> tuple[ EventDetails, Optional[Timestamp], ]: diff --git a/synapse/storage/databases/main/deviceinbox.py b/synapse/storage/databases/main/deviceinbox.py index a66e11f738c..49a82b98d3c 100644 --- a/synapse/storage/databases/main/deviceinbox.py +++ b/synapse/storage/databases/main/deviceinbox.py @@ -24,12 +24,8 @@ from typing import ( TYPE_CHECKING, Collection, - Dict, Iterable, - List, Optional, - Set, - Tuple, cast, ) @@ -92,7 +88,7 @@ def __init__( # Map of (user_id, device_id) to the last stream_id that has been # deleted up to. This is so that we can no op deletions. self._last_device_delete_cache: ExpiringCache[ - Tuple[str, Optional[str]], int + tuple[str, Optional[str]], int ] = ExpiringCache( cache_name="last_device_delete_cache", server_name=self.server_name, @@ -203,7 +199,7 @@ async def get_messages_for_user_devices( user_ids: Collection[str], from_stream_id: int, to_stream_id: int, - ) -> Dict[Tuple[str, str], List[JsonDict]]: + ) -> dict[tuple[str, str], list[JsonDict]]: """ Retrieve to-device messages for a given set of users. @@ -242,7 +238,7 @@ async def get_messages_for_device( from_stream_id: int, to_stream_id: int, limit: int = 100, - ) -> Tuple[List[JsonDict], int]: + ) -> tuple[list[JsonDict], int]: """ Retrieve to-device messages for a single user device. @@ -271,7 +267,7 @@ async def get_messages_for_device( def get_device_messages_txn( txn: LoggingTransaction, - ) -> Tuple[List[JsonDict], int]: + ) -> tuple[list[JsonDict], int]: sql = """ SELECT stream_id, message_json FROM device_inbox WHERE user_id = ? AND device_id = ? @@ -284,7 +280,7 @@ def get_device_messages_txn( # Create and fill a dictionary of (user ID, device ID) -> list of messages # intended for each device. last_processed_stream_pos = to_stream_id - to_device_messages: List[JsonDict] = [] + to_device_messages: list[JsonDict] = [] rowcount = 0 for row in txn: rowcount += 1 @@ -331,7 +327,7 @@ async def _get_device_messages( user_ids: Collection[str], from_stream_id: int, to_stream_id: int, - ) -> Tuple[Dict[Tuple[str, str], List[JsonDict]], int]: + ) -> tuple[dict[tuple[str, str], list[JsonDict]], int]: """ Retrieve pending to-device messages for a collection of user devices. 
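`_last_device_delete_cache` near the top of the deviceinbox hunk exists purely to no-op repeat deletions: it remembers, per `(user_id, device_id)`, how far the inbox has already been deleted, so a second request covering the same range never reaches the database. The shape of that short-circuit, with a plain dict standing in for the ExpiringCache:

    from typing import Optional

    last_deleted_up_to: dict[tuple[str, Optional[str]], int] = {}

    def delete_inbox_up_to(user_id: str, device_id: Optional[str], up_to_stream_id: int) -> bool:
        """Return True only if a DELETE actually needs to run."""
        key = (user_id, device_id)
        if last_deleted_up_to.get(key, 0) >= up_to_stream_id:
            return False  # already deleted at least this far: no-op
        # ... DELETE FROM device_inbox WHERE ... AND stream_id <= up_to_stream_id ...
        last_deleted_up_to[key] = up_to_stream_id
        return True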
@@ -363,7 +359,7 @@ async def _get_device_messages( logger.warning("No users provided upon querying for device IDs") return {}, to_stream_id - user_ids_to_query: Set[str] = set() + user_ids_to_query: set[str] = set() # Determine which users have devices with pending messages for user_id in user_ids: @@ -378,7 +374,7 @@ async def _get_device_messages( def get_device_messages_txn( txn: LoggingTransaction, - ) -> Tuple[Dict[Tuple[str, str], List[JsonDict]], int]: + ) -> tuple[dict[tuple[str, str], list[JsonDict]], int]: # Build a query to select messages from any of the given devices that # are between the given stream id bounds. @@ -389,7 +385,7 @@ def get_device_messages_txn( # since device_inbox has an index on `(user_id, device_id, stream_id)` user_device_dicts = cast( - List[Tuple[str]], + list[tuple[str]], self.db_pool.simple_select_many_txn( txn, table="devices", @@ -436,7 +432,7 @@ def get_device_messages_txn( # Create and fill a dictionary of (user ID, device ID) -> list of messages # intended for each device. - recipient_device_to_messages: Dict[Tuple[str, str], List[JsonDict]] = {} + recipient_device_to_messages: dict[tuple[str, str], list[JsonDict]] = {} rowcount = 0 for row in txn: rowcount += 1 @@ -535,7 +531,7 @@ async def delete_messages_for_device_between( from_stream_id: Optional[int], to_stream_id: int, limit: int, - ) -> Tuple[Optional[int], int]: + ) -> tuple[Optional[int], int]: """Delete N device messages between the stream IDs, returning the highest stream ID deleted (or None if all messages in the range have been deleted) and the number of messages deleted. @@ -555,7 +551,7 @@ async def delete_messages_for_device_between( def delete_messages_for_device_between_txn( txn: LoggingTransaction, - ) -> Tuple[Optional[int], int]: + ) -> tuple[Optional[int], int]: txn.execute( """ SELECT MAX(stream_id) FROM ( @@ -598,7 +594,7 @@ def delete_messages_for_device_between_txn( @trace async def get_new_device_msgs_for_remote( self, destination: str, last_stream_id: int, current_stream_id: int, limit: int - ) -> Tuple[List[JsonDict], int]: + ) -> tuple[list[JsonDict], int]: """ Args: destination: The name of the remote server. @@ -628,7 +624,7 @@ async def get_new_device_msgs_for_remote( @trace def get_new_messages_for_remote_destination_txn( txn: LoggingTransaction, - ) -> Tuple[List[JsonDict], int]: + ) -> tuple[list[JsonDict], int]: sql = ( "SELECT stream_id, messages_json FROM device_federation_outbox" " WHERE destination = ?" @@ -684,7 +680,7 @@ def delete_messages_for_remote_destination_txn(txn: LoggingTransaction) -> None: async def get_all_new_device_messages( self, instance_name: str, last_id: int, current_id: int, limit: int - ) -> Tuple[List[Tuple[int, tuple]], int, bool]: + ) -> tuple[list[tuple[int, tuple]], int, bool]: """Get updates for to device replication stream. Args: @@ -711,7 +707,7 @@ async def get_all_new_device_messages( def get_all_new_device_messages_txn( txn: LoggingTransaction, - ) -> Tuple[List[Tuple[int, tuple]], int, bool]: + ) -> tuple[list[tuple[int, tuple]], int, bool]: # We limit like this as we might have multiple rows per stream_id, and # we want to make sure we always get all entries for any stream_id # we return. 
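The comment closing this chunk ("we always get all entries for any stream_id we return") states a constraint every replication-stream fetcher here honours: a stream_id that spans several rows must be returned whole or not at all. One way to get that, echoing the MAX-over-a-LIMITed-subquery trick visible earlier in this file (table, columns, and exact query are illustrative, not necessarily Synapse's):

    def get_stream_updates(txn, last_id: int, current_id: int, limit: int):
        # Pick an upper bound from the first `limit` stream ids...
        txn.execute(
            "SELECT max(stream_id) FROM ("
            " SELECT stream_id FROM device_inbox"
            " WHERE ? < stream_id AND stream_id <= ?"
            " ORDER BY stream_id LIMIT ?"
            ") AS t",
            (last_id, current_id, limit),
        )
        upper_bound = txn.fetchone()[0] or current_id
        # ...then take *every* row up to it, so the boundary stream_id is
        # complete even when LIMIT split it mid-way.
        txn.execute(
            "SELECT stream_id, user_id, device_id FROM device_inbox"
            " WHERE ? < stream_id AND stream_id <= ?",
            (last_id, upper_bound),
        )
        return txn.fetchall(), upper_bound, upper_bound < current_id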
@@ -746,8 +742,8 @@ def get_all_new_device_messages_txn( @trace async def add_messages_to_device_inbox( self, - local_messages_by_user_then_device: Dict[str, Dict[str, JsonDict]], - remote_messages_by_destination: Dict[str, JsonDict], + local_messages_by_user_then_device: dict[str, dict[str, JsonDict]], + remote_messages_by_destination: dict[str, JsonDict], ) -> int: """Used to send messages from this server. @@ -844,7 +840,7 @@ async def add_messages_from_remote_to_device_inbox( self, origin: str, message_id: str, - local_messages_by_user_then_device: Dict[str, Dict[str, JsonDict]], + local_messages_by_user_then_device: dict[str, dict[str, JsonDict]], ) -> int: assert self._can_write_to_device @@ -898,7 +894,7 @@ def _add_messages_to_local_device_inbox_txn( self, txn: LoggingTransaction, stream_id: int, - messages_by_user_then_device: Dict[str, Dict[str, JsonDict]], + messages_by_user_then_device: dict[str, dict[str, JsonDict]], ) -> None: assert self._can_write_to_device @@ -929,7 +925,7 @@ def _add_messages_to_local_device_inbox_txn( # We exclude hidden devices (such as cross-signing keys) here as they are # not expected to receive to-device messages. rows = cast( - List[Tuple[str]], + list[tuple[str]], self.db_pool.simple_select_many_txn( txn, table="devices", @@ -1055,7 +1051,7 @@ def get_devices_with_messages_txn( txn.execute(sql, args) return {row[0] for row in txn} - results: Set[str] = set() + results: set[str] = set() for batch_device_ids in batch_iter(device_ids, 1000): batch_results = await self.db_pool.runInteraction( "get_devices_with_messages", @@ -1143,7 +1139,7 @@ async def _remove_dead_devices_from_device_inbox( def _remove_dead_devices_from_device_inbox_txn( txn: LoggingTransaction, - ) -> Tuple[int, bool]: + ) -> tuple[int, bool]: if "max_stream_id" in progress: max_stream_id = progress["max_stream_id"] else: @@ -1151,7 +1147,7 @@ def _remove_dead_devices_from_device_inbox_txn( # There's a type mismatch here between how we want to type the row and # what fetchone says it returns, but we silence it because we know that # res can't be None. - res = cast(Tuple[Optional[int]], txn.fetchone()) + res = cast(tuple[Optional[int]], txn.fetchone()) if res[0] is None: # this can only happen if the `device_inbox` table is empty, in which # case we have no work to do. @@ -1214,7 +1210,7 @@ def _cleanup_device_federation_outbox_txn( max_stream_id = progress["max_stream_id"] else: txn.execute("SELECT max(stream_id) FROM device_federation_outbox") - res = cast(Tuple[Optional[int]], txn.fetchone()) + res = cast(tuple[Optional[int]], txn.fetchone()) if res[0] is None: # this can only happen if the `device_inbox` table is empty, in which # case we have no work to do. 
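Both cleanup background updates at the end of the deviceinbox file resume the same way: take `max_stream_id` from the stored `progress` dict if present, otherwise seed it from the table, and treat an empty table as "done". That seeding step in isolation (the surrounding batch loop is omitted):

    from typing import Optional

    def seed_progress(txn, progress: dict, table: str) -> Optional[int]:
        """Resume point for a cleanup update: stored progress, else the table max."""
        if "max_stream_id" in progress:
            return progress["max_stream_id"]
        txn.execute("SELECT max(stream_id) FROM %s" % (table,))
        (current_max,) = txn.fetchone()
        return current_max  # None means the table is empty, so there is no work to do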
diff --git a/synapse/storage/databases/main/devices.py b/synapse/storage/databases/main/devices.py index d4b9ce0ea0a..bf5e05ea513 100644 --- a/synapse/storage/databases/main/devices.py +++ b/synapse/storage/databases/main/devices.py @@ -24,13 +24,9 @@ TYPE_CHECKING, Any, Collection, - Dict, Iterable, - List, Mapping, Optional, - Set, - Tuple, cast, ) @@ -284,7 +280,7 @@ def count_devices_by_users_txn( ) txn.execute(sql + clause, args) - return cast(Tuple[int], txn.fetchone())[0] + return cast(tuple[int], txn.fetchone())[0] if not user_ids: return 0 @@ -381,7 +377,7 @@ async def delete_devices(self, user_id: str, device_ids: StrCollection) -> None: device_ids: The IDs of the devices to delete """ - def _delete_devices_txn(txn: LoggingTransaction, device_ids: List[str]) -> None: + def _delete_devices_txn(txn: LoggingTransaction, device_ids: list[str]) -> None: self.db_pool.simple_delete_many_txn( txn, table="devices", @@ -497,7 +493,7 @@ async def get_device( async def get_devices_by_user( self, user_id: str - ) -> Dict[str, Dict[str, Optional[str]]]: + ) -> dict[str, dict[str, Optional[str]]]: """Retrieve all of a user's registered devices. Only returns devices that are not marked as hidden. @@ -508,7 +504,7 @@ async def get_devices_by_user( and "display_name" for each device. Display name may be null. """ devices = cast( - List[Tuple[str, str, Optional[str]]], + list[tuple[str, str, Optional[str]]], await self.db_pool.simple_select_list( table="devices", keyvalues={"user_id": user_id, "hidden": False}, @@ -524,7 +520,7 @@ async def get_devices_by_user( async def get_devices_by_auth_provider_session_id( self, auth_provider_id: str, auth_provider_session_id: str - ) -> List[Tuple[str, str]]: + ) -> list[tuple[str, str]]: """Retrieve the list of devices associated with a SSO IdP session ID. Args: @@ -534,7 +530,7 @@ async def get_devices_by_auth_provider_session_id( A list of dicts containing the device_id and the user_id of each device """ return cast( - List[Tuple[str, str]], + list[tuple[str, str]], await self.db_pool.simple_select_list( table="device_auth_providers", keyvalues={ @@ -549,7 +545,7 @@ async def get_devices_by_auth_provider_session_id( @trace async def get_device_updates_by_remote( self, destination: str, from_stream_id: int, limit: int - ) -> Tuple[int, List[Tuple[str, JsonDict]]]: + ) -> tuple[int, list[tuple[str, JsonDict]]]: """Get a stream of device updates to send to the given remote server. Args: @@ -659,8 +655,8 @@ async def get_device_updates_by_remote( last_processed_stream_id = from_stream_id # A map of (user ID, device ID) to (stream ID, context). - query_map: Dict[Tuple[str, str], Tuple[int, Optional[str]]] = {} - cross_signing_keys_by_user: Dict[str, Dict[str, object]] = {} + query_map: dict[tuple[str, str], tuple[int, Optional[str]]] = {} + cross_signing_keys_by_user: dict[str, dict[str, object]] = {} for user_id, device_id, update_stream_id, update_context in updates: # Calculate the remaining length budget. 
# Note that, for now, each entry in `cross_signing_keys_by_user` @@ -766,7 +762,7 @@ def _get_device_updates_by_remote_txn( from_stream_id: int, now_stream_id: int, limit: int, - ) -> List[Tuple[str, str, int, Optional[str]]]: + ) -> list[tuple[str, str, int, Optional[str]]]: """Return device update information for a given remote destination Args: @@ -792,14 +788,14 @@ def _get_device_updates_by_remote_txn( """ txn.execute(sql, (destination, from_stream_id, now_stream_id, limit)) - return cast(List[Tuple[str, str, int, Optional[str]]], txn.fetchall()) + return cast(list[tuple[str, str, int, Optional[str]]], txn.fetchall()) async def _get_device_update_edus_by_remote( self, destination: str, from_stream_id: int, - query_map: Dict[Tuple[str, str], Tuple[int, Optional[str]]], - ) -> List[Tuple[str, dict]]: + query_map: dict[tuple[str, str], tuple[int, Optional[str]]], + ) -> list[tuple[str, dict]]: """Returns a list of device update EDUs as well as E2EE keys Args: @@ -933,7 +929,7 @@ def _mark_as_sent_devices_by_remote_txn( txn.execute(sql, (destination, stream_id)) async def add_user_signature_change_to_streams( - self, from_user_id: str, user_ids: List[str] + self, from_user_id: str, user_ids: list[str] ) -> int: """Persist that a user has made new signatures @@ -962,7 +958,7 @@ def _add_user_signature_change_txn( self, txn: LoggingTransaction, from_user_id: str, - user_ids: List[str], + user_ids: list[str], stream_id: int, ) -> None: txn.call_after( @@ -984,8 +980,8 @@ def _add_user_signature_change_txn( @trace @cancellable async def get_user_devices_from_cache( - self, user_ids: Set[str], user_and_device_ids: List[Tuple[str, str]] - ) -> Tuple[Set[str], Dict[str, Mapping[str, JsonMapping]]]: + self, user_ids: set[str], user_and_device_ids: list[tuple[str, str]] + ) -> tuple[set[str], dict[str, Mapping[str, JsonMapping]]]: """Get the devices (and keys if any) for remote users from the cache. Args: @@ -1005,13 +1001,13 @@ async def get_user_devices_from_cache( user_ids_not_in_cache = unique_user_ids - user_ids_in_cache # First fetch all the users which all devices are to be returned. - results: Dict[str, Mapping[str, JsonMapping]] = {} + results: dict[str, Mapping[str, JsonMapping]] = {} for user_id in user_ids: if user_id in user_ids_in_cache: results[user_id] = await self.get_cached_devices_for_user(user_id) # Then fetch all device-specific requests, but skip users we've already # fetched all devices for. 
- device_specific_results: Dict[str, Dict[str, JsonMapping]] = {} + device_specific_results: dict[str, dict[str, JsonMapping]] = {} for user_id, device_id in user_and_device_ids: if user_id in user_ids_in_cache and user_id not in user_ids: device = await self._get_cached_user_device(user_id, device_id) @@ -1025,7 +1021,7 @@ async def get_user_devices_from_cache( async def get_users_whose_devices_are_cached( self, user_ids: StrCollection - ) -> Set[str]: + ) -> set[str]: """Checks which of the given users we have cached the devices for.""" user_map = await self.get_device_list_last_stream_id_for_remotes(user_ids) @@ -1056,7 +1052,7 @@ async def get_cached_devices_for_user( self, user_id: str ) -> Mapping[str, JsonMapping]: devices = cast( - List[Tuple[str, str]], + list[tuple[str, str]], await self.db_pool.simple_select_list( table="device_lists_remote_cache", keyvalues={"user_id": user_id}, @@ -1071,7 +1067,7 @@ async def get_all_devices_changed( self, from_key: MultiWriterStreamToken, to_key: MultiWriterStreamToken, - ) -> Set[str]: + ) -> set[str]: """Get all users whose devices have changed in the given range. Args: @@ -1131,7 +1127,7 @@ async def get_users_whose_devices_changed( from_key: MultiWriterStreamToken, user_ids: Collection[str], to_key: Optional[MultiWriterStreamToken] = None, - ) -> Set[str]: + ) -> set[str]: """Get set of users whose devices have changed since `from_key` that are in the given list of user_ids. @@ -1164,14 +1160,14 @@ def _get_users_whose_devices_changed_txn( txn: LoggingTransaction, from_key: MultiWriterStreamToken, to_key: MultiWriterStreamToken, - ) -> Set[str]: + ) -> set[str]: sql = """ SELECT user_id, stream_id, instance_name FROM device_lists_stream WHERE ? < stream_id AND stream_id <= ? AND %s """ - changes: Set[str] = set() + changes: set[str] = set() # Query device changes with a batch of users at a time for chunk in batch_iter(user_ids_to_check, 100): @@ -1204,7 +1200,7 @@ def _get_users_whose_devices_changed_txn( async def get_users_whose_signatures_changed( self, user_id: str, from_key: MultiWriterStreamToken - ) -> Set[str]: + ) -> set[str]: """Get the users who have new cross-signing signatures made by `user_id` since `from_key`. @@ -1243,7 +1239,7 @@ async def get_users_whose_signatures_changed( async def get_all_device_list_changes_for_remotes( self, instance_name: str, last_id: int, current_id: int, limit: int - ) -> Tuple[List[Tuple[int, tuple]], int, bool]: + ) -> tuple[list[tuple[int, tuple]], int, bool]: """Get updates for device lists replication stream. Args: @@ -1270,7 +1266,7 @@ async def get_all_device_list_changes_for_remotes( def _get_all_device_list_changes_for_remotes( txn: Cursor, - ) -> Tuple[List[Tuple[int, tuple]], int, bool]: + ) -> tuple[list[tuple[int, tuple]], int, bool]: # This query Does The Right Thing where it'll correctly apply the # bounds to the inner queries. 
sql = """ @@ -1322,7 +1318,7 @@ async def get_device_list_last_stream_id_for_remotes( self, user_ids: Iterable[str] ) -> Mapping[str, Optional[str]]: rows = cast( - List[Tuple[str, str]], + list[tuple[str, str]], await self.db_pool.simple_select_many_batch( table="device_lists_remote_extremeties", column="user_id", @@ -1332,7 +1328,7 @@ async def get_device_list_last_stream_id_for_remotes( ), ) - results: Dict[str, Optional[str]] = dict.fromkeys(user_ids) + results: dict[str, Optional[str]] = dict.fromkeys(user_ids) results.update(rows) return results @@ -1340,7 +1336,7 @@ async def get_device_list_last_stream_id_for_remotes( async def get_user_ids_requiring_device_list_resync( self, user_ids: Optional[Collection[str]] = None, - ) -> Set[str]: + ) -> set[str]: """Given a list of remote users return the list of users that we should resync the device lists for. If None is given instead of a list, return every user that we should resync the device lists for. @@ -1350,7 +1346,7 @@ async def get_user_ids_requiring_device_list_resync( """ if user_ids: rows = cast( - List[Tuple[str]], + list[tuple[str]], await self.db_pool.simple_select_many_batch( table="device_lists_remote_resync", column="user_id", @@ -1361,7 +1357,7 @@ async def get_user_ids_requiring_device_list_resync( ) else: rows = cast( - List[Tuple[str]], + list[tuple[str]], await self.db_pool.simple_select_list( table="device_lists_remote_resync", keyvalues=None, @@ -1406,7 +1402,7 @@ async def mark_remote_user_device_cache_as_valid(self, user_id: str) -> None: desc="mark_remote_user_device_cache_as_valid", ) - async def handle_potentially_left_users(self, user_ids: Set[str]) -> None: + async def handle_potentially_left_users(self, user_ids: set[str]) -> None: """Given a set of remote users check if the server still shares a room with them. If not then mark those users' device cache as stale. """ @@ -1423,7 +1419,7 @@ async def handle_potentially_left_users(self, user_ids: Set[str]) -> None: def handle_potentially_left_users_txn( self, txn: LoggingTransaction, - user_ids: Set[str], + user_ids: set[str], ) -> None: """Given a set of remote users check if the server still shares a room with them. If not then mark those users' device cache as stale. @@ -1463,7 +1459,7 @@ def mark_remote_user_device_list_as_unsubscribed_txn( async def get_dehydrated_device( self, user_id: str - ) -> Optional[Tuple[str, JsonDict]]: + ) -> Optional[tuple[str, JsonDict]]: """Retrieve the information for a dehydrated device. Args: @@ -1672,7 +1668,7 @@ def _prune_txn(txn: LoggingTransaction) -> None: async def get_local_devices_not_accessed_since( self, since_ms: int - ) -> Dict[str, List[str]]: + ) -> dict[str, list[str]]: """Retrieves local devices that haven't been accessed since a given date. Args: @@ -1687,20 +1683,20 @@ async def get_local_devices_not_accessed_since( def get_devices_not_accessed_since_txn( txn: LoggingTransaction, - ) -> List[Tuple[str, str]]: + ) -> list[tuple[str, str]]: sql = """ SELECT user_id, device_id FROM devices WHERE last_seen < ? AND hidden = FALSE """ txn.execute(sql, (since_ms,)) - return cast(List[Tuple[str, str]], txn.fetchall()) + return cast(list[tuple[str, str]], txn.fetchall()) rows = await self.db_pool.runInteraction( "get_devices_not_accessed_since", get_devices_not_accessed_since_txn, ) - devices: Dict[str, List[str]] = {} + devices: dict[str, list[str]] = {} for user_id, device_id in rows: # Remote devices are never stale from our point of view. 
if self.hs.is_mine_id(user_id): @@ -1728,7 +1724,7 @@ async def get_device_list_changes_in_rooms( room_ids: Collection[str], from_token: MultiWriterStreamToken, to_token: MultiWriterStreamToken, - ) -> Optional[Set[str]]: + ) -> Optional[set[str]]: """Return the set of users whose devices have changed in the given rooms since the given stream ID. @@ -1759,7 +1755,7 @@ async def get_device_list_changes_in_rooms( def _get_device_list_changes_in_rooms_txn( txn: LoggingTransaction, chunk: list[str], - ) -> Set[str]: + ) -> set[str]: clause, args = make_in_list_sql_clause( self.database_engine, "room_id", chunk ) @@ -1788,7 +1784,7 @@ def _get_device_list_changes_in_rooms_txn( return changes - async def get_all_device_list_changes(self, from_id: int, to_id: int) -> Set[str]: + async def get_all_device_list_changes(self, from_id: int, to_id: int) -> set[str]: """Return the set of rooms where devices have changed since the given stream ID. @@ -1807,7 +1803,7 @@ async def get_all_device_list_changes(self, from_id: int, to_id: int) -> Set[str def _get_all_device_list_changes_txn( txn: LoggingTransaction, - ) -> Set[str]: + ) -> set[str]: txn.execute(sql, (from_id, to_id)) return {room_id for (room_id,) in txn} @@ -1818,7 +1814,7 @@ def _get_all_device_list_changes_txn( async def get_device_list_changes_in_room( self, room_id: str, min_stream_id: int - ) -> Collection[Tuple[str, str]]: + ) -> Collection[tuple[str, str]]: """Get all device list changes that happened in the room since the given stream ID. @@ -1834,9 +1830,9 @@ async def get_device_list_changes_in_room( def get_device_list_changes_in_room_txn( txn: LoggingTransaction, - ) -> Collection[Tuple[str, str]]: + ) -> Collection[tuple[str, str]]: txn.execute(sql, (room_id, min_stream_id)) - return cast(Collection[Tuple[str, str]], txn.fetchall()) + return cast(Collection[tuple[str, str]], txn.fetchall()) return await self.db_pool.runInteraction( "get_device_list_changes_in_room", @@ -1911,7 +1907,7 @@ def _update_remote_device_list_cache_entry_txn( ) async def update_remote_device_list_cache( - self, user_id: str, devices: List[dict], stream_id: int + self, user_id: str, devices: list[dict], stream_id: int ) -> None: """Replace the entire cache of the remote user's devices. 
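A recurring shape in the hunks above is cast(list[tuple[...]], txn.fetchall()). A minimal, self-contained sketch, not Synapse code (fetch_rows and its sample row are invented), showing that the PEP 585 builtin generics are ordinary runtime expressions on Python 3.9+, usable both inside typing.cast and as variable annotations:

    from typing import Optional, cast

    def fetch_rows() -> list[tuple[str, str, int, Optional[str]]]:
        # Stand-in for txn.fetchall(): the driver returns an untyped value;
        # cast() re-labels it for the type checker at zero runtime cost.
        raw: object = [("@alice:example.org", "DEV1", 7, None)]
        return cast(list[tuple[str, str, int, Optional[str]]], raw)

    # The same setdefault grouping used by get_local_devices_not_accessed_since.
    devices: dict[str, list[str]] = {}
    for user_id, device_id, _stream_id, _opt in fetch_rows():
        devices.setdefault(user_id, []).append(device_id)

    print(devices)  # {'@alice:example.org': ['DEV1']}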
@@ -1932,7 +1928,7 @@ async def update_remote_device_list_cache( ) def _update_remote_device_list_cache_txn( - self, txn: LoggingTransaction, user_id: str, devices: List[dict], stream_id: int + self, txn: LoggingTransaction, user_id: str, devices: list[dict], stream_id: int ) -> None: """Replace the list of cached devices for this user with the given list.""" self.db_pool.simple_delete_txn( @@ -2031,7 +2027,7 @@ def _add_device_change_to_stream_txn( txn: LoggingTransaction, user_id: str, device_ids: Collection[str], - stream_ids: List[int], + stream_ids: list[int], ) -> None: txn.call_after( self._device_list_stream_cache.entity_has_changed, @@ -2076,7 +2072,7 @@ def _add_device_outbound_poke_to_stream_txn( device_id: str, hosts: Collection[str], stream_id: int, - context: Optional[Dict[str, str]], + context: Optional[dict[str, str]], ) -> None: if self._device_list_federation_stream_cache: for host in hosts: @@ -2163,8 +2159,8 @@ def _add_device_outbound_room_poke_txn( user_id: str, device_ids: StrCollection, room_ids: StrCollection, - stream_ids: List[int], - context: Dict[str, str], + stream_ids: list[int], + context: dict[str, str], ) -> None: """Record the user in the room has updated their device.""" @@ -2208,7 +2204,7 @@ def _add_device_outbound_room_poke_txn( async def get_uncoverted_outbound_room_pokes( self, start_stream_id: int, start_room_id: str, limit: int = 10 - ) -> List[Tuple[str, str, str, int, Optional[Dict[str, str]]]]: + ) -> list[tuple[str, str, str, int, Optional[dict[str, str]]]]: """Get device list changes by room that have not yet been handled and written to `device_lists_outbound_pokes`. @@ -2236,7 +2232,7 @@ async def get_uncoverted_outbound_room_pokes( def get_uncoverted_outbound_room_pokes_txn( txn: LoggingTransaction, - ) -> List[Tuple[str, str, str, int, Optional[Dict[str, str]]]]: + ) -> list[tuple[str, str, str, int, Optional[dict[str, str]]]]: txn.execute( sql, ( @@ -2270,7 +2266,7 @@ async def add_device_list_outbound_pokes( device_id: str, room_id: str, hosts: Collection[str], - context: Optional[Dict[str, str]], + context: Optional[dict[str, str]], ) -> None: """Queue the device update to be sent to the given set of hosts, calculated from the room ID. @@ -2327,7 +2323,7 @@ async def add_remote_device_list_to_pending( async def get_pending_remote_device_list_updates_for_room( self, room_id: str - ) -> Collection[Tuple[str, str]]: + ) -> Collection[tuple[str, str]]: """Get the set of remote device list updates from the pending table for the room. """ @@ -2361,16 +2357,16 @@ async def get_pending_remote_device_list_updates_for_room( def get_pending_remote_device_list_updates_for_room_txn( txn: LoggingTransaction, - ) -> Collection[Tuple[str, str]]: + ) -> Collection[tuple[str, str]]: txn.execute(sql, (room_id, min_device_stream_id)) - return cast(Collection[Tuple[str, str]], txn.fetchall()) + return cast(Collection[tuple[str, str]], txn.fetchall()) return await self.db_pool.runInteraction( "get_pending_remote_device_list_updates_for_room", get_pending_remote_device_list_updates_for_room_txn, ) - async def get_device_change_last_converted_pos(self) -> Tuple[int, str]: + async def get_device_change_last_converted_pos(self) -> tuple[int, str]: """ Get the position of the last row in `device_list_changes_in_room` that has been converted to `device_lists_outbound_pokes`. 
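Note that Optional keeps coming from typing throughout these hunks: PEP 585 covers only the standard container classes, while the X | None spelling is PEP 604 and requires Python 3.10, so Optional[dict[str, str]] is the expected mixed form on a 3.9 baseline. A tiny illustrative sketch (build_poke_context and its values are invented, not part of the patch):

    from typing import Optional

    def build_poke_context(tracing_enabled: bool) -> Optional[dict[str, str]]:
        # An opentracing-style context mapping, or None when tracing is off,
        # mirroring the Optional[dict[str, str]] shape passed through the
        # outbound-poke helpers above.
        if not tracing_enabled:
            return None
        return {"trace_id": "abc123"}

    print(build_poke_context(False), build_poke_context(True))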
@@ -2388,7 +2384,7 @@ async def get_device_change_last_converted_pos(self) -> Tuple[int, str]: retcols=["stream_id", "room_id"], desc="get_device_change_last_converted_pos", ) - return cast(Tuple[int, str], min(rows)) + return cast(tuple[int, str], min(rows)) async def set_device_change_last_converted_pos( self, diff --git a/synapse/storage/databases/main/directory.py b/synapse/storage/databases/main/directory.py index 49c0575acaa..99a951ca4a8 100644 --- a/synapse/storage/databases/main/directory.py +++ b/synapse/storage/databases/main/directory.py @@ -19,7 +19,7 @@ # # -from typing import Iterable, List, Optional, Sequence, Tuple +from typing import Iterable, Optional, Sequence import attr @@ -34,7 +34,7 @@ class RoomAliasMapping: room_id: str room_alias: str - servers: List[str] + servers: list[str] class DirectoryWorkerStore(CacheInvalidationWorkerStore): @@ -187,7 +187,7 @@ async def update_aliases_for_room( def _update_aliases_for_room_txn(txn: LoggingTransaction) -> None: update_creator_sql = "" - sql_params: Tuple[str, ...] = (new_room_id, old_room_id) + sql_params: tuple[str, ...] = (new_room_id, old_room_id) if creator: update_creator_sql = ", creator = ?" sql_params = (new_room_id, creator, old_room_id) diff --git a/synapse/storage/databases/main/e2e_room_keys.py b/synapse/storage/databases/main/e2e_room_keys.py index d978e115e42..a4d03d1d905 100644 --- a/synapse/storage/databases/main/e2e_room_keys.py +++ b/synapse/storage/databases/main/e2e_room_keys.py @@ -21,13 +21,10 @@ from typing import ( TYPE_CHECKING, - Dict, Iterable, - List, Literal, Mapping, Optional, - Tuple, TypedDict, cast, ) @@ -195,7 +192,7 @@ async def update_e2e_room_key( ) async def add_e2e_room_keys( - self, user_id: str, version: str, room_keys: Iterable[Tuple[str, str, RoomKey]] + self, user_id: str, version: str, room_keys: Iterable[tuple[str, str, RoomKey]] ) -> None: """Bulk add room keys to a given backup. @@ -257,8 +254,8 @@ async def get_e2e_room_keys( version: str, room_id: Optional[str] = None, session_id: Optional[str] = None, - ) -> Dict[ - Literal["rooms"], Dict[str, Dict[Literal["sessions"], Dict[str, RoomKey]]] + ) -> dict[ + Literal["rooms"], dict[str, dict[Literal["sessions"], dict[str, RoomKey]]] ]: """Bulk get the E2E room keys for a given backup, optionally filtered to a given room, or a given session. @@ -290,7 +287,7 @@ async def get_e2e_room_keys( keyvalues["session_id"] = session_id rows = cast( - List[Tuple[str, str, int, int, int, str]], + list[tuple[str, str, int, int, int, str]], await self.db_pool.simple_select_list( table="e2e_room_keys", keyvalues=keyvalues, @@ -306,8 +303,8 @@ async def get_e2e_room_keys( ), ) - sessions: Dict[ - Literal["rooms"], Dict[str, Dict[Literal["sessions"], Dict[str, RoomKey]]] + sessions: dict[ + Literal["rooms"], dict[str, dict[Literal["sessions"], dict[str, RoomKey]]] ] = {"rooms": {}} for ( room_id, @@ -333,7 +330,7 @@ async def get_e2e_room_keys_multi( user_id: str, version: str, room_keys: Mapping[str, Mapping[Literal["sessions"], Iterable[str]]], - ) -> Dict[str, Dict[str, RoomKey]]: + ) -> dict[str, dict[str, RoomKey]]: """Get multiple room keys at a time. 
The difference between this function and get_e2e_room_keys is that this function can be used to retrieve multiple specific keys at a time, whereas get_e2e_room_keys is used for @@ -370,7 +367,7 @@ def _get_e2e_room_keys_multi_txn( user_id: str, version: int, room_keys: Mapping[str, Mapping[Literal["sessions"], Iterable[str]]], - ) -> Dict[str, Dict[str, RoomKey]]: + ) -> dict[str, dict[str, RoomKey]]: if not room_keys: return {} @@ -400,7 +397,7 @@ def _get_e2e_room_keys_multi_txn( txn.execute(sql, params) - ret: Dict[str, Dict[str, RoomKey]] = {} + ret: dict[str, dict[str, RoomKey]] = {} for row in txn: room_id = row[0] @@ -483,7 +480,7 @@ def _get_current_version(txn: LoggingTransaction, user_id: str) -> int: ) # `SELECT MAX() FROM ...` will always return 1 row. The value in that row will # be `NULL` when there are no available versions. - row = cast(Tuple[Optional[int]], txn.fetchone()) + row = cast(tuple[Optional[int]], txn.fetchone()) if row[0] is None: raise StoreError(404, "No current backup version") return row[0] @@ -559,7 +556,7 @@ def _create_e2e_room_keys_version_txn(txn: LoggingTransaction) -> str: "SELECT MAX(version) FROM e2e_room_keys_versions WHERE user_id=?", (user_id,), ) - current_version = cast(Tuple[Optional[int]], txn.fetchone())[0] + current_version = cast(tuple[Optional[int]], txn.fetchone())[0] if current_version is None: current_version = 0 @@ -600,7 +597,7 @@ async def update_e2e_room_keys_version( version_etag: etag of the keys in the backup. If None, then the etag is not updated. """ - updatevalues: Dict[str, object] = {} + updatevalues: dict[str, object] = {} if info is not None and "auth_data" in info: updatevalues["auth_data"] = json_encoder.encode(info["auth_data"]) diff --git a/synapse/storage/databases/main/end_to_end_keys.py b/synapse/storage/databases/main/end_to_end_keys.py index 2d3d0c0036e..991d64db446 100644 --- a/synapse/storage/databases/main/end_to_end_keys.py +++ b/synapse/storage/databases/main/end_to_end_keys.py @@ -25,15 +25,11 @@ TYPE_CHECKING, Any, Collection, - Dict, Iterable, - List, Literal, Mapping, Optional, Sequence, - Set, - Tuple, Union, cast, overload, @@ -155,7 +151,7 @@ def process_replication_rows( async def get_e2e_device_keys_for_federation_query( self, user_id: str - ) -> Tuple[int, Sequence[JsonMapping]]: + ) -> tuple[int, Sequence[JsonMapping]]: """Get all devices (with any device keys) for a user Returns: @@ -241,9 +237,9 @@ async def _get_e2e_device_keys_for_federation_query_inner( @cancellable async def get_e2e_device_keys_for_cs_api( self, - query_list: Collection[Tuple[str, Optional[str]]], + query_list: Collection[tuple[str, Optional[str]]], include_displaynames: bool = True, - ) -> Dict[str, Dict[str, JsonDict]]: + ) -> dict[str, dict[str, JsonDict]]: """Fetch a list of device keys, formatted suitably for the C/S API. Args: query_list: List of pairs of user_ids and device_ids. 
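The e2e_room_keys hunks above preserve the Literal-keyed response shape while switching to builtin generics. A short sketch of building that {"rooms": {room_id: {"sessions": {...}}}} structure; RoomKey is simplified to a plain dict here (the real one is a TypedDict), and the sample IDs are invented:

    from typing import Literal

    RoomKey = dict  # stand-in for Synapse's RoomKey TypedDict

    sessions: dict[
        Literal["rooms"], dict[str, dict[Literal["sessions"], dict[str, RoomKey]]]
    ] = {"rooms": {}}

    room_id, session_id = "!room:example.org", "session1"
    room_entry = sessions["rooms"].setdefault(room_id, {"sessions": {}})
    room_entry["sessions"][session_id] = {"first_message_index": 0}

    print(sessions)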
@@ -262,7 +258,7 @@ async def get_e2e_device_keys_for_cs_api( # Build the result structure, un-jsonify the results, and add the # "unsigned" section - rv: Dict[str, Dict[str, JsonDict]] = {} + rv: dict[str, dict[str, JsonDict]] = {} for user_id, device_keys in results.items(): rv[user_id] = {} for device_id, device_info in device_keys.items(): @@ -284,36 +280,36 @@ async def get_e2e_device_keys_for_cs_api( @overload async def get_e2e_device_keys_and_signatures( self, - query_list: Collection[Tuple[str, Optional[str]]], + query_list: Collection[tuple[str, Optional[str]]], include_all_devices: Literal[False] = False, - ) -> Dict[str, Dict[str, DeviceKeyLookupResult]]: ... + ) -> dict[str, dict[str, DeviceKeyLookupResult]]: ... @overload async def get_e2e_device_keys_and_signatures( self, - query_list: Collection[Tuple[str, Optional[str]]], + query_list: Collection[tuple[str, Optional[str]]], include_all_devices: bool = False, include_deleted_devices: Literal[False] = False, - ) -> Dict[str, Dict[str, DeviceKeyLookupResult]]: ... + ) -> dict[str, dict[str, DeviceKeyLookupResult]]: ... @overload async def get_e2e_device_keys_and_signatures( self, - query_list: Collection[Tuple[str, Optional[str]]], + query_list: Collection[tuple[str, Optional[str]]], include_all_devices: Literal[True], include_deleted_devices: Literal[True], - ) -> Dict[str, Dict[str, Optional[DeviceKeyLookupResult]]]: ... + ) -> dict[str, dict[str, Optional[DeviceKeyLookupResult]]]: ... @trace @cancellable async def get_e2e_device_keys_and_signatures( self, - query_list: Collection[Tuple[str, Optional[str]]], + query_list: Collection[tuple[str, Optional[str]]], include_all_devices: bool = False, include_deleted_devices: bool = False, ) -> Union[ - Dict[str, Dict[str, DeviceKeyLookupResult]], - Dict[str, Dict[str, Optional[DeviceKeyLookupResult]]], + dict[str, dict[str, DeviceKeyLookupResult]], + dict[str, dict[str, Optional[DeviceKeyLookupResult]]], ]: """Fetch a list of device keys @@ -388,18 +384,18 @@ async def get_e2e_device_keys_and_signatures( async def _get_e2e_device_keys( self, - query_list: Collection[Tuple[str, Optional[str]]], + query_list: Collection[tuple[str, Optional[str]]], include_all_devices: bool = False, include_deleted_devices: bool = False, - ) -> Dict[str, Dict[str, Optional[DeviceKeyLookupResult]]]: + ) -> dict[str, dict[str, Optional[DeviceKeyLookupResult]]]: """Get information on devices from the database The results include the device's keys and self-signatures, but *not* any cross-signing signatures which have been added subsequently (for which, see get_e2e_device_keys_and_signatures) """ - query_clauses: List[str] = [] - query_params_list: List[List[object]] = [] + query_clauses: list[str] = [] + query_params_list: list[list[object]] = [] if include_all_devices is False: include_deleted_devices = False @@ -437,7 +433,7 @@ async def _get_e2e_device_keys( query_clauses.append(user_device_id_in_list_clause) query_params_list.append(user_device_args) - result: Dict[str, Dict[str, Optional[DeviceKeyLookupResult]]] = {} + result: dict[str, dict[str, Optional[DeviceKeyLookupResult]]] = {} def get_e2e_device_keys_txn( txn: LoggingTransaction, query_clause: str, query_params: list @@ -490,8 +486,8 @@ def get_e2e_device_keys_txn( @cached() def _get_e2e_cross_signing_signatures_for_device( self, - user_id_and_device_id: Tuple[str, str], - ) -> Sequence[Tuple[str, str]]: + user_id_and_device_id: tuple[str, str], + ) -> Sequence[tuple[str, str]]: """ The single-item version of 
`_get_e2e_cross_signing_signatures_for_devices`. See @cachedList for why a separate method is needed. @@ -503,8 +499,8 @@ def _get_e2e_cross_signing_signatures_for_device( list_name="device_query", ) async def _get_e2e_cross_signing_signatures_for_devices( - self, device_query: Iterable[Tuple[str, str]] - ) -> Mapping[Tuple[str, str], Sequence[Tuple[str, str]]]: + self, device_query: Iterable[tuple[str, str]] + ) -> Mapping[tuple[str, str], Sequence[tuple[str, str]]]: """Get cross-signing signatures for a given list of user IDs and devices. Args: @@ -524,8 +520,8 @@ async def _get_e2e_cross_signing_signatures_for_devices( """ def _get_e2e_cross_signing_signatures_for_devices_txn( - txn: LoggingTransaction, device_query: Iterable[Tuple[str, str]] - ) -> Mapping[Tuple[str, str], Sequence[Tuple[str, str]]]: + txn: LoggingTransaction, device_query: Iterable[tuple[str, str]] + ) -> Mapping[tuple[str, str], Sequence[tuple[str, str]]]: where_clause_sql, where_clause_params = make_tuple_in_list_sql_clause( self.database_engine, columns=("target_user_id", "target_device_id", "user_id"), @@ -541,7 +537,7 @@ def _get_e2e_cross_signing_signatures_for_devices_txn( txn.execute(signature_sql, where_clause_params) - devices_and_signatures: Dict[Tuple[str, str], List[Tuple[str, str]]] = {} + devices_and_signatures: dict[tuple[str, str], list[tuple[str, str]]] = {} # `@cachedList` requires we return one key for every item in `device_query`. # Pre-populate `devices_and_signatures` with each key so that none are missing. @@ -567,8 +563,8 @@ def _get_e2e_cross_signing_signatures_for_devices_txn( ) async def get_e2e_one_time_keys( - self, user_id: str, device_id: str, key_ids: List[str] - ) -> Dict[Tuple[str, str], str]: + self, user_id: str, device_id: str, key_ids: list[str] + ) -> dict[tuple[str, str], str]: """Retrieve a number of one-time keys for a user Args: @@ -581,7 +577,7 @@ async def get_e2e_one_time_keys( """ rows = cast( - List[Tuple[str, str, str]], + list[tuple[str, str, str]], await self.db_pool.simple_select_many_batch( table="e2e_one_time_keys_json", column="key_id", @@ -600,7 +596,7 @@ async def add_e2e_one_time_keys( user_id: str, device_id: str, time_now: int, - new_keys: Iterable[Tuple[str, str, str]], + new_keys: Iterable[tuple[str, str, str]], ) -> None: """Insert some new one time keys for a device. Errors if any of the keys already exist. @@ -627,7 +623,7 @@ def _add_e2e_one_time_keys_txn( user_id: str, device_id: str, time_now: int, - new_keys: Iterable[Tuple[str, str, str]], + new_keys: Iterable[tuple[str, str, str]], ) -> None: """Insert some new one time keys for a device. Errors if any of the keys already exist. @@ -674,7 +670,7 @@ async def count_e2e_one_time_keys( A mapping from algorithm to number of keys for that algorithm. """ - def _count_e2e_one_time_keys(txn: LoggingTransaction) -> Dict[str, int]: + def _count_e2e_one_time_keys(txn: LoggingTransaction) -> dict[str, int]: sql = ( "SELECT algorithm, COUNT(key_id) FROM e2e_one_time_keys_json" " WHERE user_id = ? AND device_id = ?" @@ -962,7 +958,7 @@ def _get_bare_e2e_cross_signing_keys_bulk_txn( self, txn: LoggingTransaction, user_ids: Iterable[str], - ) -> Dict[str, Dict[str, JsonDict]]: + ) -> dict[str, dict[str, JsonDict]]: """Returns the cross-signing keys for a set of users. The output of this function should be passed to _get_e2e_cross_signing_signatures_txn if the signatures for the calling user need to be fetched. @@ -977,7 +973,7 @@ def _get_bare_e2e_cross_signing_keys_bulk_txn( the dict. 
""" - result: Dict[str, Dict[str, JsonDict]] = {} + result: dict[str, dict[str, JsonDict]] = {} for user_chunk in batch_iter(user_ids, 100): clause, params = make_in_list_sql_clause( @@ -1017,9 +1013,9 @@ def _get_bare_e2e_cross_signing_keys_bulk_txn( def _get_e2e_cross_signing_signatures_txn( self, txn: LoggingTransaction, - keys: Dict[str, Optional[Dict[str, JsonDict]]], + keys: dict[str, Optional[dict[str, JsonDict]]], from_user_id: str, - ) -> Dict[str, Optional[Dict[str, JsonDict]]]: + ) -> dict[str, Optional[dict[str, JsonDict]]]: """Returns the cross-signing signatures made by a user on a set of keys. Args: @@ -1037,7 +1033,7 @@ def _get_e2e_cross_signing_signatures_txn( # find out what cross-signing keys (a.k.a. devices) we need to get # signatures for. This is a map of (user_id, device_id) to key type # (device_id is the key's public part). - devices: Dict[Tuple[str, str], str] = {} + devices: dict[tuple[str, str], str] = {} for user_id, user_keys in keys.items(): if user_keys is None: @@ -1100,7 +1096,7 @@ def _get_e2e_cross_signing_signatures_txn( @cancellable async def get_e2e_cross_signing_keys_bulk( - self, user_ids: List[str], from_user_id: Optional[str] = None + self, user_ids: list[str], from_user_id: Optional[str] = None ) -> Mapping[str, Optional[Mapping[str, JsonMapping]]]: """Returns the cross-signing keys for a set of users. @@ -1118,7 +1114,7 @@ async def get_e2e_cross_signing_keys_bulk( if from_user_id: result = cast( - Dict[str, Optional[Mapping[str, JsonMapping]]], + dict[str, Optional[Mapping[str, JsonMapping]]], await self.db_pool.runInteraction( "get_e2e_cross_signing_signatures", self._get_e2e_cross_signing_signatures_txn, @@ -1131,7 +1127,7 @@ async def get_e2e_cross_signing_keys_bulk( async def get_all_user_signature_changes_for_remotes( self, instance_name: str, last_id: int, current_id: int, limit: int - ) -> Tuple[List[Tuple[int, tuple]], int, bool]: + ) -> tuple[list[tuple[int, tuple]], int, bool]: """Get updates for groups replication stream. Note that the user signature stream represents when a user signs their @@ -1163,7 +1159,7 @@ async def get_all_user_signature_changes_for_remotes( def _get_all_user_signature_changes_for_remotes_txn( txn: LoggingTransaction, - ) -> Tuple[List[Tuple[int, tuple]], int, bool]: + ) -> tuple[list[tuple[int, tuple]], int, bool]: sql = """ SELECT stream_id, from_user_id AS user_id FROM user_signature_stream @@ -1194,9 +1190,9 @@ def get_device_stream_token(self) -> MultiWriterStreamToken: ... async def claim_e2e_one_time_keys( - self, query_list: Collection[Tuple[str, str, str, int]] - ) -> Tuple[ - Dict[str, Dict[str, Dict[str, JsonDict]]], List[Tuple[str, str, str, int]] + self, query_list: Collection[tuple[str, str, str, int]] + ) -> tuple[ + dict[str, dict[str, dict[str, JsonDict]]], list[tuple[str, str, str, int]] ]: """Take a list of one time keys out of the database. @@ -1211,12 +1207,12 @@ async def claim_e2e_one_time_keys( may be less than the input counts. In this case, the returned counts are the number of claims that were not fulfilled. """ - results: Dict[str, Dict[str, Dict[str, JsonDict]]] = {} - missing: List[Tuple[str, str, str, int]] = [] + results: dict[str, dict[str, dict[str, JsonDict]]] = {} + missing: list[tuple[str, str, str, int]] = [] if isinstance(self.database_engine, PostgresEngine): # If we can use execute_values we can use a single batch query # in autocommit mode. 
- unfulfilled_claim_counts: Dict[Tuple[str, str, str], int] = {} + unfulfilled_claim_counts: dict[tuple[str, str, str], int] = {} for user_id, device_id, algorithm, count in query_list: unfulfilled_claim_counts[user_id, device_id, algorithm] = count @@ -1265,8 +1261,8 @@ async def claim_e2e_one_time_keys( return results, missing async def claim_e2e_fallback_keys( - self, query_list: Iterable[Tuple[str, str, str, bool]] - ) -> Dict[str, Dict[str, Dict[str, JsonDict]]]: + self, query_list: Iterable[tuple[str, str, str, bool]] + ) -> dict[str, dict[str, dict[str, JsonDict]]]: """Take a list of fallback keys out of the database. Args: @@ -1293,13 +1289,13 @@ async def claim_e2e_fallback_keys( def _claim_e2e_fallback_keys_bulk_txn( self, txn: LoggingTransaction, - query_list: Iterable[Tuple[str, str, str, bool]], - ) -> Dict[str, Dict[str, Dict[str, JsonDict]]]: + query_list: Iterable[tuple[str, str, str, bool]], + ) -> dict[str, dict[str, dict[str, JsonDict]]]: """Efficient implementation of claim_e2e_fallback_keys for Postgres. Safe to autocommit: this is a single query. """ - results: Dict[str, Dict[str, Dict[str, JsonDict]]] = {} + results: dict[str, dict[str, dict[str, JsonDict]]] = {} sql = """ WITH claims(user_id, device_id, algorithm, mark_as_used) AS ( @@ -1312,11 +1308,11 @@ def _claim_e2e_fallback_keys_bulk_txn( RETURNING k.user_id, k.device_id, k.algorithm, k.key_id, k.key_json; """ claimed_keys = cast( - List[Tuple[str, str, str, str, str]], + list[tuple[str, str, str, str, str]], txn.execute_values(sql, query_list), ) - seen_user_device: Set[Tuple[str, str]] = set() + seen_user_device: set[tuple[str, str]] = set() for user_id, device_id, algorithm, key_id, key_json in claimed_keys: device_results = results.setdefault(user_id, {}).setdefault(device_id, {}) device_results[f"{algorithm}:{key_id}"] = json_decoder.decode(key_json) @@ -1330,10 +1326,10 @@ def _claim_e2e_fallback_keys_bulk_txn( async def _claim_e2e_fallback_keys_simple( self, - query_list: Iterable[Tuple[str, str, str, bool]], - ) -> Dict[str, Dict[str, Dict[str, JsonDict]]]: + query_list: Iterable[tuple[str, str, str, bool]], + ) -> dict[str, dict[str, dict[str, JsonDict]]]: """Naive, inefficient implementation of claim_e2e_fallback_keys for SQLite.""" - results: Dict[str, Dict[str, Dict[str, JsonDict]]] = {} + results: dict[str, dict[str, dict[str, JsonDict]]] = {} for user_id, device_id, algorithm, mark_as_used in query_list: row = await self.db_pool.simple_select_one( table="e2e_fallback_keys_json", @@ -1381,7 +1377,7 @@ def _claim_e2e_one_time_key_simple( device_id: str, algorithm: str, count: int, - ) -> List[Tuple[str, str]]: + ) -> list[tuple[str, str]]: """Claim OTK for device for DBs that don't support RETURNING. Returns: @@ -1426,8 +1422,8 @@ def _claim_e2e_one_time_key_simple( def _claim_e2e_one_time_keys_bulk( self, txn: LoggingTransaction, - query_list: Iterable[Tuple[str, str, str, int]], - ) -> List[Tuple[str, str, str, str, str]]: + query_list: Iterable[tuple[str, str, str, int]], + ) -> list[tuple[str, str, str, str, str]]: """Bulk claim OTKs, for DBs that support DELETE FROM... RETURNING. 
Args: @@ -1466,7 +1462,7 @@ def _claim_e2e_one_time_keys_bulk( RETURNING user_id, device_id, algorithm, key_id, key_json; """ otk_rows = cast( - List[Tuple[str, str, str, str, str]], txn.execute_values(sql, query_list) + list[tuple[str, str, str, str, str]], txn.execute_values(sql, query_list) ) seen_user_device = { @@ -1482,7 +1478,7 @@ def _claim_e2e_one_time_keys_bulk( async def get_master_cross_signing_key_updatable_before( self, user_id: str - ) -> Tuple[bool, Optional[int]]: + ) -> tuple[bool, Optional[int]]: """Get time before which a master cross-signing key may be replaced without UIA. (UIA means "User-Interactive Auth".) @@ -1503,7 +1499,7 @@ async def get_master_cross_signing_key_updatable_before( """ - def impl(txn: LoggingTransaction) -> Tuple[bool, Optional[int]]: + def impl(txn: LoggingTransaction) -> tuple[bool, Optional[int]]: # We want to distinguish between three cases: txn.execute( """ @@ -1515,7 +1511,7 @@ def impl(txn: LoggingTransaction) -> Tuple[bool, Optional[int]]: """, (user_id,), ) - row = cast(Optional[Tuple[Optional[int]]], txn.fetchone()) + row = cast(Optional[tuple[Optional[int]]], txn.fetchone()) if row is None: return False, None return True, row[0] @@ -1527,7 +1523,7 @@ def impl(txn: LoggingTransaction) -> Tuple[bool, Optional[int]]: async def delete_old_otks_for_next_user_batch( self, after_user_id: str, number_of_users: int - ) -> Tuple[List[str], int]: + ) -> tuple[list[str], int]: """Deletes old OTKs belonging to the next batch of users Returns: @@ -1536,7 +1532,7 @@ async def delete_old_otks_for_next_user_batch( * `rows` is the number of deleted rows """ - def impl(txn: LoggingTransaction) -> Tuple[List[str], int]: + def impl(txn: LoggingTransaction) -> tuple[list[str], int]: # Find a batch of users txn.execute( """ diff --git a/synapse/storage/databases/main/event_federation.py b/synapse/storage/databases/main/event_federation.py index d77420ff475..d889e8eceb2 100644 --- a/synapse/storage/databases/main/event_federation.py +++ b/synapse/storage/databases/main/event_federation.py @@ -25,15 +25,10 @@ from typing import ( TYPE_CHECKING, Collection, - Dict, - FrozenSet, Generator, Iterable, - List, Optional, Sequence, - Set, - Tuple, cast, ) @@ -132,9 +127,9 @@ class BackfillQueueNavigationItem: @attr.s(frozen=True, slots=True, auto_attribs=True) class StateDifference: # The event IDs in the auth difference. - auth_difference: Set[str] + auth_difference: set[str] # The event IDs in the conflicted state subgraph. Used in v2.1 only. - conflicted_subgraph: Optional[Set[str]] + conflicted_subgraph: Optional[set[str]] class _NoChainCoverIndex(Exception): @@ -165,7 +160,7 @@ def __init__( ) # Cache of event ID to list of auth event IDs and their depths. - self._event_auth_cache: LruCache[str, List[Tuple[str, int]]] = LruCache( + self._event_auth_cache: LruCache[str, list[tuple[str, int]]] = LruCache( max_size=500000, clock=self.hs.get_clock(), server_name=self.server_name, @@ -199,7 +194,7 @@ def __init__( async def get_auth_chain( self, room_id: str, event_ids: Collection[str], include_given: bool = False - ) -> List[EventBase]: + ) -> list[EventBase]: """Get auth events for given event_ids. The events *must* be state events. Args: @@ -222,7 +217,7 @@ async def get_auth_chain_ids( room_id: str, event_ids: Collection[str], include_given: bool = False, - ) -> Set[str]: + ) -> set[str]: """Get auth events for given event_ids. The events *must* be state events. 
Args: @@ -267,7 +262,7 @@ def _get_auth_chain_ids_using_cover_index_txn( room_id: str, event_ids: Collection[str], include_given: bool, - ) -> Set[str]: + ) -> set[str]: """Calculates the auth chain IDs using the chain index.""" # First we look up the chain ID/sequence numbers for the given events. @@ -275,10 +270,10 @@ def _get_auth_chain_ids_using_cover_index_txn( initial_events = set(event_ids) # All the events that we've found that are reachable from the events. - seen_events: Set[str] = set() + seen_events: set[str] = set() # A map from chain ID to max sequence number of the given events. - event_chains: Dict[int, int] = {} + event_chains: dict[int, int] = {} sql = """ SELECT event_id, chain_id, sequence_number @@ -313,7 +308,7 @@ def _get_auth_chain_ids_using_cover_index_txn( # are reachable from any event. # A map from chain ID to max sequence number *reachable* from any event ID. - chains: Dict[int, int] = {} + chains: dict[int, int] = {} for links in self._get_chain_links(txn, set(event_chains.keys())): for chain_id in links: if chain_id not in event_chains: @@ -366,8 +361,8 @@ def _get_auth_chain_ids_using_cover_index_txn( @classmethod def _get_chain_links( - cls, txn: LoggingTransaction, chains_to_fetch: Set[int] - ) -> Generator[Dict[int, List[Tuple[int, int, int]]], None, None]: + cls, txn: LoggingTransaction, chains_to_fetch: set[int] + ) -> Generator[dict[int, list[tuple[int, int, int]]], None, None]: """Fetch all auth chain links from the given set of chains, and all links from those chains, recursively. @@ -410,7 +405,7 @@ def _get_chain_links( ) txn.execute(sql % (clause,), args) - links: Dict[int, List[Tuple[int, int, int]]] = {} + links: dict[int, list[tuple[int, int, int]]] = {} for ( origin_chain_id, @@ -428,7 +423,7 @@ def _get_chain_links( def _get_auth_chain_ids_txn( self, txn: LoggingTransaction, event_ids: Collection[str], include_given: bool - ) -> Set[str]: + ) -> set[str]: """Calculates the auth chain IDs. This is used when we don't have a cover index for the room. @@ -449,10 +444,10 @@ def _get_auth_chain_ids_txn( front = set(event_ids) while front: - new_front: Set[str] = set() + new_front: set[str] = set() for chunk in batch_iter(front, 100): # Pull the auth events either from the cache or DB. - to_fetch: List[str] = [] # Event IDs to fetch from DB + to_fetch: list[str] = [] # Event IDs to fetch from DB for event_id in chunk: res = self._event_auth_cache.get(event_id) if res is None: @@ -468,7 +463,7 @@ def _get_auth_chain_ids_txn( # Note we need to batch up the results by event ID before # adding to the cache. 
- to_cache: Dict[str, List[Tuple[str, int]]] = {} + to_cache: dict[str, list[tuple[str, int]]] = {} for event_id, auth_event_id, auth_event_depth in txn: to_cache.setdefault(event_id, []).append( (auth_event_id, auth_event_depth) @@ -488,8 +483,8 @@ def _get_auth_chain_ids_txn( async def get_auth_chain_difference( self, room_id: str, - state_sets: List[Set[str]], - ) -> Set[str]: + state_sets: list[set[str]], + ) -> set[str]: state_diff = await self.get_auth_chain_difference_extended( room_id, state_sets, None, None ) @@ -498,9 +493,9 @@ async def get_auth_chain_difference( async def get_auth_chain_difference_extended( self, room_id: str, - state_sets: List[Set[str]], - conflicted_set: Optional[Set[str]], - additional_backwards_reachable_conflicted_events: Optional[Set[str]], + state_sets: list[set[str]], + conflicted_set: Optional[set[str]], + additional_backwards_reachable_conflicted_events: Optional[set[str]], ) -> StateDifference: """ "Given sets of state events figure out the auth chain difference (as per state res v2 algorithm). @@ -560,9 +555,9 @@ def _get_auth_chain_difference_using_cover_index_txn( self, txn: LoggingTransaction, room_id: str, - state_sets: List[Set[str]], - conflicted_set: Optional[Set[str]] = None, - additional_backwards_reachable_conflicted_events: Optional[Set[str]] = None, + state_sets: list[set[str]], + conflicted_set: Optional[set[str]] = None, + additional_backwards_reachable_conflicted_events: Optional[set[str]] = None, ) -> StateDifference: """Calculates the auth chain difference using the chain index. @@ -587,14 +582,14 @@ def _get_auth_chain_difference_using_cover_index_txn( ) # Map from event_id -> (chain ID, seq no) - chain_info: Dict[str, Tuple[int, int]] = {} + chain_info: dict[str, tuple[int, int]] = {} # Map from chain ID -> seq no -> event Id - chain_to_event: Dict[int, Dict[int, str]] = {} + chain_to_event: dict[int, dict[int, str]] = {} # All the chains that we've found that are reachable from the state # sets. - seen_chains: Set[int] = set() + seen_chains: set[int] = set() # Fetch the chain cover index for the initial set of events we're # considering. @@ -621,7 +616,7 @@ def fetch_chain_info(events_to_fetch: Collection[str]) -> None: events_missing_chain_info = initial_events.difference(chain_info) # The result set to return, i.e. the auth chain difference. - auth_difference_result: Set[str] = set() + auth_difference_result: set[str] = set() if events_missing_chain_info: # For some reason we have events we haven't calculated the chain @@ -652,21 +647,21 @@ def fetch_chain_info(events_to_fetch: Collection[str]) -> None: # A subset of chain_info for conflicted events only, as we need to # loop all conflicted chain positions. Map from event_id -> (chain ID, seq no) - conflicted_chain_positions: Dict[str, Tuple[int, int]] = {} + conflicted_chain_positions: dict[str, tuple[int, int]] = {} # For each chain, remember the positions where conflicted events are. # We need this for calculating the forward reachable events. - conflicted_chain_to_seq: Dict[int, Set[int]] = {} # chain_id => {seq_num} + conflicted_chain_to_seq: dict[int, set[int]] = {} # chain_id => {seq_num} # A subset of chain_info for additional backwards reachable events only, as we need to # loop all additional backwards reachable events for calculating backwards reachable events. 
- additional_backwards_reachable_positions: Dict[ - str, Tuple[int, int] + additional_backwards_reachable_positions: dict[ + str, tuple[int, int] ] = {} # event_id => (chain_id, seq_num) # These next two fields are critical as the intersection of them is the conflicted subgraph. # We'll populate them when we walk the chain links. # chain_id => max(seq_num) backwards reachable (e.g 4 means 1,2,3,4 are backwards reachable) - conflicted_backwards_reachable: Dict[int, int] = {} + conflicted_backwards_reachable: dict[int, int] = {} # chain_id => min(seq_num) forwards reachable (e.g 4 means 4,5,6..n are forwards reachable) - conflicted_forwards_reachable: Dict[int, int] = {} + conflicted_forwards_reachable: dict[int, int] = {} # populate the v2.1 data structures if is_state_res_v21: @@ -688,9 +683,9 @@ def fetch_chain_info(events_to_fetch: Collection[str]) -> None: # Corresponds to `state_sets`, except as a map from chain ID to max # sequence number reachable from the state set. - set_to_chain: List[Dict[int, int]] = [] + set_to_chain: list[dict[int, int]] = [] for state_set in state_sets: - chains: Dict[int, int] = {} + chains: dict[int, int] = {} set_to_chain.append(chains) for state_id in state_set: @@ -802,7 +797,7 @@ def fetch_chain_info(events_to_fetch: Collection[str]) -> None: # Mapping from chain ID to the range of sequence numbers that should be # pulled from the database. - auth_diff_chain_to_gap: Dict[int, Tuple[int, int]] = {} + auth_diff_chain_to_gap: dict[int, tuple[int, int]] = {} for chain_id in seen_chains: min_seq_no = min(chains.get(chain_id, 0) for chains in set_to_chain) @@ -820,10 +815,10 @@ def fetch_chain_info(events_to_fetch: Collection[str]) -> None: auth_diff_chain_to_gap[chain_id] = (min_seq_no, max_seq_no) break - conflicted_subgraph_result: Set[str] = set() + conflicted_subgraph_result: set[str] = set() # Mapping from chain ID to the range of sequence numbers that should be # pulled from the database. - conflicted_subgraph_chain_to_gap: Dict[int, Tuple[int, int]] = {} + conflicted_subgraph_chain_to_gap: dict[int, tuple[int, int]] = {} if is_state_res_v21: # also include the conflicted subgraph using backward/forward reachability info from all # the conflicted events. To calculate this, we want to extract the intersection between @@ -882,9 +877,9 @@ def fetch_chain_info(events_to_fetch: Collection[str]) -> None: ) def _fetch_event_ids_from_chains_txn( - self, txn: LoggingTransaction, chains: Dict[int, Tuple[int, int]] - ) -> Set[str]: - result: Set[str] = set() + self, txn: LoggingTransaction, chains: dict[int, tuple[int, int]] + ) -> set[str]: + result: set[str] = set() if isinstance(self.database_engine, PostgresEngine): # We can use `execute_values` to efficiently fetch the gaps when # using postgres. @@ -918,10 +913,10 @@ def _fixup_auth_chain_difference_sets( self, txn: LoggingTransaction, room_id: str, - state_sets: List[Set[str]], - events_missing_chain_info: Set[str], + state_sets: list[set[str]], + events_missing_chain_info: set[str], events_that_have_chain_index: Collection[str], - ) -> Set[str]: + ) -> set[str]: """Helper for `_get_auth_chain_difference_using_cover_index_txn` to handle the case where we haven't calculated the chain cover index for all events. @@ -962,7 +957,7 @@ def _fixup_auth_chain_difference_sets( WHERE tc.room_id = ? 
""" txn.execute(sql, (room_id,)) - event_to_auth_ids: Dict[str, Set[str]] = {} + event_to_auth_ids: dict[str, set[str]] = {} events_that_have_chain_index = set(events_that_have_chain_index) for event_id, auth_id, auth_id_has_chain in txn: s = event_to_auth_ids.setdefault(event_id, set()) @@ -982,7 +977,7 @@ def _fixup_auth_chain_difference_sets( raise _NoChainCoverIndex(room_id) # Create a map from event IDs we care about to their partial auth chain. - event_id_to_partial_auth_chain: Dict[str, Set[str]] = {} + event_id_to_partial_auth_chain: dict[str, set[str]] = {} for event_id, auth_ids in event_to_auth_ids.items(): if not any(event_id in state_set for state_set in state_sets): continue @@ -1005,7 +1000,7 @@ def _fixup_auth_chain_difference_sets( # 1. Update the state sets to only include indexed events; and # 2. Create a new list containing the auth chains of the un-indexed # events - unindexed_state_sets: List[Set[str]] = [] + unindexed_state_sets: list[set[str]] = [] for state_set in state_sets: unindexed_state_set = set() for event_id, auth_chain in event_id_to_partial_auth_chain.items(): @@ -1031,8 +1026,8 @@ def _fixup_auth_chain_difference_sets( return union - intersection def _get_auth_chain_difference_txn( - self, txn: LoggingTransaction, state_sets: List[Set[str]] - ) -> Set[str]: + self, txn: LoggingTransaction, state_sets: list[set[str]] + ) -> set[str]: """Calculates the auth chain difference using a breadth first search. This is used when we don't have a cover index for the room. @@ -1087,7 +1082,7 @@ def _get_auth_chain_difference_txn( } # The sorted list of events whose auth chains we should walk. - search: List[Tuple[int, str]] = [] + search: list[tuple[int, str]] = [] # We need to get the depth of the initial events for sorting purposes. sql = """ @@ -1104,13 +1099,13 @@ def _get_auth_chain_difference_txn( # I think building a temporary list with fetchall is more efficient than # just `search.extend(txn)`, but this is unconfirmed - search.extend(cast(List[Tuple[int, str]], txn.fetchall())) + search.extend(cast(list[tuple[int, str]], txn.fetchall())) # sort by depth search.sort() # Map from event to its auth events - event_to_auth_events: Dict[str, Set[str]] = {} + event_to_auth_events: dict[str, set[str]] = {} base_sql = """ SELECT a.event_id, auth_id, depth @@ -1129,8 +1124,8 @@ def _get_auth_chain_difference_txn( # currently walking, either from cache or DB. search, chunk = search[:-100], search[-100:] - found: List[Tuple[str, str, int]] = [] # Results found - to_fetch: List[str] = [] # Event IDs to fetch from DB + found: list[tuple[str, str, int]] = [] # Results found + to_fetch: list[str] = [] # Event IDs to fetch from DB for _, event_id in chunk: res = self._event_auth_cache.get(event_id) if res is None: @@ -1147,7 +1142,7 @@ def _get_auth_chain_difference_txn( # We parse the results and add the to the `found` set and the # cache (note we need to batch up the results by event ID before # adding to the cache). - to_cache: Dict[str, List[Tuple[str, int]]] = {} + to_cache: dict[str, list[tuple[str, int]]] = {} for event_id, auth_event_id, auth_event_depth in txn: to_cache.setdefault(event_id, []).append( (auth_event_id, auth_event_depth) @@ -1204,7 +1199,7 @@ async def get_backfill_points_in_room( room_id: str, current_depth: int, limit: int, - ) -> List[Tuple[str, int]]: + ) -> list[tuple[str, int]]: """ Get the backward extremities to backfill from in the room along with the approximate depth. 
@@ -1235,7 +1230,7 @@ async def get_backfill_points_in_room( def get_backfill_points_in_room_txn( txn: LoggingTransaction, room_id: str - ) -> List[Tuple[str, int]]: + ) -> list[tuple[str, int]]: # Assemble a tuple lookup of event_id -> depth for the oldest events # we know of in the room. Backwards extremeties are the oldest # events we know of in the room but we only know of them because @@ -1336,7 +1331,7 @@ def get_backfill_points_in_room_txn( ), ) - return cast(List[Tuple[str, int]], txn.fetchall()) + return cast(list[tuple[str, int]], txn.fetchall()) return await self.db_pool.runInteraction( "get_backfill_points_in_room", @@ -1346,14 +1341,14 @@ def get_backfill_points_in_room_txn( async def get_max_depth_of( self, event_ids: Collection[str] - ) -> Tuple[Optional[str], int]: + ) -> tuple[Optional[str], int]: """Returns the event ID and depth for the event that has the max depth from a set of event IDs Args: event_ids: The event IDs to calculate the max depth of. """ rows = cast( - List[Tuple[str, int]], + list[tuple[str, int]], await self.db_pool.simple_select_many_batch( table="events", column="event_id", @@ -1378,14 +1373,14 @@ async def get_max_depth_of( return max_depth_event_id, current_max_depth - async def get_min_depth_of(self, event_ids: List[str]) -> Tuple[Optional[str], int]: + async def get_min_depth_of(self, event_ids: list[str]) -> tuple[Optional[str], int]: """Returns the event ID and depth for the event that has the min depth from a set of event IDs Args: event_ids: The event IDs to calculate the max depth of. """ rows = cast( - List[Tuple[str, int]], + list[tuple[str, int]], await self.db_pool.simple_select_many_batch( table="events", column="event_id", @@ -1410,7 +1405,7 @@ async def get_min_depth_of(self, event_ids: List[str]) -> Tuple[Optional[str], i return min_depth_event_id, current_min_depth - async def get_prev_events_for_room(self, room_id: str) -> List[str]: + async def get_prev_events_for_room(self, room_id: str) -> list[str]: """ Gets a subset of the current forward extremities in the given room. @@ -1431,7 +1426,7 @@ async def get_prev_events_for_room(self, room_id: str) -> List[str]: def _get_prev_events_for_room_txn( self, txn: LoggingTransaction, room_id: str - ) -> List[str]: + ) -> list[str]: # we just use the 10 newest events. Older events will become # prev_events of future events. @@ -1449,7 +1444,7 @@ def _get_prev_events_for_room_txn( async def get_rooms_with_many_extremities( self, min_count: int, limit: int, room_id_filter: Iterable[str] - ) -> List[str]: + ) -> list[str]: """Get the top rooms with at least N extremities. Args: @@ -1462,7 +1457,7 @@ async def get_rooms_with_many_extremities( sorted by extremity count. """ - def _get_rooms_with_many_extremities_txn(txn: LoggingTransaction) -> List[str]: + def _get_rooms_with_many_extremities_txn(txn: LoggingTransaction) -> list[str]: where_clause = "1=1" if room_id_filter: where_clause = "room_id NOT IN (%s)" % ( @@ -1487,7 +1482,7 @@ def _get_rooms_with_many_extremities_txn(txn: LoggingTransaction) -> List[str]: ) @cached(max_entries=5000, iterable=True) - async def get_latest_event_ids_in_room(self, room_id: str) -> FrozenSet[str]: + async def get_latest_event_ids_in_room(self, room_id: str) -> frozenset[str]: event_ids = await self.db_pool.simple_select_onecol( table="event_forward_extremities", keyvalues={"room_id": room_id}, @@ -1610,7 +1605,7 @@ async def _get_forward_extremeties_for_room( WHERE room_id = ? 
""" - def get_forward_extremeties_for_room_txn(txn: LoggingTransaction) -> List[str]: + def get_forward_extremeties_for_room_txn(txn: LoggingTransaction) -> list[str]: txn.execute(sql, (stream_ordering, room_id)) return [event_id for (event_id,) in txn] @@ -1627,7 +1622,7 @@ def get_forward_extremeties_for_room_txn(txn: LoggingTransaction) -> List[str]: def _get_connected_prev_event_backfill_results_txn( self, txn: LoggingTransaction, event_id: str, limit: int - ) -> List[BackfillQueueNavigationItem]: + ) -> list[BackfillQueueNavigationItem]: """ Find any events connected by prev_event the specified event_id. @@ -1675,8 +1670,8 @@ def _get_connected_prev_event_backfill_results_txn( ] async def get_backfill_events( - self, room_id: str, seed_event_id_list: List[str], limit: int - ) -> List[EventBase]: + self, room_id: str, seed_event_id_list: list[str], limit: int + ) -> list[EventBase]: """Get a list of Events for a given topic that occurred before (and including) the events in seed_event_id_list. Return a list of max size `limit` @@ -1704,9 +1699,9 @@ def _get_backfill_events( self, txn: LoggingTransaction, room_id: str, - seed_event_id_list: List[str], + seed_event_id_list: list[str], limit: int, - ) -> Set[str]: + ) -> set[str]: """ We want to make sure that we do a breadth-first, "depth" ordered search. We also handle navigating historical branches of history connected by @@ -1719,7 +1714,7 @@ def _get_backfill_events( limit, ) - event_id_results: Set[str] = set() + event_id_results: set[str] = set() # In a PriorityQueue, the lowest valued entries are retrieved first. # We're using depth as the priority in the queue and tie-break based on @@ -1727,7 +1722,7 @@ def _get_backfill_events( # highest and newest-in-time message. We add events to the queue with a # negative depth so that we process the newest-in-time messages first # going backwards in time. stream_ordering follows the same pattern. - queue: "PriorityQueue[Tuple[int, int, str, str]]" = PriorityQueue() + queue: "PriorityQueue[tuple[int, int, str, str]]" = PriorityQueue() for seed_event_id in seed_event_id_list: event_lookup_result = self.db_pool.simple_select_one_txn( @@ -1847,7 +1842,7 @@ def _record_event_failed_pull_attempt_upsert_txn( @trace async def get_event_ids_with_failed_pull_attempts( self, event_ids: StrCollection - ) -> Set[str]: + ) -> set[str]: """ Filter the given list of `event_ids` and return events which have any failed pull attempts. @@ -1860,7 +1855,7 @@ async def get_event_ids_with_failed_pull_attempts( """ rows = cast( - List[Tuple[str]], + list[tuple[str]], await self.db_pool.simple_select_many_batch( table="event_failed_pull_attempts", column="event_id", @@ -1877,7 +1872,7 @@ async def get_event_ids_to_not_pull_from_backoff( self, room_id: str, event_ids: Collection[str], - ) -> Dict[str, int]: + ) -> dict[str, int]: """ Filter down the events to ones that we've failed to pull before recently. Uses exponential backoff. @@ -1891,7 +1886,7 @@ async def get_event_ids_to_not_pull_from_backoff( next timestamp at which we may try pulling them again. 
""" event_failed_pull_attempts = cast( - List[Tuple[str, int, int]], + list[tuple[str, int, int]], await self.db_pool.simple_select_many_batch( table="event_failed_pull_attempts", column="event_id", @@ -1932,10 +1927,10 @@ async def get_event_ids_to_not_pull_from_backoff( async def get_missing_events( self, room_id: str, - earliest_events: List[str], - latest_events: List[str], + earliest_events: list[str], + latest_events: list[str], limit: int, - ) -> List[EventBase]: + ) -> list[EventBase]: ids = await self.db_pool.runInteraction( "get_missing_events", self._get_missing_events, @@ -1950,13 +1945,13 @@ def _get_missing_events( self, txn: LoggingTransaction, room_id: str, - earliest_events: List[str], - latest_events: List[str], + earliest_events: list[str], + latest_events: list[str], limit: int, - ) -> List[str]: + ) -> list[str]: seen_events = set(earliest_events) front = set(latest_events) - seen_events - event_results: List[str] = [] + event_results: list[str] = [] query = ( "SELECT prev_event_id FROM event_edges " @@ -1983,7 +1978,7 @@ def _get_missing_events( @trace @tag_args - async def get_successor_events(self, event_id: str) -> List[str]: + async def get_successor_events(self, event_id: str) -> list[str]: """Fetch all events that have the given event as a prev event Args: @@ -2057,7 +2052,7 @@ def _remove_received_event_from_staging_txn( """ txn.execute(sql, (origin, event_id)) - row = cast(Optional[Tuple[int]], txn.fetchone()) + row = cast(Optional[tuple[int]], txn.fetchone()) if row is None: return None @@ -2104,7 +2099,7 @@ def _remove_received_event_from_staging_txn( async def get_next_staged_event_id_for_room( self, room_id: str, - ) -> Optional[Tuple[str, str]]: + ) -> Optional[tuple[str, str]]: """ Get the next event ID in the staging area for the given room. @@ -2114,7 +2109,7 @@ async def get_next_staged_event_id_for_room( def _get_next_staged_event_id_for_room_txn( txn: LoggingTransaction, - ) -> Optional[Tuple[str, str]]: + ) -> Optional[tuple[str, str]]: sql = """ SELECT origin, event_id FROM federation_inbound_events_staging @@ -2125,7 +2120,7 @@ def _get_next_staged_event_id_for_room_txn( txn.execute(sql, (room_id,)) - return cast(Optional[Tuple[str, str]], txn.fetchone()) + return cast(Optional[tuple[str, str]], txn.fetchone()) return await self.db_pool.runInteraction( "get_next_staged_event_id_for_room", _get_next_staged_event_id_for_room_txn @@ -2135,12 +2130,12 @@ async def get_next_staged_event_for_room( self, room_id: str, room_version: RoomVersion, - ) -> Optional[Tuple[str, EventBase]]: + ) -> Optional[tuple[str, EventBase]]: """Get the next event in the staging area for the given room.""" def _get_next_staged_event_for_room_txn( txn: LoggingTransaction, - ) -> Optional[Tuple[str, str, str]]: + ) -> Optional[tuple[str, str, str]]: sql = """ SELECT event_json, internal_metadata, origin FROM federation_inbound_events_staging @@ -2150,7 +2145,7 @@ def _get_next_staged_event_for_room_txn( """ txn.execute(sql, (room_id,)) - return cast(Optional[Tuple[str, str, str]], txn.fetchone()) + return cast(Optional[tuple[str, str, str]], txn.fetchone()) row = await self.db_pool.runInteraction( "get_next_staged_event_for_room", _get_next_staged_event_for_room_txn @@ -2199,7 +2194,7 @@ async def prune_staged_events_in_room( # by other events in the queue). We do this so that we can always # backpaginate in all the events we have dropped. 
rows = cast( - List[Tuple[str, str]], + list[tuple[str, str]], await self.db_pool.simple_select_list( table="federation_inbound_events_staging", keyvalues={"room_id": room_id}, @@ -2210,8 +2205,8 @@ async def prune_staged_events_in_room( # Find the set of events referenced by those in the queue, as well as # collecting all the event IDs in the queue. - referenced_events: Set[str] = set() - seen_events: Set[str] = set() + referenced_events: set[str] = set() + seen_events: set[str] = set() for event_id, event_json in rows: seen_events.add(event_id) event_d = db_to_json(event_json) @@ -2272,7 +2267,7 @@ async def prune_staged_events_in_room( return True - async def get_all_rooms_with_staged_incoming_events(self) -> List[str]: + async def get_all_rooms_with_staged_incoming_events(self) -> list[str]: """Get the room IDs of all events currently staged.""" return await self.db_pool.simple_select_onecol( table="federation_inbound_events_staging", @@ -2287,15 +2282,15 @@ async def _get_stats_for_federation_staging(self) -> None: def _get_stats_for_federation_staging_txn( txn: LoggingTransaction, - ) -> Tuple[int, int]: + ) -> tuple[int, int]: txn.execute("SELECT count(*) FROM federation_inbound_events_staging") - (count,) = cast(Tuple[int], txn.fetchone()) + (count,) = cast(tuple[int], txn.fetchone()) txn.execute( "SELECT min(received_ts) FROM federation_inbound_events_staging" ) - (received_ts,) = cast(Tuple[Optional[int]], txn.fetchone()) + (received_ts,) = cast(tuple[Optional[int]], txn.fetchone()) # If there is nothing in the staging area default it to 0. age = 0 @@ -2409,8 +2404,8 @@ def delete_event_auth(txn: LoggingTransaction) -> bool: def _materialize( origin_chain_id: int, origin_sequence_number: int, - links: Dict[int, List[Tuple[int, int, int]]], - materialized: Dict[int, int], + links: dict[int, list[tuple[int, int, int]]], + materialized: dict[int, int], backwards: bool = True, ) -> None: """Helper function for fetching auth chain links. For a given origin chain @@ -2468,10 +2463,10 @@ def _materialize( def _generate_forward_links( - links: Dict[int, List[Tuple[int, int, int]]], -) -> Dict[int, List[Tuple[int, int, int]]]: + links: dict[int, list[tuple[int, int, int]]], +) -> dict[int, list[tuple[int, int, int]]]: """Reverse the input links from the given backwards links""" - new_links: Dict[int, List[Tuple[int, int, int]]] = {} + new_links: dict[int, list[tuple[int, int, int]]] = {} for origin_chain_id, chain_links in links.items(): for origin_seq_num, target_chain_id, target_seq_num in chain_links: new_links.setdefault(target_chain_id, []).append( @@ -2481,9 +2476,9 @@ def _generate_forward_links( def accumulate_forwards_reachable_events( - conflicted_forwards_reachable: Dict[int, int], - back_links: Dict[int, List[Tuple[int, int, int]]], - conflicted_chain_positions: Dict[str, Tuple[int, int]], + conflicted_forwards_reachable: dict[int, int], + back_links: dict[int, list[tuple[int, int, int]]], + conflicted_chain_positions: dict[str, tuple[int, int]], ) -> None: """Accumulate new forwards reachable events using the back_links provided. 
diff --git a/synapse/storage/databases/main/event_push_actions.py b/synapse/storage/databases/main/event_push_actions.py index ec26aedc6bc..9c624c739af 100644 --- a/synapse/storage/databases/main/event_push_actions.py +++ b/synapse/storage/databases/main/event_push_actions.py @@ -84,8 +84,6 @@ from typing import ( TYPE_CHECKING, Collection, - Dict, - List, Mapping, Optional, Tuple, @@ -118,11 +116,11 @@ logger = logging.getLogger(__name__) -DEFAULT_NOTIF_ACTION: List[Union[dict, str]] = [ +DEFAULT_NOTIF_ACTION: list[Union[dict, str]] = [ "notify", {"set_tweak": "highlight", "value": False}, ] -DEFAULT_HIGHLIGHT_ACTION: List[Union[dict, str]] = [ +DEFAULT_HIGHLIGHT_ACTION: list[Union[dict, str]] = [ "notify", {"set_tweak": "sound", "value": "default"}, {"set_tweak": "highlight"}, @@ -138,7 +136,7 @@ class _RoomReceipt: unthreaded_stream_ordering: int = 0 # threaded_stream_ordering includes the main pseudo-thread. - threaded_stream_ordering: Dict[str, int] = attr.Factory(dict) + threaded_stream_ordering: dict[str, int] = attr.Factory(dict) def is_unread(self, thread_id: str, stream_ordering: int) -> bool: """Returns True if the stream ordering is unread according to the receipt information.""" @@ -165,7 +163,7 @@ class HttpPushAction: event_id: str room_id: str stream_ordering: int - actions: List[Union[dict, str]] + actions: list[Union[dict, str]] @attr.s(slots=True, frozen=True, auto_attribs=True) @@ -244,7 +242,7 @@ def _serialize_action( return json_encoder.encode(actions) -def _deserialize_action(actions: str, is_highlight: bool) -> List[Union[dict, str]]: +def _deserialize_action(actions: str, is_highlight: bool) -> list[Union[dict, str]]: """Custom deserializer for actions. This allows us to "compress" common actions""" if actions: return db_to_json(actions) @@ -256,7 +254,7 @@ def _deserialize_action(actions: str, is_highlight: bool) -> List[Union[dict, st class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBaseStore): - _background_tasks: List[LoopingCall] = [] + _background_tasks: list[LoopingCall] = [] def __init__( self, @@ -351,7 +349,7 @@ def drop_null_thread_id_indexes_txn(txn: LoggingTransaction) -> None: ) return 0 - async def get_unread_counts_by_room_for_user(self, user_id: str) -> Dict[str, int]: + async def get_unread_counts_by_room_for_user(self, user_id: str) -> dict[str, int]: """Get the notification count by room for a user. Only considers notifications, not highlight or unread counts, and threads are currently aggregated under their room. 
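The `_serialize_action`/`_deserialize_action` pair above "compresses" the two overwhelmingly common action lists to an empty string, relying on the separately stored highlight flag to pick the right default back out. A sketch of that round trip; the function bodies are reconstructed for illustration, since only fragments of them are visible in the hunks:

import json
from typing import Union

DEFAULT_NOTIF_ACTION: list[Union[dict, str]] = [
    "notify",
    {"set_tweak": "highlight", "value": False},
]
DEFAULT_HIGHLIGHT_ACTION: list[Union[dict, str]] = [
    "notify",
    {"set_tweak": "sound", "value": "default"},
    {"set_tweak": "highlight"},
]

def serialize_action(actions: list[Union[dict, str]], is_highlight: bool) -> str:
    # Store the empty string for the two defaults; the is_highlight flag lives
    # in its own column, so it disambiguates them on the way back out.
    if is_highlight:
        if actions == DEFAULT_HIGHLIGHT_ACTION:
            return ""
    elif actions == DEFAULT_NOTIF_ACTION:
        return ""
    return json.dumps(actions)

def deserialize_action(actions: str, is_highlight: bool) -> list[Union[dict, str]]:
    if actions:
        return json.loads(actions)
    return DEFAULT_HIGHLIGHT_ACTION if is_highlight else DEFAULT_NOTIF_ACTION

assert deserialize_action(serialize_action(DEFAULT_NOTIF_ACTION, False), False) == DEFAULT_NOTIF_ACTION
assert deserialize_action(serialize_action(DEFAULT_HIGHLIGHT_ACTION, True), True) == DEFAULT_HIGHLIGHT_ACTION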
@@ -373,7 +371,7 @@ async def get_unread_counts_by_room_for_user(self, user_id: str) -> Dict[str, in def _get_unread_counts_by_room_for_user_txn( self, txn: LoggingTransaction, user_id: str - ) -> Dict[str, int]: + ) -> dict[str, int]: receipt_types_clause, args = make_in_list_sql_clause( self.database_engine, "receipt_type", @@ -440,7 +438,7 @@ def _get_unread_counts_by_room_for_user_txn( txn.execute(sql, args) seen_thread_ids = set() - room_to_count: Dict[str, int] = defaultdict(int) + room_to_count: dict[str, int] = defaultdict(int) for room_id, thread_id, notif_count in txn: room_to_count[room_id] += notif_count @@ -585,7 +583,7 @@ def _get_unread_counts_by_pos_txn( """ main_counts = NotifCounts() - thread_counts: Dict[str, NotifCounts] = {} + thread_counts: dict[str, NotifCounts] = {} def _get_thread(thread_id: str) -> NotifCounts: if thread_id == MAIN_TIMELINE: @@ -778,7 +776,7 @@ def _get_notif_unread_count_for_user_room( stream_ordering: int, max_stream_ordering: Optional[int] = None, thread_id: Optional[str] = None, - ) -> List[Tuple[int, int, str]]: + ) -> list[tuple[int, int, str]]: """Returns the notify and unread counts from `event_push_actions` for the given user/room in the given range. @@ -840,12 +838,12 @@ def _get_notif_unread_count_for_user_room( """ txn.execute(sql, args) - return cast(List[Tuple[int, int, str]], txn.fetchall()) + return cast(list[tuple[int, int, str]], txn.fetchall()) async def get_push_action_users_in_range( self, min_stream_ordering: int, max_stream_ordering: int - ) -> List[str]: - def f(txn: LoggingTransaction) -> List[str]: + ) -> list[str]: + def f(txn: LoggingTransaction) -> list[str]: sql = ( "SELECT DISTINCT(user_id) FROM event_push_actions WHERE" " stream_ordering >= ? AND stream_ordering <= ? AND notif = 1" @@ -861,7 +859,7 @@ def _get_receipts_for_room_and_threads_txn( user_id: str, room_ids: StrCollection, thread_ids: StrCollection, - ) -> Dict[str, _RoomReceipt]: + ) -> dict[str, _RoomReceipt]: """ Get (private) read receipts for a user in each of the given room IDs and thread IDs. @@ -936,7 +934,7 @@ def _get_receipts_for_room_and_threads_txn( txn.execute(sql, args) - result: Dict[str, _RoomReceipt] = {} + result: dict[str, _RoomReceipt] = {} for room_id, thread_id, stream_ordering in txn: room_receipt = result.setdefault(room_id, _RoomReceipt()) if thread_id is None: @@ -952,7 +950,7 @@ async def get_unread_push_actions_for_user_in_range_for_http( min_stream_ordering: int, max_stream_ordering: int, limit: int = 20, - ) -> List[HttpPushAction]: + ) -> list[HttpPushAction]: """Get a list of the most recent unread push actions for a given user, within the given stream ordering range. Called by the httppusher. @@ -971,7 +969,7 @@ async def get_unread_push_actions_for_user_in_range_for_http( def get_push_actions_txn( txn: LoggingTransaction, - ) -> List[Tuple[str, str, str, int, str, bool]]: + ) -> list[tuple[str, str, str, int, str, bool]]: sql = """ SELECT ep.event_id, ep.room_id, ep.thread_id, ep.stream_ordering, ep.actions, ep.highlight @@ -984,7 +982,7 @@ def get_push_actions_txn( ORDER BY ep.stream_ordering ASC LIMIT ? 
""" txn.execute(sql, (user_id, min_stream_ordering, max_stream_ordering, limit)) - return cast(List[Tuple[str, str, str, int, str, bool]], txn.fetchall()) + return cast(list[tuple[str, str, str, int, str, bool]], txn.fetchall()) push_actions = await self.db_pool.runInteraction( "get_unread_push_actions_for_user_in_range_http", get_push_actions_txn @@ -1040,7 +1038,7 @@ async def get_unread_push_actions_for_user_in_range_for_email( min_stream_ordering: int, max_stream_ordering: int, limit: int = 20, - ) -> List[EmailPushAction]: + ) -> list[EmailPushAction]: """Get a list of the most recent unread push actions for a given user, within the given stream ordering range. Called by the emailpusher @@ -1059,7 +1057,7 @@ async def get_unread_push_actions_for_user_in_range_for_email( def get_push_actions_txn( txn: LoggingTransaction, - ) -> List[Tuple[str, str, str, int, str, bool, int]]: + ) -> list[tuple[str, str, str, int, str, bool, int]]: sql = """ SELECT ep.event_id, ep.room_id, ep.thread_id, ep.stream_ordering, ep.actions, ep.highlight, e.received_ts @@ -1073,7 +1071,7 @@ def get_push_actions_txn( ORDER BY ep.stream_ordering DESC LIMIT ? """ txn.execute(sql, (user_id, min_stream_ordering, max_stream_ordering, limit)) - return cast(List[Tuple[str, str, str, int, str, bool, int]], txn.fetchall()) + return cast(list[tuple[str, str, str, int, str, bool, int]], txn.fetchall()) push_actions = await self.db_pool.runInteraction( "get_unread_push_actions_for_user_in_range_email", get_push_actions_txn @@ -1159,7 +1157,7 @@ def _get_if_maybe_push_in_range_for_user_txn(txn: LoggingTransaction) -> bool: async def add_push_actions_to_staging( self, event_id: str, - user_id_actions: Dict[str, Collection[Union[Mapping, str]]], + user_id_actions: dict[str, Collection[Union[Mapping, str]]], count_as_unread: bool, thread_id: str, ) -> None: @@ -1179,7 +1177,7 @@ async def add_push_actions_to_staging( # can be used to insert into the `event_push_actions_staging` table. 
def _gen_entry( user_id: str, actions: Collection[Union[Mapping, str]] - ) -> Tuple[str, str, str, int, int, int, str, int]: + ) -> tuple[str, str, str, int, int, int, str, int]: is_highlight = 1 if _action_has_highlight(actions) else 0 notif = 1 if "notify" in actions else 0 return ( @@ -1296,7 +1294,7 @@ def _find_first_stream_ordering_after_ts_txn( The stream ordering """ txn.execute("SELECT MAX(stream_ordering) FROM events") - max_stream_ordering = cast(Tuple[Optional[int]], txn.fetchone())[0] + max_stream_ordering = cast(tuple[Optional[int]], txn.fetchone())[0] if max_stream_ordering is None: return 0 @@ -1355,7 +1353,7 @@ def _find_first_stream_ordering_after_ts_txn( async def get_time_of_last_push_action_before( self, stream_ordering: int ) -> Optional[int]: - def f(txn: LoggingTransaction) -> Optional[Tuple[int]]: + def f(txn: LoggingTransaction) -> Optional[tuple[int]]: sql = """ SELECT e.received_ts FROM event_push_actions AS ep @@ -1365,7 +1363,7 @@ def f(txn: LoggingTransaction) -> Optional[Tuple[int]]: LIMIT 1 """ txn.execute(sql, (stream_ordering,)) - return cast(Optional[Tuple[int]], txn.fetchone()) + return cast(Optional[tuple[int]], txn.fetchone()) result = await self.db_pool.runInteraction( "get_time_of_last_push_action_before", f @@ -1457,7 +1455,7 @@ def _handle_new_receipts_for_notifs_txn(self, txn: LoggingTransaction) -> bool: limit, ), ) - rows = cast(List[Tuple[int, str, str, Optional[str], int]], txn.fetchall()) + rows = cast(list[tuple[int, str, str, Optional[str], int]], txn.fetchall()) # For each new read receipt we delete push actions from before it and # recalculate the summary. @@ -1654,7 +1652,7 @@ def _rotate_notifs_before_txn( # object because we might not have the same amount of rows in each of them. To do # this, we use a dict indexed on the user ID and room ID to make it easier to # populate. - summaries: Dict[Tuple[str, str, str], _EventPushSummary] = {} + summaries: dict[tuple[str, str, str], _EventPushSummary] = {} for row in txn: summaries[(row[0], row[1], row[2])] = _EventPushSummary( unread_count=row[3], @@ -1832,10 +1830,10 @@ async def get_push_actions_for_user( before: Optional[int] = None, limit: int = 50, only_highlight: bool = False, - ) -> List[UserPushAction]: + ) -> list[UserPushAction]: def f( txn: LoggingTransaction, - ) -> List[Tuple[str, str, int, int, str, bool, str, int]]: + ) -> list[tuple[str, str, int, int, str, bool, str, int]]: before_clause = "" if before: before_clause = "AND epa.stream_ordering < ?" @@ -1863,7 +1861,7 @@ def f( """ % (before_clause,) txn.execute(sql, args) return cast( - List[Tuple[str, str, int, int, str, bool, str, int]], txn.fetchall() + list[tuple[str, str, int, int, str, bool, str, int]], txn.fetchall() ) push_actions = await self.db_pool.runInteraction("get_push_actions_for_user", f) diff --git a/synapse/storage/databases/main/events.py b/synapse/storage/databases/main/events.py index b6037468b39..da9ecfbdb97 100644 --- a/synapse/storage/databases/main/events.py +++ b/synapse/storage/databases/main/events.py @@ -27,14 +27,10 @@ TYPE_CHECKING, Any, Collection, - Dict, Generator, Iterable, - List, Optional, Sequence, - Set, - Tuple, TypedDict, cast, ) @@ -129,7 +125,7 @@ class DeltaState: should e.g. be removed from `current_state_events` table. 
""" - to_delete: List[Tuple[str, str]] + to_delete: list[tuple[str, str]] to_insert: StateMap[str] no_longer_in_room: bool = False @@ -207,9 +203,9 @@ class SlidingSyncTableChanges: SlidingSyncMembershipSnapshotSharedInsertValues ) # List of membership to insert into `sliding_sync_membership_snapshots` - to_insert_membership_snapshots: List[SlidingSyncMembershipInfo] + to_insert_membership_snapshots: list[SlidingSyncMembershipInfo] # List of user_id to delete from `sliding_sync_membership_snapshots` - to_delete_membership_snapshots: List[str] + to_delete_membership_snapshots: list[str] @attr.s(slots=True, auto_attribs=True) @@ -226,7 +222,7 @@ class NewEventChainLinks: chain_id: int sequence_number: int - links: List[Tuple[int, int]] = attr.Factory(list) + links: list[tuple[int, int]] = attr.Factory(list) class PersistEventsStore: @@ -274,11 +270,11 @@ def __init__( async def _persist_events_and_state_updates( self, room_id: str, - events_and_contexts: List[EventPersistencePair], + events_and_contexts: list[EventPersistencePair], *, state_delta_for_room: Optional[DeltaState], - new_forward_extremities: Optional[Set[str]], - new_event_links: Dict[str, NewEventChainLinks], + new_forward_extremities: Optional[set[str]], + new_event_links: dict[str, NewEventChainLinks], use_negative_stream_ordering: bool = False, inhibit_local_membership_updates: bool = False, ) -> None: @@ -585,21 +581,21 @@ async def _calculate_sliding_sync_table_changes( ] membership_snapshot_shared_insert_values: SlidingSyncMembershipSnapshotSharedInsertValues = {} - membership_infos_to_insert_membership_snapshots: List[ + membership_infos_to_insert_membership_snapshots: list[ SlidingSyncMembershipInfo ] = [] if to_insert: - membership_event_id_to_user_id_map: Dict[str, str] = {} + membership_event_id_to_user_id_map: dict[str, str] = {} for state_key, event_id in to_insert.items(): if state_key[0] == EventTypes.Member and self.is_mine_id(state_key[1]): membership_event_id_to_user_id_map[event_id] = state_key[1] - membership_event_map: Dict[str, EventBase] = {} + membership_event_map: dict[str, EventBase] = {} # In normal event persist scenarios, we should be able to find the # membership events in the `events_and_contexts` given to us but it's # possible a state reset happened which added us to the room without a # corresponding new membership event (reset back to a previous membership). - missing_membership_event_ids: Set[str] = set() + missing_membership_event_ids: set[str] = set() for membership_event_id in membership_event_id_to_user_id_map.keys(): membership_event = event_map.get(membership_event_id) if membership_event: @@ -668,7 +664,7 @@ async def _calculate_sliding_sync_table_changes( # these state events in `events_and_contexts` since we don't generally # batch up local membership changes with other events, but it can # happen. - missing_state_event_ids: Set[str] = set() + missing_state_event_ids: set[str] = set() for state_key, event_id in current_state_ids_map.items(): event = event_map.get(event_id) if event: @@ -780,7 +776,7 @@ async def _calculate_sliding_sync_table_changes( # events in the `events_and_contexts` given to us but it's possible a state # reset happened which that reset back to a previous state. 
current_state_map = {} - missing_event_ids: Set[str] = set() + missing_event_ids: set[str] = set() for state_key, event_id in current_state_ids_map.items(): event = event_map.get(event_id) if event: @@ -826,7 +822,7 @@ async def _calculate_sliding_sync_table_changes( async def calculate_chain_cover_index_for_events( self, room_id: str, events: Collection[EventBase] - ) -> Dict[str, NewEventChainLinks]: + ) -> dict[str, NewEventChainLinks]: # Filter to state events, and ensure there are no duplicates. state_events = [] seen_events = set() @@ -849,7 +845,7 @@ async def calculate_chain_cover_index_for_events( def calculate_chain_cover_index_for_events_txn( self, txn: LoggingTransaction, room_id: str, state_events: Collection[EventBase] - ) -> Dict[str, NewEventChainLinks]: + ) -> dict[str, NewEventChainLinks]: # We now calculate chain ID/sequence numbers for any state events we're # persisting. We ignore out of band memberships as we're not in the room # and won't have their auth chain (we'll fix it up later if we join the @@ -905,7 +901,7 @@ def calculate_chain_cover_index_for_events_txn( event_to_auth_chain, ) - async def _get_events_which_are_prevs(self, event_ids: Iterable[str]) -> List[str]: + async def _get_events_which_are_prevs(self, event_ids: Iterable[str]) -> list[str]: """Filter the supplied list of event_ids to get those which are prev_events of existing (non-outlier/rejected) events. @@ -915,7 +911,7 @@ async def _get_events_which_are_prevs(self, event_ids: Iterable[str]) -> List[st Returns: Filtered event ids """ - results: List[str] = [] + results: list[str] = [] def _get_events_which_are_prevs_txn( txn: LoggingTransaction, batch: Collection[str] @@ -946,7 +942,7 @@ def _get_events_which_are_prevs_txn( return results - async def _get_prevs_before_rejected(self, event_ids: Iterable[str]) -> Set[str]: + async def _get_prevs_before_rejected(self, event_ids: Iterable[str]) -> set[str]: """Get soft-failed ancestors to remove from the extremities. Given a set of events, find all those that have been soft-failed or @@ -967,7 +963,7 @@ async def _get_prevs_before_rejected(self, event_ids: Iterable[str]) -> Set[str] # The set of event_ids to return. This includes all soft-failed events # and their prev events. - existing_prevs: Set[str] = set() + existing_prevs: set[str] = set() def _get_prevs_before_rejected_txn( txn: LoggingTransaction, batch: Collection[str] @@ -1016,11 +1012,11 @@ def _persist_events_txn( txn: LoggingTransaction, *, room_id: str, - events_and_contexts: List[EventPersistencePair], + events_and_contexts: list[EventPersistencePair], inhibit_local_membership_updates: bool, state_delta_for_room: Optional[DeltaState], - new_forward_extremities: Optional[Set[str]], - new_event_links: Dict[str, NewEventChainLinks], + new_forward_extremities: Optional[set[str]], + new_event_links: dict[str, NewEventChainLinks], sliding_sync_table_changes: Optional[SlidingSyncTableChanges], ) -> None: """Insert some number of room events into the necessary database tables. 
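`calculate_chain_cover_index_for_events` above filters its input down to state events while dropping duplicate event IDs in order. The same order-preserving seen-set idiom, sketched with a stand-in event type (`_Ev` is an assumption, not Synapse's `EventBase`):

from dataclasses import dataclass

@dataclass
class _Ev:
    event_id: str
    is_state: bool

def filter_state_events(events: list[_Ev]) -> list[_Ev]:
    state_events: list[_Ev] = []
    seen_events: set[str] = set()
    for event in events:
        # Keep only state events, and only the first occurrence of each ID.
        if event.is_state and event.event_id not in seen_events:
            seen_events.add(event.event_id)
            state_events.append(event)
    return state_events

evs = [_Ev("$a", True), _Ev("$a", True), _Ev("$b", False), _Ev("$c", True)]
assert [e.event_id for e in filter_state_events(evs)] == ["$a", "$c"]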
@@ -1178,8 +1174,8 @@ def _persist_events_txn( def _persist_event_auth_chain_txn( self, txn: LoggingTransaction, - events: List[EventBase], - new_event_links: Dict[str, NewEventChainLinks], + events: list[EventBase], + new_event_links: dict[str, NewEventChainLinks], ) -> None: if new_event_links: self._persist_chain_cover_index(txn, self.db_pool, new_event_links) @@ -1212,9 +1208,9 @@ def _add_chain_cover_index( txn: LoggingTransaction, db_pool: DatabasePool, event_chain_id_gen: SequenceGenerator, - event_to_room_id: Dict[str, str], - event_to_types: Dict[str, Tuple[str, str]], - event_to_auth_chain: Dict[str, StrCollection], + event_to_room_id: dict[str, str], + event_to_types: dict[str, tuple[str, str]], + event_to_auth_chain: dict[str, StrCollection], ) -> None: """Calculate and persist the chain cover index for the given events. @@ -1241,10 +1237,10 @@ def _calculate_chain_cover_index( txn: LoggingTransaction, db_pool: DatabasePool, event_chain_id_gen: SequenceGenerator, - event_to_room_id: Dict[str, str], - event_to_types: Dict[str, Tuple[str, str]], - event_to_auth_chain: Dict[str, StrCollection], - ) -> Dict[str, NewEventChainLinks]: + event_to_room_id: dict[str, str], + event_to_types: dict[str, tuple[str, str]], + event_to_auth_chain: dict[str, StrCollection], + ) -> dict[str, NewEventChainLinks]: """Calculate the chain cover index for the given events. Args: @@ -1259,7 +1255,7 @@ def _calculate_chain_cover_index( """ # Map from event ID to chain ID/sequence number. - chain_map: Dict[str, Tuple[int, int]] = {} + chain_map: dict[str, tuple[int, int]] = {} # Set of event IDs to calculate chain ID/seq numbers for. events_to_calc_chain_id_for = set(event_to_room_id) @@ -1268,7 +1264,7 @@ def _calculate_chain_cover_index( # we're looking at. These should just be out of band memberships, where # we didn't have the auth chain when we first persisted. auth_chain_to_calc_rows = cast( - List[Tuple[str, str, str]], + list[tuple[str, str, str]], db_pool.simple_select_many_txn( txn, table="event_auth_chain_to_calculate", @@ -1490,7 +1486,7 @@ def _persist_chain_cover_index( cls, txn: LoggingTransaction, db_pool: DatabasePool, - new_event_links: Dict[str, NewEventChainLinks], + new_event_links: dict[str, NewEventChainLinks], ) -> None: db_pool.simple_insert_many_txn( txn, @@ -1536,12 +1532,12 @@ def _allocate_chain_ids( txn: LoggingTransaction, db_pool: DatabasePool, event_chain_id_gen: SequenceGenerator, - event_to_room_id: Dict[str, str], - event_to_types: Dict[str, Tuple[str, str]], - event_to_auth_chain: Dict[str, StrCollection], - events_to_calc_chain_id_for: Set[str], - chain_map: Dict[str, Tuple[int, int]], - ) -> Dict[str, Tuple[int, int]]: + event_to_room_id: dict[str, str], + event_to_types: dict[str, tuple[str, str]], + event_to_auth_chain: dict[str, StrCollection], + events_to_calc_chain_id_for: set[str], + chain_map: dict[str, tuple[int, int]], + ) -> dict[str, tuple[int, int]]: """Allocates, but does not persist, chain ID/sequence numbers for the events in `events_to_calc_chain_id_for`. (c.f. _add_chain_cover_index for info on args) @@ -1573,8 +1569,8 @@ def _allocate_chain_ids( # new chain if the sequence number has already been allocated. 
# - existing_chains: Set[int] = set() - tree: List[Tuple[str, Optional[str]]] = [] + existing_chains: set[int] = set() + tree: list[tuple[str, Optional[str]]] = [] # We need to do this in a topologically sorted order as we want to # generate chain IDs/sequence numbers of an event's auth events before @@ -1604,7 +1600,7 @@ def _allocate_chain_ids( ) txn.execute(sql % (clause,), args) - chain_to_max_seq_no: Dict[Any, int] = {row[0]: row[1] for row in txn} + chain_to_max_seq_no: dict[Any, int] = {row[0]: row[1] for row in txn} # Allocate the new events chain ID/sequence numbers. # @@ -1614,8 +1610,8 @@ def _allocate_chain_ids( # number of new chain IDs in one call, replacing all temporary # objects with real allocated chain IDs. - unallocated_chain_ids: Set[object] = set() - new_chain_tuples: Dict[str, Tuple[Any, int]] = {} + unallocated_chain_ids: set[object] = set() + new_chain_tuples: dict[str, tuple[Any, int]] = {} for event_id, auth_event_id in tree: # If we reference an auth_event_id we fetch the allocated chain ID, # either from the existing `chain_map` or the newly generated @@ -1626,7 +1622,7 @@ def _allocate_chain_ids( if not existing_chain_id: existing_chain_id = chain_map[auth_event_id] - new_chain_tuple: Optional[Tuple[Any, int]] = None + new_chain_tuple: Optional[tuple[Any, int]] = None if existing_chain_id: # We found a chain ID/sequence number candidate, check its # not already taken. @@ -1653,7 +1649,7 @@ def _allocate_chain_ids( ) # Map from potentially temporary chain ID to real chain ID - chain_id_to_allocated_map: Dict[Any, int] = dict( + chain_id_to_allocated_map: dict[Any, int] = dict( zip(unallocated_chain_ids, newly_allocated_chain_ids) ) chain_id_to_allocated_map.update((c, c) for c in existing_chains) @@ -1666,12 +1662,12 @@ def _allocate_chain_ids( def _persist_transaction_ids_txn( self, txn: LoggingTransaction, - events_and_contexts: List[EventPersistencePair], + events_and_contexts: list[EventPersistencePair], ) -> None: """Persist the mapping from transaction IDs to event IDs (if defined).""" inserted_ts = self._clock.time_msec() - to_insert_device_id: List[Tuple[str, str, str, str, str, int]] = [] + to_insert_device_id: list[tuple[str, str, str, str, str, int]] = [] for event, _ in events_and_contexts: txn_id = getattr(event.internal_metadata, "txn_id", None) device_id = getattr(event.internal_metadata, "device_id", None) @@ -1899,7 +1895,7 @@ def _update_current_state_txn( sliding_sync_table_changes.joined_room_updates.values() ) - args: List[Any] = [ + args: list[Any] = [ room_id, room_id, sliding_sync_table_changes.joined_room_bump_stamp_to_fully_insert, @@ -2316,7 +2312,7 @@ def _update_sliding_sync_tables_with_new_persisted_events_txn( self, txn: LoggingTransaction, room_id: str, - events_and_contexts: List[EventPersistencePair], + events_and_contexts: list[EventPersistencePair], ) -> None: """ Update the latest `event_stream_ordering`/`bump_stamp` columns in the @@ -2427,7 +2423,7 @@ def _update_forward_extremities_txn( self, txn: LoggingTransaction, room_id: str, - new_forward_extremities: Set[str], + new_forward_extremities: set[str], max_stream_order: int, ) -> None: self.db_pool.simple_delete_txn( @@ -2456,8 +2452,8 @@ def _update_forward_extremities_txn( @classmethod def _filter_events_and_contexts_for_duplicates( - cls, events_and_contexts: List[EventPersistencePair] - ) -> List[EventPersistencePair]: + cls, events_and_contexts: list[EventPersistencePair] + ) -> list[EventPersistencePair]: """Ensure that we don't have the same event twice. 
Pick the earliest non-outlier if there is one, else the earliest one. @@ -2486,7 +2482,7 @@ def _update_room_depths_txn( self, txn: LoggingTransaction, room_id: str, - events_and_contexts: List[EventPersistencePair], + events_and_contexts: list[EventPersistencePair], ) -> None: """Update min_depth for each room @@ -2528,8 +2524,8 @@ def _update_room_depths_txn( def _update_outliers_txn( self, txn: LoggingTransaction, - events_and_contexts: List[EventPersistencePair], - ) -> List[EventPersistencePair]: + events_and_contexts: list[EventPersistencePair], + ) -> list[EventPersistencePair]: """Update any outliers with new event info. This turns outliers into ex-outliers (unless the new event was rejected), and @@ -2547,7 +2543,7 @@ def _update_outliers_txn( a room that has been un-partial stated. """ rows = cast( - List[Tuple[str, bool]], + list[tuple[str, bool]], self.db_pool.simple_select_many_txn( txn, "events", @@ -2740,8 +2736,8 @@ def event_dict(event: EventBase) -> JsonDict: def _store_rejected_events_txn( self, txn: LoggingTransaction, - events_and_contexts: List[EventPersistencePair], - ) -> List[EventPersistencePair]: + events_and_contexts: list[EventPersistencePair], + ) -> list[EventPersistencePair]: """Add rows to the 'rejections' table for received events which were rejected @@ -2768,8 +2764,8 @@ def _update_metadata_tables_txn( self, txn: LoggingTransaction, *, - events_and_contexts: List[EventPersistencePair], - all_events_and_contexts: List[EventPersistencePair], + events_and_contexts: list[EventPersistencePair], + all_events_and_contexts: list[EventPersistencePair], inhibit_local_membership_updates: bool = False, ) -> None: """Update all the miscellaneous tables for new events @@ -2863,9 +2859,9 @@ def _update_metadata_tables_txn( def _add_to_cache( self, txn: LoggingTransaction, - events_and_contexts: List[EventPersistencePair], + events_and_contexts: list[EventPersistencePair], ) -> None: - to_prefill: List[EventCacheEntry] = [] + to_prefill: list[EventCacheEntry] = [] ev_map = {e.event_id: e for e, _ in events_and_contexts} if not ev_map: @@ -2925,7 +2921,7 @@ def insert_labels_for_event_txn( self, txn: LoggingTransaction, event_id: str, - labels: List[str], + labels: list[str], room_id: str, topological_ordering: int, ) -> None: @@ -2967,7 +2963,7 @@ def _insert_event_expiry_txn( def _store_room_members_txn( self, txn: LoggingTransaction, - events: List[EventBase], + events: list[EventBase], *, inhibit_local_membership_updates: bool = False, ) -> None: @@ -3336,8 +3332,8 @@ def store_event_search_txn( def _set_push_actions_for_event_and_users_txn( self, txn: LoggingTransaction, - events_and_contexts: List[EventPersistencePair], - all_events_and_contexts: List[EventPersistencePair], + events_and_contexts: list[EventPersistencePair], + all_events_and_contexts: list[EventPersistencePair], ) -> None: """Handles moving push actions from staging table to main event_push_actions table for all events in `events_and_contexts`. 
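The docstring above states the dedup rule precisely: pick the earliest non-outlier occurrence of an event if there is one, else the earliest occurrence. Sketched with a stand-in event type (an assumption), exploiting the fact that Python dicts preserve insertion order:

from dataclasses import dataclass

@dataclass
class _Ev:
    event_id: str
    outlier: bool

def filter_duplicates(events: list[_Ev]) -> list[_Ev]:
    new_events: dict[str, _Ev] = {}
    for event in events:
        prev = new_events.get(event.event_id)
        if prev is None:
            # First occurrence wins by default (earliest).
            new_events[event.event_id] = event
        elif prev.outlier and not event.outlier:
            # A later non-outlier displaces an earlier outlier.
            new_events[event.event_id] = event
    return list(new_events.values())

evs = [_Ev("$a", True), _Ev("$a", False), _Ev("$b", False)]
assert [(e.event_id, e.outlier) for e in filter_duplicates(evs)] == [("$a", False), ("$b", False)]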
@@ -3517,7 +3513,7 @@ def _update_min_depth_for_room_txn( ) def _handle_mult_prev_events( - self, txn: LoggingTransaction, events: List[EventBase] + self, txn: LoggingTransaction, events: list[EventBase] ) -> None: """ For the given event, update the event edges table and forward and @@ -3535,7 +3531,7 @@ def _handle_mult_prev_events( self._update_backward_extremeties(txn, events) def _update_backward_extremeties( - self, txn: LoggingTransaction, events: List[EventBase] + self, txn: LoggingTransaction, events: list[EventBase] ) -> None: """Updates the event_backward_extremities tables based on the new/updated events being persisted. @@ -3637,16 +3633,16 @@ class _LinkMap: # Stores the set of links as nested maps: source chain ID -> target chain ID # -> source sequence number -> target sequence number. - maps: Dict[int, Dict[int, Dict[int, int]]] = attr.Factory(dict) + maps: dict[int, dict[int, dict[int, int]]] = attr.Factory(dict) # Stores the links that have been added (with new set to true), as tuples of # `(source chain ID, source sequence no, target chain ID, target sequence no.)` - additions: Set[Tuple[int, int, int, int]] = attr.Factory(set) + additions: set[tuple[int, int, int, int]] = attr.Factory(set) def add_link( self, - src_tuple: Tuple[int, int], - target_tuple: Tuple[int, int], + src_tuple: tuple[int, int], + target_tuple: tuple[int, int], new: bool = True, ) -> bool: """Add a new link between two chains, ensuring no redundant links are added. @@ -3701,7 +3697,7 @@ def add_link( current_links[src_seq] = target_seq return True - def get_additions(self) -> Generator[Tuple[int, int, int, int], None, None]: + def get_additions(self) -> Generator[tuple[int, int, int, int], None, None]: """Gets any newly added links. Yields: @@ -3715,8 +3711,8 @@ def get_additions(self) -> Generator[Tuple[int, int, int, int], None, None]: def exists_path_from( self, - src_tuple: Tuple[int, int], - target_tuple: Tuple[int, int], + src_tuple: tuple[int, int], + target_tuple: tuple[int, int], ) -> bool: """Checks if there is a path between the source chain ID/sequence and target chain ID/sequence. @@ -3728,7 +3724,7 @@ def exists_path_from( return target_seq <= src_seq # We have to graph traverse the links to check for indirect paths. - visited_chains: Dict[int, int] = collections.Counter() + visited_chains: dict[int, int] = collections.Counter() search = [(src_chain, src_seq)] while search: chain, seq = search.pop() diff --git a/synapse/storage/databases/main/events_bg_updates.py b/synapse/storage/databases/main/events_bg_updates.py index 37dd8e48d5d..637b9104c00 100644 --- a/synapse/storage/databases/main/events_bg_updates.py +++ b/synapse/storage/databases/main/events_bg_updates.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Dict, List, Optional, Set, Tuple, cast +from typing import TYPE_CHECKING, Optional, cast import attr @@ -97,7 +97,7 @@ class _CalculateChainCover: # Map from room_id to last depth/stream processed for each room that we have # processed all events for (i.e. 
the rooms we can flip the # `has_auth_chain_index` for) - finished_room_map: Dict[str, Tuple[int, int]] + finished_room_map: dict[str, tuple[int, int]] @attr.s(slots=True, frozen=True, auto_attribs=True) @@ -451,7 +451,7 @@ def reindex_search_txn(txn: LoggingTransaction) -> int: chunks = [event_ids[i : i + 100] for i in range(0, len(event_ids), 100)] for chunk in chunks: ev_rows = cast( - List[Tuple[str, str]], + list[tuple[str, str]], self.db_pool.simple_select_many_txn( txn, table="event_json", @@ -527,8 +527,8 @@ def _cleanup_extremities_bg_update_txn(txn: LoggingTransaction) -> int: # The set of extremity event IDs that we're checking this round original_set = set() - # A dict[str, Set[str]] of event ID to their prev events. - graph: Dict[str, Set[str]] = {} + # A dict[str, set[str]] of event ID to their prev events. + graph: dict[str, set[str]] = {} # The set of descendants of the original set that are not rejected # nor soft-failed. Ancestors of these events should be removed @@ -647,7 +647,7 @@ def _cleanup_extremities_bg_update_txn(txn: LoggingTransaction) -> int: if deleted: # We now need to invalidate the caches of these rooms rows = cast( - List[Tuple[str]], + list[tuple[str]], self.db_pool.simple_select_many_txn( txn, table="events", @@ -851,7 +851,7 @@ async def _rejected_events_metadata(self, progress: dict, batch_size: int) -> in def get_rejected_events( txn: Cursor, - ) -> List[Tuple[str, str, JsonDict, bool, bool]]: + ) -> list[tuple[str, str, JsonDict, bool, bool]]: # Fetch rejected event json, their room version and whether we have # inserted them into the state_events or auth_events tables. # @@ -883,7 +883,7 @@ def get_rejected_events( ) return cast( - List[Tuple[str, str, JsonDict, bool, bool]], + list[tuple[str, str, JsonDict, bool, bool]], [(row[0], row[1], db_to_json(row[2]), row[3], row[4]) for row in txn], ) @@ -1126,7 +1126,7 @@ def _calculate_chain_cover_txn( # We also need to fetch the auth events for them. 
auth_events = cast( - List[Tuple[str, str]], + list[tuple[str, str]], self.db_pool.simple_select_many_txn( txn, table="event_auth", @@ -1137,7 +1137,7 @@ def _calculate_chain_cover_txn( ), ) - event_to_auth_chain: Dict[str, List[str]] = {} + event_to_auth_chain: dict[str, list[str]] = {} for event_id, auth_id in auth_events: event_to_auth_chain.setdefault(event_id, []).append(auth_id) @@ -1151,7 +1151,7 @@ def _calculate_chain_cover_txn( self.event_chain_id_gen, event_to_room_id, event_to_types, - cast(Dict[str, StrCollection], event_to_auth_chain), + cast(dict[str, StrCollection], event_to_auth_chain), ) return _CalculateChainCover( @@ -1256,7 +1256,7 @@ def _event_arbitrary_relations_txn(txn: LoggingTransaction) -> int: results = list(txn) # (event_id, parent_id, rel_type) for each relation - relations_to_insert: List[Tuple[str, str, str, str]] = [] + relations_to_insert: list[tuple[str, str, str, str]] = [] for event_id, event_json_raw in results: try: event_json = db_to_json(event_json_raw) @@ -1636,7 +1636,7 @@ async def _sliding_sync_joined_rooms_bg_update( # We don't need to fetch any progress state because we just grab the next N # events in `sliding_sync_joined_rooms_to_recalculate` - def _get_rooms_to_update_txn(txn: LoggingTransaction) -> List[Tuple[str]]: + def _get_rooms_to_update_txn(txn: LoggingTransaction) -> list[tuple[str]]: """ Returns: A list of room ID's to update along with the progress value @@ -1658,7 +1658,7 @@ def _get_rooms_to_update_txn(txn: LoggingTransaction) -> List[Tuple[str]]: (batch_size,), ) - rooms_to_update_rows = cast(List[Tuple[str]], txn.fetchall()) + rooms_to_update_rows = cast(list[tuple[str]], txn.fetchall()) return rooms_to_update_rows @@ -1674,9 +1674,9 @@ def _get_rooms_to_update_txn(txn: LoggingTransaction) -> List[Tuple[str]]: return 0 # Map from room_id to insert/update state values in the `sliding_sync_joined_rooms` table. - joined_room_updates: Dict[str, SlidingSyncStateInsertValues] = {} + joined_room_updates: dict[str, SlidingSyncStateInsertValues] = {} # Map from room_id to stream_ordering/bump_stamp, etc values - joined_room_stream_ordering_updates: Dict[ + joined_room_stream_ordering_updates: dict[ str, _JoinedRoomStreamOrderingUpdate ] = {} # As long as we get this value before we fetch the current state, we can use it @@ -1886,8 +1886,8 @@ async def _sliding_sync_membership_snapshots_bg_update( def _find_memberships_to_update_txn( txn: LoggingTransaction, - ) -> List[ - Tuple[ + ) -> list[ + tuple[ str, Optional[str], Optional[str], @@ -1979,8 +1979,8 @@ def _find_memberships_to_update_txn( raise Exception("last_event_stream_ordering should not be None") memberships_to_update_rows = cast( - List[ - Tuple[ + list[ + tuple[ str, Optional[str], Optional[str], @@ -2023,7 +2023,7 @@ def _find_memberships_to_update_txn( def _find_previous_invite_or_knock_membership_txn( txn: LoggingTransaction, room_id: str, user_id: str, event_id: str - ) -> Optional[Tuple[str, str]]: + ) -> Optional[tuple[str, str]]: # Find the previous invite/knock event before the leave event # # Here are some notes on how we landed on this query: @@ -2085,11 +2085,11 @@ def _find_previous_invite_or_knock_membership_txn( return event_id, membership # Map from (room_id, user_id) to ... 
- to_insert_membership_snapshots: Dict[ - Tuple[str, str], SlidingSyncMembershipSnapshotSharedInsertValues + to_insert_membership_snapshots: dict[ + tuple[str, str], SlidingSyncMembershipSnapshotSharedInsertValues ] = {} - to_insert_membership_infos: Dict[ - Tuple[str, str], SlidingSyncMembershipInfoWithEventPos + to_insert_membership_infos: dict[ + tuple[str, str], SlidingSyncMembershipInfoWithEventPos ] = {} for ( room_id, @@ -2510,7 +2510,7 @@ def _txn( ) memberships_to_update_rows = cast( - List[Tuple[str, str, str, int, int]], + list[tuple[str, str, str, int, int]], txn.fetchall(), ) if not memberships_to_update_rows: @@ -2519,9 +2519,9 @@ def _txn( # Assemble the values to update # # (room_id, user_id) - key_values: List[Tuple[str, str]] = [] + key_values: list[tuple[str, str]] = [] # (forgotten,) - value_values: List[Tuple[int]] = [] + value_values: list[tuple[int]] = [] for ( room_id, user_id, @@ -2585,7 +2585,7 @@ async def fixup_max_depth_cap_bg_update( room_id_bound = progress.get("room_id", "") - def redo_max_depth_bg_update_txn(txn: LoggingTransaction) -> Tuple[bool, int]: + def redo_max_depth_bg_update_txn(txn: LoggingTransaction) -> tuple[bool, int]: txn.execute( """ SELECT room_id, room_version FROM rooms @@ -2597,7 +2597,7 @@ def redo_max_depth_bg_update_txn(txn: LoggingTransaction) -> Tuple[bool, int]: ) # Find the next room ID to process, with a relevant room version. - room_ids: List[str] = [] + room_ids: list[str] = [] max_room_id: Optional[str] = None for room_id, room_version_str in txn: max_room_id = room_id @@ -2704,7 +2704,7 @@ def _resolve_stale_data_in_sliding_sync_joined_rooms_table( # If we have nothing written to the `sliding_sync_joined_rooms` table, there is # nothing to clean up - row = cast(Optional[Tuple[int]], txn.fetchone()) + row = cast(Optional[tuple[int]], txn.fetchone()) max_stream_ordering_sliding_sync_joined_rooms_table = None depends_on = None if row is not None: @@ -2830,7 +2830,7 @@ def _resolve_stale_data_in_sliding_sync_membership_snapshots_table( # If we have nothing written to the `sliding_sync_membership_snapshots` table, # there is nothing to clean up - row = cast(Optional[Tuple[int]], txn.fetchone()) + row = cast(Optional[tuple[int]], txn.fetchone()) max_stream_ordering_sliding_sync_membership_snapshots_table = None if row is not None: (max_stream_ordering_sliding_sync_membership_snapshots_table,) = row diff --git a/synapse/storage/databases/main/events_forward_extremities.py b/synapse/storage/databases/main/events_forward_extremities.py index bd763885d79..d43fb443fd7 100644 --- a/synapse/storage/databases/main/events_forward_extremities.py +++ b/synapse/storage/databases/main/events_forward_extremities.py @@ -20,7 +20,7 @@ # import logging -from typing import List, Optional, Tuple, cast +from typing import Optional, cast from synapse.api.errors import SynapseError from synapse.storage.database import LoggingTransaction @@ -98,7 +98,7 @@ def delete_forward_extremities_for_room_txn(txn: LoggingTransaction) -> int: async def get_forward_extremities_for_room( self, room_id: str - ) -> List[Tuple[str, int, int, Optional[int]]]: + ) -> list[tuple[str, int, int, Optional[int]]]: """ Get list of forward extremities for a room. 
@@ -108,7 +108,7 @@ async def get_forward_extremities_for_room( def get_forward_extremities_for_room_txn( txn: LoggingTransaction, - ) -> List[Tuple[str, int, int, Optional[int]]]: + ) -> list[tuple[str, int, int, Optional[int]]]: sql = """ SELECT event_id, state_group, depth, received_ts FROM event_forward_extremities @@ -118,7 +118,7 @@ def get_forward_extremities_for_room_txn( """ txn.execute(sql, (room_id,)) - return cast(List[Tuple[str, int, int, Optional[int]]], txn.fetchall()) + return cast(list[tuple[str, int, int, Optional[int]]], txn.fetchall()) return await self.db_pool.runInteraction( "get_forward_extremities_for_room", diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py index 4f9a1a4f780..005f75a2d8d 100644 --- a/synapse/storage/databases/main/events_worker.py +++ b/synapse/storage/databases/main/events_worker.py @@ -27,15 +27,11 @@ TYPE_CHECKING, Any, Collection, - Dict, Iterable, - List, Literal, Mapping, MutableMapping, Optional, - Set, - Tuple, cast, overload, ) @@ -191,7 +187,7 @@ class _EventRow: format_version: Optional[int] room_version_id: Optional[str] rejected_reason: Optional[str] - redactions: List[str] + redactions: list[str] outlier: bool @@ -286,7 +282,7 @@ def __init__( 5 * 60 * 1000, ) - self._get_event_cache: AsyncLruCache[Tuple[str], EventCacheEntry] = ( + self._get_event_cache: AsyncLruCache[tuple[str], EventCacheEntry] = ( AsyncLruCache( clock=hs.get_clock(), server_name=self.server_name, @@ -300,8 +296,8 @@ def __init__( # Map from event ID to a deferred that will result in a map from event # ID to cache entry. Note that the returned dict may not have the # requested event in it if the event isn't in the DB. - self._current_event_fetches: Dict[ - str, ObservableDeferred[Dict[str, EventCacheEntry]] + self._current_event_fetches: dict[ + str, ObservableDeferred[dict[str, EventCacheEntry]] ] = {} # We keep track of the events we have currently loaded in memory so that @@ -311,8 +307,8 @@ def __init__( self._event_ref: MutableMapping[str, EventBase] = weakref.WeakValueDictionary() self._event_fetch_lock = threading.Condition() - self._event_fetch_list: List[ - Tuple[Iterable[str], "defer.Deferred[Dict[str, _EventRow]]"] + self._event_fetch_list: list[ + tuple[Iterable[str], "defer.Deferred[dict[str, _EventRow]]"] ] = [] self._event_fetch_ongoing = 0 event_fetch_ongoing_gauge.labels(**{SERVER_NAME_LABEL: self.server_name}).set( @@ -323,7 +319,7 @@ def __init__( # the DataStore and PersistEventStore. def get_chain_id_txn(txn: Cursor) -> int: txn.execute("SELECT COALESCE(max(chain_id), 0) FROM event_auth_chains") - return cast(Tuple[int], txn.fetchone())[0] + return cast(tuple[int], txn.fetchone())[0] self.event_chain_id_gen = build_sequence_generator( db_conn, @@ -387,7 +383,7 @@ def get_un_partial_stated_events_token(self, instance_name: str) -> int: async def get_un_partial_stated_events_from_stream( self, instance_name: str, last_id: int, current_id: int, limit: int - ) -> Tuple[List[Tuple[int, Tuple[str, bool]]], int, bool]: + ) -> tuple[list[tuple[int, tuple[str, bool]]], int, bool]: """Get updates for the un-partial-stated events replication stream. 
Args: @@ -414,7 +410,7 @@ async def get_un_partial_stated_events_from_stream( def get_un_partial_stated_events_from_stream_txn( txn: LoggingTransaction, - ) -> Tuple[List[Tuple[int, Tuple[str, bool]]], int, bool]: + ) -> tuple[list[tuple[int, tuple[str, bool]]], int, bool]: sql = """ SELECT stream_id, event_id, rejection_status_changed FROM un_partial_stated_event_stream @@ -585,7 +581,7 @@ async def get_events( redact_behaviour: EventRedactBehaviour = EventRedactBehaviour.redact, get_prev_content: bool = False, allow_rejected: bool = False, - ) -> Dict[str, EventBase]: + ) -> dict[str, EventBase]: """Get events from the database Unknown events will be omitted from the response. @@ -633,7 +629,7 @@ async def get_events_as_list( redact_behaviour: EventRedactBehaviour = EventRedactBehaviour.redact, get_prev_content: bool = False, allow_rejected: bool = False, - ) -> List[EventBase]: + ) -> list[EventBase]: """Get events from the database and return in a list in the same order as given by `event_ids` arg. @@ -792,7 +788,7 @@ async def get_unredacted_events_from_cache_or_db( self, event_ids: Collection[str], allow_rejected: bool = False, - ) -> Dict[str, EventCacheEntry]: + ) -> dict[str, EventCacheEntry]: """Fetch a bunch of events from the cache or the database. Note that the events pulled by this function will not have any redactions @@ -836,9 +832,9 @@ async def get_unredacted_events_from_cache_or_db( # avoid extraneous work (if we don't do this we can end up in a n^2 mode # when we wait on the same Deferred N times, then try and merge the # same dict into itself N times). - already_fetching_ids: Set[str] = set() - already_fetching_deferreds: Set[ - ObservableDeferred[Dict[str, EventCacheEntry]] + already_fetching_ids: set[str] = set() + already_fetching_deferreds: set[ + ObservableDeferred[dict[str, EventCacheEntry]] ] = set() for event_id in missing_events_ids: @@ -853,7 +849,7 @@ async def get_unredacted_events_from_cache_or_db( if missing_events_ids: - async def get_missing_events_from_cache_or_db() -> Dict[ + async def get_missing_events_from_cache_or_db() -> dict[ str, EventCacheEntry ]: """Fetches the events in `missing_event_ids` from the database. @@ -869,7 +865,7 @@ async def get_missing_events_from_cache_or_db() -> Dict[ # to all the events we pulled from the DB (this will result in this # function returning more events than requested, but that can happen # already due to `_get_events_from_db`). - fetching_deferred: ObservableDeferred[Dict[str, EventCacheEntry]] = ( + fetching_deferred: ObservableDeferred[dict[str, EventCacheEntry]] = ( ObservableDeferred(defer.Deferred(), consumeErrors=True) ) for event_id in missing_events_ids: @@ -908,7 +904,7 @@ async def get_missing_events_from_cache_or_db() -> Dict[ # We must allow the database fetch to complete in the presence of # cancellations, since multiple `_get_events_from_cache_or_db` calls can # reuse the same fetch. - missing_events: Dict[str, EventCacheEntry] = await delay_cancellation( + missing_events: dict[str, EventCacheEntry] = await delay_cancellation( get_missing_events_from_cache_or_db() ) event_entry_map.update(missing_events) @@ -999,7 +995,7 @@ def _invalidate_async_get_event_cache_room_id(self, room_id: str) -> None: async def _get_events_from_cache( self, events: Iterable[str], update_metrics: bool = True - ) -> Dict[str, EventCacheEntry]: + ) -> dict[str, EventCacheEntry]: """Fetch events from the caches, both in memory and any external. May return rejected events. 
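`_current_event_fetches` above exists so that N concurrent requests for the same event produce one database fetch: the first caller registers a deferred and later callers observe it. A translation of that pattern to asyncio so the sketch is self-contained (an assumption made here; Synapse itself runs on Twisted's `ObservableDeferred`):

import asyncio

_in_flight: dict[str, "asyncio.Task[str]"] = {}

async def _fetch_from_db(event_id: str) -> str:
    await asyncio.sleep(0.01)  # stand-in for the real database round-trip
    return f"event body for {event_id}"

async def get_event(event_id: str) -> str:
    task = _in_flight.get(event_id)
    if task is None:
        # First caller starts the fetch; concurrent callers await the same
        # task instead of issuing duplicate queries.
        task = asyncio.create_task(_fetch_from_db(event_id))
        _in_flight[event_id] = task
        task.add_done_callback(lambda _: _in_flight.pop(event_id, None))
    return await task

async def main() -> None:
    results = await asyncio.gather(*(get_event("$a") for _ in range(3)))
    assert len(set(results)) == 1  # three callers, one underlying fetch

asyncio.run(main())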
@@ -1025,7 +1021,7 @@ async def _get_events_from_cache( @trace async def _get_events_from_external_cache( self, events: Collection[str], update_metrics: bool = True - ) -> Dict[str, EventCacheEntry]: + ) -> dict[str, EventCacheEntry]: """Fetch events from any configured external cache. May return rejected events. @@ -1051,7 +1047,7 @@ async def _get_events_from_external_cache( def _get_events_from_local_cache( self, events: Iterable[str], update_metrics: bool = True - ) -> Dict[str, EventCacheEntry]: + ) -> dict[str, EventCacheEntry]: """Fetch events from the local, in memory, caches. May return rejected events. @@ -1095,7 +1091,7 @@ async def get_stripped_room_state_from_event_context( context: EventContext, state_keys_to_include: StateFilter, membership_user_id: Optional[str] = None, - ) -> List[JsonDict]: + ) -> list[JsonDict]: """ Retrieve the stripped state from a room, given an event context to retrieve state from as well as the state types to include. Optionally, include the membership @@ -1257,7 +1253,7 @@ def _fetch_loop(self, conn: LoggingDatabaseConnection) -> None: def _fetch_event_list( self, conn: LoggingDatabaseConnection, - event_list: List[Tuple[Iterable[str], "defer.Deferred[Dict[str, _EventRow]]"]], + event_list: list[tuple[Iterable[str], "defer.Deferred[dict[str, _EventRow]]"]], ) -> None: """Handle a load of requests from the _event_fetch_list queue @@ -1312,7 +1308,7 @@ def fire_errback(exc: Exception) -> None: @trace async def _get_events_from_db( self, event_ids: Collection[str] - ) -> Dict[str, EventCacheEntry]: + ) -> dict[str, EventCacheEntry]: """Fetch a bunch of events from the database. May return rejected events. @@ -1333,8 +1329,8 @@ async def _get_events_from_db( str(len(event_ids)), ) - fetched_event_ids: Set[str] = set() - fetched_events: Dict[str, _EventRow] = {} + fetched_event_ids: set[str] = set() + fetched_events: dict[str, _EventRow] = {} @trace async def _fetch_event_ids_and_get_outstanding_redactions( @@ -1351,7 +1347,7 @@ async def _fetch_event_ids_and_get_outstanding_redactions( row_map = await self._enqueue_events(event_ids_to_fetch) # we need to recursively fetch any redactions of those events - redaction_ids: Set[str] = set() + redaction_ids: set[str] = set() for event_id in event_ids_to_fetch: row = row_map.get(event_id) fetched_event_ids.add(event_id) @@ -1378,7 +1374,7 @@ async def _fetch_event_ids_and_get_outstanding_redactions( ) # build a map from event_id to EventBase - event_map: Dict[str, EventBase] = {} + event_map: dict[str, EventBase] = {} for event_id, row in fetched_events.items(): assert row.event_id == event_id @@ -1491,7 +1487,7 @@ async def _fetch_event_ids_and_get_outstanding_redactions( # finally, we can decide whether each one needs redacting, and build # the cache entries. - result_map: Dict[str, EventCacheEntry] = {} + result_map: dict[str, EventCacheEntry] = {} for event_id, original_ev in event_map.items(): redactions = fetched_events[event_id].redactions redacted_event = self._maybe_redact_event_row( @@ -1511,7 +1507,7 @@ async def _fetch_event_ids_and_get_outstanding_redactions( return result_map - async def _enqueue_events(self, events: Collection[str]) -> Dict[str, _EventRow]: + async def _enqueue_events(self, events: Collection[str]) -> dict[str, _EventRow]: """Fetches events from the database using the _event_fetch_list. This allows batch and bulk fetching of events - it allows us to fetch events without having to create a new transaction for each request for events. 
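`_enqueue_events` above batches work by appending `(ids, deferred)` pairs to `_event_fetch_list` under a lock and letting a fetcher drain the whole list in one pass. A threading sketch of that queue, with an in-memory dict standing in for the events table (an assumption for illustration):

import threading
from concurrent.futures import Future

_DB = {"$a": "A", "$b": "B", "$c": "C"}
_cond = threading.Condition()
_fetch_list: list[tuple[list[str], "Future[dict[str, str]]"]] = []

def enqueue_events(ids: list[str]) -> "Future[dict[str, str]]":
    f: "Future[dict[str, str]]" = Future()
    with _cond:
        _fetch_list.append((ids, f))
        _cond.notify()  # wake the fetcher
    return f

def fetch_loop() -> None:
    with _cond:
        while not _fetch_list:
            _cond.wait()
        batch = list(_fetch_list)
        _fetch_list.clear()
    # One pass over the "database" answers every queued request, so requesters
    # never pay for a transaction each.
    for ids, f in batch:
        f.set_result({i: _DB[i] for i in ids if i in _DB})

threading.Thread(target=fetch_loop, daemon=True).start()
print(enqueue_events(["$a", "$c"]).result(timeout=5))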
@@ -1524,7 +1520,7 @@ async def _enqueue_events(self, events: Collection[str]) -> Dict[str, _EventRow] that weren't requested. """ - events_d: "defer.Deferred[Dict[str, _EventRow]]" = defer.Deferred() + events_d: "defer.Deferred[dict[str, _EventRow]]" = defer.Deferred() with self._event_fetch_lock: self._event_fetch_list.append((events, events_d)) self._event_fetch_lock.notify() @@ -1540,7 +1536,7 @@ async def _enqueue_events(self, events: Collection[str]) -> Dict[str, _EventRow] def _fetch_event_rows( self, txn: LoggingTransaction, event_ids: Iterable[str] - ) -> Dict[str, _EventRow]: + ) -> dict[str, _EventRow]: """Fetch event rows from the database Events which are not found are omitted from the result. @@ -1607,7 +1603,7 @@ def _fetch_event_rows( # check for MSC4932 redactions to_check = [] - events: List[_EventRow] = [] + events: list[_EventRow] = [] for e in evs: event = event_dict.get(e) if not event: @@ -1656,7 +1652,7 @@ def _maybe_redact_event_row( self, original_ev: EventBase, redactions: Iterable[str], - event_map: Dict[str, EventBase], + event_map: dict[str, EventBase], ) -> Optional[EventBase]: """Given an event object and a list of possible redacting event ids, determine whether to honour any of those redactions and if so return a redacted @@ -1727,12 +1723,12 @@ def _maybe_redact_event_row( # no valid redaction found for this event return None - async def have_events_in_timeline(self, event_ids: Iterable[str]) -> Set[str]: + async def have_events_in_timeline(self, event_ids: Iterable[str]) -> set[str]: """Given a list of event ids, check if we have already processed and stored them as non outliers. """ rows = cast( - List[Tuple[str]], + list[tuple[str]], await self.db_pool.simple_select_many_batch( table="events", retcols=("event_id",), @@ -1749,7 +1745,7 @@ async def have_events_in_timeline(self, event_ids: Iterable[str]) -> Set[str]: @tag_args async def have_seen_events( self, room_id: str, event_ids: Iterable[str] - ) -> Set[str]: + ) -> set[str]: """Given a list of event ids, check if we have already processed them. The room_id is only used to structure the cache (so that it can later be @@ -1768,7 +1764,7 @@ async def have_seen_events( # we break it down. However, each batch requires its own index scan, so we make # the batches as big as possible. - results: Set[str] = set() + results: set[str] = set() for event_ids_chunk in batch_iter(event_ids, 500): events_seen_dict = await self._have_seen_events_dict( room_id, event_ids_chunk @@ -1798,7 +1794,7 @@ async def _have_seen_events_dict( # not being invalidated when purging events from a room. The optimisation can # be re-added after https://github.com/matrix-org/synapse/issues/13476 - def have_seen_events_txn(txn: LoggingTransaction) -> Dict[str, bool]: + def have_seen_events_txn(txn: LoggingTransaction) -> dict[str, bool]: # we deliberately do *not* query the database for room_id, to make the # query an index-only lookup on `events_event_id_key`. # @@ -1850,7 +1846,7 @@ async def get_current_state_event_counts(self, room_id: str) -> int: room_id, ) - async def get_room_complexity(self, room_id: str) -> Dict[str, float]: + async def get_room_complexity(self, room_id: str) -> dict[str, float]: """ Get a rough approximation of the complexity of the room. This is used by remote servers to decide whether they wish to join the room or not. 
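`have_seen_events` above chunks its input with `batch_iter(event_ids, 500)` so each batch becomes a single index scan. A stand-in for that helper, matching only how it is called here (the real implementation lives elsewhere in Synapse's util code):

from itertools import islice
from typing import Iterable, Iterator, TypeVar

T = TypeVar("T")

def batch_iter(iterable: Iterable[T], size: int) -> Iterator[tuple[T, ...]]:
    # Yield successive fixed-size chunks; the final chunk may be shorter.
    it = iter(iterable)
    while batch := tuple(islice(it, size)):
        yield batch

assert list(batch_iter(range(7), 3)) == [(0, 1, 2), (3, 4, 5), (6,)]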
@@ -1873,7 +1869,7 @@ async def get_room_complexity(self, room_id: str) -> Dict[str, float]: async def get_all_new_forward_event_rows( self, instance_name: str, last_id: int, current_id: int, limit: int - ) -> List[Tuple[int, str, str, str, str, str, str, str, bool, bool]]: + ) -> list[tuple[int, str, str, str, str, str, str, str, bool, bool]]: """Returns new events, for the Events replication stream Args: @@ -1889,7 +1885,7 @@ async def get_all_new_forward_event_rows( def get_all_new_forward_event_rows( txn: LoggingTransaction, - ) -> List[Tuple[int, str, str, str, str, str, str, str, bool, bool]]: + ) -> list[tuple[int, str, str, str, str, str, str, str, bool, bool]]: sql = ( "SELECT e.stream_ordering, e.event_id, e.room_id, e.type," " se.state_key, redacts, relates_to_id, membership, rejections.reason IS NOT NULL," @@ -1907,7 +1903,7 @@ def get_all_new_forward_event_rows( ) txn.execute(sql, (last_id, current_id, instance_name, limit)) return cast( - List[Tuple[int, str, str, str, str, str, str, str, bool, bool]], + list[tuple[int, str, str, str, str, str, str, str, bool, bool]], txn.fetchall(), ) @@ -1917,7 +1913,7 @@ def get_all_new_forward_event_rows( async def get_ex_outlier_stream_rows( self, instance_name: str, last_id: int, current_id: int - ) -> List[Tuple[int, str, str, str, str, str, str, str, bool, bool]]: + ) -> list[tuple[int, str, str, str, str, str, str, str, bool, bool]]: """Returns de-outliered events, for the Events replication stream Args: @@ -1932,7 +1928,7 @@ async def get_ex_outlier_stream_rows( def get_ex_outlier_stream_rows_txn( txn: LoggingTransaction, - ) -> List[Tuple[int, str, str, str, str, str, str, str, bool, bool]]: + ) -> list[tuple[int, str, str, str, str, str, str, str, bool, bool]]: sql = ( "SELECT out.event_stream_ordering, e.event_id, e.room_id, e.type," " se.state_key, redacts, relates_to_id, membership, rejections.reason IS NOT NULL," @@ -1954,7 +1950,7 @@ def get_ex_outlier_stream_rows_txn( txn.execute(sql, (last_id, current_id, instance_name)) return cast( - List[Tuple[int, str, str, str, str, str, str, str, bool, bool]], + list[tuple[int, str, str, str, str, str, str, str, bool, bool]], txn.fetchall(), ) @@ -1964,7 +1960,7 @@ def get_ex_outlier_stream_rows_txn( async def get_all_new_backfill_event_rows( self, instance_name: str, last_id: int, current_id: int, limit: int - ) -> Tuple[List[Tuple[int, Tuple[str, str, str, str, str, str]]], int, bool]: + ) -> tuple[list[tuple[int, tuple[str, str, str, str, str, str]]], int, bool]: """Get updates for backfill replication stream, including all new backfilled events and events that have gone from being outliers to not. @@ -1994,7 +1990,7 @@ async def get_all_new_backfill_event_rows( def get_all_new_backfill_event_rows( txn: LoggingTransaction, - ) -> Tuple[List[Tuple[int, Tuple[str, str, str, str, str, str]]], int, bool]: + ) -> tuple[list[tuple[int, tuple[str, str, str, str, str, str]]], int, bool]: sql = ( "SELECT -e.stream_ordering, e.event_id, e.room_id, e.type," " se.state_key, redacts, relates_to_id" @@ -2008,10 +2004,10 @@ def get_all_new_backfill_event_rows( " LIMIT ?" ) txn.execute(sql, (-last_id, -current_id, instance_name, limit)) - new_event_updates: List[ - Tuple[int, Tuple[str, str, str, str, str, str]] + new_event_updates: list[ + tuple[int, tuple[str, str, str, str, str, str]] ] = [] - row: Tuple[int, str, str, str, str, str, str] + row: tuple[int, str, str, str, str, str, str] # Type safety: iterating over `txn` yields `Tuple`, i.e. # `Tuple[Any, ...]` of arbitrary length. 
Mypy detects assigning a # variadic tuple to a fixed length tuple and flags it up as an error. @@ -2057,7 +2053,7 @@ def get_all_new_backfill_event_rows( async def get_all_updated_current_state_deltas( self, instance_name: str, from_token: int, to_token: int, target_row_count: int - ) -> Tuple[List[Tuple[int, str, str, str, str]], int, bool]: + ) -> tuple[list[tuple[int, str, str, str, str]], int, bool]: """Fetch updates from current_state_delta_stream Args: @@ -2079,7 +2075,7 @@ async def get_all_updated_current_state_deltas( def get_all_updated_current_state_deltas_txn( txn: LoggingTransaction, - ) -> List[Tuple[int, str, str, str, str]]: + ) -> list[tuple[int, str, str, str, str]]: sql = """ SELECT stream_id, room_id, type, state_key, event_id FROM current_state_delta_stream @@ -2088,23 +2084,23 @@ def get_all_updated_current_state_deltas_txn( ORDER BY stream_id ASC LIMIT ? """ txn.execute(sql, (from_token, to_token, instance_name, target_row_count)) - return cast(List[Tuple[int, str, str, str, str]], txn.fetchall()) + return cast(list[tuple[int, str, str, str, str]], txn.fetchall()) def get_deltas_for_stream_id_txn( txn: LoggingTransaction, stream_id: int - ) -> List[Tuple[int, str, str, str, str]]: + ) -> list[tuple[int, str, str, str, str]]: sql = """ SELECT stream_id, room_id, type, state_key, event_id FROM current_state_delta_stream WHERE stream_id = ? """ txn.execute(sql, [stream_id]) - return cast(List[Tuple[int, str, str, str, str]], txn.fetchall()) + return cast(list[tuple[int, str, str, str, str]], txn.fetchall()) # we need to make sure that, for every stream id in the results, we get *all* # the rows with that stream id. - rows: List[Tuple[int, str, str, str, str]] = await self.db_pool.runInteraction( + rows: list[tuple[int, str, str, str, str]] = await self.db_pool.runInteraction( "get_all_updated_current_state_deltas", get_all_updated_current_state_deltas_txn, ) @@ -2135,7 +2131,7 @@ def get_deltas_for_stream_id_txn( async def get_senders_for_event_ids( self, event_ids: Collection[str] - ) -> Dict[str, Optional[str]]: + ) -> dict[str, Optional[str]]: """ Given a sequence of event IDs, return the sender associated with each. @@ -2151,7 +2147,7 @@ async def get_senders_for_event_ids( def _get_senders_for_event_ids( txn: LoggingTransaction, - ) -> Dict[str, Optional[str]]: + ) -> dict[str, Optional[str]]: rows = self.db_pool.simple_select_many_txn( txn=txn, table="events", @@ -2167,7 +2163,7 @@ def _get_senders_for_event_ids( ) @cached(max_entries=5000) - async def get_event_ordering(self, event_id: str, room_id: str) -> Tuple[int, int]: + async def get_event_ordering(self, event_id: str, room_id: str) -> tuple[int, int]: res = await self.db_pool.simple_select_one( table="events", retcols=["topological_ordering", "stream_ordering"], @@ -2182,7 +2178,7 @@ async def get_event_ordering(self, event_id: str, room_id: str) -> Tuple[int, in return int(res[0]), int(res[1]) - async def get_next_event_to_expire(self) -> Optional[Tuple[str, int]]: + async def get_next_event_to_expire(self) -> Optional[tuple[str, int]]: """Retrieve the entry with the lowest expiry timestamp in the event_expiry table, or None if there's no more event to expire. 
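The "Type safety" comment above is worth unpacking: iterating a cursor yields variadic `tuple[Any, ...]`, and Mypy refuses to assign a variadic tuple to a fixed-length tuple annotation, which is why nearly every fetch in this file is wrapped in a cast. A compact illustration, with `_rows` standing in for iterating a `LoggingTransaction` (an assumption):

from typing import Any, Iterator, cast

def _rows() -> Iterator[tuple[Any, ...]]:
    yield (-1, "$ev", "!room:hs", "m.room.message", None, None)

row: tuple[int, str, str, str, str, str]
for r in _rows():
    # row = r  # mypy error: variadic tuple assigned to fixed-length tuple
    row = cast(tuple[int, str, str, str, str, str], r)  # accepted
    print(row)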
@@ -2194,7 +2190,7 @@ async def get_next_event_to_expire(self) -> Optional[Tuple[str, int]]: def get_next_event_to_expire_txn( txn: LoggingTransaction, - ) -> Optional[Tuple[str, int]]: + ) -> Optional[tuple[str, int]]: txn.execute( """ SELECT event_id, expiry_ts FROM event_expiry @@ -2202,7 +2198,7 @@ def get_next_event_to_expire_txn( """ ) - return cast(Optional[Tuple[str, int]], txn.fetchone()) + return cast(Optional[tuple[str, int]], txn.fetchone()) return await self.db_pool.runInteraction( desc="get_next_event_to_expire", func=get_next_event_to_expire_txn @@ -2229,7 +2225,7 @@ async def get_event_id_from_transaction_id_and_device_id( async def get_already_persisted_events( self, events: Iterable[EventBase] - ) -> Dict[str, str]: + ) -> dict[str, str]: """Look up if we have already persisted an event for the transaction ID, returning a mapping from event ID in the given list to the event ID of an existing event. @@ -2239,7 +2235,7 @@ async def get_already_persisted_events( """ mapping = {} - txn_id_to_event: Dict[Tuple[str, str, str, str], str] = {} + txn_id_to_event: dict[tuple[str, str, str, str], str] = {} for event in events: device_id = getattr(event.internal_metadata, "device_id", None) @@ -2516,7 +2512,7 @@ async def get_partial_state_events( any of the events which are unknown (or are outliers). """ result = cast( - List[Tuple[str]], + list[tuple[str]], await self.db_pool.simple_select_many_batch( table="partial_state_events", column="event_id", @@ -2541,7 +2537,7 @@ async def is_partial_state_event(self, event_id: str) -> bool: ) return result is not None - async def get_partial_state_events_batch(self, room_id: str) -> List[str]: + async def get_partial_state_events_batch(self, room_id: str) -> list[str]: """ Get a list of events in the given room that: - have partial state; and @@ -2560,7 +2556,7 @@ async def get_partial_state_events_batch(self, room_id: str) -> List[str]: @staticmethod def _get_partial_state_events_batch_txn( txn: LoggingTransaction, room_id: str - ) -> List[str]: + ) -> list[str]: # we want to work through the events from oldest to newest, so # we only want events whose prev_events do *not* have partial state - hence # the 'NOT EXISTS' clause in the below. 
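[Note, not part of the patch] Compound dictionary keys such as `txn_id_to_event: dict[tuple[str, str, str, str], str]` above translate one-for-one to builtin generics. A tiny sketch with a hypothetical key layout (the real key fields are whatever Synapse uses to deduplicate transaction IDs):

    # Hypothetical key layout, for illustration only.
    txn_id_to_event: dict[tuple[str, str, str, str], str] = {}
    key = ("@alice:example.org", "!room:example.org", "DEVICEID", "txn-1")
    txn_id_to_event[key] = "$event_id"
    assert txn_id_to_event.get(key) == "$event_id"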
@@ -2644,8 +2640,8 @@ def mark_event_rejected_txn( self.invalidate_get_event_cache_after_txn(txn, event_id) async def get_events_sent_by_user_in_room( - self, user_id: str, room_id: str, limit: int, filter: Optional[List[str]] = None - ) -> Optional[List[str]]: + self, user_id: str, room_id: str, limit: int, filter: Optional[list[str]] = None + ) -> Optional[list[str]]: """ Get a list of event ids of events sent by the user in the specified room @@ -2660,10 +2656,10 @@ def _get_events_by_user_in_room_txn( txn: LoggingTransaction, user_id: str, room_id: str, - filter: Optional[List[str]], + filter: Optional[list[str]], batch_size: int, offset: int, - ) -> Tuple[Optional[List[str]], int]: + ) -> tuple[Optional[list[str]], int]: if filter: base_clause, args = make_in_list_sql_clause( txn.database_engine, "type", filter @@ -2696,7 +2692,7 @@ def _get_events_by_user_in_room_txn( if batch_size > limit: batch_size = limit - selected_ids: List[str] = [] + selected_ids: list[str] = [] while offset < limit: res, offset = await self.db_pool.runInteraction( "get_events_by_user", diff --git a/synapse/storage/databases/main/experimental_features.py b/synapse/storage/databases/main/experimental_features.py index d980c57fa8d..77b6c36884c 100644 --- a/synapse/storage/databases/main/experimental_features.py +++ b/synapse/storage/databases/main/experimental_features.py @@ -19,7 +19,7 @@ # # -from typing import TYPE_CHECKING, Dict, FrozenSet, List, Tuple, cast +from typing import TYPE_CHECKING, cast from synapse.storage.database import ( DatabasePool, @@ -44,7 +44,7 @@ def __init__( super().__init__(database, db_conn, hs) @cached() - async def list_enabled_features(self, user_id: str) -> FrozenSet[str]: + async def list_enabled_features(self, user_id: str) -> frozenset[str]: """ Checks to see what features are enabled for a given user Args: @@ -54,7 +54,7 @@ async def list_enabled_features(self, user_id: str) -> FrozenSet[str]: the features currently enabled for the user """ enabled = cast( - List[Tuple[str]], + list[tuple[str]], await self.db_pool.simple_select_list( table="per_user_experimental_features", keyvalues={"user_id": user_id, "enabled": True}, @@ -67,7 +67,7 @@ async def list_enabled_features(self, user_id: str) -> FrozenSet[str]: async def set_features_for_user( self, user: str, - features: Dict["ExperimentalFeature", bool], + features: dict["ExperimentalFeature", bool], ) -> None: """ Enables or disables features for a given user diff --git a/synapse/storage/databases/main/filtering.py b/synapse/storage/databases/main/filtering.py index af9634bad4b..4b3bc69d205 100644 --- a/synapse/storage/databases/main/filtering.py +++ b/synapse/storage/databases/main/filtering.py @@ -20,7 +20,7 @@ # # -from typing import TYPE_CHECKING, Optional, Tuple, Union, cast +from typing import TYPE_CHECKING, Optional, Union, cast from canonicaljson import encode_canonical_json @@ -187,7 +187,7 @@ def _do_txn(txn: LoggingTransaction) -> int: sql = "SELECT MAX(filter_id) FROM user_filters WHERE full_user_id = ?" 
txn.execute(sql, (user_id.to_string(),)) - max_id = cast(Tuple[Optional[int]], txn.fetchone())[0] + max_id = cast(tuple[Optional[int]], txn.fetchone())[0] if max_id is None: filter_id = 0 else: diff --git a/synapse/storage/databases/main/keys.py b/synapse/storage/databases/main/keys.py index 2a99a97dd66..9833565095d 100644 --- a/synapse/storage/databases/main/keys.py +++ b/synapse/storage/databases/main/keys.py @@ -22,7 +22,7 @@ import itertools import json import logging -from typing import Dict, Iterable, List, Mapping, Optional, Tuple, Union, cast +from typing import Iterable, Mapping, Optional, Union, cast from canonicaljson import encode_canonical_json from signedjson.key import decode_verify_key_bytes @@ -50,7 +50,7 @@ async def store_server_keys_response( server_name: str, from_server: str, ts_added_ms: int, - verify_keys: Dict[str, FetchKeyResult], + verify_keys: dict[str, FetchKeyResult], response_json: JsonDict, ) -> None: """Stores the keys for the given server that we got from `from_server`. @@ -130,7 +130,7 @@ def store_server_keys_response_txn(txn: LoggingTransaction) -> None: @cached() def _get_server_keys_json( - self, server_name_and_key_id: Tuple[str, str] + self, server_name_and_key_id: tuple[str, str] ) -> FetchKeyResult: raise NotImplementedError() @@ -138,8 +138,8 @@ def _get_server_keys_json( cached_method_name="_get_server_keys_json", list_name="server_name_and_key_ids" ) async def get_server_keys_json( - self, server_name_and_key_ids: Iterable[Tuple[str, str]] - ) -> Mapping[Tuple[str, str], FetchKeyResult]: + self, server_name_and_key_ids: Iterable[tuple[str, str]] + ) -> Mapping[tuple[str, str], FetchKeyResult]: """ Args: server_name_and_key_ids: @@ -151,7 +151,7 @@ async def get_server_keys_json( """ keys = {} - def _get_keys(txn: Cursor, batch: Tuple[Tuple[str, str], ...]) -> None: + def _get_keys(txn: Cursor, batch: tuple[tuple[str, str], ...]) -> None: """Processes a batch of keys to fetch, and adds the result to `keys`.""" # batch_iter always returns tuples so it's safe to do len(batch) @@ -189,7 +189,7 @@ def _get_keys(txn: Cursor, batch: Tuple[Tuple[str, str], ...]) -> None: valid_until_ts=ts_valid_until_ms, ) - def _txn(txn: Cursor) -> Dict[Tuple[str, str], FetchKeyResult]: + def _txn(txn: Cursor) -> dict[tuple[str, str], FetchKeyResult]: for batch in batch_iter(server_name_and_key_ids, 50): _get_keys(txn, batch) return keys @@ -215,7 +215,7 @@ async def get_server_keys_json_for_remote( If we have multiple entries for a given key ID, returns the most recent. """ rows = cast( - List[Tuple[str, str, int, int, Union[bytes, memoryview]]], + list[tuple[str, str, int, int, Union[bytes, memoryview]]], await self.db_pool.simple_select_many_batch( table="server_keys_json", column="key_id", @@ -252,13 +252,13 @@ async def get_server_keys_json_for_remote( async def get_all_server_keys_json_for_remote( self, server_name: str, - ) -> Dict[str, FetchKeyResultForRemote]: + ) -> dict[str, FetchKeyResultForRemote]: """Fetch the cached keys for the given server. If we have multiple entries for a given key ID, returns the most recent. 
""" rows = cast( - List[Tuple[str, str, int, int, Union[bytes, memoryview]]], + list[tuple[str, str, int, int, Union[bytes, memoryview]]], await self.db_pool.simple_select_list( table="server_keys_json", keyvalues={"server_name": server_name}, diff --git a/synapse/storage/databases/main/lock.py b/synapse/storage/databases/main/lock.py index e2b15eaf6a5..9dd2cae3447 100644 --- a/synapse/storage/databases/main/lock.py +++ b/synapse/storage/databases/main/lock.py @@ -21,7 +21,7 @@ import logging from contextlib import AsyncExitStack from types import TracebackType -from typing import TYPE_CHECKING, Collection, Optional, Set, Tuple, Type +from typing import TYPE_CHECKING, Collection, Optional from weakref import WeakValueDictionary from twisted.internet import defer @@ -82,7 +82,7 @@ def __init__( # A map from `(lock_name, lock_key)` to lock that we think we # currently hold. - self._live_lock_tokens: WeakValueDictionary[Tuple[str, str], Lock] = ( + self._live_lock_tokens: WeakValueDictionary[tuple[str, str], Lock] = ( WeakValueDictionary() ) @@ -91,7 +91,7 @@ def __init__( # multiple read locks at a time but only one write lock (no mixing read # and write locks at the same time). self._live_read_write_lock_tokens: WeakValueDictionary[ - Tuple[str, str, str], Lock + tuple[str, str, str], Lock ] = WeakValueDictionary() # When we shut down we want to remove the locks. Technically this can @@ -104,7 +104,7 @@ def __init__( shutdown_func=self._on_shutdown, ) - self._acquiring_locks: Set[Tuple[str, str]] = set() + self._acquiring_locks: set[tuple[str, str]] = set() self.clock.looping_call( self._reap_stale_read_write_locks, _LOCK_TIMEOUT_MS / 10.0 @@ -288,7 +288,7 @@ def set_lock() -> None: async def try_acquire_multi_read_write_lock( self, - lock_names: Collection[Tuple[str, str]], + lock_names: Collection[tuple[str, str]], write: bool, ) -> Optional[AsyncExitStack]: """Try to acquire multiple locks for the given names/keys. 
Will return @@ -318,7 +318,7 @@ async def try_acquire_multi_read_write_lock( def _try_acquire_multi_read_write_lock_txn( self, txn: LoggingTransaction, - lock_names: Collection[Tuple[str, str]], + lock_names: Collection[tuple[str, str]], write: bool, ) -> Collection["Lock"]: locks = [] @@ -497,7 +497,7 @@ async def __aenter__(self) -> None: async def __aexit__( self, - _exctype: Optional[Type[BaseException]], + _exctype: Optional[type[BaseException]], _excinst: Optional[BaseException], _exctb: Optional[TracebackType], ) -> bool: diff --git a/synapse/storage/databases/main/media_repository.py b/synapse/storage/databases/main/media_repository.py index b8bd0042d78..b9f882662ee 100644 --- a/synapse/storage/databases/main/media_repository.py +++ b/synapse/storage/databases/main/media_repository.py @@ -25,9 +25,7 @@ TYPE_CHECKING, Collection, Iterable, - List, Optional, - Tuple, Union, cast, ) @@ -275,7 +273,7 @@ async def get_local_media_by_user_paginate( user_id: str, order_by: str = MediaSortOrder.CREATED_TS.value, direction: Direction = Direction.FORWARDS, - ) -> Tuple[List[LocalMedia], int]: + ) -> tuple[list[LocalMedia], int]: """Get a paginated list of metadata for a local piece of media which an user_id has uploaded @@ -292,7 +290,7 @@ async def get_local_media_by_user_paginate( def get_local_media_by_user_paginate_txn( txn: LoggingTransaction, - ) -> Tuple[List[LocalMedia], int]: + ) -> tuple[list[LocalMedia], int]: # Set ordering order_by_column = MediaSortOrder(order_by).value @@ -301,14 +299,14 @@ def get_local_media_by_user_paginate_txn( else: order = "ASC" - args: List[Union[str, int]] = [user_id] + args: list[Union[str, int]] = [user_id] sql = """ SELECT COUNT(*) as total_media FROM local_media_repository WHERE user_id = ? """ txn.execute(sql, args) - count = cast(Tuple[int], txn.fetchone())[0] + count = cast(tuple[int], txn.fetchone())[0] sql = """ SELECT @@ -365,7 +363,7 @@ async def get_local_media_ids( keep_profiles: bool, include_quarantined_media: bool, include_protected_media: bool, - ) -> List[str]: + ) -> list[str]: """ Retrieve a list of media IDs from the local media store. @@ -437,7 +435,7 @@ async def get_local_media_ids( AND NOT safe_from_quarantine """ - def _get_local_media_ids_txn(txn: LoggingTransaction) -> List[str]: + def _get_local_media_ids_txn(txn: LoggingTransaction) -> list[str]: txn.execute(sql, (before_ts, before_ts, size_gt)) return [row[0] for row in txn] @@ -544,7 +542,7 @@ async def mark_local_media_as_safe(self, media_id: str, safe: bool = True) -> No desc="mark_local_media_as_safe", ) - async def count_pending_media(self, user_id: UserID) -> Tuple[int, int]: + async def count_pending_media(self, user_id: UserID) -> tuple[int, int]: """Count the number of pending media for a user. Returns: @@ -552,7 +550,7 @@ async def count_pending_media(self, user_id: UserID) -> Tuple[int, int]: expiration timestamp. 
""" - def get_pending_media_txn(txn: LoggingTransaction) -> Tuple[int, int]: + def get_pending_media_txn(txn: LoggingTransaction) -> tuple[int, int]: sql = """ SELECT COUNT(*), MIN(created_ts) FROM local_media_repository @@ -637,9 +635,9 @@ async def store_url_cache( desc="store_url_cache", ) - async def get_local_media_thumbnails(self, media_id: str) -> List[ThumbnailInfo]: + async def get_local_media_thumbnails(self, media_id: str) -> list[ThumbnailInfo]: rows = cast( - List[Tuple[int, int, str, str, int]], + list[tuple[int, int, str, str, int]], await self.db_pool.simple_select_list( "local_media_repository_thumbnails", {"media_id": media_id}, @@ -755,7 +753,7 @@ async def store_cached_remote_media( async def update_cached_last_access_time( self, local_media: Iterable[str], - remote_media: Iterable[Tuple[str, str]], + remote_media: Iterable[tuple[str, str]], time_ms: int, ) -> None: """Updates the last access time of the given media @@ -793,9 +791,9 @@ def update_cache_txn(txn: LoggingTransaction) -> None: async def get_remote_media_thumbnails( self, origin: str, media_id: str - ) -> List[ThumbnailInfo]: + ) -> list[ThumbnailInfo]: rows = cast( - List[Tuple[int, int, str, str, int]], + list[tuple[int, int, str, str, int]], await self.db_pool.simple_select_list( "remote_media_cache_thumbnails", {"media_origin": origin, "media_id": media_id}, @@ -881,7 +879,7 @@ async def store_remote_media_thumbnail( async def get_remote_media_ids( self, before_ts: int, include_quarantined_media: bool - ) -> List[Tuple[str, str, str]]: + ) -> list[tuple[str, str, str]]: """ Retrieve a list of server name, media ID tuples from the remote media cache. @@ -911,7 +909,7 @@ async def get_remote_media_ids( """ return cast( - List[Tuple[str, str, str]], + list[tuple[str, str, str]], await self.db_pool.execute("get_remote_media_ids", sql, before_ts), ) @@ -932,7 +930,7 @@ def delete_remote_media_txn(txn: LoggingTransaction) -> None: "delete_remote_media", delete_remote_media_txn ) - async def get_expired_url_cache(self, now_ts: int) -> List[str]: + async def get_expired_url_cache(self, now_ts: int) -> list[str]: sql = ( "SELECT media_id FROM local_media_repository_url_cache" " WHERE expires_ts < ?" @@ -940,7 +938,7 @@ async def get_expired_url_cache(self, now_ts: int) -> List[str]: " LIMIT 500" ) - def _get_expired_url_cache_txn(txn: LoggingTransaction) -> List[str]: + def _get_expired_url_cache_txn(txn: LoggingTransaction) -> list[str]: txn.execute(sql, (now_ts,)) return [row[0] for row in txn] @@ -959,7 +957,7 @@ def _delete_url_cache_txn(txn: LoggingTransaction) -> None: await self.db_pool.runInteraction("delete_url_cache", _delete_url_cache_txn) - async def get_url_cache_media_before(self, before_ts: int) -> List[str]: + async def get_url_cache_media_before(self, before_ts: int) -> list[str]: sql = ( "SELECT media_id FROM local_media_repository" " WHERE created_ts < ? 
AND url_cache IS NOT NULL" @@ -967,7 +965,7 @@ async def get_url_cache_media_before(self, before_ts: int) -> List[str]: " LIMIT 500" ) - def _get_url_cache_media_before_txn(txn: LoggingTransaction) -> List[str]: + def _get_url_cache_media_before_txn(txn: LoggingTransaction) -> list[str]: txn.execute(sql, (before_ts,)) return [row[0] for row in txn] diff --git a/synapse/storage/databases/main/metrics.py b/synapse/storage/databases/main/metrics.py index 49411ed0341..dc8e2c16165 100644 --- a/synapse/storage/databases/main/metrics.py +++ b/synapse/storage/databases/main/metrics.py @@ -21,7 +21,7 @@ import calendar import logging import time -from typing import TYPE_CHECKING, Dict, List, Tuple, cast +from typing import TYPE_CHECKING, cast from synapse.metrics import SERVER_NAME_LABEL, GaugeBucketCollector from synapse.metrics.background_process_metrics import wrap_as_background_process @@ -85,7 +85,7 @@ def __init__( @wrap_as_background_process("read_forward_extremities") async def _read_forward_extremities(self) -> None: - def fetch(txn: LoggingTransaction) -> List[Tuple[int, int]]: + def fetch(txn: LoggingTransaction) -> list[tuple[int, int]]: txn.execute( """ SELECT t1.c, t2.c @@ -98,7 +98,7 @@ def fetch(txn: LoggingTransaction) -> List[Tuple[int, int]]: ) t2 ON t1.room_id = t2.room_id """ ) - return cast(List[Tuple[int, int]], txn.fetchall()) + return cast(list[tuple[int, int]], txn.fetchall()) res = await self.db_pool.runInteraction("read_forward_extremities", fetch) @@ -125,7 +125,7 @@ def _count_messages(txn: LoggingTransaction) -> int: AND stream_ordering > ? """ txn.execute(sql, (self.stream_ordering_day_ago,)) - (count,) = cast(Tuple[int], txn.fetchone()) + (count,) = cast(tuple[int], txn.fetchone()) return count return await self.db_pool.runInteraction("count_e2ee_messages", _count_messages) @@ -144,7 +144,7 @@ def _count_messages(txn: LoggingTransaction) -> int: """ txn.execute(sql, (like_clause, self.stream_ordering_day_ago)) - (count,) = cast(Tuple[int], txn.fetchone()) + (count,) = cast(tuple[int], txn.fetchone()) return count return await self.db_pool.runInteraction( @@ -159,7 +159,7 @@ def _count(txn: LoggingTransaction) -> int: AND stream_ordering > ? """ txn.execute(sql, (self.stream_ordering_day_ago,)) - (count,) = cast(Tuple[int], txn.fetchone()) + (count,) = cast(tuple[int], txn.fetchone()) return count return await self.db_pool.runInteraction( @@ -181,7 +181,7 @@ def _count_messages(txn: LoggingTransaction) -> int: AND stream_ordering > ? """ txn.execute(sql, (self.stream_ordering_day_ago,)) - (count,) = cast(Tuple[int], txn.fetchone()) + (count,) = cast(tuple[int], txn.fetchone()) return count return await self.db_pool.runInteraction("count_messages", _count_messages) @@ -200,7 +200,7 @@ def _count_messages(txn: LoggingTransaction) -> int: """ txn.execute(sql, (like_clause, self.stream_ordering_day_ago)) - (count,) = cast(Tuple[int], txn.fetchone()) + (count,) = cast(tuple[int], txn.fetchone()) return count return await self.db_pool.runInteraction( @@ -215,7 +215,7 @@ def _count(txn: LoggingTransaction) -> int: AND stream_ordering > ? """ txn.execute(sql, (self.stream_ordering_day_ago,)) - (count,) = cast(Tuple[int], txn.fetchone()) + (count,) = cast(tuple[int], txn.fetchone()) return count return await self.db_pool.runInteraction("count_daily_active_rooms", _count) @@ -256,10 +256,10 @@ def _count_users(self, txn: LoggingTransaction, time_from: int) -> int: # Mypy knows that fetchone() might return None if there are no rows. # We know better: "SELECT COUNT(...) 
FROM ..." without any GROUP BY always # returns exactly one row. - (count,) = cast(Tuple[int], txn.fetchone()) + (count,) = cast(tuple[int], txn.fetchone()) return count - async def count_r30v2_users(self) -> Dict[str, int]: + async def count_r30v2_users(self) -> dict[str, int]: """ Counts the number of 30 day retained users, defined as users that: - Appear more than once in the past 60 days @@ -279,7 +279,7 @@ async def count_r30v2_users(self) -> Dict[str, int]: - "web" (any web application -- it's not possible to distinguish Element Web here) """ - def _count_r30v2_users(txn: LoggingTransaction) -> Dict[str, int]: + def _count_r30v2_users(txn: LoggingTransaction) -> dict[str, int]: thirty_days_in_secs = 86400 * 30 now = int(self.clock.time()) sixty_days_ago_in_secs = now - 2 * thirty_days_in_secs @@ -376,7 +376,7 @@ def _count_r30v2_users(txn: LoggingTransaction) -> Dict[str, int]: thirty_days_in_secs * 1000, ), ) - (count,) = cast(Tuple[int], txn.fetchone()) + (count,) = cast(tuple[int], txn.fetchone()) results["all"] = count return results diff --git a/synapse/storage/databases/main/monthly_active_users.py b/synapse/storage/databases/main/monthly_active_users.py index 86744f616ce..bf8e540ffba 100644 --- a/synapse/storage/databases/main/monthly_active_users.py +++ b/synapse/storage/databases/main/monthly_active_users.py @@ -18,7 +18,7 @@ # # import logging -from typing import TYPE_CHECKING, Dict, List, Mapping, Optional, Tuple, cast +from typing import TYPE_CHECKING, Mapping, Optional, cast from synapse.metrics.background_process_metrics import wrap_as_background_process from synapse.storage.database import ( @@ -94,7 +94,7 @@ def _count_users(txn: LoggingTransaction) -> int: WHERE (users.appservice_id IS NULL OR users.appservice_id = ''); """ txn.execute(sql) - (count,) = cast(Tuple[int], txn.fetchone()) + (count,) = cast(tuple[int], txn.fetchone()) return count return await self.db_pool.runInteraction("count_users", _count_users) @@ -112,7 +112,7 @@ async def get_monthly_active_count_by_service(self) -> Mapping[str, int]: """ - def _count_users_by_service(txn: LoggingTransaction) -> Dict[str, int]: + def _count_users_by_service(txn: LoggingTransaction) -> dict[str, int]: sql = """ SELECT COALESCE(appservice_id, 'native'), COUNT(*) FROM monthly_active_users @@ -121,7 +121,7 @@ def _count_users_by_service(txn: LoggingTransaction) -> Dict[str, int]: """ txn.execute(sql) - result = cast(List[Tuple[str, int]], txn.fetchall()) + result = cast(list[tuple[str, int]], txn.fetchall()) return dict(result) return await self.db_pool.runInteraction( @@ -130,7 +130,7 @@ def _count_users_by_service(txn: LoggingTransaction) -> Dict[str, int]: async def get_monthly_active_users_by_service( self, start_timestamp: Optional[int] = None, end_timestamp: Optional[int] = None - ) -> List[Tuple[str, str]]: + ) -> list[tuple[str, str]]: """Generates list of monthly active users and their services. Please see "get_monthly_active_count_by_service" docstring for more details about services. 
@@ -160,7 +160,7 @@ async def get_monthly_active_users_by_service( where_clause = "" query_params = [] - def _list_users(txn: LoggingTransaction) -> List[Tuple[str, str]]: + def _list_users(txn: LoggingTransaction) -> list[tuple[str, str]]: sql = f""" SELECT COALESCE(appservice_id, 'native'), user_id FROM monthly_active_users @@ -169,11 +169,11 @@ def _list_users(txn: LoggingTransaction) -> List[Tuple[str, str]]: """ txn.execute(sql, query_params) - return cast(List[Tuple[str, str]], txn.fetchall()) + return cast(list[tuple[str, str]], txn.fetchall()) return await self.db_pool.runInteraction("list_users", _list_users) - async def get_registered_reserved_users(self) -> List[str]: + async def get_registered_reserved_users(self) -> list[str]: """Of the reserved threepids defined in config, retrieve those that are associated with registered users @@ -219,7 +219,7 @@ async def reap_monthly_active_users(self) -> None: entries exist. """ - def _reap_users(txn: LoggingTransaction, reserved_users: List[str]) -> None: + def _reap_users(txn: LoggingTransaction, reserved_users: list[str]) -> None: """ Args: reserved_users: reserved users to preserve @@ -294,7 +294,7 @@ def _reap_users(txn: LoggingTransaction, reserved_users: List[str]) -> None: ) def _initialise_reserved_users( - self, txn: LoggingTransaction, threepids: List[dict] + self, txn: LoggingTransaction, threepids: list[dict] ) -> None: """Ensures that reserved threepids are accounted for in the MAU table, should be called on start up. diff --git a/synapse/storage/databases/main/presence.py b/synapse/storage/databases/main/presence.py index 587f51df2c8..fec94f4e5ae 100644 --- a/synapse/storage/databases/main/presence.py +++ b/synapse/storage/databases/main/presence.py @@ -21,12 +21,9 @@ from typing import ( TYPE_CHECKING, Any, - Dict, Iterable, - List, Mapping, Optional, - Tuple, Union, cast, ) @@ -116,8 +113,8 @@ def __init__( ) async def update_presence( - self, presence_states: List[UserPresenceState] - ) -> Tuple[int, int]: + self, presence_states: list[UserPresenceState] + ) -> tuple[int, int]: assert self._can_persist_presence stream_ordering_manager = self._presence_id_gen.get_next_mult( @@ -142,8 +139,8 @@ async def update_presence( def _update_presence_txn( self, txn: LoggingTransaction, - stream_orderings: List[int], - presence_states: List[UserPresenceState], + stream_orderings: list[int], + presence_states: list[UserPresenceState], ) -> None: for stream_id, state in zip(stream_orderings, presence_states): txn.call_after( @@ -193,7 +190,7 @@ def _update_presence_txn( async def get_all_presence_updates( self, instance_name: str, last_id: int, current_id: int, limit: int - ) -> Tuple[List[Tuple[int, list]], int, bool]: + ) -> tuple[list[tuple[int, list]], int, bool]: """Get updates for presence replication stream. 
Args: @@ -220,7 +217,7 @@ async def get_all_presence_updates( def get_all_presence_updates_txn( txn: LoggingTransaction, - ) -> Tuple[List[Tuple[int, list]], int, bool]: + ) -> tuple[list[tuple[int, list]], int, bool]: sql = """ SELECT stream_id, user_id, state, last_active_ts, last_federation_update_ts, last_user_sync_ts, @@ -232,7 +229,7 @@ def get_all_presence_updates_txn( """ txn.execute(sql, (last_id, current_id, limit)) updates = cast( - List[Tuple[int, list]], + list[tuple[int, list]], [(row[0], row[1:]) for row in txn], ) @@ -263,7 +260,7 @@ async def get_presence_for_users( # TODO All these columns are nullable, but we don't expect that: # https://github.com/matrix-org/synapse/issues/16467 rows = cast( - List[Tuple[str, str, int, int, int, Optional[str], Union[int, bool]]], + list[tuple[str, str, int, int, int, Optional[str], Union[int, bool]]], await self.db_pool.simple_select_many_batch( table="presence_stream", column="user_id", @@ -375,7 +372,7 @@ def _add_users_to_send_full_presence_to(txn: LoggingTransaction) -> None: async def get_presence_for_all_users( self, include_offline: bool = True, - ) -> Dict[str, UserPresenceState]: + ) -> dict[str, UserPresenceState]: """Retrieve the current presence state for all users. Note that the presence_stream table is culled frequently, so it should only @@ -402,7 +399,7 @@ async def get_presence_for_all_users( # TODO All these columns are nullable, but we don't expect that: # https://github.com/matrix-org/synapse/issues/16467 rows = cast( - List[Tuple[str, str, int, int, int, Optional[str], Union[int, bool]]], + list[tuple[str, str, int, int, int, Optional[str], Union[int, bool]]], await self.db_pool.runInteraction( "get_presence_for_all_users", self.db_pool.simple_select_list_paginate_txn, @@ -457,7 +454,7 @@ def get_current_presence_token(self) -> int: def get_presence_stream_id_gen(self) -> MultiWriterIdGenerator: return self._presence_id_gen - def _get_active_presence(self, db_conn: Connection) -> List[UserPresenceState]: + def _get_active_presence(self, db_conn: Connection) -> list[UserPresenceState]: """Fetch non-offline presence from the database so that we can register the appropriate time outs. """ @@ -488,7 +485,7 @@ def _get_active_presence(self, db_conn: Connection) -> List[UserPresenceState]: for user_id, state, last_active_ts, last_federation_update_ts, last_user_sync_ts, status_msg, currently_active in rows ] - def take_presence_startup_info(self) -> List[UserPresenceState]: + def take_presence_startup_info(self) -> list[UserPresenceState]: active_on_startup = self._presence_on_startup self._presence_on_startup = [] return active_on_startup diff --git a/synapse/storage/databases/main/profile.py b/synapse/storage/databases/main/profile.py index 30d8a58d965..71f01a597b5 100644 --- a/synapse/storage/databases/main/profile.py +++ b/synapse/storage/databases/main/profile.py @@ -19,7 +19,7 @@ # # import json -from typing import TYPE_CHECKING, Dict, Optional, Tuple, cast +from typing import TYPE_CHECKING, Optional, cast from canonicaljson import encode_canonical_json @@ -240,7 +240,7 @@ def get_profile_field(txn: LoggingTransaction) -> JsonValue: # Test exists first since value being None is used for both # missing and a null JSON value. 
- exists, value = cast(Tuple[bool, JsonValue], txn.fetchone()) + exists, value = cast(tuple[bool, JsonValue], txn.fetchone()) if not exists: raise StoreError(404, "No row found") return value @@ -258,7 +258,7 @@ def get_profile_field(txn: LoggingTransaction) -> JsonValue: # If value_type is None, then the value did not exist. value_type, value = cast( - Tuple[Optional[str], JsonValue], txn.fetchone() + tuple[Optional[str], JsonValue], txn.fetchone() ) if not value_type: raise StoreError(404, "No row found") @@ -271,7 +271,7 @@ def get_profile_field(txn: LoggingTransaction) -> JsonValue: return await self.db_pool.runInteraction("get_profile_field", get_profile_field) - async def get_profile_fields(self, user_id: UserID) -> Dict[str, str]: + async def get_profile_fields(self, user_id: UserID) -> dict[str, str]: """ Get all custom profile fields for a user. @@ -346,7 +346,7 @@ def _check_profile_size( # possible due to the grammar. (f'$."{new_field_name}"', user_id.localpart), ) - row = cast(Tuple[Optional[int], Optional[int], Optional[int]], txn.fetchone()) + row = cast(tuple[Optional[int], Optional[int], Optional[int]], txn.fetchone()) # The values return null if the column is null. total_bytes = ( diff --git a/synapse/storage/databases/main/purge_events.py b/synapse/storage/databases/main/purge_events.py index d4642a1309b..10de1b35a6e 100644 --- a/synapse/storage/databases/main/purge_events.py +++ b/synapse/storage/databases/main/purge_events.py @@ -20,7 +20,7 @@ # import logging -from typing import Any, Set, Tuple, cast +from typing import Any, cast from synapse.api.errors import SynapseError from synapse.storage.database import LoggingTransaction @@ -103,7 +103,7 @@ class PurgeEventsStore(StateGroupWorkerStore, CacheInvalidationWorkerStore): async def purge_history( self, room_id: str, token: str, delete_local_events: bool - ) -> Set[int]: + ) -> set[int]: """Deletes room history before a certain point. Note that only a single purge can occur at once, this is guaranteed via @@ -137,7 +137,7 @@ def _purge_history_txn( room_id: str, token: RoomStreamToken, delete_local_events: bool, - ) -> Set[int]: + ) -> set[int]: # Tables that should be pruned: # event_auth # event_backward_extremities @@ -204,7 +204,7 @@ def _purge_history_txn( logger.info("[purge] looking for events to delete") should_delete_expr = "state_events.state_key IS NULL" - should_delete_params: Tuple[Any, ...] = () + should_delete_params: tuple[Any, ...] = () if not delete_local_events: should_delete_expr += " AND event_id NOT LIKE ?" 
@@ -355,7 +355,7 @@ def _purge_history_txn( """, (room_id,), ) - (min_depth,) = cast(Tuple[int], txn.fetchone()) + (min_depth,) = cast(tuple[int], txn.fetchone()) logger.info("[purge] updating room_depth to %d", min_depth) diff --git a/synapse/storage/databases/main/push_rule.py b/synapse/storage/databases/main/push_rule.py index 1860be17131..ecab19eb2e5 100644 --- a/synapse/storage/databases/main/push_rule.py +++ b/synapse/storage/databases/main/push_rule.py @@ -23,13 +23,10 @@ TYPE_CHECKING, Any, Collection, - Dict, Iterable, - List, Mapping, Optional, Sequence, - Tuple, Union, cast, ) @@ -69,8 +66,8 @@ def _load_rules( - rawrules: List[Tuple[str, int, str, str]], - enabled_map: Dict[str, bool], + rawrules: list[tuple[str, int, str, str]], + enabled_map: dict[str, bool], experimental_config: ExperimentalConfig, ) -> FilteredPushRules: """Take the DB rows returned from the DB and convert them into a full @@ -206,7 +203,7 @@ def process_replication_position( @cached(max_entries=5000) async def get_push_rules_for_user(self, user_id: str) -> FilteredPushRules: rows = cast( - List[Tuple[str, int, int, str, str]], + list[tuple[str, int, int, str, str]], await self.db_pool.simple_select_list( table="push_rules", keyvalues={"user_name": user_id}, @@ -232,9 +229,9 @@ async def get_push_rules_for_user(self, user_id: str) -> FilteredPushRules: self.hs.config.experimental, ) - async def get_push_rules_enabled_for_user(self, user_id: str) -> Dict[str, bool]: + async def get_push_rules_enabled_for_user(self, user_id: str) -> dict[str, bool]: results = cast( - List[Tuple[str, Optional[Union[int, bool]]]], + list[tuple[str, Optional[Union[int, bool]]]], await self.db_pool.simple_select_list( table="push_rules_enable", keyvalues={"user_name": user_id}, @@ -257,7 +254,7 @@ def have_push_rules_changed_txn(txn: LoggingTransaction) -> bool: " WHERE user_id = ? AND ? 
< stream_id" ) txn.execute(sql, (user_id, last_id)) - (count,) = cast(Tuple[int], txn.fetchone()) + (count,) = cast(tuple[int], txn.fetchone()) return bool(count) return await self.db_pool.runInteraction( @@ -271,7 +268,7 @@ async def bulk_get_push_rules( if not user_ids: return {} - raw_rules: Dict[str, List[Tuple[str, int, str, str]]] = { + raw_rules: dict[str, list[tuple[str, int, str, str]]] = { user_id: [] for user_id in user_ids } @@ -280,7 +277,7 @@ async def bulk_get_push_rules( gather_results( ( cast( - "defer.Deferred[List[Tuple[str, str, int, int, str, str]]]", + "defer.Deferred[list[tuple[str, str, int, int, str, str]]]", run_in_background( self.db_pool.simple_select_many_batch, table="push_rules", @@ -312,7 +309,7 @@ async def bulk_get_push_rules( (rule_id, priority_class, conditions, actions) ) - results: Dict[str, FilteredPushRules] = {} + results: dict[str, FilteredPushRules] = {} for user_id, rules in raw_rules.items(): results[user_id] = _load_rules( @@ -323,14 +320,14 @@ async def bulk_get_push_rules( async def bulk_get_push_rules_enabled( self, user_ids: Collection[str] - ) -> Dict[str, Dict[str, bool]]: + ) -> dict[str, dict[str, bool]]: if not user_ids: return {} - results: Dict[str, Dict[str, bool]] = {user_id: {} for user_id in user_ids} + results: dict[str, dict[str, bool]] = {user_id: {} for user_id in user_ids} rows = cast( - List[Tuple[str, str, Optional[int]]], + list[tuple[str, str, Optional[int]]], await self.db_pool.simple_select_many_batch( table="push_rules_enable", column="user_name", @@ -346,7 +343,7 @@ async def bulk_get_push_rules_enabled( async def get_all_push_rule_updates( self, instance_name: str, last_id: int, current_id: int, limit: int - ) -> Tuple[List[Tuple[int, Tuple[str]]], int, bool]: + ) -> tuple[list[tuple[int, tuple[str]]], int, bool]: """Get updates for push_rules replication stream. Args: @@ -373,7 +370,7 @@ async def get_all_push_rule_updates( def get_all_push_rule_updates_txn( txn: LoggingTransaction, - ) -> Tuple[List[Tuple[int, Tuple[str]]], int, bool]: + ) -> tuple[list[tuple[int, tuple[str]]], int, bool]: sql = """ SELECT stream_id, user_id FROM push_rules_stream @@ -383,7 +380,7 @@ def get_all_push_rule_updates_txn( """ txn.execute(sql, (last_id, current_id, limit)) updates = cast( - List[Tuple[int, Tuple[str]]], + list[tuple[int, tuple[str]]], [(stream_id, (user_id,)) for stream_id, user_id in txn], ) @@ -794,7 +791,7 @@ async def set_push_rule_actions( self, user_id: str, rule_id: str, - actions: List[Union[dict, str]], + actions: list[Union[dict, str]], is_default_rule: bool, ) -> None: """ diff --git a/synapse/storage/databases/main/pusher.py b/synapse/storage/databases/main/pusher.py index 1b2aa79ab17..c8f049536ac 100644 --- a/synapse/storage/databases/main/pusher.py +++ b/synapse/storage/databases/main/pusher.py @@ -23,12 +23,9 @@ from typing import ( TYPE_CHECKING, Any, - Dict, Iterable, Iterator, - List, Optional, - Tuple, cast, ) @@ -51,7 +48,7 @@ logger = logging.getLogger(__name__) # The type of a row in the pushers table. 
-PusherRow = Tuple[ +PusherRow = tuple[ int, # id str, # user_name Optional[int], # access_token @@ -192,7 +189,7 @@ async def get_pushers_by_app_id_and_pushkey( async def get_pushers_by_user_id(self, user_id: str) -> Iterator[PusherConfig]: return await self.get_pushers_by({"user_name": user_id}) - async def get_pushers_by(self, keyvalues: Dict[str, Any]) -> Iterator[PusherConfig]: + async def get_pushers_by(self, keyvalues: dict[str, Any]) -> Iterator[PusherConfig]: """Retrieve pushers that match the given criteria. Args: @@ -202,7 +199,7 @@ async def get_pushers_by(self, keyvalues: Dict[str, Any]) -> Iterator[PusherConf The pushers for which the given columns have the given values. """ - def get_pushers_by_txn(txn: LoggingTransaction) -> List[PusherRow]: + def get_pushers_by_txn(txn: LoggingTransaction) -> list[PusherRow]: # We could technically use simple_select_list here, but we need to call # COALESCE on the 'enabled' column. While it is technically possible to give # simple_select_list the whole `COALESCE(...) AS ...` as a column name, it @@ -220,7 +217,7 @@ def get_pushers_by_txn(txn: LoggingTransaction) -> List[PusherRow]: txn.execute(sql, list(keyvalues.values())) - return cast(List[PusherRow], txn.fetchall()) + return cast(list[PusherRow], txn.fetchall()) ret = await self.db_pool.runInteraction( desc="get_pushers_by", @@ -230,7 +227,7 @@ def get_pushers_by_txn(txn: LoggingTransaction) -> List[PusherRow]: return self._decode_pushers_rows(ret) async def get_enabled_pushers(self) -> Iterator[PusherConfig]: - def get_enabled_pushers_txn(txn: LoggingTransaction) -> List[PusherRow]: + def get_enabled_pushers_txn(txn: LoggingTransaction) -> list[PusherRow]: txn.execute( """ SELECT id, user_name, access_token, profile_tag, kind, app_id, @@ -240,7 +237,7 @@ def get_enabled_pushers_txn(txn: LoggingTransaction) -> List[PusherRow]: FROM pushers WHERE COALESCE(enabled, TRUE) """ ) - return cast(List[PusherRow], txn.fetchall()) + return cast(list[PusherRow], txn.fetchall()) return self._decode_pushers_rows( await self.db_pool.runInteraction( @@ -250,7 +247,7 @@ def get_enabled_pushers_txn(txn: LoggingTransaction) -> List[PusherRow]: async def get_all_updated_pushers_rows( self, instance_name: str, last_id: int, current_id: int, limit: int - ) -> Tuple[List[Tuple[int, tuple]], int, bool]: + ) -> tuple[list[tuple[int, tuple]], int, bool]: """Get updates for pushers replication stream. 
Args: @@ -277,7 +274,7 @@ async def get_all_updated_pushers_rows( def get_all_updated_pushers_rows_txn( txn: LoggingTransaction, - ) -> Tuple[List[Tuple[int, tuple]], int, bool]: + ) -> tuple[list[tuple[int, tuple]], int, bool]: sql = """ SELECT id, user_name, app_id, pushkey FROM pushers @@ -286,7 +283,7 @@ def get_all_updated_pushers_rows_txn( """ txn.execute(sql, (last_id, current_id, limit)) updates = cast( - List[Tuple[int, tuple]], + list[tuple[int, tuple]], [ (stream_id, (user_name, app_id, pushkey, False)) for stream_id, user_name, app_id, pushkey in txn @@ -379,9 +376,9 @@ async def update_pusher_failing_since( async def get_throttle_params_by_room( self, pusher_id: int - ) -> Dict[str, ThrottleParams]: + ) -> dict[str, ThrottleParams]: res = cast( - List[Tuple[str, Optional[int], Optional[int]]], + list[tuple[str, Optional[int], Optional[int]]], await self.db_pool.simple_select_list( "pusher_throttle", {"pusher": pusher_id}, @@ -610,7 +607,7 @@ def set_device_id_for_pushers_txn(txn: LoggingTransaction) -> int: (last_pusher_id, batch_size), ) - rows = cast(List[Tuple[int, Optional[str], Optional[str]]], txn.fetchall()) + rows = cast(list[tuple[int, Optional[str], Optional[str]]], txn.fetchall()) if len(rows) == 0: return 0 @@ -764,7 +761,7 @@ async def delete_all_pushers_for_user(self, user_id: str) -> None: # account. pushers = list(await self.get_pushers_by_user_id(user_id)) - def delete_pushers_txn(txn: LoggingTransaction, stream_ids: List[int]) -> None: + def delete_pushers_txn(txn: LoggingTransaction, stream_ids: list[int]) -> None: self._invalidate_cache_and_stream( # type: ignore[attr-defined] txn, self.get_if_user_has_pusher, (user_id,) ) diff --git a/synapse/storage/databases/main/receipts.py b/synapse/storage/databases/main/receipts.py index f1dbf68971d..63d4e1f68c7 100644 --- a/synapse/storage/databases/main/receipts.py +++ b/synapse/storage/databases/main/receipts.py @@ -24,14 +24,10 @@ TYPE_CHECKING, Any, Collection, - Dict, Iterable, - List, Mapping, Optional, Sequence, - Set, - Tuple, cast, ) @@ -92,14 +88,14 @@ def merge_to_content(receipts: Collection["ReceiptInRoom"]) -> JsonMapping: # matching threaded receipts. # Set of (user_id, event_id) - unthreaded_receipts: Set[Tuple[str, str]] = { + unthreaded_receipts: set[tuple[str, str]] = { (receipt.user_id, receipt.event_id) for receipt in receipts if receipt.thread_id is None } # event_id -> receipt_type -> user_id -> receipt data - content: Dict[str, Dict[str, Dict[str, JsonMapping]]] = {} + content: dict[str, dict[str, dict[str, JsonMapping]]] = {} for receipt in receipts: data = receipt.data if receipt.thread_id is not None: @@ -180,7 +176,7 @@ def get_last_unthreaded_receipt_for_user_txn( user_id: str, room_id: str, receipt_types: Collection[str], - ) -> Optional[Tuple[str, int]]: + ) -> Optional[tuple[str, int]]: """ Fetch the event ID and stream_ordering for the latest unthreaded receipt in a room with one of the given receipt types. @@ -212,11 +208,11 @@ def get_last_unthreaded_receipt_for_user_txn( args.extend((user_id, room_id)) txn.execute(sql, args) - return cast(Optional[Tuple[str, int]], txn.fetchone()) + return cast(Optional[tuple[str, int]], txn.fetchone()) async def get_receipts_for_user( self, user_id: str, receipt_types: Iterable[str] - ) -> Dict[str, str]: + ) -> dict[str, str]: """ Fetch the event IDs for the latest receipts sent by the given user. @@ -285,7 +281,7 @@ async def _get_receipts_for_user_with_orderings( A map of room ID to the latest receipt information. 
""" - def f(txn: LoggingTransaction) -> List[Tuple[str, str, int, int]]: + def f(txn: LoggingTransaction) -> list[tuple[str, str, int, int]]: sql = ( "SELECT rl.room_id, rl.event_id," " e.topological_ordering, e.stream_ordering" @@ -297,7 +293,7 @@ def f(txn: LoggingTransaction) -> List[Tuple[str, str, int, int]]: " AND receipt_type = ?" ) txn.execute(sql, (user_id, receipt_type)) - return cast(List[Tuple[str, str, int, int]], txn.fetchall()) + return cast(list[tuple[str, str, int, int]], txn.fetchall()) rows = await self.db_pool.runInteraction( "get_receipts_for_user_with_orderings", f @@ -316,7 +312,7 @@ async def get_linearized_receipts_for_rooms( room_ids: Iterable[str], to_key: MultiWriterStreamToken, from_key: Optional[MultiWriterStreamToken] = None, - ) -> List[JsonMapping]: + ) -> list[JsonMapping]: """Get receipts for multiple rooms for sending to clients. Args: @@ -379,7 +375,7 @@ async def _get_linearized_receipts_for_room( ) -> Sequence[JsonMapping]: """See get_linearized_receipts_for_room""" - def f(txn: LoggingTransaction) -> List[Tuple[str, str, str, str]]: + def f(txn: LoggingTransaction) -> list[tuple[str, str, str, str]]: if from_key: sql = """ SELECT stream_id, instance_name, receipt_type, user_id, event_id, data @@ -466,7 +462,7 @@ def f( txn.execute(sql + clause, [to_key.get_max_stream_pos()] + list(args)) - results: Dict[str, List[ReceiptInRoom]] = {} + results: dict[str, list[ReceiptInRoom]] = {} for ( stream_id, instance_name, @@ -515,7 +511,7 @@ def f( async def get_linearized_receipts_for_events( self, - room_and_event_ids: Collection[Tuple[str, str]], + room_and_event_ids: Collection[tuple[str, str]], ) -> Mapping[str, Sequence[ReceiptInRoom]]: """Get all receipts for the given set of events. @@ -531,8 +527,8 @@ async def get_linearized_receipts_for_events( def get_linearized_receipts_for_events_txn( txn: LoggingTransaction, - room_id_event_id_tuples: Collection[Tuple[str, str]], - ) -> List[Tuple[str, str, str, str, Optional[str], str]]: + room_id_event_id_tuples: Collection[tuple[str, str]], + ) -> list[tuple[str, str, str, str, Optional[str], str]]: clause, args = make_tuple_in_list_sql_clause( self.database_engine, ("room_id", "event_id"), room_id_event_id_tuples ) @@ -548,7 +544,7 @@ def get_linearized_receipts_for_events_txn( return txn.fetchall() # room_id -> receipts - room_to_receipts: Dict[str, List[ReceiptInRoom]] = {} + room_to_receipts: dict[str, list[ReceiptInRoom]] = {} for batch in batch_iter(room_and_event_ids, 1000): batch_results = await self.db_pool.runInteraction( "get_linearized_receipts_for_events", @@ -596,7 +592,7 @@ async def get_linearized_receipts_for_all_rooms( A dictionary of roomids to a list of receipts. 
""" - def f(txn: LoggingTransaction) -> List[Tuple[str, str, str, str, str]]: + def f(txn: LoggingTransaction) -> list[tuple[str, str, str, str, str]]: if from_key: sql = """ SELECT stream_id, instance_name, room_id, receipt_type, user_id, event_id, data @@ -659,7 +655,7 @@ async def get_linearized_receipts_for_user_in_rooms( def get_linearized_receipts_for_user_in_rooms_txn( txn: LoggingTransaction, batch_room_ids: StrCollection, - ) -> List[Tuple[str, str, str, str, Optional[str], str]]: + ) -> list[tuple[str, str, str, str, Optional[str], str]]: clause, args = make_in_list_sql_clause( self.database_engine, "room_id", batch_room_ids ) @@ -687,7 +683,7 @@ def get_linearized_receipts_for_user_in_rooms_txn( ] # room_id -> receipts - room_to_receipts: Dict[str, List[ReceiptInRoom]] = {} + room_to_receipts: dict[str, list[ReceiptInRoom]] = {} for batch in batch_iter(room_ids, 1000): batch_results = await self.db_pool.runInteraction( "get_linearized_receipts_for_events", @@ -746,7 +742,7 @@ def f(txn: LoggingTransaction, room_ids: StrCollection) -> StrCollection: return [room_id for (room_id,) in txn] - results: List[str] = [] + results: list[str] = [] for batch in batch_iter(room_ids, 1000): batch_result = await self.db_pool.runInteraction( "get_rooms_with_receipts_between", f, batch @@ -757,7 +753,7 @@ def f(txn: LoggingTransaction, room_ids: StrCollection) -> StrCollection: async def get_users_sent_receipts_between( self, last_id: int, current_id: int - ) -> List[str]: + ) -> list[str]: """Get all users who sent receipts between `last_id` exclusive and `current_id` inclusive. @@ -768,7 +764,7 @@ async def get_users_sent_receipts_between( if last_id == current_id: return [] - def _get_users_sent_receipts_between_txn(txn: LoggingTransaction) -> List[str]: + def _get_users_sent_receipts_between_txn(txn: LoggingTransaction) -> list[str]: sql = """ SELECT DISTINCT user_id FROM receipts_linearized WHERE ? < stream_id AND stream_id <= ? @@ -783,8 +779,8 @@ def _get_users_sent_receipts_between_txn(txn: LoggingTransaction) -> List[str]: async def get_all_updated_receipts( self, instance_name: str, last_id: int, current_id: int, limit: int - ) -> Tuple[ - List[Tuple[int, Tuple[str, str, str, str, Optional[str], JsonDict]]], int, bool + ) -> tuple[ + list[tuple[int, tuple[str, str, str, str, Optional[str], JsonDict]]], int, bool ]: """Get updates for receipts replication stream. @@ -812,8 +808,8 @@ async def get_all_updated_receipts( def get_all_updated_receipts_txn( txn: LoggingTransaction, - ) -> Tuple[ - List[Tuple[int, Tuple[str, str, str, str, Optional[str], JsonDict]]], + ) -> tuple[ + list[tuple[int, tuple[str, str, str, str, Optional[str], JsonDict]]], int, bool, ]: @@ -828,7 +824,7 @@ def get_all_updated_receipts_txn( txn.execute(sql, (last_id, current_id, instance_name, limit)) updates = cast( - List[Tuple[int, Tuple[str, str, str, str, Optional[str], JsonDict]]], + list[tuple[int, tuple[str, str, str, str, Optional[str], JsonDict]]], [(r[0], r[1:6] + (db_to_json(r[6]),)) for r in txn], ) @@ -917,7 +913,7 @@ def _insert_linearized_receipt_txn( if stream_ordering is not None: if thread_id is None: thread_clause = "r.thread_id IS NULL" - thread_args: Tuple[str, ...] = () + thread_args: tuple[str, ...] = () else: thread_clause = "r.thread_id = ?" 
thread_args = (thread_id,) @@ -986,7 +982,7 @@ def _insert_linearized_receipt_txn( return rx_ts def _graph_to_linear( - self, txn: LoggingTransaction, room_id: str, event_ids: List[str] + self, txn: LoggingTransaction, room_id: str, event_ids: list[str] ) -> str: """ Generate a linearized event from a list of events (i.e. a list of forward @@ -1026,7 +1022,7 @@ async def insert_receipt( room_id: str, receipt_type: str, user_id: str, - event_ids: List[str], + event_ids: list[str], thread_id: Optional[str], data: dict, ) -> Optional[PersistedPosition]: @@ -1098,7 +1094,7 @@ async def _insert_graph_receipt( room_id: str, receipt_type: str, user_id: str, - event_ids: List[str], + event_ids: list[str], thread_id: Optional[str], data: JsonDict, ) -> None: @@ -1237,7 +1233,7 @@ def _remote_duplicate_receipts_txn(txn: LoggingTransaction) -> None: HAVING COUNT(*) > 1 """ txn.execute(sql) - duplicate_keys = cast(List[Tuple[int, str, str, str]], list(txn)) + duplicate_keys = cast(list[tuple[int, str, str, str]], list(txn)) # Then remove duplicate receipts, keeping the one with the highest # `stream_id`. Since there might be duplicate rows with the same @@ -1255,7 +1251,7 @@ def _remote_duplicate_receipts_txn(txn: LoggingTransaction) -> None: LIMIT 1 """ txn.execute(sql, (room_id, receipt_type, user_id, stream_id)) - row_id = cast(Tuple[str], txn.fetchone())[0] + row_id = cast(tuple[str], txn.fetchone())[0] sql = f""" DELETE FROM receipts_linearized @@ -1306,7 +1302,7 @@ def _remote_duplicate_receipts_txn(txn: LoggingTransaction) -> None: HAVING COUNT(*) > 1 """ txn.execute(sql) - duplicate_keys = cast(List[Tuple[str, str, str]], list(txn)) + duplicate_keys = cast(list[tuple[str, str, str]], list(txn)) # Then remove all duplicate receipts. # We could be clever and try to keep the latest receipt out of every set of diff --git a/synapse/storage/databases/main/registration.py b/synapse/storage/databases/main/registration.py index 906d1a91f68..7ce9bf43e63 100644 --- a/synapse/storage/databases/main/registration.py +++ b/synapse/storage/databases/main/registration.py @@ -22,7 +22,7 @@ import logging import random import re -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union, cast +from typing import TYPE_CHECKING, Any, Optional, Union, cast import attr @@ -576,7 +576,7 @@ async def set_renewal_token_for_user( async def get_user_from_renewal_token( self, renewal_token: str - ) -> Tuple[str, int, Optional[int]]: + ) -> tuple[str, int, Optional[int]]: """Get a user ID and renewal status from a renewal token. Args: @@ -592,7 +592,7 @@ async def get_user_from_renewal_token( has not been renewed using the current token yet. """ return cast( - Tuple[str, int, Optional[int]], + tuple[str, int, Optional[int]], await self.db_pool.simple_select_one( table="account_validity", keyvalues={"renewal_token": renewal_token}, @@ -617,7 +617,7 @@ async def get_renewal_token_for_user(self, user_id: str) -> str: desc="get_renewal_token_for_user", ) - async def get_users_expiring_soon(self) -> List[Tuple[str, int]]: + async def get_users_expiring_soon(self) -> list[tuple[str, int]]: """Selects users whose account will expire in the [now, now + renew_at] time window (see configuration for account_validity for information on what renew_at refers to). 
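[Note, not part of the patch] The selection window described in the `get_users_expiring_soon` docstring is implemented by the `(expiration_ts_ms - ?) <= ?` predicate in the hunk that follows, with `values = [now_ms, renew_at]`. A worked sketch of that predicate (hypothetical helper, values in milliseconds):

    def expiring_soon(expiration_ts_ms: int, now_ms: int, renew_at_ms: int) -> bool:
        # Mirrors "... WHERE email_sent = FALSE AND (expiration_ts_ms - ?) <= ?":
        # a user matches once no more than renew_at remains before expiry
        # (already-expired accounts also satisfy the inequality).
        return expiration_ts_ms - now_ms <= renew_at_ms

    assert expiring_soon(100_000, 90_000, 15_000)       # expires in 10s, window 15s
    assert not expiring_soon(200_000, 90_000, 15_000)   # expires in 110s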
@@ -628,14 +628,14 @@ async def get_users_expiring_soon(self) -> List[Tuple[str, int]]: def select_users_txn( txn: LoggingTransaction, now_ms: int, renew_at: int - ) -> List[Tuple[str, int]]: + ) -> list[tuple[str, int]]: sql = ( "SELECT user_id, expiration_ts_ms FROM account_validity" " WHERE email_sent = FALSE AND (expiration_ts_ms - ?) <= ?" ) values = [now_ms, renew_at] txn.execute(sql, values) - return cast(List[Tuple[str, int]], txn.fetchall()) + return cast(list[tuple[str, int]], txn.fetchall()) return await self.db_pool.runInteraction( "get_users_expiring_soon", @@ -858,17 +858,17 @@ def is_support_user_txn(self, txn: LoggingTransaction, user_id: str) -> bool: ) return True if res == UserTypes.SUPPORT else False - async def get_users_by_id_case_insensitive(self, user_id: str) -> Dict[str, str]: + async def get_users_by_id_case_insensitive(self, user_id: str) -> dict[str, str]: """Gets users that match user_id case insensitively. Returns: A mapping of user_id -> password_hash. """ - def f(txn: LoggingTransaction) -> Dict[str, str]: + def f(txn: LoggingTransaction) -> dict[str, str]: sql = "SELECT name, password_hash FROM users WHERE lower(name) = lower(?)" txn.execute(sql, (user_id,)) - result = cast(List[Tuple[str, str]], txn.fetchall()) + result = cast(list[tuple[str, str]], txn.fetchall()) return dict(result) return await self.db_pool.runInteraction("get_users_by_id_case_insensitive", f) @@ -978,7 +978,7 @@ async def remove_user_external_id( async def replace_user_external_id( self, - record_external_ids: List[Tuple[str, str]], + record_external_ids: list[tuple[str, str]], user_id: str, ) -> None: """Replace mappings from external user ids to a mxid in a single transaction. @@ -1045,7 +1045,7 @@ async def get_user_by_external_id( desc="get_user_by_external_id", ) - async def get_external_ids_by_user(self, mxid: str) -> List[Tuple[str, str]]: + async def get_external_ids_by_user(self, mxid: str) -> list[tuple[str, str]]: """Look up external ids for the given user Args: @@ -1055,7 +1055,7 @@ async def get_external_ids_by_user(self, mxid: str) -> List[Tuple[str, str]]: Tuples of (auth_provider, external_id) """ return cast( - List[Tuple[str, str]], + list[tuple[str, str]], await self.db_pool.simple_select_list( table="user_external_ids", keyvalues={"user_id": mxid}, @@ -1075,7 +1075,7 @@ def _count_users(txn: LoggingTransaction) -> int: return await self.db_pool.runInteraction("count_users", _count_users) - async def count_daily_user_type(self) -> Dict[str, int]: + async def count_daily_user_type(self) -> dict[str, int]: """ Counts 1) native non guest users 2) native guests users @@ -1083,7 +1083,7 @@ async def count_daily_user_type(self) -> Dict[str, int]: who registered on the homeserver in the past 24 hours """ - def _count_daily_user_type(txn: LoggingTransaction) -> Dict[str, int]: + def _count_daily_user_type(txn: LoggingTransaction) -> dict[str, int]: yesterday = int(self.clock.time()) - (60 * 60 * 24) sql = """ @@ -1116,7 +1116,7 @@ def _count_users(txn: LoggingTransaction) -> int: WHERE appservice_id IS NULL """ ) - (count,) = cast(Tuple[int], txn.fetchone()) + (count,) = cast(tuple[int], txn.fetchone()) return count return await self.db_pool.runInteraction("count_users", _count_users) @@ -1196,9 +1196,9 @@ async def user_add_threepid( {"user_id": user_id, "validated_at": validated_at, "added_at": added_at}, ) - async def user_get_threepids(self, user_id: str) -> List[ThreepidResult]: + async def user_get_threepids(self, user_id: str) -> list[ThreepidResult]: results = cast( 
- List[Tuple[str, str, int, int]], + list[tuple[str, str, int, int]], await self.db_pool.simple_select_list( "user_threepids", keyvalues={"user_id": user_id}, @@ -1253,7 +1253,7 @@ async def add_user_bound_threepid( desc="add_user_bound_threepid", ) - async def user_get_bound_threepids(self, user_id: str) -> List[Tuple[str, str]]: + async def user_get_bound_threepids(self, user_id: str) -> list[tuple[str, str]]: """Get the threepids that a user has bound to an identity server through the homeserver The homeserver remembers where binds to an identity server occurred. Using this method can retrieve those threepids. @@ -1267,7 +1267,7 @@ async def user_get_bound_threepids(self, user_id: str) -> List[Tuple[str, str]]: address: The address of the threepid (e.g "bob@example.com") """ return cast( - List[Tuple[str, str]], + list[tuple[str, str]], await self.db_pool.simple_select_list( table="user_threepid_id_server", keyvalues={"user_id": user_id}, @@ -1302,7 +1302,7 @@ async def remove_user_bound_threepid( async def get_id_servers_user_bound( self, user_id: str, medium: str, address: str - ) -> List[str]: + ) -> list[str]: """Get the list of identity servers that the server proxied bind requests to for given user and threepid @@ -1686,7 +1686,7 @@ async def use_registration_token(self, token: str) -> None: """ def _use_registration_token_txn(txn: LoggingTransaction) -> None: - # Normally, res is Optional[Dict[str, Any]]. + # Normally, res is Optional[dict[str, Any]]. # Override type because the return type is only optional if # allow_none is True, and we don't want mypy throwing errors # about None not being indexable. @@ -1716,7 +1716,7 @@ def _use_registration_token_txn(txn: LoggingTransaction) -> None: async def get_registration_tokens( self, valid: Optional[bool] = None - ) -> List[Tuple[str, Optional[int], int, int, Optional[int]]]: + ) -> list[tuple[str, Optional[int], int, int, Optional[int]]]: """List all registration tokens. Used by the admin API. Args: @@ -1735,7 +1735,7 @@ async def get_registration_tokens( def select_registration_tokens_txn( txn: LoggingTransaction, now: int, valid: Optional[bool] - ) -> List[Tuple[str, Optional[int], int, int, Optional[int]]]: + ) -> list[tuple[str, Optional[int], int, int, Optional[int]]]: if valid is None: # Return all tokens regardless of validity txn.execute( @@ -1765,7 +1765,7 @@ def select_registration_tokens_txn( txn.execute(sql, [now]) return cast( - List[Tuple[str, Optional[int], int, int, Optional[int]]], txn.fetchall() + list[tuple[str, Optional[int], int, int, Optional[int]]], txn.fetchall() ) return await self.db_pool.runInteraction( @@ -1775,7 +1775,7 @@ def select_registration_tokens_txn( valid, ) - async def get_one_registration_token(self, token: str) -> Optional[Dict[str, Any]]: + async def get_one_registration_token(self, token: str) -> Optional[dict[str, Any]]: """Get info about the given registration token. Used by the admin API. Args: @@ -1892,8 +1892,8 @@ def _create_registration_token_txn(txn: LoggingTransaction) -> bool: ) async def update_registration_token( - self, token: str, updatevalues: Dict[str, Optional[int]] - ) -> Optional[Dict[str, Any]]: + self, token: str, updatevalues: dict[str, Optional[int]] + ) -> Optional[dict[str, Any]]: """Update a registration token. Used by the admin API. 
Args: @@ -1909,7 +1909,7 @@ async def update_registration_token( def _update_registration_token_txn( txn: LoggingTransaction, - ) -> Optional[Dict[str, Any]]: + ) -> Optional[dict[str, Any]]: try: self.db_pool.simple_update_one_txn( txn, @@ -2457,7 +2457,7 @@ async def user_delete_access_tokens( user_id: str, except_token_id: Optional[int] = None, device_id: Optional[str] = None, - ) -> List[Tuple[str, int, Optional[str]]]: + ) -> list[tuple[str, int, Optional[str]]]: """ Invalidate access and refresh tokens belonging to a user @@ -2471,14 +2471,14 @@ async def user_delete_access_tokens( A tuple of (token, token id, device id) for each of the deleted tokens """ - def f(txn: LoggingTransaction) -> List[Tuple[str, int, Optional[str]]]: + def f(txn: LoggingTransaction) -> list[tuple[str, int, Optional[str]]]: keyvalues = {"user_id": user_id} if device_id is not None: keyvalues["device_id"] = device_id items = keyvalues.items() where_clause = " AND ".join(k + " = ?" for k, _ in items) - values: List[Union[str, int]] = [v for _, v in items] + values: list[Union[str, int]] = [v for _, v in items] # Conveniently, refresh_tokens and access_tokens both use the user_id and device_id fields. Only caveat # is the `except_token_id` param that is tricky to get right, so for now we're just using the same where # clause and values before we handle that. This seems to be only used in the "set password" handler. @@ -2517,7 +2517,7 @@ async def user_delete_access_tokens_for_devices( self, user_id: str, device_ids: StrCollection, - ) -> List[Tuple[str, int, Optional[str]]]: + ) -> list[tuple[str, int, Optional[str]]]: """ Invalidate access and refresh tokens belonging to a user @@ -2530,7 +2530,7 @@ async def user_delete_access_tokens_for_devices( def user_delete_access_tokens_for_devices_txn( txn: LoggingTransaction, batch_device_ids: StrCollection - ) -> List[Tuple[str, int, Optional[str]]]: + ) -> list[tuple[str, int, Optional[str]]]: self.db_pool.simple_delete_many_txn( txn, table="refresh_tokens", @@ -2686,7 +2686,7 @@ async def _background_update_set_deactivated_flag( def _background_update_set_deactivated_flag_txn( txn: LoggingTransaction, - ) -> Tuple[bool, int]: + ) -> tuple[bool, int]: txn.execute( """ SELECT diff --git a/synapse/storage/databases/main/relations.py b/synapse/storage/databases/main/relations.py index ea746e05118..529102c2451 100644 --- a/synapse/storage/databases/main/relations.py +++ b/synapse/storage/databases/main/relations.py @@ -22,15 +22,10 @@ from typing import ( TYPE_CHECKING, Collection, - Dict, - FrozenSet, Iterable, - List, Mapping, Optional, Sequence, - Set, - Tuple, Union, cast, ) @@ -179,7 +174,7 @@ async def get_relations_for_event( from_token: Optional[StreamToken] = None, to_token: Optional[StreamToken] = None, recurse: bool = False, - ) -> Tuple[Sequence[_RelatedEvent], Optional[StreamToken]]: + ) -> tuple[Sequence[_RelatedEvent], Optional[StreamToken]]: """Get a list of relations for an event, ordered by topological ordering. 
Args: @@ -209,7 +204,7 @@ async def get_relations_for_event( assert limit >= 0 where_clause = ["room_id = ?"] - where_args: List[Union[str, int]] = [room_id] + where_args: list[Union[str, int]] = [room_id] is_redacted = event.internal_metadata.is_redacted() if relation_type is not None: @@ -281,14 +276,14 @@ async def get_relations_for_event( def _get_recent_references_for_event_txn( txn: LoggingTransaction, - ) -> Tuple[List[_RelatedEvent], Optional[StreamToken]]: + ) -> tuple[list[_RelatedEvent], Optional[StreamToken]]: txn.execute(sql, [event.event_id] + where_args + [limit + 1]) events = [] - topo_orderings: List[int] = [] - stream_orderings: List[int] = [] + topo_orderings: list[int] = [] + stream_orderings: list[int] = [] for event_id, relation_type, sender, topo_ordering, stream_ordering in cast( - List[Tuple[str, str, str, int, int]], txn + list[tuple[str, str, str, int, int]], txn ): # Do not include edits for redacted events as they leak event # content. @@ -329,8 +324,8 @@ def _get_recent_references_for_event_txn( async def get_all_relations_for_event_with_types( self, event_id: str, - relation_types: List[str], - ) -> List[str]: + relation_types: list[str], + ) -> list[str]: """Get the event IDs of all events that have a relation to the given event with one of the given relation types. @@ -345,9 +340,9 @@ async def get_all_relations_for_event_with_types( def get_all_relation_ids_for_event_with_types_txn( txn: LoggingTransaction, - ) -> List[str]: + ) -> list[str]: rows = cast( - List[Tuple[str]], + list[tuple[str]], self.db_pool.simple_select_many_txn( txn=txn, table="event_relations", @@ -368,7 +363,7 @@ def get_all_relation_ids_for_event_with_types_txn( async def get_all_relations_for_event( self, event_id: str, - ) -> List[str]: + ) -> list[str]: """Get the event IDs of all events that have a relation to the given event. 
Args: @@ -380,9 +375,9 @@ async def get_all_relations_for_event( def get_all_relation_ids_for_event_txn( txn: LoggingTransaction, - ) -> List[str]: + ) -> list[str]: rows = cast( - List[Tuple[str]], + list[tuple[str]], self.db_pool.simple_select_list_txn( txn=txn, table="event_relations", @@ -462,7 +457,7 @@ async def event_is_target_of_relation(self, parent_id: str) -> bool: return result is not None @cached() # type: ignore[synapse-@cached-mutable] - async def get_references_for_event(self, event_id: str) -> List[JsonDict]: + async def get_references_for_event(self, event_id: str) -> list[JsonDict]: raise NotImplementedError() @cachedList(cached_method_name="get_references_for_event", list_name="event_ids") @@ -498,12 +493,12 @@ async def get_references_for_events( def _get_references_for_events_txn( txn: LoggingTransaction, - ) -> Mapping[str, List[_RelatedEvent]]: + ) -> Mapping[str, list[_RelatedEvent]]: txn.execute(sql, args) - result: Dict[str, List[_RelatedEvent]] = {} + result: dict[str, list[_RelatedEvent]] = {} for relates_to_id, event_id, sender in cast( - List[Tuple[str, str, str]], txn + list[tuple[str, str, str]], txn ): result.setdefault(relates_to_id, []).append( _RelatedEvent(event_id, sender) @@ -578,14 +573,14 @@ async def get_applicable_edits( ORDER by edit.origin_server_ts, edit.event_id """ - def _get_applicable_edits_txn(txn: LoggingTransaction) -> Dict[str, str]: + def _get_applicable_edits_txn(txn: LoggingTransaction) -> dict[str, str]: clause, args = make_in_list_sql_clause( txn.database_engine, "relates_to_id", event_ids ) args.append(RelationTypes.REPLACE) txn.execute(sql % (clause,), args) - return dict(cast(Iterable[Tuple[str, str]], txn.fetchall())) + return dict(cast(Iterable[tuple[str, str]], txn.fetchall())) edit_ids = await self.db_pool.runInteraction( "get_applicable_edits", _get_applicable_edits_txn @@ -603,14 +598,14 @@ def _get_applicable_edits_txn(txn: LoggingTransaction) -> Dict[str, str]: } @cached() # type: ignore[synapse-@cached-mutable] - def get_thread_summary(self, event_id: str) -> Optional[Tuple[int, EventBase]]: + def get_thread_summary(self, event_id: str) -> Optional[tuple[int, EventBase]]: raise NotImplementedError() # TODO: This returns a mutable object, which is generally bad. @cachedList(cached_method_name="get_thread_summary", list_name="event_ids") # type: ignore[synapse-@cached-mutable] async def get_thread_summaries( self, event_ids: Collection[str] - ) -> Mapping[str, Optional[Tuple[int, EventBase]]]: + ) -> Mapping[str, Optional[tuple[int, EventBase]]]: """Get the number of threaded replies and the latest reply (if any) for the given events. Args: @@ -627,7 +622,7 @@ async def get_thread_summaries( def _get_thread_summaries_txn( txn: LoggingTransaction, - ) -> Tuple[Dict[str, int], Dict[str, str]]: + ) -> tuple[dict[str, int], dict[str, str]]: # Fetch the count of threaded events and the latest event ID. # TODO Should this only allow m.room.message events. 
if isinstance(self.database_engine, PostgresEngine): @@ -698,7 +693,7 @@ def _get_thread_summaries_txn( args.append(RelationTypes.THREAD) txn.execute(sql % (clause,), args) - counts = dict(cast(List[Tuple[str, int]], txn.fetchall())) + counts = dict(cast(list[tuple[str, int]], txn.fetchall())) return counts, latest_event_ids @@ -726,8 +721,8 @@ def _get_thread_summaries_txn( async def get_threaded_messages_per_user( self, event_ids: Collection[str], - users: FrozenSet[str] = frozenset(), - ) -> Dict[Tuple[str, str], int]: + users: frozenset[str] = frozenset(), + ) -> dict[tuple[str, str], int]: """Get the number of threaded replies for a set of users. This is used, in conjunction with get_thread_summaries, to calculate an @@ -759,7 +754,7 @@ async def get_threaded_messages_per_user( def _get_threaded_messages_per_user_txn( txn: LoggingTransaction, - ) -> Dict[Tuple[str, str], int]: + ) -> dict[tuple[str, str], int]: users_sql, users_args = make_in_list_sql_clause( self.database_engine, "child.sender", users ) @@ -799,7 +794,7 @@ async def get_threads_participated( user participated in that event's thread, otherwise false. """ - def _get_threads_participated_txn(txn: LoggingTransaction) -> Set[str]: + def _get_threads_participated_txn(txn: LoggingTransaction) -> set[str]: # Fetch whether the requester has participated or not. sql = """ SELECT DISTINCT relates_to_id @@ -830,10 +825,10 @@ def _get_threads_participated_txn(txn: LoggingTransaction) -> Set[str]: async def events_have_relations( self, - parent_ids: List[str], - relation_senders: Optional[List[str]], - relation_types: Optional[List[str]], - ) -> List[str]: + parent_ids: list[str], + relation_senders: Optional[list[str]], + relation_types: Optional[list[str]], + ) -> list[str]: """Check which events have a relationship from the given senders of the given types. @@ -856,8 +851,8 @@ async def events_have_relations( %s; """ - def _get_if_events_have_relations(txn: LoggingTransaction) -> List[str]: - clauses: List[str] = [] + def _get_if_events_have_relations(txn: LoggingTransaction) -> list[str]: + clauses: list[str] = [] clause, args = make_in_list_sql_clause( txn.database_engine, "relates_to_id", parent_ids ) @@ -936,7 +931,7 @@ async def get_threads( room_id: str, limit: int = 5, from_token: Optional[ThreadsNextBatch] = None, - ) -> Tuple[Sequence[str], Optional[ThreadsNextBatch]]: + ) -> tuple[Sequence[str], Optional[ThreadsNextBatch]]: """Get a list of thread IDs, ordered by topological ordering of their latest reply. 
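A note on the cast idiom that recurs throughout these hunks: since Python 3.9 (PEP 585), the builtin collection types are subscriptable at runtime, so `cast(list[tuple[str, int]], txn.fetchall())` needs no List/Tuple imports. A minimal, self-contained sketch (the helper name and data are illustrative, not from this patch):

    from typing import cast

    def tally(rows: list[tuple[str, int]]) -> dict[str, int]:
        # Only `cast` itself still comes from typing; list/tuple/dict are
        # parameterised directly on Python 3.9+.
        counts: dict[str, int] = {}
        for key, n in rows:
            counts[key] = counts.get(key, 0) + n
        return counts

    # e.g. counts = tally(cast(list[tuple[str, int]], cursor.fetchall()))
    assert tally([("t1", 2), ("t1", 3), ("t2", 1)]) == {"t1": 5, "t2": 1}
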
@@ -976,10 +971,10 @@ async def get_threads( def _get_threads_txn( txn: LoggingTransaction, - ) -> Tuple[List[str], Optional[ThreadsNextBatch]]: + ) -> tuple[list[str], Optional[ThreadsNextBatch]]: txn.execute(sql, (room_id, *pagination_args, limit + 1)) - rows = cast(List[Tuple[str, int, int]], txn.fetchall()) + rows = cast(list[tuple[str, int, int]], txn.fetchall()) thread_ids = [r[0] for r in rows] # If there are more events, generate the next pagination key from the diff --git a/synapse/storage/databases/main/room.py b/synapse/storage/databases/main/room.py index 9f03c084a59..7a294de558b 100644 --- a/synapse/storage/databases/main/room.py +++ b/synapse/storage/databases/main/room.py @@ -27,12 +27,8 @@ AbstractSet, Any, Collection, - Dict, - List, Mapping, Optional, - Set, - Tuple, Union, cast, ) @@ -139,7 +135,7 @@ class RoomSortOrder(Enum): @attr.s(slots=True, frozen=True, auto_attribs=True) class PartialStateResyncInfo: joined_via: Optional[str] - servers_in_room: Set[str] = attr.ib(factory=set) + servers_in_room: set[str] = attr.ib(factory=set) class RoomWorkerStore(CacheInvalidationWorkerStore): @@ -209,7 +205,7 @@ async def store_room( logger.error("store_room with room_id=%s failed: %s", room_id, e) raise StoreError(500, "Problem creating room.") - async def get_room(self, room_id: str) -> Optional[Tuple[bool, bool]]: + async def get_room(self, room_id: str) -> Optional[tuple[bool, bool]]: """Retrieve a room. Args: @@ -222,7 +218,7 @@ async def get_room(self, room_id: str) -> Optional[Tuple[bool, bool]]: or None if the room is unknown. """ row = cast( - Optional[Tuple[Optional[Union[int, bool]], Optional[Union[int, bool]]]], + Optional[tuple[Optional[Union[int, bool]], Optional[Union[int, bool]]]], await self.db_pool.simple_select_one( table="rooms", keyvalues={"room_id": room_id}, @@ -287,7 +283,7 @@ def get_room_with_stats_txn( "get_room_with_stats", get_room_with_stats_txn, room_id ) - async def get_public_room_ids(self) -> List[str]: + async def get_public_room_ids(self) -> list[str]: return await self.db_pool.simple_select_onecol( table="rooms", keyvalues={"is_public": True}, @@ -296,8 +292,8 @@ async def get_public_room_ids(self) -> List[str]: ) def _construct_room_type_where_clause( - self, room_types: Union[List[Union[str, None]], None] - ) -> Tuple[Union[str, None], list]: + self, room_types: Union[list[Union[str, None]], None] + ) -> tuple[Union[str, None], list]: if not room_types: return None, [] @@ -387,7 +383,7 @@ def _count_public_rooms_txn(txn: LoggingTransaction) -> int: """ txn.execute(sql, query_args) - return cast(Tuple[int], txn.fetchone())[0] + return cast(tuple[int], txn.fetchone())[0] return await self.db_pool.runInteraction( "count_public_rooms", _count_public_rooms_txn @@ -399,7 +395,7 @@ async def get_room_count(self) -> int: def f(txn: LoggingTransaction) -> int: sql = "SELECT count(*) FROM rooms" txn.execute(sql) - row = cast(Tuple[int], txn.fetchone()) + row = cast(tuple[int], txn.fetchone()) return row[0] return await self.db_pool.runInteraction("get_rooms", f) @@ -409,10 +405,10 @@ async def get_largest_public_rooms( network_tuple: Optional[ThirdPartyInstanceID], search_filter: Optional[dict], limit: Optional[int], - bounds: Optional[Tuple[int, str]], + bounds: Optional[tuple[int, str]], forwards: bool, ignore_non_federatable: bool = False, - ) -> List[LargestRoomStats]: + ) -> list[LargestRoomStats]: """Gets the largest public rooms (where largest is in terms of joined members, as tracked in the statistics table). 
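The `limit + 1` idiom in `get_threads` above (fetch one extra row; its presence signals another page) can be sketched in isolation as follows; the function name and token shape are hypothetical:

    from typing import Optional

    def page_threads(
        rows: list[tuple[str, int, int]], limit: int
    ) -> tuple[list[str], Optional[tuple[int, int]]]:
        # rows are (thread_id, topological_ordering, stream_ordering),
        # fetched with the SQL LIMIT bound to limit + 1.
        thread_ids = [r[0] for r in rows[:limit]]
        next_token: Optional[tuple[int, int]] = None
        if len(rows) > limit:
            # More rows exist: the orderings of the last returned row
            # become the next-batch token.
            next_token = (rows[limit - 1][1], rows[limit - 1][2])
        return thread_ids, next_token
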
@@ -433,7 +429,7 @@ async def get_largest_public_rooms( """ where_clauses = [] - query_args: List[Union[str, int]] = [] + query_args: list[Union[str, int]] = [] if network_tuple: if network_tuple.appservice_id: @@ -549,7 +545,7 @@ async def get_largest_public_rooms( def _get_largest_public_rooms_txn( txn: LoggingTransaction, - ) -> List[LargestRoomStats]: + ) -> list[LargestRoomStats]: txn.execute(sql, query_args) results = [ @@ -611,7 +607,7 @@ async def get_rooms_paginate( search_term: Optional[str], public_rooms: Optional[bool], empty_rooms: Optional[bool], - ) -> Tuple[List[Dict[str, Any]], int]: + ) -> tuple[list[dict[str, Any]], int]: """Function to retrieve a paginated list of rooms as json. Args: @@ -760,7 +756,7 @@ async def get_rooms_paginate( def _get_rooms_paginate_txn( txn: LoggingTransaction, - ) -> Tuple[List[Dict[str, Any]], int]: + ) -> tuple[list[dict[str, Any]], int]: # Add the search term into the WHERE clause # and execute the data query txn.execute(info_sql, where_args + [limit, start]) @@ -795,7 +791,7 @@ def _get_rooms_paginate_txn( # Add the search term into the WHERE clause if present txn.execute(count_sql, where_args) - room_count = cast(Tuple[int], txn.fetchone()) + room_count = cast(tuple[int], txn.fetchone()) return rooms, room_count[0] return await self.db_pool.runInteraction( @@ -909,7 +905,7 @@ async def get_retention_policy_for_room(self, room_id: str) -> RetentionPolicy: def get_retention_policy_for_room_txn( txn: LoggingTransaction, - ) -> Optional[Tuple[Optional[int], Optional[int]]]: + ) -> Optional[tuple[Optional[int], Optional[int]]]: txn.execute( """ SELECT min_lifetime, max_lifetime FROM room_retention @@ -919,7 +915,7 @@ def get_retention_policy_for_room_txn( (room_id,), ) - return cast(Optional[Tuple[Optional[int], Optional[int]]], txn.fetchone()) + return cast(Optional[tuple[Optional[int], Optional[int]]], txn.fetchone()) ret = await self.db_pool.runInteraction( "get_retention_policy_for_room", @@ -951,7 +947,7 @@ def get_retention_policy_for_room_txn( max_lifetime=max_lifetime, ) - async def get_media_mxcs_in_room(self, room_id: str) -> Tuple[List[str], List[str]]: + async def get_media_mxcs_in_room(self, room_id: str) -> tuple[list[str], list[str]]: """Retrieves all the local and remote media MXC URIs in a given room Args: @@ -963,7 +959,7 @@ async def get_media_mxcs_in_room(self, room_id: str) -> Tuple[List[str], List[st def _get_media_mxcs_in_room_txn( txn: LoggingTransaction, - ) -> Tuple[List[str], List[str]]: + ) -> tuple[list[str], list[str]]: local_mxcs, remote_mxcs = self._get_media_mxcs_in_room_txn(txn, room_id) local_media_mxcs = [] remote_media_mxcs = [] @@ -1001,7 +997,7 @@ def _quarantine_media_in_room_txn(txn: LoggingTransaction) -> int: def _get_media_mxcs_in_room_txn( self, txn: LoggingTransaction, room_id: str - ) -> Tuple[List[str], List[Tuple[str, str]]]: + ) -> tuple[list[str], list[tuple[str, str]]]: """Retrieves all the local and remote media MXC URIs in a given room Returns: @@ -1107,7 +1103,7 @@ def _quarantine_media_by_user_txn(txn: LoggingTransaction) -> int: def _get_media_ids_by_user_txn( self, txn: LoggingTransaction, user_id: str, filter_quarantined: bool = True - ) -> List[str]: + ) -> list[str]: """Retrieves local media IDs by a given user Args: @@ -1137,8 +1133,8 @@ def _get_media_ids_by_user_txn( def _quarantine_local_media_txn( self, txn: LoggingTransaction, - hashes: Set[str], - media_ids: Set[str], + hashes: set[str], + media_ids: set[str], quarantined_by: Optional[str], ) -> int: """Quarantine and 
unquarantine local media items. @@ -1192,8 +1188,8 @@ def _quarantine_local_media_txn( def _quarantine_remote_media_txn( self, txn: LoggingTransaction, - hashes: Set[str], - media: Set[Tuple[str, str]], + hashes: set[str], + media: set[tuple[str, str]], quarantined_by: Optional[str], ) -> int: """Quarantine and unquarantine remote items @@ -1240,8 +1236,8 @@ def _quarantine_remote_media_txn( def _quarantine_media_txn( self, txn: LoggingTransaction, - local_mxcs: List[str], - remote_mxcs: List[Tuple[str, str]], + local_mxcs: list[str], + remote_mxcs: list[tuple[str, str]], quarantined_by: Optional[str], ) -> int: """Quarantine and unquarantine local and remote media items @@ -1346,7 +1342,7 @@ async def unblock_room(self, room_id: str) -> None: async def get_rooms_for_retention_period_in_range( self, min_ms: Optional[int], max_ms: Optional[int], include_null: bool = False - ) -> Dict[str, RetentionPolicy]: + ) -> dict[str, RetentionPolicy]: """Retrieves all of the rooms within the given retention range. Optionally includes the rooms which don't have a retention policy. @@ -1368,7 +1364,7 @@ async def get_rooms_for_retention_period_in_range( def get_rooms_for_retention_period_in_range_txn( txn: LoggingTransaction, - ) -> Dict[str, RetentionPolicy]: + ) -> dict[str, RetentionPolicy]: range_conditions = [] args = [] @@ -1464,10 +1460,10 @@ async def get_partial_state_room_resync_info( A dictionary of rooms with partial state, with room IDs as keys and lists of servers in rooms as values. """ - room_servers: Dict[str, PartialStateResyncInfo] = {} + room_servers: dict[str, PartialStateResyncInfo] = {} rows = cast( - List[Tuple[str, str]], + list[tuple[str, str]], await self.db_pool.simple_select_list( table="partial_state_rooms", keyvalues={}, @@ -1480,7 +1476,7 @@ async def get_partial_state_room_resync_info( room_servers[room_id] = PartialStateResyncInfo(joined_via=joined_via) rows = cast( - List[Tuple[str, str]], + list[tuple[str, str]], await self.db_pool.simple_select_list( "partial_state_rooms_servers", keyvalues=None, @@ -1533,7 +1529,7 @@ async def is_partial_state_room_batched( """ rows = cast( - List[Tuple[str]], + list[tuple[str]], await self.db_pool.simple_select_many_batch( table="partial_state_rooms", column="room_id", @@ -1571,7 +1567,7 @@ def _get_partial_rooms_for_user_txn( async def get_join_event_id_and_device_lists_stream_id_for_partial_state( self, room_id: str - ) -> Tuple[str, int]: + ) -> tuple[str, int]: """Get the event ID of the initial join that started the partial join, and the device list stream ID at the point we started the partial join. @@ -1583,7 +1579,7 @@ async def get_join_event_id_and_device_lists_stream_id_for_partial_state( """ return cast( - Tuple[str, int], + tuple[str, int], await self.db_pool.simple_select_one( table="partial_state_rooms", keyvalues={"room_id": room_id}, @@ -1602,7 +1598,7 @@ def get_un_partial_stated_rooms_id_generator(self) -> MultiWriterIdGenerator: async def get_un_partial_stated_rooms_between( self, last_id: int, current_id: int, room_ids: Collection[str] - ) -> Set[str]: + ) -> set[str]: """Get all rooms that got un partial stated between `last_id` exclusive and `current_id` inclusive. @@ -1615,7 +1611,7 @@ async def get_un_partial_stated_rooms_between( def _get_un_partial_stated_rooms_between_txn( txn: LoggingTransaction, - ) -> Set[str]: + ) -> set[str]: sql = """ SELECT DISTINCT room_id FROM un_partial_stated_room_stream WHERE ? < stream_id AND stream_id <= ? 
AND @@ -1636,7 +1632,7 @@ def _get_un_partial_stated_rooms_between_txn( async def get_un_partial_stated_rooms_from_stream( self, instance_name: str, last_id: int, current_id: int, limit: int - ) -> Tuple[List[Tuple[int, Tuple[str]]], int, bool]: + ) -> tuple[list[tuple[int, tuple[str]]], int, bool]: """Get updates for un partial stated rooms replication stream. Args: @@ -1663,7 +1659,7 @@ async def get_un_partial_stated_rooms_from_stream( def get_un_partial_stated_rooms_from_stream_txn( txn: LoggingTransaction, - ) -> Tuple[List[Tuple[int, Tuple[str]]], int, bool]: + ) -> tuple[list[tuple[int, tuple[str]]], int, bool]: sql = """ SELECT stream_id, room_id FROM un_partial_stated_room_stream @@ -1686,7 +1682,7 @@ def get_un_partial_stated_rooms_from_stream_txn( get_un_partial_stated_rooms_from_stream_txn, ) - async def get_event_report(self, report_id: int) -> Optional[Dict[str, Any]]: + async def get_event_report(self, report_id: int) -> Optional[dict[str, Any]]: """Retrieve an event report Args: @@ -1698,7 +1694,7 @@ async def get_event_report(self, report_id: int) -> Optional[Dict[str, Any]]: def _get_event_report_txn( txn: LoggingTransaction, report_id: int - ) -> Optional[Dict[str, Any]]: + ) -> Optional[dict[str, Any]]: sql = """ SELECT er.id, @@ -1755,7 +1751,7 @@ async def get_event_reports_paginate( user_id: Optional[str] = None, room_id: Optional[str] = None, event_sender_user_id: Optional[str] = None, - ) -> Tuple[List[Dict[str, Any]], int]: + ) -> tuple[list[dict[str, Any]], int]: """Retrieve a paginated list of event reports Args: @@ -1775,9 +1771,9 @@ async def get_event_reports_paginate( def _get_event_reports_paginate_txn( txn: LoggingTransaction, - ) -> Tuple[List[Dict[str, Any]], int]: + ) -> tuple[list[dict[str, Any]], int]: filters = [] - args: List[object] = [] + args: list[object] = [] if user_id: filters.append("er.user_id LIKE ?") @@ -1810,7 +1806,7 @@ def _get_event_reports_paginate_txn( {} """.format(where_clause) txn.execute(sql, args) - count = cast(Tuple[int], txn.fetchone())[0] + count = cast(tuple[int], txn.fetchone())[0] sql = """ SELECT @@ -2214,7 +2210,7 @@ async def _remove_tombstoned_rooms_from_directory( last_room = progress.get("room_id", "") - def _get_rooms(txn: LoggingTransaction) -> List[str]: + def _get_rooms(txn: LoggingTransaction) -> list[str]: txn.execute( """ SELECT room_id @@ -2460,7 +2456,7 @@ def __init__( self._instance_name = hs.get_instance_name() async def upsert_room_on_join( - self, room_id: str, room_version: RoomVersion, state_events: List[EventBase] + self, room_id: str, room_version: RoomVersion, state_events: list[EventBase] ) -> None: """Ensure that the room is stored in the table diff --git a/synapse/storage/databases/main/roommember.py b/synapse/storage/databases/main/roommember.py index 65caf4b1eaa..1e22ab4e6d6 100644 --- a/synapse/storage/databases/main/roommember.py +++ b/synapse/storage/databases/main/roommember.py @@ -24,15 +24,10 @@ TYPE_CHECKING, AbstractSet, Collection, - Dict, - FrozenSet, Iterable, - List, Mapping, Optional, Sequence, - Set, - Tuple, Union, cast, ) @@ -187,7 +182,7 @@ async def get_users_in_room(self, room_id: str) -> Sequence[str]: desc="get_users_in_room", ) - def get_users_in_room_txn(self, txn: LoggingTransaction, room_id: str) -> List[str]: + def get_users_in_room_txn(self, txn: LoggingTransaction, room_id: str) -> list[str]: """Returns a list of users in the room.""" return self.db_pool.simple_select_onecol_txn( @@ -242,7 +237,7 @@ async def get_subset_users_in_room_with_profiles( def 
_get_subset_users_in_room_with_profiles( txn: LoggingTransaction, - ) -> Dict[str, ProfileInfo]: + ) -> dict[str, ProfileInfo]: clause, ids = make_in_list_sql_clause( self.database_engine, "c.state_key", user_ids ) @@ -287,7 +282,7 @@ async def get_users_in_room_with_profiles( def _get_users_in_room_with_profiles( txn: LoggingTransaction, - ) -> Dict[str, ProfileInfo]: + ) -> dict[str, ProfileInfo]: sql = """ SELECT state_key, display_name, avatar_url FROM room_memberships as m INNER JOIN current_state_events as c @@ -328,14 +323,14 @@ async def get_room_summary(self, room_id: str) -> Mapping[str, MemberSummary]: def _get_room_summary_txn( txn: LoggingTransaction, - ) -> Dict[str, MemberSummary]: + ) -> dict[str, MemberSummary]: # first get counts. # We do this all in one transaction to keep the cache small. # FIXME: get rid of this when we have room_stats counts = self._get_member_counts_txn(txn, room_id) - res: Dict[str, MemberSummary] = {} + res: dict[str, MemberSummary] = {} for membership, count in counts.items(): res.setdefault(membership, MemberSummary([], count)) @@ -392,7 +387,7 @@ async def get_member_counts(self, room_id: str) -> Mapping[str, int]: def _get_member_counts_txn( self, txn: LoggingTransaction, room_id: str - ) -> Dict[str, int]: + ) -> dict[str, int]: """Get a mapping of number of users by membership""" # Note, rejected events will have a null membership field, so @@ -473,7 +468,7 @@ async def get_rooms_for_local_user_where_membership_is( user_id: str, membership_list: Collection[str], excluded_rooms: StrCollection = (), - ) -> List[RoomsForUser]: + ) -> list[RoomsForUser]: """Get all the rooms for this *local* user where the membership for this user matches one in the membership list. @@ -536,8 +531,8 @@ def _get_rooms_for_local_user_where_membership_is_txn( self, txn: LoggingTransaction, user_id: str, - membership_list: List[str], - ) -> List[RoomsForUser]: + membership_list: list[str], + ) -> list[RoomsForUser]: """Get all the rooms for this *local* user where the membership for this user matches one in the membership list. @@ -603,12 +598,12 @@ async def get_local_users_in_room(self, room_id: str) -> Sequence[str]: async def get_local_users_related_to_room( self, room_id: str - ) -> List[Tuple[str, str]]: + ) -> list[tuple[str, str]]: """ Retrieves a list of the current roommembers who are local to the server and their membership status. """ return cast( - List[Tuple[str, str]], + list[tuple[str, str]], await self.db_pool.simple_select_list( table="local_current_membership", keyvalues={"room_id": room_id}, @@ -660,7 +655,7 @@ async def is_server_notice_room(self, room_id: str) -> bool: async def get_local_current_membership_for_user_in_room( self, user_id: str, room_id: str - ) -> Tuple[Optional[str], Optional[str]]: + ) -> tuple[Optional[str], Optional[str]]: """Retrieve the current local membership state and event ID for a user in a room. Args: @@ -677,7 +672,7 @@ async def get_local_current_membership_for_user_in_room( raise SynapseError(HTTPStatus.BAD_REQUEST, message, errcode=Codes.BAD_JSON) results = cast( - Optional[Tuple[str, str]], + Optional[tuple[str, str]], await self.db_pool.simple_select_one( "local_current_membership", {"room_id": room_id, "user_id": user_id}, @@ -693,7 +688,7 @@ async def get_local_current_membership_for_user_in_room( async def get_users_server_still_shares_room_with( self, user_ids: Collection[str] - ) -> Set[str]: + ) -> set[str]: """Given a list of users return the set that the server still share a room with. 
""" @@ -711,7 +706,7 @@ def get_users_server_still_shares_room_with_txn( self, txn: LoggingTransaction, user_ids: Collection[str], - ) -> Set[str]: + ) -> set[str]: if not user_ids: return set() @@ -734,7 +729,7 @@ def get_users_server_still_shares_room_with_txn( async def get_rooms_user_currently_banned_from( self, user_id: str - ) -> FrozenSet[str]: + ) -> frozenset[str]: """Returns a set of room_ids the user is currently banned from. If a remote user only returns rooms this server is currently @@ -754,7 +749,7 @@ async def get_rooms_user_currently_banned_from( return frozenset(room_ids) @cached(max_entries=500000, iterable=True) - async def get_rooms_for_user(self, user_id: str) -> FrozenSet[str]: + async def get_rooms_for_user(self, user_id: str) -> frozenset[str]: """Returns a set of room_ids the user is currently joined to. If a remote user only returns rooms this server is currently @@ -780,7 +775,7 @@ async def get_rooms_for_user(self, user_id: str) -> FrozenSet[str]: ) async def _get_rooms_for_users( self, user_ids: Collection[str] - ) -> Mapping[str, FrozenSet[str]]: + ) -> Mapping[str, frozenset[str]]: """A batched version of `get_rooms_for_user`. Returns: @@ -788,7 +783,7 @@ async def _get_rooms_for_users( """ rows = cast( - List[Tuple[str, str]], + list[tuple[str, str]], await self.db_pool.simple_select_many_batch( table="current_state_events", column="state_key", @@ -805,7 +800,7 @@ async def _get_rooms_for_users( ), ) - user_rooms: Dict[str, Set[str]] = {user_id: set() for user_id in user_ids} + user_rooms: dict[str, set[str]] = {user_id: set() for user_id in user_ids} for state_key, room_id in rows: user_rooms[state_key].add(room_id) @@ -814,11 +809,11 @@ async def _get_rooms_for_users( async def get_rooms_for_users( self, user_ids: Collection[str] - ) -> Dict[str, FrozenSet[str]]: + ) -> dict[str, frozenset[str]]: """A batched wrapper around `_get_rooms_for_users`, to prevent locking other calls to `get_rooms_for_user` for large user lists. """ - all_user_rooms: Dict[str, FrozenSet[str]] = {} + all_user_rooms: dict[str, frozenset[str]] = {} # 250 users is pretty arbitrary but the data can be quite large if users # are in many rooms. 
@@ -848,7 +843,7 @@ async def _do_users_share_a_room( def do_users_share_a_room_txn( txn: LoggingTransaction, user_ids: Collection[str] - ) -> Dict[str, bool]: + ) -> dict[str, bool]: clause, args = make_in_list_sql_clause( self.database_engine, "state_key", user_ids ) @@ -882,7 +877,7 @@ def do_users_share_a_room_txn( async def do_users_share_a_room( self, user_id: str, other_user_ids: Collection[str] - ) -> Set[str]: + ) -> set[str]: """Return the set of users who share a room with the first users""" user_dict = await self._do_users_share_a_room(user_id, other_user_ids) @@ -911,7 +906,7 @@ async def _do_users_share_a_room_joined_or_invited( def do_users_share_a_room_joined_or_invited_txn( txn: LoggingTransaction, user_ids: Collection[str] - ) -> Dict[str, bool]: + ) -> dict[str, bool]: clause, args = make_in_list_sql_clause( self.database_engine, "state_key", user_ids ) @@ -947,7 +942,7 @@ def do_users_share_a_room_joined_or_invited_txn( async def do_users_share_a_room_joined_or_invited( self, user_id: str, other_user_ids: Collection[str] - ) -> Set[str]: + ) -> set[str]: """Return the set of users who share a room with the first users via being either joined or invited""" user_dict = await self._do_users_share_a_room_joined_or_invited( @@ -956,11 +951,11 @@ async def do_users_share_a_room_joined_or_invited( return {u for u, share_room in user_dict.items() if share_room} - async def get_users_who_share_room_with_user(self, user_id: str) -> Set[str]: + async def get_users_who_share_room_with_user(self, user_id: str) -> set[str]: """Returns the set of users who share a room with `user_id`""" room_ids = await self.get_rooms_for_user(user_id) - user_who_share_room: Set[str] = set() + user_who_share_room: set[str] = set() for room_id in room_ids: user_ids = await self.get_users_in_room(room_id) user_who_share_room.update(user_ids) @@ -969,8 +964,8 @@ async def get_users_who_share_room_with_user(self, user_id: str) -> Set[str]: @cached(cache_context=True, iterable=True) async def get_mutual_rooms_between_users( - self, user_ids: FrozenSet[str], cache_context: _CacheContext - ) -> FrozenSet[str]: + self, user_ids: frozenset[str], cache_context: _CacheContext + ) -> frozenset[str]: """ Returns the set of rooms that all users in `user_ids` share. @@ -979,7 +974,7 @@ async def get_mutual_rooms_between_users( overlapping joined rooms for. cache_context """ - shared_room_ids: Optional[FrozenSet[str]] = None + shared_room_ids: Optional[frozenset[str]] = None for user_id in user_ids: room_ids = await self.get_rooms_for_user( user_id, on_invalidate=cache_context.invalidate @@ -993,7 +988,7 @@ async def get_mutual_rooms_between_users( async def get_joined_user_ids_from_state( self, room_id: str, state: StateMap[str] - ) -> Set[str]: + ) -> set[str]: """ For a given set of state IDs, get a set of user IDs in the room. 
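Several transactions above build their WHERE clauses with `make_in_list_sql_clause`. A rough, SQLite-flavoured sketch of what such a helper returns (the real one is engine-aware, with array-based forms on PostgreSQL):

    from typing import Collection

    def make_in_clause(column: str, values: Collection[str]) -> tuple[str, list[str]]:
        # Parameterised IN clause plus its argument list; callers splice the
        # clause into their SQL and pass the args to txn.execute.
        placeholders = ", ".join("?" for _ in values)
        return f"{column} IN ({placeholders})", list(values)

    clause, args = make_in_clause("state_key", ["@a:hs", "@b:hs"])
    # -> ("state_key IN (?, ?)", ["@a:hs", "@b:hs"])
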
@@ -1050,7 +1045,7 @@ async def get_joined_user_ids_from_state( ) def _get_user_id_from_membership_event_id( self, event_id: str - ) -> Optional[Tuple[str, ProfileInfo]]: + ) -> Optional[tuple[str, ProfileInfo]]: raise NotImplementedError() @cachedList( @@ -1071,7 +1066,7 @@ async def _get_user_ids_from_membership_event_ids( """ rows = cast( - List[Tuple[str, str]], + list[tuple[str, str]], await self.db_pool.simple_select_many_batch( table="room_memberships", column="event_id", @@ -1148,7 +1143,7 @@ async def get_current_hosts_in_room(self, room_id: str) -> AbstractSet[str]: # For PostgreSQL we can use a regex to pull out the domains from the # joined users in `current_state_events` via regex. - def get_current_hosts_in_room_txn(txn: LoggingTransaction) -> Set[str]: + def get_current_hosts_in_room_txn(txn: LoggingTransaction) -> set[str]: sql = """ SELECT DISTINCT substring(state_key FROM '@[^:]*:(.*)$') FROM current_state_events @@ -1165,7 +1160,7 @@ def get_current_hosts_in_room_txn(txn: LoggingTransaction) -> Set[str]: ) @cached(iterable=True, max_entries=10000) - async def get_current_hosts_in_room_ordered(self, room_id: str) -> Tuple[str, ...]: + async def get_current_hosts_in_room_ordered(self, room_id: str) -> tuple[str, ...]: """ Get current hosts in room based on current state. @@ -1201,7 +1196,7 @@ async def get_current_hosts_in_room_ordered(self, room_id: str) -> Tuple[str, .. def get_current_hosts_in_room_ordered_txn( txn: LoggingTransaction, - ) -> Tuple[str, ...]: + ) -> tuple[str, ...]: # Returns a list of servers currently joined in the room sorted by # longest in the room first (aka. with the lowest depth). The # heuristic of sorting by servers who have been in the room the @@ -1245,7 +1240,7 @@ async def _get_approximate_current_memberships_in_room( """ rows = cast( - List[Tuple[str, Optional[str]]], + list[tuple[str, Optional[str]]], await self.db_pool.simple_select_list( "current_state_events", keyvalues={"room_id": room_id}, @@ -1297,7 +1292,7 @@ async def get_forgotten_rooms_for_user(self, user_id: str) -> AbstractSet[str]: The forgotten rooms. """ - def _get_forgotten_rooms_for_user_txn(txn: LoggingTransaction) -> Set[str]: + def _get_forgotten_rooms_for_user_txn(txn: LoggingTransaction) -> set[str]: # This is a slightly convoluted query that first looks up all rooms # that the user has forgotten in the past, then rechecks that list # to see if any have subsequently been updated. This is done so that @@ -1348,7 +1343,7 @@ async def is_locally_forgotten_room(self, room_id: str) -> bool: # If any rows still exist it means someone has not forgotten this room yet return not rows[0][0] - async def get_rooms_user_has_been_in(self, user_id: str) -> Set[str]: + async def get_rooms_user_has_been_in(self, user_id: str) -> set[str]: """Get all rooms that the user has ever been in. Args: @@ -1369,7 +1364,7 @@ async def get_rooms_user_has_been_in(self, user_id: str) -> Set[str]: async def get_membership_event_ids_for_user( self, user_id: str, room_id: str - ) -> Set[str]: + ) -> set[str]: """Get all event_ids for the given user and room. 
Args: @@ -1409,7 +1404,7 @@ async def get_membership_from_event_ids( """ rows = cast( - List[Tuple[str, str, str]], + list[tuple[str, str, str]], await self.db_pool.simple_select_many_batch( table="room_memberships", column="event_id", @@ -1533,7 +1528,7 @@ async def get_sliding_sync_rooms_for_user_from_membership_snapshots( def _txn( txn: LoggingTransaction, - ) -> Dict[str, RoomsForUserSlidingSync]: + ) -> dict[str, RoomsForUserSlidingSync]: # XXX: If you use any new columns that can change (like from # `sliding_sync_joined_rooms` or `forgotten`), make sure to bust the # `get_sliding_sync_rooms_for_user_from_membership_snapshots` cache in the @@ -1582,7 +1577,7 @@ async def get_sliding_sync_self_leave_rooms_after_to_token( self, user_id: str, to_token: StreamToken, - ) -> Dict[str, RoomsForUserSlidingSync]: + ) -> dict[str, RoomsForUserSlidingSync]: """ Get all the self-leave rooms for a user after the `to_token` (outside the token range) that are potentially relevant[1] and needed to handle a sliding sync @@ -1614,7 +1609,7 @@ async def get_sliding_sync_self_leave_rooms_after_to_token( def _txn( txn: LoggingTransaction, - ) -> Dict[str, RoomsForUserSlidingSync]: + ) -> dict[str, RoomsForUserSlidingSync]: sql = """ SELECT m.room_id, m.sender, m.membership, m.membership_event_id, r.room_version, @@ -1641,7 +1636,7 @@ def _txn( txn.execute(sql, (user_id, min_to_token_position)) # Map from room_id to membership info - room_membership_for_user_map: Dict[str, RoomsForUserSlidingSync] = {} + room_membership_for_user_map: dict[str, RoomsForUserSlidingSync] = {} for row in txn: room_for_user = RoomsForUserSlidingSync( room_id=row[0], @@ -1728,7 +1723,7 @@ def get_sliding_sync_room_for_user_txn( async def get_sliding_sync_room_for_user_batch( self, user_id: str, room_ids: StrCollection - ) -> Dict[str, RoomsForUserSlidingSync]: + ) -> dict[str, RoomsForUserSlidingSync]: """Get the sliding sync room entry for the given user and rooms.""" if not room_ids: @@ -1736,7 +1731,7 @@ async def get_sliding_sync_room_for_user_batch( def get_sliding_sync_room_for_user_batch_txn( txn: LoggingTransaction, - ) -> Dict[str, RoomsForUserSlidingSync]: + ) -> dict[str, RoomsForUserSlidingSync]: clause, args = make_in_list_sql_clause( self.database_engine, "m.room_id", room_ids ) @@ -1779,7 +1774,7 @@ def get_sliding_sync_room_for_user_batch_txn( async def get_rooms_for_user_by_date( self, user_id: str, from_ts: int - ) -> FrozenSet[str]: + ) -> frozenset[str]: """ Fetch a list of rooms that the user has joined at or after the given timestamp, including those they subsequently have left/been banned from. @@ -1993,7 +1988,7 @@ async def _background_current_state_membership( def _background_current_state_membership_txn( txn: LoggingTransaction, last_processed_room: str - ) -> Tuple[int, bool]: + ) -> tuple[int, bool]: processed = 0 while processed < batch_size: txn.execute( @@ -2063,7 +2058,7 @@ def __init__( def extract_heroes_from_room_summary( details: Mapping[str, MemberSummary], me: str -) -> List[str]: +) -> list[str]: """Determine the users that represent a room, from the perspective of the `me` user. This function expects `MemberSummary.members` to already be sorted by @@ -2105,7 +2100,7 @@ class _JoinedHostsCache: """The cached data used by the `_get_joined_hosts_cache`.""" # Dict of host to the set of their users in the room at the state group. 
- hosts_to_joined_users: Dict[str, Set[str]] = attr.Factory(dict) + hosts_to_joined_users: dict[str, set[str]] = attr.Factory(dict) # The state group `hosts_to_joined_users` is derived from. Will be an object # if the instance is newly created or if the state is not based on a state diff --git a/synapse/storage/databases/main/search.py b/synapse/storage/databases/main/search.py index 47dfdf64e59..63489f5c275 100644 --- a/synapse/storage/databases/main/search.py +++ b/synapse/storage/databases/main/search.py @@ -28,10 +28,7 @@ Any, Collection, Iterable, - List, Optional, - Set, - Tuple, Union, cast, ) @@ -362,7 +359,7 @@ def create_index(conn: LoggingDatabaseConnection) -> None: pg, ) - def reindex_search_txn(txn: LoggingTransaction) -> Tuple[int, bool]: + def reindex_search_txn(txn: LoggingTransaction) -> tuple[int, bool]: sql = """ UPDATE event_search AS es SET stream_ordering = e.stream_ordering, origin_server_ts = e.origin_server_ts @@ -451,7 +448,7 @@ async def search_msgs( """ clauses = [] - args: List[Any] = [] + args: list[Any] = [] # Make sure we don't explode because the person is in too many rooms. # We filter the results below regardless. @@ -471,7 +468,7 @@ async def search_msgs( count_args = args count_clauses = clauses - sqlite_highlights: List[str] = [] + sqlite_highlights: list[str] = [] if isinstance(self.database_engine, PostgresEngine): search_query = search_term @@ -519,7 +516,7 @@ async def search_msgs( # List of tuples of (rank, room_id, event_id). results = cast( - List[Tuple[Union[int, float], str, str]], + list[tuple[Union[int, float], str, str]], await self.db_pool.execute("search_msgs", sql, *args), ) @@ -544,7 +541,7 @@ async def search_msgs( # List of tuples of (room_id, count). count_results = cast( - List[Tuple[str, int]], + list[tuple[str, int]], await self.db_pool.execute("search_rooms_count", count_sql, *count_args), ) @@ -580,7 +577,7 @@ async def search_rooms( Each match as a dictionary. """ clauses = [] - args: List[Any] = [] + args: list[Any] = [] # Make sure we don't explode because the person is in too many rooms. # We filter the results below regardless. @@ -602,7 +599,7 @@ async def search_rooms( count_args = list(args) count_clauses = list(clauses) - sqlite_highlights: List[str] = [] + sqlite_highlights: list[str] = [] if pagination_token: try: @@ -686,7 +683,7 @@ async def search_rooms( # List of tuples of (rank, room_id, event_id, origin_server_ts, stream_ordering). results = cast( - List[Tuple[Union[int, float], str, str, int, int]], + list[tuple[Union[int, float], str, str, int, int]], await self.db_pool.execute("search_rooms", sql, *args), ) @@ -711,7 +708,7 @@ async def search_rooms( # List of tuples of (room_id, count). count_results = cast( - List[Tuple[str, int]], + list[tuple[str, int]], await self.db_pool.execute("search_rooms_count", count_sql, *count_args), ) @@ -732,8 +729,8 @@ async def search_rooms( } async def _find_highlights_in_postgres( - self, search_query: str, events: List[EventBase] - ) -> Set[str]: + self, search_query: str, events: list[EventBase] + ) -> set[str]: """Given a list of events and a search term, return a list of words that match from the content of the event. @@ -748,7 +745,7 @@ async def _find_highlights_in_postgres( A set of strings. """ - def f(txn: LoggingTransaction) -> Set[str]: + def f(txn: LoggingTransaction) -> set[str]: highlight_words = set() for event in events: # As a hack we simply join values of all possible keys. 
This is @@ -811,7 +808,7 @@ def _to_postgres_options(options_dict: JsonDict) -> str: @dataclass class Phrase: - phrase: List[str] + phrase: list[str] class SearchToken(enum.Enum): @@ -821,7 +818,7 @@ class SearchToken(enum.Enum): Token = Union[str, Phrase, SearchToken] -TokenList = List[Token] +TokenList = list[Token] def _is_stop_word(word: str) -> bool: @@ -901,7 +898,7 @@ def _tokenize_query(query: str) -> TokenList: return tokens -def _tokens_to_sqlite_match_query(tokens: TokenList) -> Tuple[str, List[str]]: +def _tokens_to_sqlite_match_query(tokens: TokenList) -> tuple[str, list[str]]: """ Convert the list of tokens to a string suitable for passing to sqlite's MATCH. Assume sqlite was compiled with enhanced query syntax. @@ -934,7 +931,7 @@ def _tokens_to_sqlite_match_query(tokens: TokenList) -> Tuple[str, List[str]]: return "".join(match_query), highlights -def _parse_query_for_sqlite(search_term: str) -> Tuple[str, List[str]]: +def _parse_query_for_sqlite(search_term: str) -> tuple[str, list[str]]: """Takes a plain unicode string from the user and converts it into a form that can be passed to sqllite's matchinfo(). diff --git a/synapse/storage/databases/main/signatures.py b/synapse/storage/databases/main/signatures.py index ef86151e310..8072a8c7412 100644 --- a/synapse/storage/databases/main/signatures.py +++ b/synapse/storage/databases/main/signatures.py @@ -19,7 +19,7 @@ # # -from typing import Collection, Dict, List, Mapping, Tuple +from typing import Collection, Mapping from unpaddedbase64 import encode_base64 @@ -59,7 +59,7 @@ async def get_event_reference_hashes( allow_rejected=True, ) - hashes: Dict[str, Dict[str, bytes]] = {} + hashes: dict[str, dict[str, bytes]] = {} for event_id in event_ids: event = events.get(event_id) if event is None: @@ -72,7 +72,7 @@ async def get_event_reference_hashes( async def add_event_hashes( self, event_ids: Collection[str] - ) -> List[Tuple[str, Dict[str, str]]]: + ) -> list[tuple[str, dict[str, str]]]: """ Args: diff --git a/synapse/storage/databases/main/sliding_sync.py b/synapse/storage/databases/main/sliding_sync.py index c0c5087b13c..62463c02599 100644 --- a/synapse/storage/databases/main/sliding_sync.py +++ b/synapse/storage/databases/main/sliding_sync.py @@ -14,7 +14,7 @@ import logging -from typing import TYPE_CHECKING, Dict, List, Mapping, Optional, Set, cast +from typing import TYPE_CHECKING, Mapping, Optional, cast import attr @@ -222,7 +222,7 @@ def persist_per_connection_state_txn( # with the updates to `required_state` # Dict from required state json -> required state ID - required_state_to_id: Dict[str, int] = {} + required_state_to_id: dict[str, int] = {} if previous_connection_position is not None: rows = self.db_pool.simple_select_list_txn( txn, @@ -233,8 +233,8 @@ def persist_per_connection_state_txn( for required_state_id, required_state in rows: required_state_to_id[required_state] = required_state_id - room_to_state_ids: Dict[str, int] = {} - unique_required_state: Dict[str, List[str]] = {} + room_to_state_ids: dict[str, int] = {} + unique_required_state: dict[str, list[str]] = {} for room_id, room_state in per_connection_state.room_configs.items(): serialized_state = json_encoder.encode( # We store the required state as a sorted list of event type / @@ -418,7 +418,7 @@ def _get_and_clear_connection_positions_txn( ), ) - required_state_map: Dict[int, Dict[str, Set[str]]] = {} + required_state_map: dict[int, dict[str, set[str]]] = {} for row in rows: state = required_state_map[row[0]] = {} for event_type, state_key in 
db_to_json(row[1]): @@ -437,7 +437,7 @@ def _get_and_clear_connection_positions_txn( ), ) - room_configs: Dict[str, RoomSyncConfig] = {} + room_configs: dict[str, RoomSyncConfig] = {} for ( room_id, timeline_limit, @@ -449,9 +449,9 @@ def _get_and_clear_connection_positions_txn( ) # Now look up the per-room stream data. - rooms: Dict[str, HaveSentRoom[str]] = {} - receipts: Dict[str, HaveSentRoom[str]] = {} - account_data: Dict[str, HaveSentRoom[str]] = {} + rooms: dict[str, HaveSentRoom[str]] = {} + receipts: dict[str, HaveSentRoom[str]] = {} + account_data: dict[str, HaveSentRoom[str]] = {} receipt_rows = self.db_pool.simple_select_list_txn( txn, diff --git a/synapse/storage/databases/main/state.py b/synapse/storage/databases/main/state.py index cfcc731f861..c2c1b62d7e9 100644 --- a/synapse/storage/databases/main/state.py +++ b/synapse/storage/databases/main/state.py @@ -25,15 +25,10 @@ TYPE_CHECKING, Any, Collection, - Dict, - FrozenSet, Iterable, - List, Mapping, MutableMapping, Optional, - Set, - Tuple, TypeVar, Union, cast, @@ -199,7 +194,7 @@ def get_room_version_id_txn(self, txn: LoggingTransaction, room_id: str) -> str: @trace async def get_metadata_for_events( self, event_ids: Collection[str] - ) -> Dict[str, EventMetadata]: + ) -> dict[str, EventMetadata]: """Get some metadata (room_id, type, state_key) for the given events. This method is a faster alternative than fetching the full events from @@ -212,7 +207,7 @@ async def get_metadata_for_events( def get_metadata_for_events_txn( txn: LoggingTransaction, batch_ids: Collection[str], - ) -> Dict[str, EventMetadata]: + ) -> dict[str, EventMetadata]: clause, args = make_in_list_sql_clause( self.database_engine, "e.event_id", batch_ids ) @@ -236,7 +231,7 @@ def get_metadata_for_events_txn( for event_id, room_id, event_type, state_key, rejection_reason in txn } - result_map: Dict[str, EventMetadata] = {} + result_map: dict[str, EventMetadata] = {} for batch_ids in batch_iter(event_ids, 1000): result_map.update( await self.db_pool.runInteraction( @@ -329,7 +324,7 @@ async def get_room_type(self, room_id: str) -> Union[Optional[str], Sentinel]: @cachedList(cached_method_name="get_room_type", list_name="room_ids") async def bulk_get_room_type( - self, room_ids: Set[str] + self, room_ids: set[str] ) -> Mapping[str, Union[Optional[str], Sentinel]]: """ Bulk fetch room types for the given rooms (via current state). @@ -408,7 +403,7 @@ async def get_room_encryption(self, room_id: str) -> Optional[str]: @cachedList(cached_method_name="get_room_encryption", list_name="room_ids") async def bulk_get_room_encryption( - self, room_ids: Set[str] + self, room_ids: set[str] ) -> Mapping[str, Union[Optional[str], Sentinel]]: """ Bulk fetch room encryption for the given rooms (via current state). @@ -469,7 +464,7 @@ def txn( # If we haven't updated `room_stats_state` with the room yet, query the state # directly. This should happen only rarely so we don't mind if we do this in a # loop. 
- encryption_event_ids: List[str] = [] + encryption_event_ids: list[str] = [] for room_id in room_ids - results.keys(): state_map = await self.get_partial_filtered_current_state_ids( room_id, @@ -541,7 +536,7 @@ def _get_current_state_ids_txn(txn: LoggingTransaction) -> StateMap[str]: async def check_if_events_in_current_state( self, event_ids: StrCollection - ) -> FrozenSet[str]: + ) -> frozenset[str]: """Checks and returns which of the given events is part of the current state.""" rows = await self.db_pool.simple_select_many_batch( table="current_state_events", @@ -632,7 +627,7 @@ async def _get_state_group_for_events( RuntimeError if the state is unknown at any of the given events """ rows = cast( - List[Tuple[str, int]], + list[tuple[str, int]], await self.db_pool.simple_select_many_batch( table="event_to_state_groups", column="event_id", @@ -651,7 +646,7 @@ async def _get_state_group_for_events( async def get_referenced_state_groups( self, state_groups: Iterable[int] - ) -> Set[int]: + ) -> set[int]: """Check if the state groups are referenced by events. Args: @@ -662,7 +657,7 @@ async def get_referenced_state_groups( """ rows = cast( - List[Tuple[int]], + list[tuple[int]], await self.db_pool.simple_select_many_batch( table="event_to_state_groups", column="state_group", @@ -803,7 +798,7 @@ async def _background_remove_left_rooms( def _background_remove_left_rooms_txn( txn: LoggingTransaction, - ) -> Tuple[bool, Set[str]]: + ) -> tuple[bool, set[str]]: # get a batch of room ids to consider sql = """ SELECT DISTINCT room_id FROM current_state_events @@ -884,7 +879,7 @@ def _background_remove_left_rooms_txn( # server didn't share a room with the remote user and therefore may # have missed any device updates. rows = cast( - List[Tuple[str]], + list[tuple[str]], self.db_pool.simple_select_many_txn( txn, table="current_state_events", @@ -975,7 +970,7 @@ def __init__( @attr.s(auto_attribs=True, slots=True) -class StateMapWrapper(Dict[StateKey, str]): +class StateMapWrapper(dict[StateKey, str]): """A wrapper around a StateMap[str] to ensure that we only query for items that were not filtered out. diff --git a/synapse/storage/databases/main/state_deltas.py b/synapse/storage/databases/main/state_deltas.py index 303b232d7b5..3df5c8b6f48 100644 --- a/synapse/storage/databases/main/state_deltas.py +++ b/synapse/storage/databases/main/state_deltas.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, List, Optional, Tuple +from typing import TYPE_CHECKING, Optional import attr @@ -79,7 +79,7 @@ def __init__( async def get_partial_current_state_deltas( self, prev_stream_id: int, max_stream_id: int - ) -> Tuple[int, List[StateDelta]]: + ) -> tuple[int, list[StateDelta]]: """Fetch a list of room state changes since the given stream id This may be the partial state if we're lazy joining the room. @@ -114,7 +114,7 @@ async def get_partial_current_state_deltas( def get_current_state_deltas_txn( txn: LoggingTransaction, - ) -> Tuple[int, List[StateDelta]]: + ) -> tuple[int, list[StateDelta]]: # First we calculate the max stream id that will give us less than # N results. # We arbitrarily limit to 100 stream_id entries to ensure we don't @@ -193,7 +193,7 @@ def get_current_state_deltas_for_room_txn( *, from_token: Optional[RoomStreamToken], to_token: Optional[RoomStreamToken], - ) -> List[StateDelta]: + ) -> list[StateDelta]: """ Get the state deltas between two tokens. 
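The `StateMapWrapper(dict[StateKey, str])` change above relies on another part of PEP 585: parameterised builtins are valid base classes from Python 3.9. A minimal, hypothetical illustration:

    class CheckedStateMap(dict[tuple[str, str], str]):
        # Subclassing dict[...] directly; no typing.Dict needed. __missing__
        # is invoked by dict.__getitem__ for absent keys.
        def __missing__(self, key: tuple[str, str]) -> str:
            raise KeyError(f"state key {key!r} is not in this (filtered) map")

    m = CheckedStateMap({("m.room.create", ""): "$create-event"})
    assert m[("m.room.create", "")] == "$create-event"
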
@@ -239,7 +239,7 @@ async def get_current_state_deltas_for_room( *, from_token: Optional[RoomStreamToken], to_token: Optional[RoomStreamToken], - ) -> List[StateDelta]: + ) -> list[StateDelta]: """ Get the state deltas between two tokens. @@ -275,7 +275,7 @@ async def get_current_state_deltas_for_rooms( room_ids: StrCollection, from_token: RoomStreamToken, to_token: RoomStreamToken, - ) -> List[StateDelta]: + ) -> list[StateDelta]: """Get the state deltas between two tokens for the set of rooms.""" room_ids = self._curr_state_delta_stream_cache.get_entities_changed( @@ -287,7 +287,7 @@ async def get_current_state_deltas_for_rooms( def get_current_state_deltas_for_rooms_txn( txn: LoggingTransaction, room_ids: StrCollection, - ) -> List[StateDelta]: + ) -> list[StateDelta]: clause, args = make_in_list_sql_clause( self.database_engine, "room_id", room_ids ) diff --git a/synapse/storage/databases/main/stats.py b/synapse/storage/databases/main/stats.py index 74830b71299..19e525a3cdf 100644 --- a/synapse/storage/databases/main/stats.py +++ b/synapse/storage/databases/main/stats.py @@ -26,11 +26,8 @@ TYPE_CHECKING, Any, Counter, - Dict, Iterable, - List, Optional, - Tuple, Union, cast, ) @@ -154,7 +151,7 @@ async def _populate_stats_process_users( last_user_id = progress.get("last_user_id", "") - def _get_next_batch(txn: LoggingTransaction) -> List[str]: + def _get_next_batch(txn: LoggingTransaction) -> list[str]: sql = """ SELECT DISTINCT name FROM users WHERE name > ? @@ -200,7 +197,7 @@ async def _populate_stats_process_rooms( last_room_id = progress.get("last_room_id", "") - def _get_next_batch(txn: LoggingTransaction) -> List[str]: + def _get_next_batch(txn: LoggingTransaction) -> list[str]: sql = """ SELECT DISTINCT room_id FROM current_state_events WHERE room_id > ? @@ -245,7 +242,7 @@ async def get_stats_positions(self) -> int: desc="stats_incremental_position", ) - async def update_room_state(self, room_id: str, fields: Dict[str, Any]) -> None: + async def update_room_state(self, room_id: str, fields: dict[str, Any]) -> None: """Update the state of a room. fields can contain the following keys with string values: @@ -320,7 +317,7 @@ async def get_earliest_token_for_stats( ) async def bulk_update_stats_delta( - self, ts: int, updates: Dict[str, Dict[str, Counter[str]]], stream_id: int + self, ts: int, updates: dict[str, dict[str, Counter[str]]], stream_id: int ) -> None: """Bulk update stats tables for a given stream_id and updates the stats incremental position. 
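One leftover in the stats imports above: `Counter` is still taken from typing. PEP 585 covers `collections.Counter` too, so on 3.9+ the concrete class can be parameterised directly; a hypothetical reduction over per-room counters, in the spirit of the `bulk_update_stats_delta` signature above:

    from collections import Counter

    def merge_room_deltas(updates: dict[str, Counter[str]]) -> Counter[str]:
        # collections.Counter is subscriptable at runtime on Python 3.9+.
        total: Counter[str] = Counter()
        for per_room in updates.values():
            total.update(per_room)
        return total

    assert merge_room_deltas(
        {"!a:hs": Counter(joined_members=2), "!b:hs": Counter(joined_members=1)}
    ) == Counter(joined_members=3)
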
@@ -363,9 +360,9 @@ async def update_stats_delta( ts: int, stats_type: str, stats_id: str, - fields: Dict[str, int], + fields: dict[str, int], complete_with_stream_id: int, - absolute_field_overrides: Optional[Dict[str, int]] = None, + absolute_field_overrides: Optional[dict[str, int]] = None, ) -> None: """ Updates the statistics for a subject, with a delta (difference/relative @@ -401,9 +398,9 @@ def _update_stats_delta_txn( ts: int, stats_type: str, stats_id: str, - fields: Dict[str, int], + fields: dict[str, int], complete_with_stream_id: int, - absolute_field_overrides: Optional[Dict[str, int]] = None, + absolute_field_overrides: Optional[dict[str, int]] = None, ) -> None: if absolute_field_overrides is None: absolute_field_overrides = {} @@ -450,9 +447,9 @@ def _upsert_with_additive_relatives_txn( self, txn: LoggingTransaction, table: str, - keyvalues: Dict[str, Any], - absolutes: Dict[str, Any], - additive_relatives: Dict[str, int], + keyvalues: dict[str, Any], + absolutes: dict[str, Any], + additive_relatives: dict[str, int], ) -> None: """Used to update values in the stats tables. @@ -510,11 +507,11 @@ async def _calculate_and_set_initial_state_for_room(self, room_id: str) -> None: def _fetch_current_state_stats( txn: LoggingTransaction, - ) -> Tuple[List[str], Dict[str, int], int, List[str], int]: + ) -> tuple[list[str], dict[str, int], int, list[str], int]: pos = self.get_room_max_stream_ordering() # type: ignore[attr-defined] rows = cast( - List[Tuple[str]], + list[tuple[str]], self.db_pool.simple_select_many_txn( txn, table="current_state_events", @@ -544,7 +541,7 @@ def _fetch_current_state_stats( """, (room_id,), ) - membership_counts = dict(cast(Iterable[Tuple[str, int]], txn)) + membership_counts = dict(cast(Iterable[tuple[str, int]], txn)) txn.execute( """ @@ -554,7 +551,7 @@ def _fetch_current_state_stats( (room_id,), ) - current_state_events_count = cast(Tuple[int], txn.fetchone())[0] + current_state_events_count = cast(tuple[int], txn.fetchone())[0] users_in_room = self.get_users_in_room_txn(txn, room_id) # type: ignore[attr-defined] @@ -588,7 +585,7 @@ def _fetch_current_state_stats( ) return - room_state: Dict[str, Union[None, bool, str]] = { + room_state: dict[str, Union[None, bool, str]] = { "join_rules": None, "history_visibility": None, "encryption": None, @@ -651,7 +648,7 @@ def _fetch_current_state_stats( async def _calculate_and_set_initial_state_for_user(self, user_id: str) -> None: def _calculate_and_set_initial_state_for_user_txn( txn: LoggingTransaction, - ) -> Tuple[int, int]: + ) -> tuple[int, int]: pos = self._get_max_stream_id_in_current_state_deltas_txn(txn) txn.execute( @@ -662,7 +659,7 @@ def _calculate_and_set_initial_state_for_user_txn( """, (user_id,), ) - count = cast(Tuple[int], txn.fetchone())[0] + count = cast(tuple[int], txn.fetchone())[0] return count, pos joined_rooms, pos = await self.db_pool.runInteraction( @@ -688,7 +685,7 @@ async def get_users_media_usage_paginate( order_by: Optional[str] = UserSortOrder.USER_ID.value, direction: Direction = Direction.FORWARDS, search_term: Optional[str] = None, - ) -> Tuple[List[Tuple[str, Optional[str], int, int]], int]: + ) -> tuple[list[tuple[str, Optional[str], int, int]], int]: """Function to retrieve a paginated list of users and their uploaded local media (size and number). This will return a json list of users and the total number of users matching the filter criteria. 
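`_upsert_with_additive_relatives_txn` above distinguishes absolute values from additive relatives. The intended semantics, sketched in memory (names hypothetical, not this patch's SQL):

    from typing import Optional

    def apply_stats_update(
        row: dict[str, int],
        additive_relatives: dict[str, int],
        absolutes: Optional[dict[str, int]] = None,
    ) -> None:
        # Relatives are added onto the current values; absolutes replace them.
        for key, delta in additive_relatives.items():
            row[key] = row.get(key, 0) + delta
        for key, value in (absolutes or {}).items():
            row[key] = value

    row = {"joined_members": 5}
    apply_stats_update(row, {"joined_members": 2}, {"local_users_in_room": 3})
    assert row == {"joined_members": 7, "local_users_in_room": 3}
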
@@ -713,7 +710,7 @@ async def get_users_media_usage_paginate( def get_users_media_usage_paginate_txn( txn: LoggingTransaction, - ) -> Tuple[List[Tuple[str, Optional[str], int, int]], int]: + ) -> tuple[list[tuple[str, Optional[str], int, int]], int]: filters = [] args: list = [] @@ -766,7 +763,7 @@ def get_users_media_usage_paginate_txn( sql_base=sql_base, ) txn.execute(sql, args) - count = cast(Tuple[int], txn.fetchone())[0] + count = cast(tuple[int], txn.fetchone())[0] sql = """ SELECT @@ -785,7 +782,7 @@ def get_users_media_usage_paginate_txn( args += [limit, start] txn.execute(sql, args) - users = cast(List[Tuple[str, Optional[str], int, int]], txn.fetchall()) + users = cast(list[tuple[str, Optional[str], int, int]], txn.fetchall()) return users, count diff --git a/synapse/storage/databases/main/stream.py b/synapse/storage/databases/main/stream.py index 66280f2f9af..e8ea1e54806 100644 --- a/synapse/storage/databases/main/stream.py +++ b/synapse/storage/databases/main/stream.py @@ -47,15 +47,11 @@ AbstractSet, Any, Collection, - Dict, Iterable, - List, Literal, Mapping, Optional, Protocol, - Set, - Tuple, cast, overload, ) @@ -109,7 +105,7 @@ async def __call__( to_key: Optional[RoomStreamToken] = None, direction: Direction = Direction.BACKWARDS, limit: int = 0, - ) -> Tuple[List[EventBase], RoomStreamToken, bool]: ... + ) -> tuple[list[EventBase], RoomStreamToken, bool]: ... # Used as return values for pagination APIs @@ -122,8 +118,8 @@ class _EventDictReturn: @attr.s(slots=True, frozen=True, auto_attribs=True) class _EventsAround: - events_before: List[EventBase] - events_after: List[EventBase] + events_before: list[EventBase] + events_after: list[EventBase] start: RoomStreamToken end: RoomStreamToken @@ -156,9 +152,9 @@ class CurrentStateDeltaMembership: def generate_pagination_where_clause( direction: Direction, - column_names: Tuple[str, str], - from_token: Optional[Tuple[Optional[int], int]], - to_token: Optional[Tuple[Optional[int], int]], + column_names: tuple[str, str], + from_token: Optional[tuple[Optional[int], int]], + to_token: Optional[tuple[Optional[int], int]], engine: BaseDatabaseEngine, ) -> str: """Creates an SQL expression to bound the columns by the pagination @@ -224,8 +220,8 @@ def generate_pagination_bounds( direction: Direction, from_token: Optional[RoomStreamToken], to_token: Optional[RoomStreamToken], -) -> Tuple[ - str, Optional[Tuple[Optional[int], int]], Optional[Tuple[Optional[int], int]] +) -> tuple[ + str, Optional[tuple[Optional[int], int]], Optional[tuple[Optional[int], int]] ]: """ Generate a start and end point for this page of events. @@ -261,7 +257,7 @@ def generate_pagination_bounds( # by fetching all events between the min stream token and the maximum # stream token (as returned by `RoomStreamToken.get_max_stream_pos`) and # then filtering the results. 
- from_bound: Optional[Tuple[Optional[int], int]] = None + from_bound: Optional[tuple[Optional[int], int]] = None if from_token: if from_token.topological is not None: from_bound = from_token.as_historical_tuple() @@ -276,7 +272,7 @@ def generate_pagination_bounds( from_token.stream, ) - to_bound: Optional[Tuple[Optional[int], int]] = None + to_bound: Optional[tuple[Optional[int], int]] = None if to_token: if to_token.topological is not None: to_bound = to_token.as_historical_tuple() @@ -320,8 +316,8 @@ def generate_next_token( def _make_generic_sql_bound( bound: str, - column_names: Tuple[str, str], - values: Tuple[Optional[int], int], + column_names: tuple[str, str], + values: tuple[Optional[int], int], engine: BaseDatabaseEngine, ) -> str: """Create an SQL expression that bounds the given column names by the @@ -484,7 +480,7 @@ def _filter_results_by_stream( return True -def filter_to_clause(event_filter: Optional[Filter]) -> Tuple[str, List[str]]: +def filter_to_clause(event_filter: Optional[Filter]) -> tuple[str, list[str]]: # NB: This may create SQL clauses that don't optimise well (and we don't # have indices on all possible clauses). E.g. it may create # "room_id == X AND room_id != X", which postgres doesn't optimise. @@ -669,7 +665,7 @@ async def get_room_events_stream_for_rooms( to_key: Optional[RoomStreamToken] = None, direction: Direction = Direction.BACKWARDS, limit: int = 0, - ) -> Dict[str, Tuple[List[EventBase], RoomStreamToken, bool]]: + ) -> dict[str, tuple[list[EventBase], RoomStreamToken, bool]]: """Get new room events in stream ordering since `from_key`. Args: @@ -730,7 +726,7 @@ async def get_room_events_stream_for_rooms( def get_rooms_that_changed( self, room_ids: Collection[str], from_key: RoomStreamToken - ) -> Set[str]: + ) -> set[str]: """Given a list of rooms and a token, return rooms where there may have been changes. """ @@ -765,7 +761,7 @@ def get_rooms_that_have_updates_since_sliding_sync_table_txn( AND event_stream_ordering > ? """ - results: Set[str] = set() + results: set[str] = set() for batch in batch_iter(room_ids, 1000): clause, args = make_in_list_sql_clause( self.database_engine, "room_id", batch @@ -791,7 +787,7 @@ async def paginate_room_events_by_stream_ordering( to_key: Optional[RoomStreamToken] = None, direction: Direction = Direction.BACKWARDS, limit: int = 0, - ) -> Tuple[List[EventBase], RoomStreamToken, bool]: + ) -> tuple[list[EventBase], RoomStreamToken, bool]: """ Paginate events by `stream_ordering` in the room from the `from_key` in the given `direction` to the `to_key` or `limit`. @@ -876,7 +872,7 @@ async def paginate_room_events_by_stream_ordering( engine=self.database_engine, ) - def f(txn: LoggingTransaction) -> Tuple[List[_EventDictReturn], bool]: + def f(txn: LoggingTransaction) -> tuple[list[_EventDictReturn], bool]: sql = f""" SELECT event_id, instance_name, stream_ordering FROM events @@ -940,8 +936,8 @@ async def get_current_state_delta_membership_changes_for_user( user_id: str, from_key: RoomStreamToken, to_key: RoomStreamToken, - excluded_room_ids: Optional[List[str]] = None, - ) -> List[CurrentStateDeltaMembership]: + excluded_room_ids: Optional[list[str]] = None, + ) -> list[CurrentStateDeltaMembership]: """ Fetch membership events (and the previous event that was replaced by that one) for a given user. 
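
Aside (illustrative, not part of the patch): the pagination hunks above repeatedly use `tuple[Optional[int], int]` for a (topological_ordering, stream_ordering) bound where the topological part may be absent. A self-contained sketch, with a rendering that loosely mirrors how Synapse serialises stream tokens (the alias and function are illustrative assumptions):

    from typing import Optional

    TokenBound = tuple[Optional[int], int]  # (topological, stream)

    def format_bound(bound: Optional[TokenBound]) -> str:
        if bound is None:
            return "unbounded"
        topological, stream = bound
        if topological is not None:
            return f"t{topological}-{stream}"
        return f"s{stream}"

    print(format_bound((5, 1024)))   # t5-1024
    print(format_bound((None, 99)))  # s99
    print(format_bound(None))        # unbounded

Note that `Optional` still comes from `typing`; PEP 585 only made the concrete containers (`list`, `tuple`, `dict`, `set`, ...) subscriptable.
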
@@ -995,13 +991,13 @@ async def get_current_state_delta_membership_changes_for_user( if not has_changed: return [] - def f(txn: LoggingTransaction) -> List[CurrentStateDeltaMembership]: + def f(txn: LoggingTransaction) -> list[CurrentStateDeltaMembership]: # To handle tokens with a non-empty instance_map we fetch more # results than necessary and then filter down min_from_id = from_key.stream max_to_id = to_key.get_max_stream_pos() - args: List[Any] = [min_from_id, max_to_id, EventTypes.Member, user_id] + args: list[Any] = [min_from_id, max_to_id, EventTypes.Member, user_id] # TODO: It would be good to assert that the `from_token`/`to_token` is >= # the first row in `current_state_delta_stream` for the rooms we're @@ -1044,7 +1040,7 @@ def f(txn: LoggingTransaction) -> List[CurrentStateDeltaMembership]: txn.execute(sql, args) - membership_changes: List[CurrentStateDeltaMembership] = [] + membership_changes: list[CurrentStateDeltaMembership] = [] for ( room_id, event_id, @@ -1136,7 +1132,7 @@ async def get_sliding_sync_membership_changes( from_key: RoomStreamToken, to_key: RoomStreamToken, excluded_room_ids: Optional[AbstractSet[str]] = None, - ) -> Dict[str, RoomsForUserStateReset]: + ) -> dict[str, RoomsForUserStateReset]: """ Fetch membership events that result in a meaningful membership change for a given user. @@ -1185,7 +1181,7 @@ async def get_sliding_sync_membership_changes( if excluded_room_ids is not None: room_ids_to_exclude = excluded_room_ids - def f(txn: LoggingTransaction) -> Dict[str, RoomsForUserStateReset]: + def f(txn: LoggingTransaction) -> dict[str, RoomsForUserStateReset]: # To handle tokens with a non-empty instance_map we fetch more # results than necessary and then filter down min_from_id = from_key.stream @@ -1248,7 +1244,7 @@ def f(txn: LoggingTransaction) -> Dict[str, RoomsForUserStateReset]: (user_id, EventTypes.Member, user_id, min_from_id, max_to_id), ) - membership_changes: Dict[str, RoomsForUserStateReset] = {} + membership_changes: dict[str, RoomsForUserStateReset] = {} for ( room_id, membership_event_id, @@ -1332,8 +1328,8 @@ async def get_membership_changes_for_user( user_id: str, from_key: RoomStreamToken, to_key: RoomStreamToken, - excluded_rooms: Optional[List[str]] = None, - ) -> List[EventBase]: + excluded_rooms: Optional[list[str]] = None, + ) -> list[EventBase]: """Fetch membership events for a given user. All such events whose stream ordering `s` lies in the range @@ -1351,13 +1347,13 @@ async def get_membership_changes_for_user( if not has_changed: return [] - def f(txn: LoggingTransaction) -> List[_EventDictReturn]: + def f(txn: LoggingTransaction) -> list[_EventDictReturn]: # To handle tokens with a non-empty instance_map we fetch more # results than necessary and then filter down min_from_id = from_key.stream max_to_id = to_key.get_max_stream_pos() - args: List[Any] = [user_id, min_from_id, max_to_id] + args: list[Any] = [user_id, min_from_id, max_to_id] ignore_room_clause = "" if excluded_rooms is not None and len(excluded_rooms) > 0: @@ -1403,7 +1399,7 @@ def f(txn: LoggingTransaction) -> List[_EventDictReturn]: async def get_recent_events_for_room( self, room_id: str, limit: int, end_token: RoomStreamToken - ) -> Tuple[List[EventBase], RoomStreamToken]: + ) -> tuple[list[EventBase], RoomStreamToken]: """Get the most recent events in the room in topological ordering. 
Args: @@ -1428,7 +1424,7 @@ async def get_recent_events_for_room( async def get_recent_event_ids_for_room( self, room_id: str, limit: int, end_token: RoomStreamToken - ) -> Tuple[List[_EventDictReturn], RoomStreamToken]: + ) -> tuple[list[_EventDictReturn], RoomStreamToken]: """Get the most recent events in the room in topological ordering. Args: @@ -1459,7 +1455,7 @@ async def get_recent_event_ids_for_room( async def get_room_event_before_stream_ordering( self, room_id: str, stream_ordering: int - ) -> Optional[Tuple[int, int, str]]: + ) -> Optional[tuple[int, int, str]]: """Gets details of the first event in a room at or before a stream ordering Args: @@ -1470,7 +1466,7 @@ async def get_room_event_before_stream_ordering( A tuple of (stream ordering, topological ordering, event_id) """ - def _f(txn: LoggingTransaction) -> Optional[Tuple[int, int, str]]: + def _f(txn: LoggingTransaction) -> Optional[tuple[int, int, str]]: sql = """ SELECT stream_ordering, topological_ordering, event_id FROM events @@ -1483,7 +1479,7 @@ def _f(txn: LoggingTransaction) -> Optional[Tuple[int, int, str]]: LIMIT 1 """ txn.execute(sql, (room_id, stream_ordering)) - return cast(Optional[Tuple[int, int, str]], txn.fetchone()) + return cast(Optional[tuple[int, int, str]], txn.fetchone()) return await self.db_pool.runInteraction( "get_room_event_before_stream_ordering", _f @@ -1519,7 +1515,7 @@ async def get_last_event_pos_in_room( self, room_id: str, event_types: Optional[StrCollection] = None, - ) -> Optional[Tuple[str, PersistedEventPosition]]: + ) -> Optional[tuple[str, PersistedEventPosition]]: """ Returns the ID and event position of the last event in a room. @@ -1536,9 +1532,9 @@ async def get_last_event_pos_in_room( def _get_last_event_pos_in_room_txn( txn: LoggingTransaction, - ) -> Optional[Tuple[str, PersistedEventPosition]]: + ) -> Optional[tuple[str, PersistedEventPosition]]: event_type_clause = "" - event_type_args: List[str] = [] + event_type_args: list[str] = [] if event_types is not None and len(event_types) > 0: event_type_clause, event_type_args = make_in_list_sql_clause( txn.database_engine, "type", event_types @@ -1562,7 +1558,7 @@ def _get_last_event_pos_in_room_txn( [room_id] + event_type_args, ) - row = cast(Optional[Tuple[str, int, str]], txn.fetchone()) + row = cast(Optional[tuple[str, int, str]], txn.fetchone()) if row is not None: event_id, stream_ordering, instance_name = row @@ -1585,7 +1581,7 @@ async def get_last_event_pos_in_room_before_stream_ordering( room_id: str, end_token: RoomStreamToken, event_types: Optional[StrCollection] = None, - ) -> Optional[Tuple[str, PersistedEventPosition]]: + ) -> Optional[tuple[str, PersistedEventPosition]]: """ Returns the ID and event position of the last event in a room at or before a stream ordering. @@ -1602,7 +1598,7 @@ async def get_last_event_pos_in_room_before_stream_ordering( def get_last_event_pos_in_room_before_stream_ordering_txn( txn: LoggingTransaction, - ) -> Optional[Tuple[str, PersistedEventPosition]]: + ) -> Optional[tuple[str, PersistedEventPosition]]: # We're looking for the closest event at or before the token. 
We need to # handle the fact that the stream token can be a vector clock (with an # `instance_map`) and events can be persisted on different instances @@ -1616,7 +1612,7 @@ def get_last_event_pos_in_room_before_stream_ordering_txn( max_stream = end_token.get_max_stream_pos() event_type_clause = "" - event_type_args: List[str] = [] + event_type_args: list[str] = [] if event_types is not None and len(event_types) > 0: event_type_clause, event_type_args = make_in_list_sql_clause( txn.database_engine, "type", event_types @@ -1692,7 +1688,7 @@ async def bulk_get_last_event_pos_in_room_before_stream_ordering( self, room_ids: StrCollection, end_token: RoomStreamToken, - ) -> Dict[str, int]: + ) -> dict[str, int]: """Bulk fetch the stream position of the latest events in the given rooms """ @@ -1705,8 +1701,8 @@ async def bulk_get_last_event_pos_in_room_before_stream_ordering( # Check that the stream position for the rooms are from before the # minimum position of the token. If not then we need to fetch more # rows. - results: Dict[str, int] = {} - recheck_rooms: Set[str] = set() + results: dict[str, int] = {} + recheck_rooms: set[str] = set() min_token = end_token.stream for room_id, stream in uncapped_results.items(): if stream is None: @@ -1747,11 +1743,11 @@ async def _bulk_get_max_event_pos( now_token = self.get_room_max_token() max_pos = now_token.get_max_stream_pos() - results: Dict[str, int] = {} + results: dict[str, int] = {} # First, we check for the rooms in the stream change cache to see if we # can just use the latest position from it. - missing_room_ids: Set[str] = set() + missing_room_ids: set[str] = set() for room_id in room_ids: stream_pos = self._events_stream_cache.get_max_pos_of_last_change(room_id) if stream_pos is not None: @@ -1770,7 +1766,7 @@ async def _bulk_get_max_event_pos( def bulk_get_max_event_pos_fallback_txn( txn: LoggingTransaction, batched_room_ids: StrCollection - ) -> Dict[str, int]: + ) -> dict[str, int]: clause, args = make_in_list_sql_clause( self.database_engine, "room_id", batched_room_ids ) @@ -1795,7 +1791,7 @@ def bulk_get_max_event_pos_fallback_txn( # the joins and sub-queries. def bulk_get_max_event_pos_from_sliding_sync_tables_txn( txn: LoggingTransaction, batched_room_ids: StrCollection - ) -> Dict[str, int]: + ) -> dict[str, int]: clause, args = make_in_list_sql_clause( self.database_engine, "room_id", batched_room_ids ) @@ -1808,7 +1804,7 @@ def bulk_get_max_event_pos_from_sliding_sync_tables_txn( txn.execute(sql, args) return {row[0]: row[1] for row in txn} - recheck_rooms: Set[str] = set() + recheck_rooms: set[str] = set() for batched in batch_iter(room_ids, 1000): if await self.have_finished_sliding_sync_background_jobs(): batch_results = await self.db_pool.runInteraction( @@ -2077,7 +2073,7 @@ async def get_all_new_event_ids_stream( from_id: int, current_id: int, limit: int, - ) -> Tuple[int, Dict[str, Optional[int]]]: + ) -> tuple[int, dict[str, Optional[int]]]: """Get all new events Returns all event ids with from_id < stream_ordering <= current_id. 
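
Aside (illustrative, not part of the patch): many hunks in this file rewrite `cast(List[Tuple[...]], txn.fetchall())` as `cast(list[tuple[...]], ...)`. `typing.cast` is a runtime no-op either way; only the type expression changes, so builtin generics slot in directly. A minimal sketch (the `raw` rows stand in for what a DB-API cursor would return):

    from typing import cast

    # Stand-in for cursor results; real code gets these from txn.fetchall().
    raw: list[object] = [(1, "@a:hs", "!r:hs"), (2, "@b:hs", "!r:hs")]

    # cast() just returns its argument; it exists to inform the type checker.
    rows = cast(list[tuple[int, str, str]], raw)
    for stream_id, user_id, room_id in rows:
        print(stream_id, user_id, room_id)
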
@@ -2098,7 +2094,7 @@ async def get_all_new_event_ids_stream( def get_all_new_event_ids_stream_txn( txn: LoggingTransaction, - ) -> Tuple[int, Dict[str, Optional[int]]]: + ) -> tuple[int, dict[str, Optional[int]]]: sql = ( "SELECT e.stream_ordering, e.event_id, e.received_ts" " FROM events AS e" @@ -2115,7 +2111,7 @@ def get_all_new_event_ids_stream_txn( if len(rows) == limit: upper_bound = rows[-1][0] - event_to_received_ts: Dict[str, Optional[int]] = { + event_to_received_ts: dict[str, Optional[int]] = { row[1]: row[2] for row in rows } return upper_bound, event_to_received_ts @@ -2194,7 +2190,7 @@ def _reset_federation_positions_txn(self, txn: LoggingTransaction) -> None: """ txn.execute(sql) min_positions = dict( - cast(Iterable[Tuple[str, int]], txn) + cast(Iterable[tuple[str, int]], txn) ) # Map from type -> min position # Ensure we do actually have some values here @@ -2229,7 +2225,7 @@ def _paginate_room_events_by_topological_ordering_txn( direction: Direction = Direction.BACKWARDS, limit: int = 0, event_filter: Optional[Filter] = None, - ) -> Tuple[List[_EventDictReturn], RoomStreamToken, bool]: + ) -> tuple[list[_EventDictReturn], RoomStreamToken, bool]: """Returns list of events before or after a given token. Args: @@ -2269,7 +2265,7 @@ def _paginate_room_events_by_topological_ordering_txn( # Token selection matches what we do below if there are no rows return [], to_token if to_token else from_token, False - args: List[Any] = [room_id] + args: list[Any] = [room_id] order, from_bound, to_bound = generate_pagination_bounds( direction, from_token, to_token @@ -2403,7 +2399,7 @@ async def paginate_room_events_by_topological_ordering( direction: Direction = Direction.BACKWARDS, limit: int = 0, event_filter: Optional[Filter] = None, - ) -> Tuple[List[EventBase], RoomStreamToken, bool]: + ) -> tuple[list[EventBase], RoomStreamToken, bool]: """ Paginate events by `topological_ordering` (tie-break with `stream_ordering`) in the room from the `from_key` in the given `direction` to the `to_key` or diff --git a/synapse/storage/databases/main/tags.py b/synapse/storage/databases/main/tags.py index 94cf7f40525..0768dd78c05 100644 --- a/synapse/storage/databases/main/tags.py +++ b/synapse/storage/databases/main/tags.py @@ -21,7 +21,7 @@ # import logging -from typing import Any, Dict, Iterable, List, Mapping, Tuple, cast +from typing import Any, Iterable, Mapping, cast from synapse.api.constants import AccountDataTypes from synapse.replication.tcp.streams import AccountDataStream @@ -52,13 +52,13 @@ async def get_tags_for_user( """ rows = cast( - List[Tuple[str, str, str]], + list[tuple[str, str, str]], await self.db_pool.simple_select_list( "room_tags", {"user_id": user_id}, ["room_id", "tag", "content"] ), ) - tags_by_room: Dict[str, Dict[str, JsonDict]] = {} + tags_by_room: dict[str, dict[str, JsonDict]] = {} for room_id, tag, content in rows: room_tags = tags_by_room.setdefault(room_id, {}) room_tags[tag] = db_to_json(content) @@ -66,7 +66,7 @@ async def get_tags_for_user( async def get_all_updated_tags( self, instance_name: str, last_id: int, current_id: int, limit: int - ) -> Tuple[List[Tuple[int, str, str]], int, bool]: + ) -> tuple[list[tuple[int, str, str]], int, bool]: """Get updates for tags replication stream. 
Args: @@ -93,7 +93,7 @@ async def get_all_updated_tags( def get_all_updated_tags_txn( txn: LoggingTransaction, - ) -> List[Tuple[int, str, str]]: + ) -> list[tuple[int, str, str]]: sql = ( "SELECT stream_id, user_id, room_id" " FROM room_tags_revisions as r" @@ -102,7 +102,7 @@ def get_all_updated_tags_txn( ) txn.execute(sql, (last_id, current_id, limit)) # mypy doesn't understand what the query is selecting. - return cast(List[Tuple[int, str, str]], txn.fetchall()) + return cast(list[tuple[int, str, str]], txn.fetchall()) tag_ids = await self.db_pool.runInteraction( "get_all_updated_tags", get_all_updated_tags_txn @@ -131,7 +131,7 @@ async def get_updated_tags( rooms that changed since the stream_id token. """ - def get_updated_tags_txn(txn: LoggingTransaction) -> List[str]: + def get_updated_tags_txn(txn: LoggingTransaction) -> list[str]: sql = ( "SELECT room_id from room_tags_revisions" " WHERE user_id = ? AND stream_id > ?" @@ -218,7 +218,7 @@ async def get_tags_for_room( A mapping of tags to tag content. """ rows = cast( - List[Tuple[str, str]], + list[tuple[str, str]], await self.db_pool.simple_select_list( table="room_tags", keyvalues={"user_id": user_id, "room_id": room_id}, @@ -338,8 +338,8 @@ def process_replication_rows( if stream_name == AccountDataStream.NAME: # Cast is safe because the `AccountDataStream` should only be giving us # `AccountDataStreamRow` - account_data_stream_rows: List[AccountDataStream.AccountDataStreamRow] = ( - cast(List[AccountDataStream.AccountDataStreamRow], rows) + account_data_stream_rows: list[AccountDataStream.AccountDataStreamRow] = ( + cast(list[AccountDataStream.AccountDataStreamRow], rows) ) for row in account_data_stream_rows: diff --git a/synapse/storage/databases/main/task_scheduler.py b/synapse/storage/databases/main/task_scheduler.py index 2d4804fef6f..7410507255f 100644 --- a/synapse/storage/databases/main/task_scheduler.py +++ b/synapse/storage/databases/main/task_scheduler.py @@ -19,7 +19,7 @@ # # -from typing import TYPE_CHECKING, Any, List, Optional, Tuple, cast +from typing import TYPE_CHECKING, Any, Optional, cast from synapse.storage._base import SQLBaseStore, db_to_json from synapse.storage.database import ( @@ -34,7 +34,7 @@ if TYPE_CHECKING: from synapse.server import HomeServer -ScheduledTaskRow = Tuple[str, str, str, int, str, str, str, str] +ScheduledTaskRow = tuple[str, str, str, int, str, str, str, str] class TaskSchedulerWorkerStore(SQLBaseStore): @@ -63,12 +63,12 @@ def _convert_row_to_task(row: ScheduledTaskRow) -> ScheduledTask: async def get_scheduled_tasks( self, *, - actions: Optional[List[str]] = None, + actions: Optional[list[str]] = None, resource_id: Optional[str] = None, - statuses: Optional[List[TaskStatus]] = None, + statuses: Optional[list[TaskStatus]] = None, max_timestamp: Optional[int] = None, limit: Optional[int] = None, - ) -> List[ScheduledTask]: + ) -> list[ScheduledTask]: """Get a list of scheduled tasks from the DB. 
Args: @@ -82,9 +82,9 @@ async def get_scheduled_tasks( Returns: a list of `ScheduledTask`, ordered by increasing timestamps """ - def get_scheduled_tasks_txn(txn: LoggingTransaction) -> List[ScheduledTaskRow]: - clauses: List[str] = [] - args: List[Any] = [] + def get_scheduled_tasks_txn(txn: LoggingTransaction) -> list[ScheduledTaskRow]: + clauses: list[str] = [] + args: list[Any] = [] if resource_id: clauses.append("resource_id = ?") args.append(resource_id) @@ -115,7 +115,7 @@ def get_scheduled_tasks_txn(txn: LoggingTransaction) -> List[ScheduledTaskRow]: args.append(limit) txn.execute(sql, args) - return cast(List[ScheduledTaskRow], txn.fetchall()) + return cast(list[ScheduledTaskRow], txn.fetchall()) rows = await self.db_pool.runInteraction( "get_scheduled_tasks", get_scheduled_tasks_txn diff --git a/synapse/storage/databases/main/thread_subscriptions.py b/synapse/storage/databases/main/thread_subscriptions.py index 50084887a4e..1c02ab16110 100644 --- a/synapse/storage/databases/main/thread_subscriptions.py +++ b/synapse/storage/databases/main/thread_subscriptions.py @@ -14,11 +14,8 @@ from typing import ( TYPE_CHECKING, Any, - FrozenSet, Iterable, - List, Optional, - Tuple, Union, cast, ) @@ -479,7 +476,7 @@ async def get_subscription_for_thread( @cached(max_entries=100) async def get_subscribers_to_thread( self, room_id: str, thread_root_event_id: str - ) -> FrozenSet[str]: + ) -> frozenset[str]: """ Returns: the set of user_ids for local users who are subscribed to the given thread. @@ -510,7 +507,7 @@ def get_thread_subscriptions_stream_id_generator(self) -> MultiWriterIdGenerator async def get_updated_thread_subscriptions( self, *, from_id: int, to_id: int, limit: int - ) -> List[Tuple[int, str, str, str]]: + ) -> list[tuple[int, str, str, str]]: """Get updates to thread subscriptions between two stream IDs. Args: @@ -524,7 +521,7 @@ async def get_updated_thread_subscriptions( def get_updated_thread_subscriptions_txn( txn: LoggingTransaction, - ) -> List[Tuple[int, str, str, str]]: + ) -> list[tuple[int, str, str, str]]: sql = """ SELECT stream_id, user_id, room_id, event_id FROM thread_subscriptions @@ -534,7 +531,7 @@ def get_updated_thread_subscriptions_txn( """ txn.execute(sql, (from_id, to_id, limit)) - return cast(List[Tuple[int, str, str, str]], txn.fetchall()) + return cast(list[tuple[int, str, str, str]], txn.fetchall()) return await self.db_pool.runInteraction( "get_updated_thread_subscriptions", @@ -543,7 +540,7 @@ def get_updated_thread_subscriptions_txn( async def get_latest_updated_thread_subscriptions_for_user( self, user_id: str, *, from_id: int, to_id: int, limit: int - ) -> List[Tuple[int, str, str, bool, Optional[bool]]]: + ) -> list[tuple[int, str, str, bool, Optional[bool]]]: """Get the latest updates to thread subscriptions for a specific user. 
Args: @@ -561,7 +558,7 @@ async def get_latest_updated_thread_subscriptions_for_user( def get_updated_thread_subscriptions_for_user_txn( txn: LoggingTransaction, - ) -> List[Tuple[int, str, str, bool, Optional[bool]]]: + ) -> list[tuple[int, str, str, bool, Optional[bool]]]: sql = """ WITH the_updates AS ( SELECT stream_id, room_id, event_id, subscribed, automatic diff --git a/synapse/storage/databases/main/transactions.py b/synapse/storage/databases/main/transactions.py index 41c94839273..e0422f7459c 100644 --- a/synapse/storage/databases/main/transactions.py +++ b/synapse/storage/databases/main/transactions.py @@ -21,7 +21,7 @@ import logging from enum import Enum -from typing import TYPE_CHECKING, Iterable, List, Mapping, Optional, Tuple, cast +from typing import TYPE_CHECKING, Iterable, Mapping, Optional, cast import attr from canonicaljson import encode_canonical_json @@ -97,7 +97,7 @@ def _cleanup_transactions_txn(txn: LoggingTransaction) -> None: async def get_received_txn_response( self, transaction_id: str, origin: str - ) -> Optional[Tuple[int, JsonDict]]: + ) -> Optional[tuple[int, JsonDict]]: """For an incoming transaction from a given origin, check if we have already responded to it. If so, return the response code and response body (as a dict). @@ -120,7 +120,7 @@ async def get_received_txn_response( def _get_received_txn_response( self, txn: LoggingTransaction, transaction_id: str, origin: str - ) -> Optional[Tuple[int, JsonDict]]: + ) -> Optional[tuple[int, JsonDict]]: result = self.db_pool.simple_select_one_txn( txn, table="received_transactions", @@ -215,7 +215,7 @@ async def get_destination_retry_timings_batch( self, destinations: StrCollection ) -> Mapping[str, Optional[DestinationRetryTimings]]: rows = cast( - List[Tuple[str, Optional[int], Optional[int], Optional[int]]], + list[tuple[str, Optional[int], Optional[int], Optional[int]]], await self.db_pool.simple_select_many_batch( table="destinations", iterable=destinations, @@ -377,7 +377,7 @@ async def get_catch_up_room_event_ids( self, destination: str, last_successful_stream_ordering: int, - ) -> List[str]: + ) -> list[str]: """ Returns at most 50 event IDs and their corresponding stream_orderings that correspond to the oldest events that have not yet been sent to @@ -403,7 +403,7 @@ def _get_catch_up_room_event_ids_txn( txn: LoggingTransaction, destination: str, last_successful_stream_ordering: int, - ) -> List[str]: + ) -> list[str]: q = """ SELECT event_id FROM destination_rooms JOIN events USING (stream_ordering) @@ -421,7 +421,7 @@ def _get_catch_up_room_event_ids_txn( async def get_catch_up_outstanding_destinations( self, after_destination: Optional[str] - ) -> List[str]: + ) -> list[str]: """ Get a list of destinations we should retry transaction sending to. 
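
Aside (illustrative, not part of the patch): like the `ScheduledTaskRow` alias a few hunks above, a plain assignment such as `X = tuple[str, int]` is a valid runtime type alias on 3.9+ because `tuple` itself is subscriptable. A sketch using an invented alias loosely modelled on the destination rows handled nearby:

    import json

    # (destination, retry_last_ts) -- illustrative, not a Synapse alias
    DestinationRow = tuple[str, int]

    def to_json(rows: list[DestinationRow]) -> str:
        return json.dumps(
            [{"destination": d, "retry_last_ts": ts} for d, ts in rows]
        )

    print(to_json([("matrix.org", 0), ("example.com", 1700000000)]))
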
@@ -450,7 +450,7 @@ async def get_catch_up_outstanding_destinations( @staticmethod def _get_catch_up_outstanding_destinations_txn( txn: LoggingTransaction, now_time_ms: int, after_destination: Optional[str] - ) -> List[str]: + ) -> list[str]: # We're looking for destinations which satisfy either of the following # conditions: # @@ -540,8 +540,8 @@ async def get_destinations_paginate( destination: Optional[str] = None, order_by: str = DestinationSortOrder.DESTINATION.value, direction: Direction = Direction.FORWARDS, - ) -> Tuple[ - List[Tuple[str, Optional[int], Optional[int], Optional[int], Optional[int]]], + ) -> tuple[ + list[tuple[str, Optional[int], Optional[int], Optional[int], Optional[int]]], int, ]: """Function to retrieve a paginated list of destinations. @@ -566,9 +566,9 @@ async def get_destinations_paginate( def get_destinations_paginate_txn( txn: LoggingTransaction, - ) -> Tuple[ - List[ - Tuple[str, Optional[int], Optional[int], Optional[int], Optional[int]] + ) -> tuple[ + list[ + tuple[str, Optional[int], Optional[int], Optional[int], Optional[int]] ], int, ]: @@ -579,7 +579,7 @@ def get_destinations_paginate_txn( else: order = "ASC" - args: List[object] = [] + args: list[object] = [] where_statement = "" if destination: args.extend(["%" + destination.lower() + "%"]) @@ -588,7 +588,7 @@ def get_destinations_paginate_txn( sql_base = f"FROM destinations {where_statement} " sql = f"SELECT COUNT(*) as total_destinations {sql_base}" txn.execute(sql, args) - count = cast(Tuple[int], txn.fetchone())[0] + count = cast(tuple[int], txn.fetchone())[0] sql = f""" SELECT destination, retry_last_ts, retry_interval, failure_ts, @@ -599,8 +599,8 @@ def get_destinations_paginate_txn( """ txn.execute(sql, args + [limit, start]) destinations = cast( - List[ - Tuple[ + list[ + tuple[ str, Optional[int], Optional[int], Optional[int], Optional[int] ] ], @@ -618,7 +618,7 @@ async def get_destination_rooms_paginate( start: int, limit: int, direction: Direction = Direction.FORWARDS, - ) -> Tuple[List[Tuple[str, int]], int]: + ) -> tuple[list[tuple[str, int]], int]: """Function to retrieve a paginated list of destination's rooms. This will return a json list of rooms and the total number of rooms. @@ -636,7 +636,7 @@ async def get_destination_rooms_paginate( def get_destination_rooms_paginate_txn( txn: LoggingTransaction, - ) -> Tuple[List[Tuple[str, int]], int]: + ) -> tuple[list[tuple[str, int]], int]: if direction == Direction.BACKWARDS: order = "DESC" else: @@ -648,10 +648,10 @@ def get_destination_rooms_paginate_txn( WHERE destination = ? 
""" txn.execute(sql, [destination]) - count = cast(Tuple[int], txn.fetchone())[0] + count = cast(tuple[int], txn.fetchone())[0] rooms = cast( - List[Tuple[str, int]], + list[tuple[str, int]], self.db_pool.simple_select_list_paginate_txn( txn=txn, table="destination_rooms", diff --git a/synapse/storage/databases/main/ui_auth.py b/synapse/storage/databases/main/ui_auth.py index 569925e39fa..69a4431f29a 100644 --- a/synapse/storage/databases/main/ui_auth.py +++ b/synapse/storage/databases/main/ui_auth.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Any, Dict, List, Optional, Tuple, Union, cast +from typing import Any, Optional, Union, cast import attr @@ -170,7 +170,7 @@ async def mark_ui_auth_stage_complete( async def get_completed_ui_auth_stages( self, session_id: str - ) -> Dict[str, Union[str, bool, JsonDict]]: + ) -> dict[str, Union[str, bool, JsonDict]]: """ Retrieve the completed stages of a UI authentication session. @@ -182,7 +182,7 @@ async def get_completed_ui_auth_stages( """ results = {} rows = cast( - List[Tuple[str, str]], + list[tuple[str, str]], await self.db_pool.simple_select_list( table="ui_auth_sessions_credentials", keyvalues={"session_id": session_id}, @@ -302,14 +302,14 @@ async def add_user_agent_ip_to_ui_auth_session( async def get_user_agents_ips_to_ui_auth_session( self, session_id: str, - ) -> List[Tuple[str, str]]: + ) -> list[tuple[str, str]]: """Get the given user agents / IPs used during the ui auth process Returns: List of user_agent/ip pairs """ return cast( - List[Tuple[str, str]], + list[tuple[str, str]], await self.db_pool.simple_select_list( table="ui_auth_sessions_ips", keyvalues={"session_id": session_id}, @@ -353,7 +353,7 @@ def _delete_old_ui_auth_sessions_txn( # If a registration token was used, decrement the pending counter # before deleting the session. rows = cast( - List[Tuple[str]], + list[tuple[str]], self.db_pool.simple_select_many_txn( txn, table="ui_auth_sessions_credentials", @@ -365,7 +365,7 @@ def _delete_old_ui_auth_sessions_txn( ) # Get the tokens used and how much pending needs to be decremented by. - token_counts: Dict[str, int] = {} + token_counts: dict[str, int] = {} for r in rows: # If registration was successfully completed, the result of the # registration token stage for that session will be True. @@ -378,7 +378,7 @@ def _delete_old_ui_auth_sessions_txn( # Update the `pending` counters. if len(token_counts) > 0: token_rows = cast( - List[Tuple[str, int]], + list[tuple[str, int]], self.db_pool.simple_select_many_txn( txn, table="registration_tokens", diff --git a/synapse/storage/databases/main/user_directory.py b/synapse/storage/databases/main/user_directory.py index 9deb9ab73c2..895d7e61488 100644 --- a/synapse/storage/databases/main/user_directory.py +++ b/synapse/storage/databases/main/user_directory.py @@ -26,11 +26,8 @@ TYPE_CHECKING, Collection, Iterable, - List, Optional, Sequence, - Set, - Tuple, TypedDict, cast, ) @@ -209,7 +206,7 @@ async def _populate_user_directory_process_rooms( def _get_next_batch( txn: LoggingTransaction, - ) -> Optional[Sequence[Tuple[str, int]]]: + ) -> Optional[Sequence[tuple[str, int]]]: # Only fetch 250 rooms, so we don't fetch too many at once, even # if those 250 rooms have less than batch_size state events. 
sql = """ @@ -218,7 +215,7 @@ def _get_next_batch( LIMIT 250 """ % (TEMP_TABLE + "_rooms",) txn.execute(sql) - rooms_to_work_on = cast(List[Tuple[str, int]], txn.fetchall()) + rooms_to_work_on = cast(list[tuple[str, int]], txn.fetchall()) if not rooms_to_work_on: return None @@ -369,14 +366,14 @@ def _populate_user_directory_process_users_txn( RETURNING user_id """ txn.execute(sql, (batch_size,)) - user_result = cast(List[Tuple[str]], txn.fetchall()) + user_result = cast(list[tuple[str]], txn.fetchall()) else: sql = "SELECT user_id FROM %s ORDER BY user_id LIMIT %s" % ( TEMP_TABLE + "_users", str(batch_size), ) txn.execute(sql) - user_result = cast(List[Tuple[str]], txn.fetchall()) + user_result = cast(list[tuple[str]], txn.fetchall()) if not user_result: return None @@ -408,7 +405,7 @@ def _populate_user_directory_process_users_txn( # Next fetch their profiles. Note that not all users have profiles. profile_rows = cast( - List[Tuple[str, Optional[str], Optional[str]]], + list[tuple[str, Optional[str], Optional[str]]], self.db_pool.simple_select_many_txn( txn, table="profiles", @@ -514,7 +511,7 @@ def _filter_local_users_for_dir_txn( ] rows = cast( - List[Tuple[str, Optional[str]]], + list[tuple[str, Optional[str]]], self.db_pool.simple_select_many_txn( txn, table="users", @@ -608,7 +605,7 @@ async def clear_remote_user_profile_in_user_dir_stale(self, user_id: str) -> Non async def get_remote_servers_with_profiles_to_refresh( self, now_ts: int, limit: int - ) -> List[str]: + ) -> list[str]: """ Get a list of up to `limit` server names which have users whose locally-cached profiles we believe to be stale @@ -617,7 +614,7 @@ async def get_remote_servers_with_profiles_to_refresh( def _get_remote_servers_with_refreshable_profiles_txn( txn: LoggingTransaction, - ) -> List[str]: + ) -> list[str]: sql = """ SELECT user_server_name FROM user_directory_stale_remote_users @@ -636,7 +633,7 @@ def _get_remote_servers_with_refreshable_profiles_txn( async def get_remote_users_to_refresh_on_server( self, server_name: str, now_ts: int, limit: int - ) -> List[Tuple[str, int, int]]: + ) -> list[tuple[str, int, int]]: """ Get a list of up to `limit` user IDs from the server `server_name` whose locally-cached profiles we believe to be stale @@ -651,7 +648,7 @@ async def get_remote_users_to_refresh_on_server( def _get_remote_users_to_refresh_on_server_txn( txn: LoggingTransaction, - ) -> List[Tuple[str, int, int]]: + ) -> list[tuple[str, int, int]]: sql = """ SELECT user_id, retry_counter, next_try_at_ts FROM user_directory_stale_remote_users @@ -660,7 +657,7 @@ def _get_remote_users_to_refresh_on_server_txn( LIMIT ? """ txn.execute(sql, (server_name, now_ts, limit)) - return cast(List[Tuple[str, int, int]], txn.fetchall()) + return cast(list[tuple[str, int, int]], txn.fetchall()) return await self.db_pool.runInteraction( "get_remote_users_to_refresh_on_server", @@ -771,7 +768,7 @@ def _update_profiles_in_user_dir_txn( raise Exception("Unrecognized database engine") async def add_users_who_share_private_room( - self, room_id: str, user_id_tuples: Iterable[Tuple[str, str]] + self, room_id: str, user_id_tuples: Iterable[tuple[str, str]] ) -> None: """Insert entries into the users_who_share_private_rooms table. The first user should be a local user. 
@@ -834,7 +831,7 @@ def _delete_all_from_user_dir_txn(txn: LoggingTransaction) -> None: async def _get_user_in_directory( self, user_id: str - ) -> Optional[Tuple[Optional[str], Optional[str]]]: + ) -> Optional[tuple[Optional[str], Optional[str]]]: """ Fetch the user information in the user directory. @@ -843,7 +840,7 @@ async def _get_user_in_directory( avatar URL (both of which may be None). """ return cast( - Optional[Tuple[Optional[str], Optional[str]]], + Optional[tuple[Optional[str], Optional[str]]], await self.db_pool.simple_select_one( table="user_directory", keyvalues={"user_id": user_id}, @@ -864,7 +861,7 @@ async def update_user_directory_stream_pos(self, stream_id: Optional[int]) -> No class SearchResult(TypedDict): limited: bool - results: List[UserProfile] + results: list[UserProfile] class UserDirectoryStore(UserDirectoryBackgroundUpdateStore): @@ -911,7 +908,7 @@ def _remove_from_user_dir_txn(txn: LoggingTransaction) -> None: "remove_from_user_dir", _remove_from_user_dir_txn ) - async def get_users_in_dir_due_to_room(self, room_id: str) -> Set[str]: + async def get_users_in_dir_due_to_room(self, room_id: str) -> set[str]: """Get all user_ids that are in the room directory because they're in the given room_id """ @@ -965,7 +962,7 @@ def _remove_user_who_share_room_txn(txn: LoggingTransaction) -> None: "remove_user_who_share_room", _remove_user_who_share_room_txn ) - async def get_user_dir_rooms_user_is_in(self, user_id: str) -> List[str]: + async def get_user_dir_rooms_user_is_in(self, user_id: str) -> list[str]: """ Returns the rooms that a user is in. @@ -1031,7 +1028,7 @@ async def search_user_dir( } """ - join_args: Tuple[str, ...] = (user_id,) + join_args: tuple[str, ...] = (user_id,) if self.hs.config.userdirectory.user_directory_search_all_users: where_clause = "user_id != ?" @@ -1060,7 +1057,7 @@ async def search_user_dir( # We allow manipulating the ranking algorithm by injecting statements # based on config options. additional_ordering_statements = [] - ordering_arguments: Tuple[str, ...] = () + ordering_arguments: tuple[str, ...] = () if isinstance(self.database_engine, PostgresEngine): full_query, exact_query, prefix_query = _parse_query_postgres(search_term) @@ -1166,7 +1163,7 @@ async def search_user_dir( raise Exception("Unrecognized database engine") results = cast( - List[Tuple[str, Optional[str], Optional[str]]], + list[tuple[str, Optional[str], Optional[str]]], await self.db_pool.execute("search_user_dir", sql, *args), ) @@ -1232,7 +1229,7 @@ def _parse_query_sqlite(search_term: str) -> str: return " & ".join("(%s* OR %s)" % (result, result) for result in results) -def _parse_query_postgres(search_term: str) -> Tuple[str, str, str]: +def _parse_query_postgres(search_term: str) -> tuple[str, str, str]: """Takes a plain unicode string from the user and converts it into a form that can be passed to the database. We use this so that we can add prefix matching, which isn't something @@ -1263,7 +1260,7 @@ def _parse_query_postgres(search_term: str) -> Tuple[str, str, str]: return both, exact, prefix -def _parse_words(search_term: str) -> List[str]: +def _parse_words(search_term: str) -> list[str]: """Split the provided search string into a list of its words using ICU. 
Args: @@ -1275,7 +1272,7 @@ def _parse_words(search_term: str) -> List[str]: return _parse_words_with_icu(search_term) -def _parse_words_with_icu(search_term: str) -> List[str]: +def _parse_words_with_icu(search_term: str) -> list[str]: """Break down the provided search string into its individual words using ICU (International Components for Unicode). @@ -1298,7 +1295,7 @@ def _parse_words_with_icu(search_term: str) -> List[str]: # # In particular, user-71 in postgres gets tokenised to "user, -71", and this # will not match a query for "user, 71". - new_results: List[str] = [] + new_results: list[str] = [] i = 0 while i < len(results): curr = results[i] diff --git a/synapse/storage/databases/main/user_erasure_store.py b/synapse/storage/databases/main/user_erasure_store.py index cceed484c39..f89f11e149e 100644 --- a/synapse/storage/databases/main/user_erasure_store.py +++ b/synapse/storage/databases/main/user_erasure_store.py @@ -18,7 +18,7 @@ # # -from typing import Iterable, List, Mapping, Tuple, cast +from typing import Iterable, Mapping, cast from synapse.storage.database import LoggingTransaction from synapse.storage.databases.main import CacheInvalidationWorkerStore @@ -57,7 +57,7 @@ async def are_users_erased(self, user_ids: Iterable[str]) -> Mapping[str, bool]: for each user, whether the user has requested erasure. """ rows = cast( - List[Tuple[str]], + list[tuple[str]], await self.db_pool.simple_select_many_batch( table="erased_users", column="user_id", diff --git a/synapse/storage/databases/state/bg_updates.py b/synapse/storage/databases/state/bg_updates.py index ac38b2ab19c..a0d8667b072 100644 --- a/synapse/storage/databases/state/bg_updates.py +++ b/synapse/storage/databases/state/bg_updates.py @@ -22,11 +22,8 @@ import logging from typing import ( TYPE_CHECKING, - Dict, - List, Mapping, Optional, - Tuple, Union, ) @@ -106,7 +103,7 @@ def _count_state_group_hops_txn( def _get_state_groups_from_groups_txn( self, txn: LoggingTransaction, - groups: List[int], + groups: list[int], state_filter: Optional[StateFilter] = None, ) -> Mapping[int, StateMap[str]]: """ @@ -123,7 +120,7 @@ def _get_state_groups_from_groups_txn( if state_filter is None: state_filter = StateFilter.all() - results: Dict[int, MutableStateMap[str]] = {group: {} for group in groups} + results: dict[int, MutableStateMap[str]] = {group: {} for group in groups} if isinstance(self.database_engine, PostgresEngine): # Temporarily disable sequential scans in this transaction. This is @@ -147,7 +144,7 @@ def _get_state_groups_from_groups_txn( %s """ - overall_select_query_args: List[Union[int, str]] = [] + overall_select_query_args: list[Union[int, str]] = [] # This is an optimization to create a select clause per-condition. This # makes the query planner a lot smarter on what rows should pull out in the @@ -156,7 +153,7 @@ def _get_state_groups_from_groups_txn( use_condition_optimization = ( not state_filter.include_others and not state_filter.is_full() ) - state_filter_condition_combos: List[Tuple[str, Optional[str]]] = [] + state_filter_condition_combos: list[tuple[str, Optional[str]]] = [] # We don't need to caclculate this list if we're not using the condition # optimization if use_condition_optimization: @@ -173,7 +170,7 @@ def _get_state_groups_from_groups_txn( # `filter_events_for_client` which just uses 2 conditions # (`EventTypes.RoomHistoryVisibility` and `EventTypes.Member`). 
if use_condition_optimization and len(state_filter_condition_combos) < 10: - select_clause_list: List[str] = [] + select_clause_list: list[str] = [] for etype, skey in state_filter_condition_combos: if skey is None: where_clause = "(type = ?)" @@ -216,7 +213,7 @@ def _get_state_groups_from_groups_txn( """ for group in groups: - args: List[Union[int, str]] = [group] + args: list[Union[int, str]] = [group] args.extend(overall_select_query_args) txn.execute(sql % (overall_select_clause,), args) @@ -347,7 +344,7 @@ async def _background_deduplicate_state( ) max_group = rows[0][0] - def reindex_txn(txn: LoggingTransaction) -> Tuple[bool, int]: + def reindex_txn(txn: LoggingTransaction) -> tuple[bool, int]: new_last_state_group = last_state_group for count in range(batch_size): txn.execute( diff --git a/synapse/storage/databases/state/deletion.py b/synapse/storage/databases/state/deletion.py index 9b62c1d8141..6975690c513 100644 --- a/synapse/storage/databases/state/deletion.py +++ b/synapse/storage/databases/state/deletion.py @@ -21,8 +21,6 @@ Collection, Mapping, Optional, - Set, - Tuple, ) from synapse.events.snapshot import EventPersistencePair @@ -233,7 +231,7 @@ async def persisting_state_group_references( any state groups referenced still exist and that they don't get deleted during this.""" - referenced_state_groups: Set[int] = set() + referenced_state_groups: set[int] = set() for event, ctx in event_and_contexts: if ctx.rejected or event.internal_metadata.is_outlier(): continue @@ -269,7 +267,7 @@ async def persisting_state_group_references( ) def _mark_state_groups_as_persisting_txn( - self, txn: LoggingTransaction, state_groups: Set[int] + self, txn: LoggingTransaction, state_groups: set[int] ) -> None: """Marks the given state groups as being persisted.""" @@ -508,7 +506,7 @@ def get_state_groups_ready_for_potential_deletion_txn( async def get_next_state_group_collection_to_delete( self, - ) -> Optional[Tuple[str, Mapping[int, int]]]: + ) -> Optional[tuple[str, Mapping[int, int]]]: """Get the next set of state groups to try and delete Returns: @@ -522,7 +520,7 @@ async def get_next_state_group_collection_to_delete( def _get_next_state_group_collection_to_delete_txn( self, txn: LoggingTransaction, - ) -> Optional[Tuple[str, Mapping[int, int]]]: + ) -> Optional[tuple[str, Mapping[int, int]]]: """Implementation of `get_next_state_group_collection_to_delete`""" # We want to return chunks of state groups that were marked for deletion diff --git a/synapse/storage/databases/state/store.py b/synapse/storage/databases/state/store.py index b62f3e6f5ba..6f25e7f0bce 100644 --- a/synapse/storage/databases/state/store.py +++ b/synapse/storage/databases/state/store.py @@ -22,13 +22,9 @@ import logging from typing import ( TYPE_CHECKING, - Dict, Iterable, - List, Mapping, Optional, - Set, - Tuple, cast, ) @@ -174,7 +170,7 @@ def _get_state_group_delta_txn(txn: LoggingTransaction) -> _GetStateGroupDelta: return _GetStateGroupDelta(None, None) delta_ids = cast( - List[Tuple[str, str, str]], + list[tuple[str, str, str]], self.db_pool.simple_select_list_txn( txn, table="state_groups_state", @@ -199,8 +195,8 @@ def _get_state_group_delta_txn(txn: LoggingTransaction) -> _GetStateGroupDelta: @tag_args @cancellable async def _get_state_groups_from_groups( - self, groups: List[int], state_filter: StateFilter - ) -> Dict[int, StateMap[str]]: + self, groups: list[int], state_filter: StateFilter + ) -> dict[int, StateMap[str]]: """Returns the state groups for a given set of groups from the database, filtering 
on types of state events. @@ -211,7 +207,7 @@ async def _get_state_groups_from_groups( Returns: Dict of state group to state map. """ - results: Dict[int, StateMap[str]] = {} + results: dict[int, StateMap[str]] = {} chunks = [groups[i : i + 100] for i in range(0, len(groups), 100)] for chunk in chunks: @@ -232,7 +228,7 @@ def _get_state_for_group_using_cache( cache: DictionaryCache[int, StateKey, str], group: int, state_filter: StateFilter, - ) -> Tuple[MutableStateMap[str], bool]: + ) -> tuple[MutableStateMap[str], bool]: """Checks if group is in cache. See `get_state_for_groups` Args: @@ -284,7 +280,7 @@ def _get_state_for_group_using_cache( @cancellable async def _get_state_for_groups( self, groups: Iterable[int], state_filter: Optional[StateFilter] = None - ) -> Dict[int, MutableStateMap[str]]: + ) -> dict[int, MutableStateMap[str]]: """Gets the state at each of a list of state groups, optionally filtering by type/state_key @@ -355,7 +351,7 @@ def _get_state_for_groups_using_cache( groups: Iterable[int], cache: DictionaryCache[int, StateKey, str], state_filter: StateFilter, - ) -> Tuple[Dict[int, MutableStateMap[str]], Set[int]]: + ) -> tuple[dict[int, MutableStateMap[str]], set[int]]: """Gets the state at each of a list of state groups, optionally filtering by type/state_key, querying from a specific cache. @@ -387,7 +383,7 @@ def _get_state_for_groups_using_cache( def _insert_into_cache( self, - group_to_state_dict: Dict[int, StateMap[str]], + group_to_state_dict: dict[int, StateMap[str]], state_filter: StateFilter, cache_seq_num_members: int, cache_seq_num_non_members: int, @@ -452,10 +448,10 @@ def _insert_into_cache( @tag_args async def store_state_deltas_for_batched( self, - events_and_context: List[Tuple[EventBase, UnpersistedEventContextBase]], + events_and_context: list[tuple[EventBase, UnpersistedEventContextBase]], room_id: str, prev_group: int, - ) -> List[Tuple[EventBase, UnpersistedEventContext]]: + ) -> list[tuple[EventBase, UnpersistedEventContext]]: """Generate and store state deltas for a group of events and contexts created to be batch persisted. Note that all the events must be in a linear chain (ie a <- b <- c). @@ -469,9 +465,9 @@ async def store_state_deltas_for_batched( def insert_deltas_group_txn( txn: LoggingTransaction, - events_and_context: List[Tuple[EventBase, UnpersistedEventContext]], + events_and_context: list[tuple[EventBase, UnpersistedEventContext]], prev_group: int, - ) -> List[Tuple[EventBase, UnpersistedEventContext]]: + ) -> list[tuple[EventBase, UnpersistedEventContext]]: """Generate and store state groups for the provided events and contexts. Requires that we have the state as a delta from the last persisted state group. @@ -782,7 +778,7 @@ def _purge_unreferenced_state_groups( ) rows = cast( - List[Tuple[int]], + list[tuple[int]], self.db_pool.simple_select_many_txn( txn, table="state_group_edges", @@ -853,7 +849,7 @@ def _purge_unreferenced_state_groups( @tag_args async def get_previous_state_groups( self, state_groups: Iterable[int] - ) -> Dict[int, int]: + ) -> dict[int, int]: """Fetch the previous groups of the given state groups. 
Args: @@ -864,7 +860,7 @@ async def get_previous_state_groups( """ rows = cast( - List[Tuple[int, int]], + list[tuple[int, int]], await self.db_pool.simple_select_many_batch( table="state_group_edges", column="state_group", @@ -881,7 +877,7 @@ async def get_previous_state_groups( @tag_args async def get_next_state_groups( self, state_groups: Iterable[int] - ) -> Dict[int, int]: + ) -> dict[int, int]: """Fetch the groups that have the given state groups as their previous state groups. @@ -893,7 +889,7 @@ async def get_next_state_groups( """ rows = cast( - List[Tuple[int, int]], + list[tuple[int, int]], await self.db_pool.simple_select_many_batch( table="state_group_edges", column="prev_state_group", diff --git a/synapse/storage/engines/postgres.py b/synapse/storage/engines/postgres.py index e4cd359201b..8a1bbfa0f59 100644 --- a/synapse/storage/engines/postgres.py +++ b/synapse/storage/engines/postgres.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Any, Mapping, NoReturn, Optional, Tuple, cast +from typing import TYPE_CHECKING, Any, Mapping, NoReturn, Optional, cast import psycopg2.extensions @@ -79,11 +79,11 @@ def _disable_bytes_adapter(_: bytes) -> NoReturn: def single_threaded(self) -> bool: return False - def get_db_locale(self, txn: Cursor) -> Tuple[str, str]: + def get_db_locale(self, txn: Cursor) -> tuple[str, str]: txn.execute( "SELECT datcollate, datctype FROM pg_database WHERE datname = current_database()" ) - collation, ctype = cast(Tuple[str, str], txn.fetchone()) + collation, ctype = cast(tuple[str, str], txn.fetchone()) return collation, ctype def check_database( diff --git a/synapse/storage/engines/sqlite.py b/synapse/storage/engines/sqlite.py index 9d1795ebe59..ac3dc25bb50 100644 --- a/synapse/storage/engines/sqlite.py +++ b/synapse/storage/engines/sqlite.py @@ -22,7 +22,7 @@ import sqlite3 import struct import threading -from typing import TYPE_CHECKING, Any, List, Mapping, Optional +from typing import TYPE_CHECKING, Any, Mapping, Optional from synapse.storage.engines import BaseDatabaseEngine from synapse.storage.engines._base import AUTO_INCREMENT_PRIMARY_KEYPLACEHOLDER @@ -182,7 +182,7 @@ def executescript(cursor: sqlite3.Cursor, script: str) -> None: # Following functions taken from: https://github.com/coleifer/peewee -def _parse_match_info(buf: bytes) -> List[int]: +def _parse_match_info(buf: bytes) -> list[int]: bufsize = len(buf) return [struct.unpack("@I", buf[i : i + 4])[0] for i in range(0, bufsize, 4)] diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py index bf087702ea9..d4bd8020e1d 100644 --- a/synapse/storage/prepare_database.py +++ b/synapse/storage/prepare_database.py @@ -28,10 +28,8 @@ Counter as CounterType, Generator, Iterable, - List, Optional, TextIO, - Tuple, ) import attr @@ -270,7 +268,7 @@ def _setup_new_database( for database in databases ) - directory_entries: List[_DirectoryListing] = [] + directory_entries: list[_DirectoryListing] = [] for directory in directories: directory_entries.extend( _DirectoryListing(file_name, os.path.join(directory, file_name)) @@ -453,7 +451,7 @@ def _upgrade_existing_database( file_name_counter: CounterType[str] = Counter() # Now find which directories have anything of interest. 
- directory_entries: List[_DirectoryListing] = [] + directory_entries: list[_DirectoryListing] = [] for directory in directories: logger.debug("Looking for schema deltas in %s", directory) try: @@ -593,7 +591,7 @@ def _apply_module_schema_files( cur: Cursor, database_engine: BaseDatabaseEngine, modname: str, - names_and_streams: Iterable[Tuple[str, TextIO]], + names_and_streams: Iterable[tuple[str, TextIO]], ) -> None: """Apply the module schemas for a single module diff --git a/synapse/storage/roommember.py b/synapse/storage/roommember.py index 9dc6c395e8f..35da5351f87 100644 --- a/synapse/storage/roommember.py +++ b/synapse/storage/roommember.py @@ -20,7 +20,7 @@ # import logging -from typing import List, Optional, Tuple +from typing import Optional import attr @@ -84,6 +84,6 @@ class ProfileInfo: class MemberSummary: # A truncated list of (user_id, event_id) tuples for users of a given # membership type, suitable for use in calculating heroes for a room. - members: List[Tuple[str, str]] + members: list[tuple[str, str]] # The total number of users of a given membership type. count: int diff --git a/synapse/storage/schema/main/delta/30/as_users.py b/synapse/storage/schema/main/delta/30/as_users.py index 060217575b0..b7e9a11c2f1 100644 --- a/synapse/storage/schema/main/delta/30/as_users.py +++ b/synapse/storage/schema/main/delta/30/as_users.py @@ -19,7 +19,7 @@ # # import logging -from typing import Dict, Iterable, List, Tuple, cast +from typing import Iterable, cast from synapse.config.appservice import load_appservices from synapse.config.homeserver import HomeServerConfig @@ -44,7 +44,7 @@ def run_upgrade( config: HomeServerConfig, ) -> None: cur.execute("SELECT name FROM users") - rows = cast(Iterable[Tuple[str]], cur.fetchall()) + rows = cast(Iterable[tuple[str]], cur.fetchall()) config_files = [] try: @@ -54,7 +54,7 @@ def run_upgrade( appservices = load_appservices(config.server.server_name, config_files) - owned: Dict[str, List[str]] = {} + owned: dict[str, list[str]] = {} for row in rows: user_id = row[0] diff --git a/synapse/storage/types.py b/synapse/storage/types.py index 4329d88c9a6..69b29717912 100644 --- a/synapse/storage/types.py +++ b/synapse/storage/types.py @@ -23,13 +23,11 @@ Any, Callable, Iterator, - List, Mapping, Optional, Protocol, Sequence, Tuple, - Type, Union, ) @@ -49,9 +47,9 @@ def executemany( def fetchone(self) -> Optional[Tuple]: ... - def fetchmany(self, size: Optional[int] = ...) -> List[Tuple]: ... + def fetchmany(self, size: Optional[int] = ...) -> list[Tuple]: ... - def fetchall(self) -> List[Tuple]: ... + def fetchall(self) -> list[Tuple]: ... @property def description( @@ -84,7 +82,7 @@ def __enter__(self) -> "Connection": ... def __exit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc_value: Optional[BaseException], traceback: Optional[TracebackType], ) -> Optional[bool]: ... @@ -117,20 +115,20 @@ class DBAPI2Module(Protocol): # explain why this is necessary for safety. TL;DR: we shouldn't be able to write # to `x`, only read from it. See also https://github.com/python/mypy/issues/6002 . @property - def Warning(self) -> Type[Exception]: ... + def Warning(self) -> type[Exception]: ... @property - def Error(self) -> Type[Exception]: ... + def Error(self) -> type[Exception]: ... # Errors are divided into `InterfaceError`s (something went wrong in the database # driver) and `DatabaseError`s (something went wrong in the database). 
These are # both subclasses of `Error`, but we can't currently express this in type # annotations due to https://github.com/python/mypy/issues/8397 @property - def InterfaceError(self) -> Type[Exception]: ... + def InterfaceError(self) -> type[Exception]: ... @property - def DatabaseError(self) -> Type[Exception]: ... + def DatabaseError(self) -> type[Exception]: ... # Everything below is a subclass of `DatabaseError`. @@ -139,7 +137,7 @@ def DatabaseError(self) -> Type[Exception]: ... # - An invalid date time was provided. # - A string contained a null code point. @property - def DataError(self) -> Type[Exception]: ... + def DataError(self) -> type[Exception]: ... # Roughly: something went wrong in the database, but it's not within the application # programmer's control. Examples: @@ -150,18 +148,18 @@ def DataError(self) -> Type[Exception]: ... # - The database ran out of resources, such as storage, memory, connections, etc. # - The database encountered an error from the operating system. @property - def OperationalError(self) -> Type[Exception]: ... + def OperationalError(self) -> type[Exception]: ... # Roughly: we've given the database data which breaks a rule we asked it to enforce. # Examples: # - Stop, criminal scum! You violated the foreign key constraint # - Also check constraints, non-null constraints, etc. @property - def IntegrityError(self) -> Type[Exception]: ... + def IntegrityError(self) -> type[Exception]: ... # Roughly: something went wrong within the database server itself. @property - def InternalError(self) -> Type[Exception]: ... + def InternalError(self) -> type[Exception]: ... # Roughly: the application did something silly that needs to be fixed. Examples: # - We don't have permissions to do something. @@ -169,11 +167,11 @@ def InternalError(self) -> Type[Exception]: ... # - We tried to use a reserved name. # - We referred to a column that doesn't exist. @property - def ProgrammingError(self) -> Type[Exception]: ... + def ProgrammingError(self) -> type[Exception]: ... # Roughly: we've tried to do something that this database doesn't support. @property - def NotSupportedError(self) -> Type[Exception]: ... + def NotSupportedError(self) -> type[Exception]: ... # We originally wrote # def connect(self, *args, **kwargs) -> Connection: ... diff --git a/synapse/storage/util/id_generators.py b/synapse/storage/util/id_generators.py index 1b7c5dac7a2..5bf5c2b4bf7 100644 --- a/synapse/storage/util/id_generators.py +++ b/synapse/storage/util/id_generators.py @@ -28,15 +28,10 @@ TYPE_CHECKING, AsyncContextManager, ContextManager, - Dict, Generic, Iterable, - List, Optional, Sequence, - Set, - Tuple, - Type, TypeVar, Union, cast, @@ -223,9 +218,9 @@ def __init__( stream_name: str, server_name: str, instance_name: str, - tables: List[Tuple[str, str, str]], + tables: list[tuple[str, str, str]], sequence_name: str, - writers: List[str], + writers: list[str], positive: bool = True, ) -> None: self._db = db @@ -243,7 +238,7 @@ def __init__( # Note: If we are a negative stream then we still store all the IDs as # positive to make life easier for us, and simply negate the IDs when we # return them. - self._current_positions: Dict[str, int] = {} + self._current_positions: dict[str, int] = {} # Set of local IDs that we're still processing. The current position # should be less than the minimum of this set (if not empty). @@ -260,7 +255,7 @@ def __init__( # Set of local IDs that we've processed that are larger than the current # position, due to there being smaller unpersisted IDs. 
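Every property on the `DBAPI2Module` protocol above moves from `Type[Exception]` to `type[Exception]`; the builtin `type` works anywhere the `typing.Type` form did. A small sketch (hypothetical helper, not part of the module) of passing exception classes around this way:

def raise_as(exc_cls: type[Exception], msg: str) -> None:
    # Accepts an Exception subclass itself, not an instance of it.
    raise exc_cls(msg)

try:
    raise_as(ValueError, "bad input")
except ValueError as e:
    assert str(e) == "bad input"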
- self._finished_ids: Set[int] = set() + self._finished_ids: set[int] = set() # We track the max position where we know everything before has been # persisted. This is done by a) looking at the min across all instances @@ -281,7 +276,7 @@ def __init__( self._persisted_upto_position = ( min(self._current_positions.values()) if self._current_positions else 1 ) - self._known_persisted_positions: List[int] = [] + self._known_persisted_positions: list[int] = [] # The maximum stream ID that we have seen been allocated across any writer. # Since this defaults to 1, this means that ID 1 is assumed to have already @@ -348,7 +343,7 @@ def __init__( def _load_current_ids( self, db_conn: LoggingDatabaseConnection, - tables: List[Tuple[str, str, str]], + tables: list[tuple[str, str, str]], sequence_name: str, ) -> None: cur = db_conn.cursor(txn_name="_load_current_ids") @@ -439,7 +434,7 @@ def _load_current_ids( self._persisted_upto_position = min_stream_id - rows: List[Tuple[str, int]] = [] + rows: list[tuple[str, int]] = [] for table, instance_column, id_column in tables: sql = """ SELECT %(instance)s, %(id)s FROM %(table)s @@ -453,13 +448,13 @@ def _load_current_ids( cur.execute(sql, (min_stream_id * self._return_factor,)) # Cast safety: this corresponds to the types returned by the query above. - rows.extend(cast(Iterable[Tuple[str, int]], cur)) + rows.extend(cast(Iterable[tuple[str, int]], cur)) # Sort by stream_id (ascending, lowest -> highest) so that we handle # rows in order for each instance because we don't want to overwrite # the current_position of an instance to a lower stream ID than # we're actually at. - def sort_by_stream_id_key_func(row: Tuple[str, int]) -> int: + def sort_by_stream_id_key_func(row: tuple[str, int]) -> int: (instance, stream_id) = row # If `stream_id` is ever `None`, we will see a `TypeError: '<' # not supported between instances of 'NoneType' and 'X'` error. @@ -492,7 +487,7 @@ def _load_next_id_txn(self, txn: Cursor) -> int: stream_ids = self._load_next_mult_id_txn(txn, 1) return stream_ids[0] - def _load_next_mult_id_txn(self, txn: Cursor, n: int) -> List[int]: + def _load_next_mult_id_txn(self, txn: Cursor, n: int) -> list[int]: # We need to track that we've requested some more stream IDs, and what # the current max allocated stream ID is. This is to prevent a race # where we've been allocated stream IDs but they have not yet been added @@ -529,7 +524,7 @@ def get_next(self) -> AsyncContextManager[int]: AsyncContextManager[int], _MultiWriterCtxManager(self, self._notifier) ) - def get_next_mult(self, n: int) -> AsyncContextManager[List[int]]: + def get_next_mult(self, n: int) -> AsyncContextManager[list[int]]: # If we have a list of instances that are allowed to write to this # stream, make sure we're in it. if self._writers and self._instance_name not in self._writers: @@ -537,7 +532,7 @@ def get_next_mult(self, n: int) -> AsyncContextManager[List[int]]: # Cast safety: see get_next. 
return cast( - AsyncContextManager[List[int]], + AsyncContextManager[list[int]], _MultiWriterCtxManager(self, self._notifier, n), ) @@ -578,7 +573,7 @@ def get_next_txn(self, txn: LoggingTransaction) -> int: return self._return_factor * next_id - def get_next_mult_txn(self, txn: LoggingTransaction, n: int) -> List[int]: + def get_next_mult_txn(self, txn: LoggingTransaction, n: int) -> list[int]: """ Usage: @@ -615,7 +610,7 @@ def get_next_mult_txn(self, txn: LoggingTransaction, n: int) -> List[int]: return [self._return_factor * next_id for next_id in next_ids] - def _mark_ids_as_finished(self, next_ids: List[int]) -> None: + def _mark_ids_as_finished(self, next_ids: list[int]) -> None: """These IDs have finished being processed so we should advance the current position if possible. """ @@ -707,7 +702,7 @@ def get_minimal_local_current_token(self) -> int: self._instance_name, self._persisted_upto_position ) - def get_positions(self) -> Dict[str, int]: + def get_positions(self) -> dict[str, int]: """Get a copy of the current positon map. Note that this won't necessarily include all configured writers if some @@ -849,7 +844,7 @@ async def __aenter__(self) -> T: async def __aexit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc: Optional[BaseException], tb: Optional[TracebackType], ) -> Optional[bool]: @@ -863,9 +858,9 @@ class _MultiWriterCtxManager: id_gen: MultiWriterIdGenerator notifier: "ReplicationNotifier" multiple_ids: Optional[int] = None - stream_ids: List[int] = attr.Factory(list) + stream_ids: list[int] = attr.Factory(list) - async def __aenter__(self) -> Union[int, List[int]]: + async def __aenter__(self) -> Union[int, list[int]]: # It's safe to run this in autocommit mode as fetching values from a # sequence ignores transaction semantics anyway. self.stream_ids = await self.id_gen._db.runInteraction( @@ -882,7 +877,7 @@ async def __aenter__(self) -> Union[int, List[int]]: async def __aexit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc: Optional[BaseException], tb: Optional[TracebackType], ) -> bool: diff --git a/synapse/storage/util/partial_state_events_tracker.py b/synapse/storage/util/partial_state_events_tracker.py index f8addf38f6a..5078f6367b4 100644 --- a/synapse/storage/util/partial_state_events_tracker.py +++ b/synapse/storage/util/partial_state_events_tracker.py @@ -21,7 +21,7 @@ import logging from collections import defaultdict -from typing import Collection, Dict, Set +from typing import Collection from twisted.internet import defer from twisted.internet.defer import Deferred @@ -43,7 +43,7 @@ def __init__(self, store: EventsWorkerStore): self._store = store # a map from event id to a set of Deferreds which are waiting for that event to be # un-partial-stated. 
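The tracker below keeps a `dict[str, set[Deferred[None]]]` of waiters per event. A dependency-free sketch of the same observe/notify shape, with plain callables standing in for Twisted Deferreds:

from collections import defaultdict
from typing import Callable

_observers: dict[str, set[Callable[[], None]]] = defaultdict(set)

def observe(event_id: str, callback: Callable[[], None]) -> None:
    _observers[event_id].add(callback)

def notify_un_partial_stated(event_id: str) -> None:
    # pop() so that each waiter fires at most once
    for callback in _observers.pop(event_id, set()):
        callback()

fired: list[str] = []
observe("$event:example.org", lambda: fired.append("woken"))
notify_un_partial_stated("$event:example.org")
assert fired == ["woken"]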
- self._observers: Dict[str, Set[Deferred[None]]] = defaultdict(set) + self._observers: dict[str, set[Deferred[None]]] = defaultdict(set) def notify_un_partial_stated(self, event_id: str) -> None: """Notify that we now have full state for a given event @@ -93,7 +93,7 @@ async def await_full_state(self, event_ids: Collection[str]) -> None: ) # create an observer for each lazy-joined event - observers: Dict[str, Deferred[None]] = { + observers: dict[str, Deferred[None]] = { event_id: Deferred() for event_id in partial_state_event_ids } for event_id, observer in observers.items(): @@ -140,7 +140,7 @@ def __init__(self, store: RoomWorkerStore): # a map from room id to a set of Deferreds which are waiting for that room to be # un-partial-stated. - self._observers: Dict[str, Set[Deferred[None]]] = defaultdict(set) + self._observers: dict[str, set[Deferred[None]]] = defaultdict(set) def notify_un_partial_stated(self, room_id: str) -> None: """Notify that we now have full current state for a given room diff --git a/synapse/storage/util/sequence.py b/synapse/storage/util/sequence.py index cac3eba1a59..e2256aa1091 100644 --- a/synapse/storage/util/sequence.py +++ b/synapse/storage/util/sequence.py @@ -21,7 +21,7 @@ import abc import logging import threading -from typing import TYPE_CHECKING, Callable, List, Optional +from typing import TYPE_CHECKING, Callable, Optional from synapse.storage.engines import ( BaseDatabaseEngine, @@ -61,7 +61,7 @@ def get_next_id_txn(self, txn: Cursor) -> int: ... @abc.abstractmethod - def get_next_mult_txn(self, txn: Cursor, n: int) -> List[int]: + def get_next_mult_txn(self, txn: Cursor, n: int) -> list[int]: """Get the next `n` IDs in the sequence""" ... @@ -105,7 +105,7 @@ def get_next_id_txn(self, txn: Cursor) -> int: assert fetch_res is not None return fetch_res[0] - def get_next_mult_txn(self, txn: Cursor, n: int) -> List[int]: + def get_next_mult_txn(self, txn: Cursor, n: int) -> list[int]: txn.execute( "SELECT nextval(?) 
FROM generate_series(1, ?)", (self._sequence_name, n) ) @@ -241,7 +241,7 @@ def get_next_id_txn(self, txn: Cursor) -> int: self._current_max_id += 1 return self._current_max_id - def get_next_mult_txn(self, txn: Cursor, n: int) -> List[int]: + def get_next_mult_txn(self, txn: Cursor, n: int) -> list[int]: with self._lock: if self._current_max_id is None: assert self._callback is not None diff --git a/synapse/streams/__init__.py b/synapse/streams/__init__.py index 67635d7ebed..faf453b8a11 100644 --- a/synapse/streams/__init__.py +++ b/synapse/streams/__init__.py @@ -19,7 +19,7 @@ # # from abc import ABC, abstractmethod -from typing import Generic, List, Optional, Tuple, TypeVar +from typing import Generic, Optional, TypeVar from synapse.types import StrCollection, UserID @@ -39,5 +39,5 @@ async def get_new_events( room_ids: StrCollection, is_guest: bool, explicit_room_id: Optional[str] = None, - ) -> Tuple[List[R], K]: + ) -> tuple[list[R], K]: raise NotImplementedError() diff --git a/synapse/streams/events.py b/synapse/streams/events.py index 1e4bebe46d5..143f659499b 100644 --- a/synapse/streams/events.py +++ b/synapse/streams/events.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Sequence, Tuple +from typing import TYPE_CHECKING, Sequence import attr @@ -52,7 +52,7 @@ class _EventSourcesInner: receipt: ReceiptEventSource account_data: AccountDataEventSource - def get_sources(self) -> Sequence[Tuple[StreamKeyType, EventSource]]: + def get_sources(self) -> Sequence[tuple[StreamKeyType, EventSource]]: return [ (StreamKeyType.ROOM, self.room), (StreamKeyType.PRESENCE, self.presence), diff --git a/synapse/types/__init__.py b/synapse/types/__init__.py index 0386cb77d62..87436459ac9 100644 --- a/synapse/types/__init__.py +++ b/synapse/types/__init__.py @@ -29,17 +29,12 @@ AbstractSet, Any, ClassVar, - Dict, - List, Literal, Mapping, Match, MutableMapping, NoReturn, Optional, - Set, - Tuple, - Type, TypedDict, TypeVar, Union, @@ -84,16 +79,16 @@ # Define a state map type from type/state_key to T (usually an event ID or # event) T = TypeVar("T") -StateKey = Tuple[str, str] +StateKey = tuple[str, str] StateMap = Mapping[StateKey, T] MutableStateMap = MutableMapping[StateKey, T] # JSON types. These could be made stronger, but will do for now. # A "simple" (canonical) JSON value. SimpleJsonValue = Optional[Union[str, int, bool]] -JsonValue = Union[List[SimpleJsonValue], Tuple[SimpleJsonValue, ...], SimpleJsonValue] +JsonValue = Union[list[SimpleJsonValue], tuple[SimpleJsonValue, ...], SimpleJsonValue] # A JSON-serialisable dict. -JsonDict = Dict[str, Any] +JsonDict = dict[str, Any] # A JSON-serialisable mapping; roughly speaking an immutable JSONDict. # Useful when you have a TypedDict which isn't going to be mutated and you don't want # to cast to JsonDict everywhere. @@ -106,12 +101,12 @@ # # StrCollection is an unordered collection of strings. If ordering is important, # StrSequence can be used instead. -StrCollection = Union[Tuple[str, ...], List[str], AbstractSet[str]] +StrCollection = Union[tuple[str, ...], list[str], AbstractSet[str]] # Sequence[str] that does not include str itself; str being a Sequence[str] # is very misleading and results in bugs. # # Unlike StrCollection, StrSequence is an ordered collection of strings. 
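The comment above deserves a demonstration: `str` is itself a `Sequence[str]` of its characters, so a `Sequence[str]` parameter silently accepts a bare string. That is why these aliases are spelled as unions of concrete container types:

from typing import Sequence, Union

StrSequence = Union[tuple[str, ...], list[str]]  # restating the alias below

def first(items: Sequence[str]) -> str:
    return items[0]

assert first("abc") == "a"             # type-checks, but is almost always a bug
assert first(["abc", "def"]) == "abc"  # what callers actually mean

def first_checked(items: StrSequence) -> str:
    return items[0]  # here a bare str is rejected by the type checker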
-StrSequence = Union[Tuple[str, ...], List[str]] +StrSequence = Union[tuple[str, ...], list[str]] # Note that this seems to require inheriting *directly* from Interface in order @@ -165,13 +160,13 @@ class Requester: user: "UserID" access_token_id: Optional[int] is_guest: bool - scope: Set[str] + scope: set[str] shadow_banned: bool device_id: Optional[str] app_service: Optional["ApplicationService"] authenticated_entity: str - def serialize(self) -> Dict[str, Any]: + def serialize(self) -> dict[str, Any]: """Converts self to a type that can be serialized as JSON, and then deserialized by `deserialize` @@ -191,7 +186,7 @@ def serialize(self) -> Dict[str, Any]: @staticmethod def deserialize( - store: "ApplicationServiceWorkerStore", input: Dict[str, Any] + store: "ApplicationServiceWorkerStore", input: dict[str, Any] ) -> "Requester": """Converts a dict that was produced by `serialize` back into a Requester. @@ -305,11 +300,11 @@ class DomainSpecificString(metaclass=abc.ABCMeta): def __copy__(self: DS) -> DS: return self - def __deepcopy__(self: DS, memo: Dict[str, object]) -> DS: + def __deepcopy__(self: DS, memo: dict[str, object]) -> DS: return self @classmethod - def from_string(cls: Type[DS], s: str) -> DS: + def from_string(cls: type[DS], s: str) -> DS: """Parse the string given by 's' into a structure object.""" if len(s) < 1 or s[0:1] != cls.SIGIL: raise SynapseError( @@ -337,7 +332,7 @@ def to_string(self) -> str: return "%s%s:%s" % (self.SIGIL, self.localpart, self.domain) @classmethod - def is_valid(cls: Type[DS], s: str) -> bool: + def is_valid(cls: type[DS], s: str) -> bool: """Parses the input string and attempts to ensure it is valid.""" # TODO: this does not reject an empty localpart or an overly-long string. # See https://spec.matrix.org/v1.2/appendices/#identifier-grammar @@ -393,7 +388,7 @@ class RoomID: room_id_with_domain: Optional[RoomIdWithDomain] @classmethod - def is_valid(cls: Type["RoomID"], s: str) -> bool: + def is_valid(cls: type["RoomID"], s: str) -> bool: if ":" in s: return RoomIdWithDomain.is_valid(s) try: @@ -415,7 +410,7 @@ def to_string(self) -> str: __repr__ = to_string @classmethod - def from_string(cls: Type["RoomID"], s: str) -> "RoomID": + def from_string(cls: type["RoomID"], s: str) -> "RoomID": # sigil check if len(s) < 1 or s[0] != cls.SIGIL: raise SynapseError( @@ -829,7 +824,7 @@ def copy_and_advance(self, other: "RoomStreamToken") -> "RoomStreamToken": return super().copy_and_advance(other) - def as_historical_tuple(self) -> Tuple[int, int]: + def as_historical_tuple(self) -> tuple[int, int]: """Returns a tuple of `(topological, stream)` for historical tokens. Raises if not an historical token (i.e. doesn't have a topological part). @@ -1412,7 +1407,7 @@ def __iter__(self) -> NoReturn: def __copy__(self) -> "ThirdPartyInstanceID": return self - def __deepcopy__(self, memo: Dict[str, object]) -> "ThirdPartyInstanceID": + def __deepcopy__(self, memo: dict[str, object]) -> "ThirdPartyInstanceID": return self @classmethod @@ -1436,7 +1431,7 @@ class ReadReceipt: room_id: str receipt_type: str user_id: str - event_ids: List[str] + event_ids: list[str] thread_id: Optional[str] data: JsonDict @@ -1459,8 +1454,8 @@ class DeviceListUpdates: # The latter happening only once, thus always giving you the same sets # across multiple DeviceListUpdates instances. # Also see: don't define mutable default arguments. 
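A quick demonstration of why the fields below use `attr.ib(factory=set)` rather than a shared `set()` default (sketch, assuming the attrs package is importable):

import attr

@attr.s(slots=True, auto_attribs=True)
class DeviceListUpdates:
    changed: set[str] = attr.ib(factory=set)
    left: set[str] = attr.ib(factory=set)

a = DeviceListUpdates()
b = DeviceListUpdates()
a.changed.add("@user:example.org")
# Each instance gets its own fresh set; nothing is shared between instances.
assert b.changed == set()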
- changed: Set[str] = attr.ib(factory=set) - left: Set[str] = attr.ib(factory=set) + changed: set[str] = attr.ib(factory=set) + left: set[str] = attr.ib(factory=set) def __bool__(self) -> bool: return bool(self.changed or self.left) @@ -1468,7 +1463,7 @@ def __bool__(self) -> bool: def get_verify_key_from_cross_signing_key( key_info: Mapping[str, Any], -) -> Tuple[str, VerifyKey]: +) -> tuple[str, VerifyKey]: """Get the key ID and signedjson verify key from a cross-signing key dict Args: diff --git a/synapse/types/handlers/__init__.py b/synapse/types/handlers/__init__.py index f2fbc1dddf2..80651bb6855 100644 --- a/synapse/types/handlers/__init__.py +++ b/synapse/types/handlers/__init__.py @@ -19,7 +19,7 @@ # -from typing import List, Optional, TypedDict +from typing import Optional, TypedDict from synapse.api.constants import EventTypes @@ -87,7 +87,7 @@ class ShutdownRoomResponse(TypedDict): new_room_id: A string representing the room ID of the new room. """ - kicked_users: List[str] - failed_to_kick_users: List[str] - local_aliases: List[str] + kicked_users: list[str] + failed_to_kick_users: list[str] + local_aliases: list[str] new_room_id: Optional[str] diff --git a/synapse/types/handlers/sliding_sync.py b/synapse/types/handlers/sliding_sync.py index b7bc565464f..aef7db8e980 100644 --- a/synapse/types/handlers/sliding_sync.py +++ b/synapse/types/handlers/sliding_sync.py @@ -21,16 +21,12 @@ AbstractSet, Any, Callable, - Dict, Final, Generic, - List, Mapping, MutableMapping, Optional, Sequence, - Set, - Tuple, TypeVar, cast, ) @@ -178,17 +174,17 @@ class StrippedHero: name: Optional[str] avatar: Optional[str] - heroes: Optional[List[StrippedHero]] + heroes: Optional[list[StrippedHero]] is_dm: bool initial: bool unstable_expanded_timeline: bool # Should be empty for invite/knock rooms with `stripped_state` - required_state: List[EventBase] + required_state: list[EventBase] # Should be empty for invite/knock rooms with `stripped_state` - timeline_events: List[EventBase] - bundled_aggregations: Optional[Dict[str, "BundledAggregations"]] + timeline_events: list[EventBase] + bundled_aggregations: Optional[dict[str, "BundledAggregations"]] # Optional because it's only relevant to invite/knock rooms - stripped_state: List[JsonDict] + stripped_state: list[JsonDict] # Only optional because it won't be included for invite/knock rooms with `stripped_state` prev_batch: Optional[StreamToken] # Only optional because it won't be included for invite/knock rooms with `stripped_state` @@ -240,11 +236,11 @@ class Operation: """ op: OperationType - range: Tuple[int, int] - room_ids: List[str] + range: tuple[int, int] + room_ids: list[str] count: int - ops: List[Operation] + ops: list[Operation] @attr.s(slots=True, frozen=True, auto_attribs=True) class Extensions: @@ -415,7 +411,7 @@ def __bool__(self) -> bool: next_pos: SlidingSyncStreamToken lists: Mapping[str, SlidingWindowList] - rooms: Dict[str, RoomResult] + rooms: dict[str, RoomResult] extensions: Extensions def __bool__(self) -> bool: @@ -485,7 +481,7 @@ def from_room_config( Args: room_params: `SlidingSyncConfig.SlidingSyncList` or `SlidingSyncConfig.RoomSubscription` """ - required_state_map: Dict[str, Set[str]] = {} + required_state_map: dict[str, set[str]] = {} for ( state_type, state_key, diff --git a/synapse/types/rest/client/__init__.py b/synapse/types/rest/client/__init__.py index 11d7e59b43a..4940fabd129 100644 --- a/synapse/types/rest/client/__init__.py +++ b/synapse/types/rest/client/__init__.py @@ -18,7 +18,7 @@ # [This file includes 
modifications made by New Vector Limited] # # -from typing import TYPE_CHECKING, Dict, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Optional, Union from synapse._pydantic_compat import ( Extra, @@ -72,7 +72,7 @@ class ThreepidRequestTokenBody(RequestBodyModel): @validator("id_access_token", always=True) def token_required_for_identity_server( - cls, token: Optional[str], values: Dict[str, object] + cls, token: Optional[str], values: dict[str, object] ) -> Optional[str]: if values.get("id_server") is not None and token is None: raise ValueError("id_access_token is required if an id_server is supplied.") @@ -144,7 +144,7 @@ class CommonRoomParameters(RequestBodyModel): (Max 1000 messages) """ - required_state: List[Tuple[StrictStr, StrictStr]] + required_state: list[tuple[StrictStr, StrictStr]] # mypy workaround via https://github.com/pydantic/pydantic/issues/156#issuecomment-1130883884 if TYPE_CHECKING: timeline_limit: int @@ -242,21 +242,21 @@ class Filters(RequestBodyModel): """ is_dm: Optional[StrictBool] = None - spaces: Optional[List[StrictStr]] = None + spaces: Optional[list[StrictStr]] = None is_encrypted: Optional[StrictBool] = None is_invite: Optional[StrictBool] = None - room_types: Optional[List[Union[StrictStr, None]]] = None - not_room_types: Optional[List[Union[StrictStr, None]]] = None + room_types: Optional[list[Union[StrictStr, None]]] = None + not_room_types: Optional[list[Union[StrictStr, None]]] = None room_name_like: Optional[StrictStr] = None - tags: Optional[List[StrictStr]] = None - not_tags: Optional[List[StrictStr]] = None + tags: Optional[list[StrictStr]] = None + not_tags: Optional[list[StrictStr]] = None # mypy workaround via https://github.com/pydantic/pydantic/issues/156#issuecomment-1130883884 if TYPE_CHECKING: - ranges: Optional[List[Tuple[int, int]]] = None + ranges: Optional[list[tuple[int, int]]] = None else: ranges: Optional[ - List[Tuple[conint(ge=0, strict=True), conint(ge=0, strict=True)]] + list[tuple[conint(ge=0, strict=True), conint(ge=0, strict=True)]] ] = None # type: ignore[valid-type] slow_get_all_rooms: Optional[StrictBool] = False filters: Optional[Filters] = None @@ -327,9 +327,9 @@ class AccountDataExtension(RequestBodyModel): enabled: Optional[StrictBool] = False # Process all lists defined in the Sliding Window API. (This is the default.) - lists: Optional[List[StrictStr]] = ["*"] + lists: Optional[list[StrictStr]] = ["*"] # Process all room subscriptions defined in the Room Subscription API. (This is the default.) - rooms: Optional[List[StrictStr]] = ["*"] + rooms: Optional[list[StrictStr]] = ["*"] class ReceiptsExtension(RequestBodyModel): """The Receipts extension (MSC3960) @@ -344,9 +344,9 @@ class ReceiptsExtension(RequestBodyModel): enabled: Optional[StrictBool] = False # Process all lists defined in the Sliding Window API. (This is the default.) - lists: Optional[List[StrictStr]] = ["*"] + lists: Optional[list[StrictStr]] = ["*"] # Process all room subscriptions defined in the Room Subscription API. (This is the default.) - rooms: Optional[List[StrictStr]] = ["*"] + rooms: Optional[list[StrictStr]] = ["*"] class TypingExtension(RequestBodyModel): """The Typing Notification extension (MSC3961) @@ -361,9 +361,9 @@ class TypingExtension(RequestBodyModel): enabled: Optional[StrictBool] = False # Process all lists defined in the Sliding Window API. (This is the default.) 
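Unlike a plain Python default argument, the mutable `= ["*"]` defaults on these extension models are safe: pydantic copies field defaults for each instance rather than sharing one object. A standalone sketch (importing pydantic directly rather than synapse._pydantic_compat):

from typing import Optional
from pydantic import BaseModel

class ListsExtension(BaseModel):
    lists: Optional[list[str]] = ["*"]

a = ListsExtension()
b = ListsExtension()
assert a.lists is not None
a.lists.append("extra")
# The default was copied per instance, so `b` is unaffected.
assert b.lists == ["*"]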
- lists: Optional[List[StrictStr]] = ["*"] + lists: Optional[list[StrictStr]] = ["*"] # Process all room subscriptions defined in the Room Subscription API. (This is the default.) - rooms: Optional[List[StrictStr]] = ["*"] + rooms: Optional[list[StrictStr]] = ["*"] class ThreadSubscriptionsExtension(RequestBodyModel): """The Thread Subscriptions extension (MSC4308) @@ -389,18 +389,18 @@ class ThreadSubscriptionsExtension(RequestBodyModel): # mypy workaround via https://github.com/pydantic/pydantic/issues/156#issuecomment-1130883884 if TYPE_CHECKING: - lists: Optional[Dict[str, SlidingSyncList]] = None + lists: Optional[dict[str, SlidingSyncList]] = None else: - lists: Optional[Dict[constr(max_length=64, strict=True), SlidingSyncList]] = ( + lists: Optional[dict[constr(max_length=64, strict=True), SlidingSyncList]] = ( None # type: ignore[valid-type] ) - room_subscriptions: Optional[Dict[StrictStr, RoomSubscription]] = None + room_subscriptions: Optional[dict[StrictStr, RoomSubscription]] = None extensions: Optional[Extensions] = None @validator("lists") def lists_length_check( - cls, value: Optional[Dict[str, SlidingSyncList]] - ) -> Optional[Dict[str, SlidingSyncList]]: + cls, value: Optional[dict[str, SlidingSyncList]] + ) -> Optional[dict[str, SlidingSyncList]]: if value is not None: assert len(value) <= 100, f"Max lists: 100 but saw {len(value)}" return value diff --git a/synapse/types/state.py b/synapse/types/state.py index 6420e050a58..1b4de61d3e1 100644 --- a/synapse/types/state.py +++ b/synapse/types/state.py @@ -25,13 +25,9 @@ Any, Callable, Collection, - Dict, Iterable, - List, Mapping, Optional, - Set, - Tuple, TypeVar, ) @@ -42,7 +38,7 @@ from synapse.types import MutableStateMap, StateKey, StateMap if TYPE_CHECKING: - from typing import FrozenSet # noqa: used within quoted type hint; flake8 sad + pass # noqa: used within quoted type hint; flake8 sad logger = logging.getLogger(__name__) @@ -64,7 +60,7 @@ class StateFilter: appear in `types`. """ - types: "immutabledict[str, Optional[FrozenSet[str]]]" + types: "immutabledict[str, Optional[frozenset[str]]]" include_others: bool = False def __attrs_post_init__(self) -> None: @@ -105,7 +101,7 @@ def none() -> "StateFilter": return _NONE_STATE_FILTER @staticmethod - def from_types(types: Iterable[Tuple[str, Optional[str]]]) -> "StateFilter": + def from_types(types: Iterable[tuple[str, Optional[str]]]) -> "StateFilter": """Creates a filter that only fetches the given types Args: @@ -115,7 +111,7 @@ def from_types(types: Iterable[Tuple[str, Optional[str]]]) -> "StateFilter": Returns: The new state filter. """ - type_dict: Dict[str, Optional[Set[str]]] = {} + type_dict: dict[str, Optional[set[str]]] = {} for typ, s in types: if typ in type_dict: if type_dict[typ] is None: @@ -134,7 +130,7 @@ def from_types(types: Iterable[Tuple[str, Optional[str]]]) -> "StateFilter": ) ) - def to_types(self) -> Iterable[Tuple[str, Optional[str]]]: + def to_types(self) -> Iterable[tuple[str, Optional[str]]]: """The inverse to `from_types`.""" for event_type, state_keys in self.types.items(): if state_keys is None: @@ -167,7 +163,7 @@ def freeze( Returns a (frozen) StateFilter with the same contents as the parameters specified here, which can be made of mutable types. 
""" - types_with_frozen_values: Dict[str, Optional[FrozenSet[str]]] = {} + types_with_frozen_values: dict[str, Optional[frozenset[str]]] = {} for state_types, state_keys in types.items(): if state_keys is not None: types_with_frozen_values[state_types] = frozenset(state_keys) @@ -240,7 +236,7 @@ def return_expanded(self) -> "StateFilter": # We want to return all non-members return _ALL_NON_MEMBER_STATE_FILTER - def make_sql_filter_clause(self) -> Tuple[str, List[str]]: + def make_sql_filter_clause(self) -> tuple[str, list[str]]: """Converts the filter to an SQL clause. For example: @@ -257,7 +253,7 @@ def make_sql_filter_clause(self) -> Tuple[str, List[str]]: """ where_clause = "" - where_args: List[str] = [] + where_args: list[str] = [] if self.is_full(): return where_clause, where_args @@ -353,7 +349,7 @@ def has_wildcards(self) -> bool: state_keys is None for state_keys in self.types.values() ) - def concrete_types(self) -> List[Tuple[str, str]]: + def concrete_types(self) -> list[tuple[str, str]]: """Returns a list of concrete type/state_keys (i.e. not None) that will be fetched. This will be a complete list if `has_wildcards` returns False, but otherwise will be a subset (or even empty). @@ -368,7 +364,7 @@ def concrete_types(self) -> List[Tuple[str, str]]: for s in state_keys ] - def wildcard_types(self) -> List[str]: + def wildcard_types(self) -> list[str]: """Returns a list of event types which require us to fetch all state keys. This will be empty unless `has_wildcards` returns True. @@ -377,7 +373,7 @@ def wildcard_types(self) -> List[str]: """ return [t for t, state_keys in self.types.items() if state_keys is None] - def get_member_split(self) -> Tuple["StateFilter", "StateFilter"]: + def get_member_split(self) -> tuple["StateFilter", "StateFilter"]: """Return the filter split into two: one which assumes it's exclusively matching against member state, and one which assumes it's matching against non member state. @@ -416,7 +412,7 @@ def get_member_split(self) -> Tuple["StateFilter", "StateFilter"]: def _decompose_into_four_parts( self, - ) -> Tuple[Tuple[bool, Set[str]], Tuple[Set[str], Set[StateKey]]]: + ) -> tuple[tuple[bool, set[str]], tuple[set[str], set[StateKey]]]: """ Decomposes this state filter into 4 constituent parts, which can be thought of as this: @@ -432,18 +428,18 @@ def _decompose_into_four_parts( correspondence. """ is_all = self.include_others - excluded_types: Set[str] = {t for t in self.types if is_all} - wildcard_types: Set[str] = {t for t, s in self.types.items() if s is None} - concrete_keys: Set[StateKey] = set(self.concrete_types()) + excluded_types: set[str] = {t for t in self.types if is_all} + wildcard_types: set[str] = {t for t, s in self.types.items() if s is None} + concrete_keys: set[StateKey] = set(self.concrete_types()) return (is_all, excluded_types), (wildcard_types, concrete_keys) @staticmethod def _recompose_from_four_parts( all_part: bool, - minus_wildcards: Set[str], - plus_wildcards: Set[str], - plus_state_keys: Set[StateKey], + minus_wildcards: set[str], + plus_wildcards: set[str], + plus_state_keys: set[StateKey], ) -> "StateFilter": """ Recomposes a state filter from 4 parts. @@ -454,7 +450,7 @@ def _recompose_from_four_parts( # {state type -> set of state keys OR None for wildcard} # (The same structure as that of a StateFilter.) 
- new_types: Dict[str, Optional[Set[str]]] = {} + new_types: dict[str, Optional[set[str]]] = {} # if we start with all, insert the excluded statetypes as empty sets # to prevent them from being included diff --git a/synapse/util/__init__.py b/synapse/util/__init__.py index 2ae2e245a97..0d3b7ca7402 100644 --- a/synapse/util/__init__.py +++ b/synapse/util/__init__.py @@ -23,12 +23,10 @@ import logging import typing from typing import ( - Dict, Iterator, Mapping, Optional, Sequence, - Set, TypeVar, ) @@ -119,8 +117,8 @@ class MutableOverlayMapping(collections.abc.MutableMapping[K, V]): """ _underlying_map: Mapping[K, V] - _mutable_map: Dict[K, V] = attr.ib(factory=dict) - _deletions: Set[K] = attr.ib(factory=set) + _mutable_map: dict[K, V] = attr.ib(factory=dict) + _deletions: set[K] = attr.ib(factory=set) def __getitem__(self, key: K) -> V: if key in self._deletions: diff --git a/synapse/util/async_helpers.py b/synapse/util/async_helpers.py index 2a167f209cb..c568b377d24 100644 --- a/synapse/util/async_helpers.py +++ b/synapse/util/async_helpers.py @@ -35,16 +35,12 @@ Callable, Collection, Coroutine, - Dict, Generator, Generic, Hashable, Iterable, - List, Literal, Optional, - Set, - Tuple, TypeVar, Union, overload, @@ -108,8 +104,8 @@ class ObservableDeferred(Generic[_T], AbstractObservableDeferred[_T]): __slots__ = ["_deferred", "_observers", "_result"] _deferred: "defer.Deferred[_T]" - _observers: Union[List["defer.Deferred[_T]"], Tuple[()]] - _result: Union[None, Tuple[Literal[True], _T], Tuple[Literal[False], Failure]] + _observers: Union[list["defer.Deferred[_T]"], tuple[()]] + _result: Union[None, tuple[Literal[True], _T], tuple[Literal[False], Failure]] def __init__(self, deferred: "defer.Deferred[_T]", consumeErrors: bool = False): object.__setattr__(self, "_deferred", deferred) @@ -268,7 +264,7 @@ async def yieldable_gather_results( iter: Iterable[T], *args: P.args, **kwargs: P.kwargs, -) -> List[R]: +) -> list[R]: """Executes the function with each argument concurrently. Args: @@ -310,7 +306,7 @@ async def yieldable_gather_results_delaying_cancellation( iter: Iterable[T], *args: P.args, **kwargs: P.kwargs, -) -> List[R]: +) -> list[R]: """Executes the function with each argument concurrently. Cancellation is delayed until after all the results have been gathered. @@ -350,49 +346,49 @@ async def yieldable_gather_results_delaying_cancellation( @overload def gather_results( - deferredList: Tuple[()], consumeErrors: bool = ... -) -> "defer.Deferred[Tuple[()]]": ... + deferredList: tuple[()], consumeErrors: bool = ... +) -> "defer.Deferred[tuple[()]]": ... @overload def gather_results( - deferredList: Tuple["defer.Deferred[T1]"], + deferredList: tuple["defer.Deferred[T1]"], consumeErrors: bool = ..., -) -> "defer.Deferred[Tuple[T1]]": ... +) -> "defer.Deferred[tuple[T1]]": ... @overload def gather_results( - deferredList: Tuple["defer.Deferred[T1]", "defer.Deferred[T2]"], + deferredList: tuple["defer.Deferred[T1]", "defer.Deferred[T2]"], consumeErrors: bool = ..., -) -> "defer.Deferred[Tuple[T1, T2]]": ... +) -> "defer.Deferred[tuple[T1, T2]]": ... @overload def gather_results( - deferredList: Tuple[ + deferredList: tuple[ "defer.Deferred[T1]", "defer.Deferred[T2]", "defer.Deferred[T3]" ], consumeErrors: bool = ..., -) -> "defer.Deferred[Tuple[T1, T2, T3]]": ... +) -> "defer.Deferred[tuple[T1, T2, T3]]": ... 
@overload def gather_results( - deferredList: Tuple[ + deferredList: tuple[ "defer.Deferred[T1]", "defer.Deferred[T2]", "defer.Deferred[T3]", "defer.Deferred[T4]", ], consumeErrors: bool = ..., -) -> "defer.Deferred[Tuple[T1, T2, T3, T4]]": ... +) -> "defer.Deferred[tuple[T1, T2, T3, T4]]": ... def gather_results( # type: ignore[misc] - deferredList: Tuple["defer.Deferred[T1]", ...], + deferredList: tuple["defer.Deferred[T1]", ...], consumeErrors: bool = False, -) -> "defer.Deferred[Tuple[T1, ...]]": +) -> "defer.Deferred[tuple[T1, ...]]": """Combines a tuple of `Deferred`s into a single `Deferred`. Wraps `defer.gatherResults` to provide type annotations that support heterogenous @@ -406,50 +402,50 @@ def gather_results( # type: ignore[misc] @overload async def gather_optional_coroutines( - *coroutines: Unpack[Tuple[Optional[Coroutine[Any, Any, T1]]]], -) -> Tuple[Optional[T1]]: ... + *coroutines: Unpack[tuple[Optional[Coroutine[Any, Any, T1]]]], +) -> tuple[Optional[T1]]: ... @overload async def gather_optional_coroutines( *coroutines: Unpack[ - Tuple[ + tuple[ Optional[Coroutine[Any, Any, T1]], Optional[Coroutine[Any, Any, T2]], ] ], -) -> Tuple[Optional[T1], Optional[T2]]: ... +) -> tuple[Optional[T1], Optional[T2]]: ... @overload async def gather_optional_coroutines( *coroutines: Unpack[ - Tuple[ + tuple[ Optional[Coroutine[Any, Any, T1]], Optional[Coroutine[Any, Any, T2]], Optional[Coroutine[Any, Any, T3]], ] ], -) -> Tuple[Optional[T1], Optional[T2], Optional[T3]]: ... +) -> tuple[Optional[T1], Optional[T2], Optional[T3]]: ... @overload async def gather_optional_coroutines( *coroutines: Unpack[ - Tuple[ + tuple[ Optional[Coroutine[Any, Any, T1]], Optional[Coroutine[Any, Any, T2]], Optional[Coroutine[Any, Any, T3]], Optional[Coroutine[Any, Any, T4]], ] ], -) -> Tuple[Optional[T1], Optional[T2], Optional[T3], Optional[T4]]: ... +) -> tuple[Optional[T1], Optional[T2], Optional[T3], Optional[T4]]: ... @overload async def gather_optional_coroutines( *coroutines: Unpack[ - Tuple[ + tuple[ Optional[Coroutine[Any, Any, T1]], Optional[Coroutine[Any, Any, T2]], Optional[Coroutine[Any, Any, T3]], @@ -457,13 +453,13 @@ async def gather_optional_coroutines( Optional[Coroutine[Any, Any, T5]], ] ], -) -> Tuple[Optional[T1], Optional[T2], Optional[T3], Optional[T4], Optional[T5]]: ... +) -> tuple[Optional[T1], Optional[T2], Optional[T3], Optional[T4], Optional[T5]]: ... @overload async def gather_optional_coroutines( *coroutines: Unpack[ - Tuple[ + tuple[ Optional[Coroutine[Any, Any, T1]], Optional[Coroutine[Any, Any, T2]], Optional[Coroutine[Any, Any, T3]], @@ -472,14 +468,14 @@ async def gather_optional_coroutines( Optional[Coroutine[Any, Any, T6]], ] ], -) -> Tuple[ +) -> tuple[ Optional[T1], Optional[T2], Optional[T3], Optional[T4], Optional[T5], Optional[T6] ]: ... async def gather_optional_coroutines( - *coroutines: Unpack[Tuple[Optional[Coroutine[Any, Any, T1]], ...]], -) -> Tuple[Optional[T1], ...]: + *coroutines: Unpack[tuple[Optional[Coroutine[Any, Any, T1]], ...]], +) -> tuple[Optional[T1], ...]: """Helper function that allows waiting on multiple coroutines at once. The return value is a tuple of the return values of the coroutines in order. @@ -563,7 +559,7 @@ def __init__( self._clock = clock # key_to_defer is a map from the key to a _LinearizerEntry. 
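`gather_optional_coroutines` lets a caller pass `None` in any slot and get `None` back in the same position, with the real coroutines run concurrently. A plain-asyncio analogue of that contract (the Synapse implementation itself runs on Twisted):

import asyncio
from typing import Any, Coroutine, Optional

async def gather_optional(
    *coroutines: Optional[Coroutine[Any, Any, Any]],
) -> tuple[Optional[Any], ...]:
    results = await asyncio.gather(*(c for c in coroutines if c is not None))
    it = iter(results)
    # None slots pass straight through; results keep their positions.
    return tuple(None if c is None else next(it) for c in coroutines)

async def main() -> None:
    async def double(x: int) -> int:
        return x * 2

    assert await gather_optional(double(1), None, double(3)) == (2, None, 6)

asyncio.run(main())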
- self.key_to_defer: Dict[Hashable, _LinearizerEntry] = {} + self.key_to_defer: dict[Hashable, _LinearizerEntry] = {} def is_queued(self, key: Hashable) -> bool: """Checks whether there is a process queued up waiting""" @@ -698,10 +694,10 @@ class ReadWriteLock: def __init__(self) -> None: # Latest readers queued - self.key_to_current_readers: Dict[str, Set[defer.Deferred]] = {} + self.key_to_current_readers: dict[str, set[defer.Deferred]] = {} # Latest writer queued - self.key_to_current_writer: Dict[str, defer.Deferred] = {} + self.key_to_current_writer: dict[str, defer.Deferred] = {} def read(self, key: str) -> AsyncContextManager: @asynccontextmanager @@ -968,7 +964,7 @@ class AwakenableSleeper: """ def __init__(self, clock: Clock) -> None: - self._streams: Dict[str, Set[defer.Deferred[None]]] = {} + self._streams: dict[str, set[defer.Deferred[None]]] = {} self._clock = clock def wake(self, name: str) -> None: diff --git a/synapse/util/batching_queue.py b/synapse/util/batching_queue.py index f77301afd81..514abcbec16 100644 --- a/synapse/util/batching_queue.py +++ b/synapse/util/batching_queue.py @@ -24,12 +24,8 @@ TYPE_CHECKING, Awaitable, Callable, - Dict, Generic, Hashable, - List, - Set, - Tuple, TypeVar, ) @@ -102,7 +98,7 @@ def __init__( name: str, hs: "HomeServer", clock: Clock, - process_batch_callback: Callable[[List[V]], Awaitable[R]], + process_batch_callback: Callable[[list[V]], Awaitable[R]], ): self._name = name self.hs = hs @@ -110,11 +106,11 @@ def __init__( self._clock = clock # The set of keys currently being processed. - self._processing_keys: Set[Hashable] = set() + self._processing_keys: set[Hashable] = set() # The currently pending batch of values by key, with a Deferred to call # with the result of the corresponding `_process_batch_callback` call. - self._next_values: Dict[Hashable, List[Tuple[V, defer.Deferred]]] = {} + self._next_values: dict[Hashable, list[tuple[V, defer.Deferred]]] = {} # The function to call with batches of values. 
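The queue above buffers `(value, Deferred)` pairs per key in `_next_values` and hands each accumulated batch to the callback in one call. A stripped-down sketch of that keyed-batching shape (without the Deferred plumbing):

from collections import defaultdict
from typing import Callable, Hashable

_next_values: dict[Hashable, list[int]] = defaultdict(list)

def add_to_queue(key: Hashable, value: int) -> None:
    _next_values[key].append(value)

def flush(key: Hashable, process_batch: Callable[[list[int]], int]) -> int:
    # One callback invocation per accumulated batch, as above.
    return process_batch(_next_values.pop(key, []))

add_to_queue("!room:example.org", 1)
add_to_queue("!room:example.org", 2)
assert flush("!room:example.org", sum) == 3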
self._process_batch_callback = process_batch_callback diff --git a/synapse/util/caches/__init__.py b/synapse/util/caches/__init__.py index 08ff842af0f..c799fca5500 100644 --- a/synapse/util/caches/__init__.py +++ b/synapse/util/caches/__init__.py @@ -24,7 +24,7 @@ import typing from enum import Enum, auto from sys import intern -from typing import Any, Callable, Dict, List, Optional, Sized, TypeVar +from typing import Any, Callable, Optional, Sized, TypeVar import attr from prometheus_client import REGISTRY @@ -162,7 +162,7 @@ def clear_memory_usage(self) -> None: if self.memory_usage is not None: self.memory_usage = 0 - def describe(self) -> List[str]: + def describe(self) -> list[str]: return [] def collect(self) -> None: @@ -283,7 +283,7 @@ def intern_string(string: T) -> T: return string -def intern_dict(dictionary: Dict[str, Any]) -> Dict[str, Any]: +def intern_dict(dictionary: dict[str, Any]) -> dict[str, Any]: """Takes a dictionary and interns well known keys and their values""" return { KNOWN_KEYS.get(key, key): _intern_known_values(key, value) diff --git a/synapse/util/caches/deferred_cache.py b/synapse/util/caches/deferred_cache.py index 016acbac710..380f2a78caa 100644 --- a/synapse/util/caches/deferred_cache.py +++ b/synapse/util/caches/deferred_cache.py @@ -26,13 +26,10 @@ from typing import ( Callable, Collection, - Dict, Generic, MutableMapping, Optional, - Set, Sized, - Tuple, TypeVar, Union, cast, @@ -203,7 +200,7 @@ def get_bulk( self, keys: Collection[KT], callback: Optional[Callable[[], None]] = None, - ) -> Tuple[Dict[KT, VT], Optional["defer.Deferred[Dict[KT, VT]]"], Collection[KT]]: + ) -> tuple[dict[KT, VT], Optional["defer.Deferred[dict[KT, VT]]"], Collection[KT]]: """Bulk lookup of items in the cache. Returns: @@ -458,7 +455,7 @@ class CacheEntrySingle(CacheEntry[KT, VT]): def __init__(self, deferred: "defer.Deferred[VT]") -> None: self._deferred = ObservableDeferred(deferred, consumeErrors=True) - self._callbacks: Set[Callable[[], None]] = set() + self._callbacks: set[Callable[[], None]] = set() def deferred(self, key: KT) -> "defer.Deferred[VT]": return self._deferred.observe() @@ -481,9 +478,9 @@ class CacheMultipleEntries(CacheEntry[KT, VT]): __slots__ = ["_deferred", "_callbacks", "_global_callbacks"] def __init__(self) -> None: - self._deferred: Optional[ObservableDeferred[Dict[KT, VT]]] = None - self._callbacks: Dict[KT, Set[Callable[[], None]]] = {} - self._global_callbacks: Set[Callable[[], None]] = set() + self._deferred: Optional[ObservableDeferred[dict[KT, VT]]] = None + self._callbacks: dict[KT, set[Callable[[], None]]] = {} + self._global_callbacks: set[Callable[[], None]] = set() def deferred(self, key: KT) -> "defer.Deferred[VT]": if not self._deferred: @@ -513,7 +510,7 @@ def add_global_invalidation_callback( def complete_bulk( self, cache: DeferredCache[KT, VT], - result: Dict[KT, VT], + result: dict[KT, VT], ) -> None: """Called when there is a result""" for key, value in result.items(): diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py index 6e3c8eada98..b48bb6e918a 100644 --- a/synapse/util/caches/descriptors.py +++ b/synapse/util/caches/descriptors.py @@ -30,7 +30,6 @@ Generic, Hashable, Iterable, - List, Mapping, Optional, Protocol, @@ -63,9 +62,9 @@ class CachedFunction(Generic[F]): - invalidate: Callable[[Tuple[Any, ...]], None] + invalidate: Callable[[tuple[Any, ...]], None] invalidate_all: Callable[[], None] - prefill: Callable[[Tuple[Any, ...], Any], None] + prefill: Callable[[tuple[Any, ...], Any], 
None] cache: Any = None num_args: Any = None @@ -364,7 +363,7 @@ def __init__( def __get__( self, obj: Optional[Any], objtype: Optional[Type] = None - ) -> Callable[..., "defer.Deferred[Dict[Hashable, Any]]"]: + ) -> Callable[..., "defer.Deferred[dict[Hashable, Any]]"]: cached_method = getattr(obj, self.cached_method_name) cache: DeferredCache[CacheKey, Any] = cached_method.cache num_args = cached_method.num_args @@ -412,7 +411,7 @@ def cache_key_to_arg(key: tuple) -> Hashable: results = {cache_key_to_arg(key): v for key, v in immediate_results.items()} - cached_defers: List["defer.Deferred[Any]"] = [] + cached_defers: list["defer.Deferred[Any]"] = [] if pending_deferred: def update_results(r: Dict) -> None: @@ -425,7 +424,7 @@ def update_results(r: Dict) -> None: if missing: cache_entry = cache.start_bulk_input(missing, invalidate_callback) - def complete_all(res: Dict[Hashable, Any]) -> None: + def complete_all(res: dict[Hashable, Any]) -> None: missing_results = {} for key in missing: arg = cache_key_to_arg(key) @@ -478,7 +477,7 @@ class _CacheContext: Cache = Union[DeferredCache, LruCache] _cache_context_objects: """WeakValueDictionary[ - Tuple["_CacheContext.Cache", CacheKey], "_CacheContext" + tuple["_CacheContext.Cache", CacheKey], "_CacheContext" ]""" = WeakValueDictionary() def __init__(self, cache: "_CacheContext.Cache", cache_key: CacheKey) -> None: diff --git a/synapse/util/caches/dictionary_cache.py b/synapse/util/caches/dictionary_cache.py index eb5493d322d..dd6f413e79f 100644 --- a/synapse/util/caches/dictionary_cache.py +++ b/synapse/util/caches/dictionary_cache.py @@ -22,13 +22,10 @@ import logging import threading from typing import ( - Dict, Generic, Iterable, Literal, Optional, - Set, - Tuple, TypeVar, Union, ) @@ -65,8 +62,8 @@ class DictionaryEntry(Generic[DKT, DV]): """ full: bool - known_absent: Set[DKT] - value: Dict[DKT, DV] + known_absent: set[DKT] + value: dict[DKT, DV] def __len__(self) -> int: return len(self.value) @@ -160,8 +157,8 @@ def __init__( # * A key of `(KT, DKT)` has a value of `_PerKeyValue` # * A key of `(KT, _FullCacheKey.KEY)` has a value of `Dict[DKT, DV]` self.cache: LruCache[ - Tuple[KT, Union[DKT, Literal[_FullCacheKey.KEY]]], - Union[_PerKeyValue, Dict[DKT, DV]], + tuple[KT, Union[DKT, Literal[_FullCacheKey.KEY]]], + Union[_PerKeyValue, dict[DKT, DV]], ] = LruCache( max_size=max_entries, clock=clock, @@ -297,7 +294,7 @@ def update( self, sequence: int, key: KT, - value: Dict[DKT, DV], + value: dict[DKT, DV], fetched_keys: Optional[Iterable[DKT]] = None, ) -> None: """Updates the entry in the cache. @@ -332,7 +329,7 @@ def update( self._update_subset(key, value, fetched_keys) def _update_subset( - self, key: KT, value: Dict[DKT, DV], fetched_keys: Iterable[DKT] + self, key: KT, value: dict[DKT, DV], fetched_keys: Iterable[DKT] ) -> None: """Add the given dictionary values as explicit keys in the cache. diff --git a/synapse/util/caches/lrucache.py b/synapse/util/caches/lrucache.py index 324acb728ab..04549ab65fc 100644 --- a/synapse/util/caches/lrucache.py +++ b/synapse/util/caches/lrucache.py @@ -30,15 +30,10 @@ Any, Callable, Collection, - Dict, Generic, Iterable, - List, Literal, Optional, - Set, - Tuple, - Type, TypeVar, Union, cast, @@ -308,7 +303,7 @@ def __init__( # footprint down. Storing `None` is free as its a singleton, while empty # lists are 56 bytes (and empty sets are 216 bytes, if we did the naive # thing and used sets). 
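The byte figures in the comment above are easy to sanity-check; exact numbers vary a little across CPython versions and platforms:

import sys

print(sys.getsizeof([]))     # typically 56 bytes on 64-bit CPython
print(sys.getsizeof(set()))  # typically 216 bytes
# None is a shared singleton, so storing it costs no extra allocation.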
- self.callbacks: Optional[List[Callable[[], None]]] = None + self.callbacks: Optional[list[Callable[[], None]]] = None self.add_callbacks(callbacks) @@ -404,7 +399,7 @@ def __init__( clock: Clock, server_name: str, cache_name: str, - cache_type: Type[Union[dict, TreeCache]] = dict, + cache_type: type[Union[dict, TreeCache]] = dict, size_callback: Optional[Callable[[VT], int]] = None, metrics_collection_callback: Optional[Callable[[], None]] = None, apply_cache_factor_from_config: bool = True, @@ -420,7 +415,7 @@ def __init__( clock: Clock, server_name: str, cache_name: Literal[None] = None, - cache_type: Type[Union[dict, TreeCache]] = dict, + cache_type: type[Union[dict, TreeCache]] = dict, size_callback: Optional[Callable[[VT], int]] = None, metrics_collection_callback: Optional[Callable[[], None]] = None, apply_cache_factor_from_config: bool = True, @@ -435,7 +430,7 @@ def __init__( clock: Clock, server_name: str, cache_name: Optional[str] = None, - cache_type: Type[Union[dict, TreeCache]] = dict, + cache_type: type[Union[dict, TreeCache]] = dict, size_callback: Optional[Callable[[VT], int]] = None, metrics_collection_callback: Optional[Callable[[], None]] = None, apply_cache_factor_from_config: bool = True, @@ -489,7 +484,7 @@ def __init__( Note: The new key does not have to be unique. """ - cache: Union[Dict[KT, _Node[KT, VT]], TreeCache] = cache_type() + cache: Union[dict[KT, _Node[KT, VT]], TreeCache] = cache_type() self.cache = cache # Used for introspection. self.apply_cache_factor_from_config = apply_cache_factor_from_config @@ -529,7 +524,7 @@ def __init__( lock = threading.Lock() - extra_index: Dict[KT, Set[KT]] = {} + extra_index: dict[KT, set[KT]] = {} def evict() -> None: while cache_len() > self.max_size: @@ -682,21 +677,21 @@ def cache_get_multi( key: tuple, default: Literal[None] = None, update_metrics: bool = True, - ) -> Union[None, Iterable[Tuple[KT, VT]]]: ... + ) -> Union[None, Iterable[tuple[KT, VT]]]: ... @overload def cache_get_multi( key: tuple, default: T, update_metrics: bool = True, - ) -> Union[T, Iterable[Tuple[KT, VT]]]: ... + ) -> Union[T, Iterable[tuple[KT, VT]]]: ... @synchronized def cache_get_multi( key: tuple, default: Optional[T] = None, update_metrics: bool = True, - ) -> Union[None, T, Iterable[Tuple[KT, VT]]]: + ) -> Union[None, T, Iterable[tuple[KT, VT]]]: """Returns a generator yielding all entries under the given key. Can only be used if backed by a tree cache. diff --git a/synapse/util/caches/response_cache.py b/synapse/util/caches/response_cache.py index 3d39357236a..e82036d7e01 100644 --- a/synapse/util/caches/response_cache.py +++ b/synapse/util/caches/response_cache.py @@ -24,7 +24,6 @@ Any, Awaitable, Callable, - Dict, Generic, Iterable, Optional, @@ -119,7 +118,7 @@ def __init__( timeout_ms enable_logging """ - self._result_cache: Dict[KV, ResponseCacheEntry] = {} + self._result_cache: dict[KV, ResponseCacheEntry] = {} self.clock = clock self.timeout_sec = timeout_ms / 1000.0 diff --git a/synapse/util/caches/stream_change_cache.py b/synapse/util/caches/stream_change_cache.py index 2cffd352d82..552570fbb9b 100644 --- a/synapse/util/caches/stream_change_cache.py +++ b/synapse/util/caches/stream_change_cache.py @@ -21,7 +21,7 @@ import logging import math -from typing import Collection, Dict, FrozenSet, List, Mapping, Optional, Set, Union +from typing import Collection, Mapping, Optional, Union import attr from sortedcontainers import SortedDict @@ -45,14 +45,14 @@ class AllEntitiesChangedResult: that callers do the correct checks. 
""" - _entities: Optional[List[EntityType]] + _entities: Optional[list[EntityType]] @property def hit(self) -> bool: return self._entities is not None @property - def entities(self) -> List[EntityType]: + def entities(self) -> list[EntityType]: assert self._entities is not None return self._entities @@ -94,11 +94,11 @@ def __init__( self._max_size = math.floor(max_size) # map from stream id to the set of entities which changed at that stream id. - self._cache: SortedDict[int, Set[EntityType]] = SortedDict() + self._cache: SortedDict[int, set[EntityType]] = SortedDict() # map from entity to the stream ID of the latest change for that entity. # # Must be kept in sync with _cache. - self._entity_to_key: Dict[EntityType, int] = {} + self._entity_to_key: dict[EntityType, int] = {} # the earliest stream_pos for which we can reliably answer # get_all_entities_changed. In other words, one less than the earliest @@ -182,7 +182,7 @@ def has_entity_changed(self, entity: EntityType, stream_pos: int) -> bool: def get_entities_changed( self, entities: Collection[EntityType], stream_pos: int, _perf_factor: int = 1 - ) -> Union[Set[EntityType], FrozenSet[EntityType]]: + ) -> Union[set[EntityType], frozenset[EntityType]]: """ Returns the subset of the given entities that have had changes after the given position. @@ -291,7 +291,7 @@ def get_all_entities_changed(self, stream_pos: int) -> AllEntitiesChangedResult: if stream_pos < self._earliest_known_stream_pos: return AllEntitiesChangedResult(None) - changed_entities: List[EntityType] = [] + changed_entities: list[EntityType] = [] for k in self._cache.islice(start=self._cache.bisect_right(stream_pos)): changed_entities.extend(self._cache[k]) diff --git a/synapse/util/caches/ttlcache.py b/synapse/util/caches/ttlcache.py index 18c3a1e51c5..2be9463d6a3 100644 --- a/synapse/util/caches/ttlcache.py +++ b/synapse/util/caches/ttlcache.py @@ -21,7 +21,7 @@ import logging import time -from typing import Any, Callable, Dict, Generic, Tuple, TypeVar, Union +from typing import Any, Callable, Generic, TypeVar, Union import attr from sortedcontainers import SortedList @@ -56,7 +56,7 @@ def __init__( """ # map from key to _CacheEntry - self._data: Dict[KT, _CacheEntry[KT, VT]] = {} + self._data: dict[KT, _CacheEntry[KT, VT]] = {} # the _CacheEntries, sorted by expiry time self._expiry_list: SortedList[_CacheEntry[KT, VT]] = SortedList() @@ -113,7 +113,7 @@ def get(self, key: KT, default: T = SENTINEL) -> Union[VT, T]: self._metrics.inc_hits() return e.value - def get_with_expiry(self, key: KT) -> Tuple[VT, float, float]: + def get_with_expiry(self, key: KT) -> tuple[VT, float, float]: """Get a value, and its expiry time, from the cache Args: diff --git a/synapse/util/clock.py b/synapse/util/clock.py index 5e65cf32a4b..6557582629e 100644 --- a/synapse/util/clock.py +++ b/synapse/util/clock.py @@ -17,8 +17,6 @@ from typing import ( Any, Callable, - Dict, - List, ) from typing_extensions import ParamSpec @@ -62,10 +60,10 @@ def __init__(self, reactor: ISynapseThreadlessReactor, server_name: str) -> None self._delayed_call_id: int = 0 """Unique ID used to track delayed calls""" - self._looping_calls: List[LoopingCall] = [] + self._looping_calls: list[LoopingCall] = [] """List of active looping calls""" - self._call_id_to_delayed_call: Dict[int, IDelayedCall] = {} + self._call_id_to_delayed_call: dict[int, IDelayedCall] = {} """Mapping from unique call ID to delayed call""" self._is_shutdown = False diff --git a/synapse/util/daemonize.py b/synapse/util/daemonize.py index 
dba815040dd..411b47f939a 100644 --- a/synapse/util/daemonize.py +++ b/synapse/util/daemonize.py @@ -27,7 +27,7 @@ import signal import sys from types import FrameType, TracebackType -from typing import NoReturn, Optional, Type +from typing import NoReturn, Optional from synapse.logging.context import ( LoggingContext, @@ -119,7 +119,7 @@ def daemonize_process(pid_file: str, logger: logging.Logger, chdir: str = "/") - # also catch any other uncaught exceptions before we get that far.) def excepthook( - type_: Type[BaseException], + type_: type[BaseException], value: BaseException, traceback: Optional[TracebackType], ) -> None: diff --git a/synapse/util/distributor.py b/synapse/util/distributor.py index dec6536e4e1..e8df5399cd5 100644 --- a/synapse/util/distributor.py +++ b/synapse/util/distributor.py @@ -24,9 +24,7 @@ Any, Awaitable, Callable, - Dict, Generic, - List, Optional, TypeVar, Union, @@ -69,8 +67,8 @@ def __init__(self, hs: "HomeServer") -> None: (this should be `hs.hostname`). """ self.hs = hs - self.signals: Dict[str, Signal] = {} - self.pre_registration: Dict[str, List[Callable]] = {} + self.signals: dict[str, Signal] = {} + self.pre_registration: dict[str, list[Callable]] = {} def declare(self, name: str) -> None: if name in self.signals: @@ -122,7 +120,7 @@ class Signal(Generic[P]): def __init__(self, name: str): self.name: str = name - self.observers: List[Callable[P, Any]] = [] + self.observers: list[Callable[P, Any]] = [] def observe(self, observer: Callable[P, Any]) -> None: """Adds a new callable to the observer list which will be invoked by @@ -131,7 +129,7 @@ def observe(self, observer: Callable[P, Any]) -> None: Each observer callable may return a Deferred.""" self.observers.append(observer) - def fire(self, *args: P.args, **kwargs: P.kwargs) -> "defer.Deferred[List[Any]]": + def fire(self, *args: P.args, **kwargs: P.kwargs) -> "defer.Deferred[list[Any]]": """Invokes every callable in the observer list, passing in the args and kwargs. Exceptions thrown by observers are logged but ignored. It is not an error to fire a signal with no observers. diff --git a/synapse/util/events.py b/synapse/util/events.py index 4808268702a..e41799b1f77 100644 --- a/synapse/util/events.py +++ b/synapse/util/events.py @@ -13,7 +13,7 @@ # # -from typing import Any, List, Optional +from typing import Any, Optional from synapse._pydantic_compat import Field, StrictStr, ValidationError, validator from synapse.types import JsonDict @@ -52,7 +52,7 @@ class MTopic(ParseModel): See `TopicContentBlock` in the Matrix specification. """ - m_text: Optional[List[MTextRepresentation]] = Field(alias="m.text") + m_text: Optional[list[MTextRepresentation]] = Field(alias="m.text") """ An ordered array of textual representations in different mimetypes. 
""" @@ -63,7 +63,7 @@ class MTopic(ParseModel): @validator("m_text", pre=True) def ignore_invalid_representations( cls, m_text: Any - ) -> Optional[List[MTextRepresentation]]: + ) -> Optional[list[MTextRepresentation]]: if not isinstance(m_text, list): raise ValueError("m.text must be a list") representations = [] diff --git a/synapse/util/gai_resolver.py b/synapse/util/gai_resolver.py index 3c7a966e879..e07003f1af5 100644 --- a/synapse/util/gai_resolver.py +++ b/synapse/util/gai_resolver.py @@ -17,12 +17,9 @@ from typing import ( TYPE_CHECKING, Callable, - List, NoReturn, Optional, Sequence, - Tuple, - Type, Union, ) @@ -91,13 +88,13 @@ def cancel(self) -> NoReturn: } -_GETADDRINFO_RESULT = List[ - Tuple[ +_GETADDRINFO_RESULT = list[ + tuple[ AddressFamily, SocketKind, int, str, - Union[Tuple[str, int], Tuple[str, int, int, int], Tuple[int, bytes]], + Union[tuple[str, int], tuple[str, int, int, int], tuple[int, bytes]], ] ] @@ -141,7 +138,7 @@ def resolveHostName( resolutionReceiver: IResolutionReceiver, hostName: str, portNumber: int = 0, - addressTypes: Optional[Sequence[Type[IAddress]]] = None, + addressTypes: Optional[Sequence[type[IAddress]]] = None, transportSemantics: str = "TCP", ) -> IHostResolution: """ diff --git a/synapse/util/httpresourcetree.py b/synapse/util/httpresourcetree.py index 6471b31c941..46fa92a4c5c 100644 --- a/synapse/util/httpresourcetree.py +++ b/synapse/util/httpresourcetree.py @@ -20,7 +20,6 @@ # import logging -from typing import Dict from twisted.web.resource import Resource @@ -30,7 +29,7 @@ def create_resource_tree( - desired_tree: Dict[str, Resource], root_resource: Resource + desired_tree: dict[str, Resource], root_resource: Resource ) -> Resource: """Create the resource tree for this homeserver. @@ -48,7 +47,7 @@ def create_resource_tree( # unless you give it a Request object IN ADDITION to the name :/ So # instead, we'll store a copy of this mapping so we can actually add # extra resources to existing nodes. See self._resource_id for the key. - resource_mappings: Dict[str, Resource] = {} + resource_mappings: dict[str, Resource] = {} for full_path_str, res in desired_tree.items(): # twisted requires all resources to be bytes full_path = full_path_str.encode("utf-8") diff --git a/synapse/util/iterutils.py b/synapse/util/iterutils.py index 0a6a30aab2d..19789a4666d 100644 --- a/synapse/util/iterutils.py +++ b/synapse/util/iterutils.py @@ -24,16 +24,12 @@ from typing import ( Callable, Collection, - Dict, Generator, Iterable, Iterator, - List, Mapping, Protocol, - Set, Sized, - Tuple, TypeVar, ) @@ -52,7 +48,7 @@ class _SelfSlice(Sized, Protocol): def __getitem__(self: S, i: slice) -> S: ... -def batch_iter(iterable: Iterable[T], size: int) -> Iterator[Tuple[T, ...]]: +def batch_iter(iterable: Iterable[T], size: int) -> Iterator[tuple[T, ...]]: """batch an iterable up into tuples with a maximum size Args: @@ -80,7 +76,7 @@ def chunk_seq(iseq: S, maxlen: int) -> Iterator[S]: def partition( iterable: Iterable[T], predicate: Callable[[T], bool] -) -> Tuple[List[T], List[T]]: +) -> tuple[list[T], list[T]]: """ Separate a given iterable into two lists based on the result of a predicate function. @@ -115,7 +111,7 @@ def sorted_topologically( # This is implemented by Kahn's algorithm. 
degree_map = dict.fromkeys(nodes, 0) - reverse_graph: Dict[T, Set[T]] = {} + reverse_graph: dict[T, set[T]] = {} for node, edges in graph.items(): if node not in degree_map: @@ -165,7 +161,7 @@ def sorted_topologically_batched( """ degree_map = dict.fromkeys(nodes, 0) - reverse_graph: Dict[T, Set[T]] = {} + reverse_graph: dict[T, set[T]] = {} for node, edges in graph.items(): if node not in degree_map: diff --git a/synapse/util/json.py b/synapse/util/json.py index e6db55f8e4a..b1091704a81 100644 --- a/synapse/util/json.py +++ b/synapse/util/json.py @@ -16,7 +16,6 @@ import json from typing import ( Any, - Dict, ) from immutabledict import immutabledict @@ -27,7 +26,7 @@ def _reject_invalid_json(val: Any) -> None: raise ValueError("Invalid JSON value: '%s'" % val) -def _handle_immutabledict(obj: Any) -> Dict[Any, Any]: +def _handle_immutabledict(obj: Any) -> dict[Any, Any]: """Helper for json_encoder. Makes immutabledicts serializable by returning the underlying dict """ diff --git a/synapse/util/linked_list.py b/synapse/util/linked_list.py index 87f801c0cf1..052863fdd6f 100644 --- a/synapse/util/linked_list.py +++ b/synapse/util/linked_list.py @@ -22,7 +22,7 @@ """A circular doubly linked list implementation.""" import threading -from typing import Generic, Optional, Type, TypeVar +from typing import Generic, Optional, TypeVar P = TypeVar("P") LN = TypeVar("LN", bound="ListNode") @@ -53,7 +53,7 @@ def __init__(self, cache_entry: Optional[P] = None) -> None: self.next_node: Optional[ListNode[P]] = None @classmethod - def create_root_node(cls: Type["ListNode[P]"]) -> "ListNode[P]": + def create_root_node(cls: type["ListNode[P]"]) -> "ListNode[P]": """Create a new linked list by creating a "root" node, which is a node that has prev_node/next_node pointing to itself and no associated cache entry. @@ -65,7 +65,7 @@ def create_root_node(cls: Type["ListNode[P]"]) -> "ListNode[P]": @classmethod def insert_after( - cls: Type[LN], + cls: type[LN], cache_entry: P, node: "ListNode[P]", ) -> LN: diff --git a/synapse/util/manhole.py b/synapse/util/manhole.py index 63ec3e7e1b3..dbf444e0159 100644 --- a/synapse/util/manhole.py +++ b/synapse/util/manhole.py @@ -21,7 +21,7 @@ import inspect import sys import traceback -from typing import Any, Dict, Optional +from typing import Any, Optional from twisted.conch import manhole_ssh from twisted.conch.insults import insults @@ -71,7 +71,7 @@ -----END RSA PRIVATE KEY-----""" -def manhole(settings: ManholeConfig, globals: Dict[str, Any]) -> ServerFactory: +def manhole(settings: ManholeConfig, globals: dict[str, Any]) -> ServerFactory: """Starts a ssh listener with password authentication using the given username and password. 
Clients connecting to the ssh listener will find themselves in a colored python shell with diff --git a/synapse/util/metrics.py b/synapse/util/metrics.py index 7b6ad0e459c..9b6c9200185 100644 --- a/synapse/util/metrics.py +++ b/synapse/util/metrics.py @@ -25,11 +25,9 @@ from typing import ( Awaitable, Callable, - Dict, Generator, Optional, Protocol, - Type, TypeVar, ) @@ -238,7 +236,7 @@ def __enter__(self) -> "Measure": def __exit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> None: @@ -293,8 +291,8 @@ class DynamicCollectorRegistry(CollectorRegistry): def __init__(self) -> None: super().__init__() - self._server_name_to_pre_update_hooks: Dict[ - str, Dict[str, Callable[[], None]] + self._server_name_to_pre_update_hooks: dict[ + str, dict[str, Callable[[], None]] ] = {} """ Mapping of server name to a mapping of metric name to metric pre-update diff --git a/synapse/util/module_loader.py b/synapse/util/module_loader.py index a33c75d54e3..8c0af8a323f 100644 --- a/synapse/util/module_loader.py +++ b/synapse/util/module_loader.py @@ -21,7 +21,7 @@ import importlib import importlib.util from types import ModuleType -from typing import Any, Tuple, Type +from typing import Any, Type import jsonschema @@ -30,7 +30,7 @@ from synapse.types import StrSequence -def load_module(provider: dict, config_path: StrSequence) -> Tuple[Type, Any]: +def load_module(provider: dict, config_path: StrSequence) -> tuple[Type, Any]: """Loads a synapse module with its config Args: diff --git a/synapse/util/patch_inline_callbacks.py b/synapse/util/patch_inline_callbacks.py index c776ad65b33..fca166a5b8d 100644 --- a/synapse/util/patch_inline_callbacks.py +++ b/synapse/util/patch_inline_callbacks.py @@ -21,7 +21,7 @@ import functools import sys from types import GeneratorType -from typing import Any, Callable, Generator, List, TypeVar, cast +from typing import Any, Callable, Generator, TypeVar, cast from typing_extensions import ParamSpec @@ -56,7 +56,7 @@ def new_inline_callbacks( @functools.wraps(f) def wrapped(*args: P.args, **kwargs: P.kwargs) -> "Deferred[T]": start_context = current_context() - changes: List[str] = [] + changes: list[str] = [] orig: Callable[P, "Deferred[T]"] = orig_inline_callbacks( _check_yield_points(f, changes) ) @@ -126,7 +126,7 @@ def check_ctx(r: T) -> T: def _check_yield_points( f: Callable[P, Generator["Deferred[object]", object, T]], - changes: List[str], + changes: list[str], ) -> Callable: """Wraps a generator that is about to be passed to defer.inlineCallbacks checking that after every yield the log contexts are correct. 
diff --git a/synapse/util/ratelimitutils.py b/synapse/util/ratelimitutils.py index 756677fe6c4..c571a50b3ec 100644 --- a/synapse/util/ratelimitutils.py +++ b/synapse/util/ratelimitutils.py @@ -29,14 +29,10 @@ Callable, ContextManager, DefaultDict, - Dict, Iterator, - List, Mapping, MutableSet, Optional, - Set, - Tuple, ) from weakref import WeakSet @@ -104,7 +100,7 @@ def _get_counts_from_rate_limiter_instance( count_func: Callable[["FederationRateLimiter"], int], -) -> Mapping[Tuple[str, ...], int]: +) -> Mapping[tuple[str, ...], int]: """Returns a count of something (slept/rejected hosts) by (metrics_name)""" # Cast to a list to prevent it changing while the Prometheus # thread is collecting metrics @@ -114,7 +110,7 @@ def _get_counts_from_rate_limiter_instance( # Map from (metrics_name,) -> int, the number of something like slept hosts # or rejected hosts. The key type is Tuple[str], but we leave the length # unspecified for compatability with LaterGauge's annotations. - counts: Dict[Tuple[str, ...], int] = {} + counts: dict[tuple[str, ...], int] = {} for rate_limiter_instance in rate_limiter_instances: # Only track metrics if they provided a `metrics_name` to # differentiate this instance of the rate limiter. @@ -244,7 +240,7 @@ def __init__( self.concurrent_requests = config.concurrent # request_id objects for requests which have been slept - self.sleeping_requests: Set[object] = set() + self.sleeping_requests: set[object] = set() # map from request_id object to Deferred for requests which are ready # for processing but have been queued @@ -253,11 +249,11 @@ def __init__( ] = collections.OrderedDict() # request id objects for requests which are in progress - self.current_processing: Set[object] = set() + self.current_processing: set[object] = set() # times at which we have recently (within the last window_size ms) # received requests. - self.request_times: List[int] = [] + self.request_times: list[int] = [] @contextlib.contextmanager def ratelimit(self, host: str) -> "Iterator[defer.Deferred[None]]": diff --git a/synapse/util/retryutils.py b/synapse/util/retryutils.py index 96fe2bd5664..ce747c3f198 100644 --- a/synapse/util/retryutils.py +++ b/synapse/util/retryutils.py @@ -21,7 +21,7 @@ import logging import random from types import TracebackType -from typing import TYPE_CHECKING, Any, Optional, Type +from typing import TYPE_CHECKING, Any, Optional from synapse.api.errors import CodeMessageException from synapse.storage import DataStore @@ -230,7 +230,7 @@ def __enter__(self) -> None: def __exit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> None: diff --git a/synapse/util/stringutils.py b/synapse/util/stringutils.py index 32b5bc00c9d..6b0d3677dae 100644 --- a/synapse/util/stringutils.py +++ b/synapse/util/stringutils.py @@ -23,7 +23,7 @@ import re import secrets import string -from typing import Any, Iterable, Optional, Tuple +from typing import Any, Iterable, Optional from netaddr import valid_ipv6 @@ -92,7 +92,7 @@ def assert_valid_client_secret(client_secret: str) -> None: ) -def parse_server_name(server_name: str) -> Tuple[str, Optional[int]]: +def parse_server_name(server_name: str) -> tuple[str, Optional[int]]: """Split a server name into host/port parts. 
Args: @@ -123,7 +123,7 @@ def parse_server_name(server_name: str) -> Tuple[str, Optional[int]]: VALID_HOST_REGEX = re.compile("\\A[0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*\\Z") -def parse_and_validate_server_name(server_name: str) -> Tuple[str, Optional[int]]: +def parse_and_validate_server_name(server_name: str) -> tuple[str, Optional[int]]: """Split a server name into host/port parts and do some basic validation. Args: @@ -190,7 +190,7 @@ def valid_id_server_location(id_server: str) -> bool: return "#" not in path and "?" not in path -def parse_and_validate_mxc_uri(mxc: str) -> Tuple[str, Optional[int], str]: +def parse_and_validate_mxc_uri(mxc: str) -> tuple[str, Optional[int], str]: """Parse the given string as an MXC URI Checks that the "server name" part is a valid server name diff --git a/synapse/util/task_scheduler.py b/synapse/util/task_scheduler.py index 8dd6f12feb3..f033d37579c 100644 --- a/synapse/util/task_scheduler.py +++ b/synapse/util/task_scheduler.py @@ -20,7 +20,7 @@ # import logging -from typing import TYPE_CHECKING, Awaitable, Callable, Dict, List, Optional, Set, Tuple +from typing import TYPE_CHECKING, Awaitable, Callable, Optional from twisted.python.failure import Failure @@ -110,13 +110,13 @@ def __init__(self, hs: "HomeServer"): self.server_name = hs.hostname self._store = hs.get_datastores().main self._clock = hs.get_clock() - self._running_tasks: Set[str] = set() + self._running_tasks: set[str] = set() # A map between action names and their registered function - self._actions: Dict[ + self._actions: dict[ str, Callable[ [ScheduledTask], - Awaitable[Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]], + Awaitable[tuple[TaskStatus, Optional[JsonMapping], Optional[str]]], ], ] = {} self._run_background_tasks = hs.config.worker.run_background_tasks @@ -143,7 +143,7 @@ def register_action( self, function: Callable[ [ScheduledTask], - Awaitable[Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]], + Awaitable[tuple[TaskStatus, Optional[JsonMapping], Optional[str]]], ], action_name: str, ) -> None: @@ -278,12 +278,12 @@ async def get_task(self, id: str) -> Optional[ScheduledTask]: async def get_tasks( self, *, - actions: Optional[List[str]] = None, + actions: Optional[list[str]] = None, resource_id: Optional[str] = None, - statuses: Optional[List[TaskStatus]] = None, + statuses: Optional[list[TaskStatus]] = None, max_timestamp: Optional[int] = None, limit: Optional[int] = None, - ) -> List[ScheduledTask]: + ) -> list[ScheduledTask]: """Get a list of tasks. Returns all the tasks if no args are provided. If an arg is `None`, all tasks matching the other args will be selected. diff --git a/synapse/util/wheel_timer.py b/synapse/util/wheel_timer.py index 95eb1d71859..c63faa96dfb 100644 --- a/synapse/util/wheel_timer.py +++ b/synapse/util/wheel_timer.py @@ -19,7 +19,7 @@ # # import logging -from typing import Generic, Hashable, List, Set, TypeVar +from typing import Generic, Hashable, TypeVar import attr @@ -31,7 +31,7 @@ @attr.s(slots=True, frozen=True, auto_attribs=True) class _Entry(Generic[T]): end_key: int - elements: Set[T] = attr.Factory(set) + elements: set[T] = attr.Factory(set) class WheelTimer(Generic[T]): @@ -46,7 +46,7 @@ def __init__(self, bucket_size: int = 5000) -> None: accuracy of the timer. """ self.bucket_size: int = bucket_size - self.entries: List[_Entry[T]] = [] + self.entries: list[_Entry[T]] = [] def insert(self, now: int, obj: T, then: int) -> None: """Inserts object into timer. 
@@ -91,7 +91,7 @@ def insert(self, now: int, obj: T, then: int) -> None: self.entries[-1].elements.add(obj) - def fetch(self, now: int) -> List[T]: + def fetch(self, now: int) -> list[T]: """Fetch any objects that have timed out Args: @@ -102,7 +102,7 @@ def fetch(self, now: int) -> List[T]: """ now_key = int(now / self.bucket_size) - ret: List[T] = [] + ret: list[T] = [] while self.entries and self.entries[0].end_key <= now_key: ret.extend(self.entries.pop(0).elements) diff --git a/synapse/visibility.py b/synapse/visibility.py index 662f2636d01..41b6198af03 100644 --- a/synapse/visibility.py +++ b/synapse/visibility.py @@ -23,14 +23,9 @@ from enum import Enum, auto from typing import ( Collection, - Dict, Final, - FrozenSet, - List, Optional, Sequence, - Set, - Tuple, ) import attr @@ -76,18 +71,18 @@ Membership.BAN, ) -_HISTORY_VIS_KEY: Final[Tuple[str, str]] = (EventTypes.RoomHistoryVisibility, "") +_HISTORY_VIS_KEY: Final[tuple[str, str]] = (EventTypes.RoomHistoryVisibility, "") @trace async def filter_events_for_client( storage: StorageControllers, user_id: str, - events: List[EventBase], + events: list[EventBase], is_peeking: bool = False, - always_include_ids: FrozenSet[str] = frozenset(), + always_include_ids: frozenset[str] = frozenset(), filter_send_to_client: bool = True, -) -> List[EventBase]: +) -> list[EventBase]: """ Check which events a user is allowed to see. If the user can see the event but its sender asked for their data to be erased, prune the content of the event. @@ -160,7 +155,7 @@ async def filter_events_for_client( if filter_send_to_client: room_ids = {e.room_id for e in events} - retention_policies: Dict[str, RetentionPolicy] = {} + retention_policies: dict[str, RetentionPolicy] = {} for room_id in room_ids: retention_policies[ @@ -351,7 +346,7 @@ def _check_client_allowed_to_see_event( clock: Clock, filter_send_to_client: bool, is_peeking: bool, - always_include_ids: FrozenSet[str], + always_include_ids: frozenset[str], sender_ignored: bool, retention_policy: RetentionPolicy, state: Optional[StateMap[EventBase]], @@ -652,7 +647,7 @@ async def filter_events_for_server( redact: bool, filter_out_erased_senders: bool, filter_out_remote_partial_state_events: bool, -) -> List[EventBase]: +) -> list[EventBase]: """Filter a list of events based on whether the target server is allowed to see them. @@ -687,7 +682,7 @@ async def filter_events_for_server( # otherwise a room could be fully joined after we retrieve those, which would then bypass # this check but would base the filtering on an outdated view of the membership events. 
- partial_state_invisible_event_ids: Set[str] = set() + partial_state_invisible_event_ids: set[str] = set() if filter_out_remote_partial_state_events: for e in events: sender_domain = get_domain_from_id(e.sender) @@ -733,7 +728,7 @@ async def filter_events_for_server( async def _event_to_history_vis( storage: StorageControllers, events: Collection[EventBase] -) -> Dict[str, str]: +) -> dict[str, str]: """Get the history visibility at each of the given events Returns a map from event id to history_visibility setting @@ -758,7 +753,7 @@ async def _event_to_history_vis( } vis_events = await storage.main.get_events(visibility_ids) - result: Dict[str, str] = {} + result: dict[str, str] = {} for event in events: vis = HistoryVisibility.SHARED state_ids = event_to_state_ids.get(event.event_id) @@ -780,7 +775,7 @@ async def _event_to_history_vis( async def _event_to_memberships( storage: StorageControllers, events: Collection[EventBase], server_name: str -) -> Dict[str, StateMap[Tuple[str, str]]]: +) -> dict[str, StateMap[tuple[str, str]]]: """Get the remote membership list at each of the given events Returns a map from event id to state map, which will contain only membership events diff --git a/synmark/__main__.py b/synmark/__main__.py index 82717c4fc74..5308c960124 100644 --- a/synmark/__main__.py +++ b/synmark/__main__.py @@ -22,7 +22,7 @@ from argparse import REMAINDER, Namespace from contextlib import redirect_stderr from io import StringIO -from typing import Any, Callable, Coroutine, List, TypeVar +from typing import Any, Callable, Coroutine, TypeVar import pyperf @@ -76,7 +76,7 @@ def on_done(res: T) -> T: if __name__ == "__main__": - def add_cmdline_args(cmd: List[str], args: Namespace) -> None: + def add_cmdline_args(cmd: list[str], args: Namespace) -> None: if args.log: cmd.extend(["--log"]) cmd.extend(args.tests) diff --git a/tests/api/test_filtering.py b/tests/api/test_filtering.py index d74878a4e13..7742a06b4c9 100644 --- a/tests/api/test_filtering.py +++ b/tests/api/test_filtering.py @@ -20,7 +20,6 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import List from unittest.mock import patch import jsonschema @@ -50,7 +49,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: def test_errors_on_invalid_filters(self) -> None: # See USER_FILTER_SCHEMA for the filter schema. - invalid_filters: List[JsonDict] = [ + invalid_filters: list[JsonDict] = [ # `account_data` must be a dictionary {"account_data": "Hello World"}, # `event_format` must be "client" or "federation" @@ -67,7 +66,7 @@ def test_errors_on_invalid_filters(self) -> None: def test_ignores_unknown_filter_fields(self) -> None: # For forward compatibility, we must ignore unknown filter fields. # See USER_FILTER_SCHEMA for the filter schema. - filters: List[JsonDict] = [ + filters: list[JsonDict] = [ {"org.matrix.msc9999.future_option": True}, {"presence": {"org.matrix.msc9999.future_option": True}}, {"room": {"org.matrix.msc9999.future_option": True}}, @@ -78,7 +77,7 @@ def test_ignores_unknown_filter_fields(self) -> None: # Must not raise. def test_valid_filters(self) -> None: - valid_filters: List[JsonDict] = [ + valid_filters: list[JsonDict] = [ { "room": { "timeline": {"limit": 20}, @@ -557,7 +556,7 @@ def test_filter_relations(self) -> None: room_id="!foo:bar", ), ] - jsondicts: List[JsonDict] = [{}] + jsondicts: list[JsonDict] = [{}] # For the following tests we patch the datastore method (intead of injecting # events). 
This is a bit cheeky, but tests the logic of _check_event_relations. @@ -565,7 +564,7 @@ def test_filter_relations(self) -> None: # Filter for a particular sender. definition = {"related_by_senders": ["@foo:bar"]} - async def events_have_relations(*args: object, **kwargs: object) -> List[str]: + async def events_have_relations(*args: object, **kwargs: object) -> list[str]: return ["$with_relation"] with patch.object( diff --git a/tests/app/test_openid_listener.py b/tests/app/test_openid_listener.py index 6ca514d557f..6a1a630fe83 100644 --- a/tests/app/test_openid_listener.py +++ b/tests/app/test_openid_listener.py @@ -17,7 +17,6 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import List from unittest.mock import Mock, patch from parameterized import parameterized @@ -58,7 +57,7 @@ def default_config(self) -> JsonDict: (["openid"], "auth_fail"), ] ) - def test_openid_listener(self, names: List[str], expectation: str) -> None: + def test_openid_listener(self, names: list[str], expectation: str) -> None: """ Test different openid listener configurations. @@ -106,7 +105,7 @@ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: (["openid"], "auth_fail"), ] ) - def test_openid_listener(self, names: List[str], expectation: str) -> None: + def test_openid_listener(self, names: list[str], expectation: str) -> None: """ Test different openid listener configurations. diff --git a/tests/appservice/test_api.py b/tests/appservice/test_api.py index 085dfd2d1df..1943292a8f1 100644 --- a/tests/appservice/test_api.py +++ b/tests/appservice/test_api.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Any, List, Mapping, Optional, Sequence, Union +from typing import Any, Mapping, Optional, Sequence, Union from unittest.mock import Mock from twisted.internet.testing import MemoryReactor @@ -81,7 +81,7 @@ async def get_json( url: str, args: Mapping[Any, Any], headers: Mapping[Union[str, bytes], Sequence[Union[str, bytes]]], - ) -> List[JsonDict]: + ) -> list[JsonDict]: # Ensure the access token is passed as a header. if not headers or not headers.get(b"Authorization"): raise RuntimeError("Access token not provided") @@ -157,7 +157,7 @@ async def get_json( headers: Optional[ Mapping[Union[str, bytes], Sequence[Union[str, bytes]]] ] = None, - ) -> List[JsonDict]: + ) -> list[JsonDict]: # Ensure the access token is passed as a both a query param and in the headers. 
if not args.get(b"access_token"): raise RuntimeError("Access token should be provided in query params.") diff --git a/tests/appservice/test_scheduler.py b/tests/appservice/test_scheduler.py index f4490a1a794..f17957c2065 100644 --- a/tests/appservice/test_scheduler.py +++ b/tests/appservice/test_scheduler.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import List, Optional, Sequence, Tuple +from typing import Optional, Sequence from unittest.mock import AsyncMock, Mock from typing_extensions import TypeAlias @@ -288,11 +288,11 @@ def take_txn( # Corresponds to synapse.appservice.scheduler._TransactionController.send TxnCtrlArgs: TypeAlias = """ defer.Deferred[ - Tuple[ + tuple[ ApplicationService, Sequence[EventBase], - Optional[List[JsonDict]], - Optional[List[JsonDict]], + Optional[list[JsonDict]], + Optional[list[JsonDict]], Optional[TransactionOneTimeKeysCount], Optional[TransactionUnusedFallbackKeys], Optional[DeviceListUpdates], diff --git a/tests/config/utils.py b/tests/config/utils.py index 3cba4ac5889..efc63558db5 100644 --- a/tests/config/utils.py +++ b/tests/config/utils.py @@ -24,7 +24,6 @@ import unittest from contextlib import redirect_stdout from io import StringIO -from typing import List from synapse.config.homeserver import HomeServerConfig @@ -61,7 +60,7 @@ def generate_config_and_remove_lines_containing(self, needles: list[str]) -> Non with open(self.config_file, "w") as f: f.write("".join(contents)) - def add_lines_to_config(self, lines: List[str]) -> None: + def add_lines_to_config(self, lines: list[str]) -> None: with open(self.config_file, "a") as f: for line in lines: f.write(line + "\n") diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py index 93ae24628a8..2eaf77e9dcd 100644 --- a/tests/crypto/test_keyring.py +++ b/tests/crypto/test_keyring.py @@ -19,7 +19,7 @@ # # import time -from typing import Any, Dict, List, Optional, cast +from typing import Any, Optional, cast from unittest.mock import Mock import attr @@ -60,7 +60,7 @@ def __init__(self) -> None: self.server_name = "mock_server" self.key = signedjson.key.generate_signing_key("0") - def get_verify_keys(self) -> Dict[str, str]: + def get_verify_keys(self) -> dict[str, str]: vk = signedjson.key.get_verify_key(self.key) return {"%s:%s" % (vk.alg, vk.version): encode_verify_key_base64(vk)} @@ -107,8 +107,8 @@ def test_verify_json_objects_for_server_awaits_previous_requests(self) -> None: first_lookup_deferred: "Deferred[None]" = Deferred() async def first_lookup_fetch( - server_name: str, key_ids: List[str], minimum_valid_until_ts: int - ) -> Dict[str, FetchKeyResult]: + server_name: str, key_ids: list[str], minimum_valid_until_ts: int + ) -> dict[str, FetchKeyResult]: # self.assertEqual(current_context().request.id, "context_11") self.assertEqual(server_name, "server10") self.assertEqual(key_ids, [get_key_id(key1)]) @@ -152,8 +152,8 @@ async def first_lookup() -> None: # should block rather than start a second call async def second_lookup_fetch( - server_name: str, key_ids: List[str], minimum_valid_until_ts: int - ) -> Dict[str, FetchKeyResult]: + server_name: str, key_ids: list[str], minimum_valid_until_ts: int + ) -> dict[str, FetchKeyResult]: # self.assertEqual(current_context().request.id, "context_12") return {get_key_id(key1): FetchKeyResult(get_verify_key(key1), 100)} @@ -276,8 +276,8 @@ def test_verify_for_local_server_unknown_key(self) -> None: # set up a mock fetcher which will return the key async def get_keys( - 
server_name: str, key_ids: List[str], minimum_valid_until_ts: int - ) -> Dict[str, FetchKeyResult]: + server_name: str, key_ids: list[str], minimum_valid_until_ts: int + ) -> dict[str, FetchKeyResult]: self.assertEqual(server_name, self.hs.hostname) self.assertEqual(key_ids, [get_key_id(key2)]) @@ -302,8 +302,8 @@ def test_verify_json_dedupes_key_requests(self) -> None: key1 = signedjson.key.generate_signing_key("1") async def get_keys( - server_name: str, key_ids: List[str], minimum_valid_until_ts: int - ) -> Dict[str, FetchKeyResult]: + server_name: str, key_ids: list[str], minimum_valid_until_ts: int + ) -> dict[str, FetchKeyResult]: # there should only be one request object (with the max validity) self.assertEqual(server_name, "server1") self.assertEqual(key_ids, [get_key_id(key1)]) @@ -344,16 +344,16 @@ def test_verify_json_falls_back_to_other_fetchers(self) -> None: key1 = signedjson.key.generate_signing_key("1") async def get_keys1( - server_name: str, key_ids: List[str], minimum_valid_until_ts: int - ) -> Dict[str, FetchKeyResult]: + server_name: str, key_ids: list[str], minimum_valid_until_ts: int + ) -> dict[str, FetchKeyResult]: self.assertEqual(server_name, "server1") self.assertEqual(key_ids, [get_key_id(key1)]) self.assertEqual(minimum_valid_until_ts, 1500) return {get_key_id(key1): FetchKeyResult(get_verify_key(key1), 800)} async def get_keys2( - server_name: str, key_ids: List[str], minimum_valid_until_ts: int - ) -> Dict[str, FetchKeyResult]: + server_name: str, key_ids: list[str], minimum_valid_until_ts: int + ) -> dict[str, FetchKeyResult]: self.assertEqual(server_name, "server1") self.assertEqual(key_ids, [get_key_id(key1)]) self.assertEqual(minimum_valid_until_ts, 1500) @@ -701,7 +701,7 @@ def build_response() -> dict: SERVER_NAME, testkey, VALID_UNTIL_TS ) - def get_key_from_perspectives(response: JsonDict) -> Dict[str, FetchKeyResult]: + def get_key_from_perspectives(response: JsonDict) -> dict[str, FetchKeyResult]: fetcher = PerspectivesKeyFetcher(self.hs) self.expect_outgoing_key_query(SERVER_NAME, "key1", response) return self.get_success(fetcher.get_keys(SERVER_NAME, ["key1"], 0)) diff --git a/tests/events/test_auto_accept_invites.py b/tests/events/test_auto_accept_invites.py index fa7ea641057..d3842e72d7f 100644 --- a/tests/events/test_auto_accept_invites.py +++ b/tests/events/test_auto_accept_invites.py @@ -21,7 +21,7 @@ import asyncio from asyncio import Future from http import HTTPStatus -from typing import Any, Awaitable, Dict, List, Optional, Tuple, TypeVar, cast +from typing import Any, Awaitable, Optional, TypeVar, cast from unittest.mock import Mock import attr @@ -527,7 +527,7 @@ def sync_join( testcase: HomeserverTestCase, user_id: str, since_token: Optional[StreamToken] = None, -) -> Tuple[List[JoinedSyncResult], StreamToken]: +) -> tuple[list[JoinedSyncResult], StreamToken]: """Perform a sync request for the given user and return the user join updates they've received, as well as the next_batch token. 
@@ -765,7 +765,7 @@ class MockEvent: sender: str type: str - content: Dict[str, Any] + content: dict[str, Any] room_id: str = "!someroom" state_key: Optional[str] = None @@ -802,7 +802,7 @@ def make_multiple_awaitable(result: TV) -> Awaitable[TV]: def create_module( - config_override: Optional[Dict[str, Any]] = None, worker_name: Optional[str] = None + config_override: Optional[dict[str, Any]] = None, worker_name: Optional[str] = None ) -> InviteAutoAccepter: # Create a mock based on the ModuleApi spec, but override some mocked functions # because some capabilities are needed for running the tests. diff --git a/tests/events/test_presence_router.py b/tests/events/test_presence_router.py index 696d9dd6e27..aa8d7454c05 100644 --- a/tests/events/test_presence_router.py +++ b/tests/events/test_presence_router.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Dict, Iterable, List, Optional, Set, Tuple, Union +from typing import Iterable, Optional, Union from unittest.mock import AsyncMock, Mock import attr @@ -46,7 +46,7 @@ @attr.s class PresenceRouterTestConfig: - users_who_should_receive_all_presence = attr.ib(type=List[str], default=[]) + users_who_should_receive_all_presence = attr.ib(type=list[str], default=[]) class LegacyPresenceRouterTestModule: @@ -56,14 +56,14 @@ def __init__(self, config: PresenceRouterTestConfig, module_api: ModuleApi): async def get_users_for_states( self, state_updates: Iterable[UserPresenceState] - ) -> Dict[str, Set[UserPresenceState]]: + ) -> dict[str, set[UserPresenceState]]: users_to_state = { user_id: set(state_updates) for user_id in self._config.users_who_should_receive_all_presence } return users_to_state - async def get_interested_users(self, user_id: str) -> Union[Set[str], str]: + async def get_interested_users(self, user_id: str) -> Union[set[str], str]: if user_id in self._config.users_who_should_receive_all_presence: return PresenceRouter.ALL_USERS @@ -106,14 +106,14 @@ def __init__(self, config: PresenceRouterTestConfig, api: ModuleApi): async def get_users_for_states( self, state_updates: Iterable[UserPresenceState] - ) -> Dict[str, Set[UserPresenceState]]: + ) -> dict[str, set[UserPresenceState]]: users_to_state = { user_id: set(state_updates) for user_id in self._config.users_who_should_receive_all_presence } return users_to_state - async def get_interested_users(self, user_id: str) -> Union[Set[str], str]: + async def get_interested_users(self, user_id: str) -> Union[set[str], str]: if user_id in self._config.users_who_should_receive_all_presence: return PresenceRouter.ALL_USERS @@ -511,7 +511,7 @@ def sync_presence( testcase: HomeserverTestCase, user_id: str, since_token: Optional[StreamToken] = None, -) -> Tuple[List[UserPresenceState], StreamToken]: +) -> tuple[list[UserPresenceState], StreamToken]: """Perform a sync request for the given user and return the user presence updates they've received, as well as the next_batch token. 
diff --git a/tests/events/test_utils.py b/tests/events/test_utils.py index c6ebefbf384..9d41067844e 100644 --- a/tests/events/test_utils.py +++ b/tests/events/test_utils.py @@ -20,7 +20,7 @@ # import unittest as stdlib_unittest -from typing import Any, List, Mapping, Optional +from typing import Any, Mapping, Optional import attr from parameterized import parameterized @@ -648,7 +648,7 @@ class SerializeEventTestCase(stdlib_unittest.TestCase): def serialize( self, ev: EventBase, - fields: Optional[List[str]], + fields: Optional[list[str]], include_admin_metadata: bool = False, ) -> JsonDict: return serialize_event( diff --git a/tests/federation/test_federation_catch_up.py b/tests/federation/test_federation_catch_up.py index 5edb6517671..34b552b9ed6 100644 --- a/tests/federation/test_federation_catch_up.py +++ b/tests/federation/test_federation_catch_up.py @@ -1,4 +1,4 @@ -from typing import Callable, Collection, List, Optional, Tuple +from typing import Callable, Collection, Optional from unittest import mock from unittest.mock import AsyncMock, Mock @@ -55,8 +55,8 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: ) # whenever send_transaction is called, record the pdu data - self.pdus: List[JsonDict] = [] - self.failed_pdus: List[JsonDict] = [] + self.pdus: list[JsonDict] = [] + self.failed_pdus: list[JsonDict] = [] self.is_online = True self.federation_transport_client.send_transaction.side_effect = ( self.record_transaction @@ -269,7 +269,7 @@ def test_catch_up_from_blank_state(self) -> None: def make_fake_destination_queue( self, destination: str = "host2" - ) -> Tuple[PerDestinationQueue, List[EventBase]]: + ) -> tuple[PerDestinationQueue, list[EventBase]]: """ Makes a fake per-destination queue. """ @@ -279,8 +279,8 @@ def make_fake_destination_queue( async def fake_send( destination_tm: str, - pending_pdus: List[EventBase], - _pending_edus: List[Edu], + pending_pdus: list[EventBase], + _pending_edus: list[Edu], ) -> None: assert destination == destination_tm results_list.extend(pending_pdus) diff --git a/tests/federation/test_federation_out_of_band_membership.py b/tests/federation/test_federation_out_of_band_membership.py index fa4e7c63ba6..905f9e6580f 100644 --- a/tests/federation/test_federation_out_of_band_membership.py +++ b/tests/federation/test_federation_out_of_band_membership.py @@ -23,7 +23,7 @@ import time import urllib.parse from http import HTTPStatus -from typing import Any, Callable, Optional, Set, Tuple, TypeVar, Union +from typing import Any, Callable, Optional, TypeVar, Union from unittest.mock import Mock import attr @@ -147,7 +147,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: def do_sync( self, sync_body: JsonDict, *, since: Optional[str] = None, tok: str - ) -> Tuple[JsonDict, str]: + ) -> tuple[JsonDict, str]: """Do a sliding sync request with given body. Asserts the request was successful. 
@@ -350,7 +350,7 @@ async def get_json( self.federation_http_client.get_json.side_effect = get_json # PDU's that hs1 sent to hs2 - collected_pdus_from_hs1_federation_send: Set[str] = set() + collected_pdus_from_hs1_federation_send: set[str] = set() async def put_json( destination: str, @@ -503,7 +503,7 @@ def test_can_x_from_out_of_band_invite_after_we_are_already_participating_in_the T = TypeVar("T") # PDU's that hs1 sent to hs2 - collected_pdus_from_hs1_federation_send: Set[str] = set() + collected_pdus_from_hs1_federation_send: set[str] = set() async def put_json( destination: str, diff --git a/tests/federation/test_federation_sender.py b/tests/federation/test_federation_sender.py index 27b69a91805..20b67e3a73f 100644 --- a/tests/federation/test_federation_sender.py +++ b/tests/federation/test_federation_sender.py @@ -17,7 +17,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Callable, FrozenSet, List, Optional, Set +from typing import Callable, Optional from unittest.mock import AsyncMock, Mock from signedjson import key, sign @@ -435,7 +435,7 @@ def test_presence_batched(self) -> None: # A set of all user presence we see, this should end up matching the # number we sent out above. - seen_users: Set[str] = set() + seen_users: set[str] = set() for edu in presence_edus: presence_states = edu["content"]["push"] @@ -483,12 +483,12 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: # stub out `get_rooms_for_user` and `get_current_hosts_in_room` so that the # server thinks the user shares a room with `@user2:host2` - def get_rooms_for_user(user_id: str) -> "defer.Deferred[FrozenSet[str]]": + def get_rooms_for_user(user_id: str) -> "defer.Deferred[frozenset[str]]": return defer.succeed(frozenset({test_room_id})) hs.get_datastores().main.get_rooms_for_user = get_rooms_for_user # type: ignore[assignment] - async def get_current_hosts_in_room(room_id: str) -> Set[str]: + async def get_current_hosts_in_room(room_id: str) -> set[str]: if room_id == test_room_id: return {"host2"} else: @@ -504,7 +504,7 @@ async def get_current_hosts_in_room(room_id: str) -> Set[str]: self.device_handler = device_handler # whenever send_transaction is called, record the edu data - self.edus: List[JsonDict] = [] + self.edus: list[JsonDict] = [] self.federation_transport_client.send_transaction.side_effect = ( self.record_transaction ) diff --git a/tests/federation/transport/server/test__base.py b/tests/federation/transport/server/test__base.py index 0e3b41ec4d5..3c553e6e402 100644 --- a/tests/federation/transport/server/test__base.py +++ b/tests/federation/transport/server/test__base.py @@ -20,7 +20,6 @@ # from http import HTTPStatus -from typing import Dict, List, Tuple from twisted.web.resource import Resource @@ -52,14 +51,14 @@ def __init__( @cancellable async def on_GET( - self, origin: str, content: None, query: Dict[bytes, List[bytes]] - ) -> Tuple[int, JsonDict]: + self, origin: str, content: None, query: dict[bytes, list[bytes]] + ) -> tuple[int, JsonDict]: await self.clock.sleep(1.0) return HTTPStatus.OK, {"result": True} async def on_POST( - self, origin: str, content: JsonDict, query: Dict[bytes, List[bytes]] - ) -> Tuple[int, JsonDict]: + self, origin: str, content: JsonDict, query: dict[bytes, list[bytes]] + ) -> tuple[int, JsonDict]: await self.clock.sleep(1.0) return HTTPStatus.OK, {"result": True} diff --git a/tests/federation/transport/test_client.py b/tests/federation/transport/test_client.py index 3d882f99f2b..f538b67e41e 
100644 --- a/tests/federation/transport/test_client.py +++ b/tests/federation/transport/test_client.py @@ -20,7 +20,7 @@ # import json -from typing import List, Optional +from typing import Optional from unittest.mock import Mock import ijson.common @@ -98,7 +98,7 @@ def parse(response: JsonDict) -> bool: def test_servers_in_room(self) -> None: """Check that the servers_in_room field is correctly parsed""" - def parse(response: JsonDict) -> Optional[List[str]]: + def parse(response: JsonDict) -> Optional[list[str]]: parser = SendJoinParser(RoomVersions.V1, False) serialised_response = json.dumps(response).encode() diff --git a/tests/federation/transport/test_knocking.py b/tests/federation/transport/test_knocking.py index a2439382559..e06dffb6c5e 100644 --- a/tests/federation/transport/test_knocking.py +++ b/tests/federation/transport/test_knocking.py @@ -19,7 +19,7 @@ # # from collections import OrderedDict -from typing import Any, Dict, List, Optional +from typing import Any, Dict, Optional from twisted.internet.testing import MemoryReactor @@ -161,7 +161,7 @@ def send_example_state_events_to_room( def check_knock_room_state_against_room_state( self, - knock_room_state: List[Dict], + knock_room_state: list[Dict], expected_room_state: Dict, ) -> None: """Test a list of stripped room state events received over federation against a diff --git a/tests/handlers/test_appservice.py b/tests/handlers/test_appservice.py index df36185b99e..e360019203a 100644 --- a/tests/handlers/test_appservice.py +++ b/tests/handlers/test_appservice.py @@ -26,7 +26,6 @@ Callable, Dict, Iterable, - List, Optional, TypeVar, ) @@ -450,7 +449,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: hs.get_application_service_handler().scheduler.txn_ctrl.send = self.send_mock # type: ignore[method-assign] # Mock out application services, and allow defining our own in tests - self._services: List[ApplicationService] = [] + self._services: list[ApplicationService] = [] self.hs.get_datastores().main.get_app_services = Mock( # type: ignore[method-assign] return_value=self._services ) @@ -884,7 +883,7 @@ def test_application_services_receive_bursts_of_to_device(self) -> None: # Count the total number of to-device messages that were sent out per-service. # Ensure that we only sent to-device messages to interested services, and that # each interested service received the full count of to-device messages. - service_id_to_message_count: Dict[str, int] = {} + service_id_to_message_count: dict[str, int] = {} for call in self.send_mock.call_args_list: ( @@ -1023,7 +1022,7 @@ def test_application_services_receive_local_to_device_for_many_users(self) -> No def _register_application_service( self, - namespaces: Optional[Dict[str, Iterable[Dict]]] = None, + namespaces: Optional[dict[str, Iterable[Dict]]] = None, ) -> ApplicationService: """ Register a new application service, with the given namespaces of interest. 
@@ -1073,7 +1072,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: hs.get_application_service_api().put_json = self.put_json # type: ignore[method-assign] # Mock out application services, and allow defining our own in tests - self._services: List[ApplicationService] = [] + self._services: list[ApplicationService] = [] self.hs.get_datastores().main.get_app_services = Mock( # type: ignore[method-assign] return_value=self._services ) diff --git a/tests/handlers/test_cas.py b/tests/handlers/test_cas.py index f677f3be2a6..02671fc2642 100644 --- a/tests/handlers/test_cas.py +++ b/tests/handlers/test_cas.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Any, Dict +from typing import Any from unittest.mock import AsyncMock, Mock from twisted.internet.testing import MemoryReactor @@ -35,7 +35,7 @@ class CasHandlerTestCase(HomeserverTestCase): - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() config["public_baseurl"] = BASE_URL cas_config = { diff --git a/tests/handlers/test_directory.py b/tests/handlers/test_directory.py index 45b8f2353a5..76b145b92b8 100644 --- a/tests/handlers/test_directory.py +++ b/tests/handlers/test_directory.py @@ -19,7 +19,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Any, Awaitable, Callable, Dict +from typing import Any, Awaitable, Callable from unittest.mock import AsyncMock, Mock from twisted.internet.testing import MemoryReactor @@ -43,7 +43,7 @@ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: self.mock_federation = AsyncMock() self.mock_registry = Mock() - self.query_handlers: Dict[str, Callable[[dict], Awaitable[JsonDict]]] = {} + self.query_handlers: dict[str, Callable[[dict], Awaitable[JsonDict]]] = {} def register_query_handler( query_type: str, handler: Callable[[dict], Awaitable[JsonDict]] @@ -410,7 +410,7 @@ class TestCreateAliasACL(unittest.HomeserverTestCase): servlets = [directory.register_servlets, room.register_servlets] - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() # Add custom alias creation rules to the config. @@ -476,7 +476,7 @@ class TestCreatePublishedRoomACL(unittest.HomeserverTestCase): data = {"room_alias_name": "unofficial_test"} allowed_localpart = "allowed" - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() # Add custom room list publication rules to the config. diff --git a/tests/handlers/test_e2e_keys.py b/tests/handlers/test_e2e_keys.py index fca1f2cc44e..a4f9d55a133 100644 --- a/tests/handlers/test_e2e_keys.py +++ b/tests/handlers/test_e2e_keys.py @@ -20,7 +20,7 @@ # # import time -from typing import Dict, Iterable +from typing import Iterable from unittest import mock from parameterized import parameterized @@ -291,7 +291,7 @@ def test_claim_one_time_key_bulk(self) -> None: (chris, "chris_dev_2", "alg2"): 1, } # Convert to the format the handler wants. - query: Dict[str, Dict[str, Dict[str, int]]] = {} + query: dict[str, dict[str, dict[str, int]]] = {} for (user_id, device_id, algorithm), count in claims_to_make.items(): query.setdefault(user_id, {}).setdefault(device_id, {})[algorithm] = count claim_res = self.get_success( @@ -1510,7 +1510,7 @@ def test_query_appservice_with_fallback(self) -> None: ) # Setup a response. 
- response: Dict[str, Dict[str, Dict[str, JsonDict]]] = { + response: dict[str, dict[str, dict[str, JsonDict]]] = { local_user: {device_id_1: {**as_otk, **as_fallback_key}} } self.appservice_api.claim_client_keys.return_value = (response, []) diff --git a/tests/handlers/test_message.py b/tests/handlers/test_message.py index 4262e805e78..6450a904449 100644 --- a/tests/handlers/test_message.py +++ b/tests/handlers/test_message.py @@ -19,7 +19,6 @@ # # import logging -from typing import Tuple from twisted.internet.testing import MemoryReactor @@ -64,7 +63,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.requester = create_requester(self.user_id, device_id=device_id) - def _create_and_persist_member_event(self) -> Tuple[EventBase, EventContext]: + def _create_and_persist_member_event(self) -> tuple[EventBase, EventContext]: # Create a member event we can use as an auth_event memberEvent, memberEventContext = self.get_success( create_event( @@ -86,7 +85,7 @@ def _create_and_persist_member_event(self) -> Tuple[EventBase, EventContext]: def _create_duplicate_event( self, txn_id: str - ) -> Tuple[EventBase, UnpersistedEventContextBase]: + ) -> tuple[EventBase, UnpersistedEventContextBase]: """Create a new event with the given transaction ID. All events produced by this method will be considered duplicates. """ diff --git a/tests/handlers/test_oauth_delegation.py b/tests/handlers/test_oauth_delegation.py index 4640f35a1ea..43004bfc696 100644 --- a/tests/handlers/test_oauth_delegation.py +++ b/tests/handlers/test_oauth_delegation.py @@ -25,7 +25,7 @@ from http import HTTPStatus from http.server import BaseHTTPRequestHandler, HTTPServer from io import BytesIO -from typing import Any, ClassVar, Coroutine, Dict, Generator, Optional, TypeVar, Union +from typing import Any, ClassVar, Coroutine, Generator, Optional, TypeVar, Union from unittest.mock import ANY, AsyncMock, Mock from urllib.parse import parse_qs @@ -130,7 +130,7 @@ def device_scope(self) -> str: keys.register_servlets, ] - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() config["public_baseurl"] = BASE_URL config["disable_registration"] = True @@ -834,7 +834,7 @@ def till_deferred_has_result( return deferred - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() config["public_baseurl"] = BASE_URL config["disable_registration"] = True @@ -1100,9 +1100,9 @@ class DisabledEndpointsTestCase(HomeserverTestCase): admin.register_servlets, ] - config: Dict[str, Any] + config: dict[str, Any] - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() config["public_baseurl"] = BASE_URL config["disable_registration"] = True diff --git a/tests/handlers/test_oidc.py b/tests/handlers/test_oidc.py index 5207382f00f..3180969e7be 100644 --- a/tests/handlers/test_oidc.py +++ b/tests/handlers/test_oidc.py @@ -19,7 +19,7 @@ # # import os -from typing import Any, Awaitable, ContextManager, Dict, Optional, Tuple +from typing import Any, Awaitable, ContextManager, Optional from unittest.mock import ANY, AsyncMock, Mock, patch from urllib.parse import parse_qs, urlparse @@ -152,7 +152,7 @@ class OidcHandlerTestCase(HomeserverTestCase): if not HAS_OIDC: skip = "requires OIDC" - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() 
config["public_baseurl"] = BASE_URL return config @@ -204,7 +204,7 @@ def start_authorization( client_redirect_url: str = "http://client/redirect", scope: str = "openid", with_sid: bool = False, - ) -> Tuple[SynapseRequest, FakeAuthorizationGrant]: + ) -> tuple[SynapseRequest, FakeAuthorizationGrant]: """Start an authorization request, and get the callback request back.""" nonce = random_string(10) state = random_string(10) @@ -222,7 +222,7 @@ def start_authorization( def assertRenderedError( self, error: str, error_description: Optional[str] = None - ) -> Tuple[Any, ...]: + ) -> tuple[Any, ...]: self.render_error.assert_called_once() args = self.render_error.call_args[0] self.assertEqual(args[1], error) diff --git a/tests/handlers/test_password_providers.py b/tests/handlers/test_password_providers.py index aa418750638..faa269bd355 100644 --- a/tests/handlers/test_password_providers.py +++ b/tests/handlers/test_password_providers.py @@ -22,7 +22,7 @@ """Tests for the password_auth_provider interface""" from http import HTTPStatus -from typing import Any, Dict, List, Optional, Type, Union +from typing import Any, Optional, Union from unittest.mock import AsyncMock, Mock from twisted.internet.testing import MemoryReactor @@ -75,7 +75,7 @@ def parse_config(config: JsonDict) -> None: def __init__(self, config: None, account_handler: AccountHandler): pass - def get_supported_login_types(self) -> Dict[str, List[str]]: + def get_supported_login_types(self) -> dict[str, list[str]]: return {"test.login_type": ["test_field"]} def check_auth(self, *args: str) -> Mock: @@ -109,7 +109,7 @@ def parse_config(config: JsonDict) -> None: def __init__(self, config: None, account_handler: AccountHandler): pass - def get_supported_login_types(self) -> Dict[str, List[str]]: + def get_supported_login_types(self) -> dict[str, list[str]]: return {"m.login.password": ["password"], "test.login_type": ["test_field"]} def check_auth(self, *args: str) -> Mock: @@ -139,7 +139,7 @@ def check_pass(self, *args: str) -> Mock: return mock_password_provider.check_password(*args) -def legacy_providers_config(*providers: Type[Any]) -> dict: +def legacy_providers_config(*providers: type[Any]) -> dict: """Returns a config dict that will enable the given legacy password auth providers""" return { "password_providers": [ @@ -149,7 +149,7 @@ def legacy_providers_config(*providers: Type[Any]) -> dict: } -def providers_config(*providers: Type[Any]) -> dict: +def providers_config(*providers: type[Any]) -> dict: """Returns a config dict that will enable the given modules""" return { "modules": [ diff --git a/tests/handlers/test_profile.py b/tests/handlers/test_profile.py index 73426c7b04b..7a7f803ebd5 100644 --- a/tests/handlers/test_profile.py +++ b/tests/handlers/test_profile.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Any, Awaitable, Callable, Dict +from typing import Any, Awaitable, Callable from unittest.mock import AsyncMock, Mock from parameterized import parameterized @@ -44,7 +44,7 @@ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: self.mock_federation = AsyncMock() self.mock_registry = Mock() - self.query_handlers: Dict[str, Callable[[dict], Awaitable[JsonDict]]] = {} + self.query_handlers: dict[str, Callable[[dict], Awaitable[JsonDict]]] = {} def register_query_handler( query_type: str, handler: Callable[[dict], Awaitable[JsonDict]] @@ -377,7 +377,7 @@ def test_check_avatar_on_remote_server(self, remote_server_name: str) -> None: 
self.get_success(self.handler.check_avatar_size_and_mime_type(remote_mxc)) ) - def _setup_local_files(self, names_and_props: Dict[str, Dict[str, Any]]) -> None: + def _setup_local_files(self, names_and_props: dict[str, dict[str, Any]]) -> None: """Stores metadata about files in the database. Args: diff --git a/tests/handlers/test_receipts.py b/tests/handlers/test_receipts.py index 4febccbfcf2..bb9e84d6441 100644 --- a/tests/handlers/test_receipts.py +++ b/tests/handlers/test_receipts.py @@ -20,7 +20,6 @@ # from copy import deepcopy -from typing import List from twisted.internet.testing import MemoryReactor @@ -334,7 +333,7 @@ def test_we_do_not_mutate(self) -> None: self.assertEqual(events, original_events) def _test_filters_private( - self, events: List[JsonDict], expected_output: List[JsonDict] + self, events: list[JsonDict], expected_output: list[JsonDict] ) -> None: """Tests that the _filter_out_private returns the expected output""" filtered_events = self.event_source.filter_out_private_receipts( diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py index 5e2eb8dee74..20c2554e256 100644 --- a/tests/handlers/test_register.py +++ b/tests/handlers/test_register.py @@ -19,7 +19,7 @@ # # -from typing import Any, Collection, List, Optional, Tuple +from typing import Any, Collection, Optional from unittest.mock import AsyncMock, Mock from twisted.internet.testing import MemoryReactor @@ -65,7 +65,7 @@ async def check_registration_for_spam( self, email_threepid: Optional[dict], username: Optional[str], - request_info: Collection[Tuple[str, str]], + request_info: Collection[tuple[str, str]], auth_provider_id: Optional[str], ) -> RegistrationBehaviour: return RegistrationBehaviour.ALLOW @@ -76,7 +76,7 @@ async def check_registration_for_spam( self, email_threepid: Optional[dict], username: Optional[str], - request_info: Collection[Tuple[str, str]], + request_info: Collection[tuple[str, str]], auth_provider_id: Optional[str], ) -> RegistrationBehaviour: return RegistrationBehaviour.DENY @@ -87,7 +87,7 @@ async def check_registration_for_spam( self, email_threepid: Optional[dict], username: Optional[str], - request_info: Collection[Tuple[str, str]], + request_info: Collection[tuple[str, str]], auth_provider_id: Optional[str], ) -> RegistrationBehaviour: return RegistrationBehaviour.SHADOW_BAN @@ -98,7 +98,7 @@ async def check_registration_for_spam( self, email_threepid: Optional[dict], username: Optional[str], - request_info: Collection[Tuple[str, str]], + request_info: Collection[tuple[str, str]], auth_provider_id: Optional[str] = None, ) -> RegistrationBehaviour: # Reject any user coming from CAS and whose username contains profanity @@ -115,7 +115,7 @@ async def check_registration_for_spam( self, email_threepid: Optional[dict], username: Optional[str], - request_info: Collection[Tuple[str, str]], + request_info: Collection[tuple[str, str]], ) -> RegistrationBehaviour: return RegistrationBehaviour.ALLOW @@ -125,7 +125,7 @@ async def check_registration_for_spam( self, email_threepid: Optional[dict], username: Optional[str], - request_info: Collection[Tuple[str, str]], + request_info: Collection[tuple[str, str]], ) -> RegistrationBehaviour: return RegistrationBehaviour.ALLOW @@ -135,7 +135,7 @@ async def check_registration_for_spam( self, email_threepid: Optional[dict], username: Optional[str], - request_info: Collection[Tuple[str, str]], + request_info: Collection[tuple[str, str]], ) -> RegistrationBehaviour: return RegistrationBehaviour.DENY @@ -779,7 +779,7 @@ async 
def get_or_create_user( localpart: str, displayname: Optional[str], password_hash: Optional[str] = None, - ) -> Tuple[str, str]: + ) -> tuple[str, str]: """Creates a new user if the user does not exist, else revokes all previous access tokens and generates a new one. @@ -842,7 +842,7 @@ async def update_membership(*args: Any, **kwargs: Any) -> None: async def lookup_room_alias( *args: Any, **kwargs: Any - ) -> Tuple[RoomID, List[str]]: + ) -> tuple[RoomID, list[str]]: return RoomID.from_string(self.room_id), ["remotetest"] self.room_member_handler = Mock(spec=["update_membership", "lookup_room_alias"]) diff --git a/tests/handlers/test_room_list.py b/tests/handlers/test_room_list.py index 45cef09b223..f6e9309f1f1 100644 --- a/tests/handlers/test_room_list.py +++ b/tests/handlers/test_room_list.py @@ -1,5 +1,5 @@ from http import HTTPStatus -from typing import Optional, Set +from typing import Optional from synapse.rest import admin from synapse.rest.client import directory, login, room @@ -69,7 +69,7 @@ def test_acls_applied_to_room_directory_results(self) -> None: limit=50, from_federation_origin="test2" ) ) - room_ids_in_test2_list: Set[str] = { + room_ids_in_test2_list: set[str] = { entry["room_id"] for entry in room_list["chunk"] } @@ -78,7 +78,7 @@ def test_acls_applied_to_room_directory_results(self) -> None: limit=50, from_federation_origin="test3" ) ) - room_ids_in_test3_list: Set[str] = { + room_ids_in_test3_list: set[str] = { entry["room_id"] for entry in room_list["chunk"] } diff --git a/tests/handlers/test_room_summary.py b/tests/handlers/test_room_summary.py index 00592b9871a..3c8c483921e 100644 --- a/tests/handlers/test_room_summary.py +++ b/tests/handlers/test_room_summary.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Any, Dict, Iterable, List, Optional, Set, Tuple +from typing import Any, Iterable, Optional from unittest import mock from twisted.internet.defer import ensureDeferred @@ -60,7 +60,7 @@ def _create_event( return result -def _order(*events: mock.Mock) -> List[mock.Mock]: +def _order(*events: mock.Mock) -> list[mock.Mock]: return sorted(events, key=_child_events_comparison_key) @@ -152,7 +152,7 @@ def _add_child( room_id: str, token: str, order: Optional[str] = None, - via: Optional[List[str]] = None, + via: Optional[list[str]] = None, ) -> None: """Add a child room to a space.""" if via is None: @@ -170,7 +170,7 @@ def _add_child( ) def _assert_hierarchy( - self, result: JsonDict, rooms_and_children: Iterable[Tuple[str, Iterable[str]]] + self, result: JsonDict, rooms_and_children: Iterable[tuple[str, Iterable[str]]] ) -> None: """ Assert that the expected room IDs are in the response. @@ -547,7 +547,7 @@ def test_pagination(self) -> None: ) # The result should have the space and all of the links, plus some of the # rooms and a pagination token. - expected: List[Tuple[str, Iterable[str]]] = [(self.space, room_ids)] + expected: list[tuple[str, Iterable[str]]] = [(self.space, room_ids)] expected += [(room_id, ()) for room_id in room_ids[:6]] self._assert_hierarchy(result, expected) self.assertIn("next_batch", result) @@ -646,7 +646,7 @@ def test_max_depth(self) -> None: create_requester(self.user), self.space, max_depth=0 ) ) - expected: List[Tuple[str, Iterable[str]]] = [(spaces[0], [rooms[0], spaces[1]])] + expected: list[tuple[str, Iterable[str]]] = [(spaces[0], [rooms[0], spaces[1]])] self._assert_hierarchy(result, expected) # A single additional layer. 
@@ -740,7 +740,7 @@ def test_fed_complex(self) -> None: async def summarize_remote_room_hierarchy( _self: Any, room: Any, suggested_only: bool - ) -> Tuple[Optional[_RoomEntry], Dict[str, JsonDict], Set[str]]: + ) -> tuple[Optional[_RoomEntry], dict[str, JsonDict], set[str]]: return requested_room_entry, {subroom: child_room}, set() # Add a room to the space which is on another server. @@ -793,7 +793,7 @@ def test_fed_root(self) -> None: async def summarize_remote_room_hierarchy( _self: Any, room: Any, suggested_only: bool - ) -> Tuple[Optional[_RoomEntry], Dict[str, JsonDict], Set[str]]: + ) -> tuple[Optional[_RoomEntry], dict[str, JsonDict], set[str]]: return requested_room_entry, {fed_subroom: child_room}, set() expected = [ @@ -921,7 +921,7 @@ def test_fed_filtering(self) -> None: async def summarize_remote_room_hierarchy( _self: Any, room: Any, suggested_only: bool - ) -> Tuple[Optional[_RoomEntry], Dict[str, JsonDict], Set[str]]: + ) -> tuple[Optional[_RoomEntry], dict[str, JsonDict], set[str]]: return subspace_room_entry, dict(children_rooms), set() # Add a room to the space which is on another server. @@ -985,7 +985,7 @@ def test_fed_invited(self) -> None: async def summarize_remote_room_hierarchy( _self: Any, room: Any, suggested_only: bool - ) -> Tuple[Optional[_RoomEntry], Dict[str, JsonDict], Set[str]]: + ) -> tuple[Optional[_RoomEntry], dict[str, JsonDict], set[str]]: return fed_room_entry, {}, set() # Add a room to the space which is on another server. @@ -1120,7 +1120,7 @@ def test_fed_remote_room_hosts(self) -> None: async def summarize_remote_room_hierarchy( _self: Any, room: Any, suggested_only: bool - ) -> Tuple[Optional[_RoomEntry], Dict[str, JsonDict], Set[str]]: + ) -> tuple[Optional[_RoomEntry], dict[str, JsonDict], set[str]]: return requested_room_entry, {fed_subroom: child_room}, set() expected = [ @@ -1233,7 +1233,7 @@ def test_fed(self) -> None: async def summarize_remote_room_hierarchy( _self: Any, room: Any, suggested_only: bool - ) -> Tuple[Optional[_RoomEntry], Dict[str, JsonDict], Set[str]]: + ) -> tuple[Optional[_RoomEntry], dict[str, JsonDict], set[str]]: return requested_room_entry, {}, set() with mock.patch( diff --git a/tests/handlers/test_saml.py b/tests/handlers/test_saml.py index f7cbf911139..28159abbcb5 100644 --- a/tests/handlers/test_saml.py +++ b/tests/handlers/test_saml.py @@ -19,7 +19,7 @@ # # -from typing import Any, Dict, Optional, Set, Tuple +from typing import Any, Optional from unittest.mock import AsyncMock, Mock import attr @@ -73,7 +73,7 @@ def parse_config(config: JsonDict) -> None: return None @staticmethod - def get_saml_attributes(config: None) -> Tuple[Set[str], Set[str]]: + def get_saml_attributes(config: None) -> tuple[set[str], set[str]]: return {"uid"}, {"displayName"} def get_remote_user_id( @@ -102,10 +102,10 @@ def saml_response_to_user_attributes( class SamlHandlerTestCase(HomeserverTestCase): - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() config["public_baseurl"] = BASE_URL - saml_config: Dict[str, Any] = { + saml_config: dict[str, Any] = { "sp_config": {"metadata": {}}, # Disable grandfathering. 
"grandfathered_mxid_source_attribute": None, diff --git a/tests/handlers/test_send_email.py b/tests/handlers/test_send_email.py index 5f7839c82c1..d033ed3a1ca 100644 --- a/tests/handlers/test_send_email.py +++ b/tests/handlers/test_send_email.py @@ -20,7 +20,7 @@ # -from typing import Callable, List, Tuple, Type, Union +from typing import Callable, Union from unittest.mock import patch from zope.interface import implementer @@ -58,18 +58,18 @@ def TestingESMTPTLSClientFactory( class _DummyMessageDelivery: def __init__(self) -> None: # (recipient, message) tuples - self.messages: List[Tuple[smtp.Address, bytes]] = [] + self.messages: list[tuple[smtp.Address, bytes]] = [] def receivedHeader( self, - helo: Tuple[bytes, bytes], + helo: tuple[bytes, bytes], origin: smtp.Address, - recipients: List[smtp.User], + recipients: list[smtp.User], ) -> None: return None def validateFrom( - self, helo: Tuple[bytes, bytes], origin: smtp.Address + self, helo: tuple[bytes, bytes], origin: smtp.Address ) -> smtp.Address: return origin @@ -89,7 +89,7 @@ class _DummyMessage: def __init__(self, delivery: _DummyMessageDelivery, user: smtp.User): self._delivery = delivery self._user = user - self._buffer: List[bytes] = [] + self._buffer: list[bytes] = [] def lineReceived(self, line: bytes) -> None: self._buffer.append(line) @@ -104,7 +104,7 @@ def connectionLost(self) -> None: class SendEmailHandlerTestCaseIPv4(HomeserverTestCase): - ip_class: Union[Type[IPv4Address], Type[IPv6Address]] = IPv4Address + ip_class: Union[type[IPv4Address], type[IPv6Address]] = IPv4Address def setUp(self) -> None: super().setUp() diff --git a/tests/handlers/test_sliding_sync.py b/tests/handlers/test_sliding_sync.py index 1ffd15cadbd..a35910e4dd5 100644 --- a/tests/handlers/test_sliding_sync.py +++ b/tests/handlers/test_sliding_sync.py @@ -18,7 +18,7 @@ # # import logging -from typing import AbstractSet, Dict, Mapping, Optional, Set, Tuple +from typing import AbstractSet, Mapping, Optional from unittest.mock import patch import attr @@ -3278,7 +3278,7 @@ def _get_sync_room_ids_for_user( user: UserID, to_token: StreamToken, from_token: Optional[StreamToken], - ) -> Tuple[Dict[str, RoomsForUserType], AbstractSet[str], AbstractSet[str]]: + ) -> tuple[dict[str, RoomsForUserType], AbstractSet[str], AbstractSet[str]]: """ Get the rooms the user should be syncing with """ @@ -3615,7 +3615,7 @@ def _get_sync_room_ids_for_user( user: UserID, to_token: StreamToken, from_token: Optional[StreamToken], - ) -> Tuple[Dict[str, RoomsForUserType], AbstractSet[str], AbstractSet[str]]: + ) -> tuple[dict[str, RoomsForUserType], AbstractSet[str], AbstractSet[str]]: """ Get the rooms the user should be syncing with """ @@ -3824,13 +3824,13 @@ def test_default_bump_event_types(self) -> None: @attr.s(slots=True, auto_attribs=True, frozen=True) class RequiredStateChangesTestParameters: - previous_required_state_map: Dict[str, Set[str]] - request_required_state_map: Dict[str, Set[str]] + previous_required_state_map: dict[str, set[str]] + request_required_state_map: dict[str, set[str]] state_deltas: StateMap[str] - expected_with_state_deltas: Tuple[ + expected_with_state_deltas: tuple[ Optional[Mapping[str, AbstractSet[str]]], StateFilter ] - expected_without_state_deltas: Tuple[ + expected_without_state_deltas: tuple[ Optional[Mapping[str, AbstractSet[str]]], StateFilter ] @@ -4785,7 +4785,7 @@ def test_limit_retained_previous_state_keys( self, _test_label: str, event_type: str, - extra_state_keys: Set[str], + extra_state_keys: set[str], ) -> None: """ Test 
that we limit the number of state_keys that we remember but always include diff --git a/tests/handlers/test_sso.py b/tests/handlers/test_sso.py index b09d0a42f52..5ac088f6017 100644 --- a/tests/handlers/test_sso.py +++ b/tests/handlers/test_sso.py @@ -18,7 +18,7 @@ # # from http import HTTPStatus -from typing import BinaryIO, Callable, Dict, List, Optional, Tuple +from typing import BinaryIO, Callable, Optional from unittest.mock import Mock from twisted.internet.testing import MemoryReactor @@ -120,7 +120,7 @@ async def mock_get_file( max_size: Optional[int] = None, headers: Optional[RawHeaders] = None, is_allowed_content_type: Optional[Callable[[str], bool]] = None, -) -> Tuple[int, Dict[bytes, List[bytes]], str, int]: +) -> tuple[int, dict[bytes, list[bytes]], str, int]: fake_response = FakeResponse(code=404) if url == "http://my.server/me.png": fake_response = FakeResponse( diff --git a/tests/handlers/test_stats.py b/tests/handlers/test_stats.py index abec5c2e392..94f5e472ca0 100644 --- a/tests/handlers/test_stats.py +++ b/tests/handlers/test_stats.py @@ -18,7 +18,7 @@ # # -from typing import Any, Dict, List, Optional, Tuple, cast +from typing import Any, Optional, cast from twisted.internet.testing import MemoryReactor @@ -74,9 +74,9 @@ def _add_background_updates(self) -> None: ) ) - async def get_all_room_state(self) -> List[Optional[str]]: + async def get_all_room_state(self) -> list[Optional[str]]: rows = cast( - List[Tuple[Optional[str]]], + list[tuple[Optional[str]]], await self.store.db_pool.simple_select_list( "room_stats_state", None, retcols=("topic",) ), @@ -85,7 +85,7 @@ async def get_all_room_state(self) -> List[Optional[str]]: def _get_current_stats( self, stats_type: str, stat_id: str - ) -> Optional[Dict[str, Any]]: + ) -> Optional[dict[str, Any]]: table, id_col = stats.TYPE_TO_TABLE[stats_type] cols = list(stats.ABSOLUTE_STATS_FIELDS[stats_type]) diff --git a/tests/handlers/test_sync.py b/tests/handlers/test_sync.py index c61788fe900..140dd4a0bab 100644 --- a/tests/handlers/test_sync.py +++ b/tests/handlers/test_sync.py @@ -18,7 +18,7 @@ # # from http import HTTPStatus -from typing import Collection, ContextManager, List, Optional +from typing import Collection, ContextManager, Optional from unittest.mock import AsyncMock, Mock, patch from parameterized import parameterized, parameterized_class @@ -872,7 +872,7 @@ def test_archived_rooms_do_not_include_state_after_leave( # ... 
And the state should be empty self.assertEqual(sync_room_result.state, {}) - def _patch_get_latest_events(self, latest_events: List[str]) -> ContextManager: + def _patch_get_latest_events(self, latest_events: list[str]) -> ContextManager: """Monkey-patch `get_prev_events_for_room` Returns a context manager which will replace the implementation of @@ -902,7 +902,7 @@ async def _check_event_auth( async def _check_sigs_and_hash_for_pulled_events_and_fetch( dest: str, pdus: Collection[EventBase], room_version: RoomVersion - ) -> List[EventBase]: + ) -> list[EventBase]: return list(pdus) self.client._check_sigs_and_hash_for_pulled_events_and_fetch = ( # type: ignore[method-assign] diff --git a/tests/handlers/test_typing.py b/tests/handlers/test_typing.py index 90c185bc3d4..70557a4a5fa 100644 --- a/tests/handlers/test_typing.py +++ b/tests/handlers/test_typing.py @@ -21,7 +21,6 @@ import json -from typing import Dict, List, Set from unittest.mock import ANY, AsyncMock, Mock, call from netaddr import IPSet @@ -110,7 +109,7 @@ def make_homeserver( return hs - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: d = super().create_resource_dict() d["/_matrix/federation"] = TransportLayerServer(self.hs) return d @@ -143,7 +142,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: return_value=None ) - self.room_members: List[UserID] = [] + self.room_members: list[UserID] = [] async def check_user_in_room(room_id: str, requester: Requester) -> None: if requester.user.to_string() not in [ @@ -163,7 +162,7 @@ async def check_host_in_room(room_id: str, server_name: str) -> bool: side_effect=check_host_in_room ) - async def get_current_hosts_in_room(room_id: str) -> Set[str]: + async def get_current_hosts_in_room(room_id: str) -> set[str]: return {member.domain for member in self.room_members} hs.get_storage_controllers().state.get_current_hosts_in_room = Mock( # type: ignore[method-assign] @@ -174,7 +173,7 @@ async def get_current_hosts_in_room(room_id: str) -> Set[str]: side_effect=get_current_hosts_in_room ) - async def get_users_in_room(room_id: str) -> Set[str]: + async def get_users_in_room(room_id: str) -> set[str]: return {str(u) for u in self.room_members} self.datastore.get_users_in_room = Mock(side_effect=get_users_in_room) diff --git a/tests/handlers/test_user_directory.py b/tests/handlers/test_user_directory.py index 1ba0be51a2c..f50fa1f4a02 100644 --- a/tests/handlers/test_user_directory.py +++ b/tests/handlers/test_user_directory.py @@ -17,7 +17,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Any, Tuple +from typing import Any from unittest.mock import AsyncMock, Mock, patch from urllib.parse import quote @@ -313,7 +313,7 @@ def check_user_dir_for_private_room() -> None: def _create_rooms_and_inject_memberships( self, creator: str, token: str, joiner: str - ) -> Tuple[str, str]: + ) -> tuple[str, str]: """Create a public and private room as a normal user. Then get the `joiner` into those rooms. 
""" diff --git a/tests/http/__init__.py b/tests/http/__init__.py index 3c20e5e4426..b19a4840043 100644 --- a/tests/http/__init__.py +++ b/tests/http/__init__.py @@ -19,7 +19,6 @@ # import os.path import subprocess -from typing import List from incremental import Version from zope.interface import implementer @@ -85,7 +84,7 @@ def get_test_key_file() -> str: """ -def create_test_cert_file(sanlist: List[bytes]) -> str: +def create_test_cert_file(sanlist: list[bytes]) -> str: """build an x509 certificate file Args: @@ -151,7 +150,7 @@ class TestServerTLSConnectionFactory: """An SSL connection creator which returns connections which present a certificate signed by our test CA.""" - def __init__(self, sanlist: List[bytes]): + def __init__(self, sanlist: list[bytes]): """ Args: sanlist: a list of subjectAltName values for the cert @@ -166,7 +165,7 @@ def serverConnectionForTLS(self, tlsProtocol: TLSMemoryBIOProtocol) -> Connectio def wrap_server_factory_for_tls( - factory: IProtocolFactory, clock: IReactorTime, sanlist: List[bytes] + factory: IProtocolFactory, clock: IReactorTime, sanlist: list[bytes] ) -> TLSMemoryBIOFactory: """Wrap an existing Protocol Factory with a test TLSMemoryBIOFactory diff --git a/tests/http/federation/test_matrix_federation_agent.py b/tests/http/federation/test_matrix_federation_agent.py index c66ca489a46..949564fcc76 100644 --- a/tests/http/federation/test_matrix_federation_agent.py +++ b/tests/http/federation/test_matrix_federation_agent.py @@ -20,7 +20,7 @@ import base64 import logging import os -from typing import Generator, List, Optional, cast +from typing import Generator, Optional, cast from unittest.mock import AsyncMock, call, patch import treq @@ -110,7 +110,7 @@ def _make_connection( client_factory: IProtocolFactory, ssl: bool = True, expected_sni: Optional[bytes] = None, - tls_sanlist: Optional[List[bytes]] = None, + tls_sanlist: Optional[list[bytes]] = None, ) -> HTTPChannel: """Builds a test server, and completes the outgoing client connection Args: diff --git a/tests/http/federation/test_srv_resolver.py b/tests/http/federation/test_srv_resolver.py index a359b0a1415..54f3168a019 100644 --- a/tests/http/federation/test_srv_resolver.py +++ b/tests/http/federation/test_srv_resolver.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Dict, Generator, List, Tuple, cast +from typing import Generator, cast from unittest.mock import Mock from twisted.internet import defer @@ -44,20 +44,20 @@ def test_resolve(self) -> None: type=dns.SRV, payload=dns.Record_SRV(target=host_name) ) - result_deferred: "Deferred[Tuple[List[dns.RRHeader], None, None]]" = Deferred() + result_deferred: "Deferred[tuple[list[dns.RRHeader], None, None]]" = Deferred() dns_client_mock.lookupService.return_value = result_deferred - cache: Dict[bytes, List[Server]] = {} + cache: dict[bytes, list[Server]] = {} resolver = SrvResolver(dns_client=dns_client_mock, cache=cache) @defer.inlineCallbacks - def do_lookup() -> Generator["Deferred[object]", object, List[Server]]: + def do_lookup() -> Generator["Deferred[object]", object, list[Server]]: with LoggingContext( name="one", server_name="test_server", ) as ctx: resolve_d = resolver.resolve_service(service_name) - result: List[Server] + result: list[Server] result = yield defer.ensureDeferred(resolve_d) # type: ignore[assignment] # should have restored our context @@ -95,7 +95,7 @@ def test_from_cache_expired_and_dns_fail( cache = {service_name: [cast(Server, entry)]} resolver = 
SrvResolver(dns_client=dns_client_mock, cache=cache) - servers: List[Server] + servers: list[Server] servers = yield defer.ensureDeferred(resolver.resolve_service(service_name)) # type: ignore[assignment] dns_client_mock.lookupService.assert_called_once_with(service_name) @@ -122,7 +122,7 @@ def test_from_cache(self) -> Generator["Deferred[object]", object, None]: dns_client=dns_client_mock, cache=cache, get_time=clock.time ) - servers: List[Server] + servers: list[Server] servers = yield defer.ensureDeferred(resolver.resolve_service(service_name)) # type: ignore[assignment] self.assertFalse(dns_client_mock.lookupService.called) @@ -138,7 +138,7 @@ def test_empty_cache(self) -> Generator["Deferred[object]", object, None]: service_name = b"test_service.example.com" - cache: Dict[bytes, List[Server]] = {} + cache: dict[bytes, list[Server]] = {} resolver = SrvResolver(dns_client=dns_client_mock, cache=cache) with self.assertRaises(error.DNSServerError): @@ -152,10 +152,10 @@ def test_name_error(self) -> Generator["Deferred[object]", object, None]: service_name = b"test_service.example.com" - cache: Dict[bytes, List[Server]] = {} + cache: dict[bytes, list[Server]] = {} resolver = SrvResolver(dns_client=dns_client_mock, cache=cache) - servers: List[Server] + servers: list[Server] servers = yield defer.ensureDeferred(resolver.resolve_service(service_name)) # type: ignore[assignment] self.assertEqual(len(servers), 0) @@ -167,10 +167,10 @@ def test_disabled_service(self) -> None: """ service_name = b"test_service.example.com" - lookup_deferred: "Deferred[Tuple[List[dns.RRHeader], None, None]]" = Deferred() + lookup_deferred: "Deferred[tuple[list[dns.RRHeader], None, None]]" = Deferred() dns_client_mock = Mock() dns_client_mock.lookupService.return_value = lookup_deferred - cache: Dict[bytes, List[Server]] = {} + cache: dict[bytes, list[Server]] = {} resolver = SrvResolver(dns_client=dns_client_mock, cache=cache) # Old versions of Twisted don't have an ensureDeferred in failureResultOf. @@ -193,10 +193,10 @@ def test_non_srv_answer(self) -> None: """ service_name = b"test_service.example.com" - lookup_deferred: "Deferred[Tuple[List[dns.RRHeader], None, None]]" = Deferred() + lookup_deferred: "Deferred[tuple[list[dns.RRHeader], None, None]]" = Deferred() dns_client_mock = Mock() dns_client_mock.lookupService.return_value = lookup_deferred - cache: Dict[bytes, List[Server]] = {} + cache: dict[bytes, list[Server]] = {} resolver = SrvResolver(dns_client=dns_client_mock, cache=cache) # Old versions of Twisted don't have an ensureDeferred in successResultOf. diff --git a/tests/http/server/_base.py b/tests/http/server/_base.py index 8eec4329fe2..cc9b5fd6e1c 100644 --- a/tests/http/server/_base.py +++ b/tests/http/server/_base.py @@ -26,12 +26,8 @@ Any, Callable, ContextManager, - Dict, Generator, - List, Optional, - Set, - Tuple, TypeVar, Union, ) @@ -208,7 +204,7 @@ def make_request_with_cancellation_test( # The set of previously seen `await`s. # Each element is a stringified stack trace. - seen_awaits: Set[Tuple[str, ...]] = set() + seen_awaits: set[tuple[str, ...]] = set() _log_for_request( 0, f"Running make_request_with_cancellation_test for {test_name}..." 
@@ -337,7 +333,7 @@ class Deferred__await__Patch: deferred_patch.unblock_awaits() """ - def __init__(self, seen_awaits: Set[Tuple[str, ...]], request_number: int): + def __init__(self, seen_awaits: set[tuple[str, ...]], request_number: int): """ Args: seen_awaits: The set of stack traces of `await`s that have been previously @@ -365,10 +361,10 @@ def __init__(self, seen_awaits: Set[Tuple[str, ...]], request_number: int): # unresolved `Deferred` and return it out of `Deferred.__await__` / # `coroutine.send()`. We have to resolve it later, in case the `await`ing # coroutine is part of some shared processing, such as `@cached`. - self._to_unblock: Dict[Deferred, Union[object, Failure]] = {} + self._to_unblock: dict[Deferred, Union[object, Failure]] = {} # The last stack we logged. - self._previous_stack: List[inspect.FrameInfo] = [] + self._previous_stack: list[inspect.FrameInfo] = [] def patch(self) -> ContextManager[Mock]: """Returns a context manager which patches `Deferred.__await__`.""" @@ -507,8 +503,8 @@ def _log_for_request(request_number: int, message: str) -> None: def _log_await_stack( - stack: List[inspect.FrameInfo], - previous_stack: List[inspect.FrameInfo], + stack: list[inspect.FrameInfo], + previous_stack: list[inspect.FrameInfo], request_number: int, note: str, ) -> None: @@ -566,7 +562,7 @@ def _format_stack_frame(frame_info: inspect.FrameInfo) -> str: ) -def _get_stack(skip_frames: int) -> List[inspect.FrameInfo]: +def _get_stack(skip_frames: int) -> list[inspect.FrameInfo]: """Captures the stack for a request. Skips any twisted frames and stops at `JsonResource.wrapped_async_request_handler`. @@ -622,6 +618,6 @@ def _get_stack_frame_method_name(frame_info: inspect.FrameInfo) -> str: return method_name -def _hash_stack(stack: List[inspect.FrameInfo]) -> Tuple[str, ...]: +def _hash_stack(stack: list[inspect.FrameInfo]) -> tuple[str, ...]: """Turns a stack into a hashable value that can be put into a set.""" return tuple(_format_stack_frame(frame) for frame in stack) diff --git a/tests/http/test_client.py b/tests/http/test_client.py index a02f6fc7280..d9eaa78a392 100644 --- a/tests/http/test_client.py +++ b/tests/http/test_client.py @@ -20,7 +20,7 @@ # from io import BytesIO -from typing import Tuple, Union +from typing import Union from unittest.mock import Mock from netaddr import IPSet @@ -59,7 +59,7 @@ class ReadMultipartResponseTests(TestCase): def _build_multipart_response( self, response_length: Union[int, str], max_length: int - ) -> Tuple[ + ) -> tuple[ BytesIO, "Deferred[MultipartResponse]", _MultipartParserProtocol, @@ -209,7 +209,7 @@ def test_content_length(self) -> None: class ReadBodyWithMaxSizeTests(TestCase): def _build_response( self, length: Union[int, str] = UNKNOWN_LENGTH - ) -> Tuple[ + ) -> tuple[ BytesIO, "Deferred[int]", _DiscardBodyWithMaxSizeProtocol, diff --git a/tests/http/test_matrixfederationclient.py b/tests/http/test_matrixfederationclient.py index 6d875418884..6accb03b9fa 100644 --- a/tests/http/test_matrixfederationclient.py +++ b/tests/http/test_matrixfederationclient.py @@ -18,7 +18,7 @@ # # import io -from typing import Any, Dict, Generator +from typing import Any, Generator from unittest.mock import ANY, Mock, create_autospec from netaddr import IPSet @@ -745,7 +745,7 @@ def test_configurable_retry_and_delay_values(self) -> None: class FederationClientProxyTests(BaseMultiWorkerStreamTestCase): - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: conf = super().default_config() conf["instance_map"] 
= { "main": {"host": "testserv", "port": 8765}, diff --git a/tests/http/test_proxy.py b/tests/http/test_proxy.py index 7110dcf9f94..59a9b073bc9 100644 --- a/tests/http/test_proxy.py +++ b/tests/http/test_proxy.py @@ -18,7 +18,6 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Set from parameterized import parameterized @@ -64,7 +63,7 @@ class ProxyTests(TestCase): def test_parse_connection_header_value( self, connection_header_value: bytes, - expected_extra_headers_to_remove: Set[str], + expected_extra_headers_to_remove: set[str], ) -> None: """ Tests that the connection header value is parsed correctly diff --git a/tests/http/test_proxyagent.py b/tests/http/test_proxyagent.py index 5bc5d18d81b..a9b4f3d9561 100644 --- a/tests/http/test_proxyagent.py +++ b/tests/http/test_proxyagent.py @@ -21,7 +21,7 @@ import base64 import logging import os -from typing import List, Optional +from typing import Optional from unittest.mock import patch import treq @@ -252,7 +252,7 @@ def _make_connection( server_factory: IProtocolFactory, ssl: bool = False, expected_sni: Optional[bytes] = None, - tls_sanlist: Optional[List[bytes]] = None, + tls_sanlist: Optional[list[bytes]] = None, ) -> IProtocol: """Builds a test server, and completes the outgoing client connection diff --git a/tests/http/test_servlet.py b/tests/http/test_servlet.py index db39ecf2443..087191b2207 100644 --- a/tests/http/test_servlet.py +++ b/tests/http/test_servlet.py @@ -21,7 +21,7 @@ import json from http import HTTPStatus from io import BytesIO -from typing import Tuple, Union +from typing import Union from unittest.mock import Mock from synapse.api.errors import Codes, SynapseError @@ -108,11 +108,11 @@ def __init__(self, hs: HomeServer): self.clock = hs.get_clock() @cancellable - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: await self.clock.sleep(1.0) return HTTPStatus.OK, {"result": True} - async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def on_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: await self.clock.sleep(1.0) return HTTPStatus.OK, {"result": True} diff --git a/tests/logging/test_remote_handler.py b/tests/logging/test_remote_handler.py index e0fd12ccf78..534a1fc4eef 100644 --- a/tests/logging/test_remote_handler.py +++ b/tests/logging/test_remote_handler.py @@ -18,7 +18,6 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Tuple from twisted.internet.protocol import Protocol from twisted.internet.testing import AccumulatingProtocol, MemoryReactorClock @@ -33,7 +32,7 @@ def connect_logging_client( reactor: MemoryReactorClock, client_id: int -) -> Tuple[Protocol, AccumulatingProtocol]: +) -> tuple[Protocol, AccumulatingProtocol]: # This is essentially tests.server.connect_client, but disabling autoflush on # the client transport. 
This is necessary to avoid an infinite loop due to # sending of data via the logging transport causing additional logs to be diff --git a/tests/media/test_media_storage.py b/tests/media/test_media_storage.py index 28c4ce676a8..d584ea951c6 100644 --- a/tests/media/test_media_storage.py +++ b/tests/media/test_media_storage.py @@ -23,7 +23,7 @@ import tempfile from binascii import unhexlify from io import BytesIO -from typing import Any, BinaryIO, ClassVar, Dict, List, Literal, Optional, Tuple, Union +from typing import Any, BinaryIO, ClassVar, Literal, Optional, Union from unittest.mock import MagicMock, Mock, patch from urllib import parse @@ -297,9 +297,9 @@ class MediaRepoTests(unittest.HomeserverTestCase): user_id = "@test:user" def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: - self.fetches: List[ - Tuple[ - "Deferred[Tuple[bytes, Tuple[int, Dict[bytes, List[bytes]]]]]", + self.fetches: list[ + tuple[ + "Deferred[tuple[bytes, tuple[int, dict[bytes, list[bytes]]]]]", str, str, Optional[QueryParams], @@ -317,12 +317,12 @@ def get_file( retry_on_dns_fail: bool = True, ignore_backoff: bool = False, follow_redirects: bool = False, - ) -> "Deferred[Tuple[int, Dict[bytes, List[bytes]]]]": + ) -> "Deferred[tuple[int, dict[bytes, list[bytes]]]]": """A mock for MatrixFederationHttpClient.get_file.""" def write_to( - r: Tuple[bytes, Tuple[int, Dict[bytes, List[bytes]]]], - ) -> Tuple[int, Dict[bytes, List[bytes]]]: + r: tuple[bytes, tuple[int, dict[bytes, list[bytes]]]], + ) -> tuple[int, dict[bytes, list[bytes]]]: data, response = r output_stream.write(data) return response @@ -332,7 +332,7 @@ def write_err(f: Failure) -> Failure: output_stream.write(f.value.response) return f - d: Deferred[Tuple[bytes, Tuple[int, Dict[bytes, List[bytes]]]]] = Deferred() + d: Deferred[tuple[bytes, tuple[int, dict[bytes, list[bytes]]]]] = Deferred() self.fetches.append((d, destination, path, args)) # Note that this callback changes the value held by d. d_after_callback = d.addCallbacks(write_to, write_err) @@ -370,7 +370,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.media_id = "example.com/12345" - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: resources = super().create_resource_dict() resources["/_matrix/media"] = self.hs.get_media_repository_resource() return resources @@ -860,12 +860,12 @@ class TestSpamCheckerLegacy: Uses the legacy Spam-Checker API. 
""" - def __init__(self, config: Dict[str, Any], api: ModuleApi) -> None: + def __init__(self, config: dict[str, Any], api: ModuleApi) -> None: self.config = config self.api = api @staticmethod - def parse_config(config: Dict[str, Any]) -> Dict[str, Any]: + def parse_config(config: dict[str, Any]) -> dict[str, Any]: return config async def check_event_for_spam(self, event: EventBase) -> Union[bool, str]: @@ -911,12 +911,12 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: load_legacy_spam_checkers(hs) - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: resources = super().create_resource_dict() resources["/_matrix/media"] = self.hs.get_media_repository_resource() return resources - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = default_config("test") config.update( @@ -965,14 +965,14 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: check_media_file_for_spam=self.check_media_file_for_spam ) - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: resources = super().create_resource_dict() resources["/_matrix/media"] = self.hs.get_media_repository_resource() return resources async def check_media_file_for_spam( self, file_wrapper: ReadableFileWrapper, file_info: FileInfo - ) -> Union[Codes, Literal["NOT_SPAM"], Tuple[Codes, JsonDict]]: + ) -> Union[Codes, Literal["NOT_SPAM"], tuple[Codes, JsonDict]]: buf = BytesIO() await file_wrapper.write_chunks_to(buf.write) @@ -1028,7 +1028,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.client = hs.get_federation_http_client() self.store = hs.get_datastores().main - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: # We need to manually set the resource tree to include media, the # default only does `/_matrix/client` APIs. 
return {"/_matrix/media": self.hs.get_media_repository_resource()} @@ -1280,7 +1280,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.store = hs.get_datastores().main self.client = hs.get_federation_http_client() - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: resources = super().create_resource_dict() resources["/_matrix/media"] = self.hs.get_media_repository_resource() return resources @@ -1377,7 +1377,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: is_user_allowed_to_upload_media_of_size=self.is_user_allowed_to_upload_media_of_size, ) - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: resources = super().create_resource_dict() resources["/_matrix/media"] = self.hs.get_media_repository_resource() return resources diff --git a/tests/metrics/test_metrics.py b/tests/metrics/test_metrics.py index b3f42c76f18..084eba3a5a6 100644 --- a/tests/metrics/test_metrics.py +++ b/tests/metrics/test_metrics.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Dict, NoReturn, Protocol, Tuple +from typing import NoReturn, Protocol from prometheus_client.core import Sample @@ -35,7 +35,7 @@ from tests import unittest -def get_sample_labels_value(sample: Sample) -> Tuple[Dict[str, str], float]: +def get_sample_labels_value(sample: Sample) -> tuple[dict[str, str], float]: """Extract the labels and values of a sample. prometheus_client 0.5 changed the sample type to a named tuple with more @@ -54,7 +54,7 @@ def get_sample_labels_value(sample: Sample) -> Tuple[Dict[str, str], float]: # Otherwise fall back to treating it as a plain 3 tuple. else: # In older versions of prometheus_client Sample was a 3-tuple. - labels: Dict[str, str] + labels: dict[str, str] value: float _, labels, value = sample # type: ignore[misc] return labels, value @@ -127,7 +127,7 @@ def handle2(metrics: MetricEntry) -> None: def get_metrics_from_gauge( self, gauge: InFlightGauge - ) -> Dict[str, Dict[Tuple[str, ...], float]]: + ) -> dict[str, dict[tuple[str, ...], float]]: results = {} for r in gauge.collect(): @@ -384,7 +384,7 @@ def raise_exception() -> NoReturn: self.assertEqual(hs2_metric_value, "2.0") -def get_latest_metrics() -> Dict[str, str]: +def get_latest_metrics() -> dict[str, str]: """ Collect the latest metrics from the registry and parse them into an easy to use map. The key includes the metric name and labels. 
diff --git a/tests/module_api/test_api.py b/tests/module_api/test_api.py index 86f987f2927..b768a913d77 100644 --- a/tests/module_api/test_api.py +++ b/tests/module_api/test_api.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Any, Dict, Optional +from typing import Any, Optional from unittest.mock import AsyncMock, Mock from twisted.internet import defer @@ -839,7 +839,7 @@ class ModuleApiWorkerTestCase(BaseModuleApiTestCase, BaseMultiWorkerStreamTestCa presence.register_servlets, ] - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: conf = super().default_config() conf["stream_writers"] = {"presence": ["presence_writer"]} conf["instance_map"] = { diff --git a/tests/push/test_email.py b/tests/push/test_email.py index 26819e2d3c5..b1d16669a63 100644 --- a/tests/push/test_email.py +++ b/tests/push/test_email.py @@ -21,7 +21,7 @@ import importlib.resources as importlib_resources import os from http import HTTPStatus -from typing import Any, Dict, List, Sequence, Tuple +from typing import Any, Dict, Sequence import attr from parameterized import parameterized @@ -83,8 +83,8 @@ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: hs = self.setup_test_homeserver(config=config) - # List[Tuple[Deferred, args, kwargs]] - self.email_attempts: List[Tuple[Deferred, Sequence, Dict]] = [] + # List[tuple[Deferred, args, kwargs]] + self.email_attempts: list[tuple[Deferred, Sequence, Dict]] = [] def sendmail(*args: Any, **kwargs: Any) -> Deferred: # This mocks out synapse.reactor.send_email._sendmail. @@ -510,7 +510,7 @@ def test_remove_unlinked_pushers_background_job(self) -> None: ) self.assertEqual(len(pushers), 0) - def _check_for_mail(self) -> Tuple[Sequence, Dict]: + def _check_for_mail(self) -> tuple[Sequence, Dict]: """ Assert that synapse sent off exactly one email notification. diff --git a/tests/push/test_http.py b/tests/push/test_http.py index 4c8aae5782a..ca2ced01ed0 100644 --- a/tests/push/test_http.py +++ b/tests/push/test_http.py @@ -17,7 +17,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Any, Dict, List, Tuple +from typing import Any from unittest.mock import Mock from parameterized import parameterized @@ -51,7 +51,7 @@ class HTTPPusherTests(HomeserverTestCase): hijack_auth = False def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: - self.push_attempts: List[Tuple[Deferred, str, dict]] = [] + self.push_attempts: list[tuple[Deferred, str, dict]] = [] m = Mock() @@ -747,7 +747,7 @@ def _send_read_request( def _make_user_with_pusher( self, username: str, enabled: bool = True - ) -> Tuple[str, str]: + ) -> tuple[str, str]: """Registers a user and creates a pusher for them. Args: @@ -925,7 +925,7 @@ def test_update_different_device_access_token_device_id(self) -> None: ret = self.get_success( self.hs.get_datastores().main.get_pushers_by({"user_name": user_id}) ) - pushers: List[PusherConfig] = list(ret) + pushers: list[PusherConfig] = list(ret) # Check that we still have one pusher, and that the device ID associated with # it didn't change. 
@@ -1118,7 +1118,7 @@ def test_msc4076_badge_count( device_id = user_tuple.device_id # Set the push data dict based on test input parameters - push_data: Dict[str, Any] = { + push_data: dict[str, Any] = { "url": "http://example.com/_matrix/push/v1/notify", } if disable_badge_count: diff --git a/tests/push/test_presentable_names.py b/tests/push/test_presentable_names.py index bd42fc05809..4982a80cced 100644 --- a/tests/push/test_presentable_names.py +++ b/tests/push/test_presentable_names.py @@ -19,7 +19,7 @@ # # -from typing import Iterable, List, Optional, Tuple, cast +from typing import Iterable, Optional, cast from synapse.api.constants import EventTypes, Membership from synapse.api.room_versions import RoomVersions @@ -36,7 +36,7 @@ class MockDataStore: (I.e. the state key is used as the event ID.) """ - def __init__(self, events: Iterable[Tuple[StateKey, dict]]): + def __init__(self, events: Iterable[tuple[StateKey, dict]]): """ Args: events: A state map to event contents. @@ -63,7 +63,7 @@ async def get_event( assert allow_none, "Mock not configured for allow_none = False" # Decode the state key from the event ID. - state_key = cast(Tuple[str, str], tuple(event_id.split("|", 1))) + state_key = cast(tuple[str, str], tuple(event_id.split("|", 1))) return self._events.get(state_key) async def get_events(self, event_ids: Iterable[StateKey]) -> StateMap[EventBase]: @@ -77,7 +77,7 @@ class PresentableNamesTestCase(unittest.HomeserverTestCase): def _calculate_room_name( self, - events: Iterable[Tuple[Tuple[str, str], dict]], + events: Iterable[tuple[tuple[str, str], dict]], user_id: str = "", fallback_to_members: bool = True, fallback_to_single_member: bool = True, @@ -97,7 +97,7 @@ def _calculate_room_name( def test_name(self) -> None: """A room name event should be used.""" - events: List[Tuple[Tuple[str, str], dict]] = [ + events: list[tuple[tuple[str, str], dict]] = [ ((EventTypes.Name, ""), {"name": "test-name"}), ] self.assertEqual("test-name", self._calculate_room_name(events)) @@ -111,7 +111,7 @@ def test_name(self) -> None: def test_canonical_alias(self) -> None: """An canonical alias should be used.""" - events: List[Tuple[Tuple[str, str], dict]] = [ + events: list[tuple[tuple[str, str], dict]] = [ ((EventTypes.CanonicalAlias, ""), {"alias": "#test-name:test"}), ] self.assertEqual("#test-name:test", self._calculate_room_name(events)) @@ -125,7 +125,7 @@ def test_canonical_alias(self) -> None: def test_invite(self) -> None: """An invite has special behaviour.""" - events: List[Tuple[Tuple[str, str], dict]] = [ + events: list[tuple[tuple[str, str], dict]] = [ ((EventTypes.Member, self.USER_ID), {"membership": Membership.INVITE}), ((EventTypes.Member, self.OTHER_USER_ID), {"displayname": "Other User"}), ] @@ -151,7 +151,7 @@ def test_invite(self) -> None: def test_no_members(self) -> None: """Behaviour of an empty room.""" - events: List[Tuple[Tuple[str, str], dict]] = [] + events: list[tuple[tuple[str, str], dict]] = [] self.assertEqual("Empty Room", self._calculate_room_name(events)) # Note that events with invalid (or missing) membership are ignored. 
diff --git a/tests/push/test_push_rule_evaluator.py b/tests/push/test_push_rule_evaluator.py index 718c9614e50..b1f7ba69731 100644 --- a/tests/push/test_push_rule_evaluator.py +++ b/tests/push/test_push_rule_evaluator.py @@ -19,7 +19,7 @@ # # -from typing import Any, Dict, List, Optional, Union, cast +from typing import Any, Optional, Union, cast from twisted.internet.testing import MemoryReactor @@ -60,7 +60,7 @@ def test_nested(self) -> None: def test_non_string(self) -> None: """String, booleans, ints, nulls and list of those should be kept while other items are dropped.""" - input: Dict[str, Any] = { + input: dict[str, Any] = { "woo": "woo", "foo": True, "bar": 1, @@ -165,13 +165,13 @@ def _get_evaluator( ) room_member_count = 0 sender_power_level = 0 - power_levels: Dict[str, Union[int, Dict[str, int]]] = {} + power_levels: dict[str, Union[int, dict[str, int]]] = {} return PushRuleEvaluator( _flatten_dict(event), False, room_member_count, sender_power_level, - cast(Dict[str, int], power_levels.get("notifications", {})), + cast(dict[str, int], power_levels.get("notifications", {})), {} if related_events is None else related_events, related_event_match_enabled=True, room_version_feature_flags=event.room_version.msc3931_push_features, @@ -588,7 +588,7 @@ def test_tweaks_for_actions(self) -> None: This tests the behaviour of tweaks_for_actions. """ - actions: List[Union[Dict[str, str], str]] = [ + actions: list[Union[dict[str, str], str]] = [ {"set_tweak": "sound", "value": "default"}, {"set_tweak": "highlight"}, "notify", diff --git a/tests/replication/_base.py b/tests/replication/_base.py index 1a2dab4c7d7..ea1e01a212d 100644 --- a/tests/replication/_base.py +++ b/tests/replication/_base.py @@ -19,7 +19,7 @@ # import logging from collections import defaultdict -from typing import Any, Dict, List, Optional, Set, Tuple +from typing import Any, Optional from twisted.internet.address import IPv4Address from twisted.internet.protocol import Protocol, connectionDone @@ -108,7 +108,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self._client_transport: Optional[FakeTransport] = None self._server_transport: Optional[FakeTransport] = None - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: d = super().create_resource_dict() d["/_synapse/replication"] = ReplicationRestResource(self.hs) return d @@ -183,7 +183,7 @@ def handle_http_replication_attempt(self) -> SynapseRequest: # hook into the channel's request factory so that we can keep a record # of the requests - requests: List[SynapseRequest] = [] + requests: list[SynapseRequest] = [] real_request_factory = channel.requestFactory def request_factory(*args: Any, **kwargs: Any) -> SynapseRequest: @@ -251,7 +251,7 @@ class BaseMultiWorkerStreamTestCase(unittest.HomeserverTestCase): # Redis replication only takes place on Postgres skip = "Requires Postgres" - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: """ Overrides the default config to enable Redis. 
Even if the test only uses make_worker_hs, the main process needs Redis @@ -486,7 +486,7 @@ def __init__(self, hs: HomeServer): super().__init__(hs) # list of received (stream_name, token, row) tuples - self.received_rdata_rows: List[Tuple[str, int, Any]] = [] + self.received_rdata_rows: list[tuple[str, int, Any]] = [] async def on_rdata( self, stream_name: str, instance_name: str, token: int, rows: list @@ -500,7 +500,7 @@ class FakeRedisPubSubServer: """A fake Redis server for pub/sub.""" def __init__(self) -> None: - self._subscribers_by_channel: Dict[bytes, Set["FakeRedisPubSubProtocol"]] = ( + self._subscribers_by_channel: dict[bytes, set["FakeRedisPubSubProtocol"]] = ( defaultdict(set) ) diff --git a/tests/replication/http/test__base.py b/tests/replication/http/test__base.py index 31d3163c010..b757c6428a7 100644 --- a/tests/replication/http/test__base.py +++ b/tests/replication/http/test__base.py @@ -20,7 +20,6 @@ # from http import HTTPStatus -from typing import Tuple from twisted.web.server import Request @@ -52,7 +51,7 @@ async def _serialize_payload(**kwargs: ReplicationEndpoint) -> JsonDict: @cancellable async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await self.clock.sleep(1.0) return HTTPStatus.OK, {"result": True} @@ -73,7 +72,7 @@ async def _serialize_payload(**kwargs: ReplicationEndpoint) -> JsonDict: async def _handle_request( # type: ignore[override] self, request: Request, content: JsonDict - ) -> Tuple[int, JsonDict]: + ) -> tuple[int, JsonDict]: await self.clock.sleep(1.0) return HTTPStatus.OK, {"result": True} diff --git a/tests/replication/storage/test_events.py b/tests/replication/storage/test_events.py index fce32690052..1398689c2d4 100644 --- a/tests/replication/storage/test_events.py +++ b/tests/replication/storage/test_events.py @@ -19,7 +19,7 @@ # # import logging -from typing import Any, Iterable, List, Optional, Tuple +from typing import Any, Iterable, Optional from canonicaljson import encode_canonical_json from parameterized import parameterized @@ -244,13 +244,13 @@ def build_event( key: Optional[str] = None, internal: Optional[dict] = None, depth: Optional[int] = None, - prev_events: Optional[List[Tuple[str, dict]]] = None, - auth_events: Optional[List[str]] = None, - prev_state: Optional[List[str]] = None, + prev_events: Optional[list[tuple[str, dict]]] = None, + auth_events: Optional[list[str]] = None, + prev_state: Optional[list[str]] = None, redacts: Optional[str] = None, push_actions: Iterable = frozenset(), **content: object, - ) -> Tuple[EventBase, EventContext]: + ) -> tuple[EventBase, EventContext]: prev_events = prev_events or [] auth_events = auth_events or [] prev_state = prev_state or [] diff --git a/tests/replication/tcp/streams/test_events.py b/tests/replication/tcp/streams/test_events.py index 782dad39f5c..d171c095ab9 100644 --- a/tests/replication/tcp/streams/test_events.py +++ b/tests/replication/tcp/streams/test_events.py @@ -18,7 +18,7 @@ # # -from typing import Any, List, Optional +from typing import Any, Optional from parameterized import parameterized @@ -281,7 +281,7 @@ def test_update_function_huge_state_change( self.assertEqual(row.data.event_id, pl_event.event_id) # the state rows are unsorted - state_rows: List[EventsStreamCurrentStateRow] = [] + state_rows: list[EventsStreamCurrentStateRow] = [] for stream_name, _, row in received_rows: self.assertEqual("events", stream_name) self.assertIsInstance(row, EventsStreamRow) @@ 
-337,7 +337,7 @@ def test_update_function_state_row_limit(self) -> None: self.hs.get_datastores().main.get_latest_event_ids_in_room(self.room_id) ) - events: List[EventBase] = [] + events: list[EventBase] = [] for user in user_ids: events.extend( self._inject_state_event(sender=user) for _ in range(STATES_PER_USER) @@ -398,7 +398,7 @@ def test_update_function_state_row_limit(self) -> None: self.assertEqual(row.data.event_id, pl_events[i].event_id) # the state rows are unsorted - state_rows: List[EventsStreamCurrentStateRow] = [] + state_rows: list[EventsStreamCurrentStateRow] = [] for _ in range(STATES_PER_USER + 1): stream_name, token, row = received_rows.pop(0) self.assertEqual("events", stream_name) diff --git a/tests/replication/test_multi_media_repo.py b/tests/replication/test_multi_media_repo.py index f712ad1fe37..193c6c0198f 100644 --- a/tests/replication/test_multi_media_repo.py +++ b/tests/replication/test_multi_media_repo.py @@ -20,7 +20,7 @@ # import logging import os -from typing import Any, Optional, Tuple +from typing import Any, Optional from twisted.internet.protocol import Factory from twisted.internet.testing import MemoryReactor @@ -78,7 +78,7 @@ def make_worker_hs( def _get_media_req( self, hs: HomeServer, target: str, media_id: str - ) -> Tuple[FakeChannel, Request]: + ) -> tuple[FakeChannel, Request]: """Request some remote media from the given HS by calling the download API. @@ -293,7 +293,7 @@ def make_worker_hs( def _get_media_req( self, hs: HomeServer, target: str, media_id: str - ) -> Tuple[FakeChannel, Request]: + ) -> tuple[FakeChannel, Request]: """Request some remote media from the given HS by calling the download API. diff --git a/tests/rest/admin/test_admin.py b/tests/rest/admin/test_admin.py index 2a17389feb3..f3740a8e35e 100644 --- a/tests/rest/admin/test_admin.py +++ b/tests/rest/admin/test_admin.py @@ -20,7 +20,7 @@ # import urllib.parse -from typing import Dict, cast +from typing import cast from parameterized import parameterized @@ -65,7 +65,7 @@ class QuarantineMediaTestCase(unittest.HomeserverTestCase): room.register_servlets, ] - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: resources = super().create_resource_dict() resources["/_matrix/media"] = self.hs.get_media_repository_resource() return resources diff --git a/tests/rest/admin/test_event_reports.py b/tests/rest/admin/test_event_reports.py index 28be7fcd97a..19d945bb425 100644 --- a/tests/rest/admin/test_event_reports.py +++ b/tests/rest/admin/test_event_reports.py @@ -18,7 +18,6 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import List from twisted.internet.testing import MemoryReactor @@ -441,7 +440,7 @@ def _create_event_and_report_without_parameters( ) self.assertEqual(200, channel.code, msg=channel.json_body) - def _check_fields(self, content: List[JsonDict]) -> None: + def _check_fields(self, content: list[JsonDict]) -> None: """Checks that all attributes are present in an event report""" for c in content: self.assertIn("id", c) diff --git a/tests/rest/admin/test_federation.py b/tests/rest/admin/test_federation.py index d0b57d1faa1..5586bb47e1f 100644 --- a/tests/rest/admin/test_federation.py +++ b/tests/rest/admin/test_federation.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import List, Optional +from typing import Optional from parameterized import parameterized @@ -272,7 +272,7 @@ def test_order_by(self) -> None: """Testing 
order list with parameter `order_by`""" def _order_test( - expected_destination_list: List[str], + expected_destination_list: list[str], order_by: Optional[str], dir: Optional[str] = None, ) -> None: @@ -521,7 +521,7 @@ def _create_destinations(self, number_destinations: int) -> None: dest = f"sub{i}.example.com" self._create_destination(dest, 50, 50, 50, 100) - def _check_fields(self, content: List[JsonDict]) -> None: + def _check_fields(self, content: list[JsonDict]) -> None: """Checks that the expected destination attributes are present in content Args: @@ -820,7 +820,7 @@ def _create_destination_rooms( self, number_rooms: int, destination: Optional[str] = None, - ) -> List[str]: + ) -> list[str]: """ Create the given number of rooms. The given `destination` homeserver will be recorded as a participant. @@ -853,7 +853,7 @@ def _create_destination_rooms( return room_ids - def _check_fields(self, content: List[JsonDict]) -> None: + def _check_fields(self, content: list[JsonDict]) -> None: """Checks that the expected room attributes are present in content Args: diff --git a/tests/rest/admin/test_jwks.py b/tests/rest/admin/test_jwks.py index 55b822c4d03..ee5588951b5 100644 --- a/tests/rest/admin/test_jwks.py +++ b/tests/rest/admin/test_jwks.py @@ -19,7 +19,6 @@ # # -from typing import Dict from twisted.web.resource import Resource @@ -33,7 +32,7 @@ class JWKSTestCase(HomeserverTestCase): """Test /_synapse/jwks JWKS data.""" - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: d = super().create_resource_dict() d.update(build_synapse_client_resource_tree(self.hs)) return d diff --git a/tests/rest/admin/test_media.py b/tests/rest/admin/test_media.py index 86c09634cca..8cc54cc80c2 100644 --- a/tests/rest/admin/test_media.py +++ b/tests/rest/admin/test_media.py @@ -20,7 +20,6 @@ # # import os -from typing import Dict from parameterized import parameterized @@ -51,7 +50,7 @@ class _AdminMediaTests(unittest.HomeserverTestCase): media.register_servlets, ] - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: resources = super().create_resource_dict() resources["/_matrix/media"] = self.hs.get_media_repository_resource() return resources diff --git a/tests/rest/admin/test_room.py b/tests/rest/admin/test_room.py index 30b2de26e4a..6bd21630dbc 100644 --- a/tests/rest/admin/test_room.py +++ b/tests/rest/admin/test_room.py @@ -22,7 +22,7 @@ import time import urllib.parse from http import HTTPStatus -from typing import List, Optional +from typing import Optional from unittest.mock import AsyncMock, Mock from parameterized import parameterized @@ -1609,7 +1609,7 @@ def test_room_list_sort_order(self) -> None: def _order_test( order_type: str, - expected_room_list: List[str], + expected_room_list: list[str], reverse: bool = False, ) -> None: """Request the list of rooms in a certain order. 
Assert that order is what diff --git a/tests/rest/admin/test_scheduled_tasks.py b/tests/rest/admin/test_scheduled_tasks.py index 16b80e214b5..264c62e2ded 100644 --- a/tests/rest/admin/test_scheduled_tasks.py +++ b/tests/rest/admin/test_scheduled_tasks.py @@ -13,7 +13,7 @@ # # # -from typing import Mapping, Optional, Tuple +from typing import Mapping, Optional from twisted.internet.testing import MemoryReactor @@ -42,17 +42,17 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: # create and schedule a few tasks async def _test_task( task: ScheduledTask, - ) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: + ) -> tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: return TaskStatus.ACTIVE, None, None async def _finished_test_task( task: ScheduledTask, - ) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: + ) -> tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: return TaskStatus.COMPLETE, None, None async def _failed_test_task( task: ScheduledTask, - ) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: + ) -> tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: return TaskStatus.FAILED, None, "Everything failed" self._task_scheduler.register_action(_test_task, "test_task") diff --git a/tests/rest/admin/test_server_notice.py b/tests/rest/admin/test_server_notice.py index ebb6867d7c5..5053fea9c98 100644 --- a/tests/rest/admin/test_server_notice.py +++ b/tests/rest/admin/test_server_notice.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import List, Sequence +from typing import Sequence from twisted.internet.testing import MemoryReactor @@ -729,7 +729,7 @@ def _check_invite_and_join_status( return invited_rooms - def _sync_and_get_messages(self, room_id: str, token: str) -> List[JsonDict]: + def _sync_and_get_messages(self, room_id: str, token: str) -> list[JsonDict]: """ Do a sync and get messages of a room. 
diff --git a/tests/rest/admin/test_statistics.py b/tests/rest/admin/test_statistics.py index 4026c47a23e..a18952983e4 100644 --- a/tests/rest/admin/test_statistics.py +++ b/tests/rest/admin/test_statistics.py @@ -19,7 +19,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Dict, List, Optional +from typing import Optional from twisted.internet.testing import MemoryReactor from twisted.web.resource import Resource @@ -50,7 +50,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.url = "/_synapse/admin/v1/statistics/users/media" - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: resources = super().create_resource_dict() resources["/_matrix/media"] = self.hs.get_media_repository_resource() return resources @@ -485,7 +485,7 @@ def _create_media(self, user_token: str, number_media: int) -> None: # Upload some media into the room self.helper.upload_media(SMALL_PNG, tok=user_token, expect_code=200) - def _check_fields(self, content: List[JsonDict]) -> None: + def _check_fields(self, content: list[JsonDict]) -> None: """Checks that all attributes are present in content Args: content: List that is checked for content @@ -497,7 +497,7 @@ def _check_fields(self, content: List[JsonDict]) -> None: self.assertIn("media_length", c) def _order_test( - self, order_type: str, expected_user_list: List[str], dir: Optional[str] = None + self, order_type: str, expected_user_list: list[str], dir: Optional[str] = None ) -> None: """Request the list of users in a certain order. Assert that order is what we expect diff --git a/tests/rest/admin/test_user.py b/tests/rest/admin/test_user.py index ca41cd6c31f..040b21d4712 100644 --- a/tests/rest/admin/test_user.py +++ b/tests/rest/admin/test_user.py @@ -27,7 +27,7 @@ import urllib.parse from binascii import unhexlify from http import HTTPStatus -from typing import Dict, List, Optional +from typing import Optional from unittest.mock import AsyncMock, Mock, patch from parameterized import parameterized, parameterized_class @@ -1185,7 +1185,7 @@ def test_filter_not_user_types(self) -> None: ) def test_user_type( - expected_user_ids: List[str], not_user_types: Optional[List[str]] = None + expected_user_ids: list[str], not_user_types: Optional[list[str]] = None ) -> None: """Runs a test for the not_user_types param Args: @@ -1262,7 +1262,7 @@ def test_filter_not_user_types_with_extra(self) -> None: ) def test_user_type( - expected_user_ids: List[str], not_user_types: Optional[List[str]] = None + expected_user_ids: list[str], not_user_types: Optional[list[str]] = None ) -> None: """Runs a test for the not_user_types param Args: @@ -1373,7 +1373,7 @@ def test_filter_locked(self) -> None: def _order_test( self, - expected_user_list: List[str], + expected_user_list: list[str], order_by: Optional[str], dir: Optional[str] = None, ) -> None: @@ -1403,7 +1403,7 @@ def _order_test( self.assertEqual(expected_user_list, returned_order) self._check_fields(channel.json_body["users"]) - def _check_fields(self, content: List[JsonDict]) -> None: + def _check_fields(self, content: list[JsonDict]) -> None: """Checks that the expected user attributes are present in content Args: content: List that is checked for content @@ -3690,7 +3690,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.other_user ) - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: 
+ def create_resource_dict(self) -> dict[str, Resource]:
resources = super().create_resource_dict() resources["/_matrix/media"] = self.hs.get_media_repository_resource() return resources @@ -4138,7 +4138,7 @@ def test_order_by(self) -> None: [media2] + sorted([media1, media3]), "safe_from_quarantine", "b" ) - def _create_media_for_user(self, user_token: str, number_media: int) -> List[str]: + def _create_media_for_user(self, user_token: str, number_media: int) -> list[str]: """ Create a number of media for a specific user Args: @@ -4195,7 +4195,7 @@ def _create_media_and_access( return media_id - def _check_fields(self, content: List[JsonDict]) -> None: + def _check_fields(self, content: list[JsonDict]) -> None: """Checks that the expected user attributes are present in content Args: content: List that is checked for content @@ -4212,7 +4212,7 @@ def _check_fields(self, content: List[JsonDict]) -> None: def _order_test( self, - expected_media_list: List[str], + expected_media_list: list[str], order_by: Optional[str], dir: Optional[str] = None, ) -> None: diff --git a/tests/rest/client/sliding_sync/test_extension_thread_subscriptions.py b/tests/rest/client/sliding_sync/test_extension_thread_subscriptions.py index 4e151b9aae6..de76334f64b 100644 --- a/tests/rest/client/sliding_sync/test_extension_thread_subscriptions.py +++ b/tests/rest/client/sliding_sync/test_extension_thread_subscriptions.py @@ -13,7 +13,7 @@ # import logging from http import HTTPStatus -from typing import List, Optional, Tuple, cast +from typing import Optional, cast from twisted.test.proto_helpers import MemoryReactor @@ -358,7 +358,7 @@ def test_limit_and_companion_backpagination(self) -> None: using the companion /thread_subscriptions endpoint. """ - thread_root_ids: List[str] = [] + thread_root_ids: list[str] = [] def make_subscription() -> None: thread_root_resp = self.helper.send( @@ -455,7 +455,7 @@ def make_subscription() -> None: def _do_backpaginate( self, *, from_tok: str, to_tok: str, limit: int, access_token: str - ) -> Tuple[JsonDict, Optional[str]]: + ) -> tuple[JsonDict, Optional[str]]: channel = self.make_request( "GET", "/_matrix/client/unstable/io.element.msc4308/thread_subscriptions" diff --git a/tests/rest/client/sliding_sync/test_extension_to_device.py b/tests/rest/client/sliding_sync/test_extension_to_device.py index a77b0a2e9f9..0b0a65babf0 100644 --- a/tests/rest/client/sliding_sync/test_extension_to_device.py +++ b/tests/rest/client/sliding_sync/test_extension_to_device.py @@ -12,7 +12,6 @@ # . # import logging -from typing import List from parameterized import parameterized_class @@ -59,7 +58,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: super().prepare(reactor, clock, hs) def _assert_to_device_response( - self, response_body: JsonDict, expected_messages: List[JsonDict] + self, response_body: JsonDict, expected_messages: list[JsonDict] ) -> str: """Assert the sliding sync response was successful and has the expected to-device messages. diff --git a/tests/rest/client/sliding_sync/test_rooms_timeline.py b/tests/rest/client/sliding_sync/test_rooms_timeline.py index 44a6068c11c..04a9cd5382a 100644 --- a/tests/rest/client/sliding_sync/test_rooms_timeline.py +++ b/tests/rest/client/sliding_sync/test_rooms_timeline.py @@ -12,7 +12,7 @@ # . 
# import logging -from typing import List, Optional +from typing import Optional from parameterized import parameterized_class @@ -75,14 +75,14 @@ def _assertListEqual( if actual_items == expected_items: return - expected_lines: List[str] = [] + expected_lines: list[str] = [] for expected_item in expected_items: is_expected_in_actual = expected_item in actual_items expected_lines.append( "{} {}".format(" " if is_expected_in_actual else "?", expected_item) ) - actual_lines: List[str] = [] + actual_lines: list[str] = [] for actual_item in actual_items: is_actual_in_expected = actual_item in expected_items actual_lines.append( @@ -101,8 +101,8 @@ def _assertTimelineEqual( self, *, room_id: str, - actual_event_ids: List[str], - expected_event_ids: List[str], + actual_event_ids: list[str], + expected_event_ids: list[str], message: Optional[str] = None, ) -> None: """ diff --git a/tests/rest/client/sliding_sync/test_sliding_sync.py b/tests/rest/client/sliding_sync/test_sliding_sync.py index 8da5863b3ad..9f4c6bad05e 100644 --- a/tests/rest/client/sliding_sync/test_sliding_sync.py +++ b/tests/rest/client/sliding_sync/test_sliding_sync.py @@ -12,7 +12,7 @@ # . # import logging -from typing import Any, Dict, Iterable, List, Literal, Optional, Tuple +from typing import Any, Iterable, Literal, Optional from unittest.mock import AsyncMock from parameterized import parameterized, parameterized_class @@ -82,7 +82,7 @@ def default_config(self) -> JsonDict: def do_sync( self, sync_body: JsonDict, *, since: Optional[str] = None, tok: str - ) -> Tuple[JsonDict, str]: + ) -> tuple[JsonDict, str]: """Do a sliding sync request with given body. Asserts the request was successful. @@ -170,7 +170,7 @@ def _add_new_dm_to_global_account_data( # Scrutinize the account data since it has no concrete type. We're just copying # everything into a known type. It should be a mapping from user ID to a list of # room IDs. Ignore anything else. 
- new_dm_map: Dict[str, List[str]] = {} + new_dm_map: dict[str, list[str]] = {} if isinstance(existing_dm_map, dict): for user_id, room_ids in existing_dm_map.items(): if isinstance(user_id, str) and isinstance(room_ids, list): @@ -239,7 +239,7 @@ def _create_dm_room( def _create_remote_invite_room_for_user( self, invitee_user_id: str, - unsigned_invite_room_state: Optional[List[StrippedStateEvent]], + unsigned_invite_room_state: Optional[list[StrippedStateEvent]], invite_room_id: Optional[str] = None, ) -> str: """ diff --git a/tests/rest/client/test_account.py b/tests/rest/client/test_account.py index 773f49dfc94..c4c62c78001 100644 --- a/tests/rest/client/test_account.py +++ b/tests/rest/client/test_account.py @@ -23,7 +23,7 @@ import re from email.parser import Parser from http import HTTPStatus -from typing import Any, Dict, List, Optional, Union +from typing import Any, Optional, Union from unittest.mock import Mock from twisted.internet.interfaces import IReactorTCP @@ -87,7 +87,7 @@ async def sendmail( ) -> None: self.email_attempts.append(msg_bytes) - self.email_attempts: List[bytes] = [] + self.email_attempts: list[bytes] = [] hs.get_send_email_handler()._sendmail = sendmail return hs @@ -721,7 +721,7 @@ class WhoamiTestCase(unittest.HomeserverTestCase): register.register_servlets, ] - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() config["allow_guest_access"] = True return config @@ -827,7 +827,7 @@ async def sendmail( ) -> None: self.email_attempts.append(msg_bytes) - self.email_attempts: List[bytes] = [] + self.email_attempts: list[bytes] = [] self.hs.get_send_email_handler()._sendmail = sendmail return self.hs @@ -1501,10 +1501,10 @@ async def is_expired(user_id: str) -> bool: def _test_status( self, - users: Optional[List[str]], + users: Optional[list[str]], expected_status_code: int = HTTPStatus.OK, - expected_statuses: Optional[Dict[str, Dict[str, bool]]] = None, - expected_failures: Optional[List[str]] = None, + expected_statuses: Optional[dict[str, dict[str, bool]]] = None, + expected_failures: Optional[list[str]] = None, expected_errcode: Optional[str] = None, ) -> None: """Send a request to the account status endpoint and check that the response diff --git a/tests/rest/client/test_auth.py b/tests/rest/client/test_auth.py index f5b7f957210..5955d4b7a2c 100644 --- a/tests/rest/client/test_auth.py +++ b/tests/rest/client/test_auth.py @@ -20,7 +20,7 @@ # import re from http import HTTPStatus -from typing import Any, Dict, List, Optional, Tuple, Union +from typing import Any, Optional, Union from twisted.internet.defer import succeed from twisted.internet.testing import MemoryReactor @@ -47,7 +47,7 @@ class DummyRecaptchaChecker(UserInteractiveAuthChecker): def __init__(self, hs: HomeServer) -> None: super().__init__(hs) - self.recaptcha_attempts: List[Tuple[dict, str]] = [] + self.recaptcha_attempts: list[tuple[dict, str]] = [] def is_enabled(self) -> bool: return True @@ -178,7 +178,7 @@ class UIAuthTests(unittest.HomeserverTestCase): register.register_servlets, ] - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() # public_baseurl uses an http:// scheme because FakeChannel.isSecure() returns @@ -195,7 +195,7 @@ def default_config(self) -> Dict[str, Any]: return config - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: resource_dict = 
super().create_resource_dict() resource_dict.update(build_synapse_client_resource_tree(self.hs)) return resource_dict @@ -1091,7 +1091,7 @@ def test_many_token_refresh(self) -> None: was very slow if a lot of refreshes had been performed for the session. """ - def _refresh(refresh_token: str) -> Tuple[str, str]: + def _refresh(refresh_token: str) -> tuple[str, str]: """ Performs one refresh, returning the next refresh token and access token. """ @@ -1172,7 +1172,7 @@ def _txn(txn: LoggingTransaction) -> int: def oidc_config( id: str, with_localpart_template: bool, **kwargs: Any -) -> Dict[str, Any]: +) -> dict[str, Any]: """Sample OIDC provider config used in backchannel logout tests. Args: @@ -1185,7 +1185,7 @@ def oidc_config( A dict suitable for the `oidc_config` or the `oidc_providers[]` parts of the HS config """ - config: Dict[str, Any] = { + config: dict[str, Any] = { "idp_id": id, "idp_name": id, "issuer": TEST_OIDC_ISSUER, @@ -1213,7 +1213,7 @@ class OidcBackchannelLogoutTests(unittest.HomeserverTestCase): login.register_servlets, ] - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() # public_baseurl uses an http:// scheme because FakeChannel.isSecure() returns @@ -1223,7 +1223,7 @@ def default_config(self) -> Dict[str, Any]: return config - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: resource_dict = super().create_resource_dict() resource_dict.update(build_synapse_client_resource_tree(self.hs)) return resource_dict @@ -1363,7 +1363,7 @@ def test_logout_during_mapping(self) -> None: # We should have a user_mapping_session cookie cookie_headers = channel.headers.getRawHeaders("Set-Cookie") assert cookie_headers - cookies: Dict[str, str] = {} + cookies: dict[str, str] = {} for h in cookie_headers: key, value = h.split(";")[0].split("=", maxsplit=1) cookies[key] = value diff --git a/tests/rest/client/test_delayed_events.py b/tests/rest/client/test_delayed_events.py index 221a4902f20..c67ffc76683 100644 --- a/tests/rest/client/test_delayed_events.py +++ b/tests/rest/client/test_delayed_events.py @@ -15,7 +15,6 @@ """Tests REST events for /delayed_events paths.""" from http import HTTPStatus -from typing import List from parameterized import parameterized @@ -574,7 +573,7 @@ def test_delayed_state_is_cancelled_by_new_state_from_other_user( ) self.assertEqual(setter_expected, content.get(setter_key), content) - def _get_delayed_events(self) -> List[JsonDict]: + def _get_delayed_events(self) -> list[JsonDict]: channel = self.make_request( "GET", PATH_PREFIX, diff --git a/tests/rest/client/test_login.py b/tests/rest/client/test_login.py index c54e409a6ce..1ebd59b42a3 100644 --- a/tests/rest/client/test_login.py +++ b/tests/rest/client/test_login.py @@ -25,11 +25,8 @@ BinaryIO, Callable, Collection, - Dict, - List, Literal, Optional, - Tuple, Union, ) from unittest.mock import Mock @@ -146,11 +143,11 @@ async def check_login_for_spam( user_id: str, device_id: Optional[str], initial_display_name: Optional[str], - request_info: Collection[Tuple[Optional[str], str]], + request_info: Collection[tuple[Optional[str], str]], auth_provider_id: Optional[str] = None, ) -> Union[ Literal["NOT_SPAM"], - Tuple["synapse.module_api.errors.Codes", JsonDict], + tuple["synapse.module_api.errors.Codes", JsonDict], ]: return "NOT_SPAM" @@ -170,11 +167,11 @@ async def check_login_for_spam( user_id: str, device_id: Optional[str], initial_display_name: Optional[str], - 
request_info: Collection[Tuple[Optional[str], str]], + request_info: Collection[tuple[Optional[str], str]], auth_provider_id: Optional[str] = None, ) -> Union[ Literal["NOT_SPAM"], - Tuple["synapse.module_api.errors.Codes", JsonDict], + tuple["synapse.module_api.errors.Codes", JsonDict], ]: # Return an odd set of values to ensure that they get correctly passed # to the client. @@ -633,7 +630,7 @@ class MultiSSOTestCase(unittest.HomeserverTestCase): login.register_servlets, ] - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() config["public_baseurl"] = PUBLIC_BASEURL @@ -678,7 +675,7 @@ def default_config(self) -> Dict[str, Any]: def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.login_sso_redirect_url_builder = LoginSSORedirectURIBuilder(hs.config) - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: d = super().create_resource_dict() d.update(build_synapse_client_resource_tree(self.hs)) return d @@ -730,7 +727,7 @@ def test_multi_sso_redirect(self) -> None: p.close() # there should be a link for each href - returned_idps: List[str] = [] + returned_idps: list[str] = [] for link in p.links: path, query = link.split("?", 1) self.assertEqual(path, "pick_idp") @@ -891,7 +888,7 @@ def test_login_via_oidc(self) -> None: # ... and should have set a cookie including the redirect url cookie_headers = channel.headers.getRawHeaders("Set-Cookie") assert cookie_headers - cookies: Dict[str, str] = {} + cookies: dict[str, str] = {} for h in cookie_headers: key, value = h.split(";")[0].split("=", maxsplit=1) cookies[key] = value @@ -1179,7 +1176,7 @@ class JWTTestCase(unittest.HomeserverTestCase): "algorithm": jwt_algorithm, } - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() # If jwt_config has been defined (eg via @override_config), don't replace it. 
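Nothing changes at runtime in helpers like default_config: on 3.9+ the typing alias and the builtin generic deparameterise to the same origin and arguments under introspection. A small sketch, standard library only:

    from typing import Any, Dict, get_args, get_origin

    legacy = Dict[str, Any]
    pep585 = dict[str, Any]

    # Both spellings carry the same origin and type arguments:
    assert get_origin(legacy) is dict and get_origin(pep585) is dict
    assert get_args(legacy) == get_args(pep585) == (str, Any)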
@@ -1188,7 +1185,7 @@ def default_config(self) -> Dict[str, Any]: return config - def jwt_encode(self, payload: Dict[str, Any], secret: str = jwt_secret) -> str: + def jwt_encode(self, payload: dict[str, Any], secret: str = jwt_secret) -> str: header = {"alg": self.jwt_algorithm} result: bytes = jwt.encode(header, payload, secret) return result.decode("ascii") @@ -1426,7 +1423,7 @@ class JWTPubKeyTestCase(unittest.HomeserverTestCase): ] ) - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() config["jwt_config"] = { "enabled": True, @@ -1435,7 +1432,7 @@ def default_config(self) -> Dict[str, Any]: } return config - def jwt_encode(self, payload: Dict[str, Any], secret: str = jwt_privatekey) -> str: + def jwt_encode(self, payload: dict[str, Any], secret: str = jwt_privatekey) -> str: header = {"alg": "RS256"} if secret.startswith("-----BEGIN RSA PRIVATE KEY-----"): secret = JsonWebKey.import_key(secret, {"kty": "RSA"}) @@ -1630,7 +1627,7 @@ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: ) return hs - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() config["public_baseurl"] = PUBLIC_BASEURL @@ -1649,7 +1646,7 @@ def default_config(self) -> Dict[str, Any]: config["sso"] = {"client_whitelist": ["https://x"]} return config - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: d = super().create_resource_dict() d.update(build_synapse_client_resource_tree(self.hs)) return d @@ -1660,7 +1657,7 @@ def proceed_to_username_picker_page( displayname: str, email: str, picture: str, - ) -> Tuple[str, str]: + ) -> tuple[str, str]: # do the start of the login flow channel, _ = self.helper.auth_via_oidc( fake_oidc_server, @@ -1681,7 +1678,7 @@ def proceed_to_username_picker_page( self.assertEqual(picker_url, "/_synapse/client/pick_username/account_details") # ... 
with a username_mapping_session cookie - cookies: Dict[str, str] = {} + cookies: dict[str, str] = {} channel.extract_cookies(cookies) self.assertIn("username_mapping_session", cookies) session_id = cookies["username_mapping_session"] @@ -1894,5 +1891,5 @@ async def mock_get_file( max_size: Optional[int] = None, headers: Optional[RawHeaders] = None, is_allowed_content_type: Optional[Callable[[str], bool]] = None, -) -> Tuple[int, Dict[bytes, List[bytes]], str, int]: +) -> tuple[int, dict[bytes, list[bytes]], str, int]: return 0, {b"Content-Type": [b"image/png"]}, "", 200 diff --git a/tests/rest/client/test_media.py b/tests/rest/client/test_media.py index 91bf94b672d..79f70db8a39 100644 --- a/tests/rest/client/test_media.py +++ b/tests/rest/client/test_media.py @@ -24,7 +24,7 @@ import os import re import shutil -from typing import Any, BinaryIO, ClassVar, Dict, List, Optional, Sequence, Tuple, Type +from typing import Any, BinaryIO, ClassVar, Optional, Sequence from unittest.mock import MagicMock, Mock, patch from urllib import parse from urllib.parse import quote, urlencode @@ -265,7 +265,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: assert self.media_repo.url_previewer is not None self.url_previewer = self.media_repo.url_previewer - self.lookups: Dict[str, Any] = {} + self.lookups: dict[str, Any] = {} class Resolver: def resolveHostName( @@ -273,7 +273,7 @@ def resolveHostName( resolutionReceiver: IResolutionReceiver, hostName: str, portNumber: int = 0, - addressTypes: Optional[Sequence[Type[IAddress]]] = None, + addressTypes: Optional[Sequence[type[IAddress]]] = None, transportSemantics: str = "TCP", ) -> IResolutionReceiver: resolution = HostResolution(hostName) @@ -1357,7 +1357,7 @@ def test_oembed_autodiscovery_blocked(self) -> None: self.assertEqual(body["og:title"], "Test") self.assertNotIn("og:image", body) - def _download_image(self) -> Tuple[str, str]: + def _download_image(self) -> tuple[str, str]: """Downloads an image into the URL cache. Returns: A (host, media_id) tuple representing the MXC URI of the image. 
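Signatures such as mock_get_file's tuple[int, dict[bytes, list[bytes]], str, int] translate one-for-one, though a module-level alias (hypothetical, not introduced by this patch) can keep the deeper nestings readable:

    # Alias for the 4-field response shape returned by mock_get_file above
    # (name and field semantics are illustrative only):
    GetFileResponse = tuple[int, dict[bytes, list[bytes]], str, int]

    def headers_of(resp: GetFileResponse) -> dict[bytes, list[bytes]]:
        _, headers, _, _ = resp
        return headers

    assert headers_of((0, {b"Content-Type": [b"image/png"]}, "", 200)) == {
        b"Content-Type": [b"image/png"]
    }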
@@ -1994,8 +1994,8 @@ class DownloadAndThumbnailTestCase(unittest.HomeserverTestCase): ] def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: - self.fetches: List[ - Tuple[ + self.fetches: list[ + tuple[ "Deferred[Any]", str, str, @@ -2014,12 +2014,12 @@ def federation_get_file( retry_on_dns_fail: bool = True, ignore_backoff: bool = False, follow_redirects: bool = False, - ) -> "Deferred[Tuple[int, Dict[bytes, List[bytes]], bytes]]": + ) -> "Deferred[tuple[int, dict[bytes, list[bytes]], bytes]]": """A mock for MatrixFederationHttpClient.federation_get_file.""" def write_to( - r: Tuple[bytes, Tuple[int, Dict[bytes, List[bytes]], bytes]], - ) -> Tuple[int, Dict[bytes, List[bytes]], bytes]: + r: tuple[bytes, tuple[int, dict[bytes, list[bytes]], bytes]], + ) -> tuple[int, dict[bytes, list[bytes]], bytes]: data, response = r output_stream.write(data) return response @@ -2029,7 +2029,7 @@ def write_err(f: Failure) -> Failure: output_stream.write(f.value.response) return f - d: Deferred[Tuple[bytes, Tuple[int, Dict[bytes, List[bytes]], bytes]]] = ( + d: Deferred[tuple[bytes, tuple[int, dict[bytes, list[bytes]], bytes]]] = ( Deferred() ) self.fetches.append((d, destination, path, args)) @@ -2048,12 +2048,12 @@ def get_file( retry_on_dns_fail: bool = True, ignore_backoff: bool = False, follow_redirects: bool = False, - ) -> "Deferred[Tuple[int, Dict[bytes, List[bytes]]]]": + ) -> "Deferred[tuple[int, dict[bytes, list[bytes]]]]": """A mock for MatrixFederationHttpClient.get_file.""" def write_to( - r: Tuple[bytes, Tuple[int, Dict[bytes, List[bytes]]]], - ) -> Tuple[int, Dict[bytes, List[bytes]]]: + r: tuple[bytes, tuple[int, dict[bytes, list[bytes]]]], + ) -> tuple[int, dict[bytes, list[bytes]]]: data, response = r output_stream.write(data) return response @@ -2063,7 +2063,7 @@ def write_err(f: Failure) -> Failure: output_stream.write(f.value.response) return f - d: Deferred[Tuple[bytes, Tuple[int, Dict[bytes, List[bytes]]]]] = Deferred() + d: Deferred[tuple[bytes, tuple[int, dict[bytes, list[bytes]]]]] = Deferred() self.fetches.append((d, destination, path, args)) # Note that this callback changes the value held by d. 
d_after_callback = d.addCallbacks(write_to, write_err) @@ -2538,7 +2538,7 @@ def test_same_quality(self, method: str, desired_size: int) -> None: @parameterized_class(configs) class AuthenticatedMediaTestCase(unittest.HomeserverTestCase): - extra_config: Dict[str, Any] + extra_config: dict[str, Any] servlets = [ media.register_servlets, login.register_servlets, @@ -2576,7 +2576,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.user = self.register_user("user", "pass") self.tok = self.login("user", "pass") - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: resources = super().create_resource_dict() resources["/_matrix/media"] = self.hs.get_media_repository_resource() return resources @@ -2895,7 +2895,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.user = self.register_user("user", "pass") self.tok = self.login("user", "pass") - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: resources = super().create_resource_dict() resources["/_matrix/media"] = self.hs.get_media_repository_resource() return resources @@ -3012,7 +3012,7 @@ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: async def _get_media_upload_limits_for_user( self, user_id: str, - ) -> Optional[List[MediaUploadLimit]]: + ) -> Optional[list[MediaUploadLimit]]: # user1 has custom limits if user_id == self.user1: # n.b. we return these in increasing duration order and Synapse will need to sort them correctly @@ -3060,7 +3060,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: on_media_upload_limit_exceeded=self._on_media_upload_limit_exceeded, ) - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: resources = super().create_resource_dict() resources["/_matrix/media"] = self.hs.get_media_repository_resource() return resources diff --git a/tests/rest/client/test_notifications.py b/tests/rest/client/test_notifications.py index e00152389bb..7e2a63955cc 100644 --- a/tests/rest/client/test_notifications.py +++ b/tests/rest/client/test_notifications.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import List, Optional, Tuple +from typing import Optional from unittest.mock import AsyncMock, Mock from twisted.internet.testing import MemoryReactor @@ -156,7 +156,7 @@ def test_pagination_of_notifications(self) -> None: def _request_notifications( self, from_token: Optional[str], limit: int, expected_count: int - ) -> Tuple[List[str], str]: + ) -> tuple[list[str], str]: """ Make a request to /notifications to get the latest events to be notified about. 
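The media-download mocks keep their "Deferred[...]" return types as string annotations, and the builtin generics are equally valid inside them: a string annotation is never evaluated by the interpreter, only by the type checker (or lazily via typing.get_type_hints). A minimal sketch, assuming Twisted is importable:

    from twisted.internet.defer import Deferred

    def fetch_sketch() -> "Deferred[tuple[int, dict[bytes, list[bytes]]]]":
        # Hypothetical stand-in for the get_file mock above; the annotation
        # is just a string at runtime, so the PEP 585 spelling costs nothing
        # even where Deferred itself is not subscriptable.
        d: "Deferred[tuple[int, dict[bytes, list[bytes]]]]" = Deferred()
        return d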
diff --git a/tests/rest/client/test_profile.py b/tests/rest/client/test_profile.py index 18b3d3a089c..aa9b72c65eb 100644 --- a/tests/rest/client/test_profile.py +++ b/tests/rest/client/test_profile.py @@ -24,7 +24,7 @@ import logging import urllib.parse from http import HTTPStatus -from typing import Any, Dict, Optional +from typing import Any, Optional from canonicaljson import encode_canonical_json @@ -778,7 +778,7 @@ def test_set_custom_field_other(self) -> None: self.assertEqual(channel.code, 403, channel.result) self.assertEqual(channel.json_body["errcode"], Codes.FORBIDDEN) - def _setup_local_files(self, names_and_props: Dict[str, Dict[str, Any]]) -> None: + def _setup_local_files(self, names_and_props: dict[str, dict[str, Any]]) -> None: """Stores metadata about files in the database. Args: diff --git a/tests/rest/client/test_redactions.py b/tests/rest/client/test_redactions.py index e3ca108d03a..88be8748ee2 100644 --- a/tests/rest/client/test_redactions.py +++ b/tests/rest/client/test_redactions.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import List, Optional +from typing import Optional from parameterized import parameterized @@ -85,7 +85,7 @@ def _redact_event( room_id: str, event_id: str, expect_code: int = 200, - with_relations: Optional[List[str]] = None, + with_relations: Optional[list[str]] = None, content: Optional[JsonDict] = None, ) -> JsonDict: """Helper function to send a redaction event. @@ -104,7 +104,7 @@ def _redact_event( self.assertEqual(channel.code, expect_code) return channel.json_body - def _sync_room_timeline(self, access_token: str, room_id: str) -> List[JsonDict]: + def _sync_room_timeline(self, access_token: str, room_id: str) -> list[JsonDict]: channel = self.make_request("GET", "sync", access_token=access_token) self.assertEqual(channel.code, 200) room_sync = channel.json_body["rooms"]["join"][room_id] diff --git a/tests/rest/client/test_register.py b/tests/rest/client/test_register.py index c7c81aa81c6..2c0396a3de4 100644 --- a/tests/rest/client/test_register.py +++ b/tests/rest/client/test_register.py @@ -22,7 +22,7 @@ import datetime import importlib.resources as importlib_resources import os -from typing import Any, Dict, List, Tuple +from typing import Any from unittest.mock import AsyncMock from twisted.internet.testing import MemoryReactor @@ -54,7 +54,7 @@ class RegisterRestServletTestCase(unittest.HomeserverTestCase): ] url = b"/_matrix/client/r0/register" - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() config["allow_guest_access"] = True return config @@ -1032,7 +1032,7 @@ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: async def sendmail(*args: Any, **kwargs: Any) -> None: self.email_attempts.append((args, kwargs)) - self.email_attempts: List[Tuple[Any, Any]] = [] + self.email_attempts: list[tuple[Any, Any]] = [] self.hs.get_send_email_handler()._sendmail = sendmail self.store = self.hs.get_datastores().main @@ -1146,7 +1146,7 @@ def test_deactivated_user(self) -> None: self.assertEqual(len(self.email_attempts), 0) - def create_user(self) -> Tuple[str, str]: + def create_user(self) -> tuple[str, str]: user_id = self.register_user("kermit", "monkey") tok = self.login("kermit", "monkey") # We need to manually add an email address otherwise the handler will do @@ -1250,7 +1250,7 @@ class RegistrationTokenValidityRestServletTestCase(unittest.HomeserverTestCase): servlets = 
[register.register_servlets] url = "/_matrix/client/v1/register/m.login.registration_token/validity" - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() config["registration_requires_token"] = True return config diff --git a/tests/rest/client/test_relations.py b/tests/rest/client/test_relations.py index 21fb86367a3..3912a3c7723 100644 --- a/tests/rest/client/test_relations.py +++ b/tests/rest/client/test_relations.py @@ -20,7 +20,7 @@ # import urllib.parse -from typing import Any, Callable, Dict, List, Optional, Tuple +from typing import Any, Callable, Optional from unittest.mock import AsyncMock, patch from twisted.internet.testing import MemoryReactor @@ -48,7 +48,7 @@ class BaseRelationsTestCase(unittest.HomeserverTestCase): ] hijack_auth = False - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: # We need to enable msc1849 support for aggregations config = super().default_config() @@ -69,7 +69,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: res = self.helper.send(self.room, body="Hi!", tok=self.user_token) self.parent_id = res["event_id"] - def _create_user(self, localpart: str) -> Tuple[str, str]: + def _create_user(self, localpart: str) -> tuple[str, str]: user_id = self.register_user(localpart, "abc123") access_token = self.login(localpart, "abc123") @@ -123,7 +123,7 @@ def _send_relation( self.assertEqual(expected_response_code, channel.code, channel.json_body) return channel - def _get_related_events(self) -> List[str]: + def _get_related_events(self) -> list[str]: """ Requests /relations on the parent ID and returns a list of event IDs. """ @@ -149,7 +149,7 @@ def _get_bundled_aggregations(self) -> JsonDict: self.assertEqual(200, channel.code, channel.json_body) return channel.json_body["unsigned"].get("m.relations", {}) - def _find_event_in_chunk(self, events: List[JsonDict]) -> JsonDict: + def _find_event_in_chunk(self, events: list[JsonDict]) -> JsonDict: """ Find the parent event in a chunk of events and assert that it has the proper bundled aggregations. """ @@ -846,7 +846,7 @@ def test_repeated_paginate_relations(self) -> None: expected_event_ids.append(channel.json_body["event_id"]) prev_token: Optional[str] = "" - found_event_ids: List[str] = [] + found_event_ids: list[str] = [] for _ in range(20): from_token = "" if prev_token: @@ -1484,9 +1484,9 @@ class RelationIgnoredUserTestCase(BaseRelationsTestCase): def _test_ignored_user( self, relation_type: str, - allowed_event_ids: List[str], - ignored_event_ids: List[str], - ) -> Tuple[JsonDict, JsonDict]: + allowed_event_ids: list[str], + ignored_event_ids: list[str], + ) -> tuple[JsonDict, JsonDict]: """ Fetch the relations and ensure they're all there, then ignore user2, and repeat. @@ -1600,7 +1600,7 @@ def _redact(self, event_id: str) -> None: ) self.assertEqual(200, channel.code, channel.json_body) - def _get_threads(self) -> List[Tuple[str, str]]: + def _get_threads(self) -> list[tuple[str, str]]: """Request the threads in the room and returns a list of thread ID and latest event ID.""" # Request the threads in the room. 
channel = self.make_request( @@ -1793,7 +1793,7 @@ def test_redact_parent_thread(self) -> None: class ThreadsTestCase(BaseRelationsTestCase): - def _get_threads(self, body: JsonDict) -> List[Tuple[str, str]]: + def _get_threads(self, body: JsonDict) -> list[tuple[str, str]]: return [ ( ev["event_id"], diff --git a/tests/rest/client/test_rendezvous.py b/tests/rest/client/test_rendezvous.py index 160f8527051..dc4f833fa29 100644 --- a/tests/rest/client/test_rendezvous.py +++ b/tests/rest/client/test_rendezvous.py @@ -19,7 +19,6 @@ # # -from typing import Dict from urllib.parse import urlparse from twisted.internet.testing import MemoryReactor @@ -46,7 +45,7 @@ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: self.hs = self.setup_test_homeserver() return self.hs - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: return { **super().create_resource_dict(), "/_synapse/client/rendezvous": MSC4108RendezvousSessionResource(self.hs), diff --git a/tests/rest/client/test_retention.py b/tests/rest/client/test_retention.py index 7a816a66e07..758d62e63b7 100644 --- a/tests/rest/client/test_retention.py +++ b/tests/rest/client/test_retention.py @@ -17,7 +17,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Any, Dict +from typing import Any from unittest.mock import Mock from twisted.internet.testing import MemoryReactor @@ -265,7 +265,7 @@ class RetentionNoDefaultPolicyTestCase(unittest.HomeserverTestCase): room.register_servlets, ] - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() retention_config = { diff --git a/tests/rest/client/test_rooms.py b/tests/rest/client/test_rooms.py index feae5f77cdf..4142aed3632 100644 --- a/tests/rest/client/test_rooms.py +++ b/tests/rest/client/test_rooms.py @@ -25,7 +25,7 @@ import json from http import HTTPStatus -from typing import Any, Dict, Iterable, List, Literal, Optional, Tuple, Union +from typing import Any, Iterable, Literal, Optional, Union from unittest.mock import AsyncMock, Mock, call, patch from urllib import parse as urlparse @@ -989,7 +989,7 @@ async def user_may_join_room_tuple( mxid: str, room_id: str, is_invite: bool, - ) -> Tuple[Codes, dict]: + ) -> tuple[Codes, dict]: return Codes.INCOMPATIBLE_ROOM_VERSION, {} join_mock.side_effect = user_may_join_room_tuple @@ -1002,7 +1002,7 @@ async def user_may_join_room_tuple( self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body) self.assertEqual(join_mock.call_count, 0) - def _create_basic_room(self) -> Tuple[int, object]: + def _create_basic_room(self) -> tuple[int, object]: """ Tries to create a basic room and returns the response code. """ @@ -1351,7 +1351,7 @@ def test_spam_checker_may_join_room(self) -> None: """ # Register a dummy callback. Make it allow all room joins for now. 
- return_value: Union[Literal["NOT_SPAM"], Tuple[Codes, dict], Codes] = ( + return_value: Union[Literal["NOT_SPAM"], tuple[Codes, dict], Codes] = ( synapse.module_api.NOT_SPAM ) @@ -1359,7 +1359,7 @@ async def user_may_join_room( userid: str, room_id: str, is_invited: bool, - ) -> Union[Literal["NOT_SPAM"], Tuple[Codes, dict], Codes]: + ) -> Union[Literal["NOT_SPAM"], tuple[Codes, dict], Codes]: return return_value # `spec` argument is needed for this function mock to have `__qualname__`, which @@ -1848,12 +1848,12 @@ def test_rooms_messages_sent(self) -> None: def test_spam_checker_check_event_for_spam( self, name: str, - value: Union[str, bool, Codes, Tuple[Codes, JsonDict]], + value: Union[str, bool, Codes, tuple[Codes, JsonDict]], expected_code: int, expected_fields: dict, ) -> None: class SpamCheck: - mock_return_value: Union[str, bool, Codes, Tuple[Codes, JsonDict], bool] = ( + mock_return_value: Union[str, bool, Codes, tuple[Codes, JsonDict], bool] = ( "NOT_SPAM" ) mock_content: Optional[JsonDict] = None @@ -1861,7 +1861,7 @@ class SpamCheck: async def check_event_for_spam( self, event: synapse.events.EventBase, - ) -> Union[str, Codes, Tuple[Codes, JsonDict], bool]: + ) -> Union[str, Codes, tuple[Codes, JsonDict], bool]: self.mock_content = event.content return self.mock_return_value @@ -1915,7 +1915,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.admin_user_id = self.register_user("admin", "pass") self.admin_access_token = self.login("admin", "pass") - def power_levels(self, room_id: str) -> Dict[str, Any]: + def power_levels(self, room_id: str) -> dict[str, Any]: return self.helper.get_state( room_id, "m.room.power_levels", self.admin_access_token ) @@ -2076,7 +2076,7 @@ def test_any_room_override_defeats_config_override(self) -> None: # Given the server has config allowing normal users to post my event type # And I am a normal member of a room # But the room was created with special permissions - extra_content: Dict[str, Any] = { + extra_content: dict[str, Any] = { "power_level_content_override": {"events": {}}, } room_id = self.helper.create_room_as( @@ -2707,9 +2707,9 @@ def default_config(self) -> JsonDict: def make_public_rooms_request( self, - room_types: Optional[List[Union[str, None]]], + room_types: Optional[list[Union[str, None]]], instance_id: Optional[str] = None, - ) -> Tuple[List[Dict[str, Any]], int]: + ) -> tuple[list[dict[str, Any]], int]: body: JsonDict = {"filter": {PublicRoomsFilterFields.ROOM_TYPES: room_types}} if instance_id: body["third_party_instance_id"] = "test|test" @@ -3470,7 +3470,7 @@ def _send_labelled_messages_in_room(self) -> str: class RelationsTestCase(PaginationTestCase): - def _filter_messages(self, filter: JsonDict) -> List[str]: + def _filter_messages(self, filter: JsonDict) -> list[str]: """Make a request to /messages with a filter, returns the chunk of events.""" from_token = self.get_success( self.from_token.to_string(self.hs.get_datastores().main) @@ -4529,8 +4529,8 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: def _check_redactions( self, - original_events: List[EventBase], - pulled_events: List[JsonDict], + original_events: list[EventBase], + pulled_events: list[JsonDict], expect_redaction: bool, reason: Optional[str] = None, ) -> None: diff --git a/tests/rest/client/test_sync.py b/tests/rest/client/test_sync.py index e949bb69e63..fcbf3fd53cd 100644 --- a/tests/rest/client/test_sync.py +++ b/tests/rest/client/test_sync.py @@ -20,7 +20,6 @@ # import json 
import logging -from typing import List from parameterized import parameterized @@ -131,7 +130,7 @@ def test_sync_filter_labels_not_labels(self) -> None: self.assertEqual(len(events), 1, [event["content"] for event in events]) self.assertEqual(events[0]["content"]["body"], "with wrong label", events[0]) - def _test_sync_filter_labels(self, sync_filter: str) -> List[JsonDict]: + def _test_sync_filter_labels(self, sync_filter: str) -> list[JsonDict]: user_id = self.register_user("kermit", "test") tok = self.login("kermit", "test") diff --git a/tests/rest/client/test_third_party_rules.py b/tests/rest/client/test_third_party_rules.py index 4161faa11f9..812df4454eb 100644 --- a/tests/rest/client/test_third_party_rules.py +++ b/tests/rest/client/test_third_party_rules.py @@ -19,7 +19,7 @@ # # import threading -from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, Dict, Optional, Union from unittest.mock import AsyncMock, Mock from twisted.internet.testing import MemoryReactor @@ -65,7 +65,7 @@ async def check_event_allowed( return True @staticmethod - def parse_config(config: Dict[str, Any]) -> Dict[str, Any]: + def parse_config(config: dict[str, Any]) -> dict[str, Any]: return config @@ -150,7 +150,7 @@ def test_third_party_rules(self) -> None: # types async def check( ev: EventBase, state: StateMap[EventBase] - ) -> Tuple[bool, Optional[JsonDict]]: + ) -> tuple[bool, Optional[JsonDict]]: return ev.type != "foo.bar.forbidden", None callback = Mock(spec=[], side_effect=check) @@ -207,7 +207,7 @@ def error_dict(self, config: Optional[HomeServerConfig]) -> JsonDict: # add a callback that will raise our hacky exception async def check( ev: EventBase, state: StateMap[EventBase] - ) -> Tuple[bool, Optional[JsonDict]]: + ) -> tuple[bool, Optional[JsonDict]]: raise NastyHackException(429, "message") self.hs.get_module_api_callbacks().third_party_event_rules._check_event_allowed_callbacks = [ @@ -235,7 +235,7 @@ def test_cannot_modify_event(self) -> None: # first patch the event checker so that it will try to modify the event async def check( ev: EventBase, state: StateMap[EventBase] - ) -> Tuple[bool, Optional[JsonDict]]: + ) -> tuple[bool, Optional[JsonDict]]: ev.content = {"x": "y"} return True, None @@ -260,7 +260,7 @@ def test_modify_event(self) -> None: # first patch the event checker so that it will modify the event async def check( ev: EventBase, state: StateMap[EventBase] - ) -> Tuple[bool, Optional[JsonDict]]: + ) -> tuple[bool, Optional[JsonDict]]: d = ev.get_dict() d["content"] = {"x": "y"} return True, d @@ -295,7 +295,7 @@ def test_message_edit(self) -> None: # first patch the event checker so that it will modify the event async def check( ev: EventBase, state: StateMap[EventBase] - ) -> Tuple[bool, Optional[JsonDict]]: + ) -> tuple[bool, Optional[JsonDict]]: d = ev.get_dict() d["content"] = { "msgtype": "m.text", @@ -443,7 +443,7 @@ def test_sent_event_end_up_in_room_state(self) -> None: # Define a callback that sends a custom event on power levels update. 
async def test_fn( event: EventBase, state_events: StateMap[EventBase] - ) -> Tuple[bool, Optional[JsonDict]]: + ) -> tuple[bool, Optional[JsonDict]]: if event.is_state() and event.type == EventTypes.PowerLevels: await api.create_and_send_event_into_room( { diff --git a/tests/rest/client/test_transactions.py b/tests/rest/client/test_transactions.py index bb83988d768..64d22d485ab 100644 --- a/tests/rest/client/test_transactions.py +++ b/tests/rest/client/test_transactions.py @@ -20,7 +20,7 @@ # from http import HTTPStatus -from typing import Any, Generator, Tuple, cast +from typing import Any, Generator, cast from unittest.mock import AsyncMock, Mock, call from twisted.internet import defer, reactor as _reactor @@ -92,7 +92,7 @@ def test_logcontexts_with_async_result( self, ) -> Generator["defer.Deferred[Any]", object, None]: @defer.inlineCallbacks - def cb() -> Generator["defer.Deferred[object]", object, Tuple[int, JsonDict]]: + def cb() -> Generator["defer.Deferred[object]", object, tuple[int, JsonDict]]: # Ignore `multiple-internal-clocks` linter error here since we are creating a `Clock` # for testing purposes. yield defer.ensureDeferred( @@ -124,7 +124,7 @@ def test_does_not_cache_exceptions( """ called = [False] - def cb() -> "defer.Deferred[Tuple[int, JsonDict]]": + def cb() -> "defer.Deferred[tuple[int, JsonDict]]": if called[0]: # return a valid result the second time return defer.succeed(self.mock_http_response) @@ -156,7 +156,7 @@ def test_does_not_cache_failures( """ called = [False] - def cb() -> "defer.Deferred[Tuple[int, JsonDict]]": + def cb() -> "defer.Deferred[tuple[int, JsonDict]]": if called[0]: # return a valid result the second time return defer.succeed(self.mock_http_response) diff --git a/tests/rest/client/utils.py b/tests/rest/client/utils.py index bb214759d98..6b99d0ab013 100644 --- a/tests/rest/client/utils.py +++ b/tests/rest/client/utils.py @@ -37,7 +37,6 @@ MutableMapping, Optional, Sequence, - Tuple, overload, ) from urllib.parse import urlencode @@ -88,7 +87,7 @@ def create_room_as( tok: Optional[str] = ..., expect_code: Literal[200] = ..., extra_content: Optional[Dict] = ..., - custom_headers: Optional[Iterable[Tuple[AnyStr, AnyStr]]] = ..., + custom_headers: Optional[Iterable[tuple[AnyStr, AnyStr]]] = ..., ) -> str: ... @overload @@ -100,7 +99,7 @@ def create_room_as( tok: Optional[str] = ..., expect_code: int = ..., extra_content: Optional[Dict] = ..., - custom_headers: Optional[Iterable[Tuple[AnyStr, AnyStr]]] = ..., + custom_headers: Optional[Iterable[tuple[AnyStr, AnyStr]]] = ..., ) -> Optional[str]: ... def create_room_as( @@ -111,7 +110,7 @@ def create_room_as( tok: Optional[str] = None, expect_code: int = HTTPStatus.OK, extra_content: Optional[Dict] = None, - custom_headers: Optional[Iterable[Tuple[AnyStr, AnyStr]]] = None, + custom_headers: Optional[Iterable[tuple[AnyStr, AnyStr]]] = None, ) -> Optional[str]: """ Create a room. 
@@ -310,7 +309,7 @@ def change_membership( self.auth_user_id = src path = f"/_matrix/client/r0/rooms/{room}/state/m.room.member/{targ}" - url_params: Dict[str, str] = {} + url_params: dict[str, str] = {} if tok: url_params["access_token"] = tok @@ -378,7 +377,7 @@ def send( txn_id: Optional[str] = None, tok: Optional[str] = None, expect_code: int = HTTPStatus.OK, - custom_headers: Optional[Iterable[Tuple[AnyStr, AnyStr]]] = None, + custom_headers: Optional[Iterable[tuple[AnyStr, AnyStr]]] = None, type: str = "m.room.message", ) -> JsonDict: if body is None: @@ -430,7 +429,7 @@ def send_event( txn_id: Optional[str] = None, tok: Optional[str] = None, expect_code: int = HTTPStatus.OK, - custom_headers: Optional[Iterable[Tuple[AnyStr, AnyStr]]] = None, + custom_headers: Optional[Iterable[tuple[AnyStr, AnyStr]]] = None, ) -> JsonDict: if txn_id is None: txn_id = "m%s" % (str(time.time())) @@ -497,7 +496,7 @@ def _read_write_state( self, room_id: str, event_type: str, - body: Optional[Dict[str, Any]], + body: Optional[dict[str, Any]], tok: Optional[str], expect_code: int = HTTPStatus.OK, state_key: str = "", @@ -575,7 +574,7 @@ def send_state( self, room_id: str, event_type: str, - body: Dict[str, Any], + body: dict[str, Any], tok: Optional[str] = None, expect_code: int = HTTPStatus.OK, state_key: str = "", @@ -684,7 +683,7 @@ def login_via_oidc( with_sid: bool = False, idp_id: Optional[str] = None, expected_status: int = 200, - ) -> Tuple[JsonDict, FakeAuthorizationGrant]: + ) -> tuple[JsonDict, FakeAuthorizationGrant]: """Log in (as a new user) via OIDC Returns the result of the final token login and the fake authorization grant. @@ -757,7 +756,7 @@ def auth_via_oidc( ui_auth_session_id: Optional[str] = None, with_sid: bool = False, idp_id: Optional[str] = None, - ) -> Tuple[FakeChannel, FakeAuthorizationGrant]: + ) -> tuple[FakeChannel, FakeAuthorizationGrant]: """Perform an OIDC authentication flow via a mock OIDC provider. This can be used for either login or user-interactive auth. @@ -790,7 +789,7 @@ def auth_via_oidc( went. """ - cookies: Dict[str, str] = {} + cookies: dict[str, str] = {} with fake_server.patch_homeserver(hs=self.hs): # if we're doing a ui auth, hit the ui auth redirect endpoint @@ -824,7 +823,7 @@ def complete_oidc_auth( cookies: Mapping[str, str], user_info_dict: JsonDict, with_sid: bool = False, - ) -> Tuple[FakeChannel, FakeAuthorizationGrant]: + ) -> tuple[FakeChannel, FakeAuthorizationGrant]: """Mock out an OIDC authentication flow Assumes that an OIDC auth has been initiated by one of initiate_sso_login or diff --git a/tests/rest/key/v2/test_remote_key_resource.py b/tests/rest/key/v2/test_remote_key_resource.py index 8d2489f7185..c412a19f9bf 100644 --- a/tests/rest/key/v2/test_remote_key_resource.py +++ b/tests/rest/key/v2/test_remote_key_resource.py @@ -19,7 +19,7 @@ # # from io import BytesIO, StringIO -from typing import Any, Dict, Optional, Union +from typing import Any, Optional, Union from unittest.mock import Mock import signedjson.key @@ -156,7 +156,7 @@ class EndToEndPerspectivesTests(BaseRemoteKeyResourceTestCase): endpoint, to check that the two implementations are compatible. 
""" - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = super().default_config() # replace the signing key with our own diff --git a/tests/rest/media/test_domain_blocking.py b/tests/rest/media/test_domain_blocking.py index 9eb0222102d..0bdbaa676d3 100644 --- a/tests/rest/media/test_domain_blocking.py +++ b/tests/rest/media/test_domain_blocking.py @@ -18,7 +18,6 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Dict from twisted.internet.testing import MemoryReactor from twisted.web.resource import Resource @@ -65,7 +64,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: ) ) - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: # We need to manually set the resource tree to include media, the # default only does `/_matrix/client` APIs. return {"/_matrix/media": self.hs.get_media_repository_resource()} diff --git a/tests/rest/media/test_url_preview.py b/tests/rest/media/test_url_preview.py index 7c8d2fc9985..5af2e79f451 100644 --- a/tests/rest/media/test_url_preview.py +++ b/tests/rest/media/test_url_preview.py @@ -22,7 +22,7 @@ import json import os import re -from typing import Any, Dict, Optional, Sequence, Tuple, Type +from typing import Any, Optional, Sequence from urllib.parse import quote, urlencode from twisted.internet._resolver import HostResolution @@ -127,7 +127,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: assert self.media_repo.url_previewer is not None self.url_previewer = self.media_repo.url_previewer - self.lookups: Dict[str, Any] = {} + self.lookups: dict[str, Any] = {} class Resolver: def resolveHostName( @@ -135,7 +135,7 @@ def resolveHostName( resolutionReceiver: IResolutionReceiver, hostName: str, portNumber: int = 0, - addressTypes: Optional[Sequence[Type[IAddress]]] = None, + addressTypes: Optional[Sequence[type[IAddress]]] = None, transportSemantics: str = "TCP", ) -> IResolutionReceiver: resolution = HostResolution(hostName) @@ -150,7 +150,7 @@ def resolveHostName( self.reactor.nameResolver = Resolver() # type: ignore[assignment] - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: """Create a resource tree for the test server A resource tree is a mapping from path to twisted.web.resource. @@ -1227,7 +1227,7 @@ def test_oembed_autodiscovery_blocked(self) -> None: self.assertEqual(body["og:title"], "Test") self.assertNotIn("og:image", body) - def _download_image(self) -> Tuple[str, str]: + def _download_image(self) -> tuple[str, str]: """Downloads an image into the URL cache. Returns: A (host, media_id) tuple representing the MXC URI of the image. diff --git a/tests/rest/synapse/client/test_federation_whitelist.py b/tests/rest/synapse/client/test_federation_whitelist.py index f0067a8f2bf..c4a990e32cd 100644 --- a/tests/rest/synapse/client/test_federation_whitelist.py +++ b/tests/rest/synapse/client/test_federation_whitelist.py @@ -11,7 +11,6 @@ # See the GNU Affero General Public License for more details: # . 
-from typing import Dict from twisted.web.resource import Resource @@ -28,7 +27,7 @@ class FederationWhitelistTests(unittest.HomeserverTestCase): login.register_servlets, ] - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: base = super().create_resource_dict() base.update(build_synapse_client_resource_tree(self.hs)) return base diff --git a/tests/scripts/test_new_matrix_user.py b/tests/scripts/test_new_matrix_user.py index cae096e72bf..2e71e2a7979 100644 --- a/tests/scripts/test_new_matrix_user.py +++ b/tests/scripts/test_new_matrix_user.py @@ -18,7 +18,7 @@ # # -from typing import List, Optional +from typing import Optional from unittest.mock import Mock, patch from synapse._scripts.register_new_matrix_user import request_registration @@ -60,8 +60,8 @@ def post( requests.post = post # The fake stdout will be written here - out: List[str] = [] - err_code: List[int] = [] + out: list[str] = [] + err_code: list[int] = [] with patch("synapse._scripts.register_new_matrix_user.requests", requests): request_registration( @@ -96,8 +96,8 @@ def get(url: str, verify: Optional[bool] = None) -> Mock: requests.get = get # The fake stdout will be written here - out: List[str] = [] - err_code: List[int] = [] + out: list[str] = [] + err_code: list[int] = [] with patch("synapse._scripts.register_new_matrix_user.requests", requests): request_registration( @@ -151,8 +151,8 @@ def post( requests.post = post # The fake stdout will be written here - out: List[str] = [] - err_code: List[int] = [] + out: list[str] = [] + err_code: list[int] = [] with patch("synapse._scripts.register_new_matrix_user.requests", requests): request_registration( diff --git a/tests/server.py b/tests/server.py index 208556abafa..52bc4add83e 100644 --- a/tests/server.py +++ b/tests/server.py @@ -36,14 +36,10 @@ Awaitable, Callable, Deque, - Dict, Iterable, - List, MutableMapping, Optional, Sequence, - Tuple, - Type, TypeVar, Union, cast, @@ -124,7 +120,7 @@ P = ParamSpec("P") # the type of thing that can be passed into `make_request` in the headers list -CustomHeaderType = Tuple[Union[str, bytes], Union[str, bytes]] +CustomHeaderType = tuple[Union[str, bytes], Union[str, bytes]] # A pre-prepared SQLite DB that is used as a template when creating new SQLite # DB each test run. This dramatically speeds up test set up when using SQLite. 
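One subtlety with aliases like CustomHeaderType above: unlike a quoted annotation, a module-level alias is evaluated at import time, so the builtin-generic spelling genuinely needs the Python 3.9 floor this series assumes. A sketch of the same shape (names hypothetical):

    from typing import Union

    HeaderPair = tuple[Union[str, bytes], Union[str, bytes]]  # evaluated at import

    def encode_header(h: HeaderPair) -> tuple[bytes, bytes]:
        name, value = h
        return (
            name if isinstance(name, bytes) else name.encode("utf-8"),
            value if isinstance(value, bytes) else value.encode("utf-8"),
        )

    assert encode_header(("Accept", b"*/*")) == (b"Accept", b"*/*")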
@@ -172,7 +168,7 @@ def json_body(self) -> JsonDict: return body @property - def json_list(self) -> List[JsonDict]: + def json_list(self) -> list[JsonDict]: body = json.loads(self.text_body) assert isinstance(body, list) return body @@ -211,7 +207,7 @@ def writeHeaders( version: bytes, code: bytes, reason: bytes, - headers: Union[Headers, List[Tuple[bytes, bytes]]], + headers: Union[Headers, list[tuple[bytes, bytes]]], ) -> None: self.result["version"] = version self.result["code"] = code @@ -367,7 +363,7 @@ def make_request( path: Union[bytes, str], content: Union[bytes, str, JsonDict] = b"", access_token: Optional[str] = None, - request: Type[Request] = SynapseRequest, + request: type[Request] = SynapseRequest, shorthand: bool = True, federation_auth_origin: Optional[bytes] = None, content_type: Optional[bytes] = None, @@ -492,9 +488,9 @@ class ThreadedMemoryReactorClock(MemoryReactorClock): def __init__(self) -> None: self.threadpool = ThreadPool(self) - self._tcp_callbacks: Dict[Tuple[str, int], Callable] = {} - self._udp: List[udp.Port] = [] - self.lookups: Dict[str, str] = {} + self._tcp_callbacks: dict[tuple[str, int], Callable] = {} + self._udp: list[udp.Port] = [] + self.lookups: dict[str, str] = {} self._thread_callbacks: Deque[Callable[..., R]] = deque() lookups = self.lookups @@ -622,7 +618,7 @@ def connectTCP( port: int, factory: ClientFactory, timeout: float = 30, - bindAddress: Optional[Tuple[str, int]] = None, + bindAddress: Optional[tuple[str, int]] = None, ) -> IConnector: """Fake L{IReactorTCP.connectTCP}.""" @@ -814,7 +810,7 @@ def _(res: Any) -> None: return d -def get_clock() -> Tuple[ThreadedMemoryReactorClock, Clock]: +def get_clock() -> tuple[ThreadedMemoryReactorClock, Clock]: # Ignore the linter error since this is an expected usage of creating a `Clock` for # testing purposes. reactor = ThreadedMemoryReactorClock() @@ -1041,7 +1037,7 @@ def setTcpKeepAlive(self, enabled: bool) -> None: def connect_client( reactor: ThreadedMemoryReactorClock, client_id: int -) -> Tuple[IProtocol, AccumulatingProtocol]: +) -> tuple[IProtocol, AccumulatingProtocol]: """ Connect a client to a fake TCP transport. @@ -1068,7 +1064,7 @@ def setup_test_homeserver( server_name: str = "test", config: Optional[HomeServerConfig] = None, reactor: Optional[ISynapseReactor] = None, - homeserver_to_use: Type[HomeServer] = TestHomeServer, + homeserver_to_use: type[HomeServer] = TestHomeServer, db_txn_limit: Optional[int] = None, **extra_homeserver_attributes: Any, ) -> HomeServer: diff --git a/tests/server_notices/test_resource_limits_server_notices.py b/tests/server_notices/test_resource_limits_server_notices.py index dd38528a7d3..a0c5582496c 100644 --- a/tests/server_notices/test_resource_limits_server_notices.py +++ b/tests/server_notices/test_resource_limits_server_notices.py @@ -17,7 +17,6 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Tuple from unittest.mock import AsyncMock, Mock from twisted.internet.testing import MemoryReactor @@ -363,7 +362,7 @@ def test_invite_with_notice(self) -> None: self.assertTrue(notice_in_room, "No server notice in room") - def _trigger_notice_and_join(self) -> Tuple[str, str, str]: + def _trigger_notice_and_join(self) -> tuple[str, str, str]: """Creates enough active users to hit the MAU limit and trigger a system notice about it, then joins the system notices room with one of the users created. 
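Note the imports left behind in tests/server.py: Deque, Iterable, MutableMapping and Sequence have their PEP 585 replacements in collections / collections.abc rather than in builtins, so they are presumably reserved for a later patch in this series. Their 3.9-native spellings, for reference:

    from collections import deque
    from collections.abc import MutableMapping, Sequence

    pending: deque[str] = deque(["first", "second"])  # was typing.Deque[str]

    def first_item(xs: Sequence[int]) -> int:  # was typing.Sequence[int]
        return xs[0]

    def bump(counts: MutableMapping[str, int], key: str) -> None:
        counts[key] = counts.get(key, 0) + 1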
diff --git a/tests/state/test_v2.py b/tests/state/test_v2.py index b4f2b98cc40..2cf411e30be 100644 --- a/tests/state/test_v2.py +++ b/tests/state/test_v2.py @@ -21,13 +21,9 @@ import itertools from typing import ( Collection, - Dict, Iterable, - List, Mapping, Optional, - Set, - Tuple, TypeVar, ) @@ -94,7 +90,7 @@ def __init__( self.content = content self.room_id = ROOM_ID - def to_event(self, auth_events: List[str], prev_events: List[str]) -> EventBase: + def to_event(self, auth_events: list[str], prev_events: list[str]) -> EventBase: """Given the auth_events and prev_events, convert to a Frozen Event Args: @@ -461,9 +457,9 @@ def test_mainline_sort(self) -> None: def do_check( self, - events: List[FakeEvent], - edges: List[List[str]], - expected_state_ids: List[str], + events: list[FakeEvent], + edges: list[list[str]], + expected_state_ids: list[str], ) -> None: """Take a list of events and edges and calculate the state of the graph at END, and asserts it matches `expected_state_ids` @@ -476,9 +472,9 @@ def do_check( the keys that haven't changed since START). """ # We want to sort the events into topological order for processing. - graph: Dict[str, Set[str]] = {} + graph: dict[str, set[str]] = {} - fake_event_map: Dict[str, FakeEvent] = {} + fake_event_map: dict[str, FakeEvent] = {} for ev in itertools.chain(INITIAL_EVENTS, events): graph[ev.node_id] = set() @@ -491,8 +487,8 @@ def do_check( for a, b in pairwise(edge_list): graph[a].add(b) - event_map: Dict[str, EventBase] = {} - state_at_event: Dict[str, StateMap[str]] = {} + event_map: dict[str, EventBase] = {} + state_at_event: dict[str, StateMap[str]] = {} # We copy the map as the sort consumes the graph graph_copy = {k: set(v) for k, v in graph.items()} @@ -568,7 +564,7 @@ def do_check( class LexicographicalTestCase(unittest.TestCase): def test_simple(self) -> None: - graph: Dict[str, Set[str]] = { + graph: dict[str, set[str]] = { "l": {"o"}, "m": {"n", "o"}, "n": {"o"}, @@ -1020,7 +1016,7 @@ def test_get_power_level_for_sender(self) -> None: T = TypeVar("T") -def pairwise(iterable: Iterable[T]) -> Iterable[Tuple[T, T]]: +def pairwise(iterable: Iterable[T]) -> Iterable[tuple[T, T]]: "s -> (s0,s1), (s1,s2), (s2, s3), ..." a, b = itertools.tee(iterable) next(b, None) @@ -1029,11 +1025,11 @@ def pairwise(iterable: Iterable[T]) -> Iterable[Tuple[T, T]]: @attr.s class TestStateResolutionStore: - event_map: Dict[str, EventBase] = attr.ib() + event_map: dict[str, EventBase] = attr.ib() def get_events( self, event_ids: Collection[str], allow_rejected: bool = False - ) -> "defer.Deferred[Dict[str, EventBase]]": + ) -> "defer.Deferred[dict[str, EventBase]]": """Get events from the database Args: @@ -1048,7 +1044,7 @@ def get_events( {eid: self.event_map[eid] for eid in event_ids if eid in self.event_map} ) - def _get_auth_chain(self, event_ids: Iterable[str]) -> List[str]: + def _get_auth_chain(self, event_ids: Iterable[str]) -> list[str]: """Gets the full auth chain for a set of events (including rejected events). 
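The import block above drops only the concrete containers and keeps Collection, Iterable, and Mapping in typing; those abstract types are also subscriptable via collections.abc since 3.9, but moving them there is left out of this change. The patched pairwise helper works unchanged on Python 3.9+; a self-contained equivalent:

    import itertools
    from typing import Iterable, TypeVar

    T = TypeVar("T")

    def pairwise(iterable: Iterable[T]) -> Iterable[tuple[T, T]]:
        "s -> (s0,s1), (s1,s2), (s2, s3), ..."
        a, b = itertools.tee(iterable)
        next(b, None)
        return zip(a, b)

    print(list(pairwise("abcd")))  # [('a', 'b'), ('b', 'c'), ('c', 'd')]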
@@ -1085,9 +1081,9 @@ def _get_auth_chain(self, event_ids: Iterable[str]) -> List[str]: def get_auth_chain_difference( self, room_id: str, - auth_sets: List[Set[str]], - conflicted_state: Optional[Set[str]], - additional_backwards_reachable_conflicted_events: Optional[Set[str]], + auth_sets: list[set[str]], + conflicted_state: Optional[set[str]], + additional_backwards_reachable_conflicted_events: Optional[set[str]], ) -> "defer.Deferred[StateDifference]": chains = [frozenset(self._get_auth_chain(a)) for a in auth_sets] diff --git a/tests/state/test_v21.py b/tests/state/test_v21.py index ff1715d4f7e..6d4929f918c 100644 --- a/tests/state/test_v21.py +++ b/tests/state/test_v21.py @@ -18,7 +18,7 @@ # # import itertools -from typing import Dict, List, Optional, Sequence, Set +from typing import Dict, Optional, Sequence from twisted.internet import defer from twisted.test.proto_helpers import MemoryReactor @@ -357,11 +357,11 @@ async def _get_auth_difference_and_conflicted_subgraph( self, room_id: str, state_maps: Sequence[StateMap[str]], - event_map: Optional[Dict[str, EventBase]], + event_map: Optional[dict[str, EventBase]], state_res_store: StateResolutionStoreInterface, - ) -> Set[str]: + ) -> set[str]: _, conflicted_state = _seperate(state_maps) - conflicted_set: Optional[Set[str]] = set( + conflicted_set: Optional[set[str]] = set( itertools.chain.from_iterable(conflicted_state.values()) ) if event_map is None: @@ -377,7 +377,7 @@ async def _get_auth_difference_and_conflicted_subgraph( def get_resolution_and_verify_expected( self, state_maps: Sequence[StateMap[str]], - events: List[EventBase], + events: list[EventBase], expected: StateMap[str], ) -> None: room_id = events[0].room_id @@ -476,8 +476,8 @@ def create_event( state_key: Optional[str], sender: str, content: Dict, - auth_events: List[str], - prev_events: Optional[List[str]] = None, + auth_events: list[str], + prev_events: Optional[list[str]] = None, room_id: Optional[str] = None, ) -> EventBase: """Short-hand for event_from_pdu_json for fields we typically care about. 
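The test_v21.py import above keeps Dict, apparently because an unparameterised content: Dict annotation survives later in the file. Bare typing.Dict and plain dict are interchangeable, so that annotation could equally be written with the builtin; an illustrative sketch with made-up names:

    def create_event(content: dict) -> dict:
        # bare dict accepts any key/value types, exactly like bare typing.Dict
        return {"type": "m.room.message", "content": content}

    print(create_event({"body": "hi"})["content"]["body"])  # hi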
diff --git a/tests/storage/databases/main/test_end_to_end_keys.py b/tests/storage/databases/main/test_end_to_end_keys.py index d0dd8f866b9..35e1e15d66e 100644 --- a/tests/storage/databases/main/test_end_to_end_keys.py +++ b/tests/storage/databases/main/test_end_to_end_keys.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import List, Optional, Tuple +from typing import Optional from twisted.internet.testing import MemoryReactor @@ -99,7 +99,7 @@ def test_master_replacement_only_applies_to_latest_master_key( def check_timestamp_column( txn: LoggingTransaction, - ) -> List[Tuple[JsonDict, Optional[int]]]: + ) -> list[tuple[JsonDict, Optional[int]]]: """Fetch all rows for Alice's keys.""" txn.execute( """ diff --git a/tests/storage/databases/main/test_events_worker.py b/tests/storage/databases/main/test_events_worker.py index a7c6bdd9b4f..c786271c09a 100644 --- a/tests/storage/databases/main/test_events_worker.py +++ b/tests/storage/databases/main/test_events_worker.py @@ -20,7 +20,7 @@ # import json from contextlib import contextmanager -from typing import Generator, List, Set, Tuple +from typing import Generator from unittest import mock from twisted.enterprise.adbapi import ConnectionPool @@ -60,7 +60,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.token = self.login(self.user, "pass") self.room_id = self.helper.create_room_as(self.user, tok=self.token) - self.event_ids: List[str] = [] + self.event_ids: list[str] = [] for i in range(3): event = self.get_success( inject_event( @@ -316,7 +316,7 @@ def test_get_lots_of_messages(self) -> None: room_id = self.helper.create_room_as(user_id, tok=user_tok) - event_ids: Set[str] = set() + event_ids: set[str] = set() for i in range(num_events): event = self.get_success( inject_event( @@ -371,7 +371,7 @@ def _populate_events(self) -> None: ) ) - self.event_ids: List[str] = [] + self.event_ids: list[str] = [] for idx in range(1, 21): # Stream ordering starts at 1. event_json = { "type": f"test {idx}", @@ -504,7 +504,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: def blocking_get_event_calls( self, ) -> Generator[ - Tuple["Deferred[None]", "Deferred[None]", "Deferred[None]"], None, None + tuple["Deferred[None]", "Deferred[None]", "Deferred[None]"], None, None ]: """Starts two concurrent `get_event` calls for the same event. diff --git a/tests/storage/databases/main/test_receipts.py b/tests/storage/databases/main/test_receipts.py index d084f5c2ba5..2d63b52aca8 100644 --- a/tests/storage/databases/main/test_receipts.py +++ b/tests/storage/databases/main/test_receipts.py @@ -19,7 +19,7 @@ # # -from typing import Any, Dict, Optional, Sequence, Tuple +from typing import Any, Optional, Sequence from twisted.internet.testing import MemoryReactor @@ -51,8 +51,8 @@ def _test_background_receipts_unique_index( update_name: str, index_name: str, table: str, - receipts: Dict[Tuple[str, str, str], Sequence[Dict[str, Any]]], - expected_unique_receipts: Dict[Tuple[str, str, str], Optional[Dict[str, Any]]], + receipts: dict[tuple[str, str, str], Sequence[dict[str, Any]]], + expected_unique_receipts: dict[tuple[str, str, str], Optional[dict[str, Any]]], ) -> None: """Test that the background update to uniqueify non-thread receipts in the given receipts table works properly. 
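A pattern that repeats through the storage-test hunks that follow: the type argument handed to cast() changes from List[Tuple[...]] to list[tuple[...]]. This is valid at runtime because subscripting a builtin yields a real object (types.GenericAlias), usable anywhere the old typing forms were. A tiny sketch:

    from typing import cast

    rows_raw: object = [("alice", 1), ("bob", 2)]
    # cast() performs no runtime checking; the type is for the checker only
    rows = cast(list[tuple[str, int]], rows_raw)
    print(rows[0][0])  # alice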
diff --git a/tests/storage/test__base.py b/tests/storage/test__base.py index 5e773a5545f..56025318809 100644 --- a/tests/storage/test__base.py +++ b/tests/storage/test__base.py @@ -20,7 +20,7 @@ # import secrets -from typing import Generator, List, Tuple, cast +from typing import Generator, cast from twisted.internet.testing import MemoryReactor @@ -52,9 +52,9 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: ) ) - def _dump_table_to_tuple(self) -> Generator[Tuple[int, str, str], None, None]: + def _dump_table_to_tuple(self) -> Generator[tuple[int, str, str], None, None]: yield from cast( - List[Tuple[int, str, str]], + list[tuple[int, str, str]], self.get_success( self.storage.db_pool.simple_select_list( self.table_name, None, ["id, username, value"] diff --git a/tests/storage/test_account_data.py b/tests/storage/test_account_data.py index 13c4be988ee..d9307154da8 100644 --- a/tests/storage/test_account_data.py +++ b/tests/storage/test_account_data.py @@ -19,7 +19,7 @@ # # -from typing import Iterable, Optional, Set +from typing import Iterable, Optional from twisted.internet.testing import MemoryReactor @@ -52,7 +52,7 @@ def _update_ignore_list( ) def assert_ignorers( - self, ignored_user_id: str, expected_ignorer_user_ids: Set[str] + self, ignored_user_id: str, expected_ignorer_user_ids: set[str] ) -> None: self.assertEqual( self.get_success(self.store.ignored_by(ignored_user_id)), @@ -60,7 +60,7 @@ def assert_ignorers( ) def assert_ignored( - self, ignorer_user_id: str, expected_ignored_user_ids: Set[str] + self, ignorer_user_id: str, expected_ignored_user_ids: set[str] ) -> None: self.assertEqual( self.get_success(self.store.ignored_users(ignorer_user_id)), diff --git a/tests/storage/test_appservice.py b/tests/storage/test_appservice.py index b4df92c7a1d..4b9d069d6a3 100644 --- a/tests/storage/test_appservice.py +++ b/tests/storage/test_appservice.py @@ -21,7 +21,7 @@ import json import os import tempfile -from typing import List, cast +from typing import cast from unittest.mock import AsyncMock, Mock import yaml @@ -48,7 +48,7 @@ class ApplicationServiceStoreTestCase(unittest.HomeserverTestCase): def setUp(self) -> None: super().setUp() - self.as_yaml_files: List[str] = [] + self.as_yaml_files: list[str] = [] self.hs.config.appservice.app_service_config_files = self.as_yaml_files self.hs.config.caches.event_cache_size = 1 @@ -123,7 +123,7 @@ def test_retrieval_of_all_services(self) -> None: class ApplicationServiceTransactionStoreTestCase(unittest.HomeserverTestCase): def setUp(self) -> None: super().setUp() - self.as_yaml_files: List[str] = [] + self.as_yaml_files: list[str] = [] self.hs.config.appservice.app_service_config_files = self.as_yaml_files self.hs.config.caches.event_cache_size = 1 @@ -180,7 +180,7 @@ def _set_state(self, id: str, state: ApplicationServiceState) -> defer.Deferred: ) def _insert_txn( - self, as_id: str, txn_id: int, events: List[Mock] + self, as_id: str, txn_id: int, events: list[Mock] ) -> "defer.Deferred[None]": return self.db_pool.runOperation( self.engine.convert_param_style( @@ -277,7 +277,7 @@ def test_create_appservice_txn_first( self, ) -> None: service = Mock(id=self.as_list[0]["id"]) - events = cast(List[EventBase], [Mock(event_id="e1"), Mock(event_id="e2")]) + events = cast(list[EventBase], [Mock(event_id="e1"), Mock(event_id="e2")]) txn = self.get_success( defer.ensureDeferred( self.store.create_appservice_txn( diff --git a/tests/storage/test_background_update.py b/tests/storage/test_background_update.py index 
cf63b50c2f5..3505423691f 100644 --- a/tests/storage/test_background_update.py +++ b/tests/storage/test_background_update.py @@ -19,7 +19,7 @@ # # import logging -from typing import List, Tuple, cast +from typing import cast from unittest.mock import AsyncMock, Mock import yaml @@ -535,7 +535,7 @@ def delta(txn: LoggingTransaction) -> None: # Check the correct values are in the new table. rows = cast( - List[Tuple[int, int]], + list[tuple[int, int]], self.get_success( self.store.db_pool.simple_select_list( table="test_constraint", @@ -652,7 +652,7 @@ def delta(txn: LoggingTransaction) -> None: # Check the correct values are in the new table. rows = cast( - List[Tuple[int, int]], + list[tuple[int, int]], self.get_success( self.store.db_pool.simple_select_list( table="test_constraint", diff --git a/tests/storage/test_client_ips.py b/tests/storage/test_client_ips.py index 1cd97a9dd7f..2c1ba9d6c2f 100644 --- a/tests/storage/test_client_ips.py +++ b/tests/storage/test_client_ips.py @@ -19,7 +19,7 @@ # # -from typing import Any, Dict, List, Optional, Tuple, cast +from typing import Any, Optional, cast from unittest.mock import AsyncMock from parameterized import parameterized @@ -104,7 +104,7 @@ def test_insert_new_client_ip_none_device_id(self) -> None: self.pump(0) result = cast( - List[Tuple[str, str, str, Optional[str], int]], + list[tuple[str, str, str, Optional[str], int]], self.get_success( self.store.db_pool.simple_select_list( table="user_ips", @@ -135,7 +135,7 @@ def test_insert_new_client_ip_none_device_id(self) -> None: self.pump(0) result = cast( - List[Tuple[str, str, str, Optional[str], int]], + list[tuple[str, str, str, Optional[str], int]], self.get_success( self.store.db_pool.simple_select_list( table="user_ips", @@ -184,7 +184,7 @@ def test_get_last_client_ip_by_device(self, after_persisting: bool) -> None: else: # Check that the new IP and user agent has not been stored yet db_result = cast( - List[Tuple[str, Optional[str], Optional[str], str, Optional[int]]], + list[tuple[str, Optional[str], Optional[str], str, Optional[int]]], self.get_success( self.store.db_pool.simple_select_list( table="devices", @@ -266,7 +266,7 @@ def test_get_last_client_ip_by_device_combined_data(self) -> None: # Check that the new IP and user agent has not been stored yet db_result = cast( - List[Tuple[str, Optional[str], Optional[str], str, Optional[int]]], + list[tuple[str, Optional[str], Optional[str], str, Optional[int]]], self.get_success( self.store.db_pool.simple_select_list( table="devices", @@ -381,7 +381,7 @@ def test_get_user_ip_and_agents_combined_data(self) -> None: # Check that the new IP and user agent has not been stored yet db_result = cast( - List[Tuple[str, str, str, int]], + list[tuple[str, str, str, int]], self.get_success( self.store.db_pool.simple_select_list( table="user_ips", @@ -589,7 +589,7 @@ def test_old_user_ips_pruned(self) -> None: # We should see that in the DB result = cast( - List[Tuple[str, str, str, Optional[str], int]], + list[tuple[str, str, str, Optional[str], int]], self.get_success( self.store.db_pool.simple_select_list( table="user_ips", @@ -616,7 +616,7 @@ def test_old_user_ips_pruned(self) -> None: # We should get no results. 
result = cast( - List[Tuple[str, str, str, Optional[str], int]], + list[tuple[str, str, str, Optional[str], int]], self.get_success( self.store.db_pool.simple_select_list( table="user_ips", @@ -695,7 +695,7 @@ def test_invalid_user_agents_are_ignored(self) -> None: # We should see that in the DB result = cast( - List[Tuple[str, str, str, Optional[str], int]], + list[tuple[str, str, str, Optional[str], int]], self.get_success( self.store.db_pool.simple_select_list( table="user_ips", @@ -745,9 +745,9 @@ def test_request_from_getPeer(self) -> None: def _runtest( self, - headers: Dict[bytes, bytes], + headers: dict[bytes, bytes], expected_ip: str, - make_request_args: Dict[str, Any], + make_request_args: dict[str, Any], ) -> None: device_id = "bleb" diff --git a/tests/storage/test_database.py b/tests/storage/test_database.py index fd6963bb82c..ffcff3363f7 100644 --- a/tests/storage/test_database.py +++ b/tests/storage/test_database.py @@ -19,7 +19,7 @@ # # -from typing import Callable, Tuple +from typing import Callable from unittest.mock import Mock, call from twisted.internet import defer @@ -149,7 +149,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: def _run_interaction( self, func: Callable[[LoggingTransaction], object] - ) -> Tuple[Mock, Mock]: + ) -> tuple[Mock, Mock]: """Run the given function in a database transaction, with callbacks registered. Args: diff --git a/tests/storage/test_devices.py b/tests/storage/test_devices.py index bd6fcd8eebe..1d1979e19ff 100644 --- a/tests/storage/test_devices.py +++ b/tests/storage/test_devices.py @@ -19,7 +19,7 @@ # # -from typing import Collection, List, Tuple +from typing import Collection from twisted.internet.testing import MemoryReactor @@ -44,7 +44,7 @@ def default_config(self) -> JsonDict: config["federation_sender_instances"] = ["master"] return config - def add_device_change(self, user_id: str, device_ids: List[str], host: str) -> None: + def add_device_change(self, user_id: str, device_ids: list[str], host: str) -> None: """Add a device list change for the given device to `device_lists_outbound_pokes` table. """ @@ -306,7 +306,7 @@ def test_get_device_updates_by_remote_cross_signing_key_updates( def _check_devices_in_updates( self, expected_device_ids: Collection[str], - device_updates: List[Tuple[str, JsonDict]], + device_updates: list[tuple[str, JsonDict]], ) -> None: """Check that an specific device ids exist in a list of device update EDUs""" self.assertEqual(len(device_updates), len(expected_device_ids)) diff --git a/tests/storage/test_event_chain.py b/tests/storage/test_event_chain.py index fe9bb7bcca0..175a5ffc788 100644 --- a/tests/storage/test_event_chain.py +++ b/tests/storage/test_event_chain.py @@ -19,7 +19,7 @@ # # -from typing import Dict, List, Set, Tuple, cast +from typing import cast from parameterized import parameterized @@ -420,7 +420,7 @@ def test_out_of_order_events(self) -> None: def persist( self, - events: List[EventBase], + events: list[EventBase], ) -> None: """Persist the given events and check that the links generated match those given. 
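The device-list hunks above annotate update lists as list[tuple[str, JsonDict]]; once the alias itself uses builtins, the whole shape is expressible without typing imports. JsonDict below is a local stand-in for Synapse's alias of the same name:

    from typing import Any

    JsonDict = dict[str, Any]  # stand-in; Synapse defines its own JsonDict

    def device_ids_in_updates(updates: list[tuple[str, JsonDict]]) -> set[str]:
        # collect the device IDs from a list of (device_id, EDU content) pairs
        return {device_id for device_id, _edu in updates}

    print(device_ids_in_updates([("DEV1", {"user_id": "@a:test"})]))  # {'DEV1'}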
@@ -464,11 +464,11 @@ def _persist(txn: LoggingTransaction) -> None: ) def fetch_chains( - self, events: List[EventBase] - ) -> Tuple[Dict[str, Tuple[int, int]], _LinkMap]: + self, events: list[EventBase] + ) -> tuple[dict[str, tuple[int, int]], _LinkMap]: # Fetch the map from event ID -> (chain ID, sequence number) rows = cast( - List[Tuple[str, int, int]], + list[tuple[str, int, int]], self.get_success( self.store.db_pool.simple_select_many_batch( table="event_auth_chains", @@ -487,7 +487,7 @@ def fetch_chains( # Fetch all the links and pass them to the _LinkMap. auth_chain_rows = cast( - List[Tuple[int, int, int, int]], + list[tuple[int, int, int, int]], self.get_success( self.store.db_pool.simple_select_many_batch( table="event_auth_chain_links", @@ -575,7 +575,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.token = self.login("foo", "pass") self.requester = create_requester(self.user_id) - def _generate_room(self) -> Tuple[str, List[Set[str]]]: + def _generate_room(self) -> tuple[str, list[set[str]]]: """Insert a room without a chain cover index.""" room_id = self.helper.create_room_as(self.user_id, tok=self.token) diff --git a/tests/storage/test_event_federation.py b/tests/storage/test_event_federation.py index ee9cf3687f1..d8c6a1cd042 100644 --- a/tests/storage/test_event_federation.py +++ b/tests/storage/test_event_federation.py @@ -21,14 +21,9 @@ import datetime from typing import ( Collection, - Dict, - FrozenSet, Iterable, - List, Mapping, NamedTuple, - Set, - Tuple, TypeVar, Union, cast, @@ -74,7 +69,7 @@ # | | # K J -AUTH_GRAPH: Dict[str, List[str]] = { +AUTH_GRAPH: dict[str, list[str]] = { "a": ["e"], "b": ["e"], "c": ["g", "i"], @@ -108,7 +103,7 @@ def get_all_topologically_sorted_orders( nodes: Iterable[T], graph: Mapping[T, Collection[T]], -) -> List[List[T]]: +) -> list[list[T]]: """Given a set of nodes and a graph, return all possible topological orderings. """ @@ -117,7 +112,7 @@ def get_all_topologically_sorted_orders( # we have a choice over which node to consider next. degree_map = dict.fromkeys(nodes, 0) - reverse_graph: Dict[T, Set[T]] = {} + reverse_graph: dict[T, set[T]] = {} for node, edges in graph.items(): if node not in degree_map: @@ -138,10 +133,10 @@ def get_all_topologically_sorted_orders( def _get_all_topologically_sorted_orders_inner( - reverse_graph: Dict[T, Set[T]], - zero_degree: List[T], - degree_map: Dict[T, int], -) -> List[List[T]]: + reverse_graph: dict[T, set[T]], + zero_degree: list[T], + degree_map: dict[T, int], +) -> list[list[T]]: new_paths = [] # Rather than only choosing *one* item from the list of nodes with zero @@ -175,7 +170,7 @@ def _get_all_topologically_sorted_orders_inner( def get_all_topologically_consistent_subsets( nodes: Iterable[T], graph: Mapping[T, Collection[T]], -) -> Set[FrozenSet[T]]: +) -> set[frozenset[T]]: """Get all subsets of the graph where if node N is in the subgraph, then all nodes that can reach that node (i.e. for all X there exists a path X -> N) are in the subgraph. 
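The test_event_federation import cleanup above also drops FrozenSet, and the NamedTuple and attr.s hunks that follow migrate class-level annotations the same way. A dependency-free sketch of both, with stand-in names:

    from typing import NamedTuple

    def unordered_pairs(nodes: set[str]) -> set[frozenset[str]]:
        # frozenset is subscriptable on 3.9+ too: no typing.FrozenSet needed
        return {frozenset({a, b}) for a in nodes for b in nodes if a < b}

    class TestLink(NamedTuple):
        origin_chain_and_seq: tuple[int, int]
        target_chain_and_seq: tuple[int, int]

    print(sorted(sorted(p) for p in unordered_pairs({"a", "b", "c"})))
    print(TestLink((2, 1), (1, 2)).origin_chain_and_seq)  # (2, 1)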
@@ -195,7 +190,7 @@ def get_all_topologically_consistent_subsets( @attr.s(auto_attribs=True, frozen=True, slots=True) class _BackfillSetupInfo: room_id: str - depth_map: Dict[str, int] + depth_map: dict[str, int] class EventFederationWorkerStoreTestCase(tests.unittest.HomeserverTestCase): @@ -573,7 +568,7 @@ def test_auth_difference_partial_cover(self) -> None: # | | # K J - auth_graph: Dict[str, List[str]] = { + auth_graph: dict[str, list[str]] = { "a": ["e"], "b": ["e"], "c": ["g", "i"], @@ -756,11 +751,11 @@ class TestNode(NamedTuple): seq_num: int class TestLink(NamedTuple): - origin_chain_and_seq: Tuple[int, int] - target_chain_and_seq: Tuple[int, int] + origin_chain_and_seq: tuple[int, int] + target_chain_and_seq: tuple[int, int] # Map to chain IDs / seq nums - nodes: List[TestNode] = [ + nodes: list[TestNode] = [ TestNode("A1", 1, 1), TestNode("A2", 1, 2), TestNode("A3", 1, 3), @@ -779,7 +774,7 @@ class TestLink(NamedTuple): TestNode("G1", 7, 1), TestNode("G2", 7, 2), ] - links: List[TestLink] = [ + links: list[TestLink] = [ TestLink((2, 1), (1, 2)), # B1 -> A2 TestLink((3, 1), (2, 2)), # C1 -> B2 TestLink((4, 1), (3, 1)), # D1 -> C1 @@ -818,9 +813,9 @@ class TestLink(NamedTuple): # Define the test cases class TestCase(NamedTuple): name: str - conflicted: Set[str] - additional_backwards_reachable: Set[str] - want_conflicted_subgraph: Set[str] + conflicted: set[str] + additional_backwards_reachable: set[str] + want_conflicted_subgraph: set[str] # Reminder: # A1 <- A2 <- A3 @@ -936,7 +931,7 @@ def test_prune_inbound_federation_queue(self, room_version: RoomVersion) -> None room_id = "some_room_id" - def prev_event_format(prev_event_id: str) -> Union[Tuple[str, dict], str]: + def prev_event_format(prev_event_id: str) -> Union[tuple[str, dict], str]: """Account for differences in prev_events format across room versions""" if room_version.event_format == EventFormatVersions.ROOM_V1_V2: return prev_event_id, {} @@ -1034,7 +1029,7 @@ def _setup_room_for_backfill_tests(self) -> _BackfillSetupInfo: # | # 5 (newest) - event_graph: Dict[str, List[str]] = { + event_graph: dict[str, list[str]] = { "1": [], "2": ["1"], "3": ["2", "A"], @@ -1050,7 +1045,7 @@ def _setup_room_for_backfill_tests(self) -> _BackfillSetupInfo: "b6": ["3"], } - depth_map: Dict[str, int] = { + depth_map: dict[str, int] = { "1": 1, "2": 2, "b1": 3, @@ -1070,7 +1065,7 @@ def _setup_room_for_backfill_tests(self) -> _BackfillSetupInfo: # The rest are events in the room but not backfilled tet. 
our_server_events = {"5", "4", "B", "3", "A"} - complete_event_dict_map: Dict[str, JsonDict] = {} + complete_event_dict_map: dict[str, JsonDict] = {} stream_ordering = 0 for event_id, prev_event_ids in event_graph.items(): depth = depth_map[event_id] @@ -1425,14 +1420,14 @@ def test_get_event_ids_to_not_pull_from_backoff_retry_after_backoff_duration( class FakeEvent: event_id: str room_id: str - auth_events: List[str] + auth_events: list[str] type = "foo" state_key = "foo" internal_metadata = EventInternalMetadata({}) - def auth_event_ids(self) -> List[str]: + def auth_event_ids(self) -> list[str]: return self.auth_events def is_state(self) -> bool: diff --git a/tests/storage/test_event_push_actions.py b/tests/storage/test_event_push_actions.py index 30ba1ad94a4..ef6c0f24657 100644 --- a/tests/storage/test_event_push_actions.py +++ b/tests/storage/test_event_push_actions.py @@ -19,7 +19,7 @@ # # -from typing import Optional, Tuple +from typing import Optional from twisted.internet.testing import MemoryReactor @@ -47,7 +47,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: assert persist_events_store is not None self.persist_events_store = persist_events_store - def _create_users_and_room(self) -> Tuple[str, str, str, str, str]: + def _create_users_and_room(self) -> tuple[str, str, str, str, str]: """ Creates two users and a shared room. diff --git a/tests/storage/test_events.py b/tests/storage/test_events.py index 25a380e3251..5c7f8140782 100644 --- a/tests/storage/test_events.py +++ b/tests/storage/test_events.py @@ -20,7 +20,7 @@ # import logging -from typing import Dict, List, Optional +from typing import Optional from twisted.internet.testing import MemoryReactor @@ -54,7 +54,7 @@ def prepare( def test_get_senders_for_event_ids(self) -> None: """Tests the `get_senders_for_event_ids` storage function.""" - users_and_tokens: Dict[str, str] = {} + users_and_tokens: dict[str, str] = {} for localpart_suffix in range(10): localpart = f"user_{localpart_suffix}" user_id = self.register_user(localpart, "rabbit") @@ -70,7 +70,7 @@ def test_get_senders_for_event_ids(self) -> None: room_id = self.helper.create_room_as( room_creator_user_id, tok=room_creator_token ) - event_ids_to_senders: Dict[str, str] = {} + event_ids_to_senders: dict[str, str] = {} for user_id, token in users_and_tokens.items(): if user_id == room_creator_user_id: continue @@ -180,7 +180,7 @@ def persist_event( ) self.get_success(self._persistence.persist_event(event, context)) - def assert_extremities(self, expected_extremities: List[str]) -> None: + def assert_extremities(self, expected_extremities: list[str]) -> None: """Assert the current extremities for the room""" extremities = self.get_success( self.store.get_prev_events_for_room(self.room_id) diff --git a/tests/storage/test_events_bg_updates.py b/tests/storage/test_events_bg_updates.py index a1375aa4ac8..d1a794c5a18 100644 --- a/tests/storage/test_events_bg_updates.py +++ b/tests/storage/test_events_bg_updates.py @@ -13,7 +13,6 @@ # # -from typing import Dict from twisted.internet.testing import MemoryReactor @@ -48,7 +47,7 @@ def prepare( ) ) - def create_room(self, room_version: RoomVersion) -> Dict[str, int]: + def create_room(self, room_version: RoomVersion) -> dict[str, int]: """Create a room with a known room version and insert events. 
Returns the set of event IDs that exceed MAX_DEPTH and @@ -67,7 +66,7 @@ def create_room(self, room_version: RoomVersion) -> Dict[str, int]: ) # Insert events with some depths exceeding MAX_DEPTH - event_id_to_depth: Dict[str, int] = {} + event_id_to_depth: dict[str, int] = {} for depth in range(MAX_DEPTH - 5, MAX_DEPTH + 5): event_id = f"$event{depth}:example.com" event_id_to_depth[event_id] = depth diff --git a/tests/storage/test_id_generators.py b/tests/storage/test_id_generators.py index 4c1311a00e1..4846e8cac37 100644 --- a/tests/storage/test_id_generators.py +++ b/tests/storage/test_id_generators.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Dict, List, Optional +from typing import Optional from twisted.internet.testing import MemoryReactor @@ -43,12 +43,12 @@ class MultiWriterIdGeneratorBase(HomeserverTestCase): positive: bool = True - tables: List[str] = ["foobar"] + tables: list[str] = ["foobar"] def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.store = hs.get_datastores().main self.db_pool: DatabasePool = self.store.db_pool - self.instances: Dict[str, MultiWriterIdGenerator] = {} + self.instances: dict[str, MultiWriterIdGenerator] = {} self.get_success(self.db_pool.runInteraction("_setup_db", self._setup_db)) @@ -76,7 +76,7 @@ def _setup_db(self, txn: LoggingTransaction) -> None: def _create_id_generator( self, instance_name: str = "master", - writers: Optional[List[str]] = None, + writers: Optional[list[str]] = None, ) -> MultiWriterIdGenerator: def _create(conn: LoggingDatabaseConnection) -> MultiWriterIdGenerator: return MultiWriterIdGenerator( diff --git a/tests/storage/test_monthly_active_users.py b/tests/storage/test_monthly_active_users.py index e684c6c1613..d607d4ef0a7 100644 --- a/tests/storage/test_monthly_active_users.py +++ b/tests/storage/test_monthly_active_users.py @@ -17,7 +17,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Any, Dict, List +from typing import Any from unittest.mock import AsyncMock from twisted.internet.testing import MemoryReactor @@ -32,7 +32,7 @@ FORTY_DAYS = 40 * 24 * 60 * 60 -def gen_3pids(count: int) -> List[Dict[str, Any]]: +def gen_3pids(count: int) -> list[dict[str, Any]]: """Generate `count` threepids as a list.""" return [ {"medium": "email", "address": "user%i@matrix.org" % i} for i in range(count) @@ -40,7 +40,7 @@ def gen_3pids(count: int) -> List[Dict[str, Any]]: class MonthlyActiveUsersTestCase(unittest.HomeserverTestCase): - def default_config(self) -> Dict[str, Any]: + def default_config(self) -> dict[str, Any]: config = default_config("test") config.update({"limit_usage_by_mau": True, "max_mau_value": 50}) diff --git a/tests/storage/test_redaction.py b/tests/storage/test_redaction.py index 7565376a592..2c188b80466 100644 --- a/tests/storage/test_redaction.py +++ b/tests/storage/test_redaction.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import List, Optional, cast +from typing import Optional, cast from canonicaljson import json @@ -247,8 +247,8 @@ def __init__(self, base_builder: EventBuilder, event_id: str): async def build( self, - prev_event_ids: List[str], - auth_event_ids: Optional[List[str]], + prev_event_ids: list[str], + auth_event_ids: Optional[list[str]], depth: Optional[int] = None, ) -> EventBase: built_event = await self._base_builder.build( diff --git a/tests/storage/test_rollback_worker.py 
b/tests/storage/test_rollback_worker.py index f61eb2e319e..125c4499b0c 100644 --- a/tests/storage/test_rollback_worker.py +++ b/tests/storage/test_rollback_worker.py @@ -18,7 +18,6 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import List from unittest import mock from twisted.internet.testing import MemoryReactor @@ -34,7 +33,7 @@ from tests.unittest import HomeserverTestCase -def fake_listdir(filepath: str) -> List[str]: +def fake_listdir(filepath: str) -> list[str]: """ A fake implementation of os.listdir which we can use to mock out the filesystem. diff --git a/tests/storage/test_room_search.py b/tests/storage/test_room_search.py index e530e59fa64..2c0ef19e9eb 100644 --- a/tests/storage/test_room_search.py +++ b/tests/storage/test_room_search.py @@ -19,7 +19,6 @@ # # -from typing import List, Tuple from unittest.case import SkipTest from twisted.internet.testing import MemoryReactor @@ -317,7 +316,7 @@ def test_tokenize_query(self) -> None: ) def _check_test_cases( - self, store: DataStore, cases: List[Tuple[str, bool]] + self, store: DataStore, cases: list[tuple[str, bool]] ) -> None: # Run all the test cases versus search_msgs for query, expect_to_contain in cases: diff --git a/tests/storage/test_roommember.py b/tests/storage/test_roommember.py index b8933d957b4..c5487d81e6b 100644 --- a/tests/storage/test_roommember.py +++ b/tests/storage/test_roommember.py @@ -20,7 +20,7 @@ # # import logging -from typing import List, Optional, Tuple, cast +from typing import Optional, cast from twisted.internet.testing import MemoryReactor @@ -133,7 +133,7 @@ def test__null_byte_in_display_name_properly_handled(self) -> None: room = self.helper.create_room_as(self.u_alice, tok=self.t_alice) res = cast( - List[Tuple[Optional[str], str]], + list[tuple[Optional[str], str]], self.get_success( self.store.db_pool.simple_select_list( "room_memberships", @@ -165,7 +165,7 @@ def test__null_byte_in_display_name_properly_handled(self) -> None: ) res2 = cast( - List[Tuple[Optional[str], str]], + list[tuple[Optional[str], str]], self.get_success( self.store.db_pool.simple_select_list( "room_memberships", @@ -408,7 +408,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: def _assert_member_summary( self, actual_member_summary: MemberSummary, - expected_member_list: List[str], + expected_member_list: list[str], *, expected_member_count: Optional[int] = None, ) -> None: diff --git a/tests/storage/test_sliding_sync_tables.py b/tests/storage/test_sliding_sync_tables.py index f0df166bab5..5cfc1a9c298 100644 --- a/tests/storage/test_sliding_sync_tables.py +++ b/tests/storage/test_sliding_sync_tables.py @@ -18,7 +18,7 @@ # # import logging -from typing import Dict, List, Optional, Tuple, cast +from typing import Optional, cast import attr from parameterized import parameterized @@ -112,7 +112,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.state_handler = self.hs.get_state_handler() - def _get_sliding_sync_joined_rooms(self) -> Dict[str, _SlidingSyncJoinedRoomResult]: + def _get_sliding_sync_joined_rooms(self) -> dict[str, _SlidingSyncJoinedRoomResult]: """ Return the rows from the `sliding_sync_joined_rooms` table. @@ -120,7 +120,7 @@ def _get_sliding_sync_joined_rooms(self) -> Dict[str, _SlidingSyncJoinedRoomResu Mapping from room_id to _SlidingSyncJoinedRoomResult. 
""" rows = cast( - List[Tuple[str, int, int, str, str, bool, str]], + list[tuple[str, int, int, str, str, bool, str]], self.get_success( self.store.db_pool.simple_select_list( "sliding_sync_joined_rooms", @@ -153,7 +153,7 @@ def _get_sliding_sync_joined_rooms(self) -> Dict[str, _SlidingSyncJoinedRoomResu def _get_sliding_sync_membership_snapshots( self, - ) -> Dict[Tuple[str, str], _SlidingSyncMembershipSnapshotResult]: + ) -> dict[tuple[str, str], _SlidingSyncMembershipSnapshotResult]: """ Return the rows from the `sliding_sync_membership_snapshots` table. @@ -161,7 +161,7 @@ def _get_sliding_sync_membership_snapshots( Mapping from the (room_id, user_id) to _SlidingSyncMembershipSnapshotResult. """ rows = cast( - List[Tuple[str, str, str, str, str, int, int, bool, str, str, bool, str]], + list[tuple[str, str, str, str, str, int, int, bool, str, str, bool, str]], self.get_success( self.store.db_pool.simple_select_list( "sliding_sync_membership_snapshots", @@ -207,8 +207,8 @@ def _get_sliding_sync_membership_snapshots( def _create_remote_invite_room_for_user( self, invitee_user_id: str, - unsigned_invite_room_state: Optional[List[StrippedStateEvent]], - ) -> Tuple[str, EventBase]: + unsigned_invite_room_state: Optional[list[StrippedStateEvent]], + ) -> tuple[str, EventBase]: """ Create a fake invite for a remote room and persist it. @@ -2246,7 +2246,7 @@ def test_non_join_server_left_room(self) -> None: ] ) def test_non_join_remote_invite_no_stripped_state( - self, _description: str, stripped_state: Optional[List[StrippedStateEvent]] + self, _description: str, stripped_state: Optional[list[StrippedStateEvent]] ) -> None: """ Test remote invite with no stripped state provided shows up in diff --git a/tests/storage/test_state.py b/tests/storage/test_state.py index bf6da715493..8e821c6d183 100644 --- a/tests/storage/test_state.py +++ b/tests/storage/test_state.py @@ -20,7 +20,7 @@ # import logging -from typing import List, Tuple, cast +from typing import cast from immutabledict import immutabledict @@ -593,7 +593,7 @@ def test_batched_state_group_storing(self) -> None: # check that only state events are in state_groups, and all state events are in state_groups res = cast( - List[Tuple[str]], + list[tuple[str]], self.get_success( self.store.db_pool.simple_select_list( table="state_groups", @@ -618,7 +618,7 @@ def test_batched_state_group_storing(self) -> None: for event, context in processed_events_and_context: if event.is_state(): state = cast( - List[Tuple[str, str]], + list[tuple[str, str]], self.get_success( self.store.db_pool.simple_select_list( table="state_groups_state", @@ -631,7 +631,7 @@ def test_batched_state_group_storing(self) -> None: self.assertEqual(event.state_key, state[0][1]) groups = cast( - List[Tuple[str]], + list[tuple[str]], self.get_success( self.store.db_pool.simple_select_list( table="state_group_edges", diff --git a/tests/storage/test_stream.py b/tests/storage/test_stream.py index 0777c254c0f..d51fa1f8bad 100644 --- a/tests/storage/test_stream.py +++ b/tests/storage/test_stream.py @@ -20,7 +20,6 @@ # import logging -from typing import List, Tuple from unittest.mock import AsyncMock, patch from immutabledict import immutabledict @@ -150,7 +149,7 @@ def prepare( ) self.event_id_none = res["event_id"] - def _filter_messages(self, filter: JsonDict) -> List[str]: + def _filter_messages(self, filter: JsonDict) -> list[str]: """Make a request to /messages with a filter, returns the chunk of events.""" events, next_key, _ = self.get_success( @@ -324,7 +323,7 @@ def 
_update_persisted_instance_name_for_event( def _send_event_on_instance( self, instance_name: str, room_id: str, access_token: str - ) -> Tuple[JsonDict, PersistedEventPosition]: + ) -> tuple[JsonDict, PersistedEventPosition]: """ Send an event in a room and mimic that it was persisted by a specific instance/worker. diff --git a/tests/storage/test_user_directory.py b/tests/storage/test_user_directory.py index 26e045135e4..83d3357c65d 100644 --- a/tests/storage/test_user_directory.py +++ b/tests/storage/test_user_directory.py @@ -19,7 +19,7 @@ # # import re -from typing import Any, Dict, List, Optional, Set, Tuple, cast +from typing import Any, Optional, cast from unittest import mock from unittest.mock import Mock, patch @@ -56,21 +56,21 @@ class GetUserDirectoryTables: def __init__(self, store: DataStore): self.store = store - async def get_users_in_public_rooms(self) -> Set[Tuple[str, str]]: + async def get_users_in_public_rooms(self) -> set[tuple[str, str]]: """Fetch the entire `users_in_public_rooms` table. Returns a list of tuples (user_id, room_id) where room_id is public and contains the user with the given id. """ r = cast( - List[Tuple[str, str]], + list[tuple[str, str]], await self.store.db_pool.simple_select_list( "users_in_public_rooms", None, ("user_id", "room_id") ), ) return set(r) - async def get_users_who_share_private_rooms(self) -> Set[Tuple[str, str, str]]: + async def get_users_who_share_private_rooms(self) -> set[tuple[str, str, str]]: """Fetch the entire `users_who_share_private_rooms` table. Returns a set of tuples (user_id, other_user_id, room_id) corresponding @@ -78,7 +78,7 @@ async def get_users_who_share_private_rooms(self) -> Set[Tuple[str, str, str]]: """ rows = cast( - List[Tuple[str, str, str]], + list[tuple[str, str, str]], await self.store.db_pool.simple_select_list( "users_who_share_private_rooms", None, @@ -87,13 +87,13 @@ async def get_users_who_share_private_rooms(self) -> Set[Tuple[str, str, str]]: ) return set(rows) - async def get_users_in_user_directory(self) -> Set[str]: + async def get_users_in_user_directory(self) -> set[str]: """Fetch the set of users in the `user_directory` table. This is useful when checking we've correctly excluded users from the directory. """ result = cast( - List[Tuple[str]], + list[tuple[str]], await self.store.db_pool.simple_select_list( "user_directory", None, @@ -102,7 +102,7 @@ async def get_users_in_user_directory(self) -> Set[str]: ) return {row[0] for row in result} - async def get_profiles_in_user_directory(self) -> Dict[str, ProfileInfo]: + async def get_profiles_in_user_directory(self) -> dict[str, ProfileInfo]: """Fetch users and their profiles from the `user_directory` table. This is useful when we want to inspect display names and avatars. @@ -110,7 +110,7 @@ async def get_profiles_in_user_directory(self) -> Dict[str, ProfileInfo]: thing missing is an unused room_id column. 
""" rows = cast( - List[Tuple[str, Optional[str], Optional[str]]], + list[tuple[str, Optional[str], Optional[str]]], await self.store.db_pool.simple_select_list( "user_directory", None, @@ -124,7 +124,7 @@ async def get_profiles_in_user_directory(self) -> Dict[str, ProfileInfo]: async def get_tables( self, - ) -> Tuple[Set[str], Set[Tuple[str, str]], Set[Tuple[str, str, str]]]: + ) -> tuple[set[str], set[tuple[str, str]], set[tuple[str, str, str]]]: """Multiple tests want to inspect these tables, so expose them together.""" return ( await self.get_users_in_user_directory(), @@ -277,7 +277,7 @@ def test_initial(self) -> None: def _create_rooms_and_inject_memberships( self, creator: str, token: str, joiner: str - ) -> Tuple[str, str]: + ) -> tuple[str, str]: """Create a public and private room as a normal user. Then get the `joiner` into those rooms. """ diff --git a/tests/storage/util/test_partial_state_events_tracker.py b/tests/storage/util/test_partial_state_events_tracker.py index 1e5663f137d..026bc581802 100644 --- a/tests/storage/util/test_partial_state_events_tracker.py +++ b/tests/storage/util/test_partial_state_events_tracker.py @@ -19,7 +19,7 @@ # # -from typing import Collection, Dict +from typing import Collection from unittest import mock from twisted.internet.defer import CancelledError, ensureDeferred @@ -35,9 +35,9 @@ class PartialStateEventsTrackerTestCase(TestCase): def setUp(self) -> None: # the results to be returned by the mocked get_partial_state_events - self._events_dict: Dict[str, bool] = {} + self._events_dict: dict[str, bool] = {} - async def get_partial_state_events(events: Collection[str]) -> Dict[str, bool]: + async def get_partial_state_events(events: Collection[str]) -> dict[str, bool]: return {e: self._events_dict[e] for e in events} self.mock_store = mock.Mock(spec_set=["get_partial_state_events"]) @@ -73,7 +73,7 @@ def test_un_partial_state_race(self) -> None: # registration of the listener, it should not block. 
self._events_dict = {"event1": True, "event2": False} - async def get_partial_state_events(events: Collection[str]) -> Dict[str, bool]: + async def get_partial_state_events(events: Collection[str]) -> dict[str, bool]: res = {e: self._events_dict[e] for e in events} # change the result for next time self._events_dict = {"event1": False, "event2": False} @@ -91,13 +91,13 @@ def test_un_partial_state_during_get_partial_state_events(self) -> None: self._events_dict = {"event1": True, "event2": False} - async def get_partial_state_events1(events: Collection[str]) -> Dict[str, bool]: + async def get_partial_state_events1(events: Collection[str]) -> dict[str, bool]: self.mock_store.get_partial_state_events.side_effect = ( get_partial_state_events2 ) return {e: self._events_dict[e] for e in events} - async def get_partial_state_events2(events: Collection[str]) -> Dict[str, bool]: + async def get_partial_state_events2(events: Collection[str]) -> dict[str, bool]: self.tracker.notify_un_partial_stated("event1") self._events_dict["event1"] = False return {e: self._events_dict[e] for e in events} diff --git a/tests/test_event_auth.py b/tests/test_event_auth.py index f12402f5f2c..6632e11eb7d 100644 --- a/tests/test_event_auth.py +++ b/tests/test_event_auth.py @@ -20,7 +20,7 @@ # import unittest -from typing import Any, Collection, Dict, Iterable, List, Optional +from typing import Any, Collection, Iterable, List, Optional from parameterized import parameterized @@ -39,7 +39,7 @@ class _StubEventSourceStore: """A stub implementation of the EventSourceStore""" def __init__(self) -> None: - self._store: Dict[str, EventBase] = {} + self._store: dict[str, EventBase] = {} def add_event(self, event: EventBase) -> None: self._store[event.event_id] = event @@ -54,7 +54,7 @@ async def get_events( redact_behaviour: EventRedactBehaviour, get_prev_content: bool = False, allow_rejected: bool = False, - ) -> Dict[str, EventBase]: + ) -> dict[str, EventBase]: assert allow_rejected assert not get_prev_content assert redact_behaviour == EventRedactBehaviour.as_is @@ -745,7 +745,7 @@ def test_room_v10_rejects_other_non_integer_power_levels(self) -> None: test_room_v10_rejects_string_power_levels above handles the string case. 
""" - def create_event(pl_event_content: Dict[str, Any]) -> EventBase: + def create_event(pl_event_content: dict[str, Any]) -> EventBase: return make_event_from_dict( { "room_id": TEST_ROOM_ID, @@ -759,7 +759,7 @@ def create_event(pl_event_content: Dict[str, Any]) -> EventBase: room_version=RoomVersions.V10, ) - contents: Iterable[Dict[str, Any]] = [ + contents: Iterable[dict[str, Any]] = [ {"notifications": {"room": None}}, {"users": {"@alice:wonderland": []}}, {"users_default": {}}, diff --git a/tests/test_mau.py b/tests/test_mau.py index fa98242bf75..e535e7dc2e4 100644 --- a/tests/test_mau.py +++ b/tests/test_mau.py @@ -20,7 +20,7 @@ """Tests REST events for /rooms paths.""" -from typing import List, Optional +from typing import Optional from twisted.internet.testing import MemoryReactor @@ -249,7 +249,7 @@ def test_tracked_but_not_limited(self) -> None: } ) def test_as_trial_days(self) -> None: - user_tokens: List[str] = [] + user_tokens: list[str] = [] def advance_time_and_sync() -> None: self.reactor.advance(24 * 60 * 61) diff --git a/tests/test_server.py b/tests/test_server.py index 1854a3c4d4e..e7d3febe3fb 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -20,7 +20,7 @@ import re from http import HTTPStatus -from typing import Awaitable, Callable, Dict, NoReturn, Optional, Tuple +from typing import Awaitable, Callable, NoReturn, Optional from twisted.internet.defer import Deferred from twisted.web.resource import Resource @@ -70,7 +70,7 @@ def test_handler_for_request(self) -> None: def _callback( request: SynapseRequest, **kwargs: object - ) -> Tuple[int, Dict[str, object]]: + ) -> tuple[int, dict[str, object]]: got_kwargs.update(kwargs) return 200, kwargs @@ -192,7 +192,7 @@ def test_head_request(self) -> None: def _callback( request: SynapseRequest, **kwargs: object - ) -> Tuple[int, Dict[str, object]]: + ) -> tuple[int, dict[str, object]]: return 200, {"result": True} res = JsonResource(self.homeserver) @@ -405,11 +405,11 @@ def __init__(self, clock: Clock): self.clock = clock @cancellable - async def _async_render_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def _async_render_GET(self, request: SynapseRequest) -> tuple[int, JsonDict]: await self.clock.sleep(1.0) return HTTPStatus.OK, {"result": True} - async def _async_render_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + async def _async_render_POST(self, request: SynapseRequest) -> tuple[int, JsonDict]: await self.clock.sleep(1.0) return HTTPStatus.OK, {"result": True} @@ -422,11 +422,11 @@ def __init__(self, clock: Clock): self.clock = clock @cancellable - async def _async_render_GET(self, request: SynapseRequest) -> Tuple[int, bytes]: + async def _async_render_GET(self, request: SynapseRequest) -> tuple[int, bytes]: await self.clock.sleep(1.0) return HTTPStatus.OK, b"ok" - async def _async_render_POST(self, request: SynapseRequest) -> Tuple[int, bytes]: + async def _async_render_POST(self, request: SynapseRequest) -> tuple[int, bytes]: await self.clock.sleep(1.0) return HTTPStatus.OK, b"ok" diff --git a/tests/test_state.py b/tests/test_state.py index ab7b52e90cf..6e5a6d845d3 100644 --- a/tests/test_state.py +++ b/tests/test_state.py @@ -21,14 +21,10 @@ from typing import ( Any, Collection, - Dict, Generator, Iterable, Iterator, - List, Optional, - Set, - Tuple, ) from unittest.mock import AsyncMock, Mock @@ -57,7 +53,7 @@ def create_event( state_key: Optional[str] = None, depth: int = 2, event_id: Optional[str] = None, - prev_events: Optional[List[Tuple[str, dict]]] = 
None, + prev_events: Optional[list[tuple[str, dict]]] = None, **kwargs: Any, ) -> EventBase: global _next_event_id @@ -91,16 +87,16 @@ def create_event( class _DummyStore: def __init__(self) -> None: - self._event_to_state_group: Dict[str, int] = {} - self._group_to_state: Dict[int, MutableStateMap[str]] = {} + self._event_to_state_group: dict[str, int] = {} + self._group_to_state: dict[int, MutableStateMap[str]] = {} - self._event_id_to_event: Dict[str, EventBase] = {} + self._event_id_to_event: dict[str, EventBase] = {} self._next_group = 1 async def get_state_groups_ids( self, room_id: str, event_ids: Collection[str] - ) -> Dict[int, MutableStateMap[str]]: + ) -> dict[int, MutableStateMap[str]]: groups = {} for event_id in event_ids: group = self._event_to_state_group.get(event_id) @@ -137,7 +133,7 @@ async def store_state_group( async def get_events( self, event_ids: Collection[str], **kwargs: Any - ) -> Dict[str, EventBase]: + ) -> dict[str, EventBase]: return { e_id: self._event_id_to_event[e_id] for e_id in event_ids @@ -146,12 +142,12 @@ async def get_events( async def get_partial_state_events( self, event_ids: Collection[str] - ) -> Dict[str, bool]: + ) -> dict[str, bool]: return dict.fromkeys(event_ids, False) async def get_state_group_delta( self, name: str - ) -> Tuple[Optional[int], Optional[StateMap[str]]]: + ) -> tuple[Optional[int], Optional[StateMap[str]]]: return None, None def register_events(self, events: Iterable[EventBase]) -> None: @@ -170,7 +166,7 @@ async def get_room_version_id(self, room_id: str) -> str: async def get_state_group_for_events( self, event_ids: Collection[str], await_full_state: bool = True - ) -> Dict[str, int]: + ) -> dict[str, int]: res = {} for event in event_ids: res[event] = self._event_to_state_group[event] @@ -178,7 +174,7 @@ async def get_state_group_for_events( async def get_state_for_groups( self, groups: Collection[int] - ) -> Dict[int, MutableStateMap[str]]: + ) -> dict[int, MutableStateMap[str]]: res = {} for group in groups: state = self._group_to_state[group] @@ -193,15 +189,15 @@ def __init__(self, **kwargs: Any) -> None: class Graph: - def __init__(self, nodes: Dict[str, DictObj], edges: Dict[str, List[str]]): - events: Dict[str, EventBase] = {} - clobbered: Set[str] = set() + def __init__(self, nodes: dict[str, DictObj], edges: dict[str, list[str]]): + events: dict[str, EventBase] = {} + clobbered: set[str] = set() for event_id, fields in nodes.items(): refs = edges.get(event_id) if refs: clobbered.difference_update(refs) - prev_events: List[Tuple[str, dict]] = [(r, {}) for r in refs] + prev_events: list[tuple[str, dict]] = [(r, {}) for r in refs] else: prev_events = [] @@ -281,7 +277,7 @@ def test_branch_no_conflict(self) -> Generator[defer.Deferred, Any, None]: self.dummy_store.register_events(graph.walk()) - context_store: Dict[str, EventContext] = {} + context_store: dict[str, EventContext] = {} for event in graph.walk(): context = yield defer.ensureDeferred( @@ -328,7 +324,7 @@ def test_branch_basic_conflict( self.dummy_store.register_events(graph.walk()) - context_store: Dict[str, EventContext] = {} + context_store: dict[str, EventContext] = {} for event in graph.walk(): context = yield defer.ensureDeferred( @@ -389,7 +385,7 @@ def test_branch_have_banned_conflict( self.dummy_store.register_events(graph.walk()) - context_store: Dict[str, EventContext] = {} + context_store: dict[str, EventContext] = {} for event in graph.walk(): context = yield defer.ensureDeferred( @@ -467,7 +463,7 @@ def test_branch_have_perms_conflict( 
self.dummy_store.register_events(graph.walk()) - context_store: Dict[str, EventContext] = {} + context_store: dict[str, EventContext] = {} for event in graph.walk(): context = yield defer.ensureDeferred( @@ -490,7 +486,7 @@ def test_branch_have_perms_conflict( self.assertEqual(ctx_d.state_group_before_event, ctx_d.state_group) def _add_depths( - self, nodes: Dict[str, DictObj], edges: Dict[str, List[str]] + self, nodes: dict[str, DictObj], edges: dict[str, list[str]] ) -> None: def _get_depth(ev: str) -> int: node = nodes[ev] diff --git a/tests/test_types.py b/tests/test_types.py index 0c08bc8ecc6..1802f0fae3e 100644 --- a/tests/test_types.py +++ b/tests/test_types.py @@ -19,7 +19,6 @@ # # -from typing import Type from unittest import skipUnless from immutabledict import immutabledict @@ -152,7 +151,7 @@ def test_non_ascii(self) -> None: class MultiWriterTokenTestCase(unittest.HomeserverTestCase): """Tests for the different types of multi writer tokens.""" - token_type: Type[AbstractMultiWriterStreamToken] + token_type: type[AbstractMultiWriterStreamToken] def test_basic_token(self) -> None: """Test that a simple stream token can be serialized and unserialized""" diff --git a/tests/test_utils/__init__.py b/tests/test_utils/__init__.py index 3e6fd03600b..0df5a4e6c3a 100644 --- a/tests/test_utils/__init__.py +++ b/tests/test_utils/__init__.py @@ -28,7 +28,7 @@ import sys import warnings from binascii import unhexlify -from typing import TYPE_CHECKING, Awaitable, Callable, Tuple, TypeVar +from typing import TYPE_CHECKING, Awaitable, Callable, TypeVar import attr import zope.interface @@ -102,7 +102,7 @@ class FakeResponse: # type: ignore[misc] attribute, and didn't support deliverBody until recently. """ - version: Tuple[bytes, int, int] = (b"HTTP", 1, 1) + version: tuple[bytes, int, int] = (b"HTTP", 1, 1) # HTTP response code code: int = 200 diff --git a/tests/test_utils/event_injection.py b/tests/test_utils/event_injection.py index c1eaf9a5751..9cdb456b1b1 100644 --- a/tests/test_utils/event_injection.py +++ b/tests/test_utils/event_injection.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Any, List, Optional, Tuple +from typing import Any, Optional import synapse.server from synapse.api.constants import EventTypes @@ -62,7 +62,7 @@ async def inject_member_event( async def inject_event( hs: synapse.server.HomeServer, room_version: Optional[str] = None, - prev_event_ids: Optional[List[str]] = None, + prev_event_ids: Optional[list[str]] = None, **kwargs: Any, ) -> EventBase: """Inject a generic event into a room @@ -87,9 +87,9 @@ async def inject_event( async def create_event( hs: synapse.server.HomeServer, room_version: Optional[str] = None, - prev_event_ids: Optional[List[str]] = None, + prev_event_ids: Optional[list[str]] = None, **kwargs: Any, -) -> Tuple[EventBase, EventContext]: +) -> tuple[EventBase, EventContext]: if room_version is None: room_version = await hs.get_datastores().main.get_room_version_id( kwargs["room_id"] diff --git a/tests/test_utils/html_parsers.py b/tests/test_utils/html_parsers.py index a0f39cb1303..aff1626295c 100644 --- a/tests/test_utils/html_parsers.py +++ b/tests/test_utils/html_parsers.py @@ -20,7 +20,7 @@ # from html.parser import HTMLParser -from typing import Dict, Iterable, List, NoReturn, Optional, Tuple +from typing import Iterable, NoReturn, Optional class TestHtmlParser(HTMLParser): @@ -30,16 +30,16 @@ def __init__(self) -> None: super().__init__() # a list of links found in the doc - 
self.links: List[str] = [] + self.links: list[str] = [] # the values of any hidden s: map from name to value - self.hiddens: Dict[str, Optional[str]] = {} + self.hiddens: dict[str, Optional[str]] = {} # the values of any radio buttons: map from name to list of values - self.radios: Dict[str, List[Optional[str]]] = {} + self.radios: dict[str, list[Optional[str]]] = {} def handle_starttag( - self, tag: str, attrs: Iterable[Tuple[str, Optional[str]]] + self, tag: str, attrs: Iterable[tuple[str, Optional[str]]] ) -> None: attr_dict = dict(attrs) if tag == "a": diff --git a/tests/test_utils/oidc.py b/tests/test_utils/oidc.py index f2de8bded5d..c2d6af029a1 100644 --- a/tests/test_utils/oidc.py +++ b/tests/test_utils/oidc.py @@ -23,7 +23,7 @@ import base64 import json from hashlib import sha256 -from typing import Any, ContextManager, Dict, List, Optional, Tuple +from typing import Any, ContextManager, Optional from unittest.mock import Mock, patch from urllib.parse import parse_qs @@ -75,16 +75,16 @@ def __init__(self, clock: Clock, issuer: str): self.post_token_handler = Mock(side_effect=self._post_token_handler) # A code -> grant mapping - self._authorization_grants: Dict[str, FakeAuthorizationGrant] = {} + self._authorization_grants: dict[str, FakeAuthorizationGrant] = {} # An access token -> grant mapping - self._sessions: Dict[str, FakeAuthorizationGrant] = {} + self._sessions: dict[str, FakeAuthorizationGrant] = {} # We generate here an ECDSA key with the P-256 curve (ES256 algorithm) used for # signing JWTs. ECDSA keys are really quick to generate compared to RSA. self._key = ECKey.generate_key(crv="P-256", is_private=True) self._jwks = KeySet([ECKey.import_key(self._key.as_pem(is_private=False))]) - self._id_token_overrides: Dict[str, Any] = {} + self._id_token_overrides: dict[str, Any] = {} def reset_mocks(self) -> None: self.request.reset_mock() @@ -222,7 +222,7 @@ def start_authorization( userinfo: dict, nonce: Optional[str] = None, with_sid: bool = False, - ) -> Tuple[str, FakeAuthorizationGrant]: + ) -> tuple[str, FakeAuthorizationGrant]: """Start an authorization request, and get back the code to use on the authorization endpoint.""" code = random_string(10) sid = None @@ -242,7 +242,7 @@ def start_authorization( return code, grant - def exchange_code(self, code: str) -> Optional[Dict[str, Any]]: + def exchange_code(self, code: str) -> Optional[dict[str, Any]]: grant = self._authorization_grants.pop(code, None) if grant is None: return None @@ -269,7 +269,7 @@ def buggy_endpoint( metadata: bool = False, token: bool = False, userinfo: bool = False, - ) -> ContextManager[Dict[str, Mock]]: + ) -> ContextManager[dict[str, Mock]]: """A context which makes a set of endpoints return a 500 error. 
Args: @@ -356,7 +356,7 @@ def _get_userinfo_handler(self, access_token: Optional[str]) -> IResponse: return FakeResponse.json(payload=user_info) - def _post_token_handler(self, params: Dict[str, List[str]]) -> IResponse: + def _post_token_handler(self, params: dict[str, list[str]]) -> IResponse: """Handles requests to the token endpoint.""" code = params.get("code", []) diff --git a/tests/unittest.py b/tests/unittest.py index 9ab052e7c0c..1007f404561 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -33,16 +33,12 @@ Awaitable, Callable, ClassVar, - Dict, Generic, Iterable, - List, Mapping, NoReturn, Optional, Protocol, - Tuple, - Type, TypeVar, Union, ) @@ -169,7 +165,7 @@ def _parse_config_dict(config: str) -> HomeServerConfig: return config_obj -def make_homeserver_config_obj(config: Dict[str, Any]) -> HomeServerConfig: +def make_homeserver_config_obj(config: dict[str, Any]) -> HomeServerConfig: """Creates a :class:`HomeServerConfig` instance with the given configuration dict. This is equivalent to:: @@ -250,7 +246,7 @@ def tearDown(orig: Callable[[], R]) -> R: return ret - def assertObjectHasAttributes(self, attrs: Dict[str, object], obj: object) -> None: + def assertObjectHasAttributes(self, attrs: dict[str, object], obj: object) -> None: """Asserts that the given object has each of the attributes given, and that the value of each matches according to assertEqual.""" for key in attrs.keys(): @@ -299,14 +295,14 @@ def assertIncludes( elif not exact and actual_items >= expected_items: return - expected_lines: List[str] = [] + expected_lines: list[str] = [] for expected_item in expected_items: is_expected_in_actual = expected_item in actual_items expected_lines.append( "{} {}".format(" " if is_expected_in_actual else "?", expected_item) ) - actual_lines: List[str] = [] + actual_lines: list[str] = [] for actual_item in actual_items: is_actual_in_expected = actual_item in expected_items actual_lines.append( @@ -379,7 +375,7 @@ class HomeserverTestCase(TestCase): hijack_auth: ClassVar[bool] = True needs_threadpool: ClassVar[bool] = False - servlets: ClassVar[List[RegisterServletsFunc]] = [] + servlets: ClassVar[list[RegisterServletsFunc]] = [] def __init__(self, methodName: str): super().__init__(methodName) @@ -527,7 +523,7 @@ def create_test_resource(self) -> Resource: create_resource_tree(self.create_resource_dict(), root_resource) return root_resource - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: """Create a resource tree for the test server A resource tree is a mapping from path to twisted.web.resource. @@ -578,7 +574,7 @@ def make_request( path: Union[bytes, str], content: Union[bytes, str, JsonDict] = b"", access_token: Optional[str] = None, - request: Type[Request] = SynapseRequest, + request: type[Request] = SynapseRequest, shorthand: bool = True, federation_auth_origin: Optional[bytes] = None, content_type: Optional[bytes] = None, @@ -709,7 +705,7 @@ def get_success(self, d: Awaitable[TV], by: float = 0.0) -> TV: return self.successResultOf(deferred) def get_failure( - self, d: Awaitable[Any], exc: Type[_ExcType], by: float = 0.0 + self, d: Awaitable[Any], exc: type[_ExcType], by: float = 0.0 ) -> _TypedFailure[_ExcType]: """ Run a Deferred and get a Failure from it. The failure must be of the type `exc`. 
@@ -799,7 +795,7 @@ def register_appservice_user( username: str, appservice_token: str, inhibit_login: bool = False, - ) -> Tuple[str, Optional[str]]: + ) -> tuple[str, Optional[str]]: """Register an appservice user as an application service. Requires the client-facing registration API be registered. @@ -831,7 +827,7 @@ def login( username: str, password: str, device_id: Optional[str] = None, - additional_request_fields: Optional[Dict[str, str]] = None, + additional_request_fields: Optional[dict[str, str]] = None, custom_headers: Optional[Iterable[CustomHeaderType]] = None, ) -> str: """ @@ -871,7 +867,7 @@ def create_and_send_event( room_id: str, user: UserID, soft_failed: bool = False, - prev_event_ids: Optional[List[str]] = None, + prev_event_ids: Optional[list[str]] = None, ) -> str: """ Create and send an event. @@ -963,7 +959,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: ) ) - def create_resource_dict(self) -> Dict[str, Resource]: + def create_resource_dict(self) -> dict[str, Resource]: d = super().create_resource_dict() d["/_matrix/federation"] = TransportLayerServer(self.hs) return d diff --git a/tests/util/caches/test_deferred_cache.py b/tests/util/caches/test_deferred_cache.py index f0deb1554ef..fc01a2f5e96 100644 --- a/tests/util/caches/test_deferred_cache.py +++ b/tests/util/caches/test_deferred_cache.py @@ -20,7 +20,6 @@ # from functools import partial -from typing import List, Tuple from twisted.internet import defer @@ -169,7 +168,7 @@ def test_get_immediate(self) -> None: self.assertEqual(v, 2) def test_invalidate(self) -> None: - cache: DeferredCache[Tuple[str], int] = DeferredCache( + cache: DeferredCache[tuple[str], int] = DeferredCache( name="test", clock=self.clock, server_name="test_server" ) cache.prefill(("foo",), 123) @@ -266,7 +265,7 @@ def test_eviction_lru(self) -> None: cache.get(3) def test_eviction_iterable(self) -> None: - cache: DeferredCache[int, List[str]] = DeferredCache( + cache: DeferredCache[int, list[str]] = DeferredCache( name="test", clock=self.clock, server_name="test_server", diff --git a/tests/util/caches/test_descriptors.py b/tests/util/caches/test_descriptors.py index 0e3b6ae36b7..e27f84fa6dd 100644 --- a/tests/util/caches/test_descriptors.py +++ b/tests/util/caches/test_descriptors.py @@ -23,12 +23,9 @@ Any, Generator, Iterable, - List, Mapping, NoReturn, Optional, - Set, - Tuple, cast, ) from unittest import mock @@ -257,7 +254,7 @@ def fn(self, arg1: int) -> Deferred: return self.result obj = Cls() - callbacks: Set[str] = set() + callbacks: set[str] = set() # set off an asynchronous request origin_d: Deferred = Deferred() @@ -435,7 +432,7 @@ def __init__(self) -> None: _, self.clock = get_clock() # nb must be called this for @cached @descriptors.cached(iterable=True) - def fn(self, arg1: int, arg2: int) -> Tuple[str, ...]: + def fn(self, arg1: int, arg2: int) -> tuple[str, ...]: return self.mock(arg1, arg2) obj = Cls() @@ -925,7 +922,7 @@ def fn(self, arg1: int) -> None: pass @descriptors.cachedList(cached_method_name="fn", list_name="args1") - def list_fn(self, args1: List[int]) -> "Deferred[Mapping[int, str]]": + def list_fn(self, args1: list[int]) -> "Deferred[Mapping[int, str]]": return self.mock(args1) obj = Cls() @@ -970,7 +967,7 @@ def fn(self, arg1: int, arg2: int) -> None: pass @descriptors.cachedList(cached_method_name="fn", list_name="args1") - async def list_fn(self, args1: List[int], arg2: int) -> Mapping[int, str]: + async def list_fn(self, args1: list[int], arg2: int) -> Mapping[int, 
str]: # we want this to behave like an asynchronous function await run_on_reactor() return self.mock(args1, arg2) @@ -1012,7 +1009,7 @@ def fn(self, arg1: int) -> None: pass @cachedList(cached_method_name="fn", list_name="args") - async def list_fn(self, args: List[int]) -> Mapping[int, str]: + async def list_fn(self, args: list[int]) -> Mapping[int, str]: await complete_lookup return {arg: str(arg) for arg in args} @@ -1049,7 +1046,7 @@ def fn(self, arg1: int) -> None: pass @cachedList(cached_method_name="fn", list_name="args") - async def list_fn(self, args: List[int]) -> Mapping[int, str]: + async def list_fn(self, args: list[int]) -> Mapping[int, str]: await make_deferred_yieldable(complete_lookup) self.inner_context_was_finished = current_context().finished return {arg: str(arg) for arg in args} @@ -1097,7 +1094,7 @@ def fn(self, room_id: str, event_id: str) -> None: # of arguments as the underlying cached function, just with one of # the arguments being an iterable @descriptors.cachedList(cached_method_name="fn", list_name="keys") - def list_fn(self, keys: Iterable[Tuple[str, str]]) -> None: + def list_fn(self, keys: Iterable[tuple[str, str]]) -> None: pass diff --git a/tests/util/test_async_helpers.py b/tests/util/test_async_helpers.py index fd8d576aea8..a02a2f0cef3 100644 --- a/tests/util/test_async_helpers.py +++ b/tests/util/test_async_helpers.py @@ -19,7 +19,7 @@ # import logging import traceback -from typing import Any, Coroutine, List, NoReturn, Optional, Tuple, TypeVar +from typing import Any, Coroutine, NoReturn, Optional, TypeVar from parameterized import parameterized_class @@ -71,7 +71,7 @@ def check_called_first(res: int) -> int: observer1.addBoth(check_called_first) # store the results - results: List[Optional[int]] = [None, None] + results: list[Optional[int]] = [None, None] def check_val(res: int, idx: int) -> int: results[idx] = res @@ -102,7 +102,7 @@ def check_called_first(res: int) -> int: observer1.addBoth(check_called_first) # store the results - results: List[Optional[Failure]] = [None, None] + results: list[Optional[Failure]] = [None, None] def check_failure(res: Failure, idx: int) -> None: results[idx] = res @@ -644,7 +644,7 @@ def test_multiple_sleepers_wake(self) -> None: class GatherCoroutineTests(TestCase): """Tests for `gather_optional_coroutines`""" - def make_coroutine(self) -> Tuple[Coroutine[Any, Any, T], "defer.Deferred[T]"]: + def make_coroutine(self) -> tuple[Coroutine[Any, Any, T], "defer.Deferred[T]"]: """Returns a coroutine and a deferred that it is waiting on to resolve""" d: "defer.Deferred[T]" = defer.Deferred() diff --git a/tests/util/test_batching_queue.py b/tests/util/test_batching_queue.py index 60bfdf38aaa..30b07dc6ad9 100644 --- a/tests/util/test_batching_queue.py +++ b/tests/util/test_batching_queue.py @@ -18,7 +18,6 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import List, Tuple from prometheus_client import Gauge @@ -47,7 +46,7 @@ def setUp(self) -> None: except KeyError: pass - self._pending_calls: List[Tuple[List[str], defer.Deferred]] = [] + self._pending_calls: list[tuple[list[str], defer.Deferred]] = [] self.queue: BatchingQueue[str, str] = BatchingQueue( name="test_queue", hs=self.hs, @@ -55,7 +54,7 @@ def setUp(self) -> None: process_batch_callback=self._process_queue, ) - async def _process_queue(self, values: List[str]) -> str: + async def _process_queue(self, values: list[str]) -> str: d: "defer.Deferred[str]" = defer.Deferred() self._pending_calls.append((values,
d)) return await make_deferred_yieldable(d) diff --git a/tests/util/test_expiring_cache.py b/tests/util/test_expiring_cache.py index 35c0f02e3fb..8964359a6ef 100644 --- a/tests/util/test_expiring_cache.py +++ b/tests/util/test_expiring_cache.py @@ -19,7 +19,6 @@ # # -from typing import List from synapse.util.caches.expiringcache import ExpiringCache @@ -65,7 +64,7 @@ def test_eviction(self) -> None: def test_iterable_eviction(self) -> None: reactor, clock = get_clock() - cache: ExpiringCache[str, List[int]] = ExpiringCache( + cache: ExpiringCache[str, list[int]] = ExpiringCache( cache_name="test", server_name="testserver", hs=self.hs, diff --git a/tests/util/test_itertools.py b/tests/util/test_itertools.py index 7a593cc6834..016389d49ba 100644 --- a/tests/util/test_itertools.py +++ b/tests/util/test_itertools.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Dict, Iterable, List, Sequence +from typing import Iterable, Sequence from synapse.util.iterutils import ( chunk_seq, @@ -67,13 +67,13 @@ class SortTopologically(TestCase): def test_empty(self) -> None: "Test that an empty graph works correctly" - graph: Dict[int, List[int]] = {} + graph: dict[int, list[int]] = {} self.assertEqual(list(sorted_topologically([], graph)), []) def test_handle_empty_graph(self) -> None: "Test that a graph where a node doesn't have an entry is treated as empty" - graph: Dict[int, List[int]] = {} + graph: dict[int, list[int]] = {} # For disconnected nodes the output is simply sorted. self.assertEqual(list(sorted_topologically([1, 2], graph)), [1, 2]) @@ -81,7 +81,7 @@ def test_handle_empty_graph(self) -> None: def test_disconnected(self) -> None: "Test that a graph with no edges work" - graph: Dict[int, List[int]] = {1: [], 2: []} + graph: dict[int, list[int]] = {1: [], 2: []} # For disconnected nodes the output is simply sorted. self.assertEqual(list(sorted_topologically([1, 2], graph)), [1, 2]) @@ -89,19 +89,19 @@ def test_disconnected(self) -> None: def test_linear(self) -> None: "Test that a simple `4 -> 3 -> 2 -> 1` graph works" - graph: Dict[int, List[int]] = {1: [], 2: [1], 3: [2], 4: [3]} + graph: dict[int, list[int]] = {1: [], 2: [1], 3: [2], 4: [3]} self.assertEqual(list(sorted_topologically([4, 3, 2, 1], graph)), [1, 2, 3, 4]) def test_subset(self) -> None: "Test that only sorting a subset of the graph works" - graph: Dict[int, List[int]] = {1: [], 2: [1], 3: [2], 4: [3]} + graph: dict[int, list[int]] = {1: [], 2: [1], 3: [2], 4: [3]} self.assertEqual(list(sorted_topologically([4, 3], graph)), [3, 4]) def test_fork(self) -> None: "Test that a forked graph works" - graph: Dict[int, List[int]] = {1: [], 2: [1], 3: [1], 4: [2, 3]} + graph: dict[int, list[int]] = {1: [], 2: [1], 3: [1], 4: [2, 3]} # Valid orderings are `[1, 3, 2, 4]` or `[1, 2, 3, 4]`, but we should # always get the same one. 
@@ -109,13 +109,13 @@ def test_fork(self) -> None: def test_duplicates(self) -> None: "Test that a graph with duplicate edges work" - graph: Dict[int, List[int]] = {1: [], 2: [1, 1], 3: [2, 2], 4: [3]} + graph: dict[int, list[int]] = {1: [], 2: [1, 1], 3: [2, 2], 4: [3]} self.assertEqual(list(sorted_topologically([4, 3, 2, 1], graph)), [1, 2, 3, 4]) def test_multiple_paths(self) -> None: "Test that a graph with multiple paths between two nodes work" - graph: Dict[int, List[int]] = {1: [], 2: [1], 3: [2], 4: [3, 2, 1]} + graph: dict[int, list[int]] = {1: [], 2: [1], 3: [2], 4: [3, 2, 1]} self.assertEqual(list(sorted_topologically([4, 3, 2, 1], graph)), [1, 2, 3, 4]) @@ -126,13 +126,13 @@ class SortTopologicallyBatched(TestCase): def test_empty(self) -> None: "Test that an empty graph works correctly" - graph: Dict[int, List[int]] = {} + graph: dict[int, list[int]] = {} self.assertEqual(list(sorted_topologically_batched([], graph)), []) def test_handle_empty_graph(self) -> None: "Test that a graph where a node doesn't have an entry is treated as empty" - graph: Dict[int, List[int]] = {} + graph: dict[int, list[int]] = {} # For disconnected nodes the output is simply sorted. self.assertEqual(list(sorted_topologically_batched([1, 2], graph)), [[1, 2]]) @@ -140,7 +140,7 @@ def test_handle_empty_graph(self) -> None: def test_disconnected(self) -> None: "Test that a graph with no edges work" - graph: Dict[int, List[int]] = {1: [], 2: []} + graph: dict[int, list[int]] = {1: [], 2: []} # For disconnected nodes the output is simply sorted. self.assertEqual(list(sorted_topologically_batched([1, 2], graph)), [[1, 2]]) @@ -148,7 +148,7 @@ def test_disconnected(self) -> None: def test_linear(self) -> None: "Test that a simple `4 -> 3 -> 2 -> 1` graph works" - graph: Dict[int, List[int]] = {1: [], 2: [1], 3: [2], 4: [3]} + graph: dict[int, list[int]] = {1: [], 2: [1], 3: [2], 4: [3]} self.assertEqual( list(sorted_topologically_batched([4, 3, 2, 1], graph)), @@ -157,13 +157,13 @@ def test_linear(self) -> None: def test_subset(self) -> None: "Test that only sorting a subset of the graph works" - graph: Dict[int, List[int]] = {1: [], 2: [1], 3: [2], 4: [3]} + graph: dict[int, list[int]] = {1: [], 2: [1], 3: [2], 4: [3]} self.assertEqual(list(sorted_topologically_batched([4, 3], graph)), [[3], [4]]) def test_fork(self) -> None: "Test that a forked graph works" - graph: Dict[int, List[int]] = {1: [], 2: [1], 3: [1], 4: [2, 3]} + graph: dict[int, list[int]] = {1: [], 2: [1], 3: [1], 4: [2, 3]} # Valid orderings are `[1, 3, 2, 4]` or `[1, 2, 3, 4]`, but we should # always get the same one. 
@@ -173,7 +173,7 @@ def test_fork(self) -> None: def test_duplicates(self) -> None: "Test that a graph with duplicate edges work" - graph: Dict[int, List[int]] = {1: [], 2: [1, 1], 3: [2, 2], 4: [3]} + graph: dict[int, list[int]] = {1: [], 2: [1, 1], 3: [2, 2], 4: [3]} self.assertEqual( list(sorted_topologically_batched([4, 3, 2, 1], graph)), @@ -182,7 +182,7 @@ def test_duplicates(self) -> None: def test_multiple_paths(self) -> None: "Test that a graph with multiple paths between two nodes work" - graph: Dict[int, List[int]] = {1: [], 2: [1], 3: [2], 4: [3, 2, 1]} + graph: dict[int, list[int]] = {1: [], 2: [1], 3: [2], 4: [3, 2, 1]} self.assertEqual( list(sorted_topologically_batched([4, 3, 2, 1], graph)), diff --git a/tests/util/test_linearizer.py b/tests/util/test_linearizer.py index 722ce79dcc6..b2a631d7473 100644 --- a/tests/util/test_linearizer.py +++ b/tests/util/test_linearizer.py @@ -19,7 +19,7 @@ # # -from typing import Hashable, Protocol, Tuple +from typing import Hashable, Protocol from twisted.internet import defer from twisted.internet.defer import CancelledError, Deferred @@ -43,7 +43,7 @@ def setUp(self) -> None: def _start_task( self, linearizer: Linearizer, key: Hashable - ) -> Tuple["Deferred[None]", "Deferred[None]", UnblockFunction]: + ) -> tuple["Deferred[None]", "Deferred[None]", UnblockFunction]: """Starts a task which acquires the linearizer lock, blocks, then completes. Args: diff --git a/tests/util/test_lrucache.py b/tests/util/test_lrucache.py index 56e9996b005..dcbfcfa2e47 100644 --- a/tests/util/test_lrucache.py +++ b/tests/util/test_lrucache.py @@ -20,7 +20,6 @@ # -from typing import List, Tuple from unittest.mock import Mock, patch from synapse.metrics.jemalloc import JemallocStats @@ -84,7 +83,7 @@ def test_pop(self) -> None: def test_del_multi(self) -> None: # The type here isn't quite correct as they don't handle TreeCache well. - cache: LruCache[Tuple[str, str], str] = LruCache( + cache: LruCache[tuple[str, str], str] = LruCache( max_size=4, clock=self.clock, cache_type=TreeCache, @@ -211,7 +210,7 @@ def test_del_multi(self) -> None: m3 = Mock() m4 = Mock() # The type here isn't quite correct as they don't handle TreeCache well. 
- cache: LruCache[Tuple[str, str], str] = LruCache( + cache: LruCache[tuple[str, str], str] = LruCache( max_size=4, clock=self.clock, cache_type=TreeCache, @@ -295,7 +294,7 @@ def test_eviction(self) -> None: class LruCacheSizedTestCase(unittest.HomeserverTestCase): def test_evict(self) -> None: - cache: LruCache[str, List[int]] = LruCache( + cache: LruCache[str, list[int]] = LruCache( max_size=5, clock=self.clock, size_callback=len, server_name="test_server" ) cache["key1"] = [0] @@ -320,7 +319,7 @@ def test_evict(self) -> None: def test_zero_size_drop_from_cache(self) -> None: """Test that `drop_from_cache` works correctly with 0-sized entries.""" - cache: LruCache[str, List[int]] = LruCache( + cache: LruCache[str, list[int]] = LruCache( max_size=5, clock=self.clock, size_callback=lambda x: 0, diff --git a/tests/util/test_mutable_overlay_mapping.py b/tests/util/test_mutable_overlay_mapping.py index a7335fca73a..ed738919e4b 100644 --- a/tests/util/test_mutable_overlay_mapping.py +++ b/tests/util/test_mutable_overlay_mapping.py @@ -13,7 +13,6 @@ # import unittest -from typing import Dict from synapse.util import MutableOverlayMapping @@ -24,7 +23,7 @@ class TestMutableOverlayMapping(unittest.TestCase): def test_init(self) -> None: """Test initialization with different input types.""" # Test with empty dict - empty_dict: Dict[str, int] = {} + empty_dict: dict[str, int] = {} mapping = MutableOverlayMapping(empty_dict) self.assertEqual(len(mapping), 0) diff --git a/tests/util/test_rwlock.py b/tests/util/test_rwlock.py index 12f821d684c..36771188ae1 100644 --- a/tests/util/test_rwlock.py +++ b/tests/util/test_rwlock.py @@ -19,7 +19,7 @@ # # -from typing import AsyncContextManager, Callable, Sequence, Tuple +from typing import AsyncContextManager, Callable, Sequence from twisted.internet import defer from twisted.internet.defer import CancelledError, Deferred @@ -35,7 +35,7 @@ def _start_reader_or_writer( read_or_write: Callable[[str], AsyncContextManager], key: str, return_value: str, - ) -> Tuple["Deferred[str]", "Deferred[None]", "Deferred[None]"]: + ) -> tuple["Deferred[str]", "Deferred[None]", "Deferred[None]"]: """Starts a reader or writer which acquires the lock, blocks, then completes. Args: @@ -67,7 +67,7 @@ async def reader_or_writer() -> str: def _start_blocking_reader( self, rwlock: ReadWriteLock, key: str, return_value: str - ) -> Tuple["Deferred[str]", "Deferred[None]", "Deferred[None]"]: + ) -> tuple["Deferred[str]", "Deferred[None]", "Deferred[None]"]: """Starts a reader which acquires the lock, blocks, then releases the lock. See the docstring for `_start_reader_or_writer` for details about the arguments @@ -77,7 +77,7 @@ def _start_blocking_reader( def _start_blocking_writer( self, rwlock: ReadWriteLock, key: str, return_value: str - ) -> Tuple["Deferred[str]", "Deferred[None]", "Deferred[None]"]: + ) -> tuple["Deferred[str]", "Deferred[None]", "Deferred[None]"]: """Starts a writer which acquires the lock, blocks, then releases the lock. See the docstring for `_start_reader_or_writer` for details about the arguments @@ -87,7 +87,7 @@ def _start_blocking_writer( def _start_nonblocking_reader( self, rwlock: ReadWriteLock, key: str, return_value: str - ) -> Tuple["Deferred[str]", "Deferred[None]"]: + ) -> tuple["Deferred[str]", "Deferred[None]"]: """Starts a reader which acquires the lock, then releases it immediately. See the docstring for `_start_reader_or_writer` for details about the arguments. 
@@ -106,7 +106,7 @@ def _start_nonblocking_reader( def _start_nonblocking_writer( self, rwlock: ReadWriteLock, key: str, return_value: str - ) -> Tuple["Deferred[str]", "Deferred[None]"]: + ) -> tuple["Deferred[str]", "Deferred[None]"]: """Starts a writer which acquires the lock, then releases it immediately. See the docstring for `_start_reader_or_writer` for details about the arguments. diff --git a/tests/util/test_task_scheduler.py b/tests/util/test_task_scheduler.py index e97f0ed6111..43c3ce52eaa 100644 --- a/tests/util/test_task_scheduler.py +++ b/tests/util/test_task_scheduler.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import List, Optional, Tuple +from typing import Optional from twisted.internet.task import deferLater from twisted.internet.testing import MemoryReactor @@ -42,7 +42,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: async def _test_task( self, task: ScheduledTask - ) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: + ) -> tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: # This test task will copy the parameters to the result result = None if task.params: @@ -85,7 +85,7 @@ def test_schedule_task(self) -> None: async def _sleeping_task( self, task: ScheduledTask - ) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: + ) -> tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: # Sleep for a second await deferLater(self.reactor, 1, lambda: None) return TaskStatus.COMPLETE, None, None @@ -103,7 +103,7 @@ def test_schedule_lot_of_tasks(self) -> None: ) ) - def get_tasks_of_status(status: TaskStatus) -> List[ScheduledTask]: + def get_tasks_of_status(status: TaskStatus) -> list[ScheduledTask]: tasks = ( self.get_success(self.task_scheduler.get_task(task_id)) for task_id in task_ids @@ -151,7 +151,7 @@ def get_tasks_of_status(status: TaskStatus) -> List[ScheduledTask]: async def _raising_task( self, task: ScheduledTask - ) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: + ) -> tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: raise Exception("raising") def test_schedule_raising_task(self) -> None: @@ -165,7 +165,7 @@ def test_schedule_raising_task(self) -> None: async def _resumable_task( self, task: ScheduledTask - ) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: + ) -> tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: if task.result and "in_progress" in task.result: return TaskStatus.COMPLETE, {"success": True}, None else: @@ -201,7 +201,7 @@ def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: async def _test_task( self, task: ScheduledTask - ) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: + ) -> tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: return (TaskStatus.COMPLETE, None, None) @override_config({"run_background_tasks_on": "worker1"}) diff --git a/tests/utils.py b/tests/utils.py index 051388ee2ee..b3d59a0ebe3 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -24,10 +24,8 @@ import signal from types import FrameType, TracebackType from typing import ( - Dict, Literal, Optional, - Type, TypeVar, Union, overload, @@ -134,7 +132,7 @@ def _cleanup() -> None: @overload def default_config( server_name: str, parse: Literal[False] = ... -) -> Dict[str, object]: ... +) -> dict[str, object]: ... 
@overload @@ -143,7 +141,7 @@ def default_config(server_name: str, parse: Literal[True]) -> HomeServerConfig: def default_config( server_name: str, parse: bool = False -) -> Union[Dict[str, object], HomeServerConfig]: +) -> Union[dict[str, object], HomeServerConfig]: """ Create a reasonable test config. @@ -283,7 +281,7 @@ async def create_room(hs: HomeServer, room_id: str, creator_id: str) -> None: T = TypeVar("T") -def checked_cast(type: Type[T], x: object) -> T: +def checked_cast(type: type[T], x: object) -> T: """A version of typing.cast that is checked at runtime. We have our own function for this for two reasons: @@ -337,7 +335,7 @@ def __enter__(self) -> None: def __exit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> None: From e501f10062e2fff4dd02396960be018bcdf71724 Mon Sep 17 00:00:00 2001 From: Andrew Ferrazzutti Date: Sun, 12 Oct 2025 22:55:54 -0400 Subject: [PATCH 2/6] Add changelog --- changelog.d/19046.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/19046.misc diff --git a/changelog.d/19046.misc b/changelog.d/19046.misc new file mode 100644 index 00000000000..4013804f7f2 --- /dev/null +++ b/changelog.d/19046.misc @@ -0,0 +1 @@ +Use type hinting generics in standard collections, as per PEP 585, added in Python 3.9. From 96d9a5ee5e90117b934ce6abb4ed84eb342a22ed Mon Sep 17 00:00:00 2001 From: Andrew Ferrazzutti Date: Mon, 13 Oct 2025 22:50:22 -0400 Subject: [PATCH 3/6] Add ruff lint rule for PEP585 --- pyproject.toml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 009d1553e56..c0300a22069 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -78,6 +78,8 @@ select = [ "LOG", # flake8-logging-format "G", + # pyupgrade + "UP006", ] [tool.ruff.lint.isort] From 80bdcd496c2af22c36c4fc7c40f011a0aa34d042 Mon Sep 17 00:00:00 2001 From: Andrew Ferrazzutti Date: Mon, 13 Oct 2025 23:29:56 -0400 Subject: [PATCH 4/6] Apply rest of PEP585 updates Do so by locally allowing ruff's UP006 to apply fixes. Do not set that permanently since it is an unsafe rule. 
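
For illustration, this is the kind of rewrite UP006 performs (a hypothetical snippet, not taken from this patch): deprecated typing aliases become builtin generics, while names with no builtin equivalent, such as Optional, stay in typing:

    # before
    from typing import Dict, List, Optional
    def group(items: List[str]) -> Dict[str, List[Optional[str]]]: ...

    # after the UP006 fix
    from typing import Optional
    def group(items: list[str]) -> dict[str, list[Optional[str]]]: ...

The rewrites were applied with a one-off local run along these lines (a hypothetical invocation, not recorded in this patch; depending on the ruff version, --unsafe-fixes may be required for some of the rewrites):

    ruff check --select UP006 --fix --unsafe-fixes .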
--- stubs/sortedcontainers/sorteddict.pyi | 32 ++++++++----------- stubs/sortedcontainers/sortedlist.pyi | 19 +++++------ stubs/sortedcontainers/sortedset.pyi | 12 +++---- stubs/txredisapi.pyi | 8 ++--- synapse/config/_base.pyi | 30 ++++++++--------- synapse/config/modules.py | 4 +-- synapse/config/oidc.py | 4 +-- synapse/config/password_auth_providers.py | 4 +-- synapse/config/server.py | 4 +-- synapse/config/spam_checker.py | 4 +-- synapse/crypto/keyring.py | 4 +-- synapse/events/presence_router.py | 8 ++--- synapse/federation/federation_client.py | 3 +- synapse/federation/federation_server.py | 5 ++- synapse/federation/send_queue.py | 3 +- synapse/federation/transport/client.py | 6 ++-- synapse/handlers/auth.py | 7 ++-- synapse/logging/_remote.py | 6 ++-- synapse/metrics/background_process_metrics.py | 3 +- synapse/push/httppusher.py | 4 +-- synapse/replication/tcp/handler.py | 4 +-- synapse/replication/tcp/protocol.py | 4 +-- synapse/replication/tcp/redis.py | 4 +-- synapse/replication/tcp/streams/_base.py | 3 +- synapse/replication/tcp/streams/events.py | 10 +++--- synapse/rest/admin/experimental_features.py | 4 +-- synapse/rest/admin/users.py | 4 +-- synapse/state/__init__.py | 3 +- synapse/storage/controllers/persist_events.py | 6 ++-- synapse/storage/database.py | 11 +++---- .../databases/main/event_push_actions.py | 3 +- synapse/storage/types.py | 9 +++--- synapse/synapse_rust/acl.pyi | 4 +-- synapse/synapse_rust/events.pyi | 4 +-- synapse/synapse_rust/push.pyi | 8 ++--- synapse/synapse_rust/segmenter.pyi | 4 +-- synapse/util/caches/descriptors.py | 15 ++++----- synapse/util/module_loader.py | 4 +-- synapse/util/ratelimitutils.py | 3 +- tests/federation/transport/test_knocking.py | 6 ++-- tests/handlers/test_appservice.py | 3 +- tests/push/test_email.py | 8 ++--- tests/rest/client/test_third_party_rules.py | 8 ++--- tests/rest/client/utils.py | 7 ++-- tests/server.py | 3 +- tests/state/test_v21.py | 4 +-- tests/test_event_auth.py | 4 +-- 47 files changed, 138 insertions(+), 182 deletions(-) diff --git a/stubs/sortedcontainers/sorteddict.pyi b/stubs/sortedcontainers/sorteddict.pyi index 81f581b034b..a0be3e6349f 100644 --- a/stubs/sortedcontainers/sorteddict.pyi +++ b/stubs/sortedcontainers/sorteddict.pyi @@ -7,18 +7,14 @@ from __future__ import annotations from typing import ( Any, Callable, - Dict, Hashable, ItemsView, Iterable, Iterator, KeysView, - List, Mapping, Optional, Sequence, - Tuple, - Type, TypeVar, Union, ValuesView, @@ -35,14 +31,14 @@ _VT_co = TypeVar("_VT_co", covariant=True) _SD = TypeVar("_SD", bound=SortedDict) _Key = Callable[[_T], Any] -class SortedDict(Dict[_KT, _VT]): +class SortedDict(dict[_KT, _VT]): @overload def __init__(self, **kwargs: _VT) -> None: ... @overload def __init__(self, __map: Mapping[_KT, _VT], **kwargs: _VT) -> None: ... @overload def __init__( - self, __iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT + self, __iterable: Iterable[tuple[_KT, _VT]], **kwargs: _VT ) -> None: ... @overload def __init__(self, __key: _Key[_KT], **kwargs: _VT) -> None: ... @@ -52,7 +48,7 @@ class SortedDict(Dict[_KT, _VT]): ) -> None: ... @overload def __init__( - self, __key: _Key[_KT], __iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT + self, __key: _Key[_KT], __iterable: Iterable[tuple[_KT, _VT]], **kwargs: _VT ) -> None: ... @property def key(self) -> Optional[_Key[_KT]]: ... @@ -84,8 +80,8 @@ class SortedDict(Dict[_KT, _VT]): def pop(self, key: _KT) -> _VT: ... @overload def pop(self, key: _KT, default: _T = ...) -> Union[_VT, _T]: ... 
- def popitem(self, index: int = ...) -> Tuple[_KT, _VT]: ... - def peekitem(self, index: int = ...) -> Tuple[_KT, _VT]: ... + def popitem(self, index: int = ...) -> tuple[_KT, _VT]: ... + def peekitem(self, index: int = ...) -> tuple[_KT, _VT]: ... def setdefault(self, key: _KT, default: Optional[_VT] = ...) -> _VT: ... # Mypy now reports the first overload as an error, because typeshed widened the type # of `__map` to its internal `_typeshed.SupportsKeysAndGetItem` type in @@ -102,9 +98,9 @@ class SortedDict(Dict[_KT, _VT]): # def update(self, **kwargs: _VT) -> None: ... def __reduce__( self, - ) -> Tuple[ - Type[SortedDict[_KT, _VT]], - Tuple[Callable[[_KT], Any], List[Tuple[_KT, _VT]]], + ) -> tuple[ + type[SortedDict[_KT, _VT]], + tuple[Callable[[_KT], Any], list[tuple[_KT, _VT]]], ]: ... def __repr__(self) -> str: ... def _check(self) -> None: ... @@ -121,20 +117,20 @@ class SortedKeysView(KeysView[_KT_co], Sequence[_KT_co]): @overload def __getitem__(self, index: int) -> _KT_co: ... @overload - def __getitem__(self, index: slice) -> List[_KT_co]: ... + def __getitem__(self, index: slice) -> list[_KT_co]: ... def __delitem__(self, index: Union[int, slice]) -> None: ... -class SortedItemsView(ItemsView[_KT_co, _VT_co], Sequence[Tuple[_KT_co, _VT_co]]): - def __iter__(self) -> Iterator[Tuple[_KT_co, _VT_co]]: ... +class SortedItemsView(ItemsView[_KT_co, _VT_co], Sequence[tuple[_KT_co, _VT_co]]): + def __iter__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ... @overload - def __getitem__(self, index: int) -> Tuple[_KT_co, _VT_co]: ... + def __getitem__(self, index: int) -> tuple[_KT_co, _VT_co]: ... @overload - def __getitem__(self, index: slice) -> List[Tuple[_KT_co, _VT_co]]: ... + def __getitem__(self, index: slice) -> list[tuple[_KT_co, _VT_co]]: ... def __delitem__(self, index: Union[int, slice]) -> None: ... class SortedValuesView(ValuesView[_VT_co], Sequence[_VT_co]): @overload def __getitem__(self, index: int) -> _VT_co: ... @overload - def __getitem__(self, index: slice) -> List[_VT_co]: ... + def __getitem__(self, index: slice) -> list[_VT_co]: ... def __delitem__(self, index: Union[int, slice]) -> None: ... diff --git a/stubs/sortedcontainers/sortedlist.pyi b/stubs/sortedcontainers/sortedlist.pyi index 0e745c0a79a..25ceb74cc91 100644 --- a/stubs/sortedcontainers/sortedlist.pyi +++ b/stubs/sortedcontainers/sortedlist.pyi @@ -9,12 +9,9 @@ from typing import ( Callable, Iterable, Iterator, - List, MutableSequence, Optional, Sequence, - Tuple, - Type, TypeVar, Union, overload, @@ -37,11 +34,11 @@ class SortedList(MutableSequence[_T]): ): ... # NB: currently mypy does not honour return type, see mypy #3307 @overload - def __new__(cls: Type[_SL], iterable: None, key: None) -> _SL: ... + def __new__(cls: type[_SL], iterable: None, key: None) -> _SL: ... @overload - def __new__(cls: Type[_SL], iterable: None, key: _Key[_T]) -> SortedKeyList[_T]: ... + def __new__(cls: type[_SL], iterable: None, key: _Key[_T]) -> SortedKeyList[_T]: ... @overload - def __new__(cls: Type[_SL], iterable: Iterable[_T], key: None) -> _SL: ... + def __new__(cls: type[_SL], iterable: Iterable[_T], key: None) -> _SL: ... @overload def __new__(cls, iterable: Iterable[_T], key: _Key[_T]) -> SortedKeyList[_T]: ... @property @@ -64,11 +61,11 @@ class SortedList(MutableSequence[_T]): @overload def __getitem__(self, index: int) -> _T: ... @overload - def __getitem__(self, index: slice) -> List[_T]: ... + def __getitem__(self, index: slice) -> list[_T]: ... @overload def _getitem(self, index: int) -> _T: ... 
@overload - def _getitem(self, index: slice) -> List[_T]: ... + def _getitem(self, index: slice) -> list[_T]: ... @overload def __setitem__(self, index: int, value: _T) -> None: ... @overload @@ -95,7 +92,7 @@ class SortedList(MutableSequence[_T]): self, minimum: Optional[int] = ..., maximum: Optional[int] = ..., - inclusive: Tuple[bool, bool] = ..., + inclusive: tuple[bool, bool] = ..., reverse: bool = ..., ) -> Iterator[_T]: ... def bisect_left(self, value: _T) -> int: ... @@ -151,14 +148,14 @@ class SortedKeyList(SortedList[_T]): self, minimum: Optional[int] = ..., maximum: Optional[int] = ..., - inclusive: Tuple[bool, bool] = ..., + inclusive: tuple[bool, bool] = ..., reverse: bool = ..., ) -> Iterator[_T]: ... def irange_key( self, min_key: Optional[Any] = ..., max_key: Optional[Any] = ..., - inclusive: Tuple[bool, bool] = ..., + inclusive: tuple[bool, bool] = ..., reserve: bool = ..., ) -> Iterator[_T]: ... def bisect_left(self, value: _T) -> int: ... diff --git a/stubs/sortedcontainers/sortedset.pyi b/stubs/sortedcontainers/sortedset.pyi index 6db11eacbed..a3593ca579c 100644 --- a/stubs/sortedcontainers/sortedset.pyi +++ b/stubs/sortedcontainers/sortedset.pyi @@ -10,13 +10,9 @@ from typing import ( Hashable, Iterable, Iterator, - List, MutableSet, Optional, Sequence, - Set, - Tuple, - Type, TypeVar, Union, overload, @@ -37,7 +33,7 @@ class SortedSet(MutableSet[_T], Sequence[_T]): ) -> None: ... @classmethod def _fromset( - cls, values: Set[_T], key: Optional[_Key[_T]] = ... + cls, values: set[_T], key: Optional[_Key[_T]] = ... ) -> SortedSet[_T]: ... @property def key(self) -> Optional[_Key[_T]]: ... @@ -45,7 +41,7 @@ class SortedSet(MutableSet[_T], Sequence[_T]): @overload def __getitem__(self, index: int) -> _T: ... @overload - def __getitem__(self, index: slice) -> List[_T]: ... + def __getitem__(self, index: slice) -> list[_T]: ... def __delitem__(self, index: Union[int, slice]) -> None: ... def __eq__(self, other: Any) -> bool: ... def __ne__(self, other: Any) -> bool: ... @@ -94,7 +90,7 @@ class SortedSet(MutableSet[_T], Sequence[_T]): def _update(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ... def __reduce__( self, - ) -> Tuple[Type[SortedSet[_T]], Set[_T], Callable[[_T], Any]]: ... + ) -> tuple[type[SortedSet[_T]], set[_T], Callable[[_T], Any]]: ... def __repr__(self) -> str: ... def _check(self) -> None: ... def bisect_left(self, value: _T) -> int: ... @@ -109,7 +105,7 @@ class SortedSet(MutableSet[_T], Sequence[_T]): self, minimum: Optional[_T] = ..., maximum: Optional[_T] = ..., - inclusive: Tuple[bool, bool] = ..., + inclusive: tuple[bool, bool] = ..., reverse: bool = ..., ) -> Iterator[_T]: ... def index( diff --git a/stubs/txredisapi.pyi b/stubs/txredisapi.pyi index c9a4114b1eb..d2539aa37df 100644 --- a/stubs/txredisapi.pyi +++ b/stubs/txredisapi.pyi @@ -15,7 +15,7 @@ """Contains *incomplete* type hints for txredisapi.""" -from typing import Any, List, Optional, Type, Union +from typing import Any, Optional, Union from twisted.internet import protocol from twisted.internet.defer import Deferred @@ -39,7 +39,7 @@ class RedisProtocol(protocol.Protocol): class SubscriberProtocol(RedisProtocol): def __init__(self, *args: object, **kwargs: object): ... password: Optional[str] - def subscribe(self, channels: Union[str, List[str]]) -> "Deferred[None]": ... + def subscribe(self, channels: Union[str, list[str]]) -> "Deferred[None]": ... def connectionMade(self) -> None: ... 
# type-ignore: twisted.internet.protocol.Protocol provides a default argument for # `reason`. txredisapi's LineReceiver Protocol doesn't. But that's fine: it's what's @@ -69,7 +69,7 @@ class UnixConnectionHandler(ConnectionHandler): ... class RedisFactory(protocol.ReconnectingClientFactory): continueTrying: bool handler: ConnectionHandler - pool: List[RedisProtocol] + pool: list[RedisProtocol] replyTimeout: Optional[int] def __init__( self, @@ -77,7 +77,7 @@ class RedisFactory(protocol.ReconnectingClientFactory): dbid: Optional[int], poolsize: int, isLazy: bool = False, - handler: Type = ConnectionHandler, + handler: type = ConnectionHandler, charset: str = "utf-8", password: Optional[str] = None, replyTimeout: Optional[int] = None, diff --git a/synapse/config/_base.pyi b/synapse/config/_base.pyi index 02543da3884..1a9cb7db470 100644 --- a/synapse/config/_base.pyi +++ b/synapse/config/_base.pyi @@ -2,15 +2,11 @@ import argparse from typing import ( Any, Collection, - Dict, Iterable, Iterator, - List, Literal, MutableMapping, Optional, - Tuple, - Type, TypeVar, Union, overload, @@ -129,8 +125,8 @@ class RootConfig: mas: mas.MasConfig matrix_rtc: matrixrtc.MatrixRtcConfig - config_classes: List[Type["Config"]] = ... - config_files: List[str] + config_classes: list[type["Config"]] = ... + config_files: list[str] def __init__(self, config_files: Collection[str] = ...) -> None: ... def invoke_all( self, func_name: str, *args: Any, **kwargs: Any @@ -139,7 +135,7 @@ class RootConfig: def invoke_all_static(cls, func_name: str, *args: Any, **kwargs: Any) -> None: ... def parse_config_dict( self, - config_dict: Dict[str, Any], + config_dict: dict[str, Any], config_dir_path: str, data_dir_path: str, allow_secrets_in_config: bool = ..., @@ -158,11 +154,11 @@ class RootConfig: ) -> str: ... @classmethod def load_or_generate_config( - cls: Type[TRootConfig], description: str, argv_options: List[str] + cls: type[TRootConfig], description: str, argv_options: list[str] ) -> Optional[TRootConfig]: ... @classmethod def load_config( - cls: Type[TRootConfig], description: str, argv_options: List[str] + cls: type[TRootConfig], description: str, argv_options: list[str] ) -> TRootConfig: ... @classmethod def add_arguments_to_parser( @@ -170,8 +166,8 @@ class RootConfig: ) -> None: ... @classmethod def load_config_with_parser( - cls: Type[TRootConfig], parser: argparse.ArgumentParser, argv_options: List[str] - ) -> Tuple[TRootConfig, argparse.Namespace]: ... + cls: type[TRootConfig], parser: argparse.ArgumentParser, argv_options: list[str] + ) -> tuple[TRootConfig, argparse.Namespace]: ... def generate_missing_files( self, config_dict: dict, config_dir_path: str ) -> None: ... @@ -203,16 +199,16 @@ class Config: def read_template(self, filenames: str) -> jinja2.Template: ... def read_templates( self, - filenames: List[str], + filenames: list[str], custom_template_directories: Optional[Iterable[str]] = None, - ) -> List[jinja2.Template]: ... + ) -> list[jinja2.Template]: ... -def read_config_files(config_files: Iterable[str]) -> Dict[str, Any]: ... -def find_config_files(search_paths: List[str]) -> List[str]: ... +def read_config_files(config_files: Iterable[str]) -> dict[str, Any]: ... +def find_config_files(search_paths: list[str]) -> list[str]: ... class ShardedWorkerHandlingConfig: - instances: List[str] - def __init__(self, instances: List[str]) -> None: ... + instances: list[str] + def __init__(self, instances: list[str]) -> None: ... def should_handle(self, instance_name: str, key: str) -> bool: ... 
# noqa: F811 class RoutableShardedWorkerHandlingConfig(ShardedWorkerHandlingConfig): diff --git a/synapse/config/modules.py b/synapse/config/modules.py index 42bcffd1839..17319c9e374 100644 --- a/synapse/config/modules.py +++ b/synapse/config/modules.py @@ -18,7 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # -from typing import Any, Dict +from typing import Any from synapse.config._base import Config, ConfigError from synapse.types import JsonDict @@ -29,7 +29,7 @@ class ModulesConfig(Config): section = "modules" def read_config(self, config: JsonDict, **kwargs: Any) -> None: - self.loaded_modules: list[tuple[Any, Dict]] = [] + self.loaded_modules: list[tuple[Any, dict]] = [] configured_modules = config.get("modules") or [] for i, module in enumerate(configured_modules): diff --git a/synapse/config/oidc.py b/synapse/config/oidc.py index 3179635220d..ada89bb8bc8 100644 --- a/synapse/config/oidc.py +++ b/synapse/config/oidc.py @@ -21,7 +21,7 @@ # from collections import Counter -from typing import Any, Collection, Iterable, Mapping, Optional, Type +from typing import Any, Collection, Iterable, Mapping, Optional import attr @@ -491,7 +491,7 @@ class OidcProviderConfig: allow_existing_users: bool # the class of the user mapping provider - user_mapping_provider_class: Type + user_mapping_provider_class: type # the config of the user mapping provider user_mapping_provider_config: Any diff --git a/synapse/config/password_auth_providers.py b/synapse/config/password_auth_providers.py index e01fa9d07b7..c2894f58dc9 100644 --- a/synapse/config/password_auth_providers.py +++ b/synapse/config/password_auth_providers.py @@ -19,7 +19,7 @@ # # -from typing import Any, Type +from typing import Any from synapse.types import JsonDict from synapse.util.module_loader import load_module @@ -56,7 +56,7 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None: for backwards compatibility. 
""" - self.password_providers: list[tuple[Type, Any]] = [] + self.password_providers: list[tuple[type, Any]] = [] providers = [] # We want to be backwards compatible with the old `ldap_config` diff --git a/synapse/config/server.py b/synapse/config/server.py index a486f16542c..662ed24a132 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -25,7 +25,7 @@ import os.path import urllib.parse from textwrap import indent -from typing import Any, Iterable, List, Optional, TypedDict, Union +from typing import Any, Iterable, Optional, TypedDict, Union from urllib.request import getproxies_environment import attr @@ -239,7 +239,7 @@ class TCPListenerConfig: """Object describing the configuration of a single TCP listener.""" port: int = attr.ib(validator=attr.validators.instance_of(int)) - bind_addresses: list[str] = attr.ib(validator=attr.validators.instance_of(List)) + bind_addresses: list[str] = attr.ib(validator=attr.validators.instance_of(list)) type: str = attr.ib(validator=attr.validators.in_(KNOWN_LISTENER_TYPES)) tls: bool = False diff --git a/synapse/config/spam_checker.py b/synapse/config/spam_checker.py index 0a8b3d3093e..02d7cee88fc 100644 --- a/synapse/config/spam_checker.py +++ b/synapse/config/spam_checker.py @@ -19,7 +19,7 @@ # import logging -from typing import Any, Dict +from typing import Any from synapse.config import ConfigError from synapse.types import JsonDict @@ -41,7 +41,7 @@ class SpamCheckerConfig(Config): section = "spamchecker" def read_config(self, config: JsonDict, **kwargs: Any) -> None: - self.spam_checkers: list[tuple[Any, Dict]] = [] + self.spam_checkers: list[tuple[Any, dict]] = [] spam_checkers = config.get("spam_checker") or [] if isinstance(spam_checkers, dict): diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index e038dd54165..24a693fdb1c 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -21,7 +21,7 @@ import abc import logging -from typing import TYPE_CHECKING, Callable, Dict, Iterable, Optional +from typing import TYPE_CHECKING, Callable, Iterable, Optional import attr from signedjson.key import ( @@ -644,7 +644,7 @@ async def _fetch_keys( ) -> dict[str, dict[str, FetchKeyResult]]: """see KeyFetcher._fetch_keys""" - async def get_key(key_server: TrustedKeyServer) -> Dict: + async def get_key(key_server: TrustedKeyServer) -> dict: try: return await self.get_server_verify_key_v2_indirect( keys_to_fetch, key_server diff --git a/synapse/events/presence_router.py b/synapse/events/presence_router.py index 006002d44ed..39dd7ee2b3c 100644 --- a/synapse/events/presence_router.py +++ b/synapse/events/presence_router.py @@ -24,10 +24,8 @@ Any, Awaitable, Callable, - Dict, Iterable, Optional, - Set, TypeVar, Union, ) @@ -173,7 +171,7 @@ async def get_users_for_states( logger.warning("Failed to run module API callback %s: %s", callback, e) continue - if not isinstance(result, Dict): + if not isinstance(result, dict): logger.warning( "Wrong type returned by module API callback %s: %s, expected Dict", callback, @@ -182,7 +180,7 @@ async def get_users_for_states( continue for key, new_entries in result.items(): - if not isinstance(new_entries, Set): + if not isinstance(new_entries, set): logger.warning( "Wrong type returned by module API callback %s: %s, expected Set", callback, @@ -233,7 +231,7 @@ async def get_interested_users(self, user_id: str) -> Union[set[str], str]: if result == PresenceRouter.ALL_USERS: return PresenceRouter.ALL_USERS - if not isinstance(result, Set): + if not isinstance(result, set): 
logger.warning( "Wrong type returned by module API callback %s: %s, expected set", callback, diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index c149366395c..cb2fa59f545 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -32,7 +32,6 @@ Callable, Collection, Container, - Dict, Iterable, Mapping, Optional, @@ -1566,7 +1565,7 @@ async def get_public_rooms( remote_server: str, limit: Optional[int] = None, since_token: Optional[str] = None, - search_filter: Optional[Dict] = None, + search_filter: Optional[dict] = None, include_all_networks: bool = False, third_party_instance_id: Optional[str] = None, ) -> JsonDict: diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 2046808225f..6e14f4a0496 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -27,7 +27,6 @@ Awaitable, Callable, Collection, - Dict, Mapping, Optional, Union, @@ -1361,13 +1360,13 @@ async def _process_incoming_pdus_in_room_inner( lock = new_lock async def exchange_third_party_invite( - self, sender_user_id: str, target_user_id: str, room_id: str, signed: Dict + self, sender_user_id: str, target_user_id: str, room_id: str, signed: dict ) -> None: await self.handler.exchange_third_party_invite( sender_user_id, target_user_id, room_id, signed ) - async def on_exchange_third_party_invite_request(self, event_dict: Dict) -> None: + async def on_exchange_third_party_invite_request(self, event_dict: dict) -> None: await self.handler.on_exchange_third_party_invite_request(event_dict) async def check_server_matches_acl(self, server_name: str, room_id: str) -> None: diff --git a/synapse/federation/send_queue.py b/synapse/federation/send_queue.py index 634f7f3f0d6..80f31798e8b 100644 --- a/synapse/federation/send_queue.py +++ b/synapse/federation/send_queue.py @@ -44,7 +44,6 @@ Iterable, Optional, Sized, - Tuple, ) import attr @@ -292,7 +291,7 @@ def federation_ack(self, instance_name: str, token: int) -> None: async def get_replication_rows( self, instance_name: str, from_token: int, to_token: int, target_row_count: int - ) -> tuple[list[tuple[int, Tuple]], int, bool]: + ) -> tuple[list[tuple[int, tuple]], int, bool]: """Get rows to be sent over federation between the two tokens Args: diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py index a180cd5a553..ee15b4804e3 100644 --- a/synapse/federation/transport/client.py +++ b/synapse/federation/transport/client.py @@ -28,10 +28,8 @@ BinaryIO, Callable, Collection, - Dict, Generator, Iterable, - List, Mapping, Optional, Union, @@ -237,7 +235,7 @@ async def backfill( async def timestamp_to_event( self, destination: str, room_id: str, timestamp: int, direction: Direction - ) -> Union[JsonDict, List]: + ) -> Union[JsonDict, list]: """ Calls a remote federating server at `destination` asking for their closest event to the given timestamp in the given direction. 
@@ -532,7 +530,7 @@ async def get_public_rooms( remote_server: str, limit: Optional[int] = None, since_token: Optional[str] = None, - search_filter: Optional[Dict] = None, + search_filter: Optional[dict] = None, include_all_networks: bool = False, third_party_instance_id: Optional[str] = None, ) -> JsonDict: diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 622783d78ea..2dde8a83a3b 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -31,12 +31,9 @@ Any, Awaitable, Callable, - Dict, Iterable, Mapping, Optional, - Tuple, - Type, Union, cast, ) @@ -1926,7 +1923,7 @@ def load_legacy_password_auth_providers(hs: "HomeServer") -> None: def load_single_legacy_password_auth_provider( - module: Type, + module: type, config: JsonDict, api: "ModuleApi", ) -> None: @@ -2011,7 +2008,7 @@ async def wrapped_check_3pid_auth( return wrapped_check_3pid_auth - def run(*args: Tuple, **kwargs: Dict) -> Awaitable: + def run(*args: tuple, **kwargs: dict) -> Awaitable: # mypy doesn't do well across function boundaries so we need to tell it # f is definitely not None. assert f is not None diff --git a/synapse/logging/_remote.py b/synapse/logging/_remote.py index ac34fa6525d..a3444221a0c 100644 --- a/synapse/logging/_remote.py +++ b/synapse/logging/_remote.py @@ -25,7 +25,7 @@ from collections import deque from ipaddress import IPv4Address, IPv6Address, ip_address from math import floor -from typing import Callable, Deque, Optional +from typing import Callable, Optional import attr from zope.interface import implementer @@ -66,7 +66,7 @@ class LogProducer: # (connected and registerProducer) which are part of the implementation. transport: Connection _format: Callable[[logging.LogRecord], str] - _buffer: Deque[logging.LogRecord] + _buffer: deque[logging.LogRecord] _paused: bool = attr.ib(default=False, init=False) def pauseProducing(self) -> None: @@ -120,7 +120,7 @@ def __init__( self.port = port self.maximum_buffer = maximum_buffer - self._buffer: Deque[logging.LogRecord] = deque() + self._buffer: deque[logging.LogRecord] = deque() self._connection_waiter: Optional[Deferred] = None self._producer: Optional[LogProducer] = None diff --git a/synapse/metrics/background_process_metrics.py b/synapse/metrics/background_process_metrics.py index b457369211d..c871598680a 100644 --- a/synapse/metrics/background_process_metrics.py +++ b/synapse/metrics/background_process_metrics.py @@ -33,7 +33,6 @@ Iterable, Optional, Protocol, - Set, TypeVar, Union, ) @@ -142,7 +141,7 @@ # background processes stacking up behind a lock or linearizer, where we then # only need to iterate over and update metrics for the process that have # actually been active and can ignore the idle ones. 
-_background_processes_active_since_last_scrape: "Set[_BackgroundProcess]" = set() +_background_processes_active_since_last_scrape: "set[_BackgroundProcess]" = set() # A lock that covers the above set and dict _bg_metrics_lock = threading.Lock() diff --git a/synapse/push/httppusher.py b/synapse/push/httppusher.py index c6c19ab15a7..8df106b8593 100644 --- a/synapse/push/httppusher.py +++ b/synapse/push/httppusher.py @@ -21,7 +21,7 @@ import logging import random import urllib.parse -from typing import TYPE_CHECKING, Dict, Optional, Union +from typing import TYPE_CHECKING, Optional, Union from prometheus_client import Counter @@ -68,7 +68,7 @@ ) -def tweaks_for_actions(actions: list[Union[str, Dict]]) -> JsonMapping: +def tweaks_for_actions(actions: list[Union[str, dict]]) -> JsonMapping: """ Converts a list of actions into a `tweaks` dict (which can then be passed to the push gateway). diff --git a/synapse/replication/tcp/handler.py b/synapse/replication/tcp/handler.py index 8ef3d0fcc20..bd1ee5ff9da 100644 --- a/synapse/replication/tcp/handler.py +++ b/synapse/replication/tcp/handler.py @@ -20,11 +20,11 @@ # # import logging +from collections import deque from typing import ( TYPE_CHECKING, Any, Awaitable, - Deque, Iterable, Iterator, Optional, @@ -115,7 +115,7 @@ # the type of the entries in _command_queues_by_stream -_StreamCommandQueue = Deque[ +_StreamCommandQueue = deque[ tuple[Union[RdataCommand, PositionCommand], IReplicationConnection] ] diff --git a/synapse/replication/tcp/protocol.py b/synapse/replication/tcp/protocol.py index f3a54ed3ffe..733643cb645 100644 --- a/synapse/replication/tcp/protocol.py +++ b/synapse/replication/tcp/protocol.py @@ -28,7 +28,7 @@ import logging import struct from inspect import isawaitable -from typing import TYPE_CHECKING, Any, Collection, List, Optional +from typing import TYPE_CHECKING, Any, Collection, Optional from prometheus_client import Counter from zope.interface import Interface, implementer @@ -82,7 +82,7 @@ # A list of all connected protocols. This allows us to send metrics about the # connections. -connected_connections: "List[BaseReplicationStreamProtocol]" = [] +connected_connections: "list[BaseReplicationStreamProtocol]" = [] logger = logging.getLogger(__name__) diff --git a/synapse/replication/tcp/redis.py b/synapse/replication/tcp/redis.py index ef8a07ea695..4448117d627 100644 --- a/synapse/replication/tcp/redis.py +++ b/synapse/replication/tcp/redis.py @@ -21,7 +21,7 @@ import logging from inspect import isawaitable -from typing import TYPE_CHECKING, Any, Generic, Optional, Type, TypeVar, cast +from typing import TYPE_CHECKING, Any, Generic, Optional, TypeVar, cast import attr from txredisapi import ( @@ -296,7 +296,7 @@ def __init__( dbid: Optional[int], poolsize: int, isLazy: bool = False, - handler: Type = ConnectionHandler, + handler: type = ConnectionHandler, charset: str = "utf-8", password: Optional[str] = None, replyTimeout: int = 30, diff --git a/synapse/replication/tcp/streams/_base.py b/synapse/replication/tcp/streams/_base.py index a6e016be278..d80bdb9b350 100644 --- a/synapse/replication/tcp/streams/_base.py +++ b/synapse/replication/tcp/streams/_base.py @@ -27,7 +27,6 @@ Awaitable, Callable, Optional, - Tuple, TypeVar, ) @@ -55,7 +54,7 @@ # parsing with Stream.parse_row (which turns it into a `ROW_TYPE`). Normally it's # just a row from a database query, though this is dependent on the stream in question. 
# -StreamRow = TypeVar("StreamRow", bound=Tuple) +StreamRow = TypeVar("StreamRow", bound=tuple) # The type returned by the update_function of a stream, as well as get_updates(), # get_updates_since, etc. diff --git a/synapse/replication/tcp/streams/events.py b/synapse/replication/tcp/streams/events.py index 42eaa0c0494..a6314b0c7dd 100644 --- a/synapse/replication/tcp/streams/events.py +++ b/synapse/replication/tcp/streams/events.py @@ -20,7 +20,7 @@ # import heapq from collections import defaultdict -from typing import TYPE_CHECKING, Iterable, Optional, Tuple, TypeVar, cast +from typing import TYPE_CHECKING, Iterable, Optional, TypeVar, cast import attr @@ -237,7 +237,7 @@ async def _update_function( # distinguish the row type). At the same time, we can limit the event_rows # to the max stream_id from state_rows. - event_updates: Iterable[tuple[int, Tuple]] = ( + event_updates: Iterable[tuple[int, tuple]] = ( (stream_id, (EventsStreamEventRow.TypeId, rest)) for (stream_id, *rest) in event_rows if stream_id <= upper_limit @@ -254,20 +254,20 @@ async def _update_function( for room_id, stream_ids in state_updates_by_room.items() if len(stream_ids) >= _MAX_STATE_UPDATES_PER_ROOM ] - state_all_updates: Iterable[tuple[int, Tuple]] = ( + state_all_updates: Iterable[tuple[int, tuple]] = ( (max_stream_id, (EventsStreamAllStateRow.TypeId, (room_id,))) for (max_stream_id, room_id) in state_all_rows ) # Any remaining state updates are sent individually. state_all_rooms = {room_id for _, room_id in state_all_rows} - state_updates: Iterable[tuple[int, Tuple]] = ( + state_updates: Iterable[tuple[int, tuple]] = ( (stream_id, (EventsStreamCurrentStateRow.TypeId, rest)) for (stream_id, *rest) in state_rows if rest[0] not in state_all_rooms ) - ex_outliers_updates: Iterable[tuple[int, Tuple]] = ( + ex_outliers_updates: Iterable[tuple[int, tuple]] = ( (stream_id, (EventsStreamEventRow.TypeId, rest)) for (stream_id, *rest) in ex_outliers_rows ) diff --git a/synapse/rest/admin/experimental_features.py b/synapse/rest/admin/experimental_features.py index 1b8d24615e6..abdb9377932 100644 --- a/synapse/rest/admin/experimental_features.py +++ b/synapse/rest/admin/experimental_features.py @@ -22,7 +22,7 @@ from enum import Enum from http import HTTPStatus -from typing import TYPE_CHECKING, Dict +from typing import TYPE_CHECKING from synapse.api.errors import SynapseError from synapse.http.servlet import RestServlet, parse_json_object_from_request @@ -99,7 +99,7 @@ async def on_GET( async def on_PUT( self, request: SynapseRequest, user_id: str - ) -> tuple[HTTPStatus, Dict]: + ) -> tuple[HTTPStatus, dict]: """ Enable or disable the provided features for the requester """ diff --git a/synapse/rest/admin/users.py b/synapse/rest/admin/users.py index 26d33daa0b5..e29b0d36e02 100644 --- a/synapse/rest/admin/users.py +++ b/synapse/rest/admin/users.py @@ -349,14 +349,14 @@ async def on_PUT( "'approved' parameter is not of type boolean", ) - # convert List[dict[str, str]] into List[tuple[str, str]] + # convert list[dict[str, str]] into list[tuple[str, str]] if external_ids is not None: new_external_ids = [ (external_id["auth_provider"], external_id["external_id"]) for external_id in external_ids ] - # convert List[dict[str, str]] into Set[tuple[str, str]] + # convert list[dict[str, str]] into set[tuple[str, str]] if threepids is not None: new_threepids = { (threepid["medium"], threepid["address"]) for threepid in threepids diff --git a/synapse/state/__init__.py b/synapse/state/__init__.py index 5b861f55342..991e1f847a5 100644 
diff --git a/synapse/state/__init__.py b/synapse/state/__init__.py
index 5b861f55342..991e1f847a5 100644
--- a/synapse/state/__init__.py
+++ b/synapse/state/__init__.py
@@ -26,7 +26,6 @@
     Any,
     Awaitable,
     Callable,
-    DefaultDict,
     Mapping,
     Optional,
     Sequence,
@@ -660,7 +659,7 @@ def __init__(self, hs: "HomeServer"):
         #

         # tracks the amount of work done on state res per room
-        self._state_res_metrics: DefaultDict[str, _StateResMetrics] = defaultdict(
+        self._state_res_metrics: defaultdict[str, _StateResMetrics] = defaultdict(
             _StateResMetrics
         )
diff --git a/synapse/storage/controllers/persist_events.py b/synapse/storage/controllers/persist_events.py
index e02dfe2c093..0daf4830d9b 100644
--- a/synapse/storage/controllers/persist_events.py
+++ b/synapse/storage/controllers/persist_events.py
@@ -31,11 +31,9 @@
     Callable,
     ClassVar,
     Collection,
-    Deque,
     Generator,
     Generic,
     Iterable,
-    List,
     Optional,
     TypeVar,
     Union,
@@ -175,7 +173,7 @@ class _EventPersistQueueItem(Generic[_PersistResult]):
     task: _EventPersistQueueTask
     deferred: ObservableDeferred[_PersistResult]

-    parent_opentracing_span_contexts: List = attr.ib(factory=list)
+    parent_opentracing_span_contexts: list = attr.ib(factory=list)
     """A list of opentracing spans waiting for this batch"""

     opentracing_span_context: Any = None
@@ -205,7 +203,7 @@ def __init__(
         """
         self.server_name = server_name
         self.hs = hs
-        self._event_persist_queues: dict[str, Deque[_EventPersistQueueItem]] = {}
+        self._event_persist_queues: dict[str, deque[_EventPersistQueueItem]] = {}
         self._currently_persisting_rooms: set[str] = set()
         self._per_item_callback = per_item_callback
diff --git a/synapse/storage/database.py b/synapse/storage/database.py
index 795a036ff22..764ca9f2291 100644
--- a/synapse/storage/database.py
+++ b/synapse/storage/database.py
@@ -37,7 +37,6 @@
     Mapping,
     Optional,
     Sequence,
-    Tuple,
     TypeVar,
     cast,
     overload,
@@ -380,16 +379,16 @@ def call_on_exception(
             assert self.exception_callbacks is not None
             self.exception_callbacks.append((callback, args, kwargs))

-    def fetchone(self) -> Optional[Tuple]:
+    def fetchone(self) -> Optional[tuple]:
         return self.txn.fetchone()

-    def fetchmany(self, size: Optional[int] = None) -> list[Tuple]:
+    def fetchmany(self, size: Optional[int] = None) -> list[tuple]:
         return self.txn.fetchmany(size=size)

-    def fetchall(self) -> list[Tuple]:
+    def fetchall(self) -> list[tuple]:
         return self.txn.fetchall()

-    def __iter__(self) -> Iterator[Tuple]:
+    def __iter__(self) -> Iterator[tuple]:
         return self.txn.__iter__()

     @property
@@ -432,7 +431,7 @@ def execute_values(
         values: Iterable[Iterable[Any]],
         template: Optional[str] = None,
         fetch: bool = True,
-    ) -> list[Tuple]:
+    ) -> list[tuple]:
         """Corresponds to psycopg2.extras.execute_values. Only available when
         using postgres.
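The bare `tuple` in these cursor signatures is the drop-in spelling of bare `typing.Tuple`: a tuple of unspecified length and element types. A small sketch against the stdlib sqlite3 DB-API (illustrative only; Synapse wraps its own cursor type):

import sqlite3
from typing import Optional


def fetch_first(cur: sqlite3.Cursor) -> Optional[tuple]:
    # A bare `tuple` annotation matches rows of any width, as bare `Tuple` did.
    return cur.fetchone()


def fetch_rows(cur: sqlite3.Cursor) -> list[tuple]:
    return cur.fetchall()


conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE t (a INTEGER, b TEXT)")
conn.execute("INSERT INTO t VALUES (1, 'x'), (2, 'y')")
print(fetch_first(conn.execute("SELECT * FROM t ORDER BY a")))  # (1, 'x')
print(fetch_rows(conn.execute("SELECT * FROM t ORDER BY a")))   # [(1, 'x'), (2, 'y')]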
diff --git a/synapse/storage/databases/main/event_push_actions.py b/synapse/storage/databases/main/event_push_actions.py
index 9c624c739af..d65ab82fffc 100644
--- a/synapse/storage/databases/main/event_push_actions.py
+++ b/synapse/storage/databases/main/event_push_actions.py
@@ -86,7 +86,6 @@
     Collection,
     Mapping,
     Optional,
-    Tuple,
     Union,
     cast,
 )
@@ -1467,7 +1466,7 @@ def _handle_new_receipts_for_notifs_txn(self, txn: LoggingTransaction) -> bool:
                 continue

             thread_clause = ""
-            thread_args: Tuple = ()
+            thread_args: tuple = ()
             if thread_id is not None:
                 thread_clause = "AND thread_id = ?"
                 thread_args = (thread_id,)
diff --git a/synapse/storage/types.py b/synapse/storage/types.py
index 69b29717912..fedf10dfc09 100644
--- a/synapse/storage/types.py
+++ b/synapse/storage/types.py
@@ -27,7 +27,6 @@
     Optional,
     Protocol,
     Sequence,
-    Tuple,
     Union,
 )

@@ -45,11 +44,11 @@ def executemany(
         self, sql: str, parameters: Sequence[SQLQueryParameters]
     ) -> Any: ...

-    def fetchone(self) -> Optional[Tuple]: ...
+    def fetchone(self) -> Optional[tuple]: ...

-    def fetchmany(self, size: Optional[int] = ...) -> list[Tuple]: ...
+    def fetchmany(self, size: Optional[int] = ...) -> list[tuple]: ...

-    def fetchall(self) -> list[Tuple]: ...
+    def fetchall(self) -> list[tuple]: ...

     @property
     def description(
@@ -64,7 +63,7 @@ def description(
     def rowcount(self) -> int:
         return 0

-    def __iter__(self) -> Iterator[Tuple]: ...
+    def __iter__(self) -> Iterator[tuple]: ...

     def close(self) -> None: ...
diff --git a/synapse/synapse_rust/acl.pyi b/synapse/synapse_rust/acl.pyi
index 985994d3136..934d0de80a1 100644
--- a/synapse/synapse_rust/acl.pyi
+++ b/synapse/synapse_rust/acl.pyi
@@ -13,10 +13,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from typing import List
-
 class ServerAclEvaluator:
     def __init__(
-        self, allow_ip_literals: bool, allow: List[str], deny: List[str]
+        self, allow_ip_literals: bool, allow: list[str], deny: list[str]
     ) -> None: ...
     def server_matches_acl_event(self, server_name: str) -> bool: ...
diff --git a/synapse/synapse_rust/events.pyi b/synapse/synapse_rust/events.pyi
index a82211283b4..08c976121a4 100644
--- a/synapse/synapse_rust/events.pyi
+++ b/synapse/synapse_rust/events.pyi
@@ -10,7 +10,7 @@
 # See the GNU Affero General Public License for more details:
 # <https://www.gnu.org/licenses/agpl-3.0.html>.

-from typing import List, Mapping, Optional, Tuple
+from typing import Mapping, Optional

 from synapse.types import JsonDict

@@ -115,7 +115,7 @@ def event_visible_to_server(
     history_visibility: str,
     erased_senders: Mapping[str, bool],
     partial_state_invisible: bool,
-    memberships: List[Tuple[str, str]],
+    memberships: list[tuple[str, str]],
 ) -> bool:
     """Determine whether the server is allowed to see the unredacted event.
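Builtin generics are equally valid in `Protocol` definitions and in `.pyi` stubs like the ones above. A runnable sketch of the structural-typing idea behind `synapse.storage.types.Cursor` (heavily simplified, not the real interface):

from typing import Iterator, Optional, Protocol


class Cursor(Protocol):
    # Builtin generics work in Protocol methods and stub files alike.
    def fetchone(self) -> Optional[tuple]: ...
    def fetchall(self) -> list[tuple]: ...
    def __iter__(self) -> Iterator[tuple]: ...


class FakeCursor:
    """Satisfies Cursor structurally, with no explicit inheritance."""

    def __init__(self, rows: list[tuple]) -> None:
        self._rows = rows

    def fetchone(self) -> Optional[tuple]:
        return self._rows[0] if self._rows else None

    def fetchall(self) -> list[tuple]:
        return list(self._rows)

    def __iter__(self) -> Iterator[tuple]:
        return iter(self._rows)


def dump(cur: Cursor) -> list[tuple]:
    return cur.fetchall()


print(dump(FakeCursor([(1, "a"), (2, "b")])))  # [(1, 'a'), (2, 'b')]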
diff --git a/synapse/synapse_rust/push.pyi b/synapse/synapse_rust/push.pyi
index a3e12ad648e..1e135b8c69e 100644
--- a/synapse/synapse_rust/push.pyi
+++ b/synapse/synapse_rust/push.pyi
@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from typing import Any, Collection, Dict, Mapping, Optional, Sequence, Tuple, Union
+from typing import Any, Collection, Mapping, Optional, Sequence, Union

 from synapse.types import JsonDict, JsonValue

@@ -43,7 +43,7 @@ class FilteredPushRules:
     def __init__(
         self,
         push_rules: PushRules,
-        enabled_map: Dict[str, bool],
+        enabled_map: dict[str, bool],
         msc1767_enabled: bool,
         msc3381_polls_enabled: bool,
         msc3664_enabled: bool,
@@ -51,7 +51,7 @@ class FilteredPushRules:
         msc4210_enabled: bool,
         msc4306_enabled: bool,
     ): ...
-    def rules(self) -> Collection[Tuple[PushRule, bool]]: ...
+    def rules(self) -> Collection[tuple[PushRule, bool]]: ...

 def get_base_rule_ids() -> Collection[str]: ...

@@ -65,7 +65,7 @@ class PushRuleEvaluator:
         notification_power_levels: Mapping[str, int],
         related_events_flattened: Mapping[str, Mapping[str, JsonValue]],
         related_event_match_enabled: bool,
-        room_version_feature_flags: Tuple[str, ...],
+        room_version_feature_flags: tuple[str, ...],
         msc3931_enabled: bool,
         msc4210_enabled: bool,
         msc4306_enabled: bool,
diff --git a/synapse/synapse_rust/segmenter.pyi b/synapse/synapse_rust/segmenter.pyi
index 5f367659479..19a0a4d83cf 100644
--- a/synapse/synapse_rust/segmenter.pyi
+++ b/synapse/synapse_rust/segmenter.pyi
@@ -1,3 +1 @@
-from typing import List
-
-def parse_words(text: str) -> List[str]: ...
+def parse_words(text: str) -> list[str]: ...
diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py
index b48bb6e918a..7cc83bad377 100644
--- a/synapse/util/caches/descriptors.py
+++ b/synapse/util/caches/descriptors.py
@@ -26,7 +26,6 @@
     Awaitable,
     Callable,
     Collection,
-    Dict,
     Generic,
     Hashable,
     Iterable,
@@ -34,8 +33,6 @@
     Optional,
     Protocol,
     Sequence,
-    Tuple,
-    Type,
     TypeVar,
     Union,
     cast,
@@ -56,7 +53,7 @@

 logger = logging.getLogger(__name__)

-CacheKey = Union[Tuple, Any]
+CacheKey = Union[tuple, Any]

 F = TypeVar("F", bound=Callable[..., Any])

@@ -246,7 +243,7 @@ def __init__(
         self.prune_unread_entries = prune_unread_entries

     def __get__(
-        self, obj: Optional[HasServerNameAndClock], owner: Optional[Type]
+        self, obj: Optional[HasServerNameAndClock], owner: Optional[type]
     ) -> Callable[..., "defer.Deferred[Any]"]:
         # We need access to instance-level `obj.server_name` attribute
         assert obj is not None, (
@@ -331,7 +328,7 @@ class DeferredCacheListDescriptor(_CacheDescriptorBase):

     def __init__(
         self,
-        orig: Callable[..., Awaitable[Dict]],
+        orig: Callable[..., Awaitable[dict]],
         cached_method_name: str,
         list_name: str,
         num_args: Optional[int] = None,
@@ -362,7 +359,7 @@ def __init__(
         )

     def __get__(
-        self, obj: Optional[Any], objtype: Optional[Type] = None
+        self, obj: Optional[Any], objtype: Optional[type] = None
     ) -> Callable[..., "defer.Deferred[dict[Hashable, Any]]"]:
         cached_method = getattr(obj, self.cached_method_name)
         cache: DeferredCache[CacheKey, Any] = cached_method.cache
@@ -375,7 +372,7 @@ def __get__(
         )

         @functools.wraps(self.orig)
-        def wrapped(*args: Any, **kwargs: Any) -> "defer.Deferred[Dict]":
+        def wrapped(*args: Any, **kwargs: Any) -> "defer.Deferred[dict]":
             # If we're passed a cache_context then we'll want to call its
             # invalidate() whenever we are invalidated
             invalidate_callback = kwargs.pop("on_invalidate", None)
@@ -414,7 +411,7 @@ def cache_key_to_arg(key: tuple) -> Hashable:
         cached_defers: list["defer.Deferred[Any]"] = []
         if pending_deferred:

-            def update_results(r: Dict) -> None:
+            def update_results(r: dict) -> None:
                 for k, v in r.items():
                     results[cache_key_to_arg(k)] = v
diff --git a/synapse/util/module_loader.py b/synapse/util/module_loader.py
index 8c0af8a323f..ae50e302ed1 100644
--- a/synapse/util/module_loader.py
+++ b/synapse/util/module_loader.py
@@ -21,7 +21,7 @@
 import importlib
 import importlib.util
 from types import ModuleType
-from typing import Any, Type
+from typing import Any

 import jsonschema

@@ -30,7 +30,7 @@
 from synapse.types import StrSequence


-def load_module(provider: dict, config_path: StrSequence) -> tuple[Type, Any]:
+def load_module(provider: dict, config_path: StrSequence) -> tuple[type, Any]:
     """Loads a synapse module with its config

     Args:
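`load_module` now returns `tuple[type, Any]` (the class object plus its parsed config), with the builtin `type` standing in for `typing.Type`. A hypothetical sketch of that shape; `load_provider` and `Dummy` are illustrative stand-ins, not Synapse APIs:

from typing import Any


def load_provider(
    registry: dict[str, type], name: str, config: Any
) -> tuple[type, Any]:
    # Return the class itself plus its parsed config, typed with the
    # builtin `type` rather than `typing.Type`.
    cls = registry[name]
    parsed = cls.parse_config(config) if hasattr(cls, "parse_config") else config
    return cls, parsed


class Dummy:
    @staticmethod
    def parse_config(config: dict) -> dict:
        return {**config, "validated": True}


cls, cfg = load_provider({"dummy": Dummy}, "dummy", {"level": 3})
print(cls.__name__, cfg)  # Dummy {'level': 3, 'validated': True}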
diff --git a/synapse/util/ratelimitutils.py b/synapse/util/ratelimitutils.py
index c571a50b3ec..37d2e4505d8 100644
--- a/synapse/util/ratelimitutils.py
+++ b/synapse/util/ratelimitutils.py
@@ -28,7 +28,6 @@
     Any,
     Callable,
     ContextManager,
-    DefaultDict,
     Iterator,
     Mapping,
     MutableSet,
@@ -187,7 +186,7 @@ def new_limiter() -> "_PerHostRatelimiter":
                 metrics_name=metrics_name,
             )

-        self.ratelimiters: DefaultDict[str, "_PerHostRatelimiter"] = (
+        self.ratelimiters: collections.defaultdict[str, "_PerHostRatelimiter"] = (
             collections.defaultdict(new_limiter)
         )
diff --git a/tests/federation/transport/test_knocking.py b/tests/federation/transport/test_knocking.py
index e06dffb6c5e..9e92b06d91b 100644
--- a/tests/federation/transport/test_knocking.py
+++ b/tests/federation/transport/test_knocking.py
@@ -19,7 +19,7 @@
 #
 #
 from collections import OrderedDict
-from typing import Any, Dict, Optional
+from typing import Any, Optional

 from twisted.internet.testing import MemoryReactor

@@ -161,8 +161,8 @@ def send_example_state_events_to_room(

     def check_knock_room_state_against_room_state(
         self,
-        knock_room_state: list[Dict],
-        expected_room_state: Dict,
+        knock_room_state: list[dict],
+        expected_room_state: dict,
     ) -> None:
         """Test a list of stripped room state events received over federation against a
         dict of expected state events.
diff --git a/tests/handlers/test_appservice.py b/tests/handlers/test_appservice.py
index e360019203a..7d6bd35a9a1 100644
--- a/tests/handlers/test_appservice.py
+++ b/tests/handlers/test_appservice.py
@@ -24,7 +24,6 @@
     Any,
     Awaitable,
     Callable,
-    Dict,
     Iterable,
     Optional,
     TypeVar,
@@ -1022,7 +1021,7 @@ def test_application_services_receive_local_to_device_for_many_users(self) -> No

     def _register_application_service(
         self,
-        namespaces: Optional[dict[str, Iterable[Dict]]] = None,
+        namespaces: Optional[dict[str, Iterable[dict]]] = None,
     ) -> ApplicationService:
         """
         Register a new application service, with the given namespaces of interest.
diff --git a/tests/push/test_email.py b/tests/push/test_email.py
index b1d16669a63..d3822b8643d 100644
--- a/tests/push/test_email.py
+++ b/tests/push/test_email.py
@@ -21,7 +21,7 @@
 import importlib.resources as importlib_resources
 import os
 from http import HTTPStatus
-from typing import Any, Dict, Sequence
+from typing import Any, Sequence

 import attr
 from parameterized import parameterized
@@ -83,8 +83,8 @@ def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:

         hs = self.setup_test_homeserver(config=config)

-        # List[tuple[Deferred, args, kwargs]]
-        self.email_attempts: list[tuple[Deferred, Sequence, Dict]] = []
+        # list[tuple[Deferred, args, kwargs]]
+        self.email_attempts: list[tuple[Deferred, Sequence, dict]] = []

         def sendmail(*args: Any, **kwargs: Any) -> Deferred:
             # This mocks out synapse.reactor.send_email._sendmail.
@@ -510,7 +510,7 @@ def test_remove_unlinked_pushers_background_job(self) -> None:
         )
         self.assertEqual(len(pushers), 0)

-    def _check_for_mail(self) -> tuple[Sequence, Dict]:
+    def _check_for_mail(self) -> tuple[Sequence, dict]:
         """
         Assert that synapse sent off exactly one email notification.
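The ratelimitutils hunk above annotates with `collections.defaultdict[...]` directly; since Python 3.9 the concrete class is subscriptable, so `typing.DefaultDict` is redundant. A minimal sketch with hypothetical names:

import collections


class PerHostLimiter:
    def __init__(self, host: str) -> None:
        self.host = host
        self.seen = 0


# The concrete class itself appears in the annotation, no typing alias needed.
limiters: collections.defaultdict[str, PerHostLimiter] = collections.defaultdict(
    lambda: PerHostLimiter("unknown")
)

limiters["example.org"] = PerHostLimiter("example.org")
limiters["example.org"].seen += 1
print(limiters["example.org"].seen, limiters["other"].host)  # 1 unknown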
diff --git a/tests/rest/client/test_third_party_rules.py b/tests/rest/client/test_third_party_rules.py
index 812df4454eb..78fa8f4e1c9 100644
--- a/tests/rest/client/test_third_party_rules.py
+++ b/tests/rest/client/test_third_party_rules.py
@@ -19,7 +19,7 @@
 #
 #
 import threading
-from typing import TYPE_CHECKING, Any, Dict, Optional, Union
+from typing import TYPE_CHECKING, Any, Optional, Union
 from unittest.mock import AsyncMock, Mock

 from twisted.internet.testing import MemoryReactor

@@ -48,7 +48,7 @@


 class LegacyThirdPartyRulesTestModule:
-    def __init__(self, config: Dict, module_api: "ModuleApi") -> None:
+    def __init__(self, config: dict, module_api: "ModuleApi") -> None:
         # keep a record of the "current" rules module, so that the test can patch
         # it if desired.
         thread_local.rules_module = self
@@ -70,7 +70,7 @@ def parse_config(config: dict[str, Any]) -> dict[str, Any]:


 class LegacyDenyNewRooms(LegacyThirdPartyRulesTestModule):
-    def __init__(self, config: Dict, module_api: "ModuleApi") -> None:
+    def __init__(self, config: dict, module_api: "ModuleApi") -> None:
         super().__init__(config, module_api)

     async def on_create_room(
@@ -80,7 +80,7 @@ async def on_create_room(


 class LegacyChangeEvents(LegacyThirdPartyRulesTestModule):
-    def __init__(self, config: Dict, module_api: "ModuleApi") -> None:
+    def __init__(self, config: dict, module_api: "ModuleApi") -> None:
         super().__init__(config, module_api)

     async def check_event_allowed(
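For context, the legacy modules patched above share a simple shape: a plain `dict` config plus an opaque module-API handle. A toy version of that interface (all names hypothetical, not Synapse's real module API):

import asyncio
from typing import Any


class DenyNewRooms:
    """Toy third-party-rules module mirroring the legacy shape above."""

    def __init__(self, config: dict, module_api: Any) -> None:
        self.config = config
        self.module_api = module_api

    @staticmethod
    def parse_config(config: dict[str, Any]) -> dict[str, Any]:
        return config

    async def on_create_room(
        self, requester: Any, room_config: dict, is_requester_admin: bool
    ) -> bool:
        # Veto every room creation, as LegacyDenyNewRooms does in the tests.
        return False


module = DenyNewRooms(DenyNewRooms.parse_config({}), module_api=None)
print(asyncio.run(module.on_create_room(None, {}, False)))  # False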
diff --git a/tests/rest/client/utils.py b/tests/rest/client/utils.py
index 6b99d0ab013..d5c824b2918 100644
--- a/tests/rest/client/utils.py
+++ b/tests/rest/client/utils.py
@@ -30,7 +30,6 @@
     Any,
     AnyStr,
     Callable,
-    Dict,
     Iterable,
     Literal,
     Mapping,
@@ -86,7 +85,7 @@ def create_room_as(
         room_version: Optional[str] = ...,
         tok: Optional[str] = ...,
         expect_code: Literal[200] = ...,
-        extra_content: Optional[Dict] = ...,
+        extra_content: Optional[dict] = ...,
         custom_headers: Optional[Iterable[tuple[AnyStr, AnyStr]]] = ...,
     ) -> str: ...

@@ -98,7 +97,7 @@ def create_room_as(
         room_version: Optional[str] = ...,
         tok: Optional[str] = ...,
         expect_code: int = ...,
-        extra_content: Optional[Dict] = ...,
+        extra_content: Optional[dict] = ...,
         custom_headers: Optional[Iterable[tuple[AnyStr, AnyStr]]] = ...,
     ) -> Optional[str]: ...

@@ -109,7 +108,7 @@ def create_room_as(
         room_version: Optional[str] = None,
         tok: Optional[str] = None,
         expect_code: int = HTTPStatus.OK,
-        extra_content: Optional[Dict] = None,
+        extra_content: Optional[dict] = None,
         custom_headers: Optional[Iterable[tuple[AnyStr, AnyStr]]] = None,
     ) -> Optional[str]:
         """
diff --git a/tests/server.py b/tests/server.py
index 52bc4add83e..ff5c6061802 100644
--- a/tests/server.py
+++ b/tests/server.py
@@ -35,7 +35,6 @@
     Any,
     Awaitable,
     Callable,
-    Deque,
     Iterable,
     MutableMapping,
     Optional,
@@ -491,7 +490,7 @@ def __init__(self) -> None:
         self._tcp_callbacks: dict[tuple[str, int], Callable] = {}
         self._udp: list[udp.Port] = []
         self.lookups: dict[str, str] = {}
-        self._thread_callbacks: Deque[Callable[..., R]] = deque()
+        self._thread_callbacks: deque[Callable[..., R]] = deque()

         lookups = self.lookups
diff --git a/tests/state/test_v21.py b/tests/state/test_v21.py
index 6d4929f918c..7bef3decf05 100644
--- a/tests/state/test_v21.py
+++ b/tests/state/test_v21.py
@@ -18,7 +18,7 @@
 #
 #
 import itertools
-from typing import Dict, Optional, Sequence
+from typing import Optional, Sequence

 from twisted.internet import defer
 from twisted.test.proto_helpers import MemoryReactor
@@ -475,7 +475,7 @@ def create_event(
         event_type: str,
         state_key: Optional[str],
         sender: str,
-        content: Dict,
+        content: dict,
         auth_events: list[str],
         prev_events: Optional[list[str]] = None,
         room_id: Optional[str] = None,
diff --git a/tests/test_event_auth.py b/tests/test_event_auth.py
index 6632e11eb7d..7737101967c 100644
--- a/tests/test_event_auth.py
+++ b/tests/test_event_auth.py
@@ -20,7 +20,7 @@
 #
 import unittest
-from typing import Any, Collection, Iterable, List, Optional
+from typing import Any, Collection, Iterable, Optional

 from parameterized import parameterized

@@ -861,7 +861,7 @@ def _alias_event(room_version: RoomVersion, sender: str, **kwargs: Any) -> Event

 def _build_auth_dict_for_room_version(
     room_version: RoomVersion, auth_events: Iterable[EventBase]
-) -> List:
+) -> list:
     if room_version.event_format == EventFormatVersions.ROOM_V1_V2:
         return [(e.event_id, "not_used") for e in auth_events]
     else:

From c9a43f0c2d63c432d48304c7c13c2fa1e392b862 Mon Sep 17 00:00:00 2001
From: Andrew Ferrazzutti
Date: Wed, 15 Oct 2025 16:53:27 -0400
Subject: [PATCH 5/6] Revert invalid format changes to saml2 imports

---
 synapse/handlers/saml.py                               | 2 +-
 synapse/rest/synapse/client/saml2/metadata_resource.py | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/synapse/handlers/saml.py b/synapse/handlers/saml.py
index e91c25cabca..218fbcaaa77 100644
--- a/synapse/handlers/saml.py
+++ b/synapse/handlers/saml.py
@@ -23,10 +23,10 @@
 from typing import TYPE_CHECKING, Callable, Optional

 import attr
-
 import saml2
 import saml2.response
 from saml2.client import Saml2Client
+
 from synapse.api.errors import SynapseError
 from synapse.config import ConfigError
 from synapse.handlers.sso import MappingException, UserAttributes
diff --git a/synapse/rest/synapse/client/saml2/metadata_resource.py b/synapse/rest/synapse/client/saml2/metadata_resource.py
index e7ed96174f8..bcd5195108e 100644
--- a/synapse/rest/synapse/client/saml2/metadata_resource.py
+++ b/synapse/rest/synapse/client/saml2/metadata_resource.py
@@ -20,11 +20,11 @@

 from typing import TYPE_CHECKING

+import saml2.metadata
+
 from twisted.web.resource import Resource
 from twisted.web.server import Request

-import saml2.metadata
-
 if TYPE_CHECKING:
     from synapse.server import HomeServer
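As with `defaultdict` earlier, the `Deque` to `deque` change in tests/server.py above works because `collections.deque` is subscriptable at runtime from Python 3.9 onward. A quick sketch:

from collections import deque
from typing import Callable

# The concrete class replaces typing.Deque directly in the annotation.
callbacks: deque[Callable[[], int]] = deque()

callbacks.append(lambda: 1)
callbacks.append(lambda: 2)
print([cb() for cb in callbacks])  # [1, 2]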
From 3d079b9aae58ebf8ceb0a5d7f2c793aa901701c4 Mon Sep 17 00:00:00 2001
From: Andrew Ferrazzutti
Date: Mon, 20 Oct 2025 16:41:05 -0400
Subject: [PATCH 6/6] Ignore mypy errors for some known-safe cases

Suppress some false-positive errors caused by migrating from typing.Type
---
 synapse/config/saml2.py  | 2 +-
 synapse/handlers/oidc.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/synapse/config/saml2.py b/synapse/config/saml2.py
index b5ea4f09f62..c04b909448d 100644
--- a/synapse/config/saml2.py
+++ b/synapse/config/saml2.py
@@ -161,7 +161,7 @@ def read_config(self, config: JsonDict, **kwargs: Any) -> None:

         # Get the desired saml auth response attributes from the module
         saml2_config_dict = self._default_saml_config_dict(
-            *self.saml2_user_mapping_provider_class.get_saml_attributes(
+            *self.saml2_user_mapping_provider_class.get_saml_attributes(  # type: ignore[attr-defined]
                 self.saml2_user_mapping_provider_config
             )
         )
diff --git a/synapse/handlers/oidc.py b/synapse/handlers/oidc.py
index d6ee091c091..e790a4b2191 100644
--- a/synapse/handlers/oidc.py
+++ b/synapse/handlers/oidc.py
@@ -426,7 +426,7 @@ def __init__(
         self._jwks = RetryOnExceptionCachedCall(self._load_jwks)

         user_mapping_provider_init_method = (
-            provider.user_mapping_provider_class.__init__
+            provider.user_mapping_provider_class.__init__  # type: ignore[misc]
        )
         if len(inspect.signature(user_mapping_provider_init_method).parameters) == 3:
             self._user_mapping_provider = provider.user_mapping_provider_class(
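The two suppressions above share a cause: once an attribute is annotated with the bare builtin `type` rather than a concrete `Type[...]`, mypy loses the class-specific attributes, and direct `__init__` access is flagged as unsound (error code `misc`). A minimal sketch, assuming a hypothetical `Provider` class rather than the Synapse code:

import inspect
from typing import Any


class Provider:
    def __init__(self, config: Any, api: Any) -> None:
        self.config = config


# Typed as the bare builtin `type`, the variable is no longer tied to
# Provider for mypy, so direct __init__ access needs a suppression.
provider_class: type = Provider

init_method = provider_class.__init__  # type: ignore[misc]
print(len(inspect.signature(init_method).parameters))  # 3 (self, config, api)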