4 changes: 2 additions & 2 deletions build_rust.py
@@ -2,13 +2,13 @@

import itertools
import os
from typing import Any, Dict
from typing import Any

from packaging.specifiers import SpecifierSet
from setuptools_rust import Binding, RustExtension


def build(setup_kwargs: Dict[str, Any]) -> None:
def build(setup_kwargs: dict[str, Any]) -> None:
original_project_dir = os.path.dirname(os.path.realpath(__file__))
cargo_toml_path = os.path.join(original_project_dir, "rust", "Cargo.toml")

1 change: 1 addition & 0 deletions changelog.d/19046.misc
@@ -0,0 +1 @@
Use type hinting generics in standard collections, as per PEP 585, added in Python 3.9.
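The changelog entry summarises the whole PR: every `typing.Dict`/`List`/`Set` annotation is rewritten to the built-in generics that PEP 585 made usable in annotations from Python 3.9. A minimal before/after sketch of the pattern (the helper below is illustrative, not code from this PR):

# Before (pre-PEP 585 spelling, as removed throughout this diff):
#     from typing import Dict, List
#     def count_worker_types(worker_types: List[str]) -> Dict[str, int]: ...

# After (PEP 585 built-in generics, valid at runtime on Python 3.9+):
def count_worker_types(worker_types: list[str]) -> dict[str, int]:
    """Count how many times each worker type was requested (illustrative only)."""
    counts: dict[str, int] = {}
    for worker_type in worker_types:
        counts[worker_type] = counts.get(worker_type, 0) + 1
    return counts

print(count_worker_types(["pusher", "pusher", "federation_sender"]))
# {'pusher': 2, 'federation_sender': 1}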
5 changes: 2 additions & 3 deletions contrib/graph/graph.py
@@ -24,7 +24,6 @@
import html
import json
import urllib.request
from typing import List

import pydot

@@ -33,7 +32,7 @@ def make_name(pdu_id: str, origin: str) -> str:
return f"{pdu_id}@{origin}"


def make_graph(pdus: List[dict], filename_prefix: str) -> None:
def make_graph(pdus: list[dict], filename_prefix: str) -> None:
"""
Generate a dot and SVG file for a graph of events in the room based on the
topological ordering by querying a homeserver.
@@ -127,7 +126,7 @@ def make_graph(pdus: List[dict], filename_prefix: str) -> None:
graph.write_svg("%s.svg" % filename_prefix, prog="dot")


def get_pdus(host: str, room: str) -> List[dict]:
def get_pdus(host: str, room: str) -> list[dict]:
transaction = json.loads(
urllib.request.urlopen(
f"http://{host}/_matrix/federation/v1/context/{room}/"
55 changes: 26 additions & 29 deletions docker/configure_workers_and_start.py
@@ -65,13 +65,10 @@
from pathlib import Path
from typing import (
Any,
Dict,
List,
Mapping,
MutableMapping,
NoReturn,
Optional,
Set,
SupportsIndex,
)

@@ -96,7 +93,7 @@
# Watching /_matrix/media and related needs a "media" listener
# Stream Writers require "client" and "replication" listeners because they
# have to attach by instance_map to the master process and have client endpoints.
WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
WORKERS_CONFIG: dict[str, dict[str, Any]] = {
"pusher": {
"app": "synapse.app.generic_worker",
"listener_resources": [],
@@ -408,7 +405,7 @@ def convert(src: str, dst: str, **template_vars: object) -> None:

def add_worker_roles_to_shared_config(
shared_config: dict,
worker_types_set: Set[str],
worker_types_set: set[str],
worker_name: str,
worker_port: int,
) -> None:
@@ -471,9 +468,9 @@ def add_worker_roles_to_shared_config(


def merge_worker_template_configs(
existing_dict: Optional[Dict[str, Any]],
to_be_merged_dict: Dict[str, Any],
) -> Dict[str, Any]:
existing_dict: Optional[dict[str, Any]],
to_be_merged_dict: dict[str, Any],
) -> dict[str, Any]:
"""When given an existing dict of worker template configuration consisting with both
dicts and lists, merge new template data from WORKERS_CONFIG(or create) and
return new dict.
@@ -484,7 +481,7 @@ def merge_worker_template_configs(
existing_dict.
Returns: The newly merged together dict values.
"""
new_dict: Dict[str, Any] = {}
new_dict: dict[str, Any] = {}
if not existing_dict:
# It doesn't exist yet, just use the new dict(but take a copy not a reference)
new_dict = to_be_merged_dict.copy()
@@ -509,8 +506,8 @@


def insert_worker_name_for_worker_config(
existing_dict: Dict[str, Any], worker_name: str
) -> Dict[str, Any]:
existing_dict: dict[str, Any], worker_name: str
) -> dict[str, Any]:
"""Insert a given worker name into the worker's configuration dict.

Args:
@@ -526,7 +523,7 @@ def insert_worker_name_for_worker_config(
return dict_to_edit


def apply_requested_multiplier_for_worker(worker_types: List[str]) -> List[str]:
def apply_requested_multiplier_for_worker(worker_types: list[str]) -> list[str]:
"""
Apply multiplier(if found) by returning a new expanded list with some basic error
checking.
@@ -587,7 +584,7 @@ def is_sharding_allowed_for_worker_type(worker_type: str) -> bool:

def split_and_strip_string(
given_string: str, split_char: str, max_split: SupportsIndex = -1
) -> List[str]:
) -> list[str]:
"""
Helper to split a string on split_char and strip whitespace from each end of each
element.
@@ -616,8 +613,8 @@ def generate_base_homeserver_config() -> None:


def parse_worker_types(
requested_worker_types: List[str],
) -> Dict[str, Set[str]]:
requested_worker_types: list[str],
) -> dict[str, set[str]]:
"""Read the desired list of requested workers and prepare the data for use in
generating worker config files while also checking for potential gotchas.

@@ -633,14 +630,14 @@ def parse_worker_types(
# A counter of worker_base_name -> int. Used for determining the name for a given
# worker when generating its config file, as each worker's name is just
# worker_base_name followed by instance number
worker_base_name_counter: Dict[str, int] = defaultdict(int)
worker_base_name_counter: dict[str, int] = defaultdict(int)

# Similar to above, but more finely grained. This is used to determine we don't have
# more than a single worker for cases where multiples would be bad(e.g. presence).
worker_type_shard_counter: Dict[str, int] = defaultdict(int)
worker_type_shard_counter: dict[str, int] = defaultdict(int)

# The final result of all this processing
dict_to_return: Dict[str, Set[str]] = {}
dict_to_return: dict[str, set[str]] = {}

# Handle any multipliers requested for given workers.
multiple_processed_worker_types = apply_requested_multiplier_for_worker(
@@ -684,7 +681,7 @@ def parse_worker_types(

# Split the worker_type_string on "+", remove whitespace from ends then make
# the list a set so it's deduplicated.
worker_types_set: Set[str] = set(
worker_types_set: set[str] = set(
split_and_strip_string(worker_type_string, "+")
)

@@ -743,7 +740,7 @@ def generate_worker_files(
environ: Mapping[str, str],
config_path: str,
data_dir: str,
requested_worker_types: Dict[str, Set[str]],
requested_worker_types: dict[str, set[str]],
) -> None:
"""Read the desired workers(if any) that is passed in and generate shared
homeserver, nginx and supervisord configs.
@@ -764,7 +761,7 @@ def generate_worker_files(
# First read the original config file and extract the listeners block. Then we'll
# add another listener for replication. Later we'll write out the result to the
# shared config file.
listeners: List[Any]
listeners: list[Any]
if using_unix_sockets:
listeners = [
{
@@ -792,27 +789,27 @@ def generate_worker_files(
# base shared worker jinja2 template. This config file will be passed to all
# workers, included Synapse's main process. It is intended mainly for disabling
# functionality when certain workers are spun up, and adding a replication listener.
shared_config: Dict[str, Any] = {"listeners": listeners}
shared_config: dict[str, Any] = {"listeners": listeners}

# List of dicts that describe workers.
# We pass this to the Supervisor template later to generate the appropriate
# program blocks.
worker_descriptors: List[Dict[str, Any]] = []
worker_descriptors: list[dict[str, Any]] = []

# Upstreams for load-balancing purposes. This dict takes the form of the worker
# type to the ports of each worker. For example:
# {
# worker_type: {1234, 1235, ...}}
# }
# and will be used to construct 'upstream' nginx directives.
nginx_upstreams: Dict[str, Set[int]] = {}
nginx_upstreams: dict[str, set[int]] = {}

# A map of: {"endpoint": "upstream"}, where "upstream" is a str representing what
# will be placed after the proxy_pass directive. The main benefit to representing
# this data as a dict over a str is that we can easily deduplicate endpoints
# across multiple instances of the same worker. The final rendering will be combined
# with nginx_upstreams and placed in /etc/nginx/conf.d.
nginx_locations: Dict[str, str] = {}
nginx_locations: dict[str, str] = {}

# Create the worker configuration directory if it doesn't already exist
os.makedirs("/conf/workers", exist_ok=True)
@@ -846,7 +843,7 @@ def generate_worker_files(
# yaml config file
for worker_name, worker_types_set in requested_worker_types.items():
# The collected and processed data will live here.
worker_config: Dict[str, Any] = {}
worker_config: dict[str, Any] = {}

# Merge all worker config templates for this worker into a single config
for worker_type in worker_types_set:
@@ -1029,7 +1026,7 @@ def generate_worker_log_config(
Returns: the path to the generated file
"""
# Check whether we should write worker logs to disk, in addition to the console
extra_log_template_args: Dict[str, Optional[str]] = {}
extra_log_template_args: dict[str, Optional[str]] = {}
if environ.get("SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK"):
extra_log_template_args["LOG_FILE_PATH"] = f"{data_dir}/logs/{worker_name}.log"

@@ -1053,7 +1050,7 @@ def generate_worker_log_config(
return log_config_filepath


def main(args: List[str], environ: MutableMapping[str, str]) -> None:
def main(args: list[str], environ: MutableMapping[str, str]) -> None:
parser = ArgumentParser()
parser.add_argument(
"--generate-only",
@@ -1087,7 +1084,7 @@ def main(args: List[str], environ: MutableMapping[str, str]) -> None:
if not worker_types_env:
# No workers, just the main process
worker_types = []
requested_worker_types: Dict[str, Any] = {}
requested_worker_types: dict[str, Any] = {}
else:
# Split type names by comma, ignoring whitespace.
worker_types = split_and_strip_string(worker_types_env, ",")
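To show the rewritten `dict[str, Any]`/`set[str]` annotations in context, here is a self-contained sketch of the kind of template merge that `merge_worker_template_configs` describes — lists concatenated, nested dicts combined, other values overwritten. It is a simplified illustration of the pattern, not the actual Synapse implementation.

from typing import Any, Optional


def merge_templates(
    existing: Optional[dict[str, Any]],
    incoming: dict[str, Any],
) -> dict[str, Any]:
    """Illustrative merge: concatenate lists, union nested dicts, else overwrite."""
    if not existing:
        # Copy rather than alias so later edits don't leak back into the template.
        return incoming.copy()
    merged: dict[str, Any] = existing.copy()
    for key, value in incoming.items():
        current = merged.get(key)
        if isinstance(value, list) and isinstance(current, list):
            merged[key] = current + value
        elif isinstance(value, dict) and isinstance(current, dict):
            merged[key] = {**current, **value}
        else:
            merged[key] = value
    return merged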
6 changes: 3 additions & 3 deletions docker/start.py
@@ -6,7 +6,7 @@
import platform
import subprocess
import sys
from typing import Any, Dict, List, Mapping, MutableMapping, NoReturn, Optional
from typing import Any, Mapping, MutableMapping, NoReturn, Optional

import jinja2

@@ -69,7 +69,7 @@ def generate_config_from_template(
)

# populate some params from data files (if they exist, else create new ones)
environ: Dict[str, Any] = dict(os_environ)
environ: dict[str, Any] = dict(os_environ)
secrets = {
"registration": "SYNAPSE_REGISTRATION_SHARED_SECRET",
"macaroon": "SYNAPSE_MACAROON_SECRET_KEY",
@@ -200,7 +200,7 @@ def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) ->
subprocess.run(args, check=True)


def main(args: List[str], environ: MutableMapping[str, str]) -> None:
def main(args: list[str], environ: MutableMapping[str, str]) -> None:
mode = args[1] if len(args) > 1 else "run"

# if we were given an explicit user to switch to, do so
2 changes: 2 additions & 0 deletions pyproject.toml
@@ -78,6 +78,8 @@ select = [
"LOG",
# flake8-logging-format
"G",
# pyupgrade
"UP006",
]

[tool.ruff.lint.isort]
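The new `UP006` entry turns on the pyupgrade rule that keeps this convention enforced by ruff, so any reintroduced `typing.Dict`/`List`/`Set` annotation is flagged (and can usually be auto-fixed with `ruff check --fix`). A small sketch of what the rule reacts to — the function names are made up for illustration:

from typing import Dict  # ruff UP006 flags this deprecated alias


def load_settings_old() -> Dict[str, str]:  # flagged: use `dict` instead
    return {}


def load_settings() -> dict[str, str]:  # PEP 585 spelling, passes the lint
    return {}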
4 changes: 2 additions & 2 deletions scripts-dev/build_debian_packages.py
@@ -18,7 +18,7 @@
import threading
from concurrent.futures import ThreadPoolExecutor
from types import FrameType
from typing import Collection, Optional, Sequence, Set
from typing import Collection, Optional, Sequence

# These are expanded inside the dockerfile to be a fully qualified image name.
# e.g. docker.io/library/debian:bullseye
@@ -54,7 +54,7 @@ def __init__(
):
self.redirect_stdout = redirect_stdout
self._docker_build_args = tuple(docker_build_args or ())
self.active_containers: Set[str] = set()
self.active_containers: set[str] = set()
self._lock = threading.Lock()
self._failed = False

3 changes: 1 addition & 2 deletions scripts-dev/check_locked_deps_have_sdists.py
@@ -21,7 +21,6 @@
#
import sys
from pathlib import Path
from typing import Dict, List

import tomli

@@ -33,7 +32,7 @@ def main() -> None:

# Poetry 1.3+ lockfile format:
# There's a `files` inline table in each [[package]]
packages_to_assets: Dict[str, List[Dict[str, str]]] = {
packages_to_assets: dict[str, list[dict[str, str]]] = {
package["name"]: package["files"] for package in lockfile_content["package"]
}

28 changes: 12 additions & 16 deletions scripts-dev/check_pydantic_models.py
@@ -47,11 +47,7 @@
from typing import (
Any,
Callable,
Dict,
Generator,
List,
Set,
Type,
TypeVar,
)

@@ -69,7 +65,7 @@

logger = logging.getLogger(__name__)

CONSTRAINED_TYPE_FACTORIES_WITH_STRICT_FLAG: List[Callable] = [
CONSTRAINED_TYPE_FACTORIES_WITH_STRICT_FLAG: list[Callable] = [
constr,
conbytes,
conint,
@@ -145,7 +141,7 @@ class PatchedBaseModel(PydanticBaseModel):
"""

@classmethod
def __init_subclass__(cls: Type[PydanticBaseModel], **kwargs: object):
def __init_subclass__(cls: type[PydanticBaseModel], **kwargs: object):
for field in cls.__fields__.values():
# Note that field.type_ and field.outer_type are computed based on the
# annotation type, see pydantic.fields.ModelField._type_analysis
@@ -212,7 +208,7 @@ def lint() -> int:
return os.EX_DATAERR if failures else os.EX_OK


def do_lint() -> Set[str]:
def do_lint() -> set[str]:
"""Try to import all of Synapse and see if we spot any Pydantic type coercions."""
failures = set()

@@ -258,8 +254,8 @@ def run_test_snippet(source: str) -> None:
# > Remember that at the module level, globals and locals are the same dictionary.
# > If exec gets two separate objects as globals and locals, the code will be
# > executed as if it were embedded in a class definition.
globals_: Dict[str, object]
locals_: Dict[str, object]
globals_: dict[str, object]
locals_: dict[str, object]
globals_ = locals_ = {}
exec(textwrap.dedent(source), globals_, locals_)

@@ -394,10 +390,10 @@ class TestFieldTypeInspection(unittest.TestCase):
("bool"),
("Optional[str]",),
("Union[None, str]",),
("List[str]",),
("List[List[str]]",),
("Dict[StrictStr, str]",),
("Dict[str, StrictStr]",),
("list[str]",),
("list[list[str]]",),
("dict[StrictStr, str]",),
("dict[str, StrictStr]",),
("TypedDict('D', x=int)",),
]
)
@@ -425,9 +421,9 @@ class C(BaseModel):
("constr(strict=True, min_length=10)",),
("Optional[StrictStr]",),
("Union[None, StrictStr]",),
("List[StrictStr]",),
("List[List[StrictStr]]",),
("Dict[StrictStr, StrictStr]",),
("list[StrictStr]",),
("list[list[StrictStr]]",),
("dict[StrictStr, StrictStr]",),
("TypedDict('D', x=StrictInt)",),
]
)
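The parametrised strings above are the annotations this lint script classifies: plain `str` (even nested inside `list[...]` or `dict[...]`) accepts silent coercion, while the `Strict*` variants do not. A minimal sketch of the difference being guarded against, assuming pydantic v1-style lax validation (the model names are made up):

from pydantic import BaseModel, StrictStr, ValidationError


class LaxModel(BaseModel):
    name: str  # pydantic v1 silently coerces e.g. 123 -> "123"


class StrictModel(BaseModel):
    name: StrictStr  # rejects anything that is not already a str


print(LaxModel(name=123).name)  # "123" — the silent coercion the script hunts for

try:
    StrictModel(name=123)
except ValidationError:
    print("rejected, as the strict annotation intends")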