@@ -109,10 +109,8 @@ def __init__(
 
         # Ideally we'd move these ID gens here, unfortunately some other ID
         # generators are chained off them so doing so is a bit of a PITA.
-        self._backfill_id_gen = (
-            self.store._backfill_id_gen
-        )  # type: MultiWriterIdGenerator
-        self._stream_id_gen = self.store._stream_id_gen  # type: MultiWriterIdGenerator
+        self._backfill_id_gen: MultiWriterIdGenerator = self.store._backfill_id_gen
+        self._stream_id_gen: MultiWriterIdGenerator = self.store._stream_id_gen
 
         # This should only exist on instances that are configured to write
         assert (
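Every hunk in this commit is the same mechanical change: a trailing `# type:` comment becomes an inline PEP 526 variable annotation. A minimal sketch of the two styles side by side (the variable names here are illustrative, not taken from the module); the annotation matters most for empty collections, where mypy cannot otherwise infer the element type:

from typing import Dict, List

# Old style: the type lives in a trailing comment.
results = []  # type: List[str]

# New style (PEP 526): the annotation is part of the assignment itself.
# For an empty literal this is what tells mypy the element type.
event_ids: List[str] = []
depth_updates: Dict[str, int] = {}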
@@ -221,7 +219,7 @@ async def _get_events_which_are_prevs(self, event_ids: Iterable[str]) -> List[st
         Returns:
             Filtered event ids
         """
-        results = []  # type: List[str]
+        results: List[str] = []
 
         def _get_events_which_are_prevs_txn(txn, batch):
             sql = """
@@ -508,7 +506,7 @@ def _add_chain_cover_index(
         """
 
         # Map from event ID to chain ID/sequence number.
-        chain_map = {}  # type: Dict[str, Tuple[int, int]]
+        chain_map: Dict[str, Tuple[int, int]] = {}
 
         # Set of event IDs to calculate chain ID/seq numbers for.
         events_to_calc_chain_id_for = set(event_to_room_id)
@@ -817,8 +815,8 @@ def _allocate_chain_ids(
         # new chain if the sequence number has already been allocated.
         #
 
-        existing_chains = set()  # type: Set[int]
-        tree = []  # type: List[Tuple[str, Optional[str]]]
+        existing_chains: Set[int] = set()
+        tree: List[Tuple[str, Optional[str]]] = []
 
         # We need to do this in a topologically sorted order as we want to
         # generate chain IDs/sequence numbers of an event's auth events before
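The comment above explains why ordering matters: an event's chain ID/sequence number can only be derived once its auth event already has one, so the (event_id, auth_event_id) pairs collected in `tree` have to be walked parents-first. A self-contained sketch of that ordering over a parent-pointer forest; `sort_parents_first` is an illustrative helper, not one of the module's own functions:

from typing import Dict, List, Optional, Set, Tuple

def sort_parents_first(tree: List[Tuple[str, Optional[str]]]) -> List[str]:
    """Order event IDs so every auth event precedes the events that reference it."""
    parent: Dict[str, Optional[str]] = dict(tree)
    ordered: List[str] = []
    seen: Set[str] = set()

    def visit(event_id: str) -> None:
        if event_id in seen:
            return
        seen.add(event_id)
        auth_event_id = parent.get(event_id)
        if auth_event_id is not None and auth_event_id in parent:
            visit(auth_event_id)  # give the auth event its number first
        ordered.append(event_id)

    for event_id, _ in tree:
        visit(event_id)
    return ordered

# Parents come out before children regardless of input order:
assert sort_parents_first([("C", "B"), ("B", "A"), ("A", None)]) == ["A", "B", "C"]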
@@ -848,7 +846,7 @@ def _allocate_chain_ids(
         )
         txn.execute(sql % (clause,), args)
 
-        chain_to_max_seq_no = {row[0]: row[1] for row in txn}  # type: Dict[Any, int]
+        chain_to_max_seq_no: Dict[Any, int] = {row[0]: row[1] for row in txn}
 
         # Allocate the new events chain ID/sequence numbers.
         #
@@ -858,8 +856,8 @@ def _allocate_chain_ids(
         # number of new chain IDs in one call, replacing all temporary
         # objects with real allocated chain IDs.
 
-        unallocated_chain_ids = set()  # type: Set[object]
-        new_chain_tuples = {}  # type: Dict[str, Tuple[Any, int]]
+        unallocated_chain_ids: Set[object] = set()
+        new_chain_tuples: Dict[str, Tuple[Any, int]] = {}
         for event_id, auth_event_id in tree:
             # If we reference an auth_event_id we fetch the allocated chain ID,
             # either from the existing `chain_map` or the newly generated
@@ -870,7 +868,7 @@ def _allocate_chain_ids(
                 if not existing_chain_id:
                     existing_chain_id = chain_map[auth_event_id]
 
-            new_chain_tuple = None  # type: Optional[Tuple[Any, int]]
+            new_chain_tuple: Optional[Tuple[Any, int]] = None
             if existing_chain_id:
                 # We found a chain ID/sequence number candidate, check its
                 # not already taken.
@@ -897,9 +895,9 @@ def _allocate_chain_ids(
         )
 
         # Map from potentially temporary chain ID to real chain ID
-        chain_id_to_allocated_map = dict(
+        chain_id_to_allocated_map: Dict[Any, int] = dict(
             zip(unallocated_chain_ids, newly_allocated_chain_ids)
-        )  # type: Dict[Any, int]
+        )
         chain_id_to_allocated_map.update((c, c) for c in existing_chains)
 
         return {
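The `zip` builds the translation table from placeholder objects to the freshly allocated integer chain IDs, and the `update` makes the table an identity mapping for chains that already existed, so every chain ID recorded in `new_chain_tuples` can be resolved the same way. A small worked example of that resolution step with made-up IDs; the truncated `return {` presumably performs the equivalent lookup:

from typing import Any, Dict, Tuple

placeholder = object()
new_chain_tuples: Dict[str, Tuple[Any, int]] = {
    "$new_event": (placeholder, 1),  # brand-new chain, temporary ID
    "$old_event": (7, 4),            # existing chain 7, next sequence number
}
newly_allocated_chain_ids = [42]      # e.g. handed out in bulk by an ID generator
existing_chains = {7}

chain_id_to_allocated_map: Dict[Any, int] = dict(
    zip([placeholder], newly_allocated_chain_ids)
)
chain_id_to_allocated_map.update((c, c) for c in existing_chains)

resolved = {
    event_id: (chain_id_to_allocated_map[chain_id], seq)
    for event_id, (chain_id, seq) in new_chain_tuples.items()
}
assert resolved == {"$new_event": (42, 1), "$old_event": (7, 4)}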
@@ -1175,9 +1173,9 @@ def _filter_events_and_contexts_for_duplicates(
         Returns:
             list[(EventBase, EventContext)]: filtered list
         """
-        new_events_and_contexts = (
-            OrderedDict()
-        )  # type: OrderedDict[str, Tuple[EventBase, EventContext]]
+        new_events_and_contexts: OrderedDict[
+            str, Tuple[EventBase, EventContext]
+        ] = OrderedDict()
         for event, context in events_and_contexts:
             prev_event_context = new_events_and_contexts.get(event.event_id)
             if prev_event_context:
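Keying an OrderedDict on the event ID is what keeps the duplicate filtering order-preserving: the first occurrence fixes an event's position, and later occurrences are folded into that slot. The real method inspects the previous context before deciding what to keep; the sketch below simply keeps the first entry and uses plain strings in place of EventBase/EventContext:

from collections import OrderedDict
from typing import List, Tuple

def filter_duplicates(
    events_and_contexts: List[Tuple[str, str]]
) -> List[Tuple[str, str]]:
    """Drop repeated event IDs, keeping the first occurrence's position."""
    deduped: OrderedDict[str, Tuple[str, str]] = OrderedDict()
    for event_id, context in events_and_contexts:
        if event_id not in deduped:
            deduped[event_id] = (event_id, context)
    return list(deduped.values())

assert filter_duplicates([("$a", "c1"), ("$b", "c2"), ("$a", "c3")]) == [
    ("$a", "c1"),
    ("$b", "c2"),
]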
@@ -1205,7 +1203,7 @@ def _update_room_depths_txn(
                 we are persisting
             backfilled (bool): True if the events were backfilled
         """
-        depth_updates = {}  # type: Dict[str, int]
+        depth_updates: Dict[str, int] = {}
         for event, context in events_and_contexts:
             # Remove the any existing cache entries for the event_ids
             txn.call_after(self.store._invalidate_get_event_cache, event.event_id)
@@ -1885,7 +1883,7 @@ def _set_push_actions_for_event_and_users_txn(
             ),
         )
 
-        room_to_event_ids = {}  # type: Dict[str, List[str]]
+        room_to_event_ids: Dict[str, List[str]] = {}
         for e, _ in events_and_contexts:
             room_to_event_ids.setdefault(e.room_id, []).append(e.event_id)
 
@@ -2012,7 +2010,7 @@ def _update_backward_extremeties(self, txn, events):
 
         Forward extremities are handled when we first start persisting the events.
         """
-        events_by_room = {}  # type: Dict[str, List[EventBase]]
+        events_by_room: Dict[str, List[EventBase]] = {}
         for ev in events:
             events_by_room.setdefault(ev.room_id, []).append(ev)
 
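The last two hunks both rely on the same grouping idiom, `dict.setdefault(key, []).append(value)`, which builds per-room buckets in a single pass. A tiny standalone illustration with made-up room and event IDs:

from typing import Dict, List, Tuple

events: List[Tuple[str, str]] = [
    ("!room1:example.org", "$ev1"),
    ("!room2:example.org", "$ev2"),
    ("!room1:example.org", "$ev3"),
]

events_by_room: Dict[str, List[str]] = {}
for room_id, event_id in events:
    # setdefault inserts an empty list the first time a room is seen,
    # then always returns that list so we can append to it.
    events_by_room.setdefault(room_id, []).append(event_id)

assert events_by_room == {
    "!room1:example.org": ["$ev1", "$ev3"],
    "!room2:example.org": ["$ev2"],
}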