diff --git a/.github/workflows/fix_lint.yaml b/.github/workflows/fix_lint.yaml
index 5970b4e82697f93676cf3639b3c9b183ccae2c91..909b0a847ff72ed8f48335bbd80a5a8020ea2fff 100644
--- a/.github/workflows/fix_lint.yaml
+++ b/.github/workflows/fix_lint.yaml
@@ -29,10 +29,14 @@ jobs:
         with:
           install-project: "false"
 
-      - name: Run ruff
+      - name: Run ruff check
         continue-on-error: true
         run: poetry run ruff check --fix .
 
+      - name: Run ruff format
+        continue-on-error: true
+        run: poetry run ruff format --quiet .
+
       - run: cargo clippy --all-features --fix -- -D warnings
         continue-on-error: true
 
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index add046ec6a56c7d45139de35b2c1a4982997ecef..5586bd6d947202865916d44b18cfc8ed492f73fb 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -131,9 +131,12 @@ jobs:
         with:
           install-project: "false"
 
-      - name: Check style
+      - name: Run ruff check
         run: poetry run ruff check --output-format=github .
 
+      - name: Run ruff format
+        run: poetry run ruff format --check .
+
   lint-mypy:
     runs-on: ubuntu-latest
     name: Typechecking
diff --git a/changelog.d/17643.misc b/changelog.d/17643.misc
new file mode 100644
index 0000000000000000000000000000000000000000..f583cdcb38f3ba4a79af4f19af22dde1f954f83b
--- /dev/null
+++ b/changelog.d/17643.misc
@@ -0,0 +1 @@
+Replace `isort` and `black` with `ruff`.
diff --git a/contrib/cmdclient/console.py b/contrib/cmdclient/console.py
index d4ddeb4dc71e85edd33c074f8fa1014fd4349600..ca2e72b5e8e71c6556a28f647aeab48809ec89ae 100755
--- a/contrib/cmdclient/console.py
+++ b/contrib/cmdclient/console.py
@@ -21,7 +21,8 @@
 #
 #
 
-""" Starts a synapse client console. """
+"""Starts a synapse client console."""
+
 import argparse
 import binascii
 import cmd
diff --git a/scripts-dev/check_pydantic_models.py b/scripts-dev/check_pydantic_models.py
index 9e67375b6ad4b054db5a0fce247a759bc2e252eb..26d667aba0c040237cb851acf8c11c612968b5f2 100755
--- a/scripts-dev/check_pydantic_models.py
+++ b/scripts-dev/check_pydantic_models.py
@@ -31,6 +31,7 @@ Pydantic does not yet offer a strict mode, but it is planned for pydantic v2. Se
 until then, this script is a best effort to stop us from introducing type coersion bugs
 (like the infamous stringy power levels fixed in room version 10).
 """
+
 import argparse
 import contextlib
 import functools
diff --git a/scripts-dev/lint.sh b/scripts-dev/lint.sh
index fa6ff9070881bcfc3b070147d19a1ca1d44a8fe3..c656047729d585535e0627142f00653f09d92ca2 100755
--- a/scripts-dev/lint.sh
+++ b/scripts-dev/lint.sh
@@ -109,6 +109,9 @@ set -x
 # --quiet suppresses the update check.
 ruff check --quiet --fix "${files[@]}"
 
+# Reformat Python code.
+ruff format --quiet "${files[@]}"
+
 # Catch any common programming mistakes in Rust code.
 #
 # --bins, --examples, --lib, --tests combined explicitly disable checking
diff --git a/scripts-dev/release.py b/scripts-dev/release.py
index 1ace804682e8deb87959ee51b2fdc7a7b24870e4..44356242679c8c758ef3221be393b0a7d12ce9c2 100755
--- a/scripts-dev/release.py
+++ b/scripts-dev/release.py
@@ -20,8 +20,7 @@
 #
 #
 
-"""An interactive script for doing a release. See `cli()` below.
-"""
+"""An interactive script for doing a release. See `cli()` below."""
 
 import glob
 import json
diff --git a/stubs/txredisapi.pyi b/stubs/txredisapi.pyi
index a141218d3db222d344877893a5e2a2e246aca534..c9a4114b1eb292a29fb1487155b51af9e813a514 100644
--- a/stubs/txredisapi.pyi
+++ b/stubs/txredisapi.pyi
@@ -13,8 +13,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-"""Contains *incomplete* type hints for txredisapi.
-"""
+"""Contains *incomplete* type hints for txredisapi."""
+
 from typing import Any, List, Optional, Type, Union
 
 from twisted.internet import protocol
diff --git a/synapse/__init__.py b/synapse/__init__.py
index 99ed7a537416414daecda6d8d1eacfd49135557e..73b92f12beaeda9f70240af0672f5597cf4f0656 100644
--- a/synapse/__init__.py
+++ b/synapse/__init__.py
@@ -20,8 +20,7 @@
 #
 #
 
-""" This is an implementation of a Matrix homeserver.
-"""
+"""This is an implementation of a Matrix homeserver."""
 
 import os
 import sys
diff --git a/synapse/_scripts/generate_workers_map.py b/synapse/_scripts/generate_workers_map.py
index 715c7ddc1708d25c2309631f666edd5b93a3f451..09feb8cf30f9b512bd1bc6166a52e08b2dbd5caa 100755
--- a/synapse/_scripts/generate_workers_map.py
+++ b/synapse/_scripts/generate_workers_map.py
@@ -171,7 +171,7 @@ def elide_http_methods_if_unconflicting(
     """
 
     def paths_to_methods_dict(
-        methods_and_paths: Iterable[Tuple[str, str]]
+        methods_and_paths: Iterable[Tuple[str, str]],
     ) -> Dict[str, Set[str]]:
         """
         Given (method, path) pairs, produces a dict from path to set of methods
@@ -201,7 +201,7 @@ def elide_http_methods_if_unconflicting(
 
 
 def simplify_path_regexes(
-    registrations: Dict[Tuple[str, str], EndpointDescription]
+    registrations: Dict[Tuple[str, str], EndpointDescription],
 ) -> Dict[Tuple[str, str], EndpointDescription]:
     """
     Simplify all the path regexes for the dict of endpoint descriptions,
diff --git a/synapse/_scripts/review_recent_signups.py b/synapse/_scripts/review_recent_signups.py
index ad88df477a84ea6d0c5e375e6b42c70742217a14..62723c539debd798f2c72ef69f697f595d63723a 100644
--- a/synapse/_scripts/review_recent_signups.py
+++ b/synapse/_scripts/review_recent_signups.py
@@ -40,6 +40,7 @@ from synapse.storage.engines import create_engine
 
 class ReviewConfig(RootConfig):
     "A config class that just pulls out the database config"
+
     config_classes = [DatabaseConfig]
 
 
@@ -160,7 +161,11 @@ def main() -> None:
 
     with make_conn(database_config, engine, "review_recent_signups") as db_conn:
         # This generates a type of Cursor, not LoggingTransaction.
-        user_infos = get_recent_users(db_conn.cursor(), since_ms, exclude_users_with_appservice)  # type: ignore[arg-type]
+        user_infos = get_recent_users(
+            db_conn.cursor(),
+            since_ms,  # type: ignore[arg-type]
+            exclude_users_with_appservice,
+        )
 
     for user_info in user_infos:
         if exclude_users_with_email and user_info.emails:
diff --git a/synapse/_scripts/synapse_port_db.py b/synapse/_scripts/synapse_port_db.py
index 195c95d3766a90a7621948eea9e71ea3783eed35..31639d366ecaee1efc8fa1d7cbd10c6245667fd5 100755
--- a/synapse/_scripts/synapse_port_db.py
+++ b/synapse/_scripts/synapse_port_db.py
@@ -717,9 +717,7 @@ class Porter:
                 return
 
             # Check if all background updates are done, abort if not.
-            updates_complete = (
-                await self.sqlite_store.db_pool.updates.has_completed_background_updates()
-            )
+            updates_complete = await self.sqlite_store.db_pool.updates.has_completed_background_updates()
             if not updates_complete:
                 end_error = (
                     "Pending background updates exist in the SQLite3 database."
@@ -1095,10 +1093,10 @@ class Porter:
         return done, remaining + done
 
     async def _setup_state_group_id_seq(self) -> None:
-        curr_id: Optional[int] = (
-            await self.sqlite_store.db_pool.simple_select_one_onecol(
-                table="state_groups", keyvalues={}, retcol="MAX(id)", allow_none=True
-            )
+        curr_id: Optional[
+            int
+        ] = await self.sqlite_store.db_pool.simple_select_one_onecol(
+            table="state_groups", keyvalues={}, retcol="MAX(id)", allow_none=True
         )
 
         if not curr_id:
@@ -1186,13 +1184,13 @@ class Porter:
         )
 
     async def _setup_auth_chain_sequence(self) -> None:
-        curr_chain_id: Optional[int] = (
-            await self.sqlite_store.db_pool.simple_select_one_onecol(
-                table="event_auth_chains",
-                keyvalues={},
-                retcol="MAX(chain_id)",
-                allow_none=True,
-            )
+        curr_chain_id: Optional[
+            int
+        ] = await self.sqlite_store.db_pool.simple_select_one_onecol(
+            table="event_auth_chains",
+            keyvalues={},
+            retcol="MAX(chain_id)",
+            allow_none=True,
         )
 
         def r(txn: LoggingTransaction) -> None:
diff --git a/synapse/api/urls.py b/synapse/api/urls.py
index d077a2c613f4adbd02dcfc4469307a40692602ae..03a3e96f289149f49d14e246646d046cda3d3d44 100644
--- a/synapse/api/urls.py
+++ b/synapse/api/urls.py
@@ -19,7 +19,8 @@
 #
 #
 
-"""Contains the URL paths to prefix various aspects of the server with. """
+"""Contains the URL paths to prefix various aspects of the server with."""
+
 import hmac
 from hashlib import sha256
 from urllib.parse import urlencode
diff --git a/synapse/appservice/scheduler.py b/synapse/appservice/scheduler.py
index bec83419a22fb62380e74bcdd245221a5559fc9f..7994da0868bea0b19a78a037abec6ef8e41954bb 100644
--- a/synapse/appservice/scheduler.py
+++ b/synapse/appservice/scheduler.py
@@ -54,6 +54,7 @@ UP & quit           +---------- YES                       SUCCESS
 This is all tied together by the AppServiceScheduler which DIs the required
 components.
 """
+
 import logging
 from typing import (
     TYPE_CHECKING,
diff --git a/synapse/config/key.py b/synapse/config/key.py
index b9925a52d20eb506f58aa7bfd31b27cae2ce8aa8..bc9688896765e62a25cde0b7c17c3451c2f8620b 100644
--- a/synapse/config/key.py
+++ b/synapse/config/key.py
@@ -200,16 +200,13 @@ class KeyConfig(Config):
             )
             form_secret = 'form_secret: "%s"' % random_string_with_symbols(50)
 
-        return (
-            """\
+        return """\
         %(macaroon_secret_key)s
         %(form_secret)s
         signing_key_path: "%(base_key_name)s.signing.key"
         trusted_key_servers:
           - server_name: "matrix.org"
-        """
-            % locals()
-        )
+        """ % locals()
 
     def read_signing_keys(self, signing_key_path: str, name: str) -> List[SigningKey]:
         """Read the signing keys in the given path.
@@ -249,7 +246,9 @@ class KeyConfig(Config):
             if is_signing_algorithm_supported(key_id):
                 key_base64 = key_data["key"]
                 key_bytes = decode_base64(key_base64)
-                verify_key: "VerifyKeyWithExpiry" = decode_verify_key_bytes(key_id, key_bytes)  # type: ignore[assignment]
+                verify_key: "VerifyKeyWithExpiry" = decode_verify_key_bytes(
+                    key_id, key_bytes
+                )  # type: ignore[assignment]
                 verify_key.expired = key_data["expired_ts"]
                 keys[key_id] = verify_key
             else:
diff --git a/synapse/config/logger.py b/synapse/config/logger.py
index fca0b08d6d33aee549b4c721feec19352c9368c4..cfc1a57107f66ac9fe4701d7baba9fa1c52260e8 100644
--- a/synapse/config/logger.py
+++ b/synapse/config/logger.py
@@ -157,12 +157,9 @@ class LoggingConfig(Config):
         self, config_dir_path: str, server_name: str, **kwargs: Any
     ) -> str:
         log_config = os.path.join(config_dir_path, server_name + ".log.config")
-        return (
-            """\
+        return """\
         log_config: "%(log_config)s"
-        """
-            % locals()
-        )
+        """ % locals()
 
     def read_arguments(self, args: argparse.Namespace) -> None:
         if args.no_redirect_stdio is not None:
diff --git a/synapse/config/server.py b/synapse/config/server.py
index fd52c0475cf8f8beaf77d44e595fc9ff21a1531e..488604a30c8dde7be061f3b0ef3e1ea8e1349dd8 100644
--- a/synapse/config/server.py
+++ b/synapse/config/server.py
@@ -828,13 +828,10 @@ class ServerConfig(Config):
             ).lstrip()
 
         if not unsecure_listeners:
-            unsecure_http_bindings = (
-                """- port: %(unsecure_port)s
+            unsecure_http_bindings = """- port: %(unsecure_port)s
             tls: false
             type: http
-            x_forwarded: true"""
-                % locals()
-            )
+            x_forwarded: true""" % locals()
 
             if not open_private_ports:
                 unsecure_http_bindings += (
@@ -853,16 +850,13 @@ class ServerConfig(Config):
         if not secure_listeners:
             secure_http_bindings = ""
 
-        return (
-            """\
+        return """\
         server_name: "%(server_name)s"
         pid_file: %(pid_file)s
         listeners:
           %(secure_http_bindings)s
           %(unsecure_http_bindings)s
-        """
-            % locals()
-        )
+        """ % locals()
 
     def read_arguments(self, args: argparse.Namespace) -> None:
         if args.manhole is not None:
diff --git a/synapse/config/workers.py b/synapse/config/workers.py
index 7ecf349e4ad76494e6a8322a03a42644f2ea2c98..b013ffa354ba0b4f819bc198cab2e087125bb673 100644
--- a/synapse/config/workers.py
+++ b/synapse/config/workers.py
@@ -328,10 +328,11 @@ class WorkerConfig(Config):
                 )
 
         # type-ignore: the expression `Union[A, B]` is not a Type[Union[A, B]] currently
-        self.instance_map: Dict[
-            str, InstanceLocationConfig
-        ] = parse_and_validate_mapping(
-            instance_map, InstanceLocationConfig  # type: ignore[arg-type]
+        self.instance_map: Dict[str, InstanceLocationConfig] = (
+            parse_and_validate_mapping(
+                instance_map,
+                InstanceLocationConfig,  # type: ignore[arg-type]
+            )
         )
 
         # Map from type of streams to source, c.f. WriterLocations.
diff --git a/synapse/event_auth.py b/synapse/event_auth.py
index f5abcde2dbe6b729a5ff7f0331779938261ae810..b834547d11e18eb75980f87652ed0580ac4da2e8 100644
--- a/synapse/event_auth.py
+++ b/synapse/event_auth.py
@@ -887,7 +887,8 @@ def _check_power_levels(
                     raise SynapseError(400, f"{v!r} must be an integer.")
             if k in {"events", "notifications", "users"}:
                 if not isinstance(v, collections.abc.Mapping) or not all(
-                    type(v) is int for v in v.values()  # noqa: E721
+                    type(v) is int
+                    for v in v.values()  # noqa: E721
                 ):
                     raise SynapseError(
                         400,
diff --git a/synapse/events/presence_router.py b/synapse/events/presence_router.py
index 9cb053cd8e93522730e84fcde7e517dc782d921c..9713b141bce8e85395b1932785964dff550e3f7d 100644
--- a/synapse/events/presence_router.py
+++ b/synapse/events/presence_router.py
@@ -80,7 +80,7 @@ def load_legacy_presence_router(hs: "HomeServer") -> None:
     # All methods that the module provides should be async, but this wasn't enforced
     # in the old module system, so we wrap them if needed
     def async_wrapper(
-        f: Optional[Callable[P, R]]
+        f: Optional[Callable[P, R]],
     ) -> Optional[Callable[P, Awaitable[R]]]:
         # f might be None if the callback isn't implemented by the module. In this
         # case we don't want to register a callback at all so we return None.
diff --git a/synapse/events/snapshot.py b/synapse/events/snapshot.py
index 6b70ea94d13d83d8d9213bb786cc5166657e7d05..dd21a6136b1b5ade70aaf0c8ff64a66de041fe58 100644
--- a/synapse/events/snapshot.py
+++ b/synapse/events/snapshot.py
@@ -504,7 +504,7 @@ class UnpersistedEventContext(UnpersistedEventContextBase):
 
 
 def _encode_state_group_delta(
-    state_group_delta: Dict[Tuple[int, int], StateMap[str]]
+    state_group_delta: Dict[Tuple[int, int], StateMap[str]],
 ) -> List[Tuple[int, int, Optional[List[Tuple[str, str, str]]]]]:
     if not state_group_delta:
         return []
@@ -517,7 +517,7 @@ def _encode_state_group_delta(
 
 
 def _decode_state_group_delta(
-    input: List[Tuple[int, int, List[Tuple[str, str, str]]]]
+    input: List[Tuple[int, int, List[Tuple[str, str, str]]]],
 ) -> Dict[Tuple[int, int], StateMap[str]]:
     if not input:
         return {}
@@ -544,7 +544,7 @@ def _encode_state_dict(
 
 
 def _decode_state_dict(
-    input: Optional[List[Tuple[str, str, str]]]
+    input: Optional[List[Tuple[str, str, str]]],
 ) -> Optional[StateMap[str]]:
     """Decodes a state dict encoded using `_encode_state_dict` above"""
     if input is None:
diff --git a/synapse/federation/__init__.py b/synapse/federation/__init__.py
index a571eff5902e2c1f27915199b436ce3d07006344..61e28bff6643e9f89a4596b07ee0db2e5ed36238 100644
--- a/synapse/federation/__init__.py
+++ b/synapse/federation/__init__.py
@@ -19,5 +19,4 @@
 #
 #
 
-""" This package includes all the federation specific logic.
-"""
+"""This package includes all the federation specific logic."""
diff --git a/synapse/federation/persistence.py b/synapse/federation/persistence.py
index 0bfde003154c3e15e2fad94c7901494e242dfa2d..8340b4850313a4aa6bc05732acfa161bc18f3c3e 100644
--- a/synapse/federation/persistence.py
+++ b/synapse/federation/persistence.py
@@ -20,7 +20,7 @@
 #
 #
 
-""" This module contains all the persistence actions done by the federation
+"""This module contains all the persistence actions done by the federation
 package.
 
 These actions are mostly only used by the :py:mod:`.replication` module.
diff --git a/synapse/federation/transport/server/federation.py b/synapse/federation/transport/server/federation.py
index 20f87c885ebe2a8c2fb1e1062f8d126a081afb41..a05e5d5319c7e1ab6626ab7a921b1033f87f4c2a 100644
--- a/synapse/federation/transport/server/federation.py
+++ b/synapse/federation/transport/server/federation.py
@@ -859,7 +859,6 @@ class FederationMediaThumbnailServlet(BaseFederationServerServlet):
         request: SynapseRequest,
         media_id: str,
     ) -> None:
-
         width = parse_integer(request, "width", required=True)
         height = parse_integer(request, "height", required=True)
         method = parse_string(request, "method", "scale")
diff --git a/synapse/federation/units.py b/synapse/federation/units.py
index b2c8ba5887d5b2eb6712dc055951f0ba28c160c8..d8b67a6a5bdf8d290d9ac2b43eab3f91390dfa22 100644
--- a/synapse/federation/units.py
+++ b/synapse/federation/units.py
@@ -19,7 +19,7 @@
 #
 #
 
-""" Defines the JSON structure of the protocol units used by the server to
+"""Defines the JSON structure of the protocol units used by the server to
 server protocol.
 """
 
diff --git a/synapse/handlers/account.py b/synapse/handlers/account.py
index 89e944bc17146847630ffe9dff35b5462b0745a7..37cc3d3ff560bd3524b3e994f6e1c7cf0badfe9a 100644
--- a/synapse/handlers/account.py
+++ b/synapse/handlers/account.py
@@ -118,10 +118,10 @@ class AccountHandler:
             }
 
             if self._use_account_validity_in_account_status:
-                status["org.matrix.expired"] = (
-                    await self._account_validity_handler.is_user_expired(
-                        user_id.to_string()
-                    )
+                status[
+                    "org.matrix.expired"
+                ] = await self._account_validity_handler.is_user_expired(
+                    user_id.to_string()
                 )
 
         return status
diff --git a/synapse/handlers/admin.py b/synapse/handlers/admin.py
index b44e862493d98eea98a3d0a61f4ddbc4a7212dec..c874d22eaceedffd6b5689a5187fb7cfcca6b71b 100644
--- a/synapse/handlers/admin.py
+++ b/synapse/handlers/admin.py
@@ -197,14 +197,15 @@ class AdminHandler:
             # events that we have and then filtering, this isn't the most
             # efficient method perhaps but it does guarantee we get everything.
             while True:
-                events, _ = (
-                    await self._store.paginate_room_events_by_topological_ordering(
-                        room_id=room_id,
-                        from_key=from_key,
-                        to_key=to_key,
-                        limit=100,
-                        direction=Direction.FORWARDS,
-                    )
+                (
+                    events,
+                    _,
+                ) = await self._store.paginate_room_events_by_topological_ordering(
+                    room_id=room_id,
+                    from_key=from_key,
+                    to_key=to_key,
+                    limit=100,
+                    direction=Direction.FORWARDS,
                 )
                 if not events:
                     break
diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py
index a1fab99f6b704609b81984665d1648abaf104662..1f4264ad7e75165ab78e643c5ded38e507528b59 100644
--- a/synapse/handlers/auth.py
+++ b/synapse/handlers/auth.py
@@ -166,8 +166,7 @@ def login_id_phone_to_thirdparty(identifier: JsonDict) -> Dict[str, str]:
     if "country" not in identifier or (
         # The specification requires a "phone" field, while Synapse used to require a "number"
         # field. Accept both for backwards compatibility.
-        "phone" not in identifier
-        and "number" not in identifier
+        "phone" not in identifier and "number" not in identifier
     ):
         raise SynapseError(
             400, "Invalid phone-type identifier", errcode=Codes.INVALID_PARAM
diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py
index ad2b0f5fcc974e08132d272cfd9d2058ad26f9e1..62ce16794f72a10e8a201cbfd35b59ac08ad97ec 100644
--- a/synapse/handlers/directory.py
+++ b/synapse/handlers/directory.py
@@ -265,9 +265,9 @@ class DirectoryHandler:
     async def get_association(self, room_alias: RoomAlias) -> JsonDict:
         room_id = None
         if self.hs.is_mine(room_alias):
-            result: Optional[RoomAliasMapping] = (
-                await self.get_association_from_room_alias(room_alias)
-            )
+            result: Optional[
+                RoomAliasMapping
+            ] = await self.get_association_from_room_alias(room_alias)
 
             if result:
                 room_id = result.room_id
@@ -512,11 +512,9 @@ class DirectoryHandler:
                 raise SynapseError(403, "Not allowed to publish room")
 
             # Check if publishing is blocked by a third party module
-            allowed_by_third_party_rules = (
-                await (
-                    self._third_party_event_rules.check_visibility_can_be_modified(
-                        room_id, visibility
-                    )
+            allowed_by_third_party_rules = await (
+                self._third_party_event_rules.check_visibility_can_be_modified(
+                    room_id, visibility
                 )
             )
             if not allowed_by_third_party_rules:
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index 299588e4768136807260e760b25ab642ab57b93f..2b7aad5b581658367624ffa77dfcb19b7ecc5b00 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -1001,11 +1001,11 @@ class FederationHandler:
                     )
 
                 if include_auth_user_id:
-                    event_content[EventContentFields.AUTHORISING_USER] = (
-                        await self._event_auth_handler.get_user_which_could_invite(
-                            room_id,
-                            state_ids,
-                        )
+                    event_content[
+                        EventContentFields.AUTHORISING_USER
+                    ] = await self._event_auth_handler.get_user_which_could_invite(
+                        room_id,
+                        state_ids,
                     )
 
         builder = self.event_builder_factory.for_room_version(
diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py
index cb31d65aa9c8a732848303dc3da6fbcb022da0fe..89191217d6a6a2303a3d7fe1bc9fb2803548304f 100644
--- a/synapse/handlers/identity.py
+++ b/synapse/handlers/identity.py
@@ -21,6 +21,7 @@
 #
 
 """Utilities for interacting with Identity Servers"""
+
 import logging
 import urllib.parse
 from typing import TYPE_CHECKING, Awaitable, Callable, Dict, List, Optional, Tuple
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index 5aa48230ec77892aebac60e9d8eee12dd8c3963c..204965afeec9425c920da60213b91348a18a4d49 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -1225,10 +1225,9 @@ class EventCreationHandler:
             )
 
         if prev_event_ids is not None:
-            assert (
-                len(prev_event_ids) <= 10
-            ), "Attempting to create an event with %i prev_events" % (
-                len(prev_event_ids),
+            assert len(prev_event_ids) <= 10, (
+                "Attempting to create an event with %i prev_events"
+                % (len(prev_event_ids),)
             )
         else:
             prev_event_ids = await self.store.get_prev_events_for_room(builder.room_id)
diff --git a/synapse/handlers/pagination.py b/synapse/handlers/pagination.py
index 6fd7afa2808cff7bdbc96831ae681b69ea728615..3c44458fa3c0e9a892ba7f68b30016d46bea4fce 100644
--- a/synapse/handlers/pagination.py
+++ b/synapse/handlers/pagination.py
@@ -507,15 +507,16 @@ class PaginationHandler:
 
         # Initially fetch the events from the database. With any luck, we can return
         # these without blocking on backfill (handled below).
-        events, next_key = (
-            await self.store.paginate_room_events_by_topological_ordering(
-                room_id=room_id,
-                from_key=from_token.room_key,
-                to_key=to_room_key,
-                direction=pagin_config.direction,
-                limit=pagin_config.limit,
-                event_filter=event_filter,
-            )
+        (
+            events,
+            next_key,
+        ) = await self.store.paginate_room_events_by_topological_ordering(
+            room_id=room_id,
+            from_key=from_token.room_key,
+            to_key=to_room_key,
+            direction=pagin_config.direction,
+            limit=pagin_config.limit,
+            event_filter=event_filter,
         )
 
         if pagin_config.direction == Direction.BACKWARDS:
@@ -584,15 +585,16 @@ class PaginationHandler:
                 # If we did backfill something, refetch the events from the database to
                 # catch anything new that might have been added since we last fetched.
                 if did_backfill:
-                    events, next_key = (
-                        await self.store.paginate_room_events_by_topological_ordering(
-                            room_id=room_id,
-                            from_key=from_token.room_key,
-                            to_key=to_room_key,
-                            direction=pagin_config.direction,
-                            limit=pagin_config.limit,
-                            event_filter=event_filter,
-                        )
+                    (
+                        events,
+                        next_key,
+                    ) = await self.store.paginate_room_events_by_topological_ordering(
+                        room_id=room_id,
+                        from_key=from_token.room_key,
+                        to_key=to_room_key,
+                        direction=pagin_config.direction,
+                        limit=pagin_config.limit,
+                        event_filter=event_filter,
                     )
             else:
                 # Otherwise, we can backfill in the background for eventual
diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py
index 37ee625f717705110868d8ee823d17ffcd5b41c8..390cafa8f637b1ebcdb160926b3d0c848f20b2d3 100644
--- a/synapse/handlers/presence.py
+++ b/synapse/handlers/presence.py
@@ -71,6 +71,7 @@ user state; this device follows the normal timeout logic (see above) and will
 automatically be replaced with any information from currently available devices.
 
 """
+
 import abc
 import contextlib
 import itertools
@@ -493,9 +494,9 @@ class WorkerPresenceHandler(BasePresenceHandler):
 
         # The number of ongoing syncs on this process, by (user ID, device ID).
         # Empty if _presence_enabled is false.
-        self._user_device_to_num_current_syncs: Dict[Tuple[str, Optional[str]], int] = (
-            {}
-        )
+        self._user_device_to_num_current_syncs: Dict[
+            Tuple[str, Optional[str]], int
+        ] = {}
 
         self.notifier = hs.get_notifier()
         self.instance_id = hs.get_instance_id()
@@ -818,9 +819,9 @@ class PresenceHandler(BasePresenceHandler):
 
         # Keeps track of the number of *ongoing* syncs on this process. While
         # this is non zero a user will never go offline.
-        self._user_device_to_num_current_syncs: Dict[Tuple[str, Optional[str]], int] = (
-            {}
-        )
+        self._user_device_to_num_current_syncs: Dict[
+            Tuple[str, Optional[str]], int
+        ] = {}
 
         # Keeps track of the number of *ongoing* syncs on other processes.
         #
diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py
index af8cd838eebb07e9f01e9ea0efc7ce83172de672..ac4544ca4c08ef0058f66ffcb7b36ab48ee23ccf 100644
--- a/synapse/handlers/profile.py
+++ b/synapse/handlers/profile.py
@@ -351,9 +351,9 @@ class ProfileHandler:
             server_name = host
 
         if self._is_mine_server_name(server_name):
-            media_info: Optional[Union[LocalMedia, RemoteMedia]] = (
-                await self.store.get_local_media(media_id)
-            )
+            media_info: Optional[
+                Union[LocalMedia, RemoteMedia]
+            ] = await self.store.get_local_media(media_id)
         else:
             media_info = await self.store.get_cached_remote_media(server_name, media_id)
 
diff --git a/synapse/handlers/relations.py b/synapse/handlers/relations.py
index efe31e81f98afdca8894cb3e04444c4500d5eb3f..b1158ee77d507b53b53e018b1c304406410b51da 100644
--- a/synapse/handlers/relations.py
+++ b/synapse/handlers/relations.py
@@ -188,13 +188,13 @@ class RelationsHandler:
         if include_original_event:
             # Do not bundle aggregations when retrieving the original event because
             # we want the content before relations are applied to it.
-            return_value["original_event"] = (
-                await self._event_serializer.serialize_event(
-                    event,
-                    now,
-                    bundle_aggregations=None,
-                    config=serialize_options,
-                )
+            return_value[
+                "original_event"
+            ] = await self._event_serializer.serialize_event(
+                event,
+                now,
+                bundle_aggregations=None,
+                config=serialize_options,
             )
 
         if next_token:
diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py
index 2c6e672ede8752119d2166e248e115f165c0dfb3..35c88f1b919be5ca5628b57f635b8eecd5c52746 100644
--- a/synapse/handlers/room.py
+++ b/synapse/handlers/room.py
@@ -20,6 +20,7 @@
 #
 
 """Contains functions for performing actions on rooms."""
+
 import itertools
 import logging
 import math
@@ -900,11 +901,9 @@ class RoomCreationHandler:
         )
 
         # Check whether this visibility value is blocked by a third party module
-        allowed_by_third_party_rules = (
-            await (
-                self._third_party_event_rules.check_visibility_can_be_modified(
-                    room_id, visibility
-                )
+        allowed_by_third_party_rules = await (
+            self._third_party_event_rules.check_visibility_can_be_modified(
+                room_id, visibility
             )
         )
         if not allowed_by_third_party_rules:
diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py
index 51b9772329f02c34cd7c818b62140bfc36885370..75c60e3c34de77dc1b20031fead7ec2ac27cb6b8 100644
--- a/synapse/handlers/room_member.py
+++ b/synapse/handlers/room_member.py
@@ -1302,11 +1302,11 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
         # If this is going to be a local join, additional information must
         # be included in the event content in order to efficiently validate
         # the event.
-        content[EventContentFields.AUTHORISING_USER] = (
-            await self.event_auth_handler.get_user_which_could_invite(
-                room_id,
-                state_before_join,
-            )
+        content[
+            EventContentFields.AUTHORISING_USER
+        ] = await self.event_auth_handler.get_user_which_could_invite(
+            room_id,
+            state_before_join,
         )
 
         return False, []
@@ -1415,9 +1415,9 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
 
         if requester is not None:
             sender = UserID.from_string(event.sender)
-            assert (
-                sender == requester.user
-            ), "Sender (%s) must be same as requester (%s)" % (sender, requester.user)
+            assert sender == requester.user, (
+                "Sender (%s) must be same as requester (%s)" % (sender, requester.user)
+            )
             assert self.hs.is_mine(sender), "Sender must be our own: %s" % (sender,)
         else:
             requester = types.create_requester(target_user)
diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py
index a7d52fa6483a80c6449b6a5a442e8e1ed752136b..1a71135d5fa75794c966c2041fdad8e41a85ba3a 100644
--- a/synapse/handlers/search.py
+++ b/synapse/handlers/search.py
@@ -423,9 +423,9 @@ class SearchHandler:
             }
 
         if search_result.room_groups and "room_id" in group_keys:
-            rooms_cat_res.setdefault("groups", {})[
-                "room_id"
-            ] = search_result.room_groups
+            rooms_cat_res.setdefault("groups", {})["room_id"] = (
+                search_result.room_groups
+            )
 
         if sender_group and "sender" in group_keys:
             rooms_cat_res.setdefault("groups", {})["sender"] = sender_group
diff --git a/synapse/handlers/sliding_sync/__init__.py b/synapse/handlers/sliding_sync/__init__.py
index d92bdad307c0db1abe1fd4213f7fb5e678b528ca..f79796a33687ba4a7b0379d5c0ae7dc4700629f7 100644
--- a/synapse/handlers/sliding_sync/__init__.py
+++ b/synapse/handlers/sliding_sync/__init__.py
@@ -587,9 +587,7 @@ class SlidingSyncHandler:
                 Membership.LEAVE,
                 Membership.BAN,
             ):
-                to_bound = (
-                    room_membership_for_user_at_to_token.event_pos.to_room_stream_token()
-                )
+                to_bound = room_membership_for_user_at_to_token.event_pos.to_room_stream_token()
 
             timeline_from_bound = from_bound
             if ignore_timeline_bound:
diff --git a/synapse/handlers/sliding_sync/extensions.py b/synapse/handlers/sliding_sync/extensions.py
index d9f4c56e6e08ca33e571f83bd84002405e3747c7..6f37cc3462f1d4265e2e1e1fd9548ff06c04e4b5 100644
--- a/synapse/handlers/sliding_sync/extensions.py
+++ b/synapse/handlers/sliding_sync/extensions.py
@@ -386,9 +386,9 @@ class SlidingSyncExtensionHandler:
             if have_push_rules_changed:
                 global_account_data_map = dict(global_account_data_map)
                 # TODO: This should take into account the `from_token` and `to_token`
-                global_account_data_map[AccountDataTypes.PUSH_RULES] = (
-                    await self.push_rules_handler.push_rules_for_user(sync_config.user)
-                )
+                global_account_data_map[
+                    AccountDataTypes.PUSH_RULES
+                ] = await self.push_rules_handler.push_rules_for_user(sync_config.user)
         else:
             # TODO: This should take into account the `to_token`
             all_global_account_data = await self.store.get_global_account_data_for_user(
@@ -397,9 +397,9 @@ class SlidingSyncExtensionHandler:
 
             global_account_data_map = dict(all_global_account_data)
             # TODO: This should take into account the  `to_token`
-            global_account_data_map[AccountDataTypes.PUSH_RULES] = (
-                await self.push_rules_handler.push_rules_for_user(sync_config.user)
-            )
+            global_account_data_map[
+                AccountDataTypes.PUSH_RULES
+            ] = await self.push_rules_handler.push_rules_for_user(sync_config.user)
 
         # Fetch room account data
         account_data_by_room_map: Mapping[str, Mapping[str, JsonMapping]] = {}
diff --git a/synapse/handlers/sliding_sync/room_lists.py b/synapse/handlers/sliding_sync/room_lists.py
index 12b7958c6f678cc6e4e1a8f9fc638bd51207b3c1..1423d6ca53a3745376187ea16ef85aff1c8ace99 100644
--- a/synapse/handlers/sliding_sync/room_lists.py
+++ b/synapse/handlers/sliding_sync/room_lists.py
@@ -293,10 +293,11 @@ class SlidingSyncRoomLists:
                         is_encrypted=is_encrypted,
                     )
 
-        newly_joined_room_ids, newly_left_room_map = (
-            await self._get_newly_joined_and_left_rooms(
-                user_id, from_token=from_token, to_token=to_token
-            )
+        (
+            newly_joined_room_ids,
+            newly_left_room_map,
+        ) = await self._get_newly_joined_and_left_rooms(
+            user_id, from_token=from_token, to_token=to_token
         )
         dm_room_ids = await self._get_dm_rooms_for_user(user_id)
 
@@ -958,10 +959,11 @@ class SlidingSyncRoomLists:
             else:
                 rooms_for_user[room_id] = change_room_for_user
 
-        newly_joined_room_ids, newly_left_room_ids = (
-            await self._get_newly_joined_and_left_rooms(
-                user_id, to_token=to_token, from_token=from_token
-            )
+        (
+            newly_joined_room_ids,
+            newly_left_room_ids,
+        ) = await self._get_newly_joined_and_left_rooms(
+            user_id, to_token=to_token, from_token=from_token
         )
 
         dm_room_ids = await self._get_dm_rooms_for_user(user_id)
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py
index c44baa7042de3b284277487d726bcfd750fd7988..609840bfe9d89014d8f4b003e6cdcd495fb4a743 100644
--- a/synapse/handlers/sync.py
+++ b/synapse/handlers/sync.py
@@ -183,10 +183,7 @@ class JoinedSyncResult:
         to tell if room needs to be part of the sync result.
         """
         return bool(
-            self.timeline
-            or self.state
-            or self.ephemeral
-            or self.account_data
+            self.timeline or self.state or self.ephemeral or self.account_data
             # nb the notification count does not, er, count: if there's nothing
             # else in the result, we don't need to send it.
         )
@@ -575,10 +572,10 @@ class SyncHandler:
         if timeout == 0 or since_token is None or full_state:
             # we are going to return immediately, so don't bother calling
             # notifier.wait_for_events.
-            result: Union[SyncResult, E2eeSyncResult] = (
-                await self.current_sync_for_user(
-                    sync_config, sync_version, since_token, full_state=full_state
-                )
+            result: Union[
+                SyncResult, E2eeSyncResult
+            ] = await self.current_sync_for_user(
+                sync_config, sync_version, since_token, full_state=full_state
             )
         else:
             # Otherwise, we wait for something to happen and report it to the user.
@@ -673,10 +670,10 @@ class SyncHandler:
 
             # Go through the `/sync` v2 path
             if sync_version == SyncVersion.SYNC_V2:
-                sync_result: Union[SyncResult, E2eeSyncResult] = (
-                    await self.generate_sync_result(
-                        sync_config, since_token, full_state
-                    )
+                sync_result: Union[
+                    SyncResult, E2eeSyncResult
+                ] = await self.generate_sync_result(
+                    sync_config, since_token, full_state
                 )
             # Go through the MSC3575 Sliding Sync `/sync/e2ee` path
             elif sync_version == SyncVersion.E2EE_SYNC:
@@ -1488,13 +1485,16 @@ class SyncHandler:
                     # timeline here. The caller will then dedupe any redundant
                     # ones.
 
-                    state_ids = await self._state_storage_controller.get_state_ids_for_event(
-                        batch.events[0].event_id,
-                        # we only want members!
-                        state_filter=StateFilter.from_types(
-                            (EventTypes.Member, member) for member in members_to_fetch
-                        ),
-                        await_full_state=False,
+                    state_ids = (
+                        await self._state_storage_controller.get_state_ids_for_event(
+                            batch.events[0].event_id,
+                            # we only want members!
+                            state_filter=StateFilter.from_types(
+                                (EventTypes.Member, member)
+                                for member in members_to_fetch
+                            ),
+                            await_full_state=False,
+                        )
                     )
             return state_ids
 
@@ -2166,18 +2166,18 @@ class SyncHandler:
 
             if push_rules_changed:
                 global_account_data = dict(global_account_data)
-                global_account_data[AccountDataTypes.PUSH_RULES] = (
-                    await self._push_rules_handler.push_rules_for_user(sync_config.user)
-                )
+                global_account_data[
+                    AccountDataTypes.PUSH_RULES
+                ] = await self._push_rules_handler.push_rules_for_user(sync_config.user)
         else:
             all_global_account_data = await self.store.get_global_account_data_for_user(
                 user_id
             )
 
             global_account_data = dict(all_global_account_data)
-            global_account_data[AccountDataTypes.PUSH_RULES] = (
-                await self._push_rules_handler.push_rules_for_user(sync_config.user)
-            )
+            global_account_data[
+                AccountDataTypes.PUSH_RULES
+            ] = await self._push_rules_handler.push_rules_for_user(sync_config.user)
 
         account_data_for_user = (
             await sync_config.filter_collection.filter_global_account_data(
diff --git a/synapse/handlers/worker_lock.py b/synapse/handlers/worker_lock.py
index 7e578cf46208a14037da1c03e469f49951e1fddd..db998f6701a8ae0b7ed7d0698e114452d2e8a6c1 100644
--- a/synapse/handlers/worker_lock.py
+++ b/synapse/handlers/worker_lock.py
@@ -183,7 +183,7 @@ class WorkerLocksHandler:
             return
 
         def _wake_all_locks(
-            locks: Collection[Union[WaitingLock, WaitingMultiLock]]
+            locks: Collection[Union[WaitingLock, WaitingMultiLock]],
         ) -> None:
             for lock in locks:
                 deferred = lock.deferred
diff --git a/synapse/http/client.py b/synapse/http/client.py
index cb4f72d7714f47fc4ee1cee4eb5abe6feeb1c58e..143fee979641687a077fe534ef32fa7030a510c0 100644
--- a/synapse/http/client.py
+++ b/synapse/http/client.py
@@ -1313,6 +1313,5 @@ def is_unknown_endpoint(
         )
     ) or (
         # Older Synapses returned a 400 error.
-        e.code == 400
-        and synapse_error.errcode == Codes.UNRECOGNIZED
+        e.code == 400 and synapse_error.errcode == Codes.UNRECOGNIZED
     )
diff --git a/synapse/http/server.py b/synapse/http/server.py
index 211795dc396a34d3352e6c8a729b1b682b2d2350..3e2d94d399a2f3a8ef5e47d890c09336460e411d 100644
--- a/synapse/http/server.py
+++ b/synapse/http/server.py
@@ -233,7 +233,7 @@ def return_html_error(
 
 
 def wrap_async_request_handler(
-    h: Callable[["_AsyncResource", "SynapseRequest"], Awaitable[None]]
+    h: Callable[["_AsyncResource", "SynapseRequest"], Awaitable[None]],
 ) -> Callable[["_AsyncResource", "SynapseRequest"], "defer.Deferred[None]"]:
     """Wraps an async request handler so that it calls request.processing.
 
diff --git a/synapse/logging/_terse_json.py b/synapse/logging/_terse_json.py
index 6a6afbfc0b723c1052d53753ffeed2a26fc32c27..d9ff70b2520e050f88da32a33a9153b9e86c5cee 100644
--- a/synapse/logging/_terse_json.py
+++ b/synapse/logging/_terse_json.py
@@ -22,6 +22,7 @@
 """
 Log formatters that output terse JSON.
 """
+
 import json
 import logging
 
diff --git a/synapse/logging/context.py b/synapse/logging/context.py
index 4650b60962d627e9fcb1379a0a6d96d0ad225991..ae2b3d11c0752236bebdeea14da74a7bab5ee4b8 100644
--- a/synapse/logging/context.py
+++ b/synapse/logging/context.py
@@ -20,7 +20,7 @@
 #
 #
 
-""" Thread-local-alike tracking of log contexts within synapse
+"""Thread-local-alike tracking of log contexts within synapse
 
 This module provides objects and utilities for tracking contexts through
 synapse code, so that log lines can include a request identifier, and so that
@@ -29,6 +29,7 @@ them.
 
 See doc/log_contexts.rst for details on how this works.
 """
+
 import logging
 import threading
 import typing
@@ -751,7 +752,7 @@ def preserve_fn(
     f: Union[
         Callable[P, R],
         Callable[P, Awaitable[R]],
-    ]
+    ],
 ) -> Callable[P, "defer.Deferred[R]"]:
     """Function decorator which wraps the function with run_in_background"""
 
diff --git a/synapse/logging/opentracing.py b/synapse/logging/opentracing.py
index e32b3f67810668c5ef1e709f26e8938f853e23f9..d976e58e49ee9578d140c06fc0fd23ff55b089f0 100644
--- a/synapse/logging/opentracing.py
+++ b/synapse/logging/opentracing.py
@@ -169,6 +169,7 @@ Gotchas
   than one caller? Will all of those calling functions have be in a context
   with an active span?
 """
+
 import contextlib
 import enum
 import inspect
@@ -414,7 +415,7 @@ def ensure_active_span(
     """
 
     def ensure_active_span_inner_1(
-        func: Callable[P, R]
+        func: Callable[P, R],
     ) -> Callable[P, Union[Optional[T], R]]:
         @wraps(func)
         def ensure_active_span_inner_2(
@@ -700,7 +701,7 @@ def set_operation_name(operation_name: str) -> None:
 
 @only_if_tracing
 def force_tracing(
-    span: Union["opentracing.Span", _Sentinel] = _Sentinel.sentinel
+    span: Union["opentracing.Span", _Sentinel] = _Sentinel.sentinel,
 ) -> None:
     """Force sampling for the active/given span and its children.
 
@@ -1093,9 +1094,10 @@ def trace_servlet(
 
             # Mypy seems to think that start_context.tag below can be Optional[str], but
             # that doesn't appear to be correct and works in practice.
-            request_tags[
-                SynapseTags.REQUEST_TAG
-            ] = request.request_metrics.start_context.tag  # type: ignore[assignment]
+
+            request_tags[SynapseTags.REQUEST_TAG] = (
+                request.request_metrics.start_context.tag  # type: ignore[assignment]
+            )
 
             # set the tags *after* the servlet completes, in case it decided to
             # prioritise the span (tags will get dropped on unprioritised spans)
diff --git a/synapse/metrics/background_process_metrics.py b/synapse/metrics/background_process_metrics.py
index 19c92b02a08baf013af6f88e942e93149d773bf7..49d0ff9fc196e9e323964b214011229963320b29 100644
--- a/synapse/metrics/background_process_metrics.py
+++ b/synapse/metrics/background_process_metrics.py
@@ -293,7 +293,7 @@ def wrap_as_background_process(
     """
 
     def wrap_as_background_process_inner(
-        func: Callable[P, Awaitable[Optional[R]]]
+        func: Callable[P, Awaitable[Optional[R]]],
     ) -> Callable[P, "defer.Deferred[Optional[R]]"]:
         @wraps(func)
         def wrap_as_background_process_inner_2(
diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py
index 34ab637c3d006d1845fad8fe62bff029889c8c7a..679cbe9afa0fdcf51d9180a67bf3b034c88513ce 100644
--- a/synapse/push/bulk_push_rule_evaluator.py
+++ b/synapse/push/bulk_push_rule_evaluator.py
@@ -304,9 +304,9 @@ class BulkPushRuleEvaluator:
                     if relation_type == "m.thread" and event.content.get(
                         "m.relates_to", {}
                     ).get("is_falling_back", False):
-                        related_events["m.in_reply_to"][
-                            "im.vector.is_falling_back"
-                        ] = ""
+                        related_events["m.in_reply_to"]["im.vector.is_falling_back"] = (
+                            ""
+                        )
 
         return related_events
 
@@ -372,7 +372,8 @@ class BulkPushRuleEvaluator:
                 gather_results(
                     (
                         run_in_background(  # type: ignore[call-arg]
-                            self.store.get_number_joined_users_in_room, event.room_id  # type: ignore[arg-type]
+                            self.store.get_number_joined_users_in_room,
+                            event.room_id,  # type: ignore[arg-type]
                         ),
                         run_in_background(
                             self._get_power_levels_and_sender_level,
diff --git a/synapse/replication/http/federation.py b/synapse/replication/http/federation.py
index 9c537427df53bbbb3d42d7d3741b471fa09691e9..940f418396c5b067e89be0293c7452070d5271c0 100644
--- a/synapse/replication/http/federation.py
+++ b/synapse/replication/http/federation.py
@@ -119,7 +119,9 @@ class ReplicationFederationSendEventsRestServlet(ReplicationEndpoint):
 
         return payload
 
-    async def _handle_request(self, request: Request, content: JsonDict) -> Tuple[int, JsonDict]:  # type: ignore[override]
+    async def _handle_request(  # type: ignore[override]
+        self, request: Request, content: JsonDict
+    ) -> Tuple[int, JsonDict]:
         with Measure(self.clock, "repl_fed_send_events_parse"):
             room_id = content["room_id"]
             backfilled = content["backfilled"]
diff --git a/synapse/replication/http/push.py b/synapse/replication/http/push.py
index de07e75b469a74aabe4d01800d7b0a0df7e4a3e0..2e06c43ce5960b7856cc49c81111956bbe95df69 100644
--- a/synapse/replication/http/push.py
+++ b/synapse/replication/http/push.py
@@ -98,7 +98,9 @@ class ReplicationCopyPusherRestServlet(ReplicationEndpoint):
         self._store = hs.get_datastores().main
 
     @staticmethod
-    async def _serialize_payload(user_id: str, old_room_id: str, new_room_id: str) -> JsonDict:  # type: ignore[override]
+    async def _serialize_payload(  # type: ignore[override]
+        user_id: str, old_room_id: str, new_room_id: str
+    ) -> JsonDict:
         return {}
 
     async def _handle_request(  # type: ignore[override]
@@ -109,7 +111,6 @@ class ReplicationCopyPusherRestServlet(ReplicationEndpoint):
         old_room_id: str,
         new_room_id: str,
     ) -> Tuple[int, JsonDict]:
-
         await self._store.copy_push_rules_from_room_to_room_for_user(
             old_room_id, new_room_id, user_id
         )
diff --git a/synapse/replication/tcp/client.py b/synapse/replication/tcp/client.py
index 3dddbb70b488559c477f200973c34f1b970df5f6..0bd5478cd355b02cd158b79b7a1bb1409399355f 100644
--- a/synapse/replication/tcp/client.py
+++ b/synapse/replication/tcp/client.py
@@ -18,8 +18,8 @@
 # [This file includes modifications made by New Vector Limited]
 #
 #
-"""A replication client for use by synapse workers.
-"""
+"""A replication client for use by synapse workers."""
+
 import logging
 from typing import TYPE_CHECKING, Dict, Iterable, Optional, Set, Tuple
 
diff --git a/synapse/replication/tcp/commands.py b/synapse/replication/tcp/commands.py
index b7a7e7759799e3ace3a9464556fe83451a80b3b6..7d51441e9145d49aea8b083dc3ede9cf018317de 100644
--- a/synapse/replication/tcp/commands.py
+++ b/synapse/replication/tcp/commands.py
@@ -23,6 +23,7 @@
 The VALID_SERVER_COMMANDS and VALID_CLIENT_COMMANDS define which commands are
 allowed to be sent by which side.
 """
+
 import abc
 import logging
 from typing import List, Optional, Tuple, Type, TypeVar
diff --git a/synapse/replication/tcp/handler.py b/synapse/replication/tcp/handler.py
index 72a42cb6cc4cb4a72605d8ff39a51e8aa674dd9d..61012269380fb7fff7ab85e126e343c5b8acc6e8 100644
--- a/synapse/replication/tcp/handler.py
+++ b/synapse/replication/tcp/handler.py
@@ -857,7 +857,7 @@ UpdateRow = TypeVar("UpdateRow")
 
 
 def _batch_updates(
-    updates: Iterable[Tuple[UpdateToken, UpdateRow]]
+    updates: Iterable[Tuple[UpdateToken, UpdateRow]],
 ) -> Iterator[Tuple[UpdateToken, List[UpdateRow]]]:
     """Collect stream updates with the same token together
 
diff --git a/synapse/replication/tcp/protocol.py b/synapse/replication/tcp/protocol.py
index 4471cc8f0c0a581aa822e2d2fac1d1374d27b42a..fb9c539122badc8ea01a49af1b2e89b7ce4b867b 100644
--- a/synapse/replication/tcp/protocol.py
+++ b/synapse/replication/tcp/protocol.py
@@ -23,6 +23,7 @@ protocols.
 
 An explanation of this protocol is available in docs/tcp_replication.md
 """
+
 import fcntl
 import logging
 import struct
diff --git a/synapse/replication/tcp/resource.py b/synapse/replication/tcp/resource.py
index c0329378ac052c1311f496bb441479b1e9e75494..d647a2b33268b798b7975d40d2d683427627a264 100644
--- a/synapse/replication/tcp/resource.py
+++ b/synapse/replication/tcp/resource.py
@@ -18,8 +18,7 @@
 # [This file includes modifications made by New Vector Limited]
 #
 #
-"""The server side of the replication stream.
-"""
+"""The server side of the replication stream."""
 
 import logging
 import random
@@ -307,7 +306,7 @@ class ReplicationStreamer:
 
 
 def _batch_updates(
-    updates: List[Tuple[Token, StreamRow]]
+    updates: List[Tuple[Token, StreamRow]],
 ) -> List[Tuple[Optional[Token], StreamRow]]:
     """Takes a list of updates of form [(token, row)] and sets the token to
     None for all rows where the next row has the same token. This is used to
diff --git a/synapse/replication/tcp/streams/_base.py b/synapse/replication/tcp/streams/_base.py
index d021904de72fa2405b0e99cf1e2979ab59de9e69..ebf5964d29c150c099553f66be7d955f7d259475 100644
--- a/synapse/replication/tcp/streams/_base.py
+++ b/synapse/replication/tcp/streams/_base.py
@@ -247,7 +247,7 @@ class _StreamFromIdGen(Stream):
 
 
 def current_token_without_instance(
-    current_token: Callable[[], int]
+    current_token: Callable[[], int],
 ) -> Callable[[str], int]:
     """Takes a current token callback function for a single writer stream
     that doesn't take an instance name parameter and wraps it in a function that
diff --git a/synapse/rest/admin/registration_tokens.py b/synapse/rest/admin/registration_tokens.py
index 0867f7a51c1c86cb511b32a1189f76451778a542..bec233159006bd52c9fb919598f8a2f20d7a1b61 100644
--- a/synapse/rest/admin/registration_tokens.py
+++ b/synapse/rest/admin/registration_tokens.py
@@ -181,8 +181,7 @@ class NewRegistrationTokenRestServlet(RestServlet):
 
         uses_allowed = body.get("uses_allowed", None)
         if not (
-            uses_allowed is None
-            or (type(uses_allowed) is int and uses_allowed >= 0)  # noqa: E721
+            uses_allowed is None or (type(uses_allowed) is int and uses_allowed >= 0)  # noqa: E721
         ):
             raise SynapseError(
                 HTTPStatus.BAD_REQUEST,
diff --git a/synapse/rest/client/_base.py b/synapse/rest/client/_base.py
index 93dec6375a6dc09210314a0faf321f734e304308..6cf37869d89062b2de6bced76ddadbc6dae10b78 100644
--- a/synapse/rest/client/_base.py
+++ b/synapse/rest/client/_base.py
@@ -19,8 +19,8 @@
 #
 #
 
-"""This module contains base REST classes for constructing client v1 servlets.
-"""
+"""This module contains base REST classes for constructing client v1 servlets."""
+
 import logging
 import re
 from typing import Any, Awaitable, Callable, Iterable, Pattern, Tuple, TypeVar, cast
diff --git a/synapse/rest/client/account_data.py b/synapse/rest/client/account_data.py
index 0ee24081fac62fb74f8dde88cdb0df776c1a4a07..734c9e992f55f9916d2954f56b8c28c6028594ca 100644
--- a/synapse/rest/client/account_data.py
+++ b/synapse/rest/client/account_data.py
@@ -108,9 +108,9 @@ class AccountDataServlet(RestServlet):
 
         # Push rules are stored in a separate table and must be queried separately.
         if account_data_type == AccountDataTypes.PUSH_RULES:
-            account_data: Optional[JsonMapping] = (
-                await self._push_rules_handler.push_rules_for_user(requester.user)
-            )
+            account_data: Optional[
+                JsonMapping
+            ] = await self._push_rules_handler.push_rules_for_user(requester.user)
         else:
             account_data = await self.store.get_global_account_data_by_type_for_user(
                 user_id, account_data_type
diff --git a/synapse/rest/client/account_validity.py b/synapse/rest/client/account_validity.py
index 6222a5cc37853d6121575ab4baf1cb765e53ed32..ec7836b647fa626e3aced7961ffa64d5bbf3d300 100644
--- a/synapse/rest/client/account_validity.py
+++ b/synapse/rest/client/account_validity.py
@@ -48,9 +48,7 @@ class AccountValidityRenewServlet(RestServlet):
         self.account_renewed_template = (
             hs.config.account_validity.account_validity_account_renewed_template
         )
-        self.account_previously_renewed_template = (
-            hs.config.account_validity.account_validity_account_previously_renewed_template
-        )
+        self.account_previously_renewed_template = hs.config.account_validity.account_validity_account_previously_renewed_template
         self.invalid_token_template = (
             hs.config.account_validity.account_validity_invalid_token_template
         )
diff --git a/synapse/rest/client/events.py b/synapse/rest/client/events.py
index 613890061eef7d491df6385c10900538dab13a87..ad23cc76ce0f90fd13fe3123bb87808c134d83ac 100644
--- a/synapse/rest/client/events.py
+++ b/synapse/rest/client/events.py
@@ -20,6 +20,7 @@
 #
 
 """This module contains REST servlets to do with event streaming, /events."""
+
 import logging
 from typing import TYPE_CHECKING, Dict, List, Tuple, Union
 
diff --git a/synapse/rest/client/presence.py b/synapse/rest/client/presence.py
index 572e92642c3c1bbe9248fa7d167e730976df7b2f..ecc52956e481bdafda257d5085b990837882c0c2 100644
--- a/synapse/rest/client/presence.py
+++ b/synapse/rest/client/presence.py
@@ -19,8 +19,8 @@
 #
 #
 
-""" This module contains REST servlets to do with presence: /presence/<paths>
-"""
+"""This module contains REST servlets to do with presence: /presence/<paths>"""
+
 import logging
 from typing import TYPE_CHECKING, Tuple
 
diff --git a/synapse/rest/client/profile.py b/synapse/rest/client/profile.py
index c1a80c5c3d51ff52358cb9846600ba0777ea04d5..7a95b9445d1ab134a1379b94fd32d45b248cee07 100644
--- a/synapse/rest/client/profile.py
+++ b/synapse/rest/client/profile.py
@@ -19,7 +19,7 @@
 #
 #
 
-""" This module contains REST servlets to do with profile: /profile/<paths> """
+"""This module contains REST servlets to do with profile: /profile/<paths>"""
 
 from http import HTTPStatus
 from typing import TYPE_CHECKING, Tuple
diff --git a/synapse/rest/client/register.py b/synapse/rest/client/register.py
index 5dddbc69be73e9b32ff7965c4969d9582b1c46f3..61e1436841855494e4fe23e1e32c3b387eca7a79 100644
--- a/synapse/rest/client/register.py
+++ b/synapse/rest/client/register.py
@@ -640,12 +640,10 @@ class RegisterRestServlet(RestServlet):
             if not password_hash:
                 raise SynapseError(400, "Missing params: password", Codes.MISSING_PARAM)
 
-            desired_username = (
-                await (
-                    self.password_auth_provider.get_username_for_registration(
-                        auth_result,
-                        params,
-                    )
+            desired_username = await (
+                self.password_auth_provider.get_username_for_registration(
+                    auth_result,
+                    params,
                 )
             )
 
@@ -696,11 +694,9 @@ class RegisterRestServlet(RestServlet):
                 session_id
             )
 
-            display_name = (
-                await (
-                    self.password_auth_provider.get_displayname_for_registration(
-                        auth_result, params
-                    )
+            display_name = await (
+                self.password_auth_provider.get_displayname_for_registration(
+                    auth_result, params
                 )
             )
 
diff --git a/synapse/rest/client/room.py b/synapse/rest/client/room.py
index 7d57904d69a96dd27e773ac2a37048e91599a78d..83f84e499890b5c8c0c3456a4ddc665cce3e01d5 100644
--- a/synapse/rest/client/room.py
+++ b/synapse/rest/client/room.py
@@ -19,7 +19,8 @@
 #
 #
 
-""" This module contains REST servlets to do with rooms: /rooms/<paths> """
+"""This module contains REST servlets to do with rooms: /rooms/<paths>"""
+
 import logging
 import re
 from enum import Enum
diff --git a/synapse/rest/client/sync.py b/synapse/rest/client/sync.py
index 22c85e497ac20585c138592f1c86fb2c420c8471..cc9fbfe5464ef0c6d48471bea3d61780465a50f8 100644
--- a/synapse/rest/client/sync.py
+++ b/synapse/rest/client/sync.py
@@ -1045,9 +1045,9 @@ class SlidingSyncRestServlet(RestServlet):
                 serialized_rooms[room_id]["initial"] = room_result.initial
 
             if room_result.unstable_expanded_timeline:
-                serialized_rooms[room_id][
-                    "unstable_expanded_timeline"
-                ] = room_result.unstable_expanded_timeline
+                serialized_rooms[room_id]["unstable_expanded_timeline"] = (
+                    room_result.unstable_expanded_timeline
+                )
 
             # This will be omitted for invite/knock rooms with `stripped_state`
             if (
@@ -1082,9 +1082,9 @@ class SlidingSyncRestServlet(RestServlet):
 
             # This will be omitted for invite/knock rooms with `stripped_state`
             if room_result.prev_batch is not None:
-                serialized_rooms[room_id]["prev_batch"] = (
-                    await room_result.prev_batch.to_string(self.store)
-                )
+                serialized_rooms[room_id][
+                    "prev_batch"
+                ] = await room_result.prev_batch.to_string(self.store)
 
             # This will be omitted for invite/knock rooms with `stripped_state`
             if room_result.num_live is not None:
diff --git a/synapse/rest/client/transactions.py b/synapse/rest/client/transactions.py
index 30c1f17fc6d51d6488db1c864b3a9e03264827bb..f791904168b1d36ede9b1f91b07fcc95655a8f3a 100644
--- a/synapse/rest/client/transactions.py
+++ b/synapse/rest/client/transactions.py
@@ -21,6 +21,7 @@
 
 """This module contains logic for storing HTTP PUT transactions. This is used
 to ensure idempotency when performing PUTs using the REST API."""
+
 import logging
 from typing import TYPE_CHECKING, Awaitable, Callable, Dict, Hashable, Tuple
 
diff --git a/synapse/rest/key/v2/remote_key_resource.py b/synapse/rest/key/v2/remote_key_resource.py
index 1975ebb47753a2cd1fb75ad8711fc445af79de72..3c2028a2ada022f589372b18ac2a3b4e580292d3 100644
--- a/synapse/rest/key/v2/remote_key_resource.py
+++ b/synapse/rest/key/v2/remote_key_resource.py
@@ -191,10 +191,10 @@ class RemoteKey(RestServlet):
         server_keys: Dict[Tuple[str, str], Optional[FetchKeyResultForRemote]] = {}
         for server_name, key_ids in query.items():
             if key_ids:
-                results: Mapping[str, Optional[FetchKeyResultForRemote]] = (
-                    await self.store.get_server_keys_json_for_remote(
-                        server_name, key_ids
-                    )
+                results: Mapping[
+                    str, Optional[FetchKeyResultForRemote]
+                ] = await self.store.get_server_keys_json_for_remote(
+                    server_name, key_ids
                 )
             else:
                 results = await self.store.get_all_server_keys_json_for_remote(
diff --git a/synapse/rest/well_known.py b/synapse/rest/well_known.py
index 989e570671b57d4232b402f862262d013752dc43..d336d60c93b91ccbee6edbf636f4ff020096dc70 100644
--- a/synapse/rest/well_known.py
+++ b/synapse/rest/well_known.py
@@ -65,9 +65,9 @@ class WellKnownBuilder:
             }
             account_management_url = await auth.account_management_url()
             if account_management_url is not None:
-                result["org.matrix.msc2965.authentication"][
-                    "account"
-                ] = account_management_url
+                result["org.matrix.msc2965.authentication"]["account"] = (
+                    account_management_url
+                )
 
         if self._config.server.extra_well_known_client_content:
             for (
diff --git a/synapse/server_notices/resource_limits_server_notices.py b/synapse/server_notices/resource_limits_server_notices.py
index f6ea90bd4f25380b340bd0d6cff9394b610b7d86..e88e8c9b453820956efee6b9fe45ff794f7b291d 100644
--- a/synapse/server_notices/resource_limits_server_notices.py
+++ b/synapse/server_notices/resource_limits_server_notices.py
@@ -119,7 +119,9 @@ class ResourceLimitsServerNotices:
             elif not currently_blocked and limit_msg:
                 # Room is not notifying of a block, when it ought to be.
                 await self._apply_limit_block_notification(
-                    user_id, limit_msg, limit_type  # type: ignore
+                    user_id,
+                    limit_msg,
+                    limit_type,  # type: ignore
                 )
         except SynapseError as e:
             logger.error("Error sending resource limits server notice: %s", e)
diff --git a/synapse/storage/controllers/persist_events.py b/synapse/storage/controllers/persist_events.py
index ac0919340b125c6debd5329b3dccf431e7b47207..879ee9039e19f3a53fc09afef9af2b66daaad122 100644
--- a/synapse/storage/controllers/persist_events.py
+++ b/synapse/storage/controllers/persist_events.py
@@ -416,7 +416,7 @@ class EventsPersistenceStorageController:
         set_tag(SynapseTags.FUNC_ARG_PREFIX + "backfilled", str(backfilled))
 
         async def enqueue(
-            item: Tuple[str, List[Tuple[EventBase, EventContext]]]
+            item: Tuple[str, List[Tuple[EventBase, EventContext]]],
         ) -> Dict[str, str]:
             room_id, evs_ctxs = item
             return await self._event_persist_queue.add_to_queue(
@@ -792,9 +792,9 @@ class EventsPersistenceStorageController:
         )
 
         # Remove any events which are prev_events of any existing events.
-        existing_prevs: Collection[str] = (
-            await self.persist_events_store._get_events_which_are_prevs(result)
-        )
+        existing_prevs: Collection[
+            str
+        ] = await self.persist_events_store._get_events_which_are_prevs(result)
         result.difference_update(existing_prevs)
 
         # Finally handle the case where the new events have soft-failed prev
diff --git a/synapse/storage/databases/main/client_ips.py b/synapse/storage/databases/main/client_ips.py
index 4b662476402d23bb01fe7d1638e9395803fcc062..bf6cfcbfd9d75ea488bf03300c31dcd1d2c83cbd 100644
--- a/synapse/storage/databases/main/client_ips.py
+++ b/synapse/storage/databases/main/client_ips.py
@@ -238,9 +238,7 @@ class ClientIpBackgroundUpdateStore(SQLBaseStore):
                 INNER JOIN user_ips USING (user_id, access_token, ip)
                 GROUP BY user_id, access_token, ip
                 HAVING count(*) > 1
-                """.format(
-                    clause
-                ),
+                """.format(clause),
                 args,
             )
             res = cast(
@@ -373,9 +371,7 @@ class ClientIpBackgroundUpdateStore(SQLBaseStore):
                     LIMIT ?
                 ) c
                 INNER JOIN user_ips AS u USING (user_id, device_id, last_seen)
-            """ % {
-                "where_clause": where_clause
-            }
+            """ % {"where_clause": where_clause}
             txn.execute(sql, where_args + [batch_size])
 
             rows = cast(List[Tuple[int, str, str, str, str]], txn.fetchall())
diff --git a/synapse/storage/databases/main/deviceinbox.py b/synapse/storage/databases/main/deviceinbox.py
index 042d595ea0214cf74808b22d9625277d54b05673..0612b82b9b766438601a2ea5deb0cc205fdccc2c 100644
--- a/synapse/storage/databases/main/deviceinbox.py
+++ b/synapse/storage/databases/main/deviceinbox.py
@@ -1116,7 +1116,7 @@ class DeviceInboxBackgroundUpdateStore(SQLBaseStore):
 
             txn.execute(sql, (start, stop))
 
-            destinations = {d for d, in txn}
+            destinations = {d for (d,) in txn}
             to_remove = set()
             for d in destinations:
                 try:
diff --git a/synapse/storage/databases/main/devices.py b/synapse/storage/databases/main/devices.py
index 53024bddc3e66aaec36be50ca260cece667d9a1f..a83df4075a39c609e41fe5421a45f549d15358b9 100644
--- a/synapse/storage/databases/main/devices.py
+++ b/synapse/storage/databases/main/devices.py
@@ -670,9 +670,7 @@ class DeviceWorkerStore(RoomMemberWorkerStore, EndToEndKeyWorkerStore):
                         result["keys"] = keys
 
                     device_display_name = None
-                    if (
-                        self.hs.config.federation.allow_device_name_lookup_over_federation
-                    ):
+                    if self.hs.config.federation.allow_device_name_lookup_over_federation:
                         device_display_name = device.display_name
                     if device_display_name:
                         result["device_display_name"] = device_display_name
@@ -917,7 +915,7 @@ class DeviceWorkerStore(RoomMemberWorkerStore, EndToEndKeyWorkerStore):
             from_key,
             to_key,
         )
-        return {u for u, in rows}
+        return {u for (u,) in rows}
 
     @cancellable
     async def get_users_whose_devices_changed(
@@ -968,7 +966,7 @@ class DeviceWorkerStore(RoomMemberWorkerStore, EndToEndKeyWorkerStore):
                     txn.database_engine, "user_id", chunk
                 )
                 txn.execute(sql % (clause,), [from_key, to_key] + args)
-                changes.update(user_id for user_id, in txn)
+                changes.update(user_id for (user_id,) in txn)
 
             return changes
 
@@ -1520,7 +1518,7 @@ class DeviceWorkerStore(RoomMemberWorkerStore, EndToEndKeyWorkerStore):
             args: List[Any],
         ) -> Set[str]:
             txn.execute(sql.format(clause=clause), args)
-            return {user_id for user_id, in txn}
+            return {user_id for (user_id,) in txn}
 
         changes = set()
         for chunk in batch_iter(changed_room_ids, 1000):
@@ -1560,7 +1558,7 @@ class DeviceWorkerStore(RoomMemberWorkerStore, EndToEndKeyWorkerStore):
             txn: LoggingTransaction,
         ) -> Set[str]:
             txn.execute(sql, (from_id, to_id))
-            return {room_id for room_id, in txn}
+            return {room_id for (room_id,) in txn}
 
         return await self.db_pool.runInteraction(
             "get_all_device_list_changes",
diff --git a/synapse/storage/databases/main/e2e_room_keys.py b/synapse/storage/databases/main/e2e_room_keys.py
index 4d6a921ab25589f3a96ccb8f99863a841eee11dc..c2c93e12d90e2d3219ba580300ba9bb8748a82ee 100644
--- a/synapse/storage/databases/main/e2e_room_keys.py
+++ b/synapse/storage/databases/main/e2e_room_keys.py
@@ -387,9 +387,7 @@ class EndToEndRoomKeyStore(EndToEndRoomKeyBackgroundStore):
                is_verified, session_data
         FROM e2e_room_keys
         WHERE user_id = ? AND version = ? AND (%s)
-        """ % (
-            " OR ".join(where_clauses)
-        )
+        """ % (" OR ".join(where_clauses))
 
         txn.execute(sql, params)
 
diff --git a/synapse/storage/databases/main/end_to_end_keys.py b/synapse/storage/databases/main/end_to_end_keys.py
index 9e6c9561ae80bb5f10561374e084f0521466a4d5..575aaf498baf96488af1dc38b565a2ea04bb1e0c 100644
--- a/synapse/storage/databases/main/end_to_end_keys.py
+++ b/synapse/storage/databases/main/end_to_end_keys.py
@@ -472,9 +472,7 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
         signature_sql = """
             SELECT user_id, key_id, target_device_id, signature
             FROM e2e_cross_signing_signatures WHERE %s
-            """ % (
-            " OR ".join("(" + q + ")" for q in signature_query_clauses)
-        )
+            """ % (" OR ".join("(" + q + ")" for q in signature_query_clauses))
 
         txn.execute(signature_sql, signature_query_params)
         return cast(
@@ -917,9 +915,7 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
                         FROM e2e_cross_signing_keys
                         WHERE %(clause)s
                         ORDER BY user_id, keytype, stream_id DESC
-                """ % {
-                    "clause": clause
-                }
+                """ % {"clause": clause}
             else:
                 # SQLite has special handling for bare columns when using
                 # MIN/MAX with a `GROUP BY` clause where it picks the value from
@@ -929,9 +925,7 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
                         FROM e2e_cross_signing_keys
                         WHERE %(clause)s
                         GROUP BY user_id, keytype
-                """ % {
-                    "clause": clause
-                }
+                """ % {"clause": clause}
 
             txn.execute(sql, params)
 
diff --git a/synapse/storage/databases/main/event_federation.py b/synapse/storage/databases/main/event_federation.py
index 715846865b0fcc74f6bbb6a1d2ea491ab0c781cf..46aa5902d8da1cf83c5d964d4f348020c5292cf9 100644
--- a/synapse/storage/databases/main/event_federation.py
+++ b/synapse/storage/databases/main/event_federation.py
@@ -326,7 +326,7 @@ class EventFederationWorkerStore(SignatureWorkerStore, EventsWorkerStore, SQLBas
             """
 
             rows = txn.execute_values(sql, chains.items())
-            results.update(r for r, in rows)
+            results.update(r for (r,) in rows)
         else:
             # For SQLite we just fall back to doing a noddy for loop.
             sql = """
@@ -335,7 +335,7 @@ class EventFederationWorkerStore(SignatureWorkerStore, EventsWorkerStore, SQLBas
             """
             for chain_id, max_no in chains.items():
                 txn.execute(sql, (chain_id, max_no))
-                results.update(r for r, in txn)
+                results.update(r for (r,) in txn)
 
         return results
 
@@ -645,7 +645,7 @@ class EventFederationWorkerStore(SignatureWorkerStore, EventsWorkerStore, SQLBas
             ]
 
             rows = txn.execute_values(sql, args)
-            result.update(r for r, in rows)
+            result.update(r for (r,) in rows)
         else:
             # For SQLite we just fall back to doing a noddy for loop.
             sql = """
@@ -654,7 +654,7 @@ class EventFederationWorkerStore(SignatureWorkerStore, EventsWorkerStore, SQLBas
             """
             for chain_id, (min_no, max_no) in chain_to_gap.items():
                 txn.execute(sql, (chain_id, min_no, max_no))
-                result.update(r for r, in txn)
+                result.update(r for (r,) in txn)
 
         return result
 
@@ -1220,13 +1220,11 @@ class EventFederationWorkerStore(SignatureWorkerStore, EventsWorkerStore, SQLBas
                 HAVING count(*) > ?
                 ORDER BY count(*) DESC
                 LIMIT ?
-            """ % (
-                where_clause,
-            )
+            """ % (where_clause,)
 
             query_args = list(itertools.chain(room_id_filter, [min_count, limit]))
             txn.execute(sql, query_args)
-            return [room_id for room_id, in txn]
+            return [room_id for (room_id,) in txn]
 
         return await self.db_pool.runInteraction(
             "get_rooms_with_many_extremities", _get_rooms_with_many_extremities_txn
@@ -1358,7 +1356,7 @@ class EventFederationWorkerStore(SignatureWorkerStore, EventsWorkerStore, SQLBas
 
         def get_forward_extremeties_for_room_txn(txn: LoggingTransaction) -> List[str]:
             txn.execute(sql, (stream_ordering, room_id))
-            return [event_id for event_id, in txn]
+            return [event_id for (event_id,) in txn]
 
         event_ids = await self.db_pool.runInteraction(
             "get_forward_extremeties_for_room", get_forward_extremeties_for_room_txn
diff --git a/synapse/storage/databases/main/event_push_actions.py b/synapse/storage/databases/main/event_push_actions.py
index 0ebf5b53d5e3d686d0ab52b2246161fac1d269e5..f42023418e2742b0bfac536d1d87f5131fe58499 100644
--- a/synapse/storage/databases/main/event_push_actions.py
+++ b/synapse/storage/databases/main/event_push_actions.py
@@ -1860,9 +1860,7 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
                     AND epa.notif = 1
                 ORDER BY epa.stream_ordering DESC
                 LIMIT ?
-            """ % (
-                before_clause,
-            )
+            """ % (before_clause,)
             txn.execute(sql, args)
             return cast(
                 List[Tuple[str, str, int, int, str, bool, str, int]], txn.fetchall()
diff --git a/synapse/storage/databases/main/events.py b/synapse/storage/databases/main/events.py
index e44b8d8e542531ec85718235dd85300486b65ea1..d423d80efa7bba5da6c001aebd3b4c276cb8f7f2 100644
--- a/synapse/storage/databases/main/events.py
+++ b/synapse/storage/databases/main/events.py
@@ -429,9 +429,7 @@ class PersistEventsStore:
             if event_type == EventTypes.Member and self.is_mine_id(state_key)
         ]
 
-        membership_snapshot_shared_insert_values: (
-            SlidingSyncMembershipSnapshotSharedInsertValues
-        ) = {}
+        membership_snapshot_shared_insert_values: SlidingSyncMembershipSnapshotSharedInsertValues = {}
         membership_infos_to_insert_membership_snapshots: List[
             SlidingSyncMembershipInfo
         ] = []
@@ -719,7 +717,7 @@ class PersistEventsStore:
             keyvalues={},
             retcols=("event_id",),
         )
-        already_persisted_events = {event_id for event_id, in rows}
+        already_persisted_events = {event_id for (event_id,) in rows}
         state_events = [
             event
             for event in state_events
@@ -1830,12 +1828,8 @@ class PersistEventsStore:
         if sliding_sync_table_changes.to_insert_membership_snapshots:
             # Update the `sliding_sync_membership_snapshots` table
             #
-            sliding_sync_snapshot_keys = (
-                sliding_sync_table_changes.membership_snapshot_shared_insert_values.keys()
-            )
-            sliding_sync_snapshot_values = (
-                sliding_sync_table_changes.membership_snapshot_shared_insert_values.values()
-            )
+            sliding_sync_snapshot_keys = sliding_sync_table_changes.membership_snapshot_shared_insert_values.keys()
+            sliding_sync_snapshot_values = sliding_sync_table_changes.membership_snapshot_shared_insert_values.values()
             # We need to insert/update regardless of whether we have
             # `sliding_sync_snapshot_keys` because there are other fields in the `ON
             # CONFLICT` upsert to run (see inherit case (explained in
@@ -3361,7 +3355,7 @@ class PersistEventsStore:
         )
 
         potential_backwards_extremities.difference_update(
-            e for e, in existing_events_outliers
+            e for (e,) in existing_events_outliers
         )
 
         if potential_backwards_extremities:
diff --git a/synapse/storage/databases/main/events_bg_updates.py b/synapse/storage/databases/main/events_bg_updates.py
index b227e057730d99aad39be4b35b19761fc085b1a4..4209100a5ca0a7b1baaf9045a0a999f331e1b703 100644
--- a/synapse/storage/databases/main/events_bg_updates.py
+++ b/synapse/storage/databases/main/events_bg_updates.py
@@ -647,7 +647,8 @@ class EventsBackgroundUpdatesStore(StreamWorkerStore, StateDeltasStore, SQLBaseS
                 room_ids = {row[0] for row in rows}
                 for room_id in room_ids:
                     txn.call_after(
-                        self.get_latest_event_ids_in_room.invalidate, (room_id,)  # type: ignore[attr-defined]
+                        self.get_latest_event_ids_in_room.invalidate,  # type: ignore[attr-defined]
+                        (room_id,),
                     )
 
             self.db_pool.simple_delete_many_txn(
@@ -2065,9 +2066,7 @@ class EventsBackgroundUpdatesStore(StreamWorkerStore, StateDeltasStore, SQLBaseS
                 )
 
             # Map of values to insert/update in the `sliding_sync_membership_snapshots` table
-            sliding_sync_membership_snapshots_insert_map: (
-                SlidingSyncMembershipSnapshotSharedInsertValues
-            ) = {}
+            sliding_sync_membership_snapshots_insert_map: SlidingSyncMembershipSnapshotSharedInsertValues = {}
             if membership == Membership.JOIN:
                 # If we're still joined, we can pull from current state.
                 current_state_ids_map: StateMap[
@@ -2149,14 +2148,15 @@ class EventsBackgroundUpdatesStore(StreamWorkerStore, StateDeltasStore, SQLBaseS
                 # membership (i.e. the room shouldn't disappear if your using the
                 # `is_encrypted` filter and you leave).
                 if membership in (Membership.LEAVE, Membership.BAN) and is_outlier:
-                    invite_or_knock_event_id, invite_or_knock_membership = (
-                        await self.db_pool.runInteraction(
-                            "sliding_sync_membership_snapshots_bg_update._find_previous_membership",
-                            _find_previous_membership_txn,
-                            room_id,
-                            user_id,
-                            membership_event_id,
-                        )
+                    (
+                        invite_or_knock_event_id,
+                        invite_or_knock_membership,
+                    ) = await self.db_pool.runInteraction(
+                        "sliding_sync_membership_snapshots_bg_update._find_previous_membership",
+                        _find_previous_membership_txn,
+                        room_id,
+                        user_id,
+                        membership_event_id,
                     )
 
                 # Pull from the stripped state on the invite/knock event
@@ -2484,9 +2484,7 @@ def _resolve_stale_data_in_sliding_sync_joined_rooms_table(
                 "progress_json": "{}",
             },
         )
-        depends_on = (
-            _BackgroundUpdates.SLIDING_SYNC_PREFILL_JOINED_ROOMS_TO_RECALCULATE_TABLE_BG_UPDATE
-        )
+        depends_on = _BackgroundUpdates.SLIDING_SYNC_PREFILL_JOINED_ROOMS_TO_RECALCULATE_TABLE_BG_UPDATE
 
     # Now kick-off the background update to catch-up with what we missed while Synapse
     # was downgraded.
diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py
index 1d833908279da64cdca09323318f92ad6859fd25..b188f32927d8640f0cd1a02518c6f0f1a2973148 100644
--- a/synapse/storage/databases/main/events_worker.py
+++ b/synapse/storage/databases/main/events_worker.py
@@ -1665,7 +1665,7 @@ class EventsWorkerStore(SQLBaseStore):
                 txn.database_engine, "e.event_id", event_ids
             )
             txn.execute(sql + clause, args)
-            found_events = {eid for eid, in txn}
+            found_events = {eid for (eid,) in txn}
 
             # ... and then we can update the results for each key
             return {eid: (eid in found_events) for eid in event_ids}
@@ -1864,9 +1864,9 @@ class EventsWorkerStore(SQLBaseStore):
                 " LIMIT ?"
             )
             txn.execute(sql, (-last_id, -current_id, instance_name, limit))
-            new_event_updates: List[Tuple[int, Tuple[str, str, str, str, str, str]]] = (
-                []
-            )
+            new_event_updates: List[
+                Tuple[int, Tuple[str, str, str, str, str, str]]
+            ] = []
             row: Tuple[int, str, str, str, str, str, str]
             # Type safety: iterating over `txn` yields `Tuple`, i.e.
             # `Tuple[Any, ...]` of arbitrary length. Mypy detects assigning a
diff --git a/synapse/storage/databases/main/purge_events.py b/synapse/storage/databases/main/purge_events.py
index fc4c2865951e7540adeff7919a8093041e722e75..08244153a39114daf0a2b30360bb868c73a43218 100644
--- a/synapse/storage/databases/main/purge_events.py
+++ b/synapse/storage/databases/main/purge_events.py
@@ -201,7 +201,7 @@ class PurgeEventsStore(StateGroupWorkerStore, CacheInvalidationWorkerStore):
         txn.execute_batch(
             "INSERT INTO event_backward_extremities (room_id, event_id)"
             " VALUES (?, ?)",
-            [(room_id, event_id) for event_id, in new_backwards_extrems],
+            [(room_id, event_id) for (event_id,) in new_backwards_extrems],
         )
 
         logger.info("[purge] finding state groups referenced by deleted events")
@@ -215,7 +215,7 @@ class PurgeEventsStore(StateGroupWorkerStore, CacheInvalidationWorkerStore):
         """
         )
 
-        referenced_state_groups = {sg for sg, in txn}
+        referenced_state_groups = {sg for (sg,) in txn}
         logger.info(
             "[purge] found %i referenced state groups", len(referenced_state_groups)
         )
diff --git a/synapse/storage/databases/main/receipts.py b/synapse/storage/databases/main/receipts.py
index bf1074357413dcca20859f44fd6222d3c6c3c783..996433151075810862a536af28e94aad8269b730 100644
--- a/synapse/storage/databases/main/receipts.py
+++ b/synapse/storage/databases/main/receipts.py
@@ -762,7 +762,7 @@ class ReceiptsWorkerStore(SQLBaseStore):
 
             txn.execute(sql, args)
 
-            return [room_id for room_id, in txn]
+            return [room_id for (room_id,) in txn]
 
         results: List[str] = []
         for batch in batch_iter(room_ids, 1000):
@@ -1030,9 +1030,7 @@ class ReceiptsWorkerStore(SQLBaseStore):
             SELECT event_id WHERE room_id = ? AND stream_ordering IN (
                 SELECT max(stream_ordering) WHERE %s
             )
-        """ % (
-            clause,
-        )
+        """ % (clause,)
 
         txn.execute(sql, [room_id] + list(args))
         rows = txn.fetchall()
diff --git a/synapse/storage/databases/main/registration.py b/synapse/storage/databases/main/registration.py
index df7f8a43b70f817c18e4d87950ed42c509c9b5a0..d7cbe3341182121520a44481ddf9829838f7ffb2 100644
--- a/synapse/storage/databases/main/registration.py
+++ b/synapse/storage/databases/main/registration.py
@@ -1250,9 +1250,7 @@ class RegistrationWorkerStore(CacheInvalidationWorkerStore):
                 SELECT address, session_id, medium, client_secret,
                 last_send_attempt, validated_at
                 FROM threepid_validation_session WHERE %s
-                """ % (
-                " AND ".join("%s = ?" % k for k in keyvalues.keys()),
-            )
+                """ % (" AND ".join("%s = ?" % k for k in keyvalues.keys()),)
 
             if validated is not None:
                 sql += " AND validated_at IS " + ("NOT NULL" if validated else "NULL")
diff --git a/synapse/storage/databases/main/room.py b/synapse/storage/databases/main/room.py
index 80a4bf95f2fcfa2c855842c2ce6432f70ac822b9..68b080604188808521a18a053d02a7074968daf4 100644
--- a/synapse/storage/databases/main/room.py
+++ b/synapse/storage/databases/main/room.py
@@ -1608,9 +1608,7 @@ class RoomWorkerStore(CacheInvalidationWorkerStore):
                 FROM event_reports AS er
                 JOIN room_stats_state ON room_stats_state.room_id = er.room_id
                 {}
-                """.format(
-                where_clause
-            )
+                """.format(where_clause)
             txn.execute(sql, args)
             count = cast(Tuple[int], txn.fetchone())[0]
 
diff --git a/synapse/storage/databases/main/roommember.py b/synapse/storage/databases/main/roommember.py
index 57b9b95c281803b8a1e7c1c94d53b460d2c0f785..3d834b4bf1d33a1c267739f346e0e863d3c8f8ea 100644
--- a/synapse/storage/databases/main/roommember.py
+++ b/synapse/storage/databases/main/roommember.py
@@ -232,9 +232,7 @@ class RoomMemberWorkerStore(EventsWorkerStore, CacheInvalidationWorkerStore):
                 AND m.room_id = c.room_id
                 AND m.user_id = c.state_key
                 WHERE c.type = 'm.room.member' AND c.room_id = ? AND m.membership = ? AND %s
-            """ % (
-                clause,
-            )
+            """ % (clause,)
             txn.execute(sql, (room_id, Membership.JOIN, *ids))
 
             return {r[0]: ProfileInfo(display_name=r[1], avatar_url=r[2]) for r in txn}
@@ -531,9 +529,7 @@ class RoomMemberWorkerStore(EventsWorkerStore, CacheInvalidationWorkerStore):
             WHERE
                 user_id = ?
                 AND %s
-        """ % (
-            clause,
-        )
+        """ % (clause,)
 
         txn.execute(sql, (user_id, *args))
         results = [
@@ -813,7 +809,7 @@ class RoomMemberWorkerStore(EventsWorkerStore, CacheInvalidationWorkerStore):
             """
 
             txn.execute(sql, (user_id, *args))
-            return {u: True for u, in txn}
+            return {u: True for (u,) in txn}
 
         to_return = {}
         for batch_user_ids in batch_iter(other_user_ids, 1000):
@@ -1031,7 +1027,7 @@ class RoomMemberWorkerStore(EventsWorkerStore, CacheInvalidationWorkerStore):
                     AND room_id = ?
             """
             txn.execute(sql, (room_id,))
-            return {d for d, in txn}
+            return {d for (d,) in txn}
 
         return await self.db_pool.runInteraction(
             "get_current_hosts_in_room", get_current_hosts_in_room_txn
@@ -1099,7 +1095,7 @@ class RoomMemberWorkerStore(EventsWorkerStore, CacheInvalidationWorkerStore):
             """
             txn.execute(sql, (room_id,))
             # `server_domain` will be `NULL` for malformed MXIDs with no colons.
-            return tuple(d for d, in txn if d is not None)
+            return tuple(d for (d,) in txn if d is not None)
 
         return await self.db_pool.runInteraction(
             "get_current_hosts_in_room_ordered", get_current_hosts_in_room_ordered_txn
@@ -1316,9 +1312,7 @@ class RoomMemberWorkerStore(EventsWorkerStore, CacheInvalidationWorkerStore):
                 room_id = ? AND membership = ?
                 AND NOT (%s)
                 LIMIT 1
-        """ % (
-            clause,
-        )
+        """ % (clause,)
 
         def _is_local_host_in_room_ignoring_users_txn(
             txn: LoggingTransaction,
@@ -1464,10 +1458,12 @@ class RoomMemberBackgroundUpdateStore(SQLBaseStore):
         self, progress: JsonDict, batch_size: int
     ) -> int:
         target_min_stream_id = progress.get(
-            "target_min_stream_id_inclusive", self._min_stream_order_on_start  # type: ignore[attr-defined]
+            "target_min_stream_id_inclusive",
+            self._min_stream_order_on_start,  # type: ignore[attr-defined]
         )
         max_stream_id = progress.get(
-            "max_stream_id_exclusive", self._stream_order_on_start + 1  # type: ignore[attr-defined]
+            "max_stream_id_exclusive",
+            self._stream_order_on_start + 1,  # type: ignore[attr-defined]
         )
 
         def add_membership_profile_txn(txn: LoggingTransaction) -> int:
diff --git a/synapse/storage/databases/main/search.py b/synapse/storage/databases/main/search.py
index 20fcfd3122d5d0c80de6b1e2eb4fdafef9fc683c..b436275f3fda0b2a516a2453c9fdd2f9862dd6ae 100644
--- a/synapse/storage/databases/main/search.py
+++ b/synapse/storage/databases/main/search.py
@@ -177,9 +177,7 @@ class SearchBackgroundUpdateStore(SearchWorkerStore):
             AND (%s)
             ORDER BY stream_ordering DESC
             LIMIT ?
-            """ % (
-                " OR ".join("type = '%s'" % (t,) for t in TYPES),
-            )
+            """ % (" OR ".join("type = '%s'" % (t,) for t in TYPES),)
 
             txn.execute(sql, (target_min_stream_id, max_stream_id, batch_size))
 
diff --git a/synapse/storage/databases/main/state.py b/synapse/storage/databases/main/state.py
index 62bc4600fb252839ab365cf2c076f1d2020e8d78..c5caaf56b05314d3dcb94119297232aba00096e5 100644
--- a/synapse/storage/databases/main/state.py
+++ b/synapse/storage/databases/main/state.py
@@ -535,7 +535,7 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore):
             desc="check_if_events_in_current_state",
         )
 
-        return frozenset(event_id for event_id, in rows)
+        return frozenset(event_id for (event_id,) in rows)
 
     # FIXME: how should this be cached?
     @cancellable
diff --git a/synapse/storage/databases/main/stats.py b/synapse/storage/databases/main/stats.py
index e9f6a918c7ac5cc6f0a74afb6e1c595e88fbc7da..79c49e7fd93255c376e8095ecd9e5019c2e9fda5 100644
--- a/synapse/storage/databases/main/stats.py
+++ b/synapse/storage/databases/main/stats.py
@@ -161,7 +161,7 @@ class StatsStore(StateDeltasStore):
                     LIMIT ?
                 """
             txn.execute(sql, (last_user_id, batch_size))
-            return [r for r, in txn]
+            return [r for (r,) in txn]
 
         users_to_work_on = await self.db_pool.runInteraction(
             "_populate_stats_process_users", _get_next_batch
@@ -207,7 +207,7 @@ class StatsStore(StateDeltasStore):
                     LIMIT ?
                 """
             txn.execute(sql, (last_room_id, batch_size))
-            return [r for r, in txn]
+            return [r for (r,) in txn]
 
         rooms_to_work_on = await self.db_pool.runInteraction(
             "populate_stats_rooms_get_batch", _get_next_batch
@@ -751,9 +751,7 @@ class StatsStore(StateDeltasStore):
                 LEFT JOIN profiles AS p ON lmr.user_id = p.full_user_id
                 {}
                 GROUP BY lmr.user_id, displayname
-            """.format(
-                where_clause
-            )
+            """.format(where_clause)
 
             # SQLite does not support SELECT COUNT(*) OVER()
             sql = """
diff --git a/synapse/storage/databases/main/stream.py b/synapse/storage/databases/main/stream.py
index 1a59e0b5a8d434496e0ac8d7cea8eb203341bbb4..68d41686219432620e29c415c656eec94809a3f6 100644
--- a/synapse/storage/databases/main/stream.py
+++ b/synapse/storage/databases/main/stream.py
@@ -21,7 +21,7 @@
 #
 #
 
-""" This module is responsible for getting events from the DB for pagination
+"""This module is responsible for getting events from the DB for pagination
 and event streaming.
 
 The order it returns events in depend on whether we are streaming forwards or
@@ -1122,9 +1122,7 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore):
                     AND e.stream_ordering > ? AND e.stream_ordering <= ?
                     %s
                 ORDER BY e.stream_ordering ASC
-            """ % (
-                ignore_room_clause,
-            )
+            """ % (ignore_room_clause,)
 
             txn.execute(sql, args)
 
diff --git a/synapse/storage/databases/main/user_directory.py b/synapse/storage/databases/main/user_directory.py
index 6e18f714d750625f8bab373b3d81e7f4f421e5a3..51cffb09865b74b4d6b2b34e51b527a4d3d2c6cd 100644
--- a/synapse/storage/databases/main/user_directory.py
+++ b/synapse/storage/databases/main/user_directory.py
@@ -224,9 +224,7 @@ class UserDirectoryBackgroundUpdateStore(StateDeltasStore):
                 SELECT room_id, events FROM %s
                 ORDER BY events DESC
                 LIMIT 250
-            """ % (
-                TEMP_TABLE + "_rooms",
-            )
+            """ % (TEMP_TABLE + "_rooms",)
             txn.execute(sql)
             rooms_to_work_on = cast(List[Tuple[str, int]], txn.fetchall())
 
diff --git a/synapse/storage/databases/state/store.py b/synapse/storage/databases/state/store.py
index d4ac74c1ee525b067f6d8a10c05b19b01045a40c..aea71b8fcc5aeb367b763bc9ace5e76c1a1c5090 100644
--- a/synapse/storage/databases/state/store.py
+++ b/synapse/storage/databases/state/store.py
@@ -767,7 +767,7 @@ class StateGroupDataStore(StateBackgroundUpdateStore, SQLBaseStore):
 
         remaining_state_groups = {
             state_group
-            for state_group, in rows
+            for (state_group,) in rows
             if state_group not in state_groups_to_delete
         }
 
diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py
index aaffe5ecc9ebd88ff93bafa4229f5d1f25d5addf..bf087702ea9dc092984f9073bf0a2e69f79b6849 100644
--- a/synapse/storage/prepare_database.py
+++ b/synapse/storage/prepare_database.py
@@ -607,7 +607,7 @@ def _apply_module_schema_files(
         "SELECT file FROM applied_module_schemas WHERE module_name = ?",
         (modname,),
     )
-    applied_deltas = {d for d, in cur}
+    applied_deltas = {d for (d,) in cur}
     for name, stream in names_and_streams:
         if name in applied_deltas:
             continue
@@ -710,7 +710,7 @@ def _get_or_create_schema_state(
         "SELECT file FROM applied_schema_deltas WHERE version >= ?",
         (current_version,),
     )
-    applied_deltas = tuple(d for d, in txn)
+    applied_deltas = tuple(d for (d,) in txn)
 
     return _SchemaState(
         current_version=current_version,
diff --git a/synapse/storage/schema/main/delta/56/unique_user_filter_index.py b/synapse/storage/schema/main/delta/56/unique_user_filter_index.py
index 2461f87d77276af703d3e851f2b4627b197368cb..b7535dae14b95510adbf4706c44d039e356a6dac 100644
--- a/synapse/storage/schema/main/delta/56/unique_user_filter_index.py
+++ b/synapse/storage/schema/main/delta/56/unique_user_filter_index.py
@@ -41,8 +41,6 @@ def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) ->
                 (user_id, filter_id);
             DROP TABLE user_filters;
             ALTER TABLE user_filters_migration RENAME TO user_filters;
-        """ % (
-        select_clause,
-    )
+        """ % (select_clause,)
 
     execute_statements_from_stream(cur, StringIO(sql))
diff --git a/synapse/storage/schema/main/delta/61/03recreate_min_depth.py b/synapse/storage/schema/main/delta/61/03recreate_min_depth.py
index 5d3578eaf40b08e923efa51f2fc307fddcc71e06..a847ef4147b3c5015532aced175c8404d2960fbf 100644
--- a/synapse/storage/schema/main/delta/61/03recreate_min_depth.py
+++ b/synapse/storage/schema/main/delta/61/03recreate_min_depth.py
@@ -23,6 +23,7 @@
 This migration handles the process of changing the type of `room_depth.min_depth` to
 a BIGINT.
 """
+
 from synapse.storage.database import LoggingTransaction
 from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine
 
diff --git a/synapse/storage/schema/main/delta/68/05partial_state_rooms_triggers.py b/synapse/storage/schema/main/delta/68/05partial_state_rooms_triggers.py
index b4d4b6536bd35781c4e1bc6d1a29db8d0995ad03..9ac3d1d31f07de5f5979df2ac9fd2e7731f68510 100644
--- a/synapse/storage/schema/main/delta/68/05partial_state_rooms_triggers.py
+++ b/synapse/storage/schema/main/delta/68/05partial_state_rooms_triggers.py
@@ -25,6 +25,7 @@ This migration adds triggers to the partial_state_events tables to enforce uniqu
 
 Triggers cannot be expressed in .sql files, so we have to use a separate file.
 """
+
 from synapse.storage.database import LoggingTransaction
 from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine, Sqlite3Engine
 
diff --git a/synapse/storage/schema/main/delta/72/07force_update_current_state_events_membership.py b/synapse/storage/schema/main/delta/72/07force_update_current_state_events_membership.py
index 93543fca7c4806f42b5d1a940ca0d86e9b8775e0..be80a6747da06016343a7dc41465fe5804b96c74 100644
--- a/synapse/storage/schema/main/delta/72/07force_update_current_state_events_membership.py
+++ b/synapse/storage/schema/main/delta/72/07force_update_current_state_events_membership.py
@@ -26,6 +26,7 @@ for its completion can be removed.
 
 Note the background job must still remain defined in the database class.
 """
+
 from synapse.config.homeserver import HomeServerConfig
 from synapse.storage.database import LoggingTransaction
 from synapse.storage.engines import BaseDatabaseEngine
diff --git a/synapse/storage/schema/main/delta/74/04_membership_tables_event_stream_ordering_triggers.py b/synapse/storage/schema/main/delta/74/04_membership_tables_event_stream_ordering_triggers.py
index 6609ef0dac81d83d57ebe0977a74cbd6f6709303..a847a93494090a126bf50b425e147c635ab37a2a 100644
--- a/synapse/storage/schema/main/delta/74/04_membership_tables_event_stream_ordering_triggers.py
+++ b/synapse/storage/schema/main/delta/74/04_membership_tables_event_stream_ordering_triggers.py
@@ -24,6 +24,7 @@
 This migration adds triggers to the room membership tables to enforce consistency.
 Triggers cannot be expressed in .sql files, so we have to use a separate file.
 """
+
 from synapse.storage.database import LoggingTransaction
 from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine, Sqlite3Engine
 
diff --git a/synapse/storage/schema/main/delta/78/03event_extremities_constraints.py b/synapse/storage/schema/main/delta/78/03event_extremities_constraints.py
index ad9c3941624ee4c44be285e5429068f089203ea0..1c823a3aa1c6ac020ade1eb3e41bfc7f6d72bbdc 100644
--- a/synapse/storage/schema/main/delta/78/03event_extremities_constraints.py
+++ b/synapse/storage/schema/main/delta/78/03event_extremities_constraints.py
@@ -23,6 +23,7 @@
 """
 This migration adds foreign key constraint to `event_forward_extremities` table.
 """
+
 from synapse.storage.background_updates import (
     ForeignKeyConstraint,
     run_validate_constraint_and_delete_rows_schema_delta,
diff --git a/synapse/types/__init__.py b/synapse/types/__init__.py
index 5259550f1c5ddab746698ad4916fd06f74835752..26783c56221c44af9494d7a8c46e4582c9880bc2 100644
--- a/synapse/types/__init__.py
+++ b/synapse/types/__init__.py
@@ -1308,7 +1308,7 @@ class DeviceListUpdates:
 
 
 def get_verify_key_from_cross_signing_key(
-    key_info: Mapping[str, Any]
+    key_info: Mapping[str, Any],
 ) -> Tuple[str, VerifyKey]:
     """Get the key ID and signedjson verify key from a cross-signing key dict
 
diff --git a/synapse/types/rest/client/__init__.py b/synapse/types/rest/client/__init__.py
index 93b537ab7b058428575f77c6be04acf0bd1e0e1b..9f6fb087c1fcc35cbe7cf3835c4c37e23e3c8b0c 100644
--- a/synapse/types/rest/client/__init__.py
+++ b/synapse/types/rest/client/__init__.py
@@ -268,7 +268,9 @@ class SlidingSyncBody(RequestBodyModel):
         if TYPE_CHECKING:
             ranges: Optional[List[Tuple[int, int]]] = None
         else:
-            ranges: Optional[List[Tuple[conint(ge=0, strict=True), conint(ge=0, strict=True)]]] = None  # type: ignore[valid-type]
+            ranges: Optional[
+                List[Tuple[conint(ge=0, strict=True), conint(ge=0, strict=True)]]
+            ] = None  # type: ignore[valid-type]
         slow_get_all_rooms: Optional[StrictBool] = False
         filters: Optional[Filters] = None
 
@@ -388,7 +390,9 @@ class SlidingSyncBody(RequestBodyModel):
     if TYPE_CHECKING:
         lists: Optional[Dict[str, SlidingSyncList]] = None
     else:
-        lists: Optional[Dict[constr(max_length=64, strict=True), SlidingSyncList]] = None  # type: ignore[valid-type]
+        lists: Optional[Dict[constr(max_length=64, strict=True), SlidingSyncList]] = (
+            None  # type: ignore[valid-type]
+        )
     room_subscriptions: Optional[Dict[StrictStr, RoomSubscription]] = None
     extensions: Optional[Extensions] = None
 
diff --git a/synapse/types/state.py b/synapse/types/state.py
index c958a957015bbd5b3c3e8ceecf28f565fceffbc3..1141c4b5c1f526f843a8513c1b323709a32d5be7 100644
--- a/synapse/types/state.py
+++ b/synapse/types/state.py
@@ -503,13 +503,19 @@ class StateFilter:
         #   - if so, which event types are excluded? ('excludes')
         #   - which entire event types to include ('wildcards')
         #   - which concrete state keys to include ('concrete state keys')
-        (self_all, self_excludes), (
-            self_wildcards,
-            self_concrete_keys,
+        (
+            (self_all, self_excludes),
+            (
+                self_wildcards,
+                self_concrete_keys,
+            ),
         ) = self._decompose_into_four_parts()
-        (other_all, other_excludes), (
-            other_wildcards,
-            other_concrete_keys,
+        (
+            (other_all, other_excludes),
+            (
+                other_wildcards,
+                other_concrete_keys,
+            ),
         ) = other._decompose_into_four_parts()
 
         # Start with an estimate of the difference based on self
diff --git a/synapse/util/linked_list.py b/synapse/util/linked_list.py
index e9a5fff2118b7d539a0723a703e538a62857300b..87f801c0cf150e79ed60060f9067f0ad2a221b77 100644
--- a/synapse/util/linked_list.py
+++ b/synapse/util/linked_list.py
@@ -19,8 +19,7 @@
 #
 #
 
-"""A circular doubly linked list implementation.
-"""
+"""A circular doubly linked list implementation."""
 
 import threading
 from typing import Generic, Optional, Type, TypeVar
diff --git a/synapse/util/metrics.py b/synapse/util/metrics.py
index 517e79ce5fb2acbb94495b867cad503a9af30216..020618598ccee5466b44d0f71f7a92d3c63f51a5 100644
--- a/synapse/util/metrics.py
+++ b/synapse/util/metrics.py
@@ -110,7 +110,7 @@ def measure_func(
     """
 
     def wrapper(
-        func: Callable[Concatenate[HasClock, P], Awaitable[R]]
+        func: Callable[Concatenate[HasClock, P], Awaitable[R]],
     ) -> Callable[P, Awaitable[R]]:
         block_name = func.__name__ if name is None else name
 
diff --git a/synapse/util/patch_inline_callbacks.py b/synapse/util/patch_inline_callbacks.py
index 46dad32156b139f3edffb2e21be16d308cda2143..56bdf451dad822626a8351df13d1c58da5204e37 100644
--- a/synapse/util/patch_inline_callbacks.py
+++ b/synapse/util/patch_inline_callbacks.py
@@ -50,7 +50,7 @@ def do_patch() -> None:
         return
 
     def new_inline_callbacks(
-        f: Callable[P, Generator["Deferred[object]", object, T]]
+        f: Callable[P, Generator["Deferred[object]", object, T]],
     ) -> Callable[P, "Deferred[T]"]:
         @functools.wraps(f)
         def wrapped(*args: P.args, **kwargs: P.kwargs) -> "Deferred[T]":
diff --git a/synapse/util/ratelimitutils.py b/synapse/util/ratelimitutils.py
index 8ead72bb7a4f54eb02e2df12f4c53511b1a3f520..3f067b792c4fdc6a953e9ce0dd99ccb821428058 100644
--- a/synapse/util/ratelimitutils.py
+++ b/synapse/util/ratelimitutils.py
@@ -103,7 +103,7 @@ _rate_limiter_instances_lock = threading.Lock()
 
 
 def _get_counts_from_rate_limiter_instance(
-    count_func: Callable[["FederationRateLimiter"], int]
+    count_func: Callable[["FederationRateLimiter"], int],
 ) -> Mapping[Tuple[str, ...], int]:
     """Returns a count of something (slept/rejected hosts) by (metrics_name)"""
     # Cast to a list to prevent it changing while the Prometheus
diff --git a/synapse/visibility.py b/synapse/visibility.py
index 128413c8aab170f7a035acd33ca1f3e5dbd2415b..3a2782bade77235b8369c0b2b829e6e732a33f3f 100644
--- a/synapse/visibility.py
+++ b/synapse/visibility.py
@@ -135,9 +135,9 @@ async def filter_events_for_client(
         retention_policies: Dict[str, RetentionPolicy] = {}
 
         for room_id in room_ids:
-            retention_policies[room_id] = (
-                await storage.main.get_retention_policy_for_room(room_id)
-            )
+            retention_policies[
+                room_id
+            ] = await storage.main.get_retention_policy_for_room(room_id)
 
     def allowed(event: EventBase) -> Optional[EventBase]:
         state_after_event = event_id_to_state.get(event.event_id)
diff --git a/synmark/__main__.py b/synmark/__main__.py
index cac57cf1115f9c674f3cce3c87b7ba1574597c0b..746261a1eca4097adce91337e8da732797e96981 100644
--- a/synmark/__main__.py
+++ b/synmark/__main__.py
@@ -40,7 +40,7 @@ T = TypeVar("T")
 
 
 def make_test(
-    main: Callable[[ISynapseReactor, int], Coroutine[Any, Any, float]]
+    main: Callable[[ISynapseReactor, int], Coroutine[Any, Any, float]],
 ) -> Callable[[int], float]:
     """
     Take a benchmark function and wrap it in a reactor start and stop.
diff --git a/tests/appservice/test_scheduler.py b/tests/appservice/test_scheduler.py
index a1c7ccdd0be1bd09696db9ccbad97a8b8f995485..730b00a9fbb3ff39ad3f987b5421cdeb6dc6f13f 100644
--- a/tests/appservice/test_scheduler.py
+++ b/tests/appservice/test_scheduler.py
@@ -150,7 +150,8 @@ class ApplicationServiceSchedulerTransactionCtrlTestCase(unittest.TestCase):
         self.assertEqual(1, len(self.txnctrl.recoverers))  # and stored
         self.assertEqual(0, txn.complete.call_count)  # txn not completed
         self.store.set_appservice_state.assert_called_once_with(
-            service, ApplicationServiceState.DOWN  # service marked as down
+            service,
+            ApplicationServiceState.DOWN,  # service marked as down
         )
 
 
diff --git a/tests/events/test_utils.py b/tests/events/test_utils.py
index 30f8787758834e6c1ab3ef5d916018ba49661abc..654e6521a20d3612047e211aa63e8c2e6d3602f1 100644
--- a/tests/events/test_utils.py
+++ b/tests/events/test_utils.py
@@ -756,7 +756,8 @@ class SerializeEventTestCase(stdlib_unittest.TestCase):
     def test_event_fields_fail_if_fields_not_str(self) -> None:
         with self.assertRaises(TypeError):
             self.serialize(
-                MockEvent(room_id="!foo:bar", content={"foo": "bar"}), ["room_id", 4]  # type: ignore[list-item]
+                MockEvent(room_id="!foo:bar", content={"foo": "bar"}),
+                ["room_id", 4],  # type: ignore[list-item]
             )
 
 
diff --git a/tests/federation/test_complexity.py b/tests/federation/test_complexity.py
index 9bd97e5d4ef5e3f2370b1cf3804c879ec6266a7a..87b9ffc0c6eddb8c92a1979b6139f95f4cfa2faa 100644
--- a/tests/federation/test_complexity.py
+++ b/tests/federation/test_complexity.py
@@ -158,7 +158,9 @@ class RoomComplexityTests(unittest.FederatingHomeserverTestCase):
         async def get_current_state_event_counts(room_id: str) -> int:
             return 600
 
-        self.hs.get_datastores().main.get_current_state_event_counts = get_current_state_event_counts  # type: ignore[method-assign]
+        self.hs.get_datastores().main.get_current_state_event_counts = (  # type: ignore[method-assign]
+            get_current_state_event_counts
+        )
 
         d = handler._remote_join(
             create_requester(u1),
diff --git a/tests/federation/test_federation_catch_up.py b/tests/federation/test_federation_catch_up.py
index 08214b0013166810188bb366affc1b1f733c18d0..1e1ed8e642663ea333504134f8706217b012814e 100644
--- a/tests/federation/test_federation_catch_up.py
+++ b/tests/federation/test_federation_catch_up.py
@@ -401,7 +401,10 @@ class FederationCatchUpTestCases(FederatingHomeserverTestCase):
         now = self.clock.time_msec()
         self.get_success(
             self.hs.get_datastores().main.set_destination_retry_timings(
-                "zzzerver", now, now, 24 * 60 * 60 * 1000  # retry in 1 day
+                "zzzerver",
+                now,
+                now,
+                24 * 60 * 60 * 1000,  # retry in 1 day
             )
         )
 
diff --git a/tests/federation/test_federation_media.py b/tests/federation/test_federation_media.py
index 0dcf20f5f51d118a423fc9363bae504cdfc8f75a..e66aae499b09b3bef541fd9f41eeb5414b584410 100644
--- a/tests/federation/test_federation_media.py
+++ b/tests/federation/test_federation_media.py
@@ -40,7 +40,6 @@ from tests.test_utils import SMALL_PNG
 
 
 class FederationMediaDownloadsTest(unittest.FederatingHomeserverTestCase):
-
     def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
         super().prepare(reactor, clock, hs)
         self.test_dir = tempfile.mkdtemp(prefix="synapse-tests-")
@@ -150,7 +149,6 @@ class FederationMediaDownloadsTest(unittest.FederatingHomeserverTestCase):
 
 
 class FederationThumbnailTest(unittest.FederatingHomeserverTestCase):
-
     def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
         super().prepare(reactor, clock, hs)
         self.test_dir = tempfile.mkdtemp(prefix="synapse-tests-")
diff --git a/tests/handlers/test_federation_event.py b/tests/handlers/test_federation_event.py
index 1b83aea5791a35d42ad69979d7fb61e80b093d92..5db10fa74c2f616c880e72a9f3c9eae8dbc20c2a 100644
--- a/tests/handlers/test_federation_event.py
+++ b/tests/handlers/test_federation_event.py
@@ -288,13 +288,15 @@ class FederationEventHandlerTests(unittest.FederatingHomeserverTestCase):
         }
 
         # We also expect an outbound request to /state
-        self.mock_federation_transport_client.get_room_state.return_value = StateRequestResponse(
-            # Mimic the other server not knowing about the state at all.
-            # We want to cause Synapse to throw an error (`Unable to get
-            # missing prev_event $fake_prev_event`) and fail to backfill
-            # the pulled event.
-            auth_events=[],
-            state=[],
+        self.mock_federation_transport_client.get_room_state.return_value = (
+            StateRequestResponse(
+                # Mimic the other server not knowing about the state at all.
+                # We want to cause Synapse to throw an error (`Unable to get
+                # missing prev_event $fake_prev_event`) and fail to backfill
+                # the pulled event.
+                auth_events=[],
+                state=[],
+            )
         )
 
         pulled_event = make_event_from_dict(
diff --git a/tests/handlers/test_presence.py b/tests/handlers/test_presence.py
index cc630d606ca3afed88147de79f70b28fe871882d..598d6c13cdb468aceb50343a003cee78acf5ecd1 100644
--- a/tests/handlers/test_presence.py
+++ b/tests/handlers/test_presence.py
@@ -1107,7 +1107,9 @@ class PresenceHandlerTestCase(BaseMultiWorkerStreamTestCase):
                 ),
             ]
         ],
-        name_func=lambda testcase_func, param_num, params: f"{testcase_func.__name__}_{param_num}_{'workers' if params.args[5] else 'monolith'}",
+        name_func=lambda testcase_func,
+        param_num,
+        params: f"{testcase_func.__name__}_{param_num}_{'workers' if params.args[5] else 'monolith'}",
     )
     @unittest.override_config({"experimental_features": {"msc3026_enabled": True}})
     def test_set_presence_from_syncing_multi_device(
@@ -1343,7 +1345,9 @@ class PresenceHandlerTestCase(BaseMultiWorkerStreamTestCase):
                 ),
             ]
         ],
-        name_func=lambda testcase_func, param_num, params: f"{testcase_func.__name__}_{param_num}_{'workers' if params.args[4] else 'monolith'}",
+        name_func=lambda testcase_func,
+        param_num,
+        params: f"{testcase_func.__name__}_{param_num}_{'workers' if params.args[4] else 'monolith'}",
     )
     @unittest.override_config({"experimental_features": {"msc3026_enabled": True}})
     def test_set_presence_from_non_syncing_multi_device(
diff --git a/tests/handlers/test_sync.py b/tests/handlers/test_sync.py
index fa55f769167db1158e5e092e89b83fcf344e75c6..d7bbc680373193289292e1ae0f5485f28f87ebf6 100644
--- a/tests/handlers/test_sync.py
+++ b/tests/handlers/test_sync.py
@@ -843,7 +843,9 @@ class SyncTestCase(tests.unittest.HomeserverTestCase):
         ) -> List[EventBase]:
             return list(pdus)
 
-        self.client._check_sigs_and_hash_for_pulled_events_and_fetch = _check_sigs_and_hash_for_pulled_events_and_fetch  # type: ignore[assignment]
+        self.client._check_sigs_and_hash_for_pulled_events_and_fetch = (  # type: ignore[method-assign]
+            _check_sigs_and_hash_for_pulled_events_and_fetch  # type: ignore[assignment]
+        )
 
         prev_events = self.get_success(self.store.get_prev_events_for_room(room_id))
 
diff --git a/tests/http/federation/test_srv_resolver.py b/tests/http/federation/test_srv_resolver.py
index 8e8621e348a82529b621ccf7ca4dcaeeb5300085..ffcbf4b3ca1e62f53c0d132221f50e606fa6ff64 100644
--- a/tests/http/federation/test_srv_resolver.py
+++ b/tests/http/federation/test_srv_resolver.py
@@ -93,9 +93,7 @@ class SrvResolverTestCase(unittest.TestCase):
         resolver = SrvResolver(dns_client=dns_client_mock, cache=cache)
 
         servers: List[Server]
-        servers = yield defer.ensureDeferred(
-            resolver.resolve_service(service_name)
-        )  # type: ignore[assignment]
+        servers = yield defer.ensureDeferred(resolver.resolve_service(service_name))  # type: ignore[assignment]
 
         dns_client_mock.lookupService.assert_called_once_with(service_name)
 
@@ -122,9 +120,7 @@ class SrvResolverTestCase(unittest.TestCase):
         )
 
         servers: List[Server]
-        servers = yield defer.ensureDeferred(
-            resolver.resolve_service(service_name)
-        )  # type: ignore[assignment]
+        servers = yield defer.ensureDeferred(resolver.resolve_service(service_name))  # type: ignore[assignment]
 
         self.assertFalse(dns_client_mock.lookupService.called)
 
@@ -157,9 +153,7 @@ class SrvResolverTestCase(unittest.TestCase):
         resolver = SrvResolver(dns_client=dns_client_mock, cache=cache)
 
         servers: List[Server]
-        servers = yield defer.ensureDeferred(
-            resolver.resolve_service(service_name)
-        )  # type: ignore[assignment]
+        servers = yield defer.ensureDeferred(resolver.resolve_service(service_name))  # type: ignore[assignment]
 
         self.assertEqual(len(servers), 0)
         self.assertEqual(len(cache), 0)
diff --git a/tests/http/test_client.py b/tests/http/test_client.py
index f2abec190bd1de1679531be670c6ca17dad227d5..ac6470ebbdc6603b958a9b192d2dd22667357108 100644
--- a/tests/http/test_client.py
+++ b/tests/http/test_client.py
@@ -207,7 +207,9 @@ class ReadMultipartResponseTests(TestCase):
 
 
 class ReadBodyWithMaxSizeTests(TestCase):
-    def _build_response(self, length: Union[int, str] = UNKNOWN_LENGTH) -> Tuple[
+    def _build_response(
+        self, length: Union[int, str] = UNKNOWN_LENGTH
+    ) -> Tuple[
         BytesIO,
         "Deferred[int]",
         _DiscardBodyWithMaxSizeProtocol,
diff --git a/tests/http/test_matrixfederationclient.py b/tests/http/test_matrixfederationclient.py
index 6827412373660fdfe6d3bbadf417334347717640..6588695e3734fad90333beab53cf952c30c8cf44 100644
--- a/tests/http/test_matrixfederationclient.py
+++ b/tests/http/test_matrixfederationclient.py
@@ -895,21 +895,23 @@ class FederationClientProxyTests(BaseMultiWorkerStreamTestCase):
         )
 
         # Fake `remoteserv:8008` responding to requests
-        mock_agent_on_federation_sender.request.side_effect = lambda *args, **kwargs: defer.succeed(
-            FakeResponse(
-                code=200,
-                body=b'{"foo": "bar"}',
-                headers=Headers(
-                    {
-                        "Content-Type": ["application/json"],
-                        "Connection": ["close, X-Foo, X-Bar"],
-                        # Should be removed because it's defined in the `Connection` header
-                        "X-Foo": ["foo"],
-                        "X-Bar": ["bar"],
-                        # Should be removed because it's a hop-by-hop header
-                        "Proxy-Authorization": "abcdef",
-                    }
-                ),
+        mock_agent_on_federation_sender.request.side_effect = (
+            lambda *args, **kwargs: defer.succeed(
+                FakeResponse(
+                    code=200,
+                    body=b'{"foo": "bar"}',
+                    headers=Headers(
+                        {
+                            "Content-Type": ["application/json"],
+                            "Connection": ["close, X-Foo, X-Bar"],
+                            # Should be removed because it's defined in the `Connection` header
+                            "X-Foo": ["foo"],
+                            "X-Bar": ["bar"],
+                            # Should be removed because it's a hop-by-hop header
+                            "Proxy-Authorization": "abcdef",
+                        }
+                    ),
+                )
             )
         )
 
diff --git a/tests/http/test_servlet.py b/tests/http/test_servlet.py
index 18af2735feeaa751fcf3b7ce87e0e6fd9aabfd53..db39ecf2443bf6f9ee6f84393632b23e9d5fad83 100644
--- a/tests/http/test_servlet.py
+++ b/tests/http/test_servlet.py
@@ -76,7 +76,7 @@ class TestServletUtils(unittest.TestCase):
 
         # Invalid UTF-8.
         with self.assertRaises(SynapseError):
-            parse_json_value_from_request(make_request(b"\xFF\x00"))
+            parse_json_value_from_request(make_request(b"\xff\x00"))
 
         # Invalid JSON.
         with self.assertRaises(SynapseError):
diff --git a/tests/media/test_media_storage.py b/tests/media/test_media_storage.py
index e55001fb40c2c32d8c03359de24c79a0ffb7ceeb..e50ff5fa7805403005310516b443fffa8639612e 100644
--- a/tests/media/test_media_storage.py
+++ b/tests/media/test_media_storage.py
@@ -261,7 +261,7 @@ class MediaRepoTests(unittest.HomeserverTestCase):
             """A mock for MatrixFederationHttpClient.get_file."""
 
             def write_to(
-                r: Tuple[bytes, Tuple[int, Dict[bytes, List[bytes]]]]
+                r: Tuple[bytes, Tuple[int, Dict[bytes, List[bytes]]]],
             ) -> Tuple[int, Dict[bytes, List[bytes]]]:
                 data, response = r
                 output_stream.write(data)
diff --git a/tests/module_api/test_account_data_manager.py b/tests/module_api/test_account_data_manager.py
index fd87eaffd04eeed6437c69757e100b747296d9ad..1a1d5609b2eaafcbafb487c9239f153e6a85d6e5 100644
--- a/tests/module_api/test_account_data_manager.py
+++ b/tests/module_api/test_account_data_manager.py
@@ -164,6 +164,8 @@ class ModuleApiTestCase(HomeserverTestCase):
             # noinspection PyTypeChecker
             self.get_success_or_raise(
                 self._module_api.account_data_manager.put_global(
-                    self.user_id, "test.data", 42  # type: ignore[arg-type]
+                    self.user_id,
+                    "test.data",
+                    42,  # type: ignore[arg-type]
                 )
             )
diff --git a/tests/push/test_email.py b/tests/push/test_email.py
index e0aab1c046c54a72b5891134d636c12498d1488c..4fafb718976c12ed08be9b2132f83624921adf8d 100644
--- a/tests/push/test_email.py
+++ b/tests/push/test_email.py
@@ -44,6 +44,7 @@ from tests.unittest import HomeserverTestCase
 @attr.s(auto_attribs=True)
 class _User:
     "Helper wrapper for user ID and access token"
+
     id: str
     token: str
 
diff --git a/tests/rest/admin/test_server_notice.py b/tests/rest/admin/test_server_notice.py
index 2a1e42bbc821eb06f20f81cec92dfc12609da418..150caeeee258ddd205998a48411a191008593b49 100644
--- a/tests/rest/admin/test_server_notice.py
+++ b/tests/rest/admin/test_server_notice.py
@@ -531,9 +531,7 @@ class ServerNoticeTestCase(unittest.HomeserverTestCase):
 
         # simulate a change in server config after a server restart.
         new_display_name = "new display name"
-        self.server_notices_manager._config.servernotices.server_notices_mxid_display_name = (
-            new_display_name
-        )
+        self.server_notices_manager._config.servernotices.server_notices_mxid_display_name = new_display_name
         self.server_notices_manager.get_or_create_notice_room_for_user.cache.invalidate_all()
 
         self.make_request(
@@ -577,9 +575,7 @@ class ServerNoticeTestCase(unittest.HomeserverTestCase):
 
         # simulate a change in server config after a server restart.
         new_avatar_url = "test/new-url"
-        self.server_notices_manager._config.servernotices.server_notices_mxid_avatar_url = (
-            new_avatar_url
-        )
+        self.server_notices_manager._config.servernotices.server_notices_mxid_avatar_url = new_avatar_url
         self.server_notices_manager.get_or_create_notice_room_for_user.cache.invalidate_all()
 
         self.make_request(
@@ -692,9 +688,7 @@ class ServerNoticeTestCase(unittest.HomeserverTestCase):
 
         # simulate a change in server config after a server restart.
         new_avatar_url = "test/new-url"
-        self.server_notices_manager._config.servernotices.server_notices_room_avatar_url = (
-            new_avatar_url
-        )
+        self.server_notices_manager._config.servernotices.server_notices_room_avatar_url = new_avatar_url
         self.server_notices_manager.get_or_create_notice_room_for_user.cache.invalidate_all()
 
         self.make_request(
diff --git a/tests/rest/client/sliding_sync/test_connection_tracking.py b/tests/rest/client/sliding_sync/test_connection_tracking.py
index 436bd4466cb56d557a282cd3e806b6a6b3a6f67d..5b819103c267b8ab427e255c92e3862f5685aa5d 100644
--- a/tests/rest/client/sliding_sync/test_connection_tracking.py
+++ b/tests/rest/client/sliding_sync/test_connection_tracking.py
@@ -38,7 +38,9 @@ logger = logging.getLogger(__name__)
         (True,),
         (False,),
     ],
-    class_name_func=lambda cls, num, params_dict: f"{cls.__name__}_{'new' if params_dict['use_new_tables'] else 'fallback'}",
+    class_name_func=lambda cls,
+    num,
+    params_dict: f"{cls.__name__}_{'new' if params_dict['use_new_tables'] else 'fallback'}",
 )
 class SlidingSyncConnectionTrackingTestCase(SlidingSyncBase):
     """
diff --git a/tests/rest/client/sliding_sync/test_extension_account_data.py b/tests/rest/client/sliding_sync/test_extension_account_data.py
index 65a6adf4af3d7e0f43439c82486a961125464599..6cc883a4beef5f6ea1fac360d342b85a42eaf562 100644
--- a/tests/rest/client/sliding_sync/test_extension_account_data.py
+++ b/tests/rest/client/sliding_sync/test_extension_account_data.py
@@ -40,7 +40,9 @@ logger = logging.getLogger(__name__)
         (True,),
         (False,),
     ],
-    class_name_func=lambda cls, num, params_dict: f"{cls.__name__}_{'new' if params_dict['use_new_tables'] else 'fallback'}",
+    class_name_func=lambda cls,
+    num,
+    params_dict: f"{cls.__name__}_{'new' if params_dict['use_new_tables'] else 'fallback'}",
 )
 class SlidingSyncAccountDataExtensionTestCase(SlidingSyncBase):
     """Tests for the account_data sliding sync extension"""
diff --git a/tests/rest/client/sliding_sync/test_extension_e2ee.py b/tests/rest/client/sliding_sync/test_extension_e2ee.py
index 2ff668779665a1b11fefb47ca6a081a65e5266f8..7ce6592d8f3c8b5a4c19a816a0841603c863f0d8 100644
--- a/tests/rest/client/sliding_sync/test_extension_e2ee.py
+++ b/tests/rest/client/sliding_sync/test_extension_e2ee.py
@@ -39,7 +39,9 @@ logger = logging.getLogger(__name__)
         (True,),
         (False,),
     ],
-    class_name_func=lambda cls, num, params_dict: f"{cls.__name__}_{'new' if params_dict['use_new_tables'] else 'fallback'}",
+    class_name_func=lambda cls,
+    num,
+    params_dict: f"{cls.__name__}_{'new' if params_dict['use_new_tables'] else 'fallback'}",
 )
 class SlidingSyncE2eeExtensionTestCase(SlidingSyncBase):
     """Tests for the e2ee sliding sync extension"""
diff --git a/tests/rest/client/sliding_sync/test_extension_receipts.py b/tests/rest/client/sliding_sync/test_extension_receipts.py
index 90b035dd75bc635c9d842bcab7f252cc7c1c5514..6e7700b533f80e471f102ac4719feeb9ca207aca 100644
--- a/tests/rest/client/sliding_sync/test_extension_receipts.py
+++ b/tests/rest/client/sliding_sync/test_extension_receipts.py
@@ -40,7 +40,9 @@ logger = logging.getLogger(__name__)
         (True,),
         (False,),
     ],
-    class_name_func=lambda cls, num, params_dict: f"{cls.__name__}_{'new' if params_dict['use_new_tables'] else 'fallback'}",
+    class_name_func=lambda cls,
+    num,
+    params_dict: f"{cls.__name__}_{'new' if params_dict['use_new_tables'] else 'fallback'}",
 )
 class SlidingSyncReceiptsExtensionTestCase(SlidingSyncBase):
     """Tests for the receipts sliding sync extension"""
diff --git a/tests/rest/client/sliding_sync/test_extension_to_device.py b/tests/rest/client/sliding_sync/test_extension_to_device.py
index 5ba2443089ae48340c7b87f166526dacf4af4c34..790abb739db13d19f1874e746dbfb1936cbfa599 100644
--- a/tests/rest/client/sliding_sync/test_extension_to_device.py
+++ b/tests/rest/client/sliding_sync/test_extension_to_device.py
@@ -40,7 +40,9 @@ logger = logging.getLogger(__name__)
         (True,),
         (False,),
     ],
-    class_name_func=lambda cls, num, params_dict: f"{cls.__name__}_{'new' if params_dict['use_new_tables'] else 'fallback'}",
+    class_name_func=lambda cls,
+    num,
+    params_dict: f"{cls.__name__}_{'new' if params_dict['use_new_tables'] else 'fallback'}",
 )
 class SlidingSyncToDeviceExtensionTestCase(SlidingSyncBase):
     """Tests for the to-device sliding sync extension"""
diff --git a/tests/rest/client/sliding_sync/test_extension_typing.py b/tests/rest/client/sliding_sync/test_extension_typing.py
index 0a0f5aff1a21f261bc04f7db40e0990b10a82e91..f87c3c8b1748ad8c27bba6b9bef841f36d71a74a 100644
--- a/tests/rest/client/sliding_sync/test_extension_typing.py
+++ b/tests/rest/client/sliding_sync/test_extension_typing.py
@@ -40,7 +40,9 @@ logger = logging.getLogger(__name__)
         (True,),
         (False,),
     ],
-    class_name_func=lambda cls, num, params_dict: f"{cls.__name__}_{'new' if params_dict['use_new_tables'] else 'fallback'}",
+    class_name_func=lambda cls,
+    num,
+    params_dict: f"{cls.__name__}_{'new' if params_dict['use_new_tables'] else 'fallback'}",
 )
 class SlidingSyncTypingExtensionTestCase(SlidingSyncBase):
     """Tests for the typing notification sliding sync extension"""
diff --git a/tests/rest/client/sliding_sync/test_extensions.py b/tests/rest/client/sliding_sync/test_extensions.py
index 32478467aa70e1e4da335e717cf5f80607924370..30230e5c4b72a0e23d404ba66b9793684acb2746 100644
--- a/tests/rest/client/sliding_sync/test_extensions.py
+++ b/tests/rest/client/sliding_sync/test_extensions.py
@@ -40,7 +40,9 @@ logger = logging.getLogger(__name__)
         (True,),
         (False,),
     ],
-    class_name_func=lambda cls, num, params_dict: f"{cls.__name__}_{'new' if params_dict['use_new_tables'] else 'fallback'}",
+    class_name_func=lambda cls,
+    num,
+    params_dict: f"{cls.__name__}_{'new' if params_dict['use_new_tables'] else 'fallback'}",
 )
 class SlidingSyncExtensionsTestCase(SlidingSyncBase):
     """
diff --git a/tests/rest/client/sliding_sync/test_room_subscriptions.py b/tests/rest/client/sliding_sync/test_room_subscriptions.py
index e81d2518399130e85b6ddd6a9c46fbda08ae78c0..285fdaaf78c9593368aed71c5418f2eb00f91afb 100644
--- a/tests/rest/client/sliding_sync/test_room_subscriptions.py
+++ b/tests/rest/client/sliding_sync/test_room_subscriptions.py
@@ -39,7 +39,9 @@ logger = logging.getLogger(__name__)
         (True,),
         (False,),
     ],
-    class_name_func=lambda cls, num, params_dict: f"{cls.__name__}_{'new' if params_dict['use_new_tables'] else 'fallback'}",
+    class_name_func=lambda cls,
+    num,
+    params_dict: f"{cls.__name__}_{'new' if params_dict['use_new_tables'] else 'fallback'}",
 )
 class SlidingSyncRoomSubscriptionsTestCase(SlidingSyncBase):
     """
diff --git a/tests/rest/client/sliding_sync/test_rooms_invites.py b/tests/rest/client/sliding_sync/test_rooms_invites.py
index f6f45c2500e6038761f3e4e53faac7ba62d38927..882762ca29acfbdb222e203d0aca117757962a68 100644
--- a/tests/rest/client/sliding_sync/test_rooms_invites.py
+++ b/tests/rest/client/sliding_sync/test_rooms_invites.py
@@ -39,7 +39,9 @@ logger = logging.getLogger(__name__)
         (True,),
         (False,),
     ],
-    class_name_func=lambda cls, num, params_dict: f"{cls.__name__}_{'new' if params_dict['use_new_tables'] else 'fallback'}",
+    class_name_func=lambda cls,
+    num,
+    params_dict: f"{cls.__name__}_{'new' if params_dict['use_new_tables'] else 'fallback'}",
 )
 class SlidingSyncRoomsInvitesTestCase(SlidingSyncBase):
     """
diff --git a/tests/rest/client/sliding_sync/test_rooms_meta.py b/tests/rest/client/sliding_sync/test_rooms_meta.py
index 71542923da1f1c948eb84b499319c70c618ca94d..4ed49040c165ab849b25167691f1663c9e99db89 100644
--- a/tests/rest/client/sliding_sync/test_rooms_meta.py
+++ b/tests/rest/client/sliding_sync/test_rooms_meta.py
@@ -40,7 +40,9 @@ logger = logging.getLogger(__name__)
         (True,),
         (False,),
     ],
-    class_name_func=lambda cls, num, params_dict: f"{cls.__name__}_{'new' if params_dict['use_new_tables'] else 'fallback'}",
+    class_name_func=lambda cls,
+    num,
+    params_dict: f"{cls.__name__}_{'new' if params_dict['use_new_tables'] else 'fallback'}",
 )
 class SlidingSyncRoomsMetaTestCase(SlidingSyncBase):
     """
diff --git a/tests/rest/client/sliding_sync/test_rooms_required_state.py b/tests/rest/client/sliding_sync/test_rooms_required_state.py
index 436ae684da8095b23bd48bf27c55eeaeb0c5e868..91ac6c5a0ee70c44ac897ad80eed22bc0cd5eb6a 100644
--- a/tests/rest/client/sliding_sync/test_rooms_required_state.py
+++ b/tests/rest/client/sliding_sync/test_rooms_required_state.py
@@ -40,7 +40,9 @@ logger = logging.getLogger(__name__)
         (True,),
         (False,),
     ],
-    class_name_func=lambda cls, num, params_dict: f"{cls.__name__}_{'new' if params_dict['use_new_tables'] else 'fallback'}",
+    class_name_func=lambda cls,
+    num,
+    params_dict: f"{cls.__name__}_{'new' if params_dict['use_new_tables'] else 'fallback'}",
 )
 class SlidingSyncRoomsRequiredStateTestCase(SlidingSyncBase):
     """
diff --git a/tests/rest/client/sliding_sync/test_rooms_timeline.py b/tests/rest/client/sliding_sync/test_rooms_timeline.py
index e56fb58012c5fa4c7c1a5d54318391eaa8ff7d89..0e027ff39d6f8631f24c062ec10ae33f1d556193 100644
--- a/tests/rest/client/sliding_sync/test_rooms_timeline.py
+++ b/tests/rest/client/sliding_sync/test_rooms_timeline.py
@@ -40,7 +40,9 @@ logger = logging.getLogger(__name__)
         (True,),
         (False,),
     ],
-    class_name_func=lambda cls, num, params_dict: f"{cls.__name__}_{'new' if params_dict['use_new_tables'] else 'fallback'}",
+    class_name_func=lambda cls,
+    num,
+    params_dict: f"{cls.__name__}_{'new' if params_dict['use_new_tables'] else 'fallback'}",
 )
 class SlidingSyncRoomsTimelineTestCase(SlidingSyncBase):
     """
diff --git a/tests/rest/client/sliding_sync/test_sliding_sync.py b/tests/rest/client/sliding_sync/test_sliding_sync.py
index 1dcc15b08291e67d998970b6a922b90054f6c92d..930cb5ef450c6dff8430a6d0435688bcd811a18f 100644
--- a/tests/rest/client/sliding_sync/test_sliding_sync.py
+++ b/tests/rest/client/sliding_sync/test_sliding_sync.py
@@ -232,7 +232,9 @@ class SlidingSyncBase(unittest.HomeserverTestCase):
         (True,),
         (False,),
     ],
-    class_name_func=lambda cls, num, params_dict: f"{cls.__name__}_{'new' if params_dict['use_new_tables'] else 'fallback'}",
+    class_name_func=lambda cls,
+    num,
+    params_dict: f"{cls.__name__}_{'new' if params_dict['use_new_tables'] else 'fallback'}",
 )
 class SlidingSyncTestCase(SlidingSyncBase):
     """
diff --git a/tests/rest/client/test_auth_issuer.py b/tests/rest/client/test_auth_issuer.py
index 299475a35cc1ea76a846eb2689b0de93af09d82f..d6f334a7aba3a9991363af8069cf8982cfbf9228 100644
--- a/tests/rest/client/test_auth_issuer.py
+++ b/tests/rest/client/test_auth_issuer.py
@@ -63,7 +63,9 @@ class AuthIssuerTestCase(HomeserverTestCase):
         self.assertEqual(channel.code, HTTPStatus.OK)
         self.assertEqual(channel.json_body, {"issuer": ISSUER})
 
-        req_mock.assert_called_with("https://account.example.com/.well-known/openid-configuration")
+        req_mock.assert_called_with(
+            "https://account.example.com/.well-known/openid-configuration"
+        )
         req_mock.reset_mock()
 
         # Second call it should use the cached value
diff --git a/tests/rest/client/test_events.py b/tests/rest/client/test_events.py
index 06f1c1b2348bbcf0bd4939181949cc1a720aca8b..039144fdbe3fe75d9d92a08ad387e6d38d32c4e5 100644
--- a/tests/rest/client/test_events.py
+++ b/tests/rest/client/test_events.py
@@ -19,7 +19,7 @@
 #
 #
 
-""" Tests REST events for /events paths."""
+"""Tests REST events for /events paths."""
 
 from unittest.mock import Mock
 
diff --git a/tests/rest/client/test_media.py b/tests/rest/client/test_media.py
index 30b6d31d0af7af832aaa6014ae4b16ee65263696..42014e257e48fc0ee4c8ee7178229bee5ba035a4 100644
--- a/tests/rest/client/test_media.py
+++ b/tests/rest/client/test_media.py
@@ -1957,7 +1957,7 @@ class DownloadAndThumbnailTestCase(unittest.HomeserverTestCase):
             """A mock for MatrixFederationHttpClient.federation_get_file."""
 
             def write_to(
-                r: Tuple[bytes, Tuple[int, Dict[bytes, List[bytes]], bytes]]
+                r: Tuple[bytes, Tuple[int, Dict[bytes, List[bytes]], bytes]],
             ) -> Tuple[int, Dict[bytes, List[bytes]], bytes]:
                 data, response = r
                 output_stream.write(data)
@@ -1991,7 +1991,7 @@ class DownloadAndThumbnailTestCase(unittest.HomeserverTestCase):
             """A mock for MatrixFederationHttpClient.get_file."""
 
             def write_to(
-                r: Tuple[bytes, Tuple[int, Dict[bytes, List[bytes]]]]
+                r: Tuple[bytes, Tuple[int, Dict[bytes, List[bytes]]]],
             ) -> Tuple[int, Dict[bytes, List[bytes]]]:
                 data, response = r
                 output_stream.write(data)
diff --git a/tests/rest/client/test_profile.py b/tests/rest/client/test_profile.py
index f98f3f77aae127fc93ca1d8836c77e8496697654..a92713d220e80c50c8dc888193beb93e0f7c8d28 100644
--- a/tests/rest/client/test_profile.py
+++ b/tests/rest/client/test_profile.py
@@ -20,6 +20,7 @@
 #
 
 """Tests REST events for /profile paths."""
+
 import urllib.parse
 from http import HTTPStatus
 from typing import Any, Dict, Optional
diff --git a/tests/rest/client/test_register.py b/tests/rest/client/test_register.py
index 694f143eff8756f3f838577f2979ede82b62629a..c091f403cc0659ba83d09a381c7fa8238578e5ee 100644
--- a/tests/rest/client/test_register.py
+++ b/tests/rest/client/test_register.py
@@ -1049,9 +1049,7 @@ class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase):
 
         # Check that the HTML we're getting is the one we expect when using an
         # invalid/unknown token.
-        expected_html = (
-            self.hs.config.account_validity.account_validity_invalid_token_template.render()
-        )
+        expected_html = self.hs.config.account_validity.account_validity_invalid_token_template.render()
         self.assertEqual(
             channel.result["body"], expected_html.encode("utf8"), channel.result
         )
diff --git a/tests/rest/client/utils.py b/tests/rest/client/utils.py
index 9614cdd66acd7735a1d9e6996a443bdd16323233..a1c284726ad963a5f61a6383a7eb63aec1b107fc 100644
--- a/tests/rest/client/utils.py
+++ b/tests/rest/client/utils.py
@@ -330,22 +330,24 @@ class RestHelper:
             data,
         )
 
-        assert (
-            channel.code == expect_code
-        ), "Expected: %d, got: %d, PUT %s -> resp: %r" % (
-            expect_code,
-            channel.code,
-            path,
-            channel.result["body"],
+        assert channel.code == expect_code, (
+            "Expected: %d, got: %d, PUT %s -> resp: %r"
+            % (
+                expect_code,
+                channel.code,
+                path,
+                channel.result["body"],
+            )
         )
 
         if expect_errcode:
-            assert (
-                str(channel.json_body["errcode"]) == expect_errcode
-            ), "Expected: %r, got: %r, resp: %r" % (
-                expect_errcode,
-                channel.json_body["errcode"],
-                channel.result["body"],
+            assert str(channel.json_body["errcode"]) == expect_errcode, (
+                "Expected: %r, got: %r, resp: %r"
+                % (
+                    expect_errcode,
+                    channel.json_body["errcode"],
+                    channel.result["body"],
+                )
             )
 
         if expect_additional_fields is not None:
@@ -354,13 +356,14 @@ class RestHelper:
                     expect_key,
                     channel.json_body,
                 )
-                assert (
-                    channel.json_body[expect_key] == expect_value
-                ), "Expected: %s at %s, got: %s, resp: %s" % (
-                    expect_value,
-                    expect_key,
-                    channel.json_body[expect_key],
-                    channel.json_body,
+                assert channel.json_body[expect_key] == expect_value, (
+                    "Expected: %s at %s, got: %s, resp: %s"
+                    % (
+                        expect_value,
+                        expect_key,
+                        channel.json_body[expect_key],
+                        channel.json_body,
+                    )
                 )
 
         self.auth_user_id = temp_id
diff --git a/tests/rest/test_well_known.py b/tests/rest/test_well_known.py
index ac992766e87438346746de1ac9476f00bdd9a20c..c73717f014412438474b8d933a33d1eae3cd4215 100644
--- a/tests/rest/test_well_known.py
+++ b/tests/rest/test_well_known.py
@@ -124,7 +124,12 @@ class WellKnownTests(unittest.HomeserverTestCase):
     )
     def test_client_well_known_msc3861_oauth_delegation(self) -> None:
         # Patch the HTTP client to return the issuer metadata
-        req_mock = AsyncMock(return_value={"issuer": "https://issuer", "account_management_uri": "https://my-account.issuer"})
+        req_mock = AsyncMock(
+            return_value={
+                "issuer": "https://issuer",
+                "account_management_uri": "https://my-account.issuer",
+            }
+        )
         self.hs.get_proxied_http_client().get_json = req_mock  # type: ignore[method-assign]
 
         for _ in range(2):
@@ -145,4 +150,6 @@ class WellKnownTests(unittest.HomeserverTestCase):
             )
 
         # It should have been called exactly once, because it gets cached
-        req_mock.assert_called_once_with("https://issuer/.well-known/openid-configuration")
+        req_mock.assert_called_once_with(
+            "https://issuer/.well-known/openid-configuration"
+        )
diff --git a/tests/test_event_auth.py b/tests/test_event_auth.py
index 6d1ae4c8d7328ae4cec7a9722a3ec0e09b76899b..f12402f5f2c9d829296625eb650df8d06fa3b419 100644
--- a/tests/test_event_auth.py
+++ b/tests/test_event_auth.py
@@ -292,12 +292,12 @@ class EventAuthTestCase(unittest.TestCase):
         ]
 
         # pleb should not be able to send state
-        self.assertRaises(
-            AuthError,
-            event_auth.check_state_dependent_auth_rules,
-            _random_state_event(RoomVersions.V1, pleb),
-            auth_events,
-        ),
+        self.assertRaises(
+            AuthError,
+            event_auth.check_state_dependent_auth_rules,
+            _random_state_event(RoomVersions.V1, pleb),
+            auth_events,
+        )
 
         # king should be able to send state
         event_auth.check_state_dependent_auth_rules(
diff --git a/tests/test_federation.py b/tests/test_federation.py
index 4e9adc062528fabbed16cb3a4c156722a9762441..94b0fa98565aa56be62b3926d5b629ae7c6da854 100644
--- a/tests/test_federation.py
+++ b/tests/test_federation.py
@@ -101,7 +101,9 @@ class MessageAcceptTests(unittest.HomeserverTestCase):
         ) -> List[EventBase]:
             return list(pdus)
 
-        self.client._check_sigs_and_hash_for_pulled_events_and_fetch = _check_sigs_and_hash_for_pulled_events_and_fetch  # type: ignore[assignment]
+        self.client._check_sigs_and_hash_for_pulled_events_and_fetch = (  # type: ignore[method-assign]
+            _check_sigs_and_hash_for_pulled_events_and_fetch  # type: ignore[assignment]
+        )
 
         # Send the join, it should return None (which is not an error)
         self.assertEqual(
diff --git a/tests/test_types.py b/tests/test_types.py
index 00adc65a5a9861ba03cb4bc760729b93f2554a1f..0c08bc8ecc6690acb88a011c5f04284e1ac68679 100644
--- a/tests/test_types.py
+++ b/tests/test_types.py
@@ -145,7 +145,9 @@ class MapUsernameTestCase(unittest.TestCase):
         (MultiWriterStreamToken,),
         (RoomStreamToken,),
     ],
-    class_name_func=lambda cls, num, params_dict: f"{cls.__name__}_{params_dict['token_type'].__name__}",
+    class_name_func=lambda cls,
+    num,
+    params_dict: f"{cls.__name__}_{params_dict['token_type'].__name__}",
 )
 class MultiWriterTokenTestCase(unittest.HomeserverTestCase):
     """Tests for the different types of multi writer tokens."""
diff --git a/tests/test_utils/__init__.py b/tests/test_utils/__init__.py
index 4ab42a02b9e11ff60cdadaf0f1fda30a7829a27f..4d7adf720497a7a6da1fcf172cf42e693bae56c8 100644
--- a/tests/test_utils/__init__.py
+++ b/tests/test_utils/__init__.py
@@ -22,6 +22,7 @@
 """
 Utilities for running the unit tests
 """
+
 import json
 import sys
 import warnings
diff --git a/tests/unittest.py b/tests/unittest.py
index 2532fa49fbafc0708d2da30f94838e2b8c3120e9..614e805abd505f84177867940acd309df1d1821c 100644
--- a/tests/unittest.py
+++ b/tests/unittest.py
@@ -457,7 +457,9 @@ class HomeserverTestCase(TestCase):
                 # Type ignore: mypy doesn't like us assigning to methods.
                 self.hs.get_auth().get_user_by_req = get_requester  # type: ignore[method-assign]
                 self.hs.get_auth().get_user_by_access_token = get_requester  # type: ignore[method-assign]
-                self.hs.get_auth().get_access_token_from_request = Mock(return_value=token)  # type: ignore[method-assign]
+                self.hs.get_auth().get_access_token_from_request = Mock(  # type: ignore[method-assign]
+                    return_value=token
+                )
 
         if self.needs_threadpool:
             self.reactor.threadpool = ThreadPool()  # type: ignore[assignment]