diff --git a/changelog.d/13304.misc b/changelog.d/13304.misc
deleted file mode 100644
index 156d3d71d79c09e768523752b57ada84af3319f1..0000000000000000000000000000000000000000
--- a/changelog.d/13304.misc
+++ /dev/null
@@ -1 +0,0 @@
-Make all replication row processing methods asynchronous. Contributed by Nick @ Beeper (@fizzadar).
diff --git a/synapse/handlers/typing.py b/synapse/handlers/typing.py
index 26edeeca3c232bbca8108189ddc464f5fed4b00d..d104ea07fedf00da104b0a8c702c3a33a2bbbe66 100644
--- a/synapse/handlers/typing.py
+++ b/synapse/handlers/typing.py
@@ -158,7 +158,7 @@ class FollowerTypingHandler:
         except Exception:
             logger.exception("Error pushing typing notif to remotes")
 
-    async def process_replication_rows(
+    def process_replication_rows(
         self, token: int, rows: List[TypingStream.TypingStreamRow]
     ) -> None:
         """Should be called whenever we receive updates for typing stream."""
@@ -444,7 +444,7 @@ class TypingWriterHandler(FollowerTypingHandler):
 
         return rows, current_id, limited
 
-    async def process_replication_rows(
+    def process_replication_rows(
         self, token: int, rows: List[TypingStream.TypingStreamRow]
     ) -> None:
         # The writing process should never get updates from replication.
diff --git a/synapse/replication/slave/storage/devices.py b/synapse/replication/slave/storage/devices.py
index 22f79997215a450bc6e624a5444d7a76bdb7e0bd..a48cc0206944816e47a7ab40ed341b6e6c82d37c 100644
--- a/synapse/replication/slave/storage/devices.py
+++ b/synapse/replication/slave/storage/devices.py
@@ -49,7 +49,7 @@ class SlavedDeviceStore(DeviceWorkerStore, BaseSlavedStore):
     def get_device_stream_token(self) -> int:
         return self._device_list_id_gen.get_current_token()
 
-    async def process_replication_rows(
+    def process_replication_rows(
         self, stream_name: str, instance_name: str, token: int, rows: Iterable[Any]
     ) -> None:
         if stream_name == DeviceListsStream.NAME:
@@ -59,9 +59,7 @@ class SlavedDeviceStore(DeviceWorkerStore, BaseSlavedStore):
             self._device_list_id_gen.advance(instance_name, token)
             for row in rows:
                 self._user_signature_stream_cache.entity_has_changed(row.user_id, token)
-        return await super().process_replication_rows(
-            stream_name, instance_name, token, rows
-        )
+        return super().process_replication_rows(stream_name, instance_name, token, rows)
 
     def _invalidate_caches_for_devices(
         self, token: int, rows: Iterable[DeviceListsStream.DeviceListsStreamRow]
diff --git a/synapse/replication/slave/storage/push_rule.py b/synapse/replication/slave/storage/push_rule.py
index e1838a81a9faabde9fa3f4d6b8fc3b116de26319..52ee3f7e58eace06890913476a2ee4a8febca51d 100644
--- a/synapse/replication/slave/storage/push_rule.py
+++ b/synapse/replication/slave/storage/push_rule.py
@@ -24,7 +24,7 @@ class SlavedPushRuleStore(SlavedEventStore, PushRulesWorkerStore):
     def get_max_push_rules_stream_id(self) -> int:
         return self._push_rules_stream_id_gen.get_current_token()
 
-    async def process_replication_rows(
+    def process_replication_rows(
         self, stream_name: str, instance_name: str, token: int, rows: Iterable[Any]
     ) -> None:
         if stream_name == PushRulesStream.NAME:
@@ -33,6 +33,4 @@ class SlavedPushRuleStore(SlavedEventStore, PushRulesWorkerStore):
                 self.get_push_rules_for_user.invalidate((row.user_id,))
                 self.get_push_rules_enabled_for_user.invalidate((row.user_id,))
                 self.push_rules_stream_cache.entity_has_changed(row.user_id, token)
-        return await super().process_replication_rows(
-            stream_name, instance_name, token, rows
-        )
+        return super().process_replication_rows(stream_name, instance_name, token, rows)
diff --git a/synapse/replication/slave/storage/pushers.py b/synapse/replication/slave/storage/pushers.py
index fb3f5653af1d97b84a8821107db363279fceb81a..de642bba71b02758e5a5f4da74b967181288e706 100644
--- a/synapse/replication/slave/storage/pushers.py
+++ b/synapse/replication/slave/storage/pushers.py
@@ -40,11 +40,9 @@ class SlavedPusherStore(PusherWorkerStore, BaseSlavedStore):
     def get_pushers_stream_token(self) -> int:
         return self._pushers_id_gen.get_current_token()
 
-    async def process_replication_rows(
+    def process_replication_rows(
         self, stream_name: str, instance_name: str, token: int, rows: Iterable[Any]
     ) -> None:
         if stream_name == PushersStream.NAME:
             self._pushers_id_gen.advance(instance_name, token)
-        return await super().process_replication_rows(
-            stream_name, instance_name, token, rows
-        )
+        return super().process_replication_rows(stream_name, instance_name, token, rows)
diff --git a/synapse/replication/tcp/client.py b/synapse/replication/tcp/client.py
index f9722ccb4ff5e3a12f7c5ec6442a71df2931c6c5..2f59245058e72b8d11a93b2e52a5ee89432932f3 100644
--- a/synapse/replication/tcp/client.py
+++ b/synapse/replication/tcp/client.py
@@ -144,15 +144,13 @@ class ReplicationDataHandler:
             token: stream token for this batch of rows
             rows: a list of Stream.ROW_TYPE objects as returned by Stream.parse_row.
         """
-        await self.store.process_replication_rows(
-            stream_name, instance_name, token, rows
-        )
+        self.store.process_replication_rows(stream_name, instance_name, token, rows)
 
         if self.send_handler:
             await self.send_handler.process_replication_rows(stream_name, token, rows)
 
         if stream_name == TypingStream.NAME:
-            await self._typing_handler.process_replication_rows(token, rows)
+            self._typing_handler.process_replication_rows(token, rows)
             self.notifier.on_new_event(
                 StreamKeyType.TYPING, token, rooms=[row.room_id for row in rows]
             )
diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
index 822108e83bf0f34e7c61b4331268586b381a996c..b8c8dcd76bfc6218543b80589fc7c408d2e1bb7d 100644
--- a/synapse/storage/_base.py
+++ b/synapse/storage/_base.py
@@ -47,7 +47,7 @@ class SQLBaseStore(metaclass=ABCMeta):
         self.database_engine = database.engine
         self.db_pool = database
 
-    async def process_replication_rows(
+    def process_replication_rows(
         self,
         stream_name: str,
         instance_name: str,
diff --git a/synapse/storage/databases/main/account_data.py b/synapse/storage/databases/main/account_data.py
index 337b22294e1aeb1ad57223e1aadea058477245ae..9af9f4f18e19c2510dc031b69d61823b8c08debf 100644
--- a/synapse/storage/databases/main/account_data.py
+++ b/synapse/storage/databases/main/account_data.py
@@ -414,7 +414,7 @@ class AccountDataWorkerStore(PushRulesWorkerStore, CacheInvalidationWorkerStore)
             )
         )
 
-    async def process_replication_rows(
+    def process_replication_rows(
         self,
         stream_name: str,
         instance_name: str,
@@ -437,7 +437,7 @@ class AccountDataWorkerStore(PushRulesWorkerStore, CacheInvalidationWorkerStore)
                 )
                 self._account_data_stream_cache.entity_has_changed(row.user_id, token)
 
-        await super().process_replication_rows(stream_name, instance_name, token, rows)
+        super().process_replication_rows(stream_name, instance_name, token, rows)
 
     async def add_account_data_to_room(
         self, user_id: str, room_id: str, account_data_type: str, content: JsonDict
diff --git a/synapse/storage/databases/main/cache.py b/synapse/storage/databases/main/cache.py
index 048ff3e1b716eea281c1d276610b8113ee5e118c..2367ddeea3fde1af1f2c0656e88ba63b17d85cc0 100644
--- a/synapse/storage/databases/main/cache.py
+++ b/synapse/storage/databases/main/cache.py
@@ -119,7 +119,7 @@ class CacheInvalidationWorkerStore(SQLBaseStore):
             "get_all_updated_caches", get_all_updated_caches_txn
         )
 
-    async def process_replication_rows(
+    def process_replication_rows(
         self, stream_name: str, instance_name: str, token: int, rows: Iterable[Any]
     ) -> None:
         if stream_name == EventsStream.NAME:
@@ -154,7 +154,7 @@ class CacheInvalidationWorkerStore(SQLBaseStore):
                 else:
                     self._attempt_to_invalidate_cache(row.cache_func, row.keys)
 
-        await super().process_replication_rows(stream_name, instance_name, token, rows)
+        super().process_replication_rows(stream_name, instance_name, token, rows)
 
     def _process_event_stream_row(self, token: int, row: EventsStreamRow) -> None:
         data = row.data
diff --git a/synapse/storage/databases/main/deviceinbox.py b/synapse/storage/databases/main/deviceinbox.py
index 45fe58c104277b9306e615add2bd0554d68d8ff5..422e0e65ca5028734a07871b122efc02b5c303d5 100644
--- a/synapse/storage/databases/main/deviceinbox.py
+++ b/synapse/storage/databases/main/deviceinbox.py
@@ -128,7 +128,7 @@ class DeviceInboxWorkerStore(SQLBaseStore):
             prefilled_cache=device_outbox_prefill,
         )
 
-    async def process_replication_rows(
+    def process_replication_rows(
         self,
         stream_name: str,
         instance_name: str,
@@ -148,9 +148,7 @@ class DeviceInboxWorkerStore(SQLBaseStore):
                     self._device_federation_outbox_stream_cache.entity_has_changed(
                         row.entity, token
                     )
-        return await super().process_replication_rows(
-            stream_name, instance_name, token, rows
-        )
+        return super().process_replication_rows(stream_name, instance_name, token, rows)
 
     def get_to_device_stream_token(self) -> int:
         return self._device_inbox_id_gen.get_current_token()
diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py
index 5310d4eda2310c483f74421b6bdc312bd4989a52..f3935bfead96c5b52653996223347287af1787f3 100644
--- a/synapse/storage/databases/main/events_worker.py
+++ b/synapse/storage/databases/main/events_worker.py
@@ -280,7 +280,7 @@ class EventsWorkerStore(SQLBaseStore):
             id_column="chain_id",
         )
 
-    async def process_replication_rows(
+    def process_replication_rows(
         self,
         stream_name: str,
         instance_name: str,
@@ -292,7 +292,7 @@ class EventsWorkerStore(SQLBaseStore):
         elif stream_name == BackfillStream.NAME:
             self._backfill_id_gen.advance(instance_name, -token)
 
-        await super().process_replication_rows(stream_name, instance_name, token, rows)
+        super().process_replication_rows(stream_name, instance_name, token, rows)
 
     async def have_censored_event(self, event_id: str) -> bool:
         """Check if an event has been censored, i.e. if the content of the event has been erased
diff --git a/synapse/storage/databases/main/presence.py b/synapse/storage/databases/main/presence.py
index 9fe3124b359a59255556b0ac57d2d67438bb0ecf..9769a18a9d0cd347b34c0425dcac2172aed7901c 100644
--- a/synapse/storage/databases/main/presence.py
+++ b/synapse/storage/databases/main/presence.py
@@ -431,7 +431,7 @@ class PresenceStore(PresenceBackgroundUpdateStore, CacheInvalidationWorkerStore)
         self._presence_on_startup = []
         return active_on_startup
 
-    async def process_replication_rows(
+    def process_replication_rows(
         self,
         stream_name: str,
         instance_name: str,
@@ -443,6 +443,4 @@ class PresenceStore(PresenceBackgroundUpdateStore, CacheInvalidationWorkerStore)
             for row in rows:
                 self.presence_stream_cache.entity_has_changed(row.user_id, token)
                 self._get_presence_for_user.invalidate((row.user_id,))
-        return await super().process_replication_rows(
-            stream_name, instance_name, token, rows
-        )
+        return super().process_replication_rows(stream_name, instance_name, token, rows)
diff --git a/synapse/storage/databases/main/receipts.py b/synapse/storage/databases/main/receipts.py
index f85862d968b90721751ceeb9361aa73cc93cf84b..0090c9f22512cdb6530267e84550855cc83e8fd5 100644
--- a/synapse/storage/databases/main/receipts.py
+++ b/synapse/storage/databases/main/receipts.py
@@ -589,7 +589,7 @@ class ReceiptsWorkerStore(SQLBaseStore):
             "get_unread_event_push_actions_by_room_for_user", (room_id,)
         )
 
-    async def process_replication_rows(
+    def process_replication_rows(
         self,
         stream_name: str,
         instance_name: str,
@@ -604,9 +604,7 @@ class ReceiptsWorkerStore(SQLBaseStore):
                 )
                 self._receipts_stream_cache.entity_has_changed(row.room_id, token)
 
-        return await super().process_replication_rows(
-            stream_name, instance_name, token, rows
-        )
+        return super().process_replication_rows(stream_name, instance_name, token, rows)
 
     def _insert_linearized_receipt_txn(
         self,
diff --git a/synapse/storage/databases/main/tags.py b/synapse/storage/databases/main/tags.py
index 5e8905369cf0e174c99f315c500e83a8cfbd1275..b0f5de67a30dc66e4cd3393ea79d15716a85b212 100644
--- a/synapse/storage/databases/main/tags.py
+++ b/synapse/storage/databases/main/tags.py
@@ -292,7 +292,7 @@ class TagsWorkerStore(AccountDataWorkerStore):
                 # than the id that the client has.
                 pass
 
-    async def process_replication_rows(
+    def process_replication_rows(
         self,
         stream_name: str,
         instance_name: str,
@@ -305,7 +305,7 @@ class TagsWorkerStore(AccountDataWorkerStore):
                 self.get_tags_for_user.invalidate((row.user_id,))
                 self._account_data_stream_cache.entity_has_changed(row.user_id, token)
 
-        await super().process_replication_rows(stream_name, instance_name, token, rows)
+        super().process_replication_rows(stream_name, instance_name, token, rows)
 
 
 class TagsStore(TagsWorkerStore):
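
Every override touched above follows the same synchronous convention this patch restores: advance the stream id generator, invalidate per-row caches, then chain to super() with a plain call, no await. Below is a minimal, self-contained sketch of that pattern; StubIdGen, StubStreamCache, BaseStore and ExampleWorkerStore are hypothetical stand-ins for illustration, not Synapse's real classes.

    from typing import Any, Iterable, List, Tuple


    class StubIdGen:
        """Stand-in for Synapse's stream id generators (hypothetical)."""

        def __init__(self) -> None:
            self._current = 0

        def advance(self, instance_name: str, token: int) -> None:
            # Real generators track positions per writer instance; this
            # stub just keeps the highest token seen.
            self._current = max(self._current, token)


    class StubStreamCache:
        """Stand-in for Synapse's StreamChangeCache (hypothetical)."""

        def __init__(self) -> None:
            self.changed: List[Tuple[str, int]] = []

        def entity_has_changed(self, entity: str, token: int) -> None:
            self.changed.append((entity, token))


    class BaseStore:
        """Stand-in for SQLBaseStore's base implementation."""

        def process_replication_rows(
            self, stream_name: str, instance_name: str, token: int, rows: Iterable[Any]
        ) -> None:
            pass


    class ExampleWorkerStore(BaseStore):
        def __init__(self) -> None:
            self._id_gen = StubIdGen()
            self._stream_cache = StubStreamCache()

        def process_replication_rows(
            self, stream_name: str, instance_name: str, token: int, rows: Iterable[Any]
        ) -> None:
            if stream_name == "example_stream":
                # Advance the position first, then mark each affected
                # entity as changed so stale cache entries are dropped.
                self._id_gen.advance(instance_name, token)
                for row in rows:
                    self._stream_cache.entity_has_changed(row.user_id, token)
            # Chain to the next class in the MRO; a plain call, no await,
            # under the synchronous convention restored here.
            super().process_replication_rows(stream_name, instance_name, token, rows)

A caller would hand this a batch from a replication stream, e.g. store.process_replication_rows("example_stream", "master", 5, rows), mirroring how ReplicationDataHandler.on_rdata dispatches to the store in synapse/replication/tcp/client.py above.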