Skip to content
Snippets Groups Projects
Unverified Commit 4e04435b authored by Erik Johnston's avatar Erik Johnston Committed by GitHub
Browse files

Remove old tables after schema version bump (#9055)

These tables are unused, and can be dropped now that the schema version has been bumped.
parent 63f49902
No related branches found
No related tags found
No related merge requests found
Drop unused database tables.
...@@ -312,12 +312,9 @@ class AccountDataStore(AccountDataWorkerStore): ...@@ -312,12 +312,9 @@ class AccountDataStore(AccountDataWorkerStore):
def __init__(self, database: DatabasePool, db_conn, hs): def __init__(self, database: DatabasePool, db_conn, hs):
self._account_data_id_gen = StreamIdGenerator( self._account_data_id_gen = StreamIdGenerator(
db_conn, db_conn,
"account_data_max_stream_id", "room_account_data",
"stream_id", "stream_id",
extra_tables=[ extra_tables=[("room_tags_revisions", "stream_id")],
("room_account_data", "stream_id"),
("room_tags_revisions", "stream_id"),
],
) )
super().__init__(database, db_conn, hs) super().__init__(database, db_conn, hs)
...@@ -362,14 +359,6 @@ class AccountDataStore(AccountDataWorkerStore): ...@@ -362,14 +359,6 @@ class AccountDataStore(AccountDataWorkerStore):
lock=False, lock=False,
) )
# it's theoretically possible for the above to succeed and the
# below to fail - in which case we might reuse a stream id on
# restart, and the above update might not get propagated. That
# doesn't sound any worse than the whole update getting lost,
# which is what would happen if we combined the two into one
# transaction.
await self._update_max_stream_id(next_id)
self._account_data_stream_cache.entity_has_changed(user_id, next_id) self._account_data_stream_cache.entity_has_changed(user_id, next_id)
self.get_account_data_for_user.invalidate((user_id,)) self.get_account_data_for_user.invalidate((user_id,))
self.get_account_data_for_room.invalidate((user_id, room_id)) self.get_account_data_for_room.invalidate((user_id, room_id))
...@@ -402,18 +391,6 @@ class AccountDataStore(AccountDataWorkerStore): ...@@ -402,18 +391,6 @@ class AccountDataStore(AccountDataWorkerStore):
content, content,
) )
# it's theoretically possible for the above to succeed and the
# below to fail - in which case we might reuse a stream id on
# restart, and the above update might not get propagated. That
# doesn't sound any worse than the whole update getting lost,
# which is what would happen if we combined the two into one
# transaction.
#
# Note: This is only here for backwards compat to allow admins to
# roll back to a previous Synapse version. Next time we update the
# database version we can remove this table.
await self._update_max_stream_id(next_id)
self._account_data_stream_cache.entity_has_changed(user_id, next_id) self._account_data_stream_cache.entity_has_changed(user_id, next_id)
self.get_account_data_for_user.invalidate((user_id,)) self.get_account_data_for_user.invalidate((user_id,))
self.get_global_account_data_by_type_for_user.invalidate( self.get_global_account_data_by_type_for_user.invalidate(
...@@ -486,24 +463,3 @@ class AccountDataStore(AccountDataWorkerStore): ...@@ -486,24 +463,3 @@ class AccountDataStore(AccountDataWorkerStore):
# Invalidate the cache for any ignored users which were added or removed. # Invalidate the cache for any ignored users which were added or removed.
for ignored_user_id in previously_ignored_users ^ currently_ignored_users: for ignored_user_id in previously_ignored_users ^ currently_ignored_users:
self._invalidate_cache_and_stream(txn, self.ignored_by, (ignored_user_id,)) self._invalidate_cache_and_stream(txn, self.ignored_by, (ignored_user_id,))
async def _update_max_stream_id(self, next_id: int) -> None:
"""Update the max stream_id
Args:
next_id: The the revision to advance to.
"""
# Note: This is only here for backwards compat to allow admins to
# roll back to a previous Synapse version. Next time we update the
# database version we can remove this table.
def _update(txn):
update_max_id_sql = (
"UPDATE account_data_max_stream_id"
" SET stream_id = ?"
" WHERE stream_id < ?"
)
txn.execute(update_max_id_sql, (next_id, next_id))
await self.db_pool.runInteraction("update_account_data_max_stream_id", _update)
/* Copyright 2021 The Matrix.org Foundation C.I.C
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

-- The `account_data_max_stream_id` table is no longer used (see #7656); it was
-- only kept so that admins could roll back to a previous Synapse version.
-- Now that the schema version has been bumped, it can be dropped.
DROP TABLE IF EXISTS account_data_max_stream_id;
/* Copyright 2021 The Matrix.org Foundation C.I.C
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

-- The `cache_invalidation_stream` table is no longer used (see #7436); it was
-- only kept until the schema version was bumped, which has now happened.
DROP TABLE IF EXISTS cache_invalidation_stream;
...@@ -255,16 +255,6 @@ class TagsStore(TagsWorkerStore): ...@@ -255,16 +255,6 @@ class TagsStore(TagsWorkerStore):
self._account_data_stream_cache.entity_has_changed, user_id, next_id self._account_data_stream_cache.entity_has_changed, user_id, next_id
) )
# Note: This is only here for backwards compat to allow admins to
# roll back to a previous Synapse version. Next time we update the
# database version we can remove this table.
update_max_id_sql = (
"UPDATE account_data_max_stream_id"
" SET stream_id = ?"
" WHERE stream_id < ?"
)
txn.execute(update_max_id_sql, (next_id, next_id))
update_sql = ( update_sql = (
"UPDATE room_tags_revisions" "UPDATE room_tags_revisions"
" SET stream_id = ?" " SET stream_id = ?"
......
...@@ -35,9 +35,6 @@ logger = logging.getLogger(__name__) ...@@ -35,9 +35,6 @@ logger = logging.getLogger(__name__)
# Remember to update this number every time a change is made to database # Remember to update this number every time a change is made to database
# schema files, so the users will be informed on server restarts. # schema files, so the users will be informed on server restarts.
# XXX: If you're about to bump this to 59 (or higher) please create an update
# that drops the unused `cache_invalidation_stream` table, as per #7436!
# XXX: Also add an update to drop `account_data_max_stream_id` as per #7656!
SCHEMA_VERSION = 59 SCHEMA_VERSION = 59
dir_path = os.path.abspath(os.path.dirname(__file__)) dir_path = os.path.abspath(os.path.dirname(__file__))
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment