Maunium / synapse · Commits

Commit 0caf2a33 (unverified)
Authored 4 years ago by Jonathan de Jong; committed via GitHub 4 years ago

Fix federation stall on concurrent access errors (#9639)
Parent: 4ecba9bd
No related branches, tags, or merge requests found.
Showing 2 changed files with 10 additions and 36 deletions:

    changelog.d/9639.bugfix                            +1  −0
    synapse/storage/databases/main/transactions.py     +9  −36
changelog.d/9639.bugfix (new file, mode 100644, +1 −0)

+ Fix bug where federation sending can stall due to `concurrent access` database exceptions when it falls behind.
synapse/storage/databases/main/transactions.py (+9 −36)
@@ -22,7 +22,6 @@ from canonicaljson import encode_canonical_json
 from synapse.metrics.background_process_metrics import wrap_as_background_process
 from synapse.storage._base import SQLBaseStore, db_to_json
 from synapse.storage.database import DatabasePool, LoggingTransaction
-from synapse.storage.engines import PostgresEngine, Sqlite3Engine
 from synapse.types import JsonDict
 from synapse.util.caches.expiringcache import ExpiringCache
@@ -312,49 +311,23 @@ class TransactionStore(TransactionWorkerStore):
             stream_ordering: the stream_ordering of the event
         """
-        return await self.db_pool.runInteraction(
-            "store_destination_rooms_entries",
-            self._store_destination_rooms_entries_txn,
-            destinations,
-            room_id,
-            stream_ordering,
-        )
-
-    def _store_destination_rooms_entries_txn(
-        self,
-        txn: LoggingTransaction,
-        destinations: Iterable[str],
-        room_id: str,
-        stream_ordering: int,
-    ) -> None:
-
-        # ensure we have a `destinations` row for this destination, as there is
-        # a foreign key constraint.
-        if isinstance(self.database_engine, PostgresEngine):
-            q = """
-                INSERT INTO destinations (destination)
-                    VALUES (?)
-                    ON CONFLICT DO NOTHING;
-            """
-        elif isinstance(self.database_engine, Sqlite3Engine):
-            q = """
-                INSERT OR IGNORE INTO destinations (destination)
-                    VALUES (?);
-            """
-        else:
-            raise RuntimeError("Unknown database engine")
-
-        txn.execute_batch(q, ((destination,) for destination in destinations))
+        await self.db_pool.simple_upsert_many(
+            table="destinations",
+            key_names=("destination",),
+            key_values=[(d,) for d in destinations],
+            value_names=[],
+            value_values=[],
+            desc="store_destination_rooms_entries_dests",
+        )
 
         rows = [(destination, room_id) for destination in destinations]
-        self.db_pool.simple_upsert_many_txn(
-            txn,
+        await self.db_pool.simple_upsert_many(
             table="destination_rooms",
             key_names=("destination", "room_id"),
             key_values=rows,
             value_names=["stream_ordering"],
             value_values=[(stream_ordering,)] * len(rows),
+            desc="store_destination_rooms_entries_rooms",
         )
 
     async def get_destination_last_successful_stream_ordering(
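Read as a whole rather than as a diff, the write path after this commit looks roughly like the sketch below. It is assembled from the hunks above; the method name and signature are not visible in the diff and are inferred here from the removed helper's parameters (destinations: Iterable[str], room_id: str, stream_ordering: int) and from the store_destination_rooms_entries_* desc strings, the docstring is elided, and Iterable is assumed to come from typing.

    # Sketch of the post-commit method, reconstructed from the diff above.
    # The method name and signature are inferred, not shown in the visible hunks.
    async def store_destination_rooms_entries(
        self,
        destinations: Iterable[str],
        room_id: str,
        stream_ordering: int,
    ) -> None:
        # (docstring unchanged by this commit; elided)

        # Ensure a `destinations` row exists for every destination (there is a
        # foreign key constraint), using the generic upsert helper instead of the
        # removed engine-specific INSERT ... ON CONFLICT / INSERT OR IGNORE SQL.
        await self.db_pool.simple_upsert_many(
            table="destinations",
            key_names=("destination",),
            key_values=[(d,) for d in destinations],
            value_names=[],
            value_values=[],
            desc="store_destination_rooms_entries_dests",
        )

        # Then upsert one `destination_rooms` row per destination, carrying the
        # event's stream_ordering.
        rows = [(destination, room_id) for destination in destinations]
        await self.db_pool.simple_upsert_many(
            table="destination_rooms",
            key_names=("destination", "room_id"),
            key_values=rows,
            value_names=["stream_ordering"],
            value_values=[(stream_ordering,)] * len(rows),
            desc="store_destination_rooms_entries_rooms",
        )

In short, the hand-rolled per-engine upsert transaction is replaced by two calls to DatabasePool.simple_upsert_many, which, per the changelog entry above, avoids the `concurrent access` database exceptions that could stall federation sending when it falls behind.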