Commit 94ef2f4f (unverified), authored 10 months ago by Erik Johnston, committed via GitHub 10 months ago.
Handle duplicate OTK uploads racing (#17241)

Currently this causes one of them to 500.
Parent: bb5a6929
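
For context, the race being fixed: two identical one-time-key uploads for the same device can arrive concurrently, both check the store for existing keys before either has written anything, and both then try to insert the same key IDs; presumably the losing insert is what surfaced as a 500. The following is a minimal, self-contained asyncio sketch of that interleaving; FakeStore, DuplicateKeyError and the helper names are illustrative stand-ins, not Synapse's storage API.

    # Hypothetical reproduction of the race, not Synapse code.
    import asyncio


    class DuplicateKeyError(Exception):
        """Stands in for the error that surfaced as a 500 on the losing request."""


    class FakeStore:
        """Toy store with a non-atomic check-then-insert, like the old code path."""

        def __init__(self) -> None:
            self.keys: dict = {}

        async def get_existing(self, key_ids):
            await asyncio.sleep(0)  # yield, so the other request can read too
            return {k: self.keys[k] for k in key_ids if k in self.keys}

        async def insert(self, new_keys):
            await asyncio.sleep(0)
            for algorithm, key_id, key in new_keys:
                if (algorithm, key_id) in self.keys:
                    raise DuplicateKeyError(f"{algorithm}:{key_id} already exists")
                self.keys[(algorithm, key_id)] = key


    async def upload(store: FakeStore, one_time_keys: dict) -> None:
        key_list = []
        for key_id, key in one_time_keys.items():
            algorithm, key_id = key_id.split(":")
            key_list.append((algorithm, key_id, key))
        existing = await store.get_existing([(alg, kid) for alg, kid, _ in key_list])
        new_keys = [k for k in key_list if (k[0], k[1]) not in existing]
        # Both concurrent requests reach this point with the same "new" keys.
        await store.insert(new_keys)


    async def main() -> None:
        store = FakeStore()
        otks = {"signed_curve25519:AAAAAA": {"key": "zKbLg..."}}
        results = await asyncio.gather(
            upload(store, otks), upload(store, otks), return_exceptions=True
        )
        print(results)  # one upload succeeds, its duplicate raises


    asyncio.run(main())
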
Changes: 2 changed files with 46 additions and 33 deletions.

  changelog.d/17241.bugfix        +1   -0
  synapse/handlers/e2e_keys.py   +45  -33
changelog.d/17241.bugfix (new file, mode 100644, +1 -0):

Fix handling of duplicate concurrent uploading of device one-time-keys.
synapse/handlers/e2e_keys.py (+45 -33):
@@ -53,6 +53,9 @@ if TYPE_CHECKING:
 logger = logging.getLogger(__name__)
 
 
+ONE_TIME_KEY_UPLOAD = "one_time_key_upload_lock"
+
+
 class E2eKeysHandler:
     def __init__(self, hs: "HomeServer"):
         self.config = hs.config
@@ -62,6 +65,7 @@ class E2eKeysHandler:
         self._appservice_handler = hs.get_application_service_handler()
         self.is_mine = hs.is_mine
         self.clock = hs.get_clock()
+        self._worker_lock_handler = hs.get_worker_locks_handler()
 
         federation_registry = hs.get_federation_registry()
 
@@ -855,45 +859,53 @@ class E2eKeysHandler:
     async def _upload_one_time_keys_for_user(
         self, user_id: str, device_id: str, time_now: int, one_time_keys: JsonDict
     ) -> None:
-        logger.info(
-            "Adding one_time_keys %r for device %r for user %r at %d",
-            one_time_keys.keys(),
-            device_id,
-            user_id,
-            time_now,
-        )
+        # We take out a lock so that we don't have to worry about a client
+        # sending duplicate requests.
+        lock_key = f"{user_id}_{device_id}"
+        async with self._worker_lock_handler.acquire_lock(
+            ONE_TIME_KEY_UPLOAD, lock_key
+        ):
+            logger.info(
+                "Adding one_time_keys %r for device %r for user %r at %d",
+                one_time_keys.keys(),
+                device_id,
+                user_id,
+                time_now,
+            )
 
-        # make a list of (alg, id, key) tuples
-        key_list = []
-        for key_id, key_obj in one_time_keys.items():
-            algorithm, key_id = key_id.split(":")
-            key_list.append((algorithm, key_id, key_obj))
+            # make a list of (alg, id, key) tuples
+            key_list = []
+            for key_id, key_obj in one_time_keys.items():
+                algorithm, key_id = key_id.split(":")
+                key_list.append((algorithm, key_id, key_obj))
 
-        # First we check if we have already persisted any of the keys.
-        existing_key_map = await self.store.get_e2e_one_time_keys(
-            user_id, device_id, [k_id for _, k_id, _ in key_list]
-        )
+            # First we check if we have already persisted any of the keys.
+            existing_key_map = await self.store.get_e2e_one_time_keys(
+                user_id, device_id, [k_id for _, k_id, _ in key_list]
+            )
 
-        new_keys = []  # Keys that we need to insert. (alg, id, json) tuples.
-        for algorithm, key_id, key in key_list:
-            ex_json = existing_key_map.get((algorithm, key_id), None)
-            if ex_json:
-                if not _one_time_keys_match(ex_json, key):
-                    raise SynapseError(
-                        400,
-                        (
-                            "One time key %s:%s already exists. "
-                            "Old key: %s; new key: %r"
-                        )
-                        % (algorithm, key_id, ex_json, key),
-                    )
-            else:
-                new_keys.append(
-                    (algorithm, key_id, encode_canonical_json(key).decode("ascii"))
-                )
+            new_keys = []  # Keys that we need to insert. (alg, id, json) tuples.
+            for algorithm, key_id, key in key_list:
+                ex_json = existing_key_map.get((algorithm, key_id), None)
+                if ex_json:
+                    if not _one_time_keys_match(ex_json, key):
+                        raise SynapseError(
+                            400,
+                            (
+                                "One time key %s:%s already exists. "
+                                "Old key: %s; new key: %r"
+                            )
+                            % (algorithm, key_id, ex_json, key),
+                        )
+                else:
+                    new_keys.append(
+                        (algorithm, key_id, encode_canonical_json(key).decode("ascii"))
+                    )
 
-        log_kv({"message": "Inserting new one_time_keys.", "keys": new_keys})
-        await self.store.add_e2e_one_time_keys(user_id, device_id, time_now, new_keys)
+            log_kv({"message": "Inserting new one_time_keys.", "keys": new_keys})
+            await self.store.add_e2e_one_time_keys(
+                user_id, device_id, time_now, new_keys
+            )
 
     async def upload_signing_keys_for_user(
         self, user_id: str, keys: JsonDict
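
The change above serializes the whole read-check-insert sequence per device by taking a lock named ONE_TIME_KEY_UPLOAD with lock_key = f"{user_id}_{device_id}" via hs.get_worker_locks_handler(), presumably so that duplicate requests are serialized even when they land on different workers. Below is a minimal sketch of the same pattern using an in-process asyncio.Lock as a stand-in for the worker lock handler; the store dict and helper names are illustrative only, not Synapse's real API.

    # Sketch of the per-(user, device) serialization pattern, not Synapse code.
    import asyncio
    from collections import defaultdict

    ONE_TIME_KEY_UPLOAD = "one_time_key_upload_lock"

    # One lock per (lock name, lock key); the real handler persists locks so
    # they also cover requests handled by different workers.
    _locks: dict = defaultdict(asyncio.Lock)
    _stored_keys: dict = {}  # (algorithm, key_id) -> key


    async def upload_one_time_keys(user_id: str, device_id: str, otks: dict) -> None:
        lock_key = f"{user_id}_{device_id}"
        async with _locks[(ONE_TIME_KEY_UPLOAD, lock_key)]:
            # The whole read-check-insert sequence now runs under the lock, so a
            # duplicate request waits here and then sees the keys already stored
            # (and identical), rather than racing the first request's insert.
            for key_id, key in otks.items():
                algorithm, key_id = key_id.split(":")
                await asyncio.sleep(0)  # yield, as a real DB round-trip would
                existing = _stored_keys.get((algorithm, key_id))
                if existing is not None:
                    assert existing == key, "conflicting re-upload of a one-time key"
                    continue  # identical re-upload: accept it idempotently
                _stored_keys[(algorithm, key_id)] = key


    async def main() -> None:
        otks = {"signed_curve25519:AAAAAA": {"key": "zKbLg..."}}
        # Two identical concurrent uploads: with the lock, both now succeed.
        await asyncio.gather(
            upload_one_time_keys("@alice:example.org", "DEVICE1", otks),
            upload_one_time_keys("@alice:example.org", "DEVICE1", otks),
        )
        print(_stored_keys)


    asyncio.run(main())
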