Skip to content
GitLab
Explore
Sign in
Primary navigation
Search or go to…
Project
synapse
Manage
Activity
Members
Labels
Plan
Issues
Issue boards
Milestones
Code
Merge requests
Repository
Branches
Commits
Tags
Repository graph
Compare revisions
Build
Pipelines
Jobs
Pipeline schedules
Artifacts
Deploy
Releases
Container Registry
Model registry
Monitor
Service Desk
Analyze
Value stream analytics
Contributor analytics
CI/CD analytics
Repository analytics
Model experiments
Help
Help
Support
GitLab documentation
Compare GitLab plans
Community forum
Contribute to GitLab
Provide feedback
Keyboard shortcuts
?
Snippets
Groups
Projects
Show more breadcrumbs
Maunium
synapse
Commits
e6a6c4fb
Commit
e6a6c4fb
authored
5 years ago
by
Richard van der Hoff
Browse files
Options
Downloads
Patches
Plain Diff
split _get_events_from_db out of _enqueue_events
parent
c9964ba6
No related branches found
No related tags found
No related merge requests found
Changes
1
Hide whitespace changes
Inline
Side-by-side
Showing
1 changed file
synapse/storage/events_worker.py
+51
-32
51 additions, 32 deletions
synapse/storage/events_worker.py
with
51 additions
and
32 deletions
synapse/storage/events_worker.py
+
51
−
32
View file @
e6a6c4fb
...
...
@@ -343,13 +343,12 @@ class EventsWorkerStore(SQLBaseStore):
log_ctx
=
LoggingContext
.
current_context
()
log_ctx
.
record_event_fetch
(
len
(
missing_events_ids
))
# Note that _enqueue_events is also responsible for turning db rows
# Note that _get_events_from_db is also responsible for turning db rows
# into FrozenEvents (via _get_event_from_row), which involves seeing if
# the events have been redacted, and if so pulling the redaction event out
# of the database to check it.
#
# _enqueue_events is a bit of a rubbish name but naming is hard.
missing_events
=
yield
self
.
_enqueue_events
(
missing_events
=
yield
self
.
_get_events_from_db
(
missing_events_ids
,
allow_rejected
=
allow_rejected
)
...
...
@@ -458,43 +457,25 @@ class EventsWorkerStore(SQLBaseStore):
self
.
hs
.
get_reactor
().
callFromThread
(
fire
,
event_list
,
e
)
@defer.inlineCallbacks
def
_
enqueue_events
(
self
,
events
,
allow_rejected
=
False
):
"""Fetches events from the database using the _event_fetch_list. This
allows batch and bulk fetching of events - it allows us to fetch events
without having to create a new transaction for each request for events.
def
_
get_events_from_db
(
self
,
event
_id
s
,
allow_rejected
=
False
):
"""Fetch a bunch of events from the database.
Returned events will be added to the cache for future lookups.
Args:
events (Iterable[str]): events to be fetched.
event_ids (Iterable[str]): The event_ids of the events to fetch
allow_rejected (bool): Whether to include rejected events
Returns:
Deferred[Dict[str, _EventCacheEntry]]: map from event id to result.
Deferred[Dict[str, _EventCacheEntry]]:
map from event id to result.
"""
if
not
events
:
if
not
event
_id
s
:
return
{}
events_d
=
defer
.
Deferred
()
with
self
.
_event_fetch_lock
:
self
.
_event_fetch_list
.
append
((
events
,
events_d
))
row_map
=
yield
self
.
_enqueue_events
(
event_ids
)
self
.
_event_fetch_lock
.
notify
()
if
self
.
_event_fetch_ongoing
<
EVENT_QUEUE_THREADS
:
self
.
_event_fetch_ongoing
+=
1
should_start
=
True
else
:
should_start
=
False
if
should_start
:
run_as_background_process
(
"
fetch_events
"
,
self
.
runWithConnection
,
self
.
_do_fetch
)
logger
.
debug
(
"
Loading %d events
"
,
len
(
events
))
with
PreserveLoggingContext
():
row_map
=
yield
events_d
logger
.
debug
(
"
Loaded %d events (%d rows)
"
,
len
(
events
),
len
(
row_map
))
rows
=
(
row_map
.
get
(
event_id
)
for
event_id
in
events
)
rows
=
(
row_map
.
get
(
event_id
)
for
event_id
in
event_ids
)
# filter out absent rows
rows
=
filter
(
operator
.
truth
,
rows
)
...
...
@@ -521,6 +502,44 @@ class EventsWorkerStore(SQLBaseStore):
return
{
e
.
event
.
event_id
:
e
for
e
in
res
if
e
}
@defer.inlineCallbacks
def
_enqueue_events
(
self
,
events
):
"""
Fetches events from the database using the _event_fetch_list. This
allows batch and bulk fetching of events - it allows us to fetch events
without having to create a new transaction for each request for events.
Args:
events (Iterable[str]): events to be fetched.
Returns:
Deferred[Dict[str, Dict]]: map from event id to row data from the database.
May contain events that weren't requested.
"""
events_d
=
defer
.
Deferred
()
with
self
.
_event_fetch_lock
:
self
.
_event_fetch_list
.
append
((
events
,
events_d
))
self
.
_event_fetch_lock
.
notify
()
if
self
.
_event_fetch_ongoing
<
EVENT_QUEUE_THREADS
:
self
.
_event_fetch_ongoing
+=
1
should_start
=
True
else
:
should_start
=
False
if
should_start
:
run_as_background_process
(
"
fetch_events
"
,
self
.
runWithConnection
,
self
.
_do_fetch
)
logger
.
debug
(
"
Loading %d events: %s
"
,
len
(
events
),
events
)
with
PreserveLoggingContext
():
row_map
=
yield
events_d
logger
.
debug
(
"
Loaded %d events (%d rows)
"
,
len
(
events
),
len
(
row_map
))
return
row_map
def
_fetch_event_rows
(
self
,
txn
,
event_ids
):
"""
Fetch event rows from the database
...
...
This diff is collapsed.
Click to expand it.
Preview
0%
Loading
Try again
or
attach a new file
.
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Save comment
Cancel
Please
register
or
sign in
to comment