Commit b0a14bf5, authored 8 years ago by Erik Johnston
Handle the fact that some tables have negative rowid rows
Parent: 530bc862
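Background for the change: SQLite gives every row in an ordinary table a 64-bit rowid, and rowids can be assigned explicitly, including negative values. A porter that keeps a single bookmark and only scans upwards from rowid 1 never reaches such rows. The sketch below is a minimal standalone demonstration of the problem and of counting in both directions, as the commit does; it uses plain sqlite3 with an illustrative table and column, not the script's actual schema.

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE events (event_id TEXT)")

    # SQLite allows explicitly assigned rowids, including negative ones.
    conn.execute("INSERT INTO events (rowid, event_id) VALUES (-5, '$neg:example.com')")
    conn.execute("INSERT INTO events (rowid, event_id) VALUES (1, '$pos:example.com')")

    # A forward-only scan starting at rowid 1 misses the negative row...
    print(conn.execute("SELECT count(*) FROM events WHERE rowid >= 1").fetchone()[0])  # -> 1

    # ...so the porter now counts (and walks) both directions.
    forward = conn.execute("SELECT count(*) FROM events WHERE rowid >= ?", (1,)).fetchone()[0]
    backward = conn.execute("SELECT count(*) FROM events WHERE rowid <= ?", (0,)).fetchone()[0]
    print(forward + backward)  # -> 2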
Showing 1 changed file: scripts/synapse_port_db (+111 additions, −43 deletions)
@@ -34,7 +34,7 @@ logger = logging.getLogger("synapse_port_db")

 BOOLEAN_COLUMNS = {
-    "events": ["processed", "outlier"],
+    "events": ["processed", "outlier", "contains_url"],
     "rooms": ["is_public"],
     "event_edges": ["is_state"],
     "presence_list": ["accepted"],
@@ -92,8 +92,12 @@ class Store(object):
     _simple_select_onecol_txn = SQLBaseStore.__dict__["_simple_select_onecol_txn"]
     _simple_select_onecol = SQLBaseStore.__dict__["_simple_select_onecol"]
+    _simple_select_one = SQLBaseStore.__dict__["_simple_select_one"]
+    _simple_select_one_txn = SQLBaseStore.__dict__["_simple_select_one_txn"]
+    _simple_select_one_onecol = SQLBaseStore.__dict__["_simple_select_one_onecol"]
+    _simple_select_one_onecol_txn = SQLBaseStore.__dict__["_simple_select_one_onecol_txn"]
     _simple_select_one_onecol_txn = SQLBaseStore.__dict__["_simple_select_one_onecol_txn"]

     _simple_update_one = SQLBaseStore.__dict__["_simple_update_one"]
     _simple_update_one_txn = SQLBaseStore.__dict__["_simple_update_one_txn"]
@@ -158,31 +162,40 @@ class Porter(object):
     def setup_table(self, table):
         if table in APPEND_ONLY_TABLES:
             # It's safe to just carry on inserting.
-            next_chunk = yield self.postgres_store._simple_select_one_onecol(
+            row = yield self.postgres_store._simple_select_one(
                 table="port_from_sqlite3",
                 keyvalues={"table_name": table},
-                retcol="rowid",
+                retcols=("forward_rowid", "backward_rowid"),
                 allow_none=True,
             )

             total_to_port = None
-            if next_chunk is None:
+            if row is None:
                 if table == "sent_transactions":
-                    next_chunk, already_ported, total_to_port = (
+                    forward_chunk, already_ported, total_to_port = (
                         yield self._setup_sent_transactions()
                     )
+                    backward_chunk = 0
                 else:
                     yield self.postgres_store._simple_insert(
                         table="port_from_sqlite3",
-                        values={"table_name": table, "rowid": 1}
+                        values={
+                            "table_name": table,
+                            "forward_rowid": 1,
+                            "backward_rowid": 0,
+                        }
                     )

-                    next_chunk = 1
+                    forward_chunk = 1
+                    backward_chunk = 0
                     already_ported = 0
+            else:
+                forward_chunk = row["forward_rowid"]
+                backward_chunk = row["backward_rowid"]

             if total_to_port is None:
                 already_ported, total_to_port = yield self._get_total_count_to_port(
-                    table, next_chunk
+                    table, forward_chunk, backward_chunk
                 )
         else:
             def delete_all(txn):
@@ -196,46 +209,85 @@ class Porter(object):
             yield self.postgres_store._simple_insert(
                 table="port_from_sqlite3",
-                values={"table_name": table, "rowid": 0}
+                values={
+                    "table_name": table,
+                    "forward_rowid": 1,
+                    "backward_rowid": 0,
+                }
             )

-            next_chunk = 1
+            forward_chunk = 1
+            backward_chunk = 0
             already_ported, total_to_port = yield self._get_total_count_to_port(
-                table, next_chunk
+                table, forward_chunk, backward_chunk
             )

-        defer.returnValue((table, already_ported, total_to_port, next_chunk))
+        defer.returnValue(
+            (table, already_ported, total_to_port, forward_chunk, backward_chunk)
+        )

     @defer.inlineCallbacks
-    def handle_table(self, table, postgres_size, table_size, next_chunk):
+    def handle_table(self, table, postgres_size, table_size, forward_chunk,
+                     backward_chunk):
         if not table_size:
             return

         self.progress.add_table(table, postgres_size, table_size)

         if table == "event_search":
-            yield self.handle_search_table(postgres_size, table_size, next_chunk)
+            yield self.handle_search_table(
+                postgres_size, table_size, forward_chunk, backward_chunk
+            )
             return

-        select = (
+        forward_select = (
             "SELECT rowid, * FROM %s WHERE rowid >= ? ORDER BY rowid LIMIT ?"
             % (table,)
         )

+        backward_select = (
+            "SELECT rowid, * FROM %s WHERE rowid <= ? ORDER BY rowid LIMIT ?"
+            % (table,)
+        )
+
+        do_forward = [True]
+        do_backward = [True]
+
         while True:
             def r(txn):
-                txn.execute(select, (next_chunk, self.batch_size,))
-                rows = txn.fetchall()
-                headers = [column[0] for column in txn.description]
+                forward_rows = []
+                backward_rows = []
+                if do_forward[0]:
+                    txn.execute(forward_select, (forward_chunk, self.batch_size,))
+                    forward_rows = txn.fetchall()
+                    if not forward_rows:
+                        do_forward[0] = False
+
+                if do_backward[0]:
+                    txn.execute(backward_select, (backward_chunk, self.batch_size,))
+                    backward_rows = txn.fetchall()
+                    if not backward_rows:
+                        do_backward[0] = False
+
+                if forward_rows or backward_rows:
+                    headers = [column[0] for column in txn.description]
+                else:
+                    headers = None

-                return headers, rows
+                return headers, forward_rows, backward_rows

-            headers, rows = yield self.sqlite_store.runInteraction("select", r)
+            headers, frows, brows = yield self.sqlite_store.runInteraction("select", r)

-            if rows:
-                next_chunk = rows[-1][0] + 1
+            if frows or brows:
+                if frows:
+                    forward_chunk = max(row[0] for row in frows) + 1
+                if brows:
+                    backward_chunk = min(row[0] for row in brows) - 1

+                rows = frows + brows
                 self._convert_rows(table, headers, rows)

                 def insert(txn):
@@ -247,7 +299,10 @@ class Porter(object):
                     txn,
                     table="port_from_sqlite3",
                     keyvalues={"table_name": table},
-                    updatevalues={"rowid": next_chunk},
+                    updatevalues={
+                        "forward_rowid": forward_chunk,
+                        "backward_rowid": backward_chunk,
+                    },
                 )

             yield self.postgres_store.execute(insert)
@@ -259,7 +314,8 @@ class Porter(object):
                 return

     @defer.inlineCallbacks
-    def handle_search_table(self, postgres_size, table_size, next_chunk):
+    def handle_search_table(self, postgres_size, table_size, forward_chunk,
+                            backward_chunk):
         select = (
             "SELECT es.rowid, es.*, e.origin_server_ts, e.stream_ordering"
             " FROM event_search as es"
@@ -270,7 +326,7 @@ class Porter(object):

         while True:
             def r(txn):
-                txn.execute(select, (next_chunk, self.batch_size,))
+                txn.execute(select, (forward_chunk, self.batch_size,))
                 rows = txn.fetchall()
                 headers = [column[0] for column in txn.description]
@@ -279,7 +335,7 @@ class Porter(object):
             headers, rows = yield self.sqlite_store.runInteraction("select", r)

             if rows:
-                next_chunk = rows[-1][0] + 1
+                forward_chunk = rows[-1][0] + 1

                 # We have to treat event_search differently since it has a
                 # different structure in the two different databases.
@@ -312,7 +368,10 @@ class Porter(object):
                     txn,
                     table="port_from_sqlite3",
                     keyvalues={"table_name": "event_search"},
-                    updatevalues={"rowid": next_chunk},
+                    updatevalues={
+                        "forward_rowid": forward_chunk,
+                        "backward_rowid": backward_chunk,
+                    },
                 )

             yield self.postgres_store.execute(insert)
@@ -324,7 +383,6 @@ class Porter(object):
         else:
             return

-
     def setup_db(self, db_config, database_engine):
         db_conn = database_engine.module.connect(
             **{
@@ -395,7 +453,8 @@ class Porter(object):
                 txn.execute(
                     "CREATE TABLE port_from_sqlite3 ("
                     " table_name varchar(100) NOT NULL UNIQUE,"
-                    " rowid bigint NOT NULL"
+                    " forward_rowid bigint NOT NULL,"
+                    " backward_rowid bigint NOT NULL"
                     ")"
                 )
@@ -458,7 +517,7 @@ class Porter(object):
     @defer.inlineCallbacks
     def _setup_sent_transactions(self):
         # Only save things from the last day
-        yesterday = int(time.time()*1000) - 86400000
+        yesterday = int(time.time() * 1000) - 86400000

         # And save the max transaction id from each destination
         select = (
@@ -514,7 +573,11 @@ class Porter(object):
         yield self.postgres_store._simple_insert(
             table="port_from_sqlite3",
-            values={"table_name": "sent_transactions", "rowid": next_chunk}
+            values={
+                "table_name": "sent_transactions",
+                "forward_rowid": next_chunk,
+                "backward_rowid": 0,
+            }
         )

         def get_sent_table_size(txn):
@@ -535,13 +598,18 @@ class Porter(object):
         defer.returnValue((next_chunk, inserted_rows, total_count))

     @defer.inlineCallbacks
-    def _get_remaining_count_to_port(self, table, next_chunk):
-        rows = yield self.sqlite_store.execute_sql(
+    def _get_remaining_count_to_port(self, table, forward_chunk, backward_chunk):
+        frows = yield self.sqlite_store.execute_sql(
             "SELECT count(*) FROM %s WHERE rowid >= ?" % (table,),
-            next_chunk,
+            forward_chunk,
         )

-        defer.returnValue(rows[0][0])
+        brows = yield self.sqlite_store.execute_sql(
+            "SELECT count(*) FROM %s WHERE rowid <= ?" % (table,),
+            backward_chunk,
+        )
+
+        defer.returnValue(frows[0][0] + brows[0][0])

     @defer.inlineCallbacks
     def _get_already_ported_count(self, table):
@@ -552,10 +620,10 @@ class Porter(object):
         defer.returnValue(rows[0][0])

     @defer.inlineCallbacks
-    def _get_total_count_to_port(self, table, next_chunk):
+    def _get_total_count_to_port(self, table, forward_chunk, backward_chunk):
         remaining, done = yield defer.gatherResults(
             [
-                self._get_remaining_count_to_port(table, next_chunk),
+                self._get_remaining_count_to_port(table, forward_chunk, backward_chunk),
                 self._get_already_ported_count(table),
             ],
             consumeErrors=True,
@@ -686,7 +754,7 @@ class CursesProgress(Progress):
                 color = curses.color_pair(2) if perc == 100 else curses.color_pair(1)

                 self.stdscr.addstr(
-                    i+2, left_margin + max_len - len(table),
+                    i + 2, left_margin + max_len - len(table),
                     table,
                     curses.A_BOLD | color,
                 )
@@ -694,18 +762,18 @@ class CursesProgress(Progress):
                 size = 20

                 progress = "[%s%s]" % (
-                    "#" * int(perc*size/100),
-                    " " * (size - int(perc*size/100)),
+                    "#" * int(perc * size / 100),
+                    " " * (size - int(perc * size / 100)),
                 )

                 self.stdscr.addstr(
-                    i+2, left_margin + max_len + middle_space,
+                    i + 2, left_margin + max_len + middle_space,
                     "%s %3d%% (%d/%d)" % (progress, perc, data["num_done"], data["total"]),
                 )

             if self.finished:
                 self.stdscr.addstr(
-                    rows-1, 0,
+                    rows - 1, 0,
                     "Press any key to exit...",
                 )
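Taken together, the diff replaces the script's single per-table rowid bookmark with a (forward_rowid, backward_rowid) pair stored in port_from_sqlite3: the forward bookmark walks rowids upward from 1 while the backward bookmark walks downward from 0, so rows with negative rowids are ported as well. Below is a simplified, standalone sketch of that two-direction batch walk, not the script's actual code: it uses plain sqlite3 instead of the Twisted store wrappers, the helper name is made up, and the backward scan is ordered descending here so each bookmark always moves past the batch it just fetched.

    import sqlite3

    def walk_table(conn, table, batch_size=500):
        """Yield batches of (rowid, ...) rows from a SQLite table, covering negative rowids too."""
        forward_chunk, backward_chunk = 1, 0   # mirrors forward_rowid / backward_rowid
        do_forward, do_backward = True, True

        forward_select = (
            "SELECT rowid, * FROM %s WHERE rowid >= ? ORDER BY rowid ASC LIMIT ?" % (table,)
        )
        backward_select = (
            "SELECT rowid, * FROM %s WHERE rowid <= ? ORDER BY rowid DESC LIMIT ?" % (table,)
        )

        while do_forward or do_backward:
            rows = []

            if do_forward:
                frows = conn.execute(forward_select, (forward_chunk, batch_size)).fetchall()
                if frows:
                    forward_chunk = max(r[0] for r in frows) + 1
                    rows.extend(frows)
                else:
                    do_forward = False

            if do_backward:
                brows = conn.execute(backward_select, (backward_chunk, batch_size)).fetchall()
                if brows:
                    backward_chunk = min(r[0] for r in brows) - 1
                    rows.extend(brows)
                else:
                    do_backward = False

            if rows:
                # The real script converts each combined batch and inserts it into Postgres,
                # then writes both bookmarks back into port_from_sqlite3 so the port can resume.
                yield rows

Each yielded batch roughly corresponds to one iteration of the while loop in handle_table above.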