diff --git a/Cargo.lock b/Cargo.lock
index 808ba4ef67d547a2e1d599028061cb42a2f50177..4f58ef369d50cd446b4f9f4af57ff4a5caeb0b6e 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -76,9 +76,9 @@ dependencies = [
 
 [[package]]
 name = "async-trait"
-version = "0.1.50"
+version = "0.1.51"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0b98e84bbb4cbcdd97da190ba0c58a1bb0de2c1fdf67d159e192ed766aeca722"
+checksum = "44318e776df68115a881de9a8fd1b9e53368d7a4a5ce4cc48517da3393233a5e"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -135,11 +135,20 @@ version = "0.1.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "383d29d513d8764dcdc42ea295d979eb99c3c9f00607b3692cf68a431f7dca72"
 
+[[package]]
+name = "bincode"
+version = "1.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad"
+dependencies = [
+ "serde",
+]
+
 [[package]]
 name = "bindgen"
-version = "0.57.0"
+version = "0.59.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fd4865004a46a0aafb2a0a5eb19d3c9fc46ee5f063a6cfc605c69ac9ecf5263d"
+checksum = "453c49e5950bb0eb63bb3df640e31618846c89d5b7faa54040d76e98e0134375"
 dependencies = [
  "bitflags",
  "cexpr",
@@ -160,6 +169,18 @@ version = "1.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693"
 
+[[package]]
+name = "bitvec"
+version = "0.19.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8942c8d352ae1838c9dda0b0ca2ab657696ef2232a20147cf1b30ae1a9cb4321"
+dependencies = [
+ "funty",
+ "radium",
+ "tap",
+ "wyz",
+]
+
 [[package]]
 name = "blake2b_simd"
 version = "0.5.11"
@@ -188,9 +209,9 @@ checksum = "9c59e7af012c713f529e7a3ee57ce9b31ddd858d4b512923602f74608b009631"
 
 [[package]]
 name = "bytemuck"
-version = "1.7.0"
+version = "1.7.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9966d2ab714d0f785dbac0a0396251a35280aeb42413281617d0209ab4898435"
+checksum = "72957246c41db82b8ef88a5486143830adeb8227ef9837740bdec67724cf2c5b"
 
 [[package]]
 name = "byteorder"
@@ -215,13 +236,19 @@ dependencies = [
 
 [[package]]
 name = "cexpr"
-version = "0.4.0"
+version = "0.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f4aedb84272dbe89af497cf81375129abda4fc0a9e7c5d317498c15cc30c0d27"
+checksum = "db507a7679252d2276ed0dd8113c6875ec56d3089f9225b2b42c30cc1f8e5c89"
 dependencies = [
  "nom",
 ]
 
+[[package]]
+name = "cfg-if"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
+
 [[package]]
 name = "cfg-if"
 version = "1.0.0"
@@ -266,10 +293,10 @@ dependencies = [
  "bytes",
  "crossbeam",
  "directories",
+ "heed",
  "http",
  "image",
  "jsonwebtoken",
- "log",
  "lru-cache",
  "num_cpus",
  "opentelemetry",
@@ -292,8 +319,10 @@ dependencies = [
  "serde_yaml",
  "sled",
  "thiserror",
+ "threadpool",
  "tokio",
  "tracing",
+ "tracing-flame",
  "tracing-opentelemetry",
  "tracing-subscriber",
  "trust-dns-resolver",
@@ -326,9 +355,9 @@ checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e"
 
 [[package]]
 name = "cookie"
-version = "0.15.0"
+version = "0.15.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ffdf8865bac3d9a3bde5bde9088ca431b11f5d37c7a578b8086af77248b76627"
+checksum = "d5f1c7727e460397e56abc4bddc1d49e07a1ad78fc98eb2e1c8f032a58a2f80d"
 dependencies = [
  "percent-encoding",
  "time 0.2.27",
@@ -366,7 +395,7 @@ version = "1.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "81156fece84ab6a9f2afdb109ce3ae577e42b1228441eded99bd77f627953b1a"
 dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
 ]
 
 [[package]]
@@ -375,12 +404,12 @@ version = "0.8.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4ae5588f6b3c3cb05239e90bd110f257254aecd01e4635400391aeae07497845"
 dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
  "crossbeam-channel",
  "crossbeam-deque",
  "crossbeam-epoch",
- "crossbeam-queue",
- "crossbeam-utils",
+ "crossbeam-queue 0.3.2",
+ "crossbeam-utils 0.8.5",
 ]
 
 [[package]]
@@ -389,8 +418,8 @@ version = "0.5.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "06ed27e177f16d65f0f0c22a213e17c696ace5dd64b14258b52f9417ccb52db4"
 dependencies = [
- "cfg-if",
- "crossbeam-utils",
+ "cfg-if 1.0.0",
+ "crossbeam-utils 0.8.5",
 ]
 
 [[package]]
@@ -399,9 +428,9 @@ version = "0.8.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "94af6efb46fef72616855b036a624cf27ba656ffc9be1b9a3c931cfc7749a9a9"
 dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
  "crossbeam-epoch",
- "crossbeam-utils",
+ "crossbeam-utils 0.8.5",
 ]
 
 [[package]]
@@ -410,21 +439,40 @@ version = "0.9.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4ec02e091aa634e2c3ada4a392989e7c3116673ef0ac5b72232439094d73b7fd"
 dependencies = [
- "cfg-if",
- "crossbeam-utils",
+ "cfg-if 1.0.0",
+ "crossbeam-utils 0.8.5",
  "lazy_static",
  "memoffset",
  "scopeguard",
 ]
 
+[[package]]
+name = "crossbeam-queue"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7c979cd6cfe72335896575c6b5688da489e420d36a27a0b9eb0c73db574b4a4b"
+dependencies = [
+ "crossbeam-utils 0.6.6",
+]
+
 [[package]]
 name = "crossbeam-queue"
 version = "0.3.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "9b10ddc024425c88c2ad148c1b0fd53f4c6d38db9697c9f1588381212fa657c9"
 dependencies = [
- "cfg-if",
- "crossbeam-utils",
+ "cfg-if 1.0.0",
+ "crossbeam-utils 0.8.5",
+]
+
+[[package]]
+name = "crossbeam-utils"
+version = "0.6.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "04973fa96e96579258a5091af6003abde64af786b860f18622b82e026cca60e6"
+dependencies = [
+ "cfg-if 0.1.10",
+ "lazy_static",
 ]
 
 [[package]]
@@ -433,7 +481,7 @@ version = "0.8.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d82cfc11ce7f2c3faef78d8a684447b40d503d9681acebed6cb728d45940c4db"
 dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
  "lazy_static",
 ]
 
@@ -490,9 +538,9 @@ dependencies = [
 
 [[package]]
 name = "devise"
-version = "0.3.0"
+version = "0.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "411cf45ac38f00df3679689616649dc12607b846db171780bb790b514a042832"
+checksum = "50c7580b072f1c8476148f16e0a0d5dedddab787da98d86c5082c5e9ed8ab595"
 dependencies = [
  "devise_codegen",
  "devise_core",
@@ -500,9 +548,9 @@ dependencies = [
 
 [[package]]
 name = "devise_codegen"
-version = "0.3.0"
+version = "0.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2cf7081f06822f1787e29359354426132cf832cc977d7a8ff747848631462ad1"
+checksum = "123c73e7a6e51b05c75fe1a1b2f4e241399ea5740ed810b0e3e6cacd9db5e7b2"
 dependencies = [
  "devise_core",
  "quote",
@@ -510,9 +558,9 @@ dependencies = [
 
 [[package]]
 name = "devise_core"
-version = "0.3.0"
+version = "0.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "80c23631758736875d7ce08f847f296b4001b72cf90878e85b47df7ac5442147"
+checksum = "841ef46f4787d9097405cac4e70fb8644fc037b526e8c14054247c0263c400d0"
 dependencies = [
  "bitflags",
  "proc-macro2",
@@ -564,9 +612,9 @@ checksum = "56899898ce76aaf4a0f24d914c97ea6ed976d42fec6ad33fcbb0a1103e07b2b0"
 
 [[package]]
 name = "ed25519"
-version = "1.1.1"
+version = "1.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8d0860415b12243916284c67a9be413e044ee6668247b99ba26d94b2bc06c8f6"
+checksum = "4620d40f6d2601794401d6dd95a5cf69b6c157852539470eeda433a99b3c0efc"
 dependencies = [
  "signature",
 ]
@@ -597,7 +645,7 @@ version = "0.8.28"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "80df024fbc5ac80f87dfef0d9f5209a252f2a497f7f42944cff24d8253cac065"
 dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
 ]
 
 [[package]]
@@ -677,11 +725,17 @@ dependencies = [
  "winapi",
 ]
 
+[[package]]
+name = "funty"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fed34cd105917e91daa4da6b3728c47b068749d6a62c59811f06ed2ac71d9da7"
+
 [[package]]
 name = "futures"
-version = "0.3.15"
+version = "0.3.16"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0e7e43a803dae2fa37c1f6a8fe121e1f7bf9548b4dfc0522a42f34145dadfc27"
+checksum = "1adc00f486adfc9ce99f77d717836f0c5aa84965eb0b4f051f4e83f7cab53f8b"
 dependencies = [
  "futures-channel",
  "futures-core",
@@ -694,9 +748,9 @@ dependencies = [
 
 [[package]]
 name = "futures-channel"
-version = "0.3.15"
+version = "0.3.16"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e682a68b29a882df0545c143dc3646daefe80ba479bcdede94d5a703de2871e2"
+checksum = "74ed2411805f6e4e3d9bc904c95d5d423b89b3b25dc0250aa74729de20629ff9"
 dependencies = [
  "futures-core",
  "futures-sink",
@@ -704,15 +758,15 @@ dependencies = [
 
 [[package]]
 name = "futures-core"
-version = "0.3.15"
+version = "0.3.16"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0402f765d8a89a26043b889b26ce3c4679d268fa6bb22cd7c6aad98340e179d1"
+checksum = "af51b1b4a7fdff033703db39de8802c673eb91855f2e0d47dcf3bf2c0ef01f99"
 
 [[package]]
 name = "futures-executor"
-version = "0.3.15"
+version = "0.3.16"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "badaa6a909fac9e7236d0620a2f57f7664640c56575b71a7552fbd68deafab79"
+checksum = "4d0d535a57b87e1ae31437b892713aee90cd2d7b0ee48727cd11fc72ef54761c"
 dependencies = [
  "futures-core",
  "futures-task",
@@ -721,15 +775,15 @@ dependencies = [
 
 [[package]]
 name = "futures-io"
-version = "0.3.15"
+version = "0.3.16"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "acc499defb3b348f8d8f3f66415835a9131856ff7714bf10dadfc4ec4bdb29a1"
+checksum = "0b0e06c393068f3a6ef246c75cdca793d6a46347e75286933e5e75fd2fd11582"
 
 [[package]]
 name = "futures-macro"
-version = "0.3.15"
+version = "0.3.16"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a4c40298486cdf52cc00cd6d6987892ba502c7656a16a4192a9992b1ccedd121"
+checksum = "c54913bae956fb8df7f4dc6fc90362aa72e69148e3f39041fbe8742d21e0ac57"
 dependencies = [
  "autocfg",
  "proc-macro-hack",
@@ -740,21 +794,21 @@ dependencies = [
 
 [[package]]
 name = "futures-sink"
-version = "0.3.15"
+version = "0.3.16"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a57bead0ceff0d6dde8f465ecd96c9338121bb7717d3e7b108059531870c4282"
+checksum = "c0f30aaa67363d119812743aa5f33c201a7a66329f97d1a887022971feea4b53"
 
 [[package]]
 name = "futures-task"
-version = "0.3.15"
+version = "0.3.16"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a16bef9fc1a4dddb5bee51c989e3fbba26569cbb0e31f5b303c184e3dd33dae"
+checksum = "bbe54a98670017f3be909561f6ad13e810d9a51f3f061b902062ca3da80799f2"
 
 [[package]]
 name = "futures-util"
-version = "0.3.15"
+version = "0.3.16"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "feb5c238d27e2bf94ffdfd27b2c29e3df4a68c4193bb6427384259e2bf191967"
+checksum = "67eb846bfd58e44a8481a00049e82c43e0ccb5d61f8dc071057cb19249dd4d78"
 dependencies = [
  "autocfg",
  "futures-channel",
@@ -809,7 +863,7 @@ version = "0.1.16"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce"
 dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
  "libc",
  "wasi 0.9.0+wasi-snapshot-preview1",
 ]
@@ -820,7 +874,7 @@ version = "0.2.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7fcd999463524c52659517fe2cea98493cfe485d10565e7b0fb07dbba7ad2753"
 dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
  "libc",
  "wasi 0.10.2+wasi-snapshot-preview1",
 ]
@@ -887,6 +941,42 @@ dependencies = [
  "unicode-segmentation",
 ]
 
+[[package]]
+name = "heed"
+version = "0.10.6"
+source = "git+https://github.com/timokoesters/heed.git?rev=f6f825da7fb2c758867e05ad973ef800a6fe1d5d#f6f825da7fb2c758867e05ad973ef800a6fe1d5d"
+dependencies = [
+ "bytemuck",
+ "byteorder",
+ "heed-traits",
+ "heed-types",
+ "libc",
+ "lmdb-rkv-sys",
+ "once_cell",
+ "page_size",
+ "serde",
+ "synchronoise",
+ "url",
+]
+
+[[package]]
+name = "heed-traits"
+version = "0.7.0"
+source = "git+https://github.com/timokoesters/heed.git?rev=f6f825da7fb2c758867e05ad973ef800a6fe1d5d#f6f825da7fb2c758867e05ad973ef800a6fe1d5d"
+
+[[package]]
+name = "heed-types"
+version = "0.7.2"
+source = "git+https://github.com/timokoesters/heed.git?rev=f6f825da7fb2c758867e05ad973ef800a6fe1d5d#f6f825da7fb2c758867e05ad973ef800a6fe1d5d"
+dependencies = [
+ "bincode",
+ "bytemuck",
+ "byteorder",
+ "heed-traits",
+ "serde",
+ "serde_json",
+]
+
 [[package]]
 name = "hermit-abi"
 version = "0.1.19"
@@ -952,9 +1042,9 @@ dependencies = [
 
 [[package]]
 name = "hyper"
-version = "0.14.10"
+version = "0.14.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7728a72c4c7d72665fde02204bcbd93b247721025b222ef78606f14513e0fd03"
+checksum = "0b61cf2d1aebcf6e6352c97b81dc2244ca29194be1b276f5d8ad5c6330fffb11"
 dependencies = [
  "bytes",
  "futures-channel",
@@ -967,7 +1057,7 @@ dependencies = [
  "httpdate",
  "itoa",
  "pin-project-lite",
- "socket2 0.4.0",
+ "socket2 0.4.1",
  "tokio",
  "tower-service",
  "tracing",
@@ -1049,7 +1139,7 @@ version = "0.1.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "bee0328b1209d157ef001c94dd85b4f8f64139adb0eac2659f4b08382b2f474d"
 dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
 ]
 
 [[package]]
@@ -1171,15 +1261,15 @@ version = "0.7.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6f84d96438c15fcd6c3f244c8fce01d1e2b9c6b5623e9c711dc9286d8fc92d6a"
 dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
  "winapi",
 ]
 
 [[package]]
 name = "librocksdb-sys"
-version = "6.17.3"
+version = "6.20.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5da125e1c0f22c7cae785982115523a0738728498547f415c9054cb17c7e89f9"
+checksum = "c309a9d2470844aceb9a4a098cf5286154d20596868b75a6b36357d2bb9ca25d"
 dependencies = [
  "bindgen",
  "cc",
@@ -1204,6 +1294,17 @@ version = "0.5.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7fb9b38af92608140b86b693604b9ffcc5824240a484d1ecd4795bacb2fe88f3"
 
+[[package]]
+name = "lmdb-rkv-sys"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b27470ac25167b3afdfb6af8fcd3bc1be67de50ffbdaf4073378cfded6ae24a5"
+dependencies = [
+ "cc",
+ "libc",
+ "pkg-config",
+]
+
 [[package]]
 name = "lock_api"
 version = "0.4.4"
@@ -1219,7 +1320,7 @@ version = "0.4.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710"
 dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
 ]
 
 [[package]]
@@ -1228,7 +1329,7 @@ version = "0.5.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2111607c723d7857e0d8299d5ce7a0bf4b844d3e44f8de136b13da513eaf8fc4"
 dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
  "generator",
  "scoped-tls",
  "serde",
@@ -1346,10 +1447,12 @@ dependencies = [
 
 [[package]]
 name = "nom"
-version = "5.1.2"
+version = "6.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ffb4262d26ed83a1c0a33a38fe2bb15797329c85770da05e6b828ddb782627af"
+checksum = "e7413f999671bd4745a7b624bd370a569fb6bc574b23c83a3c5ed2e453f3d5e2"
 dependencies = [
+ "bitvec",
+ "funty",
  "memchr",
  "version_check",
 ]
@@ -1445,11 +1548,12 @@ checksum = "28988d872ab76095a6e6ac88d99b54fd267702734fd7ffe610ca27f533ddb95a"
 
 [[package]]
 name = "opentelemetry"
-version = "0.12.0"
+version = "0.15.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "514d24875c140ed269eecc2d1b56d7b71b573716922a763c317fb1b1b4b58f15"
+checksum = "ff27b33e30432e7b9854936693ca103d8591b0501f7ae9f633de48cda3bf2a67"
 dependencies = [
  "async-trait",
+ "crossbeam-channel",
  "futures",
  "js-sys",
  "lazy_static",
@@ -1461,9 +1565,9 @@ dependencies = [
 
 [[package]]
 name = "opentelemetry-jaeger"
-version = "0.11.0"
+version = "0.14.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a5677b3a361784aff6e2b1b30dbdb5f85f4ec57ff2ced41d9a481ad70a9d0b57"
+checksum = "09a9fc8192722e7daa0c56e59e2336b797122fb8598383dcb11c8852733b435c"
 dependencies = [
  "async-trait",
  "lazy_static",
@@ -1481,6 +1585,16 @@ dependencies = [
  "num-traits",
 ]
 
+[[package]]
+name = "page_size"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eebde548fbbf1ea81a99b128872779c437752fb99f217c45245e1a61dcd9edcd"
+dependencies = [
+ "libc",
+ "winapi",
+]
+
 [[package]]
 name = "parking_lot"
 version = "0.11.1"
@@ -1498,7 +1612,7 @@ version = "0.8.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "fa7a782938e745763fe6907fc6ba86946d72f49fe7e21de074e08128a99fb018"
 dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
  "instant",
  "libc",
  "redox_syscall",
@@ -1569,18 +1683,18 @@ dependencies = [
 
 [[package]]
 name = "pin-project"
-version = "1.0.7"
+version = "1.0.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c7509cc106041c40a4518d2af7a61530e1eed0e6285296a3d8c5472806ccc4a4"
+checksum = "576bc800220cc65dac09e99e97b08b358cfab6e17078de8dc5fee223bd2d0c08"
 dependencies = [
  "pin-project-internal",
 ]
 
 [[package]]
 name = "pin-project-internal"
-version = "1.0.7"
+version = "1.0.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "48c950132583b500556b1efd71d45b319029f2b71518d979fcc208e16b42426f"
+checksum = "6e8fe8163d14ce7f0cdac2e040116f22eac817edabff0be91e8aff7e9accf389"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1601,9 +1715,9 @@ checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
 
 [[package]]
 name = "pkcs8"
-version = "0.7.0"
+version = "0.7.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "09d156817ae0125e8aa5067710b0db24f0984830614f99875a70aa5e3b74db69"
+checksum = "fbee84ed13e44dd82689fa18348a49934fa79cc774a344c42fc9b301c71b140a"
 dependencies = [
  "der",
  "spki",
@@ -1668,9 +1782,9 @@ checksum = "bc881b2c22681370c6a780e47af9840ef841837bc98118431d4e1868bd0c1086"
 
 [[package]]
 name = "proc-macro2"
-version = "1.0.27"
+version = "1.0.28"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f0d8caf72986c1a598726adc988bb5984792ef84f5ee5aa50209145ee8077038"
+checksum = "5c7ed8b8c7b886ea3ed7dde405212185f423ab44682667c8c6dd14aa1d9f6612"
 dependencies = [
  "unicode-xid",
 ]
@@ -1703,6 +1817,12 @@ dependencies = [
  "proc-macro2",
 ]
 
+[[package]]
+name = "radium"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "941ba9d78d8e2f7ce474c015eea4d9c6d25b6a3327f9832ee29a4de27f91bbb8"
+
 [[package]]
 name = "rand"
 version = "0.7.3"
@@ -2015,7 +2135,7 @@ dependencies = [
 [[package]]
 name = "ruma"
 version = "0.2.0"
-source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492"
+source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
 dependencies = [
  "assign",
  "js_int",
@@ -2036,7 +2156,7 @@ dependencies = [
 [[package]]
 name = "ruma-api"
 version = "0.17.1"
-source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492"
+source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
 dependencies = [
  "bytes",
  "http",
@@ -2052,7 +2172,7 @@ dependencies = [
 [[package]]
 name = "ruma-api-macros"
 version = "0.17.1"
-source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492"
+source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
 dependencies = [
  "proc-macro-crate",
  "proc-macro2",
@@ -2063,7 +2183,7 @@ dependencies = [
 [[package]]
 name = "ruma-appservice-api"
 version = "0.3.0"
-source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492"
+source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
 dependencies = [
  "ruma-api",
  "ruma-common",
@@ -2077,7 +2197,7 @@ dependencies = [
 [[package]]
 name = "ruma-client-api"
 version = "0.11.0"
-source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492"
+source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
 dependencies = [
  "assign",
  "bytes",
@@ -2097,7 +2217,7 @@ dependencies = [
 [[package]]
 name = "ruma-common"
 version = "0.5.4"
-source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492"
+source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
 dependencies = [
  "indexmap",
  "js_int",
@@ -2112,7 +2232,7 @@ dependencies = [
 [[package]]
 name = "ruma-events"
 version = "0.23.2"
-source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492"
+source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
 dependencies = [
  "indoc",
  "js_int",
@@ -2128,7 +2248,7 @@ dependencies = [
 [[package]]
 name = "ruma-events-macros"
 version = "0.23.2"
-source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492"
+source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
 dependencies = [
  "proc-macro-crate",
  "proc-macro2",
@@ -2139,7 +2259,7 @@ dependencies = [
 [[package]]
 name = "ruma-federation-api"
 version = "0.2.0"
-source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492"
+source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
 dependencies = [
  "js_int",
  "ruma-api",
@@ -2154,7 +2274,7 @@ dependencies = [
 [[package]]
 name = "ruma-identifiers"
 version = "0.19.4"
-source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492"
+source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
 dependencies = [
  "paste",
  "rand 0.8.4",
@@ -2168,7 +2288,7 @@ dependencies = [
 [[package]]
 name = "ruma-identifiers-macros"
 version = "0.19.4"
-source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492"
+source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
 dependencies = [
  "quote",
  "ruma-identifiers-validation",
@@ -2178,12 +2298,12 @@ dependencies = [
 [[package]]
 name = "ruma-identifiers-validation"
 version = "0.4.0"
-source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492"
+source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
 
 [[package]]
 name = "ruma-identity-service-api"
 version = "0.2.0"
-source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492"
+source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
 dependencies = [
  "js_int",
  "ruma-api",
@@ -2196,7 +2316,7 @@ dependencies = [
 [[package]]
 name = "ruma-push-gateway-api"
 version = "0.2.0"
-source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492"
+source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
 dependencies = [
  "js_int",
  "ruma-api",
@@ -2211,7 +2331,7 @@ dependencies = [
 [[package]]
 name = "ruma-serde"
 version = "0.4.1"
-source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492"
+source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
 dependencies = [
  "bytes",
  "form_urlencoded",
@@ -2225,7 +2345,7 @@ dependencies = [
 [[package]]
 name = "ruma-serde-macros"
 version = "0.4.1"
-source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492"
+source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
 dependencies = [
  "proc-macro-crate",
  "proc-macro2",
@@ -2236,7 +2356,7 @@ dependencies = [
 [[package]]
 name = "ruma-signatures"
 version = "0.8.0"
-source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492"
+source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
 dependencies = [
  "base64 0.13.0",
  "ed25519-dalek",
@@ -2253,7 +2373,7 @@ dependencies = [
 [[package]]
 name = "ruma-state-res"
 version = "0.2.0"
-source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492"
+source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93"
 dependencies = [
  "itertools 0.10.1",
  "js_int",
@@ -2292,7 +2412,7 @@ dependencies = [
  "base64 0.13.0",
  "blake2b_simd",
  "constant_time_eq",
- "crossbeam-utils",
+ "crossbeam-utils 0.8.5",
 ]
 
 [[package]]
@@ -2466,9 +2586,9 @@ dependencies = [
 
 [[package]]
 name = "serde_json"
-version = "1.0.64"
+version = "1.0.66"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "799e97dc9fdae36a5c8b8f2cae9ce2ee9fdce2058c57a93e6099d919fd982f79"
+checksum = "336b10da19a12ad094b59d870ebde26a45402e5b470add4b5fd03c5048a32127"
 dependencies = [
  "itoa",
  "ryu",
@@ -2512,7 +2632,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b362ae5752fd2137731f9fa25fd4d9058af34666ca1966fb969119cc35719f12"
 dependencies = [
  "block-buffer",
- "cfg-if",
+ "cfg-if 1.0.0",
  "cpufeatures",
  "digest",
  "opaque-debug",
@@ -2529,9 +2649,9 @@ dependencies = [
 
 [[package]]
 name = "shlex"
-version = "0.1.1"
+version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7fdf1b9db47230893d76faad238fd6097fd6d6a9245cd7a4d90dbd639536bbd2"
+checksum = "42a568c8f2cd051a4d283bd6eb0343ac214c1b0f1ac19f93e1175b2dee38c73d"
 
 [[package]]
 name = "signal-hook-registry"
@@ -2573,7 +2693,7 @@ checksum = "1d0132f3e393bcb7390c60bb45769498cf4550bcb7a21d7f95c02b69f6362cdc"
 dependencies = [
  "crc32fast",
  "crossbeam-epoch",
- "crossbeam-utils",
+ "crossbeam-utils 0.8.5",
  "fs2",
  "fxhash",
  "libc",
@@ -2594,16 +2714,16 @@ version = "0.3.19"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "122e570113d28d773067fab24266b66753f6ea915758651696b6e35e49f88d6e"
 dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
  "libc",
  "winapi",
 ]
 
 [[package]]
 name = "socket2"
-version = "0.4.0"
+version = "0.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9e3dfc207c526015c632472a77be09cf1b6e46866581aecae5cc38fb4235dea2"
+checksum = "765f090f0e423d2b55843402a07915add955e7d60657db13707a159727326cad"
 dependencies = [
  "libc",
  "winapi",
@@ -2714,15 +2834,24 @@ checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601"
 
 [[package]]
 name = "syn"
-version = "1.0.73"
+version = "1.0.74"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f71489ff30030d2ae598524f61326b902466f72a0fb1a8564c001cc63425bcc7"
+checksum = "1873d832550d4588c3dbc20f01361ab00bfe741048f71e3fecf145a7cc18b29c"
 dependencies = [
  "proc-macro2",
  "quote",
  "unicode-xid",
 ]
 
+[[package]]
+name = "synchronoise"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d717ed0efc9d39ab3b642a096bc369a3e02a38a51c41845d7fe31bdad1d6eaeb"
+dependencies = [
+ "crossbeam-queue 0.1.2",
+]
+
 [[package]]
 name = "synstructure"
 version = "0.12.5"
@@ -2735,13 +2864,19 @@ dependencies = [
  "unicode-xid",
 ]
 
+[[package]]
+name = "tap"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
+
 [[package]]
 name = "tempfile"
 version = "3.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "dac1c663cfc93810f88aed9b8941d48cabf856a1b111c29a40439018d870eb22"
 dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
  "libc",
  "rand 0.8.4",
  "redox_syscall",
@@ -2859,9 +2994,9 @@ dependencies = [
 
 [[package]]
 name = "tinyvec"
-version = "1.2.0"
+version = "1.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5b5220f05bb7de7f3f53c7c065e1199b3172696fe2db9f9c4d8ad9b4ee74c342"
+checksum = "848a1e1181b9f6753b5e96a092749e29b11d19ede67dfbbd6c7dc7e0f49b5338"
 dependencies = [
  "tinyvec_macros",
 ]
@@ -2874,9 +3009,9 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c"
 
 [[package]]
 name = "tokio"
-version = "1.8.1"
+version = "1.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "98c8b05dc14c75ea83d63dd391100353789f5f24b8b3866542a5e85c8be8e985"
+checksum = "4b7b349f11a7047e6d1276853e612d152f5e8a352c61917887cc2169e2366b4c"
 dependencies = [
  "autocfg",
  "bytes",
@@ -2971,7 +3106,7 @@ version = "0.1.26"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "09adeb8c97449311ccd28a427f96fb563e7fd31aabf994189879d9da2394b89d"
 dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
  "pin-project-lite",
  "tracing-attributes",
  "tracing-core",
@@ -2997,6 +3132,17 @@ dependencies = [
  "lazy_static",
 ]
 
+[[package]]
+name = "tracing-flame"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bd520fe41c667b437952383f3a1ec14f1fa45d653f719a77eedd6e6a02d8fa54"
+dependencies = [
+ "lazy_static",
+ "tracing",
+ "tracing-subscriber",
+]
+
 [[package]]
 name = "tracing-log"
 version = "0.1.2"
@@ -3010,9 +3156,9 @@ dependencies = [
 
 [[package]]
 name = "tracing-opentelemetry"
-version = "0.11.0"
+version = "0.14.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cccdf13c28f1654fe806838f28c5b9cb23ca4c0eae71450daa489f50e523ceb1"
+checksum = "c47440f2979c4cd3138922840eec122e3c0ba2148bc290f756bd7fd60fc97fff"
 dependencies = [
  "opentelemetry",
  "tracing",
@@ -3060,7 +3206,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ad0d7f5db438199a6e2609debe3f69f808d074e0a2888ee0bccb45fe234d03f4"
 dependencies = [
  "async-trait",
- "cfg-if",
+ "cfg-if 1.0.0",
  "data-encoding",
  "enum-as-inner",
  "futures-channel",
@@ -3084,7 +3230,7 @@ version = "0.20.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f6ad17b608a64bd0735e67bde16b0636f8aa8591f831a25d18443ed00a699770"
 dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
  "futures-util",
  "ipconfig",
  "lazy_static",
@@ -3245,7 +3391,7 @@ version = "0.2.74"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d54ee1d4ed486f78874278e63e4069fc1ab9f6a18ca492076ffb90c5eb2997fd"
 dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
  "serde",
  "serde_json",
  "wasm-bindgen-macro",
@@ -3272,7 +3418,7 @@ version = "0.4.24"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5fba7978c679d53ce2d0ac80c8c175840feb849a161664365d1287b41f2e67f1"
 dependencies = [
- "cfg-if",
+ "cfg-if 1.0.0",
  "js-sys",
  "wasm-bindgen",
  "web-sys",
@@ -3394,6 +3540,12 @@ dependencies = [
  "winapi",
 ]
 
+[[package]]
+name = "wyz"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "85e60b0d1b5f99db2556934e21937020776a5d31520bf169e851ac44e6420214"
+
 [[package]]
 name = "yaml-rust"
 version = "0.4.5"
@@ -3411,9 +3563,9 @@ checksum = "9fc79f4a1e39857fc00c3f662cbf2651c771f00e9c15fe2abc341806bd46bd71"
 
 [[package]]
 name = "zeroize"
-version = "1.3.0"
+version = "1.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4756f7db3f7b5574938c3eb1c117038b8e07f95ee6718c0efad4ac21508f1efd"
+checksum = "377db0846015f7ae377174787dd452e1c5f5a9050bc6f954911d01f116daa0cd"
 dependencies = [
  "zeroize_derive",
 ]
diff --git a/Cargo.toml b/Cargo.toml
index 2ce4b03746d0f2c220937a1989773a0f9da409d7..19ce6b101612958842a9921837567e0b5305a487 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -18,12 +18,12 @@ edition = "2018"
 rocket = { version = "0.5.0-rc.1", features = ["tls"] } # Used to handle requests
 
 # Used for matrix spec type definitions and helpers
-ruma = { git = "https://github.com/ruma/ruma", rev = "eb19b0e08a901b87d11b3be0890ec788cc760492", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] }
-#ruma = { git = "https://github.com/timokoesters/ruma", rev = "74cf83c4ca937fa5e2709fb71e9d11848e72e487", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] }
+#ruma = { git = "https://github.com/ruma/ruma", rev = "eb19b0e08a901b87d11b3be0890ec788cc760492", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] }
+ruma = { git = "https://github.com/timokoesters/ruma", rev = "a2d93500e1dbc87e7032a3c74f3b2479a7f84e93", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] }
 #ruma = { path = "../ruma/crates/ruma", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] }
 
 # Used for long polling and federation sender, should be the same as rocket::tokio
-tokio = "1.2.0"
+tokio = "1.8.2"
 # Used for storing data permanently
 sled = { version = "0.34.6", features = ["compression", "no_metrics"], optional = true }
 rocksdb = { version = "0.16.0", features = ["multi-threaded-cf"], optional = true }
@@ -31,30 +31,28 @@ rocksdb = { version = "0.16.0", features = ["multi-threaded-cf"], optional = tru
 
 # Used for the http request / response body type for Ruma endpoints used with reqwest
 bytes = "1.0.1"
-# Used for emitting log entries
-log = "0.4.14"
 # Used for rocket<->ruma conversions
-http = "0.2.3"
+http = "0.2.4"
 # Used to find data directory for default db path
-directories = "3.0.1"
+directories = "3.0.2"
 # Used for ruma wrapper
 serde_json = { version = "1.0.64", features = ["raw_value"] }
 # Used for appservice registration files
 serde_yaml = "0.8.17"
 # Used for pdu definition
-serde = "1.0.123"
+serde = "1.0.126"
 # Used for secure identifiers
-rand = "0.8.3"
+rand = "0.8.4"
 # Used to hash passwords
 rust-argon2 = "0.8.3"
 # Used to send requests
-reqwest = { version = "0.11.3", default-features = false, features = ["rustls-tls-native-roots", "socks"] }
+reqwest = { version = "0.11.4", default-features = false, features = ["rustls-tls-native-roots", "socks"] }
 # Custom TLS verifier
-rustls = { version = "0.19", features = ["dangerous_configuration"] }
+rustls = { version = "0.19.1", features = ["dangerous_configuration"] }
 rustls-native-certs = "0.5.0"
 webpki = "0.21.0"
 # Used for conduit::Error type
-thiserror = "1.0.24"
+thiserror = "1.0.26"
 # Used to generate thumbnails for images
 image = { version = "0.23.14", default-features = false, features = ["jpeg", "png", "gif"] }
 # Used to encode server public key
@@ -62,29 +60,33 @@ base64 = "0.13.0"
 # Used when hashing the state
 ring = "0.16.20"
 # Used when querying the SRV record of other servers
-trust-dns-resolver = "0.20.0"
+trust-dns-resolver = "0.20.3"
 # Used to find matching events for appservices
-regex = "1.4.3"
+regex = "1.5.4"
 # jwt jsonwebtokens
 jsonwebtoken = "7.2.0"
 # Performance measurements
-tracing = "0.1.25"
-opentelemetry = "0.12.0"
-tracing-subscriber = "0.2.16"
-tracing-opentelemetry = "0.11.0"
-opentelemetry-jaeger = "0.11.0"
+tracing = { version = "0.1.26", features = ["release_max_level_warn"] }
+opentelemetry = "0.15.0"
+tracing-subscriber = "0.2.19"
+tracing-opentelemetry = "0.14.0"
+tracing-flame = "0.1.0"
+opentelemetry-jaeger = "0.14.0"
 pretty_env_logger = "0.4.0"
 lru-cache = "0.1.2"
 rusqlite = { version = "0.25.3", optional = true, features = ["bundled"] }
 parking_lot = { version = "0.11.1", optional = true }
 crossbeam = { version = "0.8.1", optional = true }
 num_cpus = "1.13.0"
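+# Used to offload blocking database iterations onto worker threads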
+threadpool = "1.8.1"
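+# Used for the optional heed (LMDB) database backend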
+heed = { git = "https://github.com/timokoesters/heed.git", rev = "f6f825da7fb2c758867e05ad973ef800a6fe1d5d", optional = true }
 
 [features]
 default = ["conduit_bin", "backend_sqlite"]
 backend_sled = ["sled"]
 backend_rocksdb = ["rocksdb"]
 backend_sqlite = ["sqlite"]
+backend_heed = ["heed", "crossbeam"]
 sqlite = ["rusqlite", "parking_lot", "crossbeam", "tokio/signal"]
 conduit_bin = [] # TODO: add rocket to this when it is optional
 
diff --git a/src/appservice_server.rs b/src/appservice_server.rs
index 42918577c2a2ed04c5a8edb3073560abde49d45b..7868e45ff31c003c1023097db2d6c0e3910666eb 100644
--- a/src/appservice_server.rs
+++ b/src/appservice_server.rs
@@ -1,6 +1,5 @@
 use crate::{utils, Error, Result};
 use bytes::BytesMut;
-use log::warn;
 use ruma::api::{IncomingResponse, OutgoingRequest, SendAccessToken};
 use std::{
     convert::{TryFrom, TryInto},
@@ -8,6 +7,7 @@
     mem,
     time::Duration,
 };
+use tracing::warn;
 
 pub async fn send_request<T: OutgoingRequest>(
     globals: &crate::database::globals::Globals,
diff --git a/src/client_server/account.rs b/src/client_server/account.rs
index ddb44d6e795ed0b2b55ec311f912a4dde0ada3c3..c00cc871d57573f57b209594b51c5b232502f176 100644
--- a/src/client_server/account.rs
+++ b/src/client_server/account.rs
@@ -6,7 +6,6 @@
 
 use super::{DEVICE_ID_LENGTH, SESSION_ID_LENGTH, TOKEN_LENGTH};
 use crate::{database::DatabaseGuard, pdu::PduBuilder, utils, ConduitResult, Error, Ruma};
-use log::info;
 use ruma::{
     api::client::{
         error::ErrorKind,
@@ -28,6 +27,7 @@
     identifiers::RoomName,
     push, RoomAliasId, RoomId, RoomVersionId, UserId,
 };
+use tracing::info;
 
 use register::RegistrationKind;
 #[cfg(feature = "conduit_bin")]
diff --git a/src/client_server/directory.rs b/src/client_server/directory.rs
index d73888657ef0d1b274379bbbdd18ed1a1ad37c61..f1ec4b8682dad94b16046e06dc32a0cf62f06d6c 100644
--- a/src/client_server/directory.rs
+++ b/src/client_server/directory.rs
@@ -1,5 +1,4 @@
 use crate::{database::DatabaseGuard, ConduitResult, Database, Error, Result, Ruma};
-use log::info;
 use ruma::{
     api::{
         client::{
@@ -22,6 +21,7 @@
     serde::Raw,
     ServerName, UInt,
 };
+use tracing::info;
 
 #[cfg(feature = "conduit_bin")]
 use rocket::{get, post, put};
diff --git a/src/client_server/membership.rs b/src/client_server/membership.rs
index 52bb18c48042d8ac27915cb733c49d555c037309..ea7fdab5d311e4c620bee760957899fd8afa125c 100644
--- a/src/client_server/membership.rs
+++ b/src/client_server/membership.rs
@@ -4,7 +4,6 @@
     pdu::{PduBuilder, PduEvent},
     server_server, utils, ConduitResult, Database, Error, Result, Ruma,
 };
-use log::{debug, error, warn};
 use member::{MemberEventContent, MembershipState};
 use rocket::futures;
 use ruma::{
@@ -34,6 +33,7 @@
     sync::{Arc, RwLock},
     time::{Duration, Instant},
 };
+use tracing::{debug, error, warn};
 
 #[cfg(feature = "conduit_bin")]
 use rocket::{get, post};
diff --git a/src/client_server/room.rs b/src/client_server/room.rs
index 7aa22d081ec310a740cda6c639cf692047cb676a..d5188e8b394977e6644cb68d20e5ad694e3b007f 100644
--- a/src/client_server/room.rs
+++ b/src/client_server/room.rs
@@ -2,7 +2,6 @@
     client_server::invite_helper, database::DatabaseGuard, pdu::PduBuilder, ConduitResult, Error,
     Ruma,
 };
-use log::info;
 use ruma::{
     api::client::{
         error::ErrorKind,
@@ -16,6 +15,7 @@
     RoomAliasId, RoomId, RoomVersionId,
 };
 use std::{cmp::max, collections::BTreeMap, convert::TryFrom, sync::Arc};
+use tracing::{info, warn};
 
 #[cfg(feature = "conduit_bin")]
 use rocket::{get, post};
@@ -233,7 +233,8 @@ pub async fn create_room_route(
 
     // 5. Events listed in initial_state
     for event in &body.initial_state {
-        let pdu_builder = PduBuilder::from(event.deserialize().map_err(|_| {
+        let pdu_builder = PduBuilder::from(event.deserialize().map_err(|e| {
+            warn!("Invalid initial state event: {:?}", e);
             Error::BadRequest(ErrorKind::InvalidParam, "Invalid initial state event.")
         })?);
 
diff --git a/src/client_server/session.rs b/src/client_server/session.rs
index 7ad792b2ce2b87506daf2957738befe8b65f55b4..f8452e0df0bdbf7936135492034b1dd6f69f2a9c 100644
--- a/src/client_server/session.rs
+++ b/src/client_server/session.rs
@@ -1,6 +1,5 @@
 use super::{DEVICE_ID_LENGTH, TOKEN_LENGTH};
 use crate::{database::DatabaseGuard, utils, ConduitResult, Error, Ruma};
-use log::info;
 use ruma::{
     api::client::{
         error::ErrorKind,
@@ -9,6 +8,7 @@
     UserId,
 };
 use serde::Deserialize;
+use tracing::info;
 
 #[derive(Debug, Deserialize)]
 struct Claims {
diff --git a/src/client_server/sync.rs b/src/client_server/sync.rs
index fdb8f2564c3011de98a4e71e675f197903379c3c..541045ecb1482bcf1dfbde5523f756f96d7ccfcf 100644
--- a/src/client_server/sync.rs
+++ b/src/client_server/sync.rs
@@ -1,5 +1,4 @@
 use crate::{database::DatabaseGuard, ConduitResult, Database, Error, Result, Ruma, RumaResponse};
-use log::{error, warn};
 use ruma::{
     api::client::r0::{sync::sync_events, uiaa::UiaaResponse},
     events::{room::member::MembershipState, AnySyncEphemeralRoomEvent, EventType},
@@ -13,6 +12,7 @@
     time::Duration,
 };
 use tokio::sync::watch::Sender;
+use tracing::{error, warn};
 
 #[cfg(feature = "conduit_bin")]
 use rocket::{get, tokio};
diff --git a/src/client_server/to_device.rs b/src/client_server/to_device.rs
index e3fd780c05f06477d7341119ffb4be3e6c0d390b..d3f7d255da35b34578f19e9795f763068544a779 100644
--- a/src/client_server/to_device.rs
+++ b/src/client_server/to_device.rs
@@ -47,7 +47,7 @@ pub async fn send_event_to_device_route(
 
                 db.sending.send_reliable_edu(
                     target_user_id.server_name(),
-                    &serde_json::to_vec(&federation::transactions::edu::Edu::DirectToDevice(
+                    serde_json::to_vec(&federation::transactions::edu::Edu::DirectToDevice(
                         DirectDeviceContent {
                             sender: sender_user.clone(),
                             ev_type: EventType::from(&body.event_type),
diff --git a/src/database.rs b/src/database.rs
index e359a5f1d913c2c7e90356c4fa00a5b0ab86b081..5e9e025d8f2bc4c6f0d05ddf401804515c22e6e8 100644
--- a/src/database.rs
+++ b/src/database.rs
@@ -17,7 +17,6 @@
 use crate::{utils, Error, Result};
 use abstraction::DatabaseEngine;
 use directories::ProjectDirs;
-use log::error;
 use lru_cache::LruCache;
 use rocket::{
     futures::{channel::mpsc, stream::FuturesUnordered, StreamExt},
@@ -36,6 +35,7 @@
     sync::{Arc, Mutex, RwLock},
 };
 use tokio::sync::{OwnedRwLockReadGuard, RwLock as TokioRwLock, Semaphore};
+use tracing::{debug, error, warn};
 
 use self::proxy::ProxyConfig;
 
@@ -69,6 +69,8 @@ pub struct Config {
     allow_federation: bool,
     #[serde(default = "false_fn")]
     pub allow_jaeger: bool,
+    #[serde(default = "false_fn")]
+    pub tracing_flame: bool,
     #[serde(default)]
     proxy: ProxyConfig,
     jwt_secret: Option<String>,
@@ -91,12 +93,12 @@ pub fn warn_deprecated(&self) {
             .keys()
             .filter(|key| DEPRECATED_KEYS.iter().any(|s| s == key))
         {
-            log::warn!("Config parameter {} is deprecated", key);
+            warn!("Config parameter {} is deprecated", key);
             was_deprecated = true;
         }
 
         if was_deprecated {
-            log::warn!("Read conduit documentation and check your configuration if any new configuration parameters should be adjusted");
+            warn!("Read conduit documentation and check your configuration if any new configuration parameters should be adjusted");
         }
     }
 }
@@ -154,6 +156,9 @@ fn default_log() -> String {
 #[cfg(feature = "sqlite")]
 pub type Engine = abstraction::sqlite::Engine;
 
+#[cfg(feature = "heed")]
+pub type Engine = abstraction::heed::Engine;
+
 pub struct Database {
     _db: Arc<Engine>,
     pub globals: globals::Globals,
@@ -184,22 +189,22 @@ pub fn try_remove(server_name: &str) -> Result<()> {
     }
 
     fn check_sled_or_sqlite_db(config: &Config) -> Result<()> {
-        let path = Path::new(&config.database_path);
-
         #[cfg(feature = "backend_sqlite")]
         {
+            let path = Path::new(&config.database_path);
+
             let sled_exists = path.join("db").exists();
             let sqlite_exists = path.join("conduit.db").exists();
             if sled_exists {
                 if sqlite_exists {
                     // most likely an in-place directory, only warn
-                    log::warn!("Both sled and sqlite databases are detected in database directory");
-                    log::warn!("Currently running from the sqlite database, but consider removing sled database files to free up space")
+                    warn!("Both sled and sqlite databases are detected in database directory");
+                    warn!("Currently running from the sqlite database, but consider removing sled database files to free up space")
                 } else {
-                    log::error!(
+                    error!(
                         "Sled database detected, conduit now uses sqlite for database operations"
                     );
-                    log::error!("This database must be converted to sqlite, go to https://github.com/ShadowJonathan/conduit_toolbox#conduit_sled_to_sqlite");
+                    error!("This database must be converted to sqlite, go to https://github.com/ShadowJonathan/conduit_toolbox#conduit_sled_to_sqlite");
                     return Err(Error::bad_config(
                         "sled database detected, migrate to sqlite",
                     ));
@@ -291,12 +296,13 @@ pub async fn load_or_create(config: &Config) -> Result<Arc<TokioRwLock<Self>>> {
                 statehash_shortstatehash: builder.open_tree("statehash_shortstatehash")?,
 
                 eventid_outlierpdu: builder.open_tree("eventid_outlierpdu")?,
-                prevevent_parent: builder.open_tree("prevevent_parent")?,
+                referencedevents: builder.open_tree("referencedevents")?,
                 pdu_cache: Mutex::new(LruCache::new(100_000)),
                 auth_chain_cache: Mutex::new(LruCache::new(100_000)),
             },
             account_data: account_data::AccountData {
                 roomuserdataid_accountdata: builder.open_tree("roomuserdataid_accountdata")?,
+                roomusertype_roomuserdataid: builder.open_tree("roomusertype_roomuserdataid")?,
             },
             media: media::Media {
                 mediaid_file: builder.open_tree("mediaid_file")?,
@@ -311,8 +317,8 @@ pub async fn load_or_create(config: &Config) -> Result<Arc<TokioRwLock<Self>>> {
             },
             sending: sending::Sending {
                 servername_educount: builder.open_tree("servername_educount")?,
-                servernamepduids: builder.open_tree("servernamepduids")?,
-                servercurrentevents: builder.open_tree("servercurrentevents")?,
+                servernameevent_data: builder.open_tree("servernameevent_data")?,
+                servercurrentevent_data: builder.open_tree("servercurrentevent_data")?,
                 maximum_requests: Arc::new(Semaphore::new(config.max_concurrent_requests as usize)),
                 sender: sending_sender,
             },
@@ -419,6 +425,30 @@ pub async fn load_or_create(config: &Config) -> Result<Arc<TokioRwLock<Self>>> {
 
                 println!("Migration: 3 -> 4 finished");
             }
+
+            if db.globals.database_version()? < 5 {
+                // Upgrade user data store
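+                // Build the new roomusertype_roomuserdataid index: the key is
+                // room_id 0xff user_id 0xff event_type, the value is the old
+                // roomuserdataid key, so account data can be looked up by type.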
+                for (roomuserdataid, _) in db.account_data.roomuserdataid_accountdata.iter() {
+                    let mut parts = roomuserdataid.split(|&b| b == 0xff);
+                    let room_id = parts.next().unwrap();
+                    let user_id = parts.next().unwrap();
+                    let event_type = roomuserdataid.rsplit(|&b| b == 0xff).next().unwrap();
+
+                    let mut key = room_id.to_vec();
+                    key.push(0xff);
+                    key.extend_from_slice(user_id);
+                    key.push(0xff);
+                    key.extend_from_slice(event_type);
+
+                    db.account_data
+                        .roomusertype_roomuserdataid
+                        .insert(&key, &roomuserdataid)?;
+                }
+
+                db.globals.bump_database_version(5)?;
+
+                println!("Migration: 4 -> 5 finished");
+            }
         }
 
         let guard = db.read().await;
@@ -444,10 +474,12 @@ pub async fn load_or_create(config: &Config) -> Result<Arc<TokioRwLock<Self>>> {
 
     #[cfg(feature = "conduit_bin")]
     pub async fn start_on_shutdown_tasks(db: Arc<TokioRwLock<Self>>, shutdown: Shutdown) {
+        use tracing::info;
+
         tokio::spawn(async move {
             shutdown.await;
 
-            log::info!(target: "shutdown-sync", "Received shutdown notification, notifying sync helpers...");
+            info!(target: "shutdown-sync", "Received shutdown notification, notifying sync helpers...");
 
             db.read().await.globals.rotate.fire();
         });
@@ -513,7 +545,7 @@ pub async fn watch(&self, user_id: &UserId, device_id: &DeviceId) {
 
             futures.push(
                 self.account_data
-                    .roomuserdataid_accountdata
+                    .roomusertype_roomuserdataid
                     .watch_prefix(&roomuser_prefix),
             );
         }
@@ -523,7 +555,7 @@ pub async fn watch(&self, user_id: &UserId, device_id: &DeviceId) {
 
         futures.push(
             self.account_data
-                .roomuserdataid_accountdata
+                .roomusertype_roomuserdataid
                 .watch_prefix(&globaluserdata_prefix),
         );
 
@@ -543,22 +575,25 @@ pub async fn watch(&self, user_id: &UserId, device_id: &DeviceId) {
         futures.next().await;
     }
 
+    #[tracing::instrument(skip(self))]
     pub async fn flush(&self) -> Result<()> {
         let start = std::time::Instant::now();
 
         let res = self._db.flush();
 
-        log::debug!("flush: took {:?}", start.elapsed());
+        debug!("flush: took {:?}", start.elapsed());
 
         res
     }
 
     #[cfg(feature = "sqlite")]
+    #[tracing::instrument(skip(self))]
     pub fn flush_wal(&self) -> Result<()> {
         self._db.flush_wal()
     }
 
     #[cfg(feature = "sqlite")]
+    #[tracing::instrument(skip(engine, config))]
     pub async fn start_spillover_reap_task(engine: Arc<Engine>, config: &Config) {
         let fraction = config.sqlite_spillover_reap_fraction.clamp(0.01, 1.0);
         let interval_secs = config.sqlite_spillover_reap_interval_secs as u64;
@@ -585,11 +620,13 @@ pub async fn start_spillover_reap_task(engine: Arc<Engine>, config: &Config) {
     }
 
     #[cfg(feature = "sqlite")]
+    #[tracing::instrument(skip(lock, config))]
     pub async fn start_wal_clean_task(lock: &Arc<TokioRwLock<Self>>, config: &Config) {
         use tokio::time::{interval, timeout};
 
         #[cfg(unix)]
         use tokio::signal::unix::{signal, SignalKind};
+        use tracing::info;
 
         use std::{
             sync::Weak,
@@ -611,41 +648,41 @@ pub async fn start_wal_clean_task(lock: &Arc<TokioRwLock<Self>>, config: &Config
                 #[cfg(unix)]
                 tokio::select! {
                     _ = i.tick(), if do_timer => {
-                        log::info!(target: "wal-trunc", "Timer ticked")
+                        info!(target: "wal-trunc", "Timer ticked")
                     }
                     _ = s.recv() => {
-                        log::info!(target: "wal-trunc", "Received SIGHUP")
+                        info!(target: "wal-trunc", "Received SIGHUP")
                     }
                 };
                 #[cfg(not(unix))]
                 if do_timer {
                     i.tick().await;
-                    log::info!(target: "wal-trunc", "Timer ticked")
+                    info!(target: "wal-trunc", "Timer ticked")
                 } else {
                     // timer disabled, and there's no concept of signals on windows, bailing...
                     return;
                 }
                 if let Some(arc) = Weak::upgrade(&weak) {
-                    log::info!(target: "wal-trunc", "Rotating sync helpers...");
+                    info!(target: "wal-trunc", "Rotating sync helpers...");
                     // This actually creates a very small race condition between firing this and trying to acquire the subsequent write lock.
                     // Though it is not a huge deal if the write lock doesn't "catch", as it'll harmlessly time out.
                     arc.read().await.globals.rotate.fire();
 
-                    log::info!(target: "wal-trunc", "Locking...");
+                    info!(target: "wal-trunc", "Locking...");
                     let guard = {
                         if let Ok(guard) = timeout(lock_timeout, arc.write()).await {
                             guard
                         } else {
-                            log::info!(target: "wal-trunc", "Lock failed in timeout, canceled.");
+                            info!(target: "wal-trunc", "Lock failed in timeout, canceled.");
                             continue;
                         }
                     };
-                    log::info!(target: "wal-trunc", "Locked, flushing...");
+                    info!(target: "wal-trunc", "Locked, flushing...");
                     let start = Instant::now();
                     if let Err(e) = guard.flush_wal() {
-                        log::error!(target: "wal-trunc", "Errored: {}", e);
+                        error!(target: "wal-trunc", "Errored: {}", e);
                     } else {
-                        log::info!(target: "wal-trunc", "Flushed in {:?}", start.elapsed());
+                        info!(target: "wal-trunc", "Flushed in {:?}", start.elapsed());
                     }
                 } else {
                     break;
diff --git a/src/database/abstraction.rs b/src/database/abstraction.rs
index fb11ba0bce74e0f90663f5b8fc6400107c3f09ff..8ccac787e4af226bc65cfde58e745e4408cd596a 100644
--- a/src/database/abstraction.rs
+++ b/src/database/abstraction.rs
@@ -12,6 +12,9 @@
 #[cfg(feature = "sqlite")]
 pub mod sqlite;
 
+#[cfg(feature = "heed")]
+pub mod heed;
+
 pub trait DatabaseEngine: Sized {
     fn open(config: &Config) -> Result<Arc<Self>>;
     fn open_tree(self: &Arc<Self>, name: &'static str) -> Result<Arc<dyn Tree>>;
diff --git a/src/database/abstraction/heed.rs b/src/database/abstraction/heed.rs
new file mode 100644
index 0000000000000000000000000000000000000000..0421b14001604f30660db77c73560c860b26b9df
--- /dev/null
+++ b/src/database/abstraction/heed.rs
@@ -0,0 +1,240 @@
+use super::super::Config;
+use crossbeam::channel::{bounded, Sender as ChannelSender};
+use threadpool::ThreadPool;
+
+use crate::{Error, Result};
+use std::{
+    collections::HashMap,
+    future::Future,
+    pin::Pin,
+    sync::{Arc, Mutex, RwLock},
+};
+use tokio::sync::oneshot::Sender;
+
+use super::{DatabaseEngine, Tree};
+
+type TupleOfBytes = (Vec<u8>, Vec<u8>);
+
+pub struct Engine {
+    env: heed::Env,
+    iter_pool: Mutex<ThreadPool>,
+}
+
+pub struct EngineTree {
+    engine: Arc<Engine>,
+    tree: Arc<heed::UntypedDatabase>,
+    watchers: RwLock<HashMap<Vec<u8>, Vec<Sender<()>>>>,
+}
+
+fn convert_error(error: heed::Error) -> Error {
+    Error::HeedError {
+        error: error.to_string(),
+    }
+}
+
+impl DatabaseEngine for Engine {
+    fn open(config: &Config) -> Result<Arc<Self>> {
+        let mut env_builder = heed::EnvOpenOptions::new();
+        env_builder.map_size(1024 * 1024 * 1024 * 1024); // 1 Terabyte
+        env_builder.max_readers(126);
+        env_builder.max_dbs(128);
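+        // MdbWriteMap + MdbMapAsync use a writeable, asynchronously flushed memory
+        // map, trading crash durability for write speed; flush() below calls
+        // force_sync() to persist explicitly.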
+        unsafe {
+            env_builder.flag(heed::flags::Flags::MdbWriteMap);
+            env_builder.flag(heed::flags::Flags::MdbMapAsync);
+        }
+
+        Ok(Arc::new(Engine {
+            env: env_builder
+                .open(&config.database_path)
+                .map_err(convert_error)?,
+            iter_pool: Mutex::new(ThreadPool::new(10)),
+        }))
+    }
+
+    fn open_tree(self: &Arc<Self>, name: &'static str) -> Result<Arc<dyn Tree>> {
+        // Creates the db if it doesn't exist already
+        Ok(Arc::new(EngineTree {
+            engine: Arc::clone(self),
+            tree: Arc::new(
+                self.env
+                    .create_database(Some(name))
+                    .map_err(convert_error)?,
+            ),
+            watchers: RwLock::new(HashMap::new()),
+        }))
+    }
+
+    fn flush(self: &Arc<Self>) -> Result<()> {
+        self.env.force_sync().map_err(convert_error)?;
+        Ok(())
+    }
+}
+
+impl EngineTree {
+    #[tracing::instrument(skip(self, tree, from, backwards))]
+    fn iter_from_thread(
+        &self,
+        tree: Arc<heed::UntypedDatabase>,
+        from: Vec<u8>,
+        backwards: bool,
+    ) -> Box<dyn Iterator<Item = TupleOfBytes> + Send + Sync> {
+        let (s, r) = bounded::<TupleOfBytes>(100);
+        let engine = Arc::clone(&self.engine);
+
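+        // Run the blocking LMDB iteration off the caller's thread: reuse a pooled
+        // worker if one is idle, otherwise spawn a dedicated thread. The bounded
+        // channel (capacity 100) applies backpressure to the producer.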
+        let lock = self.engine.iter_pool.lock().unwrap();
+        if lock.active_count() < lock.max_count() {
+            lock.execute(move || {
+                iter_from_thread_work(tree, &engine.env.read_txn().unwrap(), from, backwards, &s);
+            });
+        } else {
+            std::thread::spawn(move || {
+                iter_from_thread_work(tree, &engine.env.read_txn().unwrap(), from, backwards, &s);
+            });
+        }
+
+        Box::new(r.into_iter())
+    }
+}
+
+#[tracing::instrument(skip(tree, txn, from, backwards))]
+fn iter_from_thread_work(
+    tree: Arc<heed::UntypedDatabase>,
+    txn: &heed::RoTxn<'_>,
+    from: Vec<u8>,
+    backwards: bool,
+    s: &ChannelSender<(Vec<u8>, Vec<u8>)>,
+) {
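+    // Stream key/value pairs over the channel; a send error means the receiving
+    // iterator was dropped, so stop early.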
+    if backwards {
+        for (k, v) in tree.rev_range(txn, ..=&*from).unwrap().map(|r| r.unwrap()) {
+            if s.send((k.to_vec(), v.to_vec())).is_err() {
+                return;
+            }
+        }
+    } else if from.is_empty() {
+        for (k, v) in tree.iter(txn).unwrap().map(|r| r.unwrap()) {
+            if s.send((k.to_vec(), v.to_vec())).is_err() {
+                return;
+            }
+        }
+    } else {
+        for (k, v) in tree.range(txn, &*from..).unwrap().map(|r| r.unwrap()) {
+            if s.send((k.to_vec(), v.to_vec())).is_err() {
+                return;
+            }
+        }
+    }
+}
+
+impl Tree for EngineTree {
+    #[tracing::instrument(skip(self, key))]
+    fn get(&self, key: &[u8]) -> Result<Option<Vec<u8>>> {
+        let txn = self.engine.env.read_txn().map_err(convert_error)?;
+        Ok(self
+            .tree
+            .get(&txn, &key)
+            .map_err(convert_error)?
+            .map(|s| s.to_vec()))
+    }
+
+    #[tracing::instrument(skip(self, key, value))]
+    fn insert(&self, key: &[u8], value: &[u8]) -> Result<()> {
+        let mut txn = self.engine.env.write_txn().map_err(convert_error)?;
+        self.tree
+            .put(&mut txn, &key, &value)
+            .map_err(convert_error)?;
+        txn.commit().map_err(convert_error)?;
+
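+        // Notify watchers registered on any prefix of the inserted key
+        // (see watch_prefix below).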
+        let watchers = self.watchers.read().unwrap();
+        let mut triggered = Vec::new();
+
+        for length in 0..=key.len() {
+            if watchers.contains_key(&key[..length]) {
+                triggered.push(&key[..length]);
+            }
+        }
+
+        drop(watchers);
+
+        if !triggered.is_empty() {
+            let mut watchers = self.watchers.write().unwrap();
+            for prefix in triggered {
+                if let Some(txs) = watchers.remove(prefix) {
+                    for tx in txs {
+                        let _ = tx.send(());
+                    }
+                }
+            }
+        };
+
+        Ok(())
+    }
+
+    #[tracing::instrument(skip(self, key))]
+    fn remove(&self, key: &[u8]) -> Result<()> {
+        let mut txn = self.engine.env.write_txn().map_err(convert_error)?;
+        self.tree.delete(&mut txn, &key).map_err(convert_error)?;
+        txn.commit().map_err(convert_error)?;
+        Ok(())
+    }
+
+    #[tracing::instrument(skip(self))]
+    fn iter<'a>(&'a self) -> Box<dyn Iterator<Item = (Vec<u8>, Vec<u8>)> + Send + 'a> {
+        self.iter_from(&[], false)
+    }
+
+    #[tracing::instrument(skip(self, from, backwards))]
+    fn iter_from(
+        &self,
+        from: &[u8],
+        backwards: bool,
+    ) -> Box<dyn Iterator<Item = (Vec<u8>, Vec<u8>)> + Send> {
+        self.iter_from_thread(Arc::clone(&self.tree), from.to_vec(), backwards)
+    }
+
+    #[tracing::instrument(skip(self, key))]
+    fn increment(&self, key: &[u8]) -> Result<Vec<u8>> {
+        let mut txn = self.engine.env.write_txn().map_err(convert_error)?;
+
+        let old = self.tree.get(&txn, &key).map_err(convert_error)?;
+        let new =
+            crate::utils::increment(old.as_deref()).expect("utils::increment always returns Some");
+
+        self.tree
+            .put(&mut txn, &key, &&*new)
+            .map_err(convert_error)?;
+
+        txn.commit().map_err(convert_error)?;
+
+        Ok(new)
+    }
+
+    #[tracing::instrument(skip(self, prefix))]
+    fn scan_prefix<'a>(
+        &'a self,
+        prefix: Vec<u8>,
+    ) -> Box<dyn Iterator<Item = (Vec<u8>, Vec<u8>)> + Send + 'a> {
+        Box::new(
+            self.iter_from(&prefix, false)
+                .take_while(move |(key, _)| key.starts_with(&prefix)),
+        )
+    }
+
+    #[tracing::instrument(skip(self, prefix))]
+    fn watch_prefix<'a>(&'a self, prefix: &[u8]) -> Pin<Box<dyn Future<Output = ()> + Send + 'a>> {
+        let (tx, rx) = tokio::sync::oneshot::channel();
+
+        self.watchers
+            .write()
+            .unwrap()
+            .entry(prefix.to_vec())
+            .or_default()
+            .push(tx);
+
+        Box::pin(async move {
+            // Tx is never dropped before it fires, so this unwrap is fine
+            rx.await.unwrap();
+        })
+    }
+}
diff --git a/src/database/abstraction/sled.rs b/src/database/abstraction/sled.rs
index 12e02751318705e62e3951c1e6c7303cb7cdd44f..d99ce26403167dc556affe8e20fffdb60b670771 100644
--- a/src/database/abstraction/sled.rs
+++ b/src/database/abstraction/sled.rs
@@ -1,7 +1,7 @@
 use super::super::Config;
 use crate::{utils, Result};
-use log::warn;
 use std::{future::Future, pin::Pin, sync::Arc};
+use tracing::warn;
 
 use super::{DatabaseEngine, Tree};
 
diff --git a/src/database/abstraction/sqlite.rs b/src/database/abstraction/sqlite.rs
index 8cc6a8de0060b69b647a1af85232b3fc37477998..a46d3adafa46ceca3cf4351129125be2933ba2c6 100644
--- a/src/database/abstraction/sqlite.rs
+++ b/src/database/abstraction/sqlite.rs
@@ -3,9 +3,8 @@
 use crossbeam::channel::{
     bounded, unbounded, Receiver as ChannelReceiver, Sender as ChannelSender, TryRecvError,
 };
-use log::debug;
 use parking_lot::{Mutex, MutexGuard, RwLock};
-use rusqlite::{params, Connection, DatabaseName::Main, OptionalExtension};
+use rusqlite::{params, Connection, DatabaseName::Main, OptionalExtension, Params};
 use std::{
     collections::HashMap,
     future::Future,
@@ -13,10 +12,11 @@
     path::{Path, PathBuf},
     pin::Pin,
     sync::Arc,
-    thread,
     time::{Duration, Instant},
 };
+use threadpool::ThreadPool;
 use tokio::sync::oneshot::Sender;
+use tracing::{debug, warn};
 
 struct Pool {
     writer: Mutex<Connection>,
@@ -86,9 +86,9 @@ fn deref(&self) -> &Self::Target {
 impl Drop for RecycledConn {
     fn drop(&mut self) {
         if let Some(conn) = self.0.take() {
-            log::debug!("Recycled connection");
+            debug!("Recycled connection");
             if let Err(e) = self.1.send(conn) {
-                log::warn!("Recycling a connection led to the following error: {:?}", e)
+                warn!("Recycling a connection led to the following error: {:?}", e)
             }
         }
     }
@@ -149,14 +149,14 @@ fn read_lock(&self) -> HoldingConn<'_> {
             }
         }
 
-        log::debug!("read_lock: All permanent readers locked, obtaining spillover reader...");
+        debug!("read_lock: All permanent readers locked, obtaining spillover reader...");
 
         // We didn't get a connection from the permanent pool, so we'll dumpster-dive for recycled connections.
         // Either we have a connection or we don't; if we don't, we make a new one.
         let conn = match self.spills.try_take() {
             Some(conn) => conn,
             None => {
-                log::debug!("read_lock: No recycled connections left, creating new one...");
+                debug!("read_lock: No recycled connections left, creating new one...");
                 Self::prepare_conn(&self.path, None).unwrap()
             }
         };
@@ -169,7 +169,7 @@ fn read_lock(&self) -> HoldingConn<'_> {
 
         // If the spillover readers are more than the number of total readers, there might be a problem.
         if now_count > self.readers.len() {
-            log::warn!(
+            warn!(
                 "Database is under high load. Consider increasing sqlite_read_pool_size ({} spillover readers exist)",
                 now_count
             );
@@ -182,6 +182,7 @@ fn read_lock(&self) -> HoldingConn<'_> {
 
 pub struct Engine {
     pool: Pool,
+    iter_pool: Mutex<ThreadPool>,
 }
 
 impl DatabaseEngine for Engine {
@@ -195,7 +196,10 @@ fn open(config: &Config) -> Result<Arc<Self>> {
         pool.write_lock()
             .execute("CREATE TABLE IF NOT EXISTS _noop (\"key\" INT)", params![])?;
 
-        let arc = Arc::new(Engine { pool });
+        let arc = Arc::new(Engine {
+            pool,
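+            // Up to 10 pooled threads for blocking iterator queries (see iter_from_thread).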
+            iter_pool: Mutex::new(ThreadPool::new(10)),
+        });
 
         Ok(arc)
     }
@@ -259,7 +263,7 @@ pub fn reap_spillover_by_fraction(&self, fraction: f64) {
             }
         }
 
-        log::debug!("Reaped {} connections", reaped);
+        debug!("Reaped {} connections", reaped);
     }
 }
 
@@ -272,6 +276,7 @@ pub struct SqliteTable {
 type TupleOfBytes = (Vec<u8>, Vec<u8>);
 
 impl SqliteTable {
+    #[tracing::instrument(skip(self, guard, key))]
     fn get_with_guard(&self, guard: &Connection, key: &[u8]) -> Result<Option<Vec<u8>>> {
         Ok(guard
             .prepare(format!("SELECT value FROM {} WHERE key = ?", self.name).as_str())?
@@ -279,6 +284,7 @@ fn get_with_guard(&self, guard: &Connection, key: &[u8]) -> Result<Option<Vec<u8
             .optional()?)
     }
 
+    #[tracing::instrument(skip(self, guard, key, value))]
     fn insert_with_guard(&self, guard: &Connection, key: &[u8], value: &[u8]) -> Result<()> {
         guard.execute(
             format!(
@@ -291,41 +297,67 @@ fn insert_with_guard(&self, guard: &Connection, key: &[u8], value: &[u8]) -> Res
         Ok(())
     }
 
-    fn _iter_from_thread<F>(&self, f: F) -> Box<dyn Iterator<Item = TupleOfBytes> + Send>
-    where
-        F: (for<'a> FnOnce(&'a Connection, ChannelSender<TupleOfBytes>)) + Send + 'static,
-    {
+    #[tracing::instrument(skip(self, sql, param))]
+    fn iter_from_thread(
+        &self,
+        sql: String,
+        param: Option<Vec<u8>>,
+    ) -> Box<dyn Iterator<Item = TupleOfBytes> + Send + Sync> {
         let (s, r) = bounded::<TupleOfBytes>(5);
 
-        let engine = self.engine.clone();
+        let engine = Arc::clone(&self.engine);
 
-        thread::spawn(move || {
-            let _ = f(&engine.pool.read_lock(), s);
-        });
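+        // Run the blocking query on a pooled thread if one is free; when the pool
+        // is saturated, spawn a dedicated thread instead of queueing behind it.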
+        let lock = self.engine.iter_pool.lock();
+        if lock.active_count() < lock.max_count() {
+            lock.execute(move || {
+                if let Some(param) = param {
+                    iter_from_thread_work(&engine.pool.read_lock(), &s, &sql, [param]);
+                } else {
+                    iter_from_thread_work(&engine.pool.read_lock(), &s, &sql, []);
+                }
+            });
+        } else {
+            std::thread::spawn(move || {
+                if let Some(param) = param {
+                    iter_from_thread_work(&engine.pool.read_lock(), &s, &sql, [param]);
+                } else {
+                    iter_from_thread_work(&engine.pool.read_lock(), &s, &sql, []);
+                }
+            });
+        }
 
         Box::new(r.into_iter())
     }
 }
 
-macro_rules! iter_from_thread {
-    ($self:expr, $sql:expr, $param:expr) => {
-        $self._iter_from_thread(move |guard, s| {
-            let _ = guard
-                .prepare($sql)
-                .unwrap()
-                .query_map($param, |row| Ok((row.get_unwrap(0), row.get_unwrap(1))))
-                .unwrap()
-                .map(|r| r.unwrap())
-                .try_for_each(|bob| s.send(bob));
-        })
-    };
+fn iter_from_thread_work<P>(
+    guard: &HoldingConn<'_>,
+    s: &ChannelSender<(Vec<u8>, Vec<u8>)>,
+    sql: &str,
+    params: P,
+) where
+    P: Params,
+{
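+    // Forward each row over the channel; stop as soon as the receiver hangs up.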
+    for bob in guard
+        .prepare(sql)
+        .unwrap()
+        .query_map(params, |row| Ok((row.get_unwrap(0), row.get_unwrap(1))))
+        .unwrap()
+        .map(|r| r.unwrap())
+    {
+        if s.send(bob).is_err() {
+            return;
+        }
+    }
 }
 
 impl Tree for SqliteTable {
+    #[tracing::instrument(skip(self, key))]
     fn get(&self, key: &[u8]) -> Result<Option<Vec<u8>>> {
         self.get_with_guard(&self.engine.pool.read_lock(), key)
     }
 
+    #[tracing::instrument(skip(self, key, value))]
     fn insert(&self, key: &[u8], value: &[u8]) -> Result<()> {
         let guard = self.engine.pool.write_lock();
 
@@ -365,6 +397,7 @@ fn insert(&self, key: &[u8], value: &[u8]) -> Result<()> {
         Ok(())
     }
 
+    #[tracing::instrument(skip(self, key))]
     fn remove(&self, key: &[u8]) -> Result<()> {
         let guard = self.engine.pool.write_lock();
 
@@ -385,15 +418,13 @@ fn remove(&self, key: &[u8]) -> Result<()> {
         Ok(())
     }
 
+    #[tracing::instrument(skip(self))]
     fn iter<'a>(&'a self) -> Box<dyn Iterator<Item = TupleOfBytes> + Send + 'a> {
         let name = self.name.clone();
-        iter_from_thread!(
-            self,
-            format!("SELECT key, value FROM {}", name).as_str(),
-            params![]
-        )
+        self.iter_from_thread(format!("SELECT key, value FROM {}", name), None)
     }
 
+    #[tracing::instrument(skip(self, from, backwards))]
     fn iter_from<'a>(
         &'a self,
         from: &[u8],
@@ -402,28 +433,25 @@ fn iter_from<'a>(
         let name = self.name.clone();
         let from = from.to_vec(); // TODO change interface?
         if backwards {
-            iter_from_thread!(
-                self,
+            self.iter_from_thread(
                 format!(
                     "SELECT key, value FROM {} WHERE key <= ? ORDER BY key DESC",
                     name
-                )
-                .as_str(),
-                [from]
+                ),
+                Some(from),
             )
         } else {
-            iter_from_thread!(
-                self,
+            self.iter_from_thread(
                 format!(
                     "SELECT key, value FROM {} WHERE key >= ? ORDER BY key ASC",
                     name
-                )
-                .as_str(),
-                [from]
+                ),
+                Some(from),
             )
         }
     }
 
+    #[tracing::instrument(skip(self, key))]
     fn increment(&self, key: &[u8]) -> Result<Vec<u8>> {
         let guard = self.engine.pool.write_lock();
 
@@ -446,18 +474,17 @@ fn increment(&self, key: &[u8]) -> Result<Vec<u8>> {
         Ok(new)
     }
 
+    #[tracing::instrument(skip(self, prefix))]
     fn scan_prefix<'a>(
         &'a self,
         prefix: Vec<u8>,
     ) -> Box<dyn Iterator<Item = TupleOfBytes> + Send + 'a> {
         // let name = self.name.clone();
-        // iter_from_thread!(
-        //     self,
+        // self.iter_from_thread(
         //     format!(
         //         "SELECT key, value FROM {} WHERE key BETWEEN ?1 AND ?1 || X'FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF' ORDER BY key ASC",
         //         name
         //     )
-        //     .as_str(),
         //     [prefix]
         // )
         Box::new(
@@ -466,6 +493,7 @@ fn scan_prefix<'a>(
         )
     }
 
+    #[tracing::instrument(skip(self, prefix))]
     fn watch_prefix<'a>(&'a self, prefix: &[u8]) -> Pin<Box<dyn Future<Output = ()> + Send + 'a>> {
         let (tx, rx) = tokio::sync::oneshot::channel();
 
@@ -481,6 +509,7 @@ fn watch_prefix<'a>(&'a self, prefix: &[u8]) -> Pin<Box<dyn Future<Output = ()>
         })
     }
 
+    #[tracing::instrument(skip(self))]
     fn clear(&self) -> Result<()> {
         debug!("clear: running");
         self.engine
diff --git a/src/database/account_data.rs b/src/database/account_data.rs
index b1d5b6b505702005aeedc4707fff9f520d5cf2c8..e1d4c62065c74006fc9043910d4886fdc4dc5d49 100644
--- a/src/database/account_data.rs
+++ b/src/database/account_data.rs
@@ -12,10 +12,12 @@
 
 pub struct AccountData {
     pub(super) roomuserdataid_accountdata: Arc<dyn Tree>, // RoomUserDataId = Room + User + Count + Type
+    pub(super) roomusertype_roomuserdataid: Arc<dyn Tree>, // RoomUserType = Room + User + Type
 }
 
 impl AccountData {
     /// Places one event in the account data of the user and removes the previous entry.
+    #[tracing::instrument(skip(self, room_id, user_id, event_type, data, globals))]
     pub fn update<T: Serialize>(
         &self,
         room_id: Option<&RoomId>,
@@ -33,15 +35,13 @@ pub fn update<T: Serialize>(
         prefix.extend_from_slice(&user_id.as_bytes());
         prefix.push(0xff);
 
-        // Remove old entry
-        if let Some((old_key, _)) = self.find_event(room_id, user_id, &event_type)? {
-            self.roomuserdataid_accountdata.remove(&old_key)?;
-        }
+        let mut roomuserdataid = prefix.clone();
+        roomuserdataid.extend_from_slice(&globals.next_count()?.to_be_bytes());
+        roomuserdataid.push(0xff);
+        roomuserdataid.extend_from_slice(&event_type.as_bytes());
 
-        let mut key = prefix;
-        key.extend_from_slice(&globals.next_count()?.to_be_bytes());
-        key.push(0xff);
-        key.extend_from_slice(event_type.as_ref().as_bytes());
+        let mut key = prefix.clone();
+        key.extend_from_slice(event_type.as_bytes());
 
         let json = serde_json::to_value(data).expect("all types here can be serialized"); // TODO: maybe add error handling
         if json.get("type").is_none() || json.get("content").is_none() {
@@ -52,29 +52,58 @@ pub fn update<T: Serialize>(
         }
 
         self.roomuserdataid_accountdata.insert(
-            &key,
+            &roomuserdataid,
             &serde_json::to_vec(&json).expect("to_vec always works on json values"),
         )?;
 
+        let prev = self.roomusertype_roomuserdataid.get(&key)?;
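+        // `prev` is the old roomuserdataid for this (room, user, type), read from the
+        // index before it is overwritten; the stale data row is removed below.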
+
+        self.roomusertype_roomuserdataid
+            .insert(&key, &roomuserdataid)?;
+
+        // Remove old entry
+        if let Some(prev) = prev {
+            self.roomuserdataid_accountdata.remove(&prev)?;
+        }
+
         Ok(())
     }
 
     /// Searches the account data for a specific kind.
+    #[tracing::instrument(skip(self, room_id, user_id, kind))]
     pub fn get<T: DeserializeOwned>(
         &self,
         room_id: Option<&RoomId>,
         user_id: &UserId,
         kind: EventType,
     ) -> Result<Option<T>> {
-        self.find_event(room_id, user_id, &kind)?
-            .map(|(_, v)| {
-                serde_json::from_slice(&v).map_err(|_| Error::bad_database("could not deserialize"))
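+        // Resolve (room, user, kind) to a roomuserdataid via the index tree, then
+        // load the actual event from roomuserdataid_accountdata.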
+        let mut key = room_id
+            .map(|r| r.to_string())
+            .unwrap_or_default()
+            .as_bytes()
+            .to_vec();
+        key.push(0xff);
+        key.extend_from_slice(&user_id.as_bytes());
+        key.push(0xff);
+        key.extend_from_slice(kind.as_ref().as_bytes());
+
+        self.roomusertype_roomuserdataid
+            .get(&key)?
+            .and_then(|roomuserdataid| {
+                self.roomuserdataid_accountdata
+                    .get(&roomuserdataid)
+                    .transpose()
+            })
+            .transpose()?
+            .map(|data| {
+                serde_json::from_slice(&data)
+                    .map_err(|_| Error::bad_database("could not deserialize"))
             })
             .transpose()
     }
 
     /// Returns all changes to the account data that happened after `since`.
-    #[tracing::instrument(skip(self))]
+    #[tracing::instrument(skip(self, room_id, user_id, since))]
     pub fn changes_since(
         &self,
         room_id: Option<&RoomId>,
@@ -121,36 +150,4 @@ pub fn changes_since(
 
         Ok(userdata)
     }
-
-    fn find_event(
-        &self,
-        room_id: Option<&RoomId>,
-        user_id: &UserId,
-        kind: &EventType,
-    ) -> Result<Option<(Vec<u8>, Vec<u8>)>> {
-        let mut prefix = room_id
-            .map(|r| r.to_string())
-            .unwrap_or_default()
-            .as_bytes()
-            .to_vec();
-        prefix.push(0xff);
-        prefix.extend_from_slice(&user_id.as_bytes());
-        prefix.push(0xff);
-
-        let mut last_possible_key = prefix.clone();
-        last_possible_key.extend_from_slice(&u64::MAX.to_be_bytes());
-
-        let kind = kind.clone();
-
-        Ok(self
-            .roomuserdataid_accountdata
-            .iter_from(&last_possible_key, true)
-            .take_while(move |(k, _)| k.starts_with(&prefix))
-            .find(move |(k, _)| {
-                k.rsplit(|&b| b == 0xff)
-                    .next()
-                    .map(|current_event_type| current_event_type == kind.as_ref().as_bytes())
-                    .unwrap_or(false)
-            }))
-    }
 }
diff --git a/src/database/admin.rs b/src/database/admin.rs
index d8b7ae5e09af5896f9d73e15f60b4c2bff88b37a..e1b24d0851800b0fc55e5f8df2ff796bedc4aa1f 100644
--- a/src/database/admin.rs
+++ b/src/database/admin.rs
@@ -4,13 +4,13 @@
 };
 
 use crate::{pdu::PduBuilder, Database};
-use log::warn;
 use rocket::futures::{channel::mpsc, stream::StreamExt};
 use ruma::{
     events::{room::message, EventType},
     UserId,
 };
 use tokio::sync::{MutexGuard, RwLock, RwLockReadGuard};
+use tracing::warn;
 
 pub enum AdminCommand {
     RegisterAppservice(serde_yaml::Value),
diff --git a/src/database/globals.rs b/src/database/globals.rs
index fbd41a3899b148339668b2e5dda8a9a634a46997..0edb9ca27c1f3bdb334f5511adb3989c1a0d1b35 100644
--- a/src/database/globals.rs
+++ b/src/database/globals.rs
@@ -1,5 +1,4 @@
 use crate::{database::Config, utils, ConduitResult, Error, Result};
-use log::{error, info};
 use ruma::{
     api::{
         client::r0::sync::sync_events,
@@ -17,6 +16,7 @@
     time::{Duration, Instant},
 };
 use tokio::sync::{broadcast, watch::Receiver, Mutex, Semaphore};
+use tracing::{error, info};
 use trust_dns_resolver::TokioAsyncResolver;
 
 use super::abstraction::Tree;
@@ -56,6 +56,7 @@ struct MatrixServerVerifier {
 }
 
 impl ServerCertVerifier for MatrixServerVerifier {
+    #[tracing::instrument(skip(self, roots, presented_certs, dns_name, ocsp_response))]
     fn verify_server_cert(
         &self,
         roots: &rustls::RootCertStore,
@@ -220,11 +221,13 @@ pub fn reqwest_client(&self) -> &reqwest::Client {
         &self.reqwest_client
     }
 
+    #[tracing::instrument(skip(self))]
     pub fn next_count(&self) -> Result<u64> {
         utils::u64_from_bytes(&self.globals.increment(COUNTER)?)
             .map_err(|_| Error::bad_database("Count has invalid bytes."))
     }
 
+    #[tracing::instrument(skip(self))]
     pub fn current_count(&self) -> Result<u64> {
         self.globals.get(COUNTER)?.map_or(Ok(0_u64), |bytes| {
             utils::u64_from_bytes(&bytes)
diff --git a/src/database/pusher.rs b/src/database/pusher.rs
index 348f4dcb82ee4fc089a24394acad502516adb7fd..3df9ed4f8944d39ffe18e77221161764a05a82d1 100644
--- a/src/database/pusher.rs
+++ b/src/database/pusher.rs
@@ -1,6 +1,5 @@
 use crate::{Database, Error, PduEvent, Result};
 use bytes::BytesMut;
-use log::{error, info, warn};
 use ruma::{
     api::{
         client::r0::push::{get_pushers, set_pusher, PusherKind},
@@ -10,11 +9,13 @@
         },
         IncomingResponse, OutgoingRequest, SendAccessToken,
     },
-    events::{room::power_levels::PowerLevelsEventContent, EventType},
+    events::{room::power_levels::PowerLevelsEventContent, AnySyncRoomEvent, EventType},
     identifiers::RoomName,
     push::{Action, PushConditionRoomCtx, PushFormat, Ruleset, Tweak},
-    uint, UInt, UserId,
+    serde::Raw,
+    uint, RoomId, UInt, UserId,
 };
+use tracing::{error, info, warn};
 
 use std::{convert::TryFrom, fmt::Debug, mem, sync::Arc};
 
@@ -26,6 +27,7 @@ pub struct PushData {
 }
 
 impl PushData {
+    #[tracing::instrument(skip(self, sender, pusher))]
     pub fn set_pusher(&self, sender: &UserId, pusher: set_pusher::Pusher) -> Result<()> {
         let mut key = sender.as_bytes().to_vec();
         key.push(0xff);
@@ -48,6 +50,7 @@ pub fn set_pusher(&self, sender: &UserId, pusher: set_pusher::Pusher) -> Result<
         Ok(())
     }
 
+    #[tracing::instrument(skip(self, senderkey))]
     pub fn get_pusher(&self, senderkey: &[u8]) -> Result<Option<get_pushers::Pusher>> {
         self.senderkey_pusher
             .get(senderkey)?
@@ -58,6 +61,7 @@ pub fn get_pusher(&self, senderkey: &[u8]) -> Result<Option<get_pushers::Pusher>
             .transpose()
     }
 
+    #[tracing::instrument(skip(self, sender))]
     pub fn get_pushers(&self, sender: &UserId) -> Result<Vec<get_pushers::Pusher>> {
         let mut prefix = sender.as_bytes().to_vec();
         prefix.push(0xff);
@@ -71,6 +75,7 @@ pub fn get_pushers(&self, sender: &UserId) -> Result<Vec<get_pushers::Pusher>> {
             .collect()
     }
 
+    #[tracing::instrument(skip(self, sender))]
     pub fn get_pusher_senderkeys<'a>(
         &'a self,
         sender: &UserId,
@@ -82,6 +87,7 @@ pub fn get_pusher_senderkeys<'a>(
     }
 }
 
+#[tracing::instrument(skip(globals, destination, request))]
 pub async fn send_request<T: OutgoingRequest>(
     globals: &crate::database::globals::Globals,
     destination: &str,
@@ -155,6 +161,7 @@ pub async fn send_request<T: OutgoingRequest>(
     }
 }
 
+#[tracing::instrument(skip(user, unread, pusher, ruleset, pdu, db))]
 pub async fn send_push_notice(
     user: &UserId,
     unread: UInt,
@@ -166,7 +173,24 @@ pub async fn send_push_notice(
     let mut notify = None;
     let mut tweaks = Vec::new();
 
-    for action in get_actions(user, &ruleset, pdu, db)? {
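+    // The room's power levels are fetched once here and passed to get_actions,
+    // which no longer looks them up itself.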
+    let power_levels: PowerLevelsEventContent = db
+        .rooms
+        .room_state_get(&pdu.room_id, &EventType::RoomPowerLevels, "")?
+        .map(|ev| {
+            serde_json::from_value(ev.content.clone())
+                .map_err(|_| Error::bad_database("invalid m.room.power_levels event"))
+        })
+        .transpose()?
+        .unwrap_or_default();
+
+    for action in get_actions(
+        user,
+        &ruleset,
+        &power_levels,
+        &pdu.to_sync_room_event(),
+        &pdu.room_id,
+        db,
+    )? {
         let n = match action {
             Action::DontNotify => false,
             // TODO: Implement proper support for coalesce
@@ -194,37 +218,31 @@ pub async fn send_push_notice(
     Ok(())
 }
 
+#[tracing::instrument(skip(user, ruleset, pdu, db))]
 pub fn get_actions<'a>(
     user: &UserId,
     ruleset: &'a Ruleset,
-    pdu: &PduEvent,
+    power_levels: &PowerLevelsEventContent,
+    pdu: &Raw<AnySyncRoomEvent>,
+    room_id: &RoomId,
     db: &Database,
 ) -> Result<&'a [Action]> {
-    let power_levels: PowerLevelsEventContent = db
-        .rooms
-        .room_state_get(&pdu.room_id, &EventType::RoomPowerLevels, "")?
-        .map(|ev| {
-            serde_json::from_value(ev.content.clone())
-                .map_err(|_| Error::bad_database("invalid m.room.power_levels event"))
-        })
-        .transpose()?
-        .unwrap_or_default();
-
     let ctx = PushConditionRoomCtx {
-        room_id: pdu.room_id.clone(),
+        room_id: room_id.clone(),
         member_count: 10_u32.into(), // TODO: get member count efficiently
         user_display_name: db
             .users
             .displayname(&user)?
             .unwrap_or_else(|| user.localpart().to_owned()),
-        users_power_levels: power_levels.users,
+        users_power_levels: power_levels.users.clone(),
         default_power_level: power_levels.users_default,
-        notification_power_levels: power_levels.notifications,
+        notification_power_levels: power_levels.notifications.clone(),
     };
 
-    Ok(ruleset.get_actions(&pdu.to_sync_room_event(), &ctx))
+    Ok(ruleset.get_actions(pdu, &ctx))
 }
 
+#[tracing::instrument(skip(unread, pusher, tweaks, event, db))]
 async fn send_notice(
     unread: UInt,
     pusher: &get_pushers::Pusher,
diff --git a/src/database/rooms.rs b/src/database/rooms.rs
index f0ec683d6312d0db14d9cc8058c9c7b4ae55c177..79bb059d1c9afa38c1083e9f0d7d26e49cd01355 100644
--- a/src/database/rooms.rs
+++ b/src/database/rooms.rs
@@ -5,7 +5,6 @@
 use tokio::sync::MutexGuard;
 
 use crate::{pdu::PduBuilder, utils, Database, Error, PduEvent, Result};
-use log::{debug, error, warn};
 use lru_cache::LruCache;
 use regex::Regex;
 use ring::digest;
@@ -13,7 +12,9 @@
     api::{client::error::ErrorKind, federation},
     events::{
         ignored_user_list, push_rules,
-        room::{create::CreateEventContent, member, message},
+        room::{
+            create::CreateEventContent, member, message, power_levels::PowerLevelsEventContent,
+        },
         AnyStrippedStateEvent, AnySyncStateEvent, EventType,
     },
     push::{self, Action, Tweak},
@@ -27,6 +28,7 @@
     mem,
     sync::{Arc, Mutex},
 };
+use tracing::{debug, error, warn};
 
 use super::{abstraction::Tree, admin::AdminCommand, pusher};
 
@@ -82,7 +84,7 @@ pub struct Rooms {
     pub(super) eventid_outlierpdu: Arc<dyn Tree>,
 
     /// RoomId + EventId -> Parent PDU EventId.
-    pub(super) prevevent_parent: Arc<dyn Tree>,
+    pub(super) referencedevents: Arc<dyn Tree>,
 
     pub(super) pdu_cache: Mutex<LruCache<EventId, Arc<PduEvent>>>,
     pub(super) auth_chain_cache: Mutex<LruCache<EventId, HashSet<EventId>>>,
@@ -617,6 +619,7 @@ fn replace_pdu(&self, pdu_id: &[u8], pdu: &PduEvent) -> Result<()> {
     }
 
     /// Returns the leaf pdus of a room.
+    #[tracing::instrument(skip(self))]
     pub fn get_pdu_leaves(&self, room_id: &RoomId) -> Result<HashSet<EventId>> {
         let mut prefix = room_id.as_bytes().to_vec();
         prefix.push(0xff);
@@ -636,6 +639,7 @@ pub fn get_pdu_leaves(&self, room_id: &RoomId) -> Result<HashSet<EventId>> {
     ///
     /// The provided `event_ids` become the new leaves, this allows a room to have multiple
     /// `prev_events`.
+    #[tracing::instrument(skip(self))]
     pub fn replace_pdu_leaves(&self, room_id: &RoomId, event_ids: &[EventId]) -> Result<()> {
         let mut prefix = room_id.as_bytes().to_vec();
         prefix.push(0xff);
@@ -653,13 +657,15 @@ pub fn replace_pdu_leaves(&self, room_id: &RoomId, event_ids: &[EventId]) -> Res
         Ok(())
     }
 
+    #[tracing::instrument(skip(self))]
     pub fn is_event_referenced(&self, room_id: &RoomId, event_id: &EventId) -> Result<bool> {
         let mut key = room_id.as_bytes().to_vec();
         key.extend_from_slice(event_id.as_bytes());
-        Ok(self.prevevent_parent.get(&key)?.is_some())
+        Ok(self.referencedevents.get(&key)?.is_some())
     }
 
     /// Returns the pdu from the outlier tree.
+    #[tracing::instrument(skip(self))]
     pub fn get_pdu_outlier(&self, event_id: &EventId) -> Result<Option<PduEvent>> {
         self.eventid_outlierpdu
             .get(event_id.as_bytes())?
@@ -671,6 +677,7 @@ pub fn get_pdu_outlier(&self, event_id: &EventId) -> Result<Option<PduEvent>> {
     /// Append the PDU as an outlier.
     ///
     /// Any event given to this will be processed (state-res) on another thread.
+    #[tracing::instrument(skip(self, pdu))]
     pub fn add_pdu_outlier(&self, event_id: &EventId, pdu: &CanonicalJsonObject) -> Result<()> {
         self.eventid_outlierpdu.insert(
             &event_id.as_bytes(),
@@ -684,7 +691,7 @@ pub fn add_pdu_outlier(&self, event_id: &EventId, pdu: &CanonicalJsonObject) ->
     ///
     /// By this point the incoming event should be fully authenticated, no auth happens
     /// in `append_pdu`.
-    #[allow(clippy::too_many_arguments)]
+    #[tracing::instrument(skip(self, pdu, pdu_json, leaves, db))]
     pub fn append_pdu(
         &self,
         pdu: &PduEvent,
@@ -721,11 +728,10 @@ pub fn append_pdu(
         }
 
         // We must keep track of all events that have been referenced.
-        for leaf in leaves {
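+        // Mark each prev_event of this PDU as referenced; only key existence is
+        // checked later (see is_event_referenced), so the value stays empty.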
+        for prev in &pdu.prev_events {
             let mut key = pdu.room_id().as_bytes().to_vec();
-            key.extend_from_slice(leaf.as_bytes());
-            self.prevevent_parent
-                .insert(&key, pdu.event_id().as_bytes())?;
+            key.extend_from_slice(prev.as_bytes());
+            self.referencedevents.insert(&key, &[])?;
         }
 
         self.replace_pdu_leaves(&pdu.room_id, leaves)?;
@@ -756,13 +762,24 @@ pub fn append_pdu(
             .insert(pdu.event_id.as_bytes(), &pdu_id)?;
 
         // See if the event matches any known pushers
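+        // Compute the power levels and the sync-format PDU once; they are reused
+        // for every local room member below.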
+        let power_levels: PowerLevelsEventContent = db
+            .rooms
+            .room_state_get(&pdu.room_id, &EventType::RoomPowerLevels, "")?
+            .map(|ev| {
+                serde_json::from_value(ev.content.clone())
+                    .map_err(|_| Error::bad_database("invalid m.room.power_levels event"))
+            })
+            .transpose()?
+            .unwrap_or_default();
+
+        let sync_pdu = pdu.to_sync_room_event();
+
         for user in db
-            .users
-            .iter()
+            .rooms
+            .room_members(&pdu.room_id)
             .filter_map(|r| r.ok())
             .filter(|user_id| user_id.server_name() == db.globals.server_name())
             .filter(|user_id| !db.users.is_deactivated(user_id).unwrap_or(false))
-            .filter(|user_id| self.is_joined(&user_id, &pdu.room_id).unwrap_or(false))
         {
             // Don't notify the user of their own events
             if user == pdu.sender {
@@ -778,7 +795,14 @@ pub fn append_pdu(
             let mut highlight = false;
             let mut notify = false;
 
-            for action in pusher::get_actions(&user, &rules_for_user, pdu, db)? {
+            for action in pusher::get_actions(
+                &user,
+                &rules_for_user,
+                &power_levels,
+                &sync_pdu,
+                &pdu.room_id,
+                db,
+            )? {
                 match action {
                     Action::DontNotify => notify = false,
                     // TODO: Implement proper support for coalesce
@@ -860,6 +884,7 @@ pub fn append_pdu(
                 if let Some(body) = pdu.content.get("body").and_then(|b| b.as_str()) {
                     for word in body
                         .split_terminator(|c: char| !c.is_alphanumeric())
+                        .filter(|word| word.len() <= 50)
                         .map(str::to_lowercase)
                     {
                         let mut key = pdu.room_id.as_bytes().to_vec();
@@ -992,6 +1017,7 @@ pub fn append_pdu(
         Ok(pdu_id)
     }
 
+    #[tracing::instrument(skip(self))]
     pub fn reset_notification_counts(&self, user_id: &UserId, room_id: &RoomId) -> Result<()> {
         let mut userroom_id = user_id.as_bytes().to_vec();
         userroom_id.push(0xff);
@@ -1005,6 +1031,7 @@ pub fn reset_notification_counts(&self, user_id: &UserId, room_id: &RoomId) -> R
         Ok(())
     }
 
+    #[tracing::instrument(skip(self))]
     pub fn notification_count(&self, user_id: &UserId, room_id: &RoomId) -> Result<u64> {
         let mut userroom_id = user_id.as_bytes().to_vec();
         userroom_id.push(0xff);
@@ -1019,6 +1046,7 @@ pub fn notification_count(&self, user_id: &UserId, room_id: &RoomId) -> Result<u
             .unwrap_or(Ok(0))
     }
 
+    #[tracing::instrument(skip(self))]
     pub fn highlight_count(&self, user_id: &UserId, room_id: &RoomId) -> Result<u64> {
         let mut userroom_id = user_id.as_bytes().to_vec();
         userroom_id.push(0xff);
@@ -1037,6 +1065,7 @@ pub fn highlight_count(&self, user_id: &UserId, room_id: &RoomId) -> Result<u64>
     ///
     /// This adds all current state events (not including the incoming event)
     /// to `stateid_pduid` and adds the incoming event to `eventid_statehash`.
+    #[tracing::instrument(skip(self, state, globals))]
     pub fn set_event_state(
         &self,
         event_id: &EventId,
@@ -1121,6 +1150,7 @@ pub fn set_event_state(
     ///
     /// This adds all current state events (not including the incoming event)
     /// to `stateid_pduid` and adds the incoming event to `eventid_statehash`.
+    #[tracing::instrument(skip(self, new_pdu, globals))]
     pub fn append_to_state(
         &self,
         new_pdu: &PduEvent,
@@ -1227,6 +1257,7 @@ pub fn append_to_state(
         }
     }
 
+    #[tracing::instrument(skip(self, invite_event))]
     pub fn calculate_invite_state(
         &self,
         invite_event: &PduEvent,
@@ -1264,6 +1295,7 @@ pub fn calculate_invite_state(
         Ok(state)
     }
 
+    #[tracing::instrument(skip(self))]
     pub fn set_room_state(&self, room_id: &RoomId, shortstatehash: u64) -> Result<()> {
         self.roomid_shortstatehash
             .insert(room_id.as_bytes(), &shortstatehash.to_be_bytes())?;
@@ -1272,6 +1304,7 @@ pub fn set_room_state(&self, room_id: &RoomId, shortstatehash: u64) -> Result<()
     }
 
     /// Creates a new persisted data unit and adds it to a room.
+    #[tracing::instrument(skip(self, db, _mutex_lock))]
     pub fn build_and_append_pdu(
         &self,
         pdu_builder: PduBuilder,
@@ -1424,6 +1457,13 @@ pub fn build_and_append_pdu(
             CanonicalJsonValue::String(pdu.event_id.as_str().to_owned()),
         );
 
+        // Generate short event id
+        let shorteventid = db.globals.next_count()?;
+        self.eventid_shorteventid
+            .insert(pdu.event_id.as_bytes(), &shorteventid.to_be_bytes())?;
+        self.shorteventid_eventid
+            .insert(&shorteventid.to_be_bytes(), pdu.event_id.as_bytes())?;
+
         // Increment the last index and use that
         // This is also the next_batch/since value
         let count = db.globals.next_count()?;
@@ -1563,6 +1603,7 @@ pub fn pdus_since<'a>(
 
     /// Returns an iterator over all events and their tokens in a room that happened before the
     /// event with id `until` in reverse-chronological order.
+    #[tracing::instrument(skip(self))]
     pub fn pdus_until<'a>(
         &'a self,
         user_id: &UserId,
@@ -1625,6 +1666,7 @@ pub fn pdus_after<'a>(
     }
 
     /// Replace a PDU with the redacted form.
+    #[tracing::instrument(skip(self, reason))]
     pub fn redact_pdu(&self, event_id: &EventId, reason: &PduEvent) -> Result<()> {
         if let Some(pdu_id) = self.get_pdu_id(event_id)? {
             let mut pdu = self
@@ -1642,6 +1684,7 @@ pub fn redact_pdu(&self, event_id: &EventId, reason: &PduEvent) -> Result<()> {
     }
 
     /// Update current membership data.
+    #[tracing::instrument(skip(self, last_state, db))]
     pub fn update_membership(
         &self,
         room_id: &RoomId,
@@ -2026,6 +2069,7 @@ async fn remote_leave_room(
     }
 
     /// Makes a user forget a room.
+    #[tracing::instrument(skip(self))]
     pub fn forget(&self, room_id: &RoomId, user_id: &UserId) -> Result<()> {
         let mut userroom_id = user_id.as_bytes().to_vec();
         userroom_id.push(0xff);
@@ -2041,6 +2085,7 @@ pub fn forget(&self, room_id: &RoomId, user_id: &UserId) -> Result<()> {
         Ok(())
     }
 
+    #[tracing::instrument(skip(self, globals))]
     pub fn set_alias(
         &self,
         alias: &RoomAliasId,
@@ -2076,6 +2121,7 @@ pub fn set_alias(
         Ok(())
     }
 
+    #[tracing::instrument(skip(self))]
     pub fn id_from_alias(&self, alias: &RoomAliasId) -> Result<Option<RoomId>> {
         self.alias_roomid
             .get(alias.alias().as_bytes())?
@@ -2089,6 +2135,7 @@ pub fn id_from_alias(&self, alias: &RoomAliasId) -> Result<Option<RoomId>> {
             })
     }
 
+    #[tracing::instrument(skip(self))]
     pub fn room_aliases<'a>(
         &'a self,
         room_id: &RoomId,
@@ -2104,6 +2151,7 @@ pub fn room_aliases<'a>(
         })
     }
 
+    #[tracing::instrument(skip(self))]
     pub fn set_public(&self, room_id: &RoomId, public: bool) -> Result<()> {
         if public {
             self.publicroomids.insert(room_id.as_bytes(), &[])?;
@@ -2114,10 +2162,12 @@ pub fn set_public(&self, room_id: &RoomId, public: bool) -> Result<()> {
         Ok(())
     }
 
+    #[tracing::instrument(skip(self))]
     pub fn is_public_room(&self, room_id: &RoomId) -> Result<bool> {
         Ok(self.publicroomids.get(room_id.as_bytes())?.is_some())
     }
 
+    #[tracing::instrument(skip(self))]
     pub fn public_rooms(&self) -> impl Iterator<Item = Result<RoomId>> + '_ {
         self.publicroomids.iter().map(|(bytes, _)| {
             RoomId::try_from(
@@ -2219,6 +2269,7 @@ pub fn get_shared_rooms<'a>(
     }
 
     /// Returns an iterator of all servers participating in this room.
+    #[tracing::instrument(skip(self))]
     pub fn room_servers<'a>(
         &'a self,
         room_id: &RoomId,
@@ -2242,6 +2293,7 @@ pub fn room_servers<'a>(
     }
 
     /// Returns an iterator of all rooms a server participates in (as far as we know).
+    #[tracing::instrument(skip(self))]
     pub fn server_rooms<'a>(
         &'a self,
         server: &ServerName,
@@ -2287,6 +2339,7 @@ pub fn room_members<'a>(
     }
 
     /// Returns an iterator over all User IDs who ever joined a room.
+    #[tracing::instrument(skip(self))]
     pub fn room_useroncejoined<'a>(
         &'a self,
         room_id: &RoomId,
@@ -2494,6 +2547,7 @@ pub fn rooms_left<'a>(
             })
     }
 
+    #[tracing::instrument(skip(self))]
     pub fn once_joined(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool> {
         let mut userroom_id = user_id.as_bytes().to_vec();
         userroom_id.push(0xff);
@@ -2502,6 +2556,7 @@ pub fn once_joined(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool> {
         Ok(self.roomuseroncejoinedids.get(&userroom_id)?.is_some())
     }
 
+    #[tracing::instrument(skip(self))]
     pub fn is_joined(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool> {
         let mut userroom_id = user_id.as_bytes().to_vec();
         userroom_id.push(0xff);
@@ -2510,6 +2565,7 @@ pub fn is_joined(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool> {
         Ok(self.userroomid_joined.get(&userroom_id)?.is_some())
     }
 
+    #[tracing::instrument(skip(self))]
     pub fn is_invited(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool> {
         let mut userroom_id = user_id.as_bytes().to_vec();
         userroom_id.push(0xff);
@@ -2518,6 +2574,7 @@ pub fn is_invited(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool> {
         Ok(self.userroomid_invitestate.get(&userroom_id)?.is_some())
     }
 
+    #[tracing::instrument(skip(self))]
     pub fn is_left(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool> {
         let mut userroom_id = user_id.as_bytes().to_vec();
         userroom_id.push(0xff);
@@ -2526,6 +2583,7 @@ pub fn is_left(&self, user_id: &UserId, room_id: &RoomId) -> Result<bool> {
         Ok(self.userroomid_leftstate.get(&userroom_id)?.is_some())
     }
 
+    #[tracing::instrument(skip(self))]
     pub fn auth_chain_cache(
         &self,
     ) -> std::sync::MutexGuard<'_, LruCache<EventId, HashSet<EventId>>> {
diff --git a/src/database/rooms/edus.rs b/src/database/rooms/edus.rs
index 9a5cdeb275cdf042cee554884b4c3300c9194a9d..664c1710d772bcdd955d3965d4d4140c8c40f01a 100644
--- a/src/database/rooms/edus.rs
+++ b/src/database/rooms/edus.rs
@@ -116,6 +116,7 @@ pub fn readreceipts_since<'a>(
     }
 
     /// Sets a private read marker at `count`.
+    #[tracing::instrument(skip(self, globals))]
     pub fn private_read_set(
         &self,
         room_id: &RoomId,
diff --git a/src/database/sending.rs b/src/database/sending.rs
index a07192e8fb34cda2ec2b95a1a95674ca1313e440..7d7a44aad6c7d8e5bb4ab8db189d7d5752e95fe5 100644
--- a/src/database/sending.rs
+++ b/src/database/sending.rs
@@ -10,7 +10,6 @@
     appservice_server, database::pusher, server_server, utils, Database, Error, PduEvent, Result,
 };
 use federation::transactions::send_transaction_message;
-use log::{error, warn};
 use ring::digest;
 use rocket::futures::{
     channel::mpsc,
@@ -34,6 +33,7 @@
     select,
     sync::{RwLock, Semaphore},
 };
+use tracing::{error, warn};
 
 use super::abstraction::Tree;
 
@@ -45,6 +45,7 @@ pub enum OutgoingKind {
 }
 
 impl OutgoingKind {
+    #[tracing::instrument(skip(self))]
     pub fn get_prefix(&self) -> Vec<u8> {
         let mut prefix = match self {
             OutgoingKind::Appservice(server) => {
@@ -80,10 +81,10 @@ pub enum SendingEventType {
 pub struct Sending {
     /// The state for a given state hash.
     pub(super) servername_educount: Arc<dyn Tree>, // EduCount: Count of last EDU sync
-    pub(super) servernamepduids: Arc<dyn Tree>, // ServernamePduId = (+ / $)SenderKey / ServerName / UserId + PduId
-    pub(super) servercurrentevents: Arc<dyn Tree>, // ServerCurrentEvents = (+ / $)ServerName / UserId + PduId / (*)EduEvent
+    pub(super) servernameevent_data: Arc<dyn Tree>, // ServernameEvent = (+ / $)SenderKey / ServerName / UserId + PduId / * (for edus), Data = EDU content
+    pub(super) servercurrentevent_data: Arc<dyn Tree>, // ServerCurrentEvents = (+ / $)ServerName / UserId + PduId / * (for edus), Data = EDU content
     pub(super) maximum_requests: Arc<Semaphore>,
-    pub sender: mpsc::UnboundedSender<Vec<u8>>,
+    pub sender: mpsc::UnboundedSender<(Vec<u8>, Vec<u8>)>,
 }
 
 enum TransactionStatus {
@@ -96,7 +97,7 @@ impl Sending {
     pub fn start_handler(
         &self,
         db: Arc<RwLock<Database>>,
-        mut receiver: mpsc::UnboundedReceiver<Vec<u8>>,
+        mut receiver: mpsc::UnboundedReceiver<(Vec<u8>, Vec<u8>)>,
     ) {
         tokio::spawn(async move {
             let mut futures = FuturesUnordered::new();
@@ -108,16 +109,15 @@ pub fn start_handler(
 
             let guard = db.read().await;
 
-            for (key, outgoing_kind, event) in
-                guard
-                    .sending
-                    .servercurrentevents
-                    .iter()
-                    .filter_map(|(key, _)| {
-                        Self::parse_servercurrentevent(&key)
-                            .ok()
-                            .map(|(k, e)| (key, k, e))
-                    })
+            for (key, outgoing_kind, event) in guard
+                .sending
+                .servercurrentevent_data
+                .iter()
+                .filter_map(|(key, v)| {
+                    Self::parse_servercurrentevent(&key, v)
+                        .ok()
+                        .map(|(k, e)| (key, k, e))
+                })
             {
                 let entry = initial_transactions
                     .entry(outgoing_kind.clone())
@@ -128,7 +128,7 @@ pub fn start_handler(
                         "Dropping some current events: {:?} {:?} {:?}",
                         key, outgoing_kind, event
                     );
-                    guard.sending.servercurrentevents.remove(&key).unwrap();
+                    guard.sending.servercurrentevent_data.remove(&key).unwrap();
                     continue;
                 }
 
@@ -155,17 +155,17 @@ pub fn start_handler(
                                 let guard = db.read().await;
 
                                 let prefix = outgoing_kind.get_prefix();
-                                for (key, _) in guard.sending.servercurrentevents
+                                for (key, _) in guard.sending.servercurrentevent_data
                                     .scan_prefix(prefix.clone())
                                 {
-                                    guard.sending.servercurrentevents.remove(&key).unwrap();
+                                    guard.sending.servercurrentevent_data.remove(&key).unwrap();
                                 }
 
                                 // Find events that have been added since starting the last request
-                                let new_events = guard.sending.servernamepduids
+                                let new_events = guard.sending.servernameevent_data
                                     .scan_prefix(prefix.clone())
-                                    .filter_map(|(k, _)| {
-                                        Self::parse_servercurrentevent(&k).ok().map(|ev| (ev, k))
+                                    .filter_map(|(k, v)| {
+                                        Self::parse_servercurrentevent(&k, v).ok().map(|ev| (ev, k))
                                     })
                                     .take(30)
                                     .collect::<Vec<_>>();
@@ -174,9 +174,10 @@ pub fn start_handler(
 
                                 if !new_events.is_empty() {
                                     // Insert pdus we found
-                                    for (_, key) in &new_events {
-                                        guard.sending.servercurrentevents.insert(&key, &[]).unwrap();
-                                        guard.sending.servernamepduids.remove(&key).unwrap();
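+                                    // EDUs carry their serialized content in the value;
+                                    // PDUs keep an empty value since the key already ends in the pdu id.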
+                                    for (e, key) in &new_events {
+                                        let value = if let SendingEventType::Edu(value) = &e.1 { &**value } else { &[] };
+                                        guard.sending.servercurrentevent_data.insert(&key, value).unwrap();
+                                        guard.sending.servernameevent_data.remove(&key).unwrap();
                                     }
 
                                     drop(guard);
@@ -204,8 +205,8 @@ pub fn start_handler(
                             }
                         };
                     },
-                    Some(key) = receiver.next() => {
-                        if let Ok((outgoing_kind, event)) = Self::parse_servercurrentevent(&key) {
+                    Some((key, value)) = receiver.next() => {
+                        if let Ok((outgoing_kind, event)) = Self::parse_servercurrentevent(&key, value) {
                             let guard = db.read().await;
 
                             if let Ok(Some(events)) = Self::select_events(
@@ -223,6 +224,7 @@ pub fn start_handler(
         });
     }
 
+    #[tracing::instrument(skip(outgoing_kind, new_events, current_transaction_status, db))]
     fn select_events(
         outgoing_kind: &OutgoingKind,
         new_events: Vec<(SendingEventType, Vec<u8>)>, // Events we want to send: event and full key
@@ -265,18 +267,25 @@ fn select_events(
 
         if retry {
             // We retry the previous transaction
-            for (key, _) in db.sending.servercurrentevents.scan_prefix(prefix) {
-                if let Ok((_, e)) = Self::parse_servercurrentevent(&key) {
+            for (key, value) in db.sending.servercurrentevent_data.scan_prefix(prefix) {
+                if let Ok((_, e)) = Self::parse_servercurrentevent(&key, value) {
                     events.push(e);
                 }
             }
         } else {
             for (e, full_key) in new_events {
-                db.sending.servercurrentevents.insert(&full_key, &[])?;
+                let value = if let SendingEventType::Edu(value) = &e {
+                    &**value
+                } else {
+                    &[][..]
+                };
+                db.sending
+                    .servercurrentevent_data
+                    .insert(&full_key, value)?;
 
                 // If it was a PDU we have to unqueue it
                 // TODO: don't try to unqueue EDUs
-                db.sending.servernamepduids.remove(&full_key)?;
+                db.sending.servernameevent_data.remove(&full_key)?;
 
                 events.push(e);
             }
@@ -295,6 +304,7 @@ fn select_events(
         Ok(Some(events))
     }
 
+    #[tracing::instrument(skip(db, server))]
     pub fn select_edus(db: &Database, server: &ServerName) -> Result<(Vec<Vec<u8>>, u64)> {
         // u64: count of last edu
         let since = db
@@ -371,37 +381,36 @@ pub fn select_edus(db: &Database, server: &ServerName) -> Result<(Vec<Vec<u8>>,
         Ok((events, max_edu_count))
     }
 
-    #[tracing::instrument(skip(self))]
+    #[tracing::instrument(skip(self, pdu_id, senderkey))]
     pub fn send_push_pdu(&self, pdu_id: &[u8], senderkey: Vec<u8>) -> Result<()> {
         let mut key = b"$".to_vec();
         key.extend_from_slice(&senderkey);
         key.push(0xff);
         key.extend_from_slice(pdu_id);
-        self.servernamepduids.insert(&key, b"")?;
-        self.sender.unbounded_send(key).unwrap();
+        self.servernameevent_data.insert(&key, &[])?;
+        self.sender.unbounded_send((key, vec![])).unwrap();
 
         Ok(())
     }
 
-    #[tracing::instrument(skip(self))]
+    #[tracing::instrument(skip(self, server, pdu_id))]
     pub fn send_pdu(&self, server: &ServerName, pdu_id: &[u8]) -> Result<()> {
         let mut key = server.as_bytes().to_vec();
         key.push(0xff);
         key.extend_from_slice(pdu_id);
-        self.servernamepduids.insert(&key, b"")?;
-        self.sender.unbounded_send(key).unwrap();
+        self.servernameevent_data.insert(&key, &[])?;
+        self.sender.unbounded_send((key, vec![])).unwrap();
 
         Ok(())
     }
 
-    #[tracing::instrument(skip(self))]
-    pub fn send_reliable_edu(&self, server: &ServerName, serialized: &[u8]) -> Result<()> {
+    #[tracing::instrument(skip(self, server, serialized))]
+    pub fn send_reliable_edu(&self, server: &ServerName, serialized: Vec<u8>) -> Result<()> {
         let mut key = server.as_bytes().to_vec();
         key.push(0xff);
         key.push(b'*');
-        key.extend_from_slice(serialized);
-        self.servernamepduids.insert(&key, b"")?;
-        self.sender.unbounded_send(key).unwrap();
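+        // The EDU payload is now stored as the tree value (and sent through the
+        // channel) instead of being appended to the key.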
+        self.servernameevent_data.insert(&key, &serialized)?;
+        self.sender.unbounded_send((key, serialized)).unwrap();
 
         Ok(())
     }
@@ -412,13 +421,13 @@ pub fn send_pdu_appservice(&self, appservice_id: &str, pdu_id: &[u8]) -> Result<
         key.extend_from_slice(appservice_id.as_bytes());
         key.push(0xff);
         key.extend_from_slice(pdu_id);
-        self.servernamepduids.insert(&key, b"")?;
-        self.sender.unbounded_send(key).unwrap();
+        self.servernameevent_data.insert(&key, &[])?;
+        self.sender.unbounded_send((key, vec![])).unwrap();
 
         Ok(())
     }
 
-    #[tracing::instrument]
+    #[tracing::instrument(skip(keys))]
     fn calculate_hash(keys: &[&[u8]]) -> Vec<u8> {
         // We only hash the pdu's event ids, not the whole pdu
         let bytes = keys.join(&0xff);
@@ -426,7 +435,7 @@ fn calculate_hash(keys: &[&[u8]]) -> Vec<u8> {
         hash.as_ref().to_owned()
     }
 
-    #[tracing::instrument(skip(db))]
+    #[tracing::instrument(skip(db, events, kind))]
     async fn handle_events(
         kind: OutgoingKind,
         events: Vec<SendingEventType>,
@@ -448,7 +457,7 @@ async fn handle_events(
                                     (
                                         kind.clone(),
                                         Error::bad_database(
-                                            "[Appservice] Event in servernamepduids not found in db.",
+                                            "[Appservice] Event in servernameevent_data not found in db.",
                                         ),
                                     )
                                 })?
@@ -505,7 +514,7 @@ async fn handle_events(
                                         (
                                             kind.clone(),
                                             Error::bad_database(
-                                                "[Push] Event in servernamepduids not found in db.",
+                                                "[Push] Event in servernameevent_data not found in db.",
                                             ),
                                         )
                                     })?,
@@ -590,29 +599,25 @@ async fn handle_events(
                     match event {
                         SendingEventType::Pdu(pdu_id) => {
                             // TODO: check room version and remove event_id if needed
-                            pdu_jsons.push(serde_json::from_str(
-                                PduEvent::convert_to_outgoing_federation_event(
-                                    db.rooms
-                                        .get_pdu_json_from_id(&pdu_id)
-                                        .map_err(|e| (OutgoingKind::Normal(server.clone()), e))?
-                                        .ok_or_else(|| {
-                                            (
-                                                OutgoingKind::Normal(server.clone()),
-                                                Error::bad_database(
-                                                    "[Normal] Event in servernamepduids not found in db.",
-                                                ),
-                                            )
-                                        })?,
-                                )
-                                .json()
-                                .get(),
-                            )
-                            .expect("Raw<..> is always valid"));
+                            let raw = PduEvent::convert_to_outgoing_federation_event(
+                                db.rooms
+                                    .get_pdu_json_from_id(&pdu_id)
+                                    .map_err(|e| (OutgoingKind::Normal(server.clone()), e))?
+                                    .ok_or_else(|| {
+                                        (
+                                            OutgoingKind::Normal(server.clone()),
+                                            Error::bad_database(
+                                                "[Normal] Event in servernameevent_data not found in db.",
+                                            ),
+                                        )
+                                    })?,
+                            );
+                            pdu_jsons.push(raw);
                         }
                         SendingEventType::Edu(edu) => {
-                            edu_jsons.push(
-                                serde_json::from_slice(edu).expect("Raw<..> is always valid"),
-                            );
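+                            // Skip EDUs that fail to deserialize instead of panicking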
+                            if let Ok(raw) = serde_json::from_slice(edu) {
+                                edu_jsons.push(raw);
+                            }
                         }
                     }
                 }
@@ -658,7 +663,11 @@ async fn handle_events(
         }
     }
 
-    fn parse_servercurrentevent(key: &[u8]) -> Result<(OutgoingKind, SendingEventType)> {
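+    // `value` holds the serialized EDU for '*'-marked entries and is empty for PDUs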
+    #[tracing::instrument(skip(key))]
+    fn parse_servercurrentevent(
+        key: &[u8],
+        value: Vec<u8>,
+    ) -> Result<(OutgoingKind, SendingEventType)> {
         // Appservices start with a plus
         Ok::<_, Error>(if key.starts_with(b"+") {
             let mut parts = key[1..].splitn(2, |&b| b == 0xff);
@@ -676,7 +685,7 @@ fn parse_servercurrentevent(key: &[u8]) -> Result<(OutgoingKind, SendingEventTyp
                     Error::bad_database("Invalid server string in server_currenttransaction")
                 })?),
                 if event.starts_with(b"*") {
-                    SendingEventType::Edu(event[1..].to_vec())
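+                    // EDU payloads are read from the stored value rather than from the key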
+                    SendingEventType::Edu(value)
                 } else {
                     SendingEventType::Pdu(event.to_vec())
                 },
@@ -694,7 +703,7 @@ fn parse_servercurrentevent(key: &[u8]) -> Result<(OutgoingKind, SendingEventTyp
             (
                 OutgoingKind::Push(user.to_vec(), pushkey.to_vec()),
                 if event.starts_with(b"*") {
-                    SendingEventType::Edu(event[1..].to_vec())
+                    SendingEventType::Edu(value)
                 } else {
                     SendingEventType::Pdu(event.to_vec())
                 },
@@ -723,7 +732,7 @@ fn parse_servercurrentevent(key: &[u8]) -> Result<(OutgoingKind, SendingEventTyp
         })
     }
 
-    #[tracing::instrument(skip(self, globals))]
+    #[tracing::instrument(skip(self, globals, destination, request))]
     pub async fn send_federation_request<T: OutgoingRequest>(
         &self,
         globals: &crate::database::globals::Globals,
@@ -740,7 +749,7 @@ pub async fn send_federation_request<T: OutgoingRequest>(
         response
     }
 
-    #[tracing::instrument(skip(self, globals))]
+    #[tracing::instrument(skip(self, globals, registration, request))]
     pub async fn send_appservice_request<T: OutgoingRequest>(
         &self,
         globals: &crate::database::globals::Globals,
diff --git a/src/database/users.rs b/src/database/users.rs
index cd46c458e42c8da75b455d0e5705351b753fc9fe..f501ec30327adfe468ec53ba9f5948916d9cedb7 100644
--- a/src/database/users.rs
+++ b/src/database/users.rs
@@ -8,6 +8,7 @@
     DeviceId, DeviceKeyAlgorithm, DeviceKeyId, MilliSecondsSinceUnixEpoch, UInt, UserId,
 };
 use std::{collections::BTreeMap, convert::TryFrom, mem, sync::Arc};
+use tracing::warn;
 
 use super::abstraction::Tree;
 
@@ -34,11 +35,13 @@ pub struct Users {
 
 impl Users {
     /// Check if a user has an account on this homeserver.
+    #[tracing::instrument(skip(self, user_id))]
     pub fn exists(&self, user_id: &UserId) -> Result<bool> {
         Ok(self.userid_password.get(user_id.as_bytes())?.is_some())
     }
 
     /// Check if account is deactivated
+    #[tracing::instrument(skip(self, user_id))]
     pub fn is_deactivated(&self, user_id: &UserId) -> Result<bool> {
         Ok(self
             .userid_password
@@ -51,17 +54,20 @@ pub fn is_deactivated(&self, user_id: &UserId) -> Result<bool> {
     }
 
     /// Create a new user account on this homeserver.
+    #[tracing::instrument(skip(self, user_id, password))]
     pub fn create(&self, user_id: &UserId, password: Option<&str>) -> Result<()> {
         self.set_password(user_id, password)?;
         Ok(())
     }
 
     /// Returns the number of users registered on this server.
+    #[tracing::instrument(skip(self))]
     pub fn count(&self) -> Result<usize> {
         Ok(self.userid_password.iter().count())
     }
 
     /// Find out which user an access token belongs to.
+    #[tracing::instrument(skip(self, token))]
     pub fn find_from_token(&self, token: &str) -> Result<Option<(UserId, String)>> {
         self.token_userdeviceid
             .get(token.as_bytes())?
@@ -89,6 +95,7 @@ pub fn find_from_token(&self, token: &str) -> Result<Option<(UserId, String)>> {
     }
 
     /// Returns an iterator over all users on this homeserver.
+    #[tracing::instrument(skip(self))]
     pub fn iter(&self) -> impl Iterator<Item = Result<UserId>> + '_ {
         self.userid_password.iter().map(|(bytes, _)| {
             UserId::try_from(utils::string_from_bytes(&bytes).map_err(|_| {
@@ -99,6 +106,7 @@ pub fn iter(&self) -> impl Iterator<Item = Result<UserId>> + '_ {
     }
 
     /// Returns the password hash for the given user.
+    #[tracing::instrument(skip(self, user_id))]
     pub fn password_hash(&self, user_id: &UserId) -> Result<Option<String>> {
         self.userid_password
             .get(user_id.as_bytes())?
@@ -110,6 +118,7 @@ pub fn password_hash(&self, user_id: &UserId) -> Result<Option<String>> {
     }
 
     /// Hash and set the user's password to the Argon2 hash
+    #[tracing::instrument(skip(self, user_id, password))]
     pub fn set_password(&self, user_id: &UserId, password: Option<&str>) -> Result<()> {
         if let Some(password) = password {
             if let Ok(hash) = utils::calculate_hash(&password) {
@@ -129,6 +138,7 @@ pub fn set_password(&self, user_id: &UserId, password: Option<&str>) -> Result<(
     }
 
     /// Returns the displayname of a user on this homeserver.
+    #[tracing::instrument(skip(self, user_id))]
     pub fn displayname(&self, user_id: &UserId) -> Result<Option<String>> {
         self.userid_displayname
             .get(user_id.as_bytes())?
@@ -140,6 +150,7 @@ pub fn displayname(&self, user_id: &UserId) -> Result<Option<String>> {
     }
 
     /// Sets a new displayname or removes it if displayname is None. You still need to notify all rooms of this change.
+    #[tracing::instrument(skip(self, user_id, displayname))]
     pub fn set_displayname(&self, user_id: &UserId, displayname: Option<String>) -> Result<()> {
         if let Some(displayname) = displayname {
             self.userid_displayname
@@ -152,6 +163,7 @@ pub fn set_displayname(&self, user_id: &UserId, displayname: Option<String>) ->
     }
 
     /// Get the avatar_url of a user.
+    #[tracing::instrument(skip(self, user_id))]
     pub fn avatar_url(&self, user_id: &UserId) -> Result<Option<MxcUri>> {
         self.userid_avatarurl
             .get(user_id.as_bytes())?
@@ -164,6 +176,7 @@ pub fn avatar_url(&self, user_id: &UserId) -> Result<Option<MxcUri>> {
     }
 
     /// Sets a new avatar_url or removes it if avatar_url is None.
+    #[tracing::instrument(skip(self, user_id, avatar_url))]
     pub fn set_avatar_url(&self, user_id: &UserId, avatar_url: Option<MxcUri>) -> Result<()> {
         if let Some(avatar_url) = avatar_url {
             self.userid_avatarurl
@@ -176,6 +189,7 @@ pub fn set_avatar_url(&self, user_id: &UserId, avatar_url: Option<MxcUri>) -> Re
     }
 
     /// Get the blurhash of a user.
+    #[tracing::instrument(skip(self, user_id))]
     pub fn blurhash(&self, user_id: &UserId) -> Result<Option<String>> {
         self.userid_blurhash
             .get(user_id.as_bytes())?
@@ -189,6 +203,7 @@ pub fn blurhash(&self, user_id: &UserId) -> Result<Option<String>> {
     }
 
     /// Sets a new blurhash or removes it if blurhash is None.
+    #[tracing::instrument(skip(self, user_id, blurhash))]
     pub fn set_blurhash(&self, user_id: &UserId, blurhash: Option<String>) -> Result<()> {
         if let Some(blurhash) = blurhash {
             self.userid_blurhash
@@ -201,6 +216,7 @@ pub fn set_blurhash(&self, user_id: &UserId, blurhash: Option<String>) -> Result
     }
 
     /// Adds a new device to a user.
+    #[tracing::instrument(skip(self, user_id, device_id, token, initial_device_display_name))]
     pub fn create_device(
         &self,
         user_id: &UserId,
@@ -235,6 +251,7 @@ pub fn create_device(
     }
 
     /// Removes a device from a user.
+    #[tracing::instrument(skip(self, user_id, device_id))]
     pub fn remove_device(&self, user_id: &UserId, device_id: &DeviceId) -> Result<()> {
         let mut userdeviceid = user_id.as_bytes().to_vec();
         userdeviceid.push(0xff);
@@ -265,6 +282,7 @@ pub fn remove_device(&self, user_id: &UserId, device_id: &DeviceId) -> Result<()
     }
 
     /// Returns an iterator over all device ids of this user.
+    #[tracing::instrument(skip(self, user_id))]
     pub fn all_device_ids<'a>(
         &'a self,
         user_id: &UserId,
@@ -287,6 +305,7 @@ pub fn all_device_ids<'a>(
     }
 
     /// Replaces the access token of one device.
+    #[tracing::instrument(skip(self, user_id, device_id, token))]
     pub fn set_token(&self, user_id: &UserId, device_id: &DeviceId, token: &str) -> Result<()> {
         let mut userdeviceid = user_id.as_bytes().to_vec();
         userdeviceid.push(0xff);
@@ -310,6 +329,14 @@ pub fn set_token(&self, user_id: &UserId, device_id: &DeviceId, token: &str) ->
         Ok(())
     }
 
+    #[tracing::instrument(skip(
+        self,
+        user_id,
+        device_id,
+        one_time_key_key,
+        one_time_key_value,
+        globals
+    ))]
     pub fn add_one_time_key(
         &self,
         user_id: &UserId,
@@ -346,7 +373,7 @@ pub fn add_one_time_key(
         Ok(())
     }
 
-    #[tracing::instrument(skip(self))]
+    #[tracing::instrument(skip(self, user_id))]
     pub fn last_one_time_keys_update(&self, user_id: &UserId) -> Result<u64> {
         self.userid_lastonetimekeyupdate
             .get(&user_id.as_bytes())?
@@ -358,6 +385,7 @@ pub fn last_one_time_keys_update(&self, user_id: &UserId) -> Result<u64> {
             .unwrap_or(Ok(0))
     }
 
+    #[tracing::instrument(skip(self, user_id, device_id, key_algorithm, globals))]
     pub fn take_one_time_key(
         &self,
         user_id: &UserId,
@@ -397,7 +425,7 @@ pub fn take_one_time_key(
             .transpose()
     }
 
-    #[tracing::instrument(skip(self))]
+    #[tracing::instrument(skip(self, user_id, device_id))]
     pub fn count_one_time_keys(
         &self,
         user_id: &UserId,
@@ -430,6 +458,7 @@ pub fn count_one_time_keys(
         Ok(counts)
     }
 
+    #[tracing::instrument(skip(self, user_id, device_id, device_keys, rooms, globals))]
     pub fn add_device_keys(
         &self,
         user_id: &UserId,
@@ -452,6 +481,14 @@ pub fn add_device_keys(
         Ok(())
     }
 
+    #[tracing::instrument(skip(
+        self,
+        master_key,
+        self_signing_key,
+        user_signing_key,
+        rooms,
+        globals
+    ))]
     pub fn add_cross_signing_keys(
         &self,
         user_id: &UserId,
@@ -552,6 +589,7 @@ pub fn add_cross_signing_keys(
         Ok(())
     }
 
+    #[tracing::instrument(skip(self, target_id, key_id, signature, sender_id, rooms, globals))]
     pub fn sign_key(
         &self,
         target_id: &UserId,
@@ -595,7 +633,7 @@ pub fn sign_key(
         Ok(())
     }
 
-    #[tracing::instrument(skip(self))]
+    #[tracing::instrument(skip(self, user_or_room_id, from, to))]
     pub fn keys_changed<'a>(
         &'a self,
         user_or_room_id: &str,
@@ -608,9 +646,24 @@ pub fn keys_changed<'a>(
         let mut start = prefix.clone();
         start.extend_from_slice(&(from + 1).to_be_bytes());
 
+        let to = to.unwrap_or(u64::MAX);
+
         self.keychangeid_userid
             .iter_from(&start, false)
-            .take_while(move |(k, _)| k.starts_with(&prefix))
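+            // Stop once the key leaves the prefix or its count part exceeds `to`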
+            .take_while(move |(k, _)| {
+                k.starts_with(&prefix)
+                    && if let Some(current) = k.splitn(2, |&b| b == 0xff).nth(1) {
+                        if let Ok(c) = utils::u64_from_bytes(current) {
+                            c <= to
+                        } else {
+                            warn!("BadDatabase: Could not parse keychangeid_userid bytes");
+                            false
+                        }
+                    } else {
+                        warn!("BadDatabase: Could not parse keychangeid_userid");
+                        false
+                    }
+            })
             .map(|(_, bytes)| {
                 UserId::try_from(utils::string_from_bytes(&bytes).map_err(|_| {
                     Error::bad_database("User ID in devicekeychangeid_userid is invalid unicode.")
@@ -619,6 +672,7 @@ pub fn keys_changed<'a>(
             })
     }
 
+    #[tracing::instrument(skip(self, user_id, rooms, globals))]
     fn mark_device_key_update(
         &self,
         user_id: &UserId,
@@ -650,6 +704,7 @@ fn mark_device_key_update(
         Ok(())
     }
 
+    #[tracing::instrument(skip(self, user_id, device_id))]
     pub fn get_device_keys(
         &self,
         user_id: &UserId,
@@ -666,6 +721,7 @@ pub fn get_device_keys(
         })
     }
 
+    #[tracing::instrument(skip(self, user_id, allowed_signatures))]
     pub fn get_master_key<F: Fn(&UserId) -> bool>(
         &self,
         user_id: &UserId,
@@ -693,6 +749,7 @@ pub fn get_master_key<F: Fn(&UserId) -> bool>(
             })
     }
 
+    #[tracing::instrument(skip(self, user_id, allowed_signatures))]
     pub fn get_self_signing_key<F: Fn(&UserId) -> bool>(
         &self,
         user_id: &UserId,
@@ -720,6 +777,7 @@ pub fn get_self_signing_key<F: Fn(&UserId) -> bool>(
             })
     }
 
+    #[tracing::instrument(skip(self, user_id))]
     pub fn get_user_signing_key(&self, user_id: &UserId) -> Result<Option<CrossSigningKey>> {
         self.userid_usersigningkeyid
             .get(user_id.as_bytes())?
@@ -732,6 +790,15 @@ pub fn get_user_signing_key(&self, user_id: &UserId) -> Result<Option<CrossSigni
             })
     }
 
+    #[tracing::instrument(skip(
+        self,
+        sender,
+        target_user_id,
+        target_device_id,
+        event_type,
+        content,
+        globals
+    ))]
     pub fn add_to_device_event(
         &self,
         sender: &UserId,
@@ -759,7 +826,7 @@ pub fn add_to_device_event(
         Ok(())
     }
 
-    #[tracing::instrument(skip(self))]
+    #[tracing::instrument(skip(self, user_id, device_id))]
     pub fn get_to_device_events(
         &self,
         user_id: &UserId,
@@ -782,7 +849,7 @@ pub fn get_to_device_events(
         Ok(events)
     }
 
-    #[tracing::instrument(skip(self))]
+    #[tracing::instrument(skip(self, user_id, device_id, until))]
     pub fn remove_to_device_events(
         &self,
         user_id: &UserId,
@@ -817,6 +884,7 @@ pub fn remove_to_device_events(
         Ok(())
     }
 
+    #[tracing::instrument(skip(self, user_id, device_id, device))]
     pub fn update_device_metadata(
         &self,
         user_id: &UserId,
@@ -842,6 +910,7 @@ pub fn update_device_metadata(
     }
 
     /// Get device metadata.
+    #[tracing::instrument(skip(self, user_id, device_id))]
     pub fn get_device_metadata(
         &self,
         user_id: &UserId,
@@ -860,6 +929,7 @@ pub fn get_device_metadata(
             })
     }
 
+    #[tracing::instrument(skip(self, user_id))]
     pub fn get_devicelist_version(&self, user_id: &UserId) -> Result<Option<u64>> {
         self.userid_devicelistversion
             .get(user_id.as_bytes())?
@@ -870,6 +940,7 @@ pub fn get_devicelist_version(&self, user_id: &UserId) -> Result<Option<u64>> {
             })
     }
 
+    #[tracing::instrument(skip(self, user_id))]
     pub fn all_devices_metadata<'a>(
         &'a self,
         user_id: &UserId,
@@ -886,6 +957,7 @@ pub fn all_devices_metadata<'a>(
     }
 
     /// Deactivate account
+    #[tracing::instrument(skip(self, user_id))]
     pub fn deactivate_account(&self, user_id: &UserId) -> Result<()> {
         // Remove all associated devices
         for device_id in self.all_device_ids(user_id) {
diff --git a/src/error.rs b/src/error.rs
index f62bdee07564df381bb883600c8b37c7489e742b..24e52ecc337b9677967355a3822a0bcfd94ba664 100644
--- a/src/error.rs
+++ b/src/error.rs
@@ -1,4 +1,3 @@
-use log::warn;
 use ruma::{
     api::client::{
         error::{Error as RumaError, ErrorKind},
@@ -7,17 +6,18 @@
     ServerName,
 };
 use thiserror::Error;
+use tracing::warn;
 
 #[cfg(feature = "conduit_bin")]
 use {
     crate::RumaResponse,
     http::StatusCode,
-    log::error,
     rocket::{
         response::{self, Responder},
         Request,
     },
     ruma::api::client::r0::uiaa::UiaaResponse,
+    tracing::error,
 };
 
 pub type Result<T> = std::result::Result<T, Error>;
@@ -42,6 +42,9 @@ pub enum Error {
         #[from]
         source: rusqlite::Error,
     },
+    #[cfg(feature = "heed")]
+    #[error("There was a problem with the connection to the heed database: {error}")]
+    HeedError { error: String },
     #[error("Could not generate an image.")]
     ImageError {
         #[from]
diff --git a/src/main.rs b/src/main.rs
index a5face78b6589745ea0a21d35ab19846e3c67dea..9f6cced41cefe10521708ef91c6f63c66934e3ed 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -17,6 +17,7 @@
 use database::Config;
 pub use database::Database;
 pub use error::{Error, Result};
+use opentelemetry::trace::Tracer;
 pub use pdu::PduEvent;
 pub use rocket::State;
 use ruma::api::client::error::ErrorKind;
@@ -31,8 +32,7 @@
     routes, Request,
 };
 use tokio::sync::RwLock;
-use tracing::span;
-use tracing_subscriber::{prelude::*, Registry};
+use tracing_subscriber::{prelude::*, EnvFilter};
 
 fn setup_rocket(config: Figment, data: Arc<RwLock<Database>>) -> rocket::Rocket<rocket::Build> {
     rocket::custom(config)
@@ -201,38 +201,57 @@ async fn main() {
         .extract::<Config>()
         .expect("It looks like your config is invalid. Please take a look at the error");
 
-    let mut _span: Option<span::Span> = None;
-    let mut _enter: Option<span::Entered<'_>> = None;
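+    // Defer startup into an async block so it can run under whichever tracing subscriber is set up below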
+    let start = async {
+        config.warn_deprecated();
+
+        let db = Database::load_or_create(&config)
+            .await
+            .expect("config is valid");
+
+        let rocket = setup_rocket(raw_config, Arc::clone(&db))
+            .ignite()
+            .await
+            .unwrap();
+
+        Database::start_on_shutdown_tasks(db, rocket.shutdown()).await;
+
+        rocket.launch().await.unwrap();
+    };
 
     if config.allow_jaeger {
-        let (tracer, _uninstall) = opentelemetry_jaeger::new_pipeline()
+        let tracer = opentelemetry_jaeger::new_pipeline()
             .with_service_name("conduit")
-            .install()
+            .install_simple()
             .unwrap();
-        let telemetry = tracing_opentelemetry::layer().with_tracer(tracer);
-        Registry::default().with(telemetry).try_init().unwrap();
 
-        _span = Some(span!(tracing::Level::INFO, "app_start", work_units = 2));
-        _enter = Some(_span.as_ref().unwrap().enter());
+        let span = tracer.start("conduit");
+        start.await;
+        drop(span);
     } else {
         std::env::set_var("RUST_LOG", &config.log);
-        tracing_subscriber::fmt::init();
-    }
-
-    config.warn_deprecated();
 
-    let db = Database::load_or_create(&config)
-        .await
-        .expect("config is valid");
+        let registry = tracing_subscriber::Registry::default();
+        if config.tracing_flame {
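+            // tracing-flame writes folded span samples to ./tracing.folded for flamegraph tooling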
+            let (flame_layer, _guard) =
+                tracing_flame::FlameLayer::with_file("./tracing.folded").unwrap();
+            let flame_layer = flame_layer.with_empty_samples(false);
 
-    let rocket = setup_rocket(raw_config, Arc::clone(&db))
-        .ignite()
-        .await
-        .unwrap();
+            let filter_layer = EnvFilter::new("trace,h2=off");
 
-    Database::start_on_shutdown_tasks(db, rocket.shutdown()).await;
+            let subscriber = registry.with(filter_layer).with(flame_layer);
+            tracing::subscriber::set_global_default(subscriber).unwrap();
+            start.await;
+        } else {
+            let fmt_layer = tracing_subscriber::fmt::Layer::new();
+            let filter_layer = EnvFilter::try_from_default_env()
+                .or_else(|_| EnvFilter::try_new("info"))
+                .unwrap();
 
-    rocket.launch().await.unwrap();
+            let subscriber = registry.with(filter_layer).with(fmt_layer);
+            tracing::subscriber::set_global_default(subscriber).unwrap();
+            start.await;
+        }
+    }
 }
 
 #[catch(404)]
diff --git a/src/pdu.rs b/src/pdu.rs
index f8dddd9e350165643b63b4263c1bf0b812259196..00eda5b196ad5fee513f200fca2b740d7a7f167b 100644
--- a/src/pdu.rs
+++ b/src/pdu.rs
@@ -1,5 +1,4 @@
 use crate::Error;
-use log::error;
 use ruma::{
     events::{
         pdu::EventHash, room::member::MemberEventContent, AnyEphemeralRoomEvent,
@@ -13,6 +12,7 @@
 use serde::{Deserialize, Serialize};
 use serde_json::json;
 use std::{cmp::Ordering, collections::BTreeMap, convert::TryFrom};
+use tracing::error;
 
 #[derive(Clone, Deserialize, Serialize, Debug)]
 pub struct PduEvent {
diff --git a/src/ruma_wrapper.rs b/src/ruma_wrapper.rs
index a4beac64ef2c8ad47722edd70b879b285f5ae7ca..21214399b81f96eeca16fa9384dc5fb23f4febf2 100644
--- a/src/ruma_wrapper.rs
+++ b/src/ruma_wrapper.rs
@@ -10,7 +10,6 @@
 #[cfg(feature = "conduit_bin")]
 use {
     crate::server_server,
-    log::{debug, warn},
     rocket::{
         data::{self, ByteUnit, Data, FromData},
         http::Status,
@@ -23,6 +22,7 @@
     std::collections::BTreeMap,
     std::convert::TryFrom,
     std::io::Cursor,
+    tracing::{debug, warn},
 };
 
 /// This struct converts rocket requests into ruma structs by converting them into http requests
@@ -45,6 +45,7 @@ impl<'a, T: Outgoing> FromData<'a> for Ruma<T>
 {
     type Error = ();
 
+    #[tracing::instrument(skip(request, data))]
     async fn from_data(
         request: &'a Request<'_>,
         data: Data<'a>,
@@ -256,7 +257,10 @@ async fn from_data(
                     match ruma::signatures::verify_json(&pub_key_map, &request_map) {
                         Ok(()) => (None, None, Some(origin), false),
                         Err(e) => {
-                            warn!("Failed to verify json request from {}: {}", origin, e);
+                            warn!(
+                                "Failed to verify json request from {}: {}\n{:?}",
+                                origin, e, request_map
+                            );
 
                             if request.uri().to_string().contains('@') {
                                 warn!("Request uri contained '@' character. Make sure your reverse proxy gives Conduit the raw uri (apache: use nocanon)");
diff --git a/src/server_server.rs b/src/server_server.rs
index f725dce4e0dba991cdec6e47ba67d82b4101cac5..232c5d46e82ee232dce0039878ab26709a0c7a0e 100644
--- a/src/server_server.rs
+++ b/src/server_server.rs
@@ -5,7 +5,6 @@
 };
 use get_profile_information::v1::ProfileField;
 use http::header::{HeaderValue, AUTHORIZATION, HOST};
-use log::{debug, error, info, trace, warn};
 use regex::Regex;
 use rocket::response::content::Json;
 use ruma::{
@@ -63,7 +62,8 @@
     sync::{Arc, RwLock},
     time::{Duration, Instant, SystemTime},
 };
-use tokio::sync::Semaphore;
+use tokio::sync::{MutexGuard, Semaphore};
+use tracing::{debug, error, info, trace, warn};
 
 #[cfg(feature = "conduit_bin")]
 use rocket::{get, post, put};
@@ -838,6 +838,7 @@ pub async fn send_transaction_message_route(
 ///     it
 /// 14. Use state resolution to find new room state
 // We use some AsyncRecursiveResult hacks here so we can call this async function recursively
+#[tracing::instrument(skip(value, is_timeline_event, db, pub_key_map))]
 pub fn handle_incoming_pdu<'a>(
     origin: &'a ServerName,
     event_id: &'a EventId,
@@ -1156,6 +1157,18 @@ pub fn handle_incoming_pdu<'a>(
         }
         debug!("Auth check succeeded.");
 
+        // We start looking at the current room state now, so let's lock the room
+
+        let mutex = Arc::clone(
+            db.globals
+                .roomid_mutex
+                .write()
+                .unwrap()
+                .entry(room_id.clone())
+                .or_default(),
+        );
+        let mutex_lock = mutex.lock().await;
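+        // The lock is released further down, once the event has been fully handled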
+
         // Now we calculate the set of extremities this room has after the incoming event has been
         // applied. We start with the previous extremities (aka leaves)
         let mut extremities = db
@@ -1170,8 +1183,8 @@ pub fn handle_incoming_pdu<'a>(
             }
         }
 
-        // Only keep those extremities we don't have in our timeline yet
-        extremities.retain(|id| !matches!(db.rooms.get_non_outlier_pdu_json(id), Ok(Some(_))));
+        // Only keep those extremities that were not referenced yet
+        extremities.retain(|id| !matches!(db.rooms.is_event_referenced(&room_id, id), Ok(true)));
 
         let mut extremity_statehashes = Vec::new();
 
@@ -1301,9 +1314,11 @@ pub fn handle_incoming_pdu<'a>(
                     return Err("State resolution failed, either an event could not be found or deserialization".into());
                 }
             };
+
             state
         };
 
+        debug!("starting soft fail auth check");
         // 13. Check if the event passes auth based on the "current state" of the room, if not "soft fail" it
         let soft_fail = !state_res::event_auth::auth_check(
             &room_version,
@@ -1322,11 +1337,11 @@ pub fn handle_incoming_pdu<'a>(
             pdu_id = Some(
                 append_incoming_pdu(
                     &db,
-                    &room_id,
                     &incoming_pdu,
                     val,
                     extremities,
                     &state_at_incoming_event,
+                    &mutex_lock,
                 )
                 .await
                 .map_err(|_| "Failed to add pdu to db.".to_owned())?,
@@ -1350,6 +1365,7 @@ pub fn handle_incoming_pdu<'a>(
         }
 
         // Event has passed all auth/stateres checks
+        drop(mutex_lock);
         Ok(pdu_id)
     })
 }
@@ -1626,25 +1642,15 @@ pub(crate) async fn fetch_signing_keys(
 
 /// Append the incoming event setting the state snapshot to the state from the
 /// server that sent the event.
-#[tracing::instrument(skip(db))]
+#[tracing::instrument(skip(db, pdu, pdu_json, new_room_leaves, state, _mutex_lock))]
 async fn append_incoming_pdu(
     db: &Database,
-    room_id: &RoomId,
     pdu: &PduEvent,
     pdu_json: CanonicalJsonObject,
     new_room_leaves: HashSet<EventId>,
     state: &StateMap<Arc<PduEvent>>,
+    _mutex_lock: &MutexGuard<'_, ()>, // Take the mutex guard to make sure callers have locked the room
 ) -> Result<Vec<u8>> {
-    let mutex = Arc::clone(
-        db.globals
-            .roomid_mutex
-            .write()
-            .unwrap()
-            .entry(room_id.clone())
-            .or_default(),
-    );
-    let mutex_lock = mutex.lock().await;
-
     // We append to state before appending the pdu, so we don't have a moment in time with the
     // pdu without its state. This is okay because append_pdu can't fail.
     db.rooms
@@ -1657,8 +1663,6 @@ async fn append_incoming_pdu(
         &db,
     )?;
 
-    drop(mutex_lock);
-
     for appservice in db.appservice.iter_all()?.filter_map(|r| r.ok()) {
         if let Some(namespaces) = appservice.1.get("namespaces") {
             let users = namespaces
diff --git a/src/utils.rs b/src/utils.rs
index a4dfe036d2e542a8606ae3c449dda0c832ecb080..60a4e0cf7ee0fa21a288281bc2cffa278ae52b30 100644
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -9,6 +9,7 @@
     time::{SystemTime, UNIX_EPOCH},
 };
 
+#[tracing::instrument]
 pub fn millis_since_unix_epoch() -> u64 {
     SystemTime::now()
         .duration_since(UNIX_EPOCH)
@@ -48,16 +49,19 @@ pub fn generate_keypair() -> Vec<u8> {
 }
 
 /// Parses the bytes into an u64.
+#[tracing::instrument(skip(bytes))]
 pub fn u64_from_bytes(bytes: &[u8]) -> Result<u64, std::array::TryFromSliceError> {
     let array: [u8; 8] = bytes.try_into()?;
     Ok(u64::from_be_bytes(array))
 }
 
 /// Parses the bytes into a string.
+#[tracing::instrument(skip(bytes))]
 pub fn string_from_bytes(bytes: &[u8]) -> Result<String, std::string::FromUtf8Error> {
     String::from_utf8(bytes.to_vec())
 }
 
+#[tracing::instrument(skip(length))]
 pub fn random_string(length: usize) -> String {
     thread_rng()
         .sample_iter(&rand::distributions::Alphanumeric)
@@ -67,6 +71,7 @@ pub fn random_string(length: usize) -> String {
 }
 
 /// Calculate a new hash for the given password
+#[tracing::instrument(skip(password))]
 pub fn calculate_hash(password: &str) -> Result<String, argon2::Error> {
     let hashing_config = Config {
         variant: Variant::Argon2id,
@@ -77,6 +82,7 @@ pub fn calculate_hash(password: &str) -> Result<String, argon2::Error> {
     argon2::hash_encoded(password.as_bytes(), salt.as_bytes(), &hashing_config)
 }
 
+#[tracing::instrument(skip(iterators, check_order))]
 pub fn common_elements(
     mut iterators: impl Iterator<Item = impl Iterator<Item = Vec<u8>>>,
     check_order: impl Fn(&[u8], &[u8]) -> Ordering,
@@ -104,6 +110,7 @@ pub fn common_elements(
 /// Fallible conversion from any value that implements `Serialize` to a `CanonicalJsonObject`.
 ///
 /// `value` must serialize to an `serde_json::Value::Object`.
+#[tracing::instrument(skip(value))]
 pub fn to_canonical_object<T: serde::Serialize>(
     value: T,
 ) -> Result<CanonicalJsonObject, CanonicalJsonError> {
@@ -117,6 +124,7 @@ pub fn to_canonical_object<T: serde::Serialize>(
     }
 }
 
+#[tracing::instrument(skip(deserializer))]
 pub fn deserialize_from_str<
     'de,
     D: serde::de::Deserializer<'de>,