diff --git a/README.rst b/README.rst
index 14ef6c5acf478d646c81511b6521185743a37719..714ad6443d078ff7157dddb4e59134aa1c603da5 100644
--- a/README.rst
+++ b/README.rst
@@ -86,7 +86,7 @@ Homeserver Installation
 =======================
 
 System requirements:
-- POSIX-compliant system (tested on Linux & OSX)
+- POSIX-compliant system (tested on Linux & OS X)
 - Python 2.7
 
 Synapse is written in Python but some of the libraries it uses are written in
@@ -128,6 +128,15 @@ To set up your homeserver, run (in your virtualenv, as before)::
 
 Substituting your host and domain name as appropriate.
 
+This will generate a config file for you to customise, and will also generate
+a set of signing keys. These keys allow your Home Server to identify itself to
+other Home Servers, so don't lose or delete them; it would be wise to back them
+up somewhere safe. If, for whatever reason, you do need to change your Home
+Server's keys, be aware that other Home Servers may have the old key cached. If
+you update the signing key, you should change the name of the key in the
+<server name>.signing.key file (the second word, which by default is 'auto') to
+something different.
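+
+For illustration, a ``<server name>.signing.key`` file contains a single line
+of the form ``<algorithm> <key id> <base64-encoded key>``, for example (the key
+material below is made up)::
+
+  ed25519 auto 3a9dLm...
+
+Changing the second word (the key ID) to something other than ``auto`` lets
+other Home Servers distinguish the new key from the cached one.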
+
 By default, registration of new users is disabled. You can either enable
 registration in the config by specifying ``enable_registration: true``
 (it is then recommended to also set up CAPTCHA), or
@@ -367,10 +376,6 @@ SRV record, as that is the name other machines will expect it to have::
 You may additionally want to pass one or more "-v" options, in order to
 increase the verbosity of logging output; at least for initial testing.
 
-For the initial alpha release, the homeserver is not speaking TLS for
-either client-server or server-server traffic for ease of debugging. We have
-also not spent any time yet getting the homeserver to run behind loadbalancers.
-
 Running a Demo Federation of Homeservers
 ----------------------------------------
 
@@ -433,7 +438,7 @@ track 3PID logins and publish end-user public keys.
 
 It's currently early days for identity servers as Matrix is not yet using 3PIDs
 as the primary means of identity and E2E encryption is not complete. As such,
-we are running a single identity server (http://matrix.org:8090) at the current
+we are running a single identity server (https://matrix.org) at the current
 time.
 
 
diff --git a/UPGRADE.rst b/UPGRADE.rst
index 87dd6e04a88b583b70b5603465d66ec26c12bb01..ab327a8136b24638524b2c2fe8debf4a472c6dd8 100644
--- a/UPGRADE.rst
+++ b/UPGRADE.rst
@@ -1,3 +1,37 @@
+Upgrading to v0.x.x
+===================
+
+Application services have had a breaking API change in this version.
+
+They can no longer register themselves with a home server using the AS HTTP API. This
+decision was made because a compromised application service with free rein to register
+any regex in effect gains full read/write access to the home server if a regex of ``.*``
+is used. An attack where a compromised AS re-registers itself with ``.*`` was deemed too
+big a security risk to ignore, so the ability to register with the HS remotely has been
+removed.
+
+It has been replaced by specifying a list of application service registrations in
+``homeserver.yaml``::
+
+  app_service_config_files: ["registration-01.yaml", "registration-02.yaml"]
+
+Where ``registration-01.yaml`` looks like::
+
+  url: <String>  # e.g. "https://my.application.service.com"
+  as_token: <String>
+  hs_token: <String>
+  sender_localpart: <String>  # This is a new field which denotes the user_id localpart when using the AS token
+  namespaces:
+    users:
+      - exclusive: <Boolean>
+        regex: <String>  # e.g. "@prefix_.*"
+    aliases:
+      - exclusive: <Boolean>
+        regex: <String>
+    rooms:
+      - exclusive: <Boolean>
+        regex: <String>
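+
+As a concrete (illustrative) example, assuming the application service only
+claims a user namespace and that empty lists are acceptable for unused
+namespaces::
+
+  url: "https://bridge.example.com"
+  as_token: "<secret shared with the HS>"
+  hs_token: "<a different secret shared with the HS>"
+  sender_localpart: "examplebridge"
+  namespaces:
+    users:
+      - exclusive: true
+        regex: "@example_.*"
+    aliases: []
+    rooms: []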
+
 Upgrading to v0.8.0
 ===================
 
diff --git a/docs/metrics-howto.rst b/docs/metrics-howto.rst
new file mode 100644
index 0000000000000000000000000000000000000000..c1f5ae217481413945673a05ce3e4e4571c60a72
--- /dev/null
+++ b/docs/metrics-howto.rst
@@ -0,0 +1,50 @@
+How to monitor Synapse metrics using Prometheus
+===============================================
+
+1: Install prometheus:
+  Follow instructions at http://prometheus.io/docs/introduction/install/
+
+2: Enable synapse metrics:
+  Simply setting a (local) port number will enable it. Pick a port.
+  prometheus itself defaults to 9090, so starting just above that for
+  locally monitored services seems reasonable, e.g. 9092.
+
+  Add the following to ``homeserver.yaml``::
+
+    metrics_port: 9092
+
+  Restart synapse.
+
+3: Check out synapse-prometheus-config
+  https://github.com/matrix-org/synapse-prometheus-config
+
+4: Add ``synapse.html`` and ``synapse.rules``
+  The ``.html`` file needs to appear in prometheus's ``consoles`` directory,
+  and the ``.rules`` file needs to be invoked somewhere in the main config
+  file. A symlink to each from the git checkout into the prometheus directory
+  might be easiest to ensure ``git pull`` keeps them updated.
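+
+  For example, assuming the config repository is checked out at
+  /opt/synapse-prometheus-config and prometheus lives in /opt/prometheus
+  (paths are illustrative only)::
+
+    ln -s /opt/synapse-prometheus-config/synapse.html /opt/prometheus/consoles/synapse.html
+    ln -s /opt/synapse-prometheus-config/synapse.rules /opt/prometheus/synapse.rules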
+
+5: Add a prometheus target for synapse
+  This is easiest if prometheus runs on the same machine as synapse, as it can
+  then just use localhost::
+
+    global: {
+      rule_file: "synapse.rules"
+    }
+
+    job: {
+      name: "synapse"
+
+      target_group: {
+        target: "http://localhost:9092/"
+      }
+    }
+
+6: Start prometheus::
+
+   ./prometheus -config.file=prometheus.conf
+
+7: Wait a few seconds for it to start and perform the first scrape,
+   then visit the console::
+
+    http://server-where-prometheus-runs:9090/consoles/synapse.html
diff --git a/synapse/api/auth.py b/synapse/api/auth.py
index d8b76147913e46ac939493257bc5c0eed0858959..935dffbabe60f7d5eebac97b3bb3d11339071824 100644
--- a/synapse/api/auth.py
+++ b/synapse/api/auth.py
@@ -183,17 +183,7 @@ class Auth(object):
         else:
             join_rule = JoinRules.INVITE
 
-        user_level = self._get_power_level_from_event_state(
-            event,
-            event.user_id,
-            auth_events,
-        )
-
-        # TODO(paul): There's an awful lot of int()-casting in this code;
-        #   surely we should be squashing strings to integers at a higher
-        #   level, maybe when we insert?
-        if user_level is not None:
-            user_level = int(user_level)
+        user_level = self._get_user_power_level(event.user_id, auth_events)
 
         # FIXME (erikj): What should we do here as the default?
         ban_level = self._get_named_level(auth_events, "ban", 50)
@@ -281,23 +271,26 @@ class Auth(object):
         key = (EventTypes.PowerLevels, "", )
         return auth_events.get(key)
 
-    def _get_power_level_from_event_state(self, event, user_id, auth_events):
+    def _get_user_power_level(self, user_id, auth_events):
         power_level_event = self._get_power_level_event(auth_events)
-        level = None
 
         if power_level_event:
             level = power_level_event.content.get("users", {}).get(user_id)
             if not level:
                 level = power_level_event.content.get("users_default", 0)
+
+            if level is None:
+                return 0
+            else:
+                return int(level)
         else:
             key = (EventTypes.Create, "", )
             create_event = auth_events.get(key)
             if (create_event is not None and
                     create_event.content["creator"] == user_id):
                 return 100
-
-        return level
-
+            else:
+                return 0
 
     def _get_named_level(self, auth_events, name, default):
         power_level_event = self._get_power_level_event(auth_events)
@@ -504,16 +497,7 @@ class Auth(object):
         else:
             send_level = 0
 
-        user_level = self._get_power_level_from_event_state(
-            event,
-            event.user_id,
-            auth_events,
-        )
-
-        if user_level:
-            user_level = int(user_level)
-        else:
-            user_level = 0
+        user_level = self._get_user_power_level(event.user_id, auth_events)
 
         if user_level < send_level:
             raise AuthError(
@@ -545,11 +529,7 @@ class Auth(object):
         return True
 
     def _check_redaction(self, event, auth_events):
-        user_level = self._get_power_level_from_event_state(
-            event,
-            event.user_id,
-            auth_events,
-        )
+        user_level = self._get_user_power_level(event.user_id, auth_events)
 
         redact_level = self._get_named_level(auth_events, "redact", 50)
 
@@ -579,11 +559,7 @@ class Auth(object):
         if not current_state:
             return
 
-        user_level = self._get_power_level_from_event_state(
-            event,
-            event.user_id,
-            auth_events,
-        )
+        user_level = self._get_user_power_level(event.user_id, auth_events)
 
         # Check other levels:
         levels_to_check = [
diff --git a/synapse/api/errors.py b/synapse/api/errors.py
index eddd889778b23e9bdc46b42685500cfa9ff5c2f6..72d2bd5b4c92cb3bf1cda0f364d361b28b9b712d 100644
--- a/synapse/api/errors.py
+++ b/synapse/api/errors.py
@@ -35,8 +35,8 @@ class Codes(object):
     LIMIT_EXCEEDED = "M_LIMIT_EXCEEDED"
     CAPTCHA_NEEDED = "M_CAPTCHA_NEEDED"
     CAPTCHA_INVALID = "M_CAPTCHA_INVALID"
-    MISSING_PARAM = "M_MISSING_PARAM",
-    TOO_LARGE = "M_TOO_LARGE",
+    MISSING_PARAM = "M_MISSING_PARAM"
+    TOO_LARGE = "M_TOO_LARGE"
     EXCLUSIVE = "M_EXCLUSIVE"
 
 
diff --git a/synapse/config/registration.py b/synapse/config/registration.py
index d5c8f4bf7b8648b738c68083b76e4fc5a16557c3..f412a72f592e2c38ae3cebe972512428d482125e 100644
--- a/synapse/config/registration.py
+++ b/synapse/config/registration.py
@@ -53,6 +53,7 @@ class RegistrationConfig(Config):
 
     @classmethod
     def generate_config(cls, args, config_dir_path):
+        super(RegistrationConfig, cls).generate_config(args, config_dir_path)
         if args.enable_registration is None:
             args.enable_registration = False
 
diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py
index f4db7b8a053d9bdf2a0c0c35ede34535e79c1ef3..2b4faee4c1253f174ab45e8bd3ad141808f6e0b5 100644
--- a/synapse/crypto/keyring.py
+++ b/synapse/crypto/keyring.py
@@ -24,6 +24,8 @@ from synapse.api.errors import SynapseError, Codes
 
 from synapse.util.retryutils import get_retry_limiter
 
+from synapse.util.async import create_observer
+
 from OpenSSL import crypto
 
 import logging
@@ -38,6 +40,8 @@ class Keyring(object):
         self.clock = hs.get_clock()
         self.hs = hs
 
+        self.key_downloads = {}
+
     @defer.inlineCallbacks
     def verify_json_for_server(self, server_name, json_object):
         logger.debug("Verifying for %s", server_name)
@@ -97,6 +101,22 @@ class Keyring(object):
             defer.returnValue(cached[0])
             return
 
+        download = self.key_downloads.get(server_name)
+
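+        # If a fetch for this server's keys is already in flight, observe it
+        # rather than starting a second request; concurrent callers are
+        # coalesced onto the same download.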
+        if download is None:
+            download = self._get_server_verify_key_impl(server_name, key_ids)
+            self.key_downloads[server_name] = download
+
+            @download.addBoth
+            def callback(ret):
+                del self.key_downloads[server_name]
+                return ret
+
+        r = yield create_observer(download)
+        defer.returnValue(r)
+
+    @defer.inlineCallbacks
+    def _get_server_verify_key_impl(self, server_name, key_ids):
         # Try to fetch the key from the remote server.
 
         limiter = yield get_retry_limiter(
diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py
index bbc7a0f200c83a832d70dce1a52c8b1a8cfd305a..571eacd34308b99ed8295ca44b3fbe73d885a325 100644
--- a/synapse/handlers/presence.py
+++ b/synapse/handlers/presence.py
@@ -36,6 +36,9 @@ metrics = synapse.metrics.get_metrics_for(__name__)
 # Don't bother bumping "last active" time if it differs by less than 60 seconds
 LAST_ACTIVE_GRANULARITY = 60*1000
 
+# Keep no more than this number of offline serial revisions
+MAX_OFFLINE_SERIALS = 1000
+
 
 # TODO(paul): Maybe there's one of these I can steal from somewhere
 def partition(l, func):
@@ -135,6 +138,9 @@ class PresenceHandler(BaseHandler):
         self._remote_sendmap = {}
         # map remote users to sets of local users who're interested in them
         self._remote_recvmap = {}
+        # list of (serial, set of(userids)) tuples, ordered by serial, latest
+        # first
+        self._remote_offline_serials = []
 
         # map any user to a UserPresenceCache
         self._user_cachemap = {}
@@ -714,8 +720,24 @@ class PresenceHandler(BaseHandler):
                 statuscache=statuscache,
             )
 
+            user_id = user.to_string()
+
             if state["presence"] == PresenceState.OFFLINE:
+                self._remote_offline_serials.insert(
+                    0,
+                    (self._user_cachemap_latest_serial, set([user_id]))
+                )
+                while len(self._remote_offline_serials) > MAX_OFFLINE_SERIALS:
+                    self._remote_offline_serials.pop()  # remove the oldest
                 del self._user_cachemap[user]
+            else:
+                # Remove the user from remote_offline_serials now that they're
+                # no longer offline
+                for idx, elem in enumerate(self._remote_offline_serials):
+                    (_, user_ids) = elem
+                    user_ids.discard(user_id)
+                    if not user_ids:
+                        self._remote_offline_serials.pop(idx)
 
         for poll in content.get("poll", []):
             user = UserID.from_string(poll)
@@ -836,6 +858,8 @@ class PresenceEventSource(object):
 
         presence = self.hs.get_handlers().presence_handler
         cachemap = presence._user_cachemap
+        clock = self.clock
+        latest_serial = None
 
         updates = []
         # TODO(paul): use a DeferredList ? How to limit concurrency.
@@ -845,18 +869,31 @@ class PresenceEventSource(object):
             if cached.serial <= from_key:
                 continue
 
-            if (yield self.is_visible(observer_user, observed_user)):
-                updates.append((observed_user, cached))
+            if not (yield self.is_visible(observer_user, observed_user)):
+                continue
+
+            if latest_serial is None or cached.serial > latest_serial:
+                latest_serial = cached.serial
+            updates.append(cached.make_event(user=observed_user, clock=clock))
 
         # TODO(paul): limit
 
-        if updates:
-            clock = self.clock
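+        # Also report remote users who went offline since from_key; their
+        # cache entries were removed on the OFFLINE push, so they are only
+        # recorded in _remote_offline_serials.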
+        for serial, user_ids in presence._remote_offline_serials:
+            if serial < from_key:
+                break
 
-            latest_serial = max([x[1].serial for x in updates])
-            data = [x[1].make_event(user=x[0], clock=clock) for x in updates]
+            for u in user_ids:
+                updates.append({
+                    "type": "m.presence",
+                    "content": {"user_id": u, "presence": PresenceState.OFFLINE},
+                })
+        # TODO(paul): For the v2 API we want to tell the client their from_key
+        #   is too old if we fell off the end of the _remote_offline_serials
+        #   list, and get them to invalidate+resync. In v1 we have no such
+        #   concept so this is a best-effort result.
 
-            defer.returnValue((data, latest_serial))
+        if updates:
+            defer.returnValue((updates, latest_serial))
         else:
             defer.returnValue(([], presence._user_cachemap_latest_serial))
 
diff --git a/synapse/push/baserules.py b/synapse/push/baserules.py
index 60fd35fbfb5ad11e1aa1dad68cb47c19379f44a4..f3d1cf5c5fb1c780a64b7a13a447aa97b58bf2fd 100644
--- a/synapse/push/baserules.py
+++ b/synapse/push/baserules.py
@@ -1,3 +1,17 @@
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 from synapse.push.rulekinds import PRIORITY_CLASS_MAP, PRIORITY_CLASS_INVERSE_MAP
 
 
@@ -112,7 +126,25 @@ def make_base_prepend_override_rules():
 def make_base_append_override_rules():
     return [
         {
-            'rule_id': 'global/override/.m.rule.call',
+            'rule_id': 'global/override/.m.rule.suppress_notices',
+            'conditions': [
+                {
+                    'kind': 'event_match',
+                    'key': 'content.msgtype',
+                    'pattern': 'm.notice',
+                }
+            ],
+            'actions': [
+                'dont_notify',
+            ]
+        }
+    ]
+
+
+def make_base_append_underride_rules(user):
+    return [
+        {
+            'rule_id': 'global/underride/.m.rule.call',
             'conditions': [
                 {
                     'kind': 'event_match',
@@ -131,19 +163,6 @@ def make_base_append_override_rules():
                 }
             ]
         },
-        {
-            'rule_id': 'global/override/.m.rule.suppress_notices',
-            'conditions': [
-                {
-                    'kind': 'event_match',
-                    'key': 'content.msgtype',
-                    'pattern': 'm.notice',
-                }
-            ],
-            'actions': [
-                'dont_notify',
-            ]
-        },
         {
             'rule_id': 'global/override/.m.rule.contains_display_name',
             'conditions': [
@@ -162,7 +181,7 @@ def make_base_append_override_rules():
             ]
         },
         {
-            'rule_id': 'global/override/.m.rule.room_one_to_one',
+            'rule_id': 'global/underride/.m.rule.room_one_to_one',
             'conditions': [
                 {
                     'kind': 'room_member_count',
@@ -179,12 +198,7 @@ def make_base_append_override_rules():
                     'value': False
                 }
             ]
-        }
-    ]
-
-
-def make_base_append_underride_rules(user):
-    return [
+        },
         {
             'rule_id': 'global/underride/.m.rule.invite_for_me',
             'conditions': [
diff --git a/synapse/push/rulekinds.py b/synapse/push/rulekinds.py
index 660aa4e10e2234773d1222fc7937ebc3652a3809..4c591aa638ab01efa37508b9533236eb371b7e94 100644
--- a/synapse/push/rulekinds.py
+++ b/synapse/push/rulekinds.py
@@ -1,3 +1,17 @@
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 PRIORITY_CLASS_MAP = {
     'underride': 1,
     'sender': 2,
diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py
index ee72f774b3f7668a95645c0e9ff28698e65fd4e4..8b457419cfdcf676d466cc73e0f5c7281926643c 100644
--- a/synapse/python_dependencies.py
+++ b/synapse/python_dependencies.py
@@ -1,3 +1,17 @@
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 import logging
 from distutils.version import LooseVersion
 
diff --git a/synapse/rest/media/v1/base_resource.py b/synapse/rest/media/v1/base_resource.py
index edd4f78024099621d78cd1a1c78cf05420e22506..08c8d75af41c5e48a44b742d7de3b6546287d8bd 100644
--- a/synapse/rest/media/v1/base_resource.py
+++ b/synapse/rest/media/v1/base_resource.py
@@ -25,6 +25,8 @@ from twisted.internet import defer
 from twisted.web.resource import Resource
 from twisted.protocols.basic import FileSender
 
+from synapse.util.async import create_observer
+
 import os
 
 import logging
@@ -87,7 +89,7 @@ class BaseMediaResource(Resource):
             def callback(media_info):
                 del self.downloads[key]
                 return media_info
-        return download
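+        # Hand each caller an observer of the shared download rather than the
+        # deferred itself, so one caller's callbacks can't affect the others.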
+        return create_observer(download)
 
     @defer.inlineCallbacks
     def _get_remote_media_impl(self, server_name, media_id):
diff --git a/synapse/rest/media/v1/identicon_resource.py b/synapse/rest/media/v1/identicon_resource.py
index 912856386ad342e04bc858e9701adeb1e1310f2b..603859d5d4a16a047c344a562b9773852c86665d 100644
--- a/synapse/rest/media/v1/identicon_resource.py
+++ b/synapse/rest/media/v1/identicon_resource.py
@@ -1,3 +1,17 @@
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 from pydenticon import Generator
 from twisted.web.resource import Resource
 
diff --git a/synapse/storage/schema/delta/14/upgrade_appservice_db.py b/synapse/storage/schema/delta/14/upgrade_appservice_db.py
index 847b1c5b890ed2d396258f4dcc63d246f1b44a34..9f3a4dd4c57b336543cdf0f5d8f2d8122a8233af 100644
--- a/synapse/storage/schema/delta/14/upgrade_appservice_db.py
+++ b/synapse/storage/schema/delta/14/upgrade_appservice_db.py
@@ -1,3 +1,17 @@
+# Copyright 2015 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 import json
 import logging
 
diff --git a/synapse/storage/schema/delta/14/v14.sql b/synapse/storage/schema/delta/14/v14.sql
index 021272644888621b8c1240f3b3dad439cbd33bae..1d09ad7a15e76f4258553b0e8ad4db452b3dff25 100644
--- a/synapse/storage/schema/delta/14/v14.sql
+++ b/synapse/storage/schema/delta/14/v14.sql
@@ -1,3 +1,17 @@
+/* Copyright 2015 OpenMarket Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 CREATE TABLE IF NOT EXISTS push_rules_enable (
   id INTEGER PRIMARY KEY AUTOINCREMENT,
   user_name TEXT NOT NULL,
diff --git a/synapse/util/async.py b/synapse/util/async.py
index c4fe5d522f362118c6e905431e66dd56cdafafb9..d8febdb90ca776d980f41bd06ee4657706930a2e 100644
--- a/synapse/util/async.py
+++ b/synapse/util/async.py
@@ -32,3 +32,22 @@ def run_on_reactor():
     iteration of the main loop
     """
     return sleep(0)
+
+
+def create_observer(deferred):
+    """Creates a deferred that observes the result or failure of the given
+     deferred *without* affecting the given deferred.
+    """
+    d = defer.Deferred()
+
+    def callback(r):
+        d.callback(r)
+        return r
+
+    def errback(f):
+        d.errback(f)
+        return f
+
+    deferred.addCallbacks(callback, errback)
+
+    return d
diff --git a/tests/handlers/test_presence.py b/tests/handlers/test_presence.py
index 04eba4289e61f68d3888910a639b713b10b1f6c6..9f5580c096ee6f2317fb614a5204b28f64145c6d 100644
--- a/tests/handlers/test_presence.py
+++ b/tests/handlers/test_presence.py
@@ -878,6 +878,71 @@ class PresencePushTestCase(MockedDatastorePresenceTestCase):
             state
         )
 
+    @defer.inlineCallbacks
+    def test_recv_remote_offline(self):
+        """ Various tests relating to SYN-261 """
+        potato_set = self.handler._remote_recvmap.setdefault(self.u_potato,
+                set())
+        potato_set.add(self.u_apple)
+
+        self.room_members = [self.u_banana, self.u_potato]
+
+        self.assertEquals(self.event_source.get_current_key(), 0)
+
+        yield self.mock_federation_resource.trigger("PUT",
+            "/_matrix/federation/v1/send/1000000/",
+            _make_edu_json("elsewhere", "m.presence",
+                content={
+                    "push": [
+                        {"user_id": "@potato:remote",
+                         "presence": "offline"},
+                    ],
+                }
+            )
+        )
+
+        self.assertEquals(self.event_source.get_current_key(), 1)
+
+        (events, _) = yield self.event_source.get_new_events_for_user(
+            self.u_apple, 0, None
+        )
+        self.assertEquals(events,
+            [
+                {"type": "m.presence",
+                 "content": {
+                     "user_id": "@potato:remote",
+                     "presence": OFFLINE,
+                }}
+            ]
+        )
+
+        yield self.mock_federation_resource.trigger("PUT",
+            "/_matrix/federation/v1/send/1000001/",
+            _make_edu_json("elsewhere", "m.presence",
+                content={
+                    "push": [
+                        {"user_id": "@potato:remote",
+                         "presence": "online"},
+                    ],
+                }
+            )
+        )
+
+        self.assertEquals(self.event_source.get_current_key(), 2)
+
+        (events, _) = yield self.event_source.get_new_events_for_user(
+            self.u_apple, 0, None
+        )
+        self.assertEquals(events,
+            [
+                {"type": "m.presence",
+                 "content": {
+                     "user_id": "@potato:remote",
+                     "presence": ONLINE,
+                }}
+            ]
+        )
+
     @defer.inlineCallbacks
     def test_join_room_local(self):
         self.room_members = [self.u_apple, self.u_banana]