Commit 2eb421b6 authored by Jason Robinson

Merge branch 'develop' into jaywink/admin-forward-extremities

parents 90ad4d44 bce0c91d
Showing 118 additions and 128 deletions
@@ -15,6 +15,7 @@
 # limitations under the License.
 import logging
 from synapse.storage.engines import create_engine
 logger = logging.getLogger("create_postgres_db")
......
Add support for multiple SSO Identity Providers.
Configure the linters to run on a consistent set of files.
Remove broken and unmaintained `demo/webserver.py` script.
Fix error handling during insertion of client IPs into the database.
Fix bug where we didn't correctly record CPU time spent in 'on_new_event' block.
Fix a minor bug which could cause confusing error messages from invalid configurations.
Add missing user_mapping_provider configuration to the Keycloak OIDC example. Contributed by @chris-ruecker.
import argparse
import BaseHTTPServer
import os
import SimpleHTTPServer
import cgi, logging

from daemonize import Daemonize


class SimpleHTTPRequestHandlerWithPOST(SimpleHTTPServer.SimpleHTTPRequestHandler):
    UPLOAD_PATH = "upload"

    """
    Accept all post request as file upload
    """

    def do_POST(self):
        path = os.path.join(self.UPLOAD_PATH, os.path.basename(self.path))
        length = self.headers["content-length"]
        data = self.rfile.read(int(length))

        with open(path, "wb") as fh:
            fh.write(data)

        self.send_response(200)
        self.send_header("Content-Type", "application/json")
        self.end_headers()

        # Return the absolute path of the uploaded file
        self.wfile.write('{"url":"/%s"}' % path)


def setup():
    parser = argparse.ArgumentParser()
    parser.add_argument("directory")
    parser.add_argument("-p", "--port", dest="port", type=int, default=8080)
    parser.add_argument("-P", "--pid-file", dest="pid", default="web.pid")
    args = parser.parse_args()

    # Get absolute path to directory to serve, as daemonize changes to '/'
    os.chdir(args.directory)
    dr = os.getcwd()

    httpd = BaseHTTPServer.HTTPServer(("", args.port), SimpleHTTPRequestHandlerWithPOST)

    def run():
        os.chdir(dr)
        httpd.serve_forever()

    daemon = Daemonize(
        app="synapse-webclient", pid=args.pid, action=run, auto_close_fds=False
    )

    daemon.start()


if __name__ == "__main__":
    setup()
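The script above is Python 2 only: the `BaseHTTPServer` and `SimpleHTTPServer` modules it relies on were merged into `http.server` in Python 3, so the script no longer runs on a supported interpreter; hence its removal here as "broken and unmaintained".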
@@ -158,6 +158,10 @@ oidc_config:
    client_id: "synapse"
    client_secret: "copy secret generated from above"
    scopes: ["openid", "profile"]
+   user_mapping_provider:
+     config:
+       localpart_template: "{{ user.preferred_username }}"
+       display_name_template: "{{ user.name }}"
 ```

 ### [Auth0][auth0]
......
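The two template values added above are Jinja2 templates that Synapse's default OIDC mapping provider renders against the userinfo response from the identity provider, exposed to the template as `user`. A minimal sketch of that rendering, with a hypothetical userinfo payload using the standard `preferred_username` and `name` claims:

from jinja2 import Template

# Hypothetical userinfo response from the provider (illustration only).
userinfo = {"preferred_username": "alice", "name": "Alice Example"}

# Render the same templates declared under user_mapping_provider.config.
localpart = Template("{{ user.preferred_username }}").render(user=userinfo)
display_name = Template("{{ user.name }}").render(user=userinfo)

print(localpart)     # alice  (becomes the localpart of the Matrix ID)
print(display_name)  # Alice Example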
@@ -103,6 +103,7 @@ files =
   tests/replication,
   tests/test_utils,
   tests/handlers/test_password_providers.py,
+  tests/rest/client/v1/test_login.py,
   tests/rest/client/v2_alpha/test_auth.py,
   tests/util/test_stream_change_cache.py
......
@@ -15,16 +15,7 @@
 # Stub for frozendict.

-from typing import (
-    Any,
-    Hashable,
-    Iterable,
-    Iterator,
-    Mapping,
-    overload,
-    Tuple,
-    TypeVar,
-)
+from typing import Any, Hashable, Iterable, Iterator, Mapping, Tuple, TypeVar, overload

 _KT = TypeVar("_KT", bound=Hashable)  # Key type.
 _VT = TypeVar("_VT")  # Value type.
......
@@ -7,17 +7,17 @@ from typing import (
     Callable,
     Dict,
     Hashable,
-    Iterator,
-    Iterable,
     ItemsView,
+    Iterable,
+    Iterator,
     KeysView,
     List,
     Mapping,
     Optional,
     Sequence,
+    Tuple,
     Type,
     TypeVar,
-    Tuple,
     Union,
     ValuesView,
     overload,
......
@@ -16,7 +16,7 @@
 """Contains *incomplete* type hints for txredisapi.
 """

-from typing import List, Optional, Union, Type
+from typing import List, Optional, Type, Union

 class RedisProtocol:
     def publish(self, channel: str, message: bytes): ...
......
@@ -56,7 +56,7 @@ def json_error_to_config_error(
     """
     # copy `config_path` before modifying it.
     path = list(config_path)
-    for p in list(e.path):
+    for p in list(e.absolute_path):
         if isinstance(p, int):
             path.append("<item %i>" % p)
         else:
......
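The one-line change above swaps the jsonschema ValidationError's `path` (which is relative, and can omit components when an error hangs off a parent error) for `absolute_path`, which walks up through parent errors to give the full location within the instance. A rough, self-contained illustration of the attribute, mirroring the schema used by the new test added later in this commit:

import jsonschema

schema = {
    "type": "object",
    "properties": {
        "array_of_objs": {
            "type": "array",
            "items": {"type": "object", "required": ["r"]},
        },
    },
}

try:
    jsonschema.validate({"array_of_objs": [{}]}, schema)
except jsonschema.ValidationError as e:
    # absolute_path gives the full path to the offending element,
    # here the first (empty) object in the array.
    print(list(e.absolute_path))  # ['array_of_objs', 0]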
@@ -396,31 +396,30 @@ class Notifier:
         Will wake up all listeners for the given users and rooms.
         """
-        with PreserveLoggingContext():
-            with Measure(self.clock, "on_new_event"):
-                user_streams = set()
-                for user in users:
-                    user_stream = self.user_to_user_stream.get(str(user))
-                    if user_stream is not None:
-                        user_streams.add(user_stream)
-                for room in rooms:
-                    user_streams |= self.room_to_user_streams.get(room, set())
-                time_now_ms = self.clock.time_msec()
-                for user_stream in user_streams:
-                    try:
-                        user_stream.notify(stream_key, new_token, time_now_ms)
-                    except Exception:
-                        logger.exception("Failed to notify listener")
-                self.notify_replication()
-                # Notify appservices
-                self._notify_app_services_ephemeral(
-                    stream_key, new_token, users,
-                )
+        with Measure(self.clock, "on_new_event"):
+            user_streams = set()
+            for user in users:
+                user_stream = self.user_to_user_stream.get(str(user))
+                if user_stream is not None:
+                    user_streams.add(user_stream)
+            for room in rooms:
+                user_streams |= self.room_to_user_streams.get(room, set())
+            time_now_ms = self.clock.time_msec()
+            for user_stream in user_streams:
+                try:
+                    user_stream.notify(stream_key, new_token, time_now_ms)
+                except Exception:
+                    logger.exception("Failed to notify listener")
+            self.notify_replication()
+            # Notify appservices
+            self._notify_app_services_ephemeral(
+                stream_key, new_token, users,
+            )

     def on_new_replication_data(self) -> None:
         """Used to inform replication listeners that something has happened
......
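Removing the enclosing `PreserveLoggingContext()` means the notification work now runs directly inside the `Measure` block, so its CPU time is recorded against the "on_new_event" metric (the changelog fix above). As a loose stand-in for Synapse's `Measure`, here is a minimal sketch (names hypothetical, not the real implementation) of why the work must execute inside the context manager to be attributed:

import time
from contextlib import contextmanager

@contextmanager
def measure(name):
    # Track CPU time (not wall-clock) consumed while the block runs.
    start = time.process_time()
    try:
        yield
    finally:
        print("%s: %.6fs CPU" % (name, time.process_time() - start))

with measure("on_new_event"):
    sum(i * i for i in range(200_000))  # work attributed to "on_new_event"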
@@ -319,9 +319,9 @@ class SsoRedirectServlet(RestServlet):
         # register themselves with the main SSOHandler.
         if hs.config.cas_enabled:
             hs.get_cas_handler()
-        elif hs.config.saml2_enabled:
+        if hs.config.saml2_enabled:
             hs.get_saml_handler()
-        elif hs.config.oidc_enabled:
+        if hs.config.oidc_enabled:
             hs.get_oidc_handler()
         self._sso_handler = hs.get_sso_handler()
......
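Replacing `elif` with independent `if` statements lets the CAS, SAML and OIDC handlers all be instantiated on the same homeserver rather than only the first enabled one; this is part of the plumbing for the "multiple SSO Identity Providers" changelog entry above.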
@@ -470,43 +470,35 @@ class ClientIpStore(ClientIpWorkerStore):
         for entry in to_update.items():
             (user_id, access_token, ip), (user_agent, device_id, last_seen) = entry
-            try:
-                self.db_pool.simple_upsert_txn(
-                    txn,
-                    table="user_ips",
-                    keyvalues={
-                        "user_id": user_id,
-                        "access_token": access_token,
-                        "ip": ip,
-                    },
-                    values={
-                        "user_agent": user_agent,
-                        "device_id": device_id,
-                        "last_seen": last_seen,
-                    },
-                    lock=False,
-                )
-                # Technically an access token might not be associated with
-                # a device so we need to check.
-                if device_id:
-                    # this is always an update rather than an upsert: the row should
-                    # already exist, and if it doesn't, that may be because it has been
-                    # deleted, and we don't want to re-create it.
-                    self.db_pool.simple_update_txn(
-                        txn,
-                        table="devices",
-                        keyvalues={"user_id": user_id, "device_id": device_id},
-                        updatevalues={
-                            "user_agent": user_agent,
-                            "last_seen": last_seen,
-                            "ip": ip,
-                        },
-                    )
-            except Exception as e:
-                # Failed to upsert, log and continue
-                logger.error("Failed to insert client IP %r: %r", entry, e)
+            self.db_pool.simple_upsert_txn(
+                txn,
+                table="user_ips",
+                keyvalues={"user_id": user_id, "access_token": access_token, "ip": ip},
+                values={
+                    "user_agent": user_agent,
+                    "device_id": device_id,
+                    "last_seen": last_seen,
+                },
+                lock=False,
+            )
+            # Technically an access token might not be associated with
+            # a device so we need to check.
+            if device_id:
+                # this is always an update rather than an upsert: the row should
+                # already exist, and if it doesn't, that may be because it has been
+                # deleted, and we don't want to re-create it.
+                self.db_pool.simple_update_txn(
+                    txn,
+                    table="devices",
+                    keyvalues={"user_id": user_id, "device_id": device_id},
+                    updatevalues={
+                        "user_agent": user_agent,
+                        "last_seen": last_seen,
+                        "ip": ip,
+                    },
+                )

     async def get_last_client_ip_by_device(
         self, user_id: str, device_id: Optional[str]
     ) -> Dict[Tuple[str, str], dict]:
......
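Dropping the per-row `try`/`except` is the error-handling fix from the changelog: logging the failure and carrying on inside the same database transaction was unsound (on PostgreSQL, once any statement fails the transaction is aborted and every subsequent statement fails too), so the exception is now left to propagate to the caller rather than being swallowed.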
@@ -111,7 +111,8 @@ class Measure:
         curr_context = current_context()
         if not curr_context:
             logger.warning(
-                "Starting metrics collection from sentinel context: metrics will be lost"
+                "Starting metrics collection %r from sentinel context: metrics will be lost",
+                name,
             )
             parent_context = None
         else:
......
# -*- coding: utf-8 -*-
# Copyright 2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from synapse.config import ConfigError
from synapse.config._util import validate_config

from tests.unittest import TestCase


class ValidateConfigTestCase(TestCase):
    """Test cases for synapse.config._util.validate_config"""

    def test_bad_object_in_array(self):
        """malformed objects within an array should be validated correctly"""

        # consider a structure:
        #
        # array_of_objs:
        #   - r: 1
        #     foo: 2
        #
        #   - r: 2
        #     bar: 3
        #
        # ... where each entry must contain an "r": check that the path
        # to the required item is correctly reported.

        schema = {
            "type": "object",
            "properties": {
                "array_of_objs": {
                    "type": "array",
                    "items": {"type": "object", "required": ["r"]},
                },
            },
        }

        with self.assertRaises(ConfigError) as c:
            validate_config(schema, {"array_of_objs": [{}]}, ("base",))

        self.assertEqual(c.exception.path, ["base", "array_of_objs", "<item 0>"])
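This new test pins down the `absolute_path` fix above; under Synapse's Twisted-based test setup it should be runnable in isolation with `trial tests.config.test_util` (assuming a working development install).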