From dc3e58693816b897c612ea2b1d5a9f0656108d7d Mon Sep 17 00:00:00 2001 From: Alexander Trost Date: Sun, 2 Jun 2019 18:13:20 +0200 Subject: [PATCH 01/61] SAML2 Improvements and redirect stuff Signed-off-by: Alexander Trost --- synapse/api/constants.py | 1 + synapse/config/saml2_config.py | 1 + synapse/handlers/auth.py | 3 ++ synapse/rest/client/v1/login.py | 46 +++++++++++++++++++++++++ synapse/static/client/login/js/login.js | 6 ++-- 5 files changed, 55 insertions(+), 2 deletions(-) diff --git a/synapse/api/constants.py b/synapse/api/constants.py index ee129c868..744443404 100644 --- a/synapse/api/constants.py +++ b/synapse/api/constants.py @@ -57,6 +57,7 @@ class LoginType(object): EMAIL_IDENTITY = u"m.login.email.identity" MSISDN = u"m.login.msisdn" RECAPTCHA = u"m.login.recaptcha" + SSO = u"m.login.sso" TERMS = u"m.login.terms" DUMMY = u"m.login.dummy" diff --git a/synapse/config/saml2_config.py b/synapse/config/saml2_config.py index aa6eac271..60384d33f 100644 --- a/synapse/config/saml2_config.py +++ b/synapse/config/saml2_config.py @@ -75,6 +75,7 @@ class SAML2Config(Config): # override them. # #saml2_config: + # enabled: true # sp_config: # # point this to the IdP's metadata. You can use either a local file or # # (preferably) a URL. diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index aa5d89a9a..e6c8965a9 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -727,6 +727,9 @@ class AuthHandler(BaseHandler): if canonical_user_id: defer.returnValue((canonical_user_id, None)) + if login_type == LoginType.SSO: + known_login_type = True + if not known_login_type: raise SynapseError(400, "Unknown login type %s" % login_type) diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py index 029039c16..ae9bbba61 100644 --- a/synapse/rest/client/v1/login.py +++ b/synapse/rest/client/v1/login.py @@ -33,6 +33,9 @@ from synapse.rest.well_known import WellKnownBuilder from synapse.types import UserID, map_username_to_mxid_localpart from synapse.util.msisdn import phone_number_to_msisdn +import saml2 +from saml2.client import Saml2Client + from .base import ClientV1RestServlet, client_path_patterns logger = logging.getLogger(__name__) @@ -93,6 +96,7 @@ class LoginRestServlet(ClientV1RestServlet): self.jwt_enabled = hs.config.jwt_enabled self.jwt_secret = hs.config.jwt_secret self.jwt_algorithm = hs.config.jwt_algorithm + self.saml2_enabled = hs.config.saml2_enabled self.cas_enabled = hs.config.cas_enabled self.auth_handler = self.hs.get_auth_handler() self.registration_handler = hs.get_registration_handler() @@ -104,6 +108,9 @@ class LoginRestServlet(ClientV1RestServlet): flows = [] if self.jwt_enabled: flows.append({"type": LoginRestServlet.JWT_TYPE}) + if self.saml2_enabled: + flows.append({"type": LoginRestServlet.SSO_TYPE}) + flows.append({"type": LoginRestServlet.TOKEN_TYPE}) if self.cas_enabled: flows.append({"type": LoginRestServlet.SSO_TYPE}) @@ -474,6 +481,43 @@ class CasTicketServlet(ClientV1RestServlet): return user, attributes +class SSORedirectServlet(RestServlet): + PATTERNS = client_path_patterns("/login/sso/redirect") + + def __init__(self, hs): + super(SSORedirectServlet, self).__init__() + self.saml2_sp_config = hs.config.saml2_sp_config + + def on_GET(self, request): + args = request.args + + saml_client = Saml2Client(self.saml2_sp_config) + reqid, info = saml_client.prepare_for_authenticate() + + redirect_url = None + + # Select the IdP URL to send the AuthN request to + for key, value in info['headers']: + if key is 
'Location': + redirect_url = value + + if redirect_url is None: + raise LoginError(401, "Unsuccessful SSO SAML2 redirect url response", + errcode=Codes.UNAUTHORIZED) + + relay_state = "/_matrix/client/r0/login" + if b"redirectUrl" in args: + relay_state = args[b"redirectUrl"][0] + + url_parts = list(urllib.parse.urlparse(redirect_url)) + query = dict(urllib.parse.parse_qsl(url_parts[4])) + query.update({"RelayState": relay_state}) + url_parts[4] = urllib.parse.urlencode(query) + + request.redirect(urllib.parse.urlunparse(url_parts)) + finish_request(request) + + class SSOAuthHandler(object): """ Utility class for Resources and Servlets which handle the response from a SSO @@ -549,3 +593,5 @@ def register_servlets(hs, http_server): if hs.config.cas_enabled: CasRedirectServlet(hs).register(http_server) CasTicketServlet(hs).register(http_server) + if hs.config.saml2_enabled: + SSORedirectServlet(hs).register(http_server) diff --git a/synapse/static/client/login/js/login.js b/synapse/static/client/login/js/login.js index e02663f50..9b9e73c41 100644 --- a/synapse/static/client/login/js/login.js +++ b/synapse/static/client/login/js/login.js @@ -56,6 +56,7 @@ var show_login = function() { } if (matrixLogin.serverAcceptsSso) { + $("#sso_form").attr("action", "/_matrix/client/r0/login/sso/redirect"); $("#sso_flow").show(); } else if (matrixLogin.serverAcceptsCas) { $("#sso_form").attr("action", "/_matrix/client/r0/login/cas/redirect"); @@ -79,7 +80,7 @@ var fetch_info = function(cb) { $.get(matrixLogin.endpoint, function(response) { var serverAcceptsPassword = false; var serverAcceptsCas = false; - for (var i=0; i Date: Tue, 11 Jun 2019 00:03:57 +0100 Subject: [PATCH 02/61] Code cleanups and simplifications. Also: share the saml client between redirect and response handlers. --- synapse/api/constants.py | 1 - synapse/config/saml2_config.py | 7 ++- synapse/handlers/auth.py | 3 - synapse/rest/client/v1/login.py | 83 ++++++++++++------------- synapse/rest/saml2/response_resource.py | 4 +- synapse/server.py | 5 ++ 6 files changed, 53 insertions(+), 50 deletions(-) diff --git a/synapse/api/constants.py b/synapse/api/constants.py index 744443404..ee129c868 100644 --- a/synapse/api/constants.py +++ b/synapse/api/constants.py @@ -57,7 +57,6 @@ class LoginType(object): EMAIL_IDENTITY = u"m.login.email.identity" MSISDN = u"m.login.msisdn" RECAPTCHA = u"m.login.recaptcha" - SSO = u"m.login.sso" TERMS = u"m.login.terms" DUMMY = u"m.login.dummy" diff --git a/synapse/config/saml2_config.py b/synapse/config/saml2_config.py index 60384d33f..a6ff62df0 100644 --- a/synapse/config/saml2_config.py +++ b/synapse/config/saml2_config.py @@ -12,6 +12,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from synapse.python_dependencies import DependencyException, check_requirements from ._base import Config, ConfigError @@ -25,6 +26,11 @@ class SAML2Config(Config): if not saml2_config or not saml2_config.get("enabled", True): return + try: + check_requirements('saml2') + except DependencyException as e: + raise ConfigError(e.message) + self.saml2_enabled = True import saml2.config @@ -75,7 +81,6 @@ class SAML2Config(Config): # override them. # #saml2_config: - # enabled: true # sp_config: # # point this to the IdP's metadata. You can use either a local file or # # (preferably) a URL. 
diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index cb22869e3..7f8ddc99c 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -767,9 +767,6 @@ class AuthHandler(BaseHandler): if canonical_user_id: defer.returnValue((canonical_user_id, None)) - if login_type == LoginType.SSO: - known_login_type = True - if not known_login_type: raise SynapseError(400, "Unknown login type %s" % login_type) diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py index 471971225..1a886cbbb 100644 --- a/synapse/rest/client/v1/login.py +++ b/synapse/rest/client/v1/login.py @@ -34,10 +34,6 @@ from synapse.rest.well_known import WellKnownBuilder from synapse.types import UserID, map_username_to_mxid_localpart from synapse.util.msisdn import phone_number_to_msisdn -import saml2 -from saml2.client import Saml2Client - - logger = logging.getLogger(__name__) @@ -378,28 +374,49 @@ class LoginRestServlet(RestServlet): defer.returnValue(result) -class CasRedirectServlet(RestServlet): +class BaseSsoRedirectServlet(RestServlet): + """Common base class for /login/sso/redirect impls""" PATTERNS = client_patterns("/login/(cas|sso)/redirect", v1=True) + def on_GET(self, request): + args = request.args + if b"redirectUrl" not in args: + return 400, "Redirect URL not specified for SSO auth" + client_redirect_url = args[b"redirectUrl"][0] + sso_url = self.get_sso_url(client_redirect_url) + request.redirect(sso_url) + finish_request(request) + + def get_sso_url(self, client_redirect_url): + """Get the URL to redirect to, to perform SSO auth + + Args: + client_redirect_url (bytes): the URL that we should redirect the + client to when everything is done + + Returns: + bytes: URL to redirect to + """ + # to be implemented by subclasses + raise NotImplementedError() + + +class CasRedirectServlet(RestServlet): def __init__(self, hs): super(CasRedirectServlet, self).__init__() self.cas_server_url = hs.config.cas_server_url.encode('ascii') self.cas_service_url = hs.config.cas_service_url.encode('ascii') - def on_GET(self, request): - args = request.args - if b"redirectUrl" not in args: - return (400, "Redirect URL not specified for CAS auth") + def get_sso_url(self, client_redirect_url): client_redirect_url_param = urllib.parse.urlencode({ - b"redirectUrl": args[b"redirectUrl"][0] + b"redirectUrl": client_redirect_url }).encode('ascii') hs_redirect_url = (self.cas_service_url + b"/_matrix/client/r0/login/cas/ticket") service_param = urllib.parse.urlencode({ b"service": b"%s?%s" % (hs_redirect_url, client_redirect_url_param) }).encode('ascii') - request.redirect(b"%s/login?%s" % (self.cas_server_url, service_param)) - finish_request(request) + return b"%s/login?%s" % (self.cas_server_url, service_param) class CasTicketServlet(RestServlet): @@ -482,41 +499,23 @@ class CasTicketServlet(RestServlet): return user, attributes -class SSORedirectServlet(RestServlet): +class SAMLRedirectServlet(BaseSsoRedirectServlet): PATTERNS = client_patterns("/login/sso/redirect", v1=True) def __init__(self, hs): - super(SSORedirectServlet, self).__init__() - self.saml2_sp_config = hs.config.saml2_sp_config - - def on_GET(self, request): - args = request.args - - saml_client = Saml2Client(self.saml2_sp_config) - reqid, info = saml_client.prepare_for_authenticate() + self._saml_client = hs.get_saml_client() - redirect_url = None + def get_sso_url(self, client_redirect_url): + reqid, info = self._saml_client.prepare_for_authenticate( + relay_state=client_redirect_url, + ) - # Select the IdP URL to 
send the AuthN request to for key, value in info['headers']: - if key is 'Location': - redirect_url = value - - if redirect_url is None: - raise LoginError(401, "Unsuccessful SSO SAML2 redirect url response", - errcode=Codes.UNAUTHORIZED) - - relay_state = "/_matrix/client/r0/login" - if b"redirectUrl" in args: - relay_state = args[b"redirectUrl"][0] + if key == 'Location': + return value - url_parts = list(urllib.parse.urlparse(redirect_url)) - query = dict(urllib.parse.parse_qsl(url_parts[4])) - query.update({"RelayState": relay_state}) - url_parts[4] = urllib.parse.urlencode(query) - - request.redirect(urllib.parse.urlunparse(url_parts)) - finish_request(request) + # this shouldn't happen! + raise Exception("prepare_for_authenticate didn't return a Location header") class SSOAuthHandler(object): @@ -594,5 +593,5 @@ def register_servlets(hs, http_server): if hs.config.cas_enabled: CasRedirectServlet(hs).register(http_server) CasTicketServlet(hs).register(http_server) - if hs.config.saml2_enabled: - SSORedirectServlet(hs).register(http_server) + elif hs.config.saml2_enabled: + SAMLRedirectServlet(hs).register(http_server) diff --git a/synapse/rest/saml2/response_resource.py b/synapse/rest/saml2/response_resource.py index 69fb77b32..36ca1333a 100644 --- a/synapse/rest/saml2/response_resource.py +++ b/synapse/rest/saml2/response_resource.py @@ -16,7 +16,6 @@ import logging import saml2 -from saml2.client import Saml2Client from twisted.web.resource import Resource from twisted.web.server import NOT_DONE_YET @@ -36,8 +35,7 @@ class SAML2ResponseResource(Resource): def __init__(self, hs): Resource.__init__(self) - - self._saml_client = Saml2Client(hs.config.saml2_sp_config) + self._saml_client = hs.get_saml_client() self._sso_auth_handler = SSOAuthHandler(hs) def render_POST(self, request): diff --git a/synapse/server.py b/synapse/server.py index 9229a68a8..0eb896867 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -189,6 +189,7 @@ class HomeServer(object): 'registration_handler', 'account_validity_handler', 'event_client_serializer', + 'saml_client', ] REQUIRED_ON_MASTER_STARTUP = [ @@ -522,6 +523,10 @@ class HomeServer(object): def build_event_client_serializer(self): return EventClientSerializer(self) + def build_saml_client(self): + from saml2.client import Saml2Client + return Saml2Client(self.config.saml2_sp_config) + def remove_pusher(self, app_id, push_key, user_id): return self.get_pusherpool().remove_pusher(app_id, push_key, user_id) From 880005c3b794b2bcce41f9fde1ced49eb1ffc67d Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 11 Jun 2019 00:40:31 +0100 Subject: [PATCH 03/61] changelog --- changelog.d/5422.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/5422.feature diff --git a/changelog.d/5422.feature b/changelog.d/5422.feature new file mode 100644 index 000000000..02ac25a2f --- /dev/null +++ b/changelog.d/5422.feature @@ -0,0 +1 @@ +Fully support SAML2 authentication. Contributed by [Alexander Trost](https://github.com/galexrt) - thank you! From 8181e290a90bc7b7f950fb639b38d0212dca87da Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 21 Jun 2019 11:10:27 +0100 Subject: [PATCH 04/61] Fix sync tightloop bug. If, for some reason, presence updates take a while to persist then it can trigger clients to tightloop calling `/sync` due to the presence handler returning updates but not advancing the stream token. Fixes #5503. 
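
In other words, once the client's `since` token is already at the head of the
presence stream, there is nothing new to return, and the event source must hand
the same token straight back rather than emit "updates" against it. A
simplified, synchronous sketch of the guard (the real change, in
PresenceEventSource below, uses defer.returnValue):

    max_token = store.get_current_presence_token()
    if from_key == max_token:
        # no progress past the client's token is possible; return it
        # unchanged so the client does not immediately re-poll /sync
        return [], max_token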
--- synapse/handlers/presence.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index 5204073a3..3edd35998 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -1017,11 +1017,21 @@ class PresenceEventSource(object): if from_key is not None: from_key = int(from_key) + max_token = self.store.get_current_presence_token() + if from_key == max_token: + # This is necessary as due to the way stream ID generators work + # we may get updates that have a stream ID greater than the max + # token. This is usually fine, as it just means that we may send + # down some presence updates multiple times. However, we need to + # be careful that the sync stream actually does make some + # progress, otherwise clients will end up tight looping calling + # /sync due to it returning the same token repeatedly. Hence + # this guard. C.f. #5503. + defer.returnValue(([], max_token)) + presence = self.get_presence_handler() stream_change_cache = self.store.presence_stream_cache - max_token = self.store.get_current_presence_token() - users_interested_in = yield self._get_interested_in(user, explicit_room_id) user_ids_changed = set() From 8d452e0ca574ba466a6cf7eb78ca6fdb8acebb7e Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 21 Jun 2019 11:13:13 +0100 Subject: [PATCH 05/61] Newsfile --- changelog.d/5507.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/5507.bugfix diff --git a/changelog.d/5507.bugfix b/changelog.d/5507.bugfix new file mode 100644 index 000000000..70452aa14 --- /dev/null +++ b/changelog.d/5507.bugfix @@ -0,0 +1 @@ +Fix bug where clients could tight loop calling `/sync` for a period. From 1c4a38e3774a597e3e0c77dc05b05cbe3e6bc117 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Tue, 25 Jun 2019 12:20:37 +0100 Subject: [PATCH 06/61] Update SUPPORT.md --- .github/SUPPORT.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/SUPPORT.md b/.github/SUPPORT.md index 7a4244f67..fad2dd2ef 100644 --- a/.github/SUPPORT.md +++ b/.github/SUPPORT.md @@ -1,3 +1,3 @@ -[**#matrix:matrix.org**](https://matrix.to/#/#matrix:matrix.org) is the official support room for Matrix, and can be accessed by any client from https://matrix.org/docs/projects/try-matrix-now.html - -It can also be access via IRC bridge at irc://irc.freenode.net/matrix or on the web here: https://webchat.freenode.net/?channels=matrix +[**#synapse:matrix.org**](https://matrix.to/#/#synapse:matrix.org) is the official support room for +Synapse, and can be accessed by any client from https://matrix.org/docs/projects/try-matrix-now.html. +Please ask for support there, rather than filing github issues. 
From 62e361a90f77d5f56aed31dc2d9c8442c8e65d99 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Tue, 25 Jun 2019 12:16:09 +0100 Subject: [PATCH 07/61] Update github templates --- .github/ISSUE_TEMPLATE/SUPPORT_REQUEST.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/SUPPORT_REQUEST.md b/.github/ISSUE_TEMPLATE/SUPPORT_REQUEST.md index 77581596c..64c06fe3c 100644 --- a/.github/ISSUE_TEMPLATE/SUPPORT_REQUEST.md +++ b/.github/ISSUE_TEMPLATE/SUPPORT_REQUEST.md @@ -4,6 +4,7 @@ about: I need support for Synapse --- -# Please ask for support in [**#matrix:matrix.org**](https://matrix.to/#/#matrix:matrix.org) +Please don't file github issues asking for support. -## Don't file an issue as a support request. +Instead, please join [`#synapse:matrix.org`](https://matrix.to/#/#synapse:matrix.org) +(from a matrix.org account if necessary), and ask there. From a5222b386e0ce24264c01955757bc7771451d3f7 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 25 Jun 2019 12:24:47 +0100 Subject: [PATCH 08/61] changelog --- changelog.d/5552.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/5552.misc diff --git a/changelog.d/5552.misc b/changelog.d/5552.misc new file mode 100644 index 000000000..77f480e88 --- /dev/null +++ b/changelog.d/5552.misc @@ -0,0 +1 @@ +Update github templates. From a2f6d31a63012531935566e380dfb5edd81dcbd0 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 26 Jun 2019 11:56:52 +0100 Subject: [PATCH 09/61] Refactor get_user_ids_changed to pull less from DB When a client asks for users whose devices have changed since a token we used to pull *all* users from the database since the token, which could easily be thousands of rows for old tokens. This PR changes this to only check for changes for users the client is actually interested in. Fixes #5553 --- synapse/handlers/device.py | 12 ++++----- synapse/handlers/sync.py | 22 ++++++++-------- synapse/storage/devices.py | 51 ++++++++++++++++++++++++++++++-------- 3 files changed, 57 insertions(+), 28 deletions(-) diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index f59d0479b..2b6c2117f 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -101,9 +101,13 @@ class DeviceWorkerHandler(BaseHandler): room_ids = yield self.store.get_rooms_for_user(user_id) - # First we check if any devices have changed + # First we check if any devices have changed for users that we share + # rooms with. 
+ users_who_share_room = yield self.store.get_users_who_share_room_with_user( + user_id + ) changed = yield self.store.get_user_whose_devices_changed( - from_token.device_list_key + from_token.device_list_key, users_who_share_room ) # Then work out if any users have since joined @@ -188,10 +192,6 @@ class DeviceWorkerHandler(BaseHandler): break if possibly_changed or possibly_left: - users_who_share_room = yield self.store.get_users_who_share_room_with_user( - user_id - ) - # Take the intersection of the users whose devices may have changed # and those that actually still share a room with the user possibly_joined = possibly_changed & users_who_share_room diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index c5188a1f8..8249e75ec 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -1062,10 +1062,6 @@ class SyncHandler(object): since_token = sync_result_builder.since_token if since_token and since_token.device_list_key: - changed = yield self.store.get_user_whose_devices_changed( - since_token.device_list_key - ) - # TODO: Be more clever than this, i.e. remove users who we already # share a room with? for room_id in newly_joined_rooms: @@ -1076,21 +1072,23 @@ class SyncHandler(object): left_users = yield self.state.get_current_users_in_room(room_id) newly_left_users.update(left_users) + users_who_share_room = yield self.store.get_users_who_share_room_with_user( + user_id + ) + # TODO: Check that these users are actually new, i.e. either they # weren't in the previous sync *or* they left and rejoined. - changed.update(newly_joined_or_invited_users) - - if not changed and not newly_left_users: - defer.returnValue(DeviceLists(changed=[], left=newly_left_users)) + changed = users_who_share_room & set(newly_joined_or_invited_users) - users_who_share_room = yield self.store.get_users_who_share_room_with_user( - user_id + changed_users = yield self.store.get_user_whose_devices_changed( + since_token.device_list_key, users_who_share_room ) + changed.update(changed_users) + defer.returnValue( DeviceLists( - changed=users_who_share_room & changed, - left=set(newly_left_users) - users_who_share_room, + changed=changed, left=set(newly_left_users) - users_who_share_room ) ) else: diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py index 3413a4667..3af0171f7 100644 --- a/synapse/storage/devices.py +++ b/synapse/storage/devices.py @@ -391,22 +391,53 @@ class DeviceWorkerStore(SQLBaseStore): return now_stream_id, [] - @defer.inlineCallbacks - def get_user_whose_devices_changed(self, from_key): - """Get set of users whose devices have changed since `from_key`. + def get_user_whose_devices_changed(self, from_key, user_ids): + """Get set of users whose devices have changed since `from_key` that + are in the given list of user_ids. + + Args: + user_ids (Iterable[str]) + from_key: The device lists stream token + + Returns: + Deferred[set[str]]: The set of user_ids whose devices have changed + since `from_key` """ from_key = int(from_key) - changed = self._device_list_stream_cache.get_all_entities_changed(from_key) - if changed is not None: - defer.returnValue(set(changed)) + + # Get set of users who *may* have changed. Users not in the returned + # list have definitely not changed. + to_check = list( + self._device_list_stream_cache.get_entities_changed(user_ids, from_key) + ) + + if not to_check: + return defer.succeed(set()) + + # We now check the database for all users in `to_check`, in batches. 
+ batch_size = 100 + chunks = [ + to_check[i : i + batch_size] for i in range(0, len(to_check), batch_size) + ] sql = """ - SELECT DISTINCT user_id FROM device_lists_stream WHERE stream_id > ? + SELECT DISTINCT user_id FROM device_lists_stream + WHERE stream_id > ? + AND user_id IN (%s) """ - rows = yield self._execute( - "get_user_whose_devices_changed", None, sql, from_key + + def _get_user_whose_devices_changed_txn(txn): + changes = set() + + for chunk in chunks: + txn.execute(sql % (",".join("?" for _ in chunk),), [from_key] + chunk) + changes.update(user_id for user_id, in txn) + + return changes + + return self.runInteraction( + "get_user_whose_devices_changed", _get_user_whose_devices_changed_txn ) - defer.returnValue(set(row[0] for row in rows)) def get_all_device_list_changes_for_remotes(self, from_key, to_key): """Return a list of `(stream_id, user_id, destination)` which is the From 508c3ce3d71b4711f6e50e7f1e74d71cb46f61d9 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 26 Jun 2019 12:03:49 +0100 Subject: [PATCH 10/61] Newsfile --- changelog.d/5559.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/5559.misc diff --git a/changelog.d/5559.misc b/changelog.d/5559.misc new file mode 100644 index 000000000..b77b38345 --- /dev/null +++ b/changelog.d/5559.misc @@ -0,0 +1 @@ +Optimise devices changed query to not pull unnecessary rows from the database, reducing database load. From 806a06daf2b30691c2c69e32d1ff2e104436bbc4 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 26 Jun 2019 19:09:10 +0100 Subject: [PATCH 11/61] Rename get_users_whose_devices_changed --- synapse/handlers/device.py | 2 +- synapse/handlers/sync.py | 2 +- synapse/storage/devices.py | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index 2b6c2117f..99e841309 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -106,7 +106,7 @@ class DeviceWorkerHandler(BaseHandler): users_who_share_room = yield self.store.get_users_who_share_room_with_user( user_id ) - changed = yield self.store.get_user_whose_devices_changed( + changed = yield self.store.get_users_whose_devices_changed( from_token.device_list_key, users_who_share_room ) diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 8249e75ec..f70ebfdee 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -1080,7 +1080,7 @@ class SyncHandler(object): # weren't in the previous sync *or* they left and rejoined. changed = users_who_share_room & set(newly_joined_or_invited_users) - changed_users = yield self.store.get_user_whose_devices_changed( + changed_users = yield self.store.get_users_whose_devices_changed( since_token.device_list_key, users_who_share_room ) diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py index 3af0171f7..97f6cd275 100644 --- a/synapse/storage/devices.py +++ b/synapse/storage/devices.py @@ -391,7 +391,7 @@ class DeviceWorkerStore(SQLBaseStore): return now_stream_id, [] - def get_user_whose_devices_changed(self, from_key, user_ids): + def get_users_whose_devices_changed(self, from_key, user_ids): """Get set of users whose devices have changed since `from_key` that are in the given list of user_ids. 
@@ -426,7 +426,7 @@ class DeviceWorkerStore(SQLBaseStore): AND user_id IN (%s) """ - def _get_user_whose_devices_changed_txn(txn): + def _get_users_whose_devices_changed_txn(txn): changes = set() for chunk in chunks: @@ -436,7 +436,7 @@ class DeviceWorkerStore(SQLBaseStore): return changes return self.runInteraction( - "get_user_whose_devices_changed", _get_user_whose_devices_changed_txn + "get_users_whose_devices_changed", _get_users_whose_devices_changed_txn ) def get_all_device_list_changes_for_remotes(self, from_key, to_key): From f335e77d5330d13cbaf61b7b903980bae60761d7 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 26 Jun 2019 19:10:38 +0100 Subject: [PATCH 12/61] Use batch_iter and correct docstring --- synapse/storage/devices.py | 27 ++++++++++++--------------- 1 file changed, 12 insertions(+), 15 deletions(-) diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py index 97f6cd275..44324bf40 100644 --- a/synapse/storage/devices.py +++ b/synapse/storage/devices.py @@ -24,6 +24,7 @@ from synapse.api.errors import StoreError from synapse.metrics.background_process_metrics import run_as_background_process from synapse.storage._base import Cache, SQLBaseStore, db_to_json from synapse.storage.background_updates import BackgroundUpdateStore +from synapse.util import batch_iter from synapse.util.caches.descriptors import cached, cachedInlineCallbacks, cachedList logger = logging.getLogger(__name__) @@ -396,8 +397,8 @@ class DeviceWorkerStore(SQLBaseStore): are in the given list of user_ids. Args: + from_key (str): The device lists stream token user_ids (Iterable[str]) - from_key: The device lists stream token Returns: Deferred[set[str]]: The set of user_ids whose devices have changed @@ -414,23 +415,19 @@ class DeviceWorkerStore(SQLBaseStore): if not to_check: return defer.succeed(set()) - # We now check the database for all users in `to_check`, in batches. - batch_size = 100 - chunks = [ - to_check[i : i + batch_size] for i in range(0, len(to_check), batch_size) - ] - - sql = """ - SELECT DISTINCT user_id FROM device_lists_stream - WHERE stream_id > ? - AND user_id IN (%s) - """ - def _get_users_whose_devices_changed_txn(txn): changes = set() - for chunk in chunks: - txn.execute(sql % (",".join("?" for _ in chunk),), [from_key] + chunk) + sql = """ + SELECT DISTINCT user_id FROM device_lists_stream + WHERE stream_id > ? + AND user_id IN (%s) + """ + + for chunk in batch_iter(to_check, 100): + txn.execute( + sql % (",".join("?" for _ in chunk),), [from_key] + list(chunk) + ) changes.update(user_id for user_id, in txn) return changes From 8624db3194789cdc98e4d2a8e0da324609497fb6 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 26 Jun 2019 19:30:35 +0100 Subject: [PATCH 13/61] Refactor and comment sync device list code --- synapse/handlers/sync.py | 70 ++++++++++++++++++++++++++++++---------- 1 file changed, 53 insertions(+), 17 deletions(-) diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index f70ebfdee..4f737d0a1 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -1058,38 +1058,74 @@ class SyncHandler(object): newly_left_rooms, newly_left_users, ): + """Generate the DeviceLists section of sync + + Args: + sync_result_builder (SyncResultBuilder) + newly_joined_rooms (set[str]): Set of rooms user has joined since + previous sync + newly_joined_or_invited_users (set[str]): Set of users that have + joined or been invited to a room since previous sync. 
+ newly_left_rooms (set[str]): Set of rooms user has left since + previous sync + newly_left_users (set[str]): Set of users that have left a room + we're in since previous sync + + Returns: + Deferred[DeviceLists] + """ + user_id = sync_result_builder.sync_config.user.to_string() since_token = sync_result_builder.since_token - if since_token and since_token.device_list_key: - # TODO: Be more clever than this, i.e. remove users who we already - # share a room with? - for room_id in newly_joined_rooms: - joined_users = yield self.state.get_current_users_in_room(room_id) - newly_joined_or_invited_users.update(joined_users) + # We're going to mutate these fields, so lets copy them rather than + # assume they won't get used later. + newly_joined_or_invited_users = set(newly_joined_or_invited_users) + newly_left_users = set(newly_left_users) - for room_id in newly_left_rooms: - left_users = yield self.state.get_current_users_in_room(room_id) - newly_left_users.update(left_users) + if since_token and since_token.device_list_key: + # We want to figure out what user IDs the client should refetch + # device keys for, and which users we aren't going to track changes + # for anymore. + # + # For the first step we check: + # 1. if any users we share a room with have updated their devices, + # and + # 2. we also check if we've joined any new rooms, or if a user has + # joined a room we're in. + # + # For the second step we just find any users we no longer share a + # room with by looking at all users that have left a room plus users + # that were in a room we've left. users_who_share_room = yield self.store.get_users_who_share_room_with_user( user_id ) + # Step 1, check for changes in devices of users we share a room with + users_that_have_changed = yield self.store.get_users_whose_devices_changed( + since_token.device_list_key, users_who_share_room + ) + + # Step 2, check for newly joined rooms + for room_id in newly_joined_rooms: + joined_users = yield self.state.get_current_users_in_room(room_id) + newly_joined_or_invited_users.update(joined_users) + # TODO: Check that these users are actually new, i.e. either they # weren't in the previous sync *or* they left and rejoined. - changed = users_who_share_room & set(newly_joined_or_invited_users) + users_that_have_changed.update(newly_joined_or_invited_users) - changed_users = yield self.store.get_users_whose_devices_changed( - since_token.device_list_key, users_who_share_room - ) + # Now find users that we no longer track + for room_id in newly_left_rooms: + left_users = yield self.state.get_current_users_in_room(room_id) + newly_left_users.update(left_users) - changed.update(changed_users) + # Remove any users that we still share a room with. 
+ newly_left_users -= users_who_share_room defer.returnValue( - DeviceLists( - changed=changed, left=set(newly_left_users) - users_who_share_room - ) + DeviceLists(changed=users_that_have_changed, left=newly_left_users) ) else: defer.returnValue(DeviceLists(changed=[], left=[])) From 82028d723b1533832c22b2acced1ff5d1a0fb51a Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 26 Jun 2019 19:33:11 +0100 Subject: [PATCH 14/61] Move changelog --- changelog.d/{5559.misc => 5559.feature} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename changelog.d/{5559.misc => 5559.feature} (100%) diff --git a/changelog.d/5559.misc b/changelog.d/5559.feature similarity index 100% rename from changelog.d/5559.misc rename to changelog.d/5559.feature From 0ade403f55dd9f6cab90e1e4f0a9241940a437f2 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 26 Jun 2019 22:46:23 +0100 Subject: [PATCH 15/61] Revert redundant changes to static js --- synapse/static/client/login/js/login.js | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/synapse/static/client/login/js/login.js b/synapse/static/client/login/js/login.js index 9b9e73c41..e02663f50 100644 --- a/synapse/static/client/login/js/login.js +++ b/synapse/static/client/login/js/login.js @@ -56,7 +56,6 @@ var show_login = function() { } if (matrixLogin.serverAcceptsSso) { - $("#sso_form").attr("action", "/_matrix/client/r0/login/sso/redirect"); $("#sso_flow").show(); } else if (matrixLogin.serverAcceptsCas) { $("#sso_form").attr("action", "/_matrix/client/r0/login/cas/redirect"); @@ -80,7 +79,7 @@ var fetch_info = function(cb) { $.get(matrixLogin.endpoint, function(response) { var serverAcceptsPassword = false; var serverAcceptsCas = false; - for (var i = 0; i < response.flows.length; i++) { + for (var i=0; i Date: Wed, 26 Jun 2019 22:52:02 +0100 Subject: [PATCH 16/61] Move all the saml stuff out to a centralised handler --- synapse/handlers/saml2_handler.py | 86 +++++++++++++++++++++++++ synapse/rest/client/v1/login.py | 13 +--- synapse/rest/saml2/response_resource.py | 35 +--------- synapse/server.py | 12 ++-- 4 files changed, 96 insertions(+), 50 deletions(-) create mode 100644 synapse/handlers/saml2_handler.py diff --git a/synapse/handlers/saml2_handler.py b/synapse/handlers/saml2_handler.py new file mode 100644 index 000000000..880e6a625 --- /dev/null +++ b/synapse/handlers/saml2_handler.py @@ -0,0 +1,86 @@ +# -*- coding: utf-8 -*- +# Copyright 2019 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import logging + +import saml2 +from saml2.client import Saml2Client + +from synapse.api.errors import CodeMessageException +from synapse.http.servlet import parse_string +from synapse.rest.client.v1.login import SSOAuthHandler + +logger = logging.getLogger(__name__) + + +class Saml2Handler: + def __init__(self, hs): + self._saml_client = Saml2Client(hs.config.saml2_sp_config) + self._sso_auth_handler = SSOAuthHandler(hs) + + def handle_redirect_request(self, client_redirect_url): + """Handle an incoming request to /login/sso/redirect + + Args: + client_redirect_url (bytes): the URL that we should redirect the + client to when everything is done + + Returns: + bytes: URL to redirect to + """ + reqid, info = self._saml_client.prepare_for_authenticate( + relay_state=client_redirect_url + ) + + for key, value in info["headers"]: + if key == "Location": + return value + + # this shouldn't happen! + raise Exception("prepare_for_authenticate didn't return a Location header") + + def handle_saml_response(self, request): + """Handle an incoming request to /_matrix/saml2/authn_response + + Args: + request (SynapseRequest): the incoming request from the browser. We'll + respond to it with a redirect. + + Returns: + Deferred[none]: Completes once we have handled the request. + """ + resp_bytes = parse_string(request, "SAMLResponse", required=True) + relay_state = parse_string(request, "RelayState", required=True) + + try: + saml2_auth = self._saml_client.parse_authn_request_response( + resp_bytes, saml2.BINDING_HTTP_POST + ) + except Exception as e: + logger.warning("Exception parsing SAML2 response", exc_info=1) + raise CodeMessageException(400, "Unable to parse SAML2 response: %s" % (e,)) + + if saml2_auth.not_signed: + raise CodeMessageException(400, "SAML2 response was not signed") + + if "uid" not in saml2_auth.ava: + raise CodeMessageException(400, "uid not in SAML2 response") + + username = saml2_auth.ava["uid"][0] + + displayName = saml2_auth.ava.get("displayName", [None])[0] + + return self._sso_auth_handler.on_successful_auth( + username, request, relay_state, user_display_name=displayName + ) diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py index a31d27793..b59aa3d5c 100644 --- a/synapse/rest/client/v1/login.py +++ b/synapse/rest/client/v1/login.py @@ -487,19 +487,10 @@ class SAMLRedirectServlet(BaseSsoRedirectServlet): PATTERNS = client_patterns("/login/sso/redirect", v1=True) def __init__(self, hs): - self._saml_client = hs.get_saml_client() + self._saml_handler = hs.get_saml_handler() def get_sso_url(self, client_redirect_url): - reqid, info = self._saml_client.prepare_for_authenticate( - relay_state=client_redirect_url - ) - - for key, value in info["headers"]: - if key == "Location": - return value - - # this shouldn't happen! - raise Exception("prepare_for_authenticate didn't return a Location header") + return self._saml_handler.handle_redirect_request(client_redirect_url) class SSOAuthHandler(object): diff --git a/synapse/rest/saml2/response_resource.py b/synapse/rest/saml2/response_resource.py index 9ec56d6ad..8ee22473e 100644 --- a/synapse/rest/saml2/response_resource.py +++ b/synapse/rest/saml2/response_resource.py @@ -13,19 +13,11 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-import logging - -import saml2 from twisted.web.resource import Resource from twisted.web.server import NOT_DONE_YET -from synapse.api.errors import CodeMessageException from synapse.http.server import wrap_html_request_handler -from synapse.http.servlet import parse_string -from synapse.rest.client.v1.login import SSOAuthHandler - -logger = logging.getLogger(__name__) class SAML2ResponseResource(Resource): @@ -35,8 +27,7 @@ class SAML2ResponseResource(Resource): def __init__(self, hs): Resource.__init__(self) - self._saml_client = hs.get_saml_client() - self._sso_auth_handler = SSOAuthHandler(hs) + self._saml_handler = hs.get_saml_handler() def render_POST(self, request): self._async_render_POST(request) @@ -44,26 +35,4 @@ class SAML2ResponseResource(Resource): @wrap_html_request_handler def _async_render_POST(self, request): - resp_bytes = parse_string(request, "SAMLResponse", required=True) - relay_state = parse_string(request, "RelayState", required=True) - - try: - saml2_auth = self._saml_client.parse_authn_request_response( - resp_bytes, saml2.BINDING_HTTP_POST - ) - except Exception as e: - logger.warning("Exception parsing SAML2 response", exc_info=1) - raise CodeMessageException(400, "Unable to parse SAML2 response: %s" % (e,)) - - if saml2_auth.not_signed: - raise CodeMessageException(400, "SAML2 response was not signed") - - if "uid" not in saml2_auth.ava: - raise CodeMessageException(400, "uid not in SAML2 response") - - username = saml2_auth.ava["uid"][0] - - displayName = saml2_auth.ava.get("displayName", [None])[0] - return self._sso_auth_handler.on_successful_auth( - username, request, relay_state, user_display_name=displayName - ) + return self._saml_handler.handle_saml_response(request) diff --git a/synapse/server.py b/synapse/server.py index dbb35c722..1bc8c08b5 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -194,8 +194,8 @@ class HomeServer(object): "sendmail", "registration_handler", "account_validity_handler", + "saml2_handler", "event_client_serializer", - "saml_client", ] REQUIRED_ON_MASTER_STARTUP = ["user_directory_handler", "stats_handler"] @@ -525,13 +525,13 @@ class HomeServer(object): def build_account_validity_handler(self): return AccountValidityHandler(self) - def build_event_client_serializer(self): - return EventClientSerializer(self) + def build_saml2_handler(self): + from synapse.handlers.saml2_handler import Saml2Handler - def build_saml_client(self): - from saml2.client import Saml2Client + return Saml2Handler(self) - return Saml2Client(self.config.saml2_sp_config) + def build_event_client_serializer(self): + return EventClientSerializer(self) def remove_pusher(self, app_id, push_key, user_id): return self.get_pusherpool().remove_pusher(app_id, push_key, user_id) From 36f4953dec97ec1650b7c0bb75905ed907a8cac1 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 26 Jun 2019 23:50:55 +0100 Subject: [PATCH 17/61] Add support for tracking SAML2 sessions. This allows us to correctly handle `allow_unsolicited: False`. 
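
Roughly: each time we redirect a user to the IdP, prepare_for_authenticate
returns a request id, which is now remembered along with its creation time;
when the IdP posts the response back it must match one of those outstanding
ids (unless allow_unsolicited is enabled), and entries older than
saml_session_lifetime are dropped first. A minimal sketch of that bookkeeping,
condensed from the handler changes below (module-level names stand in for the
handler's attributes):

    # on /login/sso/redirect: remember the outstanding request
    outstanding[reqid] = Saml2SessionData(creation_time=clock.time_msec())

    # on /_matrix/saml2/authn_response: expire stale sessions, then let
    # pysaml2 check the response against what is still outstanding
    expire_before = clock.time_msec() - saml2_session_lifetime
    for reqid in [r for r, data in outstanding.items()
                  if data.creation_time < expire_before]:
        del outstanding[reqid]
    saml2_auth = saml_client.parse_authn_request_response(
        resp_bytes, saml2.BINDING_HTTP_POST, outstanding=outstanding,
    )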
--- synapse/config/saml2_config.py | 20 +++++++++++++++- synapse/handlers/saml2_handler.py | 39 +++++++++++++++++++++++++++++-- 2 files changed, 56 insertions(+), 3 deletions(-) diff --git a/synapse/config/saml2_config.py b/synapse/config/saml2_config.py index 463b5fdd6..965a97837 100644 --- a/synapse/config/saml2_config.py +++ b/synapse/config/saml2_config.py @@ -27,7 +27,7 @@ class SAML2Config(Config): return try: - check_requirements('saml2') + check_requirements("saml2") except DependencyException as e: raise ConfigError(e.message) @@ -43,6 +43,11 @@ class SAML2Config(Config): if config_path is not None: self.saml2_sp_config.load_file(config_path) + # session lifetime: in milliseconds + self.saml2_session_lifetime = self.parse_duration( + saml2_config.get("saml_session_lifetime", "5m") + ) + def _default_saml_config_dict(self): import saml2 @@ -87,6 +92,13 @@ class SAML2Config(Config): # remote: # - url: https://our_idp/metadata.xml # + # # By default, the user has to go to our login page first. If you'd like to + # # allow IdP-initiated login, set 'allow_unsolicited: True' in an 'sp' + # # section: + # # + # #sp: + # # allow_unsolicited: True + # # # # The rest of sp_config is just used to generate our metadata xml, and you # # may well not need it, depending on your setup. Alternatively you # # may need a whole lot more detail - see the pysaml2 docs! @@ -110,6 +122,12 @@ class SAML2Config(Config): # # separate pysaml2 configuration file: # # # config_path: "%(config_dir_path)s/sp_conf.py" + # + # # the lifetime of a SAML session. This defines how long a user has to + # # complete the authentication process, if allow_unsolicited is unset. + # # The default is 5 minutes. + # # + # # saml_session_lifetime: 5m """ % { "config_dir_path": config_dir_path } diff --git a/synapse/handlers/saml2_handler.py b/synapse/handlers/saml2_handler.py index 880e6a625..b06d3f172 100644 --- a/synapse/handlers/saml2_handler.py +++ b/synapse/handlers/saml2_handler.py @@ -14,6 +14,7 @@ # limitations under the License. import logging +import attr import saml2 from saml2.client import Saml2Client @@ -29,6 +30,12 @@ class Saml2Handler: self._saml_client = Saml2Client(hs.config.saml2_sp_config) self._sso_auth_handler = SSOAuthHandler(hs) + # a map from saml session id to Saml2SessionData object + self._outstanding_requests_dict = {} + + self._clock = hs.get_clock() + self._saml2_session_lifetime = hs.config.saml2_session_lifetime + def handle_redirect_request(self, client_redirect_url): """Handle an incoming request to /login/sso/redirect @@ -43,6 +50,9 @@ class Saml2Handler: relay_state=client_redirect_url ) + now = self._clock.time_msec() + self._outstanding_requests_dict[reqid] = Saml2SessionData(creation_time=now) + for key, value in info["headers"]: if key == "Location": return value @@ -63,9 +73,15 @@ class Saml2Handler: resp_bytes = parse_string(request, "SAMLResponse", required=True) relay_state = parse_string(request, "RelayState", required=True) + # expire outstanding sessions before parse_authn_request_response checks + # the dict. 
+ self.expire_sessions() + try: saml2_auth = self._saml_client.parse_authn_request_response( - resp_bytes, saml2.BINDING_HTTP_POST + resp_bytes, + saml2.BINDING_HTTP_POST, + outstanding=self._outstanding_requests_dict, ) except Exception as e: logger.warning("Exception parsing SAML2 response", exc_info=1) @@ -77,10 +93,29 @@ class Saml2Handler: if "uid" not in saml2_auth.ava: raise CodeMessageException(400, "uid not in SAML2 response") - username = saml2_auth.ava["uid"][0] + self._outstanding_requests_dict.pop(saml2_auth.in_response_to, None) + username = saml2_auth.ava["uid"][0] displayName = saml2_auth.ava.get("displayName", [None])[0] return self._sso_auth_handler.on_successful_auth( username, request, relay_state, user_display_name=displayName ) + + def expire_sessions(self): + expire_before = self._clock.time_msec() - self._saml2_session_lifetime + to_expire = set() + for reqid, data in self._outstanding_requests_dict.items(): + if data.creation_time < expire_before: + to_expire.add(reqid) + for reqid in to_expire: + logger.debug("Expiring session id %s", reqid) + del self._outstanding_requests_dict[reqid] + + +@attr.s +class Saml2SessionData: + """Data we track about SAML2 sessions""" + + # time the session was created, in milliseconds + creation_time = attr.ib() From a0acfcc73eba167fa0cdbcb76d868340ce147aa4 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 26 Jun 2019 23:56:28 +0100 Subject: [PATCH 18/61] update sample config --- docs/sample_config.yaml | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index da10788e9..8d52d1779 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -997,6 +997,13 @@ signing_key_path: "CONFDIR/SERVERNAME.signing.key" # remote: # - url: https://our_idp/metadata.xml # +# # By default, the user has to go to our login page first. If you'd like to +# # allow IdP-initiated login, set 'allow_unsolicited: True' in an 'sp' +# # section: +# # +# #sp: +# # allow_unsolicited: True +# # # # The rest of sp_config is just used to generate our metadata xml, and you # # may well not need it, depending on your setup. Alternatively you # # may need a whole lot more detail - see the pysaml2 docs! @@ -1020,6 +1027,12 @@ signing_key_path: "CONFDIR/SERVERNAME.signing.key" # # separate pysaml2 configuration file: # # # config_path: "CONFDIR/sp_conf.py" +# +# # the lifetime of a SAML session. This defines how long a user has to +# # complete the authentication process, if allow_unsolicited is unset. +# # The default is 5 minutes. +# # +# # saml_session_lifetime: 5m From 28db0ae5377ca8e7133957008ba35b49432636ee Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 27 Jun 2019 00:37:41 +0100 Subject: [PATCH 19/61] cleanups --- synapse/config/saml2_config.py | 19 +++++++++++++------ .../{saml2_handler.py => saml_handler.py} | 2 +- synapse/server.py | 8 ++++---- 3 files changed, 18 insertions(+), 11 deletions(-) rename synapse/handlers/{saml2_handler.py => saml_handler.py} (99%) diff --git a/synapse/config/saml2_config.py b/synapse/config/saml2_config.py index 965a97837..6a8161547 100644 --- a/synapse/config/saml2_config.py +++ b/synapse/config/saml2_config.py @@ -83,6 +83,12 @@ class SAML2Config(Config): # so it is not normally necessary to specify them unless you need to # override them. # + # Once SAML support is enabled, a metadata file will be exposed at + # https://:/_matrix/saml2/metadata.xml, which you may be able to + # use to configure your SAML IdP with. 
Alternatively, you can manually configure + # the IdP to use an ACS location of + # https://:/_matrix/saml2/authn_response. + # #saml2_config: # sp_config: # # point this to the IdP's metadata. You can use either a local file or @@ -93,13 +99,14 @@ class SAML2Config(Config): # - url: https://our_idp/metadata.xml # # # By default, the user has to go to our login page first. If you'd like to - # # allow IdP-initiated login, set 'allow_unsolicited: True' in an 'sp' - # # section: - # # - # #sp: - # # allow_unsolicited: True + # # allow IdP-initiated login, set 'allow_unsolicited: True' in a + # # 'service.sp' section: # # - # # The rest of sp_config is just used to generate our metadata xml, and you + # #service: + # # sp: + # # allow_unsolicited: True + # + # # The examples below are just used to generate our metadata xml, and you # # may well not need it, depending on your setup. Alternatively you # # may need a whole lot more detail - see the pysaml2 docs! # diff --git a/synapse/handlers/saml2_handler.py b/synapse/handlers/saml_handler.py similarity index 99% rename from synapse/handlers/saml2_handler.py rename to synapse/handlers/saml_handler.py index b06d3f172..03a0ac438 100644 --- a/synapse/handlers/saml2_handler.py +++ b/synapse/handlers/saml_handler.py @@ -25,7 +25,7 @@ from synapse.rest.client.v1.login import SSOAuthHandler logger = logging.getLogger(__name__) -class Saml2Handler: +class SamlHandler: def __init__(self, hs): self._saml_client = Saml2Client(hs.config.saml2_sp_config) self._sso_auth_handler = SSOAuthHandler(hs) diff --git a/synapse/server.py b/synapse/server.py index 1bc8c08b5..9e28dba2b 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -194,7 +194,7 @@ class HomeServer(object): "sendmail", "registration_handler", "account_validity_handler", - "saml2_handler", + "saml_handler", "event_client_serializer", ] @@ -525,10 +525,10 @@ class HomeServer(object): def build_account_validity_handler(self): return AccountValidityHandler(self) - def build_saml2_handler(self): - from synapse.handlers.saml2_handler import Saml2Handler + def build_saml_handler(self): + from synapse.handlers.saml_handler import SamlHandler - return Saml2Handler(self) + return SamlHandler(self) def build_event_client_serializer(self): return EventClientSerializer(self) From dde41183417fedf28f47af1fed310790de11c661 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 27 Jun 2019 00:41:04 +0100 Subject: [PATCH 20/61] update sample config --- docs/sample_config.yaml | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index 8d52d1779..5558b3e2b 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -988,6 +988,12 @@ signing_key_path: "CONFDIR/SERVERNAME.signing.key" # so it is not normally necessary to specify them unless you need to # override them. # +# Once SAML support is enabled, a metadata file will be exposed at +# https://:/_matrix/saml2/metadata.xml, which you may be able to +# use to configure your SAML IdP with. Alternatively, you can manually configure +# the IdP to use an ACS location of +# https://:/_matrix/saml2/authn_response. +# #saml2_config: # sp_config: # # point this to the IdP's metadata. You can use either a local file or @@ -998,13 +1004,14 @@ signing_key_path: "CONFDIR/SERVERNAME.signing.key" # - url: https://our_idp/metadata.xml # # # By default, the user has to go to our login page first. 
If you'd like to -# # allow IdP-initiated login, set 'allow_unsolicited: True' in an 'sp' -# # section: -# # -# #sp: -# # allow_unsolicited: True +# # allow IdP-initiated login, set 'allow_unsolicited: True' in a +# # 'service.sp' section: # # -# # The rest of sp_config is just used to generate our metadata xml, and you +# #service: +# # sp: +# # allow_unsolicited: True +# +# # The examples below are just used to generate our metadata xml, and you # # may well not need it, depending on your setup. Alternatively you # # may need a whole lot more detail - see the pysaml2 docs! # From 79b9d9076dc37c259fe1682878ef5ebd8a73f1d4 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 27 Jun 2019 00:46:57 +0100 Subject: [PATCH 21/61] rename BaseSSORedirectServlet for consistency --- synapse/rest/client/v1/login.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py index b59aa3d5c..18aa35a7f 100644 --- a/synapse/rest/client/v1/login.py +++ b/synapse/rest/client/v1/login.py @@ -358,7 +358,7 @@ class LoginRestServlet(RestServlet): defer.returnValue(result) -class BaseSsoRedirectServlet(RestServlet): +class BaseSSORedirectServlet(RestServlet): """Common base class for /login/sso/redirect impls""" PATTERNS = client_patterns("/login/(cas|sso)/redirect", v1=True) @@ -386,7 +386,7 @@ class BaseSsoRedirectServlet(RestServlet): raise NotImplementedError() -class CasRedirectServlet(BaseSsoRedirectServlet): +class CasRedirectServlet(BaseSSORedirectServlet): def __init__(self, hs): super(CasRedirectServlet, self).__init__() self.cas_server_url = hs.config.cas_server_url.encode("ascii") @@ -483,7 +483,7 @@ class CasTicketServlet(RestServlet): return user, attributes -class SAMLRedirectServlet(BaseSsoRedirectServlet): +class SAMLRedirectServlet(BaseSSORedirectServlet): PATTERNS = client_patterns("/login/sso/redirect", v1=True) def __init__(self, hs): From 729f5a4fb6654e6c9beb68a3edbb8dbbae076e3f Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 27 Jun 2019 16:06:23 +0100 Subject: [PATCH 22/61] Review comments --- synapse/handlers/sync.py | 8 ++++---- synapse/storage/devices.py | 4 +--- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 4f737d0a1..a3f550554 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -1089,9 +1089,9 @@ class SyncHandler(object): # for anymore. # # For the first step we check: - # 1. if any users we share a room with have updated their devices, + # a. if any users we share a room with have updated their devices, # and - # 2. we also check if we've joined any new rooms, or if a user has + # b. we also check if we've joined any new rooms, or if a user has # joined a room we're in. 
# # For the second step we just find any users we no longer share a @@ -1102,12 +1102,12 @@ class SyncHandler(object): user_id ) - # Step 1, check for changes in devices of users we share a room with + # Step 1a, check for changes in devices of users we share a room with users_that_have_changed = yield self.store.get_users_whose_devices_changed( since_token.device_list_key, users_who_share_room ) - # Step 2, check for newly joined rooms + # Step 1b, check for newly joined rooms for room_id in newly_joined_rooms: joined_users = yield self.state.get_current_users_in_room(room_id) newly_joined_or_invited_users.update(joined_users) diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py index 44324bf40..d2b113a4e 100644 --- a/synapse/storage/devices.py +++ b/synapse/storage/devices.py @@ -425,9 +425,7 @@ class DeviceWorkerStore(SQLBaseStore): """ for chunk in batch_iter(to_check, 100): - txn.execute( - sql % (",".join("?" for _ in chunk),), [from_key] + list(chunk) - ) + txn.execute(sql % (",".join("?" for _ in chunk),), (from_key,) + chunk) changes.update(user_id for user_id, in txn) return changes From c548dbc4b16a8bff6226fa4a6d7c86180c4f5704 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Thu, 27 Jun 2019 18:20:17 +0100 Subject: [PATCH 23/61] Make it clearer that the template dir is relative to synapse's root dir (#5543) Helps address #5444 --- changelog.d/5543.misc | 1 + docs/sample_config.yaml | 10 +++++++++- synapse/config/emailconfig.py | 10 +++++++++- 3 files changed, 19 insertions(+), 2 deletions(-) create mode 100644 changelog.d/5543.misc diff --git a/changelog.d/5543.misc b/changelog.d/5543.misc new file mode 100644 index 000000000..793620a73 --- /dev/null +++ b/changelog.d/5543.misc @@ -0,0 +1 @@ +Make the config clearer in that email.template_dir is relative to the Synapse's root directory, not the `synapse/` folder within it. diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index da10788e9..18be376e1 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -1070,11 +1070,13 @@ password_config: # app_name: Matrix # # # Enable email notifications by default +# # # notif_for_new_users: True # # # Defining a custom URL for Riot is only needed if email notifications # # should contain links to a self-hosted installation of Riot; when set # # the "app_name" setting is ignored +# # # riot_base_url: "http://localhost/riot" # # # Enable sending password reset emails via the configured, trusted @@ -1087,16 +1089,22 @@ password_config: # # # # If this option is set to false and SMTP options have not been # # configured, resetting user passwords via email will be disabled +# # # #trust_identity_server_for_password_resets: false # # # Configure the time that a validation email or text message code # # will expire after sending # # # # This is currently used for password resets +# # # #validation_token_lifetime: 1h # # # Template directory. All template files should be stored within this -# # directory +# # directory. 
If not set, default templates from within the Synapse +# # package will be used +# # +# # For the list of default templates, please see +# # https://github.com/matrix-org/synapse/tree/master/synapse/res/templates # # # #template_dir: res/templates # diff --git a/synapse/config/emailconfig.py b/synapse/config/emailconfig.py index cf39936da..fcd55d3e3 100644 --- a/synapse/config/emailconfig.py +++ b/synapse/config/emailconfig.py @@ -233,11 +233,13 @@ class EmailConfig(Config): # app_name: Matrix # # # Enable email notifications by default + # # # notif_for_new_users: True # # # Defining a custom URL for Riot is only needed if email notifications # # should contain links to a self-hosted installation of Riot; when set # # the "app_name" setting is ignored + # # # riot_base_url: "http://localhost/riot" # # # Enable sending password reset emails via the configured, trusted @@ -250,16 +252,22 @@ class EmailConfig(Config): # # # # If this option is set to false and SMTP options have not been # # configured, resetting user passwords via email will be disabled + # # # #trust_identity_server_for_password_resets: false # # # Configure the time that a validation email or text message code # # will expire after sending # # # # This is currently used for password resets + # # # #validation_token_lifetime: 1h # # # Template directory. All template files should be stored within this - # # directory + # # directory. If not set, default templates from within the Synapse + # # package will be used + # # + # # For the list of default templates, please see + # # https://github.com/matrix-org/synapse/tree/master/synapse/res/templates # # # #template_dir: res/templates # From 457b8e4c4d15f43f9f16b22fdfa79d5b2017e5cd Mon Sep 17 00:00:00 2001 From: Silke Hofstra Date: Thu, 27 Jun 2019 19:26:41 +0200 Subject: [PATCH 24/61] Include systemd-python in Debian package to allow logging to journal (#5261) Signed-off-by: Silke Hofstra --- debian/build_virtualenv | 2 +- debian/changelog | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/debian/build_virtualenv b/debian/build_virtualenv index bead3ebc6..279189605 100755 --- a/debian/build_virtualenv +++ b/debian/build_virtualenv @@ -43,7 +43,7 @@ dh_virtualenv \ --preinstall="mock" \ --extra-pip-arg="--no-cache-dir" \ --extra-pip-arg="--compile" \ - --extras="all" + --extras="all,systemd" PACKAGE_BUILD_DIR="debian/matrix-synapse-py3" VIRTUALENV_DIR="${PACKAGE_BUILD_DIR}${DH_VIRTUALENV_INSTALL_ROOT}/matrix-synapse" diff --git a/debian/changelog b/debian/changelog index ef4edd7ac..91653e724 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,10 @@ +matrix-synapse-py3 (1.0.0+nmu1) UNRELEASED; urgency=medium + + [ Silke Hofstra ] + * Include systemd-python to allow logging to the systemd journal. + + -- Silke Hofstra Wed, 29 May 2019 09:45:29 +0200 + matrix-synapse-py3 (1.0.0) stable; urgency=medium * New synapse release 1.0.0. 
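
The patch above only bundles the `systemd` Python bindings into the Debian virtualenv; actually pointing Synapse's logging at the journal is a log-configuration matter. A minimal sketch of the idea, assuming `systemd-python` is importable from Synapse's environment (the identifier and format string below are illustrative, not part of the patch):

    import logging

    from systemd import journal  # provided by the systemd-python package added above

    # Route log records to the systemd journal instead of (or alongside) a file.
    handler = journal.JournalHandler(SYSLOG_IDENTIFIER="matrix-synapse")
    handler.setFormatter(logging.Formatter("%(name)s - %(levelname)s - %(message)s"))

    root = logging.getLogger()
    root.setLevel(logging.INFO)
    root.addHandler(handler)

The same handler can normally be declared in the `handlers:` section of Synapse's log config YAML instead of in code.
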
From 9646a593ac555e7b68c6133c29a9f5bac83d1c2f Mon Sep 17 00:00:00 2001 From: Daniel Hoffend Date: Thu, 27 Jun 2019 19:37:29 +0200 Subject: [PATCH 25/61] Added possibilty to disable local password authentication (#5092) Signed-off-by: Daniel Hoffend --- changelog.d/5092.feature | 1 + docs/sample_config.yaml | 6 ++++++ synapse/config/password.py | 7 +++++++ synapse/handlers/auth.py | 2 +- synapse/handlers/set_password.py | 3 +++ 5 files changed, 18 insertions(+), 1 deletion(-) create mode 100644 changelog.d/5092.feature diff --git a/changelog.d/5092.feature b/changelog.d/5092.feature new file mode 100644 index 000000000..c22f586d0 --- /dev/null +++ b/changelog.d/5092.feature @@ -0,0 +1 @@ +Added possibilty to disable local password authentication. Contributed by Daniel Hoffend. diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index 18be376e1..a01e1152f 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -1046,6 +1046,12 @@ password_config: # #enabled: false + # Uncomment to disable authentication against the local password + # database. This is ignored if `enabled` is false, and is only useful + # if you have other password_providers. + # + #localdb_enabled: false + # Uncomment and change to a secret random string for extra security. # DO NOT CHANGE THIS AFTER INITIAL SETUP! # diff --git a/synapse/config/password.py b/synapse/config/password.py index 598f84fc0..d5b5953f2 100644 --- a/synapse/config/password.py +++ b/synapse/config/password.py @@ -26,6 +26,7 @@ class PasswordConfig(Config): password_config = {} self.password_enabled = password_config.get("enabled", True) + self.password_localdb_enabled = password_config.get("localdb_enabled", True) self.password_pepper = password_config.get("pepper", "") def generate_config_section(self, config_dir_path, server_name, **kwargs): @@ -35,6 +36,12 @@ class PasswordConfig(Config): # #enabled: false + # Uncomment to disable authentication against the local password + # database. This is ignored if `enabled` is false, and is only useful + # if you have other password_providers. + # + #localdb_enabled: false + # Uncomment and change to a secret random string for extra security. # DO NOT CHANGE THIS AFTER INITIAL SETUP! 
# diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 97b21c409..c8c1ed324 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -743,7 +743,7 @@ class AuthHandler(BaseHandler): result = (result, None) defer.returnValue(result) - if login_type == LoginType.PASSWORD: + if login_type == LoginType.PASSWORD and self.hs.config.password_localdb_enabled: known_login_type = True canonical_user_id = yield self._check_local_password( diff --git a/synapse/handlers/set_password.py b/synapse/handlers/set_password.py index 5a0995d4f..d90c9e010 100644 --- a/synapse/handlers/set_password.py +++ b/synapse/handlers/set_password.py @@ -33,6 +33,9 @@ class SetPasswordHandler(BaseHandler): @defer.inlineCallbacks def set_password(self, user_id, newpassword, requester=None): + if not self.hs.config.password_localdb_enabled: + raise SynapseError(403, "Password change disabled", errcode=Codes.FORBIDDEN) + password_hash = yield self._auth_handler.hash(newpassword) except_device_id = requester.device_id if requester else None From be3b901ccdf28d0f81d312d7cd8b7bedb22b4049 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Fri, 28 Jun 2019 18:19:09 +1000 Subject: [PATCH 26/61] Update the TLS cipher string and provide configurability for TLS on outgoing federation (#5550) --- changelog.d/5550.feature | 1 + changelog.d/5550.misc | 1 + docs/sample_config.yaml | 9 +++ scripts/generate_config | 2 +- synapse/config/tls.py | 32 ++++++++- synapse/crypto/context_factory.py | 39 ++++++++-- tests/config/test_tls.py | 115 +++++++++++++++++++++++++++++- 7 files changed, 190 insertions(+), 9 deletions(-) create mode 100644 changelog.d/5550.feature create mode 100644 changelog.d/5550.misc diff --git a/changelog.d/5550.feature b/changelog.d/5550.feature new file mode 100644 index 000000000..79ecedf3b --- /dev/null +++ b/changelog.d/5550.feature @@ -0,0 +1 @@ +The minimum TLS version used for outgoing federation requests can now be set with `federation_client_minimum_tls_version`. diff --git a/changelog.d/5550.misc b/changelog.d/5550.misc new file mode 100644 index 000000000..ad5693338 --- /dev/null +++ b/changelog.d/5550.misc @@ -0,0 +1 @@ +Synapse will now only allow TLS v1.2 connections when serving federation, if it terminates TLS. As Synapse's allowed ciphers were only able to be used in TLSv1.2 before, this does not change behaviour. diff --git a/docs/sample_config.yaml b/docs/sample_config.yaml index a01e1152f..bf9cd88b1 100644 --- a/docs/sample_config.yaml +++ b/docs/sample_config.yaml @@ -317,6 +317,15 @@ listeners: # #federation_verify_certificates: false +# The minimum TLS version that will be used for outbound federation requests. +# +# Defaults to `1`. Configurable to `1`, `1.1`, `1.2`, or `1.3`. Note +# that setting this value higher than `1.2` will prevent federation to most +# of the public Matrix network: only configure it to `1.3` if you have an +# entirely private federation setup and you can ensure TLS 1.3 support. +# +#federation_client_minimum_tls_version: 1.2 + # Skip federation certificate verification on the following whitelist # of domains. 
# diff --git a/scripts/generate_config b/scripts/generate_config index 93b640699..771cbf8d9 100755 --- a/scripts/generate_config +++ b/scripts/generate_config @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 import argparse import shutil diff --git a/synapse/config/tls.py b/synapse/config/tls.py index 8fcf80141..ca508a224 100644 --- a/synapse/config/tls.py +++ b/synapse/config/tls.py @@ -23,7 +23,7 @@ import six from unpaddedbase64 import encode_base64 -from OpenSSL import crypto +from OpenSSL import SSL, crypto from twisted.internet._sslverify import Certificate, trustRootFromCertificates from synapse.config._base import Config, ConfigError @@ -81,6 +81,27 @@ class TlsConfig(Config): "federation_verify_certificates", True ) + # Minimum TLS version to use for outbound federation traffic + self.federation_client_minimum_tls_version = str( + config.get("federation_client_minimum_tls_version", 1) + ) + + if self.federation_client_minimum_tls_version not in ["1", "1.1", "1.2", "1.3"]: + raise ConfigError( + "federation_client_minimum_tls_version must be one of: 1, 1.1, 1.2, 1.3" + ) + + # Prevent people shooting themselves in the foot here by setting it to + # the biggest number blindly + if self.federation_client_minimum_tls_version == "1.3": + if getattr(SSL, "OP_NO_TLSv1_3", None) is None: + raise ConfigError( + ( + "federation_client_minimum_tls_version cannot be 1.3, " + "your OpenSSL does not support it" + ) + ) + # Whitelist of domains to not verify certificates for fed_whitelist_entries = config.get( "federation_certificate_verification_whitelist", [] @@ -261,6 +282,15 @@ class TlsConfig(Config): # #federation_verify_certificates: false + # The minimum TLS version that will be used for outbound federation requests. + # + # Defaults to `1`. Configurable to `1`, `1.1`, `1.2`, or `1.3`. Note + # that setting this value higher than `1.2` will prevent federation to most + # of the public Matrix network: only configure it to `1.3` if you have an + # entirely private federation setup and you can ensure TLS 1.3 support. + # + #federation_client_minimum_tls_version: 1.2 + # Skip federation certificate verification on the following whitelist # of domains. 
# diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py index 2bc5cc380..4f48e8e88 100644 --- a/synapse/crypto/context_factory.py +++ b/synapse/crypto/context_factory.py @@ -24,12 +24,25 @@ from OpenSSL import SSL, crypto from twisted.internet._sslverify import _defaultCurveName from twisted.internet.abstract import isIPAddress, isIPv6Address from twisted.internet.interfaces import IOpenSSLClientConnectionCreator -from twisted.internet.ssl import CertificateOptions, ContextFactory, platformTrust +from twisted.internet.ssl import ( + CertificateOptions, + ContextFactory, + TLSVersion, + platformTrust, +) from twisted.python.failure import Failure logger = logging.getLogger(__name__) +_TLS_VERSION_MAP = { + "1": TLSVersion.TLSv1_0, + "1.1": TLSVersion.TLSv1_1, + "1.2": TLSVersion.TLSv1_2, + "1.3": TLSVersion.TLSv1_3, +} + + class ServerContextFactory(ContextFactory): """Factory for PyOpenSSL SSL contexts that are used to handle incoming connections.""" @@ -43,16 +56,18 @@ class ServerContextFactory(ContextFactory): try: _ecCurve = crypto.get_elliptic_curve(_defaultCurveName) context.set_tmp_ecdh(_ecCurve) - except Exception: logger.exception("Failed to enable elliptic curve for TLS") - context.set_options(SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3) + + context.set_options( + SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3 | SSL.OP_NO_TLSv1 | SSL.OP_NO_TLSv1_1 + ) context.use_certificate_chain_file(config.tls_certificate_file) context.use_privatekey(config.tls_private_key) # https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/ context.set_cipher_list( - "ECDH+AESGCM:ECDH+CHACHA20:ECDH+AES256:ECDH+AES128:!aNULL:!SHA1" + "ECDH+AESGCM:ECDH+CHACHA20:ECDH+AES256:ECDH+AES128:!aNULL:!SHA1:!AESCCM" ) def getContext(self): @@ -79,10 +94,22 @@ class ClientTLSOptionsFactory(object): # Use CA root certs provided by OpenSSL trust_root = platformTrust() - self._verify_ssl_context = CertificateOptions(trustRoot=trust_root).getContext() + # "insecurelyLowerMinimumTo" is the argument that will go lower than + # Twisted's default, which is why it is marked as "insecure" (since + # Twisted's defaults are reasonably secure). But, since Twisted is + # moving to TLS 1.2 by default, we want to respect the config option if + # it is set to 1.0 (which the alternate option, raiseMinimumTo, will not + # let us do). + minTLS = _TLS_VERSION_MAP[config.federation_client_minimum_tls_version] + + self._verify_ssl = CertificateOptions( + trustRoot=trust_root, insecurelyLowerMinimumTo=minTLS + ) + self._verify_ssl_context = self._verify_ssl.getContext() self._verify_ssl_context.set_info_callback(self._context_info_cb) - self._no_verify_ssl_context = CertificateOptions().getContext() + self._no_verify_ssl = CertificateOptions(insecurelyLowerMinimumTo=minTLS) + self._no_verify_ssl_context = self._no_verify_ssl.getContext() self._no_verify_ssl_context.set_info_callback(self._context_info_cb) def get_options(self, host): diff --git a/tests/config/test_tls.py b/tests/config/test_tls.py index a5d88d644..4f8a87a3d 100644 --- a/tests/config/test_tls.py +++ b/tests/config/test_tls.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- # Copyright 2019 New Vector Ltd +# Copyright 2019 Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -15,7 +16,10 @@ import os -from synapse.config.tls import TlsConfig +from OpenSSL import SSL + +from synapse.config.tls import ConfigError, TlsConfig +from synapse.crypto.context_factory import ClientTLSOptionsFactory from tests.unittest import TestCase @@ -78,3 +82,112 @@ s4niecZKPBizL6aucT59CsunNmmb5Glq8rlAcU+1ZTZZzGYqVYhF6axB9Qg= "or use Synapse's ACME support to provision one." ), ) + + def test_tls_client_minimum_default(self): + """ + The default client TLS version is 1.0. + """ + config = {} + t = TestConfig() + t.read_config(config, config_dir_path="", data_dir_path="") + + self.assertEqual(t.federation_client_minimum_tls_version, "1") + + def test_tls_client_minimum_set(self): + """ + The default client TLS version can be set to 1.0, 1.1, and 1.2. + """ + config = {"federation_client_minimum_tls_version": 1} + t = TestConfig() + t.read_config(config, config_dir_path="", data_dir_path="") + self.assertEqual(t.federation_client_minimum_tls_version, "1") + + config = {"federation_client_minimum_tls_version": 1.1} + t = TestConfig() + t.read_config(config, config_dir_path="", data_dir_path="") + self.assertEqual(t.federation_client_minimum_tls_version, "1.1") + + config = {"federation_client_minimum_tls_version": 1.2} + t = TestConfig() + t.read_config(config, config_dir_path="", data_dir_path="") + self.assertEqual(t.federation_client_minimum_tls_version, "1.2") + + # Also test a string version + config = {"federation_client_minimum_tls_version": "1"} + t = TestConfig() + t.read_config(config, config_dir_path="", data_dir_path="") + self.assertEqual(t.federation_client_minimum_tls_version, "1") + + config = {"federation_client_minimum_tls_version": "1.2"} + t = TestConfig() + t.read_config(config, config_dir_path="", data_dir_path="") + self.assertEqual(t.federation_client_minimum_tls_version, "1.2") + + def test_tls_client_minimum_1_point_3_missing(self): + """ + If TLS 1.3 support is missing and it's configured, it will raise a + ConfigError. + """ + # thanks i hate it + if hasattr(SSL, "OP_NO_TLSv1_3"): + OP_NO_TLSv1_3 = SSL.OP_NO_TLSv1_3 + delattr(SSL, "OP_NO_TLSv1_3") + self.addCleanup(setattr, SSL, "SSL.OP_NO_TLSv1_3", OP_NO_TLSv1_3) + assert not hasattr(SSL, "OP_NO_TLSv1_3") + + config = {"federation_client_minimum_tls_version": 1.3} + t = TestConfig() + with self.assertRaises(ConfigError) as e: + t.read_config(config, config_dir_path="", data_dir_path="") + self.assertEqual( + e.exception.args[0], + ( + "federation_client_minimum_tls_version cannot be 1.3, " + "your OpenSSL does not support it" + ), + ) + + def test_tls_client_minimum_1_point_3_exists(self): + """ + If TLS 1.3 support exists and it's configured, it will be settable. + """ + # thanks i hate it, still + if not hasattr(SSL, "OP_NO_TLSv1_3"): + SSL.OP_NO_TLSv1_3 = 0x00 + self.addCleanup(lambda: delattr(SSL, "OP_NO_TLSv1_3")) + assert hasattr(SSL, "OP_NO_TLSv1_3") + + config = {"federation_client_minimum_tls_version": 1.3} + t = TestConfig() + t.read_config(config, config_dir_path="", data_dir_path="") + self.assertEqual(t.federation_client_minimum_tls_version, "1.3") + + def test_tls_client_minimum_set_passed_through_1_2(self): + """ + The configured TLS version is correctly configured by the ContextFactory. 
+ """ + config = {"federation_client_minimum_tls_version": 1.2} + t = TestConfig() + t.read_config(config, config_dir_path="", data_dir_path="") + + cf = ClientTLSOptionsFactory(t) + + # The context has had NO_TLSv1_1 and NO_TLSv1_0 set, but not NO_TLSv1_2 + self.assertNotEqual(cf._verify_ssl._options & SSL.OP_NO_TLSv1, 0) + self.assertNotEqual(cf._verify_ssl._options & SSL.OP_NO_TLSv1_1, 0) + self.assertEqual(cf._verify_ssl._options & SSL.OP_NO_TLSv1_2, 0) + + def test_tls_client_minimum_set_passed_through_1_0(self): + """ + The configured TLS version is correctly configured by the ContextFactory. + """ + config = {"federation_client_minimum_tls_version": 1} + t = TestConfig() + t.read_config(config, config_dir_path="", data_dir_path="") + + cf = ClientTLSOptionsFactory(t) + + # The context has not had any of the NO_TLS set. + self.assertEqual(cf._verify_ssl._options & SSL.OP_NO_TLSv1, 0) + self.assertEqual(cf._verify_ssl._options & SSL.OP_NO_TLSv1_1, 0) + self.assertEqual(cf._verify_ssl._options & SSL.OP_NO_TLSv1_2, 0) From 071150ce19305e7f7a54f9794a1c7f2a05bc0610 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Fri, 28 Jun 2019 21:45:33 +1000 Subject: [PATCH 27/61] Don't log GC 0s at INFO (#5557) --- changelog.d/5557.misc | 1 + synapse/metrics/__init__.py | 5 ++++- 2 files changed, 5 insertions(+), 1 deletion(-) create mode 100644 changelog.d/5557.misc diff --git a/changelog.d/5557.misc b/changelog.d/5557.misc new file mode 100644 index 000000000..0c90f4987 --- /dev/null +++ b/changelog.d/5557.misc @@ -0,0 +1 @@ +Logging when running GC collection on generation 0 is now at the DEBUG level, not INFO. diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index 1f30179b5..eaf0aaa86 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -437,7 +437,10 @@ def runUntilCurrentTimer(func): counts = gc.get_count() for i in (2, 1, 0): if threshold[i] < counts[i]: - logger.info("Collecting gc %d", i) + if i == 0: + logger.debug("Collecting gc %d", i) + else: + logger.info("Collecting gc %d", i) start = time.time() unreachable = gc.collect(i) From 01d0f8e701b4c2ddd04eee1a26edef952c0ac558 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Fri, 28 Jun 2019 15:17:15 +0100 Subject: [PATCH 28/61] Don't update the ratelimiter before sending a 3PID invite This would cause emails being sent, but Synapse responding with a 429 when creating the event. The client would then retry, and with bad timing the same scenario would happen again. Some testing I did ended up sending me 10 emails for one single invite because of this. --- synapse/handlers/room_member.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index 4d6e88380..c860acf97 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -676,7 +676,7 @@ class RoomMemberHandler(object): # We need to rate limit *before* we send out any 3PID invites, so we # can't just rely on the standard ratelimiting of events. 
- yield self.base_handler.ratelimit(requester) + yield self.base_handler.ratelimit(requester, update=False) can_invite = yield self.third_party_event_rules.check_threepid_can_be_invited( medium, address, room_id From b339f6489f64b2e970b120245b9f7b87f1c17aa9 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Fri, 28 Jun 2019 15:24:59 +0100 Subject: [PATCH 29/61] Changelog --- changelog.d/5576.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/5576.bugfix diff --git a/changelog.d/5576.bugfix b/changelog.d/5576.bugfix new file mode 100644 index 000000000..c1ba5581f --- /dev/null +++ b/changelog.d/5576.bugfix @@ -0,0 +1 @@ +Fix a bug that would cause invited users to receive several emails for a single 3PID invite in case the inviter is rate limited. From 15d9fc31bd549e2b9c04f96a0d3e8938c1bdc6a5 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Fri, 28 Jun 2019 16:04:05 +0100 Subject: [PATCH 30/61] Only ratelimit when sending the email If we do the opposite, an event can arrive after or while sending the email and the 3PID invite event will get ratelimited. --- synapse/handlers/room_member.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index c860acf97..66b05b473 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -676,7 +676,7 @@ class RoomMemberHandler(object): # We need to rate limit *before* we send out any 3PID invites, so we # can't just rely on the standard ratelimiting of events. - yield self.base_handler.ratelimit(requester, update=False) + yield self.base_handler.ratelimit(requester) can_invite = yield self.third_party_event_rules.check_threepid_can_be_invited( medium, address, room_id @@ -823,6 +823,7 @@ class RoomMemberHandler(object): "sender": user.to_string(), "state_key": token, }, + ratelimit=False, txn_id=txn_id, ) From f40a7dc41fdd738a74546ff22b110a4c8ab850fe Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Sat, 29 Jun 2019 17:06:55 +1000 Subject: [PATCH 31/61] Make the http server handle coroutine-making REST servlets (#5475) --- changelog.d/5475.misc | 1 + synapse/http/server.py | 77 ++++++++++--------- synapse/rest/consent/consent_resource.py | 35 +++------ synapse/rest/key/v2/remote_key_resource.py | 28 +++---- synapse/rest/media/v1/config_resource.py | 21 +++-- synapse/rest/media/v1/download_resource.py | 26 +++---- synapse/rest/media/v1/preview_url_resource.py | 18 ++--- synapse/rest/media/v1/thumbnail_resource.py | 27 +++---- synapse/rest/media/v1/upload_resource.py | 23 +++--- synapse/rest/saml2/response_resource.py | 15 +--- tests/rest/media/v1/test_media_storage.py | 25 +++--- tests/unittest.py | 40 ++++++++-- 12 files changed, 162 insertions(+), 174 deletions(-) create mode 100644 changelog.d/5475.misc diff --git a/changelog.d/5475.misc b/changelog.d/5475.misc new file mode 100644 index 000000000..6be06d4d0 --- /dev/null +++ b/changelog.d/5475.misc @@ -0,0 +1 @@ +Synapse can now handle RestServlets that return coroutines. 
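
For orientation before the `synapse/http/server.py` diff below: once `JsonResource` awaits coroutine return values, a servlet callback can be a plain `async def` rather than an `@defer.inlineCallbacks` generator. A rough sketch; the servlet, its path and the import path for `client_patterns` are illustrative assumptions, not part of this patch:

    from synapse.http.servlet import RestServlet
    from synapse.rest.client.v2_alpha._base import client_patterns  # path as of this tree


    class PingRestServlet(RestServlet):
        """Hypothetical servlet, shown only to illustrate the new calling convention."""

        PATTERNS = client_patterns("/ping$")

        async def on_GET(self, request):
            # JsonResource._async_render() awaits this coroutine and sends the
            # (status_code, json_dict) tuple it returns.
            return 200, {"pong": True}
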
diff --git a/synapse/http/server.py b/synapse/http/server.py index 6fd13e87d..f067c163c 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -16,10 +16,11 @@ import cgi import collections +import http.client import logging - -from six import PY3 -from six.moves import http_client, urllib +import types +import urllib +from io import BytesIO from canonicaljson import encode_canonical_json, encode_pretty_printed_json, json @@ -41,11 +42,6 @@ from synapse.api.errors import ( from synapse.util.caches import intern_dict from synapse.util.logcontext import preserve_fn -if PY3: - from io import BytesIO -else: - from cStringIO import StringIO as BytesIO - logger = logging.getLogger(__name__) HTML_ERROR_TEMPLATE = """ @@ -75,10 +71,9 @@ def wrap_json_request_handler(h): deferred fails with any other type of error we send a 500 reponse. """ - @defer.inlineCallbacks - def wrapped_request_handler(self, request): + async def wrapped_request_handler(self, request): try: - yield h(self, request) + await h(self, request) except SynapseError as e: code = e.code logger.info("%s SynapseError: %s - %s", request, code, e.msg) @@ -142,10 +137,12 @@ def wrap_html_request_handler(h): where "request" must be a SynapseRequest. """ - def wrapped_request_handler(self, request): - d = defer.maybeDeferred(h, self, request) - d.addErrback(_return_html_error, request) - return d + async def wrapped_request_handler(self, request): + try: + return await h(self, request) + except Exception: + f = failure.Failure() + return _return_html_error(f, request) return wrap_async_request_handler(wrapped_request_handler) @@ -171,7 +168,7 @@ def _return_html_error(f, request): exc_info=(f.type, f.value, f.getTracebackObject()), ) else: - code = http_client.INTERNAL_SERVER_ERROR + code = http.client.INTERNAL_SERVER_ERROR msg = "Internal server error" logger.error( @@ -201,10 +198,9 @@ def wrap_async_request_handler(h): logged until the deferred completes. """ - @defer.inlineCallbacks - def wrapped_async_request_handler(self, request): + async def wrapped_async_request_handler(self, request): with request.processing(): - yield h(self, request) + await h(self, request) # we need to preserve_fn here, because the synchronous render method won't yield for # us (obviously) @@ -270,12 +266,11 @@ class JsonResource(HttpServer, resource.Resource): def render(self, request): """ This gets called by twisted every time someone sends us a request. """ - self._async_render(request) + defer.ensureDeferred(self._async_render(request)) return NOT_DONE_YET @wrap_json_request_handler - @defer.inlineCallbacks - def _async_render(self, request): + async def _async_render(self, request): """ This gets called from render() every time someone sends us a request. This checks if anyone has registered a callback for that method and path. @@ -292,26 +287,19 @@ class JsonResource(HttpServer, resource.Resource): # Now trigger the callback. If it returns a response, we send it # here. If it throws an exception, that is handled by the wrapper # installed by @request_handler. - - def _unquote(s): - if PY3: - # On Python 3, unquote is unicode -> unicode - return urllib.parse.unquote(s) - else: - # On Python 2, unquote is bytes -> bytes We need to encode the - # URL again (as it was decoded by _get_handler_for request), as - # ASCII because it's a URL, and then decode it to get the UTF-8 - # characters that were quoted. 
- return urllib.parse.unquote(s.encode("ascii")).decode("utf8") - kwargs = intern_dict( { - name: _unquote(value) if value else value + name: urllib.parse.unquote(value) if value else value for name, value in group_dict.items() } ) - callback_return = yield callback(request, **kwargs) + callback_return = callback(request, **kwargs) + + # Is it synchronous? We'll allow this for now. + if isinstance(callback_return, (defer.Deferred, types.CoroutineType)): + callback_return = await callback_return + if callback_return is not None: code, response = callback_return self._send_response(request, code, response) @@ -360,6 +348,23 @@ class JsonResource(HttpServer, resource.Resource): ) +class DirectServeResource(resource.Resource): + def render(self, request): + """ + Render the request, using an asynchronous render handler if it exists. + """ + render_callback_name = "_async_render_" + request.method.decode("ascii") + + if hasattr(self, render_callback_name): + # Call the handler + callback = getattr(self, render_callback_name) + defer.ensureDeferred(callback(request)) + + return NOT_DONE_YET + else: + super().render(request) + + def _options_handler(request): """Request handler for OPTIONS requests diff --git a/synapse/rest/consent/consent_resource.py b/synapse/rest/consent/consent_resource.py index 9a32892d8..624c42441 100644 --- a/synapse/rest/consent/consent_resource.py +++ b/synapse/rest/consent/consent_resource.py @@ -23,13 +23,13 @@ from six.moves import http_client import jinja2 from jinja2 import TemplateNotFound -from twisted.internet import defer -from twisted.web.resource import Resource -from twisted.web.server import NOT_DONE_YET - from synapse.api.errors import NotFoundError, StoreError, SynapseError from synapse.config import ConfigError -from synapse.http.server import finish_request, wrap_html_request_handler +from synapse.http.server import ( + DirectServeResource, + finish_request, + wrap_html_request_handler, +) from synapse.http.servlet import parse_string from synapse.types import UserID @@ -47,7 +47,7 @@ else: return a == b -class ConsentResource(Resource): +class ConsentResource(DirectServeResource): """A twisted Resource to display a privacy policy and gather consent to it When accessed via GET, returns the privacy policy via a template. 
@@ -87,7 +87,7 @@ class ConsentResource(Resource): Args: hs (synapse.server.HomeServer): homeserver """ - Resource.__init__(self) + super().__init__() self.hs = hs self.store = hs.get_datastore() @@ -118,18 +118,12 @@ class ConsentResource(Resource): self._hmac_secret = hs.config.form_secret.encode("utf-8") - def render_GET(self, request): - self._async_render_GET(request) - return NOT_DONE_YET - @wrap_html_request_handler - @defer.inlineCallbacks - def _async_render_GET(self, request): + async def _async_render_GET(self, request): """ Args: request (twisted.web.http.Request): """ - version = parse_string(request, "v", default=self._default_consent_version) username = parse_string(request, "u", required=False, default="") userhmac = None @@ -145,7 +139,7 @@ class ConsentResource(Resource): else: qualified_user_id = UserID(username, self.hs.hostname).to_string() - u = yield self.store.get_user_by_id(qualified_user_id) + u = await self.store.get_user_by_id(qualified_user_id) if u is None: raise NotFoundError("Unknown user") @@ -165,13 +159,8 @@ class ConsentResource(Resource): except TemplateNotFound: raise NotFoundError("Unknown policy version") - def render_POST(self, request): - self._async_render_POST(request) - return NOT_DONE_YET - @wrap_html_request_handler - @defer.inlineCallbacks - def _async_render_POST(self, request): + async def _async_render_POST(self, request): """ Args: request (twisted.web.http.Request): @@ -188,12 +177,12 @@ class ConsentResource(Resource): qualified_user_id = UserID(username, self.hs.hostname).to_string() try: - yield self.store.user_set_consent_version(qualified_user_id, version) + await self.store.user_set_consent_version(qualified_user_id, version) except StoreError as e: if e.code != 404: raise raise NotFoundError("Unknown user") - yield self.registration_handler.post_consent_actions(qualified_user_id) + await self.registration_handler.post_consent_actions(qualified_user_id) try: self._render_template(request, "success.html") diff --git a/synapse/rest/key/v2/remote_key_resource.py b/synapse/rest/key/v2/remote_key_resource.py index ec8b9d726..031a31669 100644 --- a/synapse/rest/key/v2/remote_key_resource.py +++ b/synapse/rest/key/v2/remote_key_resource.py @@ -16,18 +16,20 @@ import logging from io import BytesIO from twisted.internet import defer -from twisted.web.resource import Resource -from twisted.web.server import NOT_DONE_YET from synapse.api.errors import Codes, SynapseError from synapse.crypto.keyring import ServerKeyFetcher -from synapse.http.server import respond_with_json_bytes, wrap_json_request_handler +from synapse.http.server import ( + DirectServeResource, + respond_with_json_bytes, + wrap_json_request_handler, +) from synapse.http.servlet import parse_integer, parse_json_object_from_request logger = logging.getLogger(__name__) -class RemoteKey(Resource): +class RemoteKey(DirectServeResource): """HTTP resource for retreiving the TLS certificate and NACL signature verification keys for a collection of servers. Checks that the reported X.509 TLS certificate matches the one used in the HTTPS connection. 
Checks @@ -94,13 +96,8 @@ class RemoteKey(Resource): self.clock = hs.get_clock() self.federation_domain_whitelist = hs.config.federation_domain_whitelist - def render_GET(self, request): - self.async_render_GET(request) - return NOT_DONE_YET - @wrap_json_request_handler - @defer.inlineCallbacks - def async_render_GET(self, request): + async def _async_render_GET(self, request): if len(request.postpath) == 1: server, = request.postpath query = {server.decode("ascii"): {}} @@ -114,20 +111,15 @@ class RemoteKey(Resource): else: raise SynapseError(404, "Not found %r" % request.postpath, Codes.NOT_FOUND) - yield self.query_keys(request, query, query_remote_on_cache_miss=True) - - def render_POST(self, request): - self.async_render_POST(request) - return NOT_DONE_YET + await self.query_keys(request, query, query_remote_on_cache_miss=True) @wrap_json_request_handler - @defer.inlineCallbacks - def async_render_POST(self, request): + async def _async_render_POST(self, request): content = parse_json_object_from_request(request) query = content["server_keys"] - yield self.query_keys(request, query, query_remote_on_cache_miss=True) + await self.query_keys(request, query, query_remote_on_cache_miss=True) @defer.inlineCallbacks def query_keys(self, request, query, query_remote_on_cache_miss=False): diff --git a/synapse/rest/media/v1/config_resource.py b/synapse/rest/media/v1/config_resource.py index fa3d6680f..9f747de26 100644 --- a/synapse/rest/media/v1/config_resource.py +++ b/synapse/rest/media/v1/config_resource.py @@ -14,31 +14,28 @@ # limitations under the License. # -from twisted.internet import defer -from twisted.web.resource import Resource from twisted.web.server import NOT_DONE_YET -from synapse.http.server import respond_with_json, wrap_json_request_handler +from synapse.http.server import ( + DirectServeResource, + respond_with_json, + wrap_json_request_handler, +) -class MediaConfigResource(Resource): +class MediaConfigResource(DirectServeResource): isLeaf = True def __init__(self, hs): - Resource.__init__(self) + super().__init__() config = hs.get_config() self.clock = hs.get_clock() self.auth = hs.get_auth() self.limits_dict = {"m.upload.size": config.max_upload_size} - def render_GET(self, request): - self._async_render_GET(request) - return NOT_DONE_YET - @wrap_json_request_handler - @defer.inlineCallbacks - def _async_render_GET(self, request): - yield self.auth.get_user_by_req(request) + async def _async_render_GET(self, request): + await self.auth.get_user_by_req(request) respond_with_json(request, 200, self.limits_dict, send_cors=True) def render_OPTIONS(self, request): diff --git a/synapse/rest/media/v1/download_resource.py b/synapse/rest/media/v1/download_resource.py index a21a35f84..66a01559e 100644 --- a/synapse/rest/media/v1/download_resource.py +++ b/synapse/rest/media/v1/download_resource.py @@ -14,37 +14,31 @@ # limitations under the License. 
import logging -from twisted.internet import defer -from twisted.web.resource import Resource -from twisted.web.server import NOT_DONE_YET - import synapse.http.servlet -from synapse.http.server import set_cors_headers, wrap_json_request_handler +from synapse.http.server import ( + DirectServeResource, + set_cors_headers, + wrap_json_request_handler, +) from ._base import parse_media_id, respond_404 logger = logging.getLogger(__name__) -class DownloadResource(Resource): +class DownloadResource(DirectServeResource): isLeaf = True def __init__(self, hs, media_repo): - Resource.__init__(self) - + super().__init__() self.media_repo = media_repo self.server_name = hs.hostname # this is expected by @wrap_json_request_handler self.clock = hs.get_clock() - def render_GET(self, request): - self._async_render_GET(request) - return NOT_DONE_YET - @wrap_json_request_handler - @defer.inlineCallbacks - def _async_render_GET(self, request): + async def _async_render_GET(self, request): set_cors_headers(request) request.setHeader( b"Content-Security-Policy", @@ -58,7 +52,7 @@ class DownloadResource(Resource): ) server_name, media_id, name = parse_media_id(request) if server_name == self.server_name: - yield self.media_repo.get_local_media(request, media_id, name) + await self.media_repo.get_local_media(request, media_id, name) else: allow_remote = synapse.http.servlet.parse_boolean( request, "allow_remote", default=True @@ -72,4 +66,4 @@ class DownloadResource(Resource): respond_404(request) return - yield self.media_repo.get_remote_media(request, server_name, media_id, name) + await self.media_repo.get_remote_media(request, server_name, media_id, name) diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py index de6f292ff..0337b64dc 100644 --- a/synapse/rest/media/v1/preview_url_resource.py +++ b/synapse/rest/media/v1/preview_url_resource.py @@ -32,12 +32,11 @@ from canonicaljson import json from twisted.internet import defer from twisted.internet.error import DNSLookupError -from twisted.web.resource import Resource -from twisted.web.server import NOT_DONE_YET from synapse.api.errors import Codes, SynapseError from synapse.http.client import SimpleHttpClient from synapse.http.server import ( + DirectServeResource, respond_with_json, respond_with_json_bytes, wrap_json_request_handler, @@ -58,11 +57,11 @@ _charset_match = re.compile(br"<\s*meta[^>]*charset\s*=\s*([a-z0-9-]+)", flags=r _content_type_match = re.compile(r'.*; *charset="?(.*?)"?(;|$)', flags=re.I) -class PreviewUrlResource(Resource): +class PreviewUrlResource(DirectServeResource): isLeaf = True def __init__(self, hs, media_repo, media_storage): - Resource.__init__(self) + super().__init__() self.auth = hs.get_auth() self.clock = hs.get_clock() @@ -98,16 +97,11 @@ class PreviewUrlResource(Resource): def render_OPTIONS(self, request): return respond_with_json(request, 200, {}, send_cors=True) - def render_GET(self, request): - self._async_render_GET(request) - return NOT_DONE_YET - @wrap_json_request_handler - @defer.inlineCallbacks - def _async_render_GET(self, request): + async def _async_render_GET(self, request): # XXX: if get_user_by_req fails, what should we do in an async render? 
- requester = yield self.auth.get_user_by_req(request) + requester = await self.auth.get_user_by_req(request) url = parse_string(request, "url") if b"ts" in request.args: ts = parse_integer(request, "ts") @@ -159,7 +153,7 @@ class PreviewUrlResource(Resource): else: logger.info("Returning cached response") - og = yield make_deferred_yieldable(observable.observe()) + og = await make_deferred_yieldable(defer.maybeDeferred(observable.observe)) respond_with_json_bytes(request, 200, og, send_cors=True) @defer.inlineCallbacks diff --git a/synapse/rest/media/v1/thumbnail_resource.py b/synapse/rest/media/v1/thumbnail_resource.py index ca84c9f13..08329884a 100644 --- a/synapse/rest/media/v1/thumbnail_resource.py +++ b/synapse/rest/media/v1/thumbnail_resource.py @@ -17,10 +17,12 @@ import logging from twisted.internet import defer -from twisted.web.resource import Resource -from twisted.web.server import NOT_DONE_YET -from synapse.http.server import set_cors_headers, wrap_json_request_handler +from synapse.http.server import ( + DirectServeResource, + set_cors_headers, + wrap_json_request_handler, +) from synapse.http.servlet import parse_integer, parse_string from ._base import ( @@ -34,11 +36,11 @@ from ._base import ( logger = logging.getLogger(__name__) -class ThumbnailResource(Resource): +class ThumbnailResource(DirectServeResource): isLeaf = True def __init__(self, hs, media_repo, media_storage): - Resource.__init__(self) + super().__init__() self.store = hs.get_datastore() self.media_repo = media_repo @@ -47,13 +49,8 @@ class ThumbnailResource(Resource): self.server_name = hs.hostname self.clock = hs.get_clock() - def render_GET(self, request): - self._async_render_GET(request) - return NOT_DONE_YET - @wrap_json_request_handler - @defer.inlineCallbacks - def _async_render_GET(self, request): + async def _async_render_GET(self, request): set_cors_headers(request) server_name, media_id, _ = parse_media_id(request) width = parse_integer(request, "width", required=True) @@ -63,21 +60,21 @@ class ThumbnailResource(Resource): if server_name == self.server_name: if self.dynamic_thumbnails: - yield self._select_or_generate_local_thumbnail( + await self._select_or_generate_local_thumbnail( request, media_id, width, height, method, m_type ) else: - yield self._respond_local_thumbnail( + await self._respond_local_thumbnail( request, media_id, width, height, method, m_type ) self.media_repo.mark_recently_accessed(None, media_id) else: if self.dynamic_thumbnails: - yield self._select_or_generate_remote_thumbnail( + await self._select_or_generate_remote_thumbnail( request, server_name, media_id, width, height, method, m_type ) else: - yield self._respond_remote_thumbnail( + await self._respond_remote_thumbnail( request, server_name, media_id, width, height, method, m_type ) self.media_repo.mark_recently_accessed(server_name, media_id) diff --git a/synapse/rest/media/v1/upload_resource.py b/synapse/rest/media/v1/upload_resource.py index d1d7e959f..5d76bbdf6 100644 --- a/synapse/rest/media/v1/upload_resource.py +++ b/synapse/rest/media/v1/upload_resource.py @@ -15,22 +15,24 @@ import logging -from twisted.internet import defer -from twisted.web.resource import Resource from twisted.web.server import NOT_DONE_YET from synapse.api.errors import SynapseError -from synapse.http.server import respond_with_json, wrap_json_request_handler +from synapse.http.server import ( + DirectServeResource, + respond_with_json, + wrap_json_request_handler, +) from synapse.http.servlet import parse_string logger = 
logging.getLogger(__name__) -class UploadResource(Resource): +class UploadResource(DirectServeResource): isLeaf = True def __init__(self, hs, media_repo): - Resource.__init__(self) + super().__init__() self.media_repo = media_repo self.filepaths = media_repo.filepaths @@ -41,18 +43,13 @@ class UploadResource(Resource): self.max_upload_size = hs.config.max_upload_size self.clock = hs.get_clock() - def render_POST(self, request): - self._async_render_POST(request) - return NOT_DONE_YET - def render_OPTIONS(self, request): respond_with_json(request, 200, {}, send_cors=True) return NOT_DONE_YET @wrap_json_request_handler - @defer.inlineCallbacks - def _async_render_POST(self, request): - requester = yield self.auth.get_user_by_req(request) + async def _async_render_POST(self, request): + requester = await self.auth.get_user_by_req(request) # TODO: The checks here are a bit late. The content will have # already been uploaded to a tmp file at this point content_length = request.getHeader(b"Content-Length").decode("ascii") @@ -81,7 +78,7 @@ class UploadResource(Resource): # disposition = headers.getRawHeaders(b"Content-Disposition")[0] # TODO(markjh): parse content-dispostion - content_uri = yield self.media_repo.create_content( + content_uri = await self.media_repo.create_content( media_type, upload_name, request.content, content_length, requester.user ) diff --git a/synapse/rest/saml2/response_resource.py b/synapse/rest/saml2/response_resource.py index ab14b7067..939c87306 100644 --- a/synapse/rest/saml2/response_resource.py +++ b/synapse/rest/saml2/response_resource.py @@ -18,34 +18,27 @@ import logging import saml2 from saml2.client import Saml2Client -from twisted.web.resource import Resource -from twisted.web.server import NOT_DONE_YET - from synapse.api.errors import CodeMessageException -from synapse.http.server import wrap_html_request_handler +from synapse.http.server import DirectServeResource, wrap_html_request_handler from synapse.http.servlet import parse_string from synapse.rest.client.v1.login import SSOAuthHandler logger = logging.getLogger(__name__) -class SAML2ResponseResource(Resource): +class SAML2ResponseResource(DirectServeResource): """A Twisted web resource which handles the SAML response""" isLeaf = 1 def __init__(self, hs): - Resource.__init__(self) + super().__init__() self._saml_client = Saml2Client(hs.config.saml2_sp_config) self._sso_auth_handler = SSOAuthHandler(hs) - def render_POST(self, request): - self._async_render_POST(request) - return NOT_DONE_YET - @wrap_html_request_handler - def _async_render_POST(self, request): + async def _async_render_POST(self, request): resp_bytes = parse_string(request, "SAMLResponse", required=True) relay_state = parse_string(request, "RelayState", required=True) diff --git a/tests/rest/media/v1/test_media_storage.py b/tests/rest/media/v1/test_media_storage.py index e2d418b1d..39c934242 100644 --- a/tests/rest/media/v1/test_media_storage.py +++ b/tests/rest/media/v1/test_media_storage.py @@ -22,7 +22,6 @@ from binascii import unhexlify from mock import Mock from six.moves.urllib import parse -from twisted.internet import defer, reactor from twisted.internet.defer import Deferred from synapse.rest.media.v1._base import FileInfo @@ -34,15 +33,17 @@ from synapse.util.logcontext import make_deferred_yieldable from tests import unittest -class MediaStorageTests(unittest.TestCase): - def setUp(self): +class MediaStorageTests(unittest.HomeserverTestCase): + + needs_threadpool = True + + def prepare(self, reactor, clock, hs): 
self.test_dir = tempfile.mkdtemp(prefix="synapse-tests-") + self.addCleanup(shutil.rmtree, self.test_dir) self.primary_base_path = os.path.join(self.test_dir, "primary") self.secondary_base_path = os.path.join(self.test_dir, "secondary") - hs = Mock() - hs.get_reactor = Mock(return_value=reactor) hs.config.media_store_path = self.primary_base_path storage_providers = [FileStorageProviderBackend(hs, self.secondary_base_path)] @@ -52,10 +53,6 @@ class MediaStorageTests(unittest.TestCase): hs, self.primary_base_path, self.filepaths, storage_providers ) - def tearDown(self): - shutil.rmtree(self.test_dir) - - @defer.inlineCallbacks def test_ensure_media_is_in_local_cache(self): media_id = "some_media_id" test_body = "Test\n" @@ -73,7 +70,15 @@ class MediaStorageTests(unittest.TestCase): # Now we run ensure_media_is_in_local_cache, which should copy the file # to the local cache. file_info = FileInfo(None, media_id) - local_path = yield self.media_storage.ensure_media_is_in_local_cache(file_info) + + # This uses a real blocking threadpool so we have to wait for it to be + # actually done :/ + x = self.media_storage.ensure_media_is_in_local_cache(file_info) + + # Hotloop until the threadpool does its job... + self.wait_on_thread(x) + + local_path = self.get_success(x) self.assertTrue(os.path.exists(local_path)) diff --git a/tests/unittest.py b/tests/unittest.py index 36df43c13..d26804b5b 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -17,6 +17,7 @@ import gc import hashlib import hmac import logging +import time from mock import Mock @@ -24,7 +25,8 @@ from canonicaljson import json import twisted import twisted.logger -from twisted.internet.defer import Deferred +from twisted.internet.defer import Deferred, succeed +from twisted.python.threadpool import ThreadPool from twisted.trial import unittest from synapse.api.constants import EventTypes @@ -164,6 +166,7 @@ class HomeserverTestCase(TestCase): servlets = [] hijack_auth = True + needs_threadpool = False def setUp(self): """ @@ -192,15 +195,19 @@ class HomeserverTestCase(TestCase): if self.hijack_auth: def get_user_by_access_token(token=None, allow_guest=False): - return { - "user": UserID.from_string(self.helper.auth_user_id), - "token_id": 1, - "is_guest": False, - } + return succeed( + { + "user": UserID.from_string(self.helper.auth_user_id), + "token_id": 1, + "is_guest": False, + } + ) def get_user_by_req(request, allow_guest=False, rights="access"): - return create_requester( - UserID.from_string(self.helper.auth_user_id), 1, False, None + return succeed( + create_requester( + UserID.from_string(self.helper.auth_user_id), 1, False, None + ) ) self.hs.get_auth().get_user_by_req = get_user_by_req @@ -209,9 +216,26 @@ class HomeserverTestCase(TestCase): return_value="1234" ) + if self.needs_threadpool: + self.reactor.threadpool = ThreadPool() + self.addCleanup(self.reactor.threadpool.stop) + self.reactor.threadpool.start() + if hasattr(self, "prepare"): self.prepare(self.reactor, self.clock, self.hs) + def wait_on_thread(self, deferred, timeout=10): + """ + Wait until a Deferred is done, where it's waiting on a real thread. + """ + start_time = time.time() + + while not deferred.called: + if start_time + timeout < time.time(): + raise ValueError("Timed out waiting for threadpool") + self.reactor.advance(0.01) + time.sleep(0.01) + def make_homeserver(self, reactor, clock): """ Make and return a homeserver. 
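
Putting the pieces of this patch together: a resource now only needs to define an `_async_render_<METHOD>` coroutine, and `DirectServeResource.render()` supplies the `defer.ensureDeferred`/`NOT_DONE_YET` boilerplate. A minimal sketch modelled on `MediaConfigResource` above; the resource itself is hypothetical:

    from synapse.http.server import (
        DirectServeResource,
        respond_with_json,
        wrap_json_request_handler,
    )


    class WhoAmIResource(DirectServeResource):
        """Hypothetical resource, not part of the patch."""

        isLeaf = True

        def __init__(self, hs):
            super().__init__()
            self.auth = hs.get_auth()
            self.clock = hs.get_clock()  # expected by @wrap_json_request_handler

        @wrap_json_request_handler
        async def _async_render_GET(self, request):
            # render() finds this method by name, wraps it in defer.ensureDeferred()
            # and returns NOT_DONE_YET, so no per-resource boilerplate is needed.
            requester = await self.auth.get_user_by_req(request)
            respond_with_json(
                request, 200, {"user_id": requester.user.to_string()}, send_cors=True
            )
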
From 915280f1edec3ddfe6261940d91ef451f207ed15 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 1 Jul 2019 10:22:42 +0100 Subject: [PATCH 32/61] Fixup comment --- synapse/handlers/presence.py | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index 3edd35998..c80dc2eba 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -1021,12 +1021,19 @@ class PresenceEventSource(object): if from_key == max_token: # This is necessary as due to the way stream ID generators work # we may get updates that have a stream ID greater than the max - # token. This is usually fine, as it just means that we may send - # down some presence updates multiple times. However, we need to - # be careful that the sync stream actually does make some - # progress, otherwise clients will end up tight looping calling - # /sync due to it returning the same token repeatedly. Hence - # this guard. C.f. #5503. + # token (e.g. max_token is N but stream generator may return + # results for N+2, due to N+1 not having finished being + # persisted yet). + # + # This is usually fine, as it just means that we may send down + # some presence updates multiple times. However, we need to be + # careful that the sync stream either actually does make some + # progress or doesn't return, otherwise clients will end up + # tight looping calling /sync due to it immediately returning + # the same token repeatedly. + # + # Hence this guard where we just return nothing so that the sync + # doesn't return. C.f. #5503. defer.returnValue(([], max_token)) presence = self.get_presence_handler() From 3bcb13edd098ae634946d213472a2caf5134b9a8 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Mon, 1 Jul 2019 12:13:22 +0100 Subject: [PATCH 33/61] Address review comments --- synapse/handlers/saml_handler.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/synapse/handlers/saml_handler.py b/synapse/handlers/saml_handler.py index 03a0ac438..a1ce6929c 100644 --- a/synapse/handlers/saml_handler.py +++ b/synapse/handlers/saml_handler.py @@ -18,7 +18,7 @@ import attr import saml2 from saml2.client import Saml2Client -from synapse.api.errors import CodeMessageException +from synapse.api.errors import SynapseError from synapse.http.servlet import parse_string from synapse.rest.client.v1.login import SSOAuthHandler @@ -84,14 +84,16 @@ class SamlHandler: outstanding=self._outstanding_requests_dict, ) except Exception as e: - logger.warning("Exception parsing SAML2 response", exc_info=1) - raise CodeMessageException(400, "Unable to parse SAML2 response: %s" % (e,)) + logger.warning("Exception parsing SAML2 response: %s", e) + raise SynapseError(400, "Unable to parse SAML2 response: %s" % (e,)) if saml2_auth.not_signed: - raise CodeMessageException(400, "SAML2 response was not signed") + logger.warning("SAML2 response was not signed") + raise SynapseError(400, "SAML2 response was not signed") if "uid" not in saml2_auth.ava: - raise CodeMessageException(400, "uid not in SAML2 response") + logger.warning("SAML2 response lacks a 'uid' attestation") + raise SynapseError(400, "uid not in SAML2 response") self._outstanding_requests_dict.pop(saml2_auth.in_response_to, None) From b4914681a5d42b9f1407c41fd2b7518b75d88ec4 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Mon, 1 Jul 2019 23:33:52 +1000 Subject: [PATCH 34/61] fix async/await consentresource (#5585) Fixes #5582 --- changelog.d/5585.misc | 1 + 
synapse/rest/consent/consent_resource.py | 4 +++- 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 changelog.d/5585.misc diff --git a/changelog.d/5585.misc b/changelog.d/5585.misc new file mode 100644 index 000000000..6be06d4d0 --- /dev/null +++ b/changelog.d/5585.misc @@ -0,0 +1 @@ +Synapse can now handle RestServlets that return coroutines. diff --git a/synapse/rest/consent/consent_resource.py b/synapse/rest/consent/consent_resource.py index 624c42441..1ddf9997f 100644 --- a/synapse/rest/consent/consent_resource.py +++ b/synapse/rest/consent/consent_resource.py @@ -23,6 +23,8 @@ from six.moves import http_client import jinja2 from jinja2 import TemplateNotFound +from twisted.internet import defer + from synapse.api.errors import NotFoundError, StoreError, SynapseError from synapse.config import ConfigError from synapse.http.server import ( @@ -139,7 +141,7 @@ class ConsentResource(DirectServeResource): else: qualified_user_id = UserID(username, self.hs.hostname).to_string() - u = await self.store.get_user_by_id(qualified_user_id) + u = await defer.maybeDeferred(self.store.get_user_by_id, qualified_user_id) if u is None: raise NotFoundError("Unknown user") From 9ceb4f0889e5b8b2d5e4f77ed0e30c0edc37f092 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 2 Jul 2019 10:14:08 +0100 Subject: [PATCH 35/61] Fix --no-daemonize flag for synctl --- synctl | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/synctl b/synctl index 30d751236..794de99ea 100755 --- a/synctl +++ b/synctl @@ -150,8 +150,9 @@ def main(): parser.add_argument( "--no-daemonize", action="store_false", + dest="daemonize", help="Run synapse in the foreground for debugging. " - "Will work only if the daemonize option is not set in the config." 
+ "Will work only if the daemonize option is not set in the config.", ) options = parser.parse_args() @@ -159,7 +160,7 @@ def main(): if options.worker and options.all_processes: write('Cannot use "--worker" with "--all-processes"', stream=sys.stderr) sys.exit(1) - if options.no_daemonize and options.all_processes: + if not options.daemonize and options.all_processes: write('Cannot use "--no-daemonize" with "--all-processes"', stream=sys.stderr) sys.exit(1) @@ -169,9 +170,8 @@ def main(): write( "No config file found\n" "To generate a config file, run '%s -c %s --generate-config" - " --server-name= --report-stats='\n" % ( - " ".join(SYNAPSE), options.configfile, - ), + " --server-name= --report-stats='\n" + % (" ".join(SYNAPSE), options.configfile), stream=sys.stderr, ) sys.exit(1) @@ -289,7 +289,7 @@ def main(): # Check if synapse is already running if os.path.exists(pidfile) and pid_running(int(open(pidfile).read())): abort("synapse.app.homeserver already running") - start(configfile, bool(options.no_daemonize)) + start(configfile, bool(options.daemonize)) for worker in workers: env = os.environ.copy() From 948488e11528dd4926de9c8b17d41ce1a8ed127a Mon Sep 17 00:00:00 2001 From: PauRE Date: Tue, 2 Jul 2019 11:25:37 +0200 Subject: [PATCH 36/61] Fix JWT login with new users (#5586) Signed-off-by: Pau Rodriguez-Estivill --- changelog.d/5586.bugfix | 1 + synapse/rest/client/v1/login.py | 9 +++------ 2 files changed, 4 insertions(+), 6 deletions(-) create mode 100644 changelog.d/5586.bugfix diff --git a/changelog.d/5586.bugfix b/changelog.d/5586.bugfix new file mode 100644 index 000000000..c0b1ecf81 --- /dev/null +++ b/changelog.d/5586.bugfix @@ -0,0 +1 @@ +Fixed m.login.jwt using unregistred user_id and added pyjwt>=1.6.4 as jwt conditional dependencies. Contributed by Pau Rodriguez-Estivill. 
diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py index ede6bc8b1..e88d2ad7b 100644 --- a/synapse/rest/client/v1/login.py +++ b/synapse/rest/client/v1/login.py @@ -319,12 +319,12 @@ class LoginRestServlet(RestServlet): raise LoginError(401, "Invalid JWT", errcode=Codes.UNAUTHORIZED) user_id = UserID(user, self.hs.hostname).to_string() + device_id = login_submission.get("device_id") + initial_display_name = login_submission.get("initial_device_display_name") auth_handler = self.auth_handler registered_user_id = yield auth_handler.check_user_exists(user_id) if registered_user_id: - device_id = login_submission.get("device_id") - initial_display_name = login_submission.get("initial_device_display_name") device_id, access_token = yield self.registration_handler.register_device( registered_user_id, device_id, initial_display_name ) @@ -338,11 +338,8 @@ class LoginRestServlet(RestServlet): user_id, access_token = ( yield self.registration_handler.register(localpart=user) ) - - device_id = login_submission.get("device_id") - initial_display_name = login_submission.get("initial_device_display_name") device_id, access_token = yield self.registration_handler.register_device( - registered_user_id, device_id, initial_display_name + user_id, device_id, initial_display_name ) result = { From de8077a164f57467fc73f1fe429bc1ed0afd73c4 Mon Sep 17 00:00:00 2001 From: Amir Zarrinkafsh Date: Tue, 2 Jul 2019 19:31:06 +1000 Subject: [PATCH 37/61] Add ability to set timezone for Docker container (#5383) Signed-off-by: Amir Zarrinkafsh --- changelog.d/5383.feature | 1 + docker/Dockerfile | 1 + docker/README.md | 2 +- 3 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 changelog.d/5383.feature diff --git a/changelog.d/5383.feature b/changelog.d/5383.feature new file mode 100644 index 000000000..5d084cf0b --- /dev/null +++ b/changelog.d/5383.feature @@ -0,0 +1 @@ +Add ability to change Docker containers [timezone](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) with the `TZ` variable. diff --git a/docker/Dockerfile b/docker/Dockerfile index 0939cadf3..79276209f 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -66,6 +66,7 @@ RUN apk add --no-cache --virtual .runtime_deps \ libpq \ zlib \ su-exec \ + tzdata \ xmlsec COPY --from=builder /install /usr/local diff --git a/docker/README.md b/docker/README.md index b62417c28..46bb9d2d9 100644 --- a/docker/README.md +++ b/docker/README.md @@ -63,7 +63,6 @@ The following environment variables are supported in `generate` mode: * `UID`, `GID`: the user id and group id to use for creating the data directories. Defaults to `991`, `991`. - ## Running synapse Once you have a valid configuration file, you can start synapse as follows: @@ -91,6 +90,7 @@ The following environment variables are supported in run mode: * `SYNAPSE_CONFIG_PATH`: path to the config file. Defaults to `/homeserver.yaml`. * `UID`, `GID`: the user and group id to run Synapse as. Defaults to `991`, `991`. +* `TZ`: the [timezone](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) the container will run with. Defaults to `UTC`. 
## TLS support From 8134c49cadbcc426cce982bd93c03b3c280d0a54 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 2 Jul 2019 10:36:04 +0100 Subject: [PATCH 38/61] Newsfile --- changelog.d/5587.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/5587.feature diff --git a/changelog.d/5587.feature b/changelog.d/5587.feature new file mode 100644 index 000000000..ec1503860 --- /dev/null +++ b/changelog.d/5587.feature @@ -0,0 +1 @@ +Add --no-daemonize option to run synapse in the foreground, per issue #4130. Contributed by Soham Gumaste. \ No newline at end of file From a0fa4641c427c2aad338d0d69e4cb0fb78963001 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 2 Jul 2019 11:51:27 +0100 Subject: [PATCH 39/61] prepare v1.1.0rc1 --- CHANGES.md | 101 +++++++++++++++++++++++++++++++++++++++ UPGRADE.rst | 8 ++-- changelog.d/4276.misc | 1 - changelog.d/5015.misc | 1 - changelog.d/5042.bugfix | 1 - changelog.d/5051.bugfix | 1 - changelog.d/5092.feature | 1 - changelog.d/5252.feature | 1 - changelog.d/5313.misc | 1 - changelog.d/5325.bugfix | 1 - changelog.d/5363.feature | 1 - changelog.d/5378.misc | 1 - changelog.d/5381.misc | 1 - changelog.d/5382.misc | 1 - changelog.d/5383.feature | 1 - changelog.d/5383.misc | 1 - changelog.d/5384.feature | 1 - changelog.d/5386.misc | 1 - changelog.d/5387.bugfix | 1 - changelog.d/5388.bugfix | 1 - changelog.d/5389.bugfix | 1 - changelog.d/5390.bugfix | 1 - changelog.d/5394.bugfix | 1 - changelog.d/5412.feature | 1 - changelog.d/5422.feature | 1 - changelog.d/5425.removal | 1 - changelog.d/5440.feature | 1 - changelog.d/5446.misc | 1 - changelog.d/5447.misc | 1 - changelog.d/5448.removal | 1 - changelog.d/5458.feature | 1 - changelog.d/5459.misc | 1 - changelog.d/5460.misc | 1 - changelog.d/5461.feature | 1 - changelog.d/5464.bugfix | 1 - changelog.d/5465.misc | 2 - changelog.d/5474.feature | 1 - changelog.d/5475.misc | 1 - changelog.d/5476.misc | 1 - changelog.d/5477.feature | 1 - changelog.d/5478.misc | 1 - changelog.d/5480.misc | 1 - changelog.d/5482.misc | 1 - changelog.d/5490.bugfix | 1 - changelog.d/5493.misc | 1 - changelog.d/5498.bugfix | 1 - changelog.d/5499.misc | 1 - changelog.d/5500.bugfix | 1 - changelog.d/5502.misc | 1 - changelog.d/5505.feature | 1 - changelog.d/5507.bugfix | 1 - changelog.d/5509.misc | 1 - changelog.d/5510.misc | 1 - changelog.d/5511.misc | 1 - changelog.d/5512.feature | 1 - changelog.d/5513.feature | 1 - changelog.d/5514.bugfix | 1 - changelog.d/5516.feature | 1 - changelog.d/5521.feature | 1 - changelog.d/5522.feature | 1 - changelog.d/5523.bugfix | 1 - changelog.d/5524.feature | 1 - changelog.d/5525.removal | 1 - changelog.d/5531.feature | 1 - changelog.d/5534.feature | 1 - changelog.d/5537.misc | 1 - changelog.d/5543.misc | 1 - changelog.d/5545.misc | 1 - changelog.d/5546.feature | 1 - changelog.d/5547.feature | 1 - changelog.d/5548.misc | 1 - changelog.d/5550.feature | 1 - changelog.d/5550.misc | 1 - changelog.d/5555.bugfix | 1 - changelog.d/5557.misc | 1 - changelog.d/5558.misc | 1 - changelog.d/5559.feature | 1 - changelog.d/5561.feature | 1 - changelog.d/5562.feature | 1 - changelog.d/5563.bugfix | 1 - changelog.d/5564.misc | 1 - changelog.d/5565.feature | 1 - changelog.d/5566.feature | 1 - changelog.d/5567.feature | 1 - changelog.d/5568.feature | 1 - changelog.d/5570.misc | 1 - changelog.d/5576.bugfix | 1 - changelog.d/5585.misc | 1 - changelog.d/5586.bugfix | 1 - changelog.d/5587.feature | 1 - synapse/__init__.py | 2 +- 91 files changed, 106 insertions(+), 94 deletions(-) delete mode 
100644 changelog.d/4276.misc delete mode 100644 changelog.d/5015.misc delete mode 100644 changelog.d/5042.bugfix delete mode 100644 changelog.d/5051.bugfix delete mode 100644 changelog.d/5092.feature delete mode 100644 changelog.d/5252.feature delete mode 100644 changelog.d/5313.misc delete mode 100644 changelog.d/5325.bugfix delete mode 100644 changelog.d/5363.feature delete mode 100644 changelog.d/5378.misc delete mode 100644 changelog.d/5381.misc delete mode 100644 changelog.d/5382.misc delete mode 100644 changelog.d/5383.feature delete mode 100644 changelog.d/5383.misc delete mode 100644 changelog.d/5384.feature delete mode 100644 changelog.d/5386.misc delete mode 100644 changelog.d/5387.bugfix delete mode 100644 changelog.d/5388.bugfix delete mode 100644 changelog.d/5389.bugfix delete mode 100644 changelog.d/5390.bugfix delete mode 100644 changelog.d/5394.bugfix delete mode 100644 changelog.d/5412.feature delete mode 100644 changelog.d/5422.feature delete mode 100644 changelog.d/5425.removal delete mode 100644 changelog.d/5440.feature delete mode 100644 changelog.d/5446.misc delete mode 100644 changelog.d/5447.misc delete mode 100644 changelog.d/5448.removal delete mode 100644 changelog.d/5458.feature delete mode 100644 changelog.d/5459.misc delete mode 100644 changelog.d/5460.misc delete mode 100644 changelog.d/5461.feature delete mode 100644 changelog.d/5464.bugfix delete mode 100644 changelog.d/5465.misc delete mode 100644 changelog.d/5474.feature delete mode 100644 changelog.d/5475.misc delete mode 100644 changelog.d/5476.misc delete mode 100644 changelog.d/5477.feature delete mode 100644 changelog.d/5478.misc delete mode 100644 changelog.d/5480.misc delete mode 100644 changelog.d/5482.misc delete mode 100644 changelog.d/5490.bugfix delete mode 100644 changelog.d/5493.misc delete mode 100644 changelog.d/5498.bugfix delete mode 100644 changelog.d/5499.misc delete mode 100644 changelog.d/5500.bugfix delete mode 100644 changelog.d/5502.misc delete mode 100644 changelog.d/5505.feature delete mode 100644 changelog.d/5507.bugfix delete mode 100644 changelog.d/5509.misc delete mode 100644 changelog.d/5510.misc delete mode 100644 changelog.d/5511.misc delete mode 100644 changelog.d/5512.feature delete mode 100644 changelog.d/5513.feature delete mode 100644 changelog.d/5514.bugfix delete mode 100644 changelog.d/5516.feature delete mode 100644 changelog.d/5521.feature delete mode 100644 changelog.d/5522.feature delete mode 100644 changelog.d/5523.bugfix delete mode 100644 changelog.d/5524.feature delete mode 100644 changelog.d/5525.removal delete mode 100644 changelog.d/5531.feature delete mode 100644 changelog.d/5534.feature delete mode 100644 changelog.d/5537.misc delete mode 100644 changelog.d/5543.misc delete mode 100644 changelog.d/5545.misc delete mode 100644 changelog.d/5546.feature delete mode 100644 changelog.d/5547.feature delete mode 100644 changelog.d/5548.misc delete mode 100644 changelog.d/5550.feature delete mode 100644 changelog.d/5550.misc delete mode 100644 changelog.d/5555.bugfix delete mode 100644 changelog.d/5557.misc delete mode 100644 changelog.d/5558.misc delete mode 100644 changelog.d/5559.feature delete mode 100644 changelog.d/5561.feature delete mode 100644 changelog.d/5562.feature delete mode 100644 changelog.d/5563.bugfix delete mode 100644 changelog.d/5564.misc delete mode 100644 changelog.d/5565.feature delete mode 100644 changelog.d/5566.feature delete mode 100644 changelog.d/5567.feature delete mode 100644 changelog.d/5568.feature delete mode 100644 
changelog.d/5570.misc delete mode 100644 changelog.d/5576.bugfix delete mode 100644 changelog.d/5585.misc delete mode 100644 changelog.d/5586.bugfix delete mode 100644 changelog.d/5587.feature diff --git a/CHANGES.md b/CHANGES.md index 284e89b26..381553302 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,104 @@ +Synapse 1.1.0rc1 (2019-07-02) +============================= + +As of v1.1.0, Synapse no longer supports Python 2, nor Postgres version 9.4. +See the [upgrade notes](UPGRADE.rst#upgrading-to-v110) for more details. + +Features +-------- + +- Added possibilty to disable local password authentication. Contributed by Daniel Hoffend. ([\#5092](https://github.com/matrix-org/synapse/issues/5092)) +- Add monthly active users to phonehome stats. ([\#5252](https://github.com/matrix-org/synapse/issues/5252)) +- Allow expired user to trigger renewal email sending manually. ([\#5363](https://github.com/matrix-org/synapse/issues/5363)) +- Add ability to change Docker containers [timezone](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) with the `TZ` variable. ([\#5383](https://github.com/matrix-org/synapse/issues/5383)) +- Statistics on forward extremities per room are now exposed via Prometheus. ([\#5384](https://github.com/matrix-org/synapse/issues/5384), [\#5458](https://github.com/matrix-org/synapse/issues/5458), [\#5461](https://github.com/matrix-org/synapse/issues/5461)) +- Add --no-daemonize option to run synapse in the foreground, per issue #4130. Contributed by Soham Gumaste. ([\#5412](https://github.com/matrix-org/synapse/issues/5412), [\#5587](https://github.com/matrix-org/synapse/issues/5587)) +- Fully support SAML2 authentication. Contributed by [Alexander Trost](https://github.com/galexrt) - thank you! ([\#5422](https://github.com/matrix-org/synapse/issues/5422)) +- Allow server admins to define implementations of extra rules for allowing or denying incoming events. ([\#5440](https://github.com/matrix-org/synapse/issues/5440), [\#5474](https://github.com/matrix-org/synapse/issues/5474), [\#5477](https://github.com/matrix-org/synapse/issues/5477)) +- Add support for handling pagination APIs on client reader worker. ([\#5505](https://github.com/matrix-org/synapse/issues/5505), [\#5513](https://github.com/matrix-org/synapse/issues/5513), [\#5531](https://github.com/matrix-org/synapse/issues/5531)) +- Improve help and cmdline option names for --generate-config options. ([\#5512](https://github.com/matrix-org/synapse/issues/5512)) +- Allow configuration of the path used for ACME account keys. ([\#5516](https://github.com/matrix-org/synapse/issues/5516), [\#5521](https://github.com/matrix-org/synapse/issues/5521), [\#5522](https://github.com/matrix-org/synapse/issues/5522)) +- Add --data-dir and --open-private-ports options. ([\#5524](https://github.com/matrix-org/synapse/issues/5524)) +- Split public rooms directory auth config in two settings, in order to manage client auth independently from the federation part of it. Obsoletes the "restrict_public_rooms_to_local_users" configuration setting. If "restrict_public_rooms_to_local_users" is set in the config, Synapse will act as if both new options are enabled, i.e. require authentication through the client API and deny federation requests. ([\#5534](https://github.com/matrix-org/synapse/issues/5534)) +- Update docker image to use Python 3.7. ([\#5546](https://github.com/matrix-org/synapse/issues/5546)) +- Increase default log level for docker image to INFO. 
It can still be changed by editing the generated log.config file. ([\#5547](https://github.com/matrix-org/synapse/issues/5547)) +- The minimum TLS version used for outgoing federation requests can now be set with `federation_client_minimum_tls_version`. ([\#5550](https://github.com/matrix-org/synapse/issues/5550)) +- Optimise devices changed query to not pull unnecessary rows from the database, reducing database load. ([\#5559](https://github.com/matrix-org/synapse/issues/5559)) +- Update Docker image to deprecate the use of environment variables for configuration, and make the use of a static configuration the default. ([\#5561](https://github.com/matrix-org/synapse/issues/5561), [\#5562](https://github.com/matrix-org/synapse/issues/5562), [\#5566](https://github.com/matrix-org/synapse/issues/5566), [\#5567](https://github.com/matrix-org/synapse/issues/5567)) +- Docker: Send synapse logs to the docker logging system, by default. ([\#5565](https://github.com/matrix-org/synapse/issues/5565)) +- Docker image: open the non-TLS port by default. ([\#5568](https://github.com/matrix-org/synapse/issues/5568)) +- Add new metrics for number of forward extremities being persisted and number of state groups involved in resolution. ([\#5476](https://github.com/matrix-org/synapse/issues/5476)) + + +Bugfixes +-------- + +- Fix bug processing incoming events over federation if call to `/get_missing_events` fails. ([\#5042](https://github.com/matrix-org/synapse/issues/5042)) +- Prevent >1 room upgrades happening simultaneously on the same room. ([\#5051](https://github.com/matrix-org/synapse/issues/5051)) +- Fix a bug where running synapse_port_db would cause the account validity feature to fail because it didn't set the type of the email_sent column to boolean. ([\#5325](https://github.com/matrix-org/synapse/issues/5325)) +- Warn about disabling email-based password resets when a reset occurs, and remove warning when someone attempts a phone-based reset. ([\#5387](https://github.com/matrix-org/synapse/issues/5387)) +- Fix email notifications for unnamed rooms with multiple people. ([\#5388](https://github.com/matrix-org/synapse/issues/5388)) +- Fix exceptions in federation reader worker caused by attempting to renew attestations, which should only happen on master worker. ([\#5389](https://github.com/matrix-org/synapse/issues/5389)) +- Fix handling of failures fetching remote content to not log failures as exceptions. ([\#5390](https://github.com/matrix-org/synapse/issues/5390)) +- Fix a bug where deactivated users could receive renewal emails if the account validity feature is on. ([\#5394](https://github.com/matrix-org/synapse/issues/5394)) +- Fix missing invite state after exchanging 3PID invites over federaton. ([\#5464](https://github.com/matrix-org/synapse/issues/5464)) +- Fix failure to start under docker with SAML support enabled. ([\#5490](https://github.com/matrix-org/synapse/issues/5490)) +- Fix intermittent exceptions on Apple hardware. Also fix bug that caused database activity times to be under-reported in log lines. ([\#5498](https://github.com/matrix-org/synapse/issues/5498)) +- Fix logging error when a tampered event is detected. ([\#5500](https://github.com/matrix-org/synapse/issues/5500)) +- Fix bug where clients could tight loop calling `/sync` for a period. ([\#5507](https://github.com/matrix-org/synapse/issues/5507)) +- Fix bug with `jinja2` preventing Synapse from starting. Users who had this problem should now simply need to run `pip install matrix-synapse`. 
([\#5514](https://github.com/matrix-org/synapse/issues/5514)) +- Fix a regression where homeservers on private IP addresses were incorrectly blacklisted. ([\#5523](https://github.com/matrix-org/synapse/issues/5523)) +- Fixed m.login.jwt using unregistred user_id and added pyjwt>=1.6.4 as jwt conditional dependencies. Contributed by Pau Rodriguez-Estivill. ([\#5555](https://github.com/matrix-org/synapse/issues/5555), [\#5586](https://github.com/matrix-org/synapse/issues/5586)) +- Docker: Use a sensible location for data files when generating a config file. ([\#5563](https://github.com/matrix-org/synapse/issues/5563)) +- Fix a bug that would cause invited users to receive several emails for a single 3PID invite in case the inviter is rate limited. ([\#5576](https://github.com/matrix-org/synapse/issues/5576)) + + +Deprecations and Removals +------------------------- + +- Python 2.7 is no longer a supported platform. Synapse now requires Python 3.5+ to run. ([\#5425](https://github.com/matrix-org/synapse/issues/5425)) +- PostgreSQL 9.4 is no longer supported. Synapse requires Postgres 9.5+ or above for Postgres support. ([\#5448](https://github.com/matrix-org/synapse/issues/5448)) +- Remove support for cpu_affinity setting. ([\#5525](https://github.com/matrix-org/synapse/issues/5525)) + + +Improved Documentation +---------------------- +- Improve README section on performance troubleshooting. ([\#4276](https://github.com/matrix-org/synapse/issues/4276)) +- Add information about how to install and run `black` on the codebase to code_style.rst. ([\#5537](https://github.com/matrix-org/synapse/issues/5537)) +- Improve install docs on choosing server_name. ([\#5558](https://github.com/matrix-org/synapse/issues/5558)) + + +Internal Changes +---------------- + +- Add logging to 3pid invite signature verification. ([\#5015](https://github.com/matrix-org/synapse/issues/5015)) +- Update example haproxy config to a more compatible setup. ([\#5313](https://github.com/matrix-org/synapse/issues/5313)) +- Track deactivated accounts in the database. ([\#5378](https://github.com/matrix-org/synapse/issues/5378), [\#5465](https://github.com/matrix-org/synapse/issues/5465), [\#5493](https://github.com/matrix-org/synapse/issues/5493)) +- Clean up code for sending federation EDUs. ([\#5381](https://github.com/matrix-org/synapse/issues/5381)) +- Add a sponsor button to the repo. ([\#5382](https://github.com/matrix-org/synapse/issues/5382), [\#5386](https://github.com/matrix-org/synapse/issues/5386)) +- Don't log non-200 responses from federation queries as exceptions. ([\#5383](https://github.com/matrix-org/synapse/issues/5383)) +- Update Python syntax in contrib/ to Python 3. ([\#5446](https://github.com/matrix-org/synapse/issues/5446)) +- Update federation_client dev script to support `.well-known` and work with python3. ([\#5447](https://github.com/matrix-org/synapse/issues/5447)) +- SyTest has been moved to Buildkite. ([\#5459](https://github.com/matrix-org/synapse/issues/5459)) +- Demo script now uses python3. ([\#5460](https://github.com/matrix-org/synapse/issues/5460)) +- Synapse can now handle RestServlets that return coroutines. ([\#5475](https://github.com/matrix-org/synapse/issues/5475), [\#5585](https://github.com/matrix-org/synapse/issues/5585)) +- The demo servers talk to each other again. ([\#5478](https://github.com/matrix-org/synapse/issues/5478)) +- Add an EXPERIMENTAL config option to try and periodically clean up extremities by sending dummy events. 
([\#5480](https://github.com/matrix-org/synapse/issues/5480)) +- Synapse's codebase is now formatted by `black`. ([\#5482](https://github.com/matrix-org/synapse/issues/5482)) +- Some cleanups and sanity-checking in the CPU and database metrics. ([\#5499](https://github.com/matrix-org/synapse/issues/5499)) +- Improve email notification logging. ([\#5502](https://github.com/matrix-org/synapse/issues/5502)) +- Fix "Unexpected entry in 'full_schemas'" log warning. ([\#5509](https://github.com/matrix-org/synapse/issues/5509)) +- Improve logging when generating config files. ([\#5510](https://github.com/matrix-org/synapse/issues/5510)) +- Refactor and clean up Config parser for maintainability. ([\#5511](https://github.com/matrix-org/synapse/issues/5511)) +- Make the config clearer in that email.template_dir is relative to the Synapse's root directory, not the `synapse/` folder within it. ([\#5543](https://github.com/matrix-org/synapse/issues/5543)) +- Update v1.0.0 release changelog to include more information about changes to password resets. ([\#5545](https://github.com/matrix-org/synapse/issues/5545)) +- Remove non-functioning check_event_hash.py dev script. ([\#5548](https://github.com/matrix-org/synapse/issues/5548)) +- Synapse will now only allow TLS v1.2 connections when serving federation, if it terminates TLS. As Synapse's allowed ciphers were only able to be used in TLSv1.2 before, this does not change behaviour. ([\#5550](https://github.com/matrix-org/synapse/issues/5550)) +- Logging when running GC collection on generation 0 is now at the DEBUG level, not INFO. ([\#5557](https://github.com/matrix-org/synapse/issues/5557)) +- Reduce the amount of stuff we send in the docker context. ([\#5564](https://github.com/matrix-org/synapse/issues/5564)) +- Point the reverse links in the Purge History contrib scripts at the intended location. ([\#5570](https://github.com/matrix-org/synapse/issues/5570)) + + Synapse 1.0.0 (2019-06-11) ========================== diff --git a/UPGRADE.rst b/UPGRADE.rst index 1fb109a21..72064accf 100644 --- a/UPGRADE.rst +++ b/UPGRADE.rst @@ -49,16 +49,16 @@ returned by the Client-Server API: # configured on port 443. curl -kv https:///_matrix/client/versions 2>&1 | grep "Server:" -Upgrading to v1.1 -================= +Upgrading to v1.1.0 +=================== -Synapse 1.1 removes support for older Python and PostgreSQL versions, as +Synapse v1.1.0 removes support for older Python and PostgreSQL versions, as outlined in `our deprecation notice `_. Minimum Python Version ---------------------- -Synapse v1.1 has a minimum Python requirement of Python 3.5. Python 3.6 or +Synapse v1.1.0 has a minimum Python requirement of Python 3.5. Python 3.6 or Python 3.7 are recommended as they have improved internal string handling, significantly reducing memory usage. diff --git a/changelog.d/4276.misc b/changelog.d/4276.misc deleted file mode 100644 index 285939a4b..000000000 --- a/changelog.d/4276.misc +++ /dev/null @@ -1 +0,0 @@ -Improve README section on performance troubleshooting. diff --git a/changelog.d/5015.misc b/changelog.d/5015.misc deleted file mode 100644 index eeec85b92..000000000 --- a/changelog.d/5015.misc +++ /dev/null @@ -1 +0,0 @@ -Add logging to 3pid invite signature verification. diff --git a/changelog.d/5042.bugfix b/changelog.d/5042.bugfix deleted file mode 100644 index 736b07c79..000000000 --- a/changelog.d/5042.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix bug processing incoming events over federation if call to `/get_missing_events` fails. 
diff --git a/changelog.d/5051.bugfix b/changelog.d/5051.bugfix deleted file mode 100644 index bfa22cc75..000000000 --- a/changelog.d/5051.bugfix +++ /dev/null @@ -1 +0,0 @@ -Prevent >1 room upgrades happening simultaneously on the same room. diff --git a/changelog.d/5092.feature b/changelog.d/5092.feature deleted file mode 100644 index c22f586d0..000000000 --- a/changelog.d/5092.feature +++ /dev/null @@ -1 +0,0 @@ -Added possibilty to disable local password authentication. Contributed by Daniel Hoffend. diff --git a/changelog.d/5252.feature b/changelog.d/5252.feature deleted file mode 100644 index 44115b038..000000000 --- a/changelog.d/5252.feature +++ /dev/null @@ -1 +0,0 @@ -Add monthly active users to phonehome stats. diff --git a/changelog.d/5313.misc b/changelog.d/5313.misc deleted file mode 100644 index 2ea01cb9d..000000000 --- a/changelog.d/5313.misc +++ /dev/null @@ -1 +0,0 @@ -Update example haproxy config to a more compatible setup. diff --git a/changelog.d/5325.bugfix b/changelog.d/5325.bugfix deleted file mode 100644 index b9413388f..000000000 --- a/changelog.d/5325.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a bug where running synapse_port_db would cause the account validity feature to fail because it didn't set the type of the email_sent column to boolean. diff --git a/changelog.d/5363.feature b/changelog.d/5363.feature deleted file mode 100644 index 803fe3fc3..000000000 --- a/changelog.d/5363.feature +++ /dev/null @@ -1 +0,0 @@ -Allow expired user to trigger renewal email sending manually. diff --git a/changelog.d/5378.misc b/changelog.d/5378.misc deleted file mode 100644 index 365e49d63..000000000 --- a/changelog.d/5378.misc +++ /dev/null @@ -1 +0,0 @@ -Track deactivated accounts in the database. diff --git a/changelog.d/5381.misc b/changelog.d/5381.misc deleted file mode 100644 index bbf70a044..000000000 --- a/changelog.d/5381.misc +++ /dev/null @@ -1 +0,0 @@ -Clean up code for sending federation EDUs. diff --git a/changelog.d/5382.misc b/changelog.d/5382.misc deleted file mode 100644 index 060cbba2a..000000000 --- a/changelog.d/5382.misc +++ /dev/null @@ -1 +0,0 @@ -Add a sponsor button to the repo. diff --git a/changelog.d/5383.feature b/changelog.d/5383.feature deleted file mode 100644 index 5d084cf0b..000000000 --- a/changelog.d/5383.feature +++ /dev/null @@ -1 +0,0 @@ -Add ability to change Docker containers [timezone](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) with the `TZ` variable. diff --git a/changelog.d/5383.misc b/changelog.d/5383.misc deleted file mode 100644 index 9dd5d1df9..000000000 --- a/changelog.d/5383.misc +++ /dev/null @@ -1 +0,0 @@ -Don't log non-200 responses from federation queries as exceptions. diff --git a/changelog.d/5384.feature b/changelog.d/5384.feature deleted file mode 100644 index 9497f521c..000000000 --- a/changelog.d/5384.feature +++ /dev/null @@ -1 +0,0 @@ -Statistics on forward extremities per room are now exposed via Prometheus. diff --git a/changelog.d/5386.misc b/changelog.d/5386.misc deleted file mode 100644 index 060cbba2a..000000000 --- a/changelog.d/5386.misc +++ /dev/null @@ -1 +0,0 @@ -Add a sponsor button to the repo. diff --git a/changelog.d/5387.bugfix b/changelog.d/5387.bugfix deleted file mode 100644 index 2c6c94efc..000000000 --- a/changelog.d/5387.bugfix +++ /dev/null @@ -1 +0,0 @@ -Warn about disabling email-based password resets when a reset occurs, and remove warning when someone attempts a phone-based reset. 
diff --git a/changelog.d/5388.bugfix b/changelog.d/5388.bugfix deleted file mode 100644 index 503e83091..000000000 --- a/changelog.d/5388.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix email notifications for unnamed rooms with multiple people. diff --git a/changelog.d/5389.bugfix b/changelog.d/5389.bugfix deleted file mode 100644 index dd648e26c..000000000 --- a/changelog.d/5389.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix exceptions in federation reader worker caused by attempting to renew attestations, which should only happen on master worker. diff --git a/changelog.d/5390.bugfix b/changelog.d/5390.bugfix deleted file mode 100644 index e7b7483cf..000000000 --- a/changelog.d/5390.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix handling of failures fetching remote content to not log failures as exceptions. diff --git a/changelog.d/5394.bugfix b/changelog.d/5394.bugfix deleted file mode 100644 index 2ad9fbe82..000000000 --- a/changelog.d/5394.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a bug where deactivated users could receive renewal emails if the account validity feature is on. diff --git a/changelog.d/5412.feature b/changelog.d/5412.feature deleted file mode 100644 index ec1503860..000000000 --- a/changelog.d/5412.feature +++ /dev/null @@ -1 +0,0 @@ -Add --no-daemonize option to run synapse in the foreground, per issue #4130. Contributed by Soham Gumaste. \ No newline at end of file diff --git a/changelog.d/5422.feature b/changelog.d/5422.feature deleted file mode 100644 index 02ac25a2f..000000000 --- a/changelog.d/5422.feature +++ /dev/null @@ -1 +0,0 @@ -Fully support SAML2 authentication. Contributed by [Alexander Trost](https://github.com/galexrt) - thank you! diff --git a/changelog.d/5425.removal b/changelog.d/5425.removal deleted file mode 100644 index 30022ee63..000000000 --- a/changelog.d/5425.removal +++ /dev/null @@ -1 +0,0 @@ -Python 2.7 is no longer a supported platform. Synapse now requires Python 3.5+ to run. diff --git a/changelog.d/5440.feature b/changelog.d/5440.feature deleted file mode 100644 index 63d9b5873..000000000 --- a/changelog.d/5440.feature +++ /dev/null @@ -1 +0,0 @@ -Allow server admins to define implementations of extra rules for allowing or denying incoming events. diff --git a/changelog.d/5446.misc b/changelog.d/5446.misc deleted file mode 100644 index e5209be0a..000000000 --- a/changelog.d/5446.misc +++ /dev/null @@ -1 +0,0 @@ -Update Python syntax in contrib/ to Python 3. diff --git a/changelog.d/5447.misc b/changelog.d/5447.misc deleted file mode 100644 index dd5206840..000000000 --- a/changelog.d/5447.misc +++ /dev/null @@ -1 +0,0 @@ -Update federation_client dev script to support `.well-known` and work with python3. diff --git a/changelog.d/5448.removal b/changelog.d/5448.removal deleted file mode 100644 index 33b9859da..000000000 --- a/changelog.d/5448.removal +++ /dev/null @@ -1 +0,0 @@ -PostgreSQL 9.4 is no longer supported. Synapse requires Postgres 9.5+ or above for Postgres support. diff --git a/changelog.d/5458.feature b/changelog.d/5458.feature deleted file mode 100644 index 9497f521c..000000000 --- a/changelog.d/5458.feature +++ /dev/null @@ -1 +0,0 @@ -Statistics on forward extremities per room are now exposed via Prometheus. diff --git a/changelog.d/5459.misc b/changelog.d/5459.misc deleted file mode 100644 index 904e45f66..000000000 --- a/changelog.d/5459.misc +++ /dev/null @@ -1 +0,0 @@ -SyTest has been moved to Buildkite. 
diff --git a/changelog.d/5460.misc b/changelog.d/5460.misc deleted file mode 100644 index badc8bb79..000000000 --- a/changelog.d/5460.misc +++ /dev/null @@ -1 +0,0 @@ -Demo script now uses python3. diff --git a/changelog.d/5461.feature b/changelog.d/5461.feature deleted file mode 100644 index 9497f521c..000000000 --- a/changelog.d/5461.feature +++ /dev/null @@ -1 +0,0 @@ -Statistics on forward extremities per room are now exposed via Prometheus. diff --git a/changelog.d/5464.bugfix b/changelog.d/5464.bugfix deleted file mode 100644 index 8278d1bce..000000000 --- a/changelog.d/5464.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix missing invite state after exchanging 3PID invites over federaton. diff --git a/changelog.d/5465.misc b/changelog.d/5465.misc deleted file mode 100644 index af5f0f8f4..000000000 --- a/changelog.d/5465.misc +++ /dev/null @@ -1,2 +0,0 @@ -Track deactivated accounts in the database. - diff --git a/changelog.d/5474.feature b/changelog.d/5474.feature deleted file mode 100644 index 63d9b5873..000000000 --- a/changelog.d/5474.feature +++ /dev/null @@ -1 +0,0 @@ -Allow server admins to define implementations of extra rules for allowing or denying incoming events. diff --git a/changelog.d/5475.misc b/changelog.d/5475.misc deleted file mode 100644 index 6be06d4d0..000000000 --- a/changelog.d/5475.misc +++ /dev/null @@ -1 +0,0 @@ -Synapse can now handle RestServlets that return coroutines. diff --git a/changelog.d/5476.misc b/changelog.d/5476.misc deleted file mode 100644 index 7955c1474..000000000 --- a/changelog.d/5476.misc +++ /dev/null @@ -1 +0,0 @@ -Add new metrics for number of forward extremities being persisted and number of state groups involved in resolution. diff --git a/changelog.d/5477.feature b/changelog.d/5477.feature deleted file mode 100644 index 63d9b5873..000000000 --- a/changelog.d/5477.feature +++ /dev/null @@ -1 +0,0 @@ -Allow server admins to define implementations of extra rules for allowing or denying incoming events. diff --git a/changelog.d/5478.misc b/changelog.d/5478.misc deleted file mode 100644 index 829bb1e52..000000000 --- a/changelog.d/5478.misc +++ /dev/null @@ -1 +0,0 @@ -The demo servers talk to each other again. diff --git a/changelog.d/5480.misc b/changelog.d/5480.misc deleted file mode 100644 index 3001bcc1f..000000000 --- a/changelog.d/5480.misc +++ /dev/null @@ -1 +0,0 @@ -Add an EXPERIMENTAL config option to try and periodically clean up extremities by sending dummy events. diff --git a/changelog.d/5482.misc b/changelog.d/5482.misc deleted file mode 100644 index 0332d1133..000000000 --- a/changelog.d/5482.misc +++ /dev/null @@ -1 +0,0 @@ -Synapse's codebase is now formatted by `black`. diff --git a/changelog.d/5490.bugfix b/changelog.d/5490.bugfix deleted file mode 100644 index 4242254c5..000000000 --- a/changelog.d/5490.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix failure to start under docker with SAML support enabled. \ No newline at end of file diff --git a/changelog.d/5493.misc b/changelog.d/5493.misc deleted file mode 100644 index 365e49d63..000000000 --- a/changelog.d/5493.misc +++ /dev/null @@ -1 +0,0 @@ -Track deactivated accounts in the database. diff --git a/changelog.d/5498.bugfix b/changelog.d/5498.bugfix deleted file mode 100644 index 6ef298d20..000000000 --- a/changelog.d/5498.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix intermittent exceptions on Apple hardware. Also fix bug that caused database activity times to be under-reported in log lines. 
diff --git a/changelog.d/5499.misc b/changelog.d/5499.misc deleted file mode 100644 index 84de1f2da..000000000 --- a/changelog.d/5499.misc +++ /dev/null @@ -1 +0,0 @@ -Some cleanups and sanity-checking in the CPU and database metrics. \ No newline at end of file diff --git a/changelog.d/5500.bugfix b/changelog.d/5500.bugfix deleted file mode 100644 index 624c67843..000000000 --- a/changelog.d/5500.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix logging error when a tampered event is detected. diff --git a/changelog.d/5502.misc b/changelog.d/5502.misc deleted file mode 100644 index d515323ea..000000000 --- a/changelog.d/5502.misc +++ /dev/null @@ -1 +0,0 @@ -Improve email notification logging. diff --git a/changelog.d/5505.feature b/changelog.d/5505.feature deleted file mode 100644 index 5c6bab2c3..000000000 --- a/changelog.d/5505.feature +++ /dev/null @@ -1 +0,0 @@ -Add support for handling pagination APIs on client reader worker. diff --git a/changelog.d/5507.bugfix b/changelog.d/5507.bugfix deleted file mode 100644 index 70452aa14..000000000 --- a/changelog.d/5507.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix bug where clients could tight loop calling `/sync` for a period. diff --git a/changelog.d/5509.misc b/changelog.d/5509.misc deleted file mode 100644 index cc27cf294..000000000 --- a/changelog.d/5509.misc +++ /dev/null @@ -1 +0,0 @@ -Fix "Unexpected entry in 'full_schemas'" log warning. diff --git a/changelog.d/5510.misc b/changelog.d/5510.misc deleted file mode 100644 index 4591a63d9..000000000 --- a/changelog.d/5510.misc +++ /dev/null @@ -1 +0,0 @@ -Improve logging when generating config files. diff --git a/changelog.d/5511.misc b/changelog.d/5511.misc deleted file mode 100644 index c1f679b28..000000000 --- a/changelog.d/5511.misc +++ /dev/null @@ -1 +0,0 @@ -Refactor and clean up Config parser for maintainability. diff --git a/changelog.d/5512.feature b/changelog.d/5512.feature deleted file mode 100644 index 712878901..000000000 --- a/changelog.d/5512.feature +++ /dev/null @@ -1 +0,0 @@ -Improve help and cmdline option names for --generate-config options. diff --git a/changelog.d/5513.feature b/changelog.d/5513.feature deleted file mode 100644 index 5c6bab2c3..000000000 --- a/changelog.d/5513.feature +++ /dev/null @@ -1 +0,0 @@ -Add support for handling pagination APIs on client reader worker. diff --git a/changelog.d/5514.bugfix b/changelog.d/5514.bugfix deleted file mode 100644 index c3a76a854..000000000 --- a/changelog.d/5514.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix bug with `jinja2` preventing Synapse from starting. Users who had this problem should now simply need to run `pip install matrix-synapse`. diff --git a/changelog.d/5516.feature b/changelog.d/5516.feature deleted file mode 100644 index fdf91c35e..000000000 --- a/changelog.d/5516.feature +++ /dev/null @@ -1 +0,0 @@ -Allow configuration of the path used for ACME account keys. diff --git a/changelog.d/5521.feature b/changelog.d/5521.feature deleted file mode 100644 index fdf91c35e..000000000 --- a/changelog.d/5521.feature +++ /dev/null @@ -1 +0,0 @@ -Allow configuration of the path used for ACME account keys. diff --git a/changelog.d/5522.feature b/changelog.d/5522.feature deleted file mode 100644 index fdf91c35e..000000000 --- a/changelog.d/5522.feature +++ /dev/null @@ -1 +0,0 @@ -Allow configuration of the path used for ACME account keys. 
diff --git a/changelog.d/5523.bugfix b/changelog.d/5523.bugfix deleted file mode 100644 index 26acd367a..000000000 --- a/changelog.d/5523.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a regression where homeservers on private IP addresses were incorrectly blacklisted. \ No newline at end of file diff --git a/changelog.d/5524.feature b/changelog.d/5524.feature deleted file mode 100644 index 6ba211c3c..000000000 --- a/changelog.d/5524.feature +++ /dev/null @@ -1 +0,0 @@ -Add --data-dir and --open-private-ports options. \ No newline at end of file diff --git a/changelog.d/5525.removal b/changelog.d/5525.removal deleted file mode 100644 index af71560f3..000000000 --- a/changelog.d/5525.removal +++ /dev/null @@ -1 +0,0 @@ -Remove support for cpu_affinity setting. diff --git a/changelog.d/5531.feature b/changelog.d/5531.feature deleted file mode 100644 index 5c6bab2c3..000000000 --- a/changelog.d/5531.feature +++ /dev/null @@ -1 +0,0 @@ -Add support for handling pagination APIs on client reader worker. diff --git a/changelog.d/5534.feature b/changelog.d/5534.feature deleted file mode 100644 index 2e279c9b7..000000000 --- a/changelog.d/5534.feature +++ /dev/null @@ -1 +0,0 @@ -Split public rooms directory auth config in two settings, in order to manage client auth independently from the federation part of it. Obsoletes the "restrict_public_rooms_to_local_users" configuration setting. If "restrict_public_rooms_to_local_users" is set in the config, Synapse will act as if both new options are enabled, i.e. require authentication through the client API and deny federation requests. diff --git a/changelog.d/5537.misc b/changelog.d/5537.misc deleted file mode 100644 index 870a5ff18..000000000 --- a/changelog.d/5537.misc +++ /dev/null @@ -1 +0,0 @@ -Add information about how to install and run `black` on the codebase to code_style.rst. diff --git a/changelog.d/5543.misc b/changelog.d/5543.misc deleted file mode 100644 index 793620a73..000000000 --- a/changelog.d/5543.misc +++ /dev/null @@ -1 +0,0 @@ -Make the config clearer in that email.template_dir is relative to the Synapse's root directory, not the `synapse/` folder within it. diff --git a/changelog.d/5545.misc b/changelog.d/5545.misc deleted file mode 100644 index b738eef4b..000000000 --- a/changelog.d/5545.misc +++ /dev/null @@ -1 +0,0 @@ -Update v1.0.0 release changelog to include more information about changes to password resets. diff --git a/changelog.d/5546.feature b/changelog.d/5546.feature deleted file mode 100644 index 16952b62f..000000000 --- a/changelog.d/5546.feature +++ /dev/null @@ -1 +0,0 @@ -Update docker image to use Python 3.7. diff --git a/changelog.d/5547.feature b/changelog.d/5547.feature deleted file mode 100644 index 509e36c7e..000000000 --- a/changelog.d/5547.feature +++ /dev/null @@ -1 +0,0 @@ -Increase default log level for docker image to INFO. It can still be changed by editing the generated log.config file. diff --git a/changelog.d/5548.misc b/changelog.d/5548.misc deleted file mode 100644 index f35939cfe..000000000 --- a/changelog.d/5548.misc +++ /dev/null @@ -1 +0,0 @@ -Remove non-functioning check_event_hash.py dev script. diff --git a/changelog.d/5550.feature b/changelog.d/5550.feature deleted file mode 100644 index 79ecedf3b..000000000 --- a/changelog.d/5550.feature +++ /dev/null @@ -1 +0,0 @@ -The minimum TLS version used for outgoing federation requests can now be set with `federation_client_minimum_tls_version`. 
diff --git a/changelog.d/5550.misc b/changelog.d/5550.misc deleted file mode 100644 index ad5693338..000000000 --- a/changelog.d/5550.misc +++ /dev/null @@ -1 +0,0 @@ -Synapse will now only allow TLS v1.2 connections when serving federation, if it terminates TLS. As Synapse's allowed ciphers were only able to be used in TLSv1.2 before, this does not change behaviour. diff --git a/changelog.d/5555.bugfix b/changelog.d/5555.bugfix deleted file mode 100644 index c0b1ecf81..000000000 --- a/changelog.d/5555.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fixed m.login.jwt using unregistred user_id and added pyjwt>=1.6.4 as jwt conditional dependencies. Contributed by Pau Rodriguez-Estivill. diff --git a/changelog.d/5557.misc b/changelog.d/5557.misc deleted file mode 100644 index 0c90f4987..000000000 --- a/changelog.d/5557.misc +++ /dev/null @@ -1 +0,0 @@ -Logging when running GC collection on generation 0 is now at the DEBUG level, not INFO. diff --git a/changelog.d/5558.misc b/changelog.d/5558.misc deleted file mode 100644 index 9ce3555d4..000000000 --- a/changelog.d/5558.misc +++ /dev/null @@ -1 +0,0 @@ -Improve install docs on choosing server_name. diff --git a/changelog.d/5559.feature b/changelog.d/5559.feature deleted file mode 100644 index b77b38345..000000000 --- a/changelog.d/5559.feature +++ /dev/null @@ -1 +0,0 @@ -Optimise devices changed query to not pull unnecessary rows from the database, reducing database load. diff --git a/changelog.d/5561.feature b/changelog.d/5561.feature deleted file mode 100644 index 85380bc51..000000000 --- a/changelog.d/5561.feature +++ /dev/null @@ -1 +0,0 @@ -Update Docker image to deprecate the use of environment variables for configuration, and make the use of a static configuration the default. diff --git a/changelog.d/5562.feature b/changelog.d/5562.feature deleted file mode 100644 index 85380bc51..000000000 --- a/changelog.d/5562.feature +++ /dev/null @@ -1 +0,0 @@ -Update Docker image to deprecate the use of environment variables for configuration, and make the use of a static configuration the default. diff --git a/changelog.d/5563.bugfix b/changelog.d/5563.bugfix deleted file mode 100644 index 09c4381a2..000000000 --- a/changelog.d/5563.bugfix +++ /dev/null @@ -1 +0,0 @@ -Docker: Use a sensible location for data files when generating a config file. \ No newline at end of file diff --git a/changelog.d/5564.misc b/changelog.d/5564.misc deleted file mode 100644 index e209cdcc2..000000000 --- a/changelog.d/5564.misc +++ /dev/null @@ -1 +0,0 @@ -Reduce the amount of stuff we send in the docker context. diff --git a/changelog.d/5565.feature b/changelog.d/5565.feature deleted file mode 100644 index 4b0665af0..000000000 --- a/changelog.d/5565.feature +++ /dev/null @@ -1 +0,0 @@ -Docker: Send synapse logs to the docker logging system, by default. diff --git a/changelog.d/5566.feature b/changelog.d/5566.feature deleted file mode 100644 index 85380bc51..000000000 --- a/changelog.d/5566.feature +++ /dev/null @@ -1 +0,0 @@ -Update Docker image to deprecate the use of environment variables for configuration, and make the use of a static configuration the default. diff --git a/changelog.d/5567.feature b/changelog.d/5567.feature deleted file mode 100644 index 85380bc51..000000000 --- a/changelog.d/5567.feature +++ /dev/null @@ -1 +0,0 @@ -Update Docker image to deprecate the use of environment variables for configuration, and make the use of a static configuration the default. 
diff --git a/changelog.d/5568.feature b/changelog.d/5568.feature deleted file mode 100644 index 59b9e5f96..000000000 --- a/changelog.d/5568.feature +++ /dev/null @@ -1 +0,0 @@ -Docker image: open the non-TLS port by default. diff --git a/changelog.d/5570.misc b/changelog.d/5570.misc deleted file mode 100644 index dfb1d7e58..000000000 --- a/changelog.d/5570.misc +++ /dev/null @@ -1 +0,0 @@ -Point the reverse links in the Purge History contrib scripts at the intended location. diff --git a/changelog.d/5576.bugfix b/changelog.d/5576.bugfix deleted file mode 100644 index c1ba5581f..000000000 --- a/changelog.d/5576.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a bug that would cause invited users to receive several emails for a single 3PID invite in case the inviter is rate limited. diff --git a/changelog.d/5585.misc b/changelog.d/5585.misc deleted file mode 100644 index 6be06d4d0..000000000 --- a/changelog.d/5585.misc +++ /dev/null @@ -1 +0,0 @@ -Synapse can now handle RestServlets that return coroutines. diff --git a/changelog.d/5586.bugfix b/changelog.d/5586.bugfix deleted file mode 100644 index c0b1ecf81..000000000 --- a/changelog.d/5586.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fixed m.login.jwt using unregistred user_id and added pyjwt>=1.6.4 as jwt conditional dependencies. Contributed by Pau Rodriguez-Estivill. diff --git a/changelog.d/5587.feature b/changelog.d/5587.feature deleted file mode 100644 index ec1503860..000000000 --- a/changelog.d/5587.feature +++ /dev/null @@ -1 +0,0 @@ -Add --no-daemonize option to run synapse in the foreground, per issue #4130. Contributed by Soham Gumaste. \ No newline at end of file diff --git a/synapse/__init__.py b/synapse/__init__.py index 119359be6..a7d2008e8 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -35,4 +35,4 @@ try: except ImportError: pass -__version__ = "1.0.0" +__version__ = "1.1.0rc1" From f8b52eb8c5fc4c723fad49df7a5339c1239b34be Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 2 Jul 2019 12:03:39 +0100 Subject: [PATCH 40/61] tweak changelog --- CHANGES.md | 23 +++++++++++++---------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index 381553302..329e59d39 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -10,7 +10,6 @@ Features - Added possibilty to disable local password authentication. Contributed by Daniel Hoffend. ([\#5092](https://github.com/matrix-org/synapse/issues/5092)) - Add monthly active users to phonehome stats. ([\#5252](https://github.com/matrix-org/synapse/issues/5252)) - Allow expired user to trigger renewal email sending manually. ([\#5363](https://github.com/matrix-org/synapse/issues/5363)) -- Add ability to change Docker containers [timezone](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) with the `TZ` variable. ([\#5383](https://github.com/matrix-org/synapse/issues/5383)) - Statistics on forward extremities per room are now exposed via Prometheus. ([\#5384](https://github.com/matrix-org/synapse/issues/5384), [\#5458](https://github.com/matrix-org/synapse/issues/5458), [\#5461](https://github.com/matrix-org/synapse/issues/5461)) - Add --no-daemonize option to run synapse in the foreground, per issue #4130. Contributed by Soham Gumaste. ([\#5412](https://github.com/matrix-org/synapse/issues/5412), [\#5587](https://github.com/matrix-org/synapse/issues/5587)) - Fully support SAML2 authentication. Contributed by [Alexander Trost](https://github.com/galexrt) - thank you! 
([\#5422](https://github.com/matrix-org/synapse/issues/5422)) @@ -20,21 +19,15 @@ Features - Allow configuration of the path used for ACME account keys. ([\#5516](https://github.com/matrix-org/synapse/issues/5516), [\#5521](https://github.com/matrix-org/synapse/issues/5521), [\#5522](https://github.com/matrix-org/synapse/issues/5522)) - Add --data-dir and --open-private-ports options. ([\#5524](https://github.com/matrix-org/synapse/issues/5524)) - Split public rooms directory auth config in two settings, in order to manage client auth independently from the federation part of it. Obsoletes the "restrict_public_rooms_to_local_users" configuration setting. If "restrict_public_rooms_to_local_users" is set in the config, Synapse will act as if both new options are enabled, i.e. require authentication through the client API and deny federation requests. ([\#5534](https://github.com/matrix-org/synapse/issues/5534)) -- Update docker image to use Python 3.7. ([\#5546](https://github.com/matrix-org/synapse/issues/5546)) -- Increase default log level for docker image to INFO. It can still be changed by editing the generated log.config file. ([\#5547](https://github.com/matrix-org/synapse/issues/5547)) - The minimum TLS version used for outgoing federation requests can now be set with `federation_client_minimum_tls_version`. ([\#5550](https://github.com/matrix-org/synapse/issues/5550)) - Optimise devices changed query to not pull unnecessary rows from the database, reducing database load. ([\#5559](https://github.com/matrix-org/synapse/issues/5559)) -- Update Docker image to deprecate the use of environment variables for configuration, and make the use of a static configuration the default. ([\#5561](https://github.com/matrix-org/synapse/issues/5561), [\#5562](https://github.com/matrix-org/synapse/issues/5562), [\#5566](https://github.com/matrix-org/synapse/issues/5566), [\#5567](https://github.com/matrix-org/synapse/issues/5567)) -- Docker: Send synapse logs to the docker logging system, by default. ([\#5565](https://github.com/matrix-org/synapse/issues/5565)) -- Docker image: open the non-TLS port by default. ([\#5568](https://github.com/matrix-org/synapse/issues/5568)) - Add new metrics for number of forward extremities being persisted and number of state groups involved in resolution. ([\#5476](https://github.com/matrix-org/synapse/issues/5476)) - Bugfixes -------- - Fix bug processing incoming events over federation if call to `/get_missing_events` fails. ([\#5042](https://github.com/matrix-org/synapse/issues/5042)) -- Prevent >1 room upgrades happening simultaneously on the same room. ([\#5051](https://github.com/matrix-org/synapse/issues/5051)) +- Prevent more than one room upgrade happening simultaneously on the same room. ([\#5051](https://github.com/matrix-org/synapse/issues/5051)) - Fix a bug where running synapse_port_db would cause the account validity feature to fail because it didn't set the type of the email_sent column to boolean. ([\#5325](https://github.com/matrix-org/synapse/issues/5325)) - Warn about disabling email-based password resets when a reset occurs, and remove warning when someone attempts a phone-based reset. ([\#5387](https://github.com/matrix-org/synapse/issues/5387)) - Fix email notifications for unnamed rooms with multiple people. ([\#5388](https://github.com/matrix-org/synapse/issues/5388)) @@ -42,17 +35,27 @@ Bugfixes - Fix handling of failures fetching remote content to not log failures as exceptions. 
([\#5390](https://github.com/matrix-org/synapse/issues/5390)) - Fix a bug where deactivated users could receive renewal emails if the account validity feature is on. ([\#5394](https://github.com/matrix-org/synapse/issues/5394)) - Fix missing invite state after exchanging 3PID invites over federaton. ([\#5464](https://github.com/matrix-org/synapse/issues/5464)) -- Fix failure to start under docker with SAML support enabled. ([\#5490](https://github.com/matrix-org/synapse/issues/5490)) - Fix intermittent exceptions on Apple hardware. Also fix bug that caused database activity times to be under-reported in log lines. ([\#5498](https://github.com/matrix-org/synapse/issues/5498)) - Fix logging error when a tampered event is detected. ([\#5500](https://github.com/matrix-org/synapse/issues/5500)) - Fix bug where clients could tight loop calling `/sync` for a period. ([\#5507](https://github.com/matrix-org/synapse/issues/5507)) - Fix bug with `jinja2` preventing Synapse from starting. Users who had this problem should now simply need to run `pip install matrix-synapse`. ([\#5514](https://github.com/matrix-org/synapse/issues/5514)) - Fix a regression where homeservers on private IP addresses were incorrectly blacklisted. ([\#5523](https://github.com/matrix-org/synapse/issues/5523)) - Fixed m.login.jwt using unregistred user_id and added pyjwt>=1.6.4 as jwt conditional dependencies. Contributed by Pau Rodriguez-Estivill. ([\#5555](https://github.com/matrix-org/synapse/issues/5555), [\#5586](https://github.com/matrix-org/synapse/issues/5586)) -- Docker: Use a sensible location for data files when generating a config file. ([\#5563](https://github.com/matrix-org/synapse/issues/5563)) - Fix a bug that would cause invited users to receive several emails for a single 3PID invite in case the inviter is rate limited. ([\#5576](https://github.com/matrix-org/synapse/issues/5576)) +Updates to the Docker image +--------------------------- +- Add ability to change Docker containers [timezone](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) with the `TZ` variable. ([\#5383](https://github.com/matrix-org/synapse/issues/5383)) +- Update docker image to use Python 3.7. ([\#5546](https://github.com/matrix-org/synapse/issues/5546)) +- Deprecate the use of environment variables for configuration, and make the use of a static configuration the default. ([\#5561](https://github.com/matrix-org/synapse/issues/5561), [\#5562](https://github.com/matrix-org/synapse/issues/5562), [\#5566](https://github.com/matrix-org/synapse/issues/5566), [\#5567](https://github.com/matrix-org/synapse/issues/5567)) +- Increase default log level for docker image to INFO. It can still be changed by editing the generated log.config file. ([\#5547](https://github.com/matrix-org/synapse/issues/5547)) +- Send synapse logs to the docker logging system, by default. ([\#5565](https://github.com/matrix-org/synapse/issues/5565)) +- Open the non-TLS port by default. ([\#5568](https://github.com/matrix-org/synapse/issues/5568)) +- Fix failure to start under docker with SAML support enabled. ([\#5490](https://github.com/matrix-org/synapse/issues/5490)) +- Use a sensible location for data files when generating a config file. 
([\#5563](https://github.com/matrix-org/synapse/issues/5563)) + + Deprecations and Removals ------------------------- From 0ee9076ffe40140db5e86763af077efbdb5d86b0 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Wed, 3 Jul 2019 04:01:28 +1000 Subject: [PATCH 41/61] Fix media repo breaking (#5593) --- changelog.d/5593.bugfix | 1 + synapse/http/server.py | 26 +++++++++------ synapse/rest/media/v1/preview_url_resource.py | 1 + synapse/util/logcontext.py | 9 +++-- tests/rest/media/v1/test_url_preview.py | 12 +++++++ tests/util/test_logcontext.py | 33 ++++++++++++------- 6 files changed, 58 insertions(+), 24 deletions(-) create mode 100644 changelog.d/5593.bugfix diff --git a/changelog.d/5593.bugfix b/changelog.d/5593.bugfix new file mode 100644 index 000000000..e981589ac --- /dev/null +++ b/changelog.d/5593.bugfix @@ -0,0 +1 @@ +Fix regression in 1.1rc1 where OPTIONS requests to the media repo would fail. diff --git a/synapse/http/server.py b/synapse/http/server.py index f067c163c..d993161a3 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -65,8 +65,8 @@ def wrap_json_request_handler(h): The handler method must have a signature of "handle_foo(self, request)", where "request" must be a SynapseRequest. - The handler must return a deferred. If the deferred succeeds we assume that - a response has been sent. If the deferred fails with a SynapseError we use + The handler must return a deferred or a coroutine. If the deferred succeeds + we assume that a response has been sent. If the deferred fails with a SynapseError we use it to send a JSON response with the appropriate HTTP reponse code. If the deferred fails with any other type of error we send a 500 reponse. """ @@ -353,16 +353,22 @@ class DirectServeResource(resource.Resource): """ Render the request, using an asynchronous render handler if it exists. """ - render_callback_name = "_async_render_" + request.method.decode("ascii") + async_render_callback_name = "_async_render_" + request.method.decode("ascii") - if hasattr(self, render_callback_name): - # Call the handler - callback = getattr(self, render_callback_name) - defer.ensureDeferred(callback(request)) + # Try and get the async renderer + callback = getattr(self, async_render_callback_name, None) - return NOT_DONE_YET - else: - super().render(request) + # No async renderer for this request method. + if not callback: + return super().render(request) + + resp = callback(request) + + # If it's a coroutine, turn it into a Deferred + if isinstance(resp, types.CoroutineType): + defer.ensureDeferred(resp) + + return NOT_DONE_YET def _options_handler(request): diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py index 0337b64dc..053346fb8 100644 --- a/synapse/rest/media/v1/preview_url_resource.py +++ b/synapse/rest/media/v1/preview_url_resource.py @@ -95,6 +95,7 @@ class PreviewUrlResource(DirectServeResource): ) def render_OPTIONS(self, request): + request.setHeader(b"Allow", b"OPTIONS, GET") return respond_with_json(request, 200, {}, send_cors=True) @wrap_json_request_handler diff --git a/synapse/util/logcontext.py b/synapse/util/logcontext.py index 6b0d2deea..9e1b53780 100644 --- a/synapse/util/logcontext.py +++ b/synapse/util/logcontext.py @@ -24,6 +24,7 @@ See doc/log_contexts.rst for details on how this works. 
import logging import threading +import types from twisted.internet import defer, threads @@ -528,8 +529,9 @@ def run_in_background(f, *args, **kwargs): return from the function, and that the sentinel context is set once the deferred returned by the function completes. - Useful for wrapping functions that return a deferred which you don't yield - on (for instance because you want to pass it to deferred.gatherResults()). + Useful for wrapping functions that return a deferred or coroutine, which you don't + yield or await on (for instance because you want to pass it to + deferred.gatherResults()). Note that if you completely discard the result, you should make sure that `f` doesn't raise any deferred exceptions, otherwise a scary-looking @@ -544,6 +546,9 @@ def run_in_background(f, *args, **kwargs): # by synchronous exceptions, so let's turn them into Failures. return defer.fail() + if isinstance(res, types.CoroutineType): + res = defer.ensureDeferred(res) + if not isinstance(res, defer.Deferred): return res diff --git a/tests/rest/media/v1/test_url_preview.py b/tests/rest/media/v1/test_url_preview.py index 8fe596186..976652aee 100644 --- a/tests/rest/media/v1/test_url_preview.py +++ b/tests/rest/media/v1/test_url_preview.py @@ -460,3 +460,15 @@ class URLPreviewTests(unittest.HomeserverTestCase): "error": "DNS resolution failure during URL preview generation", }, ) + + def test_OPTIONS(self): + """ + OPTIONS returns the OPTIONS. + """ + request, channel = self.make_request( + "OPTIONS", "url_preview?url=http://example.com", shorthand=False + ) + request.render(self.preview_url) + self.pump() + self.assertEqual(channel.code, 200) + self.assertEqual(channel.json_body, {}) diff --git a/tests/util/test_logcontext.py b/tests/util/test_logcontext.py index 8adaee3c8..8d69fbf11 100644 --- a/tests/util/test_logcontext.py +++ b/tests/util/test_logcontext.py @@ -39,24 +39,17 @@ class LoggingContextTestCase(unittest.TestCase): callback_completed = [False] - def test(): + with LoggingContext() as context_one: context_one.request = "one" - d = function() + + # fire off function, but don't wait on it. + d2 = logcontext.run_in_background(function) def cb(res): - self._check_test_key("one") callback_completed[0] = True return res - d.addCallback(cb) - - return d - - with LoggingContext() as context_one: - context_one.request = "one" - - # fire off function, but don't wait on it. 
- logcontext.run_in_background(test) + d2.addCallback(cb) self._check_test_key("one") @@ -105,6 +98,22 @@ class LoggingContextTestCase(unittest.TestCase): return self._test_run_in_background(testfunc) + def test_run_in_background_with_coroutine(self): + async def testfunc(): + self._check_test_key("one") + d = Clock(reactor).sleep(0) + self.assertIs(LoggingContext.current_context(), LoggingContext.sentinel) + await d + self._check_test_key("one") + + return self._test_run_in_background(testfunc) + + def test_run_in_background_with_nonblocking_coroutine(self): + async def testfunc(): + self._check_test_key("one") + + return self._test_run_in_background(testfunc) + @defer.inlineCallbacks def test_make_deferred_yieldable(self): # a function which retuns an incomplete deferred, but doesn't follow From c7b48bd42deb4b8c320c6c1b4006e689a9eefe14 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Wed, 3 Jul 2019 07:14:48 +0100 Subject: [PATCH 42/61] Remove SMTP_* env var functionality from docker conf (#5596) Removes any `SMTP_*` docker container environment variables from having any effect on the default config. Fixes https://github.com/matrix-org/synapse/issues/5430 --- changelog.d/5596.bugfix | 1 + docker/conf/homeserver.yaml | 19 ------------------- 2 files changed, 1 insertion(+), 19 deletions(-) create mode 100644 changelog.d/5596.bugfix diff --git a/changelog.d/5596.bugfix b/changelog.d/5596.bugfix new file mode 100644 index 000000000..da7ca1ed8 --- /dev/null +++ b/changelog.d/5596.bugfix @@ -0,0 +1 @@ +Removed the `SYNAPSE_SMTP_*` docker container environment variables. Using these environment variables prevented the docker container from starting in Synapse v1.0, even though they didn't actually allow any functionality anyway. Users are advised to remove `SYNAPSE_SMTP_HOST`, `SYNAPSE_SMTP_PORT`, `SYNAPSE_SMTP_USER`, `SYNAPSE_SMTP_PASSWORD` and `SYNAPSE_SMTP_FROM` environment variables from their docker run commands. diff --git a/docker/conf/homeserver.yaml b/docker/conf/homeserver.yaml index b0267b1c6..c1110f0f5 100644 --- a/docker/conf/homeserver.yaml +++ b/docker/conf/homeserver.yaml @@ -207,22 +207,3 @@ perspectives: password_config: enabled: true - -{% if SYNAPSE_SMTP_HOST %} -email: - enable_notifs: false - smtp_host: "{{ SYNAPSE_SMTP_HOST }}" - smtp_port: {{ SYNAPSE_SMTP_PORT or "25" }} - smtp_user: "{{ SYNAPSE_SMTP_USER }}" - smtp_pass: "{{ SYNAPSE_SMTP_PASSWORD }}" - require_transport_security: False - notif_from: "{{ SYNAPSE_SMTP_FROM or "hostmaster@" + SYNAPSE_SERVER_NAME }}" - app_name: Matrix - # if template_dir is unset, uses the example templates that are part of - # the Synapse distribution. 
- #template_dir: res/templates - notif_template_html: notif_mail.html - notif_template_text: notif_mail.txt - notif_for_new_users: True - riot_base_url: "https://{{ SYNAPSE_SERVER_NAME }}" -{% endif %} From 91753cae5962a56ce0440b06891f1040ba7582e9 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Wed, 3 Jul 2019 09:31:27 +0100 Subject: [PATCH 43/61] Fix a number of "Starting txn from sentinel context" warnings (#5605) Fixes #5602, #5603 --- changelog.d/5605.bugfix | 1 + synapse/handlers/account_validity.py | 10 +++++++++- synapse/storage/events.py | 9 ++++++++- synapse/storage/registration.py | 13 ++++++++++--- synapse/util/__init__.py | 8 +++++++- 5 files changed, 35 insertions(+), 6 deletions(-) create mode 100644 changelog.d/5605.bugfix diff --git a/changelog.d/5605.bugfix b/changelog.d/5605.bugfix new file mode 100644 index 000000000..4995ba9f6 --- /dev/null +++ b/changelog.d/5605.bugfix @@ -0,0 +1 @@ +Fix a number of "Starting txn from sentinel context" warnings. diff --git a/synapse/handlers/account_validity.py b/synapse/handlers/account_validity.py index 0719da3ab..edb48054a 100644 --- a/synapse/handlers/account_validity.py +++ b/synapse/handlers/account_validity.py @@ -22,6 +22,7 @@ from email.mime.text import MIMEText from twisted.internet import defer from synapse.api.errors import StoreError +from synapse.metrics.background_process_metrics import run_as_background_process from synapse.types import UserID from synapse.util import stringutils from synapse.util.logcontext import make_deferred_yieldable @@ -67,7 +68,14 @@ class AccountValidityHandler(object): ) # Check the renewal emails to send and send them every 30min. - self.clock.looping_call(self.send_renewal_emails, 30 * 60 * 1000) + def send_emails(): + # run as a background process to make sure that the database transactions + # have a logcontext to report to + return run_as_background_process( + "send_renewals", self.send_renewal_emails + ) + + self.clock.looping_call(send_emails, 30 * 60 * 1000) @defer.inlineCallbacks def send_renewal_emails(self): diff --git a/synapse/storage/events.py b/synapse/storage/events.py index fefba39ea..86f848570 100644 --- a/synapse/storage/events.py +++ b/synapse/storage/events.py @@ -253,7 +253,14 @@ class EventsStore( ) # Read the extrems every 60 minutes - hs.get_clock().looping_call(self._read_forward_extremities, 60 * 60 * 1000) + def read_forward_extremities(): + # run as a background process to make sure that the database transactions + # have a logcontext to report to + return run_as_background_process( + "read_forward_extremities", self._read_forward_extremities + ) + + hs.get_clock().looping_call(read_forward_extremities, 60 * 60 * 1000) @defer.inlineCallbacks def _read_forward_extremities(self): diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py index 983ce1329..13a3d5208 100644 --- a/synapse/storage/registration.py +++ b/synapse/storage/registration.py @@ -25,6 +25,7 @@ from twisted.internet import defer from synapse.api.constants import UserTypes from synapse.api.errors import Codes, StoreError, ThreepidValidationError +from synapse.metrics.background_process_metrics import run_as_background_process from synapse.storage import background_updates from synapse.storage._base import SQLBaseStore from synapse.types import UserID @@ -619,9 +620,15 @@ class RegistrationStore( ) # Create a background job for culling expired 3PID validity tokens - hs.get_clock().looping_call( - 
self.cull_expired_threepid_validation_tokens, THIRTY_MINUTES_IN_MS - ) + def start_cull(): + # run as a background process to make sure that the database transactions + # have a logcontext to report to + return run_as_background_process( + "cull_expired_threepid_validation_tokens", + self.cull_expired_threepid_validation_tokens, + ) + + hs.get_clock().looping_call(start_cull, THIRTY_MINUTES_IN_MS) @defer.inlineCallbacks def _backgroud_update_set_deactivated_flag(self, progress, batch_size): diff --git a/synapse/util/__init__.py b/synapse/util/__init__.py index dcc747cac..954e32fb2 100644 --- a/synapse/util/__init__.py +++ b/synapse/util/__init__.py @@ -62,7 +62,10 @@ class Clock(object): def looping_call(self, f, msec): """Call a function repeatedly. - Waits `msec` initially before calling `f` for the first time. + Waits `msec` initially before calling `f` for the first time. + + Note that the function will be called with no logcontext, so if it is anything + other than trivial, you probably want to wrap it in run_as_background_process. Args: f(function): The function to call repeatedly. @@ -77,6 +80,9 @@ class Clock(object): def call_later(self, delay, callback, *args, **kwargs): """Call something later + Note that the function will be called with no logcontext, so if it is anything + other than trivial, you probably want to wrap it in run_as_background_process. + Args: delay(float): How long to wait in seconds. callback(function): Function to call From 463d5a8fde4f029b4dcf7a68fe60130863b65167 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 3 Jul 2019 10:51:47 +0100 Subject: [PATCH 44/61] 1.1.0rc2 --- CHANGES.md | 17 +++++++++++++++++ changelog.d/5552.misc | 1 - changelog.d/5593.bugfix | 1 - changelog.d/5596.bugfix | 1 - changelog.d/5605.bugfix | 1 - synapse/__init__.py | 2 +- 6 files changed, 18 insertions(+), 5 deletions(-) delete mode 100644 changelog.d/5552.misc delete mode 100644 changelog.d/5593.bugfix delete mode 100644 changelog.d/5596.bugfix delete mode 100644 changelog.d/5605.bugfix diff --git a/CHANGES.md b/CHANGES.md index 329e59d39..e4710a506 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,20 @@ +Synapse 1.1.0rc2 (2019-07-03) +============================= + +Bugfixes +-------- + +- Fix regression in 1.1rc1 where OPTIONS requests to the media repo would fail. ([\#5593](https://github.com/matrix-org/synapse/issues/5593)) +- Removed the `SYNAPSE_SMTP_*` docker container environment variables. Using these environment variables prevented the docker container from starting in Synapse v1.0, even though they didn't actually allow any functionality anyway. Users are advised to remove `SYNAPSE_SMTP_HOST`, `SYNAPSE_SMTP_PORT`, `SYNAPSE_SMTP_USER`, `SYNAPSE_SMTP_PASSWORD` and `SYNAPSE_SMTP_FROM` environment variables from their docker run commands. ([\#5596](https://github.com/matrix-org/synapse/issues/5596)) +- Fix a number of "Starting txn from sentinel context" warnings. ([\#5605](https://github.com/matrix-org/synapse/issues/5605)) + + +Internal Changes +---------------- + +- Update github templates. ([\#5552](https://github.com/matrix-org/synapse/issues/5552)) + + Synapse 1.1.0rc1 (2019-07-02) ============================= diff --git a/changelog.d/5552.misc b/changelog.d/5552.misc deleted file mode 100644 index 77f480e88..000000000 --- a/changelog.d/5552.misc +++ /dev/null @@ -1 +0,0 @@ -Update github templates. 
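# A minimal sketch of the pattern PATCH 43 above applies in
# account_validity.py, events.py and registration.py: Clock.looping_call()
# invokes its callback with no logcontext, so the real work is wrapped in
# run_as_background_process() to give the resulting database transactions a
# logcontext to report against. The names start_cleanup_loop, do_cleanup and
# "example_cleanup" are illustrative, not taken from the patches.
from synapse.metrics.background_process_metrics import run_as_background_process

THIRTY_MINUTES_IN_MS = 30 * 60 * 1000


def start_cleanup_loop(hs, do_cleanup):
    def call_cleanup():
        # run as a background process so that the database transactions
        # have a logcontext to report to
        return run_as_background_process("example_cleanup", do_cleanup)

    hs.get_clock().looping_call(call_cleanup, THIRTY_MINUTES_IN_MS)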
diff --git a/changelog.d/5593.bugfix b/changelog.d/5593.bugfix deleted file mode 100644 index e981589ac..000000000 --- a/changelog.d/5593.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix regression in 1.1rc1 where OPTIONS requests to the media repo would fail. diff --git a/changelog.d/5596.bugfix b/changelog.d/5596.bugfix deleted file mode 100644 index da7ca1ed8..000000000 --- a/changelog.d/5596.bugfix +++ /dev/null @@ -1 +0,0 @@ -Removed the `SYNAPSE_SMTP_*` docker container environment variables. Using these environment variables prevented the docker container from starting in Synapse v1.0, even though they didn't actually allow any functionality anyway. Users are advised to remove `SYNAPSE_SMTP_HOST`, `SYNAPSE_SMTP_PORT`, `SYNAPSE_SMTP_USER`, `SYNAPSE_SMTP_PASSWORD` and `SYNAPSE_SMTP_FROM` environment variables from their docker run commands. diff --git a/changelog.d/5605.bugfix b/changelog.d/5605.bugfix deleted file mode 100644 index 4995ba9f6..000000000 --- a/changelog.d/5605.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a number of "Starting txn from sentinel context" warnings. diff --git a/synapse/__init__.py b/synapse/__init__.py index a7d2008e8..5fe863197 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -35,4 +35,4 @@ try: except ImportError: pass -__version__ = "1.1.0rc1" +__version__ = "1.1.0rc2" From cb8d568cf90bb0f5f07ee9e7e6796ad7cd83361f Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Wed, 3 Jul 2019 13:40:45 +0100 Subject: [PATCH 45/61] Fix 'utime went backwards' errors on daemonization. (#5609) * Fix 'utime went backwards' errors on daemonization. Fixes #5608 * remove spurious debug --- changelog.d/5609.bugfix | 1 + synapse/app/_base.py | 57 ++++++++++++++++++++------------------ synapse/util/logcontext.py | 17 +++++++++--- 3 files changed, 44 insertions(+), 31 deletions(-) create mode 100644 changelog.d/5609.bugfix diff --git a/changelog.d/5609.bugfix b/changelog.d/5609.bugfix new file mode 100644 index 000000000..534ee22a1 --- /dev/null +++ b/changelog.d/5609.bugfix @@ -0,0 +1 @@ +Fix 'utime went backwards' errors on daemonization. diff --git a/synapse/app/_base.py b/synapse/app/_base.py index d50a9840d..9b5c30f50 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -93,33 +93,36 @@ def start_reactor( install_dns_limiter(reactor) def run(): - # make sure that we run the reactor with the sentinel log context, - # otherwise other PreserveLoggingContext instances will get confused - # and complain when they see the logcontext arbitrarily swapping - # between the sentinel and `run` logcontexts. - with PreserveLoggingContext(): - logger.info("Running") - - change_resource_limit(soft_file_limit) - if gc_thresholds: - gc.set_threshold(*gc_thresholds) - reactor.run() - - if daemonize: - if print_pidfile: - print(pid_file) - - daemon = Daemonize( - app=appname, - pid=pid_file, - action=run, - auto_close_fds=False, - verbose=True, - logger=logger, - ) - daemon.start() - else: - run() + logger.info("Running") + change_resource_limit(soft_file_limit) + if gc_thresholds: + gc.set_threshold(*gc_thresholds) + reactor.run() + + # make sure that we run the reactor with the sentinel log context, + # otherwise other PreserveLoggingContext instances will get confused + # and complain when they see the logcontext arbitrarily swapping + # between the sentinel and `run` logcontexts. 
+ # + # We also need to drop the logcontext before forking if we're daemonizing, + # otherwise the cputime metrics get confused about the per-thread resource usage + # appearing to go backwards. + with PreserveLoggingContext(): + if daemonize: + if print_pidfile: + print(pid_file) + + daemon = Daemonize( + app=appname, + pid=pid_file, + action=run, + auto_close_fds=False, + verbose=True, + logger=logger, + ) + daemon.start() + else: + run() def quit_with_error(error_string): diff --git a/synapse/util/logcontext.py b/synapse/util/logcontext.py index 9e1b53780..30dfa1d6b 100644 --- a/synapse/util/logcontext.py +++ b/synapse/util/logcontext.py @@ -52,6 +52,15 @@ except Exception: return None +# get an id for the current thread. +# +# threading.get_ident doesn't actually return an OS-level tid, and annoyingly, +# on Linux it actually returns the same value either side of a fork() call. However +# we only fork in one place, so it's not worth the hoop-jumping to get a real tid. +# +get_thread_id = threading.get_ident + + class ContextResourceUsage(object): """Object for tracking the resources used by a log context @@ -225,7 +234,7 @@ class LoggingContext(object): # became active. self.usage_start = None - self.main_thread = threading.current_thread() + self.main_thread = get_thread_id() self.request = None self.tag = "" self.alive = True @@ -318,7 +327,7 @@ class LoggingContext(object): record.request = self.request def start(self): - if threading.current_thread() is not self.main_thread: + if get_thread_id() != self.main_thread: logger.warning("Started logcontext %s on different thread", self) return @@ -328,7 +337,7 @@ class LoggingContext(object): self.usage_start = get_thread_resource_usage() def stop(self): - if threading.current_thread() is not self.main_thread: + if get_thread_id() != self.main_thread: logger.warning("Stopped logcontext %s on different thread", self) return @@ -355,7 +364,7 @@ class LoggingContext(object): # If we are on the correct thread and we're currently running then we # can include resource usage so far. 
- is_main_thread = threading.current_thread() is self.main_thread + is_main_thread = get_thread_id() == self.main_thread if self.alive and self.usage_start and is_main_thread: utime_delta, stime_delta = self._get_cputime() res.ru_utime += utime_delta From 463b072b1290a8cb75bc1d7b6688fa76bbfdb14f Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Thu, 4 Jul 2019 00:07:04 +1000 Subject: [PATCH 46/61] Move logging utilities out of the side drawer of util/ and into logging/ (#5606) --- changelog.d/5606.misc | 1 + contrib/example_log_config.yaml | 8 +-- contrib/experiments/test_messaging.py | 2 +- contrib/systemd/log_config.yaml | 2 +- debian/changelog | 3 ++ debian/log.yaml | 2 +- docker/conf/log.config | 2 +- docs/log_contexts.rst | 38 +++++++------- synapse/app/_base.py | 2 +- synapse/app/appservice.py | 2 +- synapse/app/client_reader.py | 2 +- synapse/app/event_creator.py | 2 +- synapse/app/federation_reader.py | 2 +- synapse/app/federation_sender.py | 2 +- synapse/app/frontend_proxy.py | 2 +- synapse/app/homeserver.py | 2 +- synapse/app/media_repository.py | 2 +- synapse/app/pusher.py | 2 +- synapse/app/synchrotron.py | 2 +- synapse/app/user_dir.py | 2 +- synapse/appservice/scheduler.py | 2 +- synapse/config/logger.py | 4 +- synapse/crypto/keyring.py | 15 +++--- synapse/events/snapshot.py | 2 +- synapse/federation/federation_base.py | 24 +++++---- synapse/federation/federation_client.py | 8 +-- synapse/federation/federation_server.py | 4 +- synapse/federation/persistence.py | 2 +- synapse/federation/sender/__init__.py | 12 +++-- synapse/federation/transport/client.py | 2 +- synapse/federation/transport/server.py | 2 +- synapse/groups/attestations.py | 2 +- synapse/handlers/account_validity.py | 2 +- synapse/handlers/appservice.py | 2 +- synapse/handlers/auth.py | 6 +-- synapse/handlers/e2e_keys.py | 2 +- synapse/handlers/events.py | 2 +- synapse/handlers/federation.py | 43 ++++++++-------- synapse/handlers/initial_sync.py | 2 +- synapse/handlers/message.py | 2 +- synapse/handlers/pagination.py | 2 +- synapse/handlers/presence.py | 4 +- synapse/handlers/sync.py | 2 +- synapse/handlers/typing.py | 2 +- synapse/http/client.py | 2 +- .../federation/matrix_federation_agent.py | 2 +- synapse/http/federation/srv_resolver.py | 2 +- synapse/http/matrixfederationclient.py | 2 +- synapse/http/request_metrics.py | 2 +- synapse/http/server.py | 2 +- synapse/http/site.py | 2 +- synapse/logging/__init__.py | 0 .../logcontext.py => logging/context.py} | 0 .../logformatter.py => logging/formatter.py} | 0 .../{util/logutils.py => logging/utils.py} | 0 synapse/metrics/background_process_metrics.py | 2 +- synapse/notifier.py | 4 +- synapse/push/mailer.py | 2 +- synapse/replication/tcp/protocol.py | 2 +- synapse/rest/client/transactions.py | 2 +- synapse/rest/media/v1/_base.py | 6 +-- synapse/rest/media/v1/media_repository.py | 12 ++--- synapse/rest/media/v1/media_storage.py | 5 +- synapse/rest/media/v1/preview_url_resource.py | 2 +- synapse/rest/media/v1/storage_provider.py | 5 +- synapse/state/__init__.py | 2 +- synapse/storage/_base.py | 2 +- synapse/storage/events.py | 4 +- synapse/storage/events_worker.py | 6 +-- synapse/storage/stream.py | 2 +- synapse/util/__init__.py | 12 +++-- synapse/util/async_helpers.py | 9 ++-- synapse/util/caches/descriptors.py | 11 +++-- synapse/util/caches/response_cache.py | 4 +- synapse/util/distributor.py | 2 +- synapse/util/file_consumer.py | 2 +- synapse/util/metrics.py | 2 +- synapse/util/ratelimitutils.py | 2 +- synapse/util/retryutils.py | 4 +- 
tests/appservice/test_scheduler.py | 2 +- tests/crypto/test_keyring.py | 13 +++-- .../test_matrix_federation_agent.py | 2 +- tests/http/federation/test_srv_resolver.py | 2 +- tests/http/test_fedclient.py | 2 +- tests/patch_inline_callbacks.py | 2 +- tests/push/test_http.py | 2 +- tests/rest/client/test_transactions.py | 2 +- tests/rest/media/v1/test_media_storage.py | 2 +- tests/test_federation.py | 2 +- tests/test_server.py | 2 +- tests/test_utils/logging_setup.py | 2 +- tests/unittest.py | 2 +- tests/util/caches/test_descriptors.py | 49 ++++++++----------- tests/util/test_async_utils.py | 7 ++- tests/util/test_linearizer.py | 9 ++-- tests/util/test_logcontext.py | 24 +++++---- tests/util/test_logformatter.py | 2 +- tests/utils.py | 2 +- 98 files changed, 249 insertions(+), 233 deletions(-) create mode 100644 changelog.d/5606.misc create mode 100644 synapse/logging/__init__.py rename synapse/{util/logcontext.py => logging/context.py} (100%) rename synapse/{util/logformatter.py => logging/formatter.py} (100%) rename synapse/{util/logutils.py => logging/utils.py} (100%) diff --git a/changelog.d/5606.misc b/changelog.d/5606.misc new file mode 100644 index 000000000..bb3c02816 --- /dev/null +++ b/changelog.d/5606.misc @@ -0,0 +1 @@ +Move logging code out of `synapse.util` and into `synapse.logging`. diff --git a/contrib/example_log_config.yaml b/contrib/example_log_config.yaml index 06592963d..3a76a7a33 100644 --- a/contrib/example_log_config.yaml +++ b/contrib/example_log_config.yaml @@ -1,7 +1,7 @@ -# Example log_config file for synapse. To enable, point `log_config` to it in +# Example log_config file for synapse. To enable, point `log_config` to it in # `homeserver.yaml`, and restart synapse. # -# This configuration will produce similar results to the defaults within +# This configuration will produce similar results to the defaults within # synapse, but can be edited to give more flexibility. version: 1 @@ -12,7 +12,7 @@ formatters: filters: context: - (): synapse.util.logcontext.LoggingContextFilter + (): synapse.logging.context.LoggingContextFilter request: "" handlers: @@ -35,7 +35,7 @@ handlers: root: level: INFO handlers: [console] # to use file handler instead, switch to [file] - + loggers: synapse: level: INFO diff --git a/contrib/experiments/test_messaging.py b/contrib/experiments/test_messaging.py index c7e55d8aa..5ef140ae4 100644 --- a/contrib/experiments/test_messaging.py +++ b/contrib/experiments/test_messaging.py @@ -36,7 +36,7 @@ from synapse.util import origin_from_ucid from synapse.app.homeserver import SynapseHomeServer -# from synapse.util.logutils import log_function +# from synapse.logging.utils import log_function from twisted.internet import reactor, defer from twisted.python import log diff --git a/contrib/systemd/log_config.yaml b/contrib/systemd/log_config.yaml index d85bdd120..22f67a50c 100644 --- a/contrib/systemd/log_config.yaml +++ b/contrib/systemd/log_config.yaml @@ -8,7 +8,7 @@ formatters: filters: context: - (): synapse.util.logcontext.LoggingContextFilter + (): synapse.logging.context.LoggingContextFilter request: "" handlers: diff --git a/debian/changelog b/debian/changelog index 91653e724..bf5a59b33 100644 --- a/debian/changelog +++ b/debian/changelog @@ -3,6 +3,9 @@ matrix-synapse-py3 (1.0.0+nmu1) UNRELEASED; urgency=medium [ Silke Hofstra ] * Include systemd-python to allow logging to the systemd journal. + [ Amber Brown ] + * Update logging config defaults to match API changes in Synapse. 
+ -- Silke Hofstra Wed, 29 May 2019 09:45:29 +0200 matrix-synapse-py3 (1.0.0) stable; urgency=medium diff --git a/debian/log.yaml b/debian/log.yaml index 206b65f1a..95b655dd3 100644 --- a/debian/log.yaml +++ b/debian/log.yaml @@ -7,7 +7,7 @@ formatters: filters: context: - (): synapse.util.logcontext.LoggingContextFilter + (): synapse.logging.context.LoggingContextFilter request: "" handlers: diff --git a/docker/conf/log.config b/docker/conf/log.config index 895e45d20..ea5ccfd68 100644 --- a/docker/conf/log.config +++ b/docker/conf/log.config @@ -6,7 +6,7 @@ formatters: filters: context: - (): synapse.util.logcontext.LoggingContextFilter + (): synapse.logging.context.LoggingContextFilter request: "" handlers: diff --git a/docs/log_contexts.rst b/docs/log_contexts.rst index 27cde11cf..f5cd5de8a 100644 --- a/docs/log_contexts.rst +++ b/docs/log_contexts.rst @@ -1,4 +1,4 @@ -Log contexts +Log Contexts ============ .. contents:: @@ -12,7 +12,7 @@ record. Logcontexts are also used for CPU and database accounting, so that we can track which requests were responsible for high CPU use or database activity. -The ``synapse.util.logcontext`` module provides a facilities for managing the +The ``synapse.logging.context`` module provides a facilities for managing the current log context (as well as providing the ``LoggingContextFilter`` class). Deferreds make the whole thing complicated, so this document describes how it @@ -27,19 +27,19 @@ found them: .. code:: python - from synapse.util import logcontext # omitted from future snippets + from synapse.logging import context # omitted from future snippets def handle_request(request_id): - request_context = logcontext.LoggingContext() + request_context = context.LoggingContext() - calling_context = logcontext.LoggingContext.current_context() - logcontext.LoggingContext.set_current_context(request_context) + calling_context = context.LoggingContext.current_context() + context.LoggingContext.set_current_context(request_context) try: request_context.request = request_id do_request_handling() logger.debug("finished") finally: - logcontext.LoggingContext.set_current_context(calling_context) + context.LoggingContext.set_current_context(calling_context) def do_request_handling(): logger.debug("phew") # this will be logged against request_id @@ -51,7 +51,7 @@ written much more succinctly as: .. code:: python def handle_request(request_id): - with logcontext.LoggingContext() as request_context: + with context.LoggingContext() as request_context: request_context.request = request_id do_request_handling() logger.debug("finished") @@ -74,7 +74,7 @@ blocking operation, and returns a deferred: @defer.inlineCallbacks def handle_request(request_id): - with logcontext.LoggingContext() as request_context: + with context.LoggingContext() as request_context: request_context.request = request_id yield do_request_handling() logger.debug("finished") @@ -179,7 +179,7 @@ though, we need to make up a new Deferred, or we get a Deferred back from external code. We need to make it follow our rules. The easy way to do it is with a combination of ``defer.inlineCallbacks``, and -``logcontext.PreserveLoggingContext``. Suppose we want to implement ``sleep``, +``context.PreserveLoggingContext``. Suppose we want to implement ``sleep``, which returns a deferred which will run its callbacks after a given number of seconds. 
That might look like: @@ -204,13 +204,13 @@ That doesn't follow the rules, but we can fix it by wrapping it with This technique works equally for external functions which return deferreds, or deferreds we have made ourselves. -You can also use ``logcontext.make_deferred_yieldable``, which just does the +You can also use ``context.make_deferred_yieldable``, which just does the boilerplate for you, so the above could be written: .. code:: python def sleep(seconds): - return logcontext.make_deferred_yieldable(get_sleep_deferred(seconds)) + return context.make_deferred_yieldable(get_sleep_deferred(seconds)) Fire-and-forget @@ -279,7 +279,7 @@ Obviously that option means that the operations done in that might be fixed by setting a different logcontext via a ``with LoggingContext(...)`` in ``background_operation``). -The second option is to use ``logcontext.run_in_background``, which wraps a +The second option is to use ``context.run_in_background``, which wraps a function so that it doesn't reset the logcontext even when it returns an incomplete deferred, and adds a callback to the returned deferred to reset the logcontext. In other words, it turns a function that follows the Synapse rules @@ -293,7 +293,7 @@ It can be used like this: def do_request_handling(): yield foreground_operation() - logcontext.run_in_background(background_operation) + context.run_in_background(background_operation) # this will now be logged against the request context logger.debug("Request handling complete") @@ -332,7 +332,7 @@ gathered: result = yield defer.gatherResults([d1, d2]) In this case particularly, though, option two, of using -``logcontext.preserve_fn`` almost certainly makes more sense, so that +``context.preserve_fn`` almost certainly makes more sense, so that ``operation1`` and ``operation2`` are both logged against the original logcontext. This looks like: @@ -340,8 +340,8 @@ logcontext. This looks like: @defer.inlineCallbacks def do_request_handling(): - d1 = logcontext.preserve_fn(operation1)() - d2 = logcontext.preserve_fn(operation2)() + d1 = context.preserve_fn(operation1)() + d2 = context.preserve_fn(operation2)() with PreserveLoggingContext(): result = yield defer.gatherResults([d1, d2]) @@ -381,7 +381,7 @@ off the background process, and then leave the ``with`` block to wait for it: .. code:: python def handle_request(request_id): - with logcontext.LoggingContext() as request_context: + with context.LoggingContext() as request_context: request_context.request = request_id d = do_request_handling() @@ -414,7 +414,7 @@ runs its callbacks in the original logcontext, all is happy. The business of a Deferred which runs its callbacks in the original logcontext isn't hard to achieve — we have it today, in the shape of -``logcontext._PreservingContextDeferred``: +``context._PreservingContextDeferred``: .. 
code:: python diff --git a/synapse/app/_base.py b/synapse/app/_base.py index 9b5c30f50..1ebb7ae53 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -27,7 +27,7 @@ from twisted.protocols.tls import TLSMemoryBIOFactory import synapse from synapse.app import check_bind_error from synapse.crypto import context_factory -from synapse.util import PreserveLoggingContext +from synapse.logging.context import PreserveLoggingContext from synapse.util.async_helpers import Linearizer from synapse.util.rlimit import change_resource_limit from synapse.util.versionstring import get_version_string diff --git a/synapse/app/appservice.py b/synapse/app/appservice.py index 9120bdb14..be44249ed 100644 --- a/synapse/app/appservice.py +++ b/synapse/app/appservice.py @@ -26,6 +26,7 @@ from synapse.config._base import ConfigError from synapse.config.homeserver import HomeServerConfig from synapse.config.logger import setup_logging from synapse.http.site import SynapseSite +from synapse.logging.context import LoggingContext, run_in_background from synapse.metrics import RegistryProxy from synapse.metrics.resource import METRICS_PREFIX, MetricsResource from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore @@ -36,7 +37,6 @@ from synapse.replication.tcp.client import ReplicationClientHandler from synapse.server import HomeServer from synapse.storage.engines import create_engine from synapse.util.httpresourcetree import create_resource_tree -from synapse.util.logcontext import LoggingContext, run_in_background from synapse.util.manhole import manhole from synapse.util.versionstring import get_version_string diff --git a/synapse/app/client_reader.py b/synapse/app/client_reader.py index 90bc79cdd..ff11beca8 100644 --- a/synapse/app/client_reader.py +++ b/synapse/app/client_reader.py @@ -27,6 +27,7 @@ from synapse.config.homeserver import HomeServerConfig from synapse.config.logger import setup_logging from synapse.http.server import JsonResource from synapse.http.site import SynapseSite +from synapse.logging.context import LoggingContext from synapse.metrics import RegistryProxy from synapse.metrics.resource import METRICS_PREFIX, MetricsResource from synapse.replication.slave.storage._base import BaseSlavedStore @@ -64,7 +65,6 @@ from synapse.rest.client.versions import VersionsRestServlet from synapse.server import HomeServer from synapse.storage.engines import create_engine from synapse.util.httpresourcetree import create_resource_tree -from synapse.util.logcontext import LoggingContext from synapse.util.manhole import manhole from synapse.util.versionstring import get_version_string diff --git a/synapse/app/event_creator.py b/synapse/app/event_creator.py index ff522e449..cacad25ea 100644 --- a/synapse/app/event_creator.py +++ b/synapse/app/event_creator.py @@ -27,6 +27,7 @@ from synapse.config.homeserver import HomeServerConfig from synapse.config.logger import setup_logging from synapse.http.server import JsonResource from synapse.http.site import SynapseSite +from synapse.logging.context import LoggingContext from synapse.metrics import RegistryProxy from synapse.metrics.resource import METRICS_PREFIX, MetricsResource from synapse.replication.slave.storage._base import BaseSlavedStore @@ -59,7 +60,6 @@ from synapse.server import HomeServer from synapse.storage.engines import create_engine from synapse.storage.user_directory import UserDirectoryStore from synapse.util.httpresourcetree import create_resource_tree -from synapse.util.logcontext import LoggingContext from 
synapse.util.manhole import manhole from synapse.util.versionstring import get_version_string diff --git a/synapse/app/federation_reader.py b/synapse/app/federation_reader.py index 942142093..11e80dbae 100644 --- a/synapse/app/federation_reader.py +++ b/synapse/app/federation_reader.py @@ -28,6 +28,7 @@ from synapse.config.homeserver import HomeServerConfig from synapse.config.logger import setup_logging from synapse.federation.transport.server import TransportLayerServer from synapse.http.site import SynapseSite +from synapse.logging.context import LoggingContext from synapse.metrics import RegistryProxy from synapse.metrics.resource import METRICS_PREFIX, MetricsResource from synapse.replication.slave.storage._base import BaseSlavedStore @@ -48,7 +49,6 @@ from synapse.rest.key.v2 import KeyApiV2Resource from synapse.server import HomeServer from synapse.storage.engines import create_engine from synapse.util.httpresourcetree import create_resource_tree -from synapse.util.logcontext import LoggingContext from synapse.util.manhole import manhole from synapse.util.versionstring import get_version_string diff --git a/synapse/app/federation_sender.py b/synapse/app/federation_sender.py index 969be58d0..97da7bdcb 100644 --- a/synapse/app/federation_sender.py +++ b/synapse/app/federation_sender.py @@ -27,6 +27,7 @@ from synapse.config.homeserver import HomeServerConfig from synapse.config.logger import setup_logging from synapse.federation import send_queue from synapse.http.site import SynapseSite +from synapse.logging.context import LoggingContext, run_in_background from synapse.metrics import RegistryProxy from synapse.metrics.background_process_metrics import run_as_background_process from synapse.metrics.resource import METRICS_PREFIX, MetricsResource @@ -44,7 +45,6 @@ from synapse.storage.engines import create_engine from synapse.types import ReadReceipt from synapse.util.async_helpers import Linearizer from synapse.util.httpresourcetree import create_resource_tree -from synapse.util.logcontext import LoggingContext, run_in_background from synapse.util.manhole import manhole from synapse.util.versionstring import get_version_string diff --git a/synapse/app/frontend_proxy.py b/synapse/app/frontend_proxy.py index 2fd7d57eb..417a10bbd 100644 --- a/synapse/app/frontend_proxy.py +++ b/synapse/app/frontend_proxy.py @@ -29,6 +29,7 @@ from synapse.config.logger import setup_logging from synapse.http.server import JsonResource from synapse.http.servlet import RestServlet, parse_json_object_from_request from synapse.http.site import SynapseSite +from synapse.logging.context import LoggingContext from synapse.metrics import RegistryProxy from synapse.metrics.resource import METRICS_PREFIX, MetricsResource from synapse.replication.slave.storage._base import BaseSlavedStore @@ -41,7 +42,6 @@ from synapse.rest.client.v2_alpha._base import client_patterns from synapse.server import HomeServer from synapse.storage.engines import create_engine from synapse.util.httpresourcetree import create_resource_tree -from synapse.util.logcontext import LoggingContext from synapse.util.manhole import manhole from synapse.util.versionstring import get_version_string diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 49da105cf..639b1429c 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -54,6 +54,7 @@ from synapse.federation.transport.server import TransportLayerServer from synapse.http.additional_resource import AdditionalResource from synapse.http.server import RootRedirect 
from synapse.http.site import SynapseSite +from synapse.logging.context import LoggingContext from synapse.metrics import RegistryProxy from synapse.metrics.background_process_metrics import run_as_background_process from synapse.metrics.resource import METRICS_PREFIX, MetricsResource @@ -72,7 +73,6 @@ from synapse.storage.engines import IncorrectDatabaseSetup, create_engine from synapse.storage.prepare_database import UpgradeDatabaseException, prepare_database from synapse.util.caches import CACHE_SIZE_FACTOR from synapse.util.httpresourcetree import create_resource_tree -from synapse.util.logcontext import LoggingContext from synapse.util.manhole import manhole from synapse.util.module_loader import load_module from synapse.util.rlimit import change_resource_limit diff --git a/synapse/app/media_repository.py b/synapse/app/media_repository.py index cf0e2036c..f23b9b6ed 100644 --- a/synapse/app/media_repository.py +++ b/synapse/app/media_repository.py @@ -27,6 +27,7 @@ from synapse.config._base import ConfigError from synapse.config.homeserver import HomeServerConfig from synapse.config.logger import setup_logging from synapse.http.site import SynapseSite +from synapse.logging.context import LoggingContext from synapse.metrics import RegistryProxy from synapse.metrics.resource import METRICS_PREFIX, MetricsResource from synapse.replication.slave.storage._base import BaseSlavedStore @@ -40,7 +41,6 @@ from synapse.server import HomeServer from synapse.storage.engines import create_engine from synapse.storage.media_repository import MediaRepositoryStore from synapse.util.httpresourcetree import create_resource_tree -from synapse.util.logcontext import LoggingContext from synapse.util.manhole import manhole from synapse.util.versionstring import get_version_string diff --git a/synapse/app/pusher.py b/synapse/app/pusher.py index df29ea5ec..4f929edf8 100644 --- a/synapse/app/pusher.py +++ b/synapse/app/pusher.py @@ -26,6 +26,7 @@ from synapse.config._base import ConfigError from synapse.config.homeserver import HomeServerConfig from synapse.config.logger import setup_logging from synapse.http.site import SynapseSite +from synapse.logging.context import LoggingContext, run_in_background from synapse.metrics import RegistryProxy from synapse.metrics.resource import METRICS_PREFIX, MetricsResource from synapse.replication.slave.storage._base import __func__ @@ -38,7 +39,6 @@ from synapse.server import HomeServer from synapse.storage import DataStore from synapse.storage.engines import create_engine from synapse.util.httpresourcetree import create_resource_tree -from synapse.util.logcontext import LoggingContext, run_in_background from synapse.util.manhole import manhole from synapse.util.versionstring import get_version_string diff --git a/synapse/app/synchrotron.py b/synapse/app/synchrotron.py index 858949910..de4797fdd 100644 --- a/synapse/app/synchrotron.py +++ b/synapse/app/synchrotron.py @@ -31,6 +31,7 @@ from synapse.config.logger import setup_logging from synapse.handlers.presence import PresenceHandler, get_interested_parties from synapse.http.server import JsonResource from synapse.http.site import SynapseSite +from synapse.logging.context import LoggingContext, run_in_background from synapse.metrics import RegistryProxy from synapse.metrics.resource import METRICS_PREFIX, MetricsResource from synapse.replication.slave.storage._base import BaseSlavedStore, __func__ @@ -57,7 +58,6 @@ from synapse.server import HomeServer from synapse.storage.engines import create_engine from 
synapse.storage.presence import UserPresenceState from synapse.util.httpresourcetree import create_resource_tree -from synapse.util.logcontext import LoggingContext, run_in_background from synapse.util.manhole import manhole from synapse.util.stringutils import random_string from synapse.util.versionstring import get_version_string diff --git a/synapse/app/user_dir.py b/synapse/app/user_dir.py index 2d9d2e1bb..1177ddd72 100644 --- a/synapse/app/user_dir.py +++ b/synapse/app/user_dir.py @@ -28,6 +28,7 @@ from synapse.config.homeserver import HomeServerConfig from synapse.config.logger import setup_logging from synapse.http.server import JsonResource from synapse.http.site import SynapseSite +from synapse.logging.context import LoggingContext, run_in_background from synapse.metrics import RegistryProxy from synapse.metrics.resource import METRICS_PREFIX, MetricsResource from synapse.replication.slave.storage._base import BaseSlavedStore @@ -46,7 +47,6 @@ from synapse.storage.engines import create_engine from synapse.storage.user_directory import UserDirectoryStore from synapse.util.caches.stream_change_cache import StreamChangeCache from synapse.util.httpresourcetree import create_resource_tree -from synapse.util.logcontext import LoggingContext, run_in_background from synapse.util.manhole import manhole from synapse.util.versionstring import get_version_string diff --git a/synapse/appservice/scheduler.py b/synapse/appservice/scheduler.py index b54bf5411..e5b36494f 100644 --- a/synapse/appservice/scheduler.py +++ b/synapse/appservice/scheduler.py @@ -53,8 +53,8 @@ import logging from twisted.internet import defer from synapse.appservice import ApplicationServiceState +from synapse.logging.context import run_in_background from synapse.metrics.background_process_metrics import run_as_background_process -from synapse.util.logcontext import run_in_background logger = logging.getLogger(__name__) diff --git a/synapse/config/logger.py b/synapse/config/logger.py index 931aec41c..0f5554211 100644 --- a/synapse/config/logger.py +++ b/synapse/config/logger.py @@ -24,7 +24,7 @@ from twisted.logger import STDLibLogObserver, globalLogBeginner import synapse from synapse.app import _base as appbase -from synapse.util.logcontext import LoggingContextFilter +from synapse.logging.context import LoggingContextFilter from synapse.util.versionstring import get_version_string from ._base import Config @@ -40,7 +40,7 @@ formatters: filters: context: - (): synapse.util.logcontext.LoggingContextFilter + (): synapse.logging.context.LoggingContextFilter request: "" handlers: diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index 10c2eb7f0..341c86315 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -44,15 +44,16 @@ from synapse.api.errors import ( RequestSendFailed, SynapseError, ) -from synapse.storage.keys import FetchKeyResult -from synapse.util import logcontext, unwrapFirstError -from synapse.util.async_helpers import yieldable_gather_results -from synapse.util.logcontext import ( +from synapse.logging.context import ( LoggingContext, PreserveLoggingContext, + make_deferred_yieldable, preserve_fn, run_in_background, ) +from synapse.storage.keys import FetchKeyResult +from synapse.util import unwrapFirstError +from synapse.util.async_helpers import yieldable_gather_results from synapse.util.metrics import Measure from synapse.util.retryutils import NotRetryingDestination @@ -140,7 +141,7 @@ class Keyring(object): """ req = VerifyJsonRequest(server_name, json_object, 
validity_time, request_name) requests = (req,) - return logcontext.make_deferred_yieldable(self._verify_objects(requests)[0]) + return make_deferred_yieldable(self._verify_objects(requests)[0]) def verify_json_objects_for_server(self, server_and_json): """Bulk verifies signatures of json objects, bulk fetching keys as @@ -557,7 +558,7 @@ class BaseV2KeyFetcher(object): signed_key_json_bytes = encode_canonical_json(signed_key_json) - yield logcontext.make_deferred_yieldable( + yield make_deferred_yieldable( defer.gatherResults( [ run_in_background( @@ -612,7 +613,7 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher): defer.returnValue({}) - results = yield logcontext.make_deferred_yieldable( + results = yield make_deferred_yieldable( defer.gatherResults( [run_in_background(get_key, server) for server in self.key_servers], consumeErrors=True, diff --git a/synapse/events/snapshot.py b/synapse/events/snapshot.py index a96cdada3..a9545e6c1 100644 --- a/synapse/events/snapshot.py +++ b/synapse/events/snapshot.py @@ -19,7 +19,7 @@ from frozendict import frozendict from twisted.internet import defer -from synapse.util.logcontext import make_deferred_yieldable, run_in_background +from synapse.logging.context import make_deferred_yieldable, run_in_background class EventContext(object): diff --git a/synapse/federation/federation_base.py b/synapse/federation/federation_base.py index 1e925b19e..f7bb806ae 100644 --- a/synapse/federation/federation_base.py +++ b/synapse/federation/federation_base.py @@ -27,8 +27,14 @@ from synapse.crypto.event_signing import check_event_content_hash from synapse.events import event_type_from_format_version from synapse.events.utils import prune_event from synapse.http.servlet import assert_params_in_dict +from synapse.logging.context import ( + LoggingContext, + PreserveLoggingContext, + make_deferred_yieldable, + preserve_fn, +) from synapse.types import get_domain_from_id -from synapse.util import logcontext, unwrapFirstError +from synapse.util import unwrapFirstError logger = logging.getLogger(__name__) @@ -73,7 +79,7 @@ class FederationBase(object): @defer.inlineCallbacks def handle_check_result(pdu, deferred): try: - res = yield logcontext.make_deferred_yieldable(deferred) + res = yield make_deferred_yieldable(deferred) except SynapseError: res = None @@ -102,10 +108,10 @@ class FederationBase(object): defer.returnValue(res) - handle = logcontext.preserve_fn(handle_check_result) + handle = preserve_fn(handle_check_result) deferreds2 = [handle(pdu, deferred) for pdu, deferred in zip(pdus, deferreds)] - valid_pdus = yield logcontext.make_deferred_yieldable( + valid_pdus = yield make_deferred_yieldable( defer.gatherResults(deferreds2, consumeErrors=True) ).addErrback(unwrapFirstError) @@ -115,7 +121,7 @@ class FederationBase(object): defer.returnValue([p for p in valid_pdus if p]) def _check_sigs_and_hash(self, room_version, pdu): - return logcontext.make_deferred_yieldable( + return make_deferred_yieldable( self._check_sigs_and_hashes(room_version, [pdu])[0] ) @@ -133,14 +139,14 @@ class FederationBase(object): * returns a redacted version of the event (if the signature matched but the hash did not) * throws a SynapseError if the signature check failed. - The deferreds run their callbacks in the sentinel logcontext. 
+ The deferreds run their callbacks in the sentinel """ deferreds = _check_sigs_on_pdus(self.keyring, room_version, pdus) - ctx = logcontext.LoggingContext.current_context() + ctx = LoggingContext.current_context() def callback(_, pdu): - with logcontext.PreserveLoggingContext(ctx): + with PreserveLoggingContext(ctx): if not check_event_content_hash(pdu): # let's try to distinguish between failures because the event was # redacted (which are somewhat expected) vs actual ball-tampering @@ -178,7 +184,7 @@ class FederationBase(object): def errback(failure, pdu): failure.trap(SynapseError) - with logcontext.PreserveLoggingContext(ctx): + with PreserveLoggingContext(ctx): logger.warn( "Signature check failed for %s: %s", pdu.event_id, diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index 3883eb525..3cb4b9442 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -39,10 +39,10 @@ from synapse.api.room_versions import ( ) from synapse.events import builder, room_version_to_event_format from synapse.federation.federation_base import FederationBase, event_from_pdu_json -from synapse.util import logcontext, unwrapFirstError +from synapse.logging.context import make_deferred_yieldable, run_in_background +from synapse.logging.utils import log_function +from synapse.util import unwrapFirstError from synapse.util.caches.expiringcache import ExpiringCache -from synapse.util.logcontext import make_deferred_yieldable, run_in_background -from synapse.util.logutils import log_function from synapse.util.retryutils import NotRetryingDestination logger = logging.getLogger(__name__) @@ -207,7 +207,7 @@ class FederationClient(FederationBase): ] # FIXME: We should handle signature failures more gracefully. - pdus[:] = yield logcontext.make_deferred_yieldable( + pdus[:] = yield make_deferred_yieldable( defer.gatherResults( self._check_sigs_and_hashes(room_version, pdus), consumeErrors=True ).addErrback(unwrapFirstError) diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 2e0cebb63..8c0a18b12 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -42,6 +42,8 @@ from synapse.federation.federation_base import FederationBase, event_from_pdu_js from synapse.federation.persistence import TransactionActions from synapse.federation.units import Edu, Transaction from synapse.http.endpoint import parse_server_name +from synapse.logging.context import nested_logging_context +from synapse.logging.utils import log_function from synapse.replication.http.federation import ( ReplicationFederationSendEduRestServlet, ReplicationGetQueryRestServlet, @@ -50,8 +52,6 @@ from synapse.types import get_domain_from_id from synapse.util import glob_to_regex from synapse.util.async_helpers import Linearizer, concurrently_execute from synapse.util.caches.response_cache import ResponseCache -from synapse.util.logcontext import nested_logging_context -from synapse.util.logutils import log_function # when processing incoming transactions, we try to handle multiple rooms in # parallel, up to this limit. 
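# A minimal sketch of the callback pattern federation_base.py uses above: the
# deferreds returned by _check_sigs_on_pdus run their callbacks in the
# sentinel context, so the caller's logcontext is captured up front and
# re-entered with PreserveLoggingContext() whenever a callback fires. The
# helper add_pdu_check_callback and its arguments are illustrative only.
from synapse.logging.context import LoggingContext, PreserveLoggingContext


def add_pdu_check_callback(deferred, pdu, on_checked_pdu):
    # capture the logcontext the caller is currently running in
    ctx = LoggingContext.current_context()

    def callback(res):
        # the deferred fires in the sentinel context; restore the caller's
        # logcontext for the duration of the callback
        with PreserveLoggingContext(ctx):
            return on_checked_pdu(res, pdu)

    deferred.addCallback(callback)
    return deferred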
diff --git a/synapse/federation/persistence.py b/synapse/federation/persistence.py index 7535f7920..d086e0424 100644 --- a/synapse/federation/persistence.py +++ b/synapse/federation/persistence.py @@ -23,7 +23,7 @@ import logging from twisted.internet import defer -from synapse.util.logutils import log_function +from synapse.logging.utils import log_function logger = logging.getLogger(__name__) diff --git a/synapse/federation/sender/__init__.py b/synapse/federation/sender/__init__.py index 766c5a37c..d46f4aaeb 100644 --- a/synapse/federation/sender/__init__.py +++ b/synapse/federation/sender/__init__.py @@ -26,6 +26,11 @@ from synapse.federation.sender.per_destination_queue import PerDestinationQueue from synapse.federation.sender.transaction_manager import TransactionManager from synapse.federation.units import Edu from synapse.handlers.presence import get_interested_remotes +from synapse.logging.context import ( + make_deferred_yieldable, + preserve_fn, + run_in_background, +) from synapse.metrics import ( LaterGauge, event_processing_loop_counter, @@ -33,7 +38,6 @@ from synapse.metrics import ( events_processed_counter, ) from synapse.metrics.background_process_metrics import run_as_background_process -from synapse.util import logcontext from synapse.util.metrics import measure_func logger = logging.getLogger(__name__) @@ -210,10 +214,10 @@ class FederationSender(object): for event in events: events_by_room.setdefault(event.room_id, []).append(event) - yield logcontext.make_deferred_yieldable( + yield make_deferred_yieldable( defer.gatherResults( [ - logcontext.run_in_background(handle_room_events, evs) + run_in_background(handle_room_events, evs) for evs in itervalues(events_by_room) ], consumeErrors=True, @@ -360,7 +364,7 @@ class FederationSender(object): for queue in queues: queue.flush_read_receipts_for_room(room_id) - @logcontext.preserve_fn # the caller should not yield on this + @preserve_fn # the caller should not yield on this @defer.inlineCallbacks def send_presence(self, states): """Send the new presence states to the appropriate destinations. 
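# A minimal sketch of the fan-out pattern the federation sender code above
# keeps using: each unit of work is started with run_in_background() so it
# begins under the caller's logcontext, and the caller then waits with
# make_deferred_yieldable(defer.gatherResults(...)) so its own logcontext is
# restored once everything completes. handle_all and handlers are
# illustrative names only.
from twisted.internet import defer

from synapse.logging.context import make_deferred_yieldable, run_in_background


@defer.inlineCallbacks
def handle_all(handlers):
    deferreds = [run_in_background(handler) for handler in handlers]
    results = yield make_deferred_yieldable(
        defer.gatherResults(deferreds, consumeErrors=True)
    )
    defer.returnValue(results)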
diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py index aecd14230..1aae9ec9e 100644 --- a/synapse/federation/transport/client.py +++ b/synapse/federation/transport/client.py @@ -22,7 +22,7 @@ from twisted.internet import defer from synapse.api.constants import Membership from synapse.api.urls import FEDERATION_V1_PREFIX, FEDERATION_V2_PREFIX -from synapse.util.logutils import log_function +from synapse.logging.utils import log_function logger = logging.getLogger(__name__) diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index 955f0f430..2efdcff7e 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -36,8 +36,8 @@ from synapse.http.servlet import ( parse_json_object_from_request, parse_string_from_args, ) +from synapse.logging.context import run_in_background from synapse.types import ThirdPartyInstanceID, get_domain_from_id -from synapse.util.logcontext import run_in_background from synapse.util.ratelimitutils import FederationRateLimiter from synapse.util.versionstring import get_version_string diff --git a/synapse/groups/attestations.py b/synapse/groups/attestations.py index e73757570..f49771113 100644 --- a/synapse/groups/attestations.py +++ b/synapse/groups/attestations.py @@ -43,9 +43,9 @@ from signedjson.sign import sign_json from twisted.internet import defer from synapse.api.errors import HttpResponseException, RequestSendFailed, SynapseError +from synapse.logging.context import run_in_background from synapse.metrics.background_process_metrics import run_as_background_process from synapse.types import get_domain_from_id -from synapse.util.logcontext import run_in_background logger = logging.getLogger(__name__) diff --git a/synapse/handlers/account_validity.py b/synapse/handlers/account_validity.py index edb48054a..1f1708ba7 100644 --- a/synapse/handlers/account_validity.py +++ b/synapse/handlers/account_validity.py @@ -22,10 +22,10 @@ from email.mime.text import MIMEText from twisted.internet import defer from synapse.api.errors import StoreError +from synapse.logging.context import make_deferred_yieldable from synapse.metrics.background_process_metrics import run_as_background_process from synapse.types import UserID from synapse.util import stringutils -from synapse.util.logcontext import make_deferred_yieldable try: from synapse.push.mailer import load_jinja2_templates diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index 5cc89d43f..8f089f0e3 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -23,13 +23,13 @@ from twisted.internet import defer import synapse from synapse.api.constants import EventTypes +from synapse.logging.context import make_deferred_yieldable, run_in_background from synapse.metrics import ( event_processing_loop_counter, event_processing_loop_room_count, ) from synapse.metrics.background_process_metrics import run_as_background_process from synapse.util import log_failure -from synapse.util.logcontext import make_deferred_yieldable, run_in_background from synapse.util.metrics import Measure logger = logging.getLogger(__name__) diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index c8c1ed324..ef5585aa9 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -36,9 +36,9 @@ from synapse.api.errors import ( SynapseError, ) from synapse.api.ratelimiting import Ratelimiter +from synapse.logging.context import defer_to_thread from 
synapse.module_api import ModuleApi from synapse.types import UserID -from synapse.util import logcontext from synapse.util.caches.expiringcache import ExpiringCache from ._base import BaseHandler @@ -987,7 +987,7 @@ class AuthHandler(BaseHandler): bcrypt.gensalt(self.bcrypt_rounds), ).decode("ascii") - return logcontext.defer_to_thread(self.hs.get_reactor(), _do_hash) + return defer_to_thread(self.hs.get_reactor(), _do_hash) def validate_hash(self, password, stored_hash): """Validates that self.hash(password) == stored_hash. @@ -1013,7 +1013,7 @@ class AuthHandler(BaseHandler): if not isinstance(stored_hash, bytes): stored_hash = stored_hash.encode("ascii") - return logcontext.defer_to_thread(self.hs.get_reactor(), _do_validate_hash) + return defer_to_thread(self.hs.get_reactor(), _do_validate_hash) else: return defer.succeed(False) diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py index 807900fe5..55b4ab3a1 100644 --- a/synapse/handlers/e2e_keys.py +++ b/synapse/handlers/e2e_keys.py @@ -23,8 +23,8 @@ from canonicaljson import encode_canonical_json, json from twisted.internet import defer from synapse.api.errors import CodeMessageException, FederationDeniedError, SynapseError +from synapse.logging.context import make_deferred_yieldable, run_in_background from synapse.types import UserID, get_domain_from_id -from synapse.util.logcontext import make_deferred_yieldable, run_in_background from synapse.util.retryutils import NotRetryingDestination logger = logging.getLogger(__name__) diff --git a/synapse/handlers/events.py b/synapse/handlers/events.py index 5836d3c63..6a38328af 100644 --- a/synapse/handlers/events.py +++ b/synapse/handlers/events.py @@ -21,8 +21,8 @@ from twisted.internet import defer from synapse.api.constants import EventTypes, Membership from synapse.api.errors import AuthError, SynapseError from synapse.events import EventBase +from synapse.logging.utils import log_function from synapse.types import UserID -from synapse.util.logutils import log_function from synapse.visibility import filter_events_for_client from ._base import BaseHandler diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 02d397c49..57be968c6 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -45,6 +45,13 @@ from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersions from synapse.crypto.event_signing import compute_event_signature from synapse.event_auth import auth_types_for_event from synapse.events.validator import EventValidator +from synapse.logging.context import ( + make_deferred_yieldable, + nested_logging_context, + preserve_fn, + run_in_background, +) +from synapse.logging.utils import log_function from synapse.replication.http.federation import ( ReplicationCleanRoomRestServlet, ReplicationFederationSendEventsRestServlet, @@ -52,10 +59,9 @@ from synapse.replication.http.federation import ( from synapse.replication.http.membership import ReplicationUserJoinedLeftRoomRestServlet from synapse.state import StateResolutionStore, resolve_events_with_store from synapse.types import UserID, get_domain_from_id -from synapse.util import logcontext, unwrapFirstError +from synapse.util import unwrapFirstError from synapse.util.async_helpers import Linearizer from synapse.util.distributor import user_joined_room -from synapse.util.logutils import log_function from synapse.util.retryutils import NotRetryingDestination from synapse.visibility import filter_events_for_server @@ -338,7 +344,7 @@ class 
FederationHandler(BaseHandler): room_version = yield self.store.get_room_version(room_id) - with logcontext.nested_logging_context(p): + with nested_logging_context(p): # note that if any of the missing prevs share missing state or # auth events, the requests to fetch those events are deduped # by the get_pdu_cache in federation_client. @@ -532,7 +538,7 @@ class FederationHandler(BaseHandler): event_id, ev.event_id, ) - with logcontext.nested_logging_context(ev.event_id): + with nested_logging_context(ev.event_id): try: yield self.on_receive_pdu(origin, ev, sent_to_us_directly=False) except FederationError as e: @@ -725,10 +731,10 @@ class FederationHandler(BaseHandler): missing_auth - failed_to_fetch, ) - results = yield logcontext.make_deferred_yieldable( + results = yield make_deferred_yieldable( defer.gatherResults( [ - logcontext.run_in_background( + run_in_background( self.federation_client.get_pdu, [dest], event_id, @@ -994,10 +1000,8 @@ class FederationHandler(BaseHandler): event_ids = list(extremities.keys()) logger.debug("calling resolve_state_groups in _maybe_backfill") - resolve = logcontext.preserve_fn( - self.state_handler.resolve_state_groups_for_events - ) - states = yield logcontext.make_deferred_yieldable( + resolve = preserve_fn(self.state_handler.resolve_state_groups_for_events) + states = yield make_deferred_yieldable( defer.gatherResults( [resolve(room_id, [e]) for e in event_ids], consumeErrors=True ) @@ -1171,7 +1175,7 @@ class FederationHandler(BaseHandler): # lots of requests for missing prev_events which we do actually # have. Hence we fire off the deferred, but don't wait for it. - logcontext.run_in_background(self._handle_queued_pdus, room_queue) + run_in_background(self._handle_queued_pdus, room_queue) defer.returnValue(True) @@ -1191,7 +1195,7 @@ class FederationHandler(BaseHandler): p.event_id, p.room_id, ) - with logcontext.nested_logging_context(p.event_id): + with nested_logging_context(p.event_id): yield self.on_receive_pdu(origin, p, sent_to_us_directly=True) except Exception as e: logger.warn( @@ -1610,7 +1614,7 @@ class FederationHandler(BaseHandler): success = True finally: if not success: - logcontext.run_in_background( + run_in_background( self.store.remove_push_actions_from_staging, event.event_id ) @@ -1629,7 +1633,7 @@ class FederationHandler(BaseHandler): @defer.inlineCallbacks def prep(ev_info): event = ev_info["event"] - with logcontext.nested_logging_context(suffix=event.event_id): + with nested_logging_context(suffix=event.event_id): res = yield self._prep_event( origin, event, @@ -1639,12 +1643,9 @@ class FederationHandler(BaseHandler): ) defer.returnValue(res) - contexts = yield logcontext.make_deferred_yieldable( + contexts = yield make_deferred_yieldable( defer.gatherResults( - [ - logcontext.run_in_background(prep, ev_info) - for ev_info in event_infos - ], + [run_in_background(prep, ev_info) for ev_info in event_infos], consumeErrors=True, ) ) @@ -2106,10 +2107,10 @@ class FederationHandler(BaseHandler): room_version = yield self.store.get_room_version(event.room_id) - different_events = yield logcontext.make_deferred_yieldable( + different_events = yield make_deferred_yieldable( defer.gatherResults( [ - logcontext.run_in_background( + run_in_background( self.store.get_event, d, allow_none=True, allow_rejected=False ) for d in different_auth diff --git a/synapse/handlers/initial_sync.py b/synapse/handlers/initial_sync.py index a1fe9d116..54c966c8a 100644 --- a/synapse/handlers/initial_sync.py +++ b/synapse/handlers/initial_sync.py @@ 
-21,12 +21,12 @@ from synapse.api.constants import EventTypes, Membership from synapse.api.errors import AuthError, Codes, SynapseError from synapse.events.validator import EventValidator from synapse.handlers.presence import format_user_presence_state +from synapse.logging.context import make_deferred_yieldable, run_in_background from synapse.streams.config import PaginationConfig from synapse.types import StreamToken, UserID from synapse.util import unwrapFirstError from synapse.util.async_helpers import concurrently_execute from synapse.util.caches.snapshot_cache import SnapshotCache -from synapse.util.logcontext import make_deferred_yieldable, run_in_background from synapse.visibility import filter_events_for_client from ._base import BaseHandler diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 683da6bf3..eaeda7a5c 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -34,13 +34,13 @@ from synapse.api.errors import ( from synapse.api.room_versions import RoomVersions from synapse.api.urls import ConsentURIBuilder from synapse.events.validator import EventValidator +from synapse.logging.context import run_in_background from synapse.metrics.background_process_metrics import run_as_background_process from synapse.replication.http.send_event import ReplicationSendEventRestServlet from synapse.storage.state import StateFilter from synapse.types import RoomAlias, UserID, create_requester from synapse.util.async_helpers import Linearizer from synapse.util.frozenutils import frozendict_json_encoder -from synapse.util.logcontext import run_in_background from synapse.util.metrics import measure_func from synapse.visibility import filter_events_for_client diff --git a/synapse/handlers/pagination.py b/synapse/handlers/pagination.py index 76ee97ddd..20bcfed33 100644 --- a/synapse/handlers/pagination.py +++ b/synapse/handlers/pagination.py @@ -20,10 +20,10 @@ from twisted.python.failure import Failure from synapse.api.constants import EventTypes, Membership from synapse.api.errors import SynapseError +from synapse.logging.context import run_in_background from synapse.storage.state import StateFilter from synapse.types import RoomStreamToken from synapse.util.async_helpers import ReadWriteLock -from synapse.util.logcontext import run_in_background from synapse.util.stringutils import random_string from synapse.visibility import filter_events_for_client diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index c80dc2eba..6f3537e43 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -34,14 +34,14 @@ from twisted.internet import defer import synapse.metrics from synapse.api.constants import EventTypes, Membership, PresenceState from synapse.api.errors import SynapseError +from synapse.logging.context import run_in_background +from synapse.logging.utils import log_function from synapse.metrics import LaterGauge from synapse.metrics.background_process_metrics import run_as_background_process from synapse.storage.presence import UserPresenceState from synapse.types import UserID, get_domain_from_id from synapse.util.async_helpers import Linearizer from synapse.util.caches.descriptors import cachedInlineCallbacks -from synapse.util.logcontext import run_in_background -from synapse.util.logutils import log_function from synapse.util.metrics import Measure from synapse.util.wheel_timer import WheelTimer diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index a3f550554..cd1ac0a27 100644 --- 
a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -25,6 +25,7 @@ from prometheus_client import Counter from twisted.internet import defer from synapse.api.constants import EventTypes, Membership +from synapse.logging.context import LoggingContext from synapse.push.clientformat import format_push_rules_for_user from synapse.storage.roommember import MemberSummary from synapse.storage.state import StateFilter @@ -33,7 +34,6 @@ from synapse.util.async_helpers import concurrently_execute from synapse.util.caches.expiringcache import ExpiringCache from synapse.util.caches.lrucache import LruCache from synapse.util.caches.response_cache import ResponseCache -from synapse.util.logcontext import LoggingContext from synapse.util.metrics import Measure, measure_func from synapse.visibility import filter_events_for_client diff --git a/synapse/handlers/typing.py b/synapse/handlers/typing.py index f8062c867..c3e0c8fc7 100644 --- a/synapse/handlers/typing.py +++ b/synapse/handlers/typing.py @@ -19,9 +19,9 @@ from collections import namedtuple from twisted.internet import defer from synapse.api.errors import AuthError, SynapseError +from synapse.logging.context import run_in_background from synapse.types import UserID, get_domain_from_id from synapse.util.caches.stream_change_cache import StreamChangeCache -from synapse.util.logcontext import run_in_background from synapse.util.metrics import Measure from synapse.util.wheel_timer import WheelTimer diff --git a/synapse/http/client.py b/synapse/http/client.py index 9bc7035c8..45d501095 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -45,9 +45,9 @@ from synapse.http import ( cancelled_to_request_timed_out_error, redact_uri, ) +from synapse.logging.context import make_deferred_yieldable from synapse.util.async_helpers import timeout_deferred from synapse.util.caches import CACHE_SIZE_FACTOR -from synapse.util.logcontext import make_deferred_yieldable logger = logging.getLogger(__name__) diff --git a/synapse/http/federation/matrix_federation_agent.py b/synapse/http/federation/matrix_federation_agent.py index 414cde077..054c321a2 100644 --- a/synapse/http/federation/matrix_federation_agent.py +++ b/synapse/http/federation/matrix_federation_agent.py @@ -30,9 +30,9 @@ from twisted.web.http_headers import Headers from twisted.web.iweb import IAgent from synapse.http.federation.srv_resolver import SrvResolver, pick_server_from_list +from synapse.logging.context import make_deferred_yieldable from synapse.util import Clock from synapse.util.caches.ttlcache import TTLCache -from synapse.util.logcontext import make_deferred_yieldable from synapse.util.metrics import Measure # period to cache .well-known results for by default diff --git a/synapse/http/federation/srv_resolver.py b/synapse/http/federation/srv_resolver.py index 1f22f78a7..ecc88f9b9 100644 --- a/synapse/http/federation/srv_resolver.py +++ b/synapse/http/federation/srv_resolver.py @@ -25,7 +25,7 @@ from twisted.internet.error import ConnectError from twisted.names import client, dns from twisted.names.error import DNSNameError, DomainError -from synapse.util.logcontext import make_deferred_yieldable +from synapse.logging.context import make_deferred_yieldable logger = logging.getLogger(__name__) diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 5ef8bb60a..dee3710f6 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -48,8 +48,8 @@ from synapse.api.errors import ( from 
synapse.http import QuieterFileBodyProducer from synapse.http.client import BlacklistingAgentWrapper, IPBlacklistingResolver from synapse.http.federation.matrix_federation_agent import MatrixFederationAgent +from synapse.logging.context import make_deferred_yieldable from synapse.util.async_helpers import timeout_deferred -from synapse.util.logcontext import make_deferred_yieldable from synapse.util.metrics import Measure logger = logging.getLogger(__name__) diff --git a/synapse/http/request_metrics.py b/synapse/http/request_metrics.py index 62045a918..46af27c8f 100644 --- a/synapse/http/request_metrics.py +++ b/synapse/http/request_metrics.py @@ -19,8 +19,8 @@ import threading from prometheus_client.core import Counter, Histogram +from synapse.logging.context import LoggingContext from synapse.metrics import LaterGauge -from synapse.util.logcontext import LoggingContext logger = logging.getLogger(__name__) diff --git a/synapse/http/server.py b/synapse/http/server.py index d993161a3..72a3d67eb 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -39,8 +39,8 @@ from synapse.api.errors import ( SynapseError, UnrecognizedRequestError, ) +from synapse.logging.context import preserve_fn from synapse.util.caches import intern_dict -from synapse.util.logcontext import preserve_fn logger = logging.getLogger(__name__) diff --git a/synapse/http/site.py b/synapse/http/site.py index 93f679ea4..df5274c17 100644 --- a/synapse/http/site.py +++ b/synapse/http/site.py @@ -19,7 +19,7 @@ from twisted.web.server import Request, Site from synapse.http import redact_uri from synapse.http.request_metrics import RequestMetrics, requests_counter -from synapse.util.logcontext import LoggingContext, PreserveLoggingContext +from synapse.logging.context import LoggingContext, PreserveLoggingContext logger = logging.getLogger(__name__) diff --git a/synapse/logging/__init__.py b/synapse/logging/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/synapse/util/logcontext.py b/synapse/logging/context.py similarity index 100% rename from synapse/util/logcontext.py rename to synapse/logging/context.py diff --git a/synapse/util/logformatter.py b/synapse/logging/formatter.py similarity index 100% rename from synapse/util/logformatter.py rename to synapse/logging/formatter.py diff --git a/synapse/util/logutils.py b/synapse/logging/utils.py similarity index 100% rename from synapse/util/logutils.py rename to synapse/logging/utils.py diff --git a/synapse/metrics/background_process_metrics.py b/synapse/metrics/background_process_metrics.py index 167e2c068..edd6b42db 100644 --- a/synapse/metrics/background_process_metrics.py +++ b/synapse/metrics/background_process_metrics.py @@ -22,7 +22,7 @@ from prometheus_client.core import REGISTRY, Counter, GaugeMetricFamily from twisted.internet import defer -from synapse.util.logcontext import LoggingContext, PreserveLoggingContext +from synapse.logging.context import LoggingContext, PreserveLoggingContext logger = logging.getLogger(__name__) diff --git a/synapse/notifier.py b/synapse/notifier.py index d398078ee..918ef6489 100644 --- a/synapse/notifier.py +++ b/synapse/notifier.py @@ -23,12 +23,12 @@ from twisted.internet import defer from synapse.api.constants import EventTypes, Membership from synapse.api.errors import AuthError from synapse.handlers.presence import format_user_presence_state +from synapse.logging.context import PreserveLoggingContext +from synapse.logging.utils import log_function from synapse.metrics import LaterGauge from 
synapse.metrics.background_process_metrics import run_as_background_process from synapse.types import StreamToken from synapse.util.async_helpers import ObservableDeferred, timeout_deferred -from synapse.util.logcontext import PreserveLoggingContext -from synapse.util.logutils import log_function from synapse.util.metrics import Measure from synapse.visibility import filter_events_for_client diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py index 809199fe8..521c6e2cd 100644 --- a/synapse/push/mailer.py +++ b/synapse/push/mailer.py @@ -29,6 +29,7 @@ from twisted.internet import defer from synapse.api.constants import EventTypes from synapse.api.errors import StoreError +from synapse.logging.context import make_deferred_yieldable from synapse.push.presentable_names import ( calculate_room_name, descriptor_from_member_events, @@ -36,7 +37,6 @@ from synapse.push.presentable_names import ( ) from synapse.types import UserID from synapse.util.async_helpers import concurrently_execute -from synapse.util.logcontext import make_deferred_yieldable from synapse.visibility import filter_events_for_client logger = logging.getLogger(__name__) diff --git a/synapse/replication/tcp/protocol.py b/synapse/replication/tcp/protocol.py index 97efb835a..5ffdf2675 100644 --- a/synapse/replication/tcp/protocol.py +++ b/synapse/replication/tcp/protocol.py @@ -62,9 +62,9 @@ from twisted.internet import defer from twisted.protocols.basic import LineOnlyReceiver from twisted.python.failure import Failure +from synapse.logging.context import make_deferred_yieldable, run_in_background from synapse.metrics import LaterGauge from synapse.metrics.background_process_metrics import run_as_background_process -from synapse.util.logcontext import make_deferred_yieldable, run_in_background from synapse.util.stringutils import random_string from .commands import ( diff --git a/synapse/rest/client/transactions.py b/synapse/rest/client/transactions.py index 36404b797..6da71dc46 100644 --- a/synapse/rest/client/transactions.py +++ b/synapse/rest/client/transactions.py @@ -17,8 +17,8 @@ to ensure idempotency when performing PUTs using the REST API.""" import logging +from synapse.logging.context import make_deferred_yieldable, run_in_background from synapse.util.async_helpers import ObservableDeferred -from synapse.util.logcontext import make_deferred_yieldable, run_in_background logger = logging.getLogger(__name__) diff --git a/synapse/rest/media/v1/_base.py b/synapse/rest/media/v1/_base.py index 3318638d3..5fefee4dd 100644 --- a/synapse/rest/media/v1/_base.py +++ b/synapse/rest/media/v1/_base.py @@ -25,7 +25,7 @@ from twisted.protocols.basic import FileSender from synapse.api.errors import Codes, SynapseError, cs_error from synapse.http.server import finish_request, respond_with_json -from synapse.util import logcontext +from synapse.logging.context import make_deferred_yieldable from synapse.util.stringutils import is_ascii logger = logging.getLogger(__name__) @@ -75,9 +75,7 @@ def respond_with_file(request, media_type, file_path, file_size=None, upload_nam add_file_headers(request, media_type, file_size, upload_name) with open(file_path, "rb") as f: - yield logcontext.make_deferred_yieldable( - FileSender().beginFileTransfer(f, request) - ) + yield make_deferred_yieldable(FileSender().beginFileTransfer(f, request)) finish_request(request) else: diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index df3d985a3..65afffbb4 100644 --- 
a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -33,8 +33,8 @@ from synapse.api.errors import ( RequestSendFailed, SynapseError, ) +from synapse.logging.context import defer_to_thread from synapse.metrics.background_process_metrics import run_as_background_process -from synapse.util import logcontext from synapse.util.async_helpers import Linearizer from synapse.util.retryutils import NotRetryingDestination from synapse.util.stringutils import random_string @@ -463,7 +463,7 @@ class MediaRepository(object): ) thumbnailer = Thumbnailer(input_path) - t_byte_source = yield logcontext.defer_to_thread( + t_byte_source = yield defer_to_thread( self.hs.get_reactor(), self._generate_thumbnail, thumbnailer, @@ -511,7 +511,7 @@ class MediaRepository(object): ) thumbnailer = Thumbnailer(input_path) - t_byte_source = yield logcontext.defer_to_thread( + t_byte_source = yield defer_to_thread( self.hs.get_reactor(), self._generate_thumbnail, thumbnailer, @@ -596,7 +596,7 @@ class MediaRepository(object): return if thumbnailer.transpose_method is not None: - m_width, m_height = yield logcontext.defer_to_thread( + m_width, m_height = yield defer_to_thread( self.hs.get_reactor(), thumbnailer.transpose ) @@ -616,11 +616,11 @@ class MediaRepository(object): for (t_width, t_height, t_type), t_method in iteritems(thumbnails): # Generate the thumbnail if t_method == "crop": - t_byte_source = yield logcontext.defer_to_thread( + t_byte_source = yield defer_to_thread( self.hs.get_reactor(), thumbnailer.crop, t_width, t_height, t_type ) elif t_method == "scale": - t_byte_source = yield logcontext.defer_to_thread( + t_byte_source = yield defer_to_thread( self.hs.get_reactor(), thumbnailer.scale, t_width, t_height, t_type ) else: diff --git a/synapse/rest/media/v1/media_storage.py b/synapse/rest/media/v1/media_storage.py index eff86836f..25e5ac284 100644 --- a/synapse/rest/media/v1/media_storage.py +++ b/synapse/rest/media/v1/media_storage.py @@ -24,9 +24,8 @@ import six from twisted.internet import defer from twisted.protocols.basic import FileSender -from synapse.util import logcontext +from synapse.logging.context import defer_to_thread, make_deferred_yieldable from synapse.util.file_consumer import BackgroundFileConsumer -from synapse.util.logcontext import make_deferred_yieldable from ._base import Responder @@ -65,7 +64,7 @@ class MediaStorage(object): with self.store_into_file(file_info) as (f, fname, finish_cb): # Write to the main repository - yield logcontext.defer_to_thread( + yield defer_to_thread( self.hs.get_reactor(), _write_file_synchronously, source, f ) yield finish_cb() diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py index 053346fb8..5871737bf 100644 --- a/synapse/rest/media/v1/preview_url_resource.py +++ b/synapse/rest/media/v1/preview_url_resource.py @@ -42,11 +42,11 @@ from synapse.http.server import ( wrap_json_request_handler, ) from synapse.http.servlet import parse_integer, parse_string +from synapse.logging.context import make_deferred_yieldable, run_in_background from synapse.metrics.background_process_metrics import run_as_background_process from synapse.rest.media.v1._base import get_filename_from_headers from synapse.util.async_helpers import ObservableDeferred from synapse.util.caches.expiringcache import ExpiringCache -from synapse.util.logcontext import make_deferred_yieldable, run_in_background from synapse.util.stringutils import random_string from ._base import FileInfo diff 
--git a/synapse/rest/media/v1/storage_provider.py b/synapse/rest/media/v1/storage_provider.py index 359b45ebf..e8f559acc 100644 --- a/synapse/rest/media/v1/storage_provider.py +++ b/synapse/rest/media/v1/storage_provider.py @@ -20,8 +20,7 @@ import shutil from twisted.internet import defer from synapse.config._base import Config -from synapse.util import logcontext -from synapse.util.logcontext import run_in_background +from synapse.logging.context import defer_to_thread, run_in_background from .media_storage import FileResponder @@ -125,7 +124,7 @@ class FileStorageProviderBackend(StorageProvider): if not os.path.exists(dirname): os.makedirs(dirname) - return logcontext.defer_to_thread( + return defer_to_thread( self.hs.get_reactor(), shutil.copyfile, primary_fname, backup_fname ) diff --git a/synapse/state/__init__.py b/synapse/state/__init__.py index 1b454a56a..9f708fa20 100644 --- a/synapse/state/__init__.py +++ b/synapse/state/__init__.py @@ -28,11 +28,11 @@ from twisted.internet import defer from synapse.api.constants import EventTypes from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, StateResolutionVersions from synapse.events.snapshot import EventContext +from synapse.logging.utils import log_function from synapse.state import v1, v2 from synapse.util.async_helpers import Linearizer from synapse.util.caches import get_cache_factor_for from synapse.util.caches.expiringcache import ExpiringCache -from synapse.util.logutils import log_function from synapse.util.metrics import Measure logger = logging.getLogger(__name__) diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 29589853c..2f940dbae 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -30,12 +30,12 @@ from prometheus_client import Histogram from twisted.internet import defer from synapse.api.errors import StoreError +from synapse.logging.context import LoggingContext, PreserveLoggingContext from synapse.metrics.background_process_metrics import run_as_background_process from synapse.storage.engines import PostgresEngine, Sqlite3Engine from synapse.types import get_domain_from_id from synapse.util import batch_iter from synapse.util.caches.descriptors import Cache -from synapse.util.logcontext import LoggingContext, PreserveLoggingContext from synapse.util.stringutils import exception_to_unicode # import a function which will return a monotonic time, in seconds diff --git a/synapse/storage/events.py b/synapse/storage/events.py index 86f848570..b486ca50e 100644 --- a/synapse/storage/events.py +++ b/synapse/storage/events.py @@ -33,6 +33,8 @@ from synapse.api.constants import EventTypes from synapse.api.errors import SynapseError from synapse.events import EventBase # noqa: F401 from synapse.events.snapshot import EventContext # noqa: F401 +from synapse.logging.context import PreserveLoggingContext, make_deferred_yieldable +from synapse.logging.utils import log_function from synapse.metrics import BucketCollector from synapse.metrics.background_process_metrics import run_as_background_process from synapse.state import StateResolutionStore @@ -45,8 +47,6 @@ from synapse.util import batch_iter from synapse.util.async_helpers import ObservableDeferred from synapse.util.caches.descriptors import cached, cachedInlineCallbacks from synapse.util.frozenutils import frozendict_json_encoder -from synapse.util.logcontext import PreserveLoggingContext, make_deferred_yieldable -from synapse.util.logutils import log_function from synapse.util.metrics import Measure logger = 
logging.getLogger(__name__) diff --git a/synapse/storage/events_worker.py b/synapse/storage/events_worker.py index 6d680d405..09db87251 100644 --- a/synapse/storage/events_worker.py +++ b/synapse/storage/events_worker.py @@ -29,14 +29,14 @@ from synapse.api.room_versions import EventFormatVersions from synapse.events import FrozenEvent, event_type_from_format_version # noqa: F401 from synapse.events.snapshot import EventContext # noqa: F401 from synapse.events.utils import prune_event -from synapse.metrics.background_process_metrics import run_as_background_process -from synapse.types import get_domain_from_id -from synapse.util.logcontext import ( +from synapse.logging.context import ( LoggingContext, PreserveLoggingContext, make_deferred_yieldable, run_in_background, ) +from synapse.metrics.background_process_metrics import run_as_background_process +from synapse.types import get_domain_from_id from synapse.util.metrics import Measure from ._base import SQLBaseStore diff --git a/synapse/storage/stream.py b/synapse/storage/stream.py index d9482a384..386a9dbe1 100644 --- a/synapse/storage/stream.py +++ b/synapse/storage/stream.py @@ -41,12 +41,12 @@ from six.moves import range from twisted.internet import defer +from synapse.logging.context import make_deferred_yieldable, run_in_background from synapse.storage._base import SQLBaseStore from synapse.storage.engines import PostgresEngine from synapse.storage.events_worker import EventsWorkerStore from synapse.types import RoomStreamToken from synapse.util.caches.stream_change_cache import StreamChangeCache -from synapse.util.logcontext import make_deferred_yieldable, run_in_background logger = logging.getLogger(__name__) diff --git a/synapse/util/__init__.py b/synapse/util/__init__.py index 954e32fb2..c6d2ce440 100644 --- a/synapse/util/__init__.py +++ b/synapse/util/__init__.py @@ -21,10 +21,14 @@ import attr from twisted.internet import defer, task -from synapse.util.logcontext import PreserveLoggingContext +from synapse.logging import context, formatter logger = logging.getLogger(__name__) +# Compatibility alias, for existing logconfigs. +logcontext = context +logformatter = formatter + def unwrapFirstError(failure): # defer.gatherResults and DeferredLists wrap failures. 
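# Not part of the patch: a short, hedged note on why the "Compatibility alias"
# hunk above is enough to keep existing log configs working. A logging config
# that names, say, synapse.util.logcontext.LoggingContextFilter is resolved by
# the stdlib logging machinery through attribute lookup on synapse.util, so
# binding logcontext = context (and logformatter = formatter) lets those dotted
# paths keep resolving even though synapse/util/logcontext.py no longer exists
# as a file. A minimal check, assuming this patch is applied:
#
#     from synapse.util import logcontext
#     from synapse.logging import context
#     assert logcontext is context   # old name and new module are the same object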
@@ -46,7 +50,7 @@ class Clock(object): @defer.inlineCallbacks def sleep(self, seconds): d = defer.Deferred() - with PreserveLoggingContext(): + with context.PreserveLoggingContext(): self._reactor.callLater(seconds, d.callback, seconds) res = yield d defer.returnValue(res) @@ -91,10 +95,10 @@ class Clock(object): """ def wrapped_callback(*args, **kwargs): - with PreserveLoggingContext(): + with context.PreserveLoggingContext(): callback(*args, **kwargs) - with PreserveLoggingContext(): + with context.PreserveLoggingContext(): return self._reactor.callLater(delay, wrapped_callback, *args, **kwargs) def cancel_call_later(self, timer, ignore_errs=False): diff --git a/synapse/util/async_helpers.py b/synapse/util/async_helpers.py index 7757b8708..58a6b8764 100644 --- a/synapse/util/async_helpers.py +++ b/synapse/util/async_helpers.py @@ -23,13 +23,12 @@ from twisted.internet import defer from twisted.internet.defer import CancelledError from twisted.python import failure -from synapse.util import Clock, logcontext, unwrapFirstError - -from .logcontext import ( +from synapse.logging.context import ( PreserveLoggingContext, make_deferred_yieldable, run_in_background, ) +from synapse.util import Clock, unwrapFirstError logger = logging.getLogger(__name__) @@ -153,7 +152,7 @@ def concurrently_execute(func, args, limit): except StopIteration: pass - return logcontext.make_deferred_yieldable( + return make_deferred_yieldable( defer.gatherResults( [run_in_background(_concurrently_execute_inner) for _ in range(limit)], consumeErrors=True, @@ -174,7 +173,7 @@ def yieldable_gather_results(func, iter, *args, **kwargs): Deferred[list]: Resolved when all functions have been invoked, or errors if one of the function calls fails. """ - return logcontext.make_deferred_yieldable( + return make_deferred_yieldable( defer.gatherResults( [run_in_background(func, item, *args, **kwargs) for item in iter], consumeErrors=True, diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py index d2f25063a..675db2f44 100644 --- a/synapse/util/caches/descriptors.py +++ b/synapse/util/caches/descriptors.py @@ -24,7 +24,8 @@ from six import itervalues, string_types from twisted.internet import defer -from synapse.util import logcontext, unwrapFirstError +from synapse.logging.context import make_deferred_yieldable, preserve_fn +from synapse.util import unwrapFirstError from synapse.util.async_helpers import ObservableDeferred from synapse.util.caches import get_cache_factor_for from synapse.util.caches.lrucache import LruCache @@ -388,7 +389,7 @@ class CacheDescriptor(_CacheDescriptorBase): except KeyError: ret = defer.maybeDeferred( - logcontext.preserve_fn(self.function_to_call), obj, *args, **kwargs + preserve_fn(self.function_to_call), obj, *args, **kwargs ) def onErr(f): @@ -408,7 +409,7 @@ class CacheDescriptor(_CacheDescriptorBase): observer = result_d.observe() if isinstance(observer, defer.Deferred): - return logcontext.make_deferred_yieldable(observer) + return make_deferred_yieldable(observer) else: return observer @@ -563,7 +564,7 @@ class CacheListDescriptor(_CacheDescriptorBase): cached_defers.append( defer.maybeDeferred( - logcontext.preserve_fn(self.function_to_call), **args_to_call + preserve_fn(self.function_to_call), **args_to_call ).addCallbacks(complete_all, errback) ) @@ -571,7 +572,7 @@ class CacheListDescriptor(_CacheDescriptorBase): d = defer.gatherResults(cached_defers, consumeErrors=True).addCallbacks( lambda _: results, unwrapFirstError ) - return 
logcontext.make_deferred_yieldable(d) + return make_deferred_yieldable(d) else: return results diff --git a/synapse/util/caches/response_cache.py b/synapse/util/caches/response_cache.py index cbe54d45d..d6908e169 100644 --- a/synapse/util/caches/response_cache.py +++ b/synapse/util/caches/response_cache.py @@ -16,9 +16,9 @@ import logging from twisted.internet import defer +from synapse.logging.context import make_deferred_yieldable, run_in_background from synapse.util.async_helpers import ObservableDeferred from synapse.util.caches import register_cache -from synapse.util.logcontext import make_deferred_yieldable, run_in_background logger = logging.getLogger(__name__) @@ -78,7 +78,7 @@ class ResponseCache(object): *deferred* should run its callbacks in the sentinel logcontext (ie, you should wrap normal synapse deferreds with - logcontext.run_in_background). + synapse.logging.context.run_in_background). Can return either a new Deferred (which also doesn't follow the synapse logcontext rules), or, if *deferred* was already complete, the actual diff --git a/synapse/util/distributor.py b/synapse/util/distributor.py index 5a79db821..45af8d3ee 100644 --- a/synapse/util/distributor.py +++ b/synapse/util/distributor.py @@ -17,8 +17,8 @@ import logging from twisted.internet import defer +from synapse.logging.context import make_deferred_yieldable, run_in_background from synapse.metrics.background_process_metrics import run_as_background_process -from synapse.util.logcontext import make_deferred_yieldable, run_in_background logger = logging.getLogger(__name__) diff --git a/synapse/util/file_consumer.py b/synapse/util/file_consumer.py index 629ed4414..8b17d1c8b 100644 --- a/synapse/util/file_consumer.py +++ b/synapse/util/file_consumer.py @@ -17,7 +17,7 @@ from six.moves import queue from twisted.internet import threads -from synapse.util.logcontext import make_deferred_yieldable, run_in_background +from synapse.logging.context import make_deferred_yieldable, run_in_background class BackgroundFileConsumer(object): diff --git a/synapse/util/metrics.py b/synapse/util/metrics.py index 01284d3cf..c30b6de19 100644 --- a/synapse/util/metrics.py +++ b/synapse/util/metrics.py @@ -20,8 +20,8 @@ from prometheus_client import Counter from twisted.internet import defer +from synapse.logging.context import LoggingContext from synapse.metrics import InFlightGauge -from synapse.util.logcontext import LoggingContext logger = logging.getLogger(__name__) diff --git a/synapse/util/ratelimitutils.py b/synapse/util/ratelimitutils.py index 06defa819..27bceac00 100644 --- a/synapse/util/ratelimitutils.py +++ b/synapse/util/ratelimitutils.py @@ -20,7 +20,7 @@ import logging from twisted.internet import defer from synapse.api.errors import LimitExceededError -from synapse.util.logcontext import ( +from synapse.logging.context import ( PreserveLoggingContext, make_deferred_yieldable, run_in_background, diff --git a/synapse/util/retryutils.py b/synapse/util/retryutils.py index 1a7745649..d8d0ceae5 100644 --- a/synapse/util/retryutils.py +++ b/synapse/util/retryutils.py @@ -17,7 +17,7 @@ import random from twisted.internet import defer -import synapse.util.logcontext +import synapse.logging.context from synapse.api.errors import CodeMessageException logger = logging.getLogger(__name__) @@ -225,4 +225,4 @@ class RetryDestinationLimiter(object): logger.exception("Failed to store destination_retry_timings") # we deliberately do this in the background. 
- synapse.util.logcontext.run_in_background(store_retry_timings) + synapse.logging.context.run_in_background(store_retry_timings) diff --git a/tests/appservice/test_scheduler.py b/tests/appservice/test_scheduler.py index db9f86bda..04b8c2c07 100644 --- a/tests/appservice/test_scheduler.py +++ b/tests/appservice/test_scheduler.py @@ -22,7 +22,7 @@ from synapse.appservice.scheduler import ( _ServiceQueuer, _TransactionController, ) -from synapse.util.logcontext import make_deferred_yieldable +from synapse.logging.context import make_deferred_yieldable from tests import unittest diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py index 5a355f00c..795703967 100644 --- a/tests/crypto/test_keyring.py +++ b/tests/crypto/test_keyring.py @@ -30,9 +30,12 @@ from synapse.crypto.keyring import ( ServerKeyFetcher, StoreKeyFetcher, ) +from synapse.logging.context import ( + LoggingContext, + PreserveLoggingContext, + make_deferred_yieldable, +) from synapse.storage.keys import FetchKeyResult -from synapse.util import logcontext -from synapse.util.logcontext import LoggingContext from tests import unittest @@ -131,7 +134,7 @@ class KeyringTestCase(unittest.HomeserverTestCase): @defer.inlineCallbacks def get_perspectives(**kwargs): self.assertEquals(LoggingContext.current_context().request, "11") - with logcontext.PreserveLoggingContext(): + with PreserveLoggingContext(): yield persp_deferred defer.returnValue(persp_resp) @@ -158,7 +161,7 @@ class KeyringTestCase(unittest.HomeserverTestCase): self.assertFalse(res_deferreds[0].called) res_deferreds[0].addBoth(self.check_context, None) - yield logcontext.make_deferred_yieldable(res_deferreds[0]) + yield make_deferred_yieldable(res_deferreds[0]) # let verify_json_objects_for_server finish its work before we kill the # logcontext @@ -184,7 +187,7 @@ class KeyringTestCase(unittest.HomeserverTestCase): [("server10", json1, 0, "test")] ) res_deferreds_2[0].addBoth(self.check_context, None) - yield logcontext.make_deferred_yieldable(res_deferreds_2[0]) + yield make_deferred_yieldable(res_deferreds_2[0]) # let verify_json_objects_for_server finish its work before we kill the # logcontext diff --git a/tests/http/federation/test_matrix_federation_agent.py b/tests/http/federation/test_matrix_federation_agent.py index 417fda3ab..a49f9b322 100644 --- a/tests/http/federation/test_matrix_federation_agent.py +++ b/tests/http/federation/test_matrix_federation_agent.py @@ -36,8 +36,8 @@ from synapse.http.federation.matrix_federation_agent import ( _cache_period_from_headers, ) from synapse.http.federation.srv_resolver import Server +from synapse.logging.context import LoggingContext from synapse.util.caches.ttlcache import TTLCache -from synapse.util.logcontext import LoggingContext from tests.http import TestServerTLSConnectionFactory, get_test_ca_cert_file from tests.server import FakeTransport, ThreadedMemoryReactorClock diff --git a/tests/http/federation/test_srv_resolver.py b/tests/http/federation/test_srv_resolver.py index cf6c6e95b..65b51dc98 100644 --- a/tests/http/federation/test_srv_resolver.py +++ b/tests/http/federation/test_srv_resolver.py @@ -22,7 +22,7 @@ from twisted.internet.error import ConnectError from twisted.names import dns, error from synapse.http.federation.srv_resolver import SrvResolver -from synapse.util.logcontext import LoggingContext +from synapse.logging.context import LoggingContext from tests import unittest from tests.utils import MockClock diff --git a/tests/http/test_fedclient.py b/tests/http/test_fedclient.py index 
c4c0d9b96..b9d6d7ad1 100644 --- a/tests/http/test_fedclient.py +++ b/tests/http/test_fedclient.py @@ -29,7 +29,7 @@ from synapse.http.matrixfederationclient import ( MatrixFederationHttpClient, MatrixFederationRequest, ) -from synapse.util.logcontext import LoggingContext +from synapse.logging.context import LoggingContext from tests.server import FakeTransport from tests.unittest import HomeserverTestCase diff --git a/tests/patch_inline_callbacks.py b/tests/patch_inline_callbacks.py index ee0add345..220884311 100644 --- a/tests/patch_inline_callbacks.py +++ b/tests/patch_inline_callbacks.py @@ -28,7 +28,7 @@ def do_patch(): Patch defer.inlineCallbacks so that it checks the state of the logcontext on exit """ - from synapse.util.logcontext import LoggingContext + from synapse.logging.context import LoggingContext orig_inline_callbacks = defer.inlineCallbacks diff --git a/tests/push/test_http.py b/tests/push/test_http.py index 22c3f73ef..8ce6bb62d 100644 --- a/tests/push/test_http.py +++ b/tests/push/test_http.py @@ -18,8 +18,8 @@ from mock import Mock from twisted.internet.defer import Deferred import synapse.rest.admin +from synapse.logging.context import make_deferred_yieldable from synapse.rest.client.v1 import login, room -from synapse.util.logcontext import make_deferred_yieldable from tests.unittest import HomeserverTestCase diff --git a/tests/rest/client/test_transactions.py b/tests/rest/client/test_transactions.py index 708dc26e6..a8adc9a61 100644 --- a/tests/rest/client/test_transactions.py +++ b/tests/rest/client/test_transactions.py @@ -2,9 +2,9 @@ from mock import Mock, call from twisted.internet import defer, reactor +from synapse.logging.context import LoggingContext from synapse.rest.client.transactions import CLEANUP_PERIOD_MS, HttpTransactionCache from synapse.util import Clock -from synapse.util.logcontext import LoggingContext from tests import unittest from tests.utils import MockClock diff --git a/tests/rest/media/v1/test_media_storage.py b/tests/rest/media/v1/test_media_storage.py index 39c934242..bc662b61d 100644 --- a/tests/rest/media/v1/test_media_storage.py +++ b/tests/rest/media/v1/test_media_storage.py @@ -24,11 +24,11 @@ from six.moves.urllib import parse from twisted.internet.defer import Deferred +from synapse.logging.context import make_deferred_yieldable from synapse.rest.media.v1._base import FileInfo from synapse.rest.media.v1.filepath import MediaFilePaths from synapse.rest.media.v1.media_storage import MediaStorage from synapse.rest.media.v1.storage_provider import FileStorageProviderBackend -from synapse.util.logcontext import make_deferred_yieldable from tests import unittest diff --git a/tests/test_federation.py b/tests/test_federation.py index 6a8339b56..a73f18f88 100644 --- a/tests/test_federation.py +++ b/tests/test_federation.py @@ -3,9 +3,9 @@ from mock import Mock from twisted.internet.defer import maybeDeferred, succeed from synapse.events import FrozenEvent +from synapse.logging.context import LoggingContext from synapse.types import Requester, UserID from synapse.util import Clock -from synapse.util.logcontext import LoggingContext from tests import unittest from tests.server import ThreadedMemoryReactorClock, setup_test_homeserver diff --git a/tests/test_server.py b/tests/test_server.py index da29ae92c..ba08483a4 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -26,8 +26,8 @@ from twisted.web.server import NOT_DONE_YET from synapse.api.errors import Codes, SynapseError from synapse.http.server import JsonResource from 
synapse.http.site import SynapseSite, logger +from synapse.logging.context import make_deferred_yieldable from synapse.util import Clock -from synapse.util.logcontext import make_deferred_yieldable from tests import unittest from tests.server import ( diff --git a/tests/test_utils/logging_setup.py b/tests/test_utils/logging_setup.py index 813f98419..2d96b0fa8 100644 --- a/tests/test_utils/logging_setup.py +++ b/tests/test_utils/logging_setup.py @@ -17,7 +17,7 @@ import os import twisted.logger -from synapse.util.logcontext import LoggingContextFilter +from synapse.logging.context import LoggingContextFilter class ToTwistedHandler(logging.Handler): diff --git a/tests/unittest.py b/tests/unittest.py index d26804b5b..a09e76c7c 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -33,9 +33,9 @@ from synapse.api.constants import EventTypes from synapse.config.homeserver import HomeServerConfig from synapse.http.server import JsonResource from synapse.http.site import SynapseRequest +from synapse.logging.context import LoggingContext from synapse.server import HomeServer from synapse.types import Requester, UserID, create_requester -from synapse.util.logcontext import LoggingContext from tests.server import get_clock, make_request, render, setup_test_homeserver from tests.test_utils.logging_setup import setup_logging diff --git a/tests/util/caches/test_descriptors.py b/tests/util/caches/test_descriptors.py index 6f8f52537..7807328e2 100644 --- a/tests/util/caches/test_descriptors.py +++ b/tests/util/caches/test_descriptors.py @@ -21,7 +21,11 @@ import mock from twisted.internet import defer, reactor from synapse.api.errors import SynapseError -from synapse.util import logcontext +from synapse.logging.context import ( + LoggingContext, + PreserveLoggingContext, + make_deferred_yieldable, +) from synapse.util.caches import descriptors from tests import unittest @@ -32,7 +36,7 @@ logger = logging.getLogger(__name__) def run_on_reactor(): d = defer.Deferred() reactor.callLater(0, d.callback, 0) - return logcontext.make_deferred_yieldable(d) + return make_deferred_yieldable(d) class CacheTestCase(unittest.TestCase): @@ -153,7 +157,7 @@ class DescriptorTestCase(unittest.TestCase): def fn(self, arg1): @defer.inlineCallbacks def inner_fn(): - with logcontext.PreserveLoggingContext(): + with PreserveLoggingContext(): yield complete_lookup defer.returnValue(1) @@ -161,10 +165,10 @@ class DescriptorTestCase(unittest.TestCase): @defer.inlineCallbacks def do_lookup(): - with logcontext.LoggingContext() as c1: + with LoggingContext() as c1: c1.name = "c1" r = yield obj.fn(1) - self.assertEqual(logcontext.LoggingContext.current_context(), c1) + self.assertEqual(LoggingContext.current_context(), c1) defer.returnValue(r) def check_result(r): @@ -174,18 +178,12 @@ class DescriptorTestCase(unittest.TestCase): # set off a deferred which will do a cache lookup d1 = do_lookup() - self.assertEqual( - logcontext.LoggingContext.current_context(), - logcontext.LoggingContext.sentinel, - ) + self.assertEqual(LoggingContext.current_context(), LoggingContext.sentinel) d1.addCallback(check_result) # and another d2 = do_lookup() - self.assertEqual( - logcontext.LoggingContext.current_context(), - logcontext.LoggingContext.sentinel, - ) + self.assertEqual(LoggingContext.current_context(), LoggingContext.sentinel) d2.addCallback(check_result) # let the lookup complete @@ -210,29 +208,25 @@ class DescriptorTestCase(unittest.TestCase): @defer.inlineCallbacks def do_lookup(): - with logcontext.LoggingContext() as c1: + with 
LoggingContext() as c1: c1.name = "c1" try: d = obj.fn(1) self.assertEqual( - logcontext.LoggingContext.current_context(), - logcontext.LoggingContext.sentinel, + LoggingContext.current_context(), LoggingContext.sentinel ) yield d self.fail("No exception thrown") except SynapseError: pass - self.assertEqual(logcontext.LoggingContext.current_context(), c1) + self.assertEqual(LoggingContext.current_context(), c1) obj = Cls() # set off a deferred which will do a cache lookup d1 = do_lookup() - self.assertEqual( - logcontext.LoggingContext.current_context(), - logcontext.LoggingContext.sentinel, - ) + self.assertEqual(LoggingContext.current_context(), LoggingContext.sentinel) return d1 @@ -288,23 +282,20 @@ class CachedListDescriptorTestCase(unittest.TestCase): @descriptors.cachedList("fn", "args1", inlineCallbacks=True) def list_fn(self, args1, arg2): - assert logcontext.LoggingContext.current_context().request == "c1" + assert LoggingContext.current_context().request == "c1" # we want this to behave like an asynchronous function yield run_on_reactor() - assert logcontext.LoggingContext.current_context().request == "c1" + assert LoggingContext.current_context().request == "c1" defer.returnValue(self.mock(args1, arg2)) - with logcontext.LoggingContext() as c1: + with LoggingContext() as c1: c1.request = "c1" obj = Cls() obj.mock.return_value = {10: "fish", 20: "chips"} d1 = obj.list_fn([10, 20], 2) - self.assertEqual( - logcontext.LoggingContext.current_context(), - logcontext.LoggingContext.sentinel, - ) + self.assertEqual(LoggingContext.current_context(), LoggingContext.sentinel) r = yield d1 - self.assertEqual(logcontext.LoggingContext.current_context(), c1) + self.assertEqual(LoggingContext.current_context(), c1) obj.mock.assert_called_once_with([10, 20], 2) self.assertEqual(r, {10: "fish", 20: "chips"}) obj.mock.reset_mock() diff --git a/tests/util/test_async_utils.py b/tests/util/test_async_utils.py index bf85d3b8e..f60918069 100644 --- a/tests/util/test_async_utils.py +++ b/tests/util/test_async_utils.py @@ -16,9 +16,8 @@ from twisted.internet import defer from twisted.internet.defer import CancelledError, Deferred from twisted.internet.task import Clock -from synapse.util import logcontext +from synapse.logging.context import LoggingContext, PreserveLoggingContext from synapse.util.async_helpers import timeout_deferred -from synapse.util.logcontext import LoggingContext from tests.unittest import TestCase @@ -69,14 +68,14 @@ class TimeoutDeferredTest(TestCase): @defer.inlineCallbacks def blocking(): non_completing_d = Deferred() - with logcontext.PreserveLoggingContext(): + with PreserveLoggingContext(): try: yield non_completing_d except CancelledError: blocking_was_cancelled[0] = True raise - with logcontext.LoggingContext("one") as context_one: + with LoggingContext("one") as context_one: # the errbacks should be run in the test logcontext def errback(res, deferred_name): self.assertIs( diff --git a/tests/util/test_linearizer.py b/tests/util/test_linearizer.py index ec7ba9719..0ec8ef90c 100644 --- a/tests/util/test_linearizer.py +++ b/tests/util/test_linearizer.py @@ -19,7 +19,8 @@ from six.moves import range from twisted.internet import defer, reactor from twisted.internet.defer import CancelledError -from synapse.util import Clock, logcontext +from synapse.logging.context import LoggingContext +from synapse.util import Clock from synapse.util.async_helpers import Linearizer from tests import unittest @@ -51,13 +52,13 @@ class LinearizerTestCase(unittest.TestCase): 
@defer.inlineCallbacks def func(i, sleep=False): - with logcontext.LoggingContext("func(%s)" % i) as lc: + with LoggingContext("func(%s)" % i) as lc: with (yield linearizer.queue("")): - self.assertEqual(logcontext.LoggingContext.current_context(), lc) + self.assertEqual(LoggingContext.current_context(), lc) if sleep: yield Clock(reactor).sleep(0) - self.assertEqual(logcontext.LoggingContext.current_context(), lc) + self.assertEqual(LoggingContext.current_context(), lc) func(0, sleep=True) for i in range(1, 100): diff --git a/tests/util/test_logcontext.py b/tests/util/test_logcontext.py index 8d69fbf11..8b8455c8b 100644 --- a/tests/util/test_logcontext.py +++ b/tests/util/test_logcontext.py @@ -1,8 +1,14 @@ import twisted.python.failure from twisted.internet import defer, reactor -from synapse.util import Clock, logcontext -from synapse.util.logcontext import LoggingContext +from synapse.logging.context import ( + LoggingContext, + PreserveLoggingContext, + make_deferred_yieldable, + nested_logging_context, + run_in_background, +) +from synapse.util import Clock from .. import unittest @@ -43,7 +49,7 @@ class LoggingContextTestCase(unittest.TestCase): context_one.request = "one" # fire off function, but don't wait on it. - d2 = logcontext.run_in_background(function) + d2 = run_in_background(function) def cb(res): callback_completed[0] = True @@ -85,7 +91,7 @@ class LoggingContextTestCase(unittest.TestCase): def test_run_in_background_with_non_blocking_fn(self): @defer.inlineCallbacks def nonblocking_function(): - with logcontext.PreserveLoggingContext(): + with PreserveLoggingContext(): yield defer.succeed(None) return self._test_run_in_background(nonblocking_function) @@ -94,7 +100,7 @@ class LoggingContextTestCase(unittest.TestCase): # a function which returns a deferred which looks like it has been # called, but is actually paused def testfunc(): - return logcontext.make_deferred_yieldable(_chained_deferred_function()) + return make_deferred_yieldable(_chained_deferred_function()) return self._test_run_in_background(testfunc) @@ -128,7 +134,7 @@ class LoggingContextTestCase(unittest.TestCase): with LoggingContext() as context_one: context_one.request = "one" - d1 = logcontext.make_deferred_yieldable(blocking_function()) + d1 = make_deferred_yieldable(blocking_function()) # make sure that the context was reset by make_deferred_yieldable self.assertIs(LoggingContext.current_context(), sentinel_context) @@ -144,7 +150,7 @@ class LoggingContextTestCase(unittest.TestCase): with LoggingContext() as context_one: context_one.request = "one" - d1 = logcontext.make_deferred_yieldable(_chained_deferred_function()) + d1 = make_deferred_yieldable(_chained_deferred_function()) # make sure that the context was reset by make_deferred_yieldable self.assertIs(LoggingContext.current_context(), sentinel_context) @@ -161,7 +167,7 @@ class LoggingContextTestCase(unittest.TestCase): with LoggingContext() as context_one: context_one.request = "one" - d1 = logcontext.make_deferred_yieldable("bum") + d1 = make_deferred_yieldable("bum") self._check_test_key("one") r = yield d1 @@ -170,7 +176,7 @@ class LoggingContextTestCase(unittest.TestCase): def test_nested_logging_context(self): with LoggingContext(request="foo"): - nested_context = logcontext.nested_logging_context(suffix="bar") + nested_context = nested_logging_context(suffix="bar") self.assertEqual(nested_context.request, "foo-bar") diff --git a/tests/util/test_logformatter.py b/tests/util/test_logformatter.py index 297aebbfb..0fb60caac 100644 --- 
a/tests/util/test_logformatter.py +++ b/tests/util/test_logformatter.py @@ -14,7 +14,7 @@ # limitations under the License. import sys -from synapse.util.logformatter import LogFormatter +from synapse.logging.formatter import LogFormatter from tests import unittest diff --git a/tests/utils.py b/tests/utils.py index da43166f3..d8e55b080 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -34,6 +34,7 @@ from synapse.config.homeserver import HomeServerConfig from synapse.config.server import DEFAULT_ROOM_VERSION from synapse.federation.transport import server as federation_server from synapse.http.server import HttpServer +from synapse.logging.context import LoggingContext from synapse.server import HomeServer from synapse.storage import DataStore from synapse.storage.engines import PostgresEngine, create_engine @@ -42,7 +43,6 @@ from synapse.storage.prepare_database import ( _setup_new_database, prepare_database, ) -from synapse.util.logcontext import LoggingContext from synapse.util.ratelimitutils import FederationRateLimiter # set this to True to run the tests against postgres instead of sqlite. From 426854e7bce4c584af0ea0b8e8a499be2f5a1265 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Thu, 4 Jul 2019 11:10:35 +0100 Subject: [PATCH 47/61] Suggest people use a config file for Docker instead of env vars --- CHANGES.md | 2 +- changelog.d/5615.doc | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/5615.doc diff --git a/CHANGES.md b/CHANGES.md index e4710a506..46989f9cf 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -5,7 +5,7 @@ Bugfixes -------- - Fix regression in 1.1rc1 where OPTIONS requests to the media repo would fail. ([\#5593](https://github.com/matrix-org/synapse/issues/5593)) -- Removed the `SYNAPSE_SMTP_*` docker container environment variables. Using these environment variables prevented the docker container from starting in Synapse v1.0, even though they didn't actually allow any functionality anyway. Users are advised to remove `SYNAPSE_SMTP_HOST`, `SYNAPSE_SMTP_PORT`, `SYNAPSE_SMTP_USER`, `SYNAPSE_SMTP_PASSWORD` and `SYNAPSE_SMTP_FROM` environment variables from their docker run commands. ([\#5596](https://github.com/matrix-org/synapse/issues/5596)) +- Removed the `SYNAPSE_SMTP_*` docker container environment variables. Using these environment variables prevented the docker container from starting in Synapse v1.0, even though they didn't actually allow any functionality anyway. ([\#5596](https://github.com/matrix-org/synapse/issues/5596)) - Fix a number of "Starting txn from sentinel context" warnings. ([\#5605](https://github.com/matrix-org/synapse/issues/5605)) diff --git a/changelog.d/5615.doc b/changelog.d/5615.doc new file mode 100644 index 000000000..0fd45c66b --- /dev/null +++ b/changelog.d/5615.doc @@ -0,0 +1 @@ +Change a previous changelog entry to suggest that people use a config file for Docker instead of env vars. 
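An illustrative aside on the logging-module move above (this sketch is not part of any patch in the series; fetch_one and items are hypothetical stand-ins): the relocated helpers are easiest to understand from the calling pattern that recurs throughout the touched files. Work is kicked off with run_in_background, which starts each call without making the caller wait on it, and the combined result is then awaited via make_deferred_yieldable so that the caller's logcontext is restored when the yield completes.

    from twisted.internet import defer

    from synapse.logging.context import make_deferred_yieldable, run_in_background


    @defer.inlineCallbacks
    def fetch_all(fetch_one, items):
        # fetch_one is a hypothetical callable returning a Deferred per item.
        # run_in_background starts each call without waiting for it, and
        # make_deferred_yieldable lets the caller yield on the gathered result
        # while keeping its own logcontext intact afterwards.
        results = yield make_deferred_yieldable(
            defer.gatherResults(
                [run_in_background(fetch_one, item) for item in items],
                consumeErrors=True,
            )
        )
        defer.returnValue(results)

This mirrors the rewritten calls in synapse/handlers/federation.py and yieldable_gather_results in synapse/util/async_helpers.py; the series changes only where these helpers are imported from, not how they are used.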
From 20332b278da3128421334e7cbf806a0c06f31e23 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 4 Jul 2019 11:44:09 +0100 Subject: [PATCH 48/61] 1.1.0 --- CHANGES.md | 3 +++ changelog.d/5615.doc | 1 - debian/changelog | 7 +++++-- synapse/__init__.py | 2 +- 4 files changed, 9 insertions(+), 4 deletions(-) delete mode 100644 changelog.d/5615.doc diff --git a/CHANGES.md b/CHANGES.md index 46989f9cf..a528251de 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,6 @@ +Synapse 1.1.0 (2019-07-04) +========================== + Synapse 1.1.0rc2 (2019-07-03) ============================= diff --git a/changelog.d/5615.doc b/changelog.d/5615.doc deleted file mode 100644 index 0fd45c66b..000000000 --- a/changelog.d/5615.doc +++ /dev/null @@ -1 +0,0 @@ -Change a previous changelog entry to suggest that people use a config file for Docker instead of env vars. diff --git a/debian/changelog b/debian/changelog index 91653e724..cf39bed90 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,9 +1,12 @@ -matrix-synapse-py3 (1.0.0+nmu1) UNRELEASED; urgency=medium +matrix-synapse-py3 (1.1.0) stable; urgency=medium [ Silke Hofstra ] * Include systemd-python to allow logging to the systemd journal. - -- Silke Hofstra Wed, 29 May 2019 09:45:29 +0200 + [ Synapse Packaging team ] + * New synapse release 1.1.0. + + -- Synapse Packaging team Thu, 04 Jul 2019 11:43:41 +0100 matrix-synapse-py3 (1.0.0) stable; urgency=medium diff --git a/synapse/__init__.py b/synapse/__init__.py index 5fe863197..cf22fabd6 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -35,4 +35,4 @@ try: except ImportError: pass -__version__ = "1.1.0rc2" +__version__ = "1.1.0" From 54283f3ed49ce40059e9f0746f2ded08ce3a5b9e Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 4 Jul 2019 11:55:07 +0100 Subject: [PATCH 49/61] Update changelog --- CHANGES.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/CHANGES.md b/CHANGES.md index a528251de..dc8c74fe5 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,6 +1,16 @@ Synapse 1.1.0 (2019-07-04) ========================== +As of v1.1.0, Synapse no longer supports Python 2, nor Postgres version 9.4. +See the [upgrade notes](UPGRADE.rst#upgrading-to-v110) for more details. + +This release also deprecates the use of environment variables to configure the +docker image. See the [docker README](https://github.com/matrix-org/synapse/blob/release-v1.1.0/docker/README.md#legacy-dynamic-configuration-file-support) +for more details. + +No changes since 1.1.0rc2. 
+ + Synapse 1.1.0rc2 (2019-07-03) ============================= From ee91ac179c4e518fddbdb40a7b57b1f11fa71293 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Thu, 4 Jul 2019 16:24:13 +0100 Subject: [PATCH 50/61] Add a sytest blacklist file (#5611) * Add a sytest blacklist file * Add changelog * Add blacklist to manifest --- .buildkite/pipeline.yml | 9 ++- .buildkite/synapse_sytest.sh | 145 ----------------------------------- MANIFEST.in | 1 + changelog.d/5611.misc | 1 + sytest-blacklist | 35 +++++++++ 5 files changed, 43 insertions(+), 148 deletions(-) delete mode 100644 .buildkite/synapse_sytest.sh create mode 100644 changelog.d/5611.misc create mode 100644 sytest-blacklist diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml index 513eb3bde..dd0f98cba 100644 --- a/.buildkite/pipeline.yml +++ b/.buildkite/pipeline.yml @@ -173,11 +173,12 @@ steps: queue: "medium" command: - "bash .buildkite/merge_base_branch.sh" - - "bash .buildkite/synapse_sytest.sh" + - "bash /synapse_sytest.sh" plugins: - docker#v3.0.1: image: "matrixdotorg/sytest-synapse:py35" propagate-environment: true + always-pull: true retry: automatic: - exit_status: -1 @@ -192,11 +193,12 @@ steps: POSTGRES: "1" command: - "bash .buildkite/merge_base_branch.sh" - - "bash .buildkite/synapse_sytest.sh" + - "bash /synapse_sytest.sh" plugins: - docker#v3.0.1: image: "matrixdotorg/sytest-synapse:py35" propagate-environment: true + always-pull: true retry: automatic: - exit_status: -1 @@ -212,11 +214,12 @@ steps: WORKERS: "1" command: - "bash .buildkite/merge_base_branch.sh" - - "bash .buildkite/synapse_sytest.sh" + - "bash /synapse_sytest.sh" plugins: - docker#v3.0.1: image: "matrixdotorg/sytest-synapse:py35" propagate-environment: true + always-pull: true soft_fail: true retry: automatic: diff --git a/.buildkite/synapse_sytest.sh b/.buildkite/synapse_sytest.sh deleted file mode 100644 index 3011b88bb..000000000 --- a/.buildkite/synapse_sytest.sh +++ /dev/null @@ -1,145 +0,0 @@ -#!/bin/bash -# -# Fetch sytest, and then run the tests for synapse. The entrypoint for the -# sytest-synapse docker images. - -set -ex - -if [ -n "$BUILDKITE" ] -then - SYNAPSE_DIR=`pwd` -else - SYNAPSE_DIR="/src" -fi - -# Attempt to find a sytest to use. -# If /sytest exists, it means that a SyTest checkout has been mounted into the Docker image. -if [ -d "/sytest" ]; then - # If the user has mounted in a SyTest checkout, use that. - echo "Using local sytests..." - - # create ourselves a working directory and dos2unix some scripts therein - mkdir -p /work/jenkins - for i in install-deps.pl run-tests.pl tap-to-junit-xml.pl jenkins/prep_sytest_for_postgres.sh; do - dos2unix -n "/sytest/$i" "/work/$i" - done - ln -sf /sytest/tests /work - ln -sf /sytest/keys /work - SYTEST_LIB="/sytest/lib" -else - if [ -n "BUILDKITE_BRANCH" ] - then - branch_name=$BUILDKITE_BRANCH - else - # Otherwise, try and find out what the branch that the Synapse checkout is using. Fall back to develop if it's not a branch. - branch_name="$(git --git-dir=/src/.git symbolic-ref HEAD 2>/dev/null)" || branch_name="develop" - fi - - # Try and fetch the branch - echo "Trying to get same-named sytest branch..." - wget -q https://github.com/matrix-org/sytest/archive/$branch_name.tar.gz -O sytest.tar.gz || { - # Probably a 404, fall back to develop - echo "Using develop instead..." 
- wget -q https://github.com/matrix-org/sytest/archive/develop.tar.gz -O sytest.tar.gz - } - - mkdir -p /work - tar -C /work --strip-components=1 -xf sytest.tar.gz - SYTEST_LIB="/work/lib" -fi - -cd /work - -# PostgreSQL setup -if [ -n "$POSTGRES" ] -then - export PGUSER=postgres - export POSTGRES_DB_1=pg1 - export POSTGRES_DB_2=pg2 - - # Start the database - su -c 'eatmydata /usr/lib/postgresql/9.6/bin/pg_ctl -w -D /var/lib/postgresql/data start' postgres - - # Use the Jenkins script to write out the configuration for a PostgreSQL using Synapse - jenkins/prep_sytest_for_postgres.sh - - # Make the test databases for the two Synapse servers that will be spun up - su -c 'psql -c "CREATE DATABASE pg1;"' postgres - su -c 'psql -c "CREATE DATABASE pg2;"' postgres - -fi - -if [ -n "$OFFLINE" ]; then - # if we're in offline mode, just put synapse into the virtualenv, and - # hope that the deps are up-to-date. - # - # (`pip install -e` likes to reinstall setuptools even if it's already installed, - # so we just run setup.py explicitly.) - # - (cd $SYNAPSE_DIR && /venv/bin/python setup.py -q develop) -else - # We've already created the virtualenv, but lets double check we have all - # deps. - /venv/bin/pip install -q --upgrade --no-cache-dir -e $SYNAPSE_DIR - /venv/bin/pip install -q --upgrade --no-cache-dir \ - lxml psycopg2 coverage codecov tap.py - - # Make sure all Perl deps are installed -- this is done in the docker build - # so will only install packages added since the last Docker build - ./install-deps.pl -fi - - -# Run the tests ->&2 echo "+++ Running tests" - -RUN_TESTS=( - perl -I "$SYTEST_LIB" ./run-tests.pl --python=/venv/bin/python --synapse-directory=$SYNAPSE_DIR --coverage -O tap --all -) - -TEST_STATUS=0 - -if [ -n "$WORKERS" ]; then - RUN_TESTS+=(-I Synapse::ViaHaproxy --dendron-binary=/pydron.py) -else - RUN_TESTS+=(-I Synapse) -fi - -"${RUN_TESTS[@]}" "$@" > results.tap || TEST_STATUS=$? 
- -if [ $TEST_STATUS -ne 0 ]; then - >&2 echo -e "run-tests \e[31mFAILED\e[0m: exit code $TEST_STATUS" -else - >&2 echo -e "run-tests \e[32mPASSED\e[0m" -fi - ->&2 echo "--- Copying assets" - -# Copy out the logs -mkdir -p /logs -cp results.tap /logs/results.tap -rsync --ignore-missing-args --min-size=1B -av server-0 server-1 /logs --include "*/" --include="*.log.*" --include="*.log" --exclude="*" - -# Upload coverage to codecov and upload files, if running on Buildkite -if [ -n "$BUILDKITE" ] -then - /venv/bin/coverage combine || true - /venv/bin/coverage xml || true - /venv/bin/codecov -X gcov -f coverage.xml - - wget -O buildkite.tar.gz https://github.com/buildkite/agent/releases/download/v3.13.0/buildkite-agent-linux-amd64-3.13.0.tar.gz - tar xvf buildkite.tar.gz - chmod +x ./buildkite-agent - - # Upload the files - ./buildkite-agent artifact upload "/logs/**/*.log*" - ./buildkite-agent artifact upload "/logs/results.tap" - - if [ $TEST_STATUS -ne 0 ]; then - # Annotate, if failure - /venv/bin/python $SYNAPSE_DIR/.buildkite/format_tap.py /logs/results.tap "$BUILDKITE_LABEL" | ./buildkite-agent annotate --style="error" --context="$BUILDKITE_LABEL" - fi -fi - - -exit $TEST_STATUS diff --git a/MANIFEST.in b/MANIFEST.in index 2c59c7bdc..834ddfad3 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -7,6 +7,7 @@ include demo/README include demo/demo.tls.dh include demo/*.py include demo/*.sh +include sytest-blacklist recursive-include synapse/storage/schema *.sql recursive-include synapse/storage/schema *.sql.postgres diff --git a/changelog.d/5611.misc b/changelog.d/5611.misc new file mode 100644 index 000000000..a2d169513 --- /dev/null +++ b/changelog.d/5611.misc @@ -0,0 +1 @@ +Add a blacklist file to the repo to blacklist certain sytests from failing CI. diff --git a/sytest-blacklist b/sytest-blacklist new file mode 100644 index 000000000..b760a48c5 --- /dev/null +++ b/sytest-blacklist @@ -0,0 +1,35 @@ +# This file serves as a blacklist for SyTest tests that we expect will fail in +# Synapse. +# +# Each line of this file is scanned by sytest during a run and if the line +# exactly matches the name of a test, it will be marked as "expected fail", +# meaning the test will still run, but failure will not mark the entire test +# suite as failing. +# +# Test names are encouraged to have a bug accompanied with them, serving as an +# explanation for why the test has been excluded. 
+ +# Blacklisted due to https://github.com/matrix-org/synapse/issues/1679 +Remote room members also see posted message events + +# Blacklisted due to https://github.com/matrix-org/synapse/issues/2065 +Guest users can accept invites to private rooms over federation + +# Blacklisted due to https://github.com/vector-im/riot-web/issues/7211 +The only membership state included in a gapped incremental sync is for senders in the timeline + +# Blacklisted due to https://github.com/matrix-org/synapse/issues/1658 +Newly created users see their own presence in /initialSync (SYT-34) + +# Blacklisted due to https://github.com/matrix-org/synapse/issues/1396 +Should reject keys claiming to belong to a different user + +# Blacklisted due to https://github.com/matrix-org/synapse/issues/2306 +Users appear/disappear from directory when join_rules are changed +Users appear/disappear from directory when history_visibility are changed + +# Blacklisted due to https://github.com/matrix-org/synapse/issues/1531 +Enabling an unknown default rule fails with 404 + +# Blacklisted due to https://github.com/matrix-org/synapse/issues/1663 +New federated private chats get full presence information (SYN-115) From 1ee268d33d3e4eb40270eb169ca5266843481b41 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Fri, 5 Jul 2019 02:32:02 +1000 Subject: [PATCH 51/61] Improve the backwards compatibility re-exports of synapse.logging.context (#5617) * Improve the backwards compatibility re-exports of synapse.logging.context. * reexport logformatter too --- changelog.d/5617.misc | 1 + synapse/util/__init__.py | 6 +----- synapse/util/logcontext.py | 39 ++++++++++++++++++++++++++++++++++++ synapse/util/logformatter.py | 21 +++++++++++++++++++ 4 files changed, 62 insertions(+), 5 deletions(-) create mode 100644 changelog.d/5617.misc create mode 100644 synapse/util/logcontext.py create mode 100644 synapse/util/logformatter.py diff --git a/changelog.d/5617.misc b/changelog.d/5617.misc new file mode 100644 index 000000000..bb3c02816 --- /dev/null +++ b/changelog.d/5617.misc @@ -0,0 +1 @@ +Move logging code out of `synapse.util` and into `synapse.logging`. diff --git a/synapse/util/__init__.py b/synapse/util/__init__.py index c6d2ce440..f506b2a69 100644 --- a/synapse/util/__init__.py +++ b/synapse/util/__init__.py @@ -21,14 +21,10 @@ import attr from twisted.internet import defer, task -from synapse.logging import context, formatter +from synapse.logging import context logger = logging.getLogger(__name__) -# Compatibility alias, for existing logconfigs. -logcontext = context -logformatter = formatter - def unwrapFirstError(failure): # defer.gatherResults and DeferredLists wrap failures. diff --git a/synapse/util/logcontext.py b/synapse/util/logcontext.py new file mode 100644 index 000000000..40e5c10a4 --- /dev/null +++ b/synapse/util/logcontext.py @@ -0,0 +1,39 @@ +# Copyright 2019 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Backwards compatibility re-exports of ``synapse.logging.context`` functionality. 
+""" + +from synapse.logging.context import ( + LoggingContext, + LoggingContextFilter, + PreserveLoggingContext, + defer_to_thread, + make_deferred_yieldable, + nested_logging_context, + preserve_fn, + run_in_background, +) + +__all__ = [ + "defer_to_thread", + "LoggingContext", + "LoggingContextFilter", + "make_deferred_yieldable", + "nested_logging_context", + "preserve_fn", + "PreserveLoggingContext", + "run_in_background", +] diff --git a/synapse/util/logformatter.py b/synapse/util/logformatter.py new file mode 100644 index 000000000..320e8f817 --- /dev/null +++ b/synapse/util/logformatter.py @@ -0,0 +1,21 @@ +# Copyright 2019 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Backwards compatibility re-exports of ``synapse.logging.formatter`` functionality. +""" + +from synapse.logging.formatter import LogFormatter + +__all__ = ["LogFormatter"] From 0e5434264f4c3d4025278ebe47f365fd93d6fada Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Fri, 5 Jul 2019 10:44:12 +0100 Subject: [PATCH 52/61] Make errors about email password resets much clearer (#5616) The runtime errors that dealt with local email password resets talked about config options that users may not even have in their config file yet (if upgrading). Instead, the cryptic errors are now replaced with hopefully much more helpful ones. --- changelog.d/5616.misc | 1 + synapse/config/emailconfig.py | 19 ++++++++----------- 2 files changed, 9 insertions(+), 11 deletions(-) create mode 100644 changelog.d/5616.misc diff --git a/changelog.d/5616.misc b/changelog.d/5616.misc new file mode 100644 index 000000000..9f94be677 --- /dev/null +++ b/changelog.d/5616.misc @@ -0,0 +1 @@ +Make runtime errors surrounding password reset emails much clearer. diff --git a/synapse/config/emailconfig.py b/synapse/config/emailconfig.py index fcd55d3e3..8381b8eb2 100644 --- a/synapse/config/emailconfig.py +++ b/synapse/config/emailconfig.py @@ -112,13 +112,17 @@ class EmailConfig(Config): missing = [] for k in required: if k not in email_config: - missing.append(k) + missing.append("email." + k) + + if config.get("public_baseurl") is None: + missing.append("public_base_url") if len(missing) > 0: raise RuntimeError( - "email.password_reset_behaviour is set to 'local' " - "but required keys are missing: %s" - % (", ".join(["email." + k for k in missing]),) + "Password resets emails are configured to be sent from " + "this homeserver due to a partial 'email' block. " + "However, the following required keys are missing: %s" + % (", ".join(missing),) ) # Templates for password reset emails @@ -156,13 +160,6 @@ class EmailConfig(Config): filepath, "email.password_reset_template_success_html" ) - if config.get("public_baseurl") is None: - raise RuntimeError( - "email.password_reset_behaviour is set to 'local' but no " - "public_baseurl is set. 
This is necessary to generate password " - "reset links" - ) - if self.email_enable_notifs: required = [ "smtp_host", From 9481707a52a89022bc5d99c181ceb9e7ec9ec9e9 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Fri, 5 Jul 2019 11:10:19 +0100 Subject: [PATCH 53/61] Fixes to the federation rate limiter (#5621) - Put the default window_size back to 1000ms (broken by #5181) - Make the `rc_federation` config actually do something - fix an off-by-one error in the 'concurrent' limit - Avoid creating an unused `_PerHostRatelimiter` object for every single incoming request --- changelog.d/5621.bugfix | 1 + synapse/config/ratelimiting.py | 4 +- synapse/util/ratelimitutils.py | 16 ++--- tests/config/test_ratelimiting.py | 40 +++++++++++++ tests/util/test_ratelimitutils.py | 97 +++++++++++++++++++++++++++++++ tests/utils.py | 6 -- 6 files changed, 148 insertions(+), 16 deletions(-) create mode 100644 changelog.d/5621.bugfix create mode 100644 tests/config/test_ratelimiting.py create mode 100644 tests/util/test_ratelimitutils.py diff --git a/changelog.d/5621.bugfix b/changelog.d/5621.bugfix new file mode 100644 index 000000000..f1a2851f4 --- /dev/null +++ b/changelog.d/5621.bugfix @@ -0,0 +1 @@ +Various minor fixes to the federation request rate limiter. diff --git a/synapse/config/ratelimiting.py b/synapse/config/ratelimiting.py index 8c587f3fd..33f31cf21 100644 --- a/synapse/config/ratelimiting.py +++ b/synapse/config/ratelimiting.py @@ -23,7 +23,7 @@ class RateLimitConfig(object): class FederationRateLimitConfig(object): _items_and_default = { - "window_size": 10000, + "window_size": 1000, "sleep_limit": 10, "sleep_delay": 500, "reject_limit": 50, @@ -54,7 +54,7 @@ class RatelimitConfig(Config): # Load the new-style federation config, if it exists. Otherwise, fall # back to the old method. - if "federation_rc" in config: + if "rc_federation" in config: self.rc_federation = FederationRateLimitConfig(**config["rc_federation"]) else: self.rc_federation = FederationRateLimitConfig( diff --git a/synapse/util/ratelimitutils.py b/synapse/util/ratelimitutils.py index 27bceac00..5ca4521ce 100644 --- a/synapse/util/ratelimitutils.py +++ b/synapse/util/ratelimitutils.py @@ -36,9 +36,11 @@ class FederationRateLimiter(object): clock (Clock) config (FederationRateLimitConfig) """ - self.clock = clock - self._config = config - self.ratelimiters = {} + + def new_limiter(): + return _PerHostRatelimiter(clock=clock, config=config) + + self.ratelimiters = collections.defaultdict(new_limiter) def ratelimit(self, host): """Used to ratelimit an incoming request from given host @@ -53,11 +55,9 @@ class FederationRateLimiter(object): host (str): Origin of incoming request. Returns: - _PerHostRatelimiter + context manager which returns a deferred. 
""" - return self.ratelimiters.setdefault( - host, _PerHostRatelimiter(clock=self.clock, config=self._config) - ).ratelimit() + return self.ratelimiters[host].ratelimit() class _PerHostRatelimiter(object): @@ -122,7 +122,7 @@ class _PerHostRatelimiter(object): self.request_times.append(time_now) def queue_request(): - if len(self.current_processing) > self.concurrent_requests: + if len(self.current_processing) >= self.concurrent_requests: queue_defer = defer.Deferred() self.ready_request_queue[request_id] = queue_defer logger.info( diff --git a/tests/config/test_ratelimiting.py b/tests/config/test_ratelimiting.py new file mode 100644 index 000000000..13ab28238 --- /dev/null +++ b/tests/config/test_ratelimiting.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +# Copyright 2019 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from synapse.config.homeserver import HomeServerConfig + +from tests.unittest import TestCase +from tests.utils import default_config + + +class RatelimitConfigTestCase(TestCase): + def test_parse_rc_federation(self): + config_dict = default_config("test") + config_dict["rc_federation"] = { + "window_size": 20000, + "sleep_limit": 693, + "sleep_delay": 252, + "reject_limit": 198, + "concurrent": 7, + } + + config = HomeServerConfig() + config.parse_config_dict(config_dict, "", "") + config_obj = config.rc_federation + + self.assertEqual(config_obj.window_size, 20000) + self.assertEqual(config_obj.sleep_limit, 693) + self.assertEqual(config_obj.sleep_delay, 252) + self.assertEqual(config_obj.reject_limit, 198) + self.assertEqual(config_obj.concurrent, 7) diff --git a/tests/util/test_ratelimitutils.py b/tests/util/test_ratelimitutils.py new file mode 100644 index 000000000..4d1aee91d --- /dev/null +++ b/tests/util/test_ratelimitutils.py @@ -0,0 +1,97 @@ +# -*- coding: utf-8 -*- +# Copyright 2019 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from synapse.config.homeserver import HomeServerConfig +from synapse.util.ratelimitutils import FederationRateLimiter + +from tests.server import get_clock +from tests.unittest import TestCase +from tests.utils import default_config + + +class FederationRateLimiterTestCase(TestCase): + def test_ratelimit(self): + """A simple test with the default values""" + reactor, clock = get_clock() + rc_config = build_rc_config() + ratelimiter = FederationRateLimiter(clock, rc_config) + + with ratelimiter.ratelimit("testhost") as d1: + # shouldn't block + self.successResultOf(d1) + + def test_concurrent_limit(self): + """Test what happens when we hit the concurrent limit""" + reactor, clock = get_clock() + rc_config = build_rc_config({"rc_federation": {"concurrent": 2}}) + ratelimiter = FederationRateLimiter(clock, rc_config) + + with ratelimiter.ratelimit("testhost") as d1: + # shouldn't block + self.successResultOf(d1) + + cm2 = ratelimiter.ratelimit("testhost") + d2 = cm2.__enter__() + # also shouldn't block + self.successResultOf(d2) + + cm3 = ratelimiter.ratelimit("testhost") + d3 = cm3.__enter__() + # this one should block, though ... + self.assertNoResult(d3) + + # ... until we complete an earlier request + cm2.__exit__(None, None, None) + self.successResultOf(d3) + + def test_sleep_limit(self): + """Test what happens when we hit the sleep limit""" + reactor, clock = get_clock() + rc_config = build_rc_config( + {"rc_federation": {"sleep_limit": 2, "sleep_delay": 500}} + ) + ratelimiter = FederationRateLimiter(clock, rc_config) + + with ratelimiter.ratelimit("testhost") as d1: + # shouldn't block + self.successResultOf(d1) + + with ratelimiter.ratelimit("testhost") as d2: + # nor this + self.successResultOf(d2) + + with ratelimiter.ratelimit("testhost") as d3: + # this one should block, though ... + self.assertNoResult(d3) + sleep_time = _await_resolution(reactor, d3) + self.assertAlmostEqual(sleep_time, 500, places=3) + + +def _await_resolution(reactor, d): + """advance the clock until the deferred completes. + + Returns the number of milliseconds it took to complete. 
+ """ + start_time = reactor.seconds() + while not d.called: + reactor.advance(0.01) + return (reactor.seconds() - start_time) * 1000 + + +def build_rc_config(settings={}): + config_dict = default_config("test") + config_dict.update(settings) + config = HomeServerConfig() + config.parse_config_dict(config_dict, "", "") + return config.rc_federation diff --git a/tests/utils.py b/tests/utils.py index d8e55b080..8a94ce0b4 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -152,12 +152,6 @@ def default_config(name, parse=False): "mau_stats_only": False, "mau_limits_reserved_threepids": [], "admin_contact": None, - "rc_federation": { - "reject_limit": 10, - "sleep_limit": 10, - "sleep_delay": 10, - "concurrent": 10, - }, "rc_message": {"per_second": 10000, "burst_count": 10000}, "rc_registration": {"per_second": 10000, "burst_count": 10000}, "rc_login": { From a6a776f3d80e441d416c384b7ad9344ee1967d31 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Fri, 5 Jul 2019 12:59:42 +0100 Subject: [PATCH 54/61] remove dead transaction persist code (#5622) this hasn't done anything for years --- changelog.d/5622.misc | 1 + synapse/federation/persistence.py | 32 ------------------- .../federation/sender/transaction_manager.py | 9 ------ synapse/storage/transactions.py | 28 ---------------- 4 files changed, 1 insertion(+), 69 deletions(-) create mode 100644 changelog.d/5622.misc diff --git a/changelog.d/5622.misc b/changelog.d/5622.misc new file mode 100644 index 000000000..9f0a87311 --- /dev/null +++ b/changelog.d/5622.misc @@ -0,0 +1 @@ +Remove dead code for persiting outgoing federation transactions. diff --git a/synapse/federation/persistence.py b/synapse/federation/persistence.py index d086e0424..44edcabed 100644 --- a/synapse/federation/persistence.py +++ b/synapse/federation/persistence.py @@ -21,8 +21,6 @@ These actions are mostly only used by the :py:mod:`.replication` module. import logging -from twisted.internet import defer - from synapse.logging.utils import log_function logger = logging.getLogger(__name__) @@ -63,33 +61,3 @@ class TransactionActions(object): return self.store.set_received_txn_response( transaction.transaction_id, origin, code, response ) - - @defer.inlineCallbacks - @log_function - def prepare_to_send(self, transaction): - """ Persists the `Transaction` we are about to send and works out the - correct value for the `prev_ids` key. - - Returns: - Deferred - """ - transaction.prev_ids = yield self.store.prep_send_transaction( - transaction.transaction_id, - transaction.destination, - transaction.origin_server_ts, - ) - - @log_function - def delivered(self, transaction, response_code, response_dict): - """ Marks the given `Transaction` as having been successfully - delivered to the remote homeserver, and what the response was. 
- - Returns: - Deferred - """ - return self.store.delivered_txn( - transaction.transaction_id, - transaction.destination, - response_code, - response_dict, - ) diff --git a/synapse/federation/sender/transaction_manager.py b/synapse/federation/sender/transaction_manager.py index c987bb9a0..0460a8c4a 100644 --- a/synapse/federation/sender/transaction_manager.py +++ b/synapse/federation/sender/transaction_manager.py @@ -63,8 +63,6 @@ class TransactionManager(object): len(edus), ) - logger.debug("TX [%s] Persisting transaction...", destination) - transaction = Transaction.create_new( origin_server_ts=int(self.clock.time_msec()), transaction_id=txn_id, @@ -76,9 +74,6 @@ class TransactionManager(object): self._next_txn_id += 1 - yield self._transaction_actions.prepare_to_send(transaction) - - logger.debug("TX [%s] Persisted transaction", destination) logger.info( "TX [%s] {%s} Sending transaction [%s]," " (PDUs: %d, EDUs: %d)", destination, @@ -118,10 +113,6 @@ class TransactionManager(object): logger.info("TX [%s] {%s} got %d response", destination, txn_id, code) - yield self._transaction_actions.delivered(transaction, code, response) - - logger.debug("TX [%s] {%s} Marked as delivered", destination, txn_id) - if code == 200: for e_id, r in response.get("pdus", {}).items(): if "error" in r: diff --git a/synapse/storage/transactions.py b/synapse/storage/transactions.py index b1188f6bc..fd1861917 100644 --- a/synapse/storage/transactions.py +++ b/synapse/storage/transactions.py @@ -133,34 +133,6 @@ class TransactionStore(SQLBaseStore): desc="set_received_txn_response", ) - def prep_send_transaction(self, transaction_id, destination, origin_server_ts): - """Persists an outgoing transaction and calculates the values for the - previous transaction id list. - - This should be called before sending the transaction so that it has the - correct value for the `prev_ids` key. - - Args: - transaction_id (str) - destination (str) - origin_server_ts (int) - - Returns: - list: A list of previous transaction ids. - """ - return defer.succeed([]) - - def delivered_txn(self, transaction_id, destination, code, response_dict): - """Persists the response for an outgoing transaction. - - Args: - transaction_id (str) - destination (str) - code (int) - response_json (str) - """ - pass - @defer.inlineCallbacks def get_destination_retry_timings(self, destination): """Gets the current retry timings (if any) for a given destination. From 80cc82a4459e4bbc926c6a57ae5e29278c2f9439 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Fri, 5 Jul 2019 16:47:58 +0100 Subject: [PATCH 55/61] Remove support for invite_3pid_guest. (#5625) This has never been documented, and I'm not sure it's ever been used outside sytest. It's quite a lot of poorly-maintained code, so I'd like to get rid of it. For now I haven't removed the database table; I suggest we leave that for a future clearout. 
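Homeservers that still carry `invite_3pid_guest: true` in their configuration will now fail to
start with a ConfigError rather than the setting being silently honoured. A minimal standalone
sketch of the new behaviour (illustrative only; `ConfigError` and `read_registration_config`
here are stand-ins, not the real synapse.config classes -- the actual check lives in
RegistrationConfig, as in the diff below):

    # Stand-in names for illustration; not the real synapse.config code.
    class ConfigError(Exception):
        pass

    def read_registration_config(config: dict) -> None:
        # The option used to quietly toggle 3PID guest invites; it is now
        # rejected outright so stale configs fail fast at startup.
        if config.get("invite_3pid_guest", False):
            raise ConfigError("invite_3pid_guest is no longer supported")

    read_registration_config({"allow_guest_access": True})    # accepted
    # read_registration_config({"invite_3pid_guest": True})   # raises ConfigError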
--- changelog.d/5625.removal | 1 + synapse/config/registration.py | 5 +- synapse/handlers/register.py | 30 ------------ synapse/handlers/room_member.py | 39 ---------------- synapse/handlers/room_member_worker.py | 12 ----- synapse/replication/http/membership.py | 65 -------------------------- synapse/storage/registration.py | 47 ------------------- 7 files changed, 3 insertions(+), 196 deletions(-) create mode 100644 changelog.d/5625.removal diff --git a/changelog.d/5625.removal b/changelog.d/5625.removal new file mode 100644 index 000000000..d33a778d6 --- /dev/null +++ b/changelog.d/5625.removal @@ -0,0 +1 @@ +Remove support for the `invite_3pid_guest` configuration setting. diff --git a/synapse/config/registration.py b/synapse/config/registration.py index 4a59e6ec9..b895c4e9f 100644 --- a/synapse/config/registration.py +++ b/synapse/config/registration.py @@ -71,9 +71,8 @@ class RegistrationConfig(Config): self.default_identity_server = config.get("default_identity_server") self.allow_guest_access = config.get("allow_guest_access", False) - self.invite_3pid_guest = self.allow_guest_access and config.get( - "invite_3pid_guest", False - ) + if config.get("invite_3pid_guest", False): + raise ConfigError("invite_3pid_guest is no longer supported") self.auto_join_rooms = config.get("auto_join_rooms", []) for room_alias in self.auto_join_rooms: diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index e487b90c0..fd55eb165 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -556,36 +556,6 @@ class RegistrationHandler(BaseHandler): defer.returnValue((user_id, token)) - @defer.inlineCallbacks - def get_or_register_3pid_guest(self, medium, address, inviter_user_id): - """Get a guest access token for a 3PID, creating a guest account if - one doesn't already exist. - - Args: - medium (str) - address (str) - inviter_user_id (str): The user ID who is trying to invite the - 3PID - - Returns: - Deferred[(str, str)]: A 2-tuple of `(user_id, access_token)` of the - 3PID guest account. - """ - access_token = yield self.store.get_3pid_guest_access_token(medium, address) - if access_token: - user_info = yield self.auth.get_user_by_access_token(access_token) - - defer.returnValue((user_info["user"].to_string(), access_token)) - - user_id, access_token = yield self.register( - generate_token=True, make_guest=True - ) - access_token = yield self.store.save_or_get_3pid_guest_access_token( - medium, address, access_token, inviter_user_id - ) - - defer.returnValue((user_id, access_token)) - @defer.inlineCallbacks def _join_user_to_room(self, requester, room_identifier): room_id = None diff --git a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index 66b05b473..679daaa07 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -118,24 +118,6 @@ class RoomMemberHandler(object): """ raise NotImplementedError() - @abc.abstractmethod - def get_or_register_3pid_guest(self, requester, medium, address, inviter_user_id): - """Get a guest access token for a 3PID, creating a guest account if - one doesn't already exist. - - Args: - requester (Requester) - medium (str) - address (str) - inviter_user_id (str): The user ID who is trying to invite the - 3PID - - Returns: - Deferred[(str, str)]: A 2-tuple of `(user_id, access_token)` of the - 3PID guest account. 
- """ - raise NotImplementedError() - @abc.abstractmethod def _user_joined_room(self, target, room_id): """Notifies distributor on master process that the user has joined the @@ -890,21 +872,6 @@ class RoomMemberHandler(object): "sender_avatar_url": inviter_avatar_url, } - if self.config.invite_3pid_guest: - guest_user_id, guest_access_token = yield self.get_or_register_3pid_guest( - requester=requester, - medium=medium, - address=address, - inviter_user_id=inviter_user_id, - ) - - invite_config.update( - { - "guest_access_token": guest_access_token, - "guest_user_id": guest_user_id, - } - ) - data = yield self.simple_http_client.post_urlencoded_get_json( is_url, invite_config ) @@ -1010,12 +977,6 @@ class RoomMemberMasterHandler(RoomMemberHandler): yield self.store.locally_reject_invite(target.to_string(), room_id) defer.returnValue({}) - def get_or_register_3pid_guest(self, requester, medium, address, inviter_user_id): - """Implements RoomMemberHandler.get_or_register_3pid_guest - """ - rg = self.registration_handler - return rg.get_or_register_3pid_guest(medium, address, inviter_user_id) - def _user_joined_room(self, target, room_id): """Implements RoomMemberHandler._user_joined_room """ diff --git a/synapse/handlers/room_member_worker.py b/synapse/handlers/room_member_worker.py index da501f38c..fc873a3ba 100644 --- a/synapse/handlers/room_member_worker.py +++ b/synapse/handlers/room_member_worker.py @@ -20,7 +20,6 @@ from twisted.internet import defer from synapse.api.errors import SynapseError from synapse.handlers.room_member import RoomMemberHandler from synapse.replication.http.membership import ( - ReplicationRegister3PIDGuestRestServlet as Repl3PID, ReplicationRemoteJoinRestServlet as ReplRemoteJoin, ReplicationRemoteRejectInviteRestServlet as ReplRejectInvite, ReplicationUserJoinedLeftRoomRestServlet as ReplJoinedLeft, @@ -33,7 +32,6 @@ class RoomMemberWorkerHandler(RoomMemberHandler): def __init__(self, hs): super(RoomMemberWorkerHandler, self).__init__(hs) - self._get_register_3pid_client = Repl3PID.make_client(hs) self._remote_join_client = ReplRemoteJoin.make_client(hs) self._remote_reject_client = ReplRejectInvite.make_client(hs) self._notify_change_client = ReplJoinedLeft.make_client(hs) @@ -80,13 +78,3 @@ class RoomMemberWorkerHandler(RoomMemberHandler): return self._notify_change_client( user_id=target.to_string(), room_id=room_id, change="left" ) - - def get_or_register_3pid_guest(self, requester, medium, address, inviter_user_id): - """Implements RoomMemberHandler.get_or_register_3pid_guest - """ - return self._get_register_3pid_client( - requester=requester, - medium=medium, - address=address, - inviter_user_id=inviter_user_id, - ) diff --git a/synapse/replication/http/membership.py b/synapse/replication/http/membership.py index 0a76a3762..2d9cbbaef 100644 --- a/synapse/replication/http/membership.py +++ b/synapse/replication/http/membership.py @@ -156,70 +156,6 @@ class ReplicationRemoteRejectInviteRestServlet(ReplicationEndpoint): defer.returnValue((200, ret)) -class ReplicationRegister3PIDGuestRestServlet(ReplicationEndpoint): - """Gets/creates a guest account for given 3PID. - - Request format: - - POST /_synapse/replication/get_or_register_3pid_guest/ - - { - "requester": ..., - "medium": ..., - "address": ..., - "inviter_user_id": ... 
- } - """ - - NAME = "get_or_register_3pid_guest" - PATH_ARGS = () - - def __init__(self, hs): - super(ReplicationRegister3PIDGuestRestServlet, self).__init__(hs) - - self.registeration_handler = hs.get_registration_handler() - self.store = hs.get_datastore() - self.clock = hs.get_clock() - - @staticmethod - def _serialize_payload(requester, medium, address, inviter_user_id): - """ - Args: - requester(Requester) - medium (str) - address (str) - inviter_user_id (str): The user ID who is trying to invite the - 3PID - """ - return { - "requester": requester.serialize(), - "medium": medium, - "address": address, - "inviter_user_id": inviter_user_id, - } - - @defer.inlineCallbacks - def _handle_request(self, request): - content = parse_json_object_from_request(request) - - medium = content["medium"] - address = content["address"] - inviter_user_id = content["inviter_user_id"] - - requester = Requester.deserialize(self.store, content["requester"]) - - if requester.user: - request.authenticated_entity = requester.user.to_string() - - logger.info("get_or_register_3pid_guest: %r", content) - - ret = yield self.registeration_handler.get_or_register_3pid_guest( - medium, address, inviter_user_id - ) - - defer.returnValue((200, ret)) - - class ReplicationUserJoinedLeftRoomRestServlet(ReplicationEndpoint): """Notifies that a user has joined or left the room @@ -272,5 +208,4 @@ class ReplicationUserJoinedLeftRoomRestServlet(ReplicationEndpoint): def register_servlets(hs, http_server): ReplicationRemoteJoinRestServlet(hs).register(http_server) ReplicationRemoteRejectInviteRestServlet(hs).register(http_server) - ReplicationRegister3PIDGuestRestServlet(hs).register(http_server) ReplicationUserJoinedLeftRoomRestServlet(hs).register(http_server) diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py index 13a3d5208..aea5b3276 100644 --- a/synapse/storage/registration.py +++ b/synapse/storage/registration.py @@ -432,19 +432,6 @@ class RegistrationWorkerStore(SQLBaseStore): ) ) - @defer.inlineCallbacks - def get_3pid_guest_access_token(self, medium, address): - ret = yield self._simple_select_one( - "threepid_guest_access_tokens", - {"medium": medium, "address": address}, - ["guest_access_token"], - True, - "get_3pid_guest_access_token", - ) - if ret: - defer.returnValue(ret["guest_access_token"]) - defer.returnValue(None) - @defer.inlineCallbacks def get_user_id_by_threepid(self, medium, address, require_verified=False): """Returns user id from threepid @@ -979,40 +966,6 @@ class RegistrationStore( defer.returnValue(res if res else False) - @defer.inlineCallbacks - def save_or_get_3pid_guest_access_token( - self, medium, address, access_token, inviter_user_id - ): - """ - Gets the 3pid's guest access token if exists, else saves access_token. - - Args: - medium (str): Medium of the 3pid. Must be "email". - address (str): 3pid address. - access_token (str): The access token to persist if none is - already persisted. - inviter_user_id (str): User ID of the inviter. - - Returns: - deferred str: Whichever access token is persisted at the end - of this function call. 
- """ - - def insert(txn): - txn.execute( - "INSERT INTO threepid_guest_access_tokens " - "(medium, address, guest_access_token, first_inviter) " - "VALUES (?, ?, ?, ?)", - (medium, address, access_token, inviter_user_id), - ) - - try: - yield self.runInteraction("save_3pid_guest_access_token", insert) - defer.returnValue(access_token) - except self.database_engine.module.IntegrityError: - ret = yield self.get_3pid_guest_access_token(medium, address) - defer.returnValue(ret) - def add_user_pending_deactivation(self, user_id): """ Adds a user to the table of users who need to be parted from all the rooms they're From ad8b909ce98cce5e955b10a3cbe04f3e868ecb70 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Fri, 5 Jul 2019 17:20:02 +0100 Subject: [PATCH 56/61] Add origin_server_ts and sender fields to m.replace (#5613) Riot team would like some extra fields as part of m.replace, so here you go. Fixes: #5598 --- changelog.d/5613.feature | 1 + synapse/events/utils.py | 6 ++++- tests/rest/client/v2_alpha/test_relations.py | 24 +++++++++++++++----- 3 files changed, 24 insertions(+), 7 deletions(-) create mode 100644 changelog.d/5613.feature diff --git a/changelog.d/5613.feature b/changelog.d/5613.feature new file mode 100644 index 000000000..4b7bb2745 --- /dev/null +++ b/changelog.d/5613.feature @@ -0,0 +1 @@ +Add `sender` and `origin_server_ts` fields to `m.replace`. diff --git a/synapse/events/utils.py b/synapse/events/utils.py index f24f0c16f..987de5cab 100644 --- a/synapse/events/utils.py +++ b/synapse/events/utils.py @@ -392,7 +392,11 @@ class EventClientSerializer(object): serialized_event["content"].pop("m.relates_to", None) r = serialized_event["unsigned"].setdefault("m.relations", {}) - r[RelationTypes.REPLACE] = {"event_id": edit.event_id} + r[RelationTypes.REPLACE] = { + "event_id": edit.event_id, + "origin_server_ts": edit.origin_server_ts, + "sender": edit.sender, + } defer.returnValue(serialized_event) diff --git a/tests/rest/client/v2_alpha/test_relations.py b/tests/rest/client/v2_alpha/test_relations.py index 3deeed3a7..6bb7d9263 100644 --- a/tests/rest/client/v2_alpha/test_relations.py +++ b/tests/rest/client/v2_alpha/test_relations.py @@ -466,9 +466,15 @@ class RelationsTestCase(unittest.HomeserverTestCase): self.assertEquals(channel.json_body["content"], new_body) - self.assertEquals( - channel.json_body["unsigned"].get("m.relations"), - {RelationTypes.REPLACE: {"event_id": edit_event_id}}, + relations_dict = channel.json_body["unsigned"].get("m.relations") + self.assertIn(RelationTypes.REPLACE, relations_dict) + + m_replace_dict = relations_dict[RelationTypes.REPLACE] + for key in ["event_id", "sender", "origin_server_ts"]: + self.assertIn(key, m_replace_dict) + + self.assert_dict( + {"event_id": edit_event_id, "sender": self.user_id}, m_replace_dict ) def test_multi_edit(self): @@ -518,9 +524,15 @@ class RelationsTestCase(unittest.HomeserverTestCase): self.assertEquals(channel.json_body["content"], new_body) - self.assertEquals( - channel.json_body["unsigned"].get("m.relations"), - {RelationTypes.REPLACE: {"event_id": edit_event_id}}, + relations_dict = channel.json_body["unsigned"].get("m.relations") + self.assertIn(RelationTypes.REPLACE, relations_dict) + + m_replace_dict = relations_dict[RelationTypes.REPLACE] + for key in ["event_id", "sender", "origin_server_ts"]: + self.assertIn(key, m_replace_dict) + + self.assert_dict( + {"event_id": edit_event_id, "sender": self.user_id}, m_replace_dict ) def _send_relation( From 
9b1b79f3f59a93cde0d6f973e40515f65ff1b3b8 Mon Sep 17 00:00:00 2001 From: "J. Ryan Stinnett" Date: Fri, 5 Jul 2019 17:37:52 +0100 Subject: [PATCH 57/61] Add default push rule to ignore reactions (#5623) This adds a default push rule following the proposal in [MSC2153](https://github.com/matrix-org/matrix-doc/pull/2153). See also https://github.com/vector-im/riot-web/issues/10208 See also https://github.com/matrix-org/matrix-js-sdk/pull/976 --- changelog.d/5623.feature | 1 + synapse/push/baserules.py | 13 +++++++++++++ 2 files changed, 14 insertions(+) create mode 100644 changelog.d/5623.feature diff --git a/changelog.d/5623.feature b/changelog.d/5623.feature new file mode 100644 index 000000000..b73080e88 --- /dev/null +++ b/changelog.d/5623.feature @@ -0,0 +1 @@ +Add default push rule to ignore reactions. diff --git a/synapse/push/baserules.py b/synapse/push/baserules.py index 96d087de2..134bf805e 100644 --- a/synapse/push/baserules.py +++ b/synapse/push/baserules.py @@ -1,5 +1,6 @@ # Copyright 2015, 2016 OpenMarket Ltd # Copyright 2017 New Vector Ltd +# Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -248,6 +249,18 @@ BASE_APPEND_OVERRIDE_RULES = [ ], "actions": ["notify", {"set_tweak": "highlight", "value": True}], }, + { + "rule_id": "global/override/.m.rule.reaction", + "conditions": [ + { + "kind": "event_match", + "key": "type", + "pattern": "m.reaction", + "_id": "_reaction", + } + ], + "actions": ["dont_notify"], + }, ] From 589d43d9cd80d6a780f1bf8b99ae396efe23aae8 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Mon, 8 Jul 2019 12:53:33 +0100 Subject: [PATCH 58/61] Add a few more common environment directory names to black exclusion (#5630) * Add a few more common environment directory names to black exclusion * Add changelog --- changelog.d/5630.misc | 1 + pyproject.toml | 2 ++ 2 files changed, 3 insertions(+) create mode 100644 changelog.d/5630.misc diff --git a/changelog.d/5630.misc b/changelog.d/5630.misc new file mode 100644 index 000000000..f112d873e --- /dev/null +++ b/changelog.d/5630.misc @@ -0,0 +1 @@ +Add some more common python virtual-environment paths to the black exclusion list. diff --git a/pyproject.toml b/pyproject.toml index ec23258da..ef329aab4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,6 +39,8 @@ exclude = ''' | \.git # root of the project | \.tox | \.venv + | \.env + | env | _build | _trial_temp.* | build From 1af2fcd492c81075a9d75a5578d8d599af3cea0c Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Mon, 8 Jul 2019 14:52:26 +0100 Subject: [PATCH 59/61] Move get_or_create_user to test code (#5628) This is only used in tests, so... --- changelog.d/5628.misc | 1 + synapse/handlers/register.py | 51 ------------------------- tests/handlers/test_register.py | 68 ++++++++++++++++++++++++++++----- 3 files changed, 60 insertions(+), 60 deletions(-) create mode 100644 changelog.d/5628.misc diff --git a/changelog.d/5628.misc b/changelog.d/5628.misc new file mode 100644 index 000000000..fec844679 --- /dev/null +++ b/changelog.d/5628.misc @@ -0,0 +1 @@ +Move RegistrationHandler.get_or_create_user to test code. 
diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index fd55eb165..853020180 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -505,57 +505,6 @@ class RegistrationHandler(BaseHandler): ) defer.returnValue(data) - @defer.inlineCallbacks - def get_or_create_user(self, requester, localpart, displayname, password_hash=None): - """Creates a new user if the user does not exist, - else revokes all previous access tokens and generates a new one. - - Args: - localpart : The local part of the user ID to register. If None, - one will be randomly generated. - Returns: - A tuple of (user_id, access_token). - Raises: - RegistrationError if there was a problem registering. - - NB this is only used in tests. TODO: move it to the test package! - """ - if localpart is None: - raise SynapseError(400, "Request must include user id") - yield self.auth.check_auth_blocking() - need_register = True - - try: - yield self.check_username(localpart) - except SynapseError as e: - if e.errcode == Codes.USER_IN_USE: - need_register = False - else: - raise - - user = UserID(localpart, self.hs.hostname) - user_id = user.to_string() - token = self.macaroon_gen.generate_access_token(user_id) - - if need_register: - yield self.register_with_store( - user_id=user_id, - token=token, - password_hash=password_hash, - create_profile_with_displayname=user.localpart, - ) - else: - yield self._auth_handler.delete_access_tokens_for_user(user_id) - yield self.store.add_access_token_to_user(user_id=user_id, token=token) - - if displayname is not None: - logger.info("setting user display name: %s -> %s", user_id, displayname) - yield self.profile_handler.set_displayname( - user, requester, displayname, by_admin=True - ) - - defer.returnValue((user_id, token)) - @defer.inlineCallbacks def _join_user_to_room(self, requester, room_identifier): room_id = None diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py index 4edce7af4..1c7ded739 100644 --- a/tests/handlers/test_register.py +++ b/tests/handlers/test_register.py @@ -18,7 +18,7 @@ from mock import Mock from twisted.internet import defer from synapse.api.constants import UserTypes -from synapse.api.errors import ResourceLimitError, SynapseError +from synapse.api.errors import Codes, ResourceLimitError, SynapseError from synapse.handlers.register import RegistrationHandler from synapse.types import RoomAlias, UserID, create_requester @@ -67,7 +67,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase): user_id = frank.to_string() requester = create_requester(user_id) result_user_id, result_token = self.get_success( - self.handler.get_or_create_user(requester, frank.localpart, "Frankie") + self.get_or_create_user(requester, frank.localpart, "Frankie") ) self.assertEquals(result_user_id, user_id) self.assertTrue(result_token is not None) @@ -87,7 +87,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase): user_id = frank.to_string() requester = create_requester(user_id) result_user_id, result_token = self.get_success( - self.handler.get_or_create_user(requester, local_part, None) + self.get_or_create_user(requester, local_part, None) ) self.assertEquals(result_user_id, user_id) self.assertTrue(result_token is not None) @@ -95,9 +95,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase): def test_mau_limits_when_disabled(self): self.hs.config.limit_usage_by_mau = False # Ensure does not throw exception - self.get_success( - self.handler.get_or_create_user(self.requester, "a", "display_name") - ) 
+ self.get_success(self.get_or_create_user(self.requester, "a", "display_name")) def test_get_or_create_user_mau_not_blocked(self): self.hs.config.limit_usage_by_mau = True @@ -105,7 +103,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase): return_value=defer.succeed(self.hs.config.max_mau_value - 1) ) # Ensure does not throw exception - self.get_success(self.handler.get_or_create_user(self.requester, "c", "User")) + self.get_success(self.get_or_create_user(self.requester, "c", "User")) def test_get_or_create_user_mau_blocked(self): self.hs.config.limit_usage_by_mau = True @@ -113,7 +111,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase): return_value=defer.succeed(self.lots_of_users) ) self.get_failure( - self.handler.get_or_create_user(self.requester, "b", "display_name"), + self.get_or_create_user(self.requester, "b", "display_name"), ResourceLimitError, ) @@ -121,7 +119,7 @@ class RegistrationTestCase(unittest.HomeserverTestCase): return_value=defer.succeed(self.hs.config.max_mau_value) ) self.get_failure( - self.handler.get_or_create_user(self.requester, "b", "display_name"), + self.get_or_create_user(self.requester, "b", "display_name"), ResourceLimitError, ) @@ -232,3 +230,55 @@ class RegistrationTestCase(unittest.HomeserverTestCase): def test_invalid_user_id_length(self): invalid_user_id = "x" * 256 self.get_failure(self.handler.register(localpart=invalid_user_id), SynapseError) + + @defer.inlineCallbacks + def get_or_create_user(self, requester, localpart, displayname, password_hash=None): + """Creates a new user if the user does not exist, + else revokes all previous access tokens and generates a new one. + + XXX: this used to be in the main codebase, but was only used by this file, + so got moved here. TODO: get rid of it, probably + + Args: + localpart : The local part of the user ID to register. If None, + one will be randomly generated. + Returns: + A tuple of (user_id, access_token). + Raises: + RegistrationError if there was a problem registering. + """ + if localpart is None: + raise SynapseError(400, "Request must include user id") + yield self.hs.get_auth().check_auth_blocking() + need_register = True + + try: + yield self.handler.check_username(localpart) + except SynapseError as e: + if e.errcode == Codes.USER_IN_USE: + need_register = False + else: + raise + + user = UserID(localpart, self.hs.hostname) + user_id = user.to_string() + token = self.macaroon_generator.generate_access_token(user_id) + + if need_register: + yield self.handler.register_with_store( + user_id=user_id, + token=token, + password_hash=password_hash, + create_profile_with_displayname=user.localpart, + ) + else: + yield self.hs.get_auth_handler().delete_access_tokens_for_user(user_id) + yield self.store.add_access_token_to_user(user_id=user_id, token=token) + + if displayname is not None: + # logger.info("setting user display name: %s -> %s", user_id, displayname) + yield self.hs.get_profile_handler().set_displayname( + user, requester, displayname, by_admin=True + ) + + defer.returnValue((user_id, token)) From f9e99f95340c2489ab38c6782ac16440455777f3 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Mon, 8 Jul 2019 14:54:22 +0100 Subject: [PATCH 60/61] Factor out some redundant code in the login impl (#5639) * Factor out some redundant code in the login impl Also fixes a redundant access_token which was generated during jwt login. 
* changelog --- changelog.d/5639.misc | 1 + synapse/rest/client/v1/login.py | 49 +++++++-------------------------- 2 files changed, 11 insertions(+), 39 deletions(-) create mode 100644 changelog.d/5639.misc diff --git a/changelog.d/5639.misc b/changelog.d/5639.misc new file mode 100644 index 000000000..413b13128 --- /dev/null +++ b/changelog.d/5639.misc @@ -0,0 +1 @@ +Factor out some redundant code in the login implementation. diff --git a/synapse/rest/client/v1/login.py b/synapse/rest/client/v1/login.py index f96117823..b13043cc6 100644 --- a/synapse/rest/client/v1/login.py +++ b/synapse/rest/client/v1/login.py @@ -283,19 +283,7 @@ class LoginRestServlet(RestServlet): yield auth_handler.validate_short_term_login_token_and_get_user_id(token) ) - device_id = login_submission.get("device_id") - initial_display_name = login_submission.get("initial_device_display_name") - device_id, access_token = yield self.registration_handler.register_device( - user_id, device_id, initial_display_name - ) - - result = { - "user_id": user_id, # may have changed - "access_token": access_token, - "home_server": self.hs.hostname, - "device_id": device_id, - } - + result = yield self._register_device_with_callback(user_id, login_submission) defer.returnValue(result) @defer.inlineCallbacks @@ -323,35 +311,18 @@ class LoginRestServlet(RestServlet): raise LoginError(401, "Invalid JWT", errcode=Codes.UNAUTHORIZED) user_id = UserID(user, self.hs.hostname).to_string() - device_id = login_submission.get("device_id") - initial_display_name = login_submission.get("initial_device_display_name") - - auth_handler = self.auth_handler - registered_user_id = yield auth_handler.check_user_exists(user_id) - if registered_user_id: - device_id, access_token = yield self.registration_handler.register_device( - registered_user_id, device_id, initial_display_name - ) - result = { - "user_id": registered_user_id, - "access_token": access_token, - "home_server": self.hs.hostname, - } - else: - user_id, access_token = ( - yield self.registration_handler.register(localpart=user) - ) - device_id, access_token = yield self.registration_handler.register_device( - user_id, device_id, initial_display_name + registered_user_id = yield self.auth_handler.check_user_exists(user_id) + if not registered_user_id: + registered_user_id, _ = ( + yield self.registration_handler.register( + localpart=user, generate_token=False + ) ) - result = { - "user_id": user_id, # may have changed - "access_token": access_token, - "home_server": self.hs.hostname, - } - + result = yield self._register_device_with_callback( + registered_user_id, login_submission + ) defer.returnValue(result) From 4b1f7febc753f9438c314ef3c03d9e3a86715b93 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Mon, 8 Jul 2019 14:55:34 +0100 Subject: [PATCH 61/61] Update ModuleApi to avoid register(generate_token=True) (#5640) * Update ModuleApi to avoid register(generate_token=True) This is the only place this is still used, so I'm trying to kill it off. * changelog --- changelog.d/5640.misc | 1 + synapse/module_api/__init__.py | 64 +++++++++++++++++++++++++++++----- 2 files changed, 57 insertions(+), 8 deletions(-) create mode 100644 changelog.d/5640.misc diff --git a/changelog.d/5640.misc b/changelog.d/5640.misc new file mode 100644 index 000000000..7d69a1b3b --- /dev/null +++ b/changelog.d/5640.misc @@ -0,0 +1 @@ +Update ModuleApi to avoid register(generate_token=True). 
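The module_api changes below split the deprecated ModuleApi.register() into two explicit calls:
register_user(), which only creates the account, and register_device(), which mints a device and
access token. A rough usage sketch for module authors (assumptions: `api` is the ModuleApi
instance handed to the module, and `provision_user` is a hypothetical helper, not part of this
patch):

    from twisted.internet import defer

    @defer.inlineCallbacks
    def provision_user(api, localpart, display_name):
        # Register the account first; only mint a device + access token if
        # the module actually needs to act as the user afterwards.
        user_id = yield api.register_user(
            localpart=localpart, displayname=display_name
        )
        device_id, access_token = yield api.register_device(
            user_id, initial_display_name="example device"
        )
        defer.returnValue((user_id, device_id, access_token))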
diff --git a/synapse/module_api/__init__.py b/synapse/module_api/__init__.py index bf43ca09b..a0be2c5ca 100644 --- a/synapse/module_api/__init__.py +++ b/synapse/module_api/__init__.py @@ -12,10 +12,14 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import logging + from twisted.internet import defer from synapse.types import UserID +logger = logging.getLogger(__name__) + class ModuleApi(object): """A proxy object that gets passed to password auth providers so they @@ -76,8 +80,13 @@ class ModuleApi(object): @defer.inlineCallbacks def register(self, localpart, displayname=None, emails=[]): - """Registers a new user with given localpart and optional - displayname, emails. + """Registers a new user with given localpart and optional displayname, emails. + + Also returns an access token for the new user. + + Deprecated: avoid this, as it generates a new device with no way to + return that device to the user. Prefer separate calls to register_user and + register_device. Args: localpart (str): The localpart of the new user. @@ -85,16 +94,55 @@ class ModuleApi(object): emails (List[str]): Emails to bind to the new user. Returns: - Deferred: a 2-tuple of (user_id, access_token) + Deferred[tuple[str, str]]: a 2-tuple of (user_id, access_token) """ - # Register the user - reg = self.hs.get_registration_handler() - user_id, access_token = yield reg.register( - localpart=localpart, default_display_name=displayname, bind_emails=emails + logger.warning( + "Using deprecated ModuleApi.register which creates a dummy user device." ) - + user_id = yield self.register_user(localpart, displayname, emails) + _, access_token = yield self.register_device(user_id) defer.returnValue((user_id, access_token)) + @defer.inlineCallbacks + def register_user(self, localpart, displayname=None, emails=[]): + """Registers a new user with given localpart and optional displayname, emails. + + Args: + localpart (str): The localpart of the new user. + displayname (str|None): The displayname of the new user. + emails (List[str]): Emails to bind to the new user. + + Returns: + Deferred[str]: user_id + """ + user_id, _ = yield self.hs.get_registration_handler().register( + localpart=localpart, + default_display_name=displayname, + bind_emails=emails, + generate_token=False, + ) + + defer.returnValue(user_id) + + def register_device(self, user_id, device_id=None, initial_display_name=None): + """Register a device for a user and generate an access token. + + Args: + user_id (str): full canonical @user:id + device_id (str|None): The device ID to check, or None to generate + a new one. + initial_display_name (str|None): An optional display name for the + device. + + Returns: + defer.Deferred[tuple[str, str]]: Tuple of device ID and access token + """ + return self.hs.get_registration_handler().register_device( + user_id=user_id, + device_id=device_id, + initial_display_name=initial_display_name, + ) + @defer.inlineCallbacks def invalidate_access_token(self, access_token): """Invalidate an access token for a user