From 1a6ead758fc432a4d93f5603e188be2098530d32 Mon Sep 17 00:00:00 2001 From: reivilibre <38398653+reivilibre@users.noreply.github.com> Date: Wed, 19 Aug 2020 08:16:43 +0100 Subject: [PATCH 01/44] Quieten tests. (#149) --- changelog.d/149.misc | 1 + tests/test_apns.py | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 changelog.d/149.misc diff --git a/changelog.d/149.misc b/changelog.d/149.misc new file mode 100644 index 00000000..b782af26 --- /dev/null +++ b/changelog.d/149.misc @@ -0,0 +1 @@ +Remove a source of nuisance, noisy (but otherwise harmless) exceptions in tests. diff --git a/tests/test_apns.py b/tests/test_apns.py index 4663a21c..5898a0a1 100644 --- a/tests/test_apns.py +++ b/tests/test_apns.py @@ -68,7 +68,7 @@ def test_payload_truncation(self): """ # Arrange method = self.apns_pushkin_snotif - method.return_value = testutils.make_async_magic_mock( + method.side_effect = testutils.make_async_magic_mock( NotificationResult("notID", "200") ) self.sygnal.pushkins[PUSHKIN_ID].MAX_JSON_BODY_SIZE = 240 @@ -91,7 +91,7 @@ def test_payload_truncation_test_validity(self): """ # Arrange method = self.apns_pushkin_snotif - method.return_value = testutils.make_async_magic_mock( + method.side_effect = testutils.make_async_magic_mock( NotificationResult("notID", "200") ) self.sygnal.pushkins[PUSHKIN_ID].MAX_JSON_BODY_SIZE = 4096 From d7b166085c2915d522e004080d4910655cc9e978 Mon Sep 17 00:00:00 2001 From: reivilibre <38398653+reivilibre@users.noreply.github.com> Date: Tue, 25 Aug 2020 15:47:50 +0100 Subject: [PATCH 02/44] Add Documentation (Troubleshooting, Application Developers' Notes) (#150) Co-authored-by: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> --- README.rst | 16 ++++ changelog.d/150.doc | 1 + docs/applications.md | 205 ++++++++++++++++++++++++++++++++++++++++ docs/troubleshooting.md | 200 +++++++++++++++++++++++++++++++++++++++ 4 files changed, 422 insertions(+) create mode 100644 changelog.d/150.doc create mode 
100644 docs/applications.md create mode 100644 docs/troubleshooting.md diff --git a/README.rst b/README.rst index b80b4505..3d2cdaef 100644 --- a/README.rst +++ b/README.rst @@ -9,6 +9,7 @@ for a high level overview of how notifications work in Matrix. https://matrix.org/docs/spec/push_gateway/r0.1.0 describes the protocol that Matrix Home Servers use to send notifications to Push Gateways such as Sygnal. + Setup ===== Sygnal is configured through a YAML configuration file. @@ -27,6 +28,7 @@ Keys in this section take the form of the ``app_id``, as specified when setting See the sample configuration for examples. + App Types --------- There are two supported App Types: @@ -65,6 +67,7 @@ gcm to contain the 'Server key', which can be acquired from Firebase Console at: ``https://console.firebase.google.com/project//settings/cloudmessaging/`` + Using an HTTP Proxy for outbound traffic ---------------------------------------- @@ -78,6 +81,7 @@ Currently only HTTP proxies with the CONNECT method are supported. If you wish, you can instead configure a HTTP CONNECT proxy in ``sygnal.yaml``. + Pusher ``data`` configuration ============================= @@ -90,6 +94,7 @@ via `POST /_matrix/client/r0/pushers/set `_ +* `Troubleshooting `_ diff --git a/changelog.d/150.doc b/changelog.d/150.doc new file mode 100644 index 00000000..783994f8 --- /dev/null +++ b/changelog.d/150.doc @@ -0,0 +1 @@ +Add preliminary documentation (Troubleshooting and Application Developers' Notes). diff --git a/docs/applications.md b/docs/applications.md new file mode 100644 index 00000000..7441135b --- /dev/null +++ b/docs/applications.md @@ -0,0 +1,205 @@ +# Notes for application developers + +This document aims to illustrate some of the quirks, peculiarities and other +notable aspects of Sygnal for developers of applications that need to receive +push. 
+ +Sygnal has been somewhat flavoured by the development of the Element iOS +and Element Android clients, but nevertheless is intended to be useful for +other iOS and Android applications that have 'typical' requirements, without +need to resort to customising the application types (Pushkins). + +(It is possible to extend Sygnal with other application types, but this is +out of scope for this document.) + +Once you have read this document, you may also find [the troubleshooting document](./troubleshooting.md) +useful if you are running into trouble whilst deploying Sygnal. + + +## Definitions + +* **APNs**: Apple Push Notification service, a notification service for iOS + applications. +* **FCM**: Firebase Cloud Messaging (previously Google Cloud Messaging), a + notification service primarily for Android applications but also usable for + iOS and Chrome applications. +* **push key**: Also known as registration token (FCM) or device token (APNs), + this ID identifies a device to which notifications can be sent. +* **Pushkin**: A module which adds support to Sygnal for an application type. + Sygnal comes with an APNs pushkin and an FCM pushkin out of the box, but you + may also use custom ones. + + +## Outlines of flows + +An understanding of the flows of push notifications may be useful. +This section will provide an outline. + + +### Registration flow + +0) The client needs to be configured with the address of its Sygnal instance, + and any configuration that FCM or APNs requires for client apps. + +1) The client needs to use an FCM or APNs library to acquire a push key, which + identifies the client to the notification service. + +2) The client registers a pusher on the user's homeserver, giving the address of + its Sygnal instance, its push key, and perhaps some other parameters. + To register a pusher, it uses [POST /_matrix/client/r0/pushers/set](https://matrix.org/docs/spec/client_server/latest#post-matrix-client-r0-pushers-set). 
+ + +#### Worth noting + +* In general, there is no contract between the operators of the homeserver(s) + and the operators of the Sygnal push gateways. + + - Sygnal (push gateway) instances are deployed by the owner of the application. + + - Unless the application restricts it, the user is usually free to choose any + homeserver which may be operated by anyone. + +* It is not feasible to allow end-users to configure their own Sygnal instance, + because the Sygnal instance needs the appropriate FCM or APNs secrets that + belong to the application. + + +### Notification flow + +1) The user's homeserver receives an event (from federation, another local user, + or even an application service — the source is unimportant). + +2) The user's homeserver applies push rules to the event on behalf of the user, + in order to determine whether to send a push notification or not (as well as + a few other tweaks). + + - For example, the push rules can decide to exclude certain rooms, or to + notify for mentions on keywords (so the user could, for example, be notified + when anyone mentions 'lunch'). + +3) If the homeserver decides to send a notification, we continue. + +4) The homeserver calls [POST /_matrix/push/v1/notify](https://matrix.org/docs/spec/push_gateway/latest#post-matrix-push-v1-notify) + on the Sygnal instance that the user's client registered. + +5) The Sygnal receives this request and rewrites it into a request for the + appropriate notification service (APNs or FCM), which it then sends. + +6) Sygnal responds to the homeserver about the push's success and whether or not + the push key was rejected. + + +#### Worth noting + +* When Sygnal only sends data messages (also known as 'silent notifications') to + target devices — the application needs to wake up and trigger its own + notification, perhaps after downloading and decrypting the event. + + - Data-only messages are always sent to FCM. 
+ + - Data-only messages are sent to APNs when the `event_id_only` format is in + use. In other cases, the app may still need to perform additional processing, + for example if encrypted events would need to be decrypted. + +* When encrypted events are present, the homeserver is unable to conclusively + run push rules — in this case, the client will need to run them locally to + decide whether or not a notification should be displayed, a sound needs to be + played and/or the message needs to be highlighted. + +* **Consider user privacy**: if you use the `event_id_only` format, then data + sent to the notification service (FCM or APNs) is minimal. If you do not, then + unencrypted messages will have their content sent to the notification service, + which some may prefer to avoid. + + +## Platform-specific notes + +### Apple Push Notification service + +By default, the client will receive a message with this structure: + +```json +{ + "room_id": "!slw48wfj34rtnrf:example.com", + "event_id": "$qTOWWTEL48yPm3uT-gdNhFcoHxfKbZuqRVnnWWSkGBs", + "aps": { + "alert": { + "loc-key": "MSG_FROM_USER_IN_ROOM_WITH_CONTENT", + "loc-args": [ + "Major Tom", + "Mission Control", + "I'm floating in a most peculiar way." + ] + }, + "badge": 3 + } +} +``` + +Please note that fields may be truncated if they are large, so that they fit +within APNs' limit. +Please also note that some fields will be unavailable if you registered a pusher +with `event_id_only` format. + + +#### iOS applications beware! + +When registering your iOS pusher, you have the ability to specify a default +payload that will be sent to APNs. This allows you to set custom flags for APNs. + +Of particular interest are `content-available` and `mutable-content` (which you +can set to `1` to enable). + +Consult [APNs documentation] for a more in-depth explanation, but: + +* `content-available` wakes up your application for up to 30 seconds in which it + can process the message. 
+* `mutable-content` allows your application to mutate (modify) the notification. + +An example `data` dictionary to specify on `POST /_matrix/client/r0/pushers/set`: + +```json +{ + "url": "https://push-gateway.location.here/_matrix/push/v1/notify", + "format": "event_id_only", + "default_payload": { + "aps": { + "mutable-content": 1, + "content-available": 1, + "alert": {"loc-key": "SINGLE_UNREAD", "loc-args": []} + } + } +} +``` + +[APNs documentation]: https://developer.apple.com/library/archive/documentation/NetworkingInternet/Conceptual/RemoteNotificationsPG/CreatingtheNotificationPayload.html + + +### Firebase Cloud Messaging + +The client will receive a message with an FCM `data` payload with this structure: + +```json +{ + "event_id": "$3957tyerfgewrf384", + "type": "m.room.message", + "sender": "@exampleuser:example.org", + "room_name": "Mission Control", + "room_alias": "#exampleroom:example.org", + "sender_display_name": "Major Tom", + "content": { + "msgtype": "m.text", + "body": "I'm floating in a most peculiar way." + }, + "room_id": "!slw48wfj34rtnrf:example.org", + "prio": "high", + "unread": 2, + "missed_calls": 1 +} +``` + +Please note that fields may be truncated if they are large, so that they fit +within FCM's limit. +Please also note that some fields will be unavailable if you registered a pusher +with `event_id_only` format. + diff --git a/docs/troubleshooting.md b/docs/troubleshooting.md new file mode 100644 index 00000000..8e58d645 --- /dev/null +++ b/docs/troubleshooting.md @@ -0,0 +1,200 @@ +# Troubleshooting Sygnal deployments + +Push notifications can be rather hard to get right, and there are plenty of +places to trip up and have nothing but silence to show for your efforts. + +This document offers some suggestions of what to check and lists some common +pitfalls you may encounter, based on experience. + +There are also appendices with information which may be useful for manual +debugging. 
+ +Your first steps are to ensure that you have logging so you can see what is going +on; a level of `INFO` or even `DEBUG` will be useful. + + +## Narrowing in on the problem + +### Check the pusher is registered in your homeserver + +Typically, applications will register a pusher on startup. + +If you have access to your homeserver, you can check that it is making it there. + +Start your application and then run a query against the database. + +#### On Synapse + +Use `sqlite3 /path/to/homeserver.db` or `psql synapse` as required for your +deployment. + +```sql +SELECT app_id, data FROM pushers + WHERE user_name = '@my.user:example.org' AND kind='http'; +``` + +You should see something like: + +``` + app_id | data +-------------------+-------------------------------------------------------- + org.example.chat | {"format":"event_id_only", + | "url":"https://example.org/_matrix/push/v1/notify"} +``` + + +#### On other homeserver implementations + +No details available, but contributions welcome. + + +### Check the push gateway (Sygnal) is reachable from the homeserver + +Following on from the example above, the homeserver's database contains the +push gateway URL of `https://example.org/_matrix/push/v1/notify`. + +It may be worth manually checking that the push gateway is reachable from the +homeserver; e.g. with curl: + +``` +$ curl https://example.org/_matrix/push/v1/notify + + 405 - Method Not Allowed + +

Method Not Allowed

+

Your browser approached me (at /_matrix/push/v1/notify) with the method "GET". I only allow the methods HEAD, POST here.

+ + +``` + +If you get a response, such as an error like **405 Method Not Allowed**, as above, +this would suggest that the push gateway is at least reachable. + +If you get a **404 No Such Resource** error on the `/_matrix/push/v1/notify` endpoint, +then chances are that your reverse proxy is not configured to pass through the +full URL. + +If you don't get an HTTP response, then it is probably worth investigation. +Check that: + +* Sygnal is running +* Sygnal's configuration makes it listen on the desired port +* Any reverse proxies are correctly set up and running +* The firewall permits inbound traffic on the port in question + + +## Troubleshooting Firebase notifications + +### iOS-specific troubles with apps using Firebase + +#### App doesn't receive notifications when inactive + +Sygnal currently only sends 'data messages' (also called 'silent notifications', +but this name could be misleading). + +Whereas data messages will wake up apps on Android with no additional changes, +iOS needs to be told that a notification is meant to wake up an inactive app. +This is done with FCM's `content_available` flag, which you can set in your +`fcm_options` dictionary for the Firebase pushkin. +(See [`sygnal.yaml.sample`](../sygnal.yaml.sample).) + + +## Troubleshooting APNs notifications + +### Base64 decoding error in the logs + +#### Common cause 1: Hex rather than base64 encoding + +Sygnal's APNs support expects your pushkeys to be base64 encoded rather than +hexadecimally encoded. + +*(Why? The previous APNs API which Sygnal supported was binary and didn't define +a text-safe encoding, so it was chosen to use base64 in Sygnal. 
Now the new API +exists and specifies hexadecimal encoding, but Sygnal retains backwards +compatibility and will do the base64-to-hex conversion.)* + + +#### Common cause 2: Firebase token given + +If you are using Firebase for your iOS app, you will get Firebase tokens +(looking a bit like `blahblahblah:APA91blahblahblah`… note the presence of a +colon which is not valid base64). + +In this case, you need to **configure Sygnal to use a FCM (gcm) pushkin rather +than an APNs one, as Firebase talks to APNs on your behalf**. +Instead of configuring Sygnal with your APNs secrets, you need to configure +Firebase with your APNs secrets, and Sygnal with your Firebase secrets. + + +### App doesn't receive notifications when inactive + +If you want your application to be woken up to be able to process APNs messages +received when your application is in the background, you need to set the +`content-available` flag in your pusher's default payload — see +[the notes for iOS applications](applications.md#ios-applications-beware). + + +# Appendices + +## Sending a notification to Sygnal manually with `curl` + +Note: this depends on the heredoc syntax of the `bash` shell. 
+ +```bash +curl -i -H "Content-Type: application/json" --request POST -d '@-' http://syg1:8008/_matrix/push/v1/notify <", + "pushkey": "", + "pushkey_ts": 12345678, + "data": {}, + "tweaks": { + "sound": "bing" + } + } + ] + } +} +EOF +``` + + +## Example of an FCM request + +HTTP data sent to `https://fcm.googleapis.com/fcm/send`: + +``` +POST /fcm/send HTTP/1.1 +User-Agent: sygnal +Content-Type: application/json +Authorization: key= +Host: fcm.googleapis.com + +{"data": {"event_id": "$3957tyerfgewrf384", "type": "m.room.message", "sender": "@exampleuser:example.org", "room_name": "Mission Control", "room_alias": "#exampleroom:example.org", "membership": null, "sender_display_name": "Major Tom", "content": {"msgtype": "m.text", "body": "I'm floating in a most peculiar way."}, "room_id": "!slw48wfj34rtnrf:example.org", "prio": "high", "unread": 2, "missed_calls": 1}, "priority": "high", "to": ""} +``` + +You can send using curl using: + +```bash +curl -i -H "Content-Type: application/json" -H "Authorization: key=" --request POST -d '@-' https://fcm.googleapis.com/fcm/send <"} +EOF +``` From b2a74dba5b36f5e6eada9a7ded5c7c1d70eba9ed Mon Sep 17 00:00:00 2001 From: reivilibre <38398653+reivilibre@users.noreply.github.com> Date: Wed, 26 Aug 2020 19:53:05 +0100 Subject: [PATCH 03/44] Tests for the HTTP Proxy Support (Twisted) (#152) Taken from Synapse --- changelog.d/152.misc | 1 + mypy.ini | 3 + tests/test_httpproxy_twisted.py | 229 +++++++++++++++++++++++++ tests/testutils.py | 27 ++- tests/tls/ca.crt | 19 +++ tests/tls/ca.key | 27 +++ tests/tls/server.key | 27 +++ tests/twisted_test_helpers.py | 292 ++++++++++++++++++++++++++++++++ 8 files changed, 621 insertions(+), 4 deletions(-) create mode 100644 changelog.d/152.misc create mode 100644 tests/test_httpproxy_twisted.py create mode 100644 tests/tls/ca.crt create mode 100644 tests/tls/ca.key create mode 100644 tests/tls/server.key create mode 100644 tests/twisted_test_helpers.py diff --git a/changelog.d/152.misc 
b/changelog.d/152.misc new file mode 100644 index 00000000..3f5a3a8b --- /dev/null +++ b/changelog.d/152.misc @@ -0,0 +1 @@ +Add tests for HTTP Proxy support. diff --git a/mypy.ini b/mypy.ini index 09060c4c..ed90a598 100644 --- a/mypy.ini +++ b/mypy.ini @@ -39,3 +39,6 @@ ignore_missing_imports = True [mypy-setuptools] ignore_missing_imports = True + +[mypy-OpenSSL.*] +ignore_missing_imports = True diff --git a/tests/test_httpproxy_twisted.py b/tests/test_httpproxy_twisted.py new file mode 100644 index 00000000..a99e2d5f --- /dev/null +++ b/tests/test_httpproxy_twisted.py @@ -0,0 +1,229 @@ +# -*- coding: utf-8 -*- +# Copyright 2019-2020 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import logging + +from twisted.internet import interfaces # noqa: F401 +from twisted.internet.protocol import Factory +from twisted.protocols.tls import TLSMemoryBIOFactory +from twisted.web.client import readBody +from twisted.web.http import HTTPChannel + +from sygnal.helper.proxy.proxyagent_twisted import ProxyAgent + +from tests.testutils import TestCase +from tests.twisted_test_helpers import ( + FakeTransport, + TestServerTLSConnectionFactory, + get_test_https_policy, +) + +logger = logging.getLogger(__name__) + +HTTPFactory = Factory.forProtocol(HTTPChannel) + + +class SygnalTwistedProxyTests(TestCase): + def config_setup(self, config): + super(SygnalTwistedProxyTests, self).config_setup(config) + config["apps"]["com.example.gcm"] = { + "type": "tests.test_gcm.TestGcmPushkin", + "api_key": "kii", + } + + def _make_connection( + self, client_factory, server_factory, ssl=False, expected_sni=None + ): + """Builds a test server, and completes the outgoing client connection + + Args: + client_factory (interfaces.IProtocolFactory): the the factory that the + application is trying to use to make the outbound connection. We will + invoke it to build the client Protocol + + server_factory (interfaces.IProtocolFactory): a factory to build the + server-side protocol + + ssl (bool): If true, we will expect an ssl connection and wrap + server_factory with a TLSMemoryBIOFactory + + expected_sni (bytes|None): the expected SNI value + + Returns: + IProtocol: the server Protocol returned by server_factory + """ + if ssl: + server_factory = _wrap_server_factory_for_tls(server_factory) + + server_protocol = server_factory.buildProtocol(None) + + # now, tell the client protocol factory to build the client protocol, + # and wire the output of said protocol up to the server via + # a FakeTransport. + # + # Normally this would be done by the TCP socket code in Twisted, but we are + # stubbing that out here. 
+ client_protocol = client_factory.buildProtocol(None) + client_protocol.makeConnection( + FakeTransport(server_protocol, self.reactor, client_protocol) + ) + + # tell the server protocol to send its stuff back to the client, too + server_protocol.makeConnection( + FakeTransport(client_protocol, self.reactor, server_protocol) + ) + + if ssl: + http_protocol = server_protocol.wrappedProtocol + tls_connection = server_protocol._tlsConnection + else: + http_protocol = server_protocol + tls_connection = None + + # give the reactor a pump to get the TLS juices flowing (if needed) + self.reactor.advance(0) + + if expected_sni is not None: + server_name = tls_connection.get_servername() + self.assertEqual( + server_name, + expected_sni, + "Expected SNI %s but got %s" % (expected_sni, server_name), + ) + + return http_protocol + + def test_https_request_via_proxy(self): + agent = ProxyAgent( + self.reactor, + contextFactory=get_test_https_policy(), + proxy_url_str="http://proxy.com:1080", + ) + + self.reactor.lookups["proxy.com"] = "1.2.3.5" + d = agent.request(b"GET", b"https://test.com/abc") + + # there should be a pending TCP connection + clients = self.reactor.tcpClients + self.assertEqual(len(clients), 1) + (host, port, client_factory, _timeout, _bindAddress) = clients[0] + self.assertEqual(host, "1.2.3.5") + self.assertEqual(port, 1080) + + # make a test HTTP server, and wire up the client + proxy_server = self._make_connection( + client_factory, _get_test_protocol_factory() + ) + + # fish the transports back out so that we can do the old switcheroo + s2c_transport = proxy_server.transport + client_protocol = s2c_transport.other + c2s_transport = client_protocol.transport + + # the FakeTransport is async, so we need to pump the reactor + self.reactor.advance(0) + + # now there should be a pending CONNECT request + self.assertEqual(len(proxy_server.requests), 1) + + request = proxy_server.requests[0] + self.assertEqual(request.method, b"CONNECT") + 
self.assertEqual(request.path, b"test.com:443") + + # tell the proxy server not to close the connection + proxy_server.persistent = True + + # this just stops the http Request trying to do a chunked response + # request.setHeader(b"Content-Length", b"0") + request.finish() + + # now we can replace the proxy channel with a new, SSL-wrapped HTTP channel + ssl_factory = _wrap_server_factory_for_tls(_get_test_protocol_factory()) + ssl_protocol = ssl_factory.buildProtocol(None) + http_server = ssl_protocol.wrappedProtocol + + ssl_protocol.makeConnection( + FakeTransport(client_protocol, self.reactor, ssl_protocol) + ) + c2s_transport.other = ssl_protocol + + self.reactor.advance(0) + + server_name = ssl_protocol._tlsConnection.get_servername() + expected_sni = b"test.com" + self.assertEqual( + server_name, + expected_sni, + "Expected SNI %r but got %r" % (expected_sni, server_name), + ) + + # now there should be a pending request + self.assertEqual(len(http_server.requests), 1) + + request = http_server.requests[0] + self.assertEqual(request.method, b"GET") + self.assertEqual(request.path, b"/abc") + self.assertEqual(request.requestHeaders.getRawHeaders(b"host"), [b"test.com"]) + request.write(b"result") + + self.reactor.advance(0) + + request.finish() + + self.reactor.advance(0) + + resp = self.successResultOf(d) + body = self.successResultOf(readBody(resp)) + self.assertEqual(body, b"result") + + +def _wrap_server_factory_for_tls(factory, sanlist=None): + """Wrap an existing Protocol Factory with a test TLSMemoryBIOFactory + + The resultant factory will create a TLS server which presents a certificate + signed by our test CA, valid for the domains in `sanlist` + + Args: + factory (interfaces.IProtocolFactory): protocol factory to wrap + sanlist (iterable[bytes]): list of domains the cert should be valid for + + Returns: + interfaces.IProtocolFactory + """ + if sanlist is None: + sanlist = [b"DNS:test.com"] + + connection_creator = 
TestServerTLSConnectionFactory(sanlist=sanlist) + return TLSMemoryBIOFactory( + connection_creator, isClient=False, wrappedFactory=factory + ) + + +def _get_test_protocol_factory(): + """Get a protocol Factory which will build an HTTPChannel + + Returns: + interfaces.IProtocolFactory + """ + server_factory = Factory.forProtocol(HTTPChannel) + + # Request.finish expects the factory to have a 'log' method. + server_factory.log = _log_request + + return server_factory + + +def _log_request(request): + """Implements Factory.log, which is expected by Request.finish""" + logger.info("Completed request %s", request) diff --git a/tests/testutils.py b/tests/testutils.py index de9b3a25..cd43e092 100644 --- a/tests/testutils.py +++ b/tests/testutils.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2019 The Matrix.org Foundation C.I.C. +# Copyright 2019-2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -17,15 +17,19 @@ from os import environ from threading import Condition from time import time_ns -from typing import BinaryIO, List, Optional, Union +from typing import BinaryIO, Dict, List, Optional, Union import attr import psycopg2 -from twisted.internet.defer import ensureDeferred -from twisted.test.proto_helpers import MemoryReactorClock +from twisted.internet._resolver import SimpleResolverComplexifier +from twisted.internet.defer import ensureDeferred, fail, succeed +from twisted.internet.error import DNSLookupError +from twisted.internet.interfaces import IReactorPluggableNameResolver, IResolverSimple +from twisted.internet.testing import MemoryReactorClock from twisted.trial import unittest from twisted.web.http_headers import Headers from twisted.web.server import Request +from zope.interface.declarations import implementer from sygnal.http import PushGatewayApiServer from sygnal.sygnal import CONFIG_DEFAULTS, Sygnal, merge_left_with_defaults @@ -88,6 +92,7 @@ def setUp(self): logging_config = { "setup": { + "disable_existing_loggers": False, # otherwise this breaks logging! 
"formatters": { "normal": { "format": "%(asctime)s [%(process)d] " @@ -123,6 +128,7 @@ def setUp(self): self._set_up_database(self.dbname) self.sygnal = Sygnal(config, reactor) + self.reactor = reactor self.sygnal.database.start() self.v1api = PushGatewayApiServer(self.sygnal) @@ -261,11 +267,24 @@ def channel_result(channel): return [channel_result(channel) for channel in channels] +@implementer(IReactorPluggableNameResolver) class ExtendedMemoryReactorClock(MemoryReactorClock): def __init__(self): super().__init__() self.work_notifier = Condition() + self.lookups: Dict[str, str] = {} + + @implementer(IResolverSimple) + class FakeResolver(object): + @staticmethod + def getHostByName(name, timeout=None): + if name not in self.lookups: + return fail(DNSLookupError("OH NO: unknown %s" % (name,))) + return succeed(self.lookups[name]) + + self.nameResolver = SimpleResolverComplexifier(FakeResolver()) + def callFromThread(self, function, *args): self.callLater(0, function, *args) diff --git a/tests/tls/ca.crt b/tests/tls/ca.crt new file mode 100644 index 00000000..730f81e9 --- /dev/null +++ b/tests/tls/ca.crt @@ -0,0 +1,19 @@ +-----BEGIN CERTIFICATE----- +MIIDCjCCAfKgAwIBAgIJAPwHIHgH/jtjMA0GCSqGSIb3DQEBCwUAMBoxGDAWBgNV +BAMMD3N5bmFwc2UgdGVzdCBDQTAeFw0xOTA2MTAxMTI2NDdaFw0yOTA2MDcxMTI2 +NDdaMBoxGDAWBgNVBAMMD3N5bmFwc2UgdGVzdCBDQTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBAOZOXCKuylf9jHzJXpU2nS+XEKrnGPgs2SAhQKrzBxg3 +/d8KT2Zsfsj1i3G7oGu7B0ZKO6qG5AxOPCmSMf9/aiSHFilfSh+r8rCpJyWMev2c +/w/xmhoFHgn+H90NnqlXvWb5y1YZCE3gWaituQSaa93GPKacRqXCgIrzjPUuhfeT +uwFQt4iyUhMNBYEy3aw4IuIHdyBqi4noUhR2ZeuflLJ6PswdJ8mEiAvxCbBGPerq +idhWcZwlo0fKu4u1uu5B8TnTsMg2fJgL6c5olBG90Urt22gA6anfP5W/U1ZdVhmB +T3Rv5SJMkGyMGE6sEUetLFyb2GJpgGD7ePkUCZr+IMMCAwEAAaNTMFEwHQYDVR0O +BBYEFLg7nTCYsvQXWTyS6upLc0YTlIwRMB8GA1UdIwQYMBaAFLg7nTCYsvQXWTyS +6upLc0YTlIwRMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBADqx +GX4Ul5OGQlcG+xTt4u3vMCeqGo8mh1AnJ7zQbyRmwjJiNxJVX+/EcqFSTsmkBNoe 
+xdYITI7Z6dyoiKw99yCZDE7gALcyACEU7r0XY7VY/hebAaX6uLaw1sZKKAIC04lD +KgCu82tG85n60Qyud5SiZZF0q1XVq7lbvOYVdzVZ7k8Vssy5p9XnaLJLMggYeOiX +psHIQjvYGnTTEBZZHzWOrc0WGThd69wxTOOkAbCsoTPEwZL8BGUsdtLWtvhp452O +npvaUBzKg39R5X3KTdhB68XptiQfzbQkd3FtrwNuYPUywlsg55Bxkv85n57+xDO3 +D9YkgUqEp0RGUXQgCsQ= +-----END CERTIFICATE----- diff --git a/tests/tls/ca.key b/tests/tls/ca.key new file mode 100644 index 00000000..5c99cae1 --- /dev/null +++ b/tests/tls/ca.key @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEpgIBAAKCAQEA5k5cIq7KV/2MfMlelTadL5cQqucY+CzZICFAqvMHGDf93wpP +Zmx+yPWLcbuga7sHRko7qobkDE48KZIx/39qJIcWKV9KH6vysKknJYx6/Zz/D/Ga +GgUeCf4f3Q2eqVe9ZvnLVhkITeBZqK25BJpr3cY8ppxGpcKAivOM9S6F95O7AVC3 +iLJSEw0FgTLdrDgi4gd3IGqLiehSFHZl65+Usno+zB0nyYSIC/EJsEY96uqJ2FZx +nCWjR8q7i7W67kHxOdOwyDZ8mAvpzmiUEb3RSu3baADpqd8/lb9TVl1WGYFPdG/l +IkyQbIwYTqwRR60sXJvYYmmAYPt4+RQJmv4gwwIDAQABAoIBAQCFuFG+wYYy+MCt +Y65LLN6vVyMSWAQjdMbM5QHLQDiKU1hQPIhFjBFBVXCVpL9MTde3dDqYlKGsk3BT +ItNs6eoTM2wmsXE0Wn4bHNvh7WMsBhACjeFP4lDCtI6DpvjMkmkidT8eyoIL1Yu5 +aMTYa2Dd79AfXPWYIQrJowfhBBY83KuW5fmYnKKDVLqkT9nf2dgmmQz85RgtNiZC +zFkIsNmPqH1zRbcw0wORfOBrLFvsMc4Tt8EY5Wz3NnH8Zfgf8Q3MgARH1yspz3Vp +B+EYHbsK17xZ+P59KPiX3yefvyYWEUjFF7ymVsVnDxLugYl4pXwWUpm19GxeDvFk +cgBUD5OBAoGBAP7lBdCp6lx6fYtxdxUm3n4MMQmYcac4qZdeBIrvpFMnvOBBuixl +eavcfFmFdwgAr8HyVYiu9ynac504IYvmtYlcpUmiRBbmMHbvLQEYHl7FYFKNz9ej +2ue4oJE3RsPdLsD3xIlc+xN8oT1j0knyorwsHdj0Sv77eZzZS9XZZfJzAoGBAOdO +CibYmoNqK/mqDHkp6PgsnbQGD5/CvPF/BLUWV1QpHxLzUQQeoBOQW5FatHe1H5zi +mbq3emBefVmsCLrRIJ4GQu4vsTMfjcpGLwviWmaK6pHbGPt8IYeEQ2MNyv59EtA2 +pQy4dX7/Oe6NLAR1UEQjXmCuXf+rxnxF3VJd1nRxAoGBANb9eusl9fusgSnVOTjJ +AQ7V36KVRv9hZoG6liBNwo80zDVmms4JhRd1MBkd3mkMkzIF4SkZUnWlwLBSANGM +dX/3eZ5i1AVwgF5Am/f5TNxopDbdT/o1RVT/P8dcFT7s1xuBn+6wU0F7dFBgWqVu +lt4aY85zNrJcj5XBHhqwdDGLAoGBAIksPNUAy9F3m5C6ih8o/aKAQx5KIeXrBUZq +v43tK+kbYfRJHBjHWMOBbuxq0G/VmGPf9q9GtGqGXuxZG+w+rYtJx1OeMQZShjIZ +ITl5CYeahrXtK4mo+fF2PMh3m5UE861LWuKKWhPwpJiWXC5grDNcjlHj1pcTdeip 
+PjHkuJPhAoGBAIh35DptqqdicOd3dr/+/m2YQywY8aSpMrR0bC06aAkscD7oq4tt +s/jwl0UlHIrEm/aMN7OnGIbpfkVdExfGKYaa5NRlgOwQpShwLufIo/c8fErd2zb8 +K3ptlwBxMrayMXpS3DP78r83Z0B8/FSK2guelzdRJ3ftipZ9io1Gss1C +-----END RSA PRIVATE KEY----- diff --git a/tests/tls/server.key b/tests/tls/server.key new file mode 100644 index 00000000..c53ee02b --- /dev/null +++ b/tests/tls/server.key @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEpAIBAAKCAQEAvUAWLOE6TEp3FYSfEnJMwYtJg3KIW5BjiAOOvFVOVQfJ5eEa +vzyJ1Z+8DUgLznFnUkAeD9GjPvP7awl3NPJKLQSMkV5Tp+ea4YyV+Aa4R7flROEa +zCGvmleydZw0VqN1atVZ0ikEoglM/APJQd70ec7KSR3QoxaV2/VNCHmyAPdP+0WI +llV54VXX1CZrWSHaCSn1gzo3WjnGbxTOCQE5Z4k5hqJAwLWWhxDv+FX/jD38Sq3H +gMFNpXJv6FYwwaKU8awghHdSY/qlBPE/1rU83vIBFJ3jW6I1WnQDfCQ69of5vshK +N4v4hok56ScwdUnk8lw6xvJx1Uav/XQB9qGh4QIDAQABAoIBAQCHLO5p8hotAgdb +JFZm26N9nxrMPBOvq0ucjEX4ucnwrFaGzynGrNwa7TRqHCrqs0/EjS2ryOacgbL0 +eldeRy26SASLlN+WD7UuI7e+6DXabDzj3RHB+tGuIbPDk+ZCeBDXVTsKBOhdQN1v +KNkpJrJjCtSsMxKiWvCBow353srJKqCDZcF5NIBYBeDBPMoMbfYn5dJ9JhEf+2h4 +0iwpnWDX1Vqf46pCRa0hwEyMXycGeV2CnfJSyV7z52ZHQrvkz8QspSnPpnlCnbOE +UAvc8kZ5e8oZE7W+JfkK38vHbEGM1FCrBmrC/46uUGMRpZfDferGs91RwQVq/F0n +JN9hLzsBAoGBAPh2pm9Xt7a4fWSkX0cDgjI7PT2BvLUjbRwKLV+459uDa7+qRoGE +sSwb2QBqmQ1kbr9JyTS+Ld8dyUTsGHZK+YbTieAxI3FBdKsuFtcYJO/REN0vik+6 +fMaBHPvDHSU2ioq7spZ4JBFskzqs38FvZ0lX7aa3fguMk8GMLnofQ8QxAoGBAML9 +o5sJLN9Tk9bv2aFgnERgfRfNjjV4Wd99TsktnCD04D1GrP2eDSLfpwFlCnguck6b +jxikqcolsNhZH4dgYHqRNj+IljSdl+sYZiygO6Ld0XU+dEFO86N3E9NzZhKcQ1at +85VdwNPCS7JM2fIxEvS9xfbVnsmK6/37ZZ5iI7yxAoGBALw2vRtJGmy60pojfd1A +hibhAyINnlKlFGkSOI7zdgeuRTf6l9BTIRclvTt4hJpFgzM6hMWEbyE94hJoupsZ +bm443o/LCWsox2VI05p6urhD6f9znNWKkiyY78izY+elqksvpjgfqEresaTYAeP5 +LQe9KNSK2VuMUP1j4G04M9BxAoGAWe8ITZJuytZOgrz/YIohqPvj1l2tcIYA1a6C +7xEFSMIIxtpZIWSLZIFJEsCakpHBkPX4iwIveZfmt/JrM1JFTWK6ZZVGyh/BmOIZ +Bg4lU1oBqJTUo+aZQtTCJS29b2n5OPpkNYkXTdP4e9UsVKNDvfPlYZJneUeEzxDr +bqCPIRECgYA544KMwrWxDQZg1dsKWgdVVKx80wEFZAiQr9+0KF6ch6Iu7lwGJHFY +iI6O85paX41qeC/Fo+feIWJVJU2GvG6eBsbO4bmq+KSg4NkABJSYxodgBp9ftNeD 
+jo1tfw+gudlNe5jXHu7oSX93tqGjR4Cnlgan/KtfkB96yHOumGmOhQ== +-----END RSA PRIVATE KEY----- diff --git a/tests/twisted_test_helpers.py b/tests/twisted_test_helpers.py new file mode 100644 index 00000000..687341a6 --- /dev/null +++ b/tests/twisted_test_helpers.py @@ -0,0 +1,292 @@ +import logging +import os +import subprocess + +import attr +from OpenSSL import SSL +from OpenSSL.SSL import Connection +from twisted.internet.interfaces import IOpenSSLServerConnectionCreator +from twisted.internet.ssl import Certificate, trustRootFromCertificates +from twisted.web.client import BrowserLikePolicyForHTTPS # noqa: F401 +from twisted.web.iweb import IPolicyForHTTPS # noqa: F401 +from zope.interface.declarations import implementer + +logger = logging.getLogger(__name__) + + +@attr.s(cmp=False) +class FakeTransport(object): + """ + A twisted.internet.interfaces.ITransport implementation which sends all its data + straight into an IProtocol object: it exists to connect two IProtocols together. + + To use it, instantiate it with the receiving IProtocol, and then pass it to the + sending IProtocol's makeConnection method: + + server = HTTPChannel() + client.makeConnection(FakeTransport(server, self.reactor)) + + If you want bidirectional communication, you'll need two instances. + """ + + other = attr.ib() + """The Protocol object which will receive any data written to this transport. + + :type: twisted.internet.interfaces.IProtocol + """ + + _reactor = attr.ib() + """Test reactor + + :type: twisted.internet.interfaces.IReactorTime + """ + + _protocol = attr.ib(default=None) + """The Protocol which is producing data for this transport. Optional, but if set + will get called back for connectionLost() notifications etc. 
+ """ + + disconnecting = False + disconnected = False + connected = True + buffer = attr.ib(default=b"") + producer = attr.ib(default=None) + autoflush = attr.ib(default=True) + + def getPeer(self): + return None + + def getHost(self): + return None + + def loseConnection(self, reason=None): + if not self.disconnecting: + logger.info("FakeTransport: loseConnection(%s)", reason) + self.disconnecting = True + if self._protocol: + self._protocol.connectionLost(reason) + + # if we still have data to write, delay until that is done + if self.buffer: + logger.info( + "FakeTransport: Delaying disconnect until buffer is flushed" + ) + else: + self.connected = False + self.disconnected = True + + def abortConnection(self): + logger.info("FakeTransport: abortConnection()") + + if not self.disconnecting: + self.disconnecting = True + if self._protocol: + self._protocol.connectionLost(None) + + self.disconnected = True + + def pauseProducing(self): + if not self.producer: + return + + self.producer.pauseProducing() + + def resumeProducing(self): + if not self.producer: + return + self.producer.resumeProducing() + + def unregisterProducer(self): + if not self.producer: + return + + self.producer = None + + def registerProducer(self, producer, streaming): + self.producer = producer + self.producerStreaming = streaming + + def _produce(): + d = self.producer.resumeProducing() + d.addCallback(lambda x: self._reactor.callLater(0.1, _produce)) + + if not streaming: + self._reactor.callLater(0.0, _produce) + + def write(self, byt): + if self.disconnecting: + raise Exception("Writing to disconnecting FakeTransport") + + self.buffer = self.buffer + byt + + # always actually do the write asynchronously. Some protocols (notably the + # TLSMemoryBIOProtocol) get very confused if a read comes back while they are + # still doing a write. Doing a callLater here breaks the cycle. 
+ if self.autoflush: + self._reactor.callLater(0.0, self.flush) + + def writeSequence(self, seq): + for x in seq: + self.write(x) + + def flush(self, maxbytes=None): + if not self.buffer: + # nothing to do. Don't write empty buffers: it upsets the + # TLSMemoryBIOProtocol + return + + if self.disconnected: + return + + if getattr(self.other, "transport") is None: + # the other has no transport yet; reschedule + if self.autoflush: + self._reactor.callLater(0.0, self.flush) + return + + if maxbytes is not None: + to_write = self.buffer[:maxbytes] + else: + to_write = self.buffer + + logger.info("%s->%s: %s", self._protocol, self.other, to_write) + + try: + self.other.dataReceived(to_write) + except Exception as e: + logger.exception("Exception writing to protocol: %s", e) + return + + self.buffer = self.buffer[len(to_write) :] + if self.buffer and self.autoflush: + self._reactor.callLater(0.0, self.flush) + + if not self.buffer and self.disconnecting: + logger.info("FakeTransport: Buffer now empty, completing disconnect") + self.disconnected = True + + +def get_test_https_policy(): + """Get a test IPolicyForHTTPS which trusts the test CA cert + + Returns: + IPolicyForHTTPS + """ + ca_file = get_test_ca_cert_file() + with open(ca_file) as stream: + content = stream.read() + cert = Certificate.loadPEM(content) + trust_root = trustRootFromCertificates([cert]) + return BrowserLikePolicyForHTTPS(trustRoot=trust_root) + + +def get_test_ca_cert_file(): + """Get the path to the test CA cert + + The keypair is generated with: + + openssl genrsa -out ca.key 2048 + openssl req -new -x509 -key ca.key -days 3650 -out ca.crt \ + -subj '/CN=synapse test CA' + """ + return os.path.join(os.path.dirname(__file__), "tls/ca.crt") + + +def get_test_key_file(): + """get the path to the test key + + The key file is made with: + + openssl genrsa -out server.key 2048 + """ + return os.path.join(os.path.dirname(__file__), "tls/server.key") + + +cert_file_count = 0 + +CONFIG_TEMPLATE = b"""\ 
+[default] +basicConstraints = CA:FALSE +keyUsage=nonRepudiation, digitalSignature, keyEncipherment +subjectAltName = %(sanentries)s +""" + + +def create_test_cert_file(sanlist): + """build an x509 certificate file + + Args: + sanlist: list[bytes]: a list of subjectAltName values for the cert + + Returns: + str: the path to the file + """ + global cert_file_count + csr_filename = "server.csr" + cnf_filename = "server.%i.cnf" % (cert_file_count,) + cert_filename = "server.%i.crt" % (cert_file_count,) + cert_file_count += 1 + + # first build a CSR + subprocess.check_call( + [ + "openssl", + "req", + "-new", + "-key", + get_test_key_file(), + "-subj", + "/", + "-out", + csr_filename, + ] + ) + + # now a config file describing the right SAN entries + sanentries = b",".join(sanlist) + with open(cnf_filename, "wb") as f: + f.write(CONFIG_TEMPLATE % {b"sanentries": sanentries}) + + # finally the cert + ca_key_filename = os.path.join(os.path.dirname(__file__), "tls/ca.key") + ca_cert_filename = get_test_ca_cert_file() + subprocess.check_call( + [ + "openssl", + "x509", + "-req", + "-in", + csr_filename, + "-CA", + ca_cert_filename, + "-CAkey", + ca_key_filename, + "-set_serial", + "1", + "-extfile", + cnf_filename, + "-out", + cert_filename, + ] + ) + + return cert_filename + + +@implementer(IOpenSSLServerConnectionCreator) +class TestServerTLSConnectionFactory(object): + """An SSL connection creator which returns connections which present a certificate + signed by our test CA.""" + + def __init__(self, sanlist): + """ + Args: + sanlist: list[bytes]: a list of subjectAltName values for the cert + """ + self._cert_file = create_test_cert_file(sanlist) + + def serverConnectionForTLS(self, tlsProtocol): + ctx = SSL.Context(SSL.TLSv1_METHOD) + ctx.use_certificate_file(self._cert_file) + ctx.use_privatekey_file(get_test_key_file()) + return Connection(ctx, None) From 53015c70785dea441606d1504667d71c5b99eabc Mon Sep 17 00:00:00 2001 From: reivilibre 
<38398653+reivilibre@users.noreply.github.com> Date: Tue, 1 Sep 2020 16:28:39 +0100 Subject: [PATCH 04/44] Tests for the HTTP Proxy support (asyncio and common only) (#151) Co-authored-by: Patrick Cloke --- changelog.d/151.misc | 1 + tests/asyncio_test_helpers.py | 191 +++++++++++++++++++++++++++++++ tests/test_httpproxy_asyncio.py | 193 ++++++++++++++++++++++++++++++++ tests/test_proxy_url_parsing.py | 54 +++++++++ tests/testutils.py | 2 +- 5 files changed, 440 insertions(+), 1 deletion(-) create mode 100644 changelog.d/151.misc create mode 100644 tests/asyncio_test_helpers.py create mode 100644 tests/test_httpproxy_asyncio.py create mode 100644 tests/test_proxy_url_parsing.py diff --git a/changelog.d/151.misc b/changelog.d/151.misc new file mode 100644 index 00000000..3f5a3a8b --- /dev/null +++ b/changelog.d/151.misc @@ -0,0 +1 @@ +Add tests for HTTP Proxy support. diff --git a/tests/asyncio_test_helpers.py b/tests/asyncio_test_helpers.py new file mode 100644 index 00000000..9096f34d --- /dev/null +++ b/tests/asyncio_test_helpers.py @@ -0,0 +1,191 @@ +import logging +import types +from asyncio import AbstractEventLoop, transports +from asyncio.protocols import BaseProtocol, Protocol +from asyncio.transports import Transport +from contextvars import Context +from typing import Any, Callable, List, Optional, Tuple + +logger = logging.getLogger(__name__) + + +class TimelessEventLoopWrapper: + @property # type: ignore + def __class__(self): + """ + Fakes isinstance(this, AbstractEventLoop) so we can set_event_loop + without fail. 
+ """ + return self._wrapped_loop.__class__ + + def __init__(self, wrapped_loop: AbstractEventLoop): + self._wrapped_loop = wrapped_loop + self._time = 0.0 + self._to_be_called: List[Tuple[float, Any, Any, Any]] = [] + + def advance(self, time_delta: float): + target_time = self._time + time_delta + logger.debug( + "advancing from %f by %f (%d in queue)", + self._time, + time_delta, + len(self._to_be_called), + ) + while self._time < target_time and self._to_be_called: + # pop off the next callback from the queue + next_time, next_callback, args, _context = self._to_be_called[0] + if next_time > target_time: + # this isn't allowed to run yet + break + logger.debug("callback at %f on %r", next_time, next_callback) + self._to_be_called = self._to_be_called[1:] + self._time = next_time + next_callback(*args) + + # no more tasks can run now but advance to the time anyway + self._time = target_time + + def __getattr__(self, item: str): + """ + We use this to delegate other method calls to the real EventLoop. + """ + value = getattr(self._wrapped_loop, item) + if isinstance(value, types.MethodType): + # rebind this method to be called on us + # this makes the wrapped class use our overridden methods when + # available. + # we have to do this because methods are bound to the underlying + # event loop, which will call `self.call_later` or something + # which won't normally hit us because we are not an actual subtype. 
+ return types.MethodType(value.__func__, self) + else: + return value + + def call_later( + self, + delay: float, + callback: Callable, + *args: Any, + context: Optional[Context] = None, + ): + self.call_at(self._time + delay, callback, *args, context=context) + + def call_at( + self, + when: float, + callback: Callable, + *args: Any, + context: Optional[Context] = None, + ): + logger.debug(f"Calling {callback} at %f...", when) + self._to_be_called.append((when, callback, args, context)) + + # re-sort list in ascending time order + self._to_be_called.sort(key=lambda x: x[0]) + + def call_soon( + self, callback: Callable, *args: Any, context: Optional[Context] = None + ): + return self.call_later(0, callback, *args, context=context) + + def time(self) -> float: + return self._time + + +class MockTransport(Transport): + """ + A transport intended to be driven by tests. + Stores received data into a buffer. + """ + + def __init__(self): + # Holds bytes received + self.buffer = b"" + + # Whether we reached the end of file/stream + self.eofed = False + + # Whether the connection was aborted + self.aborted = False + + # The protocol attached to this transport + self.protocol = None + + # Whether this transport was closed + self.closed = False + + def reset_mock(self) -> None: + self.buffer = b"" + self.eofed = False + self.aborted = False + self.closed = False + + def is_reading(self) -> bool: + return True + + def pause_reading(self) -> None: + pass # NOP + + def resume_reading(self) -> None: + pass # NOP + + def set_write_buffer_limits(self, high: int = None, low: int = None) -> None: + pass # NOP + + def get_write_buffer_size(self) -> int: + """Return the current size of the write buffer.""" + raise NotImplementedError + + def write(self, data: bytes) -> None: + self.buffer += data + + def write_eof(self) -> None: + self.eofed = True + + def can_write_eof(self) -> bool: + return True + + def abort(self) -> None: + self.aborted = True + + def pretend_to_receive(self, 
data: bytes) -> None: + proto = self.get_protocol() + assert isinstance(proto, Protocol) + proto.data_received(data) + + def set_protocol(self, protocol: BaseProtocol) -> None: + self.protocol = protocol + + def get_protocol(self) -> BaseProtocol: + assert isinstance(self.protocol, BaseProtocol) + return self.protocol + + def close(self) -> None: + self.closed = True + + +class MockProtocol(Protocol): + """ + A protocol intended to be driven by tests. + Stores received data into a buffer. + """ + + def __init__(self): + self._to_transmit = b"" + self.received_bytes = b"" + self.transport = None + + def data_received(self, data: bytes) -> None: + self.received_bytes += data + + def connection_made(self, transport: transports.BaseTransport) -> None: + assert isinstance(transport, Transport) + self.transport = transport + if self._to_transmit: + transport.write(self._to_transmit) + + def write(self, data: bytes) -> None: + if self.transport: + self.transport.write(data) + else: + self._to_transmit += data diff --git a/tests/test_httpproxy_asyncio.py b/tests/test_httpproxy_asyncio.py new file mode 100644 index 00000000..e88189ae --- /dev/null +++ b/tests/test_httpproxy_asyncio.py @@ -0,0 +1,193 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import asyncio +from asyncio import AbstractEventLoop, BaseTransport, Protocol, Task +from typing import Optional, Tuple, cast + +from sygnal.exceptions import ProxyConnectError +from sygnal.helper.proxy.proxy_asyncio import HttpConnectProtocol + +from tests import testutils +from tests.asyncio_test_helpers import ( + MockProtocol, + MockTransport, + TimelessEventLoopWrapper, +) + + +class AsyncioHttpProxyTest(testutils.TestCase): + def config_setup(self, config): + super().config_setup(config) + config["apps"]["com.example.spqr"] = { + "type": "tests.test_pushgateway_api_v1.TestPushkin" + } + base_loop = asyncio.new_event_loop() + augmented_loop = TimelessEventLoopWrapper(base_loop) # type: ignore + asyncio.set_event_loop(cast(AbstractEventLoop, augmented_loop)) + + self.loop = augmented_loop + + def make_fake_proxy( + self, host: str, port: int, proxy_credentials: Optional[Tuple[str, str]] + ) -> Tuple[MockProtocol, MockTransport, "Task[Tuple[BaseTransport, Protocol]]"]: + # Task[Tuple[MockTransport, MockProtocol]] + # make a fake proxy + fake_proxy = MockTransport() + # make a fake protocol that we fancy using through the proxy + fake_protocol = MockProtocol() + # create a HTTP CONNECT proxy client protocol + http_connect_protocol = HttpConnectProtocol( + target_hostport=(host, port), + proxy_credentials=proxy_credentials, + protocol_factory=lambda: fake_protocol, + sslcontext=None, + loop=None, + ) + switch_over_task = asyncio.get_event_loop().create_task( + http_connect_protocol.switch_over_when_ready() + ) + # check the task is not somehow already marked as done before we even + # receive anything. + self.assertFalse(switch_over_task.done()) + # connect the proxy client to the proxy + fake_proxy.set_protocol(http_connect_protocol) + http_connect_protocol.connection_made(fake_proxy) + return fake_protocol, fake_proxy, switch_over_task + + def test_connect_no_credentials(self): + """ + Tests the proxy connection procedure when there is no basic auth. 
+ """ + host = "example.org" + port = 443 + proxy_credentials = None + fake_protocol, fake_proxy, switch_over_task = self.make_fake_proxy( + host, port, proxy_credentials + ) + + # Check that the proxy got the proper CONNECT request. + self.assertEqual(fake_proxy.buffer, b"CONNECT example.org:443 HTTP/1.0\r\n\r\n") + # Reset the proxy mock + fake_proxy.reset_mock() + + # pretend we got a happy response with some dangling bytes from the + # target protocol + fake_proxy.pretend_to_receive( + b"HTTP/1.0 200 Connection Established\r\n\r\n" + b"begin beep boop\r\n\r\n~~ :) ~~" + ) + + # advance event loop because we have to let coroutines be executed + self.loop.advance(1.0) + + # *now* we should have switched over from the HTTP CONNECT protocol + # to the user protocol (in our case, a MockProtocol). + self.assertTrue(switch_over_task.done()) + + transport, protocol = switch_over_task.result() + + # check it was our protocol that was returned + self.assertIs(protocol, fake_protocol) + + # check our protocol received exactly the bytes meant for it + self.assertEqual( + fake_protocol.received_bytes, b"begin beep boop\r\n\r\n~~ :) ~~" + ) + + def test_connect_correct_credentials(self): + """ + Tests the proxy connection procedure when there is basic auth. 
+ """ + host = "example.org" + port = 443 + proxy_credentials = ("user", "secret") + fake_protocol, fake_proxy, switch_over_task = self.make_fake_proxy( + host, port, proxy_credentials + ) + + # Check that the proxy got the proper CONNECT request with the + # correctly-encoded credentials + self.assertEqual( + fake_proxy.buffer, + b"CONNECT example.org:443 HTTP/1.0\r\n" + b"Proxy-Authorization: basic dXNlcjpzZWNyZXQ=\r\n\r\n", + ) + # Reset the proxy mock + fake_proxy.reset_mock() + + # pretend we got a happy response with some dangling bytes from the + # target protocol + fake_proxy.pretend_to_receive( + b"HTTP/1.0 200 Connection Established\r\n\r\n" + b"begin beep boop\r\n\r\n~~ :) ~~" + ) + + # advance event loop because we have to let coroutines be executed + self.loop.advance(1.0) + + # *now* we should have switched over from the HTTP CONNECT protocol + # to the user protocol (in our case, a MockProtocol). + self.assertTrue(switch_over_task.done()) + + transport, protocol = switch_over_task.result() + + # check it was our protocol that was returned + self.assertIs(protocol, fake_protocol) + + # check our protocol received exactly the bytes meant for it + self.assertEqual( + fake_protocol.received_bytes, b"begin beep boop\r\n\r\n~~ :) ~~" + ) + + def test_connect_failure(self): + """ + Test that our task fails properly when we cannot make a connection through + the proxy. + """ + host = "example.org" + port = 443 + proxy_credentials = ("user", "secret") + fake_protocol, fake_proxy, switch_over_task = self.make_fake_proxy( + host, port, proxy_credentials + ) + + # Check that the proxy got the proper CONNECT request with the + # correctly-encoded credentials. 
+ self.assertEqual( + fake_proxy.buffer, + b"CONNECT example.org:443 HTTP/1.0\r\n" + b"Proxy-Authorization: basic dXNlcjpzZWNyZXQ=\r\n\r\n", + ) + # Reset the proxy mock + fake_proxy.reset_mock() + + # For the sake of this test, pretend the credentials are incorrect so + # send a sad response with a HTML error page + fake_proxy.pretend_to_receive( + b"HTTP/1.0 401 Unauthorised\r\n\r\n... some error here ..." + ) + + # advance event loop because we have to let coroutines be executed + self.loop.advance(1.0) + + # *now* this future should have completed + self.assertTrue(switch_over_task.done()) + + # but we should have failed + self.assertIsInstance(switch_over_task.exception(), ProxyConnectError) + + # check our protocol did not receive anything, because it was an HTTP- + # level error, not actually a connection to our target. + self.assertEqual(fake_protocol.received_bytes, b"") diff --git a/tests/test_proxy_url_parsing.py b/tests/test_proxy_url_parsing.py new file mode 100644 index 00000000..21c70534 --- /dev/null +++ b/tests/test_proxy_url_parsing.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import unittest + +from sygnal.helper.proxy import HttpProxyUrl, decompose_http_proxy_url + + +class ProxyUrlTestCase(unittest.TestCase): + def test_decompose_http_proxy_url(self): + parts = decompose_http_proxy_url("http://example.org") + self.assertEqual(parts, HttpProxyUrl("example.org", 80, None)) + + parts = decompose_http_proxy_url("http://example.org:8080") + self.assertEqual(parts, HttpProxyUrl("example.org", 8080, None)) + + parts = decompose_http_proxy_url("http://bob:secretsquirrel@example.org") + self.assertEqual( + parts, HttpProxyUrl("example.org", 80, ("bob", "secretsquirrel")) + ) + + parts = decompose_http_proxy_url("http://bob:secretsquirrel@example.org:8080") + self.assertEqual( + parts, HttpProxyUrl("example.org", 8080, ("bob", "secretsquirrel")) + ) + + def test_decompose_username_only(self): + """ + We do not support usernames without passwords for now — this tests the + current behaviour, though (it ignores the username). + """ + + parts = decompose_http_proxy_url("http://bob@example.org:8080") + self.assertEqual(parts, HttpProxyUrl("example.org", 8080, None)) + + def test_decompose_http_proxy_url_failure(self): + # test that non-HTTP schemes raise an exception + self.assertRaises( + RuntimeError, lambda: decompose_http_proxy_url("ftp://example.org") + ) + + # test that the lack of a hostname raises an exception + self.assertRaises(RuntimeError, lambda: decompose_http_proxy_url("http://")) diff --git a/tests/testutils.py b/tests/testutils.py index cd43e092..681713b0 100644 --- a/tests/testutils.py +++ b/tests/testutils.py @@ -251,7 +251,7 @@ def dump_if_needed(payload): channel.process_request(b"POST", REQ_PATH, content) def all_channels_done(): - return all([channel.done for channel in channels]) + return all(channel.done for channel in channels) while not all_channels_done(): # we need to advance until the request has been finished From 393ad877adcc639486907c2a88a15f2f5037c444 Mon Sep 17 00:00:00 2001 From: Andrew Morgan 
<1342360+anoadragon453@users.noreply.github.com> Date: Wed, 7 Oct 2020 14:13:29 +0100 Subject: [PATCH 05/44] Add note about warning the world before making a release (#155) Otherwise we end up making many releases without anybody knowing. --- RELEASING.md | 5 +++++ changelog.d/155.doc | 1 + 2 files changed, 6 insertions(+) create mode 100644 changelog.d/155.doc diff --git a/RELEASING.md b/RELEASING.md index 66a5db6d..53a8f999 100644 --- a/RELEASING.md +++ b/RELEASING.md @@ -1,3 +1,6 @@ +0. Consider whether this release will affect any customers, including those on +EMS, and warn them beforehand - in case they need to upgrade quickly. + 1. Set a variable to the version number for convenience: ```sh ver=x.y.z @@ -28,3 +31,5 @@ ```sh xdg-open https://github.com/matrix-org/sygnal/releases/edit/v$ver ``` +1. Notify #sygnal:matrix.org, #synapse-dev:matrix.org and EMS that a new + release has been published. diff --git a/changelog.d/155.doc b/changelog.d/155.doc new file mode 100644 index 00000000..7b5867fe --- /dev/null +++ b/changelog.d/155.doc @@ -0,0 +1 @@ +Add a note to the releasing doc asking people to inform EMS and customers during the release process. From 08318c8d10a021fa9f6fcc6d5b8b335d26ded9ae Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Wed, 7 Oct 2020 14:18:15 +0100 Subject: [PATCH 06/44] Add platform troubleshooting documentation and sample configuration details (#154) A customer had an issue where they were using sandbox push tokens with the production server. I don't blame them, as this doesn't seem to have been documented anywhere. I've added the `platform` option to the sample config with some explanation, as well as a section in troubleshooting in case others fall down the same path. While adding to the sample config, I realised that we would eventually end up duplicating nearly every option if we didn't deduplicate early, so that's what I did. 
--- changelog.d/154.doc | 1 + docs/troubleshooting.md | 17 +++++++++++++++++ sygnal.yaml.sample | 33 +++++++++++++++++++-------------- 3 files changed, 37 insertions(+), 14 deletions(-) create mode 100644 changelog.d/154.doc diff --git a/changelog.d/154.doc b/changelog.d/154.doc new file mode 100644 index 00000000..6bd9858c --- /dev/null +++ b/changelog.d/154.doc @@ -0,0 +1 @@ +Add troubleshooting documentation and sample configuration about APNs pushkin's `platform` option. \ No newline at end of file diff --git a/docs/troubleshooting.md b/docs/troubleshooting.md index 8e58d645..a9f9982b 100644 --- a/docs/troubleshooting.md +++ b/docs/troubleshooting.md @@ -126,6 +126,23 @@ Instead of configuring Sygnal with your APNs secrets, you need to configure Firebase with your APNs secrets, and Sygnal with your Firebase secrets. +#### Common cause 3: Using sandbox tokens with the production APNs server + +Sygnal connects to the production APNs instance by default. This will return +`400 BadDeviceToken` if you send it a token intended for the sandbox APNs +server. + +Either use production tokens, or switch to the sandbox APNs server by setting: + +``` +com.example.myapp.ios: + type: apns + ... + platform: sandbox +``` + +in your sygnal config file. + ### App doesn't receive notifications when inactive If you want your application to be woken up to be able to process APNs messages diff --git a/sygnal.yaml.sample b/sygnal.yaml.sample index bdb6ec29..2a5c11c8 100644 --- a/sygnal.yaml.sample +++ b/sygnal.yaml.sample @@ -199,30 +199,24 @@ metrics: # For the type, you may specify a fully-qualified Python classname if desired. # apps: - # This is an example APNs push configuration using certificate authentication. + # This is an example APNs push configuration # #com.example.myapp.ios: # type: apns - # certfile: com.example.myApp_prod_APNS.pem # - # # This is the maximum number of in-flight requests *for this pushkin* - # # before additional notifications will be failed. 
- # # (This is a robustness measure to prevent one pushkin stacking up with - # # queued requests and saturating the inbound connection queue of a load - # # balancer or reverse proxy). - # # Defaults to 512 if unset. + # # Authentication # # - # #inflight_request_limit: 512 - - # This is an example APNs push configuration using key authentication. - # - #com.example.myapp2.ios: - # type: apns + # # Two methods of authentication to APNs are currently supported. + # # + # # You can authenticate using a key: # keyfile: my_key.p8 # key_id: MY_KEY_ID # team_id: MY_TEAM_ID # topic: MY_TOPIC # + # # Or, a certificate can be used instead: + # certfile: com.example.myApp_prod_APNS.pem + # # # This is the maximum number of in-flight requests *for this pushkin* # # before additional notifications will be failed. # # (This is a robustness measure to prevent one pushkin stacking up with @@ -231,12 +225,23 @@ apps: # # Defaults to 512 if unset. # # # #inflight_request_limit: 512 + # + # # Specifies whether to use the production or sandbox APNs server. Note that + # # sandbox tokens should only be used with the sandbox server and vice versa. + # # + # # Valid options are: + # # * production + # # * sandbox + # # + # # The default is 'production'. Uncomment to use the sandbox instance. + # #platform: sandbox # This is an example GCM/FCM push configuration. # #com.example.myapp.android: # type: gcm # api_key: your_api_key_for_gcm + # # # This is the maximum number of connections to GCM servers at any one time # # the default is 20. 
# #max_connections: 20 From 8060950481e20584ea01f8f4377da401d6ce22ba Mon Sep 17 00:00:00 2001 From: reivilibre <38398653+reivilibre@users.noreply.github.com> Date: Wed, 11 Nov 2020 13:53:43 +0000 Subject: [PATCH 07/44] Update troubleshooting document with 400 BadDeviceToken (#158) Signed-off-by: Olivier Wilkinson (reivilibre) --- changelog.d/158.doc | 1 + docs/troubleshooting.md | 24 +++++++++++++++--------- 2 files changed, 16 insertions(+), 9 deletions(-) create mode 100644 changelog.d/158.doc diff --git a/changelog.d/158.doc b/changelog.d/158.doc new file mode 100644 index 00000000..1ca3149c --- /dev/null +++ b/changelog.d/158.doc @@ -0,0 +1 @@ +Update troubleshooting document with suggestions for 400 BadDeviceToken. diff --git a/docs/troubleshooting.md b/docs/troubleshooting.md index a9f9982b..9b1a592d 100644 --- a/docs/troubleshooting.md +++ b/docs/troubleshooting.md @@ -126,7 +126,20 @@ Instead of configuring Sygnal with your APNs secrets, you need to configure Firebase with your APNs secrets, and Sygnal with your Firebase secrets. -#### Common cause 3: Using sandbox tokens with the production APNs server +### App doesn't receive notifications when inactive + +If you want your application to be woken up to be able to process APNs messages +received when your application is in the background, you need to set the +`content-available` flag in your pusher's default payload — see +[the notes for iOS applications](applications.md#ios-applications-beware). + + +### '400 BadDeviceToken' error + +If you get a bad device token error and you have doubled-checked the +token is correct, it is possible that you have used a token from the wrong 'environment', +such as a development token when Sygnal is configured to use the production +environment. Sygnal connects to the production APNs instance by default. 
This will return `400 BadDeviceToken` if you send it a token intended for the sandbox APNs @@ -141,14 +154,7 @@ com.example.myapp.ios: platform: sandbox ``` -in your sygnal config file. - -### App doesn't receive notifications when inactive - -If you want your application to be woken up to be able to process APNs messages -received when your application is in the background, you need to set the -`content-available` flag in your pusher's default payload — see -[the notes for iOS applications](applications.md#ios-applications-beware). +in your Sygnal config file. # Appendices From ab87ab821159e4a0c8d2c55aa8dfcc0b1200a341 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Fri, 11 Dec 2020 12:26:56 +0000 Subject: [PATCH 08/44] Add 'max_connections' as an understood config field to gcmpushkin (#157) --- changelog.d/157.bugfix | 1 + sygnal/gcmpushkin.py | 1 + 2 files changed, 2 insertions(+) create mode 100644 changelog.d/157.bugfix diff --git a/changelog.d/157.bugfix b/changelog.d/157.bugfix new file mode 100644 index 00000000..df634230 --- /dev/null +++ b/changelog.d/157.bugfix @@ -0,0 +1 @@ +Fix erroneous warning log line when setting the `max_connections` option in a GCM app config. diff --git a/sygnal/gcmpushkin.py b/sygnal/gcmpushkin.py index 81473092..a8748031 100644 --- a/sygnal/gcmpushkin.py +++ b/sygnal/gcmpushkin.py @@ -97,6 +97,7 @@ class GcmPushkin(ConcurrencyLimitedPushkin): "type", "api_key", "fcm_options", + "max_connections", } | ConcurrencyLimitedPushkin.UNDERSTOOD_CONFIG_FIELDS def __init__(self, name, sygnal, config, canonical_reg_id_store): From d99867b51192de4beebbcd3b243fa44fd189207f Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Tue, 16 Mar 2021 07:14:11 -0400 Subject: [PATCH 09/44] Fix type hints due to changes in Twisted 21.2.0. (#178) Twisted 21.2.0 added type hints which showed flaws in the type hints of Sygnal. Those are fixed to be consistent with Twisted. 
--- changelog.d/178.misc | 1 + mypy.ini | 1 + sygnal/gcmpushkin.py | 25 +++++++++-------- .../proxy/connectproxyclient_twisted.py | 27 ++++++++++--------- sygnal/helper/proxy/proxyagent_twisted.py | 5 ++-- tests/testutils.py | 4 +++ 6 files changed, 35 insertions(+), 28 deletions(-) create mode 100644 changelog.d/178.misc diff --git a/changelog.d/178.misc b/changelog.d/178.misc new file mode 100644 index 00000000..f26f3b54 --- /dev/null +++ b/changelog.d/178.misc @@ -0,0 +1 @@ +Fix type hints due to Twisted upgrade. diff --git a/mypy.ini b/mypy.ini index ed90a598..45ccfacc 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1,4 +1,5 @@ [mypy] +plugins = mypy_zope:plugin check_untyped_defs = True show_error_codes = True show_traceback = True diff --git a/sygnal/gcmpushkin.py b/sygnal/gcmpushkin.py index a8748031..1967dd37 100644 --- a/sygnal/gcmpushkin.py +++ b/sygnal/gcmpushkin.py @@ -156,8 +156,10 @@ async def create(cls, name, sygnal, config): an instance of this Pushkin """ logger.debug("About to set up CanonicalRegId Store") - canonical_reg_id_store = CanonicalRegIdStore() - await canonical_reg_id_store.setup(sygnal.database, sygnal.database_engine) + canonical_reg_id_store = CanonicalRegIdStore( + sygnal.database, sygnal.database_engine + ) + await canonical_reg_id_store.setup() logger.debug("Finished setting up CanonicalRegId Store") return cls(name, sygnal, config, canonical_reg_id_store) @@ -468,26 +470,23 @@ class CanonicalRegIdStore(object): ); """ - def __init__(self): - self.db: ConnectionPool = None - self.engine = None - - async def setup(self, db, engine): + def __init__(self, db: ConnectionPool, engine: str): """ - Prepares, if necessary, the database for storing canonical registration IDs. - - Separate method from the constructor because we wait for an async request - to complete, so it must be an `async def` method. - Args: db (adbapi.ConnectionPool): database to prepare engine (str): Database engine to use. Shoud be either "sqlite" or "postgresql". 
- """ self.db = db self.engine = engine + async def setup(self): + """ + Prepares, if necessary, the database for storing canonical registration IDs. + + Separate method from the constructor because we wait for an async request + to complete, so it must be an `async def` method. + """ await self.db.runOperation(self.TABLE_CREATE_QUERY) async def set_canonical_id(self, reg_id, canonical_reg_id): diff --git a/sygnal/helper/proxy/connectproxyclient_twisted.py b/sygnal/helper/proxy/connectproxyclient_twisted.py index bf6845e2..1c24bc47 100644 --- a/sygnal/helper/proxy/connectproxyclient_twisted.py +++ b/sygnal/helper/proxy/connectproxyclient_twisted.py @@ -23,8 +23,8 @@ from twisted.internet import defer, protocol from twisted.internet.base import ReactorBase from twisted.internet.defer import Deferred -from twisted.internet.interfaces import IProtocol, IStreamClientEndpoint -from twisted.internet.protocol import connectionDone +from twisted.internet.interfaces import IProtocolFactory, IStreamClientEndpoint +from twisted.internet.protocol import Protocol, connectionDone from twisted.web import http from zope.interface import implementer @@ -71,7 +71,8 @@ def __init__( def __repr__(self): return "" % (self._proxy_endpoint,) - def connect(self, protocolFactory: protocol.ClientFactory): + def connect(self, protocolFactory: IProtocolFactory): + assert isinstance(protocolFactory, protocol.ClientFactory) f = HTTPProxiedClientFactory( self._host, self._port, self._proxy_auth, protocolFactory ) @@ -90,11 +91,11 @@ class HTTPProxiedClientFactory(protocol.ClientFactory): connection. 
Args: - dst_host (bytes): hostname that we want to CONNECT to - dst_port (int): port that we want to connect to - proxy_auth (tuple): None or tuple of (username, pasword) for HTTP basic proxy + dst_host: hostname that we want to CONNECT to + dst_port: port that we want to connect to + proxy_auth: None or tuple of (username, pasword) for HTTP basic proxy authentication - wrapped_factory (protocol.ClientFactory): The original Factory + wrapped_factory: The original Factory """ def __init__( @@ -141,18 +142,18 @@ class HTTPConnectProtocol(protocol.Protocol): """Protocol that wraps an existing Protocol to do a CONNECT handshake at connect Args: - host (bytes): The original HTTP(s) hostname or IPv4 or IPv6 address literal + host: The original HTTP(s) hostname or IPv4 or IPv6 address literal to put in the CONNECT request - port (int): The original HTTP(s) port to put in the CONNECT request + port: The original HTTP(s) port to put in the CONNECT request - proxy_auth (tuple): None or tuple of (username, pasword) for HTTP basic proxy + proxy_auth: None or tuple of (username, pasword) for HTTP basic proxy authentication - wrapped_protocol (interfaces.IProtocol): the original protocol (probably + wrapped_protocol: the original protocol (probably HTTPChannel or TLSMemoryBIOProtocol, but could be anything really) - connected_deferred (Deferred): a Deferred which will be callbacked with + connected_deferred: a Deferred which will be callbacked with wrapped_protocol when the CONNECT completes """ @@ -161,7 +162,7 @@ def __init__( host: bytes, port: int, proxy_auth: Optional[Tuple[str, str]], - wrapped_protocol: IProtocol, + wrapped_protocol: Protocol, connected_deferred: Deferred, ): self.host = host diff --git a/sygnal/helper/proxy/proxyagent_twisted.py b/sygnal/helper/proxy/proxyagent_twisted.py index d572d395..8da2b413 100644 --- a/sygnal/helper/proxy/proxyagent_twisted.py +++ b/sygnal/helper/proxy/proxyagent_twisted.py @@ -22,6 +22,7 @@ from twisted.internet import defer from 
twisted.internet.endpoints import HostnameEndpoint, wrapClientTLS +from twisted.internet.interfaces import IStreamClientEndpoint from twisted.python.failure import Failure from twisted.web.client import URI, BrowserLikePolicyForHTTPS, _AgentBase from twisted.web.error import SchemeNotSupported @@ -81,7 +82,7 @@ def __init__( self.proxy_endpoint = HostnameEndpoint( reactor, parsed_url.hostname, parsed_url.port, **self._endpoint_kwargs - ) + ) # type: Optional[HostnameEndpoint] else: self.proxy_endpoint = None @@ -127,7 +128,7 @@ def request(self, method, uri, headers=None, bodyProducer=None): # Cache *all* connections under the same key, since we are only # connecting to a single destination, the proxy: pool_key = ("http-proxy", self.proxy_endpoint) - endpoint = self.proxy_endpoint + endpoint = self.proxy_endpoint # type: IStreamClientEndpoint request_path = uri elif parsed_uri.scheme == b"https" and self.proxy_endpoint: endpoint = HTTPConnectProxyEndpoint( diff --git a/tests/testutils.py b/tests/testutils.py index 681713b0..6b5e1d51 100644 --- a/tests/testutils.py +++ b/tests/testutils.py @@ -285,6 +285,10 @@ def getHostByName(name, timeout=None): self.nameResolver = SimpleResolverComplexifier(FakeResolver()) + def installNameResolver(self, resolver): + # It is not expected that this gets called. + raise RuntimeError(resolver) + def callFromThread(self, function, *args): self.callLater(0, function, *args) From 5e259d88f2422d9022765f99786d613ca3e3f1ce Mon Sep 17 00:00:00 2001 From: Bruno Windels Date: Wed, 17 Mar 2021 19:32:57 +0000 Subject: [PATCH 10/44] Add experimental support for WebPush. 
(#177) --- changelog.d/177.feature | 1 + docs/applications.md | 63 +++++++++ mypy.ini | 6 + setup.py | 2 + sygnal/webpushpushkin.py | 274 +++++++++++++++++++++++++++++++++++++++ 5 files changed, 346 insertions(+) create mode 100644 changelog.d/177.feature create mode 100644 sygnal/webpushpushkin.py diff --git a/changelog.d/177.feature b/changelog.d/177.feature new file mode 100644 index 00000000..a5da3497 --- /dev/null +++ b/changelog.d/177.feature @@ -0,0 +1 @@ +Add experimental support for WebPush pushkins. \ No newline at end of file diff --git a/docs/applications.md b/docs/applications.md index 7441135b..25584925 100644 --- a/docs/applications.md +++ b/docs/applications.md @@ -203,3 +203,66 @@ within FCM's limit. Please also note that some fields will be unavailable if you registered a pusher with `event_id_only` format. +### WebPush + +#### Setup & configuration + +In the sygnal virtualenv, generate the server key pair by running +`vapid --gen --applicationServerKey`. This will generate a `private_key.pem` +(which you'll refer to in the config file with `vapid_private_key`) +and `public_key.pem` file, and also string labeled `Application Server Key`. + +You'll copy the Application Server Key to your web application to subscribe +to the push manager: + +```js +serviceWorkerRegistration.pushManager.subscribe({ + userVisibleOnly: true, + applicationServerKey: "...", +}); +``` + +You also need to set an e-mail address in `vapid_contact_email` in the config file, +where the push gateway operator can reach you in case they need to notify you +about your usage of their API. + +#### Push key and expected push data + +In your web application, [the push manager subscribe method](https://developer.mozilla.org/en-US/docs/Web/API/PushManager/subscribe) +will return +[a subscription](https://developer.mozilla.org/en-US/docs/Web/API/PushSubscription) +with an `endpoint` and `keys` property, the latter containing a `p256dh` and `auth` +property. 
The `p256dh` key is used as the push key, and the push data is expected +`endpoint` and `auth`. You can also set `default_payload` in the push data; +any properties set in it will be present in the push messages you receive, +so it can be used to pass identifiers specific to your client +(like which account the notification is for). + +Also note that because you can only have one push subscription per service worker, +and hence per origin, you might create pushers for different accounts with the same +p256dh push key. To prevent the server from removing other pushers with the same +push key for your other users, you should set `append` to `true` when uploading +your pusher. + +#### Notification format + +The notification as received by your web application will contain the following keys +(assuming non-null values were sent by the homeserver). These are the +same as specified in [the push gateway spec](https://matrix.org/docs/spec/push_gateway/r0.1.0#post-matrix-push-v1-notify), +but the sub-keys of `counts` (`unread` and `missed_calls`) are flattened into +the notification object. 
+ +``` +room_id +room_name +room_alias +membership +event_id +sender +sender_display_name +user_is_target +type +content +unread +missed_calls +``` diff --git a/mypy.ini b/mypy.ini index 45ccfacc..54aa196f 100644 --- a/mypy.ini +++ b/mypy.ini @@ -43,3 +43,9 @@ ignore_missing_imports = True [mypy-OpenSSL.*] ignore_missing_imports = True + +[mypy-py_vapid] +ignore_missing_imports = True + +[mypy-pywebpush] +ignore_missing_imports = True diff --git a/setup.py b/setup.py index 05f5789a..0e8dcab2 100755 --- a/setup.py +++ b/setup.py @@ -49,6 +49,8 @@ def read(fname): "idna>=2.8", "psycopg2>=2.8.4", "importlib_metadata", + "pywebpush>=1.13.0", + "py-vapid>=1.7.0", ], long_description=read("README.rst"), ) diff --git a/sygnal/webpushpushkin.py b/sygnal/webpushpushkin.py new file mode 100644 index 00000000..6ed57d9a --- /dev/null +++ b/sygnal/webpushpushkin.py @@ -0,0 +1,274 @@ +# -*- coding: utf-8 -*- +# Copyright 2021 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import json +import logging +import os.path +from io import BytesIO + +from prometheus_client import Gauge, Histogram +from py_vapid import Vapid, VapidException +from pywebpush import webpush +from twisted.internet.defer import DeferredSemaphore +from twisted.web.client import FileBodyProducer, HTTPConnectionPool, readBody +from twisted.web.http_headers import Headers + +from sygnal.helper.context_factory import ClientTLSOptionsFactory +from sygnal.helper.proxy.proxyagent_twisted import ProxyAgent + +from .exceptions import PushkinSetupException +from .notifications import ConcurrencyLimitedPushkin + +QUEUE_TIME_HISTOGRAM = Histogram( + "sygnal_webpush_queue_time", + "Time taken waiting for a connection to WebPush endpoint", +) + +SEND_TIME_HISTOGRAM = Histogram( + "sygnal_webpush_request_time", "Time taken to send HTTP request to WebPush endpoint" +) + +PENDING_REQUESTS_GAUGE = Gauge( + "sygnal_pending_webpush_requests", + "Number of WebPush requests waiting for a connection", +) + +ACTIVE_REQUESTS_GAUGE = Gauge( + "sygnal_active_webpush_requests", "Number of WebPush requests in flight" +) + +logger = logging.getLogger(__name__) + +DEFAULT_MAX_CONNECTIONS = 20 + + +class WebpushPushkin(ConcurrencyLimitedPushkin): + """ + Pushkin that relays notifications to Google/Firebase Cloud Messaging. 
+ """ + + UNDERSTOOD_CONFIG_FIELDS = { + "type", + "max_connections", + "vapid_private_key", + "vapid_contact_email", + } | ConcurrencyLimitedPushkin.UNDERSTOOD_CONFIG_FIELDS + + def __init__(self, name, sygnal, config): + super(WebpushPushkin, self).__init__(name, sygnal, config) + + nonunderstood = self.cfg.keys() - self.UNDERSTOOD_CONFIG_FIELDS + if nonunderstood: + logger.warning( + "The following configuration fields are not understood: %s", + nonunderstood, + ) + + self.http_pool = HTTPConnectionPool(reactor=sygnal.reactor) + self.max_connections = self.get_config( + "max_connections", DEFAULT_MAX_CONNECTIONS + ) + self.connection_semaphore = DeferredSemaphore(self.max_connections) + self.http_pool.maxPersistentPerHost = self.max_connections + + tls_client_options_factory = ClientTLSOptionsFactory() + + # use the Sygnal global proxy configuration + proxy_url = sygnal.config.get("proxy") + + self.http_agent = ProxyAgent( + reactor=sygnal.reactor, + pool=self.http_pool, + contextFactory=tls_client_options_factory, + proxy_url_str=proxy_url, + ) + self.http_agent_wrapper = HttpAgentWrapper(self.http_agent) + + privkey_filename = self.get_config("vapid_private_key") + if not privkey_filename: + raise PushkinSetupException("'vapid_private_key' not set in config") + if not os.path.exists(privkey_filename): + raise PushkinSetupException("path in 'vapid_private_key' does not exist") + try: + self.vapid_private_key = Vapid.from_file(private_key_file=privkey_filename) + except VapidException as e: + raise PushkinSetupException("invalid 'vapid_private_key' file") from e + vapid_contact_email = self.get_config("vapid_contact_email") + if not vapid_contact_email: + raise PushkinSetupException("'vapid_contact_email' not set in config") + self.vapid_claims = {"sub": "mailto:{}".format(vapid_contact_email)} + + async def _dispatch_notification_unlimited(self, n, device, context): + p256dh = device.pushkey + if not isinstance(device.data, dict): + logger.warn( + "device.data 
is not a dict for pushkey %s, rejecting pushkey", p256dh + ) + return [device.pushkey] + + endpoint = device.data.get("endpoint") + auth = device.data.get("auth") + + if not p256dh or not endpoint or not auth: + logger.warn( + "subscription info incomplete " + + "(p256dh: %s, endpoint: %s, auth: %s), rejecting pushkey", + p256dh, + endpoint, + auth, + ) + return [device.pushkey] + + subscription_info = { + "endpoint": endpoint, + "keys": {"p256dh": p256dh, "auth": auth}, + } + payload = WebpushPushkin._build_payload(n, device) + data = json.dumps(payload) + + # we use the semaphore to actually limit the number of concurrent + # requests, since the HTTPConnectionPool will actually just lead to more + # requests being created but not pooled – it does not perform limiting. + with QUEUE_TIME_HISTOGRAM.time(): + with PENDING_REQUESTS_GAUGE.track_inprogress(): + await self.connection_semaphore.acquire() + + try: + with SEND_TIME_HISTOGRAM.time(): + with ACTIVE_REQUESTS_GAUGE.track_inprogress(): + response_wrapper = webpush( + subscription_info=subscription_info, + data=data, + vapid_private_key=self.vapid_private_key, + vapid_claims=self.vapid_claims, + requests_session=self.http_agent_wrapper, + ) + response = await response_wrapper.deferred + await readBody(response) + finally: + self.connection_semaphore.release() + + # assume 4xx is permanent and 5xx is temporary + if 400 <= response.code < 500: + return [device.pushkey] + return [] + + @staticmethod + def _build_payload(n, device): + """ + Build the payload data to be sent. + + Args: + n: Notification to build the payload for. + device (Device): Device information to which the constructed payload + will be sent. 
+ + Returns: + JSON-compatible dict + """ + payload = {} + + default_payload = device.data.get("default_payload") + if isinstance(default_payload, dict): + payload.update(default_payload) + + for attr in [ + "room_id", + "room_name", + "room_alias", + "membership", + "event_id", + "sender", + "sender_display_name", + "user_is_target", + "type", + "content", + ]: + value = getattr(n, attr, None) + if value: + payload[attr] = value + + counts = getattr(n, "counts", None) + if counts is not None: + for attr in ["unread", "missed_calls"]: + count_value = getattr(counts, attr, None) + if count_value is not None: + payload[attr] = count_value + + return payload + + +class HttpAgentWrapper: + """ + Provide a post method that matches the API expected from pywebpush. + """ + + def __init__(self, http_agent): + self.http_agent = http_agent + + def post(self, endpoint, data, headers, timeout): + """ + Convert the requests-like API to a Twisted API call. + + Args: + endpoint (str): + The full http url to post to + data (bytes): + the (encrypted) binary body of the request + headers (py_vapid.CaseInsensitiveDict): + A (costume) dictionary with the headers. + timeout (int) + Ignored for now + """ + body_producer = FileBodyProducer(BytesIO(data)) + # Convert the headers to the camelcase version. + headers = { + b"User-Agent": ["sygnal"], + b"Content-Encoding": [headers["content-encoding"]], + b"Authorization": [headers["authorization"]], + b"TTL": [headers["ttl"]], + } + deferred = self.http_agent.request( + b"POST", + endpoint.encode(), + headers=Headers(headers), + bodyProducer=body_producer, + ) + return HttpResponseWrapper(deferred) + + +class HttpResponseWrapper: + """ + Provide a response object that matches the API expected from pywebpush. + pywebpush expects a synchronous API, while we use an asynchronous API. + + To keep pywebpush happy we present it with some hardcoded values that + make its assertions pass while the async network call is happening + in the background. 
+ + Attributes: + deferred (Deferred): + The deferred to await the actual response after calling pywebpush. + status_code (int): + Defined to be 200 so the pywebpush check to see if is below 202 + passes. + text (str): + Set to None as pywebpush references this field for its logging. + """ + + status_code = 200 + text = None + + def __init__(self, deferred): + self.deferred = deferred From 5c6dfcfb05d9f79757fb3cced7aac8edeafaf720 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Thu, 18 Mar 2021 11:03:18 -0400 Subject: [PATCH 11/44] Enable reconnection in DB pool (#179) --- changelog.d/179.bugfix | 1 + sygnal/sygnal.py | 8 +++++++- 2 files changed, 8 insertions(+), 1 deletion(-) create mode 100644 changelog.d/179.bugfix diff --git a/changelog.d/179.bugfix b/changelog.d/179.bugfix new file mode 100644 index 00000000..63a6448c --- /dev/null +++ b/changelog.d/179.bugfix @@ -0,0 +1 @@ +Fix bug where Sygnal would not recover after losing connection to the database. diff --git a/sygnal/sygnal.py b/sygnal/sygnal.py index 182e860a..25cb1fef 100644 --- a/sygnal/sygnal.py +++ b/sygnal/sygnal.py @@ -149,9 +149,15 @@ def __init__(self, config, custom_reactor, tracer=opentracing.tracer): if db_name == "psycopg2": logger.info("Using postgresql database") + + # By default enable `cp_reconnect`. We need to fiddle with db_args in case + # someone has explicitly set `cp_reconnect`. 
+ db_args = dict(config["database"].get("args", {})) + db_args.setdefault("cp_reconnect", True) + self.database_engine = "postgresql" self.database = ConnectionPool( - "psycopg2", cp_reactor=self.reactor, **config["database"].get("args") + "psycopg2", cp_reactor=self.reactor, **db_args ) elif db_name == "sqlite3": logger.info("Using sqlite database") From ad135847bd54ac9e4cae323f0c97f7f1945cd11e Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Thu, 18 Mar 2021 16:34:17 +0000 Subject: [PATCH 12/44] Initialise sygnal_inflight_request_limit_drop before it's incremented (#172) --- changelog.d/172.bugfix | 1 + sygnal/notifications.py | 10 +++++++--- 2 files changed, 8 insertions(+), 3 deletions(-) create mode 100644 changelog.d/172.bugfix diff --git a/changelog.d/172.bugfix b/changelog.d/172.bugfix new file mode 100644 index 00000000..3154438d --- /dev/null +++ b/changelog.d/172.bugfix @@ -0,0 +1 @@ +Fixes a bug where the `sygnal_inflight_request_limit_drop` metric would not appear in prometheus until requests were actually dropped. \ No newline at end of file diff --git a/sygnal/notifications.py b/sygnal/notifications.py index 81f47b24..65769ef3 100644 --- a/sygnal/notifications.py +++ b/sygnal/notifications.py @@ -159,13 +159,17 @@ def __init__(self, name: str, sygnal: "Sygnal", config: Dict[str, Any]): ) self._concurrent_now = 0 + # Grab an instance of the dropped request counter given our pushkin name. + # Note this ensures the counter appears in metrics even if it hasn't yet + # been incremented. 
+ dropped_requests = ConcurrencyLimitedPushkin.RATELIMITING_DROPPED_REQUESTS + self.dropped_requests_counter = dropped_requests.labels(pushkin=name) + async def dispatch_notification( self, n: Notification, device: Device, context: "NotificationContext" ) -> List[str]: if self._concurrent_now >= self._concurrent_limit: - ConcurrencyLimitedPushkin.RATELIMITING_DROPPED_REQUESTS.labels( - pushkin=self.name - ).inc() + self.dropped_requests_counter.inc() raise NotificationDispatchException( "Too many in-flight requests for this pushkin. " "(Something is wrong and Sygnal is struggling to keep up!)" From 100a8fdb683da4efb5b2d8a66dfe3feeb0e24361 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Thu, 18 Mar 2021 16:34:27 +0000 Subject: [PATCH 13/44] Remove useless log line (#174) --- changelog.d/174.misc | 1 + sygnal/apnspushkin.py | 2 -- 2 files changed, 1 insertion(+), 2 deletions(-) create mode 100644 changelog.d/174.misc diff --git a/changelog.d/174.misc b/changelog.d/174.misc new file mode 100644 index 00000000..efc229e0 --- /dev/null +++ b/changelog.d/174.misc @@ -0,0 +1 @@ +Remove extraneous log line. \ No newline at end of file diff --git a/sygnal/apnspushkin.py b/sygnal/apnspushkin.py index 1fa0f5e0..ea670c85 100644 --- a/sygnal/apnspushkin.py +++ b/sygnal/apnspushkin.py @@ -268,8 +268,6 @@ async def _dispatch_notification_unlimited(self, n, device, context): for retry_number in range(self.MAX_TRIES): try: - log.debug("Trying") - span_tags = {"retry_num": retry_number} with self.sygnal.tracer.start_span( From e7198f8f996b0a4bc367c6aa0908b42460493e99 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Fri, 19 Mar 2021 09:22:50 -0400 Subject: [PATCH 14/44] Update gitignore. 
--- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 32f6cd34..0cdf0be7 100644 --- a/.gitignore +++ b/.gitignore @@ -11,3 +11,4 @@ _trial_temp* /build /dist /.tox +/.python-version From e7036afd7c4cc252e8b8082d6d4803d7552fae63 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Fri, 19 Mar 2021 09:31:40 -0400 Subject: [PATCH 15/44] 0.9.0 --- CHANGELOG.md | 35 +++++++++++++++++++++++++++++++++++ changelog.d/149.misc | 1 - changelog.d/150.doc | 1 - changelog.d/151.misc | 1 - changelog.d/152.misc | 1 - changelog.d/154.doc | 1 - changelog.d/155.doc | 1 - changelog.d/157.bugfix | 1 - changelog.d/158.doc | 1 - changelog.d/172.bugfix | 1 - changelog.d/174.misc | 1 - changelog.d/177.feature | 1 - changelog.d/178.misc | 1 - changelog.d/179.bugfix | 1 - 14 files changed, 35 insertions(+), 13 deletions(-) delete mode 100644 changelog.d/149.misc delete mode 100644 changelog.d/150.doc delete mode 100644 changelog.d/151.misc delete mode 100644 changelog.d/152.misc delete mode 100644 changelog.d/154.doc delete mode 100644 changelog.d/155.doc delete mode 100644 changelog.d/157.bugfix delete mode 100644 changelog.d/158.doc delete mode 100644 changelog.d/172.bugfix delete mode 100644 changelog.d/174.misc delete mode 100644 changelog.d/177.feature delete mode 100644 changelog.d/178.misc delete mode 100644 changelog.d/179.bugfix diff --git a/CHANGELOG.md b/CHANGELOG.md index 61297081..07e52a44 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,38 @@ +Sygnal 0.9.0 (2021-03-19) +========================= + +Features +-------- + +- Add experimental support for WebPush pushkins. ([\#177](https://github.com/matrix-org/sygnal/issues/177)) + + +Bugfixes +-------- + +- Fix erroneous warning log line when setting the `max_connections` option in a GCM app config. 
([\#157](https://github.com/matrix-org/sygnal/issues/157)) +- Fixes a bug where the `sygnal_inflight_request_limit_drop` metric would not appear in prometheus until requests were actually dropped. ([\#172](https://github.com/matrix-org/sygnal/issues/172)) +- Fix bug where Sygnal would not recover after losing connection to the database. ([\#179](https://github.com/matrix-org/sygnal/issues/179)) + + +Improved Documentation +---------------------- + +- Add preliminary documentation (Troubleshooting and Application Developers' Notes). ([\#150](https://github.com/matrix-org/sygnal/issues/150)) +- Add troubleshooting documentation and sample configuration about APNs pushkin's `platform` option. ([\#154](https://github.com/matrix-org/sygnal/issues/154)) +- Add a note to the releasing doc asking people to inform EMS and customers during the release process. ([\#155](https://github.com/matrix-org/sygnal/issues/155)) +- Update troubleshooting document with suggestions for 400 BadDeviceToken. ([\#158](https://github.com/matrix-org/sygnal/issues/158)) + + +Internal Changes +---------------- + +- Remove a source of nuisance, noisy (but otherwise harmless) exceptions in tests. ([\#149](https://github.com/matrix-org/sygnal/issues/149)) +- Add tests for HTTP Proxy support. ([\#151](https://github.com/matrix-org/sygnal/issues/151), [\#152](https://github.com/matrix-org/sygnal/issues/152)) +- Remove extraneous log line. ([\#174](https://github.com/matrix-org/sygnal/issues/174)) +- Fix type hints due to Twisted upgrade. ([\#178](https://github.com/matrix-org/sygnal/issues/178)) + + Sygnal 0.8.2 (2020-08-06) ========================= diff --git a/changelog.d/149.misc b/changelog.d/149.misc deleted file mode 100644 index b782af26..00000000 --- a/changelog.d/149.misc +++ /dev/null @@ -1 +0,0 @@ -Remove a source of nuisance, noisy (but otherwise harmless) exceptions in tests. 
diff --git a/changelog.d/150.doc b/changelog.d/150.doc deleted file mode 100644 index 783994f8..00000000 --- a/changelog.d/150.doc +++ /dev/null @@ -1 +0,0 @@ -Add preliminary documentation (Troubleshooting and Application Developers' Notes). diff --git a/changelog.d/151.misc b/changelog.d/151.misc deleted file mode 100644 index 3f5a3a8b..00000000 --- a/changelog.d/151.misc +++ /dev/null @@ -1 +0,0 @@ -Add tests for HTTP Proxy support. diff --git a/changelog.d/152.misc b/changelog.d/152.misc deleted file mode 100644 index 3f5a3a8b..00000000 --- a/changelog.d/152.misc +++ /dev/null @@ -1 +0,0 @@ -Add tests for HTTP Proxy support. diff --git a/changelog.d/154.doc b/changelog.d/154.doc deleted file mode 100644 index 6bd9858c..00000000 --- a/changelog.d/154.doc +++ /dev/null @@ -1 +0,0 @@ -Add troubleshooting documentation and sample configuration about APNs pushkin's `platform` option. \ No newline at end of file diff --git a/changelog.d/155.doc b/changelog.d/155.doc deleted file mode 100644 index 7b5867fe..00000000 --- a/changelog.d/155.doc +++ /dev/null @@ -1 +0,0 @@ -Add a note to the releasing doc asking people to inform EMS and customers during the release process. diff --git a/changelog.d/157.bugfix b/changelog.d/157.bugfix deleted file mode 100644 index df634230..00000000 --- a/changelog.d/157.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix erroneous warning log line when setting the `max_connections` option in a GCM app config. diff --git a/changelog.d/158.doc b/changelog.d/158.doc deleted file mode 100644 index 1ca3149c..00000000 --- a/changelog.d/158.doc +++ /dev/null @@ -1 +0,0 @@ -Update troubleshooting document with suggestions for 400 BadDeviceToken. diff --git a/changelog.d/172.bugfix b/changelog.d/172.bugfix deleted file mode 100644 index 3154438d..00000000 --- a/changelog.d/172.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fixes a bug where the `sygnal_inflight_request_limit_drop` metric would not appear in prometheus until requests were actually dropped. 
\ No newline at end of file diff --git a/changelog.d/174.misc b/changelog.d/174.misc deleted file mode 100644 index efc229e0..00000000 --- a/changelog.d/174.misc +++ /dev/null @@ -1 +0,0 @@ -Remove extraneous log line. \ No newline at end of file diff --git a/changelog.d/177.feature b/changelog.d/177.feature deleted file mode 100644 index a5da3497..00000000 --- a/changelog.d/177.feature +++ /dev/null @@ -1 +0,0 @@ -Add experimental support for WebPush pushkins. \ No newline at end of file diff --git a/changelog.d/178.misc b/changelog.d/178.misc deleted file mode 100644 index f26f3b54..00000000 --- a/changelog.d/178.misc +++ /dev/null @@ -1 +0,0 @@ -Fix type hints due to Twisted upgrade. diff --git a/changelog.d/179.bugfix b/changelog.d/179.bugfix deleted file mode 100644 index 63a6448c..00000000 --- a/changelog.d/179.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix bug where Sygnal would not recover after losing connection to the database. From 6b2e3648afc45c811a316829cdf89a1f44cfe574 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Fri, 19 Mar 2021 09:34:22 -0400 Subject: [PATCH 16/44] Tweak changelog. --- CHANGELOG.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 07e52a44..6c8b84c9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,23 +11,23 @@ Bugfixes -------- - Fix erroneous warning log line when setting the `max_connections` option in a GCM app config. ([\#157](https://github.com/matrix-org/sygnal/issues/157)) -- Fixes a bug where the `sygnal_inflight_request_limit_drop` metric would not appear in prometheus until requests were actually dropped. ([\#172](https://github.com/matrix-org/sygnal/issues/172)) +- Fix bug where the `sygnal_inflight_request_limit_drop` metric would not appear in prometheus until requests were actually dropped. ([\#172](https://github.com/matrix-org/sygnal/issues/172)) - Fix bug where Sygnal would not recover after losing connection to the database. 
([\#179](https://github.com/matrix-org/sygnal/issues/179)) Improved Documentation ---------------------- -- Add preliminary documentation (Troubleshooting and Application Developers' Notes). ([\#150](https://github.com/matrix-org/sygnal/issues/150)) +- Add preliminary documentation ([Troubleshooting](docs/troubleshooting.md) and [Application Developers' Notes](docs/applications.md)). ([\#150](https://github.com/matrix-org/sygnal/issues/150)) - Add troubleshooting documentation and sample configuration about APNs pushkin's `platform` option. ([\#154](https://github.com/matrix-org/sygnal/issues/154)) - Add a note to the releasing doc asking people to inform EMS and customers during the release process. ([\#155](https://github.com/matrix-org/sygnal/issues/155)) -- Update troubleshooting document with suggestions for 400 BadDeviceToken. ([\#158](https://github.com/matrix-org/sygnal/issues/158)) +- Update troubleshooting document with suggestions for `400 BadDeviceToken`. ([\#158](https://github.com/matrix-org/sygnal/issues/158)) Internal Changes ---------------- -- Remove a source of nuisance, noisy (but otherwise harmless) exceptions in tests. ([\#149](https://github.com/matrix-org/sygnal/issues/149)) +- Remove a source of noisy (but otherwise harmless) exceptions in tests. ([\#149](https://github.com/matrix-org/sygnal/issues/149)) - Add tests for HTTP Proxy support. ([\#151](https://github.com/matrix-org/sygnal/issues/151), [\#152](https://github.com/matrix-org/sygnal/issues/152)) - Remove extraneous log line. ([\#174](https://github.com/matrix-org/sygnal/issues/174)) - Fix type hints due to Twisted upgrade. ([\#178](https://github.com/matrix-org/sygnal/issues/178)) From 1d9d4152cd59e3dea855f58d4f4373498c76b9e0 Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Fri, 19 Mar 2021 09:43:05 -0400 Subject: [PATCH 17/44] Combine related changelog entries. 
--- CHANGELOG.md | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6c8b84c9..96b40394 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -18,10 +18,8 @@ Bugfixes Improved Documentation ---------------------- -- Add preliminary documentation ([Troubleshooting](docs/troubleshooting.md) and [Application Developers' Notes](docs/applications.md)). ([\#150](https://github.com/matrix-org/sygnal/issues/150)) -- Add troubleshooting documentation and sample configuration about APNs pushkin's `platform` option. ([\#154](https://github.com/matrix-org/sygnal/issues/154)) +- Add preliminary documentation ([Troubleshooting](docs/troubleshooting.md) and [Application Developers' Notes](docs/applications.md)). ([\#150](https://github.com/matrix-org/sygnal/issues/150), [\#154](https://github.com/matrix-org/sygnal/issues/154), [\#158](https://github.com/matrix-org/sygnal/issues/158)) - Add a note to the releasing doc asking people to inform EMS and customers during the release process. ([\#155](https://github.com/matrix-org/sygnal/issues/155)) -- Update troubleshooting document with suggestions for `400 BadDeviceToken`. ([\#158](https://github.com/matrix-org/sygnal/issues/158)) Internal Changes From 72ff6c575025b2196458e90d69aa8681baf5ec33 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 22 Mar 2021 09:51:12 +0000 Subject: [PATCH 18/44] Fix race when using HTTP proxy. (#181) After `start_tls` we need to manually call `connection_made` on the protocol to tell it about the new underlying transport. This gives a brief window where the protocol can receive data without having a transport to write to, causing issues with the APNS connections where it assumes it can write once it starts reading data. We fix this by wrapping the protocol in a buffer that simply buffers incoming data until `connection_made` is called. 
Co-authored-by: Patrick Cloke --- changelog.d/181.bugfix | 1 + sygnal/helper/proxy/proxy_asyncio.py | 50 ++++++++- tests/asyncio_test_helpers.py | 38 +++++++ tests/test_httpproxy_asyncio.py | 150 +++++++++++++++++++++++++++ 4 files changed, 237 insertions(+), 2 deletions(-) create mode 100644 changelog.d/181.bugfix diff --git a/changelog.d/181.bugfix b/changelog.d/181.bugfix new file mode 100644 index 00000000..d1595022 --- /dev/null +++ b/changelog.d/181.bugfix @@ -0,0 +1 @@ +Fix bug when using a HTTP proxy where connections would sometimes fail to establish. diff --git a/sygnal/helper/proxy/proxy_asyncio.py b/sygnal/helper/proxy/proxy_asyncio.py index 73f43377..39f3049b 100644 --- a/sygnal/helper/proxy/proxy_asyncio.py +++ b/sygnal/helper/proxy/proxy_asyncio.py @@ -22,6 +22,8 @@ from ssl import Purpose, SSLContext, create_default_context from typing import Callable, Optional, Tuple, Union +import attr + from sygnal.exceptions import ProxyConnectError from sygnal.helper.proxy import decompose_http_proxy_url @@ -146,19 +148,25 @@ async def switch_over_when_ready(self) -> Tuple[BaseTransport, Protocol]: # unreachable raise RuntimeError("Left over bytes should not occur with TLS") + # There is a race where the `new_protocol` may get given data before + # we manage to call `connection_made` on it, which can lead to + # exceptions if the protocol then tries to write to the transport + # that is has been given yet. + buffered_protocol = _BufferedWrapperProtocol(new_protocol) + # be careful not to use the `transport` ever again after passing it # to start_tls — we overwrite our variable with the TLS-wrapped # transport to avoid that! 
transport = await self._event_loop.start_tls( self._transport, - new_protocol, + buffered_protocol, self._sslcontext, server_hostname=self._target_hostport[0], ) # start_tls does NOT call connection_made on new_protocol, so we # must do it ourselves - new_protocol.connection_made(transport) + buffered_protocol.connection_made(transport) else: # no wrapping required for non-TLS transport = self._transport @@ -171,6 +179,8 @@ async def switch_over_when_ready(self) -> Tuple[BaseTransport, Protocol]: # pass over dangling bytes if applicable new_protocol.data_received(left_over_bytes) + logger.debug("Finished switching protocol") + return transport, new_protocol def data_received(self, data: bytes) -> None: @@ -332,3 +342,39 @@ def __getattr__(self, item): We use this to delegate other method calls to the real EventLoop. """ return getattr(self._wrapped_loop, item) + + +@attr.s(slots=True, auto_attribs=True) +class _BufferedWrapperProtocol(Protocol): + """Wraps a protocol to buffer any incoming data received before + `connection_made` is called. 
+ """ + + _protocol: Protocol + _connected: bool = False + _buffer: bytearray = attr.Factory(bytearray) + + def connection_made(self, transport: BaseTransport): + self._connected = True + self._protocol.connection_made(transport) + if self._buffer: + self._protocol.data_received(self._buffer) + self._buffer = bytearray() + + def connection_lost(self, exc: Optional[Exception]): + self._protocol.connection_lost(exc) + + def pause_writing(self): + self._protocol.pause_writing() + + def resume_writing(self): + self._protocol.resume_writing() + + def data_received(self, data: bytes): + if self._connected: + self._protocol.data_received(data) + else: + self._buffer.extend(data) + + def eof_received(self): + return self._protocol.eof_received() diff --git a/tests/asyncio_test_helpers.py b/tests/asyncio_test_helpers.py index 9096f34d..20a7fdf2 100644 --- a/tests/asyncio_test_helpers.py +++ b/tests/asyncio_test_helpers.py @@ -70,6 +70,14 @@ def call_later( ): self.call_at(self._time + delay, callback, *args, context=context) + # We're meant to return a canceller, but can cheat and return a no-op one + # instead. + class _Canceller: + def cancel(self): + pass + + return _Canceller() + def call_at( self, when: float, @@ -114,6 +122,10 @@ def __init__(self): # Whether this transport was closed self.closed = False + # We need to explicitly mark that this connection allows start tls, + # otherwise `loop.start_tls` will raise an exception. 
+ self._start_tls_compatible = True + def reset_mock(self) -> None: self.buffer = b"" self.eofed = False @@ -189,3 +201,29 @@ def write(self, data: bytes) -> None: self.transport.write(data) else: self._to_transmit += data + + +class EchoProtocol(Protocol): + """A protocol that immediately echoes all data it receives""" + + def __init__(self): + self._to_transmit = b"" + self.received_bytes = b"" + self.transport = None + + def data_received(self, data: bytes) -> None: + self.received_bytes += data + assert self.transport + self.transport.write(data) + + def connection_made(self, transport: transports.BaseTransport) -> None: + assert isinstance(transport, Transport) + self.transport = transport + if self._to_transmit: + transport.write(self._to_transmit) + + def write(self, data: bytes) -> None: + if self.transport: + self.transport.write(data) + else: + self._to_transmit += data diff --git a/tests/test_httpproxy_asyncio.py b/tests/test_httpproxy_asyncio.py index e88189ae..d173ddfb 100644 --- a/tests/test_httpproxy_asyncio.py +++ b/tests/test_httpproxy_asyncio.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. import asyncio +import ssl from asyncio import AbstractEventLoop, BaseTransport, Protocol, Task from typing import Optional, Tuple, cast @@ -21,10 +22,16 @@ from tests import testutils from tests.asyncio_test_helpers import ( + EchoProtocol, MockProtocol, MockTransport, TimelessEventLoopWrapper, ) +from tests.twisted_test_helpers import ( + create_test_cert_file, + get_test_ca_cert_file, + get_test_key_file, +) class AsyncioHttpProxyTest(testutils.TestCase): @@ -191,3 +198,146 @@ def test_connect_failure(self): # check our protocol did not receive anything, because it was an HTTP- # level error, not actually a connection to our target. self.assertEqual(fake_protocol.received_bytes, b"") + + +class AsyncioHttpProxyTLSTest(testutils.TestCase): + """Test that using a HTTPS proxy works. 
+ + This is a bit convoluted to try and test that we don't hit a race where the + new client protocol can receive data before `connection_made` is called, + which can cause problems if it tries to write to the connection that it + hasn't been given yet. + """ + + def config_setup(self, config): + super().config_setup(config) + config["apps"]["com.example.spqr"] = { + "type": "tests.test_pushgateway_api_v1.TestPushkin" + } + self.base_loop = asyncio.new_event_loop() + augmented_loop = TimelessEventLoopWrapper(self.base_loop) # type: ignore + asyncio.set_event_loop(cast(AbstractEventLoop, augmented_loop)) + + self.loop = augmented_loop + + self.proxy_context = ssl.create_default_context() + self.proxy_context.load_verify_locations(get_test_ca_cert_file()) + self.proxy_context.set_ciphers("DEFAULT") + + def make_fake_proxy( + self, host: str, port: int, proxy_credentials: Optional[Tuple[str, str]], + ) -> Tuple[EchoProtocol, MockTransport, "Task[Tuple[BaseTransport, Protocol]]"]: + # Task[Tuple[MockTransport, MockProtocol]] + + # make a fake proxy + fake_proxy = MockTransport() + + # We connect with an echo protocol to test that we can always write when + # we receive data. + fake_protocol = EchoProtocol() + + # create a HTTP CONNECT proxy client protocol + http_connect_protocol = HttpConnectProtocol( + target_hostport=(host, port), + proxy_credentials=proxy_credentials, + protocol_factory=lambda: fake_protocol, + sslcontext=self.proxy_context, + loop=None, + ) + switch_over_task = self.loop.create_task( + http_connect_protocol.switch_over_when_ready() + ) + # check the task is not somehow already marked as done before we even + # receive anything. 
+ self.assertFalse(switch_over_task.done()) + # connect the proxy client to the proxy + fake_proxy.set_protocol(http_connect_protocol) + http_connect_protocol.connection_made(fake_proxy) + return fake_protocol, fake_proxy, switch_over_task + + def test_connect_no_credentials(self): + """ + Tests the proxy connection procedure when there is no basic auth. + """ + host = "example.org" + port = 443 + proxy_credentials = None + fake_protocol, fake_proxy, switch_over_task = self.make_fake_proxy( + host, port, proxy_credentials + ) + + # Check that the proxy got the proper CONNECT request. + self.assertEqual(fake_proxy.buffer, b"CONNECT example.org:443 HTTP/1.0\r\n\r\n") + # Reset the proxy mock + fake_proxy.reset_mock() + + # pretend we got a happy response + fake_proxy.pretend_to_receive(b"HTTP/1.0 200 Connection Established\r\n\r\n") + + # Since we're talking TLS we need to create a server TLS connection that + # we can use to talk to each other. + context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) + context.load_cert_chain( + create_test_cert_file([b"DNS:example.org"]), keyfile=get_test_key_file() + ) + context.set_ciphers("DEFAULT") + + # Note that we have to use a different event loop wrapper here as we + # want that server side setup to finish before the client side setup, so + # that we can trigger any races. + server_loop = TimelessEventLoopWrapper(self.base_loop) # type: ignore + server_transport = MockTransport() + proxy_ft = server_loop.create_task( + server_loop.start_tls( + server_transport, + MockProtocol(), + context, + server_hostname=host, + server_side=True, + ) + ) + + # Advance event loop because we have to let coroutines be executed + self.loop.advance(1.0) + server_loop.advance(1.0) + + # We manually copy the bytes between the fake_proxy transport and our + # created TLS transport. We do this for each step in the TLS handshake. 
+ + # Client -> Server + server_transport.pretend_to_receive(fake_proxy.buffer) + fake_proxy.buffer = b"" + + # Server -> Client + fake_proxy.pretend_to_receive(server_transport.buffer) + server_transport.buffer = b"" + + # Client -> Server + server_transport.pretend_to_receive(fake_proxy.buffer) + fake_proxy.buffer = b"" + + # We *only* advance the server side loop so that we can send data before + # the client has called `connection_made` on the new protocol. + server_loop.advance(0.1) + + # Server -> Client application data. + server_plain_transport = proxy_ft.result() + server_plain_transport.write(b"begin beep boop\r\n\r\n~~ :) ~~") + fake_proxy.pretend_to_receive(server_transport.buffer) + server_transport.buffer = b"" + + self.loop.advance(1.0) + + # *now* we should have switched over from the HTTP CONNECT protocol + # to the user protocol (in our case, a MockProtocol). + self.assertTrue(switch_over_task.done()) + + transport, protocol = switch_over_task.result() + + # check it was our protocol that was returned + self.assertIs(protocol, fake_protocol) + + # check our protocol received exactly the bytes meant for it + self.assertEqual( + fake_protocol.received_bytes, b"begin beep boop\r\n\r\n~~ :) ~~" + ) From a2ac34dd307fd4579f82755ead85d4c4efc1057d Mon Sep 17 00:00:00 2001 From: Bruno Windels Date: Mon, 22 Mar 2021 15:41:22 +0000 Subject: [PATCH 19/44] Fix webpush vapid_claims from one request bleeding into others. 
(#180) --- changelog.d/180.bugfix | 1 + sygnal/webpushpushkin.py | 27 +++++++++++++++++++-------- 2 files changed, 20 insertions(+), 8 deletions(-) create mode 100644 changelog.d/180.bugfix diff --git a/changelog.d/180.bugfix b/changelog.d/180.bugfix new file mode 100644 index 00000000..216e5ee4 --- /dev/null +++ b/changelog.d/180.bugfix @@ -0,0 +1 @@ +Fix vapid_claims from one webpush request bleeding into others \ No newline at end of file diff --git a/sygnal/webpushpushkin.py b/sygnal/webpushpushkin.py index 6ed57d9a..ff53594d 100644 --- a/sygnal/webpushpushkin.py +++ b/sygnal/webpushpushkin.py @@ -16,6 +16,7 @@ import logging import os.path from io import BytesIO +from urllib.parse import urlparse from prometheus_client import Gauge, Histogram from py_vapid import Vapid, VapidException @@ -104,16 +105,15 @@ def __init__(self, name, sygnal, config): self.vapid_private_key = Vapid.from_file(private_key_file=privkey_filename) except VapidException as e: raise PushkinSetupException("invalid 'vapid_private_key' file") from e - vapid_contact_email = self.get_config("vapid_contact_email") - if not vapid_contact_email: + self.vapid_contact_email = self.get_config("vapid_contact_email") + if not self.vapid_contact_email: raise PushkinSetupException("'vapid_contact_email' not set in config") - self.vapid_claims = {"sub": "mailto:{}".format(vapid_contact_email)} async def _dispatch_notification_unlimited(self, n, device, context): p256dh = device.pushkey if not isinstance(device.data, dict): logger.warn( - "device.data is not a dict for pushkey %s, rejecting pushkey", p256dh + "Rejecting pushkey %s; device.data is not a dict", device.pushkey ) return [device.pushkey] @@ -122,8 +122,8 @@ async def _dispatch_notification_unlimited(self, n, device, context): if not p256dh or not endpoint or not auth: logger.warn( - "subscription info incomplete " - + "(p256dh: %s, endpoint: %s, auth: %s), rejecting pushkey", + "Rejecting pushkey; subscription info incomplete " + + "(p256dh: 
%s, endpoint: %s, auth: %s)", p256dh, endpoint, auth, @@ -137,6 +137,10 @@ async def _dispatch_notification_unlimited(self, n, device, context): payload = WebpushPushkin._build_payload(n, device) data = json.dumps(payload) + # note that webpush modifies vapid_claims, so make sure it's only used once + vapid_claims = { + "sub": "mailto:{}".format(self.vapid_contact_email), + } # we use the semaphore to actually limit the number of concurrent # requests, since the HTTPConnectionPool will actually just lead to more # requests being created but not pooled – it does not perform limiting. @@ -151,16 +155,23 @@ async def _dispatch_notification_unlimited(self, n, device, context): subscription_info=subscription_info, data=data, vapid_private_key=self.vapid_private_key, - vapid_claims=self.vapid_claims, + vapid_claims=vapid_claims, requests_session=self.http_agent_wrapper, ) response = await response_wrapper.deferred - await readBody(response) + response_text = (await readBody(response)).decode() finally: self.connection_semaphore.release() # assume 4xx is permanent and 5xx is temporary if 400 <= response.code < 500: + logger.warn( + "Rejecting pushkey %s; gateway %s failed with %d: %s", + device.pushkey, + urlparse(endpoint).netloc, + response.code, + response_text, + ) return [device.pushkey] return [] From 189f091cfcd18471daaf0e4b8f4654d24bb49c00 Mon Sep 17 00:00:00 2001 From: Bruno Windels Date: Mon, 22 Mar 2021 19:23:38 +0000 Subject: [PATCH 20/44] Add an option to configure allowed WebPush endpoints. 
(#182) --- changelog.d/182.feature | 1 + docs/applications.md | 13 +++++++++++++ sygnal/utils.py | 25 +++++++++++++++++++++++++ sygnal/webpushpushkin.py | 24 +++++++++++++++++++++++- 4 files changed, 62 insertions(+), 1 deletion(-) create mode 100644 changelog.d/182.feature diff --git a/changelog.d/182.feature b/changelog.d/182.feature new file mode 100644 index 00000000..e071dcd8 --- /dev/null +++ b/changelog.d/182.feature @@ -0,0 +1 @@ +Add 'allowed_endpoints' configuration option for limiting the endpoints that WebPush pushkins will contact. diff --git a/docs/applications.md b/docs/applications.md index 25584925..096ce482 100644 --- a/docs/applications.md +++ b/docs/applications.md @@ -226,6 +226,19 @@ You also need to set an e-mail address in `vapid_contact_email` in the config fi where the push gateway operator can reach you in case they need to notify you about your usage of their API. +Since for webpush, the push gateway endpoint is variable and comes from the browser +through the push data, you may not want to have your sygnal instance connect to any +random addressable server. For this, you can set the `allowed_endpoints` option to +a list of allowed endpoints. Globs are supported. For example, to allow Firefox, +Chrome and Opera (Google) and Edge as a push gateway, you can use this: + +```yaml +allowed_endpoints: + - "updates.push.services.mozilla.com" + - "fcm.googleapis.com" + - "*.notify.windows.com" +``` + #### Push key and expected push data In your web application, [the push manager subscribe method](https://developer.mozilla.org/en-US/docs/Web/API/PushManager/subscribe) diff --git a/sygnal/utils.py b/sygnal/utils.py index 0da68868..3ce3bfaa 100644 --- a/sygnal/utils.py +++ b/sygnal/utils.py @@ -12,6 +12,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import re from logging import LoggerAdapter from twisted.internet.defer import Deferred @@ -37,3 +38,27 @@ async def twisted_sleep(delay, twisted_reactor): class NotificationLoggerAdapter(LoggerAdapter): def process(self, msg, kwargs): return f"[{self.extra['request_id']}] {msg}", kwargs + + +def glob_to_regex(glob): + """Converts a glob to a compiled regex object. + + The regex is anchored at the beginning and end of the string. + + Args: + glob (str) + + Returns: + re.RegexObject + """ + res = "" + for c in glob: + if c == "*": + res = res + ".*" + elif c == "?": + res = res + "." + else: + res = res + re.escape(c) + + # \A anchors at start of string, \Z at end of string + return re.compile(r"\A" + res + r"\Z", re.IGNORECASE) diff --git a/sygnal/webpushpushkin.py b/sygnal/webpushpushkin.py index ff53594d..12582d95 100644 --- a/sygnal/webpushpushkin.py +++ b/sygnal/webpushpushkin.py @@ -16,6 +16,7 @@ import logging import os.path from io import BytesIO +from typing import List, Optional, Pattern from urllib.parse import urlparse from prometheus_client import Gauge, Histogram @@ -30,6 +31,7 @@ from .exceptions import PushkinSetupException from .notifications import ConcurrencyLimitedPushkin +from .utils import glob_to_regex QUEUE_TIME_HISTOGRAM = Histogram( "sygnal_webpush_queue_time", @@ -96,6 +98,14 @@ def __init__(self, name, sygnal, config): ) self.http_agent_wrapper = HttpAgentWrapper(self.http_agent) + self.allowed_endpoints = None # type: Optional[List[Pattern]] + allowed_endpoints = self.get_config("allowed_endpoints") + if allowed_endpoints: + if not isinstance(allowed_endpoints, list): + raise PushkinSetupException( + "'allowed_endpoints' should be a list or not set" + ) + self.allowed_endpoints = list(map(glob_to_regex, allowed_endpoints)) privkey_filename = self.get_config("vapid_private_key") if not privkey_filename: raise PushkinSetupException("'vapid_private_key' not set in config") @@ -119,6 +129,18 @@ async def 
_dispatch_notification_unlimited(self, n, device, context): endpoint = device.data.get("endpoint") auth = device.data.get("auth") + endpoint_domain = urlparse(endpoint).netloc + if self.allowed_endpoints: + allowed = any( + regex.fullmatch(endpoint_domain) for regex in self.allowed_endpoints + ) + if not allowed: + logger.error( + "push gateway %s is not in allowed_endpoints, blocking request", + endpoint_domain, + ) + # abort, but don't reject push key + return [] if not p256dh or not endpoint or not auth: logger.warn( @@ -168,7 +190,7 @@ async def _dispatch_notification_unlimited(self, n, device, context): logger.warn( "Rejecting pushkey %s; gateway %s failed with %d: %s", device.pushkey, - urlparse(endpoint).netloc, + endpoint_domain, response.code, response_text, ) From ebd9fafc0c745271bb77101752c77f35614e751b Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Tue, 23 Mar 2021 07:25:59 -0400 Subject: [PATCH 21/44] 0.9.1 --- CHANGELOG.md | 16 ++++++++++++++++ changelog.d/180.bugfix | 1 - changelog.d/181.bugfix | 1 - changelog.d/182.feature | 1 - 4 files changed, 16 insertions(+), 3 deletions(-) delete mode 100644 changelog.d/180.bugfix delete mode 100644 changelog.d/181.bugfix delete mode 100644 changelog.d/182.feature diff --git a/CHANGELOG.md b/CHANGELOG.md index 96b40394..366e1f49 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,19 @@ +Sygnal 0.9.1 (2021-03-23) +========================= + +Features +-------- + +- Add 'allowed_endpoints' configuration option for limiting the endpoints that WebPush pushkins will contact. ([\#182](https://github.com/matrix-org/sygnal/issues/182)) + + +Bugfixes +-------- + +- Fix vapid_claims from one webpush request bleeding into others ([\#180](https://github.com/matrix-org/sygnal/issues/180)) +- Fix bug when using a HTTP proxy where connections would sometimes fail to establish. 
([\#181](https://github.com/matrix-org/sygnal/issues/181)) + + Sygnal 0.9.0 (2021-03-19) ========================= diff --git a/changelog.d/180.bugfix b/changelog.d/180.bugfix deleted file mode 100644 index 216e5ee4..00000000 --- a/changelog.d/180.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix vapid_claims from one webpush request bleeding into others \ No newline at end of file diff --git a/changelog.d/181.bugfix b/changelog.d/181.bugfix deleted file mode 100644 index d1595022..00000000 --- a/changelog.d/181.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix bug when using a HTTP proxy where connections would sometimes fail to establish. diff --git a/changelog.d/182.feature b/changelog.d/182.feature deleted file mode 100644 index e071dcd8..00000000 --- a/changelog.d/182.feature +++ /dev/null @@ -1 +0,0 @@ -Add 'allowed_endpoints' configuration option for limiting the endpoints that WebPush pushkins will contact. From 6d43e7f4278672461602917625858e783d4fbf1a Mon Sep 17 00:00:00 2001 From: Patrick Cloke Date: Tue, 23 Mar 2021 07:28:20 -0400 Subject: [PATCH 22/44] Tweak changelog. --- CHANGELOG.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 366e1f49..4f215644 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,13 +4,13 @@ Sygnal 0.9.1 (2021-03-23) Features -------- -- Add 'allowed_endpoints' configuration option for limiting the endpoints that WebPush pushkins will contact. ([\#182](https://github.com/matrix-org/sygnal/issues/182)) +- Add `allowed_endpoints` configuration option for limiting the endpoints that WebPush pushkins will contact. ([\#182](https://github.com/matrix-org/sygnal/issues/182)) Bugfixes -------- -- Fix vapid_claims from one webpush request bleeding into others ([\#180](https://github.com/matrix-org/sygnal/issues/180)) +- Fix bug where the requests from different WebPush devices could bleed into each other. 
([\#180](https://github.com/matrix-org/sygnal/issues/180)) - Fix bug when using a HTTP proxy where connections would sometimes fail to establish. ([\#181](https://github.com/matrix-org/sygnal/issues/181)) From 42ab814394d1525a78e92399c45457a054ba80d6 Mon Sep 17 00:00:00 2001 From: Bruno Windels Date: Tue, 23 Mar 2021 16:55:06 +0000 Subject: [PATCH 23/44] Add allowed_endpoints as a config option for WebPush (#184) --- changelog.d/184.bugfix | 1 + sygnal/webpushpushkin.py | 1 + 2 files changed, 2 insertions(+) create mode 100644 changelog.d/184.bugfix diff --git a/changelog.d/184.bugfix b/changelog.d/184.bugfix new file mode 100644 index 00000000..223e9113 --- /dev/null +++ b/changelog.d/184.bugfix @@ -0,0 +1 @@ +Add `allowed_endpoints` option as understood config option for WebPush pushkins. diff --git a/sygnal/webpushpushkin.py b/sygnal/webpushpushkin.py index 12582d95..399b651d 100644 --- a/sygnal/webpushpushkin.py +++ b/sygnal/webpushpushkin.py @@ -66,6 +66,7 @@ class WebpushPushkin(ConcurrencyLimitedPushkin): "max_connections", "vapid_private_key", "vapid_contact_email", + "allowed_endpoints", } | ConcurrencyLimitedPushkin.UNDERSTOOD_CONFIG_FIELDS def __init__(self, name, sygnal, config): From 9d719c7aa9502028500f88ee658069e8283f53d9 Mon Sep 17 00:00:00 2001 From: Bruno Windels Date: Mon, 29 Mar 2021 10:56:03 +0000 Subject: [PATCH 24/44] Add time-to-live (ttl) option for webpush messages (#185) --- changelog.d/185.bugfix | 1 + docs/applications.md | 2 ++ sygnal/webpushpushkin.py | 6 ++++++ 3 files changed, 9 insertions(+) create mode 100644 changelog.d/185.bugfix diff --git a/changelog.d/185.bugfix b/changelog.d/185.bugfix new file mode 100644 index 00000000..dfd5a4e1 --- /dev/null +++ b/changelog.d/185.bugfix @@ -0,0 +1 @@ +Add `ttl` option as understood config option for WebPush pushkins to make delivery more reliable \ No newline at end of file diff --git a/docs/applications.md b/docs/applications.md index 096ce482..3db22809 100644 --- a/docs/applications.md 
+++ b/docs/applications.md @@ -239,6 +239,8 @@ allowed_endpoints: - "*.notify.windows.com" ``` +A default time-to-live of 15 minutes is set for webpush, but you can adjust this by setting the `ttl: ` configuration option for the pusher. If notifications can't be delivered by the push gateway aftet this time, they are dropped. + #### Push key and expected push data In your web application, [the push manager subscribe method](https://developer.mozilla.org/en-US/docs/Web/API/PushManager/subscribe) diff --git a/sygnal/webpushpushkin.py b/sygnal/webpushpushkin.py index 399b651d..ea73d544 100644 --- a/sygnal/webpushpushkin.py +++ b/sygnal/webpushpushkin.py @@ -54,6 +54,7 @@ logger = logging.getLogger(__name__) DEFAULT_MAX_CONNECTIONS = 20 +DEFAULT_TTL = 15 * 60 # in seconds class WebpushPushkin(ConcurrencyLimitedPushkin): @@ -67,6 +68,7 @@ class WebpushPushkin(ConcurrencyLimitedPushkin): "vapid_private_key", "vapid_contact_email", "allowed_endpoints", + "ttl", } | ConcurrencyLimitedPushkin.UNDERSTOOD_CONFIG_FIELDS def __init__(self, name, sygnal, config): @@ -119,6 +121,9 @@ def __init__(self, name, sygnal, config): self.vapid_contact_email = self.get_config("vapid_contact_email") if not self.vapid_contact_email: raise PushkinSetupException("'vapid_contact_email' not set in config") + self.ttl = self.get_config("ttl", DEFAULT_TTL) + if not isinstance(self.ttl, int): + raise PushkinSetupException("'ttl' must be an int if set") async def _dispatch_notification_unlimited(self, n, device, context): p256dh = device.pushkey @@ -177,6 +182,7 @@ async def _dispatch_notification_unlimited(self, n, device, context): response_wrapper = webpush( subscription_info=subscription_info, data=data, + ttl=self.ttl, vapid_private_key=self.vapid_private_key, vapid_claims=vapid_claims, requests_session=self.http_agent_wrapper, From a85d7a57698b127a8beaa0c11dbbba0854446368 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 29 Mar 2021 12:11:32 +0100 Subject: [PATCH 25/44] v0.9.2 --- 
CHANGELOG.md | 10 ++++++++++ changelog.d/184.bugfix | 1 - changelog.d/185.bugfix | 1 - 3 files changed, 10 insertions(+), 2 deletions(-) delete mode 100644 changelog.d/184.bugfix delete mode 100644 changelog.d/185.bugfix diff --git a/CHANGELOG.md b/CHANGELOG.md index 4f215644..64d12058 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,13 @@ +Sygnal v0.9.2 (2021-03-29) +========================== + +Bugfixes +-------- + +- Add `allowed_endpoints` option as understood config option for WebPush pushkins. ([\#184](https://github.com/matrix-org/sygnal/issues/184)) +- Add `ttl` option as understood config option for WebPush pushkins to make delivery more reliable ([\#185](https://github.com/matrix-org/sygnal/issues/185)) + + Sygnal 0.9.1 (2021-03-23) ========================= diff --git a/changelog.d/184.bugfix b/changelog.d/184.bugfix deleted file mode 100644 index 223e9113..00000000 --- a/changelog.d/184.bugfix +++ /dev/null @@ -1 +0,0 @@ -Add `allowed_endpoints` option as understood config option for WebPush pushkins. diff --git a/changelog.d/185.bugfix b/changelog.d/185.bugfix deleted file mode 100644 index dfd5a4e1..00000000 --- a/changelog.d/185.bugfix +++ /dev/null @@ -1 +0,0 @@ -Add `ttl` option as understood config option for WebPush pushkins to make delivery more reliable \ No newline at end of file From c1e90cb2ea1ea79d75f5eff0ec19f01a399308a6 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 29 Mar 2021 12:15:26 +0100 Subject: [PATCH 26/44] Fixup changelog --- CHANGELOG.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 64d12058..80e4ed93 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,11 +1,11 @@ Sygnal v0.9.2 (2021-03-29) ========================== -Bugfixes +Features -------- - Add `allowed_endpoints` option as understood config option for WebPush pushkins. 
([\#184](https://github.com/matrix-org/sygnal/issues/184)) -- Add `ttl` option as understood config option for WebPush pushkins to make delivery more reliable ([\#185](https://github.com/matrix-org/sygnal/issues/185)) +- Add `ttl` option as an understood config option for WebPush pushkins to make delivery more reliable ([\#185](https://github.com/matrix-org/sygnal/issues/185)) Sygnal 0.9.1 (2021-03-23) From 033ed0e34960405d25f62d11d13cf9ec497a8102 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 29 Mar 2021 12:26:13 +0100 Subject: [PATCH 27/44] Fixup changelog --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 80e4ed93..e53c2cb5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,7 +4,7 @@ Sygnal v0.9.2 (2021-03-29) Features -------- -- Add `allowed_endpoints` option as understood config option for WebPush pushkins. ([\#184](https://github.com/matrix-org/sygnal/issues/184)) +- Add `allowed_endpoints` option as an understood config option for WebPush pushkins. 
([\#184](https://github.com/matrix-org/sygnal/issues/184)) - Add `ttl` option as an understood config option for WebPush pushkins to make delivery more reliable ([\#185](https://github.com/matrix-org/sygnal/issues/185)) From ea06aad656f2d96129bdf9f51b54af6b31fd5654 Mon Sep 17 00:00:00 2001 From: Dan Callahan Date: Mon, 29 Mar 2021 23:12:58 +0100 Subject: [PATCH 28/44] Update contributing docs to be more explicit (#188) Fixes #187 Signed-off-by: Dan Callahan --- .gitignore | 2 ++ CONTRIBUTING.md | 62 +++++++++++++++++++++++++++++++++++++++------ README.rst | 5 ++++ changelog.d/188.doc | 1 + 4 files changed, 62 insertions(+), 8 deletions(-) create mode 100644 changelog.d/188.doc diff --git a/.gitignore b/.gitignore index 0cdf0be7..edd2ecbb 100644 --- a/.gitignore +++ b/.gitignore @@ -5,6 +5,8 @@ sygnal.pid sygnal.db _trial_temp* +/venv/ +/.venv/ /.idea /.eggs /*.egg-info diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index c2ca6985..37de0fc5 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,13 +1,59 @@ # Contributing code to Sygnal -Everyone is welcome to contribute code to [matrix.org -projects](https://github.com/matrix-org), provided that they are willing to -license their contributions under the same license as the project itself. We -follow a simple 'inbound=outbound' model for contributions: the act of -submitting an 'inbound' contribution means that the contributor agrees to -license the code under the same terms as the project's overall 'outbound' -license - in our case, this is almost always Apache Software License v2 (see -[LICENSE](LICENSE)). +Everyone is welcome to contribute code to Sygnal, provided you are willing to +license your contributions under the same license as the project itself. In +this case, the [Apache Software License v2](LICENSE). 
+ +## Preparing your development environment + +### Create a virtualenv + +To contribute to Sygnal, ensure you have Python 3.7 or newer and then run: + +``` +python3 -m venv venv +./venv/bin/pip install -e . +./venv/bin/pip install -U black flake8 isort mypy mypy-zope tox +``` + +This creates an isolated virtual Python environment ("virtualenv") just for +use with Sygnal, then installs Sygnal along with its dependencies, and lastly +installs a handful of useful tools + +Finally, activate the virtualenv by running: + +``` +source ./venv/bin/activate +``` + +Be sure to do this _every time_ you open a new terminal window for working on +Sygnal. Activating the venv ensures that any Python commands you run (`pip`, +`python`, etc.) use the versions inside your venv, and not your system Python. + +When you're done, you can close your terminal or run `deactivate` to disable +the virtualenv. + +### Run the tests + +To make sure everything is working as expected, run the unit tests: + +``` +tox -e py +``` + +If you see a message like: + +``` +------------------------------------------------------------------------------- +Ran 46 tests in 0.209s + +PASSED (successes=46) +___________________________________ summary ___________________________________ + py: commands succeeded + congratulations :) +``` + +Then all is well and you're ready to work! ## How to contribute diff --git a/README.rst b/README.rst index 3d2cdaef..3a4f99f3 100644 --- a/README.rst +++ b/README.rst @@ -10,6 +10,11 @@ https://matrix.org/docs/spec/push_gateway/r0.1.0 describes the protocol that Matrix Home Servers use to send notifications to Push Gateways such as Sygnal. +Contributing +============ +Looking to contribute to Sygnal? See `CONTRIBUTING.md `_ + + Setup ===== Sygnal is configured through a YAML configuration file. 
diff --git a/changelog.d/188.doc b/changelog.d/188.doc new file mode 100644 index 00000000..ba0361a1 --- /dev/null +++ b/changelog.d/188.doc @@ -0,0 +1 @@ +Make CONTRIBUTING.md more explicit about how to get tests passing. From 3c263c45fc13c8d54ef7e810b038fef72fb046ec Mon Sep 17 00:00:00 2001 From: Hillery Shay Date: Wed, 31 Mar 2021 15:27:25 -0700 Subject: [PATCH 29/44] Make development dependencies available as extras (#194) Fixes #190 --- CONTRIBUTING.md | 3 +-- changelog.d/194.misc | 1 + setup.py | 10 ++++++++++ tox.ini | 13 ++++--------- 4 files changed, 16 insertions(+), 11 deletions(-) create mode 100644 changelog.d/194.misc diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 37de0fc5..4b1cbb39 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -12,8 +12,7 @@ To contribute to Sygnal, ensure you have Python 3.7 or newer and then run: ``` python3 -m venv venv -./venv/bin/pip install -e . -./venv/bin/pip install -U black flake8 isort mypy mypy-zope tox +./venv/bin/pip install -e '.[dev]' ``` This creates an isolated virtual Python environment ("virtualenv") just for diff --git a/changelog.d/194.misc b/changelog.d/194.misc new file mode 100644 index 00000000..3152633f --- /dev/null +++ b/changelog.d/194.misc @@ -0,0 +1 @@ +Development dependencies are now available as extras. Contributed by Hillery Shay. 
diff --git a/setup.py b/setup.py index 0e8dcab2..ddf93118 100755 --- a/setup.py +++ b/setup.py @@ -52,5 +52,15 @@ def read(fname): "pywebpush>=1.13.0", "py-vapid>=1.7.0", ], + extras_require={ + "dev": [ + "black==19.10b0", + "flake8==3.8.3", + "isort~=5.0", + "mypy==0.780", + "mypy-zope==0.2.7", + "tox", + ] + }, long_description=read("README.rst"), ) diff --git a/tox.ini b/tox.ini index 51e8d105..8d99a8b0 100644 --- a/tox.ini +++ b/tox.ini @@ -15,20 +15,15 @@ usedevelop=true commands = trial tests [testenv:check_codestyle] -skip_install = True -deps = - # pin deps so we don't start failing when they are updated - flake8==3.8.3 - black==19.10b0 - isort~=5.0 +extras = + dev commands = flake8 . black --check --diff . isort --check-only --diff . [testenv:check_types] -deps = - mypy==0.780 - mypy-zope==0.2.7 +extras = + dev commands = mypy . From ae797c739e13aca378e6de35d4a31e088cb619b7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20Ignacio=20Torres?= Date: Thu, 1 Apr 2021 01:31:37 -0400 Subject: [PATCH 30/44] Update Tox settings for easier use (#193) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Andrés Ignacio Torres --- CONTRIBUTING.md | 38 +++++++++++++++++--------------------- changelog.d/193.doc | 1 + changelog.d/193.misc | 1 + scripts-dev/lint.sh | 26 -------------------------- tox.ini | 10 +++++----- 5 files changed, 24 insertions(+), 52 deletions(-) create mode 100644 changelog.d/193.doc create mode 100644 changelog.d/193.misc delete mode 100755 scripts-dev/lint.sh diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 4b1cbb39..bd3c1d9c 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -10,18 +10,18 @@ this case, the [Apache Software License v2](LICENSE). 
To contribute to Sygnal, ensure you have Python 3.7 or newer and then run: -``` +```bash python3 -m venv venv ./venv/bin/pip install -e '.[dev]' ``` This creates an isolated virtual Python environment ("virtualenv") just for use with Sygnal, then installs Sygnal along with its dependencies, and lastly -installs a handful of useful tools +installs a handful of useful tools Finally, activate the virtualenv by running: -``` +```bash source ./venv/bin/activate ``` @@ -36,7 +36,7 @@ the virtualenv. To make sure everything is working as expected, run the unit tests: -``` +```bash tox -e py ``` @@ -87,27 +87,23 @@ Sygnal follows the [Synapse code style]. Many of the conventions are enforced by scripts which are run as part of the [continuous integration system](#continuous-integration-and-testing). -To help check and fix adherence to the code style, you can run `scripts-dev/lint.sh` -locally. You'll need Python 3.7 or later, and to install a number of tools: - -``` -# Install the dependencies -pip install -U black flake8 isort mypy mypy-zope - -# Run the linter script -./scripts-dev/lint.sh -``` +To help check and fix adherence to the code style, you can run `tox` +locally. You'll need Python 3.7 or later, and a virtual environment configured and +active: -**Note that the script does not just test/check, but also reformats code, so you -may wish to ensure any new code is committed first**. +```bash +# Activate the virtual environment +source ./venv/bin/activate -By default, this script checks all files and can take some time; if you alter -only certain files, you might wish to specify paths as arguments to reduce the -run-time: +# Run the code style check +tox -e check_codestyle +# Run the types check +tox -e check_types ``` -./scripts-dev/lint.sh path/to/file1.py path/to/file2.py path/to/folder -``` + +These commands will consider the paths and files related to the project (i.e. +everything in `sygnal/` and in `tests/` as well as the `setup.py` file). 
Before pushing new changes, ensure they don't produce linting errors. Commit any files that were corrected. diff --git a/changelog.d/193.doc b/changelog.d/193.doc new file mode 100644 index 00000000..5e48a1df --- /dev/null +++ b/changelog.d/193.doc @@ -0,0 +1 @@ +Update CONTRIBUTING.md to specify how to run code style and type checks with Tox, and add formatting to code block samples. diff --git a/changelog.d/193.misc b/changelog.d/193.misc new file mode 100644 index 00000000..3f50574f --- /dev/null +++ b/changelog.d/193.misc @@ -0,0 +1 @@ +Updated Tox to run in the installed version of Python (instead of specifying Python 3.7) and to consider specific paths and folders while running checks, instead of the whole repository (potentially including unwanted files and folders, e.g. the virtual environment). diff --git a/scripts-dev/lint.sh b/scripts-dev/lint.sh deleted file mode 100755 index f05d7e73..00000000 --- a/scripts-dev/lint.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/bin/sh -# -# Runs linting scripts over the local Sygnal checkout -# isort - sorts import statements -# black - opinionated code formatter -# flake8 - lints and finds mistakes -# mypy - checks types - -set -e - -if [ $# -ge 1 ] -then - files=$* -else - files="sygnal tests" -fi - -echo "Linting these locations: $files" -echo " ===== Running isort ===== " -isort $files -echo " ===== Running black ===== " -black $files -echo " ===== Running flake8 ===== " -flake8 $files -echo " ===== Running mypy ===== " -mypy $files diff --git a/tox.ini b/tox.ini index 8d99a8b0..b5edce5c 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py37, check_codestyle, check_types +envlist = py, check_codestyle, check_types [testenv] @@ -18,12 +18,12 @@ commands = trial tests extras = dev commands = - flake8 . - black --check --diff . - isort --check-only --diff . 
+ flake8 sygnal/ tests/ setup.py + black --check --diff sygnal/ tests/ setup.py + isort --check-only --diff sygnal/ tests/ setup.py [testenv:check_types] extras = dev commands = - mypy . + mypy sygnal/ tests/ setup.py From 881ab216ec5ff949c388109e659765265ee7358f Mon Sep 17 00:00:00 2001 From: Tawanda Date: Fri, 2 Apr 2021 13:39:39 +0200 Subject: [PATCH 31/44] Specify python_requires in setup.py (#207) Update setup.py to specify minimum Python version of 3.7 Fixes #198 Signed-off-by: tawanda --- changelog.d/207.misc | 1 + setup.py | 1 + 2 files changed, 2 insertions(+) create mode 100644 changelog.d/207.misc diff --git a/changelog.d/207.misc b/changelog.d/207.misc new file mode 100644 index 00000000..f1d3a241 --- /dev/null +++ b/changelog.d/207.misc @@ -0,0 +1 @@ +setup.py specifies that a minimum version of Python greater or equal to 3.7 is required. Contributed by Tawanda Moyo diff --git a/setup.py b/setup.py index ddf93118..e4dcdfe1 100755 --- a/setup.py +++ b/setup.py @@ -34,6 +34,7 @@ def read(fname): packages=find_packages(exclude=["tests", "tests.*"]), description="Reference Push Gateway for Matrix Notifications", use_scm_version=True, + python_requires=">=3.7", setup_requires=["setuptools_scm"], install_requires=[ "Twisted>=19.2.1", From d8e539722c8f934743783edd212dff78d1cb0de0 Mon Sep 17 00:00:00 2001 From: csett86 Date: Tue, 6 Apr 2021 15:43:55 +0200 Subject: [PATCH 32/44] Revert "Docker: Include Root CA cert needed for APNs (#141)" (#208) This reverts commit d477292a9ce5ff885c58f781b9652a42597a31ac. Since March 29 2021 APNs is no longer using GeoTrust, so this can be removed. 
See https://developer.apple.com/documentation/usernotifications/setting_up_a_remote_notification_server --- changelog.d/208.docker | 1 + docker/Dockerfile | 4 ---- docker/GeoTrust_Global_CA.crt | 20 -------------------- 3 files changed, 1 insertion(+), 24 deletions(-) create mode 100644 changelog.d/208.docker delete mode 100644 docker/GeoTrust_Global_CA.crt diff --git a/changelog.d/208.docker b/changelog.d/208.docker new file mode 100644 index 00000000..5b29e98a --- /dev/null +++ b/changelog.d/208.docker @@ -0,0 +1 @@ +Remove manually added GeoTrust Root CA certificate from docker image as Apple is no longer using it. diff --git a/docker/Dockerfile b/docker/Dockerfile index a641a417..66a7f1c8 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -36,10 +36,6 @@ FROM python:3.7-slim RUN apt-get update && apt-get install -y libpq5 && apt-get clean COPY --from=builder /install /usr/local -# copy the root certificate used by APNs, since Debian removed it -COPY docker/GeoTrust_Global_CA.crt /usr/local/share/ca-certificates/GeoTrust_Global_CA.crt -RUN update-ca-certificates - EXPOSE 5000/tcp ENTRYPOINT ["python", "-m", "sygnal.sygnal"] diff --git a/docker/GeoTrust_Global_CA.crt b/docker/GeoTrust_Global_CA.crt deleted file mode 100644 index bcb25297..00000000 --- a/docker/GeoTrust_Global_CA.crt +++ /dev/null @@ -1,20 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT -MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i -YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG -EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg -R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9 -9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq -fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv -iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU -1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+ 
-bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW -MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA -ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l -uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn -Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS -tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF -PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un -hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV -5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw== ------END CERTIFICATE----- From 6b2a7c1647db576215bb1c54201b3f94b1783766 Mon Sep 17 00:00:00 2001 From: Bruno Windels Date: Thu, 8 Apr 2021 15:21:07 +0200 Subject: [PATCH 33/44] WebPush reliability improvements (#212) Part of https://github.com/matrix-org/sygnal/issues/186: - Only reject push key on 404 or 410 response codes - Trim message body so we don't exceed 4096 characters - Log when returned TTL header is lower than request - Support events_only flag on push data, to avoid bogus notifications in browser. --- changelog.d/212.feature | 1 + docs/applications.md | 6 +++ sygnal/webpushpushkin.py | 87 +++++++++++++++++++++++++++++++++++----- 3 files changed, 83 insertions(+), 11 deletions(-) create mode 100644 changelog.d/212.feature diff --git a/changelog.d/212.feature b/changelog.d/212.feature new file mode 100644 index 00000000..d615e171 --- /dev/null +++ b/changelog.d/212.feature @@ -0,0 +1 @@ +Prevent the push key from being rejected for temporary errors and oversized payloads, add TTL logging, and support events_only push data flag. 
\ No newline at end of file diff --git a/docs/applications.md b/docs/applications.md index 3db22809..0e17012c 100644 --- a/docs/applications.md +++ b/docs/applications.md @@ -253,6 +253,12 @@ any properties set in it will be present in the push messages you receive, so it can be used to pass identifiers specific to your client (like which account the notification is for). +##### events_only + +As of the time of writing, all webpush-supporting browsers require you to set `userVisibleOnly: true` when calling (`pushManager.subscribe`)[https://developer.mozilla.org/en-US/docs/Web/API/PushManager/subscribe], to (prevent abusing webpush to track users)[https://goo.gl/yqv4Q4] without their knowledge. With this (mandatory) flag, the browser will show a "site has been updated in the background" notification if no notifications are visible after your service worker processes a `push` event. This can easily happen when sygnal sends a push message to clear the unread count, which is not specific to an event. With `events_only: true` in the pusher data, sygnal won't forward any push message without a event id. This prevents your service worker being forced to show a notification to push messages that clear the unread count. + +##### Multiple pushers on one origin + Also note that because you can only have one push subscription per service worker, and hence per origin, you might create pushers for different accounts with the same p256dh push key. 
To prevent the server from removing other pushers with the same diff --git a/sygnal/webpushpushkin.py b/sygnal/webpushpushkin.py index ea73d544..faa9e843 100644 --- a/sygnal/webpushpushkin.py +++ b/sygnal/webpushpushkin.py @@ -55,6 +55,9 @@ DEFAULT_MAX_CONNECTIONS = 20 DEFAULT_TTL = 15 * 60 # in seconds +# Max payload size is 4096 +MAX_BODY_LENGTH = 1000 +MAX_CIPHERTEXT_LENGTH = 2000 class WebpushPushkin(ConcurrencyLimitedPushkin): @@ -133,6 +136,11 @@ async def _dispatch_notification_unlimited(self, n, device, context): ) return [device.pushkey] + # drop notifications without an event id if requested, + # see https://github.com/matrix-org/sygnal/issues/186 + if device.data.get("events_only") is True and not n.event_id: + return [] + endpoint = device.data.get("endpoint") auth = device.data.get("auth") endpoint_domain = urlparse(endpoint).netloc @@ -175,7 +183,6 @@ async def _dispatch_notification_unlimited(self, n, device, context): with QUEUE_TIME_HISTOGRAM.time(): with PENDING_REQUESTS_GAUGE.track_inprogress(): await self.connection_semaphore.acquire() - try: with SEND_TIME_HISTOGRAM.time(): with ACTIVE_REQUESTS_GAUGE.track_inprogress(): @@ -192,15 +199,10 @@ async def _dispatch_notification_unlimited(self, n, device, context): finally: self.connection_semaphore.release() - # assume 4xx is permanent and 5xx is temporary - if 400 <= response.code < 500: - logger.warn( - "Rejecting pushkey %s; gateway %s failed with %d: %s", - device.pushkey, - endpoint_domain, - response.code, - response_text, - ) + reject_pushkey = self._handle_response( + response, response_text, device.pushkey, endpoint_domain + ) + if reject_pushkey: return [device.pushkey] return [] @@ -233,7 +235,6 @@ def _build_payload(n, device): "sender_display_name", "user_is_target", "type", - "content", ]: value = getattr(n, attr, None) if value: @@ -246,8 +247,72 @@ def _build_payload(n, device): if count_value is not None: payload[attr] = count_value + if n.content and isinstance(n.content, dict): + 
content = n.content.copy() + # we can't show formatted_body in a notification anyway on web + # so remove it + content.pop("formatted_body", None) + body = content.get("body") + # make some attempts to not go over the max payload length + if isinstance(body, str) and len(body) > MAX_BODY_LENGTH: + content["body"] = body[0 : MAX_BODY_LENGTH - 1] + "…" + ciphertext = content.get("ciphertext") + if isinstance(ciphertext, str) and len(ciphertext) > MAX_CIPHERTEXT_LENGTH: + content.pop("ciphertext", None) + payload["content"] = content + return payload + def _handle_response(self, response, response_text, pushkey, endpoint_domain): + """ + Logs and determines the outcome of the response + + Returns: + Boolean whether the pushkey should be rejected + """ + ttl_response_headers = response.headers.getRawHeaders(b"TTL") + if ttl_response_headers: + try: + ttl_given = int(ttl_response_headers[0]) + if ttl_given != self.ttl: + logger.info( + "requested TTL of %d to endpoint %s but got %d", + self.ttl, + endpoint_domain, + ttl_given, + ) + except ValueError: + pass + # permanent errors + if response.code == 404 or response.code == 410: + logger.warn( + "Rejecting pushkey %s; subscription is invalid on %s: %d: %s", + pushkey, + endpoint_domain, + response.code, + response_text, + ) + return True + # and temporary ones + if response.code >= 400: + logger.warn( + "webpush request failed for pushkey %s; %s responded with %d: %s", + pushkey, + endpoint_domain, + response.code, + response_text, + ) + elif response.code != 201: + logger.info( + "webpush request for pushkey %s didn't respond with 201; " + + "%s responded with %d: %s", + pushkey, + endpoint_domain, + response.code, + response_text, + ) + return False + class HttpAgentWrapper: """ From 75ee260c8a4a51e3146e4c3f1179214579816382 Mon Sep 17 00:00:00 2001 From: kibablu <50038533+kibablu@users.noreply.github.com> Date: Fri, 9 Apr 2021 17:59:14 +0300 Subject: [PATCH 34/44] Upgrade development dependencies #199 (#214) * Upgrade 
development dependencies (#199) Fixes #199 Signed-off-by: Omar Mohamed --- changelog.d/214.misc | 1 + setup.py | 8 ++++---- sygnal/helper/proxy/proxy_asyncio.py | 4 +++- tests/test_apns.py | 3 ++- tests/test_httpproxy_asyncio.py | 5 ++++- 5 files changed, 14 insertions(+), 7 deletions(-) create mode 100644 changelog.d/214.misc diff --git a/changelog.d/214.misc b/changelog.d/214.misc new file mode 100644 index 00000000..19149774 --- /dev/null +++ b/changelog.d/214.misc @@ -0,0 +1 @@ +Upgrade development dependencies fixes #199. Contributed by Omar Mohamed diff --git a/setup.py b/setup.py index e4dcdfe1..551eefe5 100755 --- a/setup.py +++ b/setup.py @@ -55,11 +55,11 @@ def read(fname): ], extras_require={ "dev": [ - "black==19.10b0", - "flake8==3.8.3", + "black==20.8b1", + "flake8==3.9.0", "isort~=5.0", - "mypy==0.780", - "mypy-zope==0.2.7", + "mypy==0.812", + "mypy-zope==0.3.0", "tox", ] }, diff --git a/sygnal/helper/proxy/proxy_asyncio.py b/sygnal/helper/proxy/proxy_asyncio.py index 39f3049b..6f136ada 100644 --- a/sygnal/helper/proxy/proxy_asyncio.py +++ b/sygnal/helper/proxy/proxy_asyncio.py @@ -270,7 +270,9 @@ class ProxyingEventLoopWrapper: """ def __init__( - self, wrapped_loop: asyncio.AbstractEventLoop, proxy_url_str: str, + self, + wrapped_loop: asyncio.AbstractEventLoop, + proxy_url_str: str, ): """ Args: diff --git a/tests/test_apns.py b/tests/test_apns.py index 5898a0a1..d94b6bf0 100644 --- a/tests/test_apns.py +++ b/tests/test_apns.py @@ -205,7 +205,8 @@ def test_expected_badge_only_with_default_payload(self): ((notification_req,), _kwargs) = method.call_args self.assertEqual( - {"aps": {"badge": 2}}, notification_req.message, + {"aps": {"badge": 2}}, + notification_req.message, ) self.assertEqual({"rejected": []}, resp) diff --git a/tests/test_httpproxy_asyncio.py b/tests/test_httpproxy_asyncio.py index d173ddfb..c8b02d62 100644 --- a/tests/test_httpproxy_asyncio.py +++ b/tests/test_httpproxy_asyncio.py @@ -225,7 +225,10 @@ def config_setup(self, 
config): self.proxy_context.set_ciphers("DEFAULT") def make_fake_proxy( - self, host: str, port: int, proxy_credentials: Optional[Tuple[str, str]], + self, + host: str, + port: int, + proxy_credentials: Optional[Tuple[str, str]], ) -> Tuple[EchoProtocol, MockTransport, "Task[Tuple[BaseTransport, Protocol]]"]: # Task[Tuple[MockTransport, MockProtocol]] From 214497a1584c37e57f01499628d123ed984bd271 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Tue, 13 Apr 2021 11:19:48 +0100 Subject: [PATCH 35/44] Be stricter about JSON that is accepted by Sygnal (#216) This disables the JSON extensions which Python supports by default (parsing of `Infinity` / `-Infinity` and `NaN`). These shouldn't be accepted since they're not technically valid JSON and other languages won't be able to interpret it properly. --- changelog.d/216.bugfix | 1 + sygnal/gcmpushkin.py | 7 +++---- sygnal/http.py | 4 ++-- sygnal/utils.py | 10 ++++++++++ 4 files changed, 16 insertions(+), 6 deletions(-) create mode 100644 changelog.d/216.bugfix diff --git a/changelog.d/216.bugfix b/changelog.d/216.bugfix new file mode 100644 index 00000000..bf9ad489 --- /dev/null +++ b/changelog.d/216.bugfix @@ -0,0 +1 @@ +Fix a long-standing bug where invalid JSON would be accepted over the HTTP interfaces. 
diff --git a/sygnal/gcmpushkin.py b/sygnal/gcmpushkin.py index 1967dd37..e3bbbec4 100644 --- a/sygnal/gcmpushkin.py +++ b/sygnal/gcmpushkin.py @@ -18,7 +18,6 @@ import logging import time from io import BytesIO -from json import JSONDecodeError from opentracing import logs, tags from prometheus_client import Counter, Gauge, Histogram @@ -33,7 +32,7 @@ ) from sygnal.helper.context_factory import ClientTLSOptionsFactory from sygnal.helper.proxy.proxyagent_twisted import ProxyAgent -from sygnal.utils import NotificationLoggerAdapter, twisted_sleep +from sygnal.utils import NotificationLoggerAdapter, json_decoder, twisted_sleep from .exceptions import PushkinSetupException from .notifications import ConcurrencyLimitedPushkin @@ -251,8 +250,8 @@ async def _request_dispatch(self, n, log, body, headers, pushkeys, span): return pushkeys, [] elif 200 <= response.code < 300: try: - resp_object = json.loads(response_text) - except JSONDecodeError: + resp_object = json_decoder.decode(response_text) + except ValueError: raise NotificationDispatchException("Invalid JSON response from GCM.") if "results" not in resp_object: log.error( diff --git a/sygnal/http.py b/sygnal/http.py index 7facc7be..5558ecb3 100644 --- a/sygnal/http.py +++ b/sygnal/http.py @@ -35,7 +35,7 @@ from twisted.web.server import NOT_DONE_YET from sygnal.notifications import NotificationContext -from sygnal.utils import NotificationLoggerAdapter +from sygnal.utils import NotificationLoggerAdapter, json_decoder from .exceptions import InvalidNotificationException, NotificationDispatchException from .notifications import Notification @@ -133,7 +133,7 @@ def _handle_request(self, request): log = NotificationLoggerAdapter(logger, {"request_id": request_id}) try: - body = json.loads(request.content.read()) + body = json_decoder.decode(request.content.read().decode("utf-8")) except Exception as exc: msg = "Expected JSON request body" log.warning(msg, exc_info=exc) diff --git a/sygnal/utils.py b/sygnal/utils.py index 
3ce3bfaa..fb71e673 100644 --- a/sygnal/utils.py +++ b/sygnal/utils.py @@ -12,6 +12,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import json import re from logging import LoggerAdapter @@ -62,3 +63,12 @@ def glob_to_regex(glob): # \A anchors at start of string, \Z at end of string return re.compile(r"\A" + res + r"\Z", re.IGNORECASE) + + +def _reject_invalid_json(val): + """Do not allow Infinity, -Infinity, or NaN values in JSON.""" + raise ValueError(f"Invalid JSON value: {val!r}") + + +# a custom JSON decoder which will reject Python extensions to JSON. +json_decoder = json.JSONDecoder(parse_constant=_reject_invalid_json) From 8d2dff002f08eb54bc1495aad644df61cf141aa6 Mon Sep 17 00:00:00 2001 From: Bruno Windels Date: Tue, 13 Apr 2021 15:45:12 +0200 Subject: [PATCH 36/44] WebPush support for urgency and coalescing (#213) * support dropping earlier notifications in the same room using the Topic header * set the Urgency header based on the notification prio field. 
--- changelog.d/213.feature | 1 + docs/applications.md | 19 ++++++++- sygnal/webpushpushkin.py | 83 +++++++++++++++++++++++++--------------- 3 files changed, 71 insertions(+), 32 deletions(-) create mode 100644 changelog.d/213.feature diff --git a/changelog.d/213.feature b/changelog.d/213.feature new file mode 100644 index 00000000..e57f317d --- /dev/null +++ b/changelog.d/213.feature @@ -0,0 +1 @@ +WebPush: add support for Urgency and Topic header \ No newline at end of file diff --git a/docs/applications.md b/docs/applications.md index 0e17012c..f04628a5 100644 --- a/docs/applications.md +++ b/docs/applications.md @@ -255,7 +255,24 @@ so it can be used to pass identifiers specific to your client ##### events_only -As of the time of writing, all webpush-supporting browsers require you to set `userVisibleOnly: true` when calling (`pushManager.subscribe`)[https://developer.mozilla.org/en-US/docs/Web/API/PushManager/subscribe], to (prevent abusing webpush to track users)[https://goo.gl/yqv4Q4] without their knowledge. With this (mandatory) flag, the browser will show a "site has been updated in the background" notification if no notifications are visible after your service worker processes a `push` event. This can easily happen when sygnal sends a push message to clear the unread count, which is not specific to an event. With `events_only: true` in the pusher data, sygnal won't forward any push message without a event id. This prevents your service worker being forced to show a notification to push messages that clear the unread count. +As of the time of writing, all webpush-supporting browsers require you to set +`userVisibleOnly: true` when calling (`pushManager.subscribe`) +[https://developer.mozilla.org/en-US/docs/Web/API/PushManager/subscribe], to +(prevent abusing webpush to track users)[https://goo.gl/yqv4Q4] without their +knowledge. 
With this (mandatory) flag, the browser will show a "site has been +updated in the background" notification if no notifications are visible after +your service worker processes a `push` event. This can easily happen when sygnal +sends a push message to clear the unread count, which is not specific +to an event. With `events_only: true` in the pusher data, sygnal won't forward +any push message without an event id. This prevents your service worker being +forced to show a notification to push messages that clear the unread count. + +##### only_last_per_room + +You can opt in to only receive the last notification per room by setting +`only_last_per_room: true` in the push data. Note that if the first notification +can be delivered before the second one is sent, you will still get both; +it only has an effect when notifications are queued up on the gateway. ##### Multiple pushers on one origin diff --git a/sygnal/webpushpushkin.py b/sygnal/webpushpushkin.py index faa9e843..34676e6d 100644 --- a/sygnal/webpushpushkin.py +++ b/sygnal/webpushpushkin.py @@ -15,6 +15,8 @@ import json import logging import os.path +from base64 import urlsafe_b64encode +from hashlib import blake2s from io import BytesIO from typing import List, Optional, Pattern from urllib.parse import urlparse @@ -102,7 +104,7 @@ def __init__(self, name, sygnal, config): contextFactory=tls_client_options_factory, proxy_url_str=proxy_url, ) - self.http_agent_wrapper = HttpAgentWrapper(self.http_agent) + self.http_request_factory = HttpRequestFactory() self.allowed_endpoints = None # type: Optional[List[Pattern]] allowed_endpoints = self.get_config("allowed_endpoints") @@ -173,6 +175,17 @@ async def _dispatch_notification_unlimited(self, n, device, context): payload = WebpushPushkin._build_payload(n, device) data = json.dumps(payload) + # web push only supports normal and low priority, so assume normal if absent + low_priority = n.prio == "low" + # allow dropping earlier notifications in the same room if 
requested + topic = None + if n.room_id and device.data.get("only_last_per_room") is True: + # ask for a 22 byte hash, so the base64 of it is 32, + # the limit webpush allows for the topic + topic = urlsafe_b64encode( + blake2s(n.room_id.encode(), digest_size=22).digest() + ) + # note that webpush modifies vapid_claims, so make sure it's only used once vapid_claims = { "sub": "mailto:{}".format(self.vapid_contact_email), @@ -186,15 +199,17 @@ async def _dispatch_notification_unlimited(self, n, device, context): try: with SEND_TIME_HISTOGRAM.time(): with ACTIVE_REQUESTS_GAUGE.track_inprogress(): - response_wrapper = webpush( + request = webpush( subscription_info=subscription_info, data=data, ttl=self.ttl, vapid_private_key=self.vapid_private_key, vapid_claims=vapid_claims, - requests_session=self.http_agent_wrapper, + requests_session=self.http_request_factory, + ) + response = await request.execute( + self.http_agent, low_priority, topic ) - response = await response_wrapper.deferred response_text = (await readBody(response)).decode() finally: self.connection_semaphore.release() @@ -314,14 +329,11 @@ def _handle_response(self, response, response_text, pushkey, endpoint_domain): return False -class HttpAgentWrapper: +class HttpRequestFactory: """ Provide a post method that matches the API expected from pywebpush. """ - def __init__(self, http_agent): - self.http_agent = http_agent - def post(self, endpoint, data, headers, timeout): """ Convert the requests-like API to a Twisted API call. @@ -336,35 +348,23 @@ def post(self, endpoint, data, headers, timeout): timeout (int) Ignored for now """ - body_producer = FileBodyProducer(BytesIO(data)) - # Convert the headers to the camelcase version. 
- headers = { - b"User-Agent": ["sygnal"], - b"Content-Encoding": [headers["content-encoding"]], - b"Authorization": [headers["authorization"]], - b"TTL": [headers["ttl"]], - } - deferred = self.http_agent.request( - b"POST", - endpoint.encode(), - headers=Headers(headers), - bodyProducer=body_producer, - ) - return HttpResponseWrapper(deferred) + return HttpDelayedRequest(endpoint, data, headers) -class HttpResponseWrapper: +class HttpDelayedRequest: """ - Provide a response object that matches the API expected from pywebpush. + Captures the values received from pywebpush for the endpoint request. + The request isn't immediately executed, to allow adding headers + not supported by pywebpush, like Topic and Urgency. + + Also provides the interface that pywebpush expects from a response object. pywebpush expects a synchronous API, while we use an asynchronous API. To keep pywebpush happy we present it with some hardcoded values that - make its assertions pass while the async network call is happening - in the background. + make its assertions pass even though the HTTP request has not yet been + made. Attributes: - deferred (Deferred): - The deferred to await the actual response after calling pywebpush. status_code (int): Defined to be 200 so the pywebpush check to see if is below 202 passes. @@ -375,5 +375,26 @@ class HttpResponseWrapper: status_code = 200 text = None - def __init__(self, deferred): - self.deferred = deferred + def __init__(self, endpoint, data, vapid_headers): + self.endpoint = endpoint + self.data = data + self.vapid_headers = vapid_headers + + def execute(self, http_agent, low_priority, topic): + body_producer = FileBodyProducer(BytesIO(self.data)) + # Convert the headers to the camelcase version. 
+ headers = { + b"User-Agent": ["sygnal"], + b"Content-Encoding": [self.vapid_headers["content-encoding"]], + b"Authorization": [self.vapid_headers["authorization"]], + b"TTL": [self.vapid_headers["ttl"]], + b"Urgency": ["low" if low_priority else "normal"], + } + if topic: + headers[b"Topic"] = [topic] + return http_agent.request( + b"POST", + self.endpoint.encode(), + headers=Headers(headers), + bodyProducer=body_producer, + ) From 62bdfeb788773572e47eb992f043f2c444fb764e Mon Sep 17 00:00:00 2001 From: Hillery Shay Date: Tue, 13 Apr 2021 10:48:09 -0700 Subject: [PATCH 37/44] Ported Buildkite checks to Github actions (#210) --- .github/workflows/changelog_check.yml | 13 ++++++ .github/workflows/pipeline.yml | 64 +++++++++++++++++++++++++++ changelog.d/210.misc | 1 + 3 files changed, 78 insertions(+) create mode 100644 .github/workflows/changelog_check.yml create mode 100644 .github/workflows/pipeline.yml create mode 100644 changelog.d/210.misc diff --git a/.github/workflows/changelog_check.yml b/.github/workflows/changelog_check.yml new file mode 100644 index 00000000..7430af51 --- /dev/null +++ b/.github/workflows/changelog_check.yml @@ -0,0 +1,13 @@ +Name: Changelog +on: [pull_request] + +jobs: + check-newsfile: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: "3.7" + - run: python -m pip install towncrier + - run: python -m towncrier.check \ No newline at end of file diff --git a/.github/workflows/pipeline.yml b/.github/workflows/pipeline.yml new file mode 100644 index 00000000..36f4a8ae --- /dev/null +++ b/.github/workflows/pipeline.yml @@ -0,0 +1,64 @@ +name: Linting and Tests +on: [push] +jobs: + check-code-style: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: "3.7" + - run: python -m pip install tox + - run: tox -e check_codestyle + + check-types-mypy: + runs-on: ubuntu-latest + steps: + - uses: 
actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: "3.7" + - run: python -m pip install tox + - run: tox -e check_types + + run-unit-tests-sqlite: + needs: [check-code-style, check-types-mypy] + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: "3.7" + - run: python -m pip install -e . + - run: trial tests + + run-unit-tests-postgres: + needs: [check-code-style, check-types-mypy] + runs-on: ubuntu-latest + + services: + postgres: + image: postgres:11 + ports: + - 5432:5432 + env: + POSTGRES_PASSWORD: "postgres" + POSTGRES_INITDB_ARGS: "--lc-collate C --lc-ctype C --encoding UTF8" + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: "3.7" + - run: python -m pip install -e . + - run: trial tests + env: + TEST_USE_POSTGRES: "yes" + TEST_POSTGRES_USER: "postgres" + TEST_POSTGRES_PASSWORD: "postgres" + TEST_POSTGRES_HOST: "localhost" \ No newline at end of file diff --git a/changelog.d/210.misc b/changelog.d/210.misc new file mode 100644 index 00000000..e4f74702 --- /dev/null +++ b/changelog.d/210.misc @@ -0,0 +1 @@ +Port CI checks to Github Actions. 
From 7a3d6f8f47229ce8d03499b186ca8f1906797843 Mon Sep 17 00:00:00 2001 From: kibablu Date: Tue, 13 Apr 2021 23:09:17 +0100 Subject: [PATCH 38/44] Document pip connection timeout error (#215) Signed-off-by: Omar Mohamed --- CONTRIBUTING.md | 4 ++++ changelog.d/215.doc | 1 + 2 files changed, 5 insertions(+) create mode 100644 changelog.d/215.doc diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index bd3c1d9c..34c4c06f 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -19,6 +19,10 @@ This creates an isolated virtual Python environment ("virtualenv") just for use with Sygnal, then installs Sygnal along with its dependencies, and lastly installs a handful of useful tools +If you get `ConnectTimeoutError`, this is caused by slow internet whereby +`pip` has a default time out of _15 sec_. You can specify a larger timeout +by passing `--timeout 120` to the `pip install` command above. + Finally, activate the virtualenv by running: ```bash diff --git a/changelog.d/215.doc b/changelog.d/215.doc new file mode 100644 index 00000000..f4df8853 --- /dev/null +++ b/changelog.d/215.doc @@ -0,0 +1 @@ +Document how to work around pip installation timeout errors. Contributed by Omar Mohamed. From ff1e98ed494cf388606c4dd33eb38a0f67593cc2 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Mon, 19 Apr 2021 14:23:19 +0100 Subject: [PATCH 39/44] Limit the size of requests received from HTTP clients (#220) Applies an arbitrary limit of 512K to incoming request bodies. 
--- changelog.d/220.bugfix | 1 + sygnal/http.py | 23 ++++++++++++++- sygnal/sygnal.py | 15 ++++------ tests/test_pushgateway_api_v1.py | 49 ++++++++++++++++++++++++++++++++ tests/testutils.py | 16 ++++++----- 5 files changed, 86 insertions(+), 18 deletions(-) create mode 100644 changelog.d/220.bugfix diff --git a/changelog.d/220.bugfix b/changelog.d/220.bugfix new file mode 100644 index 00000000..4a2963b2 --- /dev/null +++ b/changelog.d/220.bugfix @@ -0,0 +1 @@ +Limit the size of requests received from HTTP clients. diff --git a/sygnal/http.py b/sygnal/http.py index 5558ecb3..743a1fc7 100644 --- a/sygnal/http.py +++ b/sygnal/http.py @@ -306,6 +306,24 @@ def render_GET(self, request): return b"" +class SizeLimitingRequest(server.Request): + # Arbitrarily limited to 512 KiB. + MAX_REQUEST_SIZE = 512 * 1024 + + def handleContentChunk(self, data): + # we should have a content by now + assert self.content, "handleContentChunk() called before gotLength()" + if self.content.tell() + len(data) > self.MAX_REQUEST_SIZE: + logger.info( + "Aborting connection from %s because the request exceeds maximum size", + self.client.host, + ) + self.transport.abortConnection() + return + + return super().handleContentChunk(data) + + class SygnalLoggedSite(server.Site): """ A subclass of Site to perform access logging in a way that makes sense for @@ -354,5 +372,8 @@ def __init__(self, sygnal): ) self.site = SygnalLoggedSite( - root, reactor=sygnal.reactor, log_formatter=log_formatter + root, + reactor=sygnal.reactor, + log_formatter=log_formatter, + requestFactory=SizeLimitingRequest, ) diff --git a/sygnal/sygnal.py b/sygnal/sygnal.py index 25cb1fef..2fa4d628 100644 --- a/sygnal/sygnal.py +++ b/sygnal/sygnal.py @@ -198,7 +198,7 @@ async def _make_pushkin(self, app_name, app_config): clarse = getattr(pushkin_module, to_construct) return await clarse.create(app_name, self, app_config) - async def _make_pushkins_then_start(self, port, bind_addresses, pushgateway_api): + async def 
make_pushkins_then_start(self): for app_id, app_cfg in self.config["apps"].items(): try: self.pushkins[app_id] = await self._make_pushkin(app_id, app_cfg) @@ -215,7 +215,9 @@ async def _make_pushkins_then_start(self, port, bind_addresses, pushgateway_api) logger.info("Configured with app IDs: %r", self.pushkins.keys()) - for interface in bind_addresses: + pushgateway_api = PushGatewayApiServer(self) + port = int(self.config["http"]["port"]) + for interface in self.config["http"]["bind_addresses"]: logger.info("Starting listening on %s port %d", interface, port) self.reactor.listenTCP(port, pushgateway_api.site, interface=interface) @@ -223,18 +225,11 @@ def run(self): """ Attempt to run Sygnal and then exit the application. """ - port = int(self.config["http"]["port"]) - bind_addresses = self.config["http"]["bind_addresses"] - pushgateway_api = PushGatewayApiServer(self) @defer.inlineCallbacks def start(): try: - yield ensureDeferred( - self._make_pushkins_then_start( - port, bind_addresses, pushgateway_api - ) - ) + yield ensureDeferred(self.make_pushkins_then_start()) except Exception: # Print the exception and bail out. print("Error during startup:", file=sys.stderr) diff --git a/tests/test_pushgateway_api_v1.py b/tests/test_pushgateway_api_v1.py index ffd6a841..6b2be88a 100644 --- a/tests/test_pushgateway_api_v1.py +++ b/tests/test_pushgateway_api_v1.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. +from twisted.internet.address import IPv6Address +from twisted.internet.testing import StringTransport + from sygnal.exceptions import ( NotificationDispatchException, TemporaryNotificationDispatchException, @@ -183,3 +186,49 @@ def test_remote_errors_give_502(self): ), 502, ) + + def test_overlong_requests_are_rejected(self): + # as a control case, first send a regular request. + + # connect the site to a fake transport. 
+ transport = StringTransport() + protocol = self.site.buildProtocol(IPv6Address("TCP", "::1", "2345")) + protocol.makeConnection(transport) + + protocol.dataReceived( + b"POST / HTTP/1.1\r\n" + b"Connection: close\r\n" + b"Transfer-Encoding: chunked\r\n" + b"\r\n" + b"0\r\n" + b"\r\n" + ) + + # we should get a 404 + self.assertRegex(transport.value().decode(), r"^HTTP/1\.1 404 ") + + # now send an oversized request + transport = StringTransport() + protocol = self.site.buildProtocol(IPv6Address("TCP", "::1", "2345")) + protocol.makeConnection(transport) + + protocol.dataReceived( + b"POST / HTTP/1.1\r\n" + b"Connection: close\r\n" + b"Transfer-Encoding: chunked\r\n" + b"\r\n" + ) + + # we deliberately send all the data in one big chunk, to ensure that + # twisted isn't buffering the data in the chunked transfer decoder. + # we start with the chunk size, in hex. (We won't actually send this much) + protocol.dataReceived(b"10000000\r\n") + sent = 0 + while not transport.disconnected: + self.assertLess(sent, 0x10000000, "connection did not drop") + protocol.dataReceived(b"\0" * 1024) + sent += 1024 + + # default max upload size is 512K, so it should drop on the next buffer after + # that. 
+ self.assertEqual(sent, 513 * 1024) diff --git a/tests/testutils.py b/tests/testutils.py index 6b5e1d51..dabe8f89 100644 --- a/tests/testutils.py +++ b/tests/testutils.py @@ -31,7 +31,6 @@ from twisted.web.server import Request from zope.interface.declarations import implementer -from sygnal.http import PushGatewayApiServer from sygnal.sygnal import CONFIG_DEFAULTS, Sygnal, merge_left_with_defaults REQ_PATH = b"/_matrix/push/v1/notify" @@ -130,17 +129,20 @@ def setUp(self): self.sygnal = Sygnal(config, reactor) self.reactor = reactor self.sygnal.database.start() - self.v1api = PushGatewayApiServer(self.sygnal) - start_deferred = ensureDeferred( - self.sygnal._make_pushkins_then_start(0, [], None) - ) + start_deferred = ensureDeferred(self.sygnal.make_pushkins_then_start()) while not start_deferred.called: # we need to advance until the pushkins have started up self.sygnal.reactor.advance(1) self.sygnal.reactor.wait_for_work(lambda: start_deferred.called) + # sygnal should have started a single (fake) tcp listener + listeners = self.reactor.tcpServers + self.assertEqual(len(listeners), 1) + (port, site, _backlog, interface) = listeners[0] + self.site = site + def tearDown(self): super().tearDown() self.sygnal.database.close() @@ -205,7 +207,7 @@ def _request(self, payload: Union[str, dict]) -> Union[dict, int]: payload = json.dumps(payload) content = BytesIO(payload.encode()) - channel = FakeChannel(self.v1api.site, self.sygnal.reactor) + channel = FakeChannel(self.site, self.sygnal.reactor) channel.process_request(b"POST", REQ_PATH, content) while not channel.done: @@ -245,7 +247,7 @@ def dump_if_needed(payload): contents = [BytesIO(dump_if_needed(payload).encode()) for payload in payloads] - channels = [FakeChannel(self.v1api.site, self.sygnal.reactor) for _ in contents] + channels = [FakeChannel(self.site, self.sygnal.reactor) for _ in contents] for channel, content in zip(channels, contents): channel.process_request(b"POST", REQ_PATH, content) From 
16ccfaaa4df19368c63727c19cd36cd04ca60223 Mon Sep 17 00:00:00 2001 From: Hillery Shay Date: Mon, 19 Apr 2021 11:57:53 -0700 Subject: [PATCH 40/44] fix towncrier fail in github actions (#219) --- .github/workflows/changelog_check.yml | 5 ++++- changelog.d/219.misc | 1 + 2 files changed, 5 insertions(+), 1 deletion(-) create mode 100644 changelog.d/219.misc diff --git a/.github/workflows/changelog_check.yml b/.github/workflows/changelog_check.yml index 7430af51..74c6c6ea 100644 --- a/.github/workflows/changelog_check.yml +++ b/.github/workflows/changelog_check.yml @@ -1,4 +1,4 @@ -Name: Changelog +name: Changelog on: [pull_request] jobs: @@ -6,6 +6,9 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 + with: + fetch-depth: 0 + ref: ${{github.event.pull_request.head.sha}} - uses: actions/setup-python@v2 with: python-version: "3.7" diff --git a/changelog.d/219.misc b/changelog.d/219.misc new file mode 100644 index 00000000..e4f74702 --- /dev/null +++ b/changelog.d/219.misc @@ -0,0 +1 @@ +Port CI checks to Github Actions. 
From b389bc69f80d14965c13f887b62b395de0e8f1e2 Mon Sep 17 00:00:00 2001 From: Hillery Shay Date: Thu, 22 Apr 2021 09:26:30 -0700 Subject: [PATCH 41/44] setup coverage.py in tox env (#217) Signed-off-by: Hillery Shay shaysquared@gmail.com --- .gitignore | 2 ++ changelog.d/217.misc | 1 + setup.py | 1 + tox.ini | 14 +++++++++----- 4 files changed, 13 insertions(+), 5 deletions(-) create mode 100644 changelog.d/217.misc diff --git a/.gitignore b/.gitignore index edd2ecbb..918c5dab 100644 --- a/.gitignore +++ b/.gitignore @@ -4,6 +4,7 @@ var/ sygnal.pid sygnal.db _trial_temp* +.coverage* /venv/ /.venv/ @@ -14,3 +15,4 @@ _trial_temp* /dist /.tox /.python-version +/htmlcov diff --git a/changelog.d/217.misc b/changelog.d/217.misc new file mode 100644 index 00000000..11cc10d5 --- /dev/null +++ b/changelog.d/217.misc @@ -0,0 +1 @@ +setup coverage.py to run in tox environment and add html reports \ No newline at end of file diff --git a/setup.py b/setup.py index 551eefe5..5f77a286 100755 --- a/setup.py +++ b/setup.py @@ -55,6 +55,7 @@ def read(fname): ], extras_require={ "dev": [ + "coverage~=5.5", "black==20.8b1", "flake8==3.9.0", "isort~=5.0", diff --git a/tox.ini b/tox.ini index b5edce5c..d8980036 100644 --- a/tox.ini +++ b/tox.ini @@ -12,18 +12,22 @@ envlist = py, check_codestyle, check_types # creates a symlink to the project directory instead of unpacking the sdist. 
usedevelop=true -commands = trial tests +extras = + dev + +commands = + coverage run --source=sygnal -m twisted.trial tests + coverage report --sort=cover + coverage html [testenv:check_codestyle] -extras = - dev + commands = flake8 sygnal/ tests/ setup.py black --check --diff sygnal/ tests/ setup.py isort --check-only --diff sygnal/ tests/ setup.py [testenv:check_types] -extras = - dev + commands = mypy sygnal/ tests/ setup.py From ef342a7a320f33bece58e6676d18dc1d499bb248 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 22 Apr 2021 17:29:41 +0100 Subject: [PATCH 42/44] 0.9.3 --- CHANGELOG.md | 42 +++++++++++++++++++++++++++++++++++++++++ changelog.d/188.doc | 1 - changelog.d/193.doc | 1 - changelog.d/193.misc | 1 - changelog.d/194.misc | 1 - changelog.d/207.misc | 1 - changelog.d/208.docker | 1 - changelog.d/210.misc | 1 - changelog.d/212.feature | 1 - changelog.d/213.feature | 1 - changelog.d/214.misc | 1 - changelog.d/215.doc | 1 - changelog.d/216.bugfix | 1 - changelog.d/217.misc | 1 - changelog.d/219.misc | 1 - changelog.d/220.bugfix | 1 - 16 files changed, 42 insertions(+), 15 deletions(-) delete mode 100644 changelog.d/188.doc delete mode 100644 changelog.d/193.doc delete mode 100644 changelog.d/193.misc delete mode 100644 changelog.d/194.misc delete mode 100644 changelog.d/207.misc delete mode 100644 changelog.d/208.docker delete mode 100644 changelog.d/210.misc delete mode 100644 changelog.d/212.feature delete mode 100644 changelog.d/213.feature delete mode 100644 changelog.d/214.misc delete mode 100644 changelog.d/215.doc delete mode 100644 changelog.d/216.bugfix delete mode 100644 changelog.d/217.misc delete mode 100644 changelog.d/219.misc delete mode 100644 changelog.d/220.bugfix diff --git a/CHANGELOG.md b/CHANGELOG.md index e53c2cb5..dc38c967 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,45 @@ +Sygnal 0.9.3 (2021-04-22) +========================= + +Features +-------- + +- Prevent the push key from being rejected for temporary 
errors and oversized payloads, add TTL logging, and support events_only push data flag. ([\#212](https://github.com/matrix-org/sygnal/issues/212)) +- WebPush: add support for Urgency and Topic header ([\#213](https://github.com/matrix-org/sygnal/issues/213)) + + +Bugfixes +-------- + +- Fix a long-standing bug where invalid JSON would be accepted over the HTTP interfaces. ([\#216](https://github.com/matrix-org/sygnal/issues/216)) +- Limit the size of requests received from HTTP clients. ([\#220](https://github.com/matrix-org/sygnal/issues/220)) + + +Updates to the Docker image +--------------------------- + +- Remove manually added GeoTrust Root CA certificate from docker image as Apple is no longer using it. ([\#208](https://github.com/matrix-org/sygnal/issues/208)) + + +Improved Documentation +---------------------- + +- Make CONTIBUTING.md more explicit about how to get tests passing. ([\#188](https://github.com/matrix-org/sygnal/issues/188)) +- Update CONTRIBUTING.md to specify how to run code style and type checks with Tox, and add formatting to code block samples. ([\#193](https://github.com/matrix-org/sygnal/issues/193)) +- Document how to work around pip installation timeout errors. Contributed by Omar Mohamed. ([\#215](https://github.com/matrix-org/sygnal/issues/215)) + + +Internal Changes +---------------- + +- Updated Tox to run in the installed version of Python (instead of specifying Python 3.7) and to consider specific paths and folders while running checks, instead of the whole repository (potentially including unwanted files and folders, e.g. the virtual environment). ([\#193](https://github.com/matrix-org/sygnal/issues/193)) +- Development dependencies are now avaible as extras. Contributed by Hillery Shay. ([\#194](https://github.com/matrix-org/sygnal/issues/194)) +- setup.py specifies that a minimum version of Python greater or equal to 3.7 is required. 
Contributed by Tawanda Moyo ([\#207](https://github.com/matrix-org/sygnal/issues/207)) +- Port CI checks to Github Actions. ([\#210](https://github.com/matrix-org/sygnal/issues/210), [\#219](https://github.com/matrix-org/sygnal/issues/219)) +- Upgrade development dependencies fixes #199. Contributed by Omar Mohamed ([\#214](https://github.com/matrix-org/sygnal/issues/214)) +- setup coverage.py to run in tox environment and add html reports ([\#217](https://github.com/matrix-org/sygnal/issues/217)) + + Sygnal v0.9.2 (2021-03-29) ========================== diff --git a/changelog.d/188.doc b/changelog.d/188.doc deleted file mode 100644 index ba0361a1..00000000 --- a/changelog.d/188.doc +++ /dev/null @@ -1 +0,0 @@ -Make CONTIBUTING.md more explicit about how to get tests passing. diff --git a/changelog.d/193.doc b/changelog.d/193.doc deleted file mode 100644 index 5e48a1df..00000000 --- a/changelog.d/193.doc +++ /dev/null @@ -1 +0,0 @@ -Update CONTRIBUTING.md to specify how to run code style and type checks with Tox, and add formatting to code block samples. diff --git a/changelog.d/193.misc b/changelog.d/193.misc deleted file mode 100644 index 3f50574f..00000000 --- a/changelog.d/193.misc +++ /dev/null @@ -1 +0,0 @@ -Updated Tox to run in the installed version of Python (instead of specifying Python 3.7) and to consider specific paths and folders while running checks, instead of the whole repository (potentially including unwanted files and folders, e.g. the virtual environment). diff --git a/changelog.d/194.misc b/changelog.d/194.misc deleted file mode 100644 index 3152633f..00000000 --- a/changelog.d/194.misc +++ /dev/null @@ -1 +0,0 @@ -Development dependencies are now avaible as extras. Contributed by Hillery Shay. diff --git a/changelog.d/207.misc b/changelog.d/207.misc deleted file mode 100644 index f1d3a241..00000000 --- a/changelog.d/207.misc +++ /dev/null @@ -1 +0,0 @@ -setup.py specifies that a minimum version of Python greater or equal to 3.7 is required. 
Contributed by Tawanda Moyo diff --git a/changelog.d/208.docker b/changelog.d/208.docker deleted file mode 100644 index 5b29e98a..00000000 --- a/changelog.d/208.docker +++ /dev/null @@ -1 +0,0 @@ -Remove manually added GeoTrust Root CA certificate from docker image as Apple is no longer using it. diff --git a/changelog.d/210.misc b/changelog.d/210.misc deleted file mode 100644 index e4f74702..00000000 --- a/changelog.d/210.misc +++ /dev/null @@ -1 +0,0 @@ -Port CI checks to Github Actions. diff --git a/changelog.d/212.feature b/changelog.d/212.feature deleted file mode 100644 index d615e171..00000000 --- a/changelog.d/212.feature +++ /dev/null @@ -1 +0,0 @@ -Prevent the push key from being rejected for temporary errors and oversized payloads, add TTL logging, and support events_only push data flag. \ No newline at end of file diff --git a/changelog.d/213.feature b/changelog.d/213.feature deleted file mode 100644 index e57f317d..00000000 --- a/changelog.d/213.feature +++ /dev/null @@ -1 +0,0 @@ -WebPush: add support for Urgency and Topic header \ No newline at end of file diff --git a/changelog.d/214.misc b/changelog.d/214.misc deleted file mode 100644 index 19149774..00000000 --- a/changelog.d/214.misc +++ /dev/null @@ -1 +0,0 @@ -Upgrade development dependencies fixes #199. Contributed by Omar Mohamed diff --git a/changelog.d/215.doc b/changelog.d/215.doc deleted file mode 100644 index f4df8853..00000000 --- a/changelog.d/215.doc +++ /dev/null @@ -1 +0,0 @@ -Document how to work around pip installation timeout errors. Contributed by Omar Mohamed. diff --git a/changelog.d/216.bugfix b/changelog.d/216.bugfix deleted file mode 100644 index bf9ad489..00000000 --- a/changelog.d/216.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a long-standing bug where invalid JSON would be accepted over the HTTP interfaces. 
diff --git a/changelog.d/217.misc b/changelog.d/217.misc deleted file mode 100644 index 11cc10d5..00000000 --- a/changelog.d/217.misc +++ /dev/null @@ -1 +0,0 @@ -setup coverage.py to run in tox environment and add html reports \ No newline at end of file diff --git a/changelog.d/219.misc b/changelog.d/219.misc deleted file mode 100644 index e4f74702..00000000 --- a/changelog.d/219.misc +++ /dev/null @@ -1 +0,0 @@ -Port CI checks to Github Actions. diff --git a/changelog.d/220.bugfix b/changelog.d/220.bugfix deleted file mode 100644 index 4a2963b2..00000000 --- a/changelog.d/220.bugfix +++ /dev/null @@ -1 +0,0 @@ -Limit the size of requests received from HTTP clients. From c5eb9974c1210f566a4e790dd762e253aea85015 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 22 Apr 2021 17:32:46 +0100 Subject: [PATCH 43/44] minor changelog edits --- CHANGELOG.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index dc38c967..69c5b44f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -32,12 +32,12 @@ Improved Documentation Internal Changes ---------------- -- Updated Tox to run in the installed version of Python (instead of specifying Python 3.7) and to consider specific paths and folders while running checks, instead of the whole repository (potentially including unwanted files and folders, e.g. the virtual environment). ([\#193](https://github.com/matrix-org/sygnal/issues/193)) -- Development dependencies are now avaible as extras. Contributed by Hillery Shay. ([\#194](https://github.com/matrix-org/sygnal/issues/194)) -- setup.py specifies that a minimum version of Python greater or equal to 3.7 is required. 
Contributed by Tawanda Moyo ([\#207](https://github.com/matrix-org/sygnal/issues/207)) +- Update Tox to run in the installed version of Python (instead of specifying Python 3.7) and to consider specific paths and folders while running checks, instead of the whole repository (potentially including unwanted files and folders, e.g. the virtual environment). ([\#193](https://github.com/matrix-org/sygnal/issues/193)) +- Make development dependencies available as extras. Contributed by Hillery Shay. ([\#194](https://github.com/matrix-org/sygnal/issues/194)) +- Update `setup.py` to specify that a minimum version of Python greater or equal to 3.7 is required. Contributed by Tawanda Moyo. ([\#207](https://github.com/matrix-org/sygnal/issues/207)) - Port CI checks to Github Actions. ([\#210](https://github.com/matrix-org/sygnal/issues/210), [\#219](https://github.com/matrix-org/sygnal/issues/219)) - Upgrade development dependencies fixes #199. Contributed by Omar Mohamed ([\#214](https://github.com/matrix-org/sygnal/issues/214)) -- setup coverage.py to run in tox environment and add html reports ([\#217](https://github.com/matrix-org/sygnal/issues/217)) +- Set up `coverage.py` to run in tox environment, and add html reports ([\#217](https://github.com/matrix-org/sygnal/issues/217)) Sygnal v0.9.2 (2021-03-29) From 860cda0f1b043ba81eea100d73cedc1ee452d232 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 22 Apr 2021 17:44:35 +0100 Subject: [PATCH 44/44] more minor changelog edits --- CHANGELOG.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 69c5b44f..22debda6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,7 +4,7 @@ Sygnal 0.9.3 (2021-04-22) Features -------- -- Prevent the push key from being rejected for temporary errors and oversized payloads, add TTL logging, and support events_only push data flag. 
([\#212](https://github.com/matrix-org/sygnal/issues/212)) +- Prevent the push key from being rejected for temporary errors and oversized payloads, add TTL logging, and support `events_only` push data flag. ([\#212](https://github.com/matrix-org/sygnal/issues/212)) - WebPush: add support for Urgency and Topic header ([\#213](https://github.com/matrix-org/sygnal/issues/213)) @@ -24,8 +24,8 @@ Updates to the Docker image Improved Documentation ---------------------- -- Make CONTIBUTING.md more explicit about how to get tests passing. ([\#188](https://github.com/matrix-org/sygnal/issues/188)) -- Update CONTRIBUTING.md to specify how to run code style and type checks with Tox, and add formatting to code block samples. ([\#193](https://github.com/matrix-org/sygnal/issues/193)) +- Make `CONTIBUTING.md` more explicit about how to get tests passing. ([\#188](https://github.com/matrix-org/sygnal/issues/188)) +- Update `CONTRIBUTING.md` to specify how to run code style and type checks with Tox, and add formatting to code block samples. ([\#193](https://github.com/matrix-org/sygnal/issues/193)) - Document how to work around pip installation timeout errors. Contributed by Omar Mohamed. ([\#215](https://github.com/matrix-org/sygnal/issues/215)) @@ -36,7 +36,7 @@ Internal Changes - Make development dependencies available as extras. Contributed by Hillery Shay. ([\#194](https://github.com/matrix-org/sygnal/issues/194)) - Update `setup.py` to specify that a minimum version of Python greater or equal to 3.7 is required. Contributed by Tawanda Moyo. ([\#207](https://github.com/matrix-org/sygnal/issues/207)) - Port CI checks to Github Actions. ([\#210](https://github.com/matrix-org/sygnal/issues/210), [\#219](https://github.com/matrix-org/sygnal/issues/219)) -- Upgrade development dependencies fixes #199. Contributed by Omar Mohamed ([\#214](https://github.com/matrix-org/sygnal/issues/214)) +- Upgrade development dependencies. 
Contributed by Omar Mohamed ([\#214](https://github.com/matrix-org/sygnal/issues/214)) - Set up `coverage.py` to run in tox environment, and add html reports ([\#217](https://github.com/matrix-org/sygnal/issues/217))