From cb8d568cf90bb0f5f07ee9e7e6796ad7cd83361f Mon Sep 17 00:00:00 2001
From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com>
Date: Wed, 3 Jul 2019 13:40:45 +0100
Subject: [PATCH 1/5] Fix 'utime went backwards' errors on daemonization.
 (#5609)

* Fix 'utime went backwards' errors on daemonization.

Fixes #5608

* remove spurious debug
---
 changelog.d/5609.bugfix | 1 +
 synapse/app/_base.py | 57 ++++++++++++++++++++------------------
 synapse/util/logcontext.py | 17 +++++++++---
 3 files changed, 44 insertions(+), 31 deletions(-)
 create mode 100644 changelog.d/5609.bugfix

diff --git a/changelog.d/5609.bugfix b/changelog.d/5609.bugfix
new file mode 100644
index 000000000000..534ee22a1b3f
--- /dev/null
+++ b/changelog.d/5609.bugfix
@@ -0,0 +1 @@
+Fix 'utime went backwards' errors on daemonization.
diff --git a/synapse/app/_base.py b/synapse/app/_base.py
index d50a9840d4f6..9b5c30f50665 100644
--- a/synapse/app/_base.py
+++ b/synapse/app/_base.py
@@ -93,33 +93,36 @@ def start_reactor(
     install_dns_limiter(reactor)

     def run():
-        # make sure that we run the reactor with the sentinel log context,
-        # otherwise other PreserveLoggingContext instances will get confused
-        # and complain when they see the logcontext arbitrarily swapping
-        # between the sentinel and `run` logcontexts.
-        with PreserveLoggingContext():
-            logger.info("Running")
-
-            change_resource_limit(soft_file_limit)
-            if gc_thresholds:
-                gc.set_threshold(*gc_thresholds)
-            reactor.run()
-
-    if daemonize:
-        if print_pidfile:
-            print(pid_file)
-
-        daemon = Daemonize(
-            app=appname,
-            pid=pid_file,
-            action=run,
-            auto_close_fds=False,
-            verbose=True,
-            logger=logger,
-        )
-        daemon.start()
-    else:
-        run()
+        logger.info("Running")
+        change_resource_limit(soft_file_limit)
+        if gc_thresholds:
+            gc.set_threshold(*gc_thresholds)
+        reactor.run()
+
+    # make sure that we run the reactor with the sentinel log context,
+    # otherwise other PreserveLoggingContext instances will get confused
+    # and complain when they see the logcontext arbitrarily swapping
+    # between the sentinel and `run` logcontexts.
+    #
+    # We also need to drop the logcontext before forking if we're daemonizing,
+    # otherwise the cputime metrics get confused about the per-thread resource usage
+    # appearing to go backwards.
+    with PreserveLoggingContext():
+        if daemonize:
+            if print_pidfile:
+                print(pid_file)
+
+            daemon = Daemonize(
+                app=appname,
+                pid=pid_file,
+                action=run,
+                auto_close_fds=False,
+                verbose=True,
+                logger=logger,
+            )
+            daemon.start()
+        else:
+            run()


 def quit_with_error(error_string):
diff --git a/synapse/util/logcontext.py b/synapse/util/logcontext.py
index 9e1b53780405..30dfa1d6b2ed 100644
--- a/synapse/util/logcontext.py
+++ b/synapse/util/logcontext.py
@@ -52,6 +52,15 @@ def get_thread_resource_usage():
         return None


+# get an id for the current thread.
+#
+# threading.get_ident doesn't actually return an OS-level tid, and annoyingly,
+# on Linux it actually returns the same value either side of a fork() call. However
+# we only fork in one place, so it's not worth the hoop-jumping to get a real tid.
+#
+get_thread_id = threading.get_ident
+
+
 class ContextResourceUsage(object):
     """Object for tracking the resources used by a log context

@@ -225,7 +234,7 @@ def __init__(self, name=None, parent_context=None, request=None):
         # became active.
         self.usage_start = None

-        self.main_thread = threading.current_thread()
+        self.main_thread = get_thread_id()
         self.request = None
         self.tag = ""
         self.alive = True
@@ -318,7 +327,7 @@ def copy_to(self, record):
             record.request = self.request

     def start(self):
-        if threading.current_thread() is not self.main_thread:
+        if get_thread_id() != self.main_thread:
             logger.warning("Started logcontext %s on different thread", self)
             return

@@ -328,7 +337,7 @@ def start(self):
             self.usage_start = get_thread_resource_usage()

     def stop(self):
-        if threading.current_thread() is not self.main_thread:
+        if get_thread_id() != self.main_thread:
             logger.warning("Stopped logcontext %s on different thread", self)
             return

@@ -355,7 +364,7 @@ def get_resource_usage(self):
         # If we are on the correct thread and we're currently running then we
         # can include resource usage so far.
-        is_main_thread = threading.current_thread() is self.main_thread
+        is_main_thread = get_thread_id() == self.main_thread
         if self.alive and self.usage_start and is_main_thread:
             utime_delta, stime_delta = self._get_cputime()
             res.ru_utime += utime_delta

From 463b072b1290a8cb75bc1d7b6688fa76bbfdb14f Mon Sep 17 00:00:00 2001
From: Amber Brown
Date: Thu, 4 Jul 2019 00:07:04 +1000
Subject: [PATCH 2/5] Move logging utilities out of the side drawer of util/
 and into logging/ (#5606)

---
 changelog.d/5606.misc | 1 +
 contrib/example_log_config.yaml | 8 +--
 contrib/experiments/test_messaging.py | 2 +-
 contrib/systemd/log_config.yaml | 2 +-
 debian/changelog | 3 ++
 debian/log.yaml | 2 +-
 docker/conf/log.config | 2 +-
 docs/log_contexts.rst | 38 +++++++-------
 synapse/app/_base.py | 2 +-
 synapse/app/appservice.py | 2 +-
 synapse/app/client_reader.py | 2 +-
 synapse/app/event_creator.py | 2 +-
 synapse/app/federation_reader.py | 2 +-
 synapse/app/federation_sender.py | 2 +-
 synapse/app/frontend_proxy.py | 2 +-
 synapse/app/homeserver.py | 2 +-
 synapse/app/media_repository.py | 2 +-
 synapse/app/pusher.py | 2 +-
 synapse/app/synchrotron.py | 2 +-
 synapse/app/user_dir.py | 2 +-
 synapse/appservice/scheduler.py | 2 +-
 synapse/config/logger.py | 4 +-
 synapse/crypto/keyring.py | 15 +++---
 synapse/events/snapshot.py | 2 +-
 synapse/federation/federation_base.py | 24 +++++----
 synapse/federation/federation_client.py | 8 +--
 synapse/federation/federation_server.py | 4 +-
 synapse/federation/persistence.py | 2 +-
 synapse/federation/sender/__init__.py | 12 +++--
 synapse/federation/transport/client.py | 2 +-
 synapse/federation/transport/server.py | 2 +-
 synapse/groups/attestations.py | 2 +-
 synapse/handlers/account_validity.py | 2 +-
 synapse/handlers/appservice.py | 2 +-
 synapse/handlers/auth.py | 6 +--
 synapse/handlers/e2e_keys.py | 2 +-
 synapse/handlers/events.py | 2 +-
 synapse/handlers/federation.py | 43 ++++++++--------
 synapse/handlers/initial_sync.py | 2 +-
 synapse/handlers/message.py | 2 +-
 synapse/handlers/pagination.py | 2 +-
 synapse/handlers/presence.py | 4 +-
 synapse/handlers/sync.py | 2 +-
 synapse/handlers/typing.py | 2 +-
 synapse/http/client.py | 2 +-
 .../federation/matrix_federation_agent.py | 2 +-
 synapse/http/federation/srv_resolver.py | 2 +-
 synapse/http/matrixfederationclient.py | 2 +-
 synapse/http/request_metrics.py | 2 +-
 synapse/http/server.py | 2 +-
 synapse/http/site.py | 2 +-
 synapse/logging/__init__.py | 0
 .../logcontext.py => logging/context.py} | 0
 .../logformatter.py => logging/formatter.py} | 0
 .../{util/logutils.py => logging/utils.py} | 0
 synapse/metrics/background_process_metrics.py | 2 +-
 synapse/notifier.py | 4 +-
 synapse/push/mailer.py | 2 +-
 synapse/replication/tcp/protocol.py | 2 +-
 synapse/rest/client/transactions.py | 2 +-
 synapse/rest/media/v1/_base.py | 6 +--
 synapse/rest/media/v1/media_repository.py | 12 ++---
 synapse/rest/media/v1/media_storage.py | 5 +-
 synapse/rest/media/v1/preview_url_resource.py | 2 +-
 synapse/rest/media/v1/storage_provider.py | 5 +-
 synapse/state/__init__.py | 2 +-
 synapse/storage/_base.py | 2 +-
 synapse/storage/events.py | 4 +-
 synapse/storage/events_worker.py | 6 +--
 synapse/storage/stream.py | 2 +-
 synapse/util/__init__.py | 12 +++--
 synapse/util/async_helpers.py | 9 ++--
 synapse/util/caches/descriptors.py | 11 +++--
 synapse/util/caches/response_cache.py | 4 +-
 synapse/util/distributor.py | 2 +-
 synapse/util/file_consumer.py | 2 +-
 synapse/util/metrics.py | 2 +-
 synapse/util/ratelimitutils.py | 2 +-
 synapse/util/retryutils.py | 4 +-
 tests/appservice/test_scheduler.py | 2 +-
 tests/crypto/test_keyring.py | 13 +++--
 .../test_matrix_federation_agent.py | 2 +-
 tests/http/federation/test_srv_resolver.py | 2 +-
 tests/http/test_fedclient.py | 2 +-
 tests/patch_inline_callbacks.py | 2 +-
 tests/push/test_http.py | 2 +-
 tests/rest/client/test_transactions.py | 2 +-
 tests/rest/media/v1/test_media_storage.py | 2 +-
 tests/test_federation.py | 2 +-
 tests/test_server.py | 2 +-
 tests/test_utils/logging_setup.py | 2 +-
 tests/unittest.py | 2 +-
 tests/util/caches/test_descriptors.py | 49 ++++++++-----------
 tests/util/test_async_utils.py | 7 ++-
 tests/util/test_linearizer.py | 9 ++--
 tests/util/test_logcontext.py | 24 +++++----
 tests/util/test_logformatter.py | 2 +-
 tests/utils.py | 2 +-
 98 files changed, 249 insertions(+), 233 deletions(-)
 create mode 100644 changelog.d/5606.misc
 create mode 100644 synapse/logging/__init__.py
 rename synapse/{util/logcontext.py => logging/context.py} (100%)
 rename synapse/{util/logformatter.py => logging/formatter.py} (100%)
 rename synapse/{util/logutils.py => logging/utils.py} (100%)

diff --git a/changelog.d/5606.misc b/changelog.d/5606.misc
new file mode 100644
index 000000000000..bb3c02816789
--- /dev/null
+++ b/changelog.d/5606.misc
@@ -0,0 +1 @@
+Move logging code out of `synapse.util` and into `synapse.logging`.
diff --git a/contrib/example_log_config.yaml b/contrib/example_log_config.yaml
index 06592963daee..3a76a7a33fa6 100644
--- a/contrib/example_log_config.yaml
+++ b/contrib/example_log_config.yaml
@@ -1,7 +1,7 @@
-# Example log_config file for synapse. To enable, point `log_config` to it in 
+# Example log_config file for synapse. To enable, point `log_config` to it in
 # `homeserver.yaml`, and restart synapse.
 #
-# This configuration will produce similar results to the defaults within 
+# This configuration will produce similar results to the defaults within
 # synapse, but can be edited to give more flexibility.
 version: 1

@@ -12,7 +12,7 @@ formatters:

 filters:
   context:
-    (): synapse.util.logcontext.LoggingContextFilter
+    (): synapse.logging.context.LoggingContextFilter
     request: ""

 handlers:
@@ -35,7 +35,7 @@ handlers:
 root:
     level: INFO
     handlers: [console] # to use file handler instead, switch to [file]
-    
+
 loggers:
     synapse:
         level: INFO
diff --git a/contrib/experiments/test_messaging.py b/contrib/experiments/test_messaging.py
index c7e55d8aa7fd..5ef140ae48a7 100644
--- a/contrib/experiments/test_messaging.py
+++ b/contrib/experiments/test_messaging.py
@@ -36,7 +36,7 @@
 from synapse.app.homeserver import SynapseHomeServer

-# from synapse.util.logutils import log_function
+# from synapse.logging.utils import log_function

 from twisted.internet import reactor, defer
 from twisted.python import log
diff --git a/contrib/systemd/log_config.yaml b/contrib/systemd/log_config.yaml
index d85bdd1208f9..22f67a50ce0f 100644
--- a/contrib/systemd/log_config.yaml
+++ b/contrib/systemd/log_config.yaml
@@ -8,7 +8,7 @@ formatters:

 filters:
   context:
-    (): synapse.util.logcontext.LoggingContextFilter
+    (): synapse.logging.context.LoggingContextFilter
     request: ""

 handlers:
diff --git a/debian/changelog b/debian/changelog
index 91653e724356..bf5a59b33540 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -3,6 +3,9 @@ matrix-synapse-py3 (1.0.0+nmu1) UNRELEASED; urgency=medium
   [ Silke Hofstra ]
   * Include systemd-python to allow logging to the systemd journal.

+  [ Amber Brown ]
+  * Update logging config defaults to match API changes in Synapse.
+
 -- Silke Hofstra  Wed, 29 May 2019 09:45:29 +0200

 matrix-synapse-py3 (1.0.0) stable; urgency=medium
diff --git a/debian/log.yaml b/debian/log.yaml
index 206b65f1acb0..95b655dd35f7 100644
--- a/debian/log.yaml
+++ b/debian/log.yaml
@@ -7,7 +7,7 @@ formatters:

 filters:
   context:
-    (): synapse.util.logcontext.LoggingContextFilter
+    (): synapse.logging.context.LoggingContextFilter
     request: ""

 handlers:
diff --git a/docker/conf/log.config b/docker/conf/log.config
index 895e45d20b7a..ea5ccfd68b9d 100644
--- a/docker/conf/log.config
+++ b/docker/conf/log.config
@@ -6,7 +6,7 @@ formatters:

 filters:
   context:
-    (): synapse.util.logcontext.LoggingContextFilter
+    (): synapse.logging.context.LoggingContextFilter
     request: ""

 handlers:
diff --git a/docs/log_contexts.rst b/docs/log_contexts.rst
index 27cde11cf762..f5cd5de8abac 100644
--- a/docs/log_contexts.rst
+++ b/docs/log_contexts.rst
@@ -1,4 +1,4 @@
-Log contexts
+Log Contexts
 ============

 .. contents::
@@ -12,7 +12,7 @@ record.
 Logcontexts are also used for CPU and database accounting, so that we can
 track which requests were responsible for high CPU use or database activity.

-The ``synapse.util.logcontext`` module provides a facilities for managing the
+The ``synapse.logging.context`` module provides a facilities for managing the
 current log context (as well as providing the ``LoggingContextFilter`` class).

 Deferreds make the whole thing complicated, so this document describes how it
@@ -27,19 +27,19 @@ found them:
 .. code:: python

-    from synapse.util import logcontext # omitted from future snippets
+    from synapse.logging import context # omitted from future snippets

     def handle_request(request_id):
-        request_context = logcontext.LoggingContext()
+        request_context = context.LoggingContext()

-        calling_context = logcontext.LoggingContext.current_context()
-        logcontext.LoggingContext.set_current_context(request_context)
+        calling_context = context.LoggingContext.current_context()
+        context.LoggingContext.set_current_context(request_context)
         try:
             request_context.request = request_id
             do_request_handling()
             logger.debug("finished")
         finally:
-            logcontext.LoggingContext.set_current_context(calling_context)
+            context.LoggingContext.set_current_context(calling_context)

     def do_request_handling():
         logger.debug("phew") # this will be logged against request_id
@@ -51,7 +51,7 @@ written much more succinctly as:
 .. code:: python

     def handle_request(request_id):
-        with logcontext.LoggingContext() as request_context:
+        with context.LoggingContext() as request_context:
             request_context.request = request_id
             do_request_handling()
             logger.debug("finished")
@@ -74,7 +74,7 @@ blocking operation, and returns a deferred:

     @defer.inlineCallbacks
     def handle_request(request_id):
-        with logcontext.LoggingContext() as request_context:
+        with context.LoggingContext() as request_context:
             request_context.request = request_id
             yield do_request_handling()
             logger.debug("finished")
@@ -179,7 +179,7 @@ though, we need to make up a new Deferred, or we get a Deferred back from
 external code. We need to make it follow our rules.

 The easy way to do it is with a combination of ``defer.inlineCallbacks``, and
-``logcontext.PreserveLoggingContext``. Suppose we want to implement ``sleep``,
+``context.PreserveLoggingContext``. Suppose we want to implement ``sleep``,
 which returns a deferred which will run its callbacks after a given number of
 seconds. That might look like:
@@ -204,13 +204,13 @@ That doesn't follow the rules, but we can fix it by wrapping it with
 This technique works equally for external functions which return deferreds,
 or deferreds we have made ourselves.

-You can also use ``logcontext.make_deferred_yieldable``, which just does the
+You can also use ``context.make_deferred_yieldable``, which just does the
 boilerplate for you, so the above could be written:

 .. code:: python

     def sleep(seconds):
-        return logcontext.make_deferred_yieldable(get_sleep_deferred(seconds))
+        return context.make_deferred_yieldable(get_sleep_deferred(seconds))


 Fire-and-forget
@@ -279,7 +279,7 @@ Obviously that option means that the operations done in
 that might be fixed by setting a different logcontext via a ``with
 LoggingContext(...)`` in ``background_operation``).

-The second option is to use ``logcontext.run_in_background``, which wraps a
+The second option is to use ``context.run_in_background``, which wraps a
 function so that it doesn't reset the logcontext even when it returns an
 incomplete deferred, and adds a callback to the returned deferred to reset the
 logcontext. In other words, it turns a function that follows the Synapse rules
@@ -293,7 +293,7 @@ It can be used like this:

     def do_request_handling():
         yield foreground_operation()

-        logcontext.run_in_background(background_operation)
+        context.run_in_background(background_operation)

         # this will now be logged against the request context
         logger.debug("Request handling complete")
@@ -332,7 +332,7 @@ gathered:
         result = yield defer.gatherResults([d1, d2])

 In this case particularly, though, option two, of using
-``logcontext.preserve_fn`` almost certainly makes more sense, so that
+``context.preserve_fn`` almost certainly makes more sense, so that
 ``operation1`` and ``operation2`` are both logged against the original
 logcontext. This looks like:
@@ -340,8 +340,8 @@ logcontext. This looks like:

     @defer.inlineCallbacks
     def do_request_handling():
-        d1 = logcontext.preserve_fn(operation1)()
-        d2 = logcontext.preserve_fn(operation2)()
+        d1 = context.preserve_fn(operation1)()
+        d2 = context.preserve_fn(operation2)()

         with PreserveLoggingContext():
             result = yield defer.gatherResults([d1, d2])
@@ -381,7 +381,7 @@ off the background process, and then leave the ``with`` block to wait for it:

 .. code:: python

     def handle_request(request_id):
-        with logcontext.LoggingContext() as request_context:
+        with context.LoggingContext() as request_context:
             request_context.request = request_id

             d = do_request_handling()
@@ -414,7 +414,7 @@ runs its callbacks in the original logcontext, all is happy.

 The business of a Deferred which runs its callbacks in the original
 logcontext isn't hard to achieve — we have it today, in the shape of
-``logcontext._PreservingContextDeferred``:
+``context._PreservingContextDeferred``:

 .. code:: python

diff --git a/synapse/app/_base.py b/synapse/app/_base.py
index 9b5c30f50665..1ebb7ae53948 100644
--- a/synapse/app/_base.py
+++ b/synapse/app/_base.py
@@ -27,7 +27,7 @@ import synapse
 from synapse.app import check_bind_error
 from synapse.crypto import context_factory
-from synapse.util import PreserveLoggingContext
+from synapse.logging.context import PreserveLoggingContext
 from synapse.util.async_helpers import Linearizer
 from synapse.util.rlimit import change_resource_limit
 from synapse.util.versionstring import get_version_string
diff --git a/synapse/app/appservice.py b/synapse/app/appservice.py
index 9120bdb1438c..be44249ed6f1 100644
--- a/synapse/app/appservice.py
+++ b/synapse/app/appservice.py
@@ -26,6 +26,7 @@ from synapse.config.homeserver import HomeServerConfig
 from synapse.config.logger import setup_logging
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext, run_in_background
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore
@@ -36,7 +37,6 @@ from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext, run_in_background
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
diff --git a/synapse/app/client_reader.py b/synapse/app/client_reader.py
index 90bc79cdda87..ff11beca82a7 100644
--- a/synapse/app/client_reader.py
+++ b/synapse/app/client_reader.py
@@ -27,6 +27,7 @@ from synapse.config.logger import setup_logging
 from synapse.http.server import JsonResource
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage._base import BaseSlavedStore
@@ -64,7 +65,6 @@ from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
diff --git a/synapse/app/event_creator.py b/synapse/app/event_creator.py
index ff522e4499f3..cacad25eacd5 100644
--- a/synapse/app/event_creator.py
+++ b/synapse/app/event_creator.py
@@ -27,6 +27,7 @@ from synapse.config.logger import setup_logging
 from synapse.http.server import JsonResource
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage._base import BaseSlavedStore
@@ -59,7 +60,6 @@ from synapse.storage.engines import create_engine
 from synapse.storage.user_directory import UserDirectoryStore
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
diff --git a/synapse/app/federation_reader.py b/synapse/app/federation_reader.py
index 94214209301a..11e80dbae02c 100644
--- a/synapse/app/federation_reader.py
+++ b/synapse/app/federation_reader.py
@@ -28,6 +28,7 @@ from synapse.config.logger import setup_logging
 from synapse.federation.transport.server import TransportLayerServer
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage._base import BaseSlavedStore
@@ -48,7 +49,6 @@ from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
diff --git a/synapse/app/federation_sender.py b/synapse/app/federation_sender.py
index 969be58d0b04..97da7bdcbf4a 100644
--- a/synapse/app/federation_sender.py
+++ b/synapse/app/federation_sender.py
@@ -27,6 +27,7 @@ from synapse.config.logger import setup_logging
 from synapse.federation import send_queue
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext, run_in_background
 from synapse.metrics import RegistryProxy
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
@@ -44,7 +45,6 @@ from synapse.types import ReadReceipt
 from synapse.util.async_helpers import Linearizer
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext, run_in_background
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
diff --git a/synapse/app/frontend_proxy.py b/synapse/app/frontend_proxy.py
index 2fd7d57ebf1c..417a10bbd23c 100644
--- a/synapse/app/frontend_proxy.py
+++ b/synapse/app/frontend_proxy.py
@@ -29,6 +29,7 @@ from synapse.http.server import JsonResource
 from synapse.http.servlet import RestServlet, parse_json_object_from_request
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage._base import BaseSlavedStore
@@ -41,7 +42,6 @@ from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py
index 49da105cf62e..639b1429c05a 100755
--- a/synapse/app/homeserver.py
+++ b/synapse/app/homeserver.py
@@ -54,6 +54,7 @@ from synapse.http.additional_resource import AdditionalResource
 from synapse.http.server import RootRedirect
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext
 from synapse.metrics import RegistryProxy
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
@@ -72,7 +73,6 @@ from synapse.storage.prepare_database import UpgradeDatabaseException, prepare_database
 from synapse.util.caches import CACHE_SIZE_FACTOR
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext
 from synapse.util.manhole import manhole
 from synapse.util.module_loader import load_module
 from synapse.util.rlimit import change_resource_limit
diff --git a/synapse/app/media_repository.py b/synapse/app/media_repository.py
index cf0e2036c3fd..f23b9b6eda89 100644
--- a/synapse/app/media_repository.py
+++ b/synapse/app/media_repository.py
@@ -27,6 +27,7 @@ from synapse.config.homeserver import HomeServerConfig
 from synapse.config.logger import setup_logging
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage._base import BaseSlavedStore
@@ -40,7 +41,6 @@ from synapse.storage.engines import create_engine
 from synapse.storage.media_repository import MediaRepositoryStore
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
diff --git a/synapse/app/pusher.py b/synapse/app/pusher.py
index df29ea5ecbea..4f929edf8660 100644
--- a/synapse/app/pusher.py
+++ b/synapse/app/pusher.py
@@ -26,6 +26,7 @@ from synapse.config.homeserver import HomeServerConfig
 from synapse.config.logger import setup_logging
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext, run_in_background
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage._base import __func__
@@ -38,7 +39,6 @@ from synapse.storage import DataStore
 from synapse.storage.engines import create_engine
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext, run_in_background
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
diff --git a/synapse/app/synchrotron.py b/synapse/app/synchrotron.py
index 858949910d0d..de4797fddcd8 100644
--- a/synapse/app/synchrotron.py
+++ b/synapse/app/synchrotron.py
@@ -31,6 +31,7 @@ from synapse.handlers.presence import PresenceHandler, get_interested_parties
 from synapse.http.server import JsonResource
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext, run_in_background
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage._base import BaseSlavedStore, __func__
@@ -57,7 +58,6 @@ from synapse.storage.engines import create_engine
 from synapse.storage.presence import UserPresenceState
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext, run_in_background
 from synapse.util.manhole import manhole
 from synapse.util.stringutils import random_string
 from synapse.util.versionstring import get_version_string
diff --git a/synapse/app/user_dir.py b/synapse/app/user_dir.py
index 2d9d2e1bbc02..1177ddd72e92 100644
--- a/synapse/app/user_dir.py
+++ b/synapse/app/user_dir.py
@@ -28,6 +28,7 @@ from synapse.config.logger import setup_logging
 from synapse.http.server import JsonResource
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext, run_in_background
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage._base import BaseSlavedStore
@@ -46,7 +47,6 @@ from synapse.storage.user_directory import UserDirectoryStore
 from synapse.util.caches.stream_change_cache import StreamChangeCache
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext, run_in_background
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
diff --git a/synapse/appservice/scheduler.py b/synapse/appservice/scheduler.py
index b54bf5411f46..e5b36494f57f 100644
--- a/synapse/appservice/scheduler.py
+++ b/synapse/appservice/scheduler.py
@@ -53,8 +53,8 @@ from twisted.internet import defer
 from synapse.appservice import ApplicationServiceState
+from synapse.logging.context import run_in_background
 from synapse.metrics.background_process_metrics import run_as_background_process
-from synapse.util.logcontext import run_in_background

 logger = logging.getLogger(__name__)
diff --git a/synapse/config/logger.py b/synapse/config/logger.py
index 931aec41c09c..0f5554211c6a 100644
--- a/synapse/config/logger.py
+++ b/synapse/config/logger.py
@@ -24,7 +24,7 @@ import synapse
 from synapse.app import _base as appbase
-from synapse.util.logcontext import LoggingContextFilter
+from synapse.logging.context import LoggingContextFilter
 from synapse.util.versionstring import get_version_string

 from ._base import Config
@@ -40,7 +40,7 @@ filters:
     context:
-        (): synapse.util.logcontext.LoggingContextFilter
+        (): synapse.logging.context.LoggingContextFilter
         request: ""

 handlers:
diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py
index 10c2eb7f0fa7..341c86315273 100644
--- a/synapse/crypto/keyring.py
+++ b/synapse/crypto/keyring.py
@@ -44,15 +44,16 @@
     RequestSendFailed,
     SynapseError,
 )
-from synapse.storage.keys import FetchKeyResult
-from synapse.util import logcontext, unwrapFirstError
-from synapse.util.async_helpers import yieldable_gather_results
-from synapse.util.logcontext import (
+from synapse.logging.context import (
     LoggingContext,
     PreserveLoggingContext,
+    make_deferred_yieldable,
     preserve_fn,
     run_in_background,
 )
+from synapse.storage.keys import FetchKeyResult
+from synapse.util import unwrapFirstError
+from synapse.util.async_helpers import yieldable_gather_results
 from synapse.util.metrics import Measure
 from synapse.util.retryutils import NotRetryingDestination
@@ -140,7 +141,7 @@ def verify_json_for_server(
         """
         req = VerifyJsonRequest(server_name, json_object, validity_time, request_name)
         requests = (req,)
-        return logcontext.make_deferred_yieldable(self._verify_objects(requests)[0])
+        return make_deferred_yieldable(self._verify_objects(requests)[0])

     def verify_json_objects_for_server(self, server_and_json):
         """Bulk verifies signatures of json objects, bulk fetching keys as
@@ -557,7 +558,7 @@ def process_v2_response(self, from_server, response_json, time_added_ms):
         signed_key_json_bytes = encode_canonical_json(signed_key_json)

-        yield logcontext.make_deferred_yieldable(
+        yield make_deferred_yieldable(
             defer.gatherResults(
                 [
                     run_in_background(
@@ -612,7 +613,7 @@ def get_key(key_server):
             defer.returnValue({})

-        results = yield logcontext.make_deferred_yieldable(
+        results = yield make_deferred_yieldable(
             defer.gatherResults(
                 [run_in_background(get_key, server) for server in self.key_servers],
                 consumeErrors=True,
diff --git a/synapse/events/snapshot.py b/synapse/events/snapshot.py
index a96cdada3d35..a9545e6c1b17 100644
--- a/synapse/events/snapshot.py
+++ b/synapse/events/snapshot.py
@@ -19,7 +19,7 @@
 from twisted.internet import defer

-from synapse.util.logcontext import make_deferred_yieldable, run_in_background
+from synapse.logging.context import make_deferred_yieldable, run_in_background


 class EventContext(object):
diff --git a/synapse/federation/federation_base.py b/synapse/federation/federation_base.py
index 1e925b19e7b3..f7bb806ae724 100644
--- a/synapse/federation/federation_base.py
+++ b/synapse/federation/federation_base.py
@@ -27,8 +27,14 @@ from synapse.events import event_type_from_format_version
 from synapse.events.utils import prune_event
 from synapse.http.servlet import assert_params_in_dict
+from synapse.logging.context import (
+    LoggingContext,
+    PreserveLoggingContext,
+    make_deferred_yieldable,
+    preserve_fn,
+)
 from synapse.types import get_domain_from_id
-from synapse.util import logcontext, unwrapFirstError
+from synapse.util import unwrapFirstError

 logger = logging.getLogger(__name__)
@@ -73,7 +79,7 @@ def _check_sigs_and_hash_and_fetch(
         @defer.inlineCallbacks
         def handle_check_result(pdu, deferred):
             try:
-                res = yield logcontext.make_deferred_yieldable(deferred)
+                res = yield make_deferred_yieldable(deferred)
             except SynapseError:
                 res = None
@@ -102,10 +108,10 @@ def handle_check_result(pdu, deferred):
             defer.returnValue(res)

-        handle = logcontext.preserve_fn(handle_check_result)
+        handle = preserve_fn(handle_check_result)
         deferreds2 = [handle(pdu, deferred) for pdu, deferred in zip(pdus, deferreds)]

-        valid_pdus = yield logcontext.make_deferred_yieldable(
+        valid_pdus = yield make_deferred_yieldable(
             defer.gatherResults(deferreds2, consumeErrors=True)
         ).addErrback(unwrapFirstError)
@@ -115,7 +121,7 @@ def handle_check_result(pdu, deferred):
         defer.returnValue([p for p in valid_pdus if p])

     def _check_sigs_and_hash(self, room_version, pdu):
-        return logcontext.make_deferred_yieldable(
+        return make_deferred_yieldable(
             self._check_sigs_and_hashes(room_version, [pdu])[0]
         )
@@ -133,14 +139,14 @@ def _check_sigs_and_hashes(self, room_version, pdus):
             * returns a redacted version of the event (if the signature
               matched but the hash did not)
             * throws a SynapseError if the signature check failed.
-            The deferreds run their callbacks in the sentinel logcontext.
+            The deferreds run their callbacks in the sentinel
         """
         deferreds = _check_sigs_on_pdus(self.keyring, room_version, pdus)

-        ctx = logcontext.LoggingContext.current_context()
+        ctx = LoggingContext.current_context()

         def callback(_, pdu):
-            with logcontext.PreserveLoggingContext(ctx):
+            with PreserveLoggingContext(ctx):
                 if not check_event_content_hash(pdu):
                     # let's try to distinguish between failures because the event was
                     # redacted (which are somewhat expected) vs actual ball-tampering
@@ -178,7 +184,7 @@ def callback(_, pdu):
         def errback(failure, pdu):
             failure.trap(SynapseError)
-            with logcontext.PreserveLoggingContext(ctx):
+            with PreserveLoggingContext(ctx):
                 logger.warn(
                     "Signature check failed for %s: %s",
                     pdu.event_id,
diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py
index 3883eb525eed..3cb4b9442085 100644
--- a/synapse/federation/federation_client.py
+++ b/synapse/federation/federation_client.py
@@ -39,10 +39,10 @@
 )
 from synapse.events import builder, room_version_to_event_format
 from synapse.federation.federation_base import FederationBase, event_from_pdu_json
-from synapse.util import logcontext, unwrapFirstError
+from synapse.logging.context import make_deferred_yieldable, run_in_background
+from synapse.logging.utils import log_function
+from synapse.util import unwrapFirstError
 from synapse.util.caches.expiringcache import ExpiringCache
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background
-from synapse.util.logutils import log_function
 from synapse.util.retryutils import NotRetryingDestination

 logger = logging.getLogger(__name__)
@@ -207,7 +207,7 @@ def backfill(self, dest, room_id, limit, extremities):
         ]

         # FIXME: We should handle signature failures more gracefully.
-        pdus[:] = yield logcontext.make_deferred_yieldable(
+        pdus[:] = yield make_deferred_yieldable(
             defer.gatherResults(
                 self._check_sigs_and_hashes(room_version, pdus), consumeErrors=True
             ).addErrback(unwrapFirstError)
diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py
index 2e0cebb638c4..8c0a18b12039 100644
--- a/synapse/federation/federation_server.py
+++ b/synapse/federation/federation_server.py
@@ -42,6 +42,8 @@ from synapse.federation.persistence import TransactionActions
 from synapse.federation.units import Edu, Transaction
 from synapse.http.endpoint import parse_server_name
+from synapse.logging.context import nested_logging_context
+from synapse.logging.utils import log_function
 from synapse.replication.http.federation import (
     ReplicationFederationSendEduRestServlet,
     ReplicationGetQueryRestServlet,
@@ -50,8 +52,6 @@ from synapse.util import glob_to_regex
 from synapse.util.async_helpers import Linearizer, concurrently_execute
 from synapse.util.caches.response_cache import ResponseCache
-from synapse.util.logcontext import nested_logging_context
-from synapse.util.logutils import log_function

 # when processing incoming transactions, we try to handle multiple rooms in
 # parallel, up to this limit.
diff --git a/synapse/federation/persistence.py b/synapse/federation/persistence.py
index 7535f79203b3..d086e0424304 100644
--- a/synapse/federation/persistence.py
+++ b/synapse/federation/persistence.py
@@ -23,7 +23,7 @@
 from twisted.internet import defer

-from synapse.util.logutils import log_function
+from synapse.logging.utils import log_function

 logger = logging.getLogger(__name__)
diff --git a/synapse/federation/sender/__init__.py b/synapse/federation/sender/__init__.py
index 766c5a37cd7a..d46f4aaeb1a3 100644
--- a/synapse/federation/sender/__init__.py
+++ b/synapse/federation/sender/__init__.py
@@ -26,6 +26,11 @@ from synapse.federation.sender.transaction_manager import TransactionManager
 from synapse.federation.units import Edu
 from synapse.handlers.presence import get_interested_remotes
+from synapse.logging.context import (
+    make_deferred_yieldable,
+    preserve_fn,
+    run_in_background,
+)
 from synapse.metrics import (
     LaterGauge,
     event_processing_loop_counter,
@@ -33,7 +38,6 @@ events_processed_counter,
 )
 from synapse.metrics.background_process_metrics import run_as_background_process
-from synapse.util import logcontext
 from synapse.util.metrics import measure_func

 logger = logging.getLogger(__name__)
@@ -210,10 +214,10 @@ def handle_room_events(events):
                 for event in events:
                     events_by_room.setdefault(event.room_id, []).append(event)

-            yield logcontext.make_deferred_yieldable(
+            yield make_deferred_yieldable(
                 defer.gatherResults(
                     [
-                        logcontext.run_in_background(handle_room_events, evs)
+                        run_in_background(handle_room_events, evs)
                         for evs in itervalues(events_by_room)
                     ],
                     consumeErrors=True,
@@ -360,7 +364,7 @@ def _flush_rrs_for_room(self, room_id):
         for queue in queues:
             queue.flush_read_receipts_for_room(room_id)

-    @logcontext.preserve_fn  # the caller should not yield on this
+    @preserve_fn  # the caller should not yield on this
     @defer.inlineCallbacks
     def send_presence(self, states):
         """Send the new presence states to the appropriate destinations.
diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py
index aecd1423097b..1aae9ec9e74d 100644
--- a/synapse/federation/transport/client.py
+++ b/synapse/federation/transport/client.py
@@ -22,7 +22,7 @@
 from synapse.api.constants import Membership
 from synapse.api.urls import FEDERATION_V1_PREFIX, FEDERATION_V2_PREFIX
-from synapse.util.logutils import log_function
+from synapse.logging.utils import log_function

 logger = logging.getLogger(__name__)
diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py
index 955f0f430825..2efdcff7ef8b 100644
--- a/synapse/federation/transport/server.py
+++ b/synapse/federation/transport/server.py
@@ -36,8 +36,8 @@
     parse_json_object_from_request,
     parse_string_from_args,
 )
+from synapse.logging.context import run_in_background
 from synapse.types import ThirdPartyInstanceID, get_domain_from_id
-from synapse.util.logcontext import run_in_background
 from synapse.util.ratelimitutils import FederationRateLimiter
 from synapse.util.versionstring import get_version_string
diff --git a/synapse/groups/attestations.py b/synapse/groups/attestations.py
index e73757570cf7..f497711133e9 100644
--- a/synapse/groups/attestations.py
+++ b/synapse/groups/attestations.py
@@ -43,9 +43,9 @@
 from twisted.internet import defer

 from synapse.api.errors import HttpResponseException, RequestSendFailed, SynapseError
+from synapse.logging.context import run_in_background
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.types import get_domain_from_id
-from synapse.util.logcontext import run_in_background

 logger = logging.getLogger(__name__)
diff --git a/synapse/handlers/account_validity.py b/synapse/handlers/account_validity.py
index edb48054a029..1f1708ba7d5d 100644
--- a/synapse/handlers/account_validity.py
+++ b/synapse/handlers/account_validity.py
@@ -22,10 +22,10 @@
 from twisted.internet import defer

 from synapse.api.errors import StoreError
+from synapse.logging.context import make_deferred_yieldable
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.types import UserID
 from synapse.util import stringutils
-from synapse.util.logcontext import make_deferred_yieldable

 try:
     from synapse.push.mailer import load_jinja2_templates
diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py
index 5cc89d43f6a6..8f089f0e3355 100644
--- a/synapse/handlers/appservice.py
+++ b/synapse/handlers/appservice.py
@@ -23,13 +23,13 @@ import synapse
 from synapse.api.constants import EventTypes
+from synapse.logging.context import make_deferred_yieldable, run_in_background
 from synapse.metrics import (
     event_processing_loop_counter,
     event_processing_loop_room_count,
 )
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.util import log_failure
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 from synapse.util.metrics import Measure

 logger = logging.getLogger(__name__)
diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py
index c8c1ed324656..ef5585aa9912 100644
--- a/synapse/handlers/auth.py
+++ b/synapse/handlers/auth.py
@@ -36,9 +36,9 @@
     SynapseError,
 )
 from synapse.api.ratelimiting import Ratelimiter
+from synapse.logging.context import defer_to_thread
 from synapse.module_api import ModuleApi
 from synapse.types import UserID
-from synapse.util import logcontext
 from synapse.util.caches.expiringcache import ExpiringCache
 from ._base import BaseHandler
@@ -987,7 +987,7 @@ def _do_hash():
                 bcrypt.gensalt(self.bcrypt_rounds),
             ).decode("ascii")

-        return logcontext.defer_to_thread(self.hs.get_reactor(), _do_hash)
+        return defer_to_thread(self.hs.get_reactor(), _do_hash)

     def validate_hash(self, password, stored_hash):
         """Validates that self.hash(password) == stored_hash.
@@ -1013,7 +1013,7 @@ def _do_validate_hash():
             if not isinstance(stored_hash, bytes):
                 stored_hash = stored_hash.encode("ascii")

-            return logcontext.defer_to_thread(self.hs.get_reactor(), _do_validate_hash)
+            return defer_to_thread(self.hs.get_reactor(), _do_validate_hash)
         else:
             return defer.succeed(False)
diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py
index 807900fe52ac..55b4ab3a1a9f 100644
--- a/synapse/handlers/e2e_keys.py
+++ b/synapse/handlers/e2e_keys.py
@@ -23,8 +23,8 @@
 from twisted.internet import defer

 from synapse.api.errors import CodeMessageException, FederationDeniedError, SynapseError
+from synapse.logging.context import make_deferred_yieldable, run_in_background
 from synapse.types import UserID, get_domain_from_id
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 from synapse.util.retryutils import NotRetryingDestination

 logger = logging.getLogger(__name__)
diff --git a/synapse/handlers/events.py b/synapse/handlers/events.py
index 5836d3c639bd..6a38328af316 100644
--- a/synapse/handlers/events.py
+++ b/synapse/handlers/events.py
@@ -21,8 +21,8 @@
 from synapse.api.constants import EventTypes, Membership
 from synapse.api.errors import AuthError, SynapseError
 from synapse.events import EventBase
+from synapse.logging.utils import log_function
 from synapse.types import UserID
-from synapse.util.logutils import log_function
 from synapse.visibility import filter_events_for_client

 from ._base import BaseHandler
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index 02d397c498e4..57be968c67c8 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -45,6 +45,13 @@ from synapse.crypto.event_signing import compute_event_signature
 from synapse.event_auth import auth_types_for_event
 from synapse.events.validator import EventValidator
+from synapse.logging.context import (
+    make_deferred_yieldable,
+    nested_logging_context,
+    preserve_fn,
+    run_in_background,
+)
+from synapse.logging.utils import log_function
 from synapse.replication.http.federation import (
     ReplicationCleanRoomRestServlet,
     ReplicationFederationSendEventsRestServlet,
@@ -52,10 +59,9 @@ from synapse.replication.http.membership import ReplicationUserJoinedLeftRoomRestServlet
 from synapse.state import StateResolutionStore, resolve_events_with_store
 from synapse.types import UserID, get_domain_from_id
-from synapse.util import logcontext, unwrapFirstError
+from synapse.util import unwrapFirstError
 from synapse.util.async_helpers import Linearizer
 from synapse.util.distributor import user_joined_room
-from synapse.util.logutils import log_function
 from synapse.util.retryutils import NotRetryingDestination
 from synapse.visibility import filter_events_for_server
@@ -338,7 +344,7 @@ def on_receive_pdu(self, origin, pdu, sent_to_us_directly=False):
                 room_version = yield self.store.get_room_version(room_id)

-                with logcontext.nested_logging_context(p):
+                with nested_logging_context(p):
                     # note that if any of the missing prevs share missing state or
                     # auth events, the requests to fetch those events are deduped
                     # by the get_pdu_cache in federation_client.
@@ -532,7 +538,7 @@ def _get_missing_events_for_pdu(self, origin, pdu, prevs, min_depth):
                     event_id,
                     ev.event_id,
                 )
-                with logcontext.nested_logging_context(ev.event_id):
+                with nested_logging_context(ev.event_id):
                     try:
                         yield self.on_receive_pdu(origin, ev, sent_to_us_directly=False)
                     except FederationError as e:
@@ -725,10 +731,10 @@ def backfill(self, dest, room_id, limit, extremities):
                 missing_auth - failed_to_fetch,
             )

-            results = yield logcontext.make_deferred_yieldable(
+            results = yield make_deferred_yieldable(
                 defer.gatherResults(
                     [
-                        logcontext.run_in_background(
+                        run_in_background(
                             self.federation_client.get_pdu,
                             [dest],
                             event_id,
@@ -994,10 +1000,8 @@ def try_backfill(domains):
         event_ids = list(extremities.keys())

         logger.debug("calling resolve_state_groups in _maybe_backfill")
-        resolve = logcontext.preserve_fn(
-            self.state_handler.resolve_state_groups_for_events
-        )
-        states = yield logcontext.make_deferred_yieldable(
+        resolve = preserve_fn(self.state_handler.resolve_state_groups_for_events)
+        states = yield make_deferred_yieldable(
             defer.gatherResults(
                 [resolve(room_id, [e]) for e in event_ids], consumeErrors=True
             )
@@ -1171,7 +1175,7 @@ def do_invite_join(self, target_hosts, room_id, joinee, content):
             # lots of requests for missing prev_events which we do actually
             # have. Hence we fire off the deferred, but don't wait for it.

-            logcontext.run_in_background(self._handle_queued_pdus, room_queue)
+            run_in_background(self._handle_queued_pdus, room_queue)

         defer.returnValue(True)
@@ -1191,7 +1195,7 @@ def _handle_queued_pdus(self, room_queue):
                     p.event_id,
                     p.room_id,
                 )
-                with logcontext.nested_logging_context(p.event_id):
+                with nested_logging_context(p.event_id):
                     yield self.on_receive_pdu(origin, p, sent_to_us_directly=True)
             except Exception as e:
                 logger.warn(
@@ -1610,7 +1614,7 @@ def _handle_new_event(
             success = True
         finally:
             if not success:
-                logcontext.run_in_background(
+                run_in_background(
                     self.store.remove_push_actions_from_staging, event.event_id
                 )
@@ -1629,7 +1633,7 @@ def _handle_new_events(self, origin, event_infos, backfilled=False):
         @defer.inlineCallbacks
         def prep(ev_info):
             event = ev_info["event"]
-            with logcontext.nested_logging_context(suffix=event.event_id):
+            with nested_logging_context(suffix=event.event_id):
                 res = yield self._prep_event(
                     origin,
                     event,
                 )
                 defer.returnValue(res)

-        contexts = yield logcontext.make_deferred_yieldable(
+        contexts = yield make_deferred_yieldable(
             defer.gatherResults(
-                [
-                    logcontext.run_in_background(prep, ev_info)
-                    for ev_info in event_infos
-                ],
+                [run_in_background(prep, ev_info) for ev_info in event_infos],
                 consumeErrors=True,
             )
         )
@@ -2106,10 +2107,10 @@ def _update_auth_events_and_context_for_auth(
         room_version = yield self.store.get_room_version(event.room_id)

-        different_events = yield logcontext.make_deferred_yieldable(
+        different_events = yield make_deferred_yieldable(
             defer.gatherResults(
                 [
-                    logcontext.run_in_background(
+                    run_in_background(
                         self.store.get_event, d, allow_none=True, allow_rejected=False
                     )
                     for d in different_auth
diff --git a/synapse/handlers/initial_sync.py b/synapse/handlers/initial_sync.py
index a1fe9d116faf..54c966c8a67b 100644
--- a/synapse/handlers/initial_sync.py
+++ b/synapse/handlers/initial_sync.py
@@ -21,12 +21,12 @@
 from synapse.api.errors import AuthError, Codes, SynapseError
 from synapse.events.validator import EventValidator
 from synapse.handlers.presence import format_user_presence_state
+from synapse.logging.context import make_deferred_yieldable, run_in_background
 from synapse.streams.config import PaginationConfig
 from synapse.types import StreamToken, UserID
 from synapse.util import unwrapFirstError
 from synapse.util.async_helpers import concurrently_execute
 from synapse.util.caches.snapshot_cache import SnapshotCache
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 from synapse.visibility import filter_events_for_client

 from ._base import BaseHandler
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py
index 683da6bf3204..eaeda7a5cbb2 100644
--- a/synapse/handlers/message.py
+++ b/synapse/handlers/message.py
@@ -34,13 +34,13 @@ from synapse.api.room_versions import RoomVersions
 from synapse.api.urls import ConsentURIBuilder
 from synapse.events.validator import EventValidator
+from synapse.logging.context import run_in_background
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.replication.http.send_event import ReplicationSendEventRestServlet
 from synapse.storage.state import StateFilter
 from synapse.types import RoomAlias, UserID, create_requester
 from synapse.util.async_helpers import Linearizer
 from synapse.util.frozenutils import frozendict_json_encoder
-from synapse.util.logcontext import run_in_background
 from synapse.util.metrics import measure_func
 from synapse.visibility import filter_events_for_client
diff --git a/synapse/handlers/pagination.py b/synapse/handlers/pagination.py
index 76ee97ddd323..20bcfed334f9 100644
--- a/synapse/handlers/pagination.py
+++ b/synapse/handlers/pagination.py
@@ -20,10 +20,10 @@
 from synapse.api.constants import EventTypes, Membership
 from synapse.api.errors import SynapseError
+from synapse.logging.context import run_in_background
 from synapse.storage.state import StateFilter
 from synapse.types import RoomStreamToken
 from synapse.util.async_helpers import ReadWriteLock
-from synapse.util.logcontext import run_in_background
 from synapse.util.stringutils import random_string
 from synapse.visibility import filter_events_for_client
diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py
index c80dc2eba0b0..6f3537e43545 100644
--- a/synapse/handlers/presence.py
+++ b/synapse/handlers/presence.py
@@ -34,14 +34,14 @@ import synapse.metrics
 from synapse.api.constants import EventTypes, Membership, PresenceState
 from synapse.api.errors import SynapseError
+from synapse.logging.context import run_in_background
+from synapse.logging.utils import log_function
 from synapse.metrics import LaterGauge
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.storage.presence import UserPresenceState
 from synapse.types import UserID, get_domain_from_id
 from synapse.util.async_helpers import Linearizer
 from synapse.util.caches.descriptors import cachedInlineCallbacks
-from synapse.util.logcontext import run_in_background
-from synapse.util.logutils import log_function
 from synapse.util.metrics import Measure
 from synapse.util.wheel_timer import WheelTimer
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py
index a3f550554f05..cd1ac0a27ab7 100644
--- a/synapse/handlers/sync.py
+++ b/synapse/handlers/sync.py
@@ -25,6 +25,7 @@
 from twisted.internet import defer

 from synapse.api.constants import EventTypes, Membership
+from synapse.logging.context import LoggingContext
 from synapse.push.clientformat import format_push_rules_for_user
 from synapse.storage.roommember import MemberSummary
 from synapse.storage.state import StateFilter
@@ -33,7 +34,6 @@ from synapse.util.caches.expiringcache import ExpiringCache
 from synapse.util.caches.lrucache import LruCache
 from synapse.util.caches.response_cache import ResponseCache
-from synapse.util.logcontext import LoggingContext
 from synapse.util.metrics import Measure, measure_func
 from synapse.visibility import filter_events_for_client
diff --git a/synapse/handlers/typing.py b/synapse/handlers/typing.py
index f8062c867133..c3e0c8fc7e37 100644
--- a/synapse/handlers/typing.py
+++ b/synapse/handlers/typing.py
@@ -19,9 +19,9 @@
 from twisted.internet import defer

 from synapse.api.errors import AuthError, SynapseError
+from synapse.logging.context import run_in_background
 from synapse.types import UserID, get_domain_from_id
 from synapse.util.caches.stream_change_cache import StreamChangeCache
-from synapse.util.logcontext import run_in_background
 from synapse.util.metrics import Measure
 from synapse.util.wheel_timer import WheelTimer
diff --git a/synapse/http/client.py b/synapse/http/client.py
index 9bc7035c8dae..45d501095299 100644
--- a/synapse/http/client.py
+++ b/synapse/http/client.py
@@ -45,9 +45,9 @@
     cancelled_to_request_timed_out_error,
     redact_uri,
 )
+from synapse.logging.context import make_deferred_yieldable
 from synapse.util.async_helpers import timeout_deferred
 from synapse.util.caches import CACHE_SIZE_FACTOR
-from synapse.util.logcontext import make_deferred_yieldable

 logger = logging.getLogger(__name__)
diff --git a/synapse/http/federation/matrix_federation_agent.py b/synapse/http/federation/matrix_federation_agent.py
index 414cde077776..054c321a2015 100644
--- a/synapse/http/federation/matrix_federation_agent.py
+++ b/synapse/http/federation/matrix_federation_agent.py
@@ -30,9 +30,9 @@
 from twisted.web.iweb import IAgent

 from synapse.http.federation.srv_resolver import SrvResolver, pick_server_from_list
+from synapse.logging.context import make_deferred_yieldable
 from synapse.util import Clock
 from synapse.util.caches.ttlcache import TTLCache
-from synapse.util.logcontext import make_deferred_yieldable
 from synapse.util.metrics import Measure

 # period to cache .well-known results for by default
diff --git a/synapse/http/federation/srv_resolver.py b/synapse/http/federation/srv_resolver.py
index 1f22f78a755f..ecc88f9b9667 100644
--- a/synapse/http/federation/srv_resolver.py
+++ b/synapse/http/federation/srv_resolver.py
@@ -25,7 +25,7 @@
 from twisted.names import client, dns
 from twisted.names.error import DNSNameError, DomainError

-from synapse.util.logcontext import make_deferred_yieldable
+from synapse.logging.context import make_deferred_yieldable

 logger = logging.getLogger(__name__)
diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py
index 5ef8bb60a3ab..dee3710f682a 100644
--- a/synapse/http/matrixfederationclient.py
+++ b/synapse/http/matrixfederationclient.py
@@ -48,8 +48,8 @@ from synapse.http import QuieterFileBodyProducer
 from synapse.http.client import BlacklistingAgentWrapper, IPBlacklistingResolver
 from synapse.http.federation.matrix_federation_agent import MatrixFederationAgent
+from synapse.logging.context import make_deferred_yieldable
 from synapse.util.async_helpers import timeout_deferred
-from synapse.util.logcontext import make_deferred_yieldable
 from synapse.util.metrics import Measure

 logger = logging.getLogger(__name__)
diff --git a/synapse/http/request_metrics.py b/synapse/http/request_metrics.py
index 62045a918bf5..46af27c8f651 100644
--- a/synapse/http/request_metrics.py
+++ b/synapse/http/request_metrics.py
@@ -19,8 +19,8 @@
 from prometheus_client.core import Counter, Histogram

+from synapse.logging.context import LoggingContext
 from synapse.metrics import LaterGauge
-from synapse.util.logcontext import LoggingContext

 logger = logging.getLogger(__name__)
diff --git a/synapse/http/server.py b/synapse/http/server.py
index d993161a3e08..72a3d67eb69d 100644
--- a/synapse/http/server.py
+++ b/synapse/http/server.py
@@ -39,8 +39,8 @@
     SynapseError,
     UnrecognizedRequestError,
 )
+from synapse.logging.context import preserve_fn
 from synapse.util.caches import intern_dict
-from synapse.util.logcontext import preserve_fn

 logger = logging.getLogger(__name__)
diff --git a/synapse/http/site.py b/synapse/http/site.py
index 93f679ea4845..df5274c17766 100644
--- a/synapse/http/site.py
+++ b/synapse/http/site.py
@@ -19,7 +19,7 @@
 from synapse.http import redact_uri
 from synapse.http.request_metrics import RequestMetrics, requests_counter
-from synapse.util.logcontext import LoggingContext, PreserveLoggingContext
+from synapse.logging.context import LoggingContext, PreserveLoggingContext

 logger = logging.getLogger(__name__)
diff --git a/synapse/logging/__init__.py b/synapse/logging/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/synapse/util/logcontext.py b/synapse/logging/context.py
similarity index 100%
rename from synapse/util/logcontext.py
rename to synapse/logging/context.py
diff --git a/synapse/util/logformatter.py b/synapse/logging/formatter.py
similarity index 100%
rename from synapse/util/logformatter.py
rename to synapse/logging/formatter.py
diff --git a/synapse/util/logutils.py b/synapse/logging/utils.py
similarity index 100%
rename from synapse/util/logutils.py
rename to synapse/logging/utils.py
diff --git a/synapse/metrics/background_process_metrics.py b/synapse/metrics/background_process_metrics.py
index 167e2c068a45..edd6b42db3ef 100644
--- a/synapse/metrics/background_process_metrics.py
+++ b/synapse/metrics/background_process_metrics.py
@@ -22,7 +22,7 @@
 from twisted.internet import defer

-from synapse.util.logcontext import LoggingContext, PreserveLoggingContext
+from synapse.logging.context import LoggingContext, PreserveLoggingContext

 logger = logging.getLogger(__name__)
diff --git a/synapse/notifier.py b/synapse/notifier.py
index d398078eedbb..918ef64897be 100644
--- a/synapse/notifier.py
+++ b/synapse/notifier.py
@@ -23,12 +23,12 @@
 from synapse.api.constants import EventTypes, Membership
 from synapse.api.errors import AuthError
 from synapse.handlers.presence import format_user_presence_state
+from synapse.logging.context import PreserveLoggingContext
+from synapse.logging.utils import log_function
 from synapse.metrics import LaterGauge
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.types import StreamToken
 from synapse.util.async_helpers import ObservableDeferred, timeout_deferred
-from synapse.util.logcontext import PreserveLoggingContext
-from synapse.util.logutils import log_function
 from synapse.util.metrics import Measure
 from synapse.visibility import filter_events_for_client
diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py
index 809199fe8856..521c6e2cd7f8 100644
--- a/synapse/push/mailer.py
+++ b/synapse/push/mailer.py
@@ -29,6 +29,7 @@
 from synapse.api.constants import EventTypes
 from synapse.api.errors import StoreError
+from synapse.logging.context import make_deferred_yieldable
 from synapse.push.presentable_names import (
     calculate_room_name,
     descriptor_from_member_events,
@@ -36,7 +37,6 @@
 )
synapse.types import UserID from synapse.util.async_helpers import concurrently_execute -from synapse.util.logcontext import make_deferred_yieldable from synapse.visibility import filter_events_for_client logger = logging.getLogger(__name__) diff --git a/synapse/replication/tcp/protocol.py b/synapse/replication/tcp/protocol.py index 97efb835ad7b..5ffdf2675df4 100644 --- a/synapse/replication/tcp/protocol.py +++ b/synapse/replication/tcp/protocol.py @@ -62,9 +62,9 @@ from twisted.protocols.basic import LineOnlyReceiver from twisted.python.failure import Failure +from synapse.logging.context import make_deferred_yieldable, run_in_background from synapse.metrics import LaterGauge from synapse.metrics.background_process_metrics import run_as_background_process -from synapse.util.logcontext import make_deferred_yieldable, run_in_background from synapse.util.stringutils import random_string from .commands import ( diff --git a/synapse/rest/client/transactions.py b/synapse/rest/client/transactions.py index 36404b797dfe..6da71dc46fff 100644 --- a/synapse/rest/client/transactions.py +++ b/synapse/rest/client/transactions.py @@ -17,8 +17,8 @@ to ensure idempotency when performing PUTs using the REST API.""" import logging +from synapse.logging.context import make_deferred_yieldable, run_in_background from synapse.util.async_helpers import ObservableDeferred -from synapse.util.logcontext import make_deferred_yieldable, run_in_background logger = logging.getLogger(__name__) diff --git a/synapse/rest/media/v1/_base.py b/synapse/rest/media/v1/_base.py index 3318638d3e69..5fefee4dde38 100644 --- a/synapse/rest/media/v1/_base.py +++ b/synapse/rest/media/v1/_base.py @@ -25,7 +25,7 @@ from synapse.api.errors import Codes, SynapseError, cs_error from synapse.http.server import finish_request, respond_with_json -from synapse.util import logcontext +from synapse.logging.context import make_deferred_yieldable from synapse.util.stringutils import is_ascii logger = logging.getLogger(__name__) @@ -75,9 +75,7 @@ def respond_with_file(request, media_type, file_path, file_size=None, upload_nam add_file_headers(request, media_type, file_size, upload_name) with open(file_path, "rb") as f: - yield logcontext.make_deferred_yieldable( - FileSender().beginFileTransfer(f, request) - ) + yield make_deferred_yieldable(FileSender().beginFileTransfer(f, request)) finish_request(request) else: diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index df3d985a3855..65afffbb4230 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -33,8 +33,8 @@ RequestSendFailed, SynapseError, ) +from synapse.logging.context import defer_to_thread from synapse.metrics.background_process_metrics import run_as_background_process -from synapse.util import logcontext from synapse.util.async_helpers import Linearizer from synapse.util.retryutils import NotRetryingDestination from synapse.util.stringutils import random_string @@ -463,7 +463,7 @@ def generate_local_exact_thumbnail( ) thumbnailer = Thumbnailer(input_path) - t_byte_source = yield logcontext.defer_to_thread( + t_byte_source = yield defer_to_thread( self.hs.get_reactor(), self._generate_thumbnail, thumbnailer, @@ -511,7 +511,7 @@ def generate_remote_exact_thumbnail( ) thumbnailer = Thumbnailer(input_path) - t_byte_source = yield logcontext.defer_to_thread( + t_byte_source = yield defer_to_thread( self.hs.get_reactor(), self._generate_thumbnail, thumbnailer, @@ -596,7 +596,7 @@ def 
_generate_thumbnails( return if thumbnailer.transpose_method is not None: - m_width, m_height = yield logcontext.defer_to_thread( + m_width, m_height = yield defer_to_thread( self.hs.get_reactor(), thumbnailer.transpose ) @@ -616,11 +616,11 @@ def _generate_thumbnails( for (t_width, t_height, t_type), t_method in iteritems(thumbnails): # Generate the thumbnail if t_method == "crop": - t_byte_source = yield logcontext.defer_to_thread( + t_byte_source = yield defer_to_thread( self.hs.get_reactor(), thumbnailer.crop, t_width, t_height, t_type ) elif t_method == "scale": - t_byte_source = yield logcontext.defer_to_thread( + t_byte_source = yield defer_to_thread( self.hs.get_reactor(), thumbnailer.scale, t_width, t_height, t_type ) else: diff --git a/synapse/rest/media/v1/media_storage.py b/synapse/rest/media/v1/media_storage.py index eff86836fb8a..25e5ac284818 100644 --- a/synapse/rest/media/v1/media_storage.py +++ b/synapse/rest/media/v1/media_storage.py @@ -24,9 +24,8 @@ from twisted.internet import defer from twisted.protocols.basic import FileSender -from synapse.util import logcontext +from synapse.logging.context import defer_to_thread, make_deferred_yieldable from synapse.util.file_consumer import BackgroundFileConsumer -from synapse.util.logcontext import make_deferred_yieldable from ._base import Responder @@ -65,7 +64,7 @@ def store_file(self, source, file_info): with self.store_into_file(file_info) as (f, fname, finish_cb): # Write to the main repository - yield logcontext.defer_to_thread( + yield defer_to_thread( self.hs.get_reactor(), _write_file_synchronously, source, f ) yield finish_cb() diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py index 053346fb86d2..5871737bfdfb 100644 --- a/synapse/rest/media/v1/preview_url_resource.py +++ b/synapse/rest/media/v1/preview_url_resource.py @@ -42,11 +42,11 @@ wrap_json_request_handler, ) from synapse.http.servlet import parse_integer, parse_string +from synapse.logging.context import make_deferred_yieldable, run_in_background from synapse.metrics.background_process_metrics import run_as_background_process from synapse.rest.media.v1._base import get_filename_from_headers from synapse.util.async_helpers import ObservableDeferred from synapse.util.caches.expiringcache import ExpiringCache -from synapse.util.logcontext import make_deferred_yieldable, run_in_background from synapse.util.stringutils import random_string from ._base import FileInfo diff --git a/synapse/rest/media/v1/storage_provider.py b/synapse/rest/media/v1/storage_provider.py index 359b45ebfc4f..e8f559acc194 100644 --- a/synapse/rest/media/v1/storage_provider.py +++ b/synapse/rest/media/v1/storage_provider.py @@ -20,8 +20,7 @@ from twisted.internet import defer from synapse.config._base import Config -from synapse.util import logcontext -from synapse.util.logcontext import run_in_background +from synapse.logging.context import defer_to_thread, run_in_background from .media_storage import FileResponder @@ -125,7 +124,7 @@ def store_file(self, path, file_info): if not os.path.exists(dirname): os.makedirs(dirname) - return logcontext.defer_to_thread( + return defer_to_thread( self.hs.get_reactor(), shutil.copyfile, primary_fname, backup_fname ) diff --git a/synapse/state/__init__.py b/synapse/state/__init__.py index 1b454a56a196..9f708fa20546 100644 --- a/synapse/state/__init__.py +++ b/synapse/state/__init__.py @@ -28,11 +28,11 @@ from synapse.api.constants import EventTypes from synapse.api.room_versions import 
KNOWN_ROOM_VERSIONS, StateResolutionVersions from synapse.events.snapshot import EventContext +from synapse.logging.utils import log_function from synapse.state import v1, v2 from synapse.util.async_helpers import Linearizer from synapse.util.caches import get_cache_factor_for from synapse.util.caches.expiringcache import ExpiringCache -from synapse.util.logutils import log_function from synapse.util.metrics import Measure logger = logging.getLogger(__name__) diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 29589853c62f..2f940dbae6a2 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -30,12 +30,12 @@ from twisted.internet import defer from synapse.api.errors import StoreError +from synapse.logging.context import LoggingContext, PreserveLoggingContext from synapse.metrics.background_process_metrics import run_as_background_process from synapse.storage.engines import PostgresEngine, Sqlite3Engine from synapse.types import get_domain_from_id from synapse.util import batch_iter from synapse.util.caches.descriptors import Cache -from synapse.util.logcontext import LoggingContext, PreserveLoggingContext from synapse.util.stringutils import exception_to_unicode # import a function which will return a monotonic time, in seconds diff --git a/synapse/storage/events.py b/synapse/storage/events.py index 86f8485704c3..b486ca50eb33 100644 --- a/synapse/storage/events.py +++ b/synapse/storage/events.py @@ -33,6 +33,8 @@ from synapse.api.errors import SynapseError from synapse.events import EventBase # noqa: F401 from synapse.events.snapshot import EventContext # noqa: F401 +from synapse.logging.context import PreserveLoggingContext, make_deferred_yieldable +from synapse.logging.utils import log_function from synapse.metrics import BucketCollector from synapse.metrics.background_process_metrics import run_as_background_process from synapse.state import StateResolutionStore @@ -45,8 +47,6 @@ from synapse.util.async_helpers import ObservableDeferred from synapse.util.caches.descriptors import cached, cachedInlineCallbacks from synapse.util.frozenutils import frozendict_json_encoder -from synapse.util.logcontext import PreserveLoggingContext, make_deferred_yieldable -from synapse.util.logutils import log_function from synapse.util.metrics import Measure logger = logging.getLogger(__name__) diff --git a/synapse/storage/events_worker.py b/synapse/storage/events_worker.py index 6d680d405ac1..09db8725112f 100644 --- a/synapse/storage/events_worker.py +++ b/synapse/storage/events_worker.py @@ -29,14 +29,14 @@ from synapse.events import FrozenEvent, event_type_from_format_version # noqa: F401 from synapse.events.snapshot import EventContext # noqa: F401 from synapse.events.utils import prune_event -from synapse.metrics.background_process_metrics import run_as_background_process -from synapse.types import get_domain_from_id -from synapse.util.logcontext import ( +from synapse.logging.context import ( LoggingContext, PreserveLoggingContext, make_deferred_yieldable, run_in_background, ) +from synapse.metrics.background_process_metrics import run_as_background_process +from synapse.types import get_domain_from_id from synapse.util.metrics import Measure from ._base import SQLBaseStore diff --git a/synapse/storage/stream.py b/synapse/storage/stream.py index d9482a384339..386a9dbe14d6 100644 --- a/synapse/storage/stream.py +++ b/synapse/storage/stream.py @@ -41,12 +41,12 @@ from twisted.internet import defer +from synapse.logging.context import make_deferred_yieldable, 
run_in_background from synapse.storage._base import SQLBaseStore from synapse.storage.engines import PostgresEngine from synapse.storage.events_worker import EventsWorkerStore from synapse.types import RoomStreamToken from synapse.util.caches.stream_change_cache import StreamChangeCache -from synapse.util.logcontext import make_deferred_yieldable, run_in_background logger = logging.getLogger(__name__) diff --git a/synapse/util/__init__.py b/synapse/util/__init__.py index 954e32fb2aaa..c6d2ce440409 100644 --- a/synapse/util/__init__.py +++ b/synapse/util/__init__.py @@ -21,10 +21,14 @@ from twisted.internet import defer, task -from synapse.util.logcontext import PreserveLoggingContext +from synapse.logging import context, formatter logger = logging.getLogger(__name__) +# Compatibility alias, for existing logconfigs. +logcontext = context +logformatter = formatter + def unwrapFirstError(failure): # defer.gatherResults and DeferredLists wrap failures. @@ -46,7 +50,7 @@ class Clock(object): @defer.inlineCallbacks def sleep(self, seconds): d = defer.Deferred() - with PreserveLoggingContext(): + with context.PreserveLoggingContext(): self._reactor.callLater(seconds, d.callback, seconds) res = yield d defer.returnValue(res) @@ -91,10 +95,10 @@ def call_later(self, delay, callback, *args, **kwargs): """ def wrapped_callback(*args, **kwargs): - with PreserveLoggingContext(): + with context.PreserveLoggingContext(): callback(*args, **kwargs) - with PreserveLoggingContext(): + with context.PreserveLoggingContext(): return self._reactor.callLater(delay, wrapped_callback, *args, **kwargs) def cancel_call_later(self, timer, ignore_errs=False): diff --git a/synapse/util/async_helpers.py b/synapse/util/async_helpers.py index 7757b8708ac1..58a6b8764f10 100644 --- a/synapse/util/async_helpers.py +++ b/synapse/util/async_helpers.py @@ -23,13 +23,12 @@ from twisted.internet.defer import CancelledError from twisted.python import failure -from synapse.util import Clock, logcontext, unwrapFirstError - -from .logcontext import ( +from synapse.logging.context import ( PreserveLoggingContext, make_deferred_yieldable, run_in_background, ) +from synapse.util import Clock, unwrapFirstError logger = logging.getLogger(__name__) @@ -153,7 +152,7 @@ def _concurrently_execute_inner(): except StopIteration: pass - return logcontext.make_deferred_yieldable( + return make_deferred_yieldable( defer.gatherResults( [run_in_background(_concurrently_execute_inner) for _ in range(limit)], consumeErrors=True, @@ -174,7 +173,7 @@ def yieldable_gather_results(func, iter, *args, **kwargs): Deferred[list]: Resolved when all functions have been invoked, or errors if one of the function calls fails. 
""" - return logcontext.make_deferred_yieldable( + return make_deferred_yieldable( defer.gatherResults( [run_in_background(func, item, *args, **kwargs) for item in iter], consumeErrors=True, diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py index d2f25063aaa8..675db2f4483c 100644 --- a/synapse/util/caches/descriptors.py +++ b/synapse/util/caches/descriptors.py @@ -24,7 +24,8 @@ from twisted.internet import defer -from synapse.util import logcontext, unwrapFirstError +from synapse.logging.context import make_deferred_yieldable, preserve_fn +from synapse.util import unwrapFirstError from synapse.util.async_helpers import ObservableDeferred from synapse.util.caches import get_cache_factor_for from synapse.util.caches.lrucache import LruCache @@ -388,7 +389,7 @@ def wrapped(*args, **kwargs): except KeyError: ret = defer.maybeDeferred( - logcontext.preserve_fn(self.function_to_call), obj, *args, **kwargs + preserve_fn(self.function_to_call), obj, *args, **kwargs ) def onErr(f): @@ -408,7 +409,7 @@ def onErr(f): observer = result_d.observe() if isinstance(observer, defer.Deferred): - return logcontext.make_deferred_yieldable(observer) + return make_deferred_yieldable(observer) else: return observer @@ -563,7 +564,7 @@ def errback(f): cached_defers.append( defer.maybeDeferred( - logcontext.preserve_fn(self.function_to_call), **args_to_call + preserve_fn(self.function_to_call), **args_to_call ).addCallbacks(complete_all, errback) ) @@ -571,7 +572,7 @@ def errback(f): d = defer.gatherResults(cached_defers, consumeErrors=True).addCallbacks( lambda _: results, unwrapFirstError ) - return logcontext.make_deferred_yieldable(d) + return make_deferred_yieldable(d) else: return results diff --git a/synapse/util/caches/response_cache.py b/synapse/util/caches/response_cache.py index cbe54d45dda3..d6908e169d45 100644 --- a/synapse/util/caches/response_cache.py +++ b/synapse/util/caches/response_cache.py @@ -16,9 +16,9 @@ from twisted.internet import defer +from synapse.logging.context import make_deferred_yieldable, run_in_background from synapse.util.async_helpers import ObservableDeferred from synapse.util.caches import register_cache -from synapse.util.logcontext import make_deferred_yieldable, run_in_background logger = logging.getLogger(__name__) @@ -78,7 +78,7 @@ def set(self, key, deferred): *deferred* should run its callbacks in the sentinel logcontext (ie, you should wrap normal synapse deferreds with - logcontext.run_in_background). + synapse.logging.context.run_in_background). 
Can return either a new Deferred (which also doesn't follow the synapse logcontext rules), or, if *deferred* was already complete, the actual diff --git a/synapse/util/distributor.py b/synapse/util/distributor.py index 5a79db821c98..45af8d3eeb03 100644 --- a/synapse/util/distributor.py +++ b/synapse/util/distributor.py @@ -17,8 +17,8 @@ from twisted.internet import defer +from synapse.logging.context import make_deferred_yieldable, run_in_background from synapse.metrics.background_process_metrics import run_as_background_process -from synapse.util.logcontext import make_deferred_yieldable, run_in_background logger = logging.getLogger(__name__) diff --git a/synapse/util/file_consumer.py b/synapse/util/file_consumer.py index 629ed44149bd..8b17d1c8b891 100644 --- a/synapse/util/file_consumer.py +++ b/synapse/util/file_consumer.py @@ -17,7 +17,7 @@ from twisted.internet import threads -from synapse.util.logcontext import make_deferred_yieldable, run_in_background +from synapse.logging.context import make_deferred_yieldable, run_in_background class BackgroundFileConsumer(object): diff --git a/synapse/util/metrics.py b/synapse/util/metrics.py index 01284d3cf812..c30b6de19c98 100644 --- a/synapse/util/metrics.py +++ b/synapse/util/metrics.py @@ -20,8 +20,8 @@ from twisted.internet import defer +from synapse.logging.context import LoggingContext from synapse.metrics import InFlightGauge -from synapse.util.logcontext import LoggingContext logger = logging.getLogger(__name__) diff --git a/synapse/util/ratelimitutils.py b/synapse/util/ratelimitutils.py index 06defa81992e..27bceac00ef6 100644 --- a/synapse/util/ratelimitutils.py +++ b/synapse/util/ratelimitutils.py @@ -20,7 +20,7 @@ from twisted.internet import defer from synapse.api.errors import LimitExceededError -from synapse.util.logcontext import ( +from synapse.logging.context import ( PreserveLoggingContext, make_deferred_yieldable, run_in_background, diff --git a/synapse/util/retryutils.py b/synapse/util/retryutils.py index 1a77456498ac..d8d0ceae5173 100644 --- a/synapse/util/retryutils.py +++ b/synapse/util/retryutils.py @@ -17,7 +17,7 @@ from twisted.internet import defer -import synapse.util.logcontext +import synapse.logging.context from synapse.api.errors import CodeMessageException logger = logging.getLogger(__name__) @@ -225,4 +225,4 @@ def store_retry_timings(): logger.exception("Failed to store destination_retry_timings") # we deliberately do this in the background. 
- synapse.util.logcontext.run_in_background(store_retry_timings) + synapse.logging.context.run_in_background(store_retry_timings) diff --git a/tests/appservice/test_scheduler.py b/tests/appservice/test_scheduler.py index db9f86bdacf0..04b8c2c07c62 100644 --- a/tests/appservice/test_scheduler.py +++ b/tests/appservice/test_scheduler.py @@ -22,7 +22,7 @@ _ServiceQueuer, _TransactionController, ) -from synapse.util.logcontext import make_deferred_yieldable +from synapse.logging.context import make_deferred_yieldable from tests import unittest diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py index 5a355f00cc2f..795703967d78 100644 --- a/tests/crypto/test_keyring.py +++ b/tests/crypto/test_keyring.py @@ -30,9 +30,12 @@ ServerKeyFetcher, StoreKeyFetcher, ) +from synapse.logging.context import ( + LoggingContext, + PreserveLoggingContext, + make_deferred_yieldable, +) from synapse.storage.keys import FetchKeyResult -from synapse.util import logcontext -from synapse.util.logcontext import LoggingContext from tests import unittest @@ -131,7 +134,7 @@ def test_verify_json_objects_for_server_awaits_previous_requests(self): @defer.inlineCallbacks def get_perspectives(**kwargs): self.assertEquals(LoggingContext.current_context().request, "11") - with logcontext.PreserveLoggingContext(): + with PreserveLoggingContext(): yield persp_deferred defer.returnValue(persp_resp) @@ -158,7 +161,7 @@ def first_lookup(): self.assertFalse(res_deferreds[0].called) res_deferreds[0].addBoth(self.check_context, None) - yield logcontext.make_deferred_yieldable(res_deferreds[0]) + yield make_deferred_yieldable(res_deferreds[0]) # let verify_json_objects_for_server finish its work before we kill the # logcontext @@ -184,7 +187,7 @@ def second_lookup(): [("server10", json1, 0, "test")] ) res_deferreds_2[0].addBoth(self.check_context, None) - yield logcontext.make_deferred_yieldable(res_deferreds_2[0]) + yield make_deferred_yieldable(res_deferreds_2[0]) # let verify_json_objects_for_server finish its work before we kill the # logcontext diff --git a/tests/http/federation/test_matrix_federation_agent.py b/tests/http/federation/test_matrix_federation_agent.py index 417fda3ab205..a49f9b322479 100644 --- a/tests/http/federation/test_matrix_federation_agent.py +++ b/tests/http/federation/test_matrix_federation_agent.py @@ -36,8 +36,8 @@ _cache_period_from_headers, ) from synapse.http.federation.srv_resolver import Server +from synapse.logging.context import LoggingContext from synapse.util.caches.ttlcache import TTLCache -from synapse.util.logcontext import LoggingContext from tests.http import TestServerTLSConnectionFactory, get_test_ca_cert_file from tests.server import FakeTransport, ThreadedMemoryReactorClock diff --git a/tests/http/federation/test_srv_resolver.py b/tests/http/federation/test_srv_resolver.py index cf6c6e95b520..65b51dc981b5 100644 --- a/tests/http/federation/test_srv_resolver.py +++ b/tests/http/federation/test_srv_resolver.py @@ -22,7 +22,7 @@ from twisted.names import dns, error from synapse.http.federation.srv_resolver import SrvResolver -from synapse.util.logcontext import LoggingContext +from synapse.logging.context import LoggingContext from tests import unittest from tests.utils import MockClock diff --git a/tests/http/test_fedclient.py b/tests/http/test_fedclient.py index c4c0d9b96817..b9d6d7ad1c1f 100644 --- a/tests/http/test_fedclient.py +++ b/tests/http/test_fedclient.py @@ -29,7 +29,7 @@ MatrixFederationHttpClient, MatrixFederationRequest, ) -from synapse.util.logcontext 
import LoggingContext +from synapse.logging.context import LoggingContext from tests.server import FakeTransport from tests.unittest import HomeserverTestCase diff --git a/tests/patch_inline_callbacks.py b/tests/patch_inline_callbacks.py index ee0add3455e7..220884311cae 100644 --- a/tests/patch_inline_callbacks.py +++ b/tests/patch_inline_callbacks.py @@ -28,7 +28,7 @@ def do_patch(): Patch defer.inlineCallbacks so that it checks the state of the logcontext on exit """ - from synapse.util.logcontext import LoggingContext + from synapse.logging.context import LoggingContext orig_inline_callbacks = defer.inlineCallbacks diff --git a/tests/push/test_http.py b/tests/push/test_http.py index 22c3f73ef31e..8ce6bb62da4b 100644 --- a/tests/push/test_http.py +++ b/tests/push/test_http.py @@ -18,8 +18,8 @@ from twisted.internet.defer import Deferred import synapse.rest.admin +from synapse.logging.context import make_deferred_yieldable from synapse.rest.client.v1 import login, room -from synapse.util.logcontext import make_deferred_yieldable from tests.unittest import HomeserverTestCase diff --git a/tests/rest/client/test_transactions.py b/tests/rest/client/test_transactions.py index 708dc26e6147..a8adc9a61d1b 100644 --- a/tests/rest/client/test_transactions.py +++ b/tests/rest/client/test_transactions.py @@ -2,9 +2,9 @@ from twisted.internet import defer, reactor +from synapse.logging.context import LoggingContext from synapse.rest.client.transactions import CLEANUP_PERIOD_MS, HttpTransactionCache from synapse.util import Clock -from synapse.util.logcontext import LoggingContext from tests import unittest from tests.utils import MockClock diff --git a/tests/rest/media/v1/test_media_storage.py b/tests/rest/media/v1/test_media_storage.py index 39c9342423af..bc662b61dbef 100644 --- a/tests/rest/media/v1/test_media_storage.py +++ b/tests/rest/media/v1/test_media_storage.py @@ -24,11 +24,11 @@ from twisted.internet.defer import Deferred +from synapse.logging.context import make_deferred_yieldable from synapse.rest.media.v1._base import FileInfo from synapse.rest.media.v1.filepath import MediaFilePaths from synapse.rest.media.v1.media_storage import MediaStorage from synapse.rest.media.v1.storage_provider import FileStorageProviderBackend -from synapse.util.logcontext import make_deferred_yieldable from tests import unittest diff --git a/tests/test_federation.py b/tests/test_federation.py index 6a8339b5614f..a73f18f88e22 100644 --- a/tests/test_federation.py +++ b/tests/test_federation.py @@ -3,9 +3,9 @@ from twisted.internet.defer import maybeDeferred, succeed from synapse.events import FrozenEvent +from synapse.logging.context import LoggingContext from synapse.types import Requester, UserID from synapse.util import Clock -from synapse.util.logcontext import LoggingContext from tests import unittest from tests.server import ThreadedMemoryReactorClock, setup_test_homeserver diff --git a/tests/test_server.py b/tests/test_server.py index da29ae92ced7..ba08483a4b69 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -26,8 +26,8 @@ from synapse.api.errors import Codes, SynapseError from synapse.http.server import JsonResource from synapse.http.site import SynapseSite, logger +from synapse.logging.context import make_deferred_yieldable from synapse.util import Clock -from synapse.util.logcontext import make_deferred_yieldable from tests import unittest from tests.server import ( diff --git a/tests/test_utils/logging_setup.py b/tests/test_utils/logging_setup.py index 813f984199be..2d96b0fa8da4 
100644 --- a/tests/test_utils/logging_setup.py +++ b/tests/test_utils/logging_setup.py @@ -17,7 +17,7 @@ import twisted.logger -from synapse.util.logcontext import LoggingContextFilter +from synapse.logging.context import LoggingContextFilter class ToTwistedHandler(logging.Handler): diff --git a/tests/unittest.py b/tests/unittest.py index d26804b5b507..a09e76c7c21d 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -33,9 +33,9 @@ from synapse.config.homeserver import HomeServerConfig from synapse.http.server import JsonResource from synapse.http.site import SynapseRequest +from synapse.logging.context import LoggingContext from synapse.server import HomeServer from synapse.types import Requester, UserID, create_requester -from synapse.util.logcontext import LoggingContext from tests.server import get_clock, make_request, render, setup_test_homeserver from tests.test_utils.logging_setup import setup_logging diff --git a/tests/util/caches/test_descriptors.py b/tests/util/caches/test_descriptors.py index 6f8f52537cd3..7807328e2fb1 100644 --- a/tests/util/caches/test_descriptors.py +++ b/tests/util/caches/test_descriptors.py @@ -21,7 +21,11 @@ from twisted.internet import defer, reactor from synapse.api.errors import SynapseError -from synapse.util import logcontext +from synapse.logging.context import ( + LoggingContext, + PreserveLoggingContext, + make_deferred_yieldable, +) from synapse.util.caches import descriptors from tests import unittest @@ -32,7 +36,7 @@ def run_on_reactor(): d = defer.Deferred() reactor.callLater(0, d.callback, 0) - return logcontext.make_deferred_yieldable(d) + return make_deferred_yieldable(d) class CacheTestCase(unittest.TestCase): @@ -153,7 +157,7 @@ class Cls(object): def fn(self, arg1): @defer.inlineCallbacks def inner_fn(): - with logcontext.PreserveLoggingContext(): + with PreserveLoggingContext(): yield complete_lookup defer.returnValue(1) @@ -161,10 +165,10 @@ def inner_fn(): @defer.inlineCallbacks def do_lookup(): - with logcontext.LoggingContext() as c1: + with LoggingContext() as c1: c1.name = "c1" r = yield obj.fn(1) - self.assertEqual(logcontext.LoggingContext.current_context(), c1) + self.assertEqual(LoggingContext.current_context(), c1) defer.returnValue(r) def check_result(r): @@ -174,18 +178,12 @@ def check_result(r): # set off a deferred which will do a cache lookup d1 = do_lookup() - self.assertEqual( - logcontext.LoggingContext.current_context(), - logcontext.LoggingContext.sentinel, - ) + self.assertEqual(LoggingContext.current_context(), LoggingContext.sentinel) d1.addCallback(check_result) # and another d2 = do_lookup() - self.assertEqual( - logcontext.LoggingContext.current_context(), - logcontext.LoggingContext.sentinel, - ) + self.assertEqual(LoggingContext.current_context(), LoggingContext.sentinel) d2.addCallback(check_result) # let the lookup complete @@ -210,29 +208,25 @@ def inner_fn(): @defer.inlineCallbacks def do_lookup(): - with logcontext.LoggingContext() as c1: + with LoggingContext() as c1: c1.name = "c1" try: d = obj.fn(1) self.assertEqual( - logcontext.LoggingContext.current_context(), - logcontext.LoggingContext.sentinel, + LoggingContext.current_context(), LoggingContext.sentinel ) yield d self.fail("No exception thrown") except SynapseError: pass - self.assertEqual(logcontext.LoggingContext.current_context(), c1) + self.assertEqual(LoggingContext.current_context(), c1) obj = Cls() # set off a deferred which will do a cache lookup d1 = do_lookup() - self.assertEqual( - logcontext.LoggingContext.current_context(), - 
logcontext.LoggingContext.sentinel, - ) + self.assertEqual(LoggingContext.current_context(), LoggingContext.sentinel) return d1 @@ -288,23 +282,20 @@ def fn(self, arg1, arg2): @descriptors.cachedList("fn", "args1", inlineCallbacks=True) def list_fn(self, args1, arg2): - assert logcontext.LoggingContext.current_context().request == "c1" + assert LoggingContext.current_context().request == "c1" # we want this to behave like an asynchronous function yield run_on_reactor() - assert logcontext.LoggingContext.current_context().request == "c1" + assert LoggingContext.current_context().request == "c1" defer.returnValue(self.mock(args1, arg2)) - with logcontext.LoggingContext() as c1: + with LoggingContext() as c1: c1.request = "c1" obj = Cls() obj.mock.return_value = {10: "fish", 20: "chips"} d1 = obj.list_fn([10, 20], 2) - self.assertEqual( - logcontext.LoggingContext.current_context(), - logcontext.LoggingContext.sentinel, - ) + self.assertEqual(LoggingContext.current_context(), LoggingContext.sentinel) r = yield d1 - self.assertEqual(logcontext.LoggingContext.current_context(), c1) + self.assertEqual(LoggingContext.current_context(), c1) obj.mock.assert_called_once_with([10, 20], 2) self.assertEqual(r, {10: "fish", 20: "chips"}) obj.mock.reset_mock() diff --git a/tests/util/test_async_utils.py b/tests/util/test_async_utils.py index bf85d3b8ecff..f60918069ac6 100644 --- a/tests/util/test_async_utils.py +++ b/tests/util/test_async_utils.py @@ -16,9 +16,8 @@ from twisted.internet.defer import CancelledError, Deferred from twisted.internet.task import Clock -from synapse.util import logcontext +from synapse.logging.context import LoggingContext, PreserveLoggingContext from synapse.util.async_helpers import timeout_deferred -from synapse.util.logcontext import LoggingContext from tests.unittest import TestCase @@ -69,14 +68,14 @@ def test_logcontext_is_preserved_on_cancellation(self): @defer.inlineCallbacks def blocking(): non_completing_d = Deferred() - with logcontext.PreserveLoggingContext(): + with PreserveLoggingContext(): try: yield non_completing_d except CancelledError: blocking_was_cancelled[0] = True raise - with logcontext.LoggingContext("one") as context_one: + with LoggingContext("one") as context_one: # the errbacks should be run in the test logcontext def errback(res, deferred_name): self.assertIs( diff --git a/tests/util/test_linearizer.py b/tests/util/test_linearizer.py index ec7ba9719cf3..0ec8ef90ce8b 100644 --- a/tests/util/test_linearizer.py +++ b/tests/util/test_linearizer.py @@ -19,7 +19,8 @@ from twisted.internet import defer, reactor from twisted.internet.defer import CancelledError -from synapse.util import Clock, logcontext +from synapse.logging.context import LoggingContext +from synapse.util import Clock from synapse.util.async_helpers import Linearizer from tests import unittest @@ -51,13 +52,13 @@ def test_lots_of_queued_things(self): @defer.inlineCallbacks def func(i, sleep=False): - with logcontext.LoggingContext("func(%s)" % i) as lc: + with LoggingContext("func(%s)" % i) as lc: with (yield linearizer.queue("")): - self.assertEqual(logcontext.LoggingContext.current_context(), lc) + self.assertEqual(LoggingContext.current_context(), lc) if sleep: yield Clock(reactor).sleep(0) - self.assertEqual(logcontext.LoggingContext.current_context(), lc) + self.assertEqual(LoggingContext.current_context(), lc) func(0, sleep=True) for i in range(1, 100): diff --git a/tests/util/test_logcontext.py b/tests/util/test_logcontext.py index 8d69fbf11160..8b8455c8b7b0 100644 --- 
a/tests/util/test_logcontext.py +++ b/tests/util/test_logcontext.py @@ -1,8 +1,14 @@ import twisted.python.failure from twisted.internet import defer, reactor -from synapse.util import Clock, logcontext -from synapse.util.logcontext import LoggingContext +from synapse.logging.context import ( + LoggingContext, + PreserveLoggingContext, + make_deferred_yieldable, + nested_logging_context, + run_in_background, +) +from synapse.util import Clock from .. import unittest @@ -43,7 +49,7 @@ def _test_run_in_background(self, function): context_one.request = "one" # fire off function, but don't wait on it. - d2 = logcontext.run_in_background(function) + d2 = run_in_background(function) def cb(res): callback_completed[0] = True @@ -85,7 +91,7 @@ def blocking_function(): def test_run_in_background_with_non_blocking_fn(self): @defer.inlineCallbacks def nonblocking_function(): - with logcontext.PreserveLoggingContext(): + with PreserveLoggingContext(): yield defer.succeed(None) return self._test_run_in_background(nonblocking_function) @@ -94,7 +100,7 @@ def test_run_in_background_with_chained_deferred(self): # a function which returns a deferred which looks like it has been # called, but is actually paused def testfunc(): - return logcontext.make_deferred_yieldable(_chained_deferred_function()) + return make_deferred_yieldable(_chained_deferred_function()) return self._test_run_in_background(testfunc) @@ -128,7 +134,7 @@ def blocking_function(): with LoggingContext() as context_one: context_one.request = "one" - d1 = logcontext.make_deferred_yieldable(blocking_function()) + d1 = make_deferred_yieldable(blocking_function()) # make sure that the context was reset by make_deferred_yieldable self.assertIs(LoggingContext.current_context(), sentinel_context) @@ -144,7 +150,7 @@ def test_make_deferred_yieldable_with_chained_deferreds(self): with LoggingContext() as context_one: context_one.request = "one" - d1 = logcontext.make_deferred_yieldable(_chained_deferred_function()) + d1 = make_deferred_yieldable(_chained_deferred_function()) # make sure that the context was reset by make_deferred_yieldable self.assertIs(LoggingContext.current_context(), sentinel_context) @@ -161,7 +167,7 @@ def test_make_deferred_yieldable_on_non_deferred(self): with LoggingContext() as context_one: context_one.request = "one" - d1 = logcontext.make_deferred_yieldable("bum") + d1 = make_deferred_yieldable("bum") self._check_test_key("one") r = yield d1 @@ -170,7 +176,7 @@ def test_make_deferred_yieldable_on_non_deferred(self): def test_nested_logging_context(self): with LoggingContext(request="foo"): - nested_context = logcontext.nested_logging_context(suffix="bar") + nested_context = nested_logging_context(suffix="bar") self.assertEqual(nested_context.request, "foo-bar") diff --git a/tests/util/test_logformatter.py b/tests/util/test_logformatter.py index 297aebbfbeef..0fb60caacb1a 100644 --- a/tests/util/test_logformatter.py +++ b/tests/util/test_logformatter.py @@ -14,7 +14,7 @@ # limitations under the License. 
import sys -from synapse.util.logformatter import LogFormatter +from synapse.logging.formatter import LogFormatter from tests import unittest diff --git a/tests/utils.py b/tests/utils.py index da43166f3a0b..d8e55b0801de 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -34,6 +34,7 @@ from synapse.config.server import DEFAULT_ROOM_VERSION from synapse.federation.transport import server as federation_server from synapse.http.server import HttpServer +from synapse.logging.context import LoggingContext from synapse.server import HomeServer from synapse.storage import DataStore from synapse.storage.engines import PostgresEngine, create_engine @@ -42,7 +43,6 @@ _setup_new_database, prepare_database, ) -from synapse.util.logcontext import LoggingContext from synapse.util.ratelimitutils import FederationRateLimiter # set this to True to run the tests against postgres instead of sqlite. From ee91ac179c4e518fddbdb40a7b57b1f11fa71293 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Thu, 4 Jul 2019 16:24:13 +0100 Subject: [PATCH 3/5] Add a sytest blacklist file (#5611) * Add a sytest blacklist file * Add changelog * Add blacklist to manifest --- .buildkite/pipeline.yml | 9 ++- .buildkite/synapse_sytest.sh | 145 ----------------------------------- MANIFEST.in | 1 + changelog.d/5611.misc | 1 + sytest-blacklist | 35 +++++++++ 5 files changed, 43 insertions(+), 148 deletions(-) delete mode 100644 .buildkite/synapse_sytest.sh create mode 100644 changelog.d/5611.misc create mode 100644 sytest-blacklist diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml index 513eb3bde9cc..dd0f98cba0ec 100644 --- a/.buildkite/pipeline.yml +++ b/.buildkite/pipeline.yml @@ -173,11 +173,12 @@ steps: queue: "medium" command: - "bash .buildkite/merge_base_branch.sh" - - "bash .buildkite/synapse_sytest.sh" + - "bash /synapse_sytest.sh" plugins: - docker#v3.0.1: image: "matrixdotorg/sytest-synapse:py35" propagate-environment: true + always-pull: true retry: automatic: - exit_status: -1 @@ -192,11 +193,12 @@ steps: POSTGRES: "1" command: - "bash .buildkite/merge_base_branch.sh" - - "bash .buildkite/synapse_sytest.sh" + - "bash /synapse_sytest.sh" plugins: - docker#v3.0.1: image: "matrixdotorg/sytest-synapse:py35" propagate-environment: true + always-pull: true retry: automatic: - exit_status: -1 @@ -212,11 +214,12 @@ steps: WORKERS: "1" command: - "bash .buildkite/merge_base_branch.sh" - - "bash .buildkite/synapse_sytest.sh" + - "bash /synapse_sytest.sh" plugins: - docker#v3.0.1: image: "matrixdotorg/sytest-synapse:py35" propagate-environment: true + always-pull: true soft_fail: true retry: automatic: diff --git a/.buildkite/synapse_sytest.sh b/.buildkite/synapse_sytest.sh deleted file mode 100644 index 3011b88bb799..000000000000 --- a/.buildkite/synapse_sytest.sh +++ /dev/null @@ -1,145 +0,0 @@ -#!/bin/bash -# -# Fetch sytest, and then run the tests for synapse. The entrypoint for the -# sytest-synapse docker images. - -set -ex - -if [ -n "$BUILDKITE" ] -then - SYNAPSE_DIR=`pwd` -else - SYNAPSE_DIR="/src" -fi - -# Attempt to find a sytest to use. -# If /sytest exists, it means that a SyTest checkout has been mounted into the Docker image. -if [ -d "/sytest" ]; then - # If the user has mounted in a SyTest checkout, use that. - echo "Using local sytests..." 
- - # create ourselves a working directory and dos2unix some scripts therein - mkdir -p /work/jenkins - for i in install-deps.pl run-tests.pl tap-to-junit-xml.pl jenkins/prep_sytest_for_postgres.sh; do - dos2unix -n "/sytest/$i" "/work/$i" - done - ln -sf /sytest/tests /work - ln -sf /sytest/keys /work - SYTEST_LIB="/sytest/lib" -else - if [ -n "BUILDKITE_BRANCH" ] - then - branch_name=$BUILDKITE_BRANCH - else - # Otherwise, try and find out what the branch that the Synapse checkout is using. Fall back to develop if it's not a branch. - branch_name="$(git --git-dir=/src/.git symbolic-ref HEAD 2>/dev/null)" || branch_name="develop" - fi - - # Try and fetch the branch - echo "Trying to get same-named sytest branch..." - wget -q https://github.com/matrix-org/sytest/archive/$branch_name.tar.gz -O sytest.tar.gz || { - # Probably a 404, fall back to develop - echo "Using develop instead..." - wget -q https://github.com/matrix-org/sytest/archive/develop.tar.gz -O sytest.tar.gz - } - - mkdir -p /work - tar -C /work --strip-components=1 -xf sytest.tar.gz - SYTEST_LIB="/work/lib" -fi - -cd /work - -# PostgreSQL setup -if [ -n "$POSTGRES" ] -then - export PGUSER=postgres - export POSTGRES_DB_1=pg1 - export POSTGRES_DB_2=pg2 - - # Start the database - su -c 'eatmydata /usr/lib/postgresql/9.6/bin/pg_ctl -w -D /var/lib/postgresql/data start' postgres - - # Use the Jenkins script to write out the configuration for a PostgreSQL using Synapse - jenkins/prep_sytest_for_postgres.sh - - # Make the test databases for the two Synapse servers that will be spun up - su -c 'psql -c "CREATE DATABASE pg1;"' postgres - su -c 'psql -c "CREATE DATABASE pg2;"' postgres - -fi - -if [ -n "$OFFLINE" ]; then - # if we're in offline mode, just put synapse into the virtualenv, and - # hope that the deps are up-to-date. - # - # (`pip install -e` likes to reinstall setuptools even if it's already installed, - # so we just run setup.py explicitly.) - # - (cd $SYNAPSE_DIR && /venv/bin/python setup.py -q develop) -else - # We've already created the virtualenv, but lets double check we have all - # deps. - /venv/bin/pip install -q --upgrade --no-cache-dir -e $SYNAPSE_DIR - /venv/bin/pip install -q --upgrade --no-cache-dir \ - lxml psycopg2 coverage codecov tap.py - - # Make sure all Perl deps are installed -- this is done in the docker build - # so will only install packages added since the last Docker build - ./install-deps.pl -fi - - -# Run the tests ->&2 echo "+++ Running tests" - -RUN_TESTS=( - perl -I "$SYTEST_LIB" ./run-tests.pl --python=/venv/bin/python --synapse-directory=$SYNAPSE_DIR --coverage -O tap --all -) - -TEST_STATUS=0 - -if [ -n "$WORKERS" ]; then - RUN_TESTS+=(-I Synapse::ViaHaproxy --dendron-binary=/pydron.py) -else - RUN_TESTS+=(-I Synapse) -fi - -"${RUN_TESTS[@]}" "$@" > results.tap || TEST_STATUS=$? 
- -if [ $TEST_STATUS -ne 0 ]; then - >&2 echo -e "run-tests \e[31mFAILED\e[0m: exit code $TEST_STATUS" -else - >&2 echo -e "run-tests \e[32mPASSED\e[0m" -fi - ->&2 echo "--- Copying assets" - -# Copy out the logs -mkdir -p /logs -cp results.tap /logs/results.tap -rsync --ignore-missing-args --min-size=1B -av server-0 server-1 /logs --include "*/" --include="*.log.*" --include="*.log" --exclude="*" - -# Upload coverage to codecov and upload files, if running on Buildkite -if [ -n "$BUILDKITE" ] -then - /venv/bin/coverage combine || true - /venv/bin/coverage xml || true - /venv/bin/codecov -X gcov -f coverage.xml - - wget -O buildkite.tar.gz https://github.com/buildkite/agent/releases/download/v3.13.0/buildkite-agent-linux-amd64-3.13.0.tar.gz - tar xvf buildkite.tar.gz - chmod +x ./buildkite-agent - - # Upload the files - ./buildkite-agent artifact upload "/logs/**/*.log*" - ./buildkite-agent artifact upload "/logs/results.tap" - - if [ $TEST_STATUS -ne 0 ]; then - # Annotate, if failure - /venv/bin/python $SYNAPSE_DIR/.buildkite/format_tap.py /logs/results.tap "$BUILDKITE_LABEL" | ./buildkite-agent annotate --style="error" --context="$BUILDKITE_LABEL" - fi -fi - - -exit $TEST_STATUS diff --git a/MANIFEST.in b/MANIFEST.in index 2c59c7bdc29a..834ddfad39c6 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -7,6 +7,7 @@ include demo/README include demo/demo.tls.dh include demo/*.py include demo/*.sh +include sytest-blacklist recursive-include synapse/storage/schema *.sql recursive-include synapse/storage/schema *.sql.postgres diff --git a/changelog.d/5611.misc b/changelog.d/5611.misc new file mode 100644 index 000000000000..a2d169513912 --- /dev/null +++ b/changelog.d/5611.misc @@ -0,0 +1 @@ +Add a blacklist file to the repo to blacklist certain sytests from failing CI. diff --git a/sytest-blacklist b/sytest-blacklist new file mode 100644 index 000000000000..b760a48c57ca --- /dev/null +++ b/sytest-blacklist @@ -0,0 +1,35 @@ +# This file serves as a blacklist for SyTest tests that we expect will fail in +# Synapse. +# +# Each line of this file is scanned by sytest during a run and if the line +# exactly matches the name of a test, it will be marked as "expected fail", +# meaning the test will still run, but failure will not mark the entire test +# suite as failing. +# +# Test names are encouraged to have a bug accompanied with them, serving as an +# explanation for why the test has been excluded. 
+ +# Blacklisted due to https://github.com/matrix-org/synapse/issues/1679 +Remote room members also see posted message events + +# Blacklisted due to https://github.com/matrix-org/synapse/issues/2065 +Guest users can accept invites to private rooms over federation + +# Blacklisted due to https://github.com/vector-im/riot-web/issues/7211 +The only membership state included in a gapped incremental sync is for senders in the timeline + +# Blacklisted due to https://github.com/matrix-org/synapse/issues/1658 +Newly created users see their own presence in /initialSync (SYT-34) + +# Blacklisted due to https://github.com/matrix-org/synapse/issues/1396 +Should reject keys claiming to belong to a different user + +# Blacklisted due to https://github.com/matrix-org/synapse/issues/2306 +Users appear/disappear from directory when join_rules are changed +Users appear/disappear from directory when history_visibility are changed + +# Blacklisted due to https://github.com/matrix-org/synapse/issues/1531 +Enabling an unknown default rule fails with 404 + +# Blacklisted due to https://github.com/matrix-org/synapse/issues/1663 +New federated private chats get full presence information (SYN-115) From 1ee268d33d3e4eb40270eb169ca5266843481b41 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Fri, 5 Jul 2019 02:32:02 +1000 Subject: [PATCH 4/5] Improve the backwards compatibility re-exports of synapse.logging.context (#5617) * Improve the backwards compatibility re-exports of synapse.logging.context. * reexport logformatter too --- changelog.d/5617.misc | 1 + synapse/util/__init__.py | 6 +----- synapse/util/logcontext.py | 39 ++++++++++++++++++++++++++++++++++++ synapse/util/logformatter.py | 21 +++++++++++++++++++ 4 files changed, 62 insertions(+), 5 deletions(-) create mode 100644 changelog.d/5617.misc create mode 100644 synapse/util/logcontext.py create mode 100644 synapse/util/logformatter.py diff --git a/changelog.d/5617.misc b/changelog.d/5617.misc new file mode 100644 index 000000000000..bb3c02816789 --- /dev/null +++ b/changelog.d/5617.misc @@ -0,0 +1 @@ +Move logging code out of `synapse.util` and into `synapse.logging`. diff --git a/synapse/util/__init__.py b/synapse/util/__init__.py index c6d2ce440409..f506b2a695f6 100644 --- a/synapse/util/__init__.py +++ b/synapse/util/__init__.py @@ -21,14 +21,10 @@ from twisted.internet import defer, task -from synapse.logging import context, formatter +from synapse.logging import context logger = logging.getLogger(__name__) -# Compatibility alias, for existing logconfigs. -logcontext = context -logformatter = formatter - def unwrapFirstError(failure): # defer.gatherResults and DeferredLists wrap failures. diff --git a/synapse/util/logcontext.py b/synapse/util/logcontext.py new file mode 100644 index 000000000000..40e5c10a493d --- /dev/null +++ b/synapse/util/logcontext.py @@ -0,0 +1,39 @@ +# Copyright 2019 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
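The new `synapse/util/logcontext.py` below is a thin compatibility shim: it simply re-exports the moved names so that code and log configs still importing from the old path keep working. A quick sanity-check sketch (illustrative only, assuming both modules are importable):

from synapse.logging.context import LoggingContext as new_cls
from synapse.util.logcontext import LoggingContext as old_cls

# The shim re-exports the same objects rather than defining copies, so the
# old and new import paths resolve to the identical class.
assert old_cls is new_cls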
+ +""" +Backwards compatibility re-exports of ``synapse.logging.context`` functionality. +""" + +from synapse.logging.context import ( + LoggingContext, + LoggingContextFilter, + PreserveLoggingContext, + defer_to_thread, + make_deferred_yieldable, + nested_logging_context, + preserve_fn, + run_in_background, +) + +__all__ = [ + "defer_to_thread", + "LoggingContext", + "LoggingContextFilter", + "make_deferred_yieldable", + "nested_logging_context", + "preserve_fn", + "PreserveLoggingContext", + "run_in_background", +] diff --git a/synapse/util/logformatter.py b/synapse/util/logformatter.py new file mode 100644 index 000000000000..320e8f817404 --- /dev/null +++ b/synapse/util/logformatter.py @@ -0,0 +1,21 @@ +# Copyright 2019 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Backwards compatibility re-exports of ``synapse.logging.formatter`` functionality. +""" + +from synapse.logging.formatter import LogFormatter + +__all__ = ["LogFormatter"] From 0e5434264f4c3d4025278ebe47f365fd93d6fada Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Fri, 5 Jul 2019 10:44:12 +0100 Subject: [PATCH 5/5] Make errors about email password resets much clearer (#5616) The runtime errors that dealt with local email password resets talked about config options that users may not even have in their config file yet (if upgrading). Instead, the cryptic errors are now replaced with hopefully much more helpful ones. --- changelog.d/5616.misc | 1 + synapse/config/emailconfig.py | 19 ++++++++----------- 2 files changed, 9 insertions(+), 11 deletions(-) create mode 100644 changelog.d/5616.misc diff --git a/changelog.d/5616.misc b/changelog.d/5616.misc new file mode 100644 index 000000000000..9f94be6778c6 --- /dev/null +++ b/changelog.d/5616.misc @@ -0,0 +1 @@ +Make runtime errors surrounding password reset emails much clearer. diff --git a/synapse/config/emailconfig.py b/synapse/config/emailconfig.py index fcd55d3e3d76..8381b8eb296d 100644 --- a/synapse/config/emailconfig.py +++ b/synapse/config/emailconfig.py @@ -112,13 +112,17 @@ def read_config(self, config, **kwargs): missing = [] for k in required: if k not in email_config: - missing.append(k) + missing.append("email." + k) + + if config.get("public_baseurl") is None: + missing.append("public_base_url") if len(missing) > 0: raise RuntimeError( - "email.password_reset_behaviour is set to 'local' " - "but required keys are missing: %s" - % (", ".join(["email." + k for k in missing]),) + "Password resets emails are configured to be sent from " + "this homeserver due to a partial 'email' block. " + "However, the following required keys are missing: %s" + % (", ".join(missing),) ) # Templates for password reset emails @@ -156,13 +160,6 @@ def read_config(self, config, **kwargs): filepath, "email.password_reset_template_success_html" ) - if config.get("public_baseurl") is None: - raise RuntimeError( - "email.password_reset_behaviour is set to 'local' but no " - "public_baseurl is set. 
This is necessary to generate password " - "reset links" - ) - if self.email_enable_notifs: required = [ "smtp_host",