Optimise LoggingContext creation and copying

It turns out that the only thing we use the __dict__ of LoggingContext for is
`request`, and given we create lots of LoggingContexts and then copy them every
time we do a db transaction or log line, using the __dict__ seems a bit
redundant. Let's try to optimise things by making the request attribute
explicit.
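
As a rough illustration of the idea (a simplified sketch with made-up class names, not the real synapse code): keeping "__dict__" in __slots__ means every instance still carries a per-instance dict that copy_to has to walk, whereas an explicit "request" slot avoids the dict allocation and reduces the copy to a single attribute assignment.

    # Illustrative only: stand-in classes showing the before/after shape.
    class ContextWithDict(object):
        # "__dict__" in __slots__ re-enables arbitrary attributes, so every
        # instance still allocates a dict.
        __slots__ = ["name", "__dict__"]

        def __init__(self, name=None):
            self.name = name

        def copy_to(self, record):
            # has to copy whatever happened to land in the instance dict
            for key, value in self.__dict__.items():
                setattr(record, key, value)


    class ContextWithRequestSlot(object):
        # explicit slots only: no per-instance dict at all
        __slots__ = ["name", "request"]

        def __init__(self, name=None):
            self.name = name
            self.request = None

        def copy_to(self, record):
            # 'request' is the only field the log formatter reads
            record.request = self.request
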
Author: Richard van der Hoff (7 years ago)
commit 44a498418c (parent febdca4b37)
3 changed files:

    synapse/util/logcontext.py    | 25
    tests/crypto/test_keyring.py  | 14
    tests/util/test_logcontext.py | 16

--- a/synapse/util/logcontext.py
+++ b/synapse/util/logcontext.py
@@ -52,13 +52,16 @@ except Exception:
 class LoggingContext(object):
     """Additional context for log formatting. Contexts are scoped within a
     "with" block.

     Args:
         name (str): Name for the context for debugging.
     """

     __slots__ = [
-        "previous_context", "name", "usage_start", "usage_end", "main_thread",
-        "__dict__", "tag", "alive",
+        "previous_context", "name", "ru_stime", "ru_utime",
+        "db_txn_count", "db_txn_duration", "usage_start", "usage_end",
+        "main_thread", "alive",
+        "request", "tag",
     ]

     thread_local = threading.local()
@@ -96,7 +99,9 @@ class LoggingContext(object):
         self.db_txn_count = 0
         self.db_txn_duration = 0.
         self.usage_start = None
+        self.usage_end = None
         self.main_thread = threading.current_thread()
+        self.request = None
         self.tag = ""
         self.alive = True
@@ -105,7 +110,11 @@ class LoggingContext(object):
     @classmethod
     def current_context(cls):
-        """Get the current logging context from thread local storage"""
+        """Get the current logging context from thread local storage
+
+        Returns:
+            LoggingContext: the current logging context
+        """
         return getattr(cls.thread_local, "current_context", cls.sentinel)

     @classmethod
@@ -155,11 +164,13 @@ class LoggingContext(object):
         self.alive = False

     def copy_to(self, record):
-        """Copy fields from this context to the record"""
-        for key, value in self.__dict__.items():
-            setattr(record, key, value)
+        """Copy logging fields from this context to a log record or
+        another LoggingContext
+        """

-        record.ru_utime, record.ru_stime = self.get_resource_usage()
+        # 'request' is the only field we currently use in the logger, so that's
+        # all we need to copy
+        record.request = self.request

     def start(self):
         if threading.current_thread() is not self.main_thread:

--- a/tests/crypto/test_keyring.py
+++ b/tests/crypto/test_keyring.py
@@ -68,7 +68,7 @@ class KeyringTestCase(unittest.TestCase):
     def check_context(self, _, expected):
         self.assertEquals(
-            getattr(LoggingContext.current_context(), "test_key", None),
+            getattr(LoggingContext.current_context(), "request", None),
             expected
         )
@@ -82,7 +82,7 @@ class KeyringTestCase(unittest.TestCase):
         lookup_2_deferred = defer.Deferred()

         with LoggingContext("one") as context_one:
-            context_one.test_key = "one"
+            context_one.request = "one"

             wait_1_deferred = kr.wait_for_previous_lookups(
                 ["server1"],
@@ -96,7 +96,7 @@ class KeyringTestCase(unittest.TestCase):
             wait_1_deferred.addBoth(self.check_context, "one")

         with LoggingContext("two") as context_two:
-            context_two.test_key = "two"
+            context_two.request = "two"

             # set off another wait. It should block because the first lookup
             # hasn't yet completed.
@@ -137,7 +137,7 @@ class KeyringTestCase(unittest.TestCase):
         @defer.inlineCallbacks
         def get_perspectives(**kwargs):
             self.assertEquals(
-                LoggingContext.current_context().test_key, "11",
+                LoggingContext.current_context().request, "11",
             )
             with logcontext.PreserveLoggingContext():
                 yield persp_deferred
@@ -145,7 +145,7 @@ class KeyringTestCase(unittest.TestCase):
         self.http_client.post_json.side_effect = get_perspectives

         with LoggingContext("11") as context_11:
-            context_11.test_key = "11"
+            context_11.request = "11"

             # start off a first set of lookups
             res_deferreds = kr.verify_json_objects_for_server(
@@ -173,7 +173,7 @@ class KeyringTestCase(unittest.TestCase):
             self.assertIs(LoggingContext.current_context(), context_11)

             context_12 = LoggingContext("12")
-            context_12.test_key = "12"
+            context_12.request = "12"
             with logcontext.PreserveLoggingContext(context_12):
                 # a second request for a server with outstanding requests
                 # should block rather than start a second call
@@ -211,7 +211,7 @@ class KeyringTestCase(unittest.TestCase):
         sentinel_context = LoggingContext.current_context()

         with LoggingContext("one") as context_one:
-            context_one.test_key = "one"
+            context_one.request = "one"

             defer = kr.verify_json_for_server("server9", {})
             try:

--- a/tests/util/test_logcontext.py
+++ b/tests/util/test_logcontext.py
@@ -12,12 +12,12 @@ class LoggingContextTestCase(unittest.TestCase):
     def _check_test_key(self, value):
         self.assertEquals(
-            LoggingContext.current_context().test_key, value
+            LoggingContext.current_context().request, value
         )

     def test_with_context(self):
         with LoggingContext() as context_one:
-            context_one.test_key = "test"
+            context_one.request = "test"
             self._check_test_key("test")

     @defer.inlineCallbacks
@@ -25,14 +25,14 @@ class LoggingContextTestCase(unittest.TestCase):
         @defer.inlineCallbacks
         def competing_callback():
             with LoggingContext() as competing_context:
-                competing_context.test_key = "competing"
+                competing_context.request = "competing"
                 yield sleep(0)
                 self._check_test_key("competing")

         reactor.callLater(0, competing_callback)

         with LoggingContext() as context_one:
-            context_one.test_key = "one"
+            context_one.request = "one"
             yield sleep(0)
             self._check_test_key("one")
@@ -43,14 +43,14 @@ class LoggingContextTestCase(unittest.TestCase):
         @defer.inlineCallbacks
         def cb():
-            context_one.test_key = "one"
+            context_one.request = "one"
             yield function()
             self._check_test_key("one")
             callback_completed[0] = True

         with LoggingContext() as context_one:
-            context_one.test_key = "one"
+            context_one.request = "one"

             # fire off function, but don't wait on it.
             logcontext.preserve_fn(cb)()
@@ -107,7 +107,7 @@ class LoggingContextTestCase(unittest.TestCase):
         sentinel_context = LoggingContext.current_context()

         with LoggingContext() as context_one:
-            context_one.test_key = "one"
+            context_one.request = "one"

             d1 = logcontext.make_deferred_yieldable(blocking_function())
             # make sure that the context was reset by make_deferred_yieldable
@@ -124,7 +124,7 @@ class LoggingContextTestCase(unittest.TestCase):
         argument isn't actually a deferred"""
         with LoggingContext() as context_one:
-            context_one.test_key = "one"
+            context_one.request = "one"

             d1 = logcontext.make_deferred_yieldable("bum")
             self._check_test_key("one")