From aeb6a083f79e0de2873822d21280995486403fce Mon Sep 17 00:00:00 2001 From: visi Date: Wed, 12 Feb 2025 09:44:57 -0500 Subject: [PATCH 01/52] wip --- synapse/cortex.py | 51 ++++++++++++++++++++++++++------------------- synapse/lib/link.py | 26 +++++++++++++++++------ 2 files changed, 50 insertions(+), 27 deletions(-) diff --git a/synapse/cortex.py b/synapse/cortex.py index 6f5d6b64cdd..95668b7c9b1 100644 --- a/synapse/cortex.py +++ b/synapse/cortex.py @@ -5883,38 +5883,47 @@ async def _getMirrorProxy(self, opts): return None if self.stormpool.size() == 0: - logger.warning('Storm query mirror pool is empty, running query locally.') + await self.logWarn('Storm query mirror pool is empty.') return None - proxy = None + timeout = self.stormpoolopts.get('timeout:connection') try: - timeout = self.stormpoolopts.get('timeout:connection') proxy = await self.stormpool.proxy(timeout=timeout) - proxyname = proxy._ahainfo.get('name') - if proxyname is not None and proxyname == self.ahasvcname: - # we are part of the pool and were selected. Convert to local use. - return None + except TimeoutError as e: + await self.logWarn('Timeout connecting to storm pool mirror.', timeout=timeout) + return None + + proxyname = proxy._ahainfo.get('name') + if proxyname is not None and proxyname == self.ahasvcname: + # we are part of the pool and were selected. Convert to local use. 
+ return None - curoffs = opts.setdefault('nexsoffs', await self.getNexsIndx() - 1) + curoffs = opts.setdefault('nexsoffs', await self.getNexsIndx() - 1) + + try: miroffs = await s_common.wait_for(proxy.getNexsIndx(), timeout) - 1 - if (delta := curoffs - miroffs) > MAX_NEXUS_DELTA: - mesg = (f'Pool mirror [{proxyname}] Nexus offset delta too large ' - f'({delta} > {MAX_NEXUS_DELTA}), running query locally.') - logger.warning(mesg, extra=await self.getLogExtra(delta=delta, mirror=proxyname, mirror_offset=miroffs)) - return None - return proxy + except s_exc.IsFini as e: + await self.logWarn('Storm pool mirror is shutting down.', mirror=proxyname) + return None - except (TimeoutError, s_exc.IsFini): - if proxy is None: - logger.warning('Timeout waiting for pool mirror, running query locally.') - else: - mesg = f'Timeout waiting for pool mirror [{proxyname}] Nexus offset, running query locally.' - logger.warning(mesg, extra=await self.getLogExtra(mirror=proxyname)) - await proxy.fini() + except TimeoutError as e: + mesg = 'Timeout retrieving storm pool mirror nexus offset.' + await self.logWarn(mesg, mirror=proxyname, nexsoffs=curoffs, timeout=timeout) + return None + + if (delta := curoffs - miroffs) > MAX_NEXUS_DELTA: + mesg = f'Pool mirror nexus offset delta too large.' 
+ await self.logWarn(mesg, mirror=proxyname, delta=delta, nexsoffs=curoffs, mirror_offset=miroffs) return None + return proxy + + async def logWarn(self, mesg, **extra): + extra = await self.getLogExtra(**extra) + logger.warning(mesg, extra=extra) + async def storm(self, text, opts=None): opts = self._initStormOpts(opts) diff --git a/synapse/lib/link.py b/synapse/lib/link.py index 891db6cce6d..f13006bcc41 100644 --- a/synapse/lib/link.py +++ b/synapse/lib/link.py @@ -32,37 +32,51 @@ async def connect(host, port, ssl=None, hostname=None, linkinfo=None): reader, writer = await asyncio.open_connection(host, port, ssl=ssl, server_hostname=hostname) return await Link.anit(reader, writer, info=info) -async def listen(host, port, onlink, ssl=None): +async def listen(host, port, onlink, ssl=None, linkinfo=None): ''' Listen on the given host/port and fire onlink(Link). Returns a server object that contains the listening sockets ''' + info = { + 'ssl': ssl, + 'tls': bool(ssl), + 'host': host, + 'port': port, + } + + if linkinfo is not None: + info.update(linkinfo) + async def onconn(reader, writer): - info = {'tls': bool(ssl)} link = await Link.anit(reader, writer, info=info) link.schedCoro(onlink(link)) server = await asyncio.start_server(onconn, host=host, port=port, ssl=ssl) return server -async def unixlisten(path, onlink): +async def unixlisten(path, onlink, linkinfo=None): ''' Start an PF_UNIX server listening on the given path. ''' info = {'path': path, 'unix': True} + if linkinfo is not None: + info.update(linkinfo) async def onconn(reader, writer): link = await Link.anit(reader, writer, info=info) link.schedCoro(onlink(link)) return await asyncio.start_unix_server(onconn, path=path) -async def unixconnect(path): +async def unixconnect(path, linkinfo=None): ''' Connect to a PF_UNIX server listening on the given path. 
''' - reader, writer = await asyncio.open_unix_connection(path=path) info = {'path': path, 'unix': True} + if linkinfo is not None: + info.update(linkinfo) + + reader, writer = await asyncio.open_unix_connection(path=path) return await Link.anit(reader, writer, info=info) async def linkfile(mode='wb'): @@ -270,7 +284,7 @@ async def tx(self, mesg): await self.writer.drain() - except (asyncio.CancelledError, Exception) as e: + except Exception as e: await self.fini() From b894d1589034c9b0e1b6ea51fc0e2e62a96b50dd Mon Sep 17 00:00:00 2001 From: visi Date: Wed, 12 Feb 2025 18:56:34 -0500 Subject: [PATCH 02/52] wip --- synapse/common.py | 78 -------------------- synapse/cortex.py | 3 +- synapse/lib/cell.py | 41 +++++------ synapse/lib/coro.py | 14 +++- synapse/lib/logging.py | 129 +++++++++++++++++++++++++++++++++ synapse/lib/structlog.py | 40 ---------- synapse/tests/test_lib_cell.py | 4 +- synapse/tests/utils.py | 8 +- synapse/tools/aha/easycert.py | 4 +- synapse/tools/aha/list.py | 4 +- synapse/tools/autodoc.py | 3 +- synapse/tools/backup.py | 8 +- synapse/tools/cellauth.py | 7 +- synapse/tools/cmdr.py | 4 +- synapse/tools/feed.py | 3 +- synapse/tools/rstorm.py | 3 +- synapse/utils/getrefs.py | 3 +- 17 files changed, 188 insertions(+), 168 deletions(-) create mode 100644 synapse/lib/logging.py delete mode 100644 synapse/lib/structlog.py diff --git a/synapse/common.py b/synapse/common.py index c36ded1ce35..670a1b9db97 100644 --- a/synapse/common.py +++ b/synapse/common.py @@ -37,7 +37,6 @@ import synapse.exc as s_exc import synapse.lib.const as s_const import synapse.lib.msgpack as s_msgpack -import synapse.lib.structlog as s_structlog import synapse.vendor.cpython.lib.ipaddress as ipaddress import synapse.vendor.cpython.lib.http.cookies as v_cookies @@ -781,83 +780,6 @@ def makedirs(path, mode=0o777): def iterzip(*args, fillvalue=None): return itertools.zip_longest(*args, fillvalue=fillvalue) -def _getLogConfFromEnv(defval=None, structlog=None, datefmt=None): - if 
structlog: - structlog = 'true' - else: - structlog = 'false' - defval = os.getenv('SYN_LOG_LEVEL', defval) - datefmt = os.getenv('SYN_LOG_DATEFORMAT', datefmt) - structlog = envbool('SYN_LOG_STRUCT', structlog) - ret = {'defval': defval, 'structlog': structlog, 'datefmt': datefmt} - return ret - -def normLogLevel(valu): - ''' - Norm a log level value to a integer. - - Args: - valu: The value to norm ( a string or integer ). - - Returns: - int: A valid Logging log level. - ''' - if isinstance(valu, int): - if valu not in s_const.LOG_LEVEL_INVERSE_CHOICES: - raise s_exc.BadArg(mesg=f'Invalid log level provided: {valu}', valu=valu) - return valu - if isinstance(valu, str): - valu = valu.strip() - try: - valu = int(valu) - except ValueError: - valu = valu.upper() - ret = s_const.LOG_LEVEL_CHOICES.get(valu) - if ret is None: - raise s_exc.BadArg(mesg=f'Invalid log level provided: {valu}', valu=valu) from None - return ret - else: - return normLogLevel(valu) - raise s_exc.BadArg(mesg=f'Unknown log level type: {type(valu)} {valu}', valu=valu) - -def setlogging(mlogger, defval=None, structlog=None, log_setup=True, datefmt=None): - ''' - Configure synapse logging. - - Args: - mlogger (logging.Logger): Reference to a logging.Logger() - defval (str): Default log level. May be an integer. - structlog (bool): Enabled structured (jsonl) logging output. - datefmt (str): Optional strftime format string. - - Notes: - This calls logging.basicConfig and should only be called once per process. 
- - Returns: - None - ''' - ret = _getLogConfFromEnv(defval, structlog, datefmt) - - datefmt = ret.get('datefmt') - log_level = ret.get('defval') - log_struct = ret.get('structlog') - - if log_level: # pragma: no cover - - log_level = normLogLevel(log_level) - - if log_struct: - handler = logging.StreamHandler() - formatter = s_structlog.JsonFormatter(datefmt=datefmt) - handler.setFormatter(formatter) - logging.basicConfig(level=log_level, handlers=(handler,)) - else: - logging.basicConfig(level=log_level, format=s_const.LOG_FORMAT, datefmt=datefmt) - if log_setup: - mlogger.info('log level set to %s', s_const.LOG_LEVEL_INVERSE_CHOICES.get(log_level)) - - return ret - syndir_default = '~/.syn' syndir = os.getenv('SYN_DIR') if syndir is None: diff --git a/synapse/cortex.py b/synapse/cortex.py index 95668b7c9b1..f7166d60264 100644 --- a/synapse/cortex.py +++ b/synapse/cortex.py @@ -36,6 +36,7 @@ import synapse.lib.dyndeps as s_dyndeps import synapse.lib.grammar as s_grammar import synapse.lib.httpapi as s_httpapi +import synapse.lib.logging as s_logging import synapse.lib.msgpack as s_msgpack import synapse.lib.modules as s_modules import synapse.lib.schemas as s_schemas @@ -927,7 +928,7 @@ async def initServiceStorage(self): self._initCorePerms() # Reset the storm:log:level from the config value to an int for internal use. 
- self.conf['storm:log:level'] = s_common.normLogLevel(self.conf.get('storm:log:level')) + self.conf['storm:log:level'] = s_logging.normLogLevel(self.conf.get('storm:log:level')) self.stormlog = self.conf.get('storm:log') self.stormloglvl = self.conf.get('storm:log:level') diff --git a/synapse/lib/cell.py b/synapse/lib/cell.py index e904edf5792..a9b211a939a 100644 --- a/synapse/lib/cell.py +++ b/synapse/lib/cell.py @@ -48,6 +48,7 @@ import synapse.lib.certdir as s_certdir import synapse.lib.dyndeps as s_dyndeps import synapse.lib.httpapi as s_httpapi +import synapse.lib.logging as s_logging import synapse.lib.msgpack as s_msgpack import synapse.lib.schemas as s_schemas import synapse.lib.spooled as s_spooled @@ -189,7 +190,8 @@ def _iterBackupProc(path, linkinfo): ''' # This logging call is okay to run since we're executing in # our own process space and no logging has been configured. - s_common.setlogging(logger, **linkinfo.get('logconf')) + logconf = linkinfo.get('logconf') + s_logging.setup(level=logconf.get('level'), structlog=logconf.get('structlog')) logger.info(f'Backup streaming process for [{path}] starting.') asyncio.run(_iterBackupWork(path, linkinfo)) @@ -2609,7 +2611,7 @@ def _backupProc(pipe, srcdir, dstdir, lmdbpaths, logconf): (In a separate process) Actually do the backup ''' # This is a new process: configure logging - s_common.setlogging(logger, **logconf) + s_logging.setup(level=logconf.get('level'), structlog=logconf.get('structlog')) try: with s_t_backup.capturelmdbs(srcdir) as lmdbinfo: @@ -3661,25 +3663,20 @@ async def getLogExtra(self, **kwargs): Returns: Dict: A dictionary ''' - extra = {**kwargs} sess = s_scope.get('sess') # type: s_daemon.Sess user = s_scope.get('user') # type: s_auth.User + if user: - extra['user'] = user.iden - extra['username'] = user.name + kwargs['user'] = user.iden + kwargs['username'] = user.name elif sess and sess.user: - extra['user'] = sess.user.iden - extra['username'] = sess.user.name - return {'synapse': 
extra} + kwargs['user'] = sess.user.iden + kwargs['username'] = sess.user.name + + return s_logging.getLogExtra(**kwargs) async def _getSpawnLogConf(self): - conf = self.conf.get('_log_conf') - if conf: - conf = conf.copy() - else: - conf = s_common._getLogConfFromEnv() - conf['log_setup'] = False - return conf + return self.conf.get('_log_conf', {}) def modCellConf(self, conf): ''' @@ -3786,7 +3783,7 @@ def getArgParser(cls, conf=None): pars.add_argument('--log-level', default='INFO', choices=list(s_const.LOG_LEVEL_CHOICES.keys()), help='Specify the Python logging log level.', type=str.upper) - pars.add_argument('--structured-logging', default=False, action='store_true', + pars.add_argument('--structured-logging', default=True, action='store_true', help='Use structured logging.') telendef = None @@ -4226,12 +4223,14 @@ async def initFromArgv(cls, argv, outp=None): path = s_common.genpath(opts.dirn, 'cell.yaml') mods_path = s_common.genpath(opts.dirn, 'cell.mods.yaml') - logconf = s_common.setlogging(logger, defval=opts.log_level, - structlog=opts.structured_logging) + level = s_logging.normLogLevel(opts.log_level) + logconf = s_logging.setup(level=level, structlog=opts.structured_logging) + + extra = s_logging.getLogExtra(service_type=cls.getCellType(), + service_version=cls.VERSTRING, + synapse_version=s_version.verstring) - logger.info(f'Starting {cls.getCellType()} version {cls.VERSTRING}, Synapse version: {s_version.verstring}', - extra={'synapse': {'svc_type': cls.getCellType(), 'svc_version': cls.VERSTRING, - 'synapse_version': s_version.verstring}}) + logger.info('Starting synapse service.', extra=extra) await cls._initBootRestore(opts.dirn) diff --git a/synapse/lib/coro.py b/synapse/lib/coro.py index c341b2bb493..d37376d4a74 100644 --- a/synapse/lib/coro.py +++ b/synapse/lib/coro.py @@ -19,6 +19,8 @@ import synapse.glob as s_glob import synapse.common as s_common +import synapse.lib.logging as s_logging + def iscoro(item): return inspect.iscoroutine(item) 
@@ -215,8 +217,12 @@ def func(*args, **kwargs): return func def _exectodo(que, todo, logconf): + # This is a new process: configure logging - s_common.setlogging(logger, **logconf) + level = logconf.get('level') + structlog = logconf.get('structlog') + s_logging.setup(level=level, structlog=structlog) + func, args, kwargs = todo try: ret = func(*args, **kwargs) @@ -311,7 +317,11 @@ def _runtodo(todo): return todo[0](*todo[1], **todo[2]) def _init_pool_worker(logger_, logconf): - s_common.setlogging(logger_, **logconf) + + level = logconf.get('level') + structlog = logconf.get('structlog') + s_logging.setup(level=level, structlog=structlog) + p = multiprocessing.current_process() logger.debug(f'Initialized new forkserver pool worker: name={p.name} pid={p.ident}') diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py new file mode 100644 index 00000000000..4d3030742ef --- /dev/null +++ b/synapse/lib/logging.py @@ -0,0 +1,129 @@ +import os +import json +import logging +import collections + +import synapse.common as s_common +import synapse.lib.const as s_const + +logger = logging.getLogger(__name__) + +logfifo = collections.deque() + +def _addLogInfo(info): + logfifo.append(info) + # TODO notify waiters... 
+ +# TODO: getLogInfo(wait=True) + +def getLogExtra(**kwargs): + return {'synapse': kwargs} + +class Formatter(logging.Formatter): + + def genLogInfo(self, record): + + record.message = record.getMessage() + + loginfo = { + 'message': record.message, + 'logger': { + 'name': record.name, + 'filename': record.filename, + 'func': record.funcName, + }, + 'level': record.levelname, + 'time': self.formatTime(record, self.datefmt), + } + + if record.exc_info: + loginfo['err'] = s_common.err(record.exc_info[1], fulltb=True) + + loginfo['synapse'] = record.__dict__.get('synapse') + + _addLogInfo(loginfo) + + return loginfo + + def format(self, record: logging.LogRecord): + loginfo = self.genLogInfo(record) + return json.dumps(loginfo, default=str) + +class TextFormatter(Formatter): + + def format(self, record): + + loginfo = self.genLogInfo(record) + mesg = loginfo.get('message') + + syns = loginfo.get('synapse') + if syns: + mesg += f' ({json.dumps(syns, default=str)})' + + return mesg + +def setup(level=logging.WARNING, structlog=False): + ''' + Configure synapse logging. + ''' + conf = getLogConfFromEnv() + conf.setdefault('level', level) + conf.setdefault('structlog', structlog) + + fmtclass = Formatter + if not conf.get('structlog'): + fmtclass = TextFormatter + + handler = logging.StreamHandler() + handler.setFormatter(fmtclass(datefmt=conf.get('datefmt'))) + logging.basicConfig(level=conf.get('level'), handlers=(handler,)) + + logger.info('log level set to %s', s_const.LOG_LEVEL_INVERSE_CHOICES.get(level)) + + return conf + +def getLogConfFromEnv(): + + conf = {} + + if (level := os.getenv('SYN_LOG_LEVEL')) is not None: + conf['level'] = normLogLevel(level) + + if (datefmt := os.getenv('SYN_LOG_DATEFORMAT')) is not None: + conf['datefmt'] = datefmt + + if (structlog := os.getenv('SYN_LOG_STRUCT')) is not None: + conf['structlog'] = structlog.lower() in ('1', 'true') + + return conf + +def normLogLevel(valu): + ''' + Norm a log level value to a integer. 
+ + Args: + valu: The value to norm ( a string or integer ). + + Returns: + int: A valid Logging log level. + ''' + if isinstance(valu, str): + + valu = valu.strip() + level = s_const.LOG_LEVEL_CHOICES.get(valu.upper()) + if level is not None: + return level + + try: + valu = int(valu) + except ValueError: + raise s_exc.BadArg(mesg=f'Invalid log level provided: {valu}', valu=valu) from None + + if isinstance(valu, int): + + if valu not in s_const.LOG_LEVEL_INVERSE_CHOICES: + raise s_exc.BadArg(mesg=f'Invalid log level provided: {valu}', valu=valu) + + return valu + + raise s_exc.BadArg(mesg=f'Unknown log level type: {type(valu)} {valu}', valu=valu) diff --git a/synapse/lib/structlog.py b/synapse/lib/structlog.py deleted file mode 100644 index daa59d35e7d..00000000000 --- a/synapse/lib/structlog.py +++ /dev/null @@ -1,40 +0,0 @@ -import json - -import logging - -import synapse.common as s_common - -class JsonFormatter(logging.Formatter): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - def format(self, record: logging.LogRecord): - - record.message = record.getMessage() - mesg = self.formatMessage(record) - ret = { - 'message': mesg, - 'logger': { - 'name': record.name, - 'process': record.processName, - 'filename': record.filename, - 'func': record.funcName, - }, - 'level': record.levelname, - 'time': self.formatTime(record, self.datefmt), - } - - if record.exc_info: - name, info = s_common.err(record.exc_info[1], fulltb=True) - # This is the actual exception name. The ename key is the function name. - info['errname'] = name - ret['err'] = info - - # stuffing our extra into a single dictionary avoids a loop - # over record.__dict__ extracting fields which are not known - # attributes for each log record. 
- extras = record.__dict__.get('synapse') - if extras: - ret.update({k: v for k, v in extras.items() if k not in ret}) - - return json.dumps(ret, default=str) diff --git a/synapse/tests/test_lib_cell.py b/synapse/tests/test_lib_cell.py index 3a520823485..b5dd1481e8e 100644 --- a/synapse/tests/test_lib_cell.py +++ b/synapse/tests/test_lib_cell.py @@ -448,7 +448,7 @@ async def test_cell_auth(self): self.true(await stream.wait(timeout=10)) msgs = stream.jsonlines() self.len(1, msgs) - self.eq('EchoAuthApi.adminOnlyLog', msgs[0].get('wrapped_func')) + self.eq('EchoAuthApi.adminOnlyLog', msgs[0]['synapse']['wrapped_func']) visi = await echo.auth.addUser('visi') await visi.setPasswd('foo') @@ -3158,7 +3158,7 @@ async def test_cell_check_sysctl(self): mesg += 'See https://synapse.docs.vertex.link/en/latest/synapse/devopsguide.html#performance-tuning ' mesg += 'for information about these sysctl parameters.' self.eq(msgs[0]['message'], mesg) - self.eq(msgs[0]['sysctls'], [ + self.eq(msgs[0]['synapse']['sysctls'], [ {'name': 'vm.dirty_expire_centisecs', 'expected': 21, 'actual': sysctls['vm.dirty_expire_centisecs']}, {'name': 'vm.dirty_writeback_centisecs', 'expected': 21, 'actual': sysctls['vm.dirty_writeback_centisecs']}, ]) diff --git a/synapse/tests/utils.py b/synapse/tests/utils.py index a6b6145b5c4..61146079f4d 100644 --- a/synapse/tests/utils.py +++ b/synapse/tests/utils.py @@ -69,12 +69,12 @@ import synapse.lib.output as s_output import synapse.lib.certdir as s_certdir import synapse.lib.httpapi as s_httpapi +import synapse.lib.logging as s_logging import synapse.lib.msgpack as s_msgpack import synapse.lib.jsonstor as s_jsonstor import synapse.lib.lmdbslab as s_lmdbslab import synapse.lib.modelrev as s_modelrev import synapse.lib.thishost as s_thishost -import synapse.lib.structlog as s_structlog import synapse.lib.stormtypes as s_stormtypes import synapse.tools.genpkg as s_genpkg @@ -1260,12 +1260,12 @@ def withCliPromptMock(self): @contextlib.asynccontextmanager 
async def withSetLoggingMock(self): ''' - Context manager to mock calls to the setlogging function to avoid unittests calling logging.basicconfig. + Context manager to mock calls to the logging setup function to avoid unittests calling logging.basicconfig. Returns: mock.MagicMock: Yields a mock.MagicMock object. ''' - with mock.patch('synapse.common.setlogging', + with mock.patch('synapse.lib.logging.setup', PickleableMagicMock(return_value=dict())) as patch: # type: mock.MagicMock yield patch @@ -1831,7 +1831,7 @@ def getStructuredAsyncLoggerStream(self, logname, mesg='') -> contextlib.Abstrac stream.setMesg(mesg) handler = logging.StreamHandler(stream) slogger = logging.getLogger(logname) - formatter = s_structlog.JsonFormatter() + formatter = s_logging.Formatter() handler.setFormatter(formatter) slogger.addHandler(handler) level = slogger.level diff --git a/synapse/tools/aha/easycert.py b/synapse/tools/aha/easycert.py index 8eefc352aee..c7ebca1c4b8 100644 --- a/synapse/tools/aha/easycert.py +++ b/synapse/tools/aha/easycert.py @@ -10,6 +10,7 @@ import synapse.lib.output as s_output import synapse.lib.certdir as s_certdir +import synapse.lib.logging as s_logging logger = logging.getLogger(__name__) @@ -75,12 +76,11 @@ async def main(argv, outp=None): # pragma: no cover if outp is None: outp = s_output.stdout - s_common.setlogging(logger, 'WARNING') - async with s_telepath.withTeleEnv(): await _main(argv, outp) return 0 if __name__ == '__main__': # pragma: no cover + s_logging.setup() sys.exit(asyncio.run(main(sys.argv[1:]))) diff --git a/synapse/tools/aha/list.py b/synapse/tools/aha/list.py index 890eb6b8b2b..3f15b151e68 100644 --- a/synapse/tools/aha/list.py +++ b/synapse/tools/aha/list.py @@ -8,6 +8,7 @@ import synapse.telepath as s_telepath import synapse.lib.output as s_output +import synapse.lib.logging as s_logging import synapse.lib.version as s_version logger = logging.getLogger(__name__) @@ -78,12 +79,11 @@ async def main(argv, outp=None): # pragma: no 
cover outp.printf('usage: python -m synapse.tools.aha.list [network name]') return 1 - s_common.setlogging(logger, 'WARNING') - async with s_telepath.withTeleEnv(): await _main(argv, outp) return 0 if __name__ == '__main__': # pragma: no cover + s_logging.setup() sys.exit(asyncio.run(main(sys.argv[1:]))) diff --git a/synapse/tools/autodoc.py b/synapse/tools/autodoc.py index dfe802ae569..ba3e4633b42 100644 --- a/synapse/tools/autodoc.py +++ b/synapse/tools/autodoc.py @@ -18,6 +18,7 @@ import synapse.lib.output as s_output import synapse.lib.autodoc as s_autodoc import synapse.lib.dyndeps as s_dyndeps +import synapse.lib.logging as s_logging import synapse.lib.version as s_version import synapse.lib.stormsvc as s_stormsvc import synapse.lib.stormtypes as s_stormtypes @@ -1027,5 +1028,5 @@ def makeargparser(): return pars if __name__ == '__main__': # pragma: no cover - s_common.setlogging(logger, 'DEBUG') + s_logging.setup(level=logging.DEBUG) asyncio.run(main(sys.argv[1:])) diff --git a/synapse/tools/backup.py b/synapse/tools/backup.py index 2018cf1fb9c..c4792c37f4c 100644 --- a/synapse/tools/backup.py +++ b/synapse/tools/backup.py @@ -11,6 +11,7 @@ import lmdb import synapse.common as s_common +import synapse.lib.logging as s_logging logger = logging.getLogger(__name__) @@ -174,9 +175,6 @@ def parse_args(argv): args = parser.parse_args(argv) return args -def _main(argv): # pragma: no cover - s_common.setlogging(logger, defval='DEBUG') - return main(argv) - if __name__ == '__main__': # pragma: no cover - sys.exit(_main(sys.argv[1:])) + s_logging.setup(level=logging.DEBUG) + sys.exit(main(sys.argv[1:])) diff --git a/synapse/tools/cellauth.py b/synapse/tools/cellauth.py index 3a5172e9701..992e9a4d77a 100644 --- a/synapse/tools/cellauth.py +++ b/synapse/tools/cellauth.py @@ -338,9 +338,6 @@ def makeargparser(): pars_mod.set_defaults(func=handleModify) return pars -async def _main(): # pragma: no cover - s_common.setlogging(logger, 'DEBUG') - return await 
main(sys.argv[1:]) - if __name__ == '__main__': # pragma: no cover - sys.exit(s_glob.sync(_main())) + s_logging.setup(level=logging.DEBUG) + asyncio.run(main(sys.argv[1:])) diff --git a/synapse/tools/cmdr.py b/synapse/tools/cmdr.py index 05d2537ee9b..9a53d0c2577 100644 --- a/synapse/tools/cmdr.py +++ b/synapse/tools/cmdr.py @@ -9,6 +9,7 @@ import synapse.telepath as s_telepath import synapse.lib.cmdr as s_cmdr +import synapse.lib.logging as s_logging import synapse.lib.version as s_version logger = logging.getLogger(__name__) @@ -47,8 +48,6 @@ async def main(argv): # pragma: no cover print('usage: python -m synapse.tools.cmdr []') return 1 - s_common.setlogging(logger, 'WARNING') - async with s_telepath.withTeleEnv(): await _main(argv) @@ -56,4 +55,5 @@ async def main(argv): # pragma: no cover if __name__ == '__main__': # pragma: no cover warnings.filterwarnings("default", category=PendingDeprecationWarning) + s_logging.setup() sys.exit(asyncio.run(main(sys.argv[1:]))) diff --git a/synapse/tools/feed.py b/synapse/tools/feed.py index 4ae2e38f22e..250051b43fd 100644 --- a/synapse/tools/feed.py +++ b/synapse/tools/feed.py @@ -12,6 +12,7 @@ import synapse.lib.cmdr as s_cmdr import synapse.lib.output as s_output +import synapse.lib.logging as s_logging import synapse.lib.msgpack as s_msgpack import synapse.lib.version as s_version import synapse.lib.encoding as s_encoding @@ -149,5 +150,5 @@ def makeargparser(): return pars if __name__ == '__main__': # pragma: no cover - s_common.setlogging(logger, 'DEBUG') + s_logging.setup(level=logging.DEBUG) asyncio.run(main(sys.argv[1:])) diff --git a/synapse/tools/rstorm.py b/synapse/tools/rstorm.py index bfc549ee103..df9e8c5b76a 100644 --- a/synapse/tools/rstorm.py +++ b/synapse/tools/rstorm.py @@ -7,6 +7,7 @@ import synapse.lib.output as s_output import synapse.lib.rstorm as s_rstorm +import synapse.lib.logging as s_logging logger = logging.getLogger(__name__) @@ -33,5 +34,5 @@ async def main(argv, outp=s_output.stdout): 
outp.printf(line, addnl=False) if __name__ == '__main__': - s_common.setlogging(logger) + s_logging.setup() sys.exit(asyncio.run(main(sys.argv[1:]))) diff --git a/synapse/utils/getrefs.py b/synapse/utils/getrefs.py index a5ea5a5742b..2aac6e0df5f 100644 --- a/synapse/utils/getrefs.py +++ b/synapse/utils/getrefs.py @@ -13,6 +13,7 @@ import synapse.common as s_common import synapse.lib.config as s_config +import synapse.lib.logging as s_logging logger = logging.getLogger(__name__) @@ -90,6 +91,6 @@ def parse_args(argv): return args if __name__ == '__main__': # pragma: no cover - s_common.setlogging(logger, defval='DEBUG') + s_logging.setup(level=logging.DEBUG) argv = parse_args(sys.argv[1:]) sys.exit(main(argv)) From 31b687c455944b54536a1acfa8c70439da006f17 Mon Sep 17 00:00:00 2001 From: visi Date: Wed, 12 Feb 2025 19:17:20 -0500 Subject: [PATCH 03/52] wip --- synapse/lib/cell.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/synapse/lib/cell.py b/synapse/lib/cell.py index a9b211a939a..e2b76b6ded6 100644 --- a/synapse/lib/cell.py +++ b/synapse/lib/cell.py @@ -191,7 +191,11 @@ def _iterBackupProc(path, linkinfo): # This logging call is okay to run since we're executing in # our own process space and no logging has been configured. 
logconf = linkinfo.get('logconf') - s_logging.setup(level=logconf.get('level'), structlog=logconf.get('structlog')) + + level = logconf.get('level') + structlog = logconf.get('structlog') + + s_logging.setup(level=level, structlog=structlog) logger.info(f'Backup streaming process for [{path}] starting.') asyncio.run(_iterBackupWork(path, linkinfo)) @@ -2611,7 +2615,11 @@ def _backupProc(pipe, srcdir, dstdir, lmdbpaths, logconf): (In a separate process) Actually do the backup ''' # This is a new process: configure logging - s_logging.setup(level=logconf.get('level'), structlog=logconf.get('structlog')) + level = logconf.get('level') + structlog = logconf.get('structlog') + + s_logging.setup(level=level, structlog=structlog) + try: with s_t_backup.capturelmdbs(srcdir) as lmdbinfo: From 5615fefa505173655822f840c52c2f75b34a0605 Mon Sep 17 00:00:00 2001 From: visi Date: Wed, 12 Feb 2025 19:22:40 -0500 Subject: [PATCH 04/52] wip --- synapse/cortex.py | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/synapse/cortex.py b/synapse/cortex.py index f7166d60264..093ee2357e0 100644 --- a/synapse/cortex.py +++ b/synapse/cortex.py @@ -5884,7 +5884,8 @@ async def _getMirrorProxy(self, opts): return None if self.stormpool.size() == 0: - await self.logWarn('Storm query mirror pool is empty.') + extra = self.getLogExtra() + logger.warning('Storm query mirror pool is empty.', extra=extra) return None timeout = self.stormpoolopts.get('timeout:connection') @@ -5892,7 +5893,8 @@ async def _getMirrorProxy(self, opts): try: proxy = await self.stormpool.proxy(timeout=timeout) except TimeoutError as e: - await self.logWarn('Timeout connecting to storm pool mirror.', timeout=timeout) + extra = self.getLogExtra(timeout=timeout) + logger.warning('Timeout connecting to storm pool mirror.', extra=extra) return None proxyname = proxy._ahainfo.get('name') @@ -5906,25 +5908,22 @@ async def _getMirrorProxy(self, opts): miroffs = await 
s_common.wait_for(proxy.getNexsIndx(), timeout) - 1 except s_exc.IsFini as e: - await self.logWarn('Storm pool mirror is shutting down.', mirror=proxyname) + extra = self.getLogExtra(mirror=proxyname) + logger.warning('Storm pool mirror is shutting down.', extra=extra) return None except TimeoutError as e: - mesg = 'Timeout retrieving storm pool mirror nexus offset.' - await self.logWarn(mesg, mirror=proxyname, nexsoffs=curoffs, timeout=timeout) + extra = self.getLogExtra(mirror=proxyname, nexsoffs=curoffs, timeout=timeout) + logger.warning('Timeout retrieving storm pool mirror nexus offset.', extra=extra) return None if (delta := curoffs - miroffs) > MAX_NEXUS_DELTA: - mesg = f'Pool mirror nexus offset delta too large.' - await self.logWarn(mesg, mirror=proxyname, delta=delta, nexsoffs=curoffs, mirror_offset=miroffs) + extra = self.getLogExtra(mirror=proxyname, delta=delta, nexsoffs=curoffs, mirror_offset=miroffs) + logger.warning('Pool mirror nexus offset delta too large.', extra=extra) return None return proxy - async def logWarn(self, mesg, **extra): - extra = await self.getLogExtra(**extra) - logger.warning(mesg, extra=extra) - async def storm(self, text, opts=None): opts = self._initStormOpts(opts) From 80b2726923cb81580e8f9d912e0d19f5e4c41149 Mon Sep 17 00:00:00 2001 From: visi Date: Wed, 12 Feb 2025 19:25:08 -0500 Subject: [PATCH 05/52] wip --- synapse/lib/logging.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index 4d3030742ef..148284a80c2 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -45,7 +45,7 @@ def genLogInfo(self, record): return loginfo - def format(self, record: logging.LogRecord): + def format(self, record): loginfo = self.genLogInfo(record) return json.dumps(loginfo, default=str) From 637db6a752c0d15f439fb5ef4aa4ce5f4c7f0426 Mon Sep 17 00:00:00 2001 From: visi Date: Thu, 13 Feb 2025 07:31:02 -0500 Subject: [PATCH 06/52] wip --- synapse/cortex.py | 10 +-- 
synapse/lib/cell.py | 1 + synapse/tests/test_common.py | 13 --- synapse/tests/test_cortex.py | 62 +++++++------ synapse/tests/test_lib_lmdbslab.py | 8 +- synapse/tests/test_lib_stormlib_log.py | 2 +- synapse/tests/test_lib_structlog.py | 118 ------------------------- 7 files changed, 41 insertions(+), 173 deletions(-) delete mode 100644 synapse/tests/test_lib_structlog.py diff --git a/synapse/cortex.py b/synapse/cortex.py index 093ee2357e0..b2bfd2a5e21 100644 --- a/synapse/cortex.py +++ b/synapse/cortex.py @@ -5884,7 +5884,7 @@ async def _getMirrorProxy(self, opts): return None if self.stormpool.size() == 0: - extra = self.getLogExtra() + extra = await self.getLogExtra() logger.warning('Storm query mirror pool is empty.', extra=extra) return None @@ -5893,7 +5893,7 @@ async def _getMirrorProxy(self, opts): try: proxy = await self.stormpool.proxy(timeout=timeout) except TimeoutError as e: - extra = self.getLogExtra(timeout=timeout) + extra = await self.getLogExtra(timeout=timeout) logger.warning('Timeout connecting to storm pool mirror.', extra=extra) return None @@ -5908,17 +5908,17 @@ async def _getMirrorProxy(self, opts): miroffs = await s_common.wait_for(proxy.getNexsIndx(), timeout) - 1 except s_exc.IsFini as e: - extra = self.getLogExtra(mirror=proxyname) + extra = await self.getLogExtra(mirror=proxyname) logger.warning('Storm pool mirror is shutting down.', extra=extra) return None except TimeoutError as e: - extra = self.getLogExtra(mirror=proxyname, nexsoffs=curoffs, timeout=timeout) + extra = await self.getLogExtra(mirror=proxyname, nexsoffs=curoffs, timeout=timeout) logger.warning('Timeout retrieving storm pool mirror nexus offset.', extra=extra) return None if (delta := curoffs - miroffs) > MAX_NEXUS_DELTA: - extra = self.getLogExtra(mirror=proxyname, delta=delta, nexsoffs=curoffs, mirror_offset=miroffs) + extra = await self.getLogExtra(mirror=proxyname, delta=delta, nexsoffs=curoffs, mirror_offset=miroffs) logger.warning('Pool mirror nexus offset delta 
too large.', extra=extra) return None diff --git a/synapse/lib/cell.py b/synapse/lib/cell.py index e2b76b6ded6..c08c028f787 100644 --- a/synapse/lib/cell.py +++ b/synapse/lib/cell.py @@ -3661,6 +3661,7 @@ async def getCellApi(self, link, user, path): ''' return await self.cellapi.anit(self, link, user) + # TODO: why is this async? async def getLogExtra(self, **kwargs): ''' Get an extra dictionary for structured logging which can be used as a extra argument for loggers. diff --git a/synapse/tests/test_common.py b/synapse/tests/test_common.py index 5085cdb5218..cbdca7677b2 100644 --- a/synapse/tests/test_common.py +++ b/synapse/tests/test_common.py @@ -352,19 +352,6 @@ def test_envbool(self): self.false(s_common.envbool('SYN_FOO')) self.false(s_common.envbool('SYN_BAR')) - def test_normlog(self): - self.eq(10, s_common.normLogLevel(' 10 ')) - self.eq(10, s_common.normLogLevel(10)) - self.eq(20, s_common.normLogLevel(' inFo\n')) - with self.raises(s_exc.BadArg): - s_common.normLogLevel(100) - with self.raises(s_exc.BadArg): - s_common.normLogLevel('BEEP') - with self.raises(s_exc.BadArg): - s_common.normLogLevel('12') - with self.raises(s_exc.BadArg): - s_common.normLogLevel({'key': 'newp'}) - async def test_merggenr(self): async def asyncl(data): for item in data: diff --git a/synapse/tests/test_cortex.py b/synapse/tests/test_cortex.py index b7dd4f518a5..944a9d26f69 100644 --- a/synapse/tests/test_cortex.py +++ b/synapse/tests/test_cortex.py @@ -1134,7 +1134,7 @@ async def test_cortex_storm_dmon_log(self): mesg = stream.jsonlines()[0] self.eq(mesg.get('message'), f'Running dmon {iden}') - self.eq(mesg.get('iden'), iden) + self.eq(mesg['synapse'].get('iden'), iden) opts = {'vars': {'iden': iden}} logs = await core.callStorm('return($lib.dmon.log($iden))', opts=opts) @@ -3386,7 +3386,7 @@ async def test_storm_logging(self): self.true(await stream.wait(4)) mesg = stream.jsonlines()[0] - self.eq(mesg.get('view'), view) + self.eq(mesg['synapse'].get('view'), view) async 
def test_strict(self): @@ -8074,8 +8074,8 @@ async def test_cortex_user_scope(self): msgs = stream.jsonlines() mesg = [m for m in msgs if 'Added user' in m.get('message')][0] self.eq('Added user=lowuser', mesg.get('message')) - self.eq('admin', mesg.get('username')) - self.eq('lowuser', mesg.get('target_username')) + self.eq('admin', mesg['synapse'].get('username')) + self.eq('lowuser', mesg['synapse'].get('target_username')) with self.getStructuredAsyncLoggerStream('synapse.lib.cell') as stream: @@ -8088,8 +8088,8 @@ async def test_cortex_user_scope(self): msgs = stream.jsonlines() mesg = [m for m in msgs if 'Set admin' in m.get('message')][0] self.isin('Set admin=True for lowuser', mesg.get('message')) - self.eq('admin', mesg.get('username')) - self.eq('lowuser', mesg.get('target_username')) + self.eq('admin', mesg['synapse'].get('username')) + self.eq('lowuser', mesg['synapse'].get('target_username')) async def test_cortex_ext_httpapi(self): # Cortex API tests for Extended HttpAPI @@ -8297,12 +8297,12 @@ async def _hang(*args, **kwargs): self.len(2, msgs) self.eq(msgs[0].get('message'), f'Offloading Storm query to mirror 01.core.{ahanet}.') - self.eq(msgs[0].get('hash'), qhash) - self.eq(msgs[0].get('mirror'), f'01.core.{ahanet}') + self.eq(msgs[0]['synapse'].get('hash'), qhash) + self.eq(msgs[0]['synapse'].get('mirror'), f'01.core.{ahanet}') self.eq(msgs[1].get('message'), f'Executing storm query {{{q}}} as [root]') - self.eq(msgs[1].get('hash'), qhash) - self.eq(msgs[1].get('pool:from'), f'00.core.{ahanet}') + self.eq(msgs[1]['synapse'].get('hash'), qhash) + self.eq(msgs[1]['synapse'].get('pool:from'), f'00.core.{ahanet}') # callStorm() q = 'inet:asn=0 return($lib.true)' @@ -8316,12 +8316,12 @@ async def _hang(*args, **kwargs): self.len(2, msgs) self.eq(msgs[0].get('message'), f'Offloading Storm query to mirror 01.core.{ahanet}.') - self.eq(msgs[0].get('hash'), qhash) - self.eq(msgs[0].get('mirror'), f'01.core.{ahanet}') + 
self.eq(msgs[0]['synapse'].get('hash'), qhash) + self.eq(msgs[0]['synapse'].get('mirror'), f'01.core.{ahanet}') self.eq(msgs[1].get('message'), f'Executing storm query {{{q}}} as [root]') - self.eq(msgs[1].get('hash'), qhash) - self.eq(msgs[1].get('pool:from'), f'00.core.{ahanet}') + self.eq(msgs[1]['synapse'].get('hash'), qhash) + self.eq(msgs[1]['synapse'].get('pool:from'), f'00.core.{ahanet}') # exportStorm() q = 'inet:asn=0' @@ -8335,12 +8335,12 @@ async def _hang(*args, **kwargs): self.len(2, msgs) self.eq(msgs[0].get('message'), f'Offloading Storm query to mirror 01.core.{ahanet}.') - self.eq(msgs[0].get('hash'), qhash) - self.eq(msgs[0].get('mirror'), f'01.core.{ahanet}') + self.eq(msgs[0]['synapse'].get('hash'), qhash) + self.eq(msgs[0]['synapse'].get('mirror'), f'01.core.{ahanet}') self.eq(msgs[1].get('message'), f'Executing storm query {{{q}}} as [root]') - self.eq(msgs[1].get('hash'), qhash) - self.eq(msgs[1].get('pool:from'), f'00.core.{ahanet}') + self.eq(msgs[1]['synapse'].get('hash'), qhash) + self.eq(msgs[1]['synapse'].get('pool:from'), f'00.core.{ahanet}') # count() q = 'inet:asn=0' @@ -8354,12 +8354,12 @@ async def _hang(*args, **kwargs): self.len(2, msgs) self.eq(msgs[0].get('message'), f'Offloading Storm query to mirror 01.core.{ahanet}.') - self.eq(msgs[0].get('hash'), qhash) - self.eq(msgs[0].get('mirror'), f'01.core.{ahanet}') + self.eq(msgs[0]['synapse'].get('hash'), qhash) + self.eq(msgs[0]['synapse'].get('mirror'), f'01.core.{ahanet}') self.eq(msgs[1].get('message'), f'Executing storm query {{{q}}} as [root]') - self.eq(msgs[1].get('hash'), qhash) - self.eq(msgs[1].get('pool:from'), f'00.core.{ahanet}') + self.eq(msgs[1]['synapse'].get('hash'), qhash) + self.eq(msgs[1]['synapse'].get('pool:from'), f'00.core.{ahanet}') with patch('synapse.cortex.CoreApi.getNexsIndx', _hang): @@ -8370,7 +8370,7 @@ async def _hang(*args, **kwargs): stream.seek(0) data = stream.read() self.notin('Offloading Storm query', data) - self.isin('Timeout waiting for 
pool mirror [01.core.synapse] Nexus offset', data) + self.isin('Timeout retrieving storm pool mirror nexus offset.', data) self.notin('Timeout waiting for query mirror', data) await core00.stormpool.waitready(timeout=12) @@ -8384,7 +8384,7 @@ async def _hang(*args, **kwargs): stream.seek(0) data = stream.read() self.notin('Offloading Storm query', data) - self.isin('Timeout waiting for pool mirror [01.core.synapse] Nexus offset', data) + self.isin('Timeout retrieving storm pool mirror nexus offset.', data) self.notin('Timeout waiting for query mirror', data) await core00.stormpool.waitready(timeout=12) @@ -8411,9 +8411,7 @@ async def _hang(*args, **kwargs): stream.seek(0) data = stream.read() - explog = (f'Pool mirror [01.core.synapse] Nexus offset delta too large ' - f'({nexsoffs} > 1), running query locally') - self.isin(explog, data) + self.isin('Pool mirror nexus offset delta too large.', data) self.notin('Offloading Storm query', data) with self.getLoggerStream('synapse') as stream: @@ -8470,7 +8468,7 @@ async def _hang(*args, **kwargs): stream.seek(0) data = stream.read() - self.isin('Timeout waiting for pool mirror, running query locally', data) + self.isin('Timeout connecting to storm pool mirror.', data) await core01.fini() @@ -8480,28 +8478,28 @@ async def _hang(*args, **kwargs): stream.seek(0) data = stream.read() - self.isin('Storm query mirror pool is empty, running query locally.', data) + self.isin('Storm query mirror pool is empty.', data) with self.getLoggerStream('synapse') as stream: self.true(await core00.callStorm('inet:asn=0 return($lib.true)')) stream.seek(0) data = stream.read() - self.isin('Storm query mirror pool is empty, running query locally.', data) + self.isin('Storm query mirror pool is empty.', data) with self.getLoggerStream('synapse') as stream: self.len(1, await alist(core00.exportStorm('inet:asn=0'))) stream.seek(0) data = stream.read() - self.isin('Storm query mirror pool is empty, running query locally.', data) + 
self.isin('Storm query mirror pool is empty.', data) with self.getLoggerStream('synapse') as stream: self.eq(1, await core00.count('inet:asn=0')) stream.seek(0) data = stream.read() - self.isin('Storm query mirror pool is empty, running query locally.', data) + self.isin('Storm query mirror pool is empty.', data) core01 = await base.enter_context(self.getTestCore(dirn=dirn01)) await core01.promote(graceful=True) diff --git a/synapse/tests/test_lib_lmdbslab.py b/synapse/tests/test_lib_lmdbslab.py index 71e459bdcc1..0f3f514693a 100644 --- a/synapse/tests/test_lib_lmdbslab.py +++ b/synapse/tests/test_lib_lmdbslab.py @@ -360,16 +360,16 @@ async def test_lmdbslab_commit_warn(self): msgs = stream.jsonlines() self.gt(len(msgs), 0) - self.nn(msgs[0].get('delta')) - self.nn(msgs[0].get('path')) - self.nn(msgs[0].get('xactopslen')) + self.nn(msgs[0]['synapse'].get('delta')) + self.nn(msgs[0]['synapse'].get('path')) + self.nn(msgs[0]['synapse'].get('xactopslen')) self.sorteq([ 'vm.swappiness', 'vm.dirty_expire_centisecs', 'vm.dirty_writeback_centisecs', 'vm.dirty_background_ratio', 'vm.dirty_ratio', - ], msgs[0].get('sysctls', {}).keys()) + ], msgs[0]['synapse'].get('sysctls', {}).keys()) async def test_lmdbslab_max_replay(self): with self.getTestDir() as dirn: diff --git a/synapse/tests/test_lib_stormlib_log.py b/synapse/tests/test_lib_stormlib_log.py index 884c6522969..1496351f588 100644 --- a/synapse/tests/test_lib_stormlib_log.py +++ b/synapse/tests/test_lib_stormlib_log.py @@ -57,4 +57,4 @@ async def test_stormlib_log(self): mesg = msgs[1] self.eq(mesg.get('logger').get('name'), 'synapse.storm.log') self.eq(mesg.get('message'), 'struct2 message') - self.eq(mesg.get('key'), 'valu') + self.eq(mesg['synapse'].get('key'), 'valu') diff --git a/synapse/tests/test_lib_structlog.py b/synapse/tests/test_lib_structlog.py deleted file mode 100644 index 66b0c250284..00000000000 --- a/synapse/tests/test_lib_structlog.py +++ /dev/null @@ -1,118 +0,0 @@ -import io -import json -import 
time -import logging - -import synapse.common as s_common -import synapse.lib.structlog as s_structlog - -import synapse.tests.utils as s_test -import synapse.exc as s_exc -logger = logging.getLogger(__name__) - - -class ZDE(s_exc.SynErr): pass - - -class StructLogTest(s_test.SynTest): - - def test_structlog_base(self): - stream = io.StringIO() - handler = logging.StreamHandler(stream=stream) - formatter = s_structlog.JsonFormatter() - handler.setFormatter(formatter) - logger.addHandler(handler) - - logger.warning('Test message 1') - logger.error('Test message 2') - iden = s_common.guid() - logger.error('Extra test', extra={'synapse': {'foo': 'bar', 'iden': iden, 'time': 0}}) - - def foo(): - _ = 1 / 0 - def bar(): - try: - foo() - except ZeroDivisionError as e: - raise ZDE(mesg='ZDE test', args=(1, 0), buffer='vertex'.encode()) from e - try: - bar() - except s_exc.SynErr: - logger.exception('Exception handling') - - logger.warning('Unicode is cool for 程序员!') - - data = stream.getvalue() - - # There is a trailing \n on the stream - raw_mesgs = [m for m in data.split('\n') if m] - mesgs = [json.loads(m) for m in raw_mesgs] - self.len(5, mesgs) - - mesg = mesgs[0] - self.eq(set(mesg.keys()), {'message', 'logger', 'level', 'time'}) - lnfo = mesg.get('logger') - self.eq(set(lnfo.keys()), {'name', 'process', 'filename', 'func'}) - self.eq(mesg.get('message'), 'Test message 1') - self.eq(mesg.get('level'), 'WARNING') - - mesg = mesgs[1] - self.eq(mesg.get('message'), 'Test message 2') - self.eq(mesg.get('level'), 'ERROR') - - mesg = mesgs[2] - self.eq(mesg.get('message'), 'Extra test') - self.eq(mesg.get('level'), 'ERROR') - self.eq(mesg.get('foo'), 'bar') - self.eq(mesg.get('iden'), iden) - self.ne(mesg.get('time'), 0) # time was not overwritten by the extra - - mesg = mesgs[3] - self.eq(mesg.get('message'), 'Exception handling') - self.eq(mesg.get('level'), 'ERROR') - erfo = mesg.get('err') - - etb = erfo.get('etb') - self.isin('Traceback', etb) - self.isin('_ = 1 / 
0', etb) - self.isin('The above exception was the direct cause of the following exception:', etb) - self.isin('ZeroDivisionError: division by zero', etb) - self.isin("""test_lib_structlog.ZDE: ZDE: args=(1, 0) buffer=b'vertex' mesg='ZDE test'""", etb) - self.eq(erfo.get('errname'), 'ZDE') - self.eq(erfo.get('mesg'), 'ZDE test') - self.eq(erfo.get('args'), (1, 0)) - self.eq(erfo.get('buffer'), "b'vertex'") - - mesg = mesgs[4] - rawm = raw_mesgs[4] - self.isin(r'Unicode is cool for \u7a0b\u5e8f\u5458!', rawm) - self.eq(mesg.get('message'), 'Unicode is cool for 程序员!') - - logger.removeHandler(handler) - - def test_structlog_datefmt(self): - stream = io.StringIO() - handler = logging.StreamHandler(stream=stream) - datefmt = '%m-%Y-%d' # MMYYYYYDD - formatter = s_structlog.JsonFormatter(datefmt=datefmt) - handler.setFormatter(formatter) - logger.addHandler(handler) - - now = time.gmtime() - logger.error('Time test', extra={'synapse': {'foo': 'bar'}}) - - data = stream.getvalue() - - # There is a trailing \n on the stream - raw_mesgs = [m for m in data.split('\n') if m] - mesgs = [json.loads(m) for m in raw_mesgs] - self.len(1, mesgs) - ptime = time.strptime(mesgs[0].get('time'), datefmt) - self.eq(now.tm_year, ptime.tm_year) - self.eq(now.tm_mon, ptime.tm_mon) - self.eq(now.tm_mday, ptime.tm_mday) - self.eq(0, ptime.tm_hour) - self.eq(0, ptime.tm_min) - self.eq(0, ptime.tm_sec) - - logger.removeHandler(handler) From c719239ccbfced02466f1699578a21f71c3a2463 Mon Sep 17 00:00:00 2001 From: visi Date: Thu, 13 Feb 2025 08:18:44 -0500 Subject: [PATCH 07/52] wip --- synapse/lib/cell.py | 13 +++---------- synapse/lib/logging.py | 28 +++++++++++++++++++++++++++- synapse/lib/scope.py | 1 - 3 files changed, 30 insertions(+), 12 deletions(-) diff --git a/synapse/lib/cell.py b/synapse/lib/cell.py index c08c028f787..a20d43097c6 100644 --- a/synapse/lib/cell.py +++ b/synapse/lib/cell.py @@ -3672,16 +3672,6 @@ async def getLogExtra(self, **kwargs): Returns: Dict: A dictionary ''' - 
sess = s_scope.get('sess') # type: s_daemon.Sess - user = s_scope.get('user') # type: s_auth.User - - if user: - kwargs['user'] = user.iden - kwargs['username'] = user.name - elif sess and sess.user: - kwargs['user'] = sess.user.iden - kwargs['username'] = sess.user.name - return s_logging.getLogExtra(**kwargs) async def _getSpawnLogConf(self): @@ -4311,6 +4301,9 @@ async def execmain(cls, argv, outp=None): cell = await cls.initFromArgv(argv, outp=outp) + if cell.ahasvcname is not None: + s_logging.setLogExtra('service', cell.ahasvcname) + await cell.main() async def _getCellUser(self, link, mesg): diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index 148284a80c2..566833aafad 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -3,8 +3,11 @@ import logging import collections +import synapse.exc as s_exc import synapse.common as s_common + import synapse.lib.const as s_const +import synapse.lib.scope as s_scope logger = logging.getLogger(__name__) @@ -16,8 +19,30 @@ def _addLogInfo(info): # TODO: getLogInfo(wait=True) +_logextra = {} +def setLogExtra(name, valu): + ''' + Configure global extra values which should be added to every log. 
+ ''' + _logextra[name] = valu + def getLogExtra(**kwargs): - return {'synapse': kwargs} + + extra = {'synapse': kwargs} + extra.update(_logextra) + + user = s_scope.get('user') # type: s_auth.User + if user is not None: + extra['user'] = user.iden + extra['username'] = user.name + + else: + sess = s_scope.get('sess') # type: s_daemon.Sess + if sess is not None and sess.user is not None: + extra['user'] = sess.user.iden + extra['username'] = sess.user.name + + return extra class Formatter(logging.Formatter): @@ -33,6 +58,7 @@ def genLogInfo(self, record): 'func': record.funcName, }, 'level': record.levelname, + 'tick': s_common.now(), 'time': self.formatTime(record, self.datefmt), } diff --git a/synapse/lib/scope.py b/synapse/lib/scope.py index a504cd553d6..a6572cad8b1 100644 --- a/synapse/lib/scope.py +++ b/synapse/lib/scope.py @@ -114,7 +114,6 @@ def copy(self): # set up a global scope with an empty frame globscope = Scope(dict()) - def _task_scope() -> Scope: ''' Get the current task scope. 
If the _syn_scope is not set, set it to a new scope From f4d15dfc61c336fa6005017f6e545d7a033d6f6e Mon Sep 17 00:00:00 2001 From: visi Date: Thu, 13 Feb 2025 08:26:52 -0500 Subject: [PATCH 08/52] wip --- synapse/lib/logging.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index 566833aafad..c365ce799b6 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -84,7 +84,7 @@ def format(self, record): syns = loginfo.get('synapse') if syns: - mesg += ' ({json.dumps(syns, default=str)})' + mesg += f' ({json.dumps(syns, default=str)})' return mesg From 0d18d484faffcf55d6e4c4e340de26cd89f44722 Mon Sep 17 00:00:00 2001 From: visi Date: Thu, 13 Feb 2025 09:02:36 -0500 Subject: [PATCH 09/52] wip --- synapse/lib/const.py | 4 ++-- synapse/lib/logging.py | 21 ++++++++++----------- 2 files changed, 12 insertions(+), 13 deletions(-) diff --git a/synapse/lib/const.py b/synapse/lib/const.py index f4c838b098c..8653a0b1e7e 100644 --- a/synapse/lib/const.py +++ b/synapse/lib/const.py @@ -1,8 +1,8 @@ # Logging related constants import logging -LOG_FORMAT = '%(asctime)s [%(levelname)s] %(message)s ' \ - '[%(filename)s:%(funcName)s:%(threadName)s:%(processName)s]' +LOG_FORMAT = '%(asctime)s [%(levelname)s] %(message)s %(synapse)s [%(filename)s:%(funcName)s]' + LOG_LEVEL_CHOICES = { 'DEBUG': logging.DEBUG, 'INFO': logging.INFO, diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index c365ce799b6..cea25c0eaa0 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -53,19 +53,20 @@ def genLogInfo(self, record): loginfo = { 'message': record.message, 'logger': { - 'name': record.name, 'filename': record.filename, 'func': record.funcName, }, 'level': record.levelname, - 'tick': s_common.now(), 'time': self.formatTime(record, self.datefmt), } if record.exc_info: loginfo['err'] = s_common.err(record.exc_info[1], fulltb=True) - loginfo['synapse'] = record.__dict__.get('synapse') + if not 
hasattr(record, 'synapse'): + record.synapse = {} + + loginfo['synapse'] = record.synapse _addLogInfo(loginfo) @@ -77,16 +78,13 @@ def format(self, record): class TextFormatter(Formatter): - def format(self, record): + def __init__(self, *args, **kwargs): + kwargs['fmt'] = s_const.LOG_FORMAT + return super().__init__(*args, **kwargs) + def format(self, record): loginfo = self.genLogInfo(record) - mesg = loginfo.get('message') - - syns = loginfo.get('synapse') - if syns: - mesg += f' ({json.dumps(syns, default=str)})' - - return mesg + return logging.Formatter.format(self, record) def setup(level=logging.WARNING, structlog=False): ''' @@ -102,6 +100,7 @@ def setup(level=logging.WARNING, structlog=False): handler = logging.StreamHandler() handler.setFormatter(fmtclass(datefmt=conf.get('datefmt'))) + logging.basicConfig(level=conf.get('level'), handlers=(handler,)) logger.info('log level set to %s', s_const.LOG_LEVEL_INVERSE_CHOICES.get(level)) From 9eaef91caec8d5c3ce0b9e4ad358a0dad0b2f9e8 Mon Sep 17 00:00:00 2001 From: visi Date: Thu, 13 Feb 2025 09:15:12 -0500 Subject: [PATCH 10/52] wip --- synapse/lib/cell.py | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/synapse/lib/cell.py b/synapse/lib/cell.py index a20d43097c6..2fa874f52be 100644 --- a/synapse/lib/cell.py +++ b/synapse/lib/cell.py @@ -3781,9 +3781,10 @@ def getArgParser(cls, conf=None): pars.add_argument('dirn', help=f'The storage directory for the {name} service.') pars.add_argument('--log-level', default='INFO', choices=list(s_const.LOG_LEVEL_CHOICES.keys()), - help='Specify the Python logging log level.', type=str.upper) + help='Deprecated. Please use SYN_LOG_LEVEL environment variable.', type=str.upper) + pars.add_argument('--structured-logging', default=True, action='store_true', - help='Use structured logging.') + help='Deprecated. 
Please use SYN_LOG_STRUCT environment variable.') telendef = None telepdef = 'tcp://0.0.0.0:27492' @@ -3796,15 +3797,11 @@ def getArgParser(cls, conf=None): https = os.getenv(httpsvar, httpsdef) pars.add_argument('--telepath', default=telep, type=str, - help=f'The telepath URL to listen on. This defaults to {telepdef}, and may be ' - f'also be overridden by the {telepvar} environment variable.') + help=f'Deprecated. Please use the {telepvar} environment variable.') pars.add_argument('--https', default=https, type=int, - help=f'The port to bind for the HTTPS/REST API. This defaults to {httpsdef}, ' - f'and may be also be overridden by the {httpsvar} environment variable.') + help=f'Deprecated. Please use the {httpsvar} environment variable.') pars.add_argument('--name', type=str, default=telen, - help=f'The (optional) additional name to share the {name} as. This defaults to ' - f'{telendef}, and may be also be overridden by the {telenvar} environment' - f' variable.') + help=f'Deprecated. 
Please use the {telenvar} environment variable.') if conf is not None: args = conf.getArgParseArgs() From fd4b617a78beab3813d0e6d849256e6c55dfddd1 Mon Sep 17 00:00:00 2001 From: visi Date: Thu, 13 Feb 2025 10:19:08 -0500 Subject: [PATCH 11/52] wip --- synapse/lib/logging.py | 74 ++++++++++++++++++++++++++++--- synapse/tests/test_lib_logging.py | 53 ++++++++++++++++++++++ 2 files changed, 120 insertions(+), 7 deletions(-) create mode 100644 synapse/tests/test_lib_logging.py diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index cea25c0eaa0..08846a89bf6 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -1,6 +1,8 @@ import os import json +import asyncio import logging +import weakref import collections import synapse.exc as s_exc @@ -11,25 +13,83 @@ logger = logging.getLogger(__name__) -logfifo = collections.deque() +logtodo = [] +logbase = None +logevnt = asyncio.Event() +logwindows = weakref.WeakSet() + +logfifo = collections.deque(maxlen=1000) def _addLogInfo(info): logfifo.append(info) - # TODO notify waiters... 
+ if logbase is not None: + logtodo.append(info) + logevnt.set() + +async def _feedLogInfo(): + + while not logbase.isfini: + + await logevnt.wait() + + if logbase.isfini: + return + + todo = list(logtodo) + + logevnt.clear() + logtodo.clear() + + for wind in logwindows: + await wind.puts(todo) + +async def _initLogBase(): + + global logbase + + # FIXME: resolve circurlar deps + import synapse.lib.base as s_base + + logbase = await s_base.Base.anit() + logbase._fini_at_exit = True + logbase.schedCoro(_feedLogInfo()) + +async def getLogInfo(wait=False): + + if not wait: + for loginfo in list(logfifo): + yield loginfo + return + + global logbase + + if logbase is None: + await _initLogBase() + + # FIXME: resolve circurlar deps + import synapse.lib.queue as s_queue + + async with await s_queue.Window.anit(maxsize=2000) as window: + + await window.puts(list(logfifo)) + + logwindows.add(window) -# TODO: getLogInfo(wait=True) + async for loginfo in window: + print(f'YIELD {loginfo}') + yield loginfo -_logextra = {} +logextra = {} def setLogExtra(name, valu): ''' Configure global extra values which should be added to every log. ''' - _logextra[name] = valu + logextra[name] = valu def getLogExtra(**kwargs): extra = {'synapse': kwargs} - extra.update(_logextra) + extra.update(logextra) user = s_scope.get('user') # type: s_auth.User if user is not None: @@ -130,7 +190,7 @@ def normLogLevel(valu): valu: The value to norm ( a string or integer ). Returns: - int: A valid Logging log level. + int: A valid log level. 
''' if isinstance(valu, str): diff --git a/synapse/tests/test_lib_logging.py b/synapse/tests/test_lib_logging.py new file mode 100644 index 00000000000..397a1d364e5 --- /dev/null +++ b/synapse/tests/test_lib_logging.py @@ -0,0 +1,53 @@ +import asyncio +import logging + +import synapse.exc as s_exc + +import synapse.lib.base as s_base +import synapse.lib.logging as s_logging + +import synapse.tests.utils as s_test + +class LoggingTest(s_test.SynTest): + + async def test_lib_logging(self): + + s_logging.setup(structlog=True) + logger = logging.getLogger(__name__) + + self.eq(10, s_logging.normLogLevel(' 10 ')) + self.eq(10, s_logging.normLogLevel(10)) + self.eq(20, s_logging.normLogLevel(' inFo\n')) + + with self.raises(s_exc.BadArg): + s_logging.normLogLevel(100) + + with self.raises(s_exc.BadArg): + s_logging.normLogLevel('BEEP') + + with self.raises(s_exc.BadArg): + s_logging.normLogLevel('12') + + with self.raises(s_exc.BadArg): + s_logging.normLogLevel({'key': 'newp'}) + + s_logging.setLogExtra('woot', 'hehe') + extra = s_logging.getLogExtra() + + self.eq(extra['woot'], 'hehe') + + event = asyncio.Event() + async def logtask(): + await asyncio.sleep(1) + print('LOG TASK') + logger.warning('OMG WARNING', s_logging.getLogExtra(hehe='haha')) + logger.warning('OMG WARNING', s_logging.getLogExtra(hehe='haha')) + print('DONE') + + async with await s_base.Base.anit() as base: + s_logging.logfifo.clear() + base.schedCoro(logtask()) + async for loginfo in s_logging.getLogInfo(wait=True): + print(loginfo) + self.eq(loginfo['loglevel'], 'WARNING') + break From d7b93337eb00d8d7dd6feea166169731d2d20698 Mon Sep 17 00:00:00 2001 From: visi Date: Thu, 13 Feb 2025 10:19:39 -0500 Subject: [PATCH 12/52] wip --- synapse/tests/test_lib_logging.py | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/synapse/tests/test_lib_logging.py b/synapse/tests/test_lib_logging.py index 397a1d364e5..8b49cb05d0c 100644 --- 
a/synapse/tests/test_lib_logging.py +++ b/synapse/tests/test_lib_logging.py @@ -36,18 +36,18 @@ async def test_lib_logging(self): self.eq(extra['woot'], 'hehe') - event = asyncio.Event() - async def logtask(): - await asyncio.sleep(1) - print('LOG TASK') - logger.warning('OMG WARNING', s_logging.getLogExtra(hehe='haha')) - logger.warning('OMG WARNING', s_logging.getLogExtra(hehe='haha')) - print('DONE') - - async with await s_base.Base.anit() as base: - s_logging.logfifo.clear() - base.schedCoro(logtask()) - async for loginfo in s_logging.getLogInfo(wait=True): - print(loginfo) - self.eq(loginfo['loglevel'], 'WARNING') - break + # event = asyncio.Event() + # async def logtask(): + # await asyncio.sleep(1) + # print('LOG TASK') + # logger.warning('OMG WARNING', s_logging.getLogExtra(hehe='haha')) + # logger.warning('OMG WARNING', s_logging.getLogExtra(hehe='haha')) + # print('DONE') + + # async with await s_base.Base.anit() as base: + # s_logging.logfifo.clear() + # base.schedCoro(logtask()) + # async for loginfo in s_logging.getLogInfo(wait=True): + # print(loginfo) + # self.eq(loginfo['loglevel'], 'WARNING') + # break From 832e80bb67ef4f0215b9ae482e2854e60f8b8940 Mon Sep 17 00:00:00 2001 From: visi Date: Thu, 13 Feb 2025 11:00:09 -0500 Subject: [PATCH 13/52] wip --- synapse/lib/logging.py | 2 +- synapse/tests/test_lib_stormlib_cortex.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index 08846a89bf6..ce4f92bb90e 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -113,7 +113,7 @@ def genLogInfo(self, record): loginfo = { 'message': record.message, 'logger': { - 'filename': record.filename, + 'name': record.name, 'func': record.funcName, }, 'level': record.levelname, diff --git a/synapse/tests/test_lib_stormlib_cortex.py b/synapse/tests/test_lib_stormlib_cortex.py index 9b2b327e692..7bddbfc17f0 100644 --- a/synapse/tests/test_lib_stormlib_cortex.py +++ 
b/synapse/tests/test_lib_stormlib_cortex.py @@ -305,7 +305,7 @@ async def test_libcortex_httpapi_methods(self): self.eq(resp.status, 200) self.true(await stream.wait(timeout=12)) msgs = stream.jsonlines() - self.eq(msgs[0].get('httpapi'), echoiden) + self.eq(msgs[0]['synapse'].get('httpapi'), echoiden) core.stormlog = False # Sad paths on the $request methods From 9cf59fd18b2a35c5971815c2170a179270a961a8 Mon Sep 17 00:00:00 2001 From: visi Date: Thu, 13 Feb 2025 16:23:05 -0500 Subject: [PATCH 14/52] wip --- synapse/lib/stormlib/cell.py | 2 ++ synapse/tests/test_cortex.py | 2 +- synapse/tests/test_lib_httpapi.py | 44 ++++++++++++------------- synapse/tests/test_lib_stormlib_cell.py | 1 + synapse/tests/test_lib_trigger.py | 4 +-- 5 files changed, 28 insertions(+), 25 deletions(-) diff --git a/synapse/lib/stormlib/cell.py b/synapse/lib/stormlib/cell.py index c428f5080aa..72a5fa503ea 100644 --- a/synapse/lib/stormlib/cell.py +++ b/synapse/lib/stormlib/cell.py @@ -300,7 +300,9 @@ async def _getMirrorUrls(self, name=None): if name is None: return await self.runt.snap.core.getMirrorUrls() + print(f'NAME: {name}') ssvc = self.runt.snap.core.getStormSvc(name) + print(f'SSVC: {ssvc}') if ssvc is None: mesg = f'No service with name/iden: {name}' raise s_exc.NoSuchName(mesg=mesg) diff --git a/synapse/tests/test_cortex.py b/synapse/tests/test_cortex.py index 944a9d26f69..42f12b733c6 100644 --- a/synapse/tests/test_cortex.py +++ b/synapse/tests/test_cortex.py @@ -8074,7 +8074,7 @@ async def test_cortex_user_scope(self): msgs = stream.jsonlines() mesg = [m for m in msgs if 'Added user' in m.get('message')][0] self.eq('Added user=lowuser', mesg.get('message')) - self.eq('admin', mesg['synapse'].get('username')) + self.eq('admin', mesg.get('username')) self.eq('lowuser', mesg['synapse'].get('target_username')) with self.getStructuredAsyncLoggerStream('synapse.lib.cell') as stream: diff --git a/synapse/tests/test_lib_httpapi.py b/synapse/tests/test_lib_httpapi.py index 
552dc07e143..51dead59708 100644 --- a/synapse/tests/test_lib_httpapi.py +++ b/synapse/tests/test_lib_httpapi.py @@ -1757,12 +1757,12 @@ def get_mesg(stream: s_tests.AsyncStreamEvent) -> dict: self.true(await stream.wait(6)) mesg = get_mesg(stream) - self.eq(mesg.get('uri'), '/api/v1/auth/adduser') - self.eq(mesg.get('username'), 'root') - self.eq(mesg.get('user'), core.auth.rootuser.iden) - self.isin('headers', mesg) - self.eq(mesg['headers'].get('user-agent'), 'test_request_logging') - self.isin('remoteip', mesg) + self.eq(mesg['synapse'].get('uri'), '/api/v1/auth/adduser') + self.eq(mesg['synapse'].get('username'), 'root') + self.eq(mesg['synapse'].get('user'), core.auth.rootuser.iden) + self.isin('headers', mesg['synapse']) + self.eq(mesg['synapse']['headers'].get('user-agent'), 'test_request_logging') + self.isin('remoteip', mesg['synapse']) self.isin('(root)', mesg.get('message')) self.isin('200 POST /api/v1/auth/adduser', mesg.get('message')) self.notin('1.2.3.4', mesg.get('message')) @@ -1774,11 +1774,11 @@ def get_mesg(stream: s_tests.AsyncStreamEvent) -> dict: self.true(await stream.wait(6)) mesg = get_mesg(stream) - self.eq(mesg.get('uri'), '/api/v1/active') + self.eq(mesg['synapse'].get('uri'), '/api/v1/active') self.notin('headers', mesg) self.notin('username', mesg) self.notin('user', mesg) - self.isin('remoteip', mesg) + self.isin('remoteip', mesg['synapse']) self.isin('200 GET /api/v1/active', mesg.get('message')) # Sessions populate the data too @@ -1791,9 +1791,9 @@ def get_mesg(stream: s_tests.AsyncStreamEvent) -> dict: self.true(await stream.wait(6)) mesg = get_mesg(stream) - self.eq(mesg.get('uri'), '/api/v1/login') - self.eq(mesg.get('username'), 'visi') - self.eq(mesg.get('user'), visiiden) + self.eq(mesg['synapse'].get('uri'), '/api/v1/login') + self.eq(mesg['synapse'].get('username'), 'visi') + self.eq(mesg['synapse'].get('user'), visiiden) # session cookie loging populates the data upon reuse with 
self.getStructuredAsyncLoggerStream(logname, 'api/v1/auth/users') as stream: @@ -1802,9 +1802,9 @@ def get_mesg(stream: s_tests.AsyncStreamEvent) -> dict: self.true(await stream.wait(6)) mesg = get_mesg(stream) - self.eq(mesg.get('uri'), '/api/v1/auth/users') - self.eq(mesg.get('username'), 'visi') - self.eq(mesg.get('user'), visiiden) + self.eq(mesg['synapse'].get('uri'), '/api/v1/auth/users') + self.eq(mesg['synapse'].get('username'), 'visi') + self.eq(mesg['synapse'].get('user'), visiiden) async with self.getTestCore(conf={'https:parse:proxy:remoteip': True}) as core: @@ -1831,10 +1831,10 @@ def get_mesg(stream: s_tests.AsyncStreamEvent) -> dict: self.true(await stream.wait(6)) mesg = get_mesg(stream) - self.eq(mesg.get('uri'), '/api/v1/auth/adduser') - self.eq(mesg.get('username'), 'root') - self.eq(mesg.get('user'), core.auth.rootuser.iden) - self.eq(mesg.get('remoteip'), '1.2.3.4') + self.eq(mesg['synapse'].get('uri'), '/api/v1/auth/adduser') + self.eq(mesg['synapse'].get('username'), 'root') + self.eq(mesg['synapse'].get('user'), core.auth.rootuser.iden) + self.eq(mesg['synapse'].get('remoteip'), '1.2.3.4') self.isin('(root)', mesg.get('message')) self.isin('200 POST /api/v1/auth/adduser', mesg.get('message')) @@ -1849,10 +1849,10 @@ def get_mesg(stream: s_tests.AsyncStreamEvent) -> dict: self.true(await stream.wait(6)) mesg = get_mesg(stream) - self.eq(mesg.get('uri'), '/api/v1/auth/adduser') - self.eq(mesg.get('username'), 'root') - self.eq(mesg.get('user'), core.auth.rootuser.iden) - self.eq(mesg.get('remoteip'), '8.8.8.8') + self.eq(mesg['synapse'].get('uri'), '/api/v1/auth/adduser') + self.eq(mesg['synapse'].get('username'), 'root') + self.eq(mesg['synapse'].get('user'), core.auth.rootuser.iden) + self.eq(mesg['synapse'].get('remoteip'), '8.8.8.8') self.isin('(root)', mesg.get('message')) self.isin('200 POST /api/v1/auth/adduser', mesg.get('message')) diff --git a/synapse/tests/test_lib_stormlib_cell.py b/synapse/tests/test_lib_stormlib_cell.py index 
a76e56dfa3a..553f7487c52 100644 --- a/synapse/tests/test_lib_stormlib_cell.py +++ b/synapse/tests/test_lib_stormlib_cell.py @@ -146,6 +146,7 @@ async def test_stormlib_cell_getmirrors(self): await aha.delAhaSvc('00.testsvc.synapse') + # FIXME: how did this ever work? with self.raises(s_exc.NoSuchName): await core00.callStorm('return($lib.cell.getMirrorUrls(name=testsvc))') diff --git a/synapse/tests/test_lib_trigger.py b/synapse/tests/test_lib_trigger.py index c51ea9cf963..4b17073f50d 100644 --- a/synapse/tests/test_lib_trigger.py +++ b/synapse/tests/test_lib_trigger.py @@ -253,9 +253,9 @@ async def test_trigger_basics(self): await core.nodes('[ test:str=logit ]') self.true(await stream.wait(6)) msgs = stream.jsonlines() - mesg = [m for m in msgs if m.get('iden') == tdef.get('iden')][0] + mesg = [m for m in msgs if m['synapse'].get('iden') == tdef.get('iden')][0] self.eq(mesg['message'], f'test trigger {tdef.get("iden")}') - self.eq(mesg['iden'], tdef.get('iden')) + self.eq(mesg['synapse']['iden'], tdef.get('iden')) # Attempting to add trigger with existing iden raises with self.raises(s_exc.DupIden): From 05b1c3d55c4f07a3ef3ede33f93493568e6028e0 Mon Sep 17 00:00:00 2001 From: visi Date: Thu, 13 Feb 2025 16:27:34 -0500 Subject: [PATCH 15/52] wip --- synapse/lib/const.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/lib/const.py b/synapse/lib/const.py index 8653a0b1e7e..8dbf1e322a6 100644 --- a/synapse/lib/const.py +++ b/synapse/lib/const.py @@ -1,7 +1,7 @@ # Logging related constants import logging -LOG_FORMAT = '%(asctime)s [%(levelname)s] %(message)s %(synapse)s [%(filename)s:%(funcName)s]' +LOG_FORMAT = '%(asctime)s [%(levelname)s] %(message)s %(synapse)s [%(name)s.%(funcName)s]' LOG_LEVEL_CHOICES = { 'DEBUG': logging.DEBUG, From 5bd5db4684fa0a7c606fe79c3c3f54313875a7bc Mon Sep 17 00:00:00 2001 From: visi Date: Thu, 13 Feb 2025 17:21:04 -0500 Subject: [PATCH 16/52] wip --- synapse/lib/cell.py | 11 ++++++++++- synapse/lib/logging.py | 
7 +++---- synapse/tests/test_lib_cell.py | 30 ++++++++++++++++++++++++++++++ synapse/tests/utils.py | 20 ++++++++++++++++++++ 4 files changed, 63 insertions(+), 5 deletions(-) diff --git a/synapse/lib/cell.py b/synapse/lib/cell.py index 2fa874f52be..c7417dac9e6 100644 --- a/synapse/lib/cell.py +++ b/synapse/lib/cell.py @@ -215,6 +215,11 @@ async def __anit__(self, cell, link, user): async def initCellApi(self): pass + @adminapi(log=True) + async def logs(self, wait=False, last=None): + async for loginfo in self.cell.logs(wait=wait, last=last): + yield loginfo + @adminapi(log=True) async def freeze(self, timeout=30): return await self.cell.freeze(timeout=timeout) @@ -3661,7 +3666,7 @@ async def getCellApi(self, link, user, path): ''' return await self.cellapi.anit(self, link, user) - # TODO: why is this async? + # FIXME: why is this async? async def getLogExtra(self, **kwargs): ''' Get an extra dictionary for structured logging which can be used as a extra argument for loggers. @@ -5082,6 +5087,10 @@ def getCachedSslCtx(self, opts=None, verify=None): key = tuple(sorted(opts.items())) return self._sslctx_cache.get(key) + async def logs(self, wait=False, last=None): + async for loginfo in s_logging.getLogInfo(wait=wait, last=last): + yield loginfo + async def freeze(self, timeout=30): if self.paused: diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index ce4f92bb90e..afdaa8b74ca 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -54,10 +54,10 @@ async def _initLogBase(): logbase._fini_at_exit = True logbase.schedCoro(_feedLogInfo()) -async def getLogInfo(wait=False): +async def getLogInfo(wait=False, last=None): if not wait: - for loginfo in list(logfifo): + for loginfo in list(logfifo)[last:]: yield loginfo return @@ -71,12 +71,11 @@ async def getLogInfo(wait=False): async with await s_queue.Window.anit(maxsize=2000) as window: - await window.puts(list(logfifo)) + await window.puts(list(logfifo)[last:]) logwindows.add(window) async for 
loginfo in window: - print(f'YIELD {loginfo}') yield loginfo logextra = {} diff --git a/synapse/tests/test_lib_cell.py b/synapse/tests/test_lib_cell.py index b5dd1481e8e..b4b991e5d0f 100644 --- a/synapse/tests/test_lib_cell.py +++ b/synapse/tests/test_lib_cell.py @@ -7,6 +7,7 @@ import signal import socket import asyncio +import logging import tarfile import collections import multiprocessing @@ -31,6 +32,7 @@ import synapse.lib.nexus as s_nexus import synapse.lib.config as s_config import synapse.lib.certdir as s_certdir +import synapse.lib.logging as s_logging import synapse.lib.msgpack as s_msgpack import synapse.lib.version as s_version import synapse.lib.lmdbslab as s_lmdbslab @@ -41,6 +43,8 @@ import synapse.tests.utils as s_t_utils +logger = logging.getLogger(__name__) + # Defective versions of spawned backup processes def _sleeperProc(pipe, srcdir, dstdir, lmdbpaths, logconf): time.sleep(3.0) @@ -3460,3 +3464,29 @@ async def sleep99(cell): self.none(await cell00.getTask(task01)) self.false(await cell00.killTask(task01)) + + async def test_cell_logs(self): + + async with self.getTestAha() as aha: + + async with aha.getLocalProxy() as proxy: + + async def logtask(): + logger.warning('one little piggy', extra=await aha.getLogExtra()) + + with self.getLogStream('synapse.tests.test_lib_cell') as stream: + + logger.warning('oh hai', extra=await aha.getLogExtra()) + + logs = [] + async for loginfo in proxy.logs(wait=True, last=-1): + + logs.append(loginfo) + + if len(logs) == 2: + break + + aha.schedCoro(logtask()) + + self.eq('oh hai', logs[0]['message']) + self.eq('one little piggy', logs[1]['message']) diff --git a/synapse/tests/utils.py b/synapse/tests/utils.py index 61146079f4d..39b3752906c 100644 --- a/synapse/tests/utils.py +++ b/synapse/tests/utils.py @@ -1844,6 +1844,26 @@ def getStructuredAsyncLoggerStream(self, logname, mesg='') -> contextlib.Abstrac slogger.removeHandler(handler) slogger.setLevel(level) + @contextlib.contextmanager + def 
getLogStream(self, name, level='DEBUG'): + + stream = AsyncStreamEvent() + logger = logging.getLogger(name) + + oldlevel = logger.level + + handler = logging.StreamHandler(stream) + handler.setFormatter(s_logging.Formatter()) + + logger.setLevel(level) + logger.addHandler(handler) + + try: + yield stream + finally: + logger.setLevel(oldlevel) + logger.removeHandler(handler) + @contextlib.asynccontextmanager async def getHttpSess(self, auth=None, port=None): ''' From ebe3c57dda7cb56d5b00748302356c002a0da5be Mon Sep 17 00:00:00 2001 From: visi Date: Thu, 13 Feb 2025 17:25:31 -0500 Subject: [PATCH 17/52] wip --- synapse/tests/test_lib_cell.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/synapse/tests/test_lib_cell.py b/synapse/tests/test_lib_cell.py index b4b991e5d0f..bf8ba206588 100644 --- a/synapse/tests/test_lib_cell.py +++ b/synapse/tests/test_lib_cell.py @@ -3478,6 +3478,10 @@ async def logtask(): logger.warning('oh hai', extra=await aha.getLogExtra()) + # test the non-wait version quick... 
+ logs = [loginfo async for loginfo in proxy.logs(last=-1)] + self.eq('oh hai', logs[0]['message']) + logs = [] async for loginfo in proxy.logs(wait=True, last=-1): From 473325e9f90731869834bd0eb5c95d840ddecd67 Mon Sep 17 00:00:00 2001 From: visi Date: Thu, 13 Feb 2025 18:55:10 -0500 Subject: [PATCH 18/52] wip --- synapse/lib/cell.py | 9 ++++--- synapse/lib/logging.py | 44 +++++++++++++++---------------- synapse/tests/test_cortex.py | 2 +- synapse/tests/test_lib_agenda.py | 4 +-- synapse/tests/test_lib_cell.py | 2 ++ synapse/tests/test_lib_logging.py | 21 --------------- 6 files changed, 32 insertions(+), 50 deletions(-) diff --git a/synapse/lib/cell.py b/synapse/lib/cell.py index c7417dac9e6..375ec50b15d 100644 --- a/synapse/lib/cell.py +++ b/synapse/lib/cell.py @@ -215,7 +215,7 @@ async def __anit__(self, cell, link, user): async def initCellApi(self): pass - @adminapi(log=True) + @adminapi() async def logs(self, wait=False, last=None): async for loginfo in self.cell.logs(wait=wait, last=last): yield loginfo @@ -3677,7 +3677,10 @@ async def getLogExtra(self, **kwargs): Returns: Dict: A dictionary ''' - return s_logging.getLogExtra(**kwargs) + extra = s_logging.getLogExtra(**kwargs) + if self.ahasvcname is not None: + extra['loginfo']['service'] = self.ahasvcname + return extra async def _getSpawnLogConf(self): return self.conf.get('_log_conf', {}) @@ -4304,7 +4307,7 @@ async def execmain(cls, argv, outp=None): cell = await cls.initFromArgv(argv, outp=outp) if cell.ahasvcname is not None: - s_logging.setLogExtra('service', cell.ahasvcname) + s_logging.setLogGlobal('service', cell.ahasvcname) await cell.main() diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index afdaa8b74ca..52bdf345ea7 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -78,30 +78,15 @@ async def getLogInfo(wait=False, last=None): async for loginfo in window: yield loginfo -logextra = {} -def setLogExtra(name, valu): +_glob_loginfo = {} +def setLogGlobal(name, valu): 
''' - Configure global extra values which should be added to every log. + Configure global values which should be added to every log. ''' - logextra[name] = valu + _glob_loginfo[name] = valu def getLogExtra(**kwargs): - - extra = {'synapse': kwargs} - extra.update(logextra) - - user = s_scope.get('user') # type: s_auth.User - if user is not None: - extra['user'] = user.iden - extra['username'] = user.name - - else: - sess = s_scope.get('sess') # type: s_daemon.Sess - if sess is not None and sess.user is not None: - extra['user'] = sess.user.iden - extra['username'] = sess.user.name - - return extra + return {'synapse': kwargs, 'loginfo': {}} class Formatter(logging.Formatter): @@ -119,6 +104,19 @@ def genLogInfo(self, record): 'time': self.formatTime(record, self.datefmt), } + loginfo.update(_glob_loginfo) + if hasattr(record, 'loginfo'): + loginfo.update(record.loginfo) + + if (user := s_scope.get('user')) is not None: + loginfo['user'] = user.iden + loginfo['username'] = user.name + + elif (sess := s_scope.get('sess')) is not None: + if sess.user is not None: + loginfo['user'] = sess.user.iden + loginfo['username'] = sess.user.name + if record.exc_info: loginfo['err'] = s_common.err(record.exc_info[1], fulltb=True) @@ -170,13 +168,13 @@ def getLogConfFromEnv(): conf = {} - if level := os.getenv('SYN_LOG_LEVEL') is not None: + if (level := os.getenv('SYN_LOG_LEVEL')) is not None: conf['level'] = normLogLevel(level) - if datefmt := os.getenv('SYN_LOG_DATEFORMAT') is not None: + if (datefmt := os.getenv('SYN_LOG_DATEFORMAT')) is not None: conf['datefmt'] = datefmt - if structlog := os.getenv('SYN_LOG_STRUCT') is not None: + if (structlog := os.getenv('SYN_LOG_STRUCT')) is not None: conf['structlog'] = structlog.lower() in ('1', 'true') return conf diff --git a/synapse/tests/test_cortex.py b/synapse/tests/test_cortex.py index 42f12b733c6..6d29c1081b9 100644 --- a/synapse/tests/test_cortex.py +++ b/synapse/tests/test_cortex.py @@ -8088,7 +8088,7 @@ async def 
test_cortex_user_scope(self): msgs = stream.jsonlines() mesg = [m for m in msgs if 'Set admin' in m.get('message')][0] self.isin('Set admin=True for lowuser', mesg.get('message')) - self.eq('admin', mesg['synapse'].get('username')) + self.eq('admin', mesg.get('username')) self.eq('lowuser', mesg['synapse'].get('target_username')) async def test_cortex_ext_httpapi(self): diff --git a/synapse/tests/test_lib_agenda.py b/synapse/tests/test_lib_agenda.py index fe8422498d2..d2b6c4773f0 100644 --- a/synapse/tests/test_lib_agenda.py +++ b/synapse/tests/test_lib_agenda.py @@ -402,10 +402,10 @@ def looptime(): core.stormlog = False msgs = stream.jsonlines() - msgs = [m for m in msgs if m['text'] == '$lib.queue.gen(visi).put(bar)'] + msgs = [m for m in msgs if m['synapse']['text'] == '$lib.queue.gen(visi).put(bar)'] self.gt(len(msgs), 0) for m in msgs: - self.eq(m.get('cron'), appt.iden) + self.eq(m['synapse'].get('cron'), appt.iden) self.eq(1, appt.startcount) diff --git a/synapse/tests/test_lib_cell.py b/synapse/tests/test_lib_cell.py index bf8ba206588..e775178f53b 100644 --- a/synapse/tests/test_lib_cell.py +++ b/synapse/tests/test_lib_cell.py @@ -3467,6 +3467,7 @@ async def sleep99(cell): async def test_cell_logs(self): + s_logging.setLogGlobal('woot', 'hehe') async with self.getTestAha() as aha: async with aha.getLocalProxy() as proxy: @@ -3492,5 +3493,6 @@ async def logtask(): aha.schedCoro(logtask()) + self.eq('hehe', logs[0]['woot']) self.eq('oh hai', logs[0]['message']) self.eq('one little piggy', logs[1]['message']) diff --git a/synapse/tests/test_lib_logging.py b/synapse/tests/test_lib_logging.py index 8b49cb05d0c..83602cb0f38 100644 --- a/synapse/tests/test_lib_logging.py +++ b/synapse/tests/test_lib_logging.py @@ -30,24 +30,3 @@ async def test_lib_logging(self): with self.raises(s_exc.BadArg): s_logging.normLogLevel({'key': 'newp'}) - - s_logging.setLogExtra('woot', 'hehe') - extra = s_logging.getLogExtra() - - self.eq(extra['woot'], 'hehe') - - # event = 
asyncio.Event() - # async def logtask(): - # await asyncio.sleep(1) - # print('LOG TASK') - # logger.warning('OMG WARNING', s_logging.getLogExtra(hehe='haha')) - # logger.warning('OMG WARNING', s_logging.getLogExtra(hehe='haha')) - # print('DONE') - - # async with await s_base.Base.anit() as base: - # s_logging.logfifo.clear() - # base.schedCoro(logtask()) - # async for loginfo in s_logging.getLogInfo(wait=True): - # print(loginfo) - # self.eq(loginfo['loglevel'], 'WARNING') - # break From 92ad17822ce6d97126ce5d33633276772f1cbdea Mon Sep 17 00:00:00 2001 From: visi Date: Thu, 13 Feb 2025 19:49:24 -0500 Subject: [PATCH 19/52] wip --- synapse/tests/test_lib_stormtypes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/tests/test_lib_stormtypes.py b/synapse/tests/test_lib_stormtypes.py index cc01047cecd..d93fd71e8c3 100644 --- a/synapse/tests/test_lib_stormtypes.py +++ b/synapse/tests/test_lib_stormtypes.py @@ -4928,7 +4928,7 @@ async def getCronJob(text): self.true(await stream.wait(6)) mesg = stream.jsonlines()[0] self.eq(mesg['message'], f'm3 cron {guid}') - self.eq(mesg['iden'], guid) + self.eq(mesg['synapse']['iden'], guid) ################## From 45d09ab91c076f108757382345e248cbd2af72bf Mon Sep 17 00:00:00 2001 From: visi Date: Fri, 14 Feb 2025 12:47:26 -0500 Subject: [PATCH 20/52] wip --- synapse/lib/logging.py | 4 ++-- synapse/lib/stormlib/cell.py | 2 -- synapse/tests/test_lib_aha.py | 5 +++-- synapse/tests/test_lib_stormlib_cell.py | 4 +--- 4 files changed, 6 insertions(+), 9 deletions(-) diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index 52bdf345ea7..264762f15aa 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -181,13 +181,13 @@ def getLogConfFromEnv(): def normLogLevel(valu): ''' - Norm a log level value to a integer. + Norm a log level value to an integer. Args: valu: The value to norm ( a string or integer ). Returns: - int: A valid log level. + int: A valid log level integer. 
''' if isinstance(valu, str): diff --git a/synapse/lib/stormlib/cell.py b/synapse/lib/stormlib/cell.py index 72a5fa503ea..c428f5080aa 100644 --- a/synapse/lib/stormlib/cell.py +++ b/synapse/lib/stormlib/cell.py @@ -300,9 +300,7 @@ async def _getMirrorUrls(self, name=None): if name is None: return await self.runt.snap.core.getMirrorUrls() - print(f'NAME: {name}') ssvc = self.runt.snap.core.getStormSvc(name) - print(f'SSVC: {ssvc}') if ssvc is None: mesg = f'No service with name/iden: {name}' raise s_exc.NoSuchName(mesg=mesg) diff --git a/synapse/tests/test_lib_aha.py b/synapse/tests/test_lib_aha.py index 28dcf67c57c..3cac4d01c05 100644 --- a/synapse/tests/test_lib_aha.py +++ b/synapse/tests/test_lib_aha.py @@ -1418,8 +1418,9 @@ async def test_aha_gather(self): self.len(nexsindx * 2, items) # ensure we handle down services correctly - async with aha.waiter(1, 'aha:svcdown', timeout=10): - await cell01.fini() + async with aha.waiter(2, 'aha:svcadd', timeout=10): + async with aha.waiter(1, 'aha:svcdown', timeout=10): + await cell01.fini() # test the call endpoint todo = s_common.todo('getCellInfo') diff --git a/synapse/tests/test_lib_stormlib_cell.py b/synapse/tests/test_lib_stormlib_cell.py index 553f7487c52..3952b42f44b 100644 --- a/synapse/tests/test_lib_stormlib_cell.py +++ b/synapse/tests/test_lib_stormlib_cell.py @@ -146,9 +146,7 @@ async def test_stormlib_cell_getmirrors(self): await aha.delAhaSvc('00.testsvc.synapse') - # FIXME: how did this ever work? 
- with self.raises(s_exc.NoSuchName): - await core00.callStorm('return($lib.cell.getMirrorUrls(name=testsvc))') + self.false(await core00.callStorm('return($lib.cell.getMirrorUrls(name=testsvc))')) # No AHA case From e170356e553557db2c64a4079f3aa4d0f8e61449 Mon Sep 17 00:00:00 2001 From: visi Date: Tue, 18 Feb 2025 13:24:04 -0500 Subject: [PATCH 21/52] wip --- synapse/tests/test_lib_stormlib_cell.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/synapse/tests/test_lib_stormlib_cell.py b/synapse/tests/test_lib_stormlib_cell.py index 3952b42f44b..a76e56dfa3a 100644 --- a/synapse/tests/test_lib_stormlib_cell.py +++ b/synapse/tests/test_lib_stormlib_cell.py @@ -146,7 +146,8 @@ async def test_stormlib_cell_getmirrors(self): await aha.delAhaSvc('00.testsvc.synapse') - self.false(await core00.callStorm('return($lib.cell.getMirrorUrls(name=testsvc))')) + with self.raises(s_exc.NoSuchName): + await core00.callStorm('return($lib.cell.getMirrorUrls(name=testsvc))') # No AHA case From 1e31864aa706494ce4b3dd7cd226c5c44cc77954 Mon Sep 17 00:00:00 2001 From: visi Date: Tue, 18 Feb 2025 13:28:36 -0500 Subject: [PATCH 22/52] wip --- synapse/lib/link.py | 26 ++++++-------------------- 1 file changed, 6 insertions(+), 20 deletions(-) diff --git a/synapse/lib/link.py b/synapse/lib/link.py index f13006bcc41..891db6cce6d 100644 --- a/synapse/lib/link.py +++ b/synapse/lib/link.py @@ -32,51 +32,37 @@ async def connect(host, port, ssl=None, hostname=None, linkinfo=None): reader, writer = await asyncio.open_connection(host, port, ssl=ssl, server_hostname=hostname) return await Link.anit(reader, writer, info=info) -async def listen(host, port, onlink, ssl=None, linkinfo=None): +async def listen(host, port, onlink, ssl=None): ''' Listen on the given host/port and fire onlink(Link). 
Returns a server object that contains the listening sockets ''' - info = { - 'ssl': ssl, - 'tls': bool(ssl), - 'host': host, - 'port': port, - } - - if linkinfo is not None: - info.udpate(linkinfo) - async def onconn(reader, writer): + info = {'tls': bool(ssl)} link = await Link.anit(reader, writer, info=info) link.schedCoro(onlink(link)) server = await asyncio.start_server(onconn, host=host, port=port, ssl=ssl) return server -async def unixlisten(path, onlink, linkinfo=None): +async def unixlisten(path, onlink): ''' Start an PF_UNIX server listening on the given path. ''' info = {'path': path, 'unix': True} - if linkinfo is not None: - info.update(linkinfo) async def onconn(reader, writer): link = await Link.anit(reader, writer, info=info) link.schedCoro(onlink(link)) return await asyncio.start_unix_server(onconn, path=path) -async def unixconnect(path, linkinfo=None): +async def unixconnect(path): ''' Connect to a PF_UNIX server listening on the given path. ''' - info = {'path': path, 'unix': True} - if linkinfo is not None: - info.update(linkinfo) - reader, writer = await asyncio.open_unix_connection(path=path) + info = {'path': path, 'unix': True} return await Link.anit(reader, writer, info=info) async def linkfile(mode='wb'): @@ -284,7 +270,7 @@ async def tx(self, mesg): await self.writer.drain() - except Exception as e: + except (asyncio.CancelledError, Exception) as e: await self.fini() From a2c61f601746c44ff2895cb236662e1a9dcd001a Mon Sep 17 00:00:00 2001 From: visi Date: Tue, 18 Feb 2025 13:37:50 -0500 Subject: [PATCH 23/52] wip --- synapse/lib/logging.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index 264762f15aa..a7493889e51 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -47,7 +47,6 @@ async def _initLogBase(): global logbase - # FIXME: resolve circurlar deps import synapse.lib.base as s_base logbase = await s_base.Base.anit() @@ -66,7 +65,6 @@ async def getLogInfo(wait=False, 
last=None): if logbase is None: await _initLogBase() - # FIXME: resolve circurlar deps import synapse.lib.queue as s_queue async with await s_queue.Window.anit(maxsize=2000) as window: From 371d6b53a5fbffe3acd4f99a2299b0f90a9b54d3 Mon Sep 17 00:00:00 2001 From: visi Date: Tue, 18 Feb 2025 13:42:18 -0500 Subject: [PATCH 24/52] revert pool exc context --- synapse/cortex.py | 53 ++++++++++++++++++++--------------------------- 1 file changed, 22 insertions(+), 31 deletions(-) diff --git a/synapse/cortex.py b/synapse/cortex.py index 1c120bfbdff..5b7f30a796c 100644 --- a/synapse/cortex.py +++ b/synapse/cortex.py @@ -35,7 +35,6 @@ import synapse.lib.dyndeps as s_dyndeps import synapse.lib.grammar as s_grammar import synapse.lib.httpapi as s_httpapi -import synapse.lib.logging as s_logging import synapse.lib.msgpack as s_msgpack import synapse.lib.modules as s_modules import synapse.lib.schemas as s_schemas @@ -926,7 +925,7 @@ async def initServiceStorage(self): self._initCorePerms() # Reset the storm:log:level from the config value to an int for internal use. 
- self.conf['storm:log:level'] = s_logging.normLogLevel(self.conf.get('storm:log:level')) + self.conf['storm:log:level'] = s_common.normLogLevel(self.conf.get('storm:log:level')) self.stormlog = self.conf.get('storm:log') self.stormloglvl = self.conf.get('storm:log:level') @@ -5876,46 +5875,38 @@ async def _getMirrorProxy(self, opts): return None if self.stormpool.size() == 0: - extra = await self.getLogExtra() - logger.warning('Storm query mirror pool is empty.', extra=extra) + logger.warning('Storm query mirror pool is empty, running query locally.') return None - timeout = self.stormpoolopts.get('timeout:connection') + proxy = None try: + timeout = self.stormpoolopts.get('timeout:connection') proxy = await self.stormpool.proxy(timeout=timeout) - except TimeoutError as e: - extra = await self.getLogExtra(timeout=timeout) - logger.warning('Timeout connecting to storm pool mirror.', extra=extra) - return None - - proxyname = proxy._ahainfo.get('name') - if proxyname is not None and proxyname == self.ahasvcname: - # we are part of the pool and were selected. Convert to local use. - return None - - curoffs = opts.setdefault('nexsoffs', await self.getNexsIndx() - 1) + proxyname = proxy._ahainfo.get('name') + if proxyname is not None and proxyname == self.ahasvcname: + # we are part of the pool and were selected. Convert to local use. 
+ return None - try: + curoffs = opts.setdefault('nexsoffs', await self.getNexsIndx() - 1) miroffs = await s_common.wait_for(proxy.getNexsIndx(), timeout) - 1 + if (delta := curoffs - miroffs) > MAX_NEXUS_DELTA: + mesg = (f'Pool mirror [{proxyname}] Nexus offset delta too large ' + f'({delta} > {MAX_NEXUS_DELTA}), running query locally.') + logger.warning(mesg, extra=await self.getLogExtra(delta=delta, mirror=proxyname, mirror_offset=miroffs)) + return None - except s_exc.IsFini as e: - extra = await self.getLogExtra(mirror=proxyname) - logger.warning('Storm pool mirror is shutting down.', extra=extra) - return None - - except TimeoutError as e: - extra = await self.getLogExtra(mirror=proxyname, nexsoffs=curoffs, timeout=timeout) - logger.warning('Timeout retrieving storm pool mirror nexus offset.', extra=extra) - return None + return proxy - if (delta := curoffs - miroffs) > MAX_NEXUS_DELTA: - extra = await self.getLogExtra(mirror=proxyname, delta=delta, nexsoffs=curoffs, mirror_offset=miroffs) - logger.warning('Pool mirror nexus offset delta too large.', extra=extra) + except (TimeoutError, s_exc.IsFini): + if proxy is None: + logger.warning('Timeout waiting for pool mirror, running query locally.') + else: + mesg = f'Timeout waiting for pool mirror [{proxyname}] Nexus offset, running query locally.' 
+ logger.warning(mesg, extra=await self.getLogExtra(mirror=proxyname)) + await proxy.fini() return None - return proxy - async def storm(self, text, opts=None): opts = self._initStormOpts(opts) From 984481a107a6c2f2add9720d119c97a4114f28ce Mon Sep 17 00:00:00 2001 From: visi Date: Tue, 18 Feb 2025 13:45:02 -0500 Subject: [PATCH 25/52] wip --- synapse/lib/logging.py | 1 + 1 file changed, 1 insertion(+) diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index a7493889e51..88e8b5ac39a 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -103,6 +103,7 @@ def genLogInfo(self, record): } loginfo.update(_glob_loginfo) + if hasattr(record, 'loginfo'): loginfo.update(record.loginfo) From 376d08962200d614d5aac56228f91750a604aa52 Mon Sep 17 00:00:00 2001 From: visi Date: Tue, 18 Feb 2025 13:54:23 -0500 Subject: [PATCH 26/52] wip --- synapse/tests/test_cortex.py | 58 +++++++++++++++++++----------------- 1 file changed, 30 insertions(+), 28 deletions(-) diff --git a/synapse/tests/test_cortex.py b/synapse/tests/test_cortex.py index 969fba09975..b79d70c45fa 100644 --- a/synapse/tests/test_cortex.py +++ b/synapse/tests/test_cortex.py @@ -1242,7 +1242,7 @@ async def test_cortex_storm_dmon_log(self): mesg = stream.jsonlines()[0] self.eq(mesg.get('message'), f'Running dmon {iden}') - self.eq(mesg['synapse'].get('iden'), iden) + self.eq(mesg.get('iden'), iden) opts = {'vars': {'iden': iden}} logs = await core.callStorm('return($lib.dmon.log($iden))', opts=opts) @@ -3494,7 +3494,7 @@ async def test_storm_logging(self): self.true(await stream.wait(4)) mesg = stream.jsonlines()[0] - self.eq(mesg['synapse'].get('view'), view) + self.eq(mesg.get('view'), view) async def test_strict(self): @@ -8177,7 +8177,7 @@ async def test_cortex_user_scope(self): mesg = [m for m in msgs if 'Added user' in m.get('message')][0] self.eq('Added user=lowuser', mesg.get('message')) self.eq('admin', mesg.get('username')) - self.eq('lowuser', mesg['synapse'].get('target_username')) + 
self.eq('lowuser', mesg.get('target_username')) with self.getStructuredAsyncLoggerStream('synapse.lib.cell') as stream: @@ -8191,7 +8191,7 @@ async def test_cortex_user_scope(self): mesg = [m for m in msgs if 'Set admin' in m.get('message')][0] self.isin('Set admin=True for lowuser', mesg.get('message')) self.eq('admin', mesg.get('username')) - self.eq('lowuser', mesg['synapse'].get('target_username')) + self.eq('lowuser', mesg.get('target_username')) async def test_cortex_ext_httpapi(self): # Cortex API tests for Extended HttpAPI @@ -8399,12 +8399,12 @@ async def _hang(*args, **kwargs): self.len(2, msgs) self.eq(msgs[0].get('message'), f'Offloading Storm query to mirror 01.core.{ahanet}.') - self.eq(msgs[0]['synapse'].get('hash'), qhash) - self.eq(msgs[0]['synapse'].get('mirror'), f'01.core.{ahanet}') + self.eq(msgs[0].get('hash'), qhash) + self.eq(msgs[0].get('mirror'), f'01.core.{ahanet}') self.eq(msgs[1].get('message'), f'Executing storm query {{{q}}} as [root]') - self.eq(msgs[1]['synapse'].get('hash'), qhash) - self.eq(msgs[1]['synapse'].get('pool:from'), f'00.core.{ahanet}') + self.eq(msgs[1].get('hash'), qhash) + self.eq(msgs[1].get('pool:from'), f'00.core.{ahanet}') # callStorm() q = 'inet:asn=0 return($lib.true)' @@ -8418,12 +8418,12 @@ async def _hang(*args, **kwargs): self.len(2, msgs) self.eq(msgs[0].get('message'), f'Offloading Storm query to mirror 01.core.{ahanet}.') - self.eq(msgs[0]['synapse'].get('hash'), qhash) - self.eq(msgs[0]['synapse'].get('mirror'), f'01.core.{ahanet}') + self.eq(msgs[0].get('hash'), qhash) + self.eq(msgs[0].get('mirror'), f'01.core.{ahanet}') self.eq(msgs[1].get('message'), f'Executing storm query {{{q}}} as [root]') - self.eq(msgs[1]['synapse'].get('hash'), qhash) - self.eq(msgs[1]['synapse'].get('pool:from'), f'00.core.{ahanet}') + self.eq(msgs[1].get('hash'), qhash) + self.eq(msgs[1].get('pool:from'), f'00.core.{ahanet}') # exportStorm() q = 'inet:asn=0' @@ -8437,12 +8437,12 @@ async def _hang(*args, **kwargs): 
self.len(2, msgs) self.eq(msgs[0].get('message'), f'Offloading Storm query to mirror 01.core.{ahanet}.') - self.eq(msgs[0]['synapse'].get('hash'), qhash) - self.eq(msgs[0]['synapse'].get('mirror'), f'01.core.{ahanet}') + self.eq(msgs[0].get('hash'), qhash) + self.eq(msgs[0].get('mirror'), f'01.core.{ahanet}') self.eq(msgs[1].get('message'), f'Executing storm query {{{q}}} as [root]') - self.eq(msgs[1]['synapse'].get('hash'), qhash) - self.eq(msgs[1]['synapse'].get('pool:from'), f'00.core.{ahanet}') + self.eq(msgs[1].get('hash'), qhash) + self.eq(msgs[1].get('pool:from'), f'00.core.{ahanet}') # count() q = 'inet:asn=0' @@ -8456,12 +8456,12 @@ async def _hang(*args, **kwargs): self.len(2, msgs) self.eq(msgs[0].get('message'), f'Offloading Storm query to mirror 01.core.{ahanet}.') - self.eq(msgs[0]['synapse'].get('hash'), qhash) - self.eq(msgs[0]['synapse'].get('mirror'), f'01.core.{ahanet}') + self.eq(msgs[0].get('hash'), qhash) + self.eq(msgs[0].get('mirror'), f'01.core.{ahanet}') self.eq(msgs[1].get('message'), f'Executing storm query {{{q}}} as [root]') - self.eq(msgs[1]['synapse'].get('hash'), qhash) - self.eq(msgs[1]['synapse'].get('pool:from'), f'00.core.{ahanet}') + self.eq(msgs[1].get('hash'), qhash) + self.eq(msgs[1].get('pool:from'), f'00.core.{ahanet}') with patch('synapse.cortex.CoreApi.getNexsIndx', _hang): @@ -8472,7 +8472,7 @@ async def _hang(*args, **kwargs): stream.seek(0) data = stream.read() self.notin('Offloading Storm query', data) - self.isin('Timeout retrieving storm pool mirror nexus offset.', data) + self.isin('Timeout waiting for pool mirror [01.core.synapse] Nexus offset', data) self.notin('Timeout waiting for query mirror', data) await core00.stormpool.waitready(timeout=12) @@ -8486,7 +8486,7 @@ async def _hang(*args, **kwargs): stream.seek(0) data = stream.read() self.notin('Offloading Storm query', data) - self.isin('Timeout retrieving storm pool mirror nexus offset.', data) + self.isin('Timeout waiting for pool mirror [01.core.synapse] 
Nexus offset', data) self.notin('Timeout waiting for query mirror', data) await core00.stormpool.waitready(timeout=12) @@ -8513,7 +8513,9 @@ async def _hang(*args, **kwargs): stream.seek(0) data = stream.read() - self.isin('Pool mirror nexus offset delta too large.', data) + explog = (f'Pool mirror [01.core.synapse] Nexus offset delta too large ' + f'({nexsoffs} > 1), running query locally') + self.isin(explog, data) self.notin('Offloading Storm query', data) with self.getLoggerStream('synapse') as stream: @@ -8570,7 +8572,7 @@ async def _hang(*args, **kwargs): stream.seek(0) data = stream.read() - self.isin('Timeout connecting to storm pool mirror.', data) + self.isin('Timeout waiting for pool mirror, running query locally', data) await core01.fini() @@ -8580,28 +8582,28 @@ async def _hang(*args, **kwargs): stream.seek(0) data = stream.read() - self.isin('Storm query mirror pool is empty.', data) + self.isin('Storm query mirror pool is empty, running query locally.', data) with self.getLoggerStream('synapse') as stream: self.true(await core00.callStorm('inet:asn=0 return($lib.true)')) stream.seek(0) data = stream.read() - self.isin('Storm query mirror pool is empty.', data) + self.isin('Storm query mirror pool is empty, running query locally.', data) with self.getLoggerStream('synapse') as stream: self.len(1, await alist(core00.exportStorm('inet:asn=0'))) stream.seek(0) data = stream.read() - self.isin('Storm query mirror pool is empty.', data) + self.isin('Storm query mirror pool is empty, running query locally.', data) with self.getLoggerStream('synapse') as stream: self.eq(1, await core00.count('inet:asn=0')) stream.seek(0) data = stream.read() - self.isin('Storm query mirror pool is empty.', data) + self.isin('Storm query mirror pool is empty, running query locally.', data) core01 = await base.enter_context(self.getTestCore(dirn=dirn01)) await core01.promote(graceful=True) From 8577c4212998d79c89545c68b073390cc6810ed3 Mon Sep 17 00:00:00 2001 From: visi Date: 
Tue, 18 Feb 2025 14:00:00 -0500 Subject: [PATCH 27/52] wip --- synapse/cortex.py | 3 ++- synapse/tests/test_cortex.py | 40 ++++++++++++++++++------------------ 2 files changed, 22 insertions(+), 21 deletions(-) diff --git a/synapse/cortex.py b/synapse/cortex.py index 5b7f30a796c..2b22be220e8 100644 --- a/synapse/cortex.py +++ b/synapse/cortex.py @@ -35,6 +35,7 @@ import synapse.lib.dyndeps as s_dyndeps import synapse.lib.grammar as s_grammar import synapse.lib.httpapi as s_httpapi +import synapse.lib.logging as s_logging import synapse.lib.msgpack as s_msgpack import synapse.lib.modules as s_modules import synapse.lib.schemas as s_schemas @@ -925,7 +926,7 @@ async def initServiceStorage(self): self._initCorePerms() # Reset the storm:log:level from the config value to an int for internal use. - self.conf['storm:log:level'] = s_common.normLogLevel(self.conf.get('storm:log:level')) + self.conf['storm:log:level'] = s_logging.normLogLevel(self.conf.get('storm:log:level')) self.stormlog = self.conf.get('storm:log') self.stormloglvl = self.conf.get('storm:log:level') diff --git a/synapse/tests/test_cortex.py b/synapse/tests/test_cortex.py index b79d70c45fa..7c15308f02a 100644 --- a/synapse/tests/test_cortex.py +++ b/synapse/tests/test_cortex.py @@ -1242,7 +1242,7 @@ async def test_cortex_storm_dmon_log(self): mesg = stream.jsonlines()[0] self.eq(mesg.get('message'), f'Running dmon {iden}') - self.eq(mesg.get('iden'), iden) + self.eq(mesg['synapse'].get('iden'), iden) opts = {'vars': {'iden': iden}} logs = await core.callStorm('return($lib.dmon.log($iden))', opts=opts) @@ -3494,7 +3494,7 @@ async def test_storm_logging(self): self.true(await stream.wait(4)) mesg = stream.jsonlines()[0] - self.eq(mesg.get('view'), view) + self.eq(mesg['synapse'].get('view'), view) async def test_strict(self): @@ -8177,7 +8177,7 @@ async def test_cortex_user_scope(self): mesg = [m for m in msgs if 'Added user' in m.get('message')][0] self.eq('Added user=lowuser', mesg.get('message')) 
self.eq('admin', mesg.get('username')) - self.eq('lowuser', mesg.get('target_username')) + self.eq('lowuser', mesg['synapse'].get('target_username')) with self.getStructuredAsyncLoggerStream('synapse.lib.cell') as stream: @@ -8191,7 +8191,7 @@ async def test_cortex_user_scope(self): mesg = [m for m in msgs if 'Set admin' in m.get('message')][0] self.isin('Set admin=True for lowuser', mesg.get('message')) self.eq('admin', mesg.get('username')) - self.eq('lowuser', mesg.get('target_username')) + self.eq('lowuser', mesg['synapse'].get('target_username')) async def test_cortex_ext_httpapi(self): # Cortex API tests for Extended HttpAPI @@ -8399,12 +8399,12 @@ async def _hang(*args, **kwargs): self.len(2, msgs) self.eq(msgs[0].get('message'), f'Offloading Storm query to mirror 01.core.{ahanet}.') - self.eq(msgs[0].get('hash'), qhash) - self.eq(msgs[0].get('mirror'), f'01.core.{ahanet}') + self.eq(msgs[0]['synapse'].get('hash'), qhash) + self.eq(msgs[0]['synapse'].get('mirror'), f'01.core.{ahanet}') self.eq(msgs[1].get('message'), f'Executing storm query {{{q}}} as [root]') - self.eq(msgs[1].get('hash'), qhash) - self.eq(msgs[1].get('pool:from'), f'00.core.{ahanet}') + self.eq(msgs[1]['synapse'].get('hash'), qhash) + self.eq(msgs[1]['synapse'].get('pool:from'), f'00.core.{ahanet}') # callStorm() q = 'inet:asn=0 return($lib.true)' @@ -8418,12 +8418,12 @@ async def _hang(*args, **kwargs): self.len(2, msgs) self.eq(msgs[0].get('message'), f'Offloading Storm query to mirror 01.core.{ahanet}.') - self.eq(msgs[0].get('hash'), qhash) - self.eq(msgs[0].get('mirror'), f'01.core.{ahanet}') + self.eq(msgs[0]['synapse'].get('hash'), qhash) + self.eq(msgs[0]['synapse'].get('mirror'), f'01.core.{ahanet}') self.eq(msgs[1].get('message'), f'Executing storm query {{{q}}} as [root]') - self.eq(msgs[1].get('hash'), qhash) - self.eq(msgs[1].get('pool:from'), f'00.core.{ahanet}') + self.eq(msgs[1]['synapse'].get('hash'), qhash) + self.eq(msgs[1]['synapse'].get('pool:from'), 
f'00.core.{ahanet}') # exportStorm() q = 'inet:asn=0' @@ -8437,12 +8437,12 @@ async def _hang(*args, **kwargs): self.len(2, msgs) self.eq(msgs[0].get('message'), f'Offloading Storm query to mirror 01.core.{ahanet}.') - self.eq(msgs[0].get('hash'), qhash) - self.eq(msgs[0].get('mirror'), f'01.core.{ahanet}') + self.eq(msgs[0]['synapse'].get('hash'), qhash) + self.eq(msgs[0]['synapse'].get('mirror'), f'01.core.{ahanet}') self.eq(msgs[1].get('message'), f'Executing storm query {{{q}}} as [root]') - self.eq(msgs[1].get('hash'), qhash) - self.eq(msgs[1].get('pool:from'), f'00.core.{ahanet}') + self.eq(msgs[1]['synapse'].get('hash'), qhash) + self.eq(msgs[1]['synapse'].get('pool:from'), f'00.core.{ahanet}') # count() q = 'inet:asn=0' @@ -8456,12 +8456,12 @@ async def _hang(*args, **kwargs): self.len(2, msgs) self.eq(msgs[0].get('message'), f'Offloading Storm query to mirror 01.core.{ahanet}.') - self.eq(msgs[0].get('hash'), qhash) - self.eq(msgs[0].get('mirror'), f'01.core.{ahanet}') + self.eq(msgs[0]['synapse'].get('hash'), qhash) + self.eq(msgs[0]['synapse'].get('mirror'), f'01.core.{ahanet}') self.eq(msgs[1].get('message'), f'Executing storm query {{{q}}} as [root]') - self.eq(msgs[1].get('hash'), qhash) - self.eq(msgs[1].get('pool:from'), f'00.core.{ahanet}') + self.eq(msgs[1]['synapse'].get('hash'), qhash) + self.eq(msgs[1]['synapse'].get('pool:from'), f'00.core.{ahanet}') with patch('synapse.cortex.CoreApi.getNexsIndx', _hang): From 84946e81c4b18bc6b8f1296b0f22f62b80b03633 Mon Sep 17 00:00:00 2001 From: visi Date: Tue, 18 Feb 2025 15:05:50 -0500 Subject: [PATCH 28/52] remove circ deps --- synapse/lib/cell.py | 30 ++++++++++++++++++++++++++++-- synapse/lib/logging.py | 36 ++---------------------------------- 2 files changed, 30 insertions(+), 36 deletions(-) diff --git a/synapse/lib/cell.py b/synapse/lib/cell.py index 375ec50b15d..88d175cf8d9 100644 --- a/synapse/lib/cell.py +++ b/synapse/lib/cell.py @@ -5090,9 +5090,35 @@ def getCachedSslCtx(self, opts=None, 
verify=None): key = tuple(sorted(opts.items())) return self._sslctx_cache.get(key) + async def _initLogBase(self): + + async with s_logging.loglock: + + if s_logging.logbase is not None: + return + + s_logging.logbase = await s_base.Base.anit() + s_logging.logbase._fini_at_exit = True + + s_logging.logbase.schedCoro(s_logging._feedLogTask()) + async def logs(self, wait=False, last=None): - async for loginfo in s_logging.getLogInfo(wait=wait, last=last): - yield loginfo + + await self._initLogBase() + + if not wait: + for loginfo in list(s_logging.logfifo)[last:]: + yield loginfo + return + + async with await s_queue.Window.anit(maxsize=2000) as window: + + await window.puts(list(s_logging.logfifo)[last:]) + + s_logging.logwindows.add(window) + + async for loginfo in window: + yield loginfo async def freeze(self, timeout=30): diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index 88e8b5ac39a..92da26dfd15 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -15,6 +15,7 @@ logtodo = [] logbase = None +loglock = asyncio.Lock() logevnt = asyncio.Event() logwindows = weakref.WeakSet() @@ -26,7 +27,7 @@ def _addLogInfo(info): logtodo.append(info) logevnt.set() -async def _feedLogInfo(): +async def _feedLogTask(): while not logbase.isfini: @@ -43,39 +44,6 @@ async def _feedLogInfo(): for wind in logwindows: await wind.puts(todo) -async def _initLogBase(): - - global logbase - - import synapse.lib.base as s_base - - logbase = await s_base.Base.anit() - logbase._fini_at_exit = True - logbase.schedCoro(_feedLogInfo()) - -async def getLogInfo(wait=False, last=None): - - if not wait: - for loginfo in list(logfifo)[last:]: - yield loginfo - return - - global logbase - - if logbase is None: - await _initLogBase() - - import synapse.lib.queue as s_queue - - async with await s_queue.Window.anit(maxsize=2000) as window: - - await window.puts(list(logfifo)[last:]) - - logwindows.add(window) - - async for loginfo in window: - yield loginfo - _glob_loginfo = 
{} def setLogGlobal(name, valu): ''' From 83c2fa840d5f2923b4754594c057668c1ed2c6e5 Mon Sep 17 00:00:00 2001 From: visi Date: Fri, 21 Feb 2025 07:00:45 -0500 Subject: [PATCH 29/52] wip --- synapse/lib/cell.py | 16 ++++++++++++---- synapse/lib/logging.py | 16 ++++++++++------ 2 files changed, 22 insertions(+), 10 deletions(-) diff --git a/synapse/lib/cell.py b/synapse/lib/cell.py index 88d175cf8d9..97cdcb6db94 100644 --- a/synapse/lib/cell.py +++ b/synapse/lib/cell.py @@ -3789,6 +3789,7 @@ def getArgParser(cls, conf=None): pars.add_argument('dirn', help=f'The storage directory for the {name} service.') pars.add_argument('--log-level', default='INFO', choices=list(s_const.LOG_LEVEL_CHOICES.keys()), + type=s_logging.level, help='Deprecated. Please use SYN_LOG_LEVEL environment variable.', type=str.upper) pars.add_argument('--structured-logging', default=True, action='store_true', @@ -4227,8 +4228,15 @@ async def initFromArgv(cls, argv, outp=None): path = s_common.genpath(opts.dirn, 'cell.yaml') mods_path = s_common.genpath(opts.dirn, 'cell.mods.yaml') - level = s_logging.normLogLevel(opts.log_level) - logconf = s_logging.setup(level=level, structlog=opts.structured_logging) + logconf = { + 'level': opts.log_level, + 'structlog': opts.structured_logging + } + + # if (logarchive := conf.get('log:archive')) is not None: + # logconf['archive'] = logarchive + + logconf = s_logging.setup(**logconf) extra = s_logging.getLogExtra(service_type=cls.getCellType(), service_version=cls.VERSTRING, @@ -4239,7 +4247,7 @@ async def initFromArgv(cls, argv, outp=None): await cls._initBootRestore(opts.dirn) try: - conf.setdefault('_log_conf', logconf) + conf['_log_conf'] = logconf conf.setConfFromOpts(opts) conf.setConfFromEnvs() conf.setConfFromFile(path) @@ -4248,7 +4256,7 @@ async def initFromArgv(cls, argv, outp=None): logger.exception(f'Error while bootstrapping cell config.') raise - s_coro.set_pool_logging(logger, logconf=conf['_log_conf']) + # s_coro.set_pool_logging(logger, 
logconf=conf['_log_conf']) try: cell = await cls.anit(opts.dirn, conf=conf) diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index 92da26dfd15..5bd23ce6385 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -110,13 +110,14 @@ def format(self, record): loginfo = self.genLogInfo(record) return logging.Formatter.format(self, record) -def setup(level=logging.WARNING, structlog=False): +_glob_logconf = {} +def setup(**conf): ''' Configure synapse logging. ''' - conf = getLogConfFromEnv() - conf.setdefault('level', level) - conf.setdefault('structlog', structlog) + conf.update(getLogConfFromEnv()) + conf.setdefault('level', logging.WARNING) + conf.setdefault('structlog', False) fmtclass = Formatter if not conf.get('structlog'): @@ -129,6 +130,9 @@ def setup(level=logging.WARNING, structlog=False): logger.info('log level set to %s', s_const.LOG_LEVEL_INVERSE_CHOICES.get(level)) + _glob_logconf.clear() + _glob_logconf.update(conf) + return conf def getLogConfFromEnv(): @@ -146,9 +150,9 @@ def getLogConfFromEnv(): return conf -def normLogLevel(valu): +def level(valu): ''' - Norm a log level value to an integer. + Normalize a log level value to an integer. Args: valu: The value to norm ( a string or integer ). 
From 6da829a544ac1e4ba641a1cfe4cac8f117d728d0 Mon Sep 17 00:00:00 2001 From: visi Date: Mon, 24 Feb 2025 12:00:03 -0500 Subject: [PATCH 30/52] wip --- synapse/cortex.py | 20 +++++++----- synapse/lib/cell.py | 9 +++-- synapse/lib/httpapi.py | 9 ++--- synapse/lib/logging.py | 22 ++++++++----- synapse/lib/snap.py | 6 ++-- synapse/lib/stormlib/log.py | 8 +++-- synapse/lib/view.py | 14 ++++---- synapse/tests/test_cortex.py | 40 +++++++++++------------ synapse/tests/test_lib_cell.py | 6 ++-- synapse/tests/test_lib_stormlib_cortex.py | 2 +- synapse/tests/test_lib_stormlib_log.py | 2 +- synapse/tests/test_lib_stormtypes.py | 2 +- synapse/tests/test_lib_trigger.py | 4 +-- 13 files changed, 77 insertions(+), 67 deletions(-) diff --git a/synapse/cortex.py b/synapse/cortex.py index 2b22be220e8..29673737502 100644 --- a/synapse/cortex.py +++ b/synapse/cortex.py @@ -4633,6 +4633,7 @@ async def getHttpExtApi(self, iden): adef = self._exthttpapis.get(iden) if adef is None: raise s_exc.NoSuchIden(mesg=f'No extended http api for {iden=}', iden=iden) + # TODO: any reason this (and above) uses the slower copy.deepcopy? return copy.deepcopy(adef) async def getHttpExtApiByPath(self, path): @@ -6160,19 +6161,20 @@ async def reqValidStorm(self, text, opts=None): await self.getStormQuery(text, mode=mode) return True - def _logStormQuery(self, text, user, info=None): + async def _logStormQuery(self, text, user, extra=None): ''' Log a storm query. 
''' if self.stormlog: - if info is None: - info = {} - info['text'] = text - info['username'] = user.name - info['user'] = user.iden - info['hash'] = s_storm.queryhash(text) - stormlogger.log(self.stormloglvl, 'Executing storm query {%s} as [%s]', text, user.name, - extra={'synapse': info}) + + if extra is None: + extra = {} + + extra['text'] = text + extra['hash'] = s_storm.queryhash(text) + + extra = await self.getLogExtra(**extra) + stormlogger.log(self.stormloglvl, 'Executing storm query as [%s]', user.name, extra=extra) async def getNodeByNdef(self, ndef, view=None): ''' diff --git a/synapse/lib/cell.py b/synapse/lib/cell.py index 97cdcb6db94..f4abe78229c 100644 --- a/synapse/lib/cell.py +++ b/synapse/lib/cell.py @@ -105,8 +105,8 @@ def wrapped(self, *args, **kwargs): raise s_exc.AuthDeny(mesg=f'User is not an admin [{self.user.name}]', user=self.user.iden, username=self.user.name) if log: - logger.info(f'Executing [{func.__qualname__}] as [{self.user.name}] with args [{args}[{kwargs}]', - extra={'synapse': {'wrapped_func': func.__qualname__}}) + extra = s_logging.getLogExtra(func=func.__qualname__, args=args, kwargs=kwargs) + logger.info('Executing remote admin API call.', extra=extra) return func(self, *args, **kwargs) @@ -3666,7 +3666,6 @@ async def getCellApi(self, link, user, path): ''' return await self.cellapi.anit(self, link, user) - # FIXME: why is this async? async def getLogExtra(self, **kwargs): ''' Get an extra dictionary for structured logging which can be used as a extra argument for loggers. @@ -3789,8 +3788,8 @@ def getArgParser(cls, conf=None): pars.add_argument('dirn', help=f'The storage directory for the {name} service.') pars.add_argument('--log-level', default='INFO', choices=list(s_const.LOG_LEVEL_CHOICES.keys()), - type=s_logging.level, - help='Deprecated. Please use SYN_LOG_LEVEL environment variable.', type=str.upper) + type=s_logging.normLogLevel, + help='Deprecated. 
Please use SYN_LOG_LEVEL environment variable.') pars.add_argument('--structured-logging', default=True, action='store_true', help='Deprecated. Please use SYN_LOG_STRUCT environment variable.') diff --git a/synapse/lib/httpapi.py b/synapse/lib/httpapi.py index a2bb0cd49ef..51362b6151f 100644 --- a/synapse/lib/httpapi.py +++ b/synapse/lib/httpapi.py @@ -1373,7 +1373,7 @@ async def _runHttpExt(self, meth, path): # change the status code or the response headers. We just have to # log the error and move along. mesg = f'Extended HTTP API {iden} tried to set code after sending body.' - logger.error(mesg) + logger.error(mesg, extra=await core.getLogExtra()) continue rcode = True @@ -1385,7 +1385,7 @@ async def _runHttpExt(self, meth, path): # change the status code or the response headers. We just have to # log the error and move along. mesg = f'Extended HTTP API {iden} tried to set headers after sending body.' - logger.error(mesg) + logger.error(mesg, extra=await core.getLogExtra()) continue for hkey, hval in info['headers'].items(): self.set_header(hkey, hval) @@ -1405,7 +1405,7 @@ async def _runHttpExt(self, meth, path): elif mtyp == 'err': errname, erfo = info mesg = f'Error executing Extended HTTP API {iden}: {errname} {erfo.get("mesg")}' - logger.error(mesg) + logger.error(mesg, extra=await core.getLogExtra()) if rbody: # We've already flushed() the stream at this point, so we cannot # change the status code or the response headers. 
We just have to @@ -1423,7 +1423,8 @@ async def _runHttpExt(self, meth, path): except Exception as e: rcode = True enfo = s_common.err(e) - logger.exception(f'Extended HTTP API {iden} encountered fatal error: {enfo[1].get("mesg")}') + extra = await core.getLogExtra(iden=iden) + logger.exception(f'Extended HTTP API {iden} encountered fatal error: {enfo[1].get("mesg")}', extra=extra) if rbody is False: self.clear() self.set_status(500) diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index 5bd23ce6385..e9df51dbe93 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -52,7 +52,7 @@ def setLogGlobal(name, valu): _glob_loginfo[name] = valu def getLogExtra(**kwargs): - return {'synapse': kwargs, 'loginfo': {}} + return {'params': kwargs, 'loginfo': {}} class Formatter(logging.Formatter): @@ -87,10 +87,10 @@ def genLogInfo(self, record): if record.exc_info: loginfo['err'] = s_common.err(record.exc_info[1], fulltb=True) - if not hasattr(record, 'synapse'): - record.synapse = {} + if not hasattr(record, 'params'): + record.params = {} - loginfo['synapse'] = record.synapse + loginfo['params'] = record.params _addLogInfo(loginfo) @@ -116,8 +116,12 @@ def setup(**conf): Configure synapse logging. 
''' conf.update(getLogConfFromEnv()) - conf.setdefault('level', logging.WARNING) - conf.setdefault('structlog', False) + + if conf.get('level') is None: + conf['level'] = logging.WARNING + + if conf.get('structlog') is None: + conf['structlog'] = False fmtclass = Formatter if not conf.get('structlog'): @@ -126,7 +130,9 @@ def setup(**conf): handler = logging.StreamHandler() handler.setFormatter(fmtclass(datefmt=conf.get('datefmt'))) - logging.basicConfig(level=conf.get('level'), handlers=(handler,)) + level = normLogLevel(conf.get('level')) + + logging.basicConfig(level=level, handlers=(handler,)) logger.info('log level set to %s', s_const.LOG_LEVEL_INVERSE_CHOICES.get(level)) @@ -150,7 +156,7 @@ def getLogConfFromEnv(): return conf -def level(valu): +def normLogLevel(valu): ''' Normalize a log level value to an integer. diff --git a/synapse/lib/snap.py b/synapse/lib/snap.py index 338c23b8a6e..be344c02e46 100644 --- a/synapse/lib/snap.py +++ b/synapse/lib/snap.py @@ -780,9 +780,9 @@ async def iterStormPodes(self, text, opts, user=None): show_storage = False - info = opts.get('_loginfo', {}) - info.update({'mode': opts.get('mode', 'storm'), 'view': self.view.iden}) - self.core._logStormQuery(text, user, info=info) + extra = opts.get('_loginfo', {}) + extra.update({'mode': opts.get('mode', 'storm'), 'view': self.view.iden}) + await self.core._logStormQuery(text, user, extra=extra) # { form: ( embedprop, ... ) } embeds = opts.get('embeds') diff --git a/synapse/lib/stormlib/log.py b/synapse/lib/stormlib/log.py index efaa6f02d52..1546d12e213 100644 --- a/synapse/lib/stormlib/log.py +++ b/synapse/lib/stormlib/log.py @@ -135,15 +135,17 @@ def getObjLocals(self): } async def _getExtra(self, extra=None): + if extra is None: - return extra + return await self.runt.snap.core.getLogExtra() + extra = await s_stormtypes.toprim(extra) if extra and not isinstance(extra, dict): mesg = f'extra provided to log call must be a dictionary compatible type. 
Got {extra.__class__.__name__} ' \ f'instead.' raise s_exc.BadArg(mesg=mesg, arg='extra') - extra = {'synapse': extra} - return extra + + return await self.runt.snap.core.getLogExtra(**extra) @s_stormtypes.stormfunc(readonly=True) async def _logDebug(self, mesg, extra=None): diff --git a/synapse/lib/view.py b/synapse/lib/view.py index 8ad8a7f17e0..3d12a5ff8e3 100644 --- a/synapse/lib/view.py +++ b/synapse/lib/view.py @@ -920,15 +920,15 @@ async def eval(self, text, opts=None): opts = self.core._initStormOpts(opts) user = self.core._userFromOpts(opts) - info = opts.get('_loginfo', {}) - info.update({'mode': opts.get('mode', 'storm'), 'view': self.iden}) - self.core._logStormQuery(text, user, info=info) - taskiden = opts.get('task') taskinfo = {'query': text, 'view': self.iden} with s_scope.enter({'user': user}): + extra = opts.get('_loginfo', {}) + extra.update({'mode': opts.get('mode', 'storm'), 'view': self.iden}) + await self.core._logStormQuery(text, user, extra=extra) + await self.core.boss.promote('storm', user=user, info=taskinfo, taskiden=taskiden) async with await self.snap(user=user) as snap: @@ -1047,9 +1047,9 @@ async def runStorm(): count += 1 else: - info = opts.get('_loginfo', {}) - info.update({'mode': opts.get('mode', 'storm'), 'view': self.iden}) - self.core._logStormQuery(text, user, info=info) + extra = opts.get('_loginfo', {}) + extra.update({'mode': opts.get('mode', 'storm'), 'view': self.iden}) + await self.core._logStormQuery(text, user, extra=extra) async for item in snap.storm(text, opts=opts, user=user): count += 1 diff --git a/synapse/tests/test_cortex.py b/synapse/tests/test_cortex.py index 7c15308f02a..f1d22ce5479 100644 --- a/synapse/tests/test_cortex.py +++ b/synapse/tests/test_cortex.py @@ -1242,7 +1242,7 @@ async def test_cortex_storm_dmon_log(self): mesg = stream.jsonlines()[0] self.eq(mesg.get('message'), f'Running dmon {iden}') - self.eq(mesg['synapse'].get('iden'), iden) + self.eq(mesg['params'].get('iden'), iden) opts = 
{'vars': {'iden': iden}} logs = await core.callStorm('return($lib.dmon.log($iden))', opts=opts) @@ -3494,7 +3494,7 @@ async def test_storm_logging(self): self.true(await stream.wait(4)) mesg = stream.jsonlines()[0] - self.eq(mesg['synapse'].get('view'), view) + self.eq(mesg['params'].get('view'), view) async def test_strict(self): @@ -8177,7 +8177,7 @@ async def test_cortex_user_scope(self): mesg = [m for m in msgs if 'Added user' in m.get('message')][0] self.eq('Added user=lowuser', mesg.get('message')) self.eq('admin', mesg.get('username')) - self.eq('lowuser', mesg['synapse'].get('target_username')) + self.eq('lowuser', mesg['params'].get('target_username')) with self.getStructuredAsyncLoggerStream('synapse.lib.cell') as stream: @@ -8191,7 +8191,7 @@ async def test_cortex_user_scope(self): mesg = [m for m in msgs if 'Set admin' in m.get('message')][0] self.isin('Set admin=True for lowuser', mesg.get('message')) self.eq('admin', mesg.get('username')) - self.eq('lowuser', mesg['synapse'].get('target_username')) + self.eq('lowuser', mesg['params'].get('target_username')) async def test_cortex_ext_httpapi(self): # Cortex API tests for Extended HttpAPI @@ -8399,12 +8399,12 @@ async def _hang(*args, **kwargs): self.len(2, msgs) self.eq(msgs[0].get('message'), f'Offloading Storm query to mirror 01.core.{ahanet}.') - self.eq(msgs[0]['synapse'].get('hash'), qhash) - self.eq(msgs[0]['synapse'].get('mirror'), f'01.core.{ahanet}') + self.eq(msgs[0]['params'].get('hash'), qhash) + self.eq(msgs[0]['params'].get('mirror'), f'01.core.{ahanet}') self.eq(msgs[1].get('message'), f'Executing storm query {{{q}}} as [root]') - self.eq(msgs[1]['synapse'].get('hash'), qhash) - self.eq(msgs[1]['synapse'].get('pool:from'), f'00.core.{ahanet}') + self.eq(msgs[1]['params'].get('hash'), qhash) + self.eq(msgs[1]['params'].get('pool:from'), f'00.core.{ahanet}') # callStorm() q = 'inet:asn=0 return($lib.true)' @@ -8418,12 +8418,12 @@ async def _hang(*args, **kwargs): self.len(2, msgs) 
self.eq(msgs[0].get('message'), f'Offloading Storm query to mirror 01.core.{ahanet}.') - self.eq(msgs[0]['synapse'].get('hash'), qhash) - self.eq(msgs[0]['synapse'].get('mirror'), f'01.core.{ahanet}') + self.eq(msgs[0]['params'].get('hash'), qhash) + self.eq(msgs[0]['params'].get('mirror'), f'01.core.{ahanet}') self.eq(msgs[1].get('message'), f'Executing storm query {{{q}}} as [root]') - self.eq(msgs[1]['synapse'].get('hash'), qhash) - self.eq(msgs[1]['synapse'].get('pool:from'), f'00.core.{ahanet}') + self.eq(msgs[1]['params'].get('hash'), qhash) + self.eq(msgs[1]['params'].get('pool:from'), f'00.core.{ahanet}') # exportStorm() q = 'inet:asn=0' @@ -8437,12 +8437,12 @@ async def _hang(*args, **kwargs): self.len(2, msgs) self.eq(msgs[0].get('message'), f'Offloading Storm query to mirror 01.core.{ahanet}.') - self.eq(msgs[0]['synapse'].get('hash'), qhash) - self.eq(msgs[0]['synapse'].get('mirror'), f'01.core.{ahanet}') + self.eq(msgs[0]['params'].get('hash'), qhash) + self.eq(msgs[0]['params'].get('mirror'), f'01.core.{ahanet}') self.eq(msgs[1].get('message'), f'Executing storm query {{{q}}} as [root]') - self.eq(msgs[1]['synapse'].get('hash'), qhash) - self.eq(msgs[1]['synapse'].get('pool:from'), f'00.core.{ahanet}') + self.eq(msgs[1]['params'].get('hash'), qhash) + self.eq(msgs[1]['params'].get('pool:from'), f'00.core.{ahanet}') # count() q = 'inet:asn=0' @@ -8456,12 +8456,12 @@ async def _hang(*args, **kwargs): self.len(2, msgs) self.eq(msgs[0].get('message'), f'Offloading Storm query to mirror 01.core.{ahanet}.') - self.eq(msgs[0]['synapse'].get('hash'), qhash) - self.eq(msgs[0]['synapse'].get('mirror'), f'01.core.{ahanet}') + self.eq(msgs[0]['params'].get('hash'), qhash) + self.eq(msgs[0]['params'].get('mirror'), f'01.core.{ahanet}') self.eq(msgs[1].get('message'), f'Executing storm query {{{q}}} as [root]') - self.eq(msgs[1]['synapse'].get('hash'), qhash) - self.eq(msgs[1]['synapse'].get('pool:from'), f'00.core.{ahanet}') + 
self.eq(msgs[1]['params'].get('hash'), qhash) + self.eq(msgs[1]['params'].get('pool:from'), f'00.core.{ahanet}') with patch('synapse.cortex.CoreApi.getNexsIndx', _hang): diff --git a/synapse/tests/test_lib_cell.py b/synapse/tests/test_lib_cell.py index f5f38e5edd4..a244aa030be 100644 --- a/synapse/tests/test_lib_cell.py +++ b/synapse/tests/test_lib_cell.py @@ -446,13 +446,13 @@ async def test_cell_auth(self): # @adminApi methods are allowed self.true(await proxy.adminOnly()) - mesg = "Executing [EchoAuthApi.adminOnlyLog] as [root] with args [(1, 2)[{'three': 4}]" + mesg = 'Executing remote admin API call.' with self.getStructuredAsyncLoggerStream('synapse.lib.cell', mesg) as stream: self.eq(await proxy.adminOnlyLog(1, 2, three=4), (1, 2, {'three': 4})) self.true(await stream.wait(timeout=10)) msgs = stream.jsonlines() self.len(1, msgs) - self.eq('EchoAuthApi.adminOnlyLog', msgs[0]['synapse']['wrapped_func']) + self.eq('EchoAuthApi.adminOnlyLog', msgs[0]['params']['func']) visi = await echo.auth.addUser('visi') await visi.setPasswd('foo') @@ -3162,7 +3162,7 @@ async def test_cell_check_sysctl(self): mesg += 'See https://synapse.docs.vertex.link/en/latest/synapse/devopsguide.html#performance-tuning ' mesg += 'for information about these sysctl parameters.' 
self.eq(msgs[0]['message'], mesg) - self.eq(msgs[0]['synapse']['sysctls'], [ + self.eq(msgs[0]['params']['sysctls'], [ {'name': 'vm.dirty_expire_centisecs', 'expected': 21, 'actual': sysctls['vm.dirty_expire_centisecs']}, {'name': 'vm.dirty_writeback_centisecs', 'expected': 21, 'actual': sysctls['vm.dirty_writeback_centisecs']}, ]) diff --git a/synapse/tests/test_lib_stormlib_cortex.py b/synapse/tests/test_lib_stormlib_cortex.py index 7bddbfc17f0..3f2dd1fa3e6 100644 --- a/synapse/tests/test_lib_stormlib_cortex.py +++ b/synapse/tests/test_lib_stormlib_cortex.py @@ -305,7 +305,7 @@ async def test_libcortex_httpapi_methods(self): self.eq(resp.status, 200) self.true(await stream.wait(timeout=12)) msgs = stream.jsonlines() - self.eq(msgs[0]['synapse'].get('httpapi'), echoiden) + self.eq(msgs[0]['params'].get('httpapi'), echoiden) core.stormlog = False # Sad paths on the $request methods diff --git a/synapse/tests/test_lib_stormlib_log.py b/synapse/tests/test_lib_stormlib_log.py index 1496351f588..fa113b2c004 100644 --- a/synapse/tests/test_lib_stormlib_log.py +++ b/synapse/tests/test_lib_stormlib_log.py @@ -57,4 +57,4 @@ async def test_stormlib_log(self): mesg = msgs[1] self.eq(mesg.get('logger').get('name'), 'synapse.storm.log') self.eq(mesg.get('message'), 'struct2 message') - self.eq(mesg['synapse'].get('key'), 'valu') + self.eq(mesg['params'].get('key'), 'valu') diff --git a/synapse/tests/test_lib_stormtypes.py b/synapse/tests/test_lib_stormtypes.py index d93fd71e8c3..7307d423695 100644 --- a/synapse/tests/test_lib_stormtypes.py +++ b/synapse/tests/test_lib_stormtypes.py @@ -4928,7 +4928,7 @@ async def getCronJob(text): self.true(await stream.wait(6)) mesg = stream.jsonlines()[0] self.eq(mesg['message'], f'm3 cron {guid}') - self.eq(mesg['synapse']['iden'], guid) + self.eq(mesg['params']['iden'], guid) ################## diff --git a/synapse/tests/test_lib_trigger.py b/synapse/tests/test_lib_trigger.py index 4b17073f50d..b13fca03eff 100644 --- 
a/synapse/tests/test_lib_trigger.py +++ b/synapse/tests/test_lib_trigger.py @@ -253,9 +253,9 @@ async def test_trigger_basics(self): await core.nodes('[ test:str=logit ]') self.true(await stream.wait(6)) msgs = stream.jsonlines() - mesg = [m for m in msgs if m['synapse'].get('iden') == tdef.get('iden')][0] + mesg = [m for m in msgs if m['params'].get('iden') == tdef.get('iden')][0] self.eq(mesg['message'], f'test trigger {tdef.get("iden")}') - self.eq(mesg['synapse']['iden'], tdef.get('iden')) + self.eq(mesg['params']['iden'], tdef.get('iden')) # Attempting to add trigger with existing iden raises with self.raises(s_exc.DupIden): From 7e3a4a2b9dafec4de369a77af66e27283a3c564d Mon Sep 17 00:00:00 2001 From: visi Date: Tue, 25 Feb 2025 18:31:53 -0500 Subject: [PATCH 31/52] wip --- synapse/axon.py | 36 +-- synapse/cortex.py | 24 +- synapse/lib/agenda.py | 201 +++++++++-------- synapse/lib/aha.py | 36 +-- synapse/lib/cell.py | 90 ++++---- synapse/lib/const.py | 2 +- synapse/lib/httpapi.py | 65 +++--- synapse/lib/stormlib/log.py | 9 +- synapse/lib/stormlib/storm.py | 8 +- synapse/tests/test_cmds_cortex.py | 5 +- synapse/tests/test_cortex.py | 79 +++---- synapse/tests/test_daemon.py | 21 +- synapse/tests/test_datamodel.py | 31 +-- synapse/tests/test_lib_agenda.py | 36 ++- synapse/tests/test_lib_aha.py | 16 +- synapse/tests/test_lib_boss.py | 5 +- synapse/tests/test_lib_cell.py | 97 ++++---- synapse/tests/test_lib_config.py | 6 +- synapse/tests/test_lib_httpapi.py | 123 +++++------ synapse/tests/test_lib_link.py | 4 +- synapse/tests/test_lib_lmdbslab.py | 4 +- synapse/tests/test_lib_modelrev.py | 5 +- synapse/tests/test_lib_module.py | 16 +- synapse/tests/test_lib_multislabseqn.py | 8 +- synapse/tests/test_lib_storm.py | 39 ++-- synapse/tests/test_lib_stormlib_cortex.py | 36 ++- synapse/tests/test_lib_stormlib_log.py | 32 ++- synapse/tests/test_lib_stormlib_model.py | 5 +- synapse/tests/test_lib_stormtypes.py | 4 +- synapse/tests/test_lib_trigger.py | 21 +- 
synapse/tests/test_telepath.py | 18 +- synapse/tests/test_utils.py | 4 +- synapse/tests/utils.py | 257 ++++------------------ 33 files changed, 552 insertions(+), 791 deletions(-) diff --git a/synapse/axon.py b/synapse/axon.py index 61ca0a0e8bf..667340c0fd5 100644 --- a/synapse/axon.py +++ b/synapse/axon.py @@ -895,14 +895,15 @@ async def _axonHealth(self, health): health.update('axon', 'nominal', '', data=await self.metrics()) async def _migrateAxonMetrics(self): - logger.warning('migrating Axon metrics data out of hive') + extra = self.getLogExtra() + logger.warning('migrating Axon metrics data out of hive', extra=extra) async with await self.hive.open(('axon', 'metrics')) as hivenode: axonmetrics = await hivenode.dict() self.axonmetrics.set('size:bytes', axonmetrics.get('size:bytes', 0)) self.axonmetrics.set('file:count', axonmetrics.get('file:count', 0)) - logger.warning('...Axon metrics migration complete!') + logger.warning('...Axon metrics migration complete!', extra=extra) async def _initBlobStor(self): @@ -925,7 +926,8 @@ async def _initBlobStor(self): async def _setStorVers01(self): - logger.warning('Updating Axon storage version (adding offset index). This may take a while.') + extra = self.getLogExtra() + logger.warning('Updating Axon storage version (adding offset index). 
This may take a while.', extra=extra) offs = 0 cursha = b'' @@ -1067,7 +1069,7 @@ async def get(self, sha256, offs=None, size=None): fsize = await self._reqHas(sha256) fhash = s_common.ehex(sha256) - logger.debug(f'Getting blob [{fhash}].', extra=await self.getLogExtra(sha256=fhash)) + logger.debug('axon.get()', extra=self.getLogExtra(sha256=fhash)) if offs is not None or size is not None: @@ -1203,7 +1205,7 @@ async def hashset(self, sha256): await self._reqHas(sha256) fhash = s_common.ehex(sha256) - logger.debug(f'Getting blob [{fhash}].', extra=await self.getLogExtra(sha256=fhash)) + logger.debug('axon.hashset()', extra=self.getLogExtra(sha256=fhash)) hashset = s_hashset.HashSet() @@ -1251,7 +1253,7 @@ async def _populate(self, sha256, genr, size): return int.from_bytes(byts, 'big') fhash = s_common.ehex(sha256) - logger.debug(f'Saving blob [{fhash}].', extra=await self.getLogExtra(sha256=fhash)) + logger.debug('axon.save()', extra=self.getLogExtra(sha256=fhash)) size = await self._saveFileGenr(sha256, genr, size) @@ -1378,7 +1380,7 @@ async def _axonFileDel(self, sha256): return False fhash = s_common.ehex(sha256) - logger.debug(f'Deleting blob [{fhash}].', extra=await self.getLogExtra(sha256=fhash)) + logger.debug('axon.del()', extra=self.getLogExtra(sha256=fhash)) size = int.from_bytes(byts, 'big') self.axonmetrics.inc('file:count', valu=-1) @@ -1511,9 +1513,11 @@ async def jsonlines(self, sha256, errors='ignore'): try: yield json.loads(line) except json.JSONDecodeError as e: - logger.exception(f'Bad json line encountered for {sha256}') - raise s_exc.BadJsonText(mesg=f'Bad json line encountered while processing {sha256}, ({e})', - sha256=sha256) from None + # TODO: this feels like it should not be a log... 
+ extra = self.getLogExtra(sha256=sha256, err=str(e)) + logger.exception('Bad JSON line encountered', extra=extra) + mesg = f'Bad JSON line while processing {sha256}: {e}' + raise s_exc.BadJsonText(mesg=mesg, sha256=sha256) from None async def unpack(self, sha256, fmt, offs=0): ''' @@ -1664,7 +1668,8 @@ async def postfiles(self, fields, url, params=None, headers=None, method='POST', return info except Exception as e: - logger.exception(f'Error POSTing files to [{s_urlhelp.sanitizeUrl(url)}]') + extra = self.getLogExtra(url=s_urlhelp.sanitizeUrl(url)) + logger.exception(f'Error POSTing file', extra=extra) err = s_common.err(e) errmsg = err[1].get('mesg') if errmsg: @@ -1711,7 +1716,8 @@ async def wput(self, sha256, url, params=None, headers=None, method='PUT', ssl=T return info except Exception as e: - logger.exception(f'Error streaming [{sha256}] to [{s_urlhelp.sanitizeUrl(url)}]') + extra = self.getLogExtra(sha256=sha256, url=s_urlhelp.sanitizeUrl(url)) + logger.exception('axon.wput() error streaming blob', extra=extra) err = s_common.err(e) errmsg = err[1].get('mesg') if errmsg: @@ -1814,7 +1820,8 @@ async def wget(self, url, params=None, headers=None, json=None, body=None, metho Returns: dict: An information dictionary containing the results of the request. 
''' - logger.debug(f'Wget called for [{url}].', extra=await self.getLogExtra(url=s_urlhelp.sanitizeUrl(url))) + extra = self.getLogExtra(url=s_urlhelp.sanitizeUrl(url)) + logger.debug('axon.wget()', extra=extra) ssl = self.getCachedSslCtx(opts=ssl_opts, verify=ssl) @@ -1848,7 +1855,8 @@ async def wget(self, url, params=None, headers=None, json=None, body=None, metho raise except Exception as e: - logger.exception(f'Failed to wget {s_urlhelp.sanitizeUrl(url)}') + extra = self.getLogExtra(url=s_urlhelp.sanitizeUrl(url)) + logger.exception('axon.wget() failed', extra=extra) err = s_common.err(e) errmsg = err[1].get('mesg') if errmsg: diff --git a/synapse/cortex.py b/synapse/cortex.py index 29673737502..561c2e8618a 100644 --- a/synapse/cortex.py +++ b/synapse/cortex.py @@ -1015,7 +1015,7 @@ async def initServiceStorage(self): mesg = f'User {useriden} ({user.name}) has a rule on the "cortex" authgate. This authgate is not used ' \ f'for permission checks and will be removed in Synapse v3.0.0.' - logger.warning(mesg, extra=await self.getLogExtra(user=useriden, username=user.name)) + logger.warning(mesg, extra=self.getLogExtra(user=useriden, username=user.name)) for roleiden in ag.gateroles.keys(): role = self.auth.role(roleiden) if role is None: @@ -1023,7 +1023,7 @@ async def initServiceStorage(self): mesg = f'Role {roleiden} ({role.name}) has a rule on the "cortex" authgate. This authgate is not used ' \ f'for permission checks and will be removed in Synapse v3.0.0.' 
- logger.warning(mesg, extra=await self.getLogExtra(role=roleiden, rolename=role.name)) + logger.warning(mesg, extra=self.getLogExtra(role=roleiden, rolename=role.name)) self._initVaults() @@ -2865,7 +2865,7 @@ async def _normStormPkg(self, pkgdef, validstorm=True): name = cdef.get('name') mesg = f"Storm command definition 'forms' key is deprecated and will be removed " \ f"in 3.0.0 (command {name} in package {pkgname})" - logger.warning(mesg, extra=await self.getLogExtra(name=name, pkgname=pkgname)) + logger.warning(mesg, extra=self.getLogExtra(name=name, pkgname=pkgname)) for gdef in pkgdef.get('graphs', ()): gdef['iden'] = s_common.guid((pkgname, gdef.get('name'))) @@ -5837,7 +5837,7 @@ async def count(self, text, opts=None): if proxy is not None: proxname = proxy._ahainfo.get('name') - extra = await self.getLogExtra(mirror=proxname, hash=s_storm.queryhash(text)) + extra = self.getLogExtra(mirror=proxname, hash=s_storm.queryhash(text)) logger.info(f'Offloading Storm query to mirror {proxname}.', extra=extra) mirropts = await self._getMirrorOpts(opts) @@ -5895,7 +5895,7 @@ async def _getMirrorProxy(self, opts): if (delta := curoffs - miroffs) > MAX_NEXUS_DELTA: mesg = (f'Pool mirror [{proxyname}] Nexus offset delta too large ' f'({delta} > {MAX_NEXUS_DELTA}), running query locally.') - logger.warning(mesg, extra=await self.getLogExtra(delta=delta, mirror=proxyname, mirror_offset=miroffs)) + logger.warning(mesg, extra=self.getLogExtra(delta=delta, mirror=proxyname, mirror_offset=miroffs)) return None return proxy @@ -5905,7 +5905,7 @@ async def _getMirrorProxy(self, opts): logger.warning('Timeout waiting for pool mirror, running query locally.') else: mesg = f'Timeout waiting for pool mirror [{proxyname}] Nexus offset, running query locally.' 
- logger.warning(mesg, extra=await self.getLogExtra(mirror=proxyname)) + logger.warning(mesg, extra=self.getLogExtra(mirror=proxyname)) await proxy.fini() return None @@ -5918,7 +5918,7 @@ async def storm(self, text, opts=None): if proxy is not None: proxname = proxy._ahainfo.get('name') - extra = await self.getLogExtra(mirror=proxname, hash=s_storm.queryhash(text)) + extra = self.getLogExtra(mirror=proxname, hash=s_storm.queryhash(text)) logger.info(f'Offloading Storm query to mirror {proxname}.', extra=extra) mirropts = await self._getMirrorOpts(opts) @@ -5952,7 +5952,7 @@ async def callStorm(self, text, opts=None): if proxy is not None: proxname = proxy._ahainfo.get('name') - extra = await self.getLogExtra(mirror=proxname, hash=s_storm.queryhash(text)) + extra = self.getLogExtra(mirror=proxname, hash=s_storm.queryhash(text)) logger.info(f'Offloading Storm query to mirror {proxname}.', extra=extra) mirropts = await self._getMirrorOpts(opts) @@ -5981,7 +5981,7 @@ async def exportStorm(self, text, opts=None): if proxy is not None: proxname = proxy._ahainfo.get('name') - extra = await self.getLogExtra(mirror=proxname, hash=s_storm.queryhash(text)) + extra = self.getLogExtra(mirror=proxname, hash=s_storm.queryhash(text)) logger.info(f'Offloading Storm query to mirror {proxname}.', extra=extra) mirropts = await self._getMirrorOpts(opts) @@ -6173,7 +6173,7 @@ async def _logStormQuery(self, text, user, extra=None): extra['text'] = text extra['hash'] = s_storm.queryhash(text) - extra = await self.getLogExtra(**extra) + extra = self.getLogExtra(**extra) stormlogger.log(self.stormloglvl, 'Executing storm query as [%s]', user.name, extra=extra) async def getNodeByNdef(self, ndef, view=None): @@ -6622,7 +6622,7 @@ async def enableCronJob(self, iden): ''' await self.agenda.enable(iden) await self.feedBeholder('cron:enable', {'iden': iden}, gates=[iden]) - logger.info(f'Enabled cron job {iden}', extra=await self.getLogExtra(iden=iden, status='MODIFY')) + logger.info(f'Enabled 
cron job {iden}', extra=self.getLogExtra(iden=iden, status='MODIFY')) @s_nexus.Pusher.onPushAuto('cron:disable') async def disableCronJob(self, iden): @@ -6635,7 +6635,7 @@ async def disableCronJob(self, iden): await self.agenda.disable(iden) await self._killCronTask(iden) await self.feedBeholder('cron:disable', {'iden': iden}, gates=[iden]) - logger.info(f'Disabled cron job {iden}', extra=await self.getLogExtra(iden=iden, status='MODIFY')) + logger.info(f'Disabled cron job {iden}', extra=self.getLogExtra(iden=iden, status='MODIFY')) async def killCronTask(self, iden): if self.agenda.appts.get(iden) is None: diff --git a/synapse/lib/agenda.py b/synapse/lib/agenda.py index ff54475bc2c..4b2a1e351c1 100644 --- a/synapse/lib/agenda.py +++ b/synapse/lib/agenda.py @@ -14,6 +14,8 @@ import synapse.lib.base as s_base import synapse.lib.coro as s_coro +import synapse.lib.scope as s_scope +import synapse.lib.logging as s_logging # Agenda: manages running one-shot and periodic tasks in the future ("appointments") @@ -401,7 +403,8 @@ def updateNexttime(self, now): if nexttime == 0.0: # We blew by and missed a fixed-year appointment, either due to clock shenanigans, this query going # really long, or the initial requirement being in the past - logger.warning(f'Missed an appointment: {rec}') + extra = self.getLogExtra() + logger.warning('Missed an appointment', extra=extra) del self.recs[i] continue if nexttime < lowtime: @@ -420,11 +423,19 @@ def updateNexttime(self, now): return self.nexttime + def getLogExtra(self, **kwargs): + kwargs['cron'] = { + 'iden': self.iden, + 'name': self.name, + 'view': self.view, + } + return self.stor.core.getLogExtra(**kwargs) + async def edits(self, edits): for name, valu in edits.items(): if name not in self.__class__._synced_attrs: - extra = await self.stor.core.getLogExtra(name=name, valu=valu) - logger.warning('_Appt.edits() Invalid attribute received: %s = %r', name, valu, extra=extra) + extra = self.getLogExtra(prop=name, valu=valu) + 
logger.warning('Invalid cron property edit', extra=extra) continue else: @@ -476,7 +487,8 @@ async def _load_all(self): self._next_indx = max(self._next_indx, appt.indx + 1) except (s_exc.InconsistentStorage, s_exc.BadStorageVersion, s_exc.BadTime, TypeError, KeyError, UnicodeDecodeError) as e: - logger.warning('Invalid appointment %r found in storage: %r. Removing.', iden, e) + extra = self.core.getLogExtra(cron={'iden': iden}, err=str(e)) + logger.warning('Removing invalid cron job', extra=extra) to_delete.append(iden) continue @@ -716,7 +728,8 @@ async def clearRunningStatus(self): '''Used for clearing the running state at startup or change of leadership.''' for appt in list(self.appts.values()): if appt.isrunning: - logger.debug(f'Clearing the isrunning flag for {appt.iden}') + extra = appt.getLogExtra() + logger.debug('Clearing isrunning flag', extra=extra) edits = { 'isrunning': False, @@ -764,24 +777,15 @@ async def runloop(self): continue if appt.isrunning: # pragma: no cover - mesg = f'Appointment {appt.iden} {appt.name} is still running from previous time when scheduled' \ - f' to run. Skipping.' - logger.warning(mesg, - extra={'synapse': {'iden': appt.iden, 'name': appt.name}}) + extra = appt.getLogExtra() + logger.warning('Cron job is still running. 
Skipping.', extra=extra) + else: try: await self._execute(appt) except Exception as e: - extra = {'iden': appt.iden, 'name': appt.name, 'user': appt.creator, 'view': appt.view} - user = self.core.auth.user(appt.creator) - if user is not None: - extra['username'] = user.name - if isinstance(e, s_exc.SynErr): - mesg = e.get('mesg', str(e)) - else: # pragma: no cover - mesg = str(e) - logger.exception(f'Agenda error running appointment {appt.iden} {appt.name}: {mesg}', - extra={'synapse': extra}) + extra = appt.getLogExtra() + logger.exception('Cron job error', extra=extra) await self._markfailed(appt, f'error: {e}') async def _execute(self, appt): @@ -790,24 +794,19 @@ async def _execute(self, appt): ''' user = self.core.auth.user(appt.creator) if user is None: - logger.warning(f'Unknown user {appt.creator} in stored appointment {appt.iden} {appt.name}', - extra={'synapse': {'iden': appt.iden, 'name': appt.name, 'user': appt.creator}}) + logger.warning('Cron job has unknown user', extra=appt.getLogExtra()) await self._markfailed(appt, 'unknown user') return locked = user.info.get('locked') if locked: - logger.warning(f'Cron {appt.iden} {appt.name} failed because creator {user.name} is locked', - extra={'synapse': {'iden': appt.iden, 'name': appt.name, 'user': appt.creator, - 'username': user.name}}) + logger.warning('Cron job has locked user', extra=appt.getLogExtra()) await self._markfailed(appt, 'locked user') return view = self.core.getView(iden=appt.view, user=user) if view is None: - logger.warning(f'Unknown view {appt.view} in stored appointment {appt.iden} {appt.name}', - extra={'synapse': {'iden': appt.iden, 'name': appt.name, 'user': appt.creator, - 'username': user.name, 'view': appt.view}}) + logger.warning('Cron job has unknown view', extra=appt.getLogExtra()) await self._markfailed(appt, 'unknown view') return @@ -837,94 +836,94 @@ async def _runJob(self, user, appt): ''' Actually run the storm query, updating the appropriate statistics and results ''' - 
count = 0 - edits = { - 'isrunning': True, - 'laststarttime': self._getNowTick(), - 'startcount': appt.startcount + 1, - } - await self.core.addCronEdits(appt.iden, edits) + with s_scope.enter({'user': user}): + + count = 0 + edits = { + 'isrunning': True, + 'laststarttime': self._getNowTick(), + 'startcount': appt.startcount + 1, + } + await self.core.addCronEdits(appt.iden, edits) - logger.info(f'Agenda executing for iden={appt.iden}, name={appt.name} user={user.name}, view={appt.view}, query={appt.query}', - extra={'synapse': {'iden': appt.iden, 'name': appt.name, 'user': user.iden, 'text': appt.query, - 'username': user.name, 'view': appt.view}}) - starttime = self._getNowTick() + extra = appt.getLogExtra(text=appt.query) + logger.info('Cron job starting', extra=extra) - success = False - loglevel = s_common.normLogLevel(appt.loglevel) + starttime = self._getNowTick() - try: - opts = { - 'user': user.iden, - 'view': appt.view, - 'mirror': appt.pool, - 'vars': {'auto': {'iden': appt.iden, 'type': 'cron'}}, - '_loginfo': { - 'cron': appt.iden + success = False + loglevel = s_logging.normLogLevel(appt.loglevel) + + try: + opts = { + 'user': user.iden, + 'view': appt.view, + 'mirror': appt.pool, + 'vars': {'auto': {'iden': appt.iden, 'type': 'cron'}}, + '_loginfo': { + 'cron': appt.iden + } } - } - opts = self.core._initStormOpts(opts) + opts = self.core._initStormOpts(opts) - await self.core.feedBeholder('cron:start', {'iden': appt.iden}) + await self.core.feedBeholder('cron:start', {'iden': appt.iden}) - async for mesg in self.core.storm(appt.query, opts=opts): + async for mesg in self.core.storm(appt.query, opts=opts): - if mesg[0] == 'node': - count += 1 + if mesg[0] == 'node': + count += 1 - elif mesg[0] == 'warn' and loglevel <= logging.WARNING: - text = mesg[1].get('mesg', '') - extra = await self.core.getLogExtra(cron=appt.iden, **mesg[1]) - logger.warning(f'Cron job {appt.iden} issued warning: {text}', extra=extra) + elif mesg[0] == 'warn' and loglevel <= 
logging.WARNING: + extra = appt.getLogExtra(**mesg[1]) + logger.warning('Cron job emitted warning', extra=extra) - elif mesg[0] == 'err': - excname, errinfo = mesg[1] - errinfo.pop('eline', None) - errinfo.pop('efile', None) - excctor = getattr(s_exc, excname, s_exc.SynErr) - raise excctor(**errinfo) + elif mesg[0] == 'err': + excname, errinfo = mesg[1] + errinfo.pop('eline', None) + errinfo.pop('efile', None) + excctor = getattr(s_exc, excname, s_exc.SynErr) + raise excctor(**errinfo) - except asyncio.CancelledError: - result = 'cancelled' - raise + except asyncio.CancelledError: + result = 'cancelled' + raise - except Exception as e: - result = f'raised exception {e}' - logger.exception(f'Agenda job {appt.iden} {appt.name} raised exception', - extra={'synapse': {'iden': appt.iden, 'name': appt.name}} - ) - else: - success = True - result = f'finished successfully with {count} nodes' + except Exception as e: + result = f'raised exception {e}' + logger.exception('Cron job raised an exception', extra=extra) + + else: + success = True + result = f'finished successfully with {count} nodes' + + finally: + finishtime = self._getNowTick() + if not success: + appt.lasterrs.append(result) + edits = { + 'errcount': appt.errcount + 1, + # we only care about the last five errors + 'lasterrs': list(appt.lasterrs[-5:]), + } + + if self.core.isactive: + await self.core.addCronEdits(appt.iden, edits) + + took = finishtime - starttime + + extra = appt.getLogExtra(result=result, took=took) + logger.info('Cron job completed', extra=extra) + + if not self.core.isactive: + logger.warning('Cron job status is not saved. 
We are no longer the leader.', extra=extra) - finally: - finishtime = self._getNowTick() - if not success: - appt.lasterrs.append(result) edits = { - 'errcount': appt.errcount + 1, - # we only care about the last five errors - 'lasterrs': list(appt.lasterrs[-5:]), + 'lastfinishtime': finishtime, + 'isrunning': False, + 'lastresult': result, } - if self.core.isactive: await self.core.addCronEdits(appt.iden, edits) - took = finishtime - starttime - mesg = f'Agenda completed query for iden={appt.iden} name={appt.name} with result "{result}" ' \ - f'took {took:.3f}s' - if not self.core.isactive: - mesg = mesg + ' Agenda status will not be saved since the Cortex is no longer the leader.' - logger.info(mesg, extra={'synapse': {'iden': appt.iden, 'name': appt.name, 'user': user.iden, - 'result': result, 'username': user.name, 'took': took}}) - edits = { - 'lastfinishtime': finishtime, - 'isrunning': False, - 'lastresult': result, - } - if self.core.isactive: - await self.core.addCronEdits(appt.iden, edits) - - if not self.isfini: - # fire beholder event before invoking nexus change (in case readonly) - await self.core.feedBeholder('cron:stop', {'iden': appt.iden}) + if not self.isfini: + await self.core.feedBeholder('cron:stop', {'iden': appt.iden}) diff --git a/synapse/lib/aha.py b/synapse/lib/aha.py index b8cfb846b3e..902df18cfad 100644 --- a/synapse/lib/aha.py +++ b/synapse/lib/aha.py @@ -204,11 +204,11 @@ async def addAhaSvc(self, name, info, network=None): async def fini(): if self.cell.isfini: # pragma: no cover mesg = f'{self.cell.__class__.__name__} is fini. Unable to set {name}@{network} as down.' 
- logger.warning(mesg, await self.cell.getLogExtra(name=svcname, netw=svcnetw)) + logger.warning(mesg, extra=self.cell.getLogExtra(name=svcname, netw=svcnetw)) return logger.info(f'AhaCellApi fini, setting service offline [{name}]', - extra=await self.cell.getLogExtra(name=svcname, netw=svcnetw)) + extra=self.cell.getLogExtra(name=svcname, netw=svcnetw)) coro = self.cell.setAhaSvcDown(name, sess, network=network) self.cell.schedCoro(coro) # this will eventually execute or get cancelled. @@ -388,14 +388,14 @@ async def _getSharedItem(self, name): anam = conf.get('aha:name') anet = conf.get('aha:network') mesg = f'Retrieved service provisioning info for {anam}.{anet} iden {name}' - logger.info(mesg, extra=await self.aha.getLogExtra(iden=name, name=anam, netw=anet)) + logger.info(mesg, extra=self.aha.getLogExtra(iden=name, name=anam, netw=anet)) return ProvApi(self.aha, provinfo) userinfo = await self.aha.getAhaUserEnroll(name) if userinfo is not None: unam = userinfo.get('name') mesg = f'Retrieved user provisioning info for {unam} iden {name}' - logger.info(mesg, extra=await self.aha.getLogExtra(iden=name, name=unam)) + logger.info(mesg, extra=self.aha.getLogExtra(iden=name, name=unam)) await self.aha.delAhaUserEnroll(name) return EnrollApi(self.aha, userinfo) @@ -403,7 +403,7 @@ async def _getSharedItem(self, name): if clone is not None: host = clone.get('host') mesg = f'Retrieved AHA clone info for {host} iden {name}' - logger.info(mesg, extra=await self.aha.getLogExtra(iden=name, host=host)) + logger.info(mesg, extra=self.aha.getLogExtra(iden=name, host=host)) return CloneApi(self.aha, clone) mesg = f'Invalid provisioning identifier name={name}. 
This could be' \ @@ -459,7 +459,7 @@ async def signUserCsr(self, byts): raise s_exc.BadArg(mesg=mesg) logger.info(f'Signing user CSR for [{username}], signas={ahanetw}', - extra=await self.aha.getLogExtra(name=username, signas=ahanetw)) + extra=self.aha.getLogExtra(name=username, signas=ahanetw)) pkey, cert = self.aha.certdir.signUserCsr(xcsr, ahanetw, save=False) return self.aha.certdir._certToByts(cert) @@ -491,7 +491,7 @@ async def signHostCsr(self, byts): raise s_exc.BadArg(mesg=mesg) logger.info(f'Signing host CSR for [{hostname}], signas={ahanetw}', - extra=await self.aha.getLogExtra(name=hostname, signas=ahanetw)) + extra=self.aha.getLogExtra(name=hostname, signas=ahanetw)) pkey, cert = self.aha.certdir.signHostCsr(xcsr, ahanetw, save=False) return self.aha.certdir._certToByts(cert) @@ -510,7 +510,7 @@ async def signUserCsr(self, byts): raise s_exc.BadArg(mesg=mesg) logger.info(f'Signing user CSR for [{username}], signas={ahanetw}', - extra=await self.aha.getLogExtra(name=username, signas=ahanetw)) + extra=self.aha.getLogExtra(name=username, signas=ahanetw)) pkey, cert = self.aha.certdir.signUserCsr(xcsr, ahanetw, save=False) return self.aha.certdir._certToByts(cert) @@ -916,7 +916,7 @@ async def _clearInactiveSessions(self): linkiden = svc.get('svcinfo').get('online') if linkiden not in current_sessions: logger.info(f'AhaCell activecoro setting service offline [{svcname}.{network}]', - extra=await self.getLogExtra(name=svcname, netw=network)) + extra=self.getLogExtra(name=svcname, netw=network)) await self.setAhaSvcDown(svcname, linkiden, network=network) # Wait until we are cancelled or the cell is fini. 
@@ -1007,7 +1007,7 @@ async def addAhaSvc(self, name, info, network=None): unfo = info.get('urlinfo') logger.info(f'Adding service [{svcfull}] from [{unfo.get("scheme")}://{unfo.get("host")}:{unfo.get("port")}]', - extra=await self.getLogExtra(name=svcname, netw=svcnetw)) + extra=self.getLogExtra(name=svcname, netw=svcnetw)) svcinfo = { 'name': svcfull, @@ -1172,7 +1172,7 @@ async def delAhaSvc(self, name, network=None): name = self._getAhaName(name) svcname, svcnetw, svcfull = self._nameAndNetwork(name, network) - logger.info(f'Deleting service [{svcfull}].', extra=await self.getLogExtra(name=svcname, netw=svcnetw)) + logger.info(f'Deleting service [{svcfull}].', extra=self.getLogExtra(name=svcname, netw=svcnetw)) full = ('aha', 'svcfull', svcfull) path = ('aha', 'services', svcnetw, svcname) @@ -1215,7 +1215,7 @@ async def _setAhaSvcDown(self, name, linkiden, network=None): await self.fire(f'aha:svcdown:{svcfull}', svcname=svcname, svcnetw=svcnetw) logger.info(f'Set [{svcfull}] offline.', - extra=await self.getLogExtra(name=svcname, netw=svcnetw)) + extra=self.getLogExtra(name=svcname, netw=svcnetw)) client = self.clients.pop(svcfull, None) if client is not None: @@ -1303,7 +1303,7 @@ async def genCaCert(self, network): return fd.read().decode() logger.info(f'Generating CA certificate for {network}', - extra=await self.getLogExtra(netw=network)) + extra=self.getLogExtra(netw=network)) fut = s_coro.executor(self.certdir.genCaCert, network, save=False) pkey, cert = await fut @@ -1382,7 +1382,7 @@ async def signHostCsr(self, csrtext, signas=None, sans=None): signas = hostname.split('.', 1)[1] logger.info(f'Signing host CSR for [{hostname}], signas={signas}, sans={sans}', - extra=await self.getLogExtra(hostname=hostname, signas=signas)) + extra=self.getLogExtra(hostname=hostname, signas=signas)) pkey, cert = self.certdir.signHostCsr(xcsr, signas=signas, sans=sans) @@ -1401,7 +1401,7 @@ async def signUserCsr(self, csrtext, signas=None): signas = username.split('@', 
1)[1] logger.info(f'Signing user CSR for [{username}], signas={signas}', - extra=await self.getLogExtra(name=username, signas=signas)) + extra=self.getLogExtra(name=username, signas=signas)) pkey, cert = self.certdir.signUserCsr(xcsr, signas=signas) @@ -1459,7 +1459,7 @@ async def addAhaClone(self, host, port=27492, conf=None): await self._push('aha:clone:add', clone) logger.info(f'Created AHA clone provisioning for {host} with iden {iden}', - extra=await self.getLogExtra(iden=iden, name=host, netw=network)) + extra=self.getLogExtra(iden=iden, name=host, netw=network)) return self._getProvClientUrl(iden) @@ -1542,7 +1542,7 @@ async def addAhaSvcProv(self, name, provinfo=None): iden = await self._push('aha:svc:prov:add', provinfo) logger.info(f'Created service provisioning for {name}.{netw} with iden {iden}', - extra=await self.getLogExtra(iden=iden, name=name, netw=netw)) + extra=self.getLogExtra(iden=iden, name=name, netw=netw)) return self._getProvClientUrl(iden) @@ -1648,7 +1648,7 @@ async def addAhaUserEnroll(self, name, userinfo=None, again=False): iden = await self._push('aha:enroll:add', userinfo) logger.info(f'Created user provisioning for {name} with iden {iden}', - extra=await self.getLogExtra(iden=iden, name=name)) + extra=self.getLogExtra(iden=iden, name=name)) return self._getProvClientUrl(iden) diff --git a/synapse/lib/cell.py b/synapse/lib/cell.py index f4abe78229c..e7b9be43080 100644 --- a/synapse/lib/cell.py +++ b/synapse/lib/cell.py @@ -1762,7 +1762,7 @@ async def _runSysctlLoop(self): mesg += 'See https://synapse.docs.vertex.link/en/latest/synapse/devopsguide.html#performance-tuning ' mesg += 'for information about these sysctl parameters.' 
- extra = await self.getLogExtra(sysctls=fixvals) + extra = self.getLogExtra(sysctls=fixvals) logger.warning(mesg, extra=extra) await self.waitfini(self.SYSCTL_CHECK_FREQ) @@ -2845,7 +2845,7 @@ async def addUserRule(self, iden, rule, indx=None, gateiden=None): user = await self.auth.reqUser(iden) retn = await user.addRule(rule, indx=indx, gateiden=gateiden) logger.info(f'Added rule={rule} on user {user.name} for gateiden={gateiden}', - extra=await self.getLogExtra(target_user=user.iden, target_username=user.name, + extra=self.getLogExtra(target_user=user.iden, target_username=user.name, rule=rule, gateiden=gateiden, status='MODIFY')) return retn @@ -2853,21 +2853,21 @@ async def addRoleRule(self, iden, rule, indx=None, gateiden=None): role = await self.auth.reqRole(iden) retn = await role.addRule(rule, indx=indx, gateiden=gateiden) logger.info(f'Added rule={rule} on role {role.name} for gateiden={gateiden}', - extra=await self.getLogExtra(target_role=role.iden, target_rolename=role.name, + extra=self.getLogExtra(target_role=role.iden, target_rolename=role.name, rule=rule, gateiden=gateiden, status='MODIFY')) return retn async def delUserRule(self, iden, rule, gateiden=None): user = await self.auth.reqUser(iden) logger.info(f'Removing rule={rule} on user {user.name} for gateiden={gateiden}', - extra=await self.getLogExtra(target_user=user.iden, target_username=user.name, + extra=self.getLogExtra(target_user=user.iden, target_username=user.name, rule=rule, gateiden=gateiden, status='MODIFY')) return await user.delRule(rule, gateiden=gateiden) async def delRoleRule(self, iden, rule, gateiden=None): role = await self.auth.reqRole(iden) logger.info(f'Removing rule={rule} on role {role.name} for gateiden={gateiden}', - extra=await self.getLogExtra(target_role=role.iden, target_rolename=role.name, + extra=self.getLogExtra(target_role=role.iden, target_rolename=role.name, rule=rule, gateiden=gateiden, status='MODIFY')) return await role.delRule(rule, gateiden=gateiden) @@ 
-2875,14 +2875,14 @@ async def setUserRules(self, iden, rules, gateiden=None): user = await self.auth.reqUser(iden) await user.setRules(rules, gateiden=gateiden) logger.info(f'Set user rules = {rules} on user {user.name} for gateiden={gateiden}', - extra=await self.getLogExtra(target_user=user.iden, target_username=user.name, + extra=self.getLogExtra(target_user=user.iden, target_username=user.name, rules=rules, gateiden=gateiden, status='MODIFY')) async def setRoleRules(self, iden, rules, gateiden=None): role = await self.auth.reqRole(iden) await role.setRules(rules, gateiden=gateiden) logger.info(f'Set role rules = {rules} on role {role.name} for gateiden={gateiden}', - extra=await self.getLogExtra(target_role=role.iden, target_rolename=role.name, + extra=self.getLogExtra(target_role=role.iden, target_rolename=role.name, rules=rules, gateiden=gateiden, status='MODIFY')) async def setRoleName(self, iden, name): @@ -2890,14 +2890,14 @@ async def setRoleName(self, iden, name): oname = role.name await role.setName(name) logger.info(f'Set name={name} from {oname} on role iden={role.iden}', - extra=await self.getLogExtra(target_role=role.iden, target_rolename=role.name, + extra=self.getLogExtra(target_role=role.iden, target_rolename=role.name, status='MODIFY')) async def setUserAdmin(self, iden, admin, gateiden=None): user = await self.auth.reqUser(iden) await user.setAdmin(admin, gateiden=gateiden) logger.info(f'Set admin={admin} for {user.name} for gateiden={gateiden}', - extra=await self.getLogExtra(target_user=user.iden, target_username=user.name, + extra=self.getLogExtra(target_user=user.iden, target_username=user.name, gateiden=gateiden, status='MODIFY')) async def addUserRole(self, useriden, roleiden, indx=None): @@ -2905,7 +2905,7 @@ async def addUserRole(self, useriden, roleiden, indx=None): role = await self.auth.reqRole(roleiden) await user.grant(roleiden, indx=indx) logger.info(f'Granted role {role.name} to user {user.name}', - extra=await 
self.getLogExtra(target_user=user.iden, target_username=user.name, + extra=self.getLogExtra(target_user=user.iden, target_username=user.name, target_role=role.iden, target_rolename=role.name, status='MODIFY')) @@ -2913,7 +2913,7 @@ async def setUserRoles(self, useriden, roleidens): user = await self.auth.reqUser(useriden) await user.setRoles(roleidens) logger.info(f'Set roleidens={roleidens} on user {user.name}', - extra=await self.getLogExtra(target_user=user.iden, target_username=user.name, + extra=self.getLogExtra(target_user=user.iden, target_username=user.name, roleidens=roleidens, status='MODIFY')) async def delUserRole(self, useriden, roleiden): @@ -2921,14 +2921,14 @@ async def delUserRole(self, useriden, roleiden): role = await self.auth.reqRole(roleiden) await user.revoke(roleiden) logger.info(f'Revoked role {role.name} from user {user.name}', - extra=await self.getLogExtra(target_user=user.iden, target_username=user.name, + extra=self.getLogExtra(target_user=user.iden, target_username=user.name, target_role=role.iden, target_rolename=role.name, status='MODIFY')) async def addUser(self, name, passwd=None, email=None, iden=None): user = await self.auth.addUser(name, passwd=passwd, email=email, iden=iden) logger.info(f'Added user={name}', - extra=await self.getLogExtra(target_user=user.iden, target_username=user.name, + extra=self.getLogExtra(target_user=user.iden, target_username=user.name, status='CREATE')) return user.pack(packroles=True) @@ -2937,12 +2937,12 @@ async def delUser(self, iden): name = user.name await self.auth.delUser(iden) logger.info(f'Deleted user={name}', - extra=await self.getLogExtra(target_user=iden, target_username=name, status='DELETE')) + extra=self.getLogExtra(target_user=iden, target_username=name, status='DELETE')) async def addRole(self, name, iden=None): role = await self.auth.addRole(name, iden=iden) logger.info(f'Added role={name}', - extra=await self.getLogExtra(target_role=role.iden, target_rolename=role.name, 
status='CREATE')) + extra=self.getLogExtra(target_role=role.iden, target_rolename=role.name, status='CREATE')) return role.pack() async def delRole(self, iden): @@ -2950,26 +2950,26 @@ async def delRole(self, iden): name = role.name await self.auth.delRole(iden) logger.info(f'Deleted role={name}', - extra=await self.getLogExtra(target_role=iden, target_rolename=name, status='DELETE')) + extra=self.getLogExtra(target_role=iden, target_rolename=name, status='DELETE')) async def setUserEmail(self, useriden, email): await self.auth.setUserInfo(useriden, 'email', email) user = await self.auth.reqUser(useriden) logger.info(f'Set email={email} for {user.name}', - extra=await self.getLogExtra(target_user=user.iden, target_username=user.name, status='MODIFY')) + extra=self.getLogExtra(target_user=user.iden, target_username=user.name, status='MODIFY')) async def setUserName(self, useriden, name): user = await self.auth.reqUser(useriden) oname = user.name await user.setName(name) logger.info(f'Set name={name} from {oname} on user iden={user.iden}', - extra=await self.getLogExtra(target_user=user.iden, target_username=user.name, status='MODIFY')) + extra=self.getLogExtra(target_user=user.iden, target_username=user.name, status='MODIFY')) async def setUserPasswd(self, iden, passwd): user = await self.auth.reqUser(iden) await user.setPasswd(passwd) logger.info(f'Set password for {user.name}', - extra=await self.getLogExtra(target_user=user.iden, target_username=user.name, status='MODIFY')) + extra=self.getLogExtra(target_user=user.iden, target_username=user.name, status='MODIFY')) async def genUserOnepass(self, iden, duration=600000): user = await self.auth.reqUser(iden) @@ -2981,7 +2981,7 @@ async def genUserOnepass(self, iden, duration=600000): await self.auth.setUserInfo(iden, 'onepass', onepass) logger.info(f'Issued one time password for {user.name}', - extra=await self.getLogExtra(target_user=user.iden, target_username=user.name, status='MODIFY')) + 
extra=self.getLogExtra(target_user=user.iden, target_username=user.name, status='MODIFY')) return passwd @@ -2989,13 +2989,13 @@ async def setUserLocked(self, iden, locked): user = await self.auth.reqUser(iden) await user.setLocked(locked) logger.info(f'Set lock={locked} for user {user.name}', - extra=await self.getLogExtra(target_user=user.iden, target_username=user.name, status='MODIFY')) + extra=self.getLogExtra(target_user=user.iden, target_username=user.name, status='MODIFY')) async def setUserArchived(self, iden, archived): user = await self.auth.reqUser(iden) await user.setArchived(archived) logger.info(f'Set archive={archived} for user {user.name}', - extra=await self.getLogExtra(target_user=user.iden, target_username=user.name, status='MODIFY')) + extra=self.getLogExtra(target_user=user.iden, target_username=user.name, status='MODIFY')) async def getUserDef(self, iden, packroles=True): user = self.auth.user(iden) @@ -3274,44 +3274,28 @@ def _log_web_request(self, handler: s_httpapi.Handler) -> None: else: log_method = t_log.access_log.error - request_time = 1000.0 * handler.request.request_time() - user = None username = None - uri = handler.request.uri - remote_ip = handler.request.remote_ip - enfo = {'http_status': status, - 'uri': uri, - 'remoteip': remote_ip, - } - headers = {} - for header in self.LOGGED_HTTPAPI_HEADERS: if (valu := handler.request.headers.get(header)) is not None: headers[header.lower()] = valu - if headers: - enfo['headers'] = headers - - extra = {'synapse': enfo} + extra = self.getLogExtra( + status=status, + headers=headers, + path=handler.request.uri, + remoteip=handler.request.remote_ip, + took=int(1000 * handler.request.request_time())) - # It is possible that a Cell implementor may register handlers which - # do not derive from our Handler class, so we have to handle that. 
- if hasattr(handler, 'web_useriden') and handler.web_useriden: - user = handler.web_useriden - enfo['user'] = user - if hasattr(handler, 'web_username') and handler.web_username: - username = handler.web_username - enfo['username'] = username + if handler.web_useriden is not None: + extra['loginfo']['user'] = handler.web_useriden - if user: - mesg = f'{status} {handler.request.method} {uri} ({remote_ip}) user={user} ({username}) {request_time:.2f}ms' - else: - mesg = f'{status} {handler.request.method} {uri} ({remote_ip}) {request_time:.2f}ms' + if handler.web_username is not None: + extra['loginfo']['username'] = handler.web_username - log_method(mesg, extra=extra) + log_method('HTTP API Request', extra=extra) async def _getCellHttpOpts(self): # Generate/Load a Cookie Secret @@ -3666,7 +3650,7 @@ async def getCellApi(self, link, user, path): ''' return await self.cellapi.anit(self, link, user) - async def getLogExtra(self, **kwargs): + def getLogExtra(self, **kwargs): ''' Get an extra dictionary for structured logging which can be used as a extra argument for loggers. 
@@ -4815,7 +4799,7 @@ async def addUserApiKey(self, useriden, name, duration=None): await self._push('user:apikey:add', kdef) logger.info(f'Created HTTP API key {iden} for {user.name}, {name=}', - extra=await self.getLogExtra(target_user=user.iden, target_username=user.name, iden=iden, + extra=self.getLogExtra(target_user=user.iden, target_username=user.name, iden=iden, status='MODIFY')) kdef.pop('shadow') @@ -4969,7 +4953,7 @@ async def modUserApiKey(self, iden, key, valu): await self._push('user:apikey:edit', kdef.get('user'), iden, vals) logger.info(f'Updated HTTP API key {iden} for {user.name}, set {key}={valu}', - extra=await self.getLogExtra(target_user=user.iden, target_username=user.name, iden=iden, + extra=self.getLogExtra(target_user=user.iden, target_username=user.name, iden=iden, status='MODIFY')) kdef.pop('shadow') @@ -5003,7 +4987,7 @@ async def delUserApiKey(self, iden): user = await self.auth.reqUser(useriden) ret = await self._push('user:apikey:del', useriden, iden) logger.info(f'Deleted HTTP API key {iden} for {user.name}', - extra=await self.getLogExtra(target_user=user.iden, target_username=user.name, iden=iden, + extra=self.getLogExtra(target_user=user.iden, target_username=user.name, iden=iden, status='MODIFY')) return ret diff --git a/synapse/lib/const.py b/synapse/lib/const.py index 8dbf1e322a6..5e576ebce38 100644 --- a/synapse/lib/const.py +++ b/synapse/lib/const.py @@ -1,7 +1,7 @@ # Logging related constants import logging -LOG_FORMAT = '%(asctime)s [%(levelname)s] %(message)s %(synapse)s [%(name)s.%(funcName)s]' +LOG_FORMAT = '%(asctime)s [%(levelname)s] %(message)s %(params)s [%(name)s.%(funcName)s]' LOG_LEVEL_CHOICES = { 'DEBUG': logging.DEBUG, diff --git a/synapse/lib/httpapi.py b/synapse/lib/httpapi.py index 51362b6151f..cdde8b8a4a0 100644 --- a/synapse/lib/httpapi.py +++ b/synapse/lib/httpapi.py @@ -145,7 +145,7 @@ def loadJsonMesg(self, byts, validator=None): self.sendRestErr('SchemaViolation', 'Invalid JSON content.') return None - 
def logAuthIssue(self, mesg=None, user=None, username=None, level=logging.WARNING): + async def logAuthIssue(self, mesg=None, user=None, username=None, level=logging.WARNING): ''' Helper to log issues related to request authentication. @@ -158,21 +158,14 @@ def logAuthIssue(self, mesg=None, user=None, username=None, level=logging.WARNIN Returns: None ''' - uri = self.request.uri - remote_ip = self.request.remote_ip - enfo = {'uri': uri, - 'remoteip': remote_ip, - } - errm = f'Failed to authenticate request to {uri} from {remote_ip} ' - if mesg: - errm = f'{errm}: {mesg}' - if user: - errm = f'{errm}: user={user}' - enfo['user'] = user - if username: - errm = f'{errm} ({username})' - enfo['username'] = username - logger.log(level, msg=errm, extra={'synapse': enfo}) + extra = self.cell.getLogExtra( + mesg=mesg, + path=self.request.uri, + remoteip=self.request.remote_ip, + username=username, + user=user) + + logger.log(level, 'Failed to authenticate HTTP request', extra=extra) def sendAuthRequired(self): self.set_header('WWW-Authenticate', 'Basic realm=synapse') @@ -321,15 +314,15 @@ async def handleBasicAuth(self): udef = await authcell.getUserDefByName(name) if udef is None: - self.logAuthIssue(mesg='No such user.', username=name) + await self.logAuthIssue(mesg='No such user.', username=name) return None if udef.get('locked'): - self.logAuthIssue(mesg='User is locked.', user=udef.get('iden'), username=name) + await self.logAuthIssue(mesg='User is locked.', user=udef.get('iden'), username=name) return None if not await authcell.tryUserPasswd(name, passwd): - self.logAuthIssue(mesg='Incorrect password.', user=udef.get('iden'), username=name) + await self.logAuthIssue(mesg='Incorrect password.', user=udef.get('iden'), username=name) return None self.web_useriden = udef.get('iden') @@ -341,7 +334,7 @@ async def handleApiKeyAuth(self): key = self.request.headers.get('X-API-KEY') isok, info = await authcell.checkUserApiKey(key) # errfo or dict with tdef + udef if isok is 
False: - self.logAuthIssue(mesg=info.get('mesg'), user=info.get('user'), username=info.get('name')) + await self.logAuthIssue(mesg=info.get('mesg'), user=info.get('user'), username=info.get('name')) return udef = info.get('udef') @@ -685,15 +678,15 @@ async def post(self): authcell = self.getAuthCell() udef = await authcell.getUserDefByName(name) if udef is None: - self.logAuthIssue(mesg='No such user.', username=name) + await self.logAuthIssue(mesg='No such user.', username=name) return self.sendRestErr('AuthDeny', 'No such user.') if udef.get('locked'): - self.logAuthIssue(mesg='User is locked.', user=udef.get('iden'), username=name) + await self.logAuthIssue(mesg='User is locked.', user=udef.get('iden'), username=name) return self.sendRestErr('AuthDeny', 'User is locked.') if not await authcell.tryUserPasswd(name, passwd): - self.logAuthIssue(mesg='Incorrect password.', user=udef.get('iden'), username=name) + await self.logAuthIssue(mesg='Incorrect password.', user=udef.get('iden'), username=name) return self.sendRestErr('AuthDeny', 'Incorrect password.') iden = udef.get('iden') @@ -1372,8 +1365,8 @@ async def _runHttpExt(self, meth, path): # We've already flushed() the stream at this point, so we cannot # change the status code or the response headers. We just have to # log the error and move along. - mesg = f'Extended HTTP API {iden} tried to set code after sending body.' - logger.error(mesg, extra=await core.getLogExtra()) + extra = core.getLogExtra(httpapi=iden) + logger.error('Extended HTTP API sent code after sending body.', extra=extra) continue rcode = True @@ -1384,8 +1377,8 @@ async def _runHttpExt(self, meth, path): # We've already flushed() the stream at this point, so we cannot # change the status code or the response headers. We just have to # log the error and move along. - mesg = f'Extended HTTP API {iden} tried to set headers after sending body.' 
- logger.error(mesg, extra=await core.getLogExtra()) + extra = core.getLogExtra(httpapi=iden) + logger.error('Extended HTTP API set headers after sending body.', extra=extra) continue for hkey, hval in info['headers'].items(): self.set_header(hkey, hval) @@ -1395,7 +1388,7 @@ async def _runHttpExt(self, meth, path): self.clear() self.set_status(500) self.sendRestErr('StormRuntimeError', - f'Extended HTTP API {iden} must set status code before sending body.') + 'Extended HTTP API must set status code before sending body.') return await self.finish() rbody = True body = info['body'] @@ -1404,8 +1397,8 @@ async def _runHttpExt(self, meth, path): elif mtyp == 'err': errname, erfo = info - mesg = f'Error executing Extended HTTP API {iden}: {errname} {erfo.get("mesg")}' - logger.error(mesg, extra=await core.getLogExtra()) + extra = core.getLogExtra(httpapi=iden, errname=errname, **erfo) + logger.error('Extended HTTP API encountered an error.', extra=extra) if rbody: # We've already flushed() the stream at this point, so we cannot # change the status code or the response headers. 
We just have to @@ -1422,18 +1415,18 @@ async def _runHttpExt(self, meth, path): except Exception as e: rcode = True - enfo = s_common.err(e) - extra = await core.getLogExtra(iden=iden) - logger.exception(f'Extended HTTP API {iden} encountered fatal error: {enfo[1].get("mesg")}', extra=extra) + errname, errinfo = s_common.err(e) + extra = core.getLogExtra(httpapi=iden, errname=errname, **errinfo) + logger.exception(f'Extended HTTP API encountered a fatal error.', extra=extra) if rbody is False: self.clear() self.set_status(500) - self.sendRestErr(enfo[0], - f'Extended HTTP API {iden} encountered fatal error: {enfo[1].get("mesg")}') + self.sendRestErr(errname, + f'Extended HTTP API {iden} encountered fatal error: {errinfo.get("mesg")}') if rcode is False: self.clear() self.set_status(500) - self.sendRestErr('StormRuntimeError', f'Extended HTTP API {iden} never set status code.') + self.sendRestErr('StormRuntimeError', 'Extended HTTP API never set status code.') await self.finish() diff --git a/synapse/lib/stormlib/log.py b/synapse/lib/stormlib/log.py index 1546d12e213..dba81fd19a4 100644 --- a/synapse/lib/stormlib/log.py +++ b/synapse/lib/stormlib/log.py @@ -137,15 +137,14 @@ def getObjLocals(self): async def _getExtra(self, extra=None): if extra is None: - return await self.runt.snap.core.getLogExtra() + return self.runt.snap.core.getLogExtra() extra = await s_stormtypes.toprim(extra) if extra and not isinstance(extra, dict): - mesg = f'extra provided to log call must be a dictionary compatible type. Got {extra.__class__.__name__} ' \ - f'instead.' 
- raise s_exc.BadArg(mesg=mesg, arg='extra') + mesg = '$lib.log argument extra= must be a dictionary' + raise s_exc.BadArg(mesg=mesg, arg='extra', got=extra.__class__.__name__) - return await self.runt.snap.core.getLogExtra(**extra) + return self.runt.snap.core.getLogExtra(**extra) @s_stormtypes.stormfunc(readonly=True) async def _logDebug(self, mesg, extra=None): diff --git a/synapse/lib/stormlib/storm.py b/synapse/lib/stormlib/storm.py index 35cfd3df559..447729966e1 100644 --- a/synapse/lib/stormlib/storm.py +++ b/synapse/lib/stormlib/storm.py @@ -60,7 +60,7 @@ async def execStormCmd(self, runt, genr): text = await s_stormtypes.tostr(self.opts.query) query = await runt.getStormQuery(text) - extra = await self.runt.snap.core.getLogExtra(text=text, view=self.runt.snap.view.iden) + extra = self.runt.snap.core.getLogExtra(text=text, view=self.runt.snap.view.iden) stormlogger.info(f'Executing storm query via storm.exec {{{text}}} as [{self.runt.user.name}]', extra=extra) async with runt.getSubRuntime(query) as subr: @@ -76,7 +76,7 @@ async def execStormCmd(self, runt, genr): text = await s_stormtypes.tostr(self.opts.query) query = await runt.getStormQuery(text) - extra = await self.runt.snap.core.getLogExtra(text=text, view=self.runt.snap.view.iden) + extra = self.runt.snap.core.getLogExtra(text=text, view=self.runt.snap.view.iden) stormlogger.info(f'Executing storm query via storm.exec {{{text}}} as [{self.runt.user.name}]', extra=extra) async with runt.getSubRuntime(query) as subr: @@ -91,7 +91,7 @@ async def execStormCmd(self, runt, genr): subr.query = query subr._initRuntVars(query) - extra = await self.runt.snap.core.getLogExtra(text=text, view=self.runt.snap.view.iden) + extra = self.runt.snap.core.getLogExtra(text=text, view=self.runt.snap.view.iden) stormlogger.info(f'Executing storm query via storm.exec {{{text}}} as [{self.runt.user.name}]', extra=extra) async for subp in subr.execute(genr=s_common.agen(item)): @@ -153,7 +153,7 @@ async def _evalStorm(self, 
text, cast=None): cast = await s_stormtypes.tostr(cast, noneok=True) if self.runt.snap.core.stormlog: - extra = await self.runt.snap.core.getLogExtra(text=text, view=self.runt.snap.view.iden) + extra = self.runt.snap.core.getLogExtra(text=text, view=self.runt.snap.view.iden) stormlogger.info(f'Executing storm query via $lib.storm.eval() {{{text}}} as [{self.runt.user.name}]', extra=extra) casttype = None diff --git a/synapse/tests/test_cmds_cortex.py b/synapse/tests/test_cmds_cortex.py index 8c085a1e0d2..6f360541592 100644 --- a/synapse/tests/test_cmds_cortex.py +++ b/synapse/tests/test_cmds_cortex.py @@ -331,10 +331,9 @@ def check_locs_cleanup(cobj): cmdr = await s_cmdr.getItemCmdr(core, outp=outp) await cmdr.runCmdLine('log --on --nodes-only') cmdr.locs['log:fmt'] = 'newp' - with self.getAsyncLoggerStream('synapse.cmds.cortex', - 'Unknown encoding format: newp') as stream: + with self.getLoggerStream('synapse.cmds.cortex') as stream: await cmdr.runCmdLine('storm test:str') - self.true(await stream.wait(2)) + self.true(await stream.expect('Unknown encoding format: newp')) await cmdr.fini() diff --git a/synapse/tests/test_cortex.py b/synapse/tests/test_cortex.py index f1d22ce5479..3a0134c9832 100644 --- a/synapse/tests/test_cortex.py +++ b/synapse/tests/test_cortex.py @@ -305,7 +305,7 @@ async def testCoreJson(core): async with self.getTestCore(dirn=dirn) as core: await core.callStorm('$lib.jsonstor.set((path,), hehe)') - with self.getAsyncLoggerStream('synapse.lib.nexus') as stream: + with self.getLoggerStream('synapse.lib.nexus') as stream: async with self.getTestCore(dirn=dirn) as core: q = 'return( $lib.jsonstor.get((path,)) )' self.eq('hehe', await core.callStorm(q)) @@ -1161,7 +1161,7 @@ async def test_cortex_callstorm(self): self.eq(cm.exception.get('mesg'), 'Generator control statement "stop" used outside of a generator function.') - with self.getAsyncLoggerStream('synapse.lib.view', 'callStorm cancelled') as stream: + with 
self.getLoggerStream('synapse.lib.view') as stream: async with core.getLocalProxy() as proxy: # async cancellation test @@ -1171,7 +1171,7 @@ async def test_cortex_callstorm(self): except asyncio.TimeoutError: logger.exception('Woohoo!') - self.true(await stream.wait(6)) + self.true(await stream.expect('callStorm cancelled')) host, port = await core.addHttpsPort(0, host='127.0.0.1') @@ -1221,8 +1221,7 @@ async def test_cortex_storm_dmon_log(self): async with self.getTestCore() as core: - with self.getStructuredAsyncLoggerStream('synapse.storm.log', - 'Running dmon') as stream: + with self.getLoggerStream('synapse.storm.log') as stream: iden = await core.callStorm(''' $que = $lib.queue.add(foo) @@ -1238,7 +1237,7 @@ async def test_cortex_storm_dmon_log(self): $que.get() return($ddef.iden) ''') - self.true(await stream.wait(6)) + self.true(await stream.expect('Running dmon')) mesg = stream.jsonlines()[0] self.eq(mesg.get('message'), f'Running dmon {iden}') @@ -2911,10 +2910,9 @@ async def test_storm_fromtags(self): # Attempt a formpivot from a syn:tag node to a secondary property # which is not valid - with self.getAsyncLoggerStream('synapse.lib.ast', - 'Unknown time format') as stream: + with self.getLoggerStream('synapse.lib.ast') as stream: self.len(0, await core.nodes('syn:tag=foo.bar -> test:str:tick')) - self.true(await stream.wait(4)) + self.true(await stream.expect('Unknown time format')) async def test_storm_tagtags(self): @@ -3479,22 +3477,12 @@ async def test_storm_logging(self): self.nn(view) # Storm logging - with self.getAsyncLoggerStream('synapse.storm', 'Executing storm query {help ask} as [root]') \ - as stream: + with self.getLoggerStream('synapse.storm') as stream: await alist(core.storm('help ask')) - self.true(await stream.wait(4)) - - mesg = 'Executing storm query {help foo} as [root]' - with self.getAsyncLoggerStream('synapse.storm', mesg) as stream: - await alist(core.storm('help foo', opts={'show': ('init', 'fini', 'print',)})) - 
self.true(await stream.wait(4)) - - with self.getStructuredAsyncLoggerStream('synapse.storm', mesg) as stream: - await alist(core.storm('help foo', opts={'show': ('init', 'fini', 'print',)})) - self.true(await stream.wait(4)) - - mesg = stream.jsonlines()[0] - self.eq(mesg['params'].get('view'), view) + rows = stream.jsonlines() + self.eq(rows[0]['message'], 'Executing storm query as [root]') + self.eq(rows[0]['params']['view'], view) + self.eq(rows[0]['params']['text'], 'help ask') async def test_strict(self): @@ -5892,9 +5880,9 @@ async def test_cortex_mirror_culled(self): log01 = await alist(core01.nexsroot.nexslog.iter(0)) self.eq(log00, log01) - with self.getAsyncLoggerStream('synapse.lib.nexus', 'offset is out of sync') as stream: + with self.getLoggerStream('synapse.lib.nexus') as stream: async with self.getTestCore(dirn=path02, conf={'mirror': url01}) as core02: - self.true(await stream.wait(6)) + self.true(await stream.expect('offset is out of sync')) self.true(core02.nexsroot.isfini) # restore mirror @@ -6283,10 +6271,9 @@ async def test_cortex_storm_dmon_view(self): await asyncio.sleep(0) q = '''$q = $lib.queue.get(dmon) $q.puts((1, 3, 5))''' - with self.getAsyncLoggerStream('synapse.lib.storm', - "made ('test:int', 5)") as stream: + with self.getLoggerStream('synapse.lib.storm', struct=False) as stream: await core.nodes(q) - self.true(await stream.wait(6)) + self.true(await stream.expect("made ('test:int', 5)", escape=True)) nodes = await core.nodes('test:int', opts={'view': view2_iden}) self.len(3, nodes) @@ -6313,10 +6300,9 @@ async def test_cortex_storm_dmon_view(self): await core.addStormDmon(ddef) q = '''$q = $lib.queue.get(dmon2) $q.puts((1, 3, 5))''' - with self.getAsyncLoggerStream('synapse.lib.storm', - "made ('test:str', '5')") as stream: + with self.getLoggerStream('synapse.lib.storm', struct=False) as stream: await core.nodes(q) - self.true(await stream.wait(6)) + self.true(await stream.expect("made ('test:str', '5')", escape=True)) nodes = 
await core.nodes('test:str', opts={'view': view2_iden}) self.len(3, nodes) @@ -6330,10 +6316,9 @@ async def test_cortex_storm_dmon_view(self): with self.raises(s_exc.NoSuchView): await core.nodes('test:int', opts={'view': view2_iden}) - with self.getAsyncLoggerStream('synapse.lib.storm', - 'Dmon View is invalid. Stopping Dmon') as stream: + with self.getLoggerStream('synapse.lib.storm') as stream: async with self.getTestCore(dirn=dirn) as core: - self.true(await stream.wait(6)) + self.true(await stream.expect('Dmon View is invalid. Stopping Dmon')) msgs = await core.stormlist('dmon.list') self.stormIsInPrint('fatal error: invalid view', msgs) @@ -7692,7 +7677,7 @@ async def test_cortex_depr_props_warning(self): self.eq(1, data.count('deprecated properties unlocked')) self.isin('deprecated properties unlocked and not in use', data) - match = regex.match(r'Detected (?P\d+) deprecated properties', data) + match = regex.search(r'Detected (?P\d+) deprecated properties', data) count = int(match.groupdict().get('count')) here = stream.tell() @@ -8166,7 +8151,7 @@ async def test_cortex_user_scope(self): self.eq('admin', await prox.callStorm('return( $lib.user.name() )', opts=opts)) - with self.getStructuredAsyncLoggerStream('synapse.lib.cell') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: q = 'return( ($lib.user.name(), $lib.auth.users.add(lowuser) ))' (whoami, udef) = await prox.callStorm(q, opts=opts) @@ -8179,7 +8164,7 @@ async def test_cortex_user_scope(self): self.eq('admin', mesg.get('username')) self.eq('lowuser', mesg['params'].get('target_username')) - with self.getStructuredAsyncLoggerStream('synapse.lib.cell') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: q = 'auth.user.mod lowuser --admin $lib.true' msgs = [] @@ -8389,7 +8374,7 @@ async def _hang(*args, **kwargs): # storm() q = 'inet:asn=0' qhash = s_storm.queryhash(q) - with self.getStructuredAsyncLoggerStream('synapse') as stream: + with 
self.getLoggerStream('synapse') as stream: msgs = await alist(core00.storm(q)) self.len(1, [m for m in msgs if m[0] == 'node']) @@ -8402,14 +8387,14 @@ async def _hang(*args, **kwargs): self.eq(msgs[0]['params'].get('hash'), qhash) self.eq(msgs[0]['params'].get('mirror'), f'01.core.{ahanet}') - self.eq(msgs[1].get('message'), f'Executing storm query {{{q}}} as [root]') + self.eq(msgs[1].get('message'), f'Executing storm query as [root]') self.eq(msgs[1]['params'].get('hash'), qhash) self.eq(msgs[1]['params'].get('pool:from'), f'00.core.{ahanet}') # callStorm() q = 'inet:asn=0 return($lib.true)' qhash = s_storm.queryhash(q) - with self.getStructuredAsyncLoggerStream('synapse') as stream: + with self.getLoggerStream('synapse') as stream: self.true(await core00.callStorm(q)) data = stream.getvalue() @@ -8421,14 +8406,14 @@ async def _hang(*args, **kwargs): self.eq(msgs[0]['params'].get('hash'), qhash) self.eq(msgs[0]['params'].get('mirror'), f'01.core.{ahanet}') - self.eq(msgs[1].get('message'), f'Executing storm query {{{q}}} as [root]') + self.eq(msgs[1].get('message'), f'Executing storm query as [root]') self.eq(msgs[1]['params'].get('hash'), qhash) self.eq(msgs[1]['params'].get('pool:from'), f'00.core.{ahanet}') # exportStorm() q = 'inet:asn=0' qhash = s_storm.queryhash(q) - with self.getStructuredAsyncLoggerStream('synapse') as stream: + with self.getLoggerStream('synapse') as stream: self.len(1, await alist(core00.exportStorm(q))) data = stream.getvalue() @@ -8440,14 +8425,14 @@ async def _hang(*args, **kwargs): self.eq(msgs[0]['params'].get('hash'), qhash) self.eq(msgs[0]['params'].get('mirror'), f'01.core.{ahanet}') - self.eq(msgs[1].get('message'), f'Executing storm query {{{q}}} as [root]') + self.eq(msgs[1].get('message'), f'Executing storm query as [root]') self.eq(msgs[1]['params'].get('hash'), qhash) self.eq(msgs[1]['params'].get('pool:from'), f'00.core.{ahanet}') # count() q = 'inet:asn=0' qhash = s_storm.queryhash(q) - with 
self.getStructuredAsyncLoggerStream('synapse') as stream: + with self.getLoggerStream('synapse') as stream: self.eq(1, await core00.count(q)) data = stream.getvalue() @@ -8459,7 +8444,7 @@ async def _hang(*args, **kwargs): self.eq(msgs[0]['params'].get('hash'), qhash) self.eq(msgs[0]['params'].get('mirror'), f'01.core.{ahanet}') - self.eq(msgs[1].get('message'), f'Executing storm query {{{q}}} as [root]') + self.eq(msgs[1].get('message'), f'Executing storm query as [root]') self.eq(msgs[1]['params'].get('hash'), qhash) self.eq(msgs[1]['params'].get('pool:from'), f'00.core.{ahanet}') @@ -8685,7 +8670,7 @@ async def test_cortex_authgate(self): core.auth.stor.set('gate:cortex:user:newp', {'iden': 'newp'}) core.auth.stor.set('gate:cortex:role:newp', {'iden': 'newp'}) - with self.getAsyncLoggerStream('synapse.cortex') as stream: + with self.getLoggerStream('synapse.cortex', struct=False) as stream: async with self.getTestCore(dirn=dirn) as core: # type: s_cortex.Cortex # The cortex authgate still does nothing with self.raises(s_exc.AuthDeny) as cm: diff --git a/synapse/tests/test_daemon.py b/synapse/tests/test_daemon.py index 52fe58e5ee3..310fb2d7615 100644 --- a/synapse/tests/test_daemon.py +++ b/synapse/tests/test_daemon.py @@ -30,14 +30,13 @@ async def test_unixsock_longpath(self): extrapath = 108 * 'A' longdirn = s_common.genpath(dirn, extrapath) listpath = f'unix://{s_common.genpath(longdirn, "sock")}' - with self.getAsyncLoggerStream('synapse.daemon', - 'exceeds OS supported UNIX socket path length') as stream: + with self.getLoggerStream('synapse.daemon') as stream: async with await s_daemon.Daemon.anit() as dmon: with self.raises(OSError): await dmon.listen(listpath) - self.true(await stream.wait(1)) + self.true(await stream.expect('exceeds OS supported UNIX socket path length')) async def test_dmon_ready(self): @@ -84,32 +83,32 @@ async def test_dmon_errors(self): # Throw an exception when trying to handle mesg outright async with await prox.getPoolLink() as 
link: - with self.getAsyncLoggerStream('synapse.daemon', 'Dmon.onLinkMesg Handler: mesg=') as stream: + with self.getLoggerStream('synapse.daemon') as stream: await link.tx(31337) - self.true(await stream.wait(timeout=6)) + self.true(await stream.expect('Dmon.onLinkMesg Handler: mesg=')) # Valid format; do not know what the message is. async with await prox.getPoolLink() as link: mesg = ('newp', {}) emsg = "Dmon.onLinkMesg Invalid mesg: mesg=('newp', {})" - with self.getAsyncLoggerStream('synapse.daemon', emsg) as stream: + with self.getLoggerStream('synapse.daemon', emsg) as stream: await link.tx(mesg) - self.true(await stream.wait(timeout=6)) + self.true(await stream.expect(emsg)) # Invalid data casues a link to fail on rx async with await prox.getPoolLink() as link: - with self.getAsyncLoggerStream('synapse.lib.link', 'rx error') as stream: + with self.getLoggerStream('synapse.lib.link') as stream: byts = b'\x16\x03\x01\x02\x00\x01\x00\x01\xfc\x03\x03\xa6\xa3D\xd5\xdf%\xac\xa9\x92\xc3' await link.send(byts) - self.true(await stream.wait(timeout=6)) + self.true(await stream.expect('rx error')) # bad t2:init message async with await prox.getPoolLink() as link: mesg = ('t2:init', {}) emsg = "Error on t2:init:" - with self.getAsyncLoggerStream('synapse.daemon', emsg) as stream: + with self.getLoggerStream('synapse.daemon', emsg) as stream: await link.tx(mesg) - self.true(await stream.wait(timeout=6)) + self.true(await stream.expect(emsg)) class SvcApi(s_cell.CellApi, s_stormsvc.StormSvc): _storm_svc_name = 'foo' diff --git a/synapse/tests/test_datamodel.py b/synapse/tests/test_datamodel.py index 2ba6126297d..92ccb21a657 100644 --- a/synapse/tests/test_datamodel.py +++ b/synapse/tests/test_datamodel.py @@ -157,12 +157,12 @@ async def test_datamodel_dynamics(self): modl.addIface('depr:iface', {'deprecated': True}) - with self.getAsyncLoggerStream('synapse.datamodel') as dstream: + with self.getLoggerStream('synapse.datamodel') as stream: modl.addType('foo:bar', 'int', 
{}, {'interfaces': ('depr:iface',)}) modl.addForm('foo:bar', {}, ()) - dstream.seek(0) - self.isin('Form foo:bar depends on deprecated interface depr:iface', dstream.read()) + stream.seek(0) + self.isin('Form foo:bar depends on deprecated interface depr:iface', stream.read()) async def test_datamodel_del_prop(self): @@ -226,8 +226,9 @@ async def test_model_deprecation(self): with self.getTestDir() as dirn: - with self.getAsyncLoggerStream('synapse.lib.types') as tstream, \ - self.getAsyncLoggerStream('synapse.datamodel') as dstream: + with self.getLoggerStream('synapse.lib.types') as tstream, \ + self.getLoggerStream('synapse.datamodel') as dstream: + core = await s_cortex.Cortex.anit(dirn, conf) dstream.seek(0) @@ -245,28 +246,28 @@ async def test_model_deprecation(self): # Comp type warning is logged by the server, not sent back to users mesg = 'type test:dep:comp field str uses a deprecated type test:dep:easy' - with self.getAsyncLoggerStream('synapse.lib.types', mesg) as tstream: + with self.getLoggerStream('synapse.lib.types') as tstream: _ = await core.stormlist('[test:dep:easy=test2 :comp=(1, two)]') - self.true(await tstream.wait(6)) + self.true(await tstream.expect(mesg)) msgs = await core.stormlist('[test:str=tehe .pdep=beep]') self.stormIsInWarn('property test:str.pdep is deprecated', msgs) # Extended props, custom universals and tagprops can all trigger deprecation notices mesg = 'tag property depr is using a deprecated type test:dep:easy' - with self.getAsyncLoggerStream('synapse.datamodel', mesg) as dstream: + with self.getLoggerStream('synapse.datamodel') as dstream: await core.addTagProp('depr', ('test:dep:easy', {}), {}) - self.true(await dstream.wait(6)) + self.true(await dstream.expect(mesg)) mesg = 'universal property ._test is using a deprecated type test:dep:easy' - with self.getAsyncLoggerStream('synapse.datamodel', mesg) as dstream: + with self.getLoggerStream('synapse.datamodel') as dstream: await core.addUnivProp('_test', ('test:dep:easy', 
{}), {}) - self.true(await dstream.wait(6)) + self.true(await dstream.expect(mesg)) mesg = 'extended property test:str:_depr is using a deprecated type test:dep:easy' - with self.getAsyncLoggerStream('synapse.cortex', mesg) as cstream: + with self.getLoggerStream('synapse.cortex') as cstream: await core.addFormProp('test:str', '_depr', ('test:dep:easy', {}), {}) - self.true(await cstream.wait(6)) + self.true(await cstream.expect(mesg)) # Deprecated ctor information propagates upward to types and forms msgs = await core.stormlist('[test:dep:str=" test" :beep=" boop "]') @@ -277,9 +278,9 @@ async def test_model_deprecation(self): # Restarting the cortex warns again for various items that it loads from the hive # with deprecated types in them. This is a coverage test for extended properties. - with self.getAsyncLoggerStream('synapse.cortex', mesg) as cstream: + with self.getLoggerStream('synapse.cortex') as cstream: async with await s_cortex.Cortex.anit(dirn, conf) as core: - self.true(await cstream.wait(6)) + self.true(await cstream.expect(mesg)) async def test_datamodel_getmodeldefs(self): ''' diff --git a/synapse/tests/test_lib_agenda.py b/synapse/tests/test_lib_agenda.py index 59b381398e4..a592ad8f961 100644 --- a/synapse/tests/test_lib_agenda.py +++ b/synapse/tests/test_lib_agenda.py @@ -395,17 +395,17 @@ def looptime(): # Ensure structured logging captures the cron iden value core.stormlog = True - with self.getStructuredAsyncLoggerStream('synapse.storm') as stream: + with self.getLoggerStream('synapse.storm') as stream: unixtime = datetime.datetime(year=2019, month=2, day=13, hour=10, minute=16, tzinfo=tz.utc).timestamp() self.eq((12, 'bar'), await asyncio.wait_for(core.callStorm('return($lib.queue.gen(visi).pop(wait=$lib.true))'), timeout=5)) core.stormlog = False msgs = stream.jsonlines() - msgs = [m for m in msgs if m['synapse']['text'] == '$lib.queue.gen(visi).put(bar)'] + msgs = [m for m in msgs if m['params']['text'] == '$lib.queue.gen(visi).put(bar)'] 
self.gt(len(msgs), 0) for m in msgs: - self.eq(m['synapse'].get('cron'), appt.iden) + self.eq(m['params'].get('cron'), appt.iden) self.eq(1, appt.startcount) @@ -419,11 +419,11 @@ def looptime(): await visi.setLocked(True) - with self.getLoggerStream('synapse.lib.agenda', 'locked') as stream: + with self.getLoggerStream('synapse.lib.agenda') as stream: unixtime = datetime.datetime(year=2019, month=2, day=16, hour=10, minute=16, tzinfo=tz.utc).timestamp() # pump the ioloop via sleep(0) until the log message appears - while not stream.wait(0.1): + while not await stream.expect('locked', timeout=0.1): await asyncio.sleep(0) await core.nexsroot.waitOffs(strt + 4) @@ -737,16 +737,16 @@ async def test_agenda_fatal_run(self): # Force the cron to run. - with self.getAsyncLoggerStream('synapse.lib.agenda', 'Agenda error running appointment ') as stream: + with self.getLoggerStream('synapse.lib.agenda') as stream: core.agenda._addTickOff(55) - self.true(await stream.wait(timeout=12)) + self.true(await stream.expect('Cron job error')) await core.addUserRule(user, (True, ('storm',))) await core.addUserRule(user, (True, ('view', 'read')), gateiden=fork) - with self.getAsyncLoggerStream('synapse.storm.log', 'I am a cron job') as stream: + with self.getLoggerStream('synapse.storm.log') as stream: core.agenda._addTickOff(60) - self.true(await stream.wait(timeout=12)) + self.true(await stream.expect('I am a cron job')) async def test_agenda_mirror_realtime(self): with self.getTestDir() as dirn: @@ -825,7 +825,7 @@ async def test_agenda_mirror_realtime(self): stream.seek(0) data = stream.read() - self.isin("_Appt.edits() Invalid attribute received: invalid = 'newp'", data) + self.isin('Invalid cron property edit', data) async def test_agenda_promotions(self): # Adjust this knob for the number of cron jobs you want to test. 
Below @@ -917,16 +917,12 @@ async def test_agenda_promotions(self): tasks01 = await core01.callStorm('return($lib.ps.list())') self.len(0, tasks01) - with self.getLoggerStream('synapse.lib.agenda', mesg='name=CRON99') as stream: + with self.getLoggerStream('synapse.lib.agenda') as stream: # Promote and inspect cortex status await core01.promote(graceful=True) self.false(core00.isactive) self.true(core01.isactive) - - stream.seek(0) - data = stream.read() - for ii in range(NUMJOBS): - self.isin(f' name=CRON{ii} with result "cancelled" took ', data) + self.true(await stream.expect('Cron job completed.*cancelled', count=NUMJOBS, escape=False)) # Sync the (now) follower so the isrunning status gets updated to false on both cortexes await core00.sync() @@ -1101,11 +1097,11 @@ async def task(): async def test_agenda_warnings(self): async with self.getTestCore() as core: - with self.getAsyncLoggerStream('synapse.lib.agenda', 'issued warning: oh hai') as stream: + with self.getLoggerStream('synapse.lib.agenda') as stream: q = '$lib.warn("oh hai")' msgs = await core.stormlist('cron.at --now $q', opts={'vars': {'q': q}}) self.stormHasNoWarnErr(msgs) - self.true(await stream.wait(timeout=6)) + self.true(await stream.expect('warning.*oh hai', escape=False)) async def test_agenda_graceful_promotion_with_running_cron(self): @@ -1137,8 +1133,8 @@ async def test_agenda_graceful_promotion_with_running_cron(self): async with self.getTestCore(conf=conf01) as core01: - with self.getAsyncLoggerStream('synapse.storm.log', 'I AM A ERROR LOG MESSAGE') as stream: - self.true(await stream.wait(timeout=6)) + with self.getLoggerStream('synapse.storm.log') as stream: + self.true(await stream.expect('I AM A ERROR LOG MESSAGE')) cron = await core00.callStorm('return($lib.cron.list())') self.len(1, cron) diff --git a/synapse/tests/test_lib_aha.py b/synapse/tests/test_lib_aha.py index c67ee21fb91..c787a62701e 100644 --- a/synapse/tests/test_lib_aha.py +++ b/synapse/tests/test_lib_aha.py @@ -152,9 
+152,9 @@ async def test_lib_aha_offon(self): # Tear down the Aha cell. await aha.__aexit__(None, None, None) - with self.getAsyncLoggerStream('synapse.lib.aha', f'Set [0.cryo.synapse] offline.') as stream: + with self.getLoggerStream('synapse.lib.aha') as stream: async with self.getTestAha(dirn=dirn) as aha: - self.true(await asyncio.wait_for(stream.wait(), timeout=12)) + self.true(await stream.expect(f'Set [0.cryo.synapse] offline.')) svc = await aha.getAhaSvc('0.cryo...') self.notin('online', svc.get('svcinfo')) @@ -672,17 +672,16 @@ async def test_lib_aha_provision(self): s_common.yamlsave(overconf, axonpath, 'cell.mods.yaml') # force a re-provision... (because the providen is different) - with self.getAsyncLoggerStream('synapse.lib.cell', - 'Provisioning axon from AHA service') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: async with await s_axon.Axon.initFromArgv((axonpath,)) as axon: - self.true(await stream.wait(6)) + self.true(await stream.expect('Provisioning axon from AHA service')) self.ne(axon.conf.get('dmon:listen'), 'tcp://0.0.0.0:0') overconf2 = s_common.yamlload(axonpath, 'cell.mods.yaml') self.eq(overconf2, {'nexslog:async': True}) # tests startup logic that recognizes it's already done - with self.getAsyncLoggerStream('synapse.lib.cell', ) as stream: + with self.getLoggerStream('synapse.lib.cell', ) as stream: async with await s_axon.Axon.initFromArgv((axonpath,)) as axon: pass stream.seek(0) @@ -1428,9 +1427,8 @@ async def test_aha_gather(self): self.len(nexsindx * 2, items) # ensure we handle down services correctly - async with aha.waiter(2, 'aha:svcadd', timeout=10): - async with aha.waiter(1, 'aha:svcdown', timeout=10): - await cell01.fini() + async with aha.waiter(1, 'aha:svcdown', timeout=10): + await cell01.fini() # test the call endpoint todo = s_common.todo('getCellInfo') diff --git a/synapse/tests/test_lib_boss.py b/synapse/tests/test_lib_boss.py index 884ffb935af..31da609ef3a 100644 --- 
a/synapse/tests/test_lib_boss.py +++ b/synapse/tests/test_lib_boss.py @@ -50,8 +50,7 @@ async def testloop(): self.len(1, boss.ps()) - with self.getAsyncLoggerStream('synapse.lib.boss', - 'Iden specified for existing task') as stream: + with self.getLoggerStream('synapse.lib.boss') as stream: iden = s_common.guid() @@ -60,5 +59,5 @@ async def double_promote(): await boss.promote(f'double', root, taskiden=iden + iden) coro = boss.schedCoro(double_promote()) - self.true(await stream.wait(timeout=6)) + self.true(await stream.expect('Iden specified for existing task')) await coro diff --git a/synapse/tests/test_lib_cell.py b/synapse/tests/test_lib_cell.py index a244aa030be..0459e0a07ab 100644 --- a/synapse/tests/test_lib_cell.py +++ b/synapse/tests/test_lib_cell.py @@ -446,10 +446,9 @@ async def test_cell_auth(self): # @adminApi methods are allowed self.true(await proxy.adminOnly()) - mesg = 'Executing remote admin API call.' - with self.getStructuredAsyncLoggerStream('synapse.lib.cell', mesg) as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: self.eq(await proxy.adminOnlyLog(1, 2, three=4), (1, 2, {'three': 4})) - self.true(await stream.wait(timeout=10)) + self.true(await stream.expect('Executing remote admin API call', timeout=10)) msgs = stream.jsonlines() self.len(1, msgs) self.eq('EchoAuthApi.adminOnlyLog', msgs[0]['params']['func']) @@ -742,10 +741,10 @@ async def test_longpath(self): with self.getTestDir() as dirn: extrapath = 108 * 'A' longdirn = s_common.genpath(dirn, extrapath) - with self.getAsyncLoggerStream('synapse.lib.cell', 'LOCAL UNIX SOCKET WILL BE UNAVAILABLE') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: async with self.getTestCell(s_cell.Cell, dirn=longdirn) as cell: self.none(cell.dmon.addr) - self.true(await stream.wait(1)) + self.true(await stream.expect('LOCAL UNIX SOCKET WILL BE UNAVAILABLE')) async def test_cell_setuser(self): @@ -1228,7 +1227,7 @@ async def test_cell_confprint(self): } 
s_common.yamlsave(conf, dirn, 'cell.yaml') - with self.getAsyncLoggerStream('synapse.lib.cell') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: async with await s_cell.Cell.initFromArgv([dirn]): pass stream.seek(0) @@ -1242,7 +1241,7 @@ async def test_cell_confprint(self): } s_common.yamlsave(conf, dirn, 'cell.yaml') - with self.getAsyncLoggerStream('synapse.lib.cell') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: async with await s_cell.Cell.initFromArgv([dirn]): pass stream.seek(0) @@ -1295,23 +1294,21 @@ async def test_initargv_failure(self): self.skip('Test requires /dev/null to exist.') async with self.withSetLoggingMock(): - with self.getAsyncLoggerStream('synapse.lib.cell', - 'Error starting cell at /dev/null') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: with self.raises(FileExistsError): async with await s_cell.Cell.initFromArgv(['/dev/null']): pass - self.true(await stream.wait(timeout=6)) + self.true(await stream.expect('Error starting cell at /dev/null')) # Bad configs can also cause a failure. 
with self.getTestDir() as dirn: - with self.getAsyncLoggerStream('synapse.lib.cell', - 'Error while bootstrapping cell config') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: with self.raises(s_exc.BadConfValu) as cm: with self.setTstEnvars(SYN_CELL_AUTH_PASSWD="true"): # interpreted as a yaml bool true async with await s_cell.Cell.initFromArgv([dirn, ]): pass self.eq(cm.exception.get('name'), 'auth:passwd') - self.true(await stream.wait(timeout=6)) + self.true(await stream.expect('Error while bootstrapping cell config')) async def test_cell_backup(self): @@ -2060,10 +2057,9 @@ async def test_mirror_badiden(self): async with self.getTestCell(s_cell.Cell, dirn=path01, conf={'nexslog:en': True}) as cell01: pass - with self.getAsyncLoggerStream('synapse.lib.nexus', - 'has different iden') as stream: + with self.getLoggerStream('synapse.lib.nexus') as stream: async with self.getTestCell(s_cell.Cell, dirn=path01, conf=conf01) as cell01: - await stream.wait(timeout=2) + self.true(await stream.expect('has different iden', timeout=2)) self.true(await cell01.nexsroot.waitfini(6)) async def test_backup_restore_base(self): @@ -2097,16 +2093,15 @@ async def test_backup_restore_base(self): with self.setTstEnvars(SYN_RESTORE_HTTPS_URL=furl): with self.getTestDir() as cdir: # Restore works - with self.getAsyncLoggerStream('synapse.lib.cell', - 'Restoring cortex from SYN_RESTORE_HTTPS_URL') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: argv = [cdir, '--https', '0', '--telepath', 'tcp://127.0.0.1:0'] async with await s_cortex.Cortex.initFromArgv(argv) as core: - self.true(await stream.wait(6)) + self.true(await stream.expect('Restoring cortex from SYN_RESTORE_HTTPS_URL')) self.len(1, await core.nodes('inet:ipv4=1.2.3.4')) self.true(core.conf.get('storm:log')) # Turning the service back on with the restore URL is fine too. 
- with self.getAsyncLoggerStream('synapse.lib.cell') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: argv = [cdir, '--https', '0', '--telepath', 'tcp://127.0.0.1:0'] async with await s_cortex.Cortex.initFromArgv(argv) as core: self.len(1, await core.nodes('inet:ipv4=1.2.3.4')) @@ -2134,11 +2129,10 @@ async def test_backup_restore_base(self): # all of the existing content of that directory. Remove the restore.done file # to force the restore from happening again. os.unlink(rpath) - with self.getAsyncLoggerStream('synapse.lib.cell', - 'Removing existing') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: argv = [cdir, '--https', '0', '--telepath', 'tcp://127.0.0.1:0'] async with await s_cortex.Cortex.initFromArgv(argv) as core: - self.true(await stream.wait(6)) + self.true(await stream.expect('Removing existing')) self.len(1, await core.nodes('inet:ipv4=1.2.3.4')) # Restore a backup which has an existing restore.done file in it - that marker file will get overwritten @@ -2146,11 +2140,10 @@ async def test_backup_restore_base(self): with self.setTstEnvars(SYN_RESTORE_HTTPS_URL=furl2): with self.getTestDir() as cdir: # Restore works - with self.getAsyncLoggerStream('synapse.lib.cell', - 'Restoring cortex from SYN_RESTORE_HTTPS_URL') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: argv = [cdir, '--https', '0', '--telepath', 'tcp://127.0.0.1:0'] async with await s_cortex.Cortex.initFromArgv(argv) as core: - self.true(await stream.wait(6)) + self.true(await stream.expect('Restoring cortex from SYN_RESTORE_HTTPS_URL')) self.len(1, await core.nodes('inet:ipv4=1.2.3.4')) rpath = s_common.genpath(cdir, 'restore.done') @@ -2242,11 +2235,10 @@ async def test_backup_restore_aha(self): with self.setTstEnvars(SYN_RESTORE_HTTPS_URL=furl, SYN_CORTEX_AHA_PROVISION=purl): # Restore works - with self.getAsyncLoggerStream('synapse.lib.cell', - 'Restoring cortex from SYN_RESTORE_HTTPS_URL') as stream: + with 
self.getLoggerStream('synapse.lib.cell') as stream: argv = [bdr0, '--https', '0', '--telepath', 'tcp://127.0.0.1:0'] async with await s_cortex.Cortex.initFromArgv(argv) as bcree00: - self.true(await stream.wait(6)) + self.true(await stream.expect('Restoring cortex from SYN_RESTORE_HTTPS_URL')) self.len(1, await bcree00.nodes('inet:asn=0')) self.len(1, await bcree00.nodes('[inet:asn=1234]')) @@ -2258,12 +2250,13 @@ async def test_backup_restore_aha(self): # Restore the backup as a mirror of the mynewcortex purl = await aha.addAhaSvcProv('01.mynewcortex', provinfo={'mirror': 'mynewcortex'}) + stream.clear() with self.setTstEnvars(SYN_RESTORE_HTTPS_URL=furl, SYN_CORTEX_AHA_PROVISION=purl): argv = [bdr1, '--https', '0', '--telepath', 'tcp://127.0.0.1:0'] async with await s_cortex.Cortex.initFromArgv(argv) as bcree01: - self.true(await stream.wait(6)) + self.true(await stream.expect('Restoring cortex from SYN_RESTORE_HTTPS_URL')) self.true(bcree00.isactive) self.false(bcree01.isactive) @@ -2338,11 +2331,10 @@ async def test_backup_restore_double_promote_aha(self): with self.setTstEnvars(SYN_RESTORE_HTTPS_URL=furl, SYN_CORTEX_AHA_PROVISION=purl): # Restore works - with self.getAsyncLoggerStream('synapse.lib.cell', - 'Restoring cortex from SYN_RESTORE_HTTPS_URL') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: argv = [bdr0, '--https', '0', '--telepath', 'tcp://127.0.0.1:0'] async with await s_cortex.Cortex.initFromArgv(argv) as bcree00: - self.true(await stream.wait(6)) + self.true(await stream.expect('Restoring cortex from SYN_RESTORE_HTTPS_URL')) self.len(1, await bcree00.nodes('inet:asn=0')) self.len(1, await bcree00.nodes('[inet:asn=1234]')) @@ -2354,12 +2346,13 @@ async def test_backup_restore_double_promote_aha(self): # Restore the backup as a mirror of the mynewcortex purl = await aha.addAhaSvcProv('01.mynewcortex', provinfo={'mirror': 'mynewcortex'}) + stream.clear() with self.setTstEnvars(SYN_RESTORE_HTTPS_URL=furl, 
SYN_CORTEX_AHA_PROVISION=purl): argv = [bdr1, '--https', '0', '--telepath', 'tcp://127.0.0.1:0'] async with await s_cortex.Cortex.initFromArgv(argv) as bcree01: - self.true(await stream.wait(6)) + self.true(await stream.expect('Restoring cortex from SYN_RESTORE_HTTPS_URL')) self.true(bcree00.isactive) self.false(bcree01.isactive) @@ -2470,9 +2463,8 @@ async def wrapDelWriteHold(root, reason): # This tmp_reason assertion seems counter-intuitive at first; but it's really # asserting that the message which was incorrectly being logged is no longer logged. log_enable_writes = f'Free space on {core.dirn} above minimum threshold' - with self.getAsyncLoggerStream('synapse.lib.cell', log_enable_writes) as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: await core.nexsroot.addWriteHold(tmp_reason := 'something else') - self.false(await stream.wait(1)) stream.seek(0) self.eq(stream.read(), '') @@ -2566,10 +2558,9 @@ def spaceexc(self): viewiden = view.get('iden') opts = {'view': viewiden} - with self.getLoggerStream('synapse.lib.lmdbslab', - 'Error during slab resize callback - foo') as stream: + with self.getLoggerStream('synapse.lib.lmdbslab') as stream: nodes = await core.stormlist('for $x in $lib.range(200) {[inet:ipv4=$x]}', opts=opts) - self.true(stream.wait(1)) + self.true(await stream.expect('Error during slab resize callback - foo')) async with self.getTestCore() as core: @@ -2603,7 +2594,7 @@ async def test_cell_onboot_optimize(self): lmdbfile = s_common.genpath(dirn, 'layers', layriden, 'layer_v2.lmdb', 'data.mdb') stat00 = os.stat(lmdbfile) - with self.getAsyncLoggerStream('synapse.lib.cell') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: conf = {'onboot:optimize': True} async with self.getTestCore(dirn=dirn, conf=conf) as core: @@ -2626,7 +2617,7 @@ def diffdev(path): return real with mock.patch('os.stat', diffdev): - with self.getAsyncLoggerStream('synapse.lib.cell') as stream: + with 
self.getLoggerStream('synapse.lib.cell') as stream: conf = {'onboot:optimize': True} async with self.getTestCore(dirn=dirn, conf=conf) as core: @@ -2641,7 +2632,7 @@ def diffdev(path): async with self.getTestCore(dirn=dirn) as core: await core.runBackup() - with self.getAsyncLoggerStream('synapse.lib.cell') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: conf = {'onboot:optimize': True} async with self.getTestCore(dirn=dirn, conf=conf) as core: @@ -3081,7 +3072,7 @@ async def test_cell_nexus_compat(self): async def test_cell_hive_migration(self): - with self.getAsyncLoggerStream('synapse.lib.cell') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: async with self.getRegrCore('hive-migration') as core: visi = await core.auth.getUserByName('visi') @@ -3149,7 +3140,7 @@ async def test_cell_check_sysctl(self): sysvals['vm.dirty_writeback_centisecs'] += 1 # Detect and report incorrect values - with self.getStructuredAsyncLoggerStream('synapse.lib.cell') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: with mock.patch.object(s_cell.Cell, 'SYSCTL_VALS', sysvals): async with self.getTestCore(conf={'health:sysctl:checks': True}): pass @@ -3318,10 +3309,10 @@ def __init__(self, proxy=None): async def proxy(self, timeout=None): return self._proxy - with self.getAsyncLoggerStream('synapse.lib.cell', 'AHA client connection failed.') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: cell.ahaclient = MockAhaClient() self.none(await cell.getAhaProxy()) - self.true(await stream.wait(timeout=1)) + self.true(await stream.expect('AHA client connection failed.')) class MockProxyHasNot: def _hasTeleFeat(self, name, vers): @@ -3373,11 +3364,11 @@ async def mock_executor(func, *args, **kwargs): return mock_proc with mock.patch('synapse.lib.cell.s_coro.executor', mock_executor): - with self.getAsyncLoggerStream('synapse.lib.cell', 'Error during backup streaming') as stream: + with 
self.getLoggerStream('synapse.lib.cell') as stream: with self.raises(Exception) as cm: async for _ in proxy.iterBackupArchive('bkup'): pass - self.true(await stream.wait(timeout=6)) + self.true(await stream.expect('Error during backup streaming')) async def test_iter_new_backup_archive(self): @@ -3395,13 +3386,13 @@ async def mock_runBackup(*args, **kwargs): raise Exception('backup failed') with mock.patch.object(s_cell.Cell, 'runBackup', mock_runBackup): - with self.getAsyncLoggerStream('synapse.lib.cell', 'Removing') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: with self.raises(s_exc.SynErr) as cm: async for _ in proxy.iterNewBackupArchive('failedbackup', remove=True): pass self.isin('backup failed', str(cm.exception)) - self.true(await stream.wait(timeout=6)) + self.true(await stream.expect('Removing')) path = os.path.join(backdirn, 'failedbackup') self.false(os.path.exists(path)) @@ -3473,11 +3464,11 @@ async def test_cell_logs(self): async with aha.getLocalProxy() as proxy: async def logtask(): - logger.warning('one little piggy', extra=await aha.getLogExtra()) + logger.warning('one little piggy', extra=aha.getLogExtra()) - with self.getLogStream('synapse.tests.test_lib_cell') as stream: + with self.getLoggerStream('synapse.tests.test_lib_cell') as stream: - logger.warning('oh hai', extra=await aha.getLogExtra()) + logger.warning('oh hai', extra=aha.getLogExtra()) # test the non-wait version quick... 
logs = [loginfo async for loginfo in proxy.logs(last=-1)] diff --git a/synapse/tests/test_lib_config.py b/synapse/tests/test_lib_config.py index bc2972a8352..fe9d371ac2a 100644 --- a/synapse/tests/test_lib_config.py +++ b/synapse/tests/test_lib_config.py @@ -49,10 +49,10 @@ async def test_config_basics(self): 'Will not form argparse for [key:bool:nodefval]' pars = argparse.ArgumentParser('synapse.tests.test_lib_config.basics') pars.add_argument('--beep', type=str, help='beep option', default='beep.sys') - with self.getLoggerStream('synapse.lib.config', mesg) as stream: + with self.getLoggerStream('synapse.lib.config') as stream: for optname, optinfo in conf.getArgParseArgs(): pars.add_argument(optname, **optinfo) - self.true(stream.wait(3)) + self.true(stream.expect(mesg)) hmsg = pars.format_help() # Undo pretty-printing @@ -239,7 +239,7 @@ async def test_config_basics(self): s_common.yamlsave({'key:integer': 5678, 'key:string': 'haha'}, dirn, '3.yaml') fp = s_common.genpath(dirn, '3.yaml') - with self.getAsyncLoggerStream('synapse.lib.config') as stream: + with self.getLoggerStream('synapse.lib.config') as stream: conf3.setConfFromFile(fp, force=True) stream.seek(0) buf = stream.read() diff --git a/synapse/tests/test_lib_httpapi.py b/synapse/tests/test_lib_httpapi.py index 51dead59708..15c941c645d 100644 --- a/synapse/tests/test_lib_httpapi.py +++ b/synapse/tests/test_lib_httpapi.py @@ -382,30 +382,30 @@ async def test_http_auth(self): async with self.getHttpSess() as sess: info = {'user': 'hehe'} - with self.getAsyncLoggerStream('synapse.lib.httpapi', 'No such user.') as stream: + with self.getLoggerStream('synapse.lib.httpapi') as stream: async with sess.post(f'https://localhost:{port}/api/v1/login', json=info) as resp: item = await resp.json() self.eq('AuthDeny', item.get('code')) - self.true(await stream.wait(timeout=6)) + self.true(await stream.expect('No such user.')) async with self.getHttpSess() as sess: info = {'user': 'visi', 'passwd': 'secret'} await 
core.setUserLocked(visiiden, True) - with self.getAsyncLoggerStream('synapse.lib.httpapi', 'User is locked.') as stream: + with self.getLoggerStream('synapse.lib.httpapi') as stream: async with sess.post(f'https://localhost:{port}/api/v1/login', json=info) as resp: item = await resp.json() self.eq('AuthDeny', item.get('code')) - self.true(await stream.wait(timeout=6)) + self.true(await stream.expect('User is locked.')) await core.setUserLocked(visiiden, False) async with self.getHttpSess() as sess: info = {'user': 'visi', 'passwd': 'borked'} - with self.getAsyncLoggerStream('synapse.lib.httpapi', 'Incorrect password.') as stream: + with self.getLoggerStream('synapse.lib.httpapi') as stream: async with sess.post(f'https://localhost:{port}/api/v1/login', json=info) as resp: item = await resp.json() self.eq('AuthDeny', item.get('code')) - self.true(await stream.wait(timeout=6)) + self.true(await stream.expect('Incorrect password.')) async with self.getHttpSess() as sess: info = {'user': 'visi', 'passwd': 'secret'} @@ -451,25 +451,25 @@ async def test_http_auth(self): item = await resp.json() self.eq('ok', item.get('status')) - with self.getAsyncLoggerStream('synapse.lib.httpapi', 'No such user.') as stream: + with self.getLoggerStream('synapse.lib.httpapi') as stream: async with sess.get(f'https://localhost:{port}/api/v1/auth/users', auth=heheauth) as resp: item = await resp.json() self.eq('NotAuthenticated', item.get('code')) - self.true(await stream.wait(timeout=12)) + self.true(await stream.expect('No such user.')) await core.setUserLocked(visiiden, True) - with self.getAsyncLoggerStream('synapse.lib.httpapi', 'User is locked.') as stream: + with self.getLoggerStream('synapse.lib.httpapi') as stream: async with sess.get(f'https://localhost:{port}/api/v1/auth/users', auth=visiauth) as resp: item = await resp.json() self.eq('NotAuthenticated', item.get('code')) - self.true(await stream.wait(timeout=12)) + self.true(await stream.expect('User is locked.')) await 
core.setUserLocked(visiiden, False) - with self.getAsyncLoggerStream('synapse.lib.httpapi', 'Incorrect password.') as stream: + with self.getLoggerStream('synapse.lib.httpapi') as stream: async with sess.get(f'https://localhost:{port}/api/v1/auth/users', auth=newpauth) as resp: item = await resp.json() self.eq('NotAuthenticated', item.get('code')) - self.true(await stream.wait(timeout=12)) + self.true(await stream.expect('Incorrect password.')) headers = {'Authorization': 'yermom'} async with sess.get(f'https://localhost:{port}/api/v1/auth/users', headers=headers) as resp: @@ -1722,11 +1722,6 @@ async def test_http_sess_mirror(self): async def test_request_logging(self): - def get_mesg(stream: s_tests.AsyncStreamEvent) -> dict: - msgs = stream.jsonlines() - self.len(1, msgs) - return msgs[0] - async with self.getTestCore() as core: # structlog tests @@ -1742,7 +1737,7 @@ def get_mesg(stream: s_tests.AsyncStreamEvent) -> dict: # Basic-auth - with self.getStructuredAsyncLoggerStream(logname, 'api/v1/auth/adduser') as stream: + with self.getLoggerStream(logname) as stream: headers = { 'X-Forwarded-For': '1.2.3.4', @@ -1754,57 +1749,52 @@ def get_mesg(stream: s_tests.AsyncStreamEvent) -> dict: self.nn(item.get('result').get('iden')) visiiden = item['result']['iden'] self.eq(resp.status, 200) - self.true(await stream.wait(6)) - - mesg = get_mesg(stream) - self.eq(mesg['synapse'].get('uri'), '/api/v1/auth/adduser') - self.eq(mesg['synapse'].get('username'), 'root') - self.eq(mesg['synapse'].get('user'), core.auth.rootuser.iden) - self.isin('headers', mesg['synapse']) - self.eq(mesg['synapse']['headers'].get('user-agent'), 'test_request_logging') - self.isin('remoteip', mesg['synapse']) - self.isin('(root)', mesg.get('message')) - self.isin('200 POST /api/v1/auth/adduser', mesg.get('message')) + + mesg = stream.jsonlines()[0] + self.eq(mesg['params'].get('status'), 200) + self.eq(mesg['params'].get('path'), '/api/v1/auth/adduser') + self.eq(mesg['username'], 'root') + 
self.eq(mesg['user'], core.auth.rootuser.iden) + self.isin('headers', mesg['params']) + self.eq(mesg['params']['headers'].get('user-agent'), 'test_request_logging') + self.isin('remoteip', mesg['params']) self.notin('1.2.3.4', mesg.get('message')) # No auth provided - with self.getStructuredAsyncLoggerStream(logname, 'api/v1/active') as stream: + with self.getLoggerStream(logname) as stream: async with sess.get(f'https://root:root@localhost:{port}/api/v1/active', skip_auto_headers=['User-Agent']) as resp: self.eq(resp.status, 200) - self.true(await stream.wait(6)) - mesg = get_mesg(stream) - self.eq(mesg['synapse'].get('uri'), '/api/v1/active') - self.notin('headers', mesg) - self.notin('username', mesg) - self.notin('user', mesg) - self.isin('remoteip', mesg['synapse']) - self.isin('200 GET /api/v1/active', mesg.get('message')) + mesg = stream.jsonlines()[0] + self.eq(mesg['params'].get('status'), 200) + self.eq(mesg['params'].get('path'), '/api/v1/active') + self.none(mesg.get('user')) + self.none(mesg.get('username')) + self.nn(mesg['params']['remoteip']) + self.false(mesg['params']['headers']) # Sessions populate the data too async with self.getHttpSess() as sess: # api/v1/login populates the data - with self.getStructuredAsyncLoggerStream(logname, 'api/v1/login') as stream: + with self.getLoggerStream(logname) as stream: async with sess.post(f'https://localhost:{port}/api/v1/login', json={'user': 'visi', 'passwd': 'secret'}) as resp: self.eq(resp.status, 200) - self.true(await stream.wait(6)) - mesg = get_mesg(stream) - self.eq(mesg['synapse'].get('uri'), '/api/v1/login') - self.eq(mesg['synapse'].get('username'), 'visi') - self.eq(mesg['synapse'].get('user'), visiiden) + mesg = stream.jsonlines()[0] + self.eq(mesg['params'].get('path'), '/api/v1/login') + self.eq(mesg['user'], visiiden) + self.eq(mesg['username'], 'visi') # session cookie loging populates the data upon reuse - with self.getStructuredAsyncLoggerStream(logname, 'api/v1/auth/users') as stream: + 
with self.getLoggerStream(logname) as stream: async with sess.get(f'https://localhost:{port}/api/v1/auth/users') as resp: self.eq(resp.status, 200) - self.true(await stream.wait(6)) - mesg = get_mesg(stream) - self.eq(mesg['synapse'].get('uri'), '/api/v1/auth/users') - self.eq(mesg['synapse'].get('username'), 'visi') - self.eq(mesg['synapse'].get('user'), visiiden) + mesg = stream.jsonlines()[0] + self.eq(mesg['params'].get('path'), '/api/v1/auth/users') + self.eq(mesg['user'], visiiden) + self.eq(mesg['username'], 'visi') async with self.getTestCore(conf={'https:parse:proxy:remoteip': True}) as core: @@ -1821,40 +1811,35 @@ def get_mesg(stream: s_tests.AsyncStreamEvent) -> dict: # Basic-auth - with self.getStructuredAsyncLoggerStream(logname, 'api/v1/auth/adduser') as stream: + with self.getLoggerStream(logname) as stream: async with sess.post(f'https://root:root@localhost:{port}/api/v1/auth/adduser', json=info, headers={'X-Forwarded-For': '1.2.3.4'}) as resp: item = await resp.json() self.nn(item.get('result').get('iden')) self.eq(resp.status, 200) - self.true(await stream.wait(6)) - mesg = get_mesg(stream) - self.eq(mesg['synapse'].get('uri'), '/api/v1/auth/adduser') - self.eq(mesg['synapse'].get('username'), 'root') - self.eq(mesg['synapse'].get('user'), core.auth.rootuser.iden) - self.eq(mesg['synapse'].get('remoteip'), '1.2.3.4') - self.isin('(root)', mesg.get('message')) - self.isin('200 POST /api/v1/auth/adduser', mesg.get('message')) + mesg = stream.jsonlines()[0] + self.eq(mesg['params'].get('path'), '/api/v1/auth/adduser') + self.eq(mesg['params'].get('remoteip'), '1.2.3.4') + self.eq(mesg['username'], 'root') + self.eq(mesg['user'], core.auth.rootuser.iden) info = {'name': 'charles', 'passwd': 'secret', 'admin': True} - with self.getStructuredAsyncLoggerStream(logname, 'api/v1/auth/adduser') as stream: + with self.getLoggerStream(logname) as stream: async with sess.post(f'https://root:root@localhost:{port}/api/v1/auth/adduser', json=info, 
headers={'X-Real-Ip': '8.8.8.8'}) as resp: item = await resp.json() self.nn(item.get('result').get('iden')) self.eq(resp.status, 200) - self.true(await stream.wait(6)) - - mesg = get_mesg(stream) - self.eq(mesg['synapse'].get('uri'), '/api/v1/auth/adduser') - self.eq(mesg['synapse'].get('username'), 'root') - self.eq(mesg['synapse'].get('user'), core.auth.rootuser.iden) - self.eq(mesg['synapse'].get('remoteip'), '8.8.8.8') - self.isin('(root)', mesg.get('message')) - self.isin('200 POST /api/v1/auth/adduser', mesg.get('message')) + + mesg = stream.jsonlines()[0] + self.eq(mesg['params'].get('status'), 200) + self.eq(mesg['params'].get('path'), '/api/v1/auth/adduser') + self.eq(mesg['params'].get('remoteip'), '8.8.8.8') + self.eq(mesg['username'], 'root') + self.eq(mesg['user'], core.auth.rootuser.iden) async def test_core_local_axon_http(self): async with self.getTestCore() as core: diff --git a/synapse/tests/test_lib_link.py b/synapse/tests/test_lib_link.py index 6a0209ed422..c25f852474b 100644 --- a/synapse/tests/test_lib_link.py +++ b/synapse/tests/test_lib_link.py @@ -121,9 +121,9 @@ async def onlink(link): self.eq(msg0, ('what', {'k': 1})) evt.set() await asyncio.sleep(0) - with self.getAsyncLoggerStream('synapse.lib.link', 'rx error') as stream: + with self.getLoggerStream('synapse.lib.link') as stream: msg1 = await link.rx() - self.true(await stream.wait(6)) + self.true(await stream.expect('rx error')) self.none(msg1) async def test_link_file(self): diff --git a/synapse/tests/test_lib_lmdbslab.py b/synapse/tests/test_lib_lmdbslab.py index 056f1fe59fa..dcbc381df8f 100644 --- a/synapse/tests/test_lib_lmdbslab.py +++ b/synapse/tests/test_lib_lmdbslab.py @@ -350,13 +350,13 @@ async def test_lmdbslab_commit_warn(self): with self.getTestDir() as dirn, patch('synapse.lib.lmdbslab.Slab.WARN_COMMIT_TIME_MS', 1), \ patch('synapse.common.now', self.simplenow): path = os.path.join(dirn, 'test.lmdb') - with self.getStructuredAsyncLoggerStream('synapse.lib.lmdbslab', 
'Commit with') as stream: + with self.getLoggerStream('synapse.lib.lmdbslab') as stream: async with await s_lmdbslab.Slab.anit(path, map_size=100000) as slab: foo = slab.initdb('foo', dupsort=True) byts = b'\x00' * 256 for i in range(10): slab.put(b'\xff\xff\xff\xff' + s_common.guid(i).encode('utf8'), byts, db=foo) - self.true(await stream.wait(timeout=1)) + self.true(await stream.expect('Commit with')) msgs = stream.jsonlines() self.gt(len(msgs), 0) diff --git a/synapse/tests/test_lib_modelrev.py b/synapse/tests/test_lib_modelrev.py index b4ae673b414..fbee1590949 100644 --- a/synapse/tests/test_lib_modelrev.py +++ b/synapse/tests/test_lib_modelrev.py @@ -389,10 +389,9 @@ async def test_modelrev_0_2_19(self): self.len(1, await core.nodes('ou:campname="operation overlord" -> ou:campaign')) self.len(1, await core.nodes('risk:vuln:type:taxonomy="cyber.int_overflow" -> risk:vuln')) - with self.getAsyncLoggerStream('synapse.lib.modelrev', - 'error re-norming risk:vuln:type=foo.bar...newp') as stream: + with self.getLoggerStream('synapse.lib.modelrev') as stream: async with self.getRegrCore('model-0.2.19-bad-risk-types') as core: - self.true(await stream.wait(timeout=6)) + self.true(await stream.expect('error re-norming risk:vuln:type=foo.bar...newp')) self.len(5, await core.nodes('risk:vuln')) self.len(4, await core.nodes('risk:vuln:type')) nodes = await core.nodes('yield $lib.lift.byNodeData(_migrated:risk:vuln:type)') diff --git a/synapse/tests/test_lib_module.py b/synapse/tests/test_lib_module.py index 0a21a363888..98ad3e12613 100644 --- a/synapse/tests/test_lib_module.py +++ b/synapse/tests/test_lib_module.py @@ -71,15 +71,15 @@ async def test_basics(self): async def test_load_failures(self): async with self.getTestCore() as core: # type: s_cortex.Cortex with self.setTstEnvars(SYN_TEST_MOD_FAIL_PRE=1) as cm: - with self.getAsyncLoggerStream('synapse.cortex', 'preCoreModuleFail') as stream: + with self.getLoggerStream('synapse.cortex') as stream: self.none(await 
core.loadCoreModule(foo_ctor)) - self.true(await stream.wait(1)) + self.true(await stream.expect('preCoreModuleFail')) self.none(core.getCoreMod(foo_ctor)) with self.setTstEnvars(SYN_TEST_MOD_FAIL_INIT=1) as cm: - with self.getAsyncLoggerStream('synapse.cortex', 'initCoreModuleFail') as stream: + with self.getLoggerStream('synapse.cortex') as stream: self.none(await core.loadCoreModule(foo_ctor)) - self.true(await stream.wait(1)) + self.true(await stream.expect('initCoreModuleFail')) self.none(core.getCoreMod(foo_ctor)) with self.getTestDir(mirror='testcore') as dirn: @@ -89,13 +89,13 @@ async def test_load_failures(self): conf = s_common.yamlload(dirn, 'cell.yaml') with self.setTstEnvars(SYN_TEST_MOD_FAIL_PRE=1) as cm: - with self.getAsyncLoggerStream('synapse.cortex', 'preCoreModuleFail') as stream: + with self.getLoggerStream('synapse.cortex') as stream: async with await s_cortex.Cortex.anit(dirn) as core: - self.true(await stream.wait(1)) + self.true(await stream.expect('preCoreModuleFail')) self.none(core.getCoreMod(foo_ctor)) with self.setTstEnvars(SYN_TEST_MOD_FAIL_INIT=1) as cm: - with self.getAsyncLoggerStream('synapse.cortex', 'initCoreModuleFail') as stream: + with self.getLoggerStream('synapse.cortex') as stream: async with await s_cortex.Cortex.anit(dirn) as core: - self.true(await stream.wait(1)) + self.true(await stream.expect('initCoreModuleFail')) self.none(core.getCoreMod(foo_ctor)) diff --git a/synapse/tests/test_lib_multislabseqn.py b/synapse/tests/test_lib_multislabseqn.py index 16c1e1a5de3..59a07f9d10e 100644 --- a/synapse/tests/test_lib_multislabseqn.py +++ b/synapse/tests/test_lib_multislabseqn.py @@ -316,10 +316,10 @@ async def test_multislabseqn_discover(self): shutil.rmtree(slab0dirn) s_common.jssave('{}', slab0dirn) - with self.getAsyncLoggerStream('synapse.lib.multislabseqn', 'non-directory') as stream: + with self.getLoggerStream('synapse.lib.multislabseqn') as stream: async with await s_multislabseqn.MultiSlabSeqn.anit(baddirn) as 
msqn: await self.agenlen(15, msqn.iter(0)) - await stream.wait(timeout=1) + self.true(await stream.expect('non-directory')) # Switcheroo @@ -344,10 +344,10 @@ async def test_multislabseqn_discover(self): slab10dirn = s_common.genpath(baddirn, f'seqn{"0" * 14}0a.lmdb') shutil.rmtree(slab10dirn) - with self.getAsyncLoggerStream('synapse.lib.multislabseqn', 'gap in indices') as stream: + with self.getLoggerStream('synapse.lib.multislabseqn') as stream: async with await s_multislabseqn.MultiSlabSeqn.anit(baddirn) as msqn: await self.agenlen(15, msqn.iter(0)) - await stream.wait(timeout=1) + self.true(await stream.expect('gap in indices')) # Wipe a seqn clean baddirn = s_common.genpath(dirn, 'bad4') diff --git a/synapse/tests/test_lib_storm.py b/synapse/tests/test_lib_storm.py index 8a79f6255c2..3ed5e32f214 100644 --- a/synapse/tests/test_lib_storm.py +++ b/synapse/tests/test_lib_storm.py @@ -1445,9 +1445,9 @@ async def get(self, name): } } - with self.getAsyncLoggerStream('synapse.cortex', 'bazfaz requirement') as stream: + with self.getLoggerStream('synapse.cortex') as stream: await core.addStormPkg(pkgdef) - self.true(await stream.wait(timeout=1)) + self.true(await stream.expect('bazfaz requirement')) pkgdef = { 'name': 'bazfaz', @@ -1459,9 +1459,9 @@ async def get(self, name): } } - with self.getAsyncLoggerStream('synapse.cortex', 'bazfaz optional requirement') as stream: + with self.getLoggerStream('synapse.cortex') as stream: await core.addStormPkg(pkgdef) - self.true(await stream.wait(timeout=1)) + self.true(await stream.expect('bazfaz optional requirement')) deps = await core.callStorm('return($lib.pkg.deps($pkgdef))', opts={'vars': {'pkgdef': pkgdef}}) self.eq({ @@ -1705,7 +1705,7 @@ async def test_storm_diff_merge(self): await visi.addRule((True, ('node', 'add')), gateiden=lowriden) - with self.getAsyncLoggerStream('synapse.lib.snap') as stream: + with self.getLoggerStream('synapse.lib.snap') as stream: await core.stormlist('ou:name | merge --apply', 
opts=altview) stream.seek(0) @@ -1725,7 +1725,7 @@ async def test_storm_diff_merge(self): newn = await core.nodes('[ ou:name=readonly ]') self.ne(oldn[0].props['.created'], newn[0].props['.created']) - with self.getAsyncLoggerStream('synapse.lib.snap') as stream: + with self.getLoggerStream('synapse.lib.snap') as stream: await core.stormlist('ou:name | merge --apply', opts=altview) stream.seek(0) @@ -1745,7 +1745,7 @@ async def test_storm_diff_merge(self): q = 'ou:name=readonly2 | movenodes --apply --srclayers $lib.view.get().layers.2.iden' await core.nodes(q, opts=altview2) - with self.getAsyncLoggerStream('synapse.lib.snap') as stream: + with self.getLoggerStream('synapse.lib.snap') as stream: await core.stormlist('ou:name | merge --apply', opts=altview2) stream.seek(0) @@ -2503,29 +2503,30 @@ async def test_storm_dmon_user_locked(self): name=hehedmon))''' ddef0 = await asvisi.callStorm(q) - with self.getAsyncLoggerStream('synapse.lib.storm', 'user is locked') as stream: + with self.getLoggerStream('synapse.lib.storm') as stream: await visi.setLocked(True) q = 'return($lib.dmon.bump($iden))' self.true(await core.callStorm(q, opts={'vars': {'iden': ddef0['iden']}})) - self.true(await stream.wait(2)) + self.true(await stream.expect('user is locked')) async def test_storm_dmon_user_autobump(self): async with self.getTestCore() as core: visi = await core.auth.addUser('visi') await visi.addRule((True, ('dmon', 'add'))) async with core.getLocalProxy(user='visi') as asvisi: - with self.getAsyncLoggerStream('synapse.lib.storm', 'Dmon query exited') as stream: + with self.getLoggerStream('synapse.lib.storm') as stream: q = '''return($lib.dmon.add(${{ $lib.print(foobar) $lib.time.sleep(10) }}, name=hehedmon))''' await asvisi.callStorm(q) + self.true(await stream.expect('Dmon query exited')) - with self.getAsyncLoggerStream('synapse.lib.storm', 'user is locked') as stream: + with self.getLoggerStream('synapse.lib.storm') as stream: await core.setUserLocked(visi.iden, True) 
- self.true(await stream.wait(2)) + self.true(await stream.expect('user is locked')) - with self.getAsyncLoggerStream('synapse.lib.storm', 'Dmon query exited') as stream: + with self.getLoggerStream('synapse.lib.storm') as stream: await core.setUserLocked(visi.iden, False) - self.true(await stream.wait(2)) + self.true(await stream.expect('Dmon query exited')) async def test_storm_dmon_caching(self): @@ -2800,7 +2801,7 @@ async def get(self, name): # because the pkg hasn't changed so no loading occurs waiter = core.waiter(1, 'core:pkg:onload:complete') - with self.getAsyncLoggerStream('synapse.cortex') as stream: + with self.getLoggerStream('synapse.cortex') as stream: msgs = await core.stormlist(f'pkg.load --ssl-noverify https://127.0.0.1:{port}/api/v1/pkgtest/yep') self.stormIsInPrint('testload @0.3.0', msgs) @@ -3192,14 +3193,14 @@ async def test_storm_iden(self): self.len(3, nodes) q = 'iden newp' - with self.getLoggerStream('synapse.lib.snap', 'Failed to decode iden') as stream: + with self.getLoggerStream('synapse.lib.snap') as stream: self.len(0, await core.nodes(q)) - self.true(stream.wait(1)) + self.true(await stream.expect('Failed to decode iden')) q = 'iden deadb33f' - with self.getLoggerStream('synapse.lib.snap', 'iden must be 32 bytes') as stream: + with self.getLoggerStream('synapse.lib.snap') as stream: self.len(0, await core.nodes(q)) - self.true(stream.wait(1)) + self.true(await stream.expect('iden must be 32 bytes')) # Runtsafety test q = 'test:str=hehe | iden $node.iden()' diff --git a/synapse/tests/test_lib_stormlib_cortex.py b/synapse/tests/test_lib_stormlib_cortex.py index 3f2dd1fa3e6..819d2ad272d 100644 --- a/synapse/tests/test_lib_stormlib_cortex.py +++ b/synapse/tests/test_lib_stormlib_cortex.py @@ -300,10 +300,10 @@ async def test_libcortex_httpapi_methods(self): # Storm query logging includes the httpapi iden in structlog data core.stormlog = True - with self.getStructuredAsyncLoggerStream('synapse.storm', 'Executing storm query') as 
stream: + with self.getLoggerStream('synapse.storm') as stream: resp = await sess.get(url) self.eq(resp.status, 200) - self.true(await stream.wait(timeout=12)) + self.true(await stream.expect('Executing storm query')) msgs = stream.jsonlines() self.eq(msgs[0]['params'].get('httpapi'), echoiden) core.stormlog = False @@ -336,11 +336,11 @@ async def test_libcortex_httpapi_methods(self): return ( $api.iden )''' test04 = await core.callStorm(q) - emsg = f'Error executing custom HTTP API {test04}: BadArg Response.reply() has already been called.' - with self.getAsyncLoggerStream('synapse.lib.httpapi', emsg) as stream: + with self.getLoggerStream('synapse.lib.httpapi') as stream: resp = await sess.get(f'https://localhost:{hport}/api/ext/testpath04') self.eq(resp.status, 200) self.eq(await resp.json(), {'hehe': 'yes!'}) + self.true(await stream.expect('Response.reply() has already been called')) async def test_libcortex_httpapi_runas_owner(self): async with self.getTestCore() as core: @@ -1306,34 +1306,32 @@ async def test_libcortex_httpapi_fsm_sadpath(self): self.eq(resp.status, 500) data = await resp.json() self.eq(data.get('code'), 'StormRuntimeError') - self.eq(data.get('mesg'), f'Extended HTTP API {iden00} never set status code.') + self.eq(data.get('mesg'), 'Extended HTTP API never set status code.') resp = await sess.get(f'https://localhost:{hport}/api/ext/bad01') self.eq(resp.status, 500) data = await resp.json() self.notin('oh', resp.headers) self.eq(data.get('code'), 'StormRuntimeError') - self.eq(data.get('mesg'), f'Extended HTTP API {iden01} never set status code.') + self.eq(data.get('mesg'), 'Extended HTTP API never set status code.') resp = await sess.get(f'https://localhost:{hport}/api/ext/bad02') self.eq(resp.status, 500) data = await resp.json() self.notin('oh', resp.headers) self.eq(data.get('code'), 'StormRuntimeError') - self.eq(data.get('mesg'), f'Extended HTTP API {iden02} must set status code before sending body.') + self.eq(data.get('mesg'), 
'Extended HTTP API must set status code before sending body.') - with self.getAsyncLoggerStream('synapse.lib.httpapi', - f'Extended HTTP API {iden03} tried to set code after sending body.') as stream: + with self.getLoggerStream('synapse.lib.httpapi') as stream: resp = await sess.get(f'https://localhost:{hport}/api/ext/bad03') - self.true(await stream.wait(timeout=6)) + self.true(await stream.expect('Extended HTTP API sent code after sending body.')) self.eq(resp.status, 201) self.eq(await resp.read(), b'text') - with self.getAsyncLoggerStream('synapse.lib.httpapi', - f'Extended HTTP API {iden04} tried to set headers after sending body.') as stream: + with self.getLoggerStream('synapse.lib.httpapi') as stream: resp = await sess.get(f'https://localhost:{hport}/api/ext/bad04') - self.true(await stream.wait(timeout=6)) + self.true(await stream.expect('Extended HTTP API set headers after sending body.')) self.eq(resp.status, 200) self.eq(await resp.read(), b'text') @@ -1343,17 +1341,17 @@ async def test_libcortex_httpapi_fsm_sadpath(self): self.eq(data.get('code'), 'BadTypeValu') self.eq(data.get('mesg'), "invalid literal for int() with base 0: 'notAnInt'") - with self.getAsyncLoggerStream('synapse.lib.httpapi', - f'Error executing Extended HTTP API {iden06}: BadTypeValu') as stream: + with self.getLoggerStream('synapse.lib.httpapi') as stream: resp = await sess.get(f'https://localhost:{hport}/api/ext/bad06') - self.true(await stream.wait(timeout=6)) + self.true(await stream.expect('Extended HTTP API encountered an error.')) + self.eq('BadTypeValu', stream.jsonlines()[0]['params']['errname']) self.eq(resp.status, 201) self.eq(await resp.json(), {}) - with self.getAsyncLoggerStream('synapse.lib.httpapi', - f'Error executing Extended HTTP API {iden07}: StormRuntimeError') as stream: + with self.getLoggerStream('synapse.lib.httpapi') as stream: resp = await sess.get(f'https://localhost:{hport}/api/ext/bad07') - self.true(await stream.wait(timeout=6)) + self.true(await 
stream.expect('Extended HTTP API encountered an error.')) + self.eq('StormRuntimeError', stream.jsonlines()[0]['params']['errname']) self.eq(resp.status, 500) data = await resp.json() self.eq(data.get('code'), 'StormRuntimeError') diff --git a/synapse/tests/test_lib_stormlib_log.py b/synapse/tests/test_lib_stormlib_log.py index fa113b2c004..1d6f6aed912 100644 --- a/synapse/tests/test_lib_stormlib_log.py +++ b/synapse/tests/test_lib_stormlib_log.py @@ -11,42 +11,38 @@ async def test_stormlib_log(self): async with self.getTestCore() as core: # Raw message - with self.getAsyncLoggerStream(logname, 'debug message') as stream: + with self.getLoggerStream(logname) as stream: await core.callStorm('$lib.log.debug("debug message")') - self.true(await stream.wait(6)) - with self.getAsyncLoggerStream(logname, 'info message') as stream: + self.true(await stream.expect('debug message')) + with self.getLoggerStream(logname) as stream: await core.callStorm('$lib.log.info("info message")') - self.true(await stream.wait(6)) - with self.getAsyncLoggerStream(logname, 'warn message') as stream: + self.true(await stream.expect('info message')) + with self.getLoggerStream(logname) as stream: await core.callStorm('$lib.log.warning("warn message")') - self.true(await stream.wait(6)) - with self.getAsyncLoggerStream(logname, 'error message') as stream: + self.true(await stream.expect('warn message')) + with self.getLoggerStream(logname) as stream: await core.callStorm('$lib.log.error("error message")') - self.true(await stream.wait(6)) + self.true(await stream.expect('error message')) # Extra without structlog handler in place has no change in results - with self.getAsyncLoggerStream(logname, 'debug message') as stream: + with self.getLoggerStream(logname) as stream: await core.callStorm('$lib.log.debug("debug message", extra=({"key": "valu"}))') - self.true(await stream.wait(6)) - stream.seek(0) - self.eq(stream.read(), 'debug message\n') + self.true(await stream.expect('debug 
message.*valu')) # Extra can be empty too - with self.getAsyncLoggerStream(logname, 'debug message') as stream: + with self.getLoggerStream(logname) as stream: await core.callStorm('$lib.log.debug("debug message", extra=({}))') - self.true(await stream.wait(6)) - stream.seek(0) - self.eq(stream.read(), 'debug message\n') + self.true(await stream.expect('debug message')) # Extra must be a dict after toprim is called on him. with self.raises(s_exc.BadArg): await core.callStorm('$lib.log.debug("debug message", extra=(foo, bar, baz))') # structlog test - with self.getStructuredAsyncLoggerStream(logname, '"key": "valu"') as stream: + with self.getLoggerStream(logname) as stream: await core.callStorm('$lib.log.debug("struct1 message")') await core.callStorm('$lib.log.debug("struct2 message", extra=({"key": "valu"}))') - self.true(await stream.wait(6)) + msgs = stream.jsonlines() self.len(2, msgs) mesg = msgs[0] diff --git a/synapse/tests/test_lib_stormlib_model.py b/synapse/tests/test_lib_stormlib_model.py index 42d36bfcc74..b4b95129b36 100644 --- a/synapse/tests/test_lib_stormlib_model.py +++ b/synapse/tests/test_lib_stormlib_model.py @@ -225,13 +225,12 @@ async def test_stormlib_model_depr(self): with self.raises(s_exc.IsDeprLocked): await core.nodes('[ou:hasalias=(*, hehe)]') - with self.getAsyncLoggerStream('synapse.lib.snap', - 'Prop ou:org:sic is locked due to deprecation') as stream: + with self.getLoggerStream('synapse.lib.snap') as stream: data = ( (('ou:org', ('t0',)), {'props': {'sic': '5678'}}), ) await core.addFeedData('syn.nodes', data) - self.true(await stream.wait(1)) + self.true(await stream.expect('Prop ou:org:sic is locked due to deprecation')) nodes = await core.nodes('ou:org=(t0,)') self.none(nodes[0].get('sic')) diff --git a/synapse/tests/test_lib_stormtypes.py b/synapse/tests/test_lib_stormtypes.py index 7307d423695..c45d89efe21 100644 --- a/synapse/tests/test_lib_stormtypes.py +++ b/synapse/tests/test_lib_stormtypes.py @@ -4922,10 +4922,10 @@ 
async def getCronJob(text): q = '{$lib.queue.get(foo).put(m3) $s=$lib.str.format("m3 {t} {i}", t=$auto.type, i=$auto.iden) $lib.log.info($s, ({"iden": $auto.iden})) }' text = f'cron.add --minute 17 {q}' async with getCronJob(text) as guid: - with self.getStructuredAsyncLoggerStream('synapse.storm.log', 'm3 cron') as stream: + with self.getLoggerStream('synapse.storm.log') as stream: unixtime += 7 * MINSECS self.eq('m3', await getNextFoo()) - self.true(await stream.wait(6)) + self.true(await stream.expect('m3 cron')) mesg = stream.jsonlines()[0] self.eq(mesg['message'], f'm3 cron {guid}') self.eq(mesg['params']['iden'], guid) diff --git a/synapse/tests/test_lib_trigger.py b/synapse/tests/test_lib_trigger.py index b13fca03eff..0accd38fc7c 100644 --- a/synapse/tests/test_lib_trigger.py +++ b/synapse/tests/test_lib_trigger.py @@ -145,10 +145,9 @@ async def test_trigger_basics(self): [ test:guid="*" +#nodeadd]''' tdef = {'cond': 'node:add', 'form': 'test:str', 'storm': q} await view.addTrigger(tdef) - with self.getAsyncLoggerStream('synapse.storm.log', 'f=') as stream: + with self.getLoggerStream('synapse.storm.log') as stream: await core.nodes('[ test:str=foo ]') - self.true(await stream.wait(6)) - self.eq(stream.getvalue().strip(), 'f=test:str v=foo') + self.true(await stream.expect('f=test:str v=foo')) self.len(1, await core.nodes('test:guid#nodeadd')) # node:del case @@ -169,10 +168,10 @@ async def test_trigger_basics(self): 'tag': 'a.*.c'} await view.addTrigger(tdef) await core.nodes('[ test:str=foo +#a.b ]') - with self.getAsyncLoggerStream('synapse.storm.log', 'a.b.c') as stream: + with self.getLoggerStream('synapse.storm.log') as stream: await core.nodes('[ test:str=foo +#a.b.c ]') - self.true(await stream.wait(6)) - self.true(stream.getvalue().strip().startswith('a.b.c\n')) + self.true(await stream.expect('a.b.c', escape=True)) + await core.nodes('[ test:str=foo +#a.b.ccc ]') self.len(1, await core.nodes('#count')) self.len(1, await 
core.nodes('test:str=a.b.c')) @@ -213,11 +212,9 @@ async def test_trigger_basics(self): 'prop': 'test:type10:intprop'} await view.addTrigger(tdef) await core.nodes('[ test:type10=1 ]') - with self.getAsyncLoggerStream('synapse.storm.log', 'pf=') as stream: + with self.getLoggerStream('synapse.storm.log') as stream: await core.nodes('[ test:type10=1 :intprop=25 ]') - self.true(await stream.wait(6)) - buf = stream.getvalue().strip() - self.eq(buf, 'pf=test:type10:intprop pn=intprop') + self.true(await stream.expect('pf=test:type10:intprop pn=intprop')) self.len(1, await core.nodes('test:guid#propset')) # Test re-setting doesn't fire @@ -249,9 +246,9 @@ async def test_trigger_basics(self): q = '+test:str~=log $s=$lib.str.format("test {t} {i}", t=$auto.type, i=$auto.iden) $lib.log.info($s, ({"iden": $auto.iden}))' tdef = {'cond': 'node:add', 'form': 'test:str', 'storm': q} await view.addTrigger(tdef) - with self.getStructuredAsyncLoggerStream('synapse.storm.log', 'test trigger') as stream: + with self.getLoggerStream('synapse.storm.log') as stream: await core.nodes('[ test:str=logit ]') - self.true(await stream.wait(6)) + self.true(await stream.expect('test trigger')) msgs = stream.jsonlines() mesg = [m for m in msgs if m['params'].get('iden') == tdef.get('iden')][0] self.eq(mesg['message'], f'test trigger {tdef.get("iden")}') diff --git a/synapse/tests/test_telepath.py b/synapse/tests/test_telepath.py index ee5a5134a95..bc5fd38447b 100644 --- a/synapse/tests/test_telepath.py +++ b/synapse/tests/test_telepath.py @@ -880,12 +880,12 @@ async def dostuff(self, x): async with await s_telepath.open(urls) as targ: - with self.getAsyncLoggerStream('synapse.telepath', 'Connect call failed') as stream: + with self.getLoggerStream('synapse.telepath') as stream: await targ.waitready() # Verify the password doesn't leak into the log - self.true(await stream.wait(2)) + self.true(await stream.expect('Connect call failed')) stream.seek(0) mesgs = stream.read() self.notin('password', 
mesgs) @@ -906,18 +906,17 @@ async def onlink(proxy, urlinfo): _url = s_telepath.zipurl(urlinfo) logger.info(f'Connected to url={_url}') - with self.getAsyncLoggerStream('synapse.tests.test_telepath', - f'Connected to url=tcp://127.0.0.1:{addr1[1]}/foo') as stream: + with self.getLoggerStream('synapse.tests.test_telepath') as stream: async with await s_telepath.open(url1, onlink=onlink) as targ: - self.true(await stream.wait(timeout=12)) + self.true(await stream.expect(f'Connected to url=tcp://127.0.0.1:{addr1[1]}/foo', escape=True)) # Coverage async def badonlink(proxy, urlinfo): raise ValueError('oopsie') - with self.getAsyncLoggerStream('synapse.telepath', 'onlink: ') as stream: + with self.getLoggerStream('synapse.telepath', 'onlink: ') as stream: async with await s_telepath.open(url1, onlink=badonlink) as targ: - self.true(await stream.wait(timeout=12)) + self.true(await stream.expect('onlink: ')) await dmon0.fini() await dmon1.fini() @@ -1004,8 +1003,7 @@ async def test_link_fini_breaking_tasks(self): async with await s_telepath.openurl(url) as proxy: - with self.getAsyncLoggerStream('synapse.daemon', - 'task sleepg') as stream: + with self.getLoggerStream('synapse.daemon') as stream: # Fire up an async generator which will yield a message then # wait for a while so that our break will tear it down @@ -1016,7 +1014,7 @@ async def test_link_fini_breaking_tasks(self): # Ensure that the sleepg function got canceled. self.true(await asyncio.wait_for(foo.sleepg_evt.wait(), timeout=6)) # Ensure we logged the cancellation. 
- self.true(await stream.wait(6)) + self.true(await stream.expect('task sleepg')) async def test_link_fini_breaking_tasks2(self): ''' diff --git a/synapse/tests/test_utils.py b/synapse/tests/test_utils.py index af323e58f21..1a0ff22a620 100644 --- a/synapse/tests/test_utils.py +++ b/synapse/tests/test_utils.py @@ -289,9 +289,9 @@ async def test_checknode(self): self.checkNode(nodes[0], (('test:comp', (1, 'newp')), {'hehe': 1, 'haha': 'test'})) with self.raises(AssertionError): self.checkNode(nodes[0], (('test:comp', (1, 'test')), {'hehe': 1, 'haha': 'newp'})) - with self.getAsyncLoggerStream('synapse.tests.utils', 'untested properties') as stream: + with self.getLoggerStream('synapse.tests.utils') as stream: self.checkNode(nodes[0], (('test:comp', (1, 'test')), {'hehe': 1})) - self.true(await stream.wait(timeout=12)) + self.true(await stream.expect('untested properties')) await self.checkNodes(core, [('test:comp', (1, 'test')),]) with self.raises(AssertionError): diff --git a/synapse/tests/utils.py b/synapse/tests/utils.py index 7e0dcd4493c..0f27db3363e 100644 --- a/synapse/tests/utils.py +++ b/synapse/tests/utils.py @@ -785,68 +785,56 @@ async def _corocall(self, *args, **kwargs): return retn -class StreamEvent(io.StringIO, threading.Event): - ''' - A combination of a io.StringIO object and a threading.Event object. - ''' +class StreamEvent(io.StringIO): + def __init__(self, *args, **kwargs): io.StringIO.__init__(self, *args, **kwargs) - threading.Event.__init__(self) - self.mesg = '' - - def setMesg(self, mesg): - ''' - Clear the internal event and set a new message that is used to set the event. - - Args: - mesg (str): The string to monitor for. 
+ self._lines = [] + self._event = asyncio.Event() - Returns: - None - ''' - self.mesg = mesg - self.clear() + def clear(self): + self._lines.clear() def write(self, s): io.StringIO.write(self, s) - if self.mesg and self.mesg in s: - self.set() + self._lines.append(s) + self._event.set() - def jsonlines(self) -> typing.List[dict]: - '''Get the messages as jsonlines. May throw Json errors if the captured stream is not jsonlines.''' - return jsonlines(self.getvalue()) + async def expect(self, text, count=1, timeout=5, escape=True): -class AsyncStreamEvent(io.StringIO, asyncio.Event): - ''' - A combination of a io.StringIO object and an asyncio.Event object. - ''' - def __init__(self, *args, **kwargs): - io.StringIO.__init__(self, *args, **kwargs) - asyncio.Event.__init__(self) - self.mesg = '' + offs = 0 + tally = 0 - def setMesg(self, mesg): - ''' - Clear the internal event and set a new message that is used to set the event. + if escape: + text = regex.escape(text) - Args: - mesg (str): The string to monitor for. + regx = regex.compile(text) - Returns: - None - ''' - self.mesg = mesg - self.clear() + async def _expect(): - def write(self, s): - io.StringIO.write(self, s) - if self.mesg and self.mesg in s: - self.set() + while True: - async def wait(self, timeout=None): - if timeout is None: - return await asyncio.Event.wait(self) - return await s_coro.event_wait(self, timeout=timeout) + if thereyet(): + return True + + await self._event.wait() + self._event.clear() + + def thereyet(): + nonlocal offs + nonlocal tally + for line in self._lines[offs:]: + offs += 1 + if regx.search(line) is not None: + tally += 1 + return tally >= count + + try: + return await s_common.wait_for(_expect(), timeout=timeout) + except TimeoutError: + logger.warning(f'Pattern [{text}] not found in...') + [logger.warning(f' {line}') for line in self._lines] + return False def jsonlines(self) -> typing.List[dict]: '''Get the messages as jsonlines. 
May throw Json errors if the captured stream is not jsonlines.''' @@ -1029,6 +1017,8 @@ def __init__(self, *args, **kwargs): if inspect.iscoroutinefunction(attr) and s.startswith('test_') and inspect.ismethod(attr): setattr(self, s, s_glob.synchelp(attr)) + s_logging.setup() + def checkNode(self, node, expected): ex_ndef, ex_props = expected self.eq(node.ndef, ex_ndef) @@ -1704,175 +1694,22 @@ def getTestFilePath(self, *names): return os.path.join(path, 'files', *names) @contextlib.contextmanager - def getLoggerStream(self, logname, mesg=''): - ''' - Get a logger and attach a io.StringIO object to the logger to capture log messages. - - Args: - logname (str): Name of the logger to get. - mesg (str): A string which, if provided, sets the StreamEvent event if a message - containing the string is written to the log. - - Examples: - Do an action and get the stream of log messages to check against:: - - with self.getLoggerStream('synapse.foo.bar') as stream: - # Do something that triggers a log message - doSomething() - - stream.seek(0) - mesgs = stream.read() - # Do something with messages + def getLoggerStream(self, name, struct=True): - Do an action and wait for a specific log message to be written:: - - with self.getLoggerStream('synapse.foo.bar', 'big badda boom happened') as stream: - # Do something that triggers a log message - doSomething() - stream.wait(timeout=10) # Wait for the mesg to be written to the stream - - stream.seek(0) - mesgs = stream.read() - # Do something with messages - - You can also reset the message and wait for another message to occur:: - - with self.getLoggerStream('synapse.foo.bar', 'big badda boom happened') as stream: - # Do something that triggers a log message - doSomething() - stream.wait(timeout=10) - stream.setMesg('yo dawg') # This will now wait for the 'yo dawg' string to be written. 
- stream.wait(timeout=10) - - stream.seek(0) - mesgs = stream.read() - # Do something with messages - - Notes: - This **only** captures logs for the current process. - - Yields: - StreamEvent: A StreamEvent object - ''' stream = StreamEvent() - stream.setMesg(mesg) - handler = logging.StreamHandler(stream) - slogger = logging.getLogger(logname) - slogger.addHandler(handler) - level = slogger.level - slogger.setLevel('DEBUG') - try: - yield stream - except Exception: # pragma: no cover - raise - finally: - slogger.removeHandler(handler) - slogger.setLevel(level) - - @contextlib.contextmanager - def getAsyncLoggerStream(self, logname, mesg='') -> contextlib.AbstractContextManager[StreamEvent, None, None]: - ''' - Async version of getLoggerStream. - - Args: - logname (str): Name of the logger to get. - mesg (str): A string which, if provided, sets the StreamEvent event if a message containing the string is written to the log. - - Notes: - The event object mixed in for the AsyncStreamEvent is a asyncio.Event object. - This requires the user to await the Event specific calls as neccesary. - - Examples: - Do an action and wait for a specific log message to be written:: - - with self.getAsyncLoggerStream('synapse.foo.bar', - 'big badda boom happened') as stream: - # Do something that triggers a log message - await doSomething() - # Wait for the mesg to be written to the stream - await stream.wait(timeout=10) - - stream.seek(0) - mesgs = stream.read() - # Do something with messages - - Returns: - AsyncStreamEvent: An AsyncStreamEvent object. 
- ''' - stream = AsyncStreamEvent() - stream.setMesg(mesg) - handler = logging.StreamHandler(stream) - slogger = logging.getLogger(logname) - slogger.addHandler(handler) - level = slogger.level - slogger.setLevel('DEBUG') - try: - yield stream - except Exception: # pragma: no cover - raise - finally: - slogger.removeHandler(handler) - slogger.setLevel(level) - - @contextlib.contextmanager - def getStructuredAsyncLoggerStream(self, logname, mesg='') -> contextlib.AbstractContextManager[AsyncStreamEvent, None, None]: - ''' - Async version of getLoggerStream which uses structured logging. - - Args: - logname (str): Name of the logger to get. - mesg (str): A string which, if provided, sets the StreamEvent event if a message containing the string is written to the log. - - Notes: - The event object mixed in for the AsyncStreamEvent is a asyncio.Event object. - This requires the user to await the Event specific calls as needed. - The messages written to the stream will be JSON lines. - - Examples: - Do an action and wait for a specific log message to be written:: - - with self.getStructuredAsyncLoggerStream('synapse.foo.bar', - '"some JSON string"') as stream: - # Do something that triggers a log message - await doSomething() - # Wait for the mesg to be written to the stream - await stream.wait(timeout=10) - - msgs = stream.jsonlines() - # Do something with messages - - Returns: - AsyncStreamEvent: An AsyncStreamEvent object. 
- ''' - stream = AsyncStreamEvent() - stream.setMesg(mesg) - handler = logging.StreamHandler(stream) - slogger = logging.getLogger(logname) - formatter = s_logging.Formatter() - handler.setFormatter(formatter) - slogger.addHandler(handler) - level = slogger.level - slogger.setLevel('DEBUG') - try: - yield stream - except Exception: # pragma: no cover - raise - finally: - slogger.removeHandler(handler) - slogger.setLevel(level) - - @contextlib.contextmanager - def getLogStream(self, name, level='DEBUG'): - - stream = AsyncStreamEvent() logger = logging.getLogger(name) oldlevel = logger.level handler = logging.StreamHandler(stream) - handler.setFormatter(s_logging.Formatter()) - logger.setLevel(level) + fmtclass = s_logging.Formatter + if not struct: + fmtclass = s_logging.TextFormatter + + handler.setFormatter(fmtclass()) + + logger.setLevel(logging.DEBUG) logger.addHandler(handler) try: From efc13c03d6bda31862b09186efeb2a7545dd5654 Mon Sep 17 00:00:00 2001 From: visi Date: Wed, 26 Feb 2025 09:58:58 -0500 Subject: [PATCH 32/52] wip --- synapse/lib/lmdbslab.py | 3 ++- synapse/lib/logging.py | 22 ++++++++++++++-------- synapse/tests/test_common.py | 4 ++-- synapse/tests/test_cortex.py | 4 ++-- synapse/tests/test_init.py | 18 ------------------ synapse/tests/test_lib_dyndeps.py | 4 ++-- synapse/tests/test_lib_lmdbslab.py | 16 +++++++++------- synapse/tests/test_lib_stormlib_log.py | 3 ++- synapse/tests/test_lib_stormlib_storm.py | 13 +++++-------- synapse/tests/test_lib_trigger.py | 2 +- synapse/tests/test_model_dns.py | 10 ++++------ synapse/tests/test_model_infotech.py | 5 ++--- synapse/tests/test_telepath.py | 2 +- synapse/tests/test_utils.py | 15 +++------------ 14 files changed, 49 insertions(+), 72 deletions(-) delete mode 100644 synapse/tests/test_init.py diff --git a/synapse/lib/lmdbslab.py b/synapse/lib/lmdbslab.py index f50569c7e4e..2631d883ceb 100644 --- a/synapse/lib/lmdbslab.py +++ b/synapse/lib/lmdbslab.py @@ -18,6 +18,7 @@ import synapse.lib.cache as 
s_cache import synapse.lib.const as s_const import synapse.lib.nexus as s_nexus +import synapse.lib.logging as s_logging import synapse.lib.msgpack as s_msgpack import synapse.lib.thishost as s_thishost import synapse.lib.thisplat as s_thisplat @@ -1804,7 +1805,7 @@ def forcecommit(self): } mesg = f'Commit with {xactopslen} items in {self!r} took {delta} ms - performance may be degraded.' - logger.warning(mesg, extra={'synapse': extra}) + logger.warning(mesg, extra=s_logging.getLogExtra(**extra)) self._initCoXact() return True diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index e9df51dbe93..f7f9712b960 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -75,14 +75,20 @@ def genLogInfo(self, record): if hasattr(record, 'loginfo'): loginfo.update(record.loginfo) - if (user := s_scope.get('user')) is not None: - loginfo['user'] = user.iden - loginfo['username'] = user.name - - elif (sess := s_scope.get('sess')) is not None: - if sess.user is not None: - loginfo['user'] = sess.user.iden - loginfo['username'] = sess.user.name + try: + + if (user := s_scope.get('user')) is not None: + loginfo['user'] = user.iden + loginfo['username'] = user.name + + elif (sess := s_scope.get('sess')) is not None: + if sess.user is not None: + loginfo['user'] = sess.user.iden + loginfo['username'] = sess.user.name + + except RuntimeError: + # if there is no running loop, there can be no scope vars... 
+ pass if record.exc_info: loginfo['err'] = s_common.err(record.exc_info[1], fulltb=True) diff --git a/synapse/tests/test_common.py b/synapse/tests/test_common.py index cbdca7677b2..ec07ba11194 100644 --- a/synapse/tests/test_common.py +++ b/synapse/tests/test_common.py @@ -436,9 +436,9 @@ def test_sslctx(self): with self.getTestDir(mirror='certdir') as dirn: cadir = s_common.genpath(dirn, 'cas') os.makedirs(s_common.genpath(cadir, 'newp')) - with self.getLoggerStream('synapse.common', f'Error loading {cadir}/ca.key') as stream: + with self.getLoggerStream('synapse.common') as stream: ctx = s_common.getSslCtx(cadir) - self.true(stream.wait(10)) + self.true(await stream.expect(f'Error loading {cadir}/ca.key')) ca_subjects = {cert.get('subject') for cert in ctx.get_ca_certs()} self.isin(((('commonName', 'test'),),), ca_subjects) diff --git a/synapse/tests/test_cortex.py b/synapse/tests/test_cortex.py index 3a0134c9832..ce44ae98d3f 100644 --- a/synapse/tests/test_cortex.py +++ b/synapse/tests/test_cortex.py @@ -6273,7 +6273,7 @@ async def test_cortex_storm_dmon_view(self): q = '''$q = $lib.queue.get(dmon) $q.puts((1, 3, 5))''' with self.getLoggerStream('synapse.lib.storm', struct=False) as stream: await core.nodes(q) - self.true(await stream.expect("made ('test:int', 5)", escape=True)) + self.true(await stream.expect("made ('test:int', 5)")) nodes = await core.nodes('test:int', opts={'view': view2_iden}) self.len(3, nodes) @@ -6302,7 +6302,7 @@ async def test_cortex_storm_dmon_view(self): q = '''$q = $lib.queue.get(dmon2) $q.puts((1, 3, 5))''' with self.getLoggerStream('synapse.lib.storm', struct=False) as stream: await core.nodes(q) - self.true(await stream.expect("made ('test:str', '5')", escape=True)) + self.true(await stream.expect("made ('test:str', '5')")) nodes = await core.nodes('test:str', opts={'view': view2_iden}) self.len(3, nodes) diff --git a/synapse/tests/test_init.py b/synapse/tests/test_init.py deleted file mode 100644 index 01a8ba75410..00000000000 
--- a/synapse/tests/test_init.py +++ /dev/null @@ -1,18 +0,0 @@ - -import synapse.tests.utils as s_t_utils - -class InitTest(s_t_utils.SynTest): - pass - - ''' - def test_init_modules(self): - os.environ['SYN_MODULES'] = 'fakenotrealmod , badnothere,math' - msg = 'SYN_MODULES failed: badnothere (NoSuchDyn: name=\'badnothere\')' - with self.getLoggerStream('synapse', msg) as stream: - imp.reload(synapse) - self.true(stream.wait(10)) - - stream.seek(0) - self.isin(msg, stream.read()) - self.isin(('math', 2.0, None), synapse.lib.modules.call('sqrt', 4)) - ''' diff --git a/synapse/tests/test_lib_dyndeps.py b/synapse/tests/test_lib_dyndeps.py index 7e28db01ae9..75092a1c3fa 100644 --- a/synapse/tests/test_lib_dyndeps.py +++ b/synapse/tests/test_lib_dyndeps.py @@ -14,9 +14,9 @@ def woot(x, y=30): class DynDepsTest(s_t_utils.SynTest): def test_dyndeps_dynmod(self): - with self.getLoggerStream('synapse.lib.dyndeps', 'Failed to import "- -"') as stream: + with self.getLoggerStream('synapse.lib.dyndeps') as stream: self.none(s_dyndeps.getDynMod('- -')) - self.true(stream.wait(1)) + self.true(stream.expect('Failed to import "- -"')) self.nn(s_dyndeps.getDynMod('sys')) def test_dyndeps_dynloc(self): diff --git a/synapse/tests/test_lib_lmdbslab.py b/synapse/tests/test_lib_lmdbslab.py index dcbc381df8f..e0370d6b55e 100644 --- a/synapse/tests/test_lib_lmdbslab.py +++ b/synapse/tests/test_lib_lmdbslab.py @@ -347,8 +347,11 @@ def simplenow(self): return self._nowtime async def test_lmdbslab_commit_warn(self): - with self.getTestDir() as dirn, patch('synapse.lib.lmdbslab.Slab.WARN_COMMIT_TIME_MS', 1), \ - patch('synapse.common.now', self.simplenow): + + with (self.getTestDir() as dirn, + patch('synapse.lib.lmdbslab.Slab.WARN_COMMIT_TIME_MS', 1), + patch('synapse.common.now', self.simplenow)): + path = os.path.join(dirn, 'test.lmdb') with self.getLoggerStream('synapse.lib.lmdbslab') as stream: async with await s_lmdbslab.Slab.anit(path, map_size=100000) as slab: @@ -356,20 +359,19 @@ 
async def test_lmdbslab_commit_warn(self): byts = b'\x00' * 256 for i in range(10): slab.put(b'\xff\xff\xff\xff' + s_common.guid(i).encode('utf8'), byts, db=foo) - self.true(await stream.expect('Commit with')) msgs = stream.jsonlines() self.gt(len(msgs), 0) - self.nn(msgs[0]['synapse'].get('delta')) - self.nn(msgs[0]['synapse'].get('path')) - self.nn(msgs[0]['synapse'].get('xactopslen')) + self.nn(msgs[0]['params'].get('delta')) + self.nn(msgs[0]['params'].get('path')) + self.nn(msgs[0]['params'].get('xactopslen')) self.sorteq([ 'vm.swappiness', 'vm.dirty_expire_centisecs', 'vm.dirty_writeback_centisecs', 'vm.dirty_background_ratio', 'vm.dirty_ratio', - ], msgs[0]['synapse'].get('sysctls', {}).keys()) + ], msgs[0]['params'].get('sysctls', {}).keys()) async def test_lmdbslab_commit_over_max_xactops(self): diff --git a/synapse/tests/test_lib_stormlib_log.py b/synapse/tests/test_lib_stormlib_log.py index 1d6f6aed912..78dbee9694d 100644 --- a/synapse/tests/test_lib_stormlib_log.py +++ b/synapse/tests/test_lib_stormlib_log.py @@ -27,7 +27,8 @@ async def test_stormlib_log(self): # Extra without structlog handler in place has no change in results with self.getLoggerStream(logname) as stream: await core.callStorm('$lib.log.debug("debug message", extra=({"key": "valu"}))') - self.true(await stream.expect('debug message.*valu')) + self.true(await stream.expect('debug message')) + self.eq('valu', stream.jsonlines()[0]['params']['key']) # Extra can be empty too with self.getLoggerStream(logname) as stream: diff --git a/synapse/tests/test_lib_stormlib_storm.py b/synapse/tests/test_lib_stormlib_storm.py index b1813c38dde..28db0462a3d 100644 --- a/synapse/tests/test_lib_stormlib_storm.py +++ b/synapse/tests/test_lib_stormlib_storm.py @@ -53,14 +53,11 @@ async def test_lib_stormlib_storm_eval(self): self.len(1, await core.nodes('test:str=omg')) # Check that we saw the logs - stream.seek(0) - data = stream.read() - - mesg = 'Executing storm query {return( $lib.storm.eval($q) )} as 
[root]' - self.isin(mesg, data) - - mesg = f'Executing storm query via $lib.storm.eval() {{{q}}} as [root]' - self.isin(mesg, data) + rows = stream.jsonlines() + self.eq(rows[0]['user'], core.auth.rootuser.iden) + self.eq(rows[0]['params']['text'], 'return( $lib.storm.eval($q) )') + self.eq(rows[1]['user'], core.auth.rootuser.iden) + self.eq(rows[1]['params']['text'], q) async def test_lib_stormlib_storm(self): diff --git a/synapse/tests/test_lib_trigger.py b/synapse/tests/test_lib_trigger.py index 0accd38fc7c..320c41bb122 100644 --- a/synapse/tests/test_lib_trigger.py +++ b/synapse/tests/test_lib_trigger.py @@ -170,7 +170,7 @@ async def test_trigger_basics(self): await core.nodes('[ test:str=foo +#a.b ]') with self.getLoggerStream('synapse.storm.log') as stream: await core.nodes('[ test:str=foo +#a.b.c ]') - self.true(await stream.expect('a.b.c', escape=True)) + self.true(await stream.expect('a.b.c')) await core.nodes('[ test:str=foo +#a.b.ccc ]') self.len(1, await core.nodes('#count')) diff --git a/synapse/tests/test_model_dns.py b/synapse/tests/test_model_dns.py index d9bf555a548..219e0ed285d 100644 --- a/synapse/tests/test_model_dns.py +++ b/synapse/tests/test_model_dns.py @@ -176,16 +176,14 @@ async def test_model_dns_request(self): q = '[inet:dns:query=(tcp://1.2.3.4, "foo*.haha.com", 1)]' self.len(1, await core.nodes(q)) q = 'inet:dns:query=(tcp://1.2.3.4, "", 1) :name -> inet:fqdn' - with self.getLoggerStream('synapse.lib.ast', - 'Cannot generate fqdn index bytes for a empty string') as stream: + with self.getLoggerStream('synapse.lib.ast') as stream: self.len(0, await core.nodes(q)) - self.true(stream.wait(1)) + self.true(await stream.expect('Cannot generate fqdn index bytes for a empty string')) q = 'inet:dns:query=(tcp://1.2.3.4, "foo*.haha.com", 1) :name -> inet:fqdn' - with self.getLoggerStream('synapse.lib.ast', - 'Wild card may only appear at the beginning') as stream: + with self.getLoggerStream('synapse.lib.ast') as stream: self.len(0, await 
core.nodes(q)) - self.true(stream.wait(1)) + self.true(await stream.expect('Wild card may only appear at the beginning')) async def test_forms_dns_simple(self): diff --git a/synapse/tests/test_model_infotech.py b/synapse/tests/test_model_infotech.py index 77b83c39484..cbb8667c5cd 100644 --- a/synapse/tests/test_model_infotech.py +++ b/synapse/tests/test_model_infotech.py @@ -985,15 +985,14 @@ async def test_it_forms_prodsoft(self): self.none(node.get('vers:norm')) self.none(node.get('semver')) - with self.getLoggerStream('synapse.models.infotech', - 'Unable to parse string as a semver') as stream: + with self.getLoggerStream('synapse.models.infotech') as stream: nodes = await core.nodes('[it:prod:softver=* :vers=$valu]', opts={'vars': {'valu': 'alpha'}}) self.len(1, nodes) node = nodes[0] self.eq(node.get('vers'), 'alpha') self.none(node.get('semver')) - self.true(stream.is_set()) + self.true(await stream.expect('Unable to parse string as a semver')) async def test_it_form_callbacks(self): async with self.getTestCore() as core: diff --git a/synapse/tests/test_telepath.py b/synapse/tests/test_telepath.py index bc5fd38447b..252a541ac31 100644 --- a/synapse/tests/test_telepath.py +++ b/synapse/tests/test_telepath.py @@ -908,7 +908,7 @@ async def onlink(proxy, urlinfo): with self.getLoggerStream('synapse.tests.test_telepath') as stream: async with await s_telepath.open(url1, onlink=onlink) as targ: - self.true(await stream.expect(f'Connected to url=tcp://127.0.0.1:{addr1[1]}/foo', escape=True)) + self.true(await stream.expect(f'Connected to url=tcp://127.0.0.1:{addr1[1]}/foo')) # Coverage async def badonlink(proxy, urlinfo): diff --git a/synapse/tests/test_utils.py b/synapse/tests/test_utils.py index 1a0ff22a620..26c3992f1e0 100644 --- a/synapse/tests/test_utils.py +++ b/synapse/tests/test_utils.py @@ -107,7 +107,7 @@ def test_syntest_logstream(self): mesgs = stream.read() self.isin('ruh roh', mesgs) - def test_syntest_logstream_event(self): + async def 
test_syntest_logstream_event(self): @s_common.firethread def logathing(mesg): @@ -115,9 +115,9 @@ def logathing(mesg): logger.error(mesg) logger.error('notthere') - with self.getLoggerStream('synapse.tests.test_utils', 'Test Message') as stream: + with self.getLoggerStream('synapse.tests.test_utils') as stream: thr = logathing('StreamEvent Test Message') - self.true(stream.wait(10)) + self.true(await stream.expect('Test Message')) thr.join() stream.seek(0) @@ -125,15 +125,6 @@ def logathing(mesg): self.isin('StreamEvent Test Message', mesgs) self.notin('notthere', mesgs) - with self.getLoggerStream('synapse.tests.test_utils', 'Test Message') as stream: - thr = logathing(json.dumps({'mesg': 'Test Message'})) - self.true(stream.wait(10)) - thr.join() - - msgs = stream.jsonlines() - self.len(1, msgs) - self.eq(msgs[0], {'mesg': 'Test Message'}) - def test_syntest_envars(self): os.environ['foo'] = '1' os.environ['bar'] = '2' From c41625b80f959595ff00e393f58871318a7bbb34 Mon Sep 17 00:00:00 2001 From: visi Date: Wed, 26 Feb 2025 10:44:58 -0500 Subject: [PATCH 33/52] wip --- synapse/lib/cell.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/synapse/lib/cell.py b/synapse/lib/cell.py index e7b9be43080..6841079b17b 100644 --- a/synapse/lib/cell.py +++ b/synapse/lib/cell.py @@ -3289,11 +3289,11 @@ def _log_web_request(self, handler: s_httpapi.Handler) -> None: remoteip=handler.request.remote_ip, took=int(1000 * handler.request.request_time())) - if handler.web_useriden is not None: - extra['loginfo']['user'] = handler.web_useriden + if hasattr(handler, 'web_useriden') and handler.web_useriden is not None: + extra['loginfo']['user'] = handler.web_useriden - if handler.web_username is not None: - extra['loginfo']['username'] = handler.web_username + if hasattr(handler, 'web_username') and handler.web_username is not None: + extra['loginfo']['username'] = handler.web_username log_method('HTTP API Request', extra=extra) From 
1c807f4e08cb55fb58380954417b23ca71356311 Mon Sep 17 00:00:00 2001 From: visi Date: Wed, 26 Feb 2025 10:59:22 -0500 Subject: [PATCH 34/52] wip --- synapse/tests/test_common.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/tests/test_common.py b/synapse/tests/test_common.py index ec07ba11194..86d8a03b8fc 100644 --- a/synapse/tests/test_common.py +++ b/synapse/tests/test_common.py @@ -432,7 +432,7 @@ def test_jsonsafe(self): with self.raises(eret): s_common.reqJsonSafeStrict(item) - def test_sslctx(self): + async def test_sslctx(self): with self.getTestDir(mirror='certdir') as dirn: cadir = s_common.genpath(dirn, 'cas') os.makedirs(s_common.genpath(cadir, 'newp')) From 74c0a59f347bebf637cb9c22c87ecf5becf4e436 Mon Sep 17 00:00:00 2001 From: visi Date: Wed, 26 Feb 2025 13:01:40 -0500 Subject: [PATCH 35/52] wip --- synapse/axon.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/synapse/axon.py b/synapse/axon.py index 667340c0fd5..e5e0e2f5ebf 100644 --- a/synapse/axon.py +++ b/synapse/axon.py @@ -896,7 +896,7 @@ async def _axonHealth(self, health): async def _migrateAxonMetrics(self): extra = self.getLogExtra() - logger.warning('migrating Axon metrics data out of hive', extra=extra) + logger.warning('Migrating Axon metrics data out of hive.', extra=extra) async with await self.hive.open(('axon', 'metrics')) as hivenode: axonmetrics = await hivenode.dict() @@ -1515,7 +1515,7 @@ async def jsonlines(self, sha256, errors='ignore'): except json.JSONDecodeError as e: # TODO: this feels like it should not be a log... 
extra = self.getLogExtra(sha256=sha256, err=str(e)) - logger.exception('Bad JSON line encountered', extra=extra) + logger.exception('Bad JSON line encountered.', extra=extra) mesg = f'Bad JSON line while processing {sha256}: {e}' raise s_exc.BadJsonText(mesg=mesg, sha256=sha256) from None @@ -1669,7 +1669,7 @@ async def postfiles(self, fields, url, params=None, headers=None, method='POST', except Exception as e: extra = self.getLogExtra(url=s_urlhelp.sanitizeUrl(url)) - logger.exception(f'Error POSTing file', extra=extra) + logger.exception('Axon file HTTP POST request failed.', extra=extra) err = s_common.err(e) errmsg = err[1].get('mesg') if errmsg: @@ -1717,7 +1717,7 @@ async def wput(self, sha256, url, params=None, headers=None, method='PUT', ssl=T except Exception as e: extra = self.getLogExtra(sha256=sha256, url=s_urlhelp.sanitizeUrl(url)) - logger.exception('axon.wput() error streaming blob', extra=extra) + logger.exception('Axon file HTTP PUT request failed.', extra=extra) err = s_common.err(e) errmsg = err[1].get('mesg') if errmsg: @@ -1856,7 +1856,7 @@ async def wget(self, url, params=None, headers=None, json=None, body=None, metho except Exception as e: extra = self.getLogExtra(url=s_urlhelp.sanitizeUrl(url)) - logger.exception('axon.wget() failed', extra=extra) + logger.exception('Axon failed to retrieve URL.', extra=extra) err = s_common.err(e) errmsg = err[1].get('mesg') if errmsg: From 85ecf63210eac7076263ba2f8f99e92eaa4e5603 Mon Sep 17 00:00:00 2001 From: visi Date: Wed, 26 Feb 2025 14:44:40 -0500 Subject: [PATCH 36/52] wip --- synapse/tests/test_cmds_cortex.py | 2 +- synapse/tests/test_common.py | 2 +- synapse/tests/test_cortex.py | 14 +++++----- synapse/tests/test_daemon.py | 10 +++---- synapse/tests/test_datamodel.py | 10 +++---- synapse/tests/test_lib_agenda.py | 12 ++++----- synapse/tests/test_lib_aha.py | 4 +-- synapse/tests/test_lib_boss.py | 2 +- synapse/tests/test_lib_cell.py | 32 +++++++++++------------ synapse/tests/test_lib_config.py | 2 
+- synapse/tests/test_lib_dyndeps.py | 2 +- synapse/tests/test_lib_httpapi.py | 12 ++++----- synapse/tests/test_lib_link.py | 2 +- synapse/tests/test_lib_modelrev.py | 2 +- synapse/tests/test_lib_module.py | 8 +++--- synapse/tests/test_lib_multislabseqn.py | 4 +-- synapse/tests/test_lib_storm.py | 16 ++++++------ synapse/tests/test_lib_stormlib_cortex.py | 12 ++++----- synapse/tests/test_lib_stormlib_log.py | 12 ++++----- synapse/tests/test_lib_stormlib_model.py | 2 +- synapse/tests/test_lib_stormtypes.py | 2 +- synapse/tests/test_lib_trigger.py | 8 +++--- synapse/tests/test_model_dns.py | 4 +-- synapse/tests/test_model_infotech.py | 2 +- synapse/tests/test_telepath.py | 8 +++--- synapse/tests/test_utils.py | 4 +-- synapse/tests/utils.py | 16 ++++++++++-- 27 files changed, 109 insertions(+), 97 deletions(-) diff --git a/synapse/tests/test_cmds_cortex.py b/synapse/tests/test_cmds_cortex.py index 6f360541592..38326a0ee77 100644 --- a/synapse/tests/test_cmds_cortex.py +++ b/synapse/tests/test_cmds_cortex.py @@ -333,7 +333,7 @@ def check_locs_cleanup(cobj): cmdr.locs['log:fmt'] = 'newp' with self.getLoggerStream('synapse.cmds.cortex') as stream: await cmdr.runCmdLine('storm test:str') - self.true(await stream.expect('Unknown encoding format: newp')) + await stream.expect('Unknown encoding format: newp') await cmdr.fini() diff --git a/synapse/tests/test_common.py b/synapse/tests/test_common.py index 86d8a03b8fc..33ca8ad4798 100644 --- a/synapse/tests/test_common.py +++ b/synapse/tests/test_common.py @@ -438,7 +438,7 @@ async def test_sslctx(self): os.makedirs(s_common.genpath(cadir, 'newp')) with self.getLoggerStream('synapse.common') as stream: ctx = s_common.getSslCtx(cadir) - self.true(await stream.expect(f'Error loading {cadir}/ca.key')) + await stream.expect(f'Error loading {cadir}/ca.key') ca_subjects = {cert.get('subject') for cert in ctx.get_ca_certs()} self.isin(((('commonName', 'test'),),), ca_subjects) diff --git a/synapse/tests/test_cortex.py 
b/synapse/tests/test_cortex.py index c72ff83ddae..bdf10300fb7 100644 --- a/synapse/tests/test_cortex.py +++ b/synapse/tests/test_cortex.py @@ -1171,7 +1171,7 @@ async def test_cortex_callstorm(self): except asyncio.TimeoutError: logger.exception('Woohoo!') - self.true(await stream.expect('callStorm cancelled')) + await stream.expect('callStorm cancelled') host, port = await core.addHttpsPort(0, host='127.0.0.1') @@ -1237,7 +1237,7 @@ async def test_cortex_storm_dmon_log(self): $que.get() return($ddef.iden) ''') - self.true(await stream.expect('Running dmon')) + await stream.expect('Running dmon') mesg = stream.jsonlines()[0] self.eq(mesg.get('message'), f'Running dmon {iden}') @@ -2912,7 +2912,7 @@ async def test_storm_fromtags(self): # which is not valid with self.getLoggerStream('synapse.lib.ast') as stream: self.len(0, await core.nodes('syn:tag=foo.bar -> test:str:tick')) - self.true(await stream.expect('Unknown time format')) + await stream.expect('Unknown time format') async def test_storm_tagtags(self): @@ -5882,7 +5882,7 @@ async def test_cortex_mirror_culled(self): with self.getLoggerStream('synapse.lib.nexus') as stream: async with self.getTestCore(dirn=path02, conf={'mirror': url01}) as core02: - self.true(await stream.expect('offset is out of sync')) + await stream.expect('offset is out of sync') self.true(core02.nexsroot.isfini) # restore mirror @@ -6273,7 +6273,7 @@ async def test_cortex_storm_dmon_view(self): q = '''$q = $lib.queue.get(dmon) $q.puts((1, 3, 5))''' with self.getLoggerStream('synapse.lib.storm', struct=False) as stream: await core.nodes(q) - self.true(await stream.expect("made ('test:int', 5)")) + await stream.expect("made ('test:int', 5)") nodes = await core.nodes('test:int', opts={'view': view2_iden}) self.len(3, nodes) @@ -6302,7 +6302,7 @@ async def test_cortex_storm_dmon_view(self): q = '''$q = $lib.queue.get(dmon2) $q.puts((1, 3, 5))''' with self.getLoggerStream('synapse.lib.storm', struct=False) as stream: await core.nodes(q) - 
self.true(await stream.expect("made ('test:str', '5')")) + await stream.expect("made ('test:str', '5')") nodes = await core.nodes('test:str', opts={'view': view2_iden}) self.len(3, nodes) @@ -6318,7 +6318,7 @@ async def test_cortex_storm_dmon_view(self): with self.getLoggerStream('synapse.lib.storm') as stream: async with self.getTestCore(dirn=dirn) as core: - self.true(await stream.expect('Dmon View is invalid. Stopping Dmon')) + await stream.expect('Dmon View is invalid. Stopping Dmon') msgs = await core.stormlist('dmon.list') self.stormIsInPrint('fatal error: invalid view', msgs) diff --git a/synapse/tests/test_daemon.py b/synapse/tests/test_daemon.py index 310fb2d7615..a03fc0f22ff 100644 --- a/synapse/tests/test_daemon.py +++ b/synapse/tests/test_daemon.py @@ -36,7 +36,7 @@ async def test_unixsock_longpath(self): with self.raises(OSError): await dmon.listen(listpath) - self.true(await stream.expect('exceeds OS supported UNIX socket path length')) + await stream.expect('exceeds OS supported UNIX socket path length') async def test_dmon_ready(self): @@ -85,7 +85,7 @@ async def test_dmon_errors(self): async with await prox.getPoolLink() as link: with self.getLoggerStream('synapse.daemon') as stream: await link.tx(31337) - self.true(await stream.expect('Dmon.onLinkMesg Handler: mesg=')) + await stream.expect('Dmon.onLinkMesg Handler: mesg=') # Valid format; do not know what the message is. 
async with await prox.getPoolLink() as link: @@ -93,14 +93,14 @@ async def test_dmon_errors(self): emsg = "Dmon.onLinkMesg Invalid mesg: mesg=('newp', {})" with self.getLoggerStream('synapse.daemon', emsg) as stream: await link.tx(mesg) - self.true(await stream.expect(emsg)) + await stream.expect(emsg) # Invalid data casues a link to fail on rx async with await prox.getPoolLink() as link: with self.getLoggerStream('synapse.lib.link') as stream: byts = b'\x16\x03\x01\x02\x00\x01\x00\x01\xfc\x03\x03\xa6\xa3D\xd5\xdf%\xac\xa9\x92\xc3' await link.send(byts) - self.true(await stream.expect('rx error')) + await stream.expect('rx error') # bad t2:init message async with await prox.getPoolLink() as link: @@ -108,7 +108,7 @@ async def test_dmon_errors(self): emsg = "Error on t2:init:" with self.getLoggerStream('synapse.daemon', emsg) as stream: await link.tx(mesg) - self.true(await stream.expect(emsg)) + await stream.expect(emsg) class SvcApi(s_cell.CellApi, s_stormsvc.StormSvc): _storm_svc_name = 'foo' diff --git a/synapse/tests/test_datamodel.py b/synapse/tests/test_datamodel.py index 92ccb21a657..aef4619f98f 100644 --- a/synapse/tests/test_datamodel.py +++ b/synapse/tests/test_datamodel.py @@ -248,7 +248,7 @@ async def test_model_deprecation(self): mesg = 'type test:dep:comp field str uses a deprecated type test:dep:easy' with self.getLoggerStream('synapse.lib.types') as tstream: _ = await core.stormlist('[test:dep:easy=test2 :comp=(1, two)]') - self.true(await tstream.expect(mesg)) + await tstream.expect(mesg) msgs = await core.stormlist('[test:str=tehe .pdep=beep]') self.stormIsInWarn('property test:str.pdep is deprecated', msgs) @@ -257,17 +257,17 @@ async def test_model_deprecation(self): mesg = 'tag property depr is using a deprecated type test:dep:easy' with self.getLoggerStream('synapse.datamodel') as dstream: await core.addTagProp('depr', ('test:dep:easy', {}), {}) - self.true(await dstream.expect(mesg)) + await dstream.expect(mesg) mesg = 'universal property 
._test is using a deprecated type test:dep:easy' with self.getLoggerStream('synapse.datamodel') as dstream: await core.addUnivProp('_test', ('test:dep:easy', {}), {}) - self.true(await dstream.expect(mesg)) + await dstream.expect(mesg) mesg = 'extended property test:str:_depr is using a deprecated type test:dep:easy' with self.getLoggerStream('synapse.cortex') as cstream: await core.addFormProp('test:str', '_depr', ('test:dep:easy', {}), {}) - self.true(await cstream.expect(mesg)) + await cstream.expect(mesg) # Deprecated ctor information propagates upward to types and forms msgs = await core.stormlist('[test:dep:str=" test" :beep=" boop "]') @@ -280,7 +280,7 @@ async def test_model_deprecation(self): # with deprecated types in them. This is a coverage test for extended properties. with self.getLoggerStream('synapse.cortex') as cstream: async with await s_cortex.Cortex.anit(dirn, conf) as core: - self.true(await cstream.expect(mesg)) + await cstream.expect(mesg) async def test_datamodel_getmodeldefs(self): ''' diff --git a/synapse/tests/test_lib_agenda.py b/synapse/tests/test_lib_agenda.py index a592ad8f961..306f50016a1 100644 --- a/synapse/tests/test_lib_agenda.py +++ b/synapse/tests/test_lib_agenda.py @@ -423,7 +423,7 @@ def looptime(): unixtime = datetime.datetime(year=2019, month=2, day=16, hour=10, minute=16, tzinfo=tz.utc).timestamp() # pump the ioloop via sleep(0) until the log message appears - while not await stream.expect('locked', timeout=0.1): + while not await stream.contains('locked'): await asyncio.sleep(0) await core.nexsroot.waitOffs(strt + 4) @@ -739,14 +739,14 @@ async def test_agenda_fatal_run(self): with self.getLoggerStream('synapse.lib.agenda') as stream: core.agenda._addTickOff(55) - self.true(await stream.expect('Cron job error')) + await stream.expect('Cron job error') await core.addUserRule(user, (True, ('storm',))) await core.addUserRule(user, (True, ('view', 'read')), gateiden=fork) with self.getLoggerStream('synapse.storm.log') as 
stream: core.agenda._addTickOff(60) - self.true(await stream.expect('I am a cron job')) + await stream.expect('I am a cron job') async def test_agenda_mirror_realtime(self): with self.getTestDir() as dirn: @@ -922,7 +922,7 @@ async def test_agenda_promotions(self): await core01.promote(graceful=True) self.false(core00.isactive) self.true(core01.isactive) - self.true(await stream.expect('Cron job completed.*cancelled', count=NUMJOBS, escape=False)) + await stream.expect('Cron job completed.*cancelled', count=NUMJOBS, escape=False) # Sync the (now) follower so the isrunning status gets updated to false on both cortexes await core00.sync() @@ -1101,7 +1101,7 @@ async def test_agenda_warnings(self): q = '$lib.warn("oh hai")' msgs = await core.stormlist('cron.at --now $q', opts={'vars': {'q': q}}) self.stormHasNoWarnErr(msgs) - self.true(await stream.expect('warning.*oh hai', escape=False)) + await stream.expect('warning.*oh hai', escape=False) async def test_agenda_graceful_promotion_with_running_cron(self): @@ -1134,7 +1134,7 @@ async def test_agenda_graceful_promotion_with_running_cron(self): async with self.getTestCore(conf=conf01) as core01: with self.getLoggerStream('synapse.storm.log') as stream: - self.true(await stream.expect('I AM A ERROR LOG MESSAGE')) + await stream.expect('I AM A ERROR LOG MESSAGE') cron = await core00.callStorm('return($lib.cron.list())') self.len(1, cron) diff --git a/synapse/tests/test_lib_aha.py b/synapse/tests/test_lib_aha.py index c787a62701e..07b480fd42b 100644 --- a/synapse/tests/test_lib_aha.py +++ b/synapse/tests/test_lib_aha.py @@ -154,7 +154,7 @@ async def test_lib_aha_offon(self): with self.getLoggerStream('synapse.lib.aha') as stream: async with self.getTestAha(dirn=dirn) as aha: - self.true(await stream.expect(f'Set [0.cryo.synapse] offline.')) + await stream.expect(f'Set [0.cryo.synapse] offline.') svc = await aha.getAhaSvc('0.cryo...') self.notin('online', svc.get('svcinfo')) @@ -674,7 +674,7 @@ async def 
test_lib_aha_provision(self): # force a re-provision... (because the providen is different) with self.getLoggerStream('synapse.lib.cell') as stream: async with await s_axon.Axon.initFromArgv((axonpath,)) as axon: - self.true(await stream.expect('Provisioning axon from AHA service')) + await stream.expect('Provisioning axon from AHA service') self.ne(axon.conf.get('dmon:listen'), 'tcp://0.0.0.0:0') overconf2 = s_common.yamlload(axonpath, 'cell.mods.yaml') diff --git a/synapse/tests/test_lib_boss.py b/synapse/tests/test_lib_boss.py index 31da609ef3a..9b3cc316328 100644 --- a/synapse/tests/test_lib_boss.py +++ b/synapse/tests/test_lib_boss.py @@ -59,5 +59,5 @@ async def double_promote(): await boss.promote(f'double', root, taskiden=iden + iden) coro = boss.schedCoro(double_promote()) - self.true(await stream.expect('Iden specified for existing task')) + await stream.expect('Iden specified for existing task') await coro diff --git a/synapse/tests/test_lib_cell.py b/synapse/tests/test_lib_cell.py index 0459e0a07ab..d303fbb11f8 100644 --- a/synapse/tests/test_lib_cell.py +++ b/synapse/tests/test_lib_cell.py @@ -448,7 +448,7 @@ async def test_cell_auth(self): self.true(await proxy.adminOnly()) with self.getLoggerStream('synapse.lib.cell') as stream: self.eq(await proxy.adminOnlyLog(1, 2, three=4), (1, 2, {'three': 4})) - self.true(await stream.expect('Executing remote admin API call', timeout=10)) + await stream.expect('Executing remote admin API call') msgs = stream.jsonlines() self.len(1, msgs) self.eq('EchoAuthApi.adminOnlyLog', msgs[0]['params']['func']) @@ -744,7 +744,7 @@ async def test_longpath(self): with self.getLoggerStream('synapse.lib.cell') as stream: async with self.getTestCell(s_cell.Cell, dirn=longdirn) as cell: self.none(cell.dmon.addr) - self.true(await stream.expect('LOCAL UNIX SOCKET WILL BE UNAVAILABLE')) + await stream.expect('LOCAL UNIX SOCKET WILL BE UNAVAILABLE') async def test_cell_setuser(self): @@ -1298,7 +1298,7 @@ async def 
test_initargv_failure(self): with self.raises(FileExistsError): async with await s_cell.Cell.initFromArgv(['/dev/null']): pass - self.true(await stream.expect('Error starting cell at /dev/null')) + await stream.expect('Error starting cell at /dev/null') # Bad configs can also cause a failure. with self.getTestDir() as dirn: @@ -1308,7 +1308,7 @@ async def test_initargv_failure(self): async with await s_cell.Cell.initFromArgv([dirn, ]): pass self.eq(cm.exception.get('name'), 'auth:passwd') - self.true(await stream.expect('Error while bootstrapping cell config')) + await stream.expect('Error while bootstrapping cell config') async def test_cell_backup(self): @@ -2059,7 +2059,7 @@ async def test_mirror_badiden(self): with self.getLoggerStream('synapse.lib.nexus') as stream: async with self.getTestCell(s_cell.Cell, dirn=path01, conf=conf01) as cell01: - self.true(await stream.expect('has different iden', timeout=2)) + await stream.expect('has different iden') self.true(await cell01.nexsroot.waitfini(6)) async def test_backup_restore_base(self): @@ -2096,7 +2096,7 @@ async def test_backup_restore_base(self): with self.getLoggerStream('synapse.lib.cell') as stream: argv = [cdir, '--https', '0', '--telepath', 'tcp://127.0.0.1:0'] async with await s_cortex.Cortex.initFromArgv(argv) as core: - self.true(await stream.expect('Restoring cortex from SYN_RESTORE_HTTPS_URL')) + await stream.expect('Restoring cortex from SYN_RESTORE_HTTPS_URL') self.len(1, await core.nodes('inet:ipv4=1.2.3.4')) self.true(core.conf.get('storm:log')) @@ -2132,7 +2132,7 @@ async def test_backup_restore_base(self): with self.getLoggerStream('synapse.lib.cell') as stream: argv = [cdir, '--https', '0', '--telepath', 'tcp://127.0.0.1:0'] async with await s_cortex.Cortex.initFromArgv(argv) as core: - self.true(await stream.expect('Removing existing')) + await stream.expect('Removing existing') self.len(1, await core.nodes('inet:ipv4=1.2.3.4')) # Restore a backup which has an existing restore.done file in 
it - that marker file will get overwritten @@ -2143,7 +2143,7 @@ async def test_backup_restore_base(self): with self.getLoggerStream('synapse.lib.cell') as stream: argv = [cdir, '--https', '0', '--telepath', 'tcp://127.0.0.1:0'] async with await s_cortex.Cortex.initFromArgv(argv) as core: - self.true(await stream.expect('Restoring cortex from SYN_RESTORE_HTTPS_URL')) + await stream.expect('Restoring cortex from SYN_RESTORE_HTTPS_URL') self.len(1, await core.nodes('inet:ipv4=1.2.3.4')) rpath = s_common.genpath(cdir, 'restore.done') @@ -2238,7 +2238,7 @@ async def test_backup_restore_aha(self): with self.getLoggerStream('synapse.lib.cell') as stream: argv = [bdr0, '--https', '0', '--telepath', 'tcp://127.0.0.1:0'] async with await s_cortex.Cortex.initFromArgv(argv) as bcree00: - self.true(await stream.expect('Restoring cortex from SYN_RESTORE_HTTPS_URL')) + await stream.expect('Restoring cortex from SYN_RESTORE_HTTPS_URL') self.len(1, await bcree00.nodes('inet:asn=0')) self.len(1, await bcree00.nodes('[inet:asn=1234]')) @@ -2256,7 +2256,7 @@ async def test_backup_restore_aha(self): SYN_CORTEX_AHA_PROVISION=purl): argv = [bdr1, '--https', '0', '--telepath', 'tcp://127.0.0.1:0'] async with await s_cortex.Cortex.initFromArgv(argv) as bcree01: - self.true(await stream.expect('Restoring cortex from SYN_RESTORE_HTTPS_URL')) + await stream.expect('Restoring cortex from SYN_RESTORE_HTTPS_URL') self.true(bcree00.isactive) self.false(bcree01.isactive) @@ -2334,7 +2334,7 @@ async def test_backup_restore_double_promote_aha(self): with self.getLoggerStream('synapse.lib.cell') as stream: argv = [bdr0, '--https', '0', '--telepath', 'tcp://127.0.0.1:0'] async with await s_cortex.Cortex.initFromArgv(argv) as bcree00: - self.true(await stream.expect('Restoring cortex from SYN_RESTORE_HTTPS_URL')) + await stream.expect('Restoring cortex from SYN_RESTORE_HTTPS_URL') self.len(1, await bcree00.nodes('inet:asn=0')) self.len(1, await bcree00.nodes('[inet:asn=1234]')) @@ -2352,7 +2352,7 @@ 
async def test_backup_restore_double_promote_aha(self): SYN_CORTEX_AHA_PROVISION=purl): argv = [bdr1, '--https', '0', '--telepath', 'tcp://127.0.0.1:0'] async with await s_cortex.Cortex.initFromArgv(argv) as bcree01: - self.true(await stream.expect('Restoring cortex from SYN_RESTORE_HTTPS_URL')) + await stream.expect('Restoring cortex from SYN_RESTORE_HTTPS_URL') self.true(bcree00.isactive) self.false(bcree01.isactive) @@ -2560,7 +2560,7 @@ def spaceexc(self): opts = {'view': viewiden} with self.getLoggerStream('synapse.lib.lmdbslab') as stream: nodes = await core.stormlist('for $x in $lib.range(200) {[inet:ipv4=$x]}', opts=opts) - self.true(await stream.expect('Error during slab resize callback - foo')) + await stream.expect('Error during slab resize callback - foo') async with self.getTestCore() as core: @@ -3312,7 +3312,7 @@ async def proxy(self, timeout=None): with self.getLoggerStream('synapse.lib.cell') as stream: cell.ahaclient = MockAhaClient() self.none(await cell.getAhaProxy()) - self.true(await stream.expect('AHA client connection failed.')) + await stream.expect('AHA client connection failed.') class MockProxyHasNot: def _hasTeleFeat(self, name, vers): @@ -3368,7 +3368,7 @@ async def mock_executor(func, *args, **kwargs): with self.raises(Exception) as cm: async for _ in proxy.iterBackupArchive('bkup'): pass - self.true(await stream.expect('Error during backup streaming')) + await stream.expect('Error during backup streaming') async def test_iter_new_backup_archive(self): @@ -3392,7 +3392,7 @@ async def mock_runBackup(*args, **kwargs): pass self.isin('backup failed', str(cm.exception)) - self.true(await stream.expect('Removing')) + await stream.expect('Removing') path = os.path.join(backdirn, 'failedbackup') self.false(os.path.exists(path)) diff --git a/synapse/tests/test_lib_config.py b/synapse/tests/test_lib_config.py index fe9d371ac2a..98d7eb3dbd0 100644 --- a/synapse/tests/test_lib_config.py +++ b/synapse/tests/test_lib_config.py @@ -52,7 +52,7 @@ 
async def test_config_basics(self): with self.getLoggerStream('synapse.lib.config') as stream: for optname, optinfo in conf.getArgParseArgs(): pars.add_argument(optname, **optinfo) - self.true(stream.expect(mesg)) + await stream.expect(mesg) hmsg = pars.format_help() # Undo pretty-printing diff --git a/synapse/tests/test_lib_dyndeps.py b/synapse/tests/test_lib_dyndeps.py index 75092a1c3fa..ab782c67f67 100644 --- a/synapse/tests/test_lib_dyndeps.py +++ b/synapse/tests/test_lib_dyndeps.py @@ -16,7 +16,7 @@ class DynDepsTest(s_t_utils.SynTest): def test_dyndeps_dynmod(self): with self.getLoggerStream('synapse.lib.dyndeps') as stream: self.none(s_dyndeps.getDynMod('- -')) - self.true(stream.expect('Failed to import "- -"')) + await stream.expect('Failed to import "- -"') self.nn(s_dyndeps.getDynMod('sys')) def test_dyndeps_dynloc(self): diff --git a/synapse/tests/test_lib_httpapi.py b/synapse/tests/test_lib_httpapi.py index 15c941c645d..bae69330fc9 100644 --- a/synapse/tests/test_lib_httpapi.py +++ b/synapse/tests/test_lib_httpapi.py @@ -386,7 +386,7 @@ async def test_http_auth(self): async with sess.post(f'https://localhost:{port}/api/v1/login', json=info) as resp: item = await resp.json() self.eq('AuthDeny', item.get('code')) - self.true(await stream.expect('No such user.')) + await stream.expect('No such user.') async with self.getHttpSess() as sess: info = {'user': 'visi', 'passwd': 'secret'} @@ -395,7 +395,7 @@ async def test_http_auth(self): async with sess.post(f'https://localhost:{port}/api/v1/login', json=info) as resp: item = await resp.json() self.eq('AuthDeny', item.get('code')) - self.true(await stream.expect('User is locked.')) + await stream.expect('User is locked.') await core.setUserLocked(visiiden, False) async with self.getHttpSess() as sess: @@ -405,7 +405,7 @@ async def test_http_auth(self): async with sess.post(f'https://localhost:{port}/api/v1/login', json=info) as resp: item = await resp.json() self.eq('AuthDeny', item.get('code')) - 
self.true(await stream.expect('Incorrect password.')) + await stream.expect('Incorrect password.') async with self.getHttpSess() as sess: info = {'user': 'visi', 'passwd': 'secret'} @@ -455,21 +455,21 @@ async def test_http_auth(self): async with sess.get(f'https://localhost:{port}/api/v1/auth/users', auth=heheauth) as resp: item = await resp.json() self.eq('NotAuthenticated', item.get('code')) - self.true(await stream.expect('No such user.')) + await stream.expect('No such user.') await core.setUserLocked(visiiden, True) with self.getLoggerStream('synapse.lib.httpapi') as stream: async with sess.get(f'https://localhost:{port}/api/v1/auth/users', auth=visiauth) as resp: item = await resp.json() self.eq('NotAuthenticated', item.get('code')) - self.true(await stream.expect('User is locked.')) + await stream.expect('User is locked.') await core.setUserLocked(visiiden, False) with self.getLoggerStream('synapse.lib.httpapi') as stream: async with sess.get(f'https://localhost:{port}/api/v1/auth/users', auth=newpauth) as resp: item = await resp.json() self.eq('NotAuthenticated', item.get('code')) - self.true(await stream.expect('Incorrect password.')) + await stream.expect('Incorrect password.') headers = {'Authorization': 'yermom'} async with sess.get(f'https://localhost:{port}/api/v1/auth/users', headers=headers) as resp: diff --git a/synapse/tests/test_lib_link.py b/synapse/tests/test_lib_link.py index c25f852474b..35db8c19d99 100644 --- a/synapse/tests/test_lib_link.py +++ b/synapse/tests/test_lib_link.py @@ -123,7 +123,7 @@ async def onlink(link): await asyncio.sleep(0) with self.getLoggerStream('synapse.lib.link') as stream: msg1 = await link.rx() - self.true(await stream.expect('rx error')) + await stream.expect('rx error') self.none(msg1) async def test_link_file(self): diff --git a/synapse/tests/test_lib_modelrev.py b/synapse/tests/test_lib_modelrev.py index fbee1590949..427912ef8d1 100644 --- a/synapse/tests/test_lib_modelrev.py +++ 
b/synapse/tests/test_lib_modelrev.py @@ -391,7 +391,7 @@ async def test_modelrev_0_2_19(self): with self.getLoggerStream('synapse.lib.modelrev') as stream: async with self.getRegrCore('model-0.2.19-bad-risk-types') as core: - self.true(await stream.expect('error re-norming risk:vuln:type=foo.bar...newp')) + await stream.expect('error re-norming risk:vuln:type=foo.bar...newp') self.len(5, await core.nodes('risk:vuln')) self.len(4, await core.nodes('risk:vuln:type')) nodes = await core.nodes('yield $lib.lift.byNodeData(_migrated:risk:vuln:type)') diff --git a/synapse/tests/test_lib_module.py b/synapse/tests/test_lib_module.py index 98ad3e12613..7695e63c391 100644 --- a/synapse/tests/test_lib_module.py +++ b/synapse/tests/test_lib_module.py @@ -73,13 +73,13 @@ async def test_load_failures(self): with self.setTstEnvars(SYN_TEST_MOD_FAIL_PRE=1) as cm: with self.getLoggerStream('synapse.cortex') as stream: self.none(await core.loadCoreModule(foo_ctor)) - self.true(await stream.expect('preCoreModuleFail')) + await stream.expect('preCoreModuleFail') self.none(core.getCoreMod(foo_ctor)) with self.setTstEnvars(SYN_TEST_MOD_FAIL_INIT=1) as cm: with self.getLoggerStream('synapse.cortex') as stream: self.none(await core.loadCoreModule(foo_ctor)) - self.true(await stream.expect('initCoreModuleFail')) + await stream.expect('initCoreModuleFail') self.none(core.getCoreMod(foo_ctor)) with self.getTestDir(mirror='testcore') as dirn: @@ -91,11 +91,11 @@ async def test_load_failures(self): with self.setTstEnvars(SYN_TEST_MOD_FAIL_PRE=1) as cm: with self.getLoggerStream('synapse.cortex') as stream: async with await s_cortex.Cortex.anit(dirn) as core: - self.true(await stream.expect('preCoreModuleFail')) + await stream.expect('preCoreModuleFail') self.none(core.getCoreMod(foo_ctor)) with self.setTstEnvars(SYN_TEST_MOD_FAIL_INIT=1) as cm: with self.getLoggerStream('synapse.cortex') as stream: async with await s_cortex.Cortex.anit(dirn) as core: - self.true(await 
stream.expect('initCoreModuleFail')) + await stream.expect('initCoreModuleFail') self.none(core.getCoreMod(foo_ctor)) diff --git a/synapse/tests/test_lib_multislabseqn.py b/synapse/tests/test_lib_multislabseqn.py index 59a07f9d10e..97348d7c1b4 100644 --- a/synapse/tests/test_lib_multislabseqn.py +++ b/synapse/tests/test_lib_multislabseqn.py @@ -319,7 +319,7 @@ async def test_multislabseqn_discover(self): with self.getLoggerStream('synapse.lib.multislabseqn') as stream: async with await s_multislabseqn.MultiSlabSeqn.anit(baddirn) as msqn: await self.agenlen(15, msqn.iter(0)) - self.true(await stream.expect('non-directory')) + await stream.expect('non-directory') # Switcheroo @@ -347,7 +347,7 @@ async def test_multislabseqn_discover(self): with self.getLoggerStream('synapse.lib.multislabseqn') as stream: async with await s_multislabseqn.MultiSlabSeqn.anit(baddirn) as msqn: await self.agenlen(15, msqn.iter(0)) - self.true(await stream.expect('gap in indices')) + await stream.expect('gap in indices') # Wipe a seqn clean baddirn = s_common.genpath(dirn, 'bad4') diff --git a/synapse/tests/test_lib_storm.py b/synapse/tests/test_lib_storm.py index 3ed5e32f214..8e4a2c6858e 100644 --- a/synapse/tests/test_lib_storm.py +++ b/synapse/tests/test_lib_storm.py @@ -1447,7 +1447,7 @@ async def get(self, name): with self.getLoggerStream('synapse.cortex') as stream: await core.addStormPkg(pkgdef) - self.true(await stream.expect('bazfaz requirement')) + await stream.expect('bazfaz requirement') pkgdef = { 'name': 'bazfaz', @@ -1461,7 +1461,7 @@ async def get(self, name): with self.getLoggerStream('synapse.cortex') as stream: await core.addStormPkg(pkgdef) - self.true(await stream.expect('bazfaz optional requirement')) + await stream.expect('bazfaz optional requirement') deps = await core.callStorm('return($lib.pkg.deps($pkgdef))', opts={'vars': {'pkgdef': pkgdef}}) self.eq({ @@ -2507,7 +2507,7 @@ async def test_storm_dmon_user_locked(self): await visi.setLocked(True) q = 
'return($lib.dmon.bump($iden))' self.true(await core.callStorm(q, opts={'vars': {'iden': ddef0['iden']}})) - self.true(await stream.expect('user is locked')) + await stream.expect('user is locked') async def test_storm_dmon_user_autobump(self): async with self.getTestCore() as core: @@ -2518,15 +2518,15 @@ async def test_storm_dmon_user_autobump(self): q = '''return($lib.dmon.add(${{ $lib.print(foobar) $lib.time.sleep(10) }}, name=hehedmon))''' await asvisi.callStorm(q) - self.true(await stream.expect('Dmon query exited')) + await stream.expect('Dmon query exited') with self.getLoggerStream('synapse.lib.storm') as stream: await core.setUserLocked(visi.iden, True) - self.true(await stream.expect('user is locked')) + await stream.expect('user is locked') with self.getLoggerStream('synapse.lib.storm') as stream: await core.setUserLocked(visi.iden, False) - self.true(await stream.expect('Dmon query exited')) + await stream.expect('Dmon query exited') async def test_storm_dmon_caching(self): @@ -3195,12 +3195,12 @@ async def test_storm_iden(self): q = 'iden newp' with self.getLoggerStream('synapse.lib.snap') as stream: self.len(0, await core.nodes(q)) - self.true(await stream.expect('Failed to decode iden')) + await stream.expect('Failed to decode iden') q = 'iden deadb33f' with self.getLoggerStream('synapse.lib.snap') as stream: self.len(0, await core.nodes(q)) - self.true(await stream.expect('iden must be 32 bytes')) + await stream.expect('iden must be 32 bytes') # Runtsafety test q = 'test:str=hehe | iden $node.iden()' diff --git a/synapse/tests/test_lib_stormlib_cortex.py b/synapse/tests/test_lib_stormlib_cortex.py index 819d2ad272d..b5b196e5e3d 100644 --- a/synapse/tests/test_lib_stormlib_cortex.py +++ b/synapse/tests/test_lib_stormlib_cortex.py @@ -303,7 +303,7 @@ async def test_libcortex_httpapi_methods(self): with self.getLoggerStream('synapse.storm') as stream: resp = await sess.get(url) self.eq(resp.status, 200) - self.true(await stream.expect('Executing storm 
query')) + await stream.expect('Executing storm query') msgs = stream.jsonlines() self.eq(msgs[0]['params'].get('httpapi'), echoiden) core.stormlog = False @@ -340,7 +340,7 @@ async def test_libcortex_httpapi_methods(self): resp = await sess.get(f'https://localhost:{hport}/api/ext/testpath04') self.eq(resp.status, 200) self.eq(await resp.json(), {'hehe': 'yes!'}) - self.true(await stream.expect('Response.reply() has already been called')) + await stream.expect('Response.reply() has already been called') async def test_libcortex_httpapi_runas_owner(self): async with self.getTestCore() as core: @@ -1325,13 +1325,13 @@ async def test_libcortex_httpapi_fsm_sadpath(self): with self.getLoggerStream('synapse.lib.httpapi') as stream: resp = await sess.get(f'https://localhost:{hport}/api/ext/bad03') - self.true(await stream.expect('Extended HTTP API sent code after sending body.')) + await stream.expect('Extended HTTP API sent code after sending body.') self.eq(resp.status, 201) self.eq(await resp.read(), b'text') with self.getLoggerStream('synapse.lib.httpapi') as stream: resp = await sess.get(f'https://localhost:{hport}/api/ext/bad04') - self.true(await stream.expect('Extended HTTP API set headers after sending body.')) + await stream.expect('Extended HTTP API set headers after sending body.') self.eq(resp.status, 200) self.eq(await resp.read(), b'text') @@ -1343,14 +1343,14 @@ async def test_libcortex_httpapi_fsm_sadpath(self): with self.getLoggerStream('synapse.lib.httpapi') as stream: resp = await sess.get(f'https://localhost:{hport}/api/ext/bad06') - self.true(await stream.expect('Extended HTTP API encountered an error.')) + await stream.expect('Extended HTTP API encountered an error.') self.eq('BadTypeValu', stream.jsonlines()[0]['params']['errname']) self.eq(resp.status, 201) self.eq(await resp.json(), {}) with self.getLoggerStream('synapse.lib.httpapi') as stream: resp = await sess.get(f'https://localhost:{hport}/api/ext/bad07') - self.true(await 
stream.expect('Extended HTTP API encountered an error.')) + await stream.expect('Extended HTTP API encountered an error.') self.eq('StormRuntimeError', stream.jsonlines()[0]['params']['errname']) self.eq(resp.status, 500) data = await resp.json() diff --git a/synapse/tests/test_lib_stormlib_log.py b/synapse/tests/test_lib_stormlib_log.py index 78dbee9694d..9f06cf1bf68 100644 --- a/synapse/tests/test_lib_stormlib_log.py +++ b/synapse/tests/test_lib_stormlib_log.py @@ -13,27 +13,27 @@ async def test_stormlib_log(self): # Raw message with self.getLoggerStream(logname) as stream: await core.callStorm('$lib.log.debug("debug message")') - self.true(await stream.expect('debug message')) + await stream.expect('debug message') with self.getLoggerStream(logname) as stream: await core.callStorm('$lib.log.info("info message")') - self.true(await stream.expect('info message')) + await stream.expect('info message') with self.getLoggerStream(logname) as stream: await core.callStorm('$lib.log.warning("warn message")') - self.true(await stream.expect('warn message')) + await stream.expect('warn message') with self.getLoggerStream(logname) as stream: await core.callStorm('$lib.log.error("error message")') - self.true(await stream.expect('error message')) + await stream.expect('error message') # Extra without structlog handler in place has no change in results with self.getLoggerStream(logname) as stream: await core.callStorm('$lib.log.debug("debug message", extra=({"key": "valu"}))') - self.true(await stream.expect('debug message')) + await stream.expect('debug message') self.eq('valu', stream.jsonlines()[0]['params']['key']) # Extra can be empty too with self.getLoggerStream(logname) as stream: await core.callStorm('$lib.log.debug("debug message", extra=({}))') - self.true(await stream.expect('debug message')) + await stream.expect('debug message') # Extra must be a dict after toprim is called on him. 
with self.raises(s_exc.BadArg): diff --git a/synapse/tests/test_lib_stormlib_model.py b/synapse/tests/test_lib_stormlib_model.py index b4b95129b36..39580d0828e 100644 --- a/synapse/tests/test_lib_stormlib_model.py +++ b/synapse/tests/test_lib_stormlib_model.py @@ -230,7 +230,7 @@ async def test_stormlib_model_depr(self): (('ou:org', ('t0',)), {'props': {'sic': '5678'}}), ) await core.addFeedData('syn.nodes', data) - self.true(await stream.expect('Prop ou:org:sic is locked due to deprecation')) + await stream.expect('Prop ou:org:sic is locked due to deprecation') nodes = await core.nodes('ou:org=(t0,)') self.none(nodes[0].get('sic')) diff --git a/synapse/tests/test_lib_stormtypes.py b/synapse/tests/test_lib_stormtypes.py index c45d89efe21..942f30cdabd 100644 --- a/synapse/tests/test_lib_stormtypes.py +++ b/synapse/tests/test_lib_stormtypes.py @@ -4925,7 +4925,7 @@ async def getCronJob(text): with self.getLoggerStream('synapse.storm.log') as stream: unixtime += 7 * MINSECS self.eq('m3', await getNextFoo()) - self.true(await stream.expect('m3 cron')) + await stream.expect('m3 cron') mesg = stream.jsonlines()[0] self.eq(mesg['message'], f'm3 cron {guid}') self.eq(mesg['params']['iden'], guid) diff --git a/synapse/tests/test_lib_trigger.py b/synapse/tests/test_lib_trigger.py index 320c41bb122..a2611657683 100644 --- a/synapse/tests/test_lib_trigger.py +++ b/synapse/tests/test_lib_trigger.py @@ -147,7 +147,7 @@ async def test_trigger_basics(self): await view.addTrigger(tdef) with self.getLoggerStream('synapse.storm.log') as stream: await core.nodes('[ test:str=foo ]') - self.true(await stream.expect('f=test:str v=foo')) + await stream.expect('f=test:str v=foo') self.len(1, await core.nodes('test:guid#nodeadd')) # node:del case @@ -170,7 +170,7 @@ async def test_trigger_basics(self): await core.nodes('[ test:str=foo +#a.b ]') with self.getLoggerStream('synapse.storm.log') as stream: await core.nodes('[ test:str=foo +#a.b.c ]') - self.true(await stream.expect('a.b.c')) + 
await stream.expect('a.b.c') await core.nodes('[ test:str=foo +#a.b.ccc ]') self.len(1, await core.nodes('#count')) @@ -214,7 +214,7 @@ async def test_trigger_basics(self): await core.nodes('[ test:type10=1 ]') with self.getLoggerStream('synapse.storm.log') as stream: await core.nodes('[ test:type10=1 :intprop=25 ]') - self.true(await stream.expect('pf=test:type10:intprop pn=intprop')) + await stream.expect('pf=test:type10:intprop pn=intprop') self.len(1, await core.nodes('test:guid#propset')) # Test re-setting doesn't fire @@ -248,7 +248,7 @@ async def test_trigger_basics(self): await view.addTrigger(tdef) with self.getLoggerStream('synapse.storm.log') as stream: await core.nodes('[ test:str=logit ]') - self.true(await stream.expect('test trigger')) + await stream.expect('test trigger') msgs = stream.jsonlines() mesg = [m for m in msgs if m['params'].get('iden') == tdef.get('iden')][0] self.eq(mesg['message'], f'test trigger {tdef.get("iden")}') diff --git a/synapse/tests/test_model_dns.py b/synapse/tests/test_model_dns.py index 219e0ed285d..af7b257e9cf 100644 --- a/synapse/tests/test_model_dns.py +++ b/synapse/tests/test_model_dns.py @@ -178,12 +178,12 @@ async def test_model_dns_request(self): q = 'inet:dns:query=(tcp://1.2.3.4, "", 1) :name -> inet:fqdn' with self.getLoggerStream('synapse.lib.ast') as stream: self.len(0, await core.nodes(q)) - self.true(await stream.expect('Cannot generate fqdn index bytes for a empty string')) + await stream.expect('Cannot generate fqdn index bytes for a empty string') q = 'inet:dns:query=(tcp://1.2.3.4, "foo*.haha.com", 1) :name -> inet:fqdn' with self.getLoggerStream('synapse.lib.ast') as stream: self.len(0, await core.nodes(q)) - self.true(await stream.expect('Wild card may only appear at the beginning')) + await stream.expect('Wild card may only appear at the beginning') async def test_forms_dns_simple(self): diff --git a/synapse/tests/test_model_infotech.py b/synapse/tests/test_model_infotech.py index 
cbb8667c5cd..ca94d13acaf 100644 --- a/synapse/tests/test_model_infotech.py +++ b/synapse/tests/test_model_infotech.py @@ -992,7 +992,7 @@ async def test_it_forms_prodsoft(self): node = nodes[0] self.eq(node.get('vers'), 'alpha') self.none(node.get('semver')) - self.true(await stream.expect('Unable to parse string as a semver')) + await stream.expect('Unable to parse string as a semver') async def test_it_form_callbacks(self): async with self.getTestCore() as core: diff --git a/synapse/tests/test_telepath.py b/synapse/tests/test_telepath.py index 252a541ac31..a3b13a2a311 100644 --- a/synapse/tests/test_telepath.py +++ b/synapse/tests/test_telepath.py @@ -885,7 +885,7 @@ async def dostuff(self, x): await targ.waitready() # Verify the password doesn't leak into the log - self.true(await stream.expect('Connect call failed')) + await stream.expect('Connect call failed') stream.seek(0) mesgs = stream.read() self.notin('password', mesgs) @@ -908,7 +908,7 @@ async def onlink(proxy, urlinfo): with self.getLoggerStream('synapse.tests.test_telepath') as stream: async with await s_telepath.open(url1, onlink=onlink) as targ: - self.true(await stream.expect(f'Connected to url=tcp://127.0.0.1:{addr1[1]}/foo')) + await stream.expect(f'Connected to url=tcp://127.0.0.1:{addr1[1]}/foo') # Coverage async def badonlink(proxy, urlinfo): @@ -916,7 +916,7 @@ async def badonlink(proxy, urlinfo): with self.getLoggerStream('synapse.telepath', 'onlink: ') as stream: async with await s_telepath.open(url1, onlink=badonlink) as targ: - self.true(await stream.expect('onlink: ')) + await stream.expect('onlink: ') await dmon0.fini() await dmon1.fini() @@ -1014,7 +1014,7 @@ async def test_link_fini_breaking_tasks(self): # Ensure that the sleepg function got canceled. self.true(await asyncio.wait_for(foo.sleepg_evt.wait(), timeout=6)) # Ensure we logged the cancellation. 
- self.true(await stream.expect('task sleepg')) + await stream.expect('task sleepg') async def test_link_fini_breaking_tasks2(self): ''' diff --git a/synapse/tests/test_utils.py b/synapse/tests/test_utils.py index 26c3992f1e0..5b2de539109 100644 --- a/synapse/tests/test_utils.py +++ b/synapse/tests/test_utils.py @@ -117,7 +117,7 @@ def logathing(mesg): logger.error('notthere') with self.getLoggerStream('synapse.tests.test_utils') as stream: thr = logathing('StreamEvent Test Message') - self.true(await stream.expect('Test Message')) + await stream.expect('Test Message') thr.join() stream.seek(0) @@ -282,7 +282,7 @@ async def test_checknode(self): self.checkNode(nodes[0], (('test:comp', (1, 'test')), {'hehe': 1, 'haha': 'newp'})) with self.getLoggerStream('synapse.tests.utils') as stream: self.checkNode(nodes[0], (('test:comp', (1, 'test')), {'hehe': 1})) - self.true(await stream.expect('untested properties')) + await stream.expect('untested properties') await self.checkNodes(core, [('test:comp', (1, 'test')),]) with self.raises(AssertionError): diff --git a/synapse/tests/utils.py b/synapse/tests/utils.py index 0f27db3363e..6552f61521f 100644 --- a/synapse/tests/utils.py +++ b/synapse/tests/utils.py @@ -800,6 +800,18 @@ def write(self, s): self._lines.append(s) self._event.set() + async def contains(self, text, count=1, escape=True): + + if escape: + text = regex.escape(text) + + regx = regex.compile(text) + for line in self._lines: + if regx.search(line): + return True + + return False + async def expect(self, text, count=1, timeout=5, escape=True): offs = 0 @@ -830,11 +842,11 @@ def thereyet(): return tally >= count try: - return await s_common.wait_for(_expect(), timeout=timeout) + await s_common.wait_for(_expect(), timeout=timeout) except TimeoutError: logger.warning(f'Pattern [{text}] not found in...') [logger.warning(f' {line}') for line in self._lines] - return False + raise s_exc.SynErr(mesg=f'Pattern [{text}] not found!') def jsonlines(self) -> 
typing.List[dict]: '''Get the messages as jsonlines. May throw Json errors if the captured stream is not jsonlines.''' From 6494f2b8e49844b7fa6022d467086c55ad5485e3 Mon Sep 17 00:00:00 2001 From: visi Date: Wed, 26 Feb 2025 18:12:22 -0500 Subject: [PATCH 37/52] wip --- synapse/cortex.py | 210 ++++++++++++++++++++-------------- synapse/lib/aha.py | 106 +++++++++-------- synapse/lib/cell.py | 2 +- synapse/lib/logging.py | 16 ++- synapse/lib/snap.py | 2 +- synapse/lib/view.py | 4 +- synapse/telepath.py | 5 +- synapse/tests/test_cortex.py | 26 ++--- synapse/tests/test_lib_aha.py | 2 +- 9 files changed, 223 insertions(+), 150 deletions(-) diff --git a/synapse/cortex.py b/synapse/cortex.py index 561c2e8618a..0d0573e9b06 100644 --- a/synapse/cortex.py +++ b/synapse/cortex.py @@ -1029,7 +1029,7 @@ async def initServiceStorage(self): async def _storCortexHiveMigration(self): - logger.warning('migrating Cortex data out of hive') + logger.warning('Migrating Cortex data out of hive.') viewdefs = self.cortexdata.getSubKeyVal('view:info:') async with await self.hive.open(('cortex', 'views')) as viewnodes: @@ -1126,7 +1126,8 @@ async def _storUpdateMacros(self): await self._addStormMacro(mdef) except Exception as e: - logger.exception(f'Macro migration error for macro: {name} (skipped).') + extra = self.getLogExtra(name=name) + logger.exception(f'Macro migration error. 
Skipped.', extra=extra) def getStormMacro(self, name, user=None): @@ -1604,7 +1605,8 @@ async def initStormPool(self): async def onlink(proxy, urlinfo): _url = s_urlhelp.sanitizeUrl(s_telepath.zipurl(urlinfo)) - logger.debug(f'Stormpool client connected to {_url}') + extra = self.getLogExtra(url=_url) + logger.debug(f'Stormpool client connected.', extra=extra) self.stormpool = await s_telepath.open(url, onlink=onlink) @@ -1612,7 +1614,8 @@ async def onlink(proxy, urlinfo): self.onfini(self.stormpool) except Exception as e: # pragma: no cover - logger.exception(f'Error starting stormpool, it will not be available: {e}') + extra = self.getLogExtra() + logger.exception('Error starting stormpool.', extra=extra) async def finiStormPool(self): @@ -2174,7 +2177,8 @@ async def _initStormDmons(self): raise except Exception as e: - logger.warning(f'initStormDmon ({iden}) failed: {e}') + extra = self.getLogExtra(iden=iden, exc=e) + logger.warning('Failed to start Storm dmon.', extra=extra) async def _initStormSvcs(self): @@ -2187,7 +2191,8 @@ async def _initStormSvcs(self): raise except Exception as e: - logger.warning(f'initStormService ({iden}) failed: {e}') + extra = self.getLogExtra(iden=iden, exc=e) + logger.warning('Failed to initialize Storm service.', extra=extra) async def _initCoreQueues(self): path = os.path.join(self.dirn, 'slabs', 'queues.lmdb') @@ -2692,7 +2697,7 @@ async def getStormMod(self, name, reqvers=None): if pkgvers is None: mesg = f'getStormMod: requested storm module {name}@{reqvers}' \ 'has no version information to check.' 
- logger.warning(mesg) + logger.warning(mesg, extra=self.getLogExtra()) return if isinstance(pkgvers, tuple): @@ -2714,7 +2719,8 @@ async def _tryLoadStormPkg(self, pkgdef): except Exception as e: name = pkgdef.get('name', '') - logger.exception(f'Error loading pkg: {name}, {str(e)}') + extra = self.getLogExtra(name=name, exc=e) + logger.exception('Error loading Storm package.', extra=extra) async def verifyStormPkgDeps(self, pkgdef): @@ -2781,12 +2787,8 @@ async def _reqStormPkgDeps(self, pkgdef): if require['ok']: continue - option = ' ' - if require.get('optional'): - option = ' optional ' - - mesg = f'Storm package {name}{option}requirement {require.get("name")}{require.get("version")} is currently unmet.' - logger.debug(mesg) + extra = self.getLogExtra(name=name, require=require) + logger.debug('Storm package requirement is unmet.', extra=extra) for conflict in deps['conflicts']: @@ -2924,19 +2926,29 @@ def loadStormPkg(self, pkgdef): if onload is not None and self.isactive: async def _onload(): try: + async for mesg in self.storm(onload): + if mesg[0] == 'print': - logger.info(f'{name} onload output: {mesg[1].get("mesg")}') - if mesg[0] == 'warn': - logger.warning(f'{name} onload output: {mesg[1].get("mesg")}') - if mesg[0] == 'err': - logger.error(f'{name} onload output: {mesg[1]}') + extra = self.getLogExtra(name=name, mesg=mesg[1].get('mesg')) + logger.info('Storm package onload print.', extra=extra) + + elif mesg[0] == 'warn': + extra = self.getLogExtra(name=name, mesg=mesg[1].get('mesg')) + logger.warning('Storm package onload warning.', extra=extra) + + elif mesg[0] == 'err': + extra = self.getLogExtra(name=name, mesg=mesg[1].get('mesg')) + logger.error('Storm package onload error.', extra=extra) + await asyncio.sleep(0) - except asyncio.CancelledError: # pragma: no cover - raise - except Exception: # pragma: no cover - logger.warning(f'onload failed for package: {name}') + + except Exception as e: + extra = self.getLogExtra(name=name, exc=e) + 
logger.warning('Storm package onload failure.', extra=extra) + await self.fire('core:pkg:onload:complete', pkg=name) + self.schedCoro(_onload()) # N.B. This function is intentionally not async in order to prevent possible user race conditions for code @@ -3027,10 +3039,9 @@ async def _delStormSvc(self, iden): try: if self.isactive: await self.runStormSvcEvent(iden, 'del') - except asyncio.CancelledError: # pragma: no cover TODO: remove once py 3.8 only - raise except Exception as e: - logger.exception(f'service.del hook for service {iden} failed with error: {e}') + extra = self.getLogExtra(iden=iden, exc=e) + logger.warning('Service delete hook failed.', extra=extra) sdef = self.svcdefs.pop(iden) @@ -3118,7 +3129,8 @@ async def _runStormSvcAdd(self, iden): except asyncio.CancelledError: # pragma: no cover TODO: remove once py 3.8 only raise except Exception as e: - logger.exception(f'runStormSvcEvent service.add failed with error {e}') + extra = self.getLogExtra(iden=iden) + logger.exception('Failed to run Storm service add event.', extra=extra) return sdef['added'] = True @@ -3186,7 +3198,8 @@ async def _migrateTaxonomyIface(self): ifaces = typeinfo.get('interfaces') if ifaces and 'taxonomy' in ifaces: - logger.warning(f'Migrating taxonomy interface on form {formname} to meta:taxonomy.') + extra = self.getLogExtra(form=formname) + logger.warning('Migrating taxonomy interface to meta:taxonomy.', extra=extra) ifaces = set(ifaces) ifaces.remove('taxonomy') @@ -3196,7 +3209,8 @@ async def _migrateTaxonomyIface(self): await extforms.set(formname, (formname, basetype, typeopts, typeinfo)) except Exception as e: # pragma: no cover - logger.exception(f'Taxonomy migration error for form: {formname} (skipped).') + extra = self.getLogExtra(form=formname, exc=e) + logger.warning('Taxonomy migration error (skipped).', extra=extra) async def _loadExtModel(self): @@ -3211,48 +3225,52 @@ async def _loadExtModel(self): try: self.model.addType(typename, basetype, typeopts, 
typeinfo) except Exception as e: - logger.warning(f'Extended type ({typename}) error: {e}') + extra = self.getLogExtra(type=typename, exc=e) + logger.warning(f'Extended type definition error.', extra=extra) for formname, basetype, typeopts, typeinfo in self.extforms.values(): try: self.model.addType(formname, basetype, typeopts, typeinfo) form = self.model.addForm(formname, {}, ()) except Exception as e: - logger.warning(f'Extended form ({formname}) error: {e}') + extra = self.getLogExtra(form=formname, exc=e) + logger.warning('Extended form definition error.', extra=extra) else: if form.type.deprecated: - mesg = f'The extended property {formname} is using a deprecated type {form.type.name} which will' \ - f' be removed in 3.0.0' - logger.warning(mesg) + mesg = 'Extended form is using a deprecated type which will be removed in 3.0.0.' + logger.warning(mesg, extra=self.getLogExtra(form=formname, type=form.type.name)) for form, prop, tdef, info in self.extprops.values(): try: prop = self.model.addFormProp(form, prop, tdef, info) except Exception as e: - logger.warning(f'ext prop ({form}:{prop}) error: {e}') + extra = self.getLogExtra(prop=f'{form}:{prop}', exc=e) + logger.warning('Extended property definition error.', extra=extra) else: if prop.type.deprecated: - mesg = f'The extended property {prop.full} is using a deprecated type {prop.type.name} which will' \ - f' be removed in 3.0.0' - logger.warning(mesg) + mesg = 'Extended property is using a deprecated type which will be removed in 3.0.0.' 
+ logger.warning(mesg, extra=self.getLogExtra(prop=prop.full, type=prop.type.name)) for prop, tdef, info in self.extunivs.values(): try: self.model.addUnivProp(prop, tdef, info) except Exception as e: - logger.warning(f'ext univ ({prop}) error: {e}') + extra = self.getLogExtra(univ=prop, exc=e) + logger.warning('Extended universal property definition error.', extra=extra) for prop, tdef, info in self.exttagprops.values(): try: self.model.addTagProp(prop, tdef, info) except Exception as e: - logger.warning(f'ext tag prop ({prop}) error: {e}') + extra = self.getLogExtra(prop=prop, exc=e) + logger.warning('Tag property definition error.', extra=extra) for edge, info in self.extedges.values(): try: self.model.addEdge(edge, info) except Exception as e: - logger.warning(f'ext edge ({edge}) error: {e}') + extra = self.getLogExtra(edge=edge, exc=e) + logger.warning('Extended edge definition error.', extra=extra) async def getExtModel(self): ''' @@ -3466,7 +3484,9 @@ async def _addForm(self, formname, basetype, typeopts, typeinfo): ifaces = typeinfo.get('interfaces') if ifaces and 'taxonomy' in ifaces: - logger.warning(f'{formname} is using the deprecated taxonomy interface, updating to meta:taxonomy.') + + mesg = 'Form is using the deprecated taxonomy interface, updating to meta:taxonomy.' + logger.warning(mesg, extra=self.getLogExtra(form=formname)) ifaces = set(ifaces) ifaces.remove('taxonomy') @@ -3542,7 +3562,9 @@ async def _addType(self, typename, basetype, typeopts, typeinfo): ifaces = typeinfo.get('interfaces') if ifaces and 'taxonomy' in ifaces: - logger.warning(f'{typename} is using the deprecated taxonomy interface, updating to meta:taxonomy.') + + mesg = 'Type is using the deprecated taxonomy interface, updating to meta:taxonomy.' 
+ logger.warning(mesg, extra=self.getLogExtra(type=typename)) ifaces = set(ifaces) ifaces.remove('taxonomy') @@ -3605,9 +3627,8 @@ async def _addFormProp(self, form, prop, tdef, info): _prop = self.model.addFormProp(form, prop, tdef, info) if _prop.type.deprecated: - mesg = f'The extended property {_prop.full} is using a deprecated type {_prop.type.name} which will' \ - f' be removed in 3.0.0' - logger.warning(mesg) + mesg = 'Extended property is using a deprecated type which will be removed in 3.0.0.' + logger.warning(mesg, extra=self.getLogExtra(prop=_prop.full, type=_prop.type.name)) full = f'{form}:{prop}' self.extprops.set(full, (form, prop, tdef, info)) @@ -4084,7 +4105,7 @@ def onaddlayr(mesg): # First, catch up to what was the current offset when we started, guaranteeing order - logger.debug(f'_syncNodeEdits() running catch-up sync to offs={topoffs}') + logger.debug('_syncNodeEdits() running catch-up sync to offs=%s', topoffs) genrs = [genrfunc(layr, offsdict.get(layr.iden, 0), endoff=topoffs) for layr in self.layers.values()] async for item in s_common.merggenr(genrs, lambda x, y: x[0] < y[0]): @@ -4222,7 +4243,8 @@ async def _initJsonStor(self): if self.jsonurl is not None: async def onlink(proxy: s_telepath.Proxy): - logger.debug(f'Connected to remote jsonstor {s_urlhelp.sanitizeUrl(self.jsonurl)}') + extra = self.getLogExtra(url=s_urlhelp.sanitizeUrl(self.jsonurl)) + logger.debug('Connected to remote jsonstor.', extra=extra) self.jsonstor = await s_telepath.Client.anit(self.jsonurl, onlink=onlink) else: @@ -4338,7 +4360,9 @@ async def _initCoreAxon(self): return async def onlink(proxy: s_telepath.Proxy): - logger.debug(f'Connected to remote axon {s_urlhelp.sanitizeUrl(turl)}') + + extra = self.getLogExtra(url=s_urlhelp.sanitizeUrl(turl)) + logger.debug('Connected to remote axon.', extra=extra) async def fini(): self.axready.clear() @@ -4434,7 +4458,8 @@ async def _initPureStormCmds(self): await self._trySetStormCmd(name, cdef) for name in oldcmds: - 
logger.warning(f'Removing old command: [{name}]') + extra = self.getLogExtra(name=name) + logger.warning('Removing old command.', extra=extra) self.cmddefs.pop(name) for pkgdef in self.pkgdefs.values(): @@ -4443,8 +4468,9 @@ async def _initPureStormCmds(self): async def _trySetStormCmd(self, name, cdef): try: self._setStormCmd(cdef) - except (asyncio.CancelledError, Exception): - logger.exception(f'Storm command load failed: {name}') + except (asyncio.CancelledError, Exception) as e: + extra = self.getLogExtra(name=name, exc=e) + logger.warning('Storm command load failed.', extra=extra) def _initStormLibs(self): ''' @@ -4504,10 +4530,13 @@ def _initCortexExtHttpApi(self): order = s_msgpack.un(byts) for iden in order: + byts = self.slab.get(s_common.uhex(iden), self.httpextapidb) if byts is None: # pragma: no cover - logger.error(f'Missing HTTP API definition for iden={iden}') + extra = self.getLogExtra(iden=iden) + logger.error('Missing HTTP API definition.', extra=extra) continue + adef = s_msgpack.un(byts) self._exthttpapis[adef.get('iden')] = adef @@ -5457,11 +5486,9 @@ async def fill(): await queue.put(item) await queue.close() - except asyncio.CancelledError: # pragma: no cover - raise - except Exception as e: - logger.exception(f'pushBulkEdits fill() error: {e}') + extra = self.getLogExtra(push=iden, exc=e) + logger.warning('Error while pushing bulk edits to remote layer.', extra=extra) await queue.close() base.schedCoro(fill()) @@ -5838,7 +5865,7 @@ async def count(self, text, opts=None): if proxy is not None: proxname = proxy._ahainfo.get('name') extra = self.getLogExtra(mirror=proxname, hash=s_storm.queryhash(text)) - logger.info(f'Offloading Storm query to mirror {proxname}.', extra=extra) + logger.info('Offloading Storm query to mirror.', extra=extra) mirropts = await self._getMirrorOpts(opts) @@ -5849,8 +5876,8 @@ async def count(self, text, opts=None): return await proxy.count(text, opts=mirropts) except s_exc.TimeOut: - mesg = 'Timeout waiting for 
query mirror, running locally instead.' - logger.warning(mesg) + mesg = 'Timeout waiting for query mirror. (running locally)' + logger.warning(mesg, extra=extra) if (nexsoffs := opts.get('nexsoffs')) is not None: if not await self.waitNexsOffs(nexsoffs, timeout=opts.get('nexstimeout')): @@ -5877,36 +5904,49 @@ async def _getMirrorProxy(self, opts): return None if self.stormpool.size() == 0: - logger.warning('Storm query mirror pool is empty, running query locally.') + logger.info('Storm query mirror pool is empty. (running locally)', extra=self.getLogExtra()) return None - proxy = None + timeout = self.stormpoolopts.get('timeout:connection') try: - timeout = self.stormpoolopts.get('timeout:connection') + proxy = await self.stormpool.proxy(timeout=timeout) proxyname = proxy._ahainfo.get('name') if proxyname is not None and proxyname == self.ahasvcname: # we are part of the pool and were selected. Convert to local use. return None + except s_exc.IsFini: + return None + + except TimeoutError as e: + extra = self.getLogExtra(timeout=timeout) + logger.warning('Timeout waiting for pool mirror connection. (running locally)', extra=extra) + return None + + try: + curoffs = opts.setdefault('nexsoffs', await self.getNexsIndx() - 1) miroffs = await s_common.wait_for(proxy.getNexsIndx(), timeout) - 1 + if (delta := curoffs - miroffs) > MAX_NEXUS_DELTA: - mesg = (f'Pool mirror [{proxyname}] Nexus offset delta too large ' - f'({delta} > {MAX_NEXUS_DELTA}), running query locally.') - logger.warning(mesg, extra=self.getLogExtra(delta=delta, mirror=proxyname, mirror_offset=miroffs)) + extra = self.getLogExtra(mirror=proxyname, offset=miroffs, + delta=delta, maxdelta=MAX_NEXUS_DELTA) + mesg = 'Storm query pool mirror nexus offset delta is too large. 
(running locally)' + logger.warning(mesg, extra=extra) return None return proxy - except (TimeoutError, s_exc.IsFini): - if proxy is None: - logger.warning('Timeout waiting for pool mirror, running query locally.') - else: - mesg = f'Timeout waiting for pool mirror [{proxyname}] Nexus offset, running query locally.' - logger.warning(mesg, extra=self.getLogExtra(mirror=proxyname)) - await proxy.fini() + except s_exc.IsFini: + extra = self.getLogExtra(mirror=proxyname) + logger.warning('Proxy closed waiting for pool mirror nexus offset. (running locally)', extra=extra) + return None + + except TimeoutError: + extra = self.getLogExtra(mirror=proxyname, timeout=timeout) + logger.warning('Timeout waiting for pool mirror nexus offset. (running locally)', extra=extra) return None async def storm(self, text, opts=None): @@ -5919,7 +5959,7 @@ async def storm(self, text, opts=None): if proxy is not None: proxname = proxy._ahainfo.get('name') extra = self.getLogExtra(mirror=proxname, hash=s_storm.queryhash(text)) - logger.info(f'Offloading Storm query to mirror {proxname}.', extra=extra) + logger.info('Offloading Storm query to mirror.', extra=extra) mirropts = await self._getMirrorOpts(opts) @@ -5932,7 +5972,7 @@ async def storm(self, text, opts=None): return except s_exc.TimeOut: - mesg = 'Timeout waiting for query mirror, running locally instead.' + mesg = 'Timeout waiting for query mirror. 
(running locally)' logger.warning(mesg, extra=extra) if (nexsoffs := opts.get('nexsoffs')) is not None: @@ -5953,7 +5993,7 @@ async def callStorm(self, text, opts=None): if proxy is not None: proxname = proxy._ahainfo.get('name') extra = self.getLogExtra(mirror=proxname, hash=s_storm.queryhash(text)) - logger.info(f'Offloading Storm query to mirror {proxname}.', extra=extra) + logger.info('Offloading Storm query to mirror.', extra=extra) mirropts = await self._getMirrorOpts(opts) @@ -5963,7 +6003,7 @@ async def callStorm(self, text, opts=None): try: return await proxy.callStorm(text, opts=mirropts) except s_exc.TimeOut: - mesg = 'Timeout waiting for query mirror, running locally instead.' + mesg = 'Timeout waiting for query mirror. (running locally)' logger.warning(mesg, extra=extra) if (nexsoffs := opts.get('nexsoffs')) is not None: @@ -5982,7 +6022,7 @@ async def exportStorm(self, text, opts=None): if proxy is not None: proxname = proxy._ahainfo.get('name') extra = self.getLogExtra(mirror=proxname, hash=s_storm.queryhash(text)) - logger.info(f'Offloading Storm query to mirror {proxname}.', extra=extra) + logger.info('Offloading Storm query to mirror.', extra=extra) mirropts = await self._getMirrorOpts(opts) @@ -5995,7 +6035,7 @@ async def exportStorm(self, text, opts=None): return except s_exc.TimeOut: - mesg = 'Timeout waiting for query mirror, running locally instead.' + mesg = 'Timeout waiting for query mirror. (running locally)' logger.warning(mesg, extra=extra) if (nexsoffs := opts.get('nexsoffs')) is not None: @@ -6161,7 +6201,7 @@ async def reqValidStorm(self, text, opts=None): await self.getStormQuery(text, mode=mode) return True - async def _logStormQuery(self, text, user, extra=None): + def _logStormQuery(self, text, user, extra=None): ''' Log a storm query. 
''' @@ -6622,7 +6662,9 @@ async def enableCronJob(self, iden): ''' await self.agenda.enable(iden) await self.feedBeholder('cron:enable', {'iden': iden}, gates=[iden]) - logger.info(f'Enabled cron job {iden}', extra=self.getLogExtra(iden=iden, status='MODIFY')) + + appt = await self.agenda.get(iden) + logger.info('Enabled cron job.', extra=appt.getLogExtra()) @s_nexus.Pusher.onPushAuto('cron:disable') async def disableCronJob(self, iden): @@ -6635,7 +6677,9 @@ async def disableCronJob(self, iden): await self.agenda.disable(iden) await self._killCronTask(iden) await self.feedBeholder('cron:disable', {'iden': iden}, gates=[iden]) - logger.info(f'Disabled cron job {iden}', extra=self.getLogExtra(iden=iden, status='MODIFY')) + + appt = await self.agenda.get(iden) + logger.info('Disabled cron job.', extra=appt.getLogExtra()) async def killCronTask(self, iden): if self.agenda.appts.get(iden) is None: @@ -7512,7 +7556,7 @@ async def getTempCortex(mods=None): Proxy to the cortex. ''' with s_common.getTempDir() as dirn: - logger.debug(f'Creating temporary cortex as {dirn}') + logger.debug('Creating temporary cortex as %s', dirn) conf = { 'health:sysctl:checks': False, } diff --git a/synapse/lib/aha.py b/synapse/lib/aha.py index 902df18cfad..8f63087355a 100644 --- a/synapse/lib/aha.py +++ b/synapse/lib/aha.py @@ -80,10 +80,12 @@ async def post(self): except asyncio.CancelledError: # pragma: no cover raise except s_exc.SynErr as e: - logger.exception(f'Error provisioning {name}') + extra = self.cell.getLogExtra(name=name) + logger.exception('Error provisioning service.', extra=extra) return self.sendRestErr(e.__class__.__name__, e.get('mesg', str(e))) except Exception as e: # pragma: no cover - logger.exception(f'Error provisioning {name}') + extra = self.cell.getLogExtra(name=name) + logger.exception('Error while provisioning service.', extra=extra) return self.sendRestErr(e.__class__.__name__, str(e)) return self.sendRestRetn({'url': url}) @@ -120,7 +122,8 @@ async def 
get(self): except asyncio.CancelledError: # pragma: no cover raise except Exception as e: # pragma: no cover - logger.exception(f'Error getting Aha services.') + extra = self.cell.getLogExtra() + logger.exception(f'Error getting Aha services.', extra=extra) return self.sendRestErr(e.__class__.__name__, str(e)) return self.sendRestRetn(ret) @@ -202,13 +205,13 @@ async def addAhaSvc(self, name, info, network=None): urlinfo.setdefault('host', host) async def fini(): + if self.cell.isfini: # pragma: no cover - mesg = f'{self.cell.__class__.__name__} is fini. Unable to set {name}@{network} as down.' - logger.warning(mesg, extra=self.cell.getLogExtra(name=svcname, netw=svcnetw)) return - logger.info(f'AhaCellApi fini, setting service offline [{name}]', - extra=self.cell.getLogExtra(name=svcname, netw=svcnetw)) + extra = self.cell.getLogExtra(name=svcfull) + logger.info('Setting AHA service offline.', extra=extra) + coro = self.cell.setAhaSvcDown(name, sess, network=network) self.cell.schedCoro(coro) # this will eventually execute or get cancelled. 
@@ -387,22 +390,22 @@ async def _getSharedItem(self, name): conf = provinfo.get('conf', {}) anam = conf.get('aha:name') anet = conf.get('aha:network') - mesg = f'Retrieved service provisioning info for {anam}.{anet} iden {name}' - logger.info(mesg, extra=self.aha.getLogExtra(iden=name, name=anam, netw=anet)) + extra = self.aha.getLogExtra(name=name) + logger.info('Retrieved service provisioning info.', extra=extra) return ProvApi(self.aha, provinfo) userinfo = await self.aha.getAhaUserEnroll(name) if userinfo is not None: unam = userinfo.get('name') - mesg = f'Retrieved user provisioning info for {unam} iden {name}' - logger.info(mesg, extra=self.aha.getLogExtra(iden=name, name=unam)) + extra = self.aha.getLogExtra(name=name) + logger.info('Retrieved user provisioning info.', extra=extra) await self.aha.delAhaUserEnroll(name) return EnrollApi(self.aha, userinfo) clone = await self.aha.getAhaClone(name) if clone is not None: host = clone.get('host') - mesg = f'Retrieved AHA clone info for {host} iden {name}' + mesg = f'Retrieved clone provisioning info.' logger.info(mesg, extra=self.aha.getLogExtra(iden=name, host=host)) return CloneApi(self.aha, clone) @@ -458,8 +461,8 @@ async def signUserCsr(self, byts): mesg = f'Invalid user CSR CN={name}.' raise s_exc.BadArg(mesg=mesg) - logger.info(f'Signing user CSR for [{username}], signas={ahanetw}', - extra=self.aha.getLogExtra(name=username, signas=ahanetw)) + extra = self.aha.getLogExtra(username=username, signas=ahanetw) + logger.info(f'Signing user CSR.', extra=extra) pkey, cert = self.aha.certdir.signUserCsr(xcsr, ahanetw, save=False) return self.aha.certdir._certToByts(cert) @@ -490,8 +493,8 @@ async def signHostCsr(self, byts): mesg = f'Invalid host CSR CN={name}.' 
raise s_exc.BadArg(mesg=mesg) - logger.info(f'Signing host CSR for [{hostname}], signas={ahanetw}', - extra=self.aha.getLogExtra(name=hostname, signas=ahanetw)) + extra = self.aha.getLogExtra(hostname=hostname, signas=ahanetw) + logger.info('Signing host CSR', extra=extra) pkey, cert = self.aha.certdir.signHostCsr(xcsr, ahanetw, save=False) return self.aha.certdir._certToByts(cert) @@ -509,8 +512,8 @@ async def signUserCsr(self, byts): mesg = f'Invalid user CSR CN={name}.' raise s_exc.BadArg(mesg=mesg) - logger.info(f'Signing user CSR for [{username}], signas={ahanetw}', - extra=self.aha.getLogExtra(name=username, signas=ahanetw)) + extra = self.aha.getLogExtra(username=username, signas=ahanetw) + logger.info(f'Signing user CSR.', extra=extra) pkey, cert = self.aha.certdir.signUserCsr(xcsr, ahanetw, save=False) return self.aha.certdir._certToByts(cert) @@ -555,10 +558,12 @@ async def _initCellBoot(self): path = s_common.genpath(self.dirn, 'cell.guid') if os.path.isfile(path): - logger.info('Cloning AHA: cell.guid detected. Skipping.') + extra = self.getLogExtra() + logger.debug('Cloning AHA: cell.guid detected. 
Skipping.', extra=extra)
             return
 
-        logger.warning(f'Cloning AHA: {curl}')
+        extra = self.getLogExtra(url=curl)
+        logger.warning(f'Cloning AHA: starting.', extra=extra)
 
         async with await s_telepath.openurl(curl) as proxy:
             clone = await proxy.getCloneDef()
@@ -669,7 +674,8 @@ async def iterPoolTopo(self, name):
 
                     svcitem = await self.jsonstor.getPathObj(('aha', 'svcfull', svcname))
                     if not svcitem:
-                        logger.warning(f'Pool ({name}) includes service ({svcname}) which does not exist.')
+                        extra = self.getLogExtra(pool=name, service=svcname)
+                        logger.warning('Pool includes service which does not exist.', extra=extra)
                         continue
 
                     await wind.put(('svc:add', svcitem))
@@ -818,14 +824,16 @@ async def initServiceRuntime(self):
 
         netw = self.conf.req('aha:network')
         if self.certdir.getCaCertPath(netw) is None:
-            logger.info(f'Adding CA certificate for {netw}')
+            extra = self.getLogExtra(ca=netw)
+            logger.info('Adding CA certificate.', extra=extra)
             await self.genCaCert(netw)
 
         name = self.conf.get('aha:name')
         if name is not None:
             host = f'{name}.{netw}'
             if self.certdir.getHostCertPath(host) is None:
-                logger.info(f'Adding server certificate for {host}')
+                extra = self.getLogExtra(hostname=host)
+                logger.info('Adding server certificate.', extra=extra)
                 await self._genHostCert(host, signas=netw)
 
         root = f'root@{netw}'
@@ -902,7 +910,8 @@ async def initServiceNetwork(self):
         if provurl is not None:
             self.provdmon = await ProvDmon.anit(self)
             self.onfini(self.provdmon)
-            logger.info(f'provision listening: {provurl}')
+            extra = self.getLogExtra(url=provurl)
+            logger.info('Provisioning service listening.', extra=extra)
             self.provaddr = await self.provdmon.listen(provurl)
 
     async def _clearInactiveSessions(self):
@@ -915,8 +924,8 @@ async def _clearInactiveSessions(self):
                 network = svc.get('svcnetw')
                 linkiden = svc.get('svcinfo').get('online')
                 if linkiden not in current_sessions:
-                    logger.info(f'AhaCell activecoro setting service offline [{svcname}.{network}]',
-                                extra=self.getLogExtra(name=svcname, 
netw=network))
+                    extra = self.getLogExtra(service=f'{svcname}.{network}')
+                    logger.info('Clearing inactive session.', extra=extra)
                     await self.setAhaSvcDown(svcname, linkiden, network=network)
 
         # Wait until we are cancelled or the cell is fini.
@@ -1006,8 +1015,11 @@ async def addAhaSvc(self, name, info, network=None):
         path = ('aha', 'services', svcnetw, svcname)
 
         unfo = info.get('urlinfo')
-        logger.info(f'Adding service [{svcfull}] from [{unfo.get("scheme")}://{unfo.get("host")}:{unfo.get("port")}]',
-                    extra=self.getLogExtra(name=svcname, netw=svcnetw))
+        extra = self.getLogExtra(service=svcfull,
+                                 host=unfo.get('host'),
+                                 port=unfo.get('port'))
+
+        logger.info(f'Adding service.', extra=extra)
 
         svcinfo = {
             'name': svcfull,
@@ -1172,7 +1184,7 @@ async def delAhaSvc(self, name, network=None):
         name = self._getAhaName(name)
         svcname, svcnetw, svcfull = self._nameAndNetwork(name, network)
 
-        logger.info(f'Deleting service [{svcfull}].', extra=self.getLogExtra(name=svcname, netw=svcnetw))
+        logger.info('Deleting service.', extra=self.getLogExtra(service=svcfull))
 
         full = ('aha', 'svcfull', svcfull)
         path = ('aha', 'services', svcnetw, svcname)
@@ -1214,8 +1226,7 @@ async def _setAhaSvcDown(self, name, linkiden, network=None):
         await self.fire('aha:svcdown', svcname=svcname, svcnetw=svcnetw)
         await self.fire(f'aha:svcdown:{svcfull}', svcname=svcname, svcnetw=svcnetw)
 
-        logger.info(f'Set [{svcfull}] offline.',
-                    extra=self.getLogExtra(name=svcname, netw=svcnetw))
+        logger.info('Setting service offline.', extra=self.getLogExtra(service=svcfull))
 
         client = self.clients.pop(svcfull, None)
         if client is not None:
@@ -1302,8 +1313,9 @@ async def genCaCert(self, network):
             with open(path, 'rb') as fd:
                 return fd.read().decode()
 
-        logger.info(f'Generating CA certificate for {network}',
-                    extra=self.getLogExtra(netw=network))
+        extra = self.getLogExtra(network=network)
+        logger.info('Generating CA certificate.', extra=extra)
+
         fut = s_coro.executor(self.certdir.genCaCert, network, save=False)
         pkey, 
cert = await fut @@ -1330,7 +1342,8 @@ async def _genUserCert(self, username, signas=None): if self.certdir.getUserCertPath(username) is not None: return - logger.info(f'Adding user certificate for {username}') + extra = self.getLogExtra(username=username) + logger.info('Generating user certificate.', extra=extra) pkey, cert = await s_coro.executor(self.certdir.genUserCert, username, signas=signas, save=False) pkey = self.certdir._pkeyToByts(pkey).decode() @@ -1381,8 +1394,8 @@ async def signHostCsr(self, csrtext, signas=None, sans=None): if signas is None: signas = hostname.split('.', 1)[1] - logger.info(f'Signing host CSR for [{hostname}], signas={signas}, sans={sans}', - extra=self.getLogExtra(hostname=hostname, signas=signas)) + extra = self.getLogExtra(hostname=hostname, signas=signas, sans=sans) + logger.info('Signing host CSR.', extra=extra) pkey, cert = self.certdir.signHostCsr(xcsr, signas=signas, sans=sans) @@ -1400,8 +1413,8 @@ async def signUserCsr(self, csrtext, signas=None): if signas is None: signas = username.split('@', 1)[1] - logger.info(f'Signing user CSR for [{username}], signas={signas}', - extra=self.getLogExtra(name=username, signas=signas)) + extra = self.getLogExtra(username=username, signas=signas) + logger.info('Signing user CSR.', extra=extra) pkey, cert = self.certdir.signUserCsr(xcsr, signas=signas) @@ -1458,8 +1471,8 @@ async def addAhaClone(self, host, port=27492, conf=None): } await self._push('aha:clone:add', clone) - logger.info(f'Created AHA clone provisioning for {host} with iden {iden}', - extra=self.getLogExtra(iden=iden, name=host, netw=network)) + extra = self.getLogExtra(iden=iden, host=host, network=network) + logger.info(f'Created clone provisioning info.', extra=extra) return self._getProvClientUrl(iden) @@ -1541,8 +1554,8 @@ async def addAhaSvcProv(self, name, provinfo=None): iden = await self._push('aha:svc:prov:add', provinfo) - logger.info(f'Created service provisioning for {name}.{netw} with iden {iden}', - 
extra=self.getLogExtra(iden=iden, name=name, netw=netw)) + extra = self.getLogExtra(iden=iden, service=hostname) + logger.info('Created service provisioning info.', extra=extra) return self._getProvClientUrl(iden) @@ -1591,7 +1604,8 @@ async def clearAhaSvcProvs(self): for iden, byts in self.slab.scanByFull(db='aha:provs'): self.slab.delete(iden, db='aha:provs') provinfo = s_msgpack.un(byts) - logger.info(f'Deleted service provisioning service={provinfo.get("conf").get("aha:name")}, iden={iden.decode()}') + extra = self.getLogExtra(iden=iden.decode(), service=provinfo.get('conf').get('aha:name')) + logger.info('Deleted service provisioning info.', extra=extra) await asyncio.sleep(0) @s_nexus.Pusher.onPushAuto('aha:enroll:clear') @@ -1599,7 +1613,8 @@ async def clearAhaUserEnrolls(self): for iden, byts in self.slab.scanByFull(db='aha:enrolls'): self.slab.delete(iden, db='aha:enrolls') userinfo = s_msgpack.un(byts) - logger.info(f'Deleted user enrollment username={userinfo.get("name")}, iden={iden.decode()}') + extra = self.getLogExtra(iden=iden.decode(), username=userinfo.get('name')) + logger.info('Deleted user enrollment info.', extra=extra) await asyncio.sleep(0) @s_nexus.Pusher.onPushAuto('aha:clone:clear') @@ -1607,7 +1622,8 @@ async def clearAhaClones(self): for lkey, byts in self.slab.scanByFull(db='aha:clones'): self.slab.delete(lkey, db='aha:clones') cloninfo = s_msgpack.un(byts) - logger.info(f'Deleted AHA clone enrollment username={cloninfo.get("host")}, iden={s_common.ehex(lkey)}') + extra = self.getLogExtra(iden=s_common.ehex(lkey), hostname=cloninfo.get('host')) + logger.info(f'Deleted clone enrollment info.', extra=extra) await asyncio.sleep(0) @s_nexus.Pusher.onPushAuto('aha:svc:prov:del') diff --git a/synapse/lib/cell.py b/synapse/lib/cell.py index 6841079b17b..66bb2408489 100644 --- a/synapse/lib/cell.py +++ b/synapse/lib/cell.py @@ -1180,6 +1180,7 @@ async def __anit__(self, dirn, conf=None, readonly=False, parent=None): self.isactive = False 
self.activebase = None self.inaugural = False + self.ahasvcname = None self.activecoros = {} self.sockaddr = None # Default value... self.https_listeners = [] @@ -1215,7 +1216,6 @@ async def __anit__(self, dirn, conf=None, readonly=False, parent=None): await self._initCellBoot() # we need to know this pretty early... - self.ahasvcname = None ahaname = self.conf.get('aha:name') ahanetw = self.conf.get('aha:network') if ahaname is not None and ahanetw is not None: diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index f7f9712b960..27559f379bb 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -52,7 +52,19 @@ def setLogGlobal(name, valu): _glob_loginfo[name] = valu def getLogExtra(**kwargs): - return {'params': kwargs, 'loginfo': {}} + ''' + Construct a properly enveloped log extra dictionary. + + NOTE: If the key "exc" is specified, it will be used as + an exception to generate standardized error info. + ''' + exc = kwargs.pop('exc', None) + extra = {'params': kwargs, 'loginfo': {}} + + if exc is not None: + extra['loginfo']['error'] = s_common.excinfo(exc) + + return extra class Formatter(logging.Formatter): @@ -91,7 +103,7 @@ def genLogInfo(self, record): pass if record.exc_info: - loginfo['err'] = s_common.err(record.exc_info[1], fulltb=True) + loginfo['error'] = s_common.excinfo(record.exc_info[1]) if not hasattr(record, 'params'): record.params = {} diff --git a/synapse/lib/snap.py b/synapse/lib/snap.py index be344c02e46..c261b3d9b3f 100644 --- a/synapse/lib/snap.py +++ b/synapse/lib/snap.py @@ -782,7 +782,7 @@ async def iterStormPodes(self, text, opts, user=None): extra = opts.get('_loginfo', {}) extra.update({'mode': opts.get('mode', 'storm'), 'view': self.view.iden}) - await self.core._logStormQuery(text, user, extra=extra) + self.core._logStormQuery(text, user, extra=extra) # { form: ( embedprop, ... 
) } embeds = opts.get('embeds') diff --git a/synapse/lib/view.py b/synapse/lib/view.py index 3d12a5ff8e3..f2b6f0e5e13 100644 --- a/synapse/lib/view.py +++ b/synapse/lib/view.py @@ -927,7 +927,7 @@ async def eval(self, text, opts=None): extra = opts.get('_loginfo', {}) extra.update({'mode': opts.get('mode', 'storm'), 'view': self.iden}) - await self.core._logStormQuery(text, user, extra=extra) + self.core._logStormQuery(text, user, extra=extra) await self.core.boss.promote('storm', user=user, info=taskinfo, taskiden=taskiden) @@ -1049,7 +1049,7 @@ async def runStorm(): else: extra = opts.get('_loginfo', {}) extra.update({'mode': opts.get('mode', 'storm'), 'view': self.iden}) - await self.core._logStormQuery(text, user, extra=extra) + self.core._logStormQuery(text, user, extra=extra) async for item in snap.storm(text, opts=opts, user=user): count += 1 diff --git a/synapse/telepath.py b/synapse/telepath.py index 7b355058b8b..6303474e296 100644 --- a/synapse/telepath.py +++ b/synapse/telepath.py @@ -20,6 +20,7 @@ import synapse.lib.link as s_link import synapse.lib.queue as s_queue import synapse.lib.certdir as s_certdir +import synapse.lib.logging as s_logging import synapse.lib.threads as s_threads import synapse.lib.urlhelp as s_urlhelp import synapse.lib.version as s_version @@ -1142,7 +1143,9 @@ async def onfini(): try: await self.onlink(proxy, urlinfo) except Exception as e: - logger.exception(f'onlink: {self.onlink}') + name = proxy._ahainfo.get('name', '') + extra = s_logging.getLogExtra(service=name) + logger.exception('Telepath ClientV2 onlink error.', extra=extra) async def _shutDownPool(self): # when we reconnect to our AHA service, we need to dump the current diff --git a/synapse/tests/test_cortex.py b/synapse/tests/test_cortex.py index bdf10300fb7..445114e3384 100644 --- a/synapse/tests/test_cortex.py +++ b/synapse/tests/test_cortex.py @@ -8386,7 +8386,7 @@ async def _hang(*args, **kwargs): msgs = stream.jsonlines() self.len(2, msgs) - 
self.eq(msgs[0].get('message'), f'Offloading Storm query to mirror 01.core.{ahanet}.') + self.eq(msgs[0].get('message'), f'Offloading Storm query to mirror.') self.eq(msgs[0]['params'].get('hash'), qhash) self.eq(msgs[0]['params'].get('mirror'), f'01.core.{ahanet}') @@ -8405,7 +8405,7 @@ async def _hang(*args, **kwargs): msgs = stream.jsonlines() self.len(2, msgs) - self.eq(msgs[0].get('message'), f'Offloading Storm query to mirror 01.core.{ahanet}.') + self.eq(msgs[0].get('message'), f'Offloading Storm query to mirror.') self.eq(msgs[0]['params'].get('hash'), qhash) self.eq(msgs[0]['params'].get('mirror'), f'01.core.{ahanet}') @@ -8424,7 +8424,7 @@ async def _hang(*args, **kwargs): msgs = stream.jsonlines() self.len(2, msgs) - self.eq(msgs[0].get('message'), f'Offloading Storm query to mirror 01.core.{ahanet}.') + self.eq(msgs[0].get('message'), f'Offloading Storm query to mirror.') self.eq(msgs[0]['params'].get('hash'), qhash) self.eq(msgs[0]['params'].get('mirror'), f'01.core.{ahanet}') @@ -8443,7 +8443,7 @@ async def _hang(*args, **kwargs): msgs = stream.jsonlines() self.len(2, msgs) - self.eq(msgs[0].get('message'), f'Offloading Storm query to mirror 01.core.{ahanet}.') + self.eq(msgs[0].get('message'), f'Offloading Storm query to mirror.') self.eq(msgs[0]['params'].get('hash'), qhash) self.eq(msgs[0]['params'].get('mirror'), f'01.core.{ahanet}') @@ -8460,7 +8460,7 @@ async def _hang(*args, **kwargs): stream.seek(0) data = stream.read() self.notin('Offloading Storm query', data) - self.isin('Timeout waiting for pool mirror [01.core.synapse] Nexus offset', data) + self.isin('Timeout waiting for pool mirror nexus offset', data) self.notin('Timeout waiting for query mirror', data) await core00.stormpool.waitready(timeout=12) @@ -8474,7 +8474,7 @@ async def _hang(*args, **kwargs): stream.seek(0) data = stream.read() self.notin('Offloading Storm query', data) - self.isin('Timeout waiting for pool mirror [01.core.synapse] Nexus offset', data) + self.isin('Timeout 
waiting for pool mirror nexus offset.', data) self.notin('Timeout waiting for query mirror', data) await core00.stormpool.waitready(timeout=12) @@ -8501,9 +8501,7 @@ async def _hang(*args, **kwargs): stream.seek(0) data = stream.read() - explog = (f'Pool mirror [01.core.synapse] Nexus offset delta too large ' - f'({nexsoffs} > 1), running query locally') - self.isin(explog, data) + self.isin('Storm query pool mirror nexus offset delta is too large', data) self.notin('Offloading Storm query', data) with self.getLoggerStream('synapse') as stream: @@ -8560,7 +8558,7 @@ async def _hang(*args, **kwargs): stream.seek(0) data = stream.read() - self.isin('Timeout waiting for pool mirror, running query locally', data) + self.isin('Timeout waiting for pool mirror connection. (running locally)', data) await core01.fini() @@ -8570,28 +8568,28 @@ async def _hang(*args, **kwargs): stream.seek(0) data = stream.read() - self.isin('Storm query mirror pool is empty, running query locally.', data) + self.isin('Storm query mirror pool is empty. (running locally)', data) with self.getLoggerStream('synapse') as stream: self.true(await core00.callStorm('inet:asn=0 return($lib.true)')) stream.seek(0) data = stream.read() - self.isin('Storm query mirror pool is empty, running query locally.', data) + self.isin('Storm query mirror pool is empty. (running locally)', data) with self.getLoggerStream('synapse') as stream: self.len(1, await alist(core00.exportStorm('inet:asn=0'))) stream.seek(0) data = stream.read() - self.isin('Storm query mirror pool is empty, running query locally.', data) + self.isin('Storm query mirror pool is empty. (running locally)', data) with self.getLoggerStream('synapse') as stream: self.eq(1, await core00.count('inet:asn=0')) stream.seek(0) data = stream.read() - self.isin('Storm query mirror pool is empty, running query locally.', data) + self.isin('Storm query mirror pool is empty. 
(running locally)', data) core01 = await base.enter_context(self.getTestCore(dirn=dirn01)) await core01.promote(graceful=True) diff --git a/synapse/tests/test_lib_aha.py b/synapse/tests/test_lib_aha.py index 07b480fd42b..b5e6b42455e 100644 --- a/synapse/tests/test_lib_aha.py +++ b/synapse/tests/test_lib_aha.py @@ -154,7 +154,7 @@ async def test_lib_aha_offon(self): with self.getLoggerStream('synapse.lib.aha') as stream: async with self.getTestAha(dirn=dirn) as aha: - await stream.expect(f'Set [0.cryo.synapse] offline.') + await stream.expect('Setting service offline.') svc = await aha.getAhaSvc('0.cryo...') self.notin('online', svc.get('svcinfo')) From bfabc3e62d7abd5ca4e767260e99d45195b8c40f Mon Sep 17 00:00:00 2001 From: visi Date: Mon, 3 Mar 2025 12:53:04 -0500 Subject: [PATCH 38/52] wip --- synapse/tests/test_datamodel.py | 17 ++++++++++++----- synapse/tests/test_lib_storm.py | 32 +++++++++++++++++++++++++------- synapse/tests/test_telepath.py | 2 +- 3 files changed, 38 insertions(+), 13 deletions(-) diff --git a/synapse/tests/test_datamodel.py b/synapse/tests/test_datamodel.py index aef4619f98f..4907c34c89c 100644 --- a/synapse/tests/test_datamodel.py +++ b/synapse/tests/test_datamodel.py @@ -264,10 +264,13 @@ async def test_model_deprecation(self): await core.addUnivProp('_test', ('test:dep:easy', {}), {}) await dstream.expect(mesg) - mesg = 'extended property test:str:_depr is using a deprecated type test:dep:easy' - with self.getLoggerStream('synapse.cortex') as cstream: + with self.getLoggerStream('synapse.cortex') as stream: await core.addFormProp('test:str', '_depr', ('test:dep:easy', {}), {}) - await cstream.expect(mesg) + + logs = stream.jsonlines() + self.eq(logs[0]['message'], 'Extended property is using a deprecated type which will be removed in 3.0.0.') + self.eq(logs[0]['params']['prop'], 'test:str:_depr') + self.eq(logs[0]['params']['type'], 'test:dep:easy') # Deprecated ctor information propagates upward to types and forms msgs = await 
core.stormlist('[test:dep:str=" test" :beep=" boop "]') @@ -278,9 +281,13 @@ async def test_model_deprecation(self): # Restarting the cortex warns again for various items that it loads from the hive # with deprecated types in them. This is a coverage test for extended properties. - with self.getLoggerStream('synapse.cortex') as cstream: + with self.getLoggerStream('synapse.cortex') as stream: async with await s_cortex.Cortex.anit(dirn, conf) as core: - await cstream.expect(mesg) + pass + logs = stream.jsonlines() + self.eq(logs[0]['message'], 'Extended property is using a deprecated type which will be removed in 3.0.0.') + self.eq(logs[0]['params']['prop'], 'test:str:_depr') + self.eq(logs[0]['params']['type'], 'test:dep:easy') async def test_datamodel_getmodeldefs(self): ''' diff --git a/synapse/tests/test_lib_storm.py b/synapse/tests/test_lib_storm.py index 4ac35b4cab0..31428476b64 100644 --- a/synapse/tests/test_lib_storm.py +++ b/synapse/tests/test_lib_storm.py @@ -1447,7 +1447,10 @@ async def get(self, name): with self.getLoggerStream('synapse.cortex') as stream: await core.addStormPkg(pkgdef) - await stream.expect('bazfaz requirement') + + logs = stream.jsonlines() + self.eq(logs[0]['message'], 'Storm package requirement is unmet.') + self.eq(logs[0]['params']['name'], 'bazfaz') pkgdef = { 'name': 'bazfaz', @@ -1461,7 +1464,11 @@ async def get(self, name): with self.getLoggerStream('synapse.cortex') as stream: await core.addStormPkg(pkgdef) - await stream.expect('bazfaz optional requirement') + + logs = stream.jsonlines() + self.eq(logs[0]['message'], 'Storm package requirement is unmet.') + self.eq(logs[0]['params']['name'], 'bazfaz') + self.true(logs[0]['params']['require']['optional']) deps = await core.callStorm('return($lib.pkg.deps($pkgdef))', opts={'vars': {'pkgdef': pkgdef}}) self.eq({ @@ -2808,11 +2815,22 @@ async def get(self, name): msgs = await core.stormlist(f'pkg.load --ssl-noverify --raw https://127.0.0.1:{port}/api/v1/pkgtestraw/yep') 
self.stormIsInPrint('testload @0.3.0', msgs) - stream.seek(0) - buf = stream.read() - self.isin("testload onload output: teststring", buf) - self.isin("testload onload output: testwarn", buf) - self.isin("No var with name: newp", buf) + logs = stream.jsonlines() + self.eq(logs[0]['level'], 'INFO') + self.eq(logs[0]['message'], 'Storm package onload print.') + self.eq(logs[0]['params']['mesg'], 'teststring') + self.eq(logs[0]['params']['name'], 'testload') + + self.eq(logs[1]['level'], 'WARNING') + self.eq(logs[1]['message'], 'Storm package onload warning.') + self.eq(logs[1]['params']['mesg'], 'testwarn') + self.eq(logs[1]['params']['name'], 'testload') + + self.eq(logs[2]['level'], 'WARNING') + self.eq(logs[2]['message'], 'Storm package onload failure.') + self.eq(logs[2]['params']['name'], 'testload') + self.eq(logs[2]['error']['err'], 'AttributeError') + self.len(1, await core.nodes(f'ps:contact={cont}')) evnts = await waiter.wait(timeout=4) diff --git a/synapse/tests/test_telepath.py b/synapse/tests/test_telepath.py index a3b13a2a311..1b1fb69ec87 100644 --- a/synapse/tests/test_telepath.py +++ b/synapse/tests/test_telepath.py @@ -916,7 +916,7 @@ async def badonlink(proxy, urlinfo): with self.getLoggerStream('synapse.telepath', 'onlink: ') as stream: async with await s_telepath.open(url1, onlink=badonlink) as targ: - await stream.expect('onlink: ') + await stream.expect('onlink error') await dmon0.fini() await dmon1.fini() From f19ad88888e618761587ead491ec39d1f76d44df Mon Sep 17 00:00:00 2001 From: visi Date: Mon, 3 Mar 2025 13:04:21 -0500 Subject: [PATCH 39/52] wip --- synapse/tests/test_lib_dyndeps.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/synapse/tests/test_lib_dyndeps.py b/synapse/tests/test_lib_dyndeps.py index ab782c67f67..89abe771c96 100644 --- a/synapse/tests/test_lib_dyndeps.py +++ b/synapse/tests/test_lib_dyndeps.py @@ -16,7 +16,8 @@ class DynDepsTest(s_t_utils.SynTest): def test_dyndeps_dynmod(self): with 
self.getLoggerStream('synapse.lib.dyndeps') as stream: self.none(s_dyndeps.getDynMod('- -')) - await stream.expect('Failed to import "- -"') + logs = stream.jsonlines() + self.eq(logs[0]['message'], 'Failed to import "- -"') self.nn(s_dyndeps.getDynMod('sys')) def test_dyndeps_dynloc(self): From 606a937d66a0f64543393dff5bbd46dd226a4855 Mon Sep 17 00:00:00 2001 From: visi Date: Mon, 3 Mar 2025 13:21:31 -0500 Subject: [PATCH 40/52] wip --- synapse/lib/aha.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/synapse/lib/aha.py b/synapse/lib/aha.py index 8f63087355a..0cf6ed2691b 100644 --- a/synapse/lib/aha.py +++ b/synapse/lib/aha.py @@ -123,7 +123,7 @@ async def get(self): raise except Exception as e: # pragma: no cover extra = self.cell.getLogExtra() - logger.exception(f'Error getting Aha services.', extra=extra) + logger.exception('Error getting Aha services.', extra=extra) return self.sendRestErr(e.__class__.__name__, str(e)) return self.sendRestRetn(ret) @@ -462,7 +462,7 @@ async def signUserCsr(self, byts): raise s_exc.BadArg(mesg=mesg) extra = self.aha.getLogExtra(username=username, signas=ahanetw) - logger.info(f'Signing user CSR.', extra=extra) + logger.info('Signing user CSR.', extra=extra) pkey, cert = self.aha.certdir.signUserCsr(xcsr, ahanetw, save=False) return self.aha.certdir._certToByts(cert) @@ -513,7 +513,7 @@ async def signUserCsr(self, byts): raise s_exc.BadArg(mesg=mesg) extra = self.aha.getLogExtra(username=username, signas=ahanetw) - logger.info(f'Signing user CSR.', extra=extra) + logger.info('Signing user CSR.', extra=extra) pkey, cert = self.aha.certdir.signUserCsr(xcsr, ahanetw, save=False) return self.aha.certdir._certToByts(cert) @@ -548,7 +548,7 @@ class AhaCell(s_cell.Cell): # Rename the class and remove these two overrides in 3.0.0 @classmethod def getEnvPrefix(cls): - return (f'SYN_AHA', f'SYN_{cls.__name__.upper()}', ) + return ('SYN_AHA', f'SYN_{cls.__name__.upper()}', ) async def 
_initCellBoot(self): @@ -563,7 +563,7 @@ async def _initCellBoot(self): return extra = self.getLogExtra(url=curl) - logger.warning(f'Cloning AHA: starting.', extra=extra) + logger.warning('Cloning AHA: starting.', extra=extra) async with await s_telepath.openurl(curl) as proxy: clone = await proxy.getCloneDef() @@ -1019,7 +1019,7 @@ async def addAhaSvc(self, name, info, network=None): host=unfo.get('host'), port=unfo.get('port')) - logger.info(f'Adding service.', extra=extra) + logger.info('Adding service.', extra=extra) svcinfo = { 'name': svcfull, @@ -1472,7 +1472,7 @@ async def addAhaClone(self, host, port=27492, conf=None): await self._push('aha:clone:add', clone) extra = self.getLogExtra(iden=iden, host=host, network=network) - logger.info(f'Created clone provisioning info.', extra=extra) + logger.info('Created clone provisioning info.', extra=extra) return self._getProvClientUrl(iden) @@ -1623,7 +1623,7 @@ async def clearAhaClones(self): self.slab.delete(lkey, db='aha:clones') cloninfo = s_msgpack.un(byts) extra = self.getLogExtra(iden=s_common.ehex(lkey), hostname=cloninfo.get('host')) - logger.info(f'Deleted clone enrollment info.', extra=extra) + logger.info('Deleted clone enrollment info.', extra=extra) await asyncio.sleep(0) @s_nexus.Pusher.onPushAuto('aha:svc:prov:del') @@ -1663,8 +1663,8 @@ async def addAhaUserEnroll(self, name, userinfo=None, again=False): iden = await self._push('aha:enroll:add', userinfo) - logger.info(f'Created user provisioning for {name} with iden {iden}', - extra=self.getLogExtra(iden=iden, name=name)) + logger.info('Created user enrollment info.', + extra=self.getLogExtra(iden=iden, name=name)) return self._getProvClientUrl(iden) From 9b20458a5e4cff82da5ddbb3d9d32e50efee2b09 Mon Sep 17 00:00:00 2001 From: visi Date: Mon, 3 Mar 2025 14:03:45 -0500 Subject: [PATCH 41/52] wip --- synapse/cortex.py | 39 +++++++++++++------------------------ synapse/lib/aha.py | 4 ++-- synapse/tests/test_utils.py | 6 ++++++ 3 files changed, 22 
insertions(+), 27 deletions(-) diff --git a/synapse/cortex.py b/synapse/cortex.py index 0d0573e9b06..ff9b1039035 100644 --- a/synapse/cortex.py +++ b/synapse/cortex.py @@ -1125,14 +1125,14 @@ async def _storUpdateMacros(self): await self._addStormMacro(mdef) - except Exception as e: + except Exception as e: # pragma: no cover extra = self.getLogExtra(name=name) - logger.exception(f'Macro migration error. Skipped.', extra=extra) + logger.exception('Macro migration error. Skipped.', extra=extra) def getStormMacro(self, name, user=None): if not name: - raise s_exc.BadArg(mesg=f'Macro names must be at least 1 character long') + raise s_exc.BadArg(mesg='Macro names must be at least 1 character long.') if len(name) > 491: raise s_exc.BadArg(mesg='Macro names may only be up to 491 chars.') @@ -2173,10 +2173,7 @@ async def _initStormDmons(self): try: await self.runStormDmon(iden, ddef) - except asyncio.CancelledError: # pragma: no cover TODO: remove once >= py 3.8 only - raise - - except Exception as e: + except Exception as e: # pragma: no cover extra = self.getLogExtra(iden=iden, exc=e) logger.warning('Failed to start Storm dmon.', extra=extra) @@ -2187,10 +2184,7 @@ async def _initStormSvcs(self): try: await self._setStormSvc(sdef) - except asyncio.CancelledError: # pragma: no cover TODO: remove once >= py 3.8 only - raise - - except Exception as e: + except Exception as e: # pragma: no cover extra = self.getLogExtra(iden=iden, exc=e) logger.warning('Failed to initialize Storm service.', extra=extra) @@ -2714,10 +2708,7 @@ async def _tryLoadStormPkg(self, pkgdef): await self._normStormPkg(pkgdef, validstorm=False) self.loadStormPkg(pkgdef) - except asyncio.CancelledError: # pragma: no cover TODO: remove once >= py 3.8 only - raise - - except Exception as e: + except Exception as e: # pragma: no cover name = pkgdef.get('name', '') extra = self.getLogExtra(name=name, exc=e) logger.exception('Error loading Storm package.', extra=extra) @@ -3126,9 +3117,7 @@ async def 
_runStormSvcAdd(self, iden): try: await self.runStormSvcEvent(iden, 'add') - except asyncio.CancelledError: # pragma: no cover TODO: remove once py 3.8 only - raise - except Exception as e: + except Exception as e: # pragma: no cover extra = self.getLogExtra(iden=iden) logger.exception('Failed to run Storm service add event.', extra=extra) return @@ -3243,7 +3232,7 @@ async def _loadExtModel(self): for form, prop, tdef, info in self.extprops.values(): try: prop = self.model.addFormProp(form, prop, tdef, info) - except Exception as e: + except Exception as e: # pragma: no cover extra = self.getLogExtra(prop=f'{form}:{prop}', exc=e) logger.warning('Extended property definition error.', extra=extra) else: @@ -3254,14 +3243,14 @@ async def _loadExtModel(self): for prop, tdef, info in self.extunivs.values(): try: self.model.addUnivProp(prop, tdef, info) - except Exception as e: + except Exception as e: # pragma: no cover extra = self.getLogExtra(univ=prop, exc=e) logger.warning('Extended universal property definition error.', extra=extra) for prop, tdef, info in self.exttagprops.values(): try: self.model.addTagProp(prop, tdef, info) - except Exception as e: + except Exception as e: # pragma: no cover extra = self.getLogExtra(prop=prop, exc=e) logger.warning('Tag property definition error.', extra=extra) @@ -4457,9 +4446,9 @@ async def _initPureStormCmds(self): else: await self._trySetStormCmd(name, cdef) - for name in oldcmds: + for name in oldcmds: # pragma: no cover extra = self.getLogExtra(name=name) - logger.warning('Removing old command.', extra=extra) + logger.warning('Storm outdated command removed.', extra=extra) self.cmddefs.pop(name) for pkgdef in self.pkgdefs.values(): @@ -4468,7 +4457,7 @@ async def _initPureStormCmds(self): async def _trySetStormCmd(self, name, cdef): try: self._setStormCmd(cdef) - except (asyncio.CancelledError, Exception) as e: + except Exception as e: # pragma: no cover extra = self.getLogExtra(name=name, exc=e) logger.warning('Storm 
command load failed.', extra=extra) @@ -5939,7 +5928,7 @@ async def _getMirrorProxy(self, opts): return proxy - except s_exc.IsFini: + except s_exc.IsFini: # pragma: no cover extra = self.getLogExtra(mirror=proxyname) logger.warning('Proxy closed waiting for pool mirror nexus offset. (running locally)', extra=extra) return None diff --git a/synapse/lib/aha.py b/synapse/lib/aha.py index 0cf6ed2691b..24d12f457d6 100644 --- a/synapse/lib/aha.py +++ b/synapse/lib/aha.py @@ -557,7 +557,7 @@ async def _initCellBoot(self): return path = s_common.genpath(self.dirn, 'cell.guid') - if os.path.isfile(path): + if os.path.isfile(path): # pragma: no cover extra = self.getLogExtra() logger.debug('Cloning AHA: cell.guid detected. Skipping.', extra=extra) return @@ -673,7 +673,7 @@ async def iterPoolTopo(self, name): for svcname in poolinfo.get('services'): svcitem = await self.jsonstor.getPathObj(('aha', 'svcfull', svcname)) - if not svcitem: + if not svcitem: # pragma: no cover extra = self.getLogExtra(pool=name, service=svcname) logger.warning('Pool includes service which does not exist.', extra=extra) continue diff --git a/synapse/tests/test_utils.py b/synapse/tests/test_utils.py index 5b2de539109..4f66d6e6ac5 100644 --- a/synapse/tests/test_utils.py +++ b/synapse/tests/test_utils.py @@ -4,6 +4,7 @@ import logging import unittest +import synapse.exc as s_exc import synapse.common as s_common import synapse.lib.base as s_base @@ -125,6 +126,11 @@ def logathing(mesg): self.isin('StreamEvent Test Message', mesgs) self.notin('notthere', mesgs) + # coverage for the stream sad path... 
+ with self.getLoggerStream('synapse.tests.test_utils') as stream: + with self.raises(s_exc.SynErr): + await stream.expect('newp', timeout=0.0001) + def test_syntest_envars(self): os.environ['foo'] = '1' os.environ['bar'] = '2' From b9dd34549d356f867802b022f5328c291782ae2e Mon Sep 17 00:00:00 2001 From: visi Date: Mon, 3 Mar 2025 15:27:24 -0500 Subject: [PATCH 42/52] wip --- synapse/tests/test_lib_agenda.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/tests/test_lib_agenda.py b/synapse/tests/test_lib_agenda.py index ce4c507f3b4..fe70733ad33 100644 --- a/synapse/tests/test_lib_agenda.py +++ b/synapse/tests/test_lib_agenda.py @@ -1134,7 +1134,7 @@ async def test_agenda_graceful_promotion_with_running_cron(self): async with self.getTestCore(conf=conf01) as core01: with self.getLoggerStream('synapse.storm.log') as stream: - await stream.expect('I AM A ERROR LOG MESSAGE') + await stream.expect('I AM A ERROR LOG MESSAGE', timeout=10) cron = await core00.callStorm('return($lib.cron.list())') self.len(1, cron) From 009d7451304ecaef2b36cdc3d4855a78e227afa8 Mon Sep 17 00:00:00 2001 From: visi Date: Tue, 4 Mar 2025 08:25:27 -0500 Subject: [PATCH 43/52] wip --- synapse/axon.py | 4 +-- synapse/lib/cell.py | 50 ++++++++++++---------------------- synapse/lib/coro.py | 27 ++++++++---------- synapse/lib/logging.py | 39 ++++++++++++++------------ synapse/lib/scope.py | 8 +++++- synapse/lib/storm.py | 2 -- synapse/tests/test_lib_cell.py | 8 +++--- 7 files changed, 63 insertions(+), 75 deletions(-) diff --git a/synapse/axon.py b/synapse/axon.py index e5e0e2f5ebf..88854f7d1e3 100644 --- a/synapse/axon.py +++ b/synapse/axon.py @@ -1449,7 +1449,7 @@ async def readlines(self, sha256, errors='ignore'): todo = s_common.todo(_spawn_readlines, sock00, errors=errors) async with await s_base.Base.anit() as scope: - scope.schedCoro(s_coro.spawn(todo, log_conf=await self._getSpawnLogConf())) + scope.schedCoro(s_coro.spawn(todo, logconf=self.getLogConf())) feedtask = 
scope.schedCoro(self._sha256ToLink(sha256, link00)) while not self.isfini: @@ -1483,7 +1483,7 @@ async def csvrows(self, sha256, dialect='excel', errors='ignore', **fmtparams): todo = s_common.todo(_spawn_readrows, sock00, dialect, fmtparams, errors=errors) async with await s_base.Base.anit() as scope: - scope.schedCoro(s_coro.spawn(todo, log_conf=await self._getSpawnLogConf())) + scope.schedCoro(s_coro.spawn(todo, logconf=self.getLogConf())) feedtask = scope.schedCoro(self._sha256ToLink(sha256, link00)) while not self.isfini: diff --git a/synapse/lib/cell.py b/synapse/lib/cell.py index 66bb2408489..938a9972fb5 100644 --- a/synapse/lib/cell.py +++ b/synapse/lib/cell.py @@ -184,20 +184,15 @@ async def _iterBackupWork(path, linkinfo): logger.info(f'Backup streaming for [{path}] completed.') -def _iterBackupProc(path, linkinfo): +def _iterBackupProc(path, linkinfo, logconf): ''' Multiprocessing target for streaming a backup. ''' # This logging call is okay to run since we're executing in # our own process space and no logging has been configured. - logconf = linkinfo.get('logconf') - - level = logconf.get('level') - structlog = logconf.get('structlog') - - s_logging.setup(level=level, structlog=structlog) - - logger.info(f'Backup streaming process for [{path}] starting.') + s_logging.setup(**logconf) + extra = s_logging.getLogExtra(path=path) + logger.info('Backup streaming process starting.', extra=extra) asyncio.run(_iterBackupWork(path, linkinfo)) class CellApi(s_base.Base): @@ -1139,7 +1134,7 @@ class Cell(s_nexus.Pusher, s_telepath.Aware): 'hidedocs': True, }, '_log_conf': { - 'description': 'Opaque structure used for logging by spawned processes.', + 'description': 'Deprecated. 
Remove in 3.0.0.', 'type': 'object', 'hideconf': True } @@ -1220,6 +1215,7 @@ async def __anit__(self, dirn, conf=None, readonly=False, parent=None): ahanetw = self.conf.get('aha:network') if ahaname is not None and ahanetw is not None: self.ahasvcname = f'{ahaname}.{ahanetw}' + s_logging.setLogInfo('service', self.ahasvcname) # each cell has a guid path = s_common.genpath(self.dirn, 'cell.guid') @@ -2077,8 +2073,6 @@ async def _initAhaService(self): ahalead = self.conf.get('aha:leader') - self.ahasvcname = f'{ahaname}.{ahanetw}' - async def _runAhaRegLoop(): while not self.isfini: @@ -2561,7 +2555,7 @@ async def _execBackupTask(self, dirn): mypipe, child_pipe = ctx.Pipe() paths = [str(slab.path) for slab in slabs] - logconf = await self._getSpawnLogConf() + logconf = self.getLogConf() proc = None try: @@ -2620,10 +2614,7 @@ def _backupProc(pipe, srcdir, dstdir, lmdbpaths, logconf): (In a separate process) Actually do the backup ''' # This is a new process: configure logging - level = logconf.get('level') - structlog = logconf.get('structlog') - - s_logging.setup(level=level, structlog=structlog) + s_logging.setup(**logconf) try: @@ -2682,15 +2673,15 @@ async def _streamBackupArchive(self, path, user, name): mesg = 'Link not found in scope. This API must be called via a CellApi.' 
raise s_exc.SynErr(mesg=mesg) + logconf = self.getLogConf() linkinfo = await link.getSpawnInfo() - linkinfo['logconf'] = await self._getSpawnLogConf() await self.boss.promote('backup:stream', user=user, info={'name': name}) ctx = multiprocessing.get_context('spawn') def getproc(): - proc = ctx.Process(target=_iterBackupProc, args=(path, linkinfo)) + proc = ctx.Process(target=_iterBackupProc, args=(path, linkinfo, logconf)) proc.start() return proc @@ -3650,6 +3641,12 @@ async def getCellApi(self, link, user, path): ''' return await self.cellapi.anit(self, link, user) + def getLogConf(self): + logconf = s_logging.getLogConf() + if self.ahasvcname is not None: + logconf['loginfo']['service'] = self.ahasvcname + return logconf + def getLogExtra(self, **kwargs): ''' Get an extra dictionary for structured logging which can be used as a extra argument for loggers. @@ -3665,9 +3662,6 @@ def getLogExtra(self, **kwargs): extra['loginfo']['service'] = self.ahasvcname return extra - async def _getSpawnLogConf(self): - return self.conf.get('_log_conf', {}) - def modCellConf(self, conf): ''' Modify the Cell's ondisk configuration overrides file and runtime configuration. @@ -3771,7 +3765,7 @@ def getArgParser(cls, conf=None): pars = argparse.ArgumentParser(prog=name) pars.add_argument('dirn', help=f'The storage directory for the {name} service.') - pars.add_argument('--log-level', default='INFO', choices=list(s_const.LOG_LEVEL_CHOICES.keys()), + pars.add_argument('--log-level', default='WARNING', choices=list(s_const.LOG_LEVEL_CHOICES.keys()), type=s_logging.normLogLevel, help='Deprecated. Please use SYN_LOG_LEVEL environment variable.') @@ -4050,7 +4044,6 @@ async def _bootProvConf(self, provconf): ''' # replace our runtime config with the updated config with provconf data new_conf = self.initCellConf(self.conf) - new_conf.setdefault('_log_conf', await self._getSpawnLogConf()) # Load any opts we have and environment variables. 
new_conf.setConfFromOpts() @@ -4216,9 +4209,6 @@ async def initFromArgv(cls, argv, outp=None): 'structlog': opts.structured_logging } - # if (logarchive := conf.get('log:archive')) is not None: - # logconf['archive'] = logarchive - logconf = s_logging.setup(**logconf) extra = s_logging.getLogExtra(service_type=cls.getCellType(), @@ -4230,7 +4220,6 @@ async def initFromArgv(cls, argv, outp=None): await cls._initBootRestore(opts.dirn) try: - conf['_log_conf'] = logconf conf.setConfFromOpts(opts) conf.setConfFromEnvs() conf.setConfFromFile(path) @@ -4239,8 +4228,6 @@ async def initFromArgv(cls, argv, outp=None): logger.exception(f'Error while bootstrapping cell config.') raise - # s_coro.set_pool_logging(logger, logconf=conf['_log_conf']) - try: cell = await cls.anit(opts.dirn, conf=conf) except: @@ -4297,9 +4284,6 @@ async def execmain(cls, argv, outp=None): cell = await cls.initFromArgv(argv, outp=outp) - if cell.ahasvcname is not None: - s_logging.setLogGlobal('service', cell.ahasvcname) - await cell.main() async def _getCellUser(self, link, mesg): diff --git a/synapse/lib/coro.py b/synapse/lib/coro.py index d37376d4a74..8265b7f8c8b 100644 --- a/synapse/lib/coro.py +++ b/synapse/lib/coro.py @@ -219,9 +219,7 @@ def func(*args, **kwargs): def _exectodo(que, todo, logconf): # This is a new process: configure logging - level = logconf.get('level') - structlog = logconf.get('structlog') - s_logging.setup(level=level, structlog=structlog) + s_logging.setup(**logconf) func, args, kwargs = todo try: @@ -238,7 +236,7 @@ def _exectodo(que, todo, logconf): exc = s_exc.SynErr(mesg=mesg, name=name, info=info) que.put(exc) -async def spawn(todo, timeout=None, ctx=None, log_conf=None): +async def spawn(todo, timeout=None, ctx=None, logconf=None): ''' Run a todo (func, args, kwargs) tuple in a multiprocessing subprocess. @@ -246,7 +244,7 @@ async def spawn(todo, timeout=None, ctx=None, log_conf=None): todo (tuple): A tuple of function, ``*args``, and ``**kwargs``. 
timeout (int): The timeout to wait for the todo function to finish. ctx (multiprocess.Context): A optional multiprocessing context object. - log_conf (dict): An optional logging configuration for the spawned process. + logconf (dict): An optional logging configuration for the spawned process. Notes: The contents of the todo tuple must be able to be pickled for execution. @@ -257,12 +255,12 @@ async def spawn(todo, timeout=None, ctx=None, log_conf=None): ''' if ctx is None: ctx = multiprocessing.get_context('spawn') - if log_conf is None: - log_conf = {} + + if logconf is None: + logconf = s_logging.getLogConf() que = ctx.Queue() - proc = ctx.Process(target=_exectodo, - args=(que, todo, log_conf)) + proc = ctx.Process(target=_exectodo, args=(que, todo, logconf)) def execspawn(): @@ -317,13 +315,10 @@ def _runtodo(todo): return todo[0](*todo[1], **todo[2]) def _init_pool_worker(logger_, logconf): - - level = logconf.get('level') - structlog = logconf.get('structlog') - s_logging.setup(level=level, structlog=structlog) - + s_logging.setup(**logconf) p = multiprocessing.current_process() - logger.debug(f'Initialized new forkserver pool worker: name={p.name} pid={p.ident}') + extra = s_logging.getLogExtra(name=p.name, pid=p.ident) + logger.debug('Initialized new forkserver pool worker.', extra=extra) _pool_logconf = None def set_pool_logging(logger_, logconf): @@ -357,7 +352,7 @@ async def forked(func, *args, **kwargs): logger.exception(f'Shared forkserver pool is broken, fallback enabled: {func}') logger.debug(f'Forkserver pool using spawn fallback: {func}') - return await spawn(todo, log_conf=_pool_logconf) + return await spawn(todo, logconf=s_logging.getLogConf()) async def semafork(func, *args, **kwargs): ''' diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index 27559f379bb..bb839e8d753 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -45,7 +45,7 @@ async def _feedLogTask(): await wind.puts(todo) _glob_loginfo = {} -def 
setLogGlobal(name, valu): +def setLogInfo(name, valu): ''' Configure global values which should be added to every log. ''' @@ -87,20 +87,14 @@ def genLogInfo(self, record): if hasattr(record, 'loginfo'): loginfo.update(record.loginfo) - try: - - if (user := s_scope.get('user')) is not None: - loginfo['user'] = user.iden - loginfo['username'] = user.name - - elif (sess := s_scope.get('sess')) is not None: - if sess.user is not None: - loginfo['user'] = sess.user.iden - loginfo['username'] = sess.user.name + if (user := s_scope.get('user')) is not None: + loginfo['user'] = user.iden + loginfo['username'] = user.name - except RuntimeError: - # if there is no running loop, there can be no scope vars... - pass + elif (sess := s_scope.get('sess')) is not None: + if sess.user is not None: + loginfo['user'] = sess.user.iden + loginfo['username'] = sess.user.name if record.exc_info: loginfo['error'] = s_common.excinfo(record.exc_info[1]) @@ -145,20 +139,31 @@ def setup(**conf): if not conf.get('structlog'): fmtclass = TextFormatter + # this is used to pass things like service name + # to child processes and forked workers... + loginfo = conf.pop('loginfo', None) + if loginfo is not None: + _glob_loginfo.update(loginfo) + handler = logging.StreamHandler() handler.setFormatter(fmtclass(datefmt=conf.get('datefmt'))) level = normLogLevel(conf.get('level')) + _glob_logconf.clear() + _glob_logconf.update(conf) + logging.basicConfig(level=level, handlers=(handler,)) logger.info('log level set to %s', s_const.LOG_LEVEL_INVERSE_CHOICES.get(level)) - _glob_logconf.clear() - _glob_logconf.update(conf) - return conf +def getLogConf(): + logconf = _glob_logconf.copy() + logconf['loginfo'] = _glob_loginfo.copy() + return logconf + def getLogConfFromEnv(): conf = {} diff --git a/synapse/lib/scope.py b/synapse/lib/scope.py index a6572cad8b1..9f36fc3fed3 100644 --- a/synapse/lib/scope.py +++ b/synapse/lib/scope.py @@ -126,6 +126,9 @@ def _task_scope() -> Scope: Scope: A Scope object. 
''' task = asyncio.current_task() + if task is None: + return None + scope = getattr(task, '_syn_scope', None) # no need to lock because it's per-task... @@ -139,7 +142,10 @@ def get(name, defval=None): ''' Access this task's scope with default values from glob. ''' - return _task_scope().get(name, defval=defval) + scope = _task_scope() + if scope is None: + return defval + return scope.get(name, defval=defval) def set(name, valu): ''' diff --git a/synapse/lib/storm.py b/synapse/lib/storm.py index 344175fdceb..bf656fe22fb 100644 --- a/synapse/lib/storm.py +++ b/synapse/lib/storm.py @@ -1542,8 +1542,6 @@ async def __anit__(self, query, snap, opts=None, user=None, root=None): self.query = query - self.spawn_log_conf = await self.snap.core._getSpawnLogConf() - self.readonly = opts.get('readonly', False) # EXPERIMENTAL: Make it safe to run untrusted queries self.model = snap.core.getDataModel() diff --git a/synapse/tests/test_lib_cell.py b/synapse/tests/test_lib_cell.py index d303fbb11f8..68dca64a515 100644 --- a/synapse/tests/test_lib_cell.py +++ b/synapse/tests/test_lib_cell.py @@ -62,14 +62,14 @@ def _backupSleep(path, linkinfo): async def _doEOFBackup(path): return -async def _iterBackupEOF(path, linkinfo): +async def _iterBackupEOF(path, linkinfo, logconf): link = await s_link.fromspawn(linkinfo) await s_daemon.t2call(link, _doEOFBackup, (path,), {}) link.writer.write_eof() await link.fini() -def _backupEOF(path, linkinfo): - asyncio.run(_iterBackupEOF(path, linkinfo)) +def _backupEOF(path, linkinfo, logconf): + asyncio.run(_iterBackupEOF(path, linkinfo, logconf)) def lock_target(dirn, evt1): # pragma: no cover ''' @@ -3458,7 +3458,7 @@ async def sleep99(cell): async def test_cell_logs(self): - s_logging.setLogGlobal('woot', 'hehe') + s_logging.setLogInfo('woot', 'hehe') async with self.getTestAha() as aha: async with aha.getLocalProxy() as proxy: From d60e267a6770eaf8e0d5e0d283327e908c4fb6e4 Mon Sep 17 00:00:00 2001 From: visi Date: Tue, 4 Mar 2025 08:36:58 -0500 
Subject: [PATCH 44/52] wip --- synapse/lib/scope.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/synapse/lib/scope.py b/synapse/lib/scope.py index 9f36fc3fed3..3ad42d299ce 100644 --- a/synapse/lib/scope.py +++ b/synapse/lib/scope.py @@ -125,8 +125,12 @@ def _task_scope() -> Scope: Returns: Scope: A Scope object. ''' - task = asyncio.current_task() - if task is None: + try: + task = asyncio.current_task() + if task is None: + return None + except RuntimeError: + # no running loop... return None scope = getattr(task, '_syn_scope', None) From 94edbd32ce15f29f4bc807cf10a4aa3551c93527 Mon Sep 17 00:00:00 2001 From: visi Date: Thu, 6 Mar 2025 13:02:26 -0500 Subject: [PATCH 45/52] wip --- synapse/lib/coro.py | 7 ++++++ synapse/lib/logging.py | 56 +++++++++++++++++++++++++++++++++++++++++- 2 files changed, 62 insertions(+), 1 deletion(-) diff --git a/synapse/lib/coro.py b/synapse/lib/coro.py index 8265b7f8c8b..6f71bc59719 100644 --- a/synapse/lib/coro.py +++ b/synapse/lib/coro.py @@ -164,6 +164,13 @@ async def ornot(func, *args, **kwargs): return await retn return retn +def has_running_loop(): + try: + asyncio.get_running_loop() + return True + except RuntimeError: + return False + bgtasks = set() def create_task(coro): diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index bb839e8d753..dc62f5f6f71 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -1,4 +1,5 @@ import os +import sys import json import asyncio import logging @@ -8,6 +9,7 @@ import synapse.exc as s_exc import synapse.common as s_common +import synapse.lib.coro as s_coro import synapse.lib.const as s_const import synapse.lib.scope as s_scope @@ -122,10 +124,59 @@ def format(self, record): loginfo = self.genLogInfo(record) return logging.Formatter.format(self, record) +class StreamHandler(logging.StreamHandler): + + _pump_task = None + _pump_fifo = collections.deque(maxlen=10000) + _pump_event = asyncio.Event() + + def emit(self, record): + + if 
self._pump_task is None: + return logging.StreamHandler.emit(self, record) + + try: + text = self.format(record) + self._pump_fifo.append(text) + self._pump_event.set() + + # emulating behavior of parent class + except RecursionError: + raise + + except Exception as e: + self.handleError(record) + +def _writestderr(text): + sys.stderr.write(text) + sys.stderr.flush() + +async def _pumpLogStream(): + + while True: + + await StreamHandler._pump_event.wait() + + StreamHandler._pump_event.clear() + + if not StreamHandler._pump_fifo: + continue + + todo = list(StreamHandler._pump_fifo) + text = '\n'.join(todo) + '\n' + + StreamHandler._pump_fifo.clear() + await s_coro.executor(_writestderr, text) + _glob_logconf = {} def setup(**conf): ''' Configure synapse logging. + + NOTE: If this API is invoked while there is a running + asyncio loop, it will automatically enter async + mode and fire a task to pump log events without + blocking. ''' conf.update(getLogConfFromEnv()) @@ -139,13 +190,16 @@ def setup(**conf): if not conf.get('structlog'): fmtclass = TextFormatter + if s_coro.has_running_loop() and StreamHandler._pump_task is None: + StreamHandler._pump_task = s_coro.create_task(_pumpLogStream()) + # this is used to pass things like service name # to child processes and forked workers... 
loginfo = conf.pop('loginfo', None) if loginfo is not None: _glob_loginfo.update(loginfo) - handler = logging.StreamHandler() + handler = StreamHandler() handler.setFormatter(fmtclass(datefmt=conf.get('datefmt'))) level = normLogLevel(conf.get('level')) From 0fef14c9e81fbfbe3ddc27d4d41e49ba63dcfbf7 Mon Sep 17 00:00:00 2001 From: visi Date: Thu, 6 Mar 2025 13:38:54 -0500 Subject: [PATCH 46/52] wip --- synapse/lib/logging.py | 4 ++-- synapse/tests/test_cortex.py | 16 ++++++++-------- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index dc62f5f6f71..a6e2a3673d8 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -1,6 +1,5 @@ import os import sys -import json import asyncio import logging import weakref @@ -10,6 +9,7 @@ import synapse.common as s_common import synapse.lib.coro as s_coro +import synapse.lib.json as s_json import synapse.lib.const as s_const import synapse.lib.scope as s_scope @@ -112,7 +112,7 @@ def genLogInfo(self, record): def format(self, record): loginfo = self.genLogInfo(record) - return json.dumps(loginfo, default=str) + return s_json.dumps(loginfo, default=str).decode() class TextFormatter(Formatter): diff --git a/synapse/tests/test_cortex.py b/synapse/tests/test_cortex.py index 5aa303ba2a5..8440b0e8344 100644 --- a/synapse/tests/test_cortex.py +++ b/synapse/tests/test_cortex.py @@ -8460,7 +8460,7 @@ async def _hang(*args, **kwargs): stream.seek(0) data = stream.read() self.notin('Offloading Storm query', data) - self.isin('Timeout waiting for pool mirror nexus offset', data) + self.isin('Timeout waiting for pool mirror Nexus offset.', data) self.notin('Timeout waiting for query mirror', data) await core00.stormpool.waitready(timeout=12) @@ -8474,7 +8474,7 @@ async def _hang(*args, **kwargs): stream.seek(0) data = stream.read() self.notin('Offloading Storm query', data) - self.isin('Timeout waiting for pool mirror nexus offset.', data) + self.isin('Timeout waiting 
for pool mirror Nexus offset.', data) self.notin('Timeout waiting for query mirror', data) await core00.stormpool.waitready(timeout=12) @@ -8502,7 +8502,7 @@ async def finidproxy(self, timeout=None): stream.seek(0) data = stream.read() - self.isin('Proxy for pool mirror [01.core.synapse] was shutdown. Skipping.', data) + self.isin('Proxy for pool mirror was shutdown. Skipping.', data) msgs = await core00.stormlist('cortex.storm.pool.set --connection-timeout 1 --sync-timeout 1 aha://pool00...') self.stormHasNoWarnErr(msgs) @@ -8589,28 +8589,28 @@ async def finidproxy(self, timeout=None): stream.seek(0) data = stream.read() - self.isin('Storm query mirror pool is empty. (running locally)', data) + self.isin('Storm query mirror pool is empty.', data) with self.getLoggerStream('synapse') as stream: self.true(await core00.callStorm('inet:asn=0 return($lib.true)')) stream.seek(0) data = stream.read() - self.isin('Storm query mirror pool is empty. (running locally)', data) + self.isin('Storm query mirror pool is empty.', data) with self.getLoggerStream('synapse') as stream: self.len(1, await alist(core00.exportStorm('inet:asn=0'))) stream.seek(0) data = stream.read() - self.isin('Storm query mirror pool is empty. (running locally)', data) + self.isin('Storm query mirror pool is empty.', data) with self.getLoggerStream('synapse') as stream: self.eq(1, await core00.count('inet:asn=0')) stream.seek(0) data = stream.read() - self.isin('Storm query mirror pool is empty. 
(running locally)', data) + self.isin('Storm query mirror pool is empty.', data) core01 = await base.enter_context(self.getTestCore(dirn=dirn01)) await core01.promote(graceful=True) @@ -8643,7 +8643,7 @@ async def finidproxy(self, timeout=None): stream.seek(0) data = stream.read() - self.isin('Storm query mirror pool is empty', data) + self.isin('Storm query mirror pool is empty.', data) with self.getLoggerStream('synapse') as stream: msgs = await alist(core01.storm('inet:asn=0', opts={'mirror': False})) From 764f176408695401735eb1820a46ae775b28f4c6 Mon Sep 17 00:00:00 2001 From: invisig0th Date: Thu, 6 Mar 2025 14:38:56 -0500 Subject: [PATCH 47/52] Apply suggestions from code review Co-authored-by: Cisphyx --- synapse/cortex.py | 2 +- synapse/lib/agenda.py | 24 ++++++++++++------------ synapse/lib/aha.py | 4 ++-- synapse/lib/cell.py | 2 +- synapse/lib/httpapi.py | 2 +- synapse/lib/stormlib/log.py | 2 +- synapse/tests/test_daemon.py | 4 ++-- synapse/tests/test_lib_aha.py | 2 +- synapse/tests/test_telepath.py | 2 +- 9 files changed, 22 insertions(+), 22 deletions(-) diff --git a/synapse/cortex.py b/synapse/cortex.py index 8a1349491b0..0d0499d922c 100644 --- a/synapse/cortex.py +++ b/synapse/cortex.py @@ -4351,7 +4351,7 @@ async def _initCoreAxon(self): async def onlink(proxy: s_telepath.Proxy): extra = self.getLogExtra(url=s_urlhelp.sanitizeUrl(turl)) - logger.debug('Connected to remote axon.', extra=extra) + logger.debug('Connected to remote Axon.', extra=extra) async def fini(): self.axready.clear() diff --git a/synapse/lib/agenda.py b/synapse/lib/agenda.py index da4cf3fa94e..ad232be95ba 100644 --- a/synapse/lib/agenda.py +++ b/synapse/lib/agenda.py @@ -405,7 +405,7 @@ def updateNexttime(self, now): # We blew by and missed a fixed-year appointment, either due to clock shenanigans, this query going # really long, or the initial requirement being in the past extra = self.getLogExtra() - logger.warning('Missed an appointment', extra=extra) + logger.warning('Missed an 
appointment.', extra=extra) del self.recs[i] continue if nexttime < lowtime: @@ -436,7 +436,7 @@ async def edits(self, edits): for name, valu in edits.items(): if name not in self.__class__._synced_attrs: extra = self.getLogExtra(prop=name, valu=valu) - logger.warning('Invalid cron property edit', extra=extra) + logger.warning('Invalid cron property edit.', extra=extra) continue if name == 'lasterrs' and not isinstance(valu, list): @@ -491,7 +491,7 @@ async def _load_all(self): except (s_exc.InconsistentStorage, s_exc.BadStorageVersion, s_exc.BadTime, TypeError, KeyError, UnicodeDecodeError) as e: extra = self.core.getLogExtra(cron={'iden': iden}, err=str(e)) - logger.warning('Removing invalid cron job', extra=extra) + logger.warning('Removing invalid cron job.', extra=extra) to_delete.append(iden) continue @@ -732,7 +732,7 @@ async def clearRunningStatus(self): for appt in list(self.appts.values()): if appt.isrunning: extra = appt.getLogExtra() - logger.debug('Clearing isrunning flag', extra=extra) + logger.debug('Clearing isrunning flag.', extra=extra) edits = { 'isrunning': False, @@ -788,7 +788,7 @@ async def runloop(self): await self._execute(appt) except Exception as e: extra = appt.getLogExtra() - logger.exception('Cron job error', extra=extra) + logger.exception('Cron job error.', extra=extra) await self._markfailed(appt, f'error: {e}') async def _execute(self, appt): @@ -797,19 +797,19 @@ async def _execute(self, appt): ''' user = self.core.auth.user(appt.creator) if user is None: - logger.warning('Cron job has unknown user', extra=appt.getLogExtra()) + logger.warning('Cron job has unknown user.', extra=appt.getLogExtra()) await self._markfailed(appt, 'unknown user') return locked = user.info.get('locked') if locked: - logger.warning('Cron job has locked user', extra=appt.getLogExtra()) + logger.warning('Cron job has locked user.', extra=appt.getLogExtra()) await self._markfailed(appt, 'locked user') return view = self.core.getView(iden=appt.view, 
user=user) if view is None: - logger.warning('Cron job has unknown view', extra=appt.getLogExtra()) + logger.warning('Cron job has unknown view.', extra=appt.getLogExtra()) await self._markfailed(appt, 'unknown view') return @@ -850,7 +850,7 @@ async def _runJob(self, user, appt): await self.core.addCronEdits(appt.iden, edits) extra = appt.getLogExtra(text=appt.query) - logger.info('Cron job starting', extra=extra) + logger.info('Cron job starting.', extra=extra) starttime = self._getNowTick() @@ -878,7 +878,7 @@ async def _runJob(self, user, appt): elif mesg[0] == 'warn' and loglevel <= logging.WARNING: extra = appt.getLogExtra(**mesg[1]) - logger.warning('Cron job emitted warning', extra=extra) + logger.warning('Cron job emitted warning.', extra=extra) elif mesg[0] == 'err': excname, errinfo = mesg[1] @@ -893,7 +893,7 @@ async def _runJob(self, user, appt): except Exception as e: result = f'raised exception {e}' - logger.exception('Cron job raised an exception', extra=extra) + logger.exception('Cron job raised an exception.', extra=extra) else: success = True @@ -915,7 +915,7 @@ async def _runJob(self, user, appt): took = finishtime - starttime extra = appt.getLogExtra(result=result, took=took) - logger.info('Cron job completed', extra=extra) + logger.info('Cron job completed.', extra=extra) if not self.core.isactive: logger.warning('Cron job status is not saved. 
We are no longer the leader.', extra=extra) diff --git a/synapse/lib/aha.py b/synapse/lib/aha.py index 24d12f457d6..cba41c46212 100644 --- a/synapse/lib/aha.py +++ b/synapse/lib/aha.py @@ -123,7 +123,7 @@ async def get(self): raise except Exception as e: # pragma: no cover extra = self.cell.getLogExtra() - logger.exception('Error getting Aha services.', extra=extra) + logger.exception('Error getting AHA services.', extra=extra) return self.sendRestErr(e.__class__.__name__, str(e)) return self.sendRestRetn(ret) @@ -911,7 +911,7 @@ async def initServiceNetwork(self): self.provdmon = await ProvDmon.anit(self) self.onfini(self.provdmon) extra = self.getLogExtra(url=provurl) - logger.info('Provisioning serivce listening.', extra=extra) + logger.info('Provisioning service listening.', extra=extra) self.provaddr = await self.provdmon.listen(provurl) async def _clearInactiveSessions(self): diff --git a/synapse/lib/cell.py b/synapse/lib/cell.py index 5276d523714..a893f810bbb 100644 --- a/synapse/lib/cell.py +++ b/synapse/lib/cell.py @@ -4211,7 +4211,7 @@ async def initFromArgv(cls, argv, outp=None): service_version=cls.VERSTRING, synapse_version=s_version.verstring) - logger.info('Starting synapse service.', extra=extra) + logger.info('Starting Synapse service.', extra=extra) await cls._initBootRestore(opts.dirn) diff --git a/synapse/lib/httpapi.py b/synapse/lib/httpapi.py index 648911cbbf6..b8c04f2b327 100644 --- a/synapse/lib/httpapi.py +++ b/synapse/lib/httpapi.py @@ -165,7 +165,7 @@ async def logAuthIssue(self, mesg=None, user=None, username=None, level=logging. 
username=username, user=user) - logger.log(level, 'Failed to authenticate HTTP request', extra=extra) + logger.log(level, 'Failed to authenticate HTTP request.', extra=extra) def sendAuthRequired(self): self.set_header('WWW-Authenticate', 'Basic realm=synapse') diff --git a/synapse/lib/stormlib/log.py b/synapse/lib/stormlib/log.py index dba81fd19a4..1bc066c6fa6 100644 --- a/synapse/lib/stormlib/log.py +++ b/synapse/lib/stormlib/log.py @@ -141,7 +141,7 @@ async def _getExtra(self, extra=None): extra = await s_stormtypes.toprim(extra) if extra and not isinstance(extra, dict): - mesg = '$lib.log argument extra= must be a dictionary' + mesg = '$lib.log argument extra= must be a dictionary.' raise s_exc.BadArg(mesg=mesg, arg='extra', got=extra.__class__.__name__) return self.runt.snap.core.getLogExtra(**extra) diff --git a/synapse/tests/test_daemon.py b/synapse/tests/test_daemon.py index a03fc0f22ff..0f51a2a192a 100644 --- a/synapse/tests/test_daemon.py +++ b/synapse/tests/test_daemon.py @@ -91,7 +91,7 @@ async def test_dmon_errors(self): async with await prox.getPoolLink() as link: mesg = ('newp', {}) emsg = "Dmon.onLinkMesg Invalid mesg: mesg=('newp', {})" - with self.getLoggerStream('synapse.daemon', emsg) as stream: + with self.getLoggerStream('synapse.daemon') as stream: await link.tx(mesg) await stream.expect(emsg) @@ -106,7 +106,7 @@ async def test_dmon_errors(self): async with await prox.getPoolLink() as link: mesg = ('t2:init', {}) emsg = "Error on t2:init:" - with self.getLoggerStream('synapse.daemon', emsg) as stream: + with self.getLoggerStream('synapse.daemon') as stream: await link.tx(mesg) await stream.expect(emsg) diff --git a/synapse/tests/test_lib_aha.py b/synapse/tests/test_lib_aha.py index b5e6b42455e..6b79c428ce1 100644 --- a/synapse/tests/test_lib_aha.py +++ b/synapse/tests/test_lib_aha.py @@ -681,7 +681,7 @@ async def test_lib_aha_provision(self): self.eq(overconf2, {'nexslog:async': True}) # tests startup logic that recognizes it's already done - 
with self.getLoggerStream('synapse.lib.cell', ) as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: async with await s_axon.Axon.initFromArgv((axonpath,)) as axon: pass stream.seek(0) diff --git a/synapse/tests/test_telepath.py b/synapse/tests/test_telepath.py index 3b1d0675e05..1d18aca32d2 100644 --- a/synapse/tests/test_telepath.py +++ b/synapse/tests/test_telepath.py @@ -917,7 +917,7 @@ async def onlink(proxy, urlinfo): async def badonlink(proxy, urlinfo): raise ValueError('oopsie') - with self.getLoggerStream('synapse.telepath', 'onlink: ') as stream: + with self.getLoggerStream('synapse.telepath') as stream: async with await s_telepath.open(url1, onlink=badonlink) as targ: await stream.expect('onlink error') From 14d7873f88f078144abcb870d11128d3fd9eadb3 Mon Sep 17 00:00:00 2001 From: visi Date: Thu, 6 Mar 2025 14:53:26 -0500 Subject: [PATCH 48/52] wip --- synapse/tests/test_common.py | 19 ------------------- 1 file changed, 19 deletions(-) diff --git a/synapse/tests/test_common.py b/synapse/tests/test_common.py index 5ab693eaaec..3f866c657fb 100644 --- a/synapse/tests/test_common.py +++ b/synapse/tests/test_common.py @@ -418,25 +418,6 @@ async def alist(coro): retn = s_common.merggenr2([asyncl(lt) for lt in (l3, l2, l1)], reverse=True) self.eq((9, 8, 7, 6, 5, 4, 3, 2, 1), await alist(retn)) - def test_jsonsafe(self): - items = ( - (None, None), - (1234, None), - ('1234', None), - ({'asdf': 'haha'}, None), - ({'a': (1,), 'b': [{'': 4}, 56, None, {'t': True, 'f': False}, 'oh my']}, None), - (b'1234', s_exc.BadArg), - ({'a': 'a', 2: 2}, s_exc.BadArg), - ({'a', 'b', 'c'}, s_exc.BadArg), - (s_common.novalu, s_exc.BadArg), - ) - for (item, eret) in items: - if eret is None: - self.none(s_common.reqJsonSafeStrict(item)) - else: - with self.raises(eret): - s_common.reqJsonSafeStrict(item) - async def test_sslctx(self): with self.getTestDir(mirror='certdir') as dirn: cadir = s_common.genpath(dirn, 'cas') From 48eb0035d8a82a6087855b266ce56e0871a5642a 
Mon Sep 17 00:00:00 2001 From: visi Date: Thu, 6 Mar 2025 15:26:31 -0500 Subject: [PATCH 49/52] wip --- synapse/lib/cell.py | 4 ++-- synapse/tests/test_lib_cell.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/synapse/lib/cell.py b/synapse/lib/cell.py index a893f810bbb..25f0be50ef0 100644 --- a/synapse/lib/cell.py +++ b/synapse/lib/cell.py @@ -5078,13 +5078,13 @@ async def logs(self, wait=False, last=None): await self._initLogBase() if not wait: - for loginfo in list(s_logging.logfifo)[last:]: + for loginfo in list(s_logging.logfifo)[-last:]: yield loginfo return async with await s_queue.Window.anit(maxsize=2000) as window: - await window.puts(list(s_logging.logfifo)[last:]) + await window.puts(list(s_logging.logfifo)[-last:]) s_logging.logwindows.add(window) diff --git a/synapse/tests/test_lib_cell.py b/synapse/tests/test_lib_cell.py index 23ce3cee600..09ad633fed1 100644 --- a/synapse/tests/test_lib_cell.py +++ b/synapse/tests/test_lib_cell.py @@ -3482,11 +3482,11 @@ async def logtask(): logger.warning('oh hai', extra=aha.getLogExtra()) # test the non-wait version quick... 
- logs = [loginfo async for loginfo in proxy.logs(last=-1)] + logs = [loginfo async for loginfo in proxy.logs(last=1)] self.eq('oh hai', logs[0]['message']) logs = [] - async for loginfo in proxy.logs(wait=True, last=-1): + async for loginfo in proxy.logs(wait=True, last=1): logs.append(loginfo) From 72039660de1695b2c898b5ebd0f5cfe74c3ff83f Mon Sep 17 00:00:00 2001 From: visi Date: Thu, 6 Mar 2025 15:38:20 -0500 Subject: [PATCH 50/52] wip --- synapse/axon.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/synapse/axon.py b/synapse/axon.py index 0a92d9bfda9..719bc92765b 100644 --- a/synapse/axon.py +++ b/synapse/axon.py @@ -1505,7 +1505,9 @@ async def csvrows(self, sha256, dialect='excel', errors='ignore', **fmtparams): await feedtask async def jsonlines(self, sha256, errors='ignore'): + async for line in self.readlines(sha256, errors=errors): + line = line.strip() if not line: continue @@ -1513,8 +1515,7 @@ async def jsonlines(self, sha256, errors='ignore'): try: yield s_json.loads(line) except s_exc.BadJsonText as e: - # TODO: this feels like it should not be a log... - extra = self.getLogExtra(sha256=sha256, err=str(e)) + extra = self.getLogExtra(line=line, sha256=sha256, err=str(e)) logger.warning('Bad JSON line encountered.', extra=extra) mesg = f'Bad JSON line while processing {sha256}: {e}' raise s_exc.BadJsonText(mesg=mesg, sha256=sha256) from None From 1432c5c9e45397b1ef0c5d9529bb12340066a79f Mon Sep 17 00:00:00 2001 From: epiphyte Date: Tue, 11 Mar 2025 14:46:41 -0400 Subject: [PATCH 51/52] Add branch build. 
--- .circleci/config.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index bb551aa5cc9..040b303029f 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -610,6 +610,7 @@ workflows: branches: only: - master + - visi-link-errinfo - build_docker_tag: requires: From f5a8bc7bf023468a4b39d65d4033f6a95e88cfc0 Mon Sep 17 00:00:00 2001 From: visi Date: Thu, 13 Mar 2025 17:26:41 -0400 Subject: [PATCH 52/52] edits from feedback --- synapse/cortex.py | 54 +++++++++++++++++++++--------------------- synapse/lib/aha.py | 27 +++++++++++---------- synapse/lib/httpapi.py | 20 +++++++--------- synapse/lib/logging.py | 26 +++++++++++++++----- 4 files changed, 70 insertions(+), 57 deletions(-) diff --git a/synapse/cortex.py b/synapse/cortex.py index 0d0499d922c..6a0b0991299 100644 --- a/synapse/cortex.py +++ b/synapse/cortex.py @@ -2174,8 +2174,8 @@ async def _initStormDmons(self): await self.runStormDmon(iden, ddef) except Exception as e: # pragma: no cover - extra = self.getLogExtra(iden=iden, exc=e) - logger.warning('Failed to start Storm dmon.', extra=extra) + extra = self.getLogExtra(iden=iden) + logger.warning('Failed to start Storm dmon.', extra=extra, exc_info=e) async def _initStormSvcs(self): @@ -2185,8 +2185,8 @@ async def _initStormSvcs(self): await self._setStormSvc(sdef) except Exception as e: # pragma: no cover - extra = self.getLogExtra(iden=iden, exc=e) - logger.warning('Failed to initialize Storm service.', extra=extra) + extra = self.getLogExtra(iden=iden) + logger.warning('Failed to initialize Storm service.', extra=extra, exc_info=e) async def _initCoreQueues(self): path = os.path.join(self.dirn, 'slabs', 'queues.lmdb') @@ -2710,7 +2710,7 @@ async def _tryLoadStormPkg(self, pkgdef): except Exception as e: # pragma: no cover name = pkgdef.get('name', '') - extra = self.getLogExtra(name=name, exc=e) + extra = self.getLogExtra(name=name) logger.exception('Error loading Storm package.', extra=extra) async def 
verifyStormPkgDeps(self, pkgdef): @@ -2935,8 +2935,8 @@ async def _onload(): await asyncio.sleep(0) except Exception as e: - extra = self.getLogExtra(name=name, exc=e) - logger.warning('Storm package onload failure.', extra=extra) + extra = self.getLogExtra(name=name) + logger.warning('Storm package onload failure.', extra=extra, exc_info=e) await self.fire('core:pkg:onload:complete', pkg=name) @@ -3031,8 +3031,8 @@ async def _delStormSvc(self, iden): if self.isactive: await self.runStormSvcEvent(iden, 'del') except Exception as e: - extra = self.getLogExtra(iden=iden, exc=e) - logger.warning('Service delete hook failed.', extra=extra) + extra = self.getLogExtra(iden=iden) + logger.warning('Service delete hook failed.', extra=extra, exc_info=e) sdef = self.svcdefs.pop(iden) @@ -3198,8 +3198,8 @@ async def _migrateTaxonomyIface(self): await extforms.set(formname, (formname, basetype, typeopts, typeinfo)) except Exception as e: # pragma: no cover - extra = self.getLogExtra(form=formname, exc=e) - logger.warning('Taxonomy migration error (skipped).', extra=extra) + extra = self.getLogExtra(form=formname) + logger.warning('Taxonomy migration error (skipped).', extra=extra, exc_info=e) async def _loadExtModel(self): @@ -3214,16 +3214,16 @@ async def _loadExtModel(self): try: self.model.addType(typename, basetype, typeopts, typeinfo) except Exception as e: - extra = self.getLogExtra(type=typename, exc=e) - logger.warning(f'Extended type definition error.', extra=extra) + extra = self.getLogExtra(type=typename) + logger.warning(f'Extended type definition error.', extra=extra, exc_info=e) for formname, basetype, typeopts, typeinfo in self.extforms.values(): try: self.model.addType(formname, basetype, typeopts, typeinfo) form = self.model.addForm(formname, {}, ()) except Exception as e: - extra = self.getLogExtra(form=formname, exc=e) - logger.warning('Extended form definition error.', extra=extra) + extra = self.getLogExtra(form=formname) + logger.warning('Extended form 
definition error.', extra=extra, exc_info=e) else: if form.type.deprecated: mesg = 'Extended form is using a deprecated type which will be removed in 3.0.0.' @@ -3233,8 +3233,8 @@ async def _loadExtModel(self): try: prop = self.model.addFormProp(form, prop, tdef, info) except Exception as e: # pragma: no cover - extra = self.getLogExtra(prop=f'{form}:{prop}', exc=e) - logger.warning('Extended property definition error.', extra=extra) + extra = self.getLogExtra(prop=f'{form}:{prop}') + logger.warning('Extended property definition error.', extra=extra, exc_info=e) else: if prop.type.deprecated: mesg = 'Extended property is using a deprecated type which will be removed in 3.0.0.' @@ -3244,22 +3244,22 @@ async def _loadExtModel(self): try: self.model.addUnivProp(prop, tdef, info) except Exception as e: # pragma: no cover - extra = self.getLogExtra(univ=prop, exc=e) - logger.warning('Extended universal property definition error.', extra=extra) + extra = self.getLogExtra(univ=prop) + logger.warning('Extended universal property definition error.', extra=extra, exc_info=e) for prop, tdef, info in self.exttagprops.values(): try: self.model.addTagProp(prop, tdef, info) except Exception as e: # pragma: no cover - extra = self.getLogExtra(prop=prop, exc=e) - logger.warning('Tag property definition error.', extra=extra) + extra = self.getLogExtra(prop=prop) + logger.warning('Tag property definition error.', extra=extra, exc_info=e) for edge, info in self.extedges.values(): try: self.model.addEdge(edge, info) except Exception as e: - extra = self.getLogExtra(edge=edge, exc=e) - logger.warning('Extended edge definition error.', extra=extra) + extra = self.getLogExtra(edge=edge) + logger.warning('Extended edge definition error.', extra=extra, exc_info=e) async def getExtModel(self): ''' @@ -4458,8 +4458,8 @@ async def _trySetStormCmd(self, name, cdef): try: self._setStormCmd(cdef) except Exception as e: # pragma: no cover - extra = self.getLogExtra(name=name, exc=e) - 
logger.warning('Storm command load failed.', extra=extra) + extra = self.getLogExtra(name=name) + logger.warning('Storm command load failed.', extra=extra, exc_info=e) def _initStormLibs(self): ''' @@ -5476,8 +5476,8 @@ async def fill(): await queue.close() except Exception as e: - extra = self.getLogExtra(push=iden, exc=e) - logger.warning('Error while pushing bulk edits to remote layer.', extra=extra) + extra = self.getLogExtra(push=iden) + logger.warning('Error while pushing bulk edits to remote layer.', extra=extra, exc_info=e) await queue.close() base.schedCoro(fill()) diff --git a/synapse/lib/aha.py b/synapse/lib/aha.py index cba41c46212..fedd221aac2 100644 --- a/synapse/lib/aha.py +++ b/synapse/lib/aha.py @@ -383,33 +383,34 @@ async def __anit__(self, aha): self.aha = aha await s_daemon.Daemon.__anit__(self) - async def _getSharedItem(self, name): - provinfo = await self.aha.getAhaSvcProv(name) + async def _getSharedItem(self, iden): + + provinfo = await self.aha.getAhaSvcProv(iden) if provinfo is not None: - await self.aha.delAhaSvcProv(name) + await self.aha.delAhaSvcProv(iden) conf = provinfo.get('conf', {}) - anam = conf.get('aha:name') - anet = conf.get('aha:network') - extra = self.aha.getLogExtra(name=name) + name = conf.get('aha:name') + netw = conf.get('aha:network') + extra = self.aha.getLogExtra(name=f'{name}.{netw}', iden=iden) logger.info('Retrieved service provisioning info.', extra=extra) return ProvApi(self.aha, provinfo) - userinfo = await self.aha.getAhaUserEnroll(name) + userinfo = await self.aha.getAhaUserEnroll(iden) if userinfo is not None: - unam = userinfo.get('name') - extra = self.aha.getLogExtra(name=name) + name = userinfo.get('name') + extra = self.aha.getLogExtra(name=name, iden=iden) logger.info('Retrieved user provisioning info.', extra=extra) - await self.aha.delAhaUserEnroll(name) + await self.aha.delAhaUserEnroll(iden) return EnrollApi(self.aha, userinfo) - clone = await self.aha.getAhaClone(name) + clone = await 
self.aha.getAhaClone(iden) if clone is not None: host = clone.get('host') mesg = f'Retrieved clone provisioning info.' - logger.info(mesg, extra=self.aha.getLogExtra(iden=name, host=host)) + logger.info(mesg, extra=self.aha.getLogExtra(iden=iden, host=host)) return CloneApi(self.aha, clone) - mesg = f'Invalid provisioning identifier name={name}. This could be' \ + mesg = f'Invalid provisioning identifier iden={iden}. This could be' \ f' caused by the re-use of a provisioning URL.' raise s_exc.NoSuchName(mesg=mesg, name=name) diff --git a/synapse/lib/httpapi.py b/synapse/lib/httpapi.py index b8c04f2b327..7625f2f90bd 100644 --- a/synapse/lib/httpapi.py +++ b/synapse/lib/httpapi.py @@ -145,7 +145,7 @@ def loadJsonMesg(self, byts, validator=None): self.sendRestErr('SchemaViolation', 'Invalid JSON content.') return None - async def logAuthIssue(self, mesg=None, user=None, username=None, level=logging.WARNING): + def logAuthIssue(self, mesg=None, user=None, username=None, level=logging.WARNING): ''' Helper to log issues related to request authentication. 
@@ -314,15 +314,15 @@ async def handleBasicAuth(self): udef = await authcell.getUserDefByName(name) if udef is None: - await self.logAuthIssue(mesg='No such user.', username=name) + self.logAuthIssue(mesg='No such user.', username=name) return None if udef.get('locked'): - await self.logAuthIssue(mesg='User is locked.', user=udef.get('iden'), username=name) + self.logAuthIssue(mesg='User is locked.', user=udef.get('iden'), username=name) return None if not await authcell.tryUserPasswd(name, passwd): - await self.logAuthIssue(mesg='Incorrect password.', user=udef.get('iden'), username=name) + self.logAuthIssue(mesg='Incorrect password.', user=udef.get('iden'), username=name) return None self.web_useriden = udef.get('iden') @@ -334,7 +334,7 @@ async def handleApiKeyAuth(self): key = self.request.headers.get('X-API-KEY') isok, info = await authcell.checkUserApiKey(key) # errfo or dict with tdef + udef if isok is False: - await self.logAuthIssue(mesg=info.get('mesg'), user=info.get('user'), username=info.get('name')) + self.logAuthIssue(mesg=info.get('mesg'), user=info.get('user'), username=info.get('name')) return udef = info.get('udef') @@ -674,15 +674,15 @@ async def post(self): authcell = self.getAuthCell() udef = await authcell.getUserDefByName(name) if udef is None: - await self.logAuthIssue(mesg='No such user.', username=name) + self.logAuthIssue(mesg='No such user.', username=name) return self.sendRestErr('AuthDeny', 'No such user.') if udef.get('locked'): - await self.logAuthIssue(mesg='User is locked.', user=udef.get('iden'), username=name) + self.logAuthIssue(mesg='User is locked.', user=udef.get('iden'), username=name) return self.sendRestErr('AuthDeny', 'User is locked.') if not await authcell.tryUserPasswd(name, passwd): - await self.logAuthIssue(mesg='Incorrect password.', user=udef.get('iden'), username=name) + self.logAuthIssue(mesg='Incorrect password.', user=udef.get('iden'), username=name) return self.sendRestErr('AuthDeny', 'Incorrect password.') 
iden = udef.get('iden') @@ -1411,9 +1411,7 @@ async def _runHttpExt(self, meth, path): except Exception as e: rcode = True - errname, errinfo = s_common.err(e) - extra = core.getLogExtra(httpapi=iden, errname=errname, **errinfo) - logger.exception(f'Extended HTTP API encountered a fatal error.', extra=extra) + logger.exception('Extended HTTP API encountered a fatal error.', extra=extra, exc_info=e) if rbody is False: self.clear() self.set_status(500) diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index a6e2a3673d8..8c437c70f28 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -23,6 +23,25 @@ logfifo = collections.deque(maxlen=1000) +def excinfo(e): + + ret = { + 'code': e.__class__.__name__, + 'traceback': [] + } + + for path, line, func, sorc in traceback.extract_tb(e.__traceback__): + ret['traceback'].append({'path': path, 'line': line, 'func': func}) + + if isinstance(e, s_exc.SynErr): + ret['mesg'] = e.errinfo.pop('mesg', None) + ret['info'] = e.errinfo + + if ret.get('mesg') is None: + ret['mesg'] = str(e) + + return ret + def _addLogInfo(info): logfifo.append(info) if logbase is not None: @@ -60,12 +79,7 @@ def getLogExtra(**kwargs): NOTE: If the key "exc" is specified, it will be used as an exception to generate standardized error info. ''' - exc = kwargs.pop('exc', None) extra = {'params': kwargs, 'loginfo': {}} - - if exc is not None: - extra['loginfo']['error'] = s_common.excinfo(exc) - return extra class Formatter(logging.Formatter): @@ -99,7 +113,7 @@ def genLogInfo(self, record): loginfo['username'] = sess.user.name if record.exc_info: - loginfo['error'] = s_common.excinfo(record.exc_info[1]) + loginfo['error'] = excinfo(record.exc_info[1]) if not hasattr(record, 'params'): record.params = {}