This article collects typical usage examples of the Python logging.handlers.QueueListener method. If you have been wondering what handlers.QueueListener does, how to use it, or what real code using it looks like, the curated examples below may help. You can also explore further usage examples from the enclosing module, logging.handlers.
The following presents 8 code examples of handlers.QueueListener, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
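Before the project-specific examples, here is a minimal, self-contained sketch of the standard QueueHandler/QueueListener pattern (not taken from any of the projects below): loggers push records onto a queue via a QueueHandler, and a QueueListener thread drains the queue and forwards the records to the real handlers.

import logging
import queue
from logging.handlers import QueueHandler, QueueListener

log_queue = queue.Queue(-1)                       # unbounded queue shared by handler and listener
console = logging.StreamHandler()                 # the "real" handler that does the slow I/O
console.setFormatter(logging.Formatter("%(levelname)s %(name)s: %(message)s"))

listener = QueueListener(log_queue, console, respect_handler_level=True)
listener.start()                                  # a background thread drains the queue

root = logging.getLogger()
root.setLevel(logging.INFO)
root.addHandler(QueueHandler(log_queue))          # loggers only enqueue, never block on I/O

root.info("hello from the queue")
listener.stop()                                   # flush remaining records and join the thread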
Example 1: __init__
# Required import: from logging import handlers
# Or: from logging.handlers import QueueListener
def __init__(self,
log_dir: str,
verbose: bool = False,
root_log_level: Optional[int] = None,
capture_warnings: bool = True,
console_prefix: Optional[str] = None,
hyper_params: Optional[Dict] = None) -> None:
self.log_dir = log_dir
self.verbose = verbose
self.log_level = logging.NOTSET
self.capture_warnings = capture_warnings
self.listener: handlers.QueueListener
self.console_prefix = console_prefix
self.handlers: List[logging.Handler] = []
self.queue_handler: handlers.QueueHandler
self.old_root_log_level: int = logging.NOTSET
self.hyper_params: Dict = hyper_params or {}
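This __init__ only stores configuration; the annotated listener and queue_handler attributes are presumably created elsewhere in the class. A hypothetical start/stop pair that such a class might add, shown on a minimal stand-alone class (the names and structure are assumptions, not the original project's code):

import logging
import queue
from logging import handlers
from typing import List

class QueueLogging:
    """Stand-alone sketch; attribute and method names are assumptions."""

    def __init__(self, extra_handlers: List[logging.Handler]) -> None:
        self.handlers = extra_handlers
        self.old_root_log_level = logging.NOTSET

    def start(self) -> None:
        log_queue: queue.Queue = queue.Queue(-1)
        self.queue_handler = handlers.QueueHandler(log_queue)
        self.listener = handlers.QueueListener(
            log_queue, *self.handlers, respect_handler_level=True)
        root = logging.getLogger()
        self.old_root_log_level = root.level
        root.addHandler(self.queue_handler)
        self.listener.start()

    def stop(self) -> None:
        self.listener.stop()
        root = logging.getLogger()
        root.removeHandler(self.queue_handler)
        root.setLevel(self.old_root_log_level)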
Example 2: __init__
# Required import: from logging import handlers
# Or: from logging.handlers import QueueListener
def __init__(self, pool_names, max_restarts=0, options=None):
self.names = pool_names
self.queue = multiprocessing.Queue()
self.pool = dict()
self.max_restarts = max_restarts
self.options = options or dict()
self.dog_path = os.curdir
self.dog_handler = LiveReload(self)
# self.dog_observer = Observer()
# self.dog_observer.schedule(self.dog_handler, self.dog_path, recursive=True)
if multiprocessing.get_start_method() != 'fork': # pragma: no cover
root_logger = logging.getLogger()
self.log_listener = QueueListener(self.queue, *root_logger.handlers)
# TODO: Find out how to get the watchdog + livereload working at a later moment.
# self.dog_observer.start()
self._restarts = dict()
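The listener in this example exists so that, under a non-'fork' (spawn) start method, worker processes can send records to the parent instead of owning their own file or stream handlers. A minimal sketch of how the worker side of such a setup typically looks (worker_main is an assumed name, not from the project above):

import logging
import multiprocessing
from logging.handlers import QueueHandler, QueueListener

def worker_main(log_queue):
    # In the child process: replace any inherited handlers with a QueueHandler.
    root = logging.getLogger()
    root.handlers[:] = [QueueHandler(log_queue)]
    root.setLevel(logging.INFO)
    logging.getLogger("worker").info("worker started")

if __name__ == "__main__":
    log_queue = multiprocessing.Queue()
    listener = QueueListener(log_queue, logging.StreamHandler())
    listener.start()                              # the parent drains the queue
    proc = multiprocessing.Process(target=worker_main, args=(log_queue,))
    proc.start()
    proc.join()
    listener.stop()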
Example 3: _setup_logging_queue
# Required import: from logging import handlers
# Or: from logging.handlers import QueueListener
def _setup_logging_queue(*handlers: Handler) -> QueueHandler:
"""Create a new LocalQueueHandler and start an associated QueueListener.
"""
queue: Queue = Queue()
queue_handler = LocalQueueHandler(queue)
serving_listener = QueueListener(queue, *handlers, respect_handler_level=True)
serving_listener.start()
return queue_handler
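LocalQueueHandler is not defined in this snippet. In the asyncio recipe this function usually comes from, it is a QueueHandler subclass that skips the pickling-oriented prepare() step, since the queue never leaves the process. A sketch of that idea (an assumption about the missing class, not copied from the project):

import logging
from logging.handlers import QueueHandler

class LocalQueueHandler(QueueHandler):
    """QueueHandler for a same-process queue: enqueue the record unchanged."""

    def emit(self, record: logging.LogRecord) -> None:
        try:
            self.enqueue(record)          # no prepare(): no copying or formatting needed locally
        except Exception:
            self.handleError(record)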
Example 4: _cleanup_processes_files
# Required import: from logging import handlers
# Or: from logging.handlers import QueueListener
def _cleanup_processes_files():
"""Cleanup function, starts with latest processes/files.
Terminates processes, sets stop events to stop threads, closes open files.
"""
logger_all.info("Cleaning up")
logger_all.debug("Cleaning up: closing files.")
for f in open_files[::-1]:
if not f.closed:
f.close()
logger_all.debug("Cleaning up: closing queues.")
for q in started_queues:
q.close()
logger_all.debug("Cleaning up: setting cleanup_event and joining threads.")
for t in started_threads[::-1]:
if t.is_alive():
logger_all.debug("Closing Thread {}.".format(t.name))
t.stop_event.set()
t.join()
else:
logger_all.debug("Thread {} already done.".format(t.name))
logger_all.debug("Cleaning up: terminating processes.")
for p in started_processes[::-1]:
if isinstance(p, Popen):
if p.poll() is None:
logger_all.debug("Terminating mongod process.")
p.terminate()
p.wait()
else:
logger_all.debug("mongodd process already done.")
if isinstance(p, multiprocessing.Process):
if p.is_alive():
logger_all.debug("Terminating Process {}.".format(p.name))
p.terminate()
else:
logger_all.debug("Process {} already done.".format(p.name))
if isinstance(p, QueueListener):
if p._thread is not None and not p.queue._closed:
p.stop()
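The final branch checks the private attributes p._thread and p.queue._closed before stopping the listener. A more defensive variant of the same idea, which stops a listener without touching internals (a sketch, not the project's code):

import logging
from logging.handlers import QueueListener

def stop_listener_safely(listener: QueueListener) -> None:
    """Stop a QueueListener, tolerating listeners that never started or already stopped."""
    try:
        listener.stop()                   # flushes the queue and joins the worker thread
    except (AttributeError, ValueError, OSError):
        # stop() on an unstarted listener, or on a closed multiprocessing queue,
        # can raise; during shutdown we only care that nothing is left running.
        pass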
Example 5: setup_logging_queues
# Required import: from logging import handlers
# Or: from logging.handlers import QueueListener
def setup_logging_queues():
if sys.version_info.major < 3:
raise RuntimeError("This feature requires Python 3.")
queue_listeners = []
# Q: What about loggers created after this is called?
# A: if they don't attach their own handlers they should be fine
for logger in get_all_logger_names(include_root=True):
logger = logging.getLogger(logger)
if logger.handlers:
log_queue = queue.Queue(-1) # No limit on size
queue_handler = QueueHandler(log_queue)
queue_listener = QueueListener(
log_queue, respect_handler_level=True)
queuify_logger(logger, queue_handler, queue_listener)
# print("Replaced logger %s with queue listener: %s" % (
# logger, queue_listener
# ))
queue_listeners.append(queue_listener)
for listener in queue_listeners:
listener.start()
atexit.register(stop_queue_listeners, *queue_listeners)
return
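get_all_logger_names and stop_queue_listeners are not shown above. Plausible implementations, inferred from how the snippet uses them (assumptions, not the project's code):

import logging
from logging.handlers import QueueListener
from typing import Iterator

def get_all_logger_names(include_root: bool = False) -> Iterator[str]:
    """Yield the names of all loggers known to the logging manager."""
    if include_root:
        yield ""                          # logging.getLogger("") returns the root logger
    yield from list(logging.Logger.manager.loggerDict)

def stop_queue_listeners(*listeners: QueueListener) -> None:
    """atexit callback: stop every listener so queued records get flushed."""
    for listener in listeners:
        try:
            listener.stop()
        except Exception:
            pass                          # shutdown should not raise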
Example 6: queuify_logger
# Required import: from logging import handlers
# Or: from logging.handlers import QueueListener
def queuify_logger(logger, queue_handler, queue_listener):
"""Replace logger's handlers with a queue handler while adding existing
handlers to a queue listener.
This is useful when you want to use a default logging config but then
optionally add a logger's handlers to a queue during runtime.
Args:
logger (mixed): Logger instance or string name of logger to queue-ify
handlers.
queue_handler (QueueHandler): Instance of a ``QueueHandler``.
queue_listener (QueueListener): Instance of a ``QueueListener``.
"""
if isinstance(logger, str):
logger = logging.getLogger(logger)
# Get handlers that aren't being listened for.
handlers = [handler for handler in logger.handlers
if handler not in queue_listener.handlers]
if handlers:
# The default QueueListener stores handlers as a tuple.
queue_listener.handlers = \
tuple(list(queue_listener.handlers) + handlers)
# Remove logger's handlers and replace with single queue handler.
del logger.handlers[:]
logger.addHandler(queue_handler)
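A short usage sketch for queuify_logger, assuming a logger that already has a handler attached ("myapp" is a hypothetical name); note that the listener still has to be started separately, as in the previous example:

import logging
import queue
from logging.handlers import QueueHandler, QueueListener

app_logger = logging.getLogger("myapp")
app_logger.addHandler(logging.StreamHandler())

log_queue = queue.Queue(-1)
queue_handler = QueueHandler(log_queue)
queue_listener = QueueListener(log_queue, respect_handler_level=True)

queuify_logger(app_logger, queue_handler, queue_listener)
queue_listener.start()                            # the original StreamHandler now runs in this thread
app_logger.warning("handled via the queue")
queue_listener.stop()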
Example 7: log_queue
# Required import: from logging import handlers
# Or: from logging.handlers import QueueListener
def log_queue():
"""
Get the log queue for child process logging.
"""
global _log_queue, _log_listener
from lenskit.util.parallel import LKContext
ctx = LKContext.INSTANCE
if _log_queue is None:
_log_queue = ctx.Queue()
_log_listener = QueueListener(_log_queue, InjectHandler())
_log_listener.start()
return _log_queue
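LKContext and InjectHandler are lenskit internals that are not shown here. Judging by how the queue is used, InjectHandler presumably re-injects records received from child processes into the parent's normal logger tree; a rough, hedged sketch of that idea (an assumption, not lenskit's actual code):

import logging

class InjectHandler(logging.Handler):
    """Sketch: hand each queued record back to the logger it came from."""

    def emit(self, record: logging.LogRecord) -> None:
        logger = logging.getLogger(record.name)
        if logger.isEnabledFor(record.levelno):
            logger.handle(record)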
Example 8: setup_logger
# Required import: from logging import handlers
# Or: from logging.handlers import QueueListener
def setup_logger(args):
"""
Function to setup the logger for the compare function.
:param args:
:param manager:
:return:
"""
args.log_queue = mp.Queue(-1)
args.queue_handler = log_handlers.QueueHandler(args.log_queue)
if args.log is not None:
_log_folder = os.path.dirname(args.log)
if _log_folder and not os.path.exists(_log_folder):
os.makedirs(_log_folder)
handler = logging.FileHandler(args.log, mode="w")
logger = logging.getLogger("main_compare")
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.propagate = False
else:
logger = create_default_logger("main_compare")
handler = logger.handlers[0]
if args.verbose is False:
logger.setLevel(logging.INFO)
else:
logger.setLevel(logging.DEBUG)
logger.propagate = False
log_queue_listener = log_handlers.QueueListener(args.log_queue, logger)
log_queue_listener.propagate = False  # note: QueueListener defines no 'propagate' attribute, so this assignment has no effect
log_queue_listener.start()
queue_logger = logging.getLogger("main_queue")
for handler in queue_logger.handlers:
queue_logger.removeHandler(handler)
if args.verbose is False:
queue_logger.setLevel(logging.INFO)
else:
queue_logger.setLevel(logging.DEBUG)
main_queue_handler = log_handlers.QueueHandler(args.log_queue)
queue_logger.propagate = False
queue_logger.addHandler(main_queue_handler)
return args, handler, logger, log_queue_listener, queue_logger
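The listener is returned so the caller can shut it down once the comparison finishes; the call site might look roughly like this (an assumption about usage, not the project's code):

args, handler, logger, log_queue_listener, queue_logger = setup_logger(args)
try:
    queue_logger.info("comparison running")
    # ... the actual compare work would go here ...
finally:
    log_queue_listener.stop()                     # flush queued records before exiting
    handler.close()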