A practical example of Python logging: multiple processes and threads writing to the same log file
Reference (the official logging cookbook): https://docs.python.org/zh-cn/3.8/howto/logging-cookbook.html
import logging
import logging.config
import logging.handlers
from multiprocessing import Process, Queue
import random
import threading
import time

def logger_thread(q):
    while True:
        record = q.get()
        if record is None:
            break
        logger = logging.getLogger(record.name)
        logger.handle(record)

def worker_process(q):
    qh = logging.handlers.QueueHandler(q)
    root = logging.getLogger()
    root.setLevel(logging.DEBUG)
    root.addHandler(qh)
    levels = [logging.DEBUG, logging.INFO, logging.WARNING, logging.ERROR,
              logging.CRITICAL]
    loggers = ["foo", "foo.bar", "foo.bar.baz",
               "spam", "spam.ham", "spam.ham.eggs"]
    for i in range(100):
        lvl = random.choice(levels)
        logger = logging.getLogger(random.choice(loggers))
        logger.log(lvl, "Message no. %d", i)

if __name__ == "__main__":
    q = Queue()
    d = {
        "version": 1,
        "formatters": {
            "detailed": {
                "class": "logging.Formatter",
                "format": "%(asctime)s %(name)-15s %(levelname)-8s %(processName)-10s %(message)s"
            }
        },
        "handlers": {
            "console": {
                "class": "logging.StreamHandler",
                "level": "INFO",
            },
            "file": {
                "class": "logging.FileHandler",
                "filename": "mplog.log",
                "mode": "w",
                "formatter": "detailed",
            },
            "foofile": {
                "class": "logging.FileHandler",
                "filename": "mplog-foo.log",
                "mode": "w",
                "formatter": "detailed",
            },
            "errors": {
                "class": "logging.FileHandler",
                "filename": "mplog-errors.log",
                "mode": "w",
                "level": "ERROR",
                "formatter": "detailed",
            },
        },
        "loggers": {
            "foo": {
                "handlers": ["foofile"]
            }
        },
        "root": {
            "level": "DEBUG",
            "handlers": ["console", "file", "errors"]
        },
    }
    workers = []
    for i in range(5):
        wp = Process(target=worker_process, name="worker %d" % (i + 1),
                     args=(q,))
        workers.append(wp)
        wp.start()
    logging.config.dictConfig(d)
    lp = threading.Thread(target=logger_thread, args=(q,))
    lp.start()
    # At this point, the main process could do some useful work of its own
    # Once it's done that, it can wait for the workers to terminate...
    for wp in workers:
        wp.join()
    # And now tell the logging thread to finish up, too
    q.put(None)
    lp.join()

Practical example:
1. Configure logging with a dict
log_conf_dict = {
    "version": 1,
    "formatters": {
        "my_formatter": {
            "class": "logging.Formatter",
            "format": "%(asctime)s %(processName)s(%(process)d) %(threadName)s(%(thread)d) %(filename)s[line:%(lineno)d] %(levelname)s %(message)s"
        }
    },
    "handlers": {
        "console": {
            "class": "logging.StreamHandler",
            "level": "INFO",
            "formatter": "my_formatter",
        },
        "file": {
            "class": "logging.handlers.RotatingFileHandler",
            "filename": "/log/test.log",
            "maxBytes": 5*1024*1024,
            "backupCount": 60,
            "mode": "w",
            "delay": True,
            "formatter": "my_formatter",
            "encoding": "utf-8",
            "level": "INFO",
        },
    },
    "loggers": {
        "my_logger": {
            "handlers": ["file"]
        }
    },
    "root": {
        "level": _level,  # _level is defined elsewhere in the original code (e.g. "INFO")
        "handlers": ["console", "file"]
    },
}

2. Start a dedicated log-writing listener thread in the main process
"""主进程中开启独立的日志写入监听线程"""
queue = Queue(-1)
logging.config.dictConfig(dict)
log_thread = threading.Thread(target=logger_main, args=(queue,))
log_thread.start() """其他逻辑代码段""" queue.put(None) log_thread.join()日志写入函数
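The "other application logic" placeholder above is typically where the worker processes are created, handed the queue, and joined; only after every worker has exited should the None sentinel be put on the queue, otherwise records could still arrive after the listener thread has stopped. A small sketch under that assumption (the helper name run_workers and the worker count are illustrative, not from the original article):

from multiprocessing import Process

def run_workers(queue, target, num_workers=3):
    # Start num_workers child processes that each run target(queue) -- for example
    # the child_proc_main function from step 3 below -- then wait for all of them.
    workers = []
    for i in range(num_workers):
        wp = Process(target=target, name="worker %d" % (i + 1), args=(queue,))
        workers.append(wp)
        wp.start()
    for wp in workers:
        wp.join()

It would be called between log_thread.start() and queue.put(None), for example run_workers(queue, child_proc_main); joining the workers first guarantees that all of their records are already in the queue before the None sentinel arrives.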
The log-writing function drains the queue and hands each record to the root logger's handlers:

def logger_main(q):
    """Write records from the log queue to the file."""
    while True:
        record = q.get()
        if record is None:
            break
        logger = logging.getLogger()
        logger.handle(record)

3. In each child process, feed log records into the queue via a QueueHandler
def child_proc_main(queue):
    lqh = logging.handlers.QueueHandler(queue)
    lqh.set_name("my_queue_handler")
    # A critical step: clear the inherited handlers before adding the queue handler.
    # Reason: in a complex multi-process/multi-thread environment the behaviour differs
    # between Windows and Linux; on Linux the child process copies the main process's
    # logging configuration, which would otherwise write to several log files at once.
    root = logging.getLogger()
    root.handlers.clear()
    root.addHandler(lqh)
    root.setLevel(level)  # `level`, like `_level` above, is defined elsewhere in the original code

This concludes this article on getting Python logging to write from multiple processes and threads to the same log file. For more on Python logging and log files, search 脚本之家's earlier articles or browse its related articles, and please continue to support 脚本之家!
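For reference, here is a minimal, self-contained sketch that ties the three steps above together. It is an illustration under assumed values rather than code from the article: the level "INFO", the file name demo.log, the logger name "demo", and the worker count of two are all placeholders.

import logging
import logging.config
import logging.handlers
import threading
from multiprocessing import Process, Queue

LOG_LEVEL = "INFO"  # stands in for the article's _level / level variables

LOG_CONF = {
    "version": 1,
    "formatters": {
        "my_formatter": {
            "format": "%(asctime)s %(processName)s %(levelname)s %(message)s"
        }
    },
    "handlers": {
        "file": {
            "class": "logging.FileHandler",
            "filename": "demo.log",  # placeholder path
            "formatter": "my_formatter",
        }
    },
    "root": {"level": LOG_LEVEL, "handlers": ["file"]},
}

def logger_main(q):
    # Listener thread: drain the queue and let the root logger's handlers do the writing.
    while True:
        record = q.get()
        if record is None:
            break
        logging.getLogger(record.name).handle(record)

def child_proc_main(q):
    # Child process: replace any inherited handlers with a QueueHandler.
    # (On Linux, fork copies the parent's logging configuration into the child.)
    root = logging.getLogger()
    root.handlers.clear()
    root.addHandler(logging.handlers.QueueHandler(q))
    root.setLevel(LOG_LEVEL)
    for i in range(5):
        logging.getLogger("demo").info("message %d from a child process", i)

if __name__ == "__main__":
    queue = Queue(-1)
    logging.config.dictConfig(LOG_CONF)   # step 1: configure logging in the main process only
    log_thread = threading.Thread(target=logger_main, args=(queue,))
    log_thread.start()                    # step 2: the listener thread
    workers = [Process(target=child_proc_main, args=(queue,)) for _ in range(2)]
    for wp in workers:                    # step 3 runs inside each child process
        wp.start()
    for wp in workers:
        wp.join()
    queue.put(None)                       # sentinel: stop the listener thread
    log_thread.join()

After it runs, demo.log should contain the ten messages from the two child processes, interleaved, all written by the single listener thread in the main process.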