Skip to content

Commit 964b957

Browse files
authored
Merge pull request larymak#302 from Jerry0420/add-multiprocessing-logger
add multiprocessing logging helper
2 parents a6a70eb + ddda9e0 commit 964b957

File tree

4 files changed

+166
-1
lines changed

4 files changed

+166
-1
lines changed
Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,28 @@
1+
# Python Logging Helper for Multiple Processes
2+
3+
## Description
4+
This is a helper class that allows Python developers to log to a single log file from multiple processes easily.
5+
6+
## Examples
7+
The following is an example code snippet in **main.py**, which provides two modes:
8+
9+
1. Running in Multiprocessing Pool:
10+
```bash
python3 main.py --is_pool yes
```
13+
14+
1. Running in Normal Multiprocessing:
15+
```bash
python3 main.py --is_pool no
```
18+
19+
The log file will be created in the `logs` directory and named `multip.log`.
20+
21+
* Log File Content
22+
```
23+
06-17 13:41:13 | sub | INFO | /projects/OTHERS/MultiprocessingLogger/main.py:19 | 26802 | c
24+
06-17 13:41:13 | sub | INFO | /projects/OTHERS/MultiprocessingLogger/main.py:19 | 26803 | d
25+
06-17 13:41:13 | sub | INFO | /projects/OTHERS/MultiprocessingLogger/main.py:19 | 26799 | a
26+
06-17 13:41:13 | sub | INFO | /projects/OTHERS/MultiprocessingLogger/main.py:19 | 26804 | e
27+
06-17 13:41:13 | sub | INFO | /projects/OTHERS/MultiprocessingLogger/main.py:19 | 26801 | b
28+
```
Lines changed: 84 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,84 @@
1+
import logging
2+
import logging.handlers
3+
import multiprocessing
4+
import os
5+
import sys
6+
7+
from pathlib import Path
8+
9+
class MultiProcesses_Logging_Helper:
    """Route log records from multiple processes into a single log file.

    A dedicated listener process owns the file/stream handlers; every other
    process only puts ``logging.LogRecord`` objects onto a shared queue, so
    writes to the log file are serialized in one place.
    """

    def __init__(self) -> None:
        # Manager-backed queue so it can also be shared with pool workers
        # (a plain multiprocessing.Queue cannot be passed through Pool.map).
        self.queue = multiprocessing.Manager().Queue(-1)

        # 'fork' so the listener inherits this object's state.
        # NOTE(review): 'fork' is unavailable on Windows and discouraged on
        # macOS (default is 'spawn') — confirm the target platform.
        self.process = multiprocessing.get_context('fork').Process(
            target=self._process_target,
        )
        self.process.start()
        self.redirect_main_process_log_to_queue()

    def get_log_file_path(self) -> str:
        """Return the log file path, creating the ``logs`` directory if needed."""
        # TODO: make this configurable
        log_file_dir = os.path.join(Path(__file__).parent, 'logs')
        Path(log_file_dir).mkdir(parents=True, exist_ok=True)
        log_file_path = os.path.join(log_file_dir, 'multip.log')
        return log_file_path

    def redirect_main_process_log_to_queue(self):
        """Forward the main process's root-logger records to the queue."""
        main_process_id = os.getpid()

        # Renamed from `filter` to avoid shadowing the builtin.
        def enqueue_record(record: logging.LogRecord):
            # Only records emitted by the main process are enqueued; forked
            # children that inherit this handler are filtered out here (they
            # attach their own QueueHandler instead).
            if record.process == main_process_id:
                self.queue.put_nowait(record)
            # Returning a falsy value stops normal handling, so the bare
            # logging.Handler's emit() — which would raise
            # NotImplementedError — is never invoked.
            return None

        root = logging.getLogger()
        root.setLevel(logging.INFO)

        handler = logging.Handler()
        handler.addFilter(enqueue_record)
        root.addHandler(handler)

    def init_logger_configure(self):
        """Attach file + stdout handlers; runs inside the listener process."""
        root = logging.getLogger()
        root.setLevel(logging.INFO)

        # One formatter shared by both handlers (was duplicated verbatim).
        formatter = logging.Formatter(
            fmt='%(asctime)s | %(name)s | %(levelname)s | %(pathname)s:%(lineno)d | %(process)d | %(message)s',
            datefmt="%m-%d %H:%M:%S",
        )

        # TODO: make this configurable
        file_handler = logging.handlers.RotatingFileHandler(
            self.get_log_file_path(),
            mode='w',
            # maxBytes=5000,
            # backupCount=0,
            encoding='utf-8',
        )
        file_handler.setLevel(logging.INFO)
        file_handler.setFormatter(formatter)
        root.addHandler(file_handler)

        std_handler = logging.StreamHandler(sys.stdout)
        std_handler.setLevel(logging.DEBUG)
        std_handler.setFormatter(formatter)
        root.addHandler(std_handler)

    def _process_target(self):
        """Listener loop: replay queued records through the local handlers."""
        self.init_logger_configure()
        while True:
            record: logging.LogRecord = self.queue.get()
            if record is None:  # sentinel: shut down
                break
            logger = logging.getLogger(record.name)
            logger.handle(record)

    def close(self):
        """Stop the listener after it has drained the queue.

        Bug fix: the original joined with ``timeout=0`` and then terminated
        immediately, which could kill the listener before pending records
        were written to the log file. We now send the shutdown sentinel
        (harmless if callers already sent one), wait for the listener to
        drain, and only terminate as a last resort.
        """
        self.queue.put_nowait(None)
        self.process.join(timeout=5)
        if self.process.is_alive():
            self.process.terminate()

OTHERS/MultiprocessingLogger/main.py

Lines changed: 52 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,52 @@
1+
import argparse
2+
import logging
3+
import logging.handlers
4+
import multiprocessing
5+
from functools import partial
6+
from logger import MultiProcesses_Logging_Helper
7+
8+
9+
def process_target(queue: multiprocessing.Queue, input: str):
    """Log *input* at INFO level through the 'sub' logger, forwarding the
    record onto *queue* for the listener process to write out."""
    sub_logger = logging.getLogger('sub')
    sub_logger.setLevel(logging.INFO)

    # Pool workers are reused across tasks, so the queue handler must be
    # attached at most once per process.
    already_wired = bool(sub_logger.handlers) and isinstance(
        sub_logger.handlers[0], logging.handlers.QueueHandler
    )
    if not already_wired:
        sub_logger.addHandler(logging.handlers.QueueHandler(queue))

    sub_logger.info(input)
20+
21+
22+
def main(is_pool: bool):
    """Demonstrate the logging helper in pool or plain-process mode."""
    log_helper = MultiProcesses_Logging_Helper()
    items = ["a", "b", "c", "d", "e"]

    if is_pool:
        print("run in multiprocessing pool")
        pool = multiprocessing.Pool(2)
        pool.map(partial(process_target, log_helper.queue), list(items))
        # Sentinel tells the listener loop to stop once the queue is drained.
        log_helper.queue.put_nowait(None)
        pool.close()
        pool.join()
    else:
        print("run in multiprocessing")
        ctx = multiprocessing.get_context('fork')
        workers = [
            ctx.Process(target=process_target, args=(log_helper.queue, item))
            for item in items
        ]
        for worker in workers:
            worker.start()
        for worker in workers:
            worker.join()
        log_helper.queue.put_nowait(None)

    log_helper.close()
46+
47+
48+
if __name__ == "__main__":
49+
parser = argparse.ArgumentParser(description='')
50+
parser.add_argument('-p', '--is_pool', help='is run in multiprocessing pool mode', default=True, type=lambda x: (str(x).lower() in ['true','1', 'yes', 'y', 'True']))
51+
args = parser.parse_args()
52+
main(args.is_pool)

README.md

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -120,4 +120,5 @@ guide [HERE](https://github.com/larymak/Python-project-Scripts/blob/main/CONTRIB
120120
| 69 | [Bitcoin Price](https://github.com/larymak/Python-project-Scripts/tree/main/WEB%20SCRAPING/Bitcoin%20Price) | [Olu-Olagbuji Delight](https://github.com/Dheelyte)
121121
| 70 | [Password Generator](https://github.com/larymak/Python-project-Scripts/tree/main/GUI/Password%20Generator) | [LpCodes](https://github.com/LpCodes)
122122
| 71 | [HTML to Excel](https://github.com/larymak/Python-project-Scripts/tree/main/CONVERSION%20SCRIPTS/HTML%20to%20Excel) | [LpCodes](https://github.com/LpCodes)
123-
| 72 | [Star pattern](https://github.com/larymak/Python-project-Scripts/tree/main/OTHERS/Star%20pattern) | [LpCodes](https://github.com/LpCodes)
123+
| 72 | [Star pattern](https://github.com/larymak/Python-project-Scripts/tree/main/OTHERS/Star%20pattern) | [LpCodes](https://github.com/LpCodes) |
124+
| 73 | [Logging Helper](https://github.com/larymak/Python-project-Scripts/tree/main/OTHERS/add-multiprocessing-logger) | [Jerry W.](https://github.com/Jerry0420) |

0 commit comments

Comments
 (0)
pFad - Phonifier reborn

Pfad - The Proxy pFad of © 2024 Garber Painting. All rights reserved.

Note: This service is not intended for secure transactions such as banking, social media, email, or purchasing. Use at your own risk. We assume no liability whatsoever for broken pages.


Alternative Proxies:

Alternative Proxy

pFad Proxy

pFad v3 Proxy

pFad v4 Proxy