
Commit 1cf5dfd

Merge pull request #73 from DiamondLightSource/logs_new
basic logging added
2 parents 7c9b79d + dd72ccd commit 1cf5dfd

9 files changed: +195 -22 lines changed


dls_barcode/camera/camera_scanner.py

Lines changed: 16 additions & 11 deletions
@@ -2,6 +2,7 @@
 
 import multiprocessing
 import queue
+import logging
 
 from .capture_worker import CaptureWorker
 from .scanner_worker import ScannerWorker
@@ -49,29 +50,31 @@ def __init__(self, result_queue, view_queue, message_queue, config):
     def start_scan(self, cam_position):
         """ Spawn the processes that will continuously capture and process images from the camera.
         """
-        print("\nMAIN: start triggered")
+        log = logging.getLogger(".".join([__name__, self.__class__.__name__]))
+        log.debug("8) scan start triggered")
         scanner_args = (self._task_q, self._overlay_q, self._result_q, self._message_q, self._scanner_kill_q, self._config, cam_position)
         self._scanner_process = multiprocessing.Process(target=CameraScanner._scanner_worker, args=scanner_args)
 
         self._capture_command_q.put(CaptureCommand(StreamAction.START, cam_position))
         self._scanner_process.start()
 
     def stop_scan(self):
-        print("\nMAIN: Stop triggered")
+        log = logging.getLogger(".".join([__name__, self.__class__.__name__]))
+        log.debug("stop triggered")
         self._terminate_scanner_process()
         self._capture_command_q.put(CaptureCommand(StreamAction.STOP))
-        print("MAIN: Stop completed")
+        log.debug("stop scan completed")
 
     def kill(self):
-        print("\n__________")
-        print("MAIN: Kill")
+        log = logging.getLogger(".".join([__name__, self.__class__.__name__]))
+        log.debug("Kill")
         self.stop_scan()
         self._capture_kill_q.put(None)
-        print("MAIN: forcing capture cleanup")
+        log.debug("KILL forcing capture cleanup")
         self._process_cleanup(self._capture_process, [self._task_q, self._view_q])
         self._capture_process.join()
         self._flush_queue(self._capture_kill_q)
-        print("MAIN: KILL COMPLETED")
+        log.debug("KILL COMPLETED")
 
     def _flush_queue(self, queue):
         while not queue.empty():
@@ -80,12 +83,13 @@ def _flush_queue(self, queue):
     def _terminate_scanner_process(self):
         if self._scanner_process is not None:
             self._scanner_kill_q.put(None)
-            print("MAIN: forcing scanner cleanup")
+            log = logging.getLogger(".".join([__name__, self.__class__.__name__]))
+            log.debug("starting scanner process cleanup")
             self._process_cleanup(self._scanner_process, [self._result_q, self._overlay_q, self._message_q])
             self._scanner_process.join()
             self._flush_queue(self._scanner_kill_q)
             self._scanner_process = None
-            print("MAIN: scanner rejoined")
+            log.debug("scanner process cleared and rejoined - caused by stop scan")
 
     def _process_cleanup(self, process, queues):
         """ A sub-process that writes to a queue can't terminate if the queue is not empty.
@@ -96,11 +100,12 @@ def _process_cleanup(self, process, queues):
         https://stackoverflow.com/questions/31708646/process-join-and-queue-dont-work-with-large-numbers
         """
         # Put the process in a list so we don't need to check is_alive() every time - supposed to perform better
+        log = logging.getLogger(".".join([__name__, self.__class__.__name__]))
         live_processes = [process]
         display = True
         while live_processes:
             if display:
-                print("MAIN: sub-process still alive")
+                log.debug("scanner process still alive")
                 display = False
 
             for q in queues:
@@ -115,7 +120,7 @@ def _process_cleanup(self, process, queues):
 
             live_processes = [p for p in live_processes if p.is_alive()]
 
-        print("MAIN: sub-process terminated!")
+        log.debug("scanner process terminated by process cleanup")
 
     @staticmethod
     def _capture_worker(task_queue, view_queue, overlay_queue, command_queue, kill_queue, camera_configs):
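Note on the pattern: the new calls build the logger name from the module and class, so records from this file appear under a hierarchical dotted name and pick up whatever handlers and levels the root configuration in logconfig.py installs. A minimal sketch of the same pattern (the example logger name is simply what the expression would produce for this module, not something added by the commit):

import logging

class CameraScanner:
    def start_scan(self, cam_position):
        # ".".join([__name__, self.__class__.__name__]) yields a dotted name such as
        # "dls_barcode.camera.camera_scanner.CameraScanner"; child loggers inherit
        # handlers and levels from their parents, so no handler is configured here.
        log = logging.getLogger(".".join([__name__, self.__class__.__name__]))
        log.debug("scan start triggered")

# The whole camera package can then be tuned in one place, e.g.:
# logging.getLogger("dls_barcode.camera").setLevel(logging.INFO)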

dls_barcode/camera/camera_switch.py

Lines changed: 3 additions & 0 deletions
@@ -1,3 +1,4 @@
+import logging
 import time
 from .camera_position import CameraPosition
 
@@ -18,6 +19,8 @@ def _stop_live_capture(self):
     def restart_live_capture_from_side(self):
         self._stop_live_capture()
         self._switch_to_side()
+        log = logging.getLogger(".".join([__name__]))
+        log.debug("6) starting scan from side")
         self._scanner.start_scan(CameraPosition.SIDE)
 
     def restart_live_capture_from_top(self):
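Here the logger name is built from the module name alone; joining a single-element list means ".".join([__name__]) is just __name__. An equivalent, slightly shorter sketch:

import logging

log = logging.getLogger(__name__)  # identical result to logging.getLogger(".".join([__name__]))
log.debug("6) starting scan from side")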

dls_barcode/datamatrix/read/interpret.py

Lines changed: 11 additions & 1 deletion
@@ -1,3 +1,6 @@
+import logging
+
+
 class DatamatrixByteInterpreter:
     @staticmethod
     def interpret_bytes(data_bytes):
@@ -46,13 +49,20 @@ def interpret_bytes(data_bytes):
                 i += num_bytes
 
             elif byte == 240:
+                #log = log = logging.getLogger(".".join([__name__]))
+                #log.error(NotImplementedError("Datamatrix EDIFACT decoding not implemented"))
                 raise NotImplementedError("Datamatrix EDIFACT decoding not implemented")
 
             elif byte == 241:
+                #log = log = logging.getLogger(".".join([__name__]))
+                #log.error(NotImplementedError("Datamatrix Extended Channel Interpretation code not implemented"))
                 raise NotImplementedError("Datamatrix Extended Channel Interpretation code not implemented")
 
             elif 242 <= byte < 256 or byte == 0:  # Unused parts of message space.
-                raise ValueError("Code {} is not used in Datamatrix specification".format(byte))
+                #log = log = logging.getLogger(".".join([__name__]))
+                error = ValueError("Code {} is not used in Datamatrix specification".format(byte))
+                #.error(error)
+                raise error
 
         return ''.join(m for m in message)
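The commented-out lines sketch logging the error before raising; the commit leaves them disabled, so decoding behaviour is functionally unchanged (the ValueError is now built first and then raised). For reference, one way a log-then-raise could look if it were enabled - a hedged sketch, not the committed behaviour; the module-level logger and the _reject_byte helper are illustrative assumptions:

import logging

log = logging.getLogger(__name__)

def _reject_byte(byte):
    # Hypothetical helper: record the bad codeword, then re-raise so callers still see the error.
    error = ValueError("Code {} is not used in Datamatrix specification".format(byte))
    log.error(str(error))
    raise error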

dls_barcode/main.py

Lines changed: 10 additions & 2 deletions
@@ -1,8 +1,10 @@
+import logging
 import sys
 
 from os.path import dirname
 from sys import path
-
+import pygelf
+import logconfig
 from dls_barcode.config import BarcodeConfig
 from dls_barcode.gui import DiamondBarcodeMainWindow
 from dls_barcode.main_manager import MainManager
@@ -31,12 +33,17 @@ def main(config_file, version):
     ui = DiamondBarcodeMainWindow(config, version, None)
     manager = MainManager(ui, config)
     manager.initialise_timers()
+    log = logging.getLogger(".".join([__name__]))
+    log.debug('2) timers initialised')
     manager.initialise_scanner()
     sys.exit(app.exec_())
 
 
 if __name__ == '__main__':
+    logconfig.setup_logging()
+    #logconfig.set_additional_handler("log.log")
     # Multiprocessing support for PyInstaller bundling in Windows
+    log = logging.getLogger(".".join([__name__]))
     if sys.platform.startswith('win'):
         import multiprocessing
         multiprocessing.freeze_support()
@@ -45,6 +52,7 @@ def main(config_file, version):
     parser.add_argument("-cf", "--config_file", type=str, default=DEFAULT_CONFIG_FILE,
                         help="The path of the configuration file (default=" + DEFAULT_CONFIG_FILE + ")")
     args = parser.parse_args()
-    print("CONFIG: " + args.config_file)
+    log.info("CONFIG: " + args.config_file)
+    #print("CONFIG: " + args.config_file)
 
     main(args.config_file, VERSION)
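The ordering matters here: logconfig.setup_logging() runs once in the __main__ block, so the root handlers (console and GELF) are installed before the application logs anything. A minimal sketch of the same startup order - the config path is a placeholder, and in the real script it comes from the -cf/--config_file argument:

import logging
import logconfig

if __name__ == '__main__':
    logconfig.setup_logging()               # install handlers once, at startup
    log = logging.getLogger(__name__)       # loggers fetched afterwards use that configuration
    config_file = "./config.ini"            # placeholder value for illustration only
    log.info("CONFIG: " + config_file)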

dls_barcode/main_manager.py

Lines changed: 19 additions & 4 deletions
@@ -1,3 +1,4 @@
+import logging
 import multiprocessing
 import queue
 import time
@@ -54,19 +55,27 @@ def _cleanup(self):
         self._ui.resetCountdown()
 
     def initialise_scanner(self):
+        log = logging.getLogger(".".join([__name__]))
+        log.debug("3) camera scanner initialisation")
         self._camera_scanner = CameraScanner(self._result_queue, self._view_queue, self._message_queue, self._config)
+        log.debug("4) camera switch initialisation")
         self._camera_switch = CameraSwitch(self._camera_scanner, self._config.top_camera_timeout)
+
         self._restart_live_capture_from_side()
         self._ui.resetCountdown()
 
     def _camera_capture_alive(self):
         return self._camera_scanner is not None and self._camera_switch is not None
 
     def _restart_live_capture_from_top(self):
+        log = logging.getLogger(".".join([__name__]))
+        log.debug("starting live capture form top")
        self._camera_switch.restart_live_capture_from_top()
        self._ui.startCountdown(self._config.top_camera_timeout.value())
 
     def _restart_live_capture_from_side(self):
+        log = logging.getLogger(".".join([__name__]))
+        log.debug("5) starting live capture form side")
         self._reset_msg_timer()
         self._camera_switch.restart_live_capture_from_side()
         self._ui.resetCountdown()
@@ -138,7 +147,8 @@ def _read_side_scan(self):
 
         # Barcode successfully read
         Beeper.beep()
-        print("MAIN: puck barcode recorded")
+        log = logging.getLogger(".".join([__name__]))
+        log.debug("puck barcode recorded")
         holder_barcode = plate.barcodes()[0]
         if not self._ui.isLatestHolderBarcode(holder_barcode):
             self._latest_holder_barcode = holder_barcode
@@ -149,8 +159,10 @@ def _read_top_scan(self):
         if self._result_queue.empty():
             if self._camera_switch.is_top_scan_timeout():
                 self._ui.displayScanTimeoutMessage()
-
-                print("\n*** Scan timeout ***")
+                log = logging.getLogger(".".join([__name__]))
+                extra = ({"timeout_value": 1})
+                log = logging.LoggerAdapter(log, extra)
+                log.info("scan timeout", extra)
                 self._restart_live_capture_from_side()
             return
 
@@ -165,5 +177,8 @@ def _read_top_scan(self):
 
         # Barcodes successfully read
         Beeper.beep()
-        print("Scan Completed", self._camera_switch.get_scan_time())
+        log = logging.getLogger(".".join([__name__]))
+        extra = ({"scan_time": self._camera_switch.get_scan_time(), "timeout_value": 0})
+        log = logging.LoggerAdapter(log, extra)
+        log.info("Scan Completed", extra)
         self._restart_live_capture_from_side()
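The timeout and completion messages use logging.LoggerAdapter to attach structured fields (timeout_value, scan_time) to the records; with include_extra_fields enabled on the GELF handler those fields can be forwarded to graylog alongside the message text. A hedged sketch of the adapter pattern, with illustrative values:

import logging

log = logging.getLogger(__name__)
extra = {"scan_time": 12.3, "timeout_value": 0}   # illustrative values only
adapter = logging.LoggerAdapter(log, extra)
adapter.info("Scan Completed")                    # the adapter merges `extra` into the record; it is not passed as a message argument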

dls_barcode/scan/with_geometry/geometry_scanner.py

Lines changed: 3 additions & 3 deletions
@@ -60,7 +60,7 @@ def _perform_frame_scan(self):
         self._frame_result.set_barcodes(self._barcodes)
         if self.plate_type == Geometry.UNIPUCK:
             self._geometry = UnipuckLocator(self._frame_img).find_location()
-            if self._geometry == None:
+            if self._geometry is None:
                 self._geometry = self._calculate_geometry()
 
         self._frame_result.set_geometry(self._geometry)
@@ -171,8 +171,8 @@ def _make_slotted_barcodes_list(barcodes, geometry):
         slotted_bcs = [None] * geometry.num_slots()
         for bc in barcodes:
             slot_num = geometry.containing_slot(bc.center())
-            if(slot_num != None):
-                slotted_bcs[slot_num - 1] = bc
+            if slot_num is not None:
+                slotted_bcs[slot_num - 1] = bc
 
         return slotted_bcs
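The changes in this file are drive-by style fixes: comparisons with None move from == / != to the identity checks is / is not, which PEP 8 recommends and which cannot be fooled by a class overriding __eq__. A small illustration:

class AlwaysEqual:
    def __eq__(self, other):
        return True          # a pathological __eq__ makes "== None" true even for a real object

obj = AlwaysEqual()
print(obj == None)           # True - misleading
print(obj is None)           # False - the identity check is unambiguous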

dls_barcode/version.py

Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-VERSION = "v1.3.0"
+VERSION = "v1.4.0"

dls_util/cv/camera_stream.py

Lines changed: 2 additions & 0 deletions
@@ -1,3 +1,5 @@
+import logging
+
 import cv2 as opencv
 
 _OPENCV_MAJOR = opencv.__version__[0]

logconfig.py

Lines changed: 130 additions & 0 deletions
@@ -0,0 +1,130 @@
+""" logconfig
+This is essentially a template which can be copied into a python project and
+used to easily achieve a good practice of logging. Modify the local copy as per
+the project or site requirements.
+"""
+
+import os
+import json
+import logging.config
+import logging.handlers
+import getpass
+import threading
+
+
+from dls_barcode import version
+
+MAXBYTES = 1048576
+BACKUPCOUNT = 20
+ENCODING = "utf8"
+
+
+default_config = {
+    "version": 1.0,
+    "disable_existing_loggers": False,
+    "formatters": {
+        "simple": {
+            "format": "%(message)s"
+        },
+        "extended": {
+            "format": "%(asctime)s - %(name)20s - %(levelname)6s - %(message)s"
+        },
+        "json": {
+            "format": "name: %(name)s, level: %(levelname)s, time: %(asctime)s, message: %(message)s"
+        }
+    },
+
+    "handlers": {
+        "console": {
+            "class": "logging.StreamHandler",
+            "level": "DEBUG",
+            "formatter": "simple",
+            "stream": "ext://sys.stdout"
+        },
+
+        "graylog_gelf": {
+            "class": "pygelf.GelfUdpHandler",
+            "level": "INFO",
+            # Obviously a DLS-specific configuration: the graylog server address and port
+            # Use the input "Load Balanced GELF TCP" on graylog2.
+            "host": "graylog2",# "localhost", - use locaklhost for tests
+            "port": 12201,
+            "debug": True,
+            # The following custom fields will be disabled if setting this False
+            "include_extra_fields": True,
+            "username": getpass.getuser(),
+            "pid": os.getpid(),
+            "application": "BarcodeScanner",
+            "facility": "XChem",
+            "_version": version.VERSION
+        },
+
+    },
+    "root": {
+        # Set the level here to be the default minimum level of log record to be produced
+        # If you set a handler to level DEBUG you will need to set either this level
+        "level": "DEBUG",
+        "handlers": ["console", "graylog_gelf"]
+    }
+}
+
+
+class ThreadContextFilter(logging.Filter):
+    """A logging context filter to add thread name and ID."""
+
+    def filter(self, record):
+        record.thread_id = str(threading.current_thread().ident)
+        record.thread_name = str(threading.current_thread().name)
+        return True
+
+
+def setup_logging(
+        default_log_config=None,
+        default_level=logging.INFO,
+        env_key='LOG_CFG'
+):
+    """Setup logging configuration
+    Call this only once from the application main() function or __main__ module!
+    This will configure the python logging module based on a logging configuration
+    in the following order of priority:
+    1. Log configuration file found in the environment variable specified in the `env_key` argument.
+    2. Log configuration file found in the `default_log_config` argument.
+    3. Default log configuration found in the `logconfig.default_config` dict.
+    4. If all of the above fails: basicConfig is called with the `default_level` argument.
+    Args:
+        default_log_config (Optional[str]): Path to log configuration file.
+        env_key (Optional[str]): Environment variable that can optionally contain
+            a path to a configuration file.
+        default_level (int): logging level to set as default. Ignored if a log
+            configuration is found elsewhere.
+    Returns: None
+    """
+    dict_config = None
+
+    logconfig_filename = default_log_config
+    env_var_value = os.getenv(env_key, None)
+
+    if env_var_value is not None:
+        logconfig_filename = env_var_value
+
+    if default_config is not None:
+        dict_config = default_config
+
+    if logconfig_filename is not None and os.path.exists(logconfig_filename):
+        with open(logconfig_filename, 'rt') as f:
+            file_config = json.load(f)
+        if file_config is not None:
+            dict_config = file_config
+
+    if dict_config is not None:
+        logging.config.dictConfig(dict_config)
+    else:
+        logging.basicConfig(level=default_level)
+
+
+def set_additional_handler(file_name):
+    logger = logging.getLogger()
+    handler = logging.handlers.RotatingFileHandler(file_name, maxBytes=MAXBYTES, backupCount=BACKUPCOUNT, encoding=ENCODING)
+    #json_format = default_config.get("formatters",{}).get("json",{}).get("format",{})
+    #handler.setFormatter(logging.Formatter(json_format))
+    logger.addHandler(handler)
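Typical usage of this template from an entry point, assuming logconfig.py sits on the import path: setup_logging() prefers a JSON config file named by the LOG_CFG environment variable, then the default_log_config argument, then the built-in default_config dict; set_additional_handler() bolts a rotating file log onto the root logger; ThreadContextFilter can be attached where thread details are wanted on records. The file name below is a placeholder, not something used by the commit:

import logging
import logconfig

logconfig.setup_logging()                          # LOG_CFG file > default_log_config file > default_config dict
logconfig.set_additional_handler("barcode.log")    # optional: rotate "barcode.log" at ~1 MiB, keeping 20 backups

log = logging.getLogger(__name__)
log.addFilter(logconfig.ThreadContextFilter())     # records logged via this logger gain thread_id / thread_name
log.info("logging configured")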
