diff --git a/bin/mule b/bin/mule
index 5f14110..ea0468a 100755
--- a/bin/mule
+++ b/bin/mule
@@ -2,20 +2,24 @@
 
 import sys
 import os
-import traceback
+import logging
 from importlib import import_module
 import argparse
 
+# create MULE_DIR path
 try:
     MULE_DIR = str(os.environ['MULE_DIR'])
 except Exception as e:
     print("Couldn't source MULE directory")
     print(e)
 
-# create MULE_DIR path
 sys.path.append(os.path.expanduser(MULE_DIR))
 
+from packs.core.logging import setup_logging
+
+setup_logging()
+
 
 '''
 argument list:
 1 - Which pack? Acquisition, Calibration, etc.
@@ -37,13 +41,12 @@ parser.add_argument("config", help = 'The config file provided to the pack, this
 
 args = parser.parse_args()
 
-# import pack and run
+# start logging, import pack and run
 try:
     module_name = f'packs.{args.pack}.{args.pack}'
     pack = getattr(import_module(module_name), args.pack)
 except ModuleNotFoundError:
-    print(f"Failed to find pack {args.pack}")
-    traceback.print_exc()
+    logging.exception(f"Failed to find pack {args.pack}")
     exit(1)
 else:
     pack(args.config)
diff --git a/packs/acq/acq.py b/packs/acq/acq.py
index 127419f..c464269 100644
--- a/packs/acq/acq.py
+++ b/packs/acq/acq.py
@@ -1,9 +1,9 @@
 from packs.core.core_utils import check_test
-
+import logging
 
 def acq(config_file):
-    print("This works as expected: acquisition")
-    print("In here you should read the config provided")
+    logging.info("This works as expected: acquisition")
+    logging.info("In here you should read the config provided")
 
     if check_test(config_file):
         return
diff --git a/packs/core/core_utils.py b/packs/core/core_utils.py
index 4bb9ea2..9864d5e 100644
--- a/packs/core/core_utils.py
+++ b/packs/core/core_utils.py
@@ -1,3 +1,5 @@
+import logging
+
 def flatten(xss):
     '''
     Flattens a 2D list
@@ -8,7 +10,7 @@ def flatten(xss):
 def check_test(file):
     # quick check for test config
     if file == "test_config":
-        print("Test config executable run successfully")
+        logging.info("Test config executable run successfully")
         return True
     else:
         return False
diff --git a/packs/core/logging.py b/packs/core/logging.py
new file mode 100644
index 0000000..bbce7b2
--- /dev/null
+++ b/packs/core/logging.py
@@ -0,0 +1,42 @@
+'''
+Simple logging script with file name altering based on date and time
+'''
+
+import logging
+import os
+from datetime import datetime
+from logging.handlers import TimedRotatingFileHandler
+
+def setup_logging(log_dir = None, log_name = None) -> None:
+    '''
+    Set up the logging configuration.
+    Taken wholeheartedly from CARP
+    '''
+    # set and create log_dir if None
+    if log_dir is None:
+        log_dir = f"{os.environ['MULE_DIR']}/logs"
+    os.makedirs(log_dir, exist_ok=True)
+
+    # create unique log file name based on current date and time, or provided name
+    if log_name is None:
+        log_file = os.path.join(log_dir, f"MULE_{datetime.now().strftime('%Y%m%d_%H%M%S')}.log")
+    else:
+        log_file = os.path.join(log_dir, log_name)
+
+    # clear existing handlers so basicConfig works
+    root = logging.getLogger()
+    for handler in root.handlers[:]:
+        root.removeHandler(handler)
+
+    # basic logging config
+    logging.basicConfig(
+        level = logging.DEBUG,
+        format = '%(levelname)-8s | %(asctime)s | %(message)s',
+        handlers = [
+            TimedRotatingFileHandler(log_file,
+                                     when = "midnight",
+                                     interval = 1,
+                                     backupCount = 7),
+            logging.StreamHandler()
+        ]
+    )
diff --git a/packs/proc/proc.py b/packs/proc/proc.py
index ec95240..f015fe6 100644
--- a/packs/proc/proc.py
+++ b/packs/proc/proc.py
@@ -1,11 +1,13 @@
 import os
+import logging
+
 
 from packs.core.io import read_config_file
 from packs.proc.processing_utils import process_bin_WD2
 from packs.core.core_utils import check_test
 
 def proc(config_file):
-    print("Starting the processing pack...")
+    logging.info("Starting the processing pack...")
 
     # checks if test, if so ends run
     if check_test(config_file):
diff --git a/packs/proc/processing_utils.py b/packs/proc/processing_utils.py
index 7102b49..9b40d32 100644
--- a/packs/proc/processing_utils.py
+++ b/packs/proc/processing_utils.py
@@ -5,6 +5,7 @@
 import tables as tb
 import pandas as pd
 import warnings
+import logging
 
 import h5py
 
@@ -66,7 +67,7 @@ def raw_to_h5_WD1(PATH, save_h5 = False, verbose = False, print_mod = 0):
     file = open(PATH, 'rb')
     data = []
 
-    print("File open! Processing...")
+    logging.info("File open! Processing...")
 
     # Collect data, while true loops are always dangerous but lets ignore that here :)
     while (True):
@@ -75,18 +76,18 @@
 
         # breaking condition
        if len(array) == 0:
-            print("Processing finished! Saving...")
+            logging.info("Processing finished! Saving...")
             break
 
         # printing events
         if (array[4] % int(print_mod) == 0):
-            print("Event {}".format(array[4]))
+            logging.info("Event {}".format(array[4]))
 
         # verbose check
         if (verbose == True):
             array_tag = ['event size (ns)', 'board ID', 'pattern', 'board channel', 'event counter', 'trigger tag']
             for i in range(len(array)):
-                print("{}: {}".format(array_tag[i], array[i]))
+                logging.info("{}: {}".format(array_tag[i], array[i]))
 
 
 
@@ -100,7 +101,7 @@ def raw_to_h5_WD1(PATH, save_h5 = False, verbose = False, print_mod = 0):
         data.append(np.fromfile(file, dtype=int16bit, count=event_size))
 
     if (save_h5 == True):
-        print("Saving raw waveforms...")
+        logging.info("Saving raw waveforms...")
         # change path to dump the h5 file where
         # the .dat file is
         directory = PATH[:-3] + "h5"
@@ -208,9 +209,9 @@ def process_header(file_path : str,
 
     # check that event header is as expected
     if (event_number_1 -1 == event_number) and (samples_1 == samples) and sampling_period_1 == (sampling_period):
-        print(f"{channels} channels detected. Processing accordingly...")
+        logging.info(f"{channels} channels detected. Processing accordingly...")
     else:
-        print(f"Single channel detected. If you're expecting more channels, something has gone wrong.\nProcessing accordingly...")
+        logging.info(f"Single channel detected. If you're expecting more channels, something has gone wrong.\nProcessing accordingly...")
         channels = 1
 
     file.close()
@@ -395,7 +396,7 @@ def process_bin_WD2(file_path : str,
 
     # Ensure save path is clear
     save_path = check_save_path(save_path, overwrite)
-    print(f'\nData input : {file_path}\nData output : {save_path}')
+    logging.info(f'\nData input : {file_path}\nData output : {save_path}')
 
     # collect binary information
     wdtype, samples, sampling_period, channels = process_header(file_path)
@@ -408,7 +409,7 @@
 
     # Process data chunked or unchunked
     if counts == -1:
-        print("No chunking selected...")
+        logging.info("No chunking selected...")
         # read in data
         with open(file_path, 'rb') as file:
             data = read_binary(file, wdtype)
@@ -419,7 +420,7 @@
         # save data
         save_data(event_info, rwf, save_path)
     else:
-        print(f"Chunking by {counts}...")
+        logging.info(f"Chunking by {counts}...")
         # collect data into dataframes based on desired splitting
         counter = 0
         while True:
@@ -431,7 +432,7 @@
 
             # check binary has content in it
             if len(data) == 0:
-                print("Processing Finished!")
+                logging.info("Processing Finished!")
                 return True
 
             # format_data
diff --git a/packs/tests/logging_test.py b/packs/tests/logging_test.py
new file mode 100644
index 0000000..81d015a
--- /dev/null
+++ b/packs/tests/logging_test.py
@@ -0,0 +1,42 @@
+'''
+Testing of the logger
+'''
+
+import os
+import logging
+from pathlib import Path
+
+from packs.core.logging import setup_logging
+
+
+def test_logfile_exists(tmp_path):
+    '''
+    check logfile exists upon initialisation
+    '''
+    log_name = 'test_log.log'
+    setup_logging(tmp_path, log_name)
+
+    full_path = os.path.join(tmp_path, log_name)
+    assert os.path.exists(full_path)
+
+
+def test_logfile_contents(tmp_path):
+    '''
+    check logfile contents match as expected when output.
+    '''
+    log_name = 'test_log.log'
+    setup_logging(tmp_path, log_name)
+
+    full_path = os.path.join(tmp_path, log_name)
+
+    logging.warning('Test warning, this shouldnt cause any issues')
+
+    # flush the logging handlers
+    for handler in logging.getLogger().handlers:
+        handler.flush()
+
+    # open and check log content
+    log_content = Path(full_path).read_text()
+    assert "WARNING" in log_content
+    assert "Test warning, this shouldnt cause any issues" in log_content
+
diff --git a/packs/tests/tests.py b/packs/tests/tests.py
index 7b2d5ce..4fb53e1 100644
--- a/packs/tests/tests.py
+++ b/packs/tests/tests.py
@@ -1,9 +1,9 @@
 from packs.core.core_utils import check_test
-
+import logging
 
 def tests(config_file):
-    print("This works as expected: testing")
-    print("In here you should read the config provided")
+    logging.info("This works as expected: testing")
+    logging.info("In here you should read the config provided")
 
     if check_test(config_file):
-        return
\ No newline at end of file
+        return
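A minimal usage sketch of the new packs.core.logging.setup_logging introduced above, assuming MULE_DIR is exported as bin/mule expects; the fallback path and the log message below are illustrative only and not part of the patch:

import logging
import os

from packs.core.logging import setup_logging

# hypothetical repository root; bin/mule normally reads MULE_DIR from the environment
os.environ.setdefault('MULE_DIR', '/path/to/mule')

# with no arguments this writes MULE_<timestamp>.log under $MULE_DIR/logs,
# rotates the file at midnight, and mirrors every record to the console
setup_logging()

logging.info("Example record routed through the rotating file handler")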