13 changes: 8 additions & 5 deletions bin/mule
@@ -2,20 +2,24 @@

import sys
import os
import traceback
import logging

from importlib import import_module
import argparse

# create MULE_DIR path
try:
    MULE_DIR = str(os.environ['MULE_DIR'])
except Exception as e:
    print("Couldn't source MULE directory")
    print(e)
    sys.exit(1)  # MULE_DIR is needed below, so stop here rather than fail later with a NameError

# add MULE_DIR to the import path so the packs can be found
sys.path.append(os.path.expanduser(MULE_DIR))

from packs.core.logging import setup_logging

setup_logging()

'''
argument list:
1 - Which pack? Acquisition, Calibration, etc.
@@ -37,13 +41,12 @@ parser.add_argument("config", help = 'The config file provided to the pack, this

args = parser.parse_args()

# import pack and run
# start logging, import pack and run
try:
    module_name = f'packs.{args.pack}.{args.pack}'
    pack = getattr(import_module(module_name), args.pack)
except ModuleNotFoundError:
    print(f"Failed to find pack {args.pack}")
    traceback.print_exc()
    logging.exception(f"Failed to find pack {args.pack}")
    exit(1)
else:
    pack(args.config)
6 changes: 3 additions & 3 deletions packs/acq/acq.py
@@ -1,9 +1,9 @@
from packs.core.core_utils import check_test

import logging

def acq(config_file):
    print("This works as expected: acquisition")
    print("In here you should read the config provided")
    logging.info("This works as expected: acquisition")
    logging.info("In here you should read the config provided")

    if check_test(config_file):
        return
4 changes: 3 additions & 1 deletion packs/core/core_utils.py
@@ -1,3 +1,5 @@
import logging

def flatten(xss):
    '''
    Flattens a 2D list
@@ -8,7 +10,7 @@ def flatten(xss):
def check_test(file):
    # quick check for test config
    if file == "test_config":
        print("Test config executable run successfully")
        logging.info("Test config executable run successfully")
        return True
    else:
        return False
42 changes: 42 additions & 0 deletions packs/core/logging.py
@@ -0,0 +1,42 @@
'''
Simple logging setup; the log file name is based on the current date and time
'''

import logging
import os
from datetime import datetime
from logging.handlers import TimedRotatingFileHandler

def setup_logging(log_dir = None, log_name = None) -> None:
    '''
    Setup the logging configuration.
    Taken wholeheartedly from CARP
    '''
    # set and create log_dir if None
    if log_dir is None:
        log_dir = f"{os.environ['MULE_DIR']}/logs"
    os.makedirs(log_dir, exist_ok=True)

    # create unique log file name based on current date and time, or provided name
    if log_name is None:
        log_file = os.path.join(log_dir, f"MULE_{datetime.now().strftime('%Y%m%d_%H%M%S')}.log")
    else:
        log_file = os.path.join(log_dir, log_name)

    # clear existing handlers so basicConfig works
    root = logging.getLogger()
    for handler in root.handlers[:]:
        root.removeHandler(handler)

    # basic logging config: DEBUG and above go to a midnight-rotating file and to the console
    logging.basicConfig(
        level = logging.DEBUG,
        format = '%(levelname)-8s | %(asctime)s | %(message)s',
        handlers = [
            TimedRotatingFileHandler(log_file,
                                     when = "midnight",
                                     interval = 1,
                                     backupCount = 7),
            logging.StreamHandler()
        ]
    )
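
Note: a minimal usage sketch of this logger, mirroring how bin/mule wires it up; the message below is illustrative only and not part of the diff.

from packs.core.logging import setup_logging
import logging

setup_logging()                # defaults to $MULE_DIR/logs with a timestamped MULE_*.log file
logging.info("pack starting")  # written to both the rotating log file and the console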
4 changes: 3 additions & 1 deletion packs/proc/proc.py
@@ -1,11 +1,13 @@
import os
import logging


from packs.core.io import read_config_file
from packs.proc.processing_utils import process_bin_WD2
from packs.core.core_utils import check_test

def proc(config_file):
    print("Starting the processing pack...")
    logging.info("Starting the processing pack...")

    # checks if test, if so ends run
    if check_test(config_file):
23 changes: 12 additions & 11 deletions packs/proc/processing_utils.py
@@ -5,6 +5,7 @@
import tables as tb
import pandas as pd
import warnings
import logging

import h5py

@@ -66,7 +67,7 @@ def raw_to_h5_WD1(PATH, save_h5 = False, verbose = False, print_mod = 0):
    file = open(PATH, 'rb')
    data = []

    print("File open! Processing...")
    logging.info("File open! Processing...")
    # Collect data; while-true loops are always dangerous but let's ignore that here :)
    while (True):

@@ -75,18 +76,18 @@

        # breaking condition
        if len(array) == 0:
            print("Processing finished! Saving...")
            logging.info("Processing finished! Saving...")
            break

        # printing events
        if (array[4] % int(print_mod) == 0):
            print("Event {}".format(array[4]))
            logging.info("Event {}".format(array[4]))

        # verbose check
        if (verbose == True):
            array_tag = ['event size (ns)', 'board ID', 'pattern', 'board channel', 'event counter', 'trigger tag']
            for i in range(len(array)):
                print("{}: {}".format(array_tag[i], array[i]))
                logging.info("{}: {}".format(array_tag[i], array[i]))



@@ -100,7 +101,7 @@
        data.append(np.fromfile(file, dtype=int16bit, count=event_size))

    if (save_h5 == True):
        print("Saving raw waveforms...")
        logging.info("Saving raw waveforms...")
        # change path to dump the h5 file where
        # the .dat file is
        directory = PATH[:-3] + "h5"
@@ -208,9 +209,9 @@ def process_header(file_path : str,

    # check that event header is as expected
    if (event_number_1 - 1 == event_number) and (samples_1 == samples) and sampling_period_1 == (sampling_period):
        print(f"{channels} channels detected. Processing accordingly...")
        logging.info(f"{channels} channels detected. Processing accordingly...")
    else:
        print(f"Single channel detected. If you're expecting more channels, something has gone wrong.\nProcessing accordingly...")
        logging.info(f"Single channel detected. If you're expecting more channels, something has gone wrong.\nProcessing accordingly...")
        channels = 1

    file.close()
@@ -395,7 +396,7 @@ def process_bin_WD2(file_path : str,

    # Ensure save path is clear
    save_path = check_save_path(save_path, overwrite)
    print(f'\nData input : {file_path}\nData output : {save_path}')
    logging.info(f'\nData input : {file_path}\nData output : {save_path}')

    # collect binary information
    wdtype, samples, sampling_period, channels = process_header(file_path)
@@ -408,7 +409,7 @@

    # Process data chunked or unchunked
    if counts == -1:
        print("No chunking selected...")
        logging.info("No chunking selected...")
        # read in data
        with open(file_path, 'rb') as file:
            data = read_binary(file, wdtype)
@@ -419,7 +420,7 @@
        # save data
        save_data(event_info, rwf, save_path)
    else:
        print(f"Chunking by {counts}...")
        logging.info(f"Chunking by {counts}...")
        # collect data into dataframes based on desired splitting
        counter = 0
        while True:
@@ -431,7 +432,7 @@

            # check binary has content in it
            if len(data) == 0:
                print("Processing Finished!")
                logging.info("Processing Finished!")
                return True

            # format_data
42 changes: 42 additions & 0 deletions packs/tests/logging_test.py
@@ -0,0 +1,42 @@
'''
Testing of the logger
'''

import os
import logging
from pathlib import Path

from packs.core.logging import setup_logging


def test_logfile_exists(tmp_path):
    '''
    check logfile exists upon initialisation
    '''
    log_name = 'test_log.log'
    setup_logging(tmp_path, log_name)

    full_path = os.path.join(tmp_path, log_name)
    assert os.path.exists(full_path)


def test_logfile_contents(tmp_path):
    '''
    check logfile contents match as expected when output.
    '''
    log_name = 'test_log.log'
    setup_logging(tmp_path, log_name)

    full_path = os.path.join(tmp_path, log_name)

    logging.warning("Test warning, this shouldn't cause any issues")

    # flush the logging handlers
    for handler in logging.getLogger().handlers:
        handler.flush()

    # open and check log content
    log_content = Path(full_path).read_text()
    assert "WARNING" in log_content
    assert "Test warning, this shouldn't cause any issues" in log_content

8 changes: 4 additions & 4 deletions packs/tests/tests.py
@@ -1,9 +1,9 @@
from packs.core.core_utils import check_test

import logging

def tests(config_file):
    print("This works as expected: testing")
    print("In here you should read the config provided")
    logging.info("This works as expected: testing")
    logging.info("In here you should read the config provided")

    if check_test(config_file):
        return
        return