diff --git a/python/lib/candidate.py b/python/lib/candidate.py
index bbb6981c6..d0994beb1 100644
--- a/python/lib/candidate.py
+++ b/python/lib/candidate.py
@@ -125,8 +125,8 @@ def create_candidate(self, db, participants_info):
             if 'project' in row and row['project'].lower() not in ("null", ""):
                 # search project id in Project table by its full name
                 project_info = db.pselect(
-                    "SELECT ProjectID FROM Project WHERE Name = %s",
-                    [row['project'], ]
+                    "SELECT ProjectID FROM Project WHERE Name = %s OR Alias = %s",
+                    [row['project'], row['project']]
                 )
                 if len(project_info) > 0:
                     self.project_id = project_info[0]['ProjectID']
diff --git a/python/loris_eeg_chunker/edf_to_chunks.py b/python/loris_eeg_chunker/edf_to_chunks.py
index ec2922df0..6434ae07b 100755
--- a/python/loris_eeg_chunker/edf_to_chunks.py
+++ b/python/loris_eeg_chunker/edf_to_chunks.py
@@ -39,8 +39,8 @@ def load_channels(exclude):
         eog=None,
         misc=None,
         exclude=(),
-        preload=False,
-        infer_types=False
+        infer_types=False,
+        file_type=mne_edf.FileType.EDF,
     )

     channel_names = edf_info['ch_names']
diff --git a/python/tests/integration/scripts/test_import_bids_dataset.py b/python/tests/integration/scripts/test_import_bids_dataset.py
new file mode 100644
index 000000000..4aff8c657
--- /dev/null
+++ b/python/tests/integration/scripts/test_import_bids_dataset.py
@@ -0,0 +1,77 @@
+from lib.db.queries.candidate import try_get_candidate_with_psc_id
+from lib.db.queries.config import set_config_with_setting_name
+from lib.db.queries.session import try_get_session_with_cand_id_visit_label
+from tests.util.database import get_integration_database_session
+from tests.util.file_system import assert_files_exist
+from tests.util.run_integration_script import run_integration_script
+
+
+def test_import_eeg_bids_dataset():
+    db = get_integration_database_session()
+
+    # Enable EEG chunking.
+    set_config_with_setting_name(db, 'useEEGBrowserVisualizationComponents', 'true')
+    db.commit()
+
+    process = run_integration_script([
+        'bids_import.py',
+        '--createcandidate', '--createsession',
+        '--directory', '/data/loris/incoming/Face13',
+    ])
+
+    # Check the return code.
+    assert process.returncode == 0
+
+    # Check that the candidate and session are present in the database.
+    candidate = try_get_candidate_with_psc_id(db, 'OTT166')
+    assert candidate is not None
+    session = try_get_session_with_cand_id_visit_label(db, candidate.cand_id, 'V1')
+    assert session is not None
+
+    # TODO: Add EEG-specific database checks once the EEG-specific ORM models have been created.
+
+    # Check that the BIDS files have been copied.
+    assert_files_exist('/data/loris/bids_imports', {
+        'Face13_BIDSVersion_1.1.0': {
+            'dataset_description.json': None,
+            'participants.tsv': None,
+            'README': None,
+            'sub-OTT166': {
+                'ses-V1': {
+                    'eeg': {
+                        'sub-OTT166_ses-V1_task-faceO_channels.tsv': None,
+                        'sub-OTT166_ses-V1_task-faceO_eeg.edf': None,
+                        'sub-OTT166_ses-V1_task-faceO_eeg.json': None,
+                        'sub-OTT166_ses-V1_task-faceO_electrodes.tsv': None,
+                        'sub-OTT166_ses-V1_task-faceO_events.tsv': None,
+                    }
+                }
+            }
+        }
+    })
+
+    # Check that the chunk files have been created.
+    assert_files_exist('/data/loris/bids_imports', {
+        'Face13_BIDSVersion_1.1.0_chunks': {
+            'sub-OTT166_ses-V1_task-faceO_eeg.chunks': {
+                'index.json': None,
+                'raw': {
+                    '0': {
+                        str(i): {
+                            '0': {
+                                '0.buf': None,
+                                '1.buf': None,
+                            }
+                        } for i in range(0, 128)
+                    },
+                    '1': {
+                        str(i): {
+                            '0': {
+                                f'{j}.buf': None for j in range(0, 58)
+                            }
+                        } for i in range(0, 128)
+                    }
+                }
+            }
+        }
+    })
diff --git a/python/tests/integration/scripts/test_run_dicom_archive_loader.py b/python/tests/integration/scripts/test_run_dicom_archive_loader.py
index 5c8ab18cf..041a5fee8 100644
--- a/python/tests/integration/scripts/test_run_dicom_archive_loader.py
+++ b/python/tests/integration/scripts/test_run_dicom_archive_loader.py
@@ -2,7 +2,7 @@
 from lib.db.queries.mri_upload import get_mri_upload_with_patient_name
 from lib.exitcode import GETOPT_FAILURE, INVALID_PATH, SELECT_FAILURE, SUCCESS
 from tests.util.database import get_integration_database_session
-from tests.util.file_system import check_file_tree
+from tests.util.file_system import assert_files_exist
 from tests.util.run_integration_script import run_integration_script


@@ -73,7 +73,7 @@ def test_successful_run_on_valid_tarchive_path():
     assert process.stderr == ""

     # Check that the expected files have been created
-    assert check_file_tree('/data/loris/assembly_bids', {
+    assert_files_exist('/data/loris/assembly_bids', {
         'sub-300001': {
             'ses-V2': {
                 'anat': {
diff --git a/python/tests/integration/scripts/test_run_nifti_insertion.py b/python/tests/integration/scripts/test_run_nifti_insertion.py
index efe17c2ec..46d9f06f3 100644
--- a/python/tests/integration/scripts/test_run_nifti_insertion.py
+++ b/python/tests/integration/scripts/test_run_nifti_insertion.py
@@ -18,7 +18,7 @@
     UNKNOWN_PROTOCOL,
 )
 from tests.util.database import get_integration_database_session
-from tests.util.file_system import check_file_tree
+from tests.util.file_system import assert_files_exist
 from tests.util.run_integration_script import run_integration_script


@@ -388,7 +388,7 @@ def test_nifti_mri_protocol_violated_scans_features():
     assert file_json_data is not None and file_json_data.value == f'{file_base_rel_path}.json'
     assert file_pic_data is None

-    assert check_file_tree('/data/loris/', {
+    assert_files_exist('/data/loris', {
         'assembly_bids': {
             'sub-400184': {
                 'ses-V3': {
@@ -549,7 +549,7 @@ def test_nifti_mri_violations_log_exclude_features():
     assert file_bvec_data is not None and file_bvec_data.value == f'{file_base_rel_path}.bvec'
     assert file_pic_data is None

-    assert check_file_tree('/data/loris/', {
+    assert_files_exist('/data/loris', {
         'assembly_bids': {
             'sub-400184': {
                 'ses-V3': {
@@ -634,7 +634,7 @@ def test_dwi_insertion_with_mri_violations_log_warning():
     assert file_bvec_data is not None and file_bvec_data.value == f'{file_base_rel_path}.bvec'
     assert file_pic_data is not None

-    assert check_file_tree('/data/loris/', {
+    assert_files_exist('/data/loris', {
         'assembly_bids': {
             'sub-400184': {
                 'ses-V3': {
diff --git a/python/tests/util/file_system.py b/python/tests/util/file_system.py
index 8852ef71f..4df2bb190 100644
--- a/python/tests/util/file_system.py
+++ b/python/tests/util/file_system.py
@@ -9,20 +9,17 @@
 """


-def check_file_tree(path: str, file_tree: FileTree):
+def assert_files_exist(path: str, file_tree: FileTree):
     """
-    Check that a path has at least all the directories and files of a file tree.
+    Assert that a path contains all the directories and files of a file tree.
""" if file_tree is None: - return os.path.isfile(path) + assert os.path.isfile(path) + return - if not os.path.isdir(path): - return False + assert os.path.isdir(path) for sub_dir_name, sub_file_tree in file_tree.items(): sub_dir_path = os.path.join(path, sub_dir_name) - if not check_file_tree(sub_dir_path, sub_file_tree): - return False - - return True + assert_files_exist(sub_dir_path, sub_file_tree)