diff --git a/.github/actions/setup-python/action.yml b/.github/actions/setup-python/action.yml index d4ae64b8d..80f0c3747 100644 --- a/.github/actions/setup-python/action.yml +++ b/.github/actions/setup-python/action.yml @@ -15,7 +15,7 @@ runs: cache: 'pip' - name: Install Python dependencies - run: pip install -r install/requirements/requirements.txt + run: pip install .[dev] shell: bash - name: Set up environment variables diff --git a/.gitignore b/.gitignore index 1cc19d7f3..6ce3ba922 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,6 @@ **/__pycache__/* *.pyc -.loris_mri/ +config/ python_virtualenvs/ environment **/.DS_Store diff --git a/DTIPrep/DTIPrepRegister.pl b/DTIPrep/DTIPrepRegister.pl index 2fd2e9cee..b84d78c8b 100755 --- a/DTIPrep/DTIPrepRegister.pl +++ b/DTIPrep/DTIPrepRegister.pl @@ -12,7 +12,7 @@ =head1 SYNOPSIS Available options are: --profile : name of the config file in C<../dicom-archive/.loris-mri> +-profile : name of the config file in C<../config> -DTIPrep_subdir : C subdirectory storing the processed files to be registered @@ -114,7 +114,7 @@ =head2 Methods # Define the table describing the command-line options my @args_table = ( - ["-profile", "string", 1, \$profile, "name of the config file in ../dicom-archive/.loris_mri."], + ["-profile", "string", 1, \$profile, "name of the config file in ../config."], ["-DTIPrep_subdir", "string", 1, \$DTIPrep_subdir, "DTIPrep subdirectory storing the processed files to be registered"], ["-DTIPrepProtocol", "string", 1, \$DTIPrepProtocol, "DTIPrep protocol used to obtain the output files"], ["-DTI_file", "string", 1, \$dti_file, "Native DWI dataset used to obtain the output files"], @@ -132,10 +132,10 @@ =head2 Methods print STDERR "$Usage\n\tERROR: missing -profile argument\n\n"; exit $NeuroDB::ExitCodes::PROFILE_FAILURE; } -{ package Settings; do "$ENV{LORIS_CONFIG}/.loris_mri/$profile" } +{ package Settings; do "$ENV{LORIS_CONFIG}/$profile" } if ( !@Settings::db ) { print STDERR "\n\tERROR: You 
don't have a \@db setting in the file " - . "$ENV{LORIS_CONFIG}/.loris_mri/$profile \n\n"; + . "$ENV{LORIS_CONFIG}/$profile \n\n"; exit $NeuroDB::ExitCodes::DB_SETTINGS_FAILURE; } if (!$DTIPrep_subdir) { diff --git a/DTIPrep/DTIPrep_pipeline.pl b/DTIPrep/DTIPrep_pipeline.pl index ff27bf8cf..3d7e203ed 100755 --- a/DTIPrep/DTIPrep_pipeline.pl +++ b/DTIPrep/DTIPrep_pipeline.pl @@ -13,7 +13,7 @@ =head1 SYNOPSIS -profile : name of config file in - C<../dicom-archive/.loris_mri> + C<../config> -list : file containing the list of raw diffusion MINC files (in C) @@ -140,7 +140,7 @@ =head2 Methods my ($list, @args); # Define the table describing the command-line options -my @args_table = (["-profile", "string", 1, \$profile, "name of config file in ../dicom-archive/.loris_mri" ], +my @args_table = (["-profile", "string", 1, \$profile, "name of config file in ../config" ], ["-list", "string", 1, \$list, "file containing the list of raw diffusion minc files (in assembly/DCCID/Visit/mri/native)." ], ["-DTIPrepVersion", "string", 1, \$DTIPrepVersion, "DTIPrep version used (if cannot be found in DTIPrep binary path)."], ["-mincdiffusionVersion","string", 1, \$mincdiffVersion, "mincdiffusion release version used (if cannot be found in mincdiffusion scripts path.)"], @@ -159,10 +159,10 @@ =head2 Methods print STDERR "$Usage\n\tERROR: missing -profile argument\n\n"; exit $NeuroDB::ExitCodes::PROFILE_FAILURE; } -{ package Settings; do "$ENV{LORIS_CONFIG}/.loris_mri/$profile" } +{ package Settings; do "$ENV{LORIS_CONFIG}/$profile" } if ( !@Settings::db ) { print STDERR "\n\tERROR: You don't have a \@db setting in the file " - . "$ENV{LORIS_CONFIG}/.loris_mri/$profile \n\n"; + . 
"$ENV{LORIS_CONFIG}/$profile \n\n"; exit $NeuroDB::ExitCodes::DB_SETTINGS_FAILURE; } @@ -239,7 +239,7 @@ =head2 Methods $nativedir =~ s/\/\//\//; $nativedir =~ s/\/$//; - + ####################### ####### Step 1: ####### Get SubjectID and Visit label ####################### @@ -248,19 +248,19 @@ =head2 Methods ####################### - ####### Step 2: ####### - If $runDTIPrep is set, create out directories. + ####### Step 2: ####### - If $runDTIPrep is set, create out directories. ####################### - If $runDTIPrep is not set, fetche out directories. my ($QCoutdir) = &getOutputDirectories($outdir, $subjID, $visit, $DTIPrepProtocol, $runDTIPrep); next if (!$QCoutdir); ####################### - ####### Step 3: ####### - Read DTIPrep XML protocol (will help to determine output names). + ####### Step 3: ####### - Read DTIPrep XML protocol (will help to determine output names). ####################### my ($protXMLrefs) = &DTI::readDTIPrepXMLprot($DTIPrepProtocol); next if (!$protXMLrefs); - # Additional checks to check whether DTIPrep or mincdiffusion tools will run post-processing. If the mincdiffusion tools will be used, then we should be able to have a version of the tool and the path to niak! + # Additional checks to check whether DTIPrep or mincdiffusion tools will run post-processing. If the mincdiffusion tools will be used, then we should be able to have a version of the tool and the path to niak! my $bCompute = $protXMLrefs->{entry}->{DTI_bCompute}->{value}; if (($bCompute eq 'No') && (!$mincdiffVersion)) { ($mincdiffVersion) = &identify_tool_version("minctensor.pl", '\/(mincdiffusion-[A-Z0-9._-]+)\/'); @@ -281,14 +281,14 @@ =head2 Methods ####################### - ####### Step 4: ####### - Fetch raw DTI files to process. - ####################### - Determine output names based on raw DTI file names and organize them into a hash ($DTIrefs). + ####### Step 4: ####### - Fetch raw DTI files to process. 
+ ####################### - Determine output names based on raw DTI file names and organize them into a hash ($DTIrefs). my ($DTIs_list, $DTIrefs)= &fetchData($nativedir, $DTI_volumes, $t1_scan_type, $QCoutdir, $DTIPrepProtocol, $protXMLrefs, $QCed2_step); next if ((!$DTIs_list) || (!$DTIrefs)); ####################### - ####### Step 5: ####### - Run preprocessing pipeline (mnc2nrrd + DTIPrep) if $runDTIPrep option is set. + ####### Step 5: ####### - Run preprocessing pipeline (mnc2nrrd + DTIPrep) if $runDTIPrep option is set. ####################### if ($runDTIPrep) { my ($pre_success) = &preprocessingPipeline($DTIs_list, $DTIrefs, $QCoutdir, $DTIPrepProtocol); @@ -301,7 +301,7 @@ =head2 Methods ####################### - ####### Step 6: ####### Check if DTIPrep outputs are available and convert nrrd files to mnc. + ####### Step 6: ####### Check if DTIPrep outputs are available and convert nrrd files to mnc. ####################### These outputs are: # - QCed.nrrd # - QCReport.txt @@ -312,8 +312,8 @@ =head2 Methods ####################### - ####### Step 7: ####### - ####################### + ####### Step 7: ####### + ####################### # - If bCompute is not set in DTIPrep protocol will run mincdiffusion tools and create FA, MD, RGB... maps # - If bCompute is set in DTIPrep protocol, will convert DTIPrep processed nrrd file into minc files and reinsert relevant header information if ($bCompute eq 'No') { @@ -324,7 +324,7 @@ =head2 Methods print LOG "\t==> Mincdiffusion outputs were found.\n"; next if (!$post_success); } elsif ($bCompute eq 'Yes') { - my ($DTIPrep_post_success) = &check_and_convert_DTIPrep_postproc_outputs($DTIs_list, $DTIrefs, $data_dir, $QCoutdir, $DTIPrepVersion); + my ($DTIPrep_post_success) = &check_and_convert_DTIPrep_postproc_outputs($DTIs_list, $DTIrefs, $data_dir, $QCoutdir, $DTIPrepVersion); next if (!$DTIPrep_post_success); } else { print LOG "\n\tERROR: Post processing tools won't be run for this dataset. 
\n"; @@ -340,11 +340,11 @@ =head2 Methods print LOG "# Register files into database."; print LOG "\n##################\n"; if ($RegisterFiles) { - ®ister_processed_files_in_DB($DTIs_list, - $DTIrefs, - $profile, - $QCoutdir, - $DTIPrepVersion, + ®ister_processed_files_in_DB($DTIs_list, + $DTIrefs, + $profile, + $QCoutdir, + $DTIPrepVersion, $mincdiffVersion ); } else { @@ -413,9 +413,9 @@ =head3 getIdentifiers($nativedir) =cut sub getIdentifiers { - my ($nativedir) = @_; + my ($nativedir) = @_; - my ($subjID, $visit) = &Settings::get_DTI_CandID_Visit($nativedir); + my ($subjID, $visit) = &Settings::get_DTI_CandID_Visit($nativedir); if ((!$subjID) || (!$visit)) { print LOG "\n#############################\n"; print LOG "WARNING:Cannot find ID,visit for $nativedir\n"; @@ -460,7 +460,7 @@ =head3 getOutputDirectories($outdir, $subjID, $visit, $DTIPrepProtocol, $runDTIP =cut sub getOutputDirectories { - my ($outdir, $subjID, $visit, $DTIPrepProtocol, $runDTIPrep) = @_; + my ($outdir, $subjID, $visit, $DTIPrepProtocol, $runDTIPrep) = @_; my ($QCoutdir) = &DTI::createOutputFolders($outdir, $subjID, $visit, $DTIPrepProtocol, $runDTIPrep); if (!$QCoutdir) { @@ -504,7 +504,7 @@ sub fetchData { my ($nativedir, $DTI_volumes, $t1_scan_type, $QCoutdir, $DTIPrepProtocol, $protXMLrefs, $QCed2_step) = @_; # Get DTI datasets - my ($DTIs_list) = &DTI::getRawDTIFiles($nativedir, $DTI_volumes); + my ($DTIs_list) = &DTI::getRawDTIFiles($nativedir, $DTI_volumes); if (@$DTIs_list == 0) { print LOG "\n#############################\n"; print LOG "WARNING: Could not find DTI files with $DTI_volumes volumes for in $nativedir.\n"; @@ -519,7 +519,7 @@ sub fetchData { # dti_file_1 -> Raw_nrrd => outputname # -> QCed_nrrd => outputname etc... (QCTxtReport, QCXmlReport, QCed_minc, QCProt) # dti_file_2 -> Raw_nrrd => outputname etc... 
- my ($DTIrefs) = &DTI::createDTIhashref($DTIs_list, $anat, $QCoutdir, $DTIPrepProtocol, $protXMLrefs, $QCed2_step); + my ($DTIrefs) = &DTI::createDTIhashref($DTIs_list, $anat, $QCoutdir, $DTIPrepProtocol, $protXMLrefs, $QCed2_step); return ($DTIs_list, $DTIrefs); } @@ -564,7 +564,7 @@ sub preprocessingPipeline { print LOG "\t1. Convert raw minc DTI to nrrd.\n"; my ($convert_status) = &preproc_mnc2nrrd($raw_nrrd, $dti_file); # 2. run DTIPrep pipeline on the raw nrrd file - print LOG "\t2. Run DTIPrep.\n"; + print LOG "\t2. Run DTIPrep.\n"; my ($DTIPrep_status) = &preproc_DTIPrep($QCed_nrrd, $raw_nrrd, $DTIPrepProtocol, $QCed2_nrrd); # 3. copy DTIPrep XML protocol used print LOG "\t3. Copy XML protocol used in output directory\n"; @@ -588,7 +588,7 @@ sub preprocessingPipeline { } else { return 1; } -} +} =pod @@ -607,7 +607,7 @@ =head3 preproc_mnc2nrrd($raw_nrrd, $dti_file) sub preproc_mnc2nrrd { my ($raw_nrrd, $dti_file) = @_; - + if (-e $raw_nrrd) { print LOG "\t\t -> Raw DTI already converted to nrrd.\n"; # set $convert_status to 1 as converted file already exists. @@ -679,7 +679,7 @@ sub preproc_copyXMLprotocol { print LOG "\t\t -> ERROR: Failed to copy DTIPrep protocol in output directory. \n\t Protocol to copy is: $DTIPrepProtocol. \n\tOutput directory is $QCoutdir.\n" if (!$copyProt_status); return $copyProt_status; } -} +} =pod @@ -716,7 +716,7 @@ sub check_and_convertPreprocessedFiles { print LOG "\n##################\n"; my ($foundPreprocessed) = &checkPreprocessOutputs($dti_file, $DTIrefs, $QCoutdir, $DTIPrepProtocol); - # Convert QCed_nrrd DTI to minc + # Convert QCed_nrrd DTI to minc my ($convert_status) = &convertPreproc2mnc($dti_file, $DTIrefs, $data_dir, $DTIPrepVersion) if ($foundPreprocessed); # If one of the steps above failed, postprocessing status will be set to failed for this dti_file, otherwise it will be set to success. 
@@ -735,7 +735,7 @@ sub check_and_convertPreprocessedFiles { if ($at_least_one_success == 0) { return undef; } else { - return 1; + return 1; } } @@ -794,7 +794,7 @@ sub checkPreprocessOutputs { print LOG $err_message; return undef; } -} +} =pod @@ -828,7 +828,7 @@ sub convertPreproc2mnc { # Convert QCed nrrd file back into minc file (with updated header) my ($insert_header, $convert_status); if (-e $QCed_nrrd) { - if ( ((!$QCed2_minc) && (-e $QCed_minc)) + if ( ((!$QCed2_minc) && (-e $QCed_minc)) || (($QCed2_minc) && (-e $QCed_minc) && (-e $QCed2_minc))) { print LOG "\t\t-> QCed minc(s) already exist(s).\n"; return 1; @@ -841,7 +841,7 @@ sub convertPreproc2mnc { ($insert_header) = &DTI::insertMincHeader($dti_file, $data_dir, $QCed2_minc, $QCTxtReport, $DTIPrepVersion) if (($QCed2_minc) && ($insert_header)); } } - + if (($convert_status) && ($insert_header)) { print LOG "\t\t-> QCed DTI successfully converted to minc.\n"; return 1; @@ -872,7 +872,7 @@ =head3 mincdiffusionPipeline($DTIs_list, $DTIrefs, $data_dir, $QCoutdir, ...) =cut sub mincdiffusionPipeline { - my ($DTIs_list, $DTIrefs, $data_dir, $QCoutdir, $DTIPrepProtocol, $mincdiffVersion, $niak_path) = @_; + my ($DTIs_list, $DTIrefs, $data_dir, $QCoutdir, $DTIPrepProtocol, $mincdiffVersion, $niak_path) = @_; my $at_least_one_success = 0; foreach my $dti_file (@$DTIs_list) { @@ -896,7 +896,7 @@ sub mincdiffusionPipeline { next; } - # Run mincdiffusion tools + # Run mincdiffusion tools print LOG "\t2. 
Running mincdiffusion tools on $QCed_minc (...)\n"; my ($mincdiff_status) = &runMincdiffusionTools($dti_file, $DTIrefs, $data_dir, $QCoutdir, $mincdiffVersion, $niak_path); @@ -916,7 +916,7 @@ sub mincdiffusionPipeline { if ($at_least_one_success == 0) { return undef; } else { - return 1; + return 1; } } @@ -949,10 +949,10 @@ sub checkMincdiffusionPostProcessedOutputs { my $MD = $DTIrefs->{$dti_file}{'Postproc'}{'MD'}{'minc'}; my $RGB = $DTIrefs->{$dti_file}{'Postproc'}{'RGB'}{'minc'}; - if ((-e $baseline) - && (-e $preproc_minc) - && (-e $anat_mask) - && (-e $anat_mask_diff) + if ((-e $baseline) + && (-e $preproc_minc) + && (-e $anat_mask) + && (-e $anat_mask_diff) && (-e $FA) && (-e $MD) && (-e $RGB)) { @@ -968,7 +968,7 @@ sub checkMincdiffusionPostProcessedOutputs { "\tRGB file: $RGB\n"; return undef; } -} +} =pod @@ -993,7 +993,7 @@ sub runMincdiffusionTools { # 1. Initialize variables # Raw anatomical - my $raw_anat = $DTIrefs->{$dti_file}{'raw_anat'}{'minc'}; + my $raw_anat = $DTIrefs->{$dti_file}{'raw_anat'}{'minc'}; # DTIPrep preprocessing outputs my $QCed_minc = $DTIrefs->{$dti_file}{'Preproc'}{'QCed'}{'minc'}; my $QCTxtReport = $DTIrefs->{$dti_file}{'Preproc'}{'QCReport'}{'txt'}; @@ -1041,9 +1041,9 @@ sub runMincdiffusionTools { $DTIrefs->{$dti_file}{'postproc_hdr_success'} = "failed"; } } - + # Write return statement - if (($mincdiff_preproc_status) && ($minctensor_status) && ($insert_success)) { + if (($mincdiff_preproc_status) && ($minctensor_status) && ($insert_success)) { return 1; } else { return undef; @@ -1075,11 +1075,11 @@ sub check_and_convert_DTIPrep_postproc_outputs { my $at_least_one_success = 0; foreach my $dti_file (@$DTIs_list) { - + # Check if all DTIPrep post-processing output were created my $QCTxtReport = $DTIrefs->{$dti_file}->{'Preproc'}->{'QCReport'}->{'txt'}; my ($nrrds_found, $mincs_created, $hdrs_inserted) = &DTI::convert_DTIPrep_postproc_outputs($dti_file, $DTIrefs, $data_dir, $QCTxtReport, $DTIPrepVersion); - + if 
(($nrrds_found) && ($mincs_created) && ($hdrs_inserted)) { print LOG "All DTIPrep post-processed data were found and successfuly converted to minc files with header information.\n"; $DTIrefs->{$dti_file}{'postproc_convert_status'}= "success"; @@ -1091,12 +1091,12 @@ sub check_and_convert_DTIPrep_postproc_outputs { next; } } - + #Return undef if variable $at_least_one success is null, otherwise return 1. if ($at_least_one_success == 0) { return undef; } else { - return 1; + return 1; } } @@ -1112,7 +1112,7 @@ =head3 register_processed_files_in_DB($DTIs_list, $DTIrefs, $profile, $QCoutdir, INPUT: - $DTIs_list : list of native DTI files processed - $DTIrefs : hash containing the processed filenames - - $profile : config file (in C<../dicom-archive/.loris_mri>) + - $profile : config file (in C<../config>) - $QCoutdir : output directory containing the processed files - $DTIPrepVersion : C version used to obtain QCed files - $mincdiffVersion: C tool version used @@ -1122,9 +1122,9 @@ =head3 register_processed_files_in_DB($DTIs_list, $DTIrefs, $profile, $QCoutdir, sub register_processed_files_in_DB { my ($DTIs_list, $DTIrefs, $profile, $QCoutdir, $DTIPrepVersion, $mincdiffVersion) = @_; - # Loop through raw DTIs list + # Loop through raw DTIs list foreach my $dti_file (@$DTIs_list) { - + # If post processing pipeline used was mincdiffusion, we need to know which raw anatomical file was used to generate brain masks. # If post processing pipeline used was DTIPrep, no need to specify an anatomical raw dataset when calling DTIPrepRegister.pl my $postprocessingtool = $DTIrefs->{$dti_file}->{'Postproc'}->{'Tool'}; diff --git a/LICENSE b/LICENSE new file mode 100644 index 000000000..94a9ed024 --- /dev/null +++ b/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. 
+ + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. 
For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. 
The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. 
+ + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. 
The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. 
+ + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. 
+ + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. 
This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. 
For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. 
Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. 
+ + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. 
Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. 
+ + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. 
+ + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. 
The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. 
THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. 
+ + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + Copyright (C) + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +. 
diff --git a/batch_uploads_imageuploader.pl b/batch_uploads_imageuploader.pl index 088a414e5..6ab12d888 100755 --- a/batch_uploads_imageuploader.pl +++ b/batch_uploads_imageuploader.pl @@ -13,7 +13,7 @@ =head1 SYNOPSIS Available options are: --profile: name of the config file in C<../dicom-archive/.loris_mri> +-profile: name of the config file in C<../config> -verbose: if set, be verbose @@ -63,7 +63,7 @@ =head2 Methods my $profile = ''; -my $upload_id = undef; +my $upload_id = undef; my ($debug, $verbose) = (0,1); my $stdout = ''; my $stderr = ''; @@ -72,7 +72,7 @@ =head2 Methods [ "Basic options", "section" ], [ "-profile", "string", 1, \$profile, - "name of config file in ../dicom-archive/.loris_mri" + "name of config file in ../config" ], ["-verbose", "boolean", 1, \$verbose, "Be verbose."] ); @@ -119,10 +119,10 @@ =head2 Methods print STDERR "$Usage\n\tERROR: missing -profile argument\n\n"; exit $NeuroDB::ExitCodes::PROFILE_FAILURE; } -{ package Settings; do "$ENV{LORIS_CONFIG}/.loris_mri/$profile" } +{ package Settings; do "$ENV{LORIS_CONFIG}/$profile" } if ( !@Settings::db ) { print STDERR "\n\tERROR: You don't have a \@db setting in the file " - . "$ENV{LORIS_CONFIG}/.loris_mri/$profile \n\n"; + . 
"$ENV{LORIS_CONFIG}/$profile \n\n"; exit $NeuroDB::ExitCodes::DB_SETTINGS_FAILURE; } @@ -158,7 +158,7 @@ =head2 Methods -my ($stdoutbase, $stderrbase) = ("$data_dir/batch_output/imuploadstdout.log", +my ($stdoutbase, $stderrbase) = ("$data_dir/batch_output/imuploadstdout.log", "$data_dir/batch_output/imuploadstderr.log"); while($_ = $ARGV[0] // '', /^-/) { @@ -201,7 +201,7 @@ =head2 Methods my $phantom = $phantomarray[$counter-1]; my $patientname = $patientnamearray[$counter-1]; - ## Ensure that + ## Ensure that ## 1) the uploaded file is of type .tgz or .tar.gz or .zip ## 2) check that input file provides phantom details (Y for phantom, N for real candidates) ## 3) for non-phantoms, the patient name and path entries are identical; this mimics the imaging uploader in the front-end @@ -235,7 +235,7 @@ =head2 Methods } } - ## Populate the mri_upload table with necessary entries and get an upload_id + ## Populate the mri_upload table with necessary entries and get an upload_id $upload_id = insertIntoMRIUpload(\$dbh, $patientname, diff --git a/batch_uploads_tarchive.pl b/batch_uploads_tarchive.pl index d7dbc7422..8d1cea290 100755 --- a/batch_uploads_tarchive.pl +++ b/batch_uploads_tarchive.pl @@ -18,7 +18,7 @@ =head1 DESCRIPTION from C, one file name per line. Each file name is assumed to be a path relative to C (see below). 
-The following settings of file F<$ENV{LORIS_CONFIG}/.loris-mri/prod> affect the +The following settings of file F<$ENV{LORIS_CONFIG}/prod> affect the behvaviour of C (where C<$ENV{LORIS_CONFIG}> is the value of the Unix environment variable C): @@ -100,7 +100,7 @@ =head1 AUTHORS my $profile = undef; my $verbose = 0; -my $profile_desc = "name of the config file in ../dicom-archive/.loris_mri"; +my $profile_desc = "name of the config file in ../config"; my @opt_table = ( [ "Basic options", "section" ], @@ -148,10 +148,10 @@ =head1 AUTHORS exit 3; } -{ package Settings; do "$ENV{LORIS_CONFIG}/.loris_mri/$profile" } +{ package Settings; do "$ENV{LORIS_CONFIG}/$profile" } if ($profile && !@Settings::db) { print "\n\tERROR: You don't have a configuration file named ". - "'$profile' in: $ENV{LORIS_CONFIG}/.loris_mri/ \n\n"; + "'$profile' in: $ENV{LORIS_CONFIG}/ \n\n"; exit 2; } diff --git a/dicom-archive/dicomSummary.pl b/dicom-archive/dicomSummary.pl index 3fa01a78b..a238eb904 100755 --- a/dicom-archive/dicomSummary.pl +++ b/dicom-archive/dicomSummary.pl @@ -1,4 +1,4 @@ -#!/usr/bin/perl +#!/usr/bin/perl # J-Sebastian Muehlboeck 2006 # sebas@bic.mni.mcgill.ca # Perl tool based on DCMSUM.pm and DICOM.pm to create a summary report for a given dir containing dicoms @@ -25,8 +25,7 @@ =head1 SYNOPSIS -dbreplace : use this option only if the DICOM data changed and need to be updated in the database --profile : specify the name of the config file residing in C<.loris_mri> of the - current directory +-profile : specify the name of the config file residing in the config directory -tmp : to specify a temporary directory. 
It will contain the summaries if used with -noscreen option @@ -94,9 +93,9 @@ =head2 METHODS - a tool for producing an informative summary for dicoms in a given directory - a quick way to get an idea on what there is for a given subject - a quick way to obtain information about the suject, scanner and acquisition parameters -- a quick way of listing all acquisitions aquired for a given subject -- a convenient way to compare two directories in terms of the dicom data they contain... - or the contents of a directory with a database repository +- a quick way of listing all acquisitions aquired for a given subject +- a convenient way to compare two directories in terms of the dicom data they contain... + or the contents of a directory with a database repository Usage:\n\t $0 [ -comparedir ] [ -tmp ] [options] \n\n See $0 -help for more info\n\n"; @@ -108,9 +107,9 @@ =head2 METHODS ["-dbcompare","boolean",1, \$databasecomp, "Compare with database. Will only work if you actually archived your data using a database."], ["-database","boolean", 1, \$dbase, "Use a database if you have one set up for you. Just trying will fail miserably"], ["-dbreplace","boolean",1, \$dbreplace, "Use this option only if your dicom data changed and you want to re-insert the new summary"], - ["-profile","string",1, \$profile, "Specify the name of the config file which resides in .loris_mri in the current directory."], + ["-profile","string",1, \$profile, "Specify the name of the config file which resides in the config directory."], + - ["Output options", "section"], ["-screen","boolean",1, \$screen, "Print output to the screen."], # fixme add more options based on the capabilities of the DCMSUM class @@ -118,7 +117,7 @@ =head2 METHODS ["-tmp","string",1, \$temp, "You may specify a tmp dir. 
It will contain the summaries, if you use -noscreen"], ["-xdiff","boolean",1, \$xdiff, "You are comparing two folders or with the database and you want to see the result with sdiff."], ["-batch","boolean",1, \$batch, "Run in batchmode. Will log differences to a /tmp/diff.log"], - + ["General options", "section"], ["-verbose","boolean",1, \$verbose, "Be verbose."], ["-version","boolean",1, \$version, "Print version and revision number and exit"], @@ -130,8 +129,8 @@ =head2 METHODS if ($version) { print "$versionInfo\n"; exit; } # checking for profile settings -if($profile && -f "$ENV{LORIS_CONFIG}/.loris_mri/$profile") { { package Settings; do "$ENV{LORIS_CONFIG}/.loris_mri/$profile" } } -if ($profile && !@Settings::db) { print "\n\tERROR: You don't have a configuration file named '$profile' in: $ENV{LORIS_CONFIG}/.loris_mri/ \n\n"; exit 33; } +if($profile && -f "$ENV{LORIS_CONFIG}/$profile") { { package Settings; do "$ENV{LORIS_CONFIG}/$profile" } } +if ($profile && !@Settings::db) { print "\n\tERROR: You don't have a configuration file named '$profile' in: $ENV{LORIS_CONFIG}/ \n\n"; exit 33; } # basic error checking on dcm dir @@ -148,7 +147,7 @@ =head2 METHODS if ($xdiff || $compare || $batch || $databasecomp || $dbase){ $screen = undef; } elsif (!$compare || !$databasecomp) { $xdiff = undef; } # you can't compare with db and a dir at the same time -if (($compare || $databasecomp) && $dbase) { print $Usage; +if (($compare || $databasecomp) && $dbase) { print $Usage; print "\t Please consider that some option combinations do not make sense. \n\n"; exit 1; } @@ -162,19 +161,19 @@ =head2 METHODS push @dcmDirs, $dcm_folder; if ($compare) { push @dcmDirs, $compare; } # if compare is set # This will make sure that a user specified tmp dir does exist and is writeable -my $TmpDir = $temp || "/tmp"; if (! -e $TmpDir) { print "This is not a valid tmp dir choice: \n".$!; exit 2; } +my $TmpDir = $temp || "/tmp"; if (! 
-e $TmpDir) { print "This is not a valid tmp dir choice: \n".$!; exit 2; } elsif(! -w $TmpDir) { print "Sorry you have no permission to use $TmpDir as tmp dir\n"; exit 2; } # establish database connection if database option is set my $dbh; if ($dbase) { $dbh = &NeuroDB::DBI::connect_to_db(@Settings::db); print "Testing for database connectivity. \n" if $verbose; $dbh->disconnect(); print "Database is available.\n\n" if $verbose; } -####################### main ########################################### main ########################################### +####################### main ########################################### main ########################################### my $count = 0; my ($studyUnique, $metaname, @metaFiles, $dcmdir, $sumTypeVersion); -# this silly header will only show, if you choose to send your output to the screen. +# this silly header will only show, if you choose to send your output to the screen. if ($screen){ &silly_head(); } foreach $dcmdir (@dcmDirs) { @@ -197,8 +196,8 @@ =head2 METHODS # print the summary $summary->dcmsummary(); - -# If output went to a meta file, rename it and give it a count if -compare was specified. + +# If output went to a meta file, rename it and give it a count if -compare was specified. 
if (!$screen) { close META; my $newName; @@ -208,7 +207,7 @@ =head2 METHODS push @metaFiles, $newName; `$move`; } -# Print to stout again +# Print to stout again select (STDOUT); print "Done with $metaname\n" if $verbose; @@ -226,7 +225,7 @@ =head2 METHODS # END OF LOOP ####################################################################################### my $returnVal = 0; - + # if -databasecompare has been given look for an entry based on unique studyID if ($databasecomp) { my $conflict = &version_conflict($studyUnique); @@ -234,8 +233,8 @@ =head2 METHODS $metaFiles[1] = &read_db_metadata($studyUnique); if (!$metaFiles[1]) { print "\nYou never archived this study or you are looking in the wrong database.\n\n"; exit; } if ($xdiff) { $diff = "sdiff $metaFiles[0] $metaFiles[1]"; system($diff); } - else { - $diff = "diff -q $metaFiles[0] $metaFiles[1]"; + else { + $diff = "diff -q $metaFiles[0] $metaFiles[1]"; my $Comp = `$diff`; if ($Comp ne "") { print "There are differences\n" if $verbose; $returnVal = 99; } else { print "Comparing $dcm_folder with the database returned no differences. 
Smile :)\n" if $verbose; } @@ -282,7 +281,7 @@ sub read_db_metadata { $dbh = &NeuroDB::DBI::connect_to_db(@Settings::db); print "Getting data from database.\n" if $verbose; (my $query = <$dbcomparefile") || die ("Cannot Open File"); - print DBDATA "$dbmeta"; + print DBDATA "$dbmeta"; close(DBDATA); return $dbcomparefile; } @@ -350,10 +349,10 @@ =head3 silly_head() sub silly_head { print < in the current directory + the config directory -centerName : Specify the symbolic center name to be stored alongside the DICOM institution @@ -124,7 +124,7 @@ =head2 Methods ["-clobber", "boolean", 1, \$clobber, "Use this option only if you want to replace the resulting tarball!"], ["-profile","string",1, \$profile, "Specify the name of the config file - which resides in .loris_mri in the current directory."], + which resides in the config directory."], ["-centerName","string",1, \$neurodbCenterName, "Specify the symbolic center name to be stored alongside the DICOM institution."], ["General options", "section"], @@ -146,12 +146,12 @@ =head2 Methods print STDERR "$Usage\n\tERROR: missing -profile argument\n\n"; exit $NeuroDB::ExitCodes::PROFILE_FAILURE; } -if(-f "$ENV{LORIS_CONFIG}/.loris_mri/$profile") { - { package Settings; do "$ENV{LORIS_CONFIG}/.loris_mri/$profile" } +if(-f "$ENV{LORIS_CONFIG}/$profile") { + { package Settings; do "$ENV{LORIS_CONFIG}/$profile" } } if ( !@Settings::db ) { print STDERR "\n\tERROR: You don't have a \@db setting in the file " - . "$ENV{LORIS_CONFIG}/.loris_mri/$profile \n\n"; + . "$ENV{LORIS_CONFIG}/$profile \n\n"; exit $NeuroDB::ExitCodes::DB_SETTINGS_FAILURE; } # The source and the target dir have to be present and must be directories. 
diff --git a/dicom-archive/updateMRI_Upload.pl b/dicom-archive/updateMRI_Upload.pl index c0983f39c..1a253d69f 100755 --- a/dicom-archive/updateMRI_Upload.pl +++ b/dicom-archive/updateMRI_Upload.pl @@ -17,46 +17,46 @@ =head1 SYNOPSIS =over 2 =item * -B<-profile prod> : (mandatory) path (absolute or relative to the current directory) of the +B<-profile prod> : (mandatory) path (absolute or relative to the current directory) of the profile file =item * B<-tarchivePath tarchivePath> : (mandatory) absolute path to the DICOM archive =item * -B<-source_location source_location> : (mandatory) value to set column +B<-source_location source_location> : (mandatory) value to set column C for the newly created record in table C (see below) =item * B<-verbose> : be verbose -=back +=back =head1 DESCRIPTION This script first starts by reading the F file (argument passed to the C<-profile> switch) to fetch the C<@db> variable, a Perl array containing four elements: the database -name, the database user name used to connect to the database, the password and the -database hostname. It then checks for an entry in the C table with the same -C as the DICOM archive passed on the command line. Let C be the -DICOM archive record found in the C table. The script will then proceed to scan table -C for a record with the same C as C's. If there is none (which is the +name, the database user name used to connect to the database, the password and the +database hostname. It then checks for an entry in the C table with the same +C as the DICOM archive passed on the command line. Let C be the +DICOM archive record found in the C table. The script will then proceed to scan table +C for a record with the same C as C's. 
If there is none (which is the expected outcome), it will insert a record in C with the following properties/values: =over 2 =item * C : Unix username of the person currently running F - -=item * + +=item * C: timestamp representing the moment at which F was run - + =item * C: value of C for record C in table C - + =item * C: argument of the C<-source_location> switch passed on the command line - + =back If there already is an entry in C with the same C as C's, the script @@ -119,8 +119,8 @@ =head1 AUTHORS my $Help = < and McGill Centre for Integrative -Neuroscience +LORIS community and McGill Centre for Integrative +Neuroscience =cut @@ -84,35 +84,35 @@ Neuroscience ### 4.2.1 - DICOM to MINC pipeline flow -A very brief illustration summarizing the main outcomes at different steps in -the execution of the pipeline are shown below. +A very brief illustration summarizing the main outcomes at different steps in +the execution of the pipeline are shown below. ![pipeline_flow](images/pipeline_schematic_dcm2mnc.png) -This figure highlights the few -key scripts in the pipeline execution, the order in which they call each other, -and their interactions with the diverse libraries and utility classes. It +This figure highlights the few +key scripts in the pipeline execution, the order in which they call each other, +and their interactions with the diverse libraries and utility classes. It therefore serves as a tool to help project developers understand the pipeline quickly, and develop independently project customizations and debugging skills. ### 4.2.2 - DICOM to BIDS pipeline flow A very brief illustration of the key and expected outcomes at different steps in -the execution of the pipeline is shown below. +the execution of the pipeline is shown below. ![pipeline_flow_dcm2bids](images/pipeline_schematic_dcm2bids.png) -The DICOM insertion steps are quire similar to the DICOM to MINC pipeline flow. 
After the +The DICOM insertion steps are quite similar to the DICOM to MINC pipeline flow. After the `dicomTar.pl` step, a python script is executed. That python script will perform -the DICOM archive validation, dcm2niix conversion to generate the BIDS files, +the DICOM archive validation, dcm2niix conversion to generate the BIDS files, protocol identification and pic creation. ## 4.3 - Common insertion scripts re-run -The flow in the diagram above corresponds to a new upload. Occasionally, -however, projects might need to modify (partially or in full) the outcome of an -inserted scan. The next section highlights the "how-to" of the most commonly -encountered cases. +The flow in the diagram above corresponds to a new upload. Occasionally, +however, projects might need to modify (partially or in full) the outcome of an -inserted scan. The next section highlights the "how-to" of the most commonly +encountered cases. ### 4.3.1 Bypassing protocol violation checks @@ -140,7 +140,7 @@ See also: [MRI-PR#141](https://github.com/aces/Loris-MRI/pull/141) for more NIfTI volumes with their JSON sidecar files can be **force-loaded** into LORIS by running: ``` -python/run_nifti_insertion.py --loris_scan_type t2w --bypass_extra_checks --create_pic --profile database_config.py --force --tarchive_path /data/project/dataTransfer/library/2009/DCM_2009-09-25_project_20110214_185904581.tar --nifti_path /data/project/data/trashbin/TarLoad-3-34-pVzGC5/xxx0067_703739_v12_20090925_222403_18e1_mri.nii.gz --json_path /data/project/data/trashbin/TarLoad-3-34-pVzGC5/xxx0067_703739_v12_20090925_222403_18e1_mri.json +python/run_nifti_insertion.py --loris_scan_type t2w --bypass_extra_checks --create_pic --profile config.py --force --tarchive_path /data/project/dataTransfer/library/2009/DCM_2009-09-25_project_20110214_185904581.tar --nifti_path /data/project/data/trashbin/TarLoad-3-34-pVzGC5/xxx0067_703739_v12_20090925_222403_18e1_mri.nii.gz --json_path
/data/project/data/trashbin/TarLoad-3-34-pVzGC5/xxx0067_703739_v12_20090925_222403_18e1_mri.json ``` Note carefully the following arguments: @@ -153,15 +153,15 @@ Note carefully the following arguments: - If one of the final steps such as the MINC (or BIDS) conversion is failing, you may wish to just re-run the `tarchiveLoader.pl` (or `run_dicom_archive_loader.py`) script. - + > When the need arises to re-load imaging data in LORIS, it is generally not sufficient to just re-run the MINC/NIfTI loading step (`tarchiveLoader.pl` or `batch_uploads_tarchive`). The pipeline steps must be re-run starting with `dicomTar.pl` (see section 5.4 of [Pipeline Triggering Options documentation](05-PipelineLaunchOptions.md)). -In general, to re-load an imaging dataset through the pipeline from the start - (from `dicomTar.pl`) -- Ensure entries from the previous attempt to load the +In general, to re-load an imaging dataset through the pipeline from the start + (from `dicomTar.pl`) -- Ensure entries from the previous attempt to load the dataset have been removed from the following database tables: - `parameter_file` @@ -187,20 +187,20 @@ For backing up, re-labelling and re-loading MRI datasets with QC information, ### 4.3.3 Multiple scanner datasets per session In cases where a subject was scanned in two scanner sessions as part of the same - study Timepoint, anonymize both DICOM datasets using the same Visit Label in - the Patient Name (or Patient ID) field of the DICOM, and upload as two - separate DICOM datasets. The insertion pipeline will automatically - associate and display both sets of images acquired in both scanner sessions - under the same `session` table record. - + study Timepoint, anonymize both DICOM datasets using the same Visit Label in + the Patient Name (or Patient ID) field of the DICOM, and upload as two + separate DICOM datasets. 
The insertion pipeline will automatically + associate and display both sets of images acquired in both scanner sessions + under the same `session` table record. + ## 4.4 - MRI upload deletion script -As of release 21.0 of LORIS-MRI, a deletion script has been added to the tools -directory of the repository. This deletion script allows to delete completely an MRI -upload from the filesystem and database or remove specific MINC files derived +As of release 21.0 of LORIS-MRI, a deletion script has been added to the tools +directory of the repository. This deletion script allows to delete completely an MRI +upload from the filesystem and database or remove specific MINC files derived from the MRI upload. Note that by default, all removed data will be backed up. -Detailed information about the script can be found in: +Detailed information about the script can be found in: https://github.com/aces/Loris-MRI/blob/21.0-dev/docs/scripts_md/delete_imaging_upload.md > Accordng to chosen options, deleting values can generate backup files with `mysqldump`. diff --git a/docs/05-PipelineLaunchOptions.md b/docs/05-PipelineLaunchOptions.md index 5fe1cb5a0..c366ca751 100644 --- a/docs/05-PipelineLaunchOptions.md +++ b/docs/05-PipelineLaunchOptions.md @@ -2,66 +2,66 @@ ## 5.1 - Pipeline Launch Options for DICOM and HRRT datasets -Scans upload into LORIS and insertion pipeline's triggering can be done in a few -ways depending on each project's needs. The best choice will depend on the study -workflow and protocol. For example, it will depend on whether data is collected -retrospectively or prospectively, and/or whether the MRI protocol is harmonized +Scans upload into LORIS and insertion pipeline's triggering can be done in a few +ways depending on each project's needs. The best choice will depend on the study +workflow and protocol. 
For example, it will depend on whether data is collected +retrospectively or prospectively, and/or whether the MRI protocol is harmonized across all the sites involved in the study. The different options available are illustrated in the graph below. ![PipelineLaunchOptions](images/PipelineLaunchOptions.png) -Regardless of how the project chooses to upload scans and trigger the insertion, -the automated quality control and MRI protocol checks performed by the pipeline +Regardless of how the project chooses to upload scans and trigger the insertion, +the automated quality control and MRI protocol checks performed by the pipeline should remain identical. -It is also worth mentioning that all the options illustrated here assume that +It is also worth mentioning that all the options illustrated here assume that the candidate and visit are already registered in the LORIS database. -In the next three sub-sections, the three launch options illustrated in the -figure above will be briefly highlighted and the exact command needed to launch +In the next three sub-sections, the three launch options illustrated in the +figure above will be briefly highlighted and the exact command needed to launch the pipeline shown. Details about the scripts themselves can be found in the [Scripts](04-Scripts.md) section. 
- + ### 5.1.1 Option 1 -Triggering the pipeline is done from the `/opt/$PROJECT/bin/mri` directory as +Triggering the pipeline is done from the `/opt/$PROJECT/bin/mri` directory as follows: ``` uploadNeuroDB/imaging_upload_file.pl -profile prod -upload_id $UploadIDID /PATH/TO/UPLOAD/PSCID_CandID_VisitLabel_OptionalSuffix.zip -verbose ``` -where `$UploadID` is the number corresponding to the `UploadID` column in the -Imaging Uploader (`mri_upload`) table, and `/PATH/TO/UPLOAD/` is typically the +where `$UploadID` is the number corresponding to the `UploadID` column in the +Imaging Uploader (`mri_upload`) table, and `/PATH/TO/UPLOAD/` is typically the `/data/incoming/` directory. -This is a typical option for a project prospectively collecting data with +This is a typical option for a project prospectively collecting data with -1. multiple sites involved, -2. a designated user per site for collecting scans from the scanner console and -uploading to LORIS, and -3. a project's imaging specialist monitoring new uploads, and launching the -insertion pipeline manually, for every new `UploadID` separately. +1. multiple sites involved, +2. a designated user per site for collecting scans from the scanner console and +uploading to LORIS, and +3. a project's imaging specialist monitoring new uploads, and launching the +insertion pipeline manually, for every new `UploadID` separately. -### 5.1.2 Option 2 +### 5.1.2 Option 2 -Triggering the pipeline from the `/opt/$PROJECT/bin/mri` directory can also be -achieved as follows: +Triggering the pipeline from the `/opt/$PROJECT/bin/mri` directory can also be +achieved as follows: ``` uploadNeuroDB/imaging_upload_file_cronjob.pl -profile prod -verbose ``` - -The script `imaging_upload_file_cronjob.pl` does not require a specific -`$UploadID` as an argument because it automatically cycles through -*all new uploads* on which the pipeline has not been triggered yet, and -launches it. 
As such, this option is used in similar scenarios as Option 1, -but with the added advantage of not needing to continuously monitor for new -uploads (the script can be added to the system's crontab to launch at + +The script `imaging_upload_file_cronjob.pl` does not require a specific +`$UploadID` as an argument because it automatically cycles through +*all new uploads* on which the pipeline has not been triggered yet, and +launches it. As such, this option is used in similar scenarios as Option 1, +but with the added advantage of not needing to continuously monitor for new +uploads (the script can be added to the system's crontab to launch at pre-scheduled times, as per the project's requirements). @@ -70,28 +70,28 @@ pre-scheduled times, as per the project's requirements). The insertion pipeline can also be triggered using the command: ``` -./batch_uploads_imageuploader -profile prod < scans_list.txt > log_batch_imageuploader.txt 2>&1 +./batch_uploads_imageuploader -profile prod < scans_list.txt > log_batch_imageuploader.txt 2>&1 ``` -This is an option that addresses retrospectively collected data where uploading -hundreds of scans, one scan at a time, using the LORIS Imaging Uploader -user-friendly interface is impractical. It is also the option of choice for -prospective studies that want to benefit from tracking scans through the Imaging -Uploader while automating the upload and insertion process without a user/GUI -interface interaction. +This is an option that addresses retrospectively collected data where uploading +hundreds of scans, one scan at a time, using the LORIS Imaging Uploader +user-friendly interface is impractical. It is also the option of choice for +prospective studies that want to benefit from tracking scans through the Imaging +Uploader while automating the upload and insertion process without a user/GUI +interface interaction. -In this case, the scans should be transferred to the LORIS-MRI filesystem (for -example `/data/incoming/`). 
In addition, an input text file (such as -`scans_list.txt`) listing one dataset per line should be provided (see example +In this case, the scans should be transferred to the LORIS-MRI filesystem (for +example `/data/incoming/`). In addition, an input text file (such as +`scans_list.txt`) listing one dataset per line should be provided (see example below). -Each line in that text file should include the following information separated +Each line in that text file should include the following information separated by spaces: -1. the full path to the zipped DICOM dataset (`.zip`, `.tgz`, `.tar.gz`), +1. the full path to the zipped DICOM dataset (`.zip`, `.tgz`, `.tar.gz`), 2. Y or N depending on whether the scan is for a phantom or not, and -3. the patient name following the `PSCID_CandID_VisitLabel` LORIS convention for +3. the patient name following the `PSCID_CandID_VisitLabel` LORIS convention for real candidates. Leave BLANK for phantoms. @@ -111,8 +111,8 @@ The BIDS dataset to import has to: - pass the BIDS validator of [pybids](https://github.com/bids-standard/pybids) - contain a `participants.tsv` file at its root. Ideally, this file will contain the following columns: - - `participant_id`: the PSCID of the candidate - - `date_of_birth`: the date of birth (in YYYY-MM-DD) to use when creating the candidate in LORIS + - `participant_id`: the PSCID of the candidate + - `date_of_birth`: the date of birth (in YYYY-MM-DD) to use when creating the candidate in LORIS - `sex`: the sex to use when creating the candidate in LORIS (value examples: `M`/`F` or `female`/`male`) - `site`: the MRI alias of the LORIS site to associate the candidate with when creating the candidate in LORIS. This field is optional. If provided, the entry should match the MRI alias of a site already populated in the database. If missing or invalid, NULL will be used instead. - `project`: the name of the LORIS project to associate the candidate with when creating the candidate in LORIS. 
This field is optional. If provided, the entry should match the name of a project already populated in the database. If missing or invalid, NULL will be used instead. @@ -162,9 +162,9 @@ In cases where the `site`, `project` or `cohort` are missing from the `participa -### 5.2.2 - Pipeline Launch +### 5.2.2 - Pipeline Launch -For now, the only way to run the pipeline is by running the `bids_import.py` +For now, the only way to run the pipeline is by running the `bids_import.py` script manually via the terminal. To display the help section, run the following in the terminal: @@ -175,10 +175,11 @@ bids_import.py -h This will display the following help section: ```bash -usage : bids_import -d -p +usage : bids_import -d -p -options: - -p, --profile : name of the python database config file in dicom-archive/.loris-mri +options: + -p, --profile : name of the python database config file in the config + directory -d, --directory : BIDS directory to parse & insert into LORIS -c, --createcandidate: to create BIDS candidates in LORIS (optional) -s, --createsession : to create BIDS sessions in LORIS (optional) @@ -187,29 +188,29 @@ options: To run the BIDS import, simply run: ```bash -bids_import -d /PATH/TO/BIDS/TO/IMPORT -p database_config.py +bids_import -d /PATH/TO/BIDS/TO/IMPORT -p config.py ``` -If you wish to create candidates when running the import script, the `-c` -option needs to be added. To create sessions when running the import script, +If you wish to create candidates when running the import script, the `-c` +option needs to be added. To create sessions when running the import script, the `-s` option need to be added as well. 
```bash -bids_import -d /PATH/TO/BIDS/TO/IMPORT -p database_config.py -c -s +bids_import -d /PATH/TO/BIDS/TO/IMPORT -p config.py -c -s ``` -Finally, the verbose option can be turned on by using the option `-v` when +Finally, the verbose option can be turned on by using the option `-v` when calling the script: ```bash -bids_import -d /PATH/TO/BIDS/TO/IMPORT -p database_config.py -v +bids_import -d /PATH/TO/BIDS/TO/IMPORT -p config.py -v ``` -**Note on the LORIS-MRI Python virtual environment**: in order to be able to -run `bids_import.py`, you need to be in the loris-mri Python virtual -environment. It should have been sourced when sourcing your LORIS-MRI +**Note on the LORIS-MRI Python virtual environment**: in order to be able to +run `bids_import.py`, you need to be in the loris-mri Python virtual +environment. It should have been sourced when sourcing your LORIS-MRI environment file. If this is not sourced, then simply run the following: ```bash source /opt/%PROJECT%/bin/mri/python_virtualenvs/loris-mri-python/bin/activate ``` -To deactivate a Python virtual environment, simply type `deactivate` in the +To deactivate a Python virtual environment, simply type `deactivate` in the terminal. diff --git a/docs/AppendixA-Troubleshooting_guideline.md b/docs/AppendixA-Troubleshooting_guideline.md index 274030c25..a4f3bb42c 100644 --- a/docs/AppendixA-Troubleshooting_guideline.md +++ b/docs/AppendixA-Troubleshooting_guideline.md @@ -1,38 +1,38 @@ # Appendix A - Troubleshooting guideline This section covers some of the most commonly encountered errors when running -the insertion scripts. They are divided into 3 separate tables, with each table -handling errors originating from the LORIS-MRI installation (Table 1), the LORIS +the insertion scripts. They are divided into 3 separate tables, with each table +handling errors originating from the LORIS-MRI installation (Table 1), the LORIS modules setup (Table 2), or the LORIS-MRI scripts (Table 3). 
_**Table 1: Common errors encountered during LORIS-MRI installation, and their proposed solutions.**_ | **Error** | **Cause** | **How to Fix**| -|:------|:------|:----------| +|:------|:------|:----------| |`install_driver(mysql) failed: Can't locate DBD/mysql.pm`|Missing dependency|`sudo apt-get install libdbd-mysql-perl`| -|`ERROR: You don't have a configuration file named 'prod' in: /data/%PROJECT%/bin/mri/dicom-archive/.loris_mri/`| Your `environment` file does not contain your actual LORIS-MRI project name. Instead, it contains the placeholder `%PROJECT%` as provided in the 'generic' file and/or your `environment` file is not sourced| Source the environment file located in `/data/$PROJECT/bin/mri/` after ensuring that the `$PROJECT` variable is replaced with your LORIS-MRI project name| -|`ERROR: You don't have a configuration file named 'prod' in: /data/loris-MRI/bin/mri/dicom-archive/.loris_mri/` *note*: `loris-MRI` is an example project name used in this illustration| Wrong file and/or directory permissions| Ensure that the `/data/$PROJECT/bin/mri` directory, and all directories within are readable by the user running the scripts (`lorisadmin` or the front-end `apache` user)| -|`ERROR: You don't have a configuration file named 'prod' in: /data/loris-MRI/bin/mri/dicom-archive/.loris_mri/` *note*: `loris-MRI` is an example project name used in this illustration| Syntax error in the `prod` file in the customized routines (for example a missing closing bracket)| Check the routines that were customized for your project needs| +|`ERROR: You don't have a configuration file named 'prod' in: /data/%PROJECT%/bin/mri/config/`| Your `environment` file does not contain your actual LORIS-MRI project name. 
Instead, it contains the placeholder `%PROJECT%` as provided in the 'generic' file and/or your `environment` file is not sourced| Source the environment file located in `/data/$PROJECT/bin/mri/` after ensuring that the `$PROJECT` variable is replaced with your LORIS-MRI project name| +|`ERROR: You don't have a configuration file named 'prod' in: /data/loris-MRI/bin/mri/config/` *note*: `loris-MRI` is an example project name used in this illustration| Wrong file and/or directory permissions| Ensure that the `/data/$PROJECT/bin/mri` directory, and all directories within are readable by the user running the scripts (`lorisadmin` or the front-end `apache` user)| +|`ERROR: You don't have a configuration file named 'prod' in: /data/loris-MRI/bin/mri/config/` *note*: `loris-MRI` is an example project name used in this illustration| Syntax error in the `prod` file in the customized routines (for example a missing closing bracket)| Check the routines that were customized for your project needs| |`DB connection failed`| Database credentials in the `prod` file were entered incorrectly during the install, or they were modified subsequently| Ensure that your `prod` file contains the correct database connection/credentials information in the `DATABASE Settings, Section I`| _**Table 2: Common errors encountered due to missing LORIS (front-end) module setup steps, and their proposed solutions.**_ | **Error** | **Cause** | **How to Fix**| -|:------|:------|:----------| +|:------|:------|:----------| |Images thumbnails do not show up in Imaging Browser. They appear as a broken image icon|Wrong permissions to the `/data/$PROJECT/data/pic/` folder|Ensure that the `apache` user can read/execute the `pic` images folder| |Images thumbnails do not show up in Imaging Browser. 
They appear as a broken image icon|Wrong `Images` path under the `Paths` section in LORIS Configuration module|Ensure the path to the images is correct, typically `/data/$PROJECT/data/`| |4-D images (*e.g.* DTI, fMRI) in brainbrowser do not show any volumes (Play button not displayed)|Most likely a dcm2mnc conversion error|Post an issue on the [minc-toolkit Github Issues page](https://github.com/BIC-MNI/minc-toolkit/issues)| |Brainbrowser says `Loading…` but no image shows up|Wrong permissions to the `/data/$PROJECT/data/assembly/` folder|Ensure that the apache user can read/execute the MINC `assembly` images folder| -|Brainbrowser says `Loading…` but no image shows up|Wrong `Images` path under the `Paths` section in LORIS Configuration module|Ensure the path to the MINC images is correct, typically `/data/$PROJECT/data/`| +|Brainbrowser says `Loading…` but no image shows up|Wrong `Images` path under the `Paths` section in LORIS Configuration module|Ensure the path to the MINC images is correct, typically `/data/$PROJECT/data/`| |Brainbrowser says `Loading…` but no image shows up|The `config.xml` in LORIS does not have the MINC Toolkit Path set properly|Fill out the path `` to the MINC Toolkit Installation in the `config.xml` (on the LORIS side). The last trailing `/` in the path is mandatory| _**Table 3: Common errors encountered during execution of the LORIS-MRi insertion scripts, and their proposed solutions.**_ | **Error** | **Cause** | **How to Fix**| -|:------|:------|:----------| +|:------|:------|:----------| |`The Candidate info validation has failed`|PatientName/PatientID header in the DICOMs not anonymized according to the LORIS convention `(PSCID_CandID_VisitLabel)`|Use [DICAT](https://github.com/aces/DICAT) to anonymize it properly OR Use the DICOM toolkit `dcmodify` command. 
The following one-line command (to be run from the folder where the DICOM files are) which anonymizes your entire folder of DICOM files is: `for i in $(find -type f); do dcmodify -ma PatientName="PSCID_CandID_VisitLabel" -nb $i; done`| |`The Candidate info validation has failed`|The upload scan contains at least one file that is NOT of type DICOM (.bmp or .pdf are common)|Remove any file in the upload that is not of type DICOM| |`... error message = 'No space left on device ...'`|The temporary directory where the insertion scripts perform its intermediate steps is full. This directory is set to a default value of `/tmp` as specified in the line `export TMPDIR=/tmp` of the `environment` file |Change the `TMPDIR` path in the environment file to a directory with enough space. A good rule of thumb is to have at least 2-3 times the size of the scan being processed as writable space. This space is usually automatically emptied by the pipeline upon a successful execution| @@ -44,7 +44,7 @@ _**Table 3: Common errors encountered during execution of the LORIS-MRi insertio |`The target directory does not contain a single DICOM file`|Probably the DICOM headers have blank StudyUID. 
The logic of insertion within LORIS-MRI depends on a StudyUID header|Ensure that your DICOM headers include a non-blank StudyUID header| |My resting-state fMRI scans are tagged as task fMRI although I have 2 entries in the `mri_protocol` table|The resting-state scan has parameters that match those of the task entry of the `mri_protocol` table, and the task-related entry in the `mri_protocol` table precedes that of the resting-state fMRI|Ensure the `mri_protocol` table has parameters that discern between all the study acquired modalities in an **exclusive** manner (*i.e.* no two row entries have overlapping parameters across all their columns)| |`no MINCs inserted`|Possibly all the MINC images are violated scans|Check the details of the image headers (from the MRI Violated Scans module or using `mincheader`) against the `mri_protocol` table entries, and adjust the table protocol parameters accordingly| -|The pipeline created an invalid visit label in the `session` table when inserting a scan (a.k.a. Visit Label not listed in the `Visit_Windows` table)|The flag $subjectID{'createVisitLabel'} is set to 1 but the function `getSubjectIDs` of the profile file does not contain a call to validate the subject IDs information|Ensure that the `getSubjectIDs` function of your profile file (typically named `prod`) contains a call to the function `NeuroDB::MRI::subjectIDIsValid` on the `CandID`, `PSCID` and `VisitLabel` values (see https://github.com/aces/Loris-MRI/pull/411 for details)| +|The pipeline created an invalid visit label in the `session` table when inserting a scan (a.k.a. 
Visit Label not listed in the `Visit_Windows` table)|The flag $subjectID{'createVisitLabel'} is set to 1 but the function `getSubjectIDs` of the profile file does not contain a call to validate the subject IDs information|Ensure that the `getSubjectIDs` function of your profile file (typically named `prod`) contains a call to the function `NeuroDB::MRI::subjectIDIsValid` on the `CandID`, `PSCID` and `VisitLabel` values (see https://github.com/aces/Loris-MRI/pull/411 for details)| ### A.1 Installation troubleshooting notes @@ -59,7 +59,7 @@ Key configuration points to verify: - If your MINC toolkit is older than 1.9.14 and your scans have no Date of Birth value, you may see an age unit error during DICOM to MINC conversion. - Instructions for compiling a more recent version of the MINC toolkit are + Instructions for compiling a more recent version of the MINC toolkit are available on [MNI-BIC GitHub](https://github.com/BIC-MNI/minc-toolkit-v2). ### A.2 Images display troubleshooting notes @@ -103,11 +103,11 @@ If upload was successful but issues were encountered with the imaging insertion - CentOS: check for additional dependencies/configurations (*e.g.* DICOM Dictionary path) in the detailed [CentOS Imaging Installation transcript](https://github.com/aces/Loris/wiki/CentOS-Imaging-installation-transcript) -- Manually re-run the entire pipeline sequence using the +- Manually re-run the entire pipeline sequence using the `imaging_upload_file.pl` script - If one of the final steps such as the MINC conversion is failing, you may wish to just re-run the `tarchiveLoader.pl` script. - See also [re-running the Imaging pipeline](#rerunning-the-imaging-pipeline) section for troubleshooting information. -- The pipeline created an invalid visit label in the `session` table when inserting a -scan (a.k.a. Visit Label not listed in the `Visit_Windows` table): +- The pipeline created an invalid visit label in the `session` table when inserting a +scan (a.k.a. 
Visit Label not listed in the `Visit_Windows` table): diff --git a/docs/python/Tooling.md b/docs/python/Tooling.md index 055d32699..02bd74e4c 100644 --- a/docs/python/Tooling.md +++ b/docs/python/Tooling.md @@ -2,7 +2,7 @@ ## Virtual environment -LORIS-MRI uses a Python virtual environment to manage its execution context and dependencies. To activate the virtual environment, use the command `source environment` in the LORIS-MRI root directory. The dependencies of the virtual environment are listed in the `install/requirements/requirements.txt` file. +LORIS-MRI uses a Python virtual environment to manage its execution context and dependencies. To activate the virtual environment, use the command `source environment` in the LORIS-MRI root directory. The dependencies of the virtual environment are listed in the `pyproject.toml` file. ## Configuration diff --git a/docs/scripts_md/BackPopulateSNRAndAcquisitionOrder.md b/docs/scripts_md/BackPopulateSNRAndAcquisitionOrder.md index 1c1904a94..bb26977b1 100644 --- a/docs/scripts_md/BackPopulateSNRAndAcquisitionOrder.md +++ b/docs/scripts_md/BackPopulateSNRAndAcquisitionOrder.md @@ -11,7 +11,7 @@ perl tools/BackPopulateSNRAndAcquisitionOrder.pl `[options]` Available options are: -\-profile : name of the config file in `../dicom-archive/.loris_mri` +\-profile : name of the config file in `../config` \-tarchive\_id: ID of the DICOM archive (.tar file) to be processed from the `tarchive` table diff --git a/docs/scripts_md/DTIPrepRegister.md b/docs/scripts_md/DTIPrepRegister.md index 3fbdf31a6..f64eafeb6 100644 --- a/docs/scripts_md/DTIPrepRegister.md +++ b/docs/scripts_md/DTIPrepRegister.md @@ -8,7 +8,7 @@ perl DTIPrepRegister.pl `[options]` Available options are: -\-profile : name of the config file in `../dicom-archive/.loris-mri` +\-profile : name of the config file in `../config` \-DTIPrep\_subdir : `DTIPrep` subdirectory storing the processed files to be registered diff --git a/docs/scripts_md/DTIPrep_pipeline.md 
b/docs/scripts_md/DTIPrep_pipeline.md index a68b48bd5..b8db26ae7 100644 --- a/docs/scripts_md/DTIPrep_pipeline.md +++ b/docs/scripts_md/DTIPrep_pipeline.md @@ -8,7 +8,7 @@ database. perl DTIPrep\_pipeline.p `[options]` \-profile : name of config file in - `../dicom-archive/.loris_mri` + `../config` \-list : file containing the list of raw diffusion MINC files (in `assembly/DCCID/Visit/mri/native`) @@ -297,7 +297,7 @@ database. INPUT: - $DTIs\_list : list of native DTI files processed - $DTIrefs : hash containing the processed filenames - - $profile : config file (in `../dicom-archive/.loris_mri`) + - $profile : config file (in `../config`) - $QCoutdir : output directory containing the processed files - $DTIPrepVersion : `DTIPrep` version used to obtain QCed files - $mincdiffVersion: `mincdiffusion` tool version used diff --git a/docs/scripts_md/MakeArchiveLocationRelative.md b/docs/scripts_md/MakeArchiveLocationRelative.md index b6d2d7961..63d63b571 100644 --- a/docs/scripts_md/MakeArchiveLocationRelative.md +++ b/docs/scripts_md/MakeArchiveLocationRelative.md @@ -10,7 +10,7 @@ perl MakeArchiveLocationRelative.pl `[options]` Available option is: -\-profile: name of the config file in `../dicom-archive/.loris_mri` +\-profile: name of the config file in `../config` # DESCRIPTION diff --git a/docs/scripts_md/PodToConfig.md b/docs/scripts_md/PodToConfig.md index d2f9c7ad3..dcbb9ea1c 100644 --- a/docs/scripts_md/PodToConfig.md +++ b/docs/scripts_md/PodToConfig.md @@ -10,7 +10,7 @@ perl tools/ProdToConfig.pl \`\[options\]\` The available option is: \-profile : name of the config file in - `../dicom-archive/.loris_mri` + `../config` # DESCRIPTION diff --git a/docs/scripts_md/ProdToConfig.md b/docs/scripts_md/ProdToConfig.md index d4e31f3db..2355435bb 100644 --- a/docs/scripts_md/ProdToConfig.md +++ b/docs/scripts_md/ProdToConfig.md @@ -10,7 +10,7 @@ perl tools/ProdToConfig.pl \`\[options\]\` The available option is: \-profile : name of the config file in - 
`../dicom-archive/.loris_mri` + `../config` # DESCRIPTION diff --git a/docs/scripts_md/batch_image_uploader.md b/docs/scripts_md/batch_image_uploader.md index a4196c835..6ed8b64ca 100644 --- a/docs/scripts_md/batch_image_uploader.md +++ b/docs/scripts_md/batch_image_uploader.md @@ -10,7 +10,7 @@ batch mode Available options are: \-profile : name of the config file in - `../dicom-archive/.loris_mri` + `../config` \-verbose : if set, be verbose diff --git a/docs/scripts_md/batch_uploads_imageuploader.md b/docs/scripts_md/batch_uploads_imageuploader.md index c26b81c2a..498dc854b 100644 --- a/docs/scripts_md/batch_uploads_imageuploader.md +++ b/docs/scripts_md/batch_uploads_imageuploader.md @@ -9,7 +9,7 @@ batch mode Available options are: -\-profile: name of the config file in `../dicom-archive/.loris_mri` +\-profile: name of the config file in `../config` \-verbose: if set, be verbose diff --git a/docs/scripts_md/batch_uploads_tarchive.md b/docs/scripts_md/batch_uploads_tarchive.md index e6e603051..1eef00320 100644 --- a/docs/scripts_md/batch_uploads_tarchive.md +++ b/docs/scripts_md/batch_uploads_tarchive.md @@ -14,7 +14,7 @@ This script uploads a list of DICOM archives to the database by calling script from `STDIN`, one file name per line. Each file name is assumed to be a path relative to `tarchiveLibraryDir` (see below). 
-The following settings of file `$ENV{LORIS_CONFIG}/.loris-mri/prod` affect the +The following settings of file `$ENV{LORIS_CONFIG}/prod` affect the behvaviour of `batch_uploads_tarchive` (where `$ENV{LORIS_CONFIG}` is the value of the Unix environment variable `LORIS_CONFIG`): diff --git a/docs/scripts_md/cleanupTarchives.md b/docs/scripts_md/cleanupTarchives.md index 91c183206..008b267f0 100644 --- a/docs/scripts_md/cleanupTarchives.md +++ b/docs/scripts_md/cleanupTarchives.md @@ -8,7 +8,7 @@ perl cleanupTarchives.pl `[options]` Available options are: -\-profile: name of the config file in `../dicom-archive/.loris-mri` +\-profile: name of the config file in `../config` # DESCRIPTION diff --git a/docs/scripts_md/create_nifti_bval_bvec.md b/docs/scripts_md/create_nifti_bval_bvec.md index f503f3886..2f6ed48d0 100644 --- a/docs/scripts_md/create_nifti_bval_bvec.md +++ b/docs/scripts_md/create_nifti_bval_bvec.md @@ -9,7 +9,7 @@ perl tools/create\_nifti\_bval\_bvec.pl `[options]` Available options are: -\-profile: name of the config file in `../dicom-archive/.loris_mri` +\-profile: name of the config file in `../config` \-verbose: be verbose # DESCRIPTION diff --git a/docs/scripts_md/database_files_update.md b/docs/scripts_md/database_files_update.md index 9dc6d9439..61bebea08 100644 --- a/docs/scripts_md/database_files_update.md +++ b/docs/scripts_md/database_files_update.md @@ -9,7 +9,7 @@ perl database\_files\_update.pl `[options]` Available option is: -\-profile: name of the config file in `../dicom-archive/.loris_mri` +\-profile: name of the config file in `../config` # DESCRIPTION diff --git a/docs/scripts_md/delete_imaging_upload.md b/docs/scripts_md/delete_imaging_upload.md index 2fe1b880b..e8512abed 100644 --- a/docs/scripts_md/delete_imaging_upload.md +++ b/docs/scripts_md/delete_imaging_upload.md @@ -10,7 +10,7 @@ perl delete\_imaging\_upload.pl \[-profile file\] \[-ignore\] \[-backup\_path ba Available options are: -\-profile : name of the config file in 
`../dicom-archive/.loris_mri` (defaults to `prod`). +\-profile : name of the config file in `../config` (defaults to `prod`). \-ignore : ignore files whose paths exist in the database but do not exist on the file system. Default is to abort if such a file is found, irrespective of whether a backup file will diff --git a/docs/scripts_md/deletemincsqlwrapper.md b/docs/scripts_md/deletemincsqlwrapper.md index 9d0d0c1da..870811ee2 100644 --- a/docs/scripts_md/deletemincsqlwrapper.md +++ b/docs/scripts_md/deletemincsqlwrapper.md @@ -12,7 +12,7 @@ perl tools/example\_scripts/deletemincsqlqrapper.pl `[options]` Available options are: \-profile : Name of the config file in - `../../dicom-archive/.loris_mri` + `../../config` \-insertminc : Re-insert the deleted MINC diff --git a/docs/scripts_md/dicomSummary.md b/docs/scripts_md/dicomSummary.md index 7cf5c85a9..aed41cefb 100644 --- a/docs/scripts_md/dicomSummary.md +++ b/docs/scripts_md/dicomSummary.md @@ -17,8 +17,7 @@ Available options are: \-dbreplace : use this option only if the DICOM data changed and need to be updated in the database -\-profile : specify the name of the config file residing in `.loris_mri` of the - current directory +\-profile : specify the name of the config file residing in the config directory \-tmp : to specify a temporary directory. It will contain the summaries if used with -noscreen option diff --git a/docs/scripts_md/dicomTar.md b/docs/scripts_md/dicomTar.md index e6cf0aace..ee4b53ed5 100644 --- a/docs/scripts_md/dicomTar.md +++ b/docs/scripts_md/dicomTar.md @@ -21,7 +21,7 @@ Available options are: resulting tarball! 
\-profile : Specify the name of the config file which resides in - `.loris_mri` in the current directory + the config directory \-centerName : Specify the symbolic center name to be stored alongside the DICOM institution diff --git a/docs/scripts_md/get_dicom_files.md b/docs/scripts_md/get_dicom_files.md index 512027882..b6099a536 100644 --- a/docs/scripts_md/get_dicom_files.md +++ b/docs/scripts_md/get_dicom_files.md @@ -9,7 +9,7 @@ perl get\_dicom\_files.pl \[-name patient\_name\_patterns\] \[-type scan\_type\_ Available options are: -\-profile : name of the config file in `../dicom-archive/.loris_mri` (typically `prod`) +\-profile : name of the config file in `../config` (typically `prod`) \-name : comma separated list of MySQL patterns for the patient names that a DICOM file has to have in order to be extracted. A DICOM file only has to match one of the diff --git a/docs/scripts_md/imaging_non_minc_insertion.md b/docs/scripts_md/imaging_non_minc_insertion.md index 72767c893..afbb7d507 100644 --- a/docs/scripts_md/imaging_non_minc_insertion.md +++ b/docs/scripts_md/imaging_non_minc_insertion.md @@ -8,7 +8,7 @@ perl imaging\_non\_minc\_insertion.pl `[options]` Available options are: -\-profile : name of the config file in `../dicom-archive/.loris-mri` (required) +\-profile : name of the config file in `../config` (required) \-file\_path : file to register into the database (full path from the root directory is required) (required) diff --git a/docs/scripts_md/imaging_upload_file.md b/docs/scripts_md/imaging_upload_file.md index a2c1a0faa..4f15adfee 100644 --- a/docs/scripts_md/imaging_upload_file.md +++ b/docs/scripts_md/imaging_upload_file.md @@ -9,7 +9,7 @@ perl imaging\_upload\_file.pl </path/to/UploadedFile> `[options]` Available options are: -\-profile : name of the config file in `../dicom-archive/.loris_mri` +\-profile : name of the config file in `../config` \-upload\_id : The Upload ID of the given scan uploaded diff --git 
a/docs/scripts_md/imaging_upload_file_cronjob.md b/docs/scripts_md/imaging_upload_file_cronjob.md index 8993ef1e8..452adb571 100644 --- a/docs/scripts_md/imaging_upload_file_cronjob.md +++ b/docs/scripts_md/imaging_upload_file_cronjob.md @@ -10,7 +10,7 @@ perl imaging\_upload\_file\_cronjob.pl `[options]` Available options are: -\-profile : Name of the config file in `../dicom-archive/.loris_mri` +\-profile : Name of the config file in `../config` \-verbose : If set, be verbose diff --git a/docs/scripts_md/mass_nii.md b/docs/scripts_md/mass_nii.md index c34438834..4bd48819a 100644 --- a/docs/scripts_md/mass_nii.md +++ b/docs/scripts_md/mass_nii.md @@ -9,7 +9,7 @@ perl mass\_nii.pl `[options]` Available options are: -\-profile : name of the config file in `../dicom-archive/.loris_mri` +\-profile : name of the config file in `../config` \-minFileID: specifies the minimum `FileID` to operate on diff --git a/docs/scripts_md/mass_perldoc_md_creation.md b/docs/scripts_md/mass_perldoc_md_creation.md index 71dfd33b2..12b23d302 100644 --- a/docs/scripts_md/mass_perldoc_md_creation.md +++ b/docs/scripts_md/mass_perldoc_md_creation.md @@ -9,7 +9,7 @@ perl mass\_perldoc\_md\_creation.pl `[options]` Available options are: -\-profile: name of the config file in `../dicom-archive/.loris_mri` +\-profile: name of the config file in `../config` \-verbose: be verbose (boolean) diff --git a/docs/scripts_md/mass_pic.md b/docs/scripts_md/mass_pic.md index 74c779969..efe9525f3 100644 --- a/docs/scripts_md/mass_pic.md +++ b/docs/scripts_md/mass_pic.md @@ -8,7 +8,7 @@ perl mass\_pic.pl `[options]` Available options are: -\-profile : name of the config file in `../dicom-archive/.loris_mri` +\-profile : name of the config file in `../config` \-mincFileID: integer, minimum `FileID` to operate on diff --git a/docs/scripts_md/minc_insertion.md b/docs/scripts_md/minc_insertion.md index 8c5d40f79..e9ebd612d 100644 --- a/docs/scripts_md/minc_insertion.md +++ b/docs/scripts_md/minc_insertion.md @@ 
-8,7 +8,7 @@ perl minc\_insertion.pl `[options]` Available options are: -\-profile : name of the config file in `../dicom-archive/.loris_mri` +\-profile : name of the config file in `../config` \-uploadID : The upload ID from which this MINC was created diff --git a/docs/scripts_md/minc_to_bids_converter.md b/docs/scripts_md/minc_to_bids_converter.md index 391b79019..b57dba275 100644 --- a/docs/scripts_md/minc_to_bids_converter.md +++ b/docs/scripts_md/minc_to_bids_converter.md @@ -8,7 +8,7 @@ dataset from the MINC files present in the `assembly/` directory. perl tools/minc\_to\_bids\_converter.pl `[options]` Available options are: -\-profile : name of the config file in `../dicom-archive/.loris_mri` +\-profile : name of the config file in `../config` \-tarchive\_id : The ID of the DICOM archive to be converted into a BIDS dataset (optional, if not set, convert all DICOM archives) \-dataset\_name : Name/Description of the dataset to be generated in BIDS diff --git a/docs/scripts_md/register_processed_data.md b/docs/scripts_md/register_processed_data.md index 061d2057d..a5b064e35 100644 --- a/docs/scripts_md/register_processed_data.md +++ b/docs/scripts_md/register_processed_data.md @@ -9,7 +9,7 @@ perl register\_processed\_data.pl `[options]` Available options are: -\-profile : name of config file in `../dicom-archive/.loris_mri` +\-profile : name of config file in `../config` \-file : file that will be registered in the database (full path from the root directory is required) diff --git a/docs/scripts_md/run_defacing_script.md b/docs/scripts_md/run_defacing_script.md index 1d5bf225e..f4338e450 100644 --- a/docs/scripts_md/run_defacing_script.md +++ b/docs/scripts_md/run_defacing_script.md @@ -9,7 +9,7 @@ acquisitions specified in the Config module of LORIS. 
Available options are: -`-profile` : name of the config file in `../dicom-archive/.loris_mri` +`-profile` : name of the config file in `../config` `-tarchive_ids`: comma-separated list of MySQL `TarchiveID`s diff --git a/docs/scripts_md/tarchiveLoader.md b/docs/scripts_md/tarchiveLoader.md index 5b35e9977..8e7a023b0 100644 --- a/docs/scripts_md/tarchiveLoader.md +++ b/docs/scripts_md/tarchiveLoader.md @@ -15,7 +15,7 @@ perl uploadNeuroDB/tarchiveLoader.pl </path/to/DICOM-tarchive> `[options]` Available options are: -\-profile : Name of the config file in `../dicom-archive/.loris_mri` +\-profile : Name of the config file in `../config` \-uploadID : UploadID associated to this upload diff --git a/docs/scripts_md/tarchive_validation.md b/docs/scripts_md/tarchive_validation.md index 077f0f208..5fa5d1393 100644 --- a/docs/scripts_md/tarchive_validation.md +++ b/docs/scripts_md/tarchive_validation.md @@ -9,7 +9,7 @@ perl tarchive\_validation.pl `[options]` Available options are: -\-profile : name of the config file in `../dicom-archive/.loris-mri` +\-profile : name of the config file in `../config` \-uploadID : UploadID associated to the DICOM archive to validate diff --git a/docs/scripts_md/updateHeaders.md b/docs/scripts_md/updateHeaders.md index 14667291f..7e130e63a 100644 --- a/docs/scripts_md/updateHeaders.md +++ b/docs/scripts_md/updateHeaders.md @@ -17,7 +17,7 @@ Available options are: \-database: Enable `dicomTar`'s database features -\-profile : Name of the config file in `../dicom-archive/.loris_mri` +\-profile : Name of the config file in `../config` \-verbose : Be verbose diff --git a/docs/scripts_md/updateHeadersBatch.md b/docs/scripts_md/updateHeadersBatch.md index e02cfbce3..9c1de73d5 100644 --- a/docs/scripts_md/updateHeadersBatch.md +++ b/docs/scripts_md/updateHeadersBatch.md @@ -21,7 +21,7 @@ Available options are: \-database: Enable `dicomTar`'s database features -\-profile : Name of the config file in `../dicom-archive/.loris_mri` +\-profile : Name of 
the config file in `../config` \-verbose : Be verbose diff --git a/install/imaging_install.sh b/install/imaging_install.sh index 6a4e360f5..5be824344 100755 --- a/install/imaging_install.sh +++ b/install/imaging_install.sh @@ -102,7 +102,7 @@ sudo -S su $USER -c "mkdir -m 770 -p $mridir/python_virtualenvs/loris-mri-python python3.11 -m venv $mridir/python_virtualenvs/loris-mri-python source $mridir/python_virtualenvs/loris-mri-python/bin/activate echo "Installing the Python libraries into the loris-mri virtualenv..." -pip3 install -r "$installdir/requirements/requirements.txt" +pip3 install $mridir # deactivate the virtualenv for now deactivate @@ -120,7 +120,7 @@ echo "Creating the data directories" sudo -S su $USER -c "mkdir -m 770 -p /data/$PROJ/data/assembly_bids" #holds the BIDS files derived from DICOMs sudo -S su $USER -c "mkdir -m 770 -p /data/$PROJ/data/batch_output" #contains the result of the SGE (queue) sudo -S su $USER -c "mkdir -m 770 -p /data/$PROJ/data/bids_imports" #contains imported BIDS studies - sudo -S su $USER -c "mkdir -m 770 -p $mridir/dicom-archive/.loris_mri" + sudo -S su $USER -c "mkdir -m 770 -p $mridir/config" echo ##################################################################################### @@ -203,20 +203,20 @@ echo ##################################################################################### echo "Creating MRI config file" -cp $installdir/templates/profileTemplate.pl $mridir/dicom-archive/.loris_mri/$prodfilename -sudo chmod 640 $mridir/dicom-archive/.loris_mri/$prodfilename -sudo chgrp $group $mridir/dicom-archive/.loris_mri/$prodfilename +cp $installdir/templates/profileTemplate.pl $mridir/config/$prodfilename +sudo chmod 640 $mridir/config/$prodfilename +sudo chgrp $group $mridir/config/$prodfilename -sed -e "s#DBNAME#$mysqldb#g" -e "s#DBUSER#$mysqluser#g" -e "s#DBPASS#$mysqlpass#g" -e "s#DBHOST#$mysqlhost#g" $installdir/templates/profileTemplate.pl > $mridir/dicom-archive/.loris_mri/$prodfilename -echo "config 
file is located at $mridir/dicom-archive/.loris_mri/$prodfilename" +sed -e "s#DBNAME#$mysqldb#g" -e "s#DBUSER#$mysqluser#g" -e "s#DBPASS#$mysqlpass#g" -e "s#DBHOST#$mysqlhost#g" $installdir/templates/profileTemplate.pl > $mridir/config/$prodfilename +echo "config file is located at $mridir/config/$prodfilename" echo -echo "Creating python database config file with database credentials" -cp $installdir/templates/database_config_template.py $mridir/dicom-archive/.loris_mri/database_config.py -sudo chmod 640 $mridir/dicom-archive/.loris_mri/database_config.py -sudo chgrp $group $mridir/dicom-archive/.loris_mri/database_config.py -sed -e "s#DBNAME#$mysqldb#g" -e "s#DBUSER#$mysqluser#g" -e "s#DBPASS#$mysqlpass#g" -e "s#DBHOST#$mysqlhost#g" $installdir/templates/database_config_template.py > $mridir/dicom-archive/.loris_mri/database_config.py -echo "config file for python import scripts is located at $mridir/dicom-archive/.loris_mri/database_config.py" +echo "Creating python config file with database credentials" +cp $installdir/templates/config_template.py $mridir/config/config.py +sudo chmod 640 $mridir/config/config.py +sudo chgrp $group $mridir/config/config.py +sed -e "s#DBNAME#$mysqldb#g" -e "s#DBUSER#$mysqluser#g" -e "s#DBPASS#$mysqlpass#g" -e "s#DBHOST#$mysqlhost#g" $installdir/templates/config_template.py > $mridir/config/config.py +echo "config file for python import scripts is located at $mridir/config/config.py" echo ###################################################################### diff --git a/install/imaging_install_MacOSX.sh b/install/imaging_install_MacOSX.sh index 91d282171..70c38dc90 100755 --- a/install/imaging_install_MacOSX.sh +++ b/install/imaging_install_MacOSX.sh @@ -1,12 +1,12 @@ #!/bin/bash ################################## -# This script is not actively maintained. +# This script is not actively maintained. 
# and has not been supported since 15.10 ################################## echo "NOTE: Mac is no longer supported as of 15.10." echo "This script is not actively maintained." -echo +echo ################################## ###WHAT THIS SCRIPT WILL NOT DO### @@ -16,7 +16,7 @@ echo #4)It doesn't install DICOM toolkit -#Create a temporary log for installation and delete it on completion +#Create a temporary log for installation and delete it on completion #@TODO make sure that /tmp is writable LOGFILE="/tmp/$(basename $0).$$.tmp" touch $LOGFILE @@ -33,8 +33,8 @@ read -p "What is the project name? " PROJ ##this will be used to create all th read -p "What prod file name would you like to use? default: prod " prodfilename if [ -z "$prodfilename" ]; then prodfilename="prod" -fi - +fi + # determine the mridir installdir=`pwd` mridir=${installdir%"/install"} @@ -54,7 +54,7 @@ echo "Creating the data directories" sudo -S su $USER -c "mkdir -m 770 -p /data/$PROJ/data/assembly_bids" #holds the BIDS files derived from DICOMs sudo -S su $USER -c "mkdir -m 770 -p /data/$PROJ/data/batch_output" #contains the result of the SGE (queue) sudo -S su $USER -c "mkdir -m 770 -p /data/$PROJ/data/bids_imports" #contains imported BIDS studies - sudo -S su $USER -c "mkdir -m 770 -p $mridir/dicom-archive/.loris_mri" + sudo -S su $USER -c "mkdir -m 770 -p $mridir/config" echo ##################################################################################### ###############incoming directory ################################################### @@ -76,7 +76,7 @@ echo ######################Add the proper Apache group user ############################# #################################################################################### if egrep ^www-data: /etc/group > $LOGFILE 2>&1; -then +then group=www-data elif egrep ^www: /etc/group > $LOGFILE 2>&1; then @@ -93,7 +93,7 @@ fi #################################################################################### #echo "Changing permissions" -sudo 
chmod -R 770 $mridir/.loris_mri/ +sudo chmod -R 770 $mridir/config/ sudo chmod -R 770 /data/$PROJ/ sudo chmod -R 770 /data/incoming/ @@ -116,18 +116,18 @@ echo ##################################################################################### echo "Creating MRI config file" -cp $installdir/templates/profileTemplate.pl $mridir/dicom-archive/.loris_mri/$prodfilename -sudo chmod 640 $mridir/dicom-archive/.loris_mri/$prodfilename -sudo chgrp $group $mridir/dicom-archive/.loris_mri/$prodfilename +cp $installdir/templates/profileTemplate.pl $mridir/config/$prodfilename +sudo chmod 640 $mridir/config/$prodfilename +sudo chgrp $group $mridir/config/$prodfilename -sed -e "s#DBNAME#$mysqldb#g" -e "s#DBUSER#$mysqluser#g" -e "s#DBPASS#$mysqlpass#g" -e "s#DBHOST#$mysqlhost#g" $installdir/templates/profileTemplate.pl > $mridir/dicom-archive/.loris_mri/$prodfilename -echo "config file is located at $mridir/dicom-archive/.loris_mri/$prodfilename" +sed -e "s#DBNAME#$mysqldb#g" -e "s#DBUSER#$mysqluser#g" -e "s#DBPASS#$mysqlpass#g" -e "s#DBHOST#$mysqlhost#g" $installdir/templates/profileTemplate.pl > $mridir/config/$prodfilename +echo "config file is located at $mridir/config/$prodfilename" echo -echo "Creating python database config file with database credentials" -cp $installdir/templates/database_config_template.py $mridir/dicom-archive/.loris_mri/database_config.py -sudo chmod 640 $mridir/dicom-archive/.loris_mri/database_config.py -sudo chgrp $group $mridir/dicom-archive/.loris_mri/database_config.py -sed -e "s#DBNAME#$mysqldb#g" -e "s#DBUSER#$mysqluser#g" -e "s#DBPASS#$mysqlpass#g" -e "s#DBHOST#$mysqlhost#g" $installdir/templates/database_config_template.py > $mridir/dicom-archive/.loris_mri/database_config.py -echo "config file for python import scripts is located at $mridir/dicom-archive/.loris_mri/database_config.py" +echo "Creating python config file with database credentials" +cp $installdir/templates/config_template.py $mridir/config/config.py +sudo chmod 640 
$mridir/config/config.py +sudo chgrp $group $mridir/config/config.py +sed -e "s#DBNAME#$mysqldb#g" -e "s#DBUSER#$mysqluser#g" -e "s#DBPASS#$mysqlpass#g" -e "s#DBHOST#$mysqlhost#g" $installdir/templates/config_template.py > $mridir/config/config.py +echo "config file for python import scripts is located at $mridir/config/config.py" echo diff --git a/install/requirements/requirements.txt b/install/requirements/requirements.txt deleted file mode 100755 index ea8e33142..000000000 --- a/install/requirements/requirements.txt +++ /dev/null @@ -1,22 +0,0 @@ -boto3==1.35.99 -google -mat73 -matplotlib -mne -mne-bids>=0.14 -mysqlclient -nibabel -nilearn -nose -numpy -protobuf>=3.0.0 -pybids==0.17.0 -pydicom -pyright -pytest -python-dateutil -ruff -scikit-learn -scipy -sqlalchemy>=2.0.0 -virtualenv diff --git a/install/templates/database_config_template.py b/install/templates/config_template.py similarity index 98% rename from install/templates/database_config_template.py rename to install/templates/config_template.py index 605947270..84f07e281 100644 --- a/install/templates/database_config_template.py +++ b/install/templates/config_template.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python - import re from sqlalchemy.orm import Session as Database diff --git a/install/templates/environment_template b/install/templates/environment_template index a92b06472..9fa81f9f5 100644 --- a/install/templates/environment_template +++ b/install/templates/environment_template @@ -9,11 +9,10 @@ umask 0002 export PATH=/opt/${PROJECT}/bin/mri:/opt/${PROJECT}/bin/mri/uploadNeuroDB:/opt/${PROJECT}/bin/mri/uploadNeuroDB/bin:/opt/${PROJECT}/bin/mri/dicom-archive:/opt/${PROJECT}/bin/mri/python/scripts:/opt/${PROJECT}/bin/mri/tools:/opt/${PROJECT}/bin/mri/python/react-series-data-viewer:${MINC_TOOLKIT_DIR}/bin:/usr/local/bin/tpcclib:$PATH export PERL5LIB=/opt/${PROJECT}/bin/mri/uploadNeuroDB:/opt/${PROJECT}/bin/mri/dicom-archive:$PERL5LIB export TMPDIR=/tmp -export 
LORIS_CONFIG=/opt/${PROJECT}/bin/mri/dicom-archive +export LORIS_CONFIG=/opt/${PROJECT}/bin/mri/config # for the Python scripts export LORIS_MRI=/opt/${PROJECT}/bin/mri -export PYTHONPATH=$PYTHONPATH:/opt/${PROJECT}/bin/mri/python:/opt/${PROJECT}/bin/mri/python/react-series-data-viewer source /opt/${PROJECT}/bin/mri/python_virtualenvs/loris-mri-python/bin/activate # for the defacing scripts diff --git a/load_tarchive_db.sh b/load_tarchive_db.sh index d876ef328..aa75efe45 100755 --- a/load_tarchive_db.sh +++ b/load_tarchive_db.sh @@ -10,7 +10,7 @@ exit 1 fi -PREFIX=$(grep '$prefix' $LORIS_CONFIG/.loris_mri/$profile | awk '{print $3}' | sed 's/"//g' | sed 's/;//g') +PREFIX=$(grep '$prefix' $LORIS_CONFIG/$profile | awk '{print $3}' | sed 's/"//g' | sed 's/;//g') tempdir=$TMPDIR/load_tarchive_db.$$ mkdir -p $tempdir diff --git a/pyproject.toml b/pyproject.toml index 992e66b2a..3e854d02a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,53 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + [project] +name = "loris-mri" +version = "27.0.0" +description = "The LORIS Python scripts" +readme = "README.md" +license = "GPL-3.0-or-later" +license-files = ["LICENSE"] requires-python = ">= 3.11" +dependencies = [ + "boto3==1.35.99", + "google", + "mat73", + "matplotlib", + "mne", + "mne-bids>=0.14", + "mysqlclient", + "nibabel", + "nilearn", + "nose", + "numpy", + "protobuf>=3.0.0", + "pybids==0.17.0", + "pydicom", + "python-dateutil", + "scikit-learn", + "scipy", + "sqlalchemy>=2.0.0", + "virtualenv", +] + +[project.optional-dependencies] +dev = [ + "pyright", + "pytest", + "ruff", +] + +[project.urls] +Homepage = "https://github.com/aces/loris-mri" + +[tool.hatch.build.targets.wheel] +packages = [ + "python/lib", + "python/react-series-data-viewer", + "python/tests", +] [tool.ruff] src = ["python"] @@ -19,6 +67,7 @@ select = ["E", "EXE", "F", "I", "N", "UP", "W"] include = [ "python/tests", "python/lib/db", + "python/lib/imaging_lib", 
"python/lib/import_dicom_study", "python/lib/util", "python/lib/bids.py", @@ -28,7 +77,6 @@ include = [ "python/lib/get_session_info.py", "python/lib/logging.py", "python/lib/make_env.py", - "python/lib/scanner.py", "python/scripts/import_dicom_study.py", "python/scripts/summarize_dicom_study.py", ] diff --git a/python/lib/aws_s3.py b/python/lib/aws_s3.py index 05fe8da9d..3d0ec9912 100644 --- a/python/lib/aws_s3.py +++ b/python/lib/aws_s3.py @@ -7,8 +7,6 @@ from lib.util.crypto import compute_file_md5_hash -__license__ = "GPLv3" - class AwsS3: diff --git a/python/lib/bids.py b/python/lib/bids.py index 3b7010e94..fe616d42d 100644 --- a/python/lib/bids.py +++ b/python/lib/bids.py @@ -3,7 +3,7 @@ from lib.config import get_patient_id_dicom_header_config from lib.env import Env from lib.get_session_info import SessionInfo, get_session_info -from lib.scanner import MriScannerInfo +from lib.imaging_lib.mri_scanner import MriScannerInfo def get_bids_json_scanner_info(bids_json: dict[str, Any]) -> MriScannerInfo: diff --git a/python/lib/bidsreader.py b/python/lib/bidsreader.py index 8c600b31c..bd7da2508 100644 --- a/python/lib/bidsreader.py +++ b/python/lib/bidsreader.py @@ -18,8 +18,6 @@ # from bids import BIDSLayoutIndexer -__license__ = "GPLv3" - class BidsReader: """ diff --git a/python/lib/candidate.py b/python/lib/candidate.py index 370433815..a6ac4fd06 100644 --- a/python/lib/candidate.py +++ b/python/lib/candidate.py @@ -7,8 +7,6 @@ import lib.exitcode -__license__ = "GPLv3" - class Candidate: """ diff --git a/python/lib/config_file.py b/python/lib/config_file.py index bfa20d75e..7a186f216 100644 --- a/python/lib/config_file.py +++ b/python/lib/config_file.py @@ -2,10 +2,15 @@ This module stores the classes used in the Python configuration file of LORIS-MRI. 
""" +import importlib.util +import os +import sys from dataclasses import dataclass +from typing import Any from sqlalchemy.orm import Session as Database +import lib.exitcode from lib.db.queries.site import get_all_sites @@ -94,6 +99,43 @@ class SessionPhantomConfig: SessionConfig = SessionCandidateConfig | SessionPhantomConfig +def load_config(arg: str | None) -> Any: + """ + Load the LORIS-MRI Python configuration file from the environment or exit the program with an + error if that file is not found or cannot be loaded. + """ + + config_dir_path = os.environ.get('LORIS_CONFIG') + if config_dir_path is None: + print("ERROR: Environment variable 'LORIS_CONFIG' not set.", file=sys.stderr) + sys.exit(lib.exitcode.INVALID_ENVIRONMENT_VAR) + + # Get the name of the configuration file from the argument or use the default name. + config_file_name = arg if arg is not None else 'config.py' + + config_file_path = os.path.join(config_dir_path, config_file_name) + if not os.path.exists(config_file_path): + print( + f"ERROR: No configuration file '{config_file_name}' found in the '{config_dir_path}' directory.", + file=sys.stderr, + ) + + sys.exit(lib.exitcode.INVALID_PATH) + + # Get the name of the configuration module from its file name. + module_name = os.path.splitext(os.path.basename(config_file_path))[0] + + spec = importlib.util.spec_from_file_location(module_name, config_file_path) + if spec is None or spec.loader is None: + print(f"ERROR: Cannot load module specification for configuration file '{config_file_name}'.", file=sys.stderr) + sys.exit(lib.exitcode.INVALID_IMPORT) + + # Load the configuration module. + config = importlib.util.module_from_spec(spec) + spec.loader.exec_module(config) + return config + + def try_get_site_id_with_patient_id_heuristic(db: Database, patient_id: str) -> int | None: """ Try to get the ID of a session's site based on its patient ID. 
This function is a heuristic diff --git a/python/lib/database.py b/python/lib/database.py index de80fa64f..3ac3166ed 100644 --- a/python/lib/database.py +++ b/python/lib/database.py @@ -7,8 +7,6 @@ import lib.exitcode from lib.config_file import DatabaseConfig -__license__ = "GPLv3" - class Database: """ diff --git a/python/lib/database_lib/bids_event_mapping.py b/python/lib/database_lib/bids_event_mapping.py index 2893a604d..c8a89d4bc 100644 --- a/python/lib/database_lib/bids_event_mapping.py +++ b/python/lib/database_lib/bids_event_mapping.py @@ -1,9 +1,6 @@ """This class performs bids_event_*_mapping related database queries""" -__license__ = "GPLv3" - - class BidsEventMapping: def __init__(self, db, verbose): diff --git a/python/lib/database_lib/candidate_db.py b/python/lib/database_lib/candidate_db.py index 42fe7c7fb..878f3a079 100644 --- a/python/lib/database_lib/candidate_db.py +++ b/python/lib/database_lib/candidate_db.py @@ -2,8 +2,6 @@ from typing_extensions import deprecated -__license__ = "GPLv3" - @deprecated('Use `lib.db.models.candidate.DbCandidate` instead') class CandidateDB: diff --git a/python/lib/database_lib/config.py b/python/lib/database_lib/config.py index a5030762c..33d5f922b 100644 --- a/python/lib/database_lib/config.py +++ b/python/lib/database_lib/config.py @@ -1,9 +1,6 @@ """This class performs database queries for the config table""" -__license__ = "GPLv3" - - class Config: """ This class performs database queries for the Config* tables. 
diff --git a/python/lib/database_lib/files.py b/python/lib/database_lib/files.py index cd281a737..fd912715d 100644 --- a/python/lib/database_lib/files.py +++ b/python/lib/database_lib/files.py @@ -2,8 +2,6 @@ from lib.database_lib.parameter_type import ParameterType -__license__ = "GPLv3" - class Files: """ diff --git a/python/lib/database_lib/mri_candidate_errors.py b/python/lib/database_lib/mri_candidate_errors.py index 25b70e5e7..eb5acbe4d 100644 --- a/python/lib/database_lib/mri_candidate_errors.py +++ b/python/lib/database_lib/mri_candidate_errors.py @@ -1,9 +1,6 @@ """This class performs database queries for the MRICandidateErrors table""" -__license__ = "GPLv3" - - class MriCandidateErrors: """ This class performs database queries for imaging dataset stored in the MRICandidateErrors table. diff --git a/python/lib/database_lib/mri_protocol.py b/python/lib/database_lib/mri_protocol.py index 4d82cf0ec..f1a0b6ccf 100644 --- a/python/lib/database_lib/mri_protocol.py +++ b/python/lib/database_lib/mri_protocol.py @@ -1,9 +1,6 @@ """This class performs database queries for the mri_protocol tables""" -__license__ = "GPLv3" - - class MriProtocol: """ This class performs database queries for imaging dataset stored in the mri_protocol table. diff --git a/python/lib/database_lib/mri_protocol_checks.py b/python/lib/database_lib/mri_protocol_checks.py index 49683919a..9cd0a69e7 100644 --- a/python/lib/database_lib/mri_protocol_checks.py +++ b/python/lib/database_lib/mri_protocol_checks.py @@ -1,9 +1,6 @@ """This class performs database queries for the mri_protocol_checks tables""" -__license__ = "GPLv3" - - class MriProtocolChecks: """ This class performs database queries for imaging dataset stored in the mri_protocol_checks table. 
diff --git a/python/lib/database_lib/mri_protocol_violated_scans.py b/python/lib/database_lib/mri_protocol_violated_scans.py index fd65b55fa..e799e5ad8 100644 --- a/python/lib/database_lib/mri_protocol_violated_scans.py +++ b/python/lib/database_lib/mri_protocol_violated_scans.py @@ -1,9 +1,6 @@ """This class performs database queries for the mri_protocol_violated_scans table""" -__license__ = "GPLv3" - - class MriProtocolViolatedScans: """ This class performs database queries for imaging dataset stored in the mri_protocol_violated_scans table. diff --git a/python/lib/database_lib/mri_scan_type.py b/python/lib/database_lib/mri_scan_type.py index 850053b0e..575e2d80c 100644 --- a/python/lib/database_lib/mri_scan_type.py +++ b/python/lib/database_lib/mri_scan_type.py @@ -1,9 +1,6 @@ """This class performs database queries for the site mri_scan_type table""" -__license__ = "GPLv3" - - class MriScanType: """ This class performs database queries for imaging dataset stored in the mri_scan_type table. 
diff --git a/python/lib/database_lib/mri_scanner.py b/python/lib/database_lib/mri_scanner.py index 46ca46af2..c3dd1bbff 100644 --- a/python/lib/database_lib/mri_scanner.py +++ b/python/lib/database_lib/mri_scanner.py @@ -6,8 +6,6 @@ from lib.candidate import Candidate -__license__ = "GPLv3" - @deprecated('Use `lib.scanner` instead') class MriScanner: diff --git a/python/lib/database_lib/mri_upload_db.py b/python/lib/database_lib/mri_upload_db.py index cb76ebcc0..bb7901540 100644 --- a/python/lib/database_lib/mri_upload_db.py +++ b/python/lib/database_lib/mri_upload_db.py @@ -2,8 +2,6 @@ from typing_extensions import deprecated -__license__ = "GPLv3" - @deprecated('Use `lib.db.models.mri_upload.DbMriUpload` instead') class MriUploadDB: diff --git a/python/lib/database_lib/mri_violations_log.py b/python/lib/database_lib/mri_violations_log.py index 517c97a54..83c5a925d 100644 --- a/python/lib/database_lib/mri_violations_log.py +++ b/python/lib/database_lib/mri_violations_log.py @@ -1,9 +1,6 @@ """This class performs database queries for the mri_violations_log table""" -__license__ = "GPLv3" - - class MriViolationsLog: """ This class performs database queries for imaging dataset stored in the mri_violations_log table. 
diff --git a/python/lib/database_lib/notification.py b/python/lib/database_lib/notification.py index c7be0cc0d..dba81b964 100644 --- a/python/lib/database_lib/notification.py +++ b/python/lib/database_lib/notification.py @@ -4,8 +4,6 @@ from typing_extensions import deprecated -__license__ = "GPLv3" - @deprecated('Use `lib.db.models.notification_spool` instead') class Notification: diff --git a/python/lib/database_lib/parameter_file.py b/python/lib/database_lib/parameter_file.py index 720431389..faefbd6d6 100644 --- a/python/lib/database_lib/parameter_file.py +++ b/python/lib/database_lib/parameter_file.py @@ -1,7 +1,5 @@ """This class performs parameter_file related database queries and common checks""" -__license__ = "GPLv3" - class ParameterFile: """ diff --git a/python/lib/database_lib/parameter_type.py b/python/lib/database_lib/parameter_type.py index 421cae4a1..e38526437 100644 --- a/python/lib/database_lib/parameter_type.py +++ b/python/lib/database_lib/parameter_type.py @@ -1,7 +1,5 @@ """This class performs parameter_type* related database queries""" -__license__ = "GPLv3" - class ParameterType: """ diff --git a/python/lib/database_lib/physiological_coord_system.py b/python/lib/database_lib/physiological_coord_system.py index 4e7005034..4d659c554 100644 --- a/python/lib/database_lib/physiological_coord_system.py +++ b/python/lib/database_lib/physiological_coord_system.py @@ -11,8 +11,6 @@ # from lib.point_3d import Point3D # from lib.database_lib.point_3d import Point3DDB -__license__ = "GPLv3" - class PhysiologicalCoordSystem: diff --git a/python/lib/database_lib/physiological_event_archive.py b/python/lib/database_lib/physiological_event_archive.py index 400d88687..ae6ed5b9e 100644 --- a/python/lib/database_lib/physiological_event_archive.py +++ b/python/lib/database_lib/physiological_event_archive.py @@ -1,9 +1,6 @@ """This class performs database queries for the physiological_event_archive table""" -__license__ = "GPLv3" - - class 
PhysiologicalEventArchive: def __init__(self, db, verbose): diff --git a/python/lib/database_lib/physiological_event_file.py b/python/lib/database_lib/physiological_event_file.py index 0eee8268e..bb55642fb 100644 --- a/python/lib/database_lib/physiological_event_file.py +++ b/python/lib/database_lib/physiological_event_file.py @@ -1,9 +1,6 @@ """This class performs database queries for the physiological_event_file table""" -__license__ = "GPLv3" - - class PhysiologicalEventFile: def __init__(self, db, verbose): diff --git a/python/lib/database_lib/physiological_file.py b/python/lib/database_lib/physiological_file.py index fba57c608..47353810d 100644 --- a/python/lib/database_lib/physiological_file.py +++ b/python/lib/database_lib/physiological_file.py @@ -1,9 +1,6 @@ """This class performs database queries for the physiological_file table""" -__license__ = "GPLv3" - - class PhysiologicalFile: def __init__(self, db, verbose): diff --git a/python/lib/database_lib/physiological_modality.py b/python/lib/database_lib/physiological_modality.py index 4112c9c5d..5449fc7b8 100644 --- a/python/lib/database_lib/physiological_modality.py +++ b/python/lib/database_lib/physiological_modality.py @@ -1,9 +1,6 @@ """This class performs database queries for the physiological_modality table""" -__license__ = "GPLv3" - - class PhysiologicalModality: def __init__(self, db, verbose): diff --git a/python/lib/database_lib/physiological_output_type.py b/python/lib/database_lib/physiological_output_type.py index 2441571b1..691f6f778 100644 --- a/python/lib/database_lib/physiological_output_type.py +++ b/python/lib/database_lib/physiological_output_type.py @@ -1,9 +1,6 @@ """This class performs database queries for the physiological_output_type table""" -__license__ = "GPLv3" - - class PhysiologicalOutputType: def __init__(self, db, verbose): diff --git a/python/lib/database_lib/physiological_parameter_file.py b/python/lib/database_lib/physiological_parameter_file.py index 
0f5b5fb80..7cc48068d 100644 --- a/python/lib/database_lib/physiological_parameter_file.py +++ b/python/lib/database_lib/physiological_parameter_file.py @@ -1,9 +1,6 @@ """This class performs database queries for the physiological_parameter_file table""" -__license__ = "GPLv3" - - class PhysiologicalParameterFile: def __init__(self, db, verbose): diff --git a/python/lib/database_lib/physiological_task_event.py b/python/lib/database_lib/physiological_task_event.py index 00836c09f..8849b9d4a 100644 --- a/python/lib/database_lib/physiological_task_event.py +++ b/python/lib/database_lib/physiological_task_event.py @@ -1,9 +1,6 @@ """This class performs database queries for the physiological_task_event table""" -__license__ = "GPLv3" - - class PhysiologicalTaskEvent: def __init__(self, db, verbose): diff --git a/python/lib/database_lib/physiological_task_event_hed_rel.py b/python/lib/database_lib/physiological_task_event_hed_rel.py index 768e83d0b..97130cfb6 100644 --- a/python/lib/database_lib/physiological_task_event_hed_rel.py +++ b/python/lib/database_lib/physiological_task_event_hed_rel.py @@ -1,9 +1,6 @@ """This class performs database queries for the physiological_task_event_hed_rel table""" -__license__ = "GPLv3" - - class PhysiologicalTaskEventHEDRel: def __init__(self, db, verbose): diff --git a/python/lib/database_lib/physiological_task_event_opt.py b/python/lib/database_lib/physiological_task_event_opt.py index 218d52ef0..932bbd398 100644 --- a/python/lib/database_lib/physiological_task_event_opt.py +++ b/python/lib/database_lib/physiological_task_event_opt.py @@ -1,9 +1,6 @@ """This class performs database queries for the physiological_task_event_opt table""" -__license__ = "GPLv3" - - class PhysiologicalTaskEventOpt: def __init__(self, db, verbose): diff --git a/python/lib/database_lib/point_3d.py b/python/lib/database_lib/point_3d.py index 3445e814f..15adb2f5e 100644 --- a/python/lib/database_lib/point_3d.py +++ b/python/lib/database_lib/point_3d.py @@ 
-2,8 +2,6 @@ from lib.point_3d import Point3D -__license__ = "GPLv3" - class Point3DDB: def __init__(self, db, verbose): diff --git a/python/lib/database_lib/project_cohort_rel.py b/python/lib/database_lib/project_cohort_rel.py index 393fe65ee..5bec2d20c 100644 --- a/python/lib/database_lib/project_cohort_rel.py +++ b/python/lib/database_lib/project_cohort_rel.py @@ -2,8 +2,6 @@ from typing_extensions import deprecated -__license__ = "GPLv3" - @deprecated('Use `lib.db.models.project_cohort.DbProjectCohort` instead') class ProjectCohortRel: diff --git a/python/lib/database_lib/session_db.py b/python/lib/database_lib/session_db.py index 69cdf5c64..ad3b97264 100644 --- a/python/lib/database_lib/session_db.py +++ b/python/lib/database_lib/session_db.py @@ -2,8 +2,6 @@ from typing_extensions import deprecated -__license__ = "GPLv3" - @deprecated('Use `lib.db.models.session.DbSession` instead') class SessionDB: diff --git a/python/lib/database_lib/site.py b/python/lib/database_lib/site.py index ca83741ef..3812f8ee1 100644 --- a/python/lib/database_lib/site.py +++ b/python/lib/database_lib/site.py @@ -2,8 +2,6 @@ from typing_extensions import deprecated -__license__ = "GPLv3" - @deprecated('Use `lib.db.models.site.DbSite` instead') class Site: diff --git a/python/lib/database_lib/tarchive.py b/python/lib/database_lib/tarchive.py index 7254c73c8..621615d19 100644 --- a/python/lib/database_lib/tarchive.py +++ b/python/lib/database_lib/tarchive.py @@ -2,8 +2,6 @@ from typing_extensions import deprecated -__license__ = "GPLv3" - @deprecated('Use `lib.db.models.dicom_archive.DbDicomArchive` instead') class Tarchive: diff --git a/python/lib/database_lib/tarchive_series.py b/python/lib/database_lib/tarchive_series.py index df0716d02..c1f52461d 100644 --- a/python/lib/database_lib/tarchive_series.py +++ b/python/lib/database_lib/tarchive_series.py @@ -2,8 +2,6 @@ from typing_extensions import deprecated -__license__ = "GPLv3" - @deprecated('Use 
`lib.db.models.dicom_archive_series.DbDicomArchiveSeries` instead') class TarchiveSeries: diff --git a/python/lib/database_lib/visit_windows.py b/python/lib/database_lib/visit_windows.py index d4aacd3c9..f45cfc94a 100644 --- a/python/lib/database_lib/visit_windows.py +++ b/python/lib/database_lib/visit_windows.py @@ -3,8 +3,6 @@ from typing_extensions import deprecated -__license__ = "GPLv3" - @deprecated('Use `lib.db.models.visit_window.DbVisitWindow` instead') class VisitWindows: diff --git a/python/lib/db/decorators/int_bool.py b/python/lib/db/decorators/int_bool.py new file mode 100644 index 000000000..fba0e3580 --- /dev/null +++ b/python/lib/db/decorators/int_bool.py @@ -0,0 +1,33 @@ +from typing import Literal + +from sqlalchemy import Integer +from sqlalchemy.engine import Dialect +from sqlalchemy.types import TypeDecorator + + +class IntBool(TypeDecorator[bool]): + """ + Decorator for a database boolean integer type. + In SQL, the type will appear as 'int'. + In Python, the type will appear as a boolean. + """ + + impl = Integer + + def process_bind_param(self, value: bool | None, dialect: Dialect) -> Literal[0, 1] | None: + match value: + case True: + return 1 + case False: + return 0 + case None: + return None + + def process_result_value(self, value: Literal[0, 1] | None, dialect: Dialect) -> bool | None: + match value: + case 1: + return True + case 0: + return False + case None: + return None diff --git a/python/lib/db/decorators/int_datetime.py b/python/lib/db/decorators/int_datetime.py new file mode 100644 index 000000000..7ef2a7eb1 --- /dev/null +++ b/python/lib/db/decorators/int_datetime.py @@ -0,0 +1,27 @@ +from datetime import datetime + +from sqlalchemy import Integer +from sqlalchemy.engine import Dialect +from sqlalchemy.types import TypeDecorator + + +class IntDatetime(TypeDecorator[datetime]): + """ + Decorator for a database timestamp integer type. + In SQL, the type will appear as 'int'. 
+ In Python, the type will appear as a datetime object. + """ + + impl = Integer + + def process_bind_param(self, value: datetime | None, dialect: Dialect) -> int | None: + if value is None: + return None + + return int(value.timestamp()) + + def process_result_value(self, value: int | None | None, dialect: Dialect) -> datetime | None: + if value is None: + return None + + return datetime.fromtimestamp(value) diff --git a/python/lib/db/decorators/true_false_bool.py b/python/lib/db/decorators/true_false_bool.py new file mode 100644 index 000000000..881f01fc2 --- /dev/null +++ b/python/lib/db/decorators/true_false_bool.py @@ -0,0 +1,33 @@ +from typing import Literal + +from sqlalchemy import Enum +from sqlalchemy.engine import Dialect +from sqlalchemy.types import TypeDecorator + + +class TrueFalseBool(TypeDecorator[bool]): + """ + Decorator for a database yes/no type. + In SQL, the type will appear as 'true' | 'false'. + In Python, the type will appear as a boolean. + """ + + impl = Enum('true', 'false') + + def process_bind_param(self, value: bool | None, dialect: Dialect) -> Literal['true', 'false'] | None: + match value: + case True: + return 'true' + case False: + return 'false' + case None: + return None + + def process_result_value(self, value: Literal['true', 'false'] | None, dialect: Dialect) -> bool | None: + match value: + case 'true': + return True + case 'false': + return False + case None: + return None diff --git a/python/lib/db/decorators/y_n_bool.py b/python/lib/db/decorators/y_n_bool.py index 656208204..6539a04cd 100644 --- a/python/lib/db/decorators/y_n_bool.py +++ b/python/lib/db/decorators/y_n_bool.py @@ -14,7 +14,7 @@ class YNBool(TypeDecorator[bool]): impl = Enum('Y', 'N') - def process_bind_param(self, value: bool | None, dialect: Dialect): + def process_bind_param(self, value: bool | None, dialect: Dialect) -> Literal['Y', 'N'] | None: match value: case True: return 'Y' @@ -23,7 +23,7 @@ def process_bind_param(self, value: bool | None, 
dialect: Dialect): case None: return None - def process_result_value(self, value: Literal['Y', 'N'] | None, dialect: Dialect): + def process_result_value(self, value: Literal['Y', 'N'] | None, dialect: Dialect) -> bool | None: match value: case 'Y': return True diff --git a/python/lib/db/models/candidate.py b/python/lib/db/models/candidate.py index 18c89edf0..4e8ee25df 100644 --- a/python/lib/db/models/candidate.py +++ b/python/lib/db/models/candidate.py @@ -9,6 +9,7 @@ import lib.db.models.session as db_session import lib.db.models.site as db_site from lib.db.base import Base +from lib.db.decorators.true_false_bool import TrueFalseBool from lib.db.decorators.y_n_bool import YNBool @@ -32,7 +33,7 @@ class DbCandidate(Base): registered_by : Mapped[str | None] = mapped_column('RegisteredBy') user_id : Mapped[str] = mapped_column('UserID') date_registered : Mapped[date | None] = mapped_column('Date_registered') - flagged_caveatemptor : Mapped[str | None] = mapped_column('flagged_caveatemptor') + flagged_caveatemptor : Mapped[bool | None] = mapped_column('flagged_caveatemptor', TrueFalseBool) flagged_reason : Mapped[int | None] = mapped_column('flagged_reason') flagged_other : Mapped[str | None] = mapped_column('flagged_other') flagged_other_status : Mapped[str | None] = mapped_column('flagged_other_status') diff --git a/python/lib/db/models/cohort.py b/python/lib/db/models/cohort.py index 3968506cf..0df7cde44 100644 --- a/python/lib/db/models/cohort.py +++ b/python/lib/db/models/cohort.py @@ -1,6 +1,7 @@ from sqlalchemy.orm import Mapped, mapped_column from lib.db.base import Base +from lib.db.decorators.int_bool import IntBool class DbCohort(Base): @@ -8,6 +9,6 @@ class DbCohort(Base): id : Mapped[int] = mapped_column('CohortID', primary_key=True) name : Mapped[str] = mapped_column('title') - use_edc : Mapped[bool | None] = mapped_column('useEDC') + use_edc : Mapped[bool | None] = mapped_column('useEDC', IntBool) window_difference : Mapped[str | None] = 
mapped_column('WindowDifference') recruitment_target : Mapped[int | None] = mapped_column('RecruitmentTarget') diff --git a/python/lib/db/models/config_setting.py b/python/lib/db/models/config_setting.py index b2b4cfa28..16c388b3d 100644 --- a/python/lib/db/models/config_setting.py +++ b/python/lib/db/models/config_setting.py @@ -1,6 +1,7 @@ from sqlalchemy.orm import Mapped, mapped_column from lib.db.base import Base +from lib.db.decorators.int_bool import IntBool class DbConfigSetting(Base): @@ -9,8 +10,8 @@ class DbConfigSetting(Base): id : Mapped[int] = mapped_column('ID', primary_key=True) name : Mapped[str] = mapped_column('Name') description : Mapped[str | None] = mapped_column('Description') - visible : Mapped[bool | None] = mapped_column('Visible') - allow_multiple : Mapped[bool | None] = mapped_column('AllowMultiple') + visible : Mapped[bool | None] = mapped_column('Visible', IntBool) + allow_multiple : Mapped[bool | None] = mapped_column('AllowMultiple', IntBool) data_type : Mapped[str | None] = mapped_column('DataType') parent_id : Mapped[int | None] = mapped_column('Parent') label : Mapped[str | None] = mapped_column('Label') diff --git a/python/lib/db/models/dicom_archive.py b/python/lib/db/models/dicom_archive.py index f41818008..e5952b321 100644 --- a/python/lib/db/models/dicom_archive.py +++ b/python/lib/db/models/dicom_archive.py @@ -11,6 +11,7 @@ import lib.db.models.mri_violation_log as db_mri_violation_log import lib.db.models.session as db_session from lib.db.base import Base +from lib.db.decorators.int_bool import IntBool class DbDicomArchive(Base): @@ -47,7 +48,7 @@ class DbDicomArchive(Base): create_info : Mapped[str | None] = mapped_column('CreateInfo') acquisition_metadata : Mapped[str] = mapped_column('AcquisitionMetadata') date_sent : Mapped[datetime | None] = mapped_column('DateSent') - pending_transfer : Mapped[bool] = mapped_column('PendingTransfer') + pending_transfer : Mapped[bool] = mapped_column('PendingTransfer', IntBool) series 
: Mapped[list['db_dicom_archive_series.DbDicomArchiveSeries']] \ = relationship('DbDicomArchiveSeries', back_populates='archive') diff --git a/python/lib/db/models/file.py b/python/lib/db/models/file.py index b247af6ae..15f2c0b37 100644 --- a/python/lib/db/models/file.py +++ b/python/lib/db/models/file.py @@ -1,4 +1,4 @@ -from datetime import date +from datetime import date, datetime from sqlalchemy import ForeignKey from sqlalchemy.orm import Mapped, mapped_column, relationship @@ -6,6 +6,8 @@ import lib.db.models.file_parameter as db_file_parameter import lib.db.models.session as db_session from lib.db.base import Base +from lib.db.decorators.int_bool import IntBool +from lib.db.decorators.int_datetime import IntDatetime class DbFile(Base): @@ -23,12 +25,12 @@ class DbFile(Base): scan_type_id : Mapped[int | None] = mapped_column('MriScanTypeID') file_type : Mapped[str | None] = mapped_column('FileType') inserted_by_user_id : Mapped[str] = mapped_column('InsertedByUserID') - insert_time : Mapped[int] = mapped_column('InsertTime') + insert_time : Mapped[datetime] = mapped_column('InsertTime', IntDatetime) source_pipeline : Mapped[str | None] = mapped_column('SourcePipeline') pipeline_date : Mapped[date | None] = mapped_column('PipelineDate') source_file_id : Mapped[int | None] = mapped_column('SourceFileID') process_protocol_id : Mapped[int | None] = mapped_column('ProcessProtocolID') - caveat : Mapped[bool | None] = mapped_column('Caveat') + caveat : Mapped[bool | None] = mapped_column('Caveat', IntBool) dicom_archive_id : Mapped[int | None] = mapped_column('TarchiveSource') hrrt_archive_id : Mapped[int | None] = mapped_column('HrrtArchiveID') scanner_id : Mapped[int | None] = mapped_column('ScannerID') diff --git a/python/lib/db/models/file_parameter.py b/python/lib/db/models/file_parameter.py index 2019265d3..5f803b061 100644 --- a/python/lib/db/models/file_parameter.py +++ b/python/lib/db/models/file_parameter.py @@ -1,9 +1,12 @@ +from datetime import datetime 
+ from sqlalchemy import ForeignKey from sqlalchemy.orm import Mapped, mapped_column, relationship import lib.db.models.file as db_file import lib.db.models.parameter_type as db_parameter_type from lib.db.base import Base +from lib.db.decorators.int_datetime import IntDatetime class DbFileParameter(Base): @@ -13,7 +16,7 @@ class DbFileParameter(Base): file_id : Mapped[int] = mapped_column('FileID', ForeignKey('files.FileID')) type_id : Mapped[int] = mapped_column('ParameterTypeID', ForeignKey('parameter_type.ParameterTypeID')) value : Mapped[str | None] = mapped_column('Value') - insert_time : Mapped[int] = mapped_column('InsertTime') + insert_time : Mapped[datetime] = mapped_column('InsertTime', IntDatetime) file: Mapped['db_file.DbFile'] \ = relationship('DbFile', back_populates='parameters') diff --git a/python/lib/db/models/mri_upload.py b/python/lib/db/models/mri_upload.py index b9a8fcdae..bcdf79aec 100644 --- a/python/lib/db/models/mri_upload.py +++ b/python/lib/db/models/mri_upload.py @@ -7,6 +7,7 @@ import lib.db.models.dicom_archive as db_dicom_archive import lib.db.models.session as db_session from lib.db.base import Base +from lib.db.decorators.int_bool import IntBool from lib.db.decorators.y_n_bool import YNBool @@ -18,16 +19,16 @@ class DbMriUpload(Base): upload_date : Mapped[datetime | None] = mapped_column('UploadDate') upload_location : Mapped[str] = mapped_column('UploadLocation') decompressed_location : Mapped[str] = mapped_column('DecompressedLocation') - insertion_complete : Mapped[bool] = mapped_column('InsertionComplete') - inserting : Mapped[bool | None] = mapped_column('Inserting') + insertion_complete : Mapped[bool] = mapped_column('InsertionComplete', IntBool) + inserting : Mapped[bool | None] = mapped_column('Inserting', IntBool) patient_name : Mapped[str] = mapped_column('PatientName') number_of_minc_inserted : Mapped[int | None] = mapped_column('number_of_mincInserted') number_of_minc_created : Mapped[int | None] = 
mapped_column('number_of_mincCreated') dicom_archive_id : Mapped[int | None] \ = mapped_column('TarchiveID', ForeignKey('tarchive.TarchiveID')) session_id : Mapped[int | None] = mapped_column('SessionID', ForeignKey('session.ID')) - is_candidate_info_validated : Mapped[bool | None] = mapped_column('IsCandidateInfoValidated') - is_dicom_archive_validated : Mapped[bool] = mapped_column('IsTarchiveValidated') + is_candidate_info_validated : Mapped[bool | None] = mapped_column('IsCandidateInfoValidated', IntBool) + is_dicom_archive_validated : Mapped[bool] = mapped_column('IsTarchiveValidated', IntBool) is_phantom : Mapped[bool] = mapped_column('IsPhantom', YNBool) dicom_archive : Mapped[Optional['db_dicom_archive.DbDicomArchive']] \ diff --git a/python/lib/db/models/notification_type.py b/python/lib/db/models/notification_type.py index e5454fdac..3845fcbbe 100644 --- a/python/lib/db/models/notification_type.py +++ b/python/lib/db/models/notification_type.py @@ -1,6 +1,7 @@ from sqlalchemy.orm import Mapped, mapped_column from lib.db.base import Base +from lib.db.decorators.int_bool import IntBool class DbNotificationType(Base): @@ -8,5 +9,5 @@ class DbNotificationType(Base): id : Mapped[int] = mapped_column('NotificationTypeID', primary_key=True) name : Mapped[str] = mapped_column('Type') - private : Mapped[bool | None] = mapped_column('private') + private : Mapped[bool | None] = mapped_column('private', IntBool) description: Mapped[str | None] = mapped_column('Description') diff --git a/python/lib/db/models/parameter_type.py b/python/lib/db/models/parameter_type.py index 1d29d3e37..83efc1eb2 100644 --- a/python/lib/db/models/parameter_type.py +++ b/python/lib/db/models/parameter_type.py @@ -2,6 +2,7 @@ import lib.db.models.file_parameter as db_file_parameter from lib.db.base import Base +from lib.db.decorators.int_bool import IntBool class DbParameterType(Base): @@ -17,8 +18,8 @@ class DbParameterType(Base): source_field : Mapped[str | None] = 
mapped_column('SourceField') source_from : Mapped[str | None] = mapped_column('SourceFrom') source_condition : Mapped[str | None] = mapped_column('SourceCondition') - queryable : Mapped[bool | None] = mapped_column('Queryable') - is_file : Mapped[bool | None] = mapped_column('IsFile') + queryable : Mapped[bool | None] = mapped_column('Queryable', IntBool) + is_file : Mapped[bool | None] = mapped_column('IsFile', IntBool) file_parameters: Mapped[list['db_file_parameter.DbFileParameter']] \ = relationship('DbFileParameter', back_populates='type') diff --git a/python/lib/db/models/parameter_type_category.py b/python/lib/db/models/parameter_type_category.py new file mode 100644 index 000000000..eb867ee63 --- /dev/null +++ b/python/lib/db/models/parameter_type_category.py @@ -0,0 +1,11 @@ +from sqlalchemy.orm import Mapped, mapped_column + +from lib.db.base import Base + + +class DbParameterTypeCategory(Base): + __tablename__ = 'parameter_type_category' + + id : Mapped[int] = mapped_column('ParameterTypeCategoryID', primary_key=True) + name : Mapped[str | None] = mapped_column('Name') + type : Mapped[str | None] = mapped_column('Type') diff --git a/python/lib/db/models/parameter_type_category_rel.py b/python/lib/db/models/parameter_type_category_rel.py new file mode 100644 index 000000000..223a98951 --- /dev/null +++ b/python/lib/db/models/parameter_type_category_rel.py @@ -0,0 +1,10 @@ +from sqlalchemy.orm import Mapped, mapped_column + +from lib.db.base import Base + + +class DbParameterTypeCategoryRel(Base): + __tablename__ = 'parameter_type_category_rel' + + parameter_type_id : Mapped[int] = mapped_column('ParameterTypeID', primary_key=True) + parameter_type_category_id : Mapped[int] = mapped_column('ParameterTypeCategoryID', primary_key=True) diff --git a/python/lib/db/models/session.py b/python/lib/db/models/session.py index 9d1467347..680d91ba6 100644 --- a/python/lib/db/models/session.py +++ b/python/lib/db/models/session.py @@ -8,6 +8,7 @@ import 
lib.db.models.project as db_project import lib.db.models.site as db_site from lib.db.base import Base +from lib.db.decorators.true_false_bool import TrueFalseBool from lib.db.decorators.y_n_bool import YNBool @@ -47,7 +48,7 @@ class DbSession(Base): mri_qc_pending : Mapped[bool] = mapped_column('MRIQCPending', YNBool) mri_qc_first_change_time : Mapped[datetime | None] = mapped_column('MRIQCFirstChangeTime') mri_qc_last_change_time : Mapped[datetime | None] = mapped_column('MRIQCLastChangeTime') - mri_caveat : Mapped[str] = mapped_column('MRICaveat') + mri_caveat : Mapped[bool] = mapped_column('MRICaveat', TrueFalseBool) language_id : Mapped[int | None] = mapped_column('languageID') candidate : Mapped['db_candidate.DbCandidate'] = relationship('DbCandidate', back_populates='sessions') diff --git a/python/lib/db/models/sex.py b/python/lib/db/models/sex.py new file mode 100644 index 000000000..3350a83ef --- /dev/null +++ b/python/lib/db/models/sex.py @@ -0,0 +1,9 @@ +from sqlalchemy.orm import Mapped, mapped_column + +from lib.db.base import Base + + +class DbSex(Base): + __tablename__ = 'sex' + + name : Mapped[str] = mapped_column('Name', primary_key=True) diff --git a/python/lib/db/queries/file.py b/python/lib/db/queries/file.py index e1d4817a4..395904b2d 100644 --- a/python/lib/db/queries/file.py +++ b/python/lib/db/queries/file.py @@ -26,6 +26,17 @@ def try_get_file_with_unique_combination( ).scalar_one_or_none() +def try_get_file_with_rel_path(db: Database, rel_path: str) -> DbFile | None: + """ + Get an imaging file from the database using its relative path, or return `None` if no imaging + file is found. 
+ """ + + return db.execute(select(DbFile) + .where(DbFile.rel_path == rel_path) + ).scalar_one_or_none() + + def try_get_file_with_hash(db: Database, file_hash: str) -> DbFile | None: """ Get an imaging file from the database using its BLAKE2b or MD5 hash, or return `None` if no diff --git a/python/lib/db/queries/file_parameter.py b/python/lib/db/queries/file_parameter.py index db4ba107f..809c28eee 100644 --- a/python/lib/db/queries/file_parameter.py +++ b/python/lib/db/queries/file_parameter.py @@ -28,3 +28,15 @@ def try_get_parameter_value_with_file_id_parameter_name( .where(DbParameterType.name == parameter_name) .where(DbFileParameter.file_id == file_id) ).scalar_one_or_none() + + +def try_get_file_parameter_with_file_id_type_id(db: Database, file_id: int, type_id: int) -> DbFileParameter | None: + """ + Get a file parameter from the database using its file ID and type ID, or return `None` if no + file parameter is found. + """ + + return db.execute(select(DbFileParameter) + .where(DbFileParameter.type_id == type_id) + .where(DbFileParameter.file_id == file_id) + ).scalar_one_or_none() diff --git a/python/lib/db/queries/parameter_type.py b/python/lib/db/queries/parameter_type.py index f4674bc2e..a4102feec 100644 --- a/python/lib/db/queries/parameter_type.py +++ b/python/lib/db/queries/parameter_type.py @@ -4,6 +4,7 @@ from sqlalchemy.orm import Session as Database from lib.db.models.parameter_type import DbParameterType +from lib.db.models.parameter_type_category import DbParameterTypeCategory def get_all_parameter_types(db: Database) -> Sequence[DbParameterType]: @@ -12,3 +13,25 @@ def get_all_parameter_types(db: Database) -> Sequence[DbParameterType]: """ return db.execute(select(DbParameterType)).scalars().all() + + +def try_get_parameter_type_with_name(db: Database, name: str) -> DbParameterType | None: + """ + Get a parameter type from the database using its name, or return `None` if no parameter type is + found. 
+ """ + + return db.execute(select(DbParameterType) + .where(DbParameterType.name == name) + ).scalar_one_or_none() + + +def get_parameter_type_category_with_name(db: Database, name: str) -> DbParameterTypeCategory: + """ + Get a parameter type category from the database using its name, or raise an exception if no + parameter type category is found. + """ + + return db.execute(select(DbParameterTypeCategory) + .where(DbParameterTypeCategory.name == name) + ).scalar_one() diff --git a/python/lib/db/queries/sex.py b/python/lib/db/queries/sex.py new file mode 100644 index 000000000..caa871aa6 --- /dev/null +++ b/python/lib/db/queries/sex.py @@ -0,0 +1,14 @@ +from sqlalchemy import select +from sqlalchemy.orm import Session as Database + +from lib.db.models.sex import DbSex + + +def try_get_sex_with_name(db: Database, name: str) -> DbSex | None: + """ + Try to get a sex from the database using its name, or return `None` if no sex is found. + """ + + return db.execute(select(DbSex) + .where(DbSex.name == name) + ).scalar_one_or_none() diff --git a/python/lib/dcm2bids_imaging_pipeline_lib/dicom_archive_loader_pipeline.py b/python/lib/dcm2bids_imaging_pipeline_lib/dicom_archive_loader_pipeline.py index cd5f4a0bf..76ccf640e 100644 --- a/python/lib/dcm2bids_imaging_pipeline_lib/dicom_archive_loader_pipeline.py +++ b/python/lib/dcm2bids_imaging_pipeline_lib/dicom_archive_loader_pipeline.py @@ -8,8 +8,6 @@ from lib.dcm2bids_imaging_pipeline_lib.base_pipeline import BasePipeline from lib.logging import log_error_exit, log_verbose -__license__ = "GPLv3" - class DicomArchiveLoaderPipeline(BasePipeline): """ @@ -112,10 +110,11 @@ def _run_dicom_archive_validation_pipeline(self): validation_command = [ "run_dicom_archive_validation.py", - "-p", self.options_dict["profile"]["value"], "-t", self.tarchive_path, "-u", str(self.mri_upload.id) ] + if self.options_dict["profile"]["value"] is not None: + validation_command.extend(['-p', self.options_dict["profile"]["value"]]) if 
self.verbose: validation_command.append("-v") @@ -300,12 +299,13 @@ def _run_nifti_insertion(self, nifti_file_path, json_file_path, bval_file_path=N nifti_insertion_command = [ "run_nifti_insertion.py", - "-p", self.options_dict["profile"]["value"], "-u", str(self.mri_upload.id), "-n", nifti_file_path, "-j", json_file_path, "-c" ] + if self.options_dict["profile"]["value"] is not None: + nifti_insertion_command.extend(['-p', self.options_dict["profile"]["value"]]) if bval_file_path: nifti_insertion_command.extend(["-l", bval_file_path]) if bvec_file_path: diff --git a/python/lib/dcm2bids_imaging_pipeline_lib/dicom_validation_pipeline.py b/python/lib/dcm2bids_imaging_pipeline_lib/dicom_validation_pipeline.py index 56c0c7745..f4b6abda6 100644 --- a/python/lib/dcm2bids_imaging_pipeline_lib/dicom_validation_pipeline.py +++ b/python/lib/dcm2bids_imaging_pipeline_lib/dicom_validation_pipeline.py @@ -8,8 +8,6 @@ from lib.logging import log_error_exit, log_verbose from lib.util.crypto import compute_file_md5_hash -__license__ = "GPLv3" - class DicomValidationPipeline(BasePipeline): """ diff --git a/python/lib/dcm2bids_imaging_pipeline_lib/nifti_insertion_pipeline.py b/python/lib/dcm2bids_imaging_pipeline_lib/nifti_insertion_pipeline.py index 150759907..e4a3d5f0a 100644 --- a/python/lib/dcm2bids_imaging_pipeline_lib/nifti_insertion_pipeline.py +++ b/python/lib/dcm2bids_imaging_pipeline_lib/nifti_insertion_pipeline.py @@ -11,11 +11,10 @@ from lib.db.queries.dicom_archive import try_get_dicom_archive_series_with_series_uid_echo_time from lib.dcm2bids_imaging_pipeline_lib.base_pipeline import BasePipeline from lib.get_session_info import SessionConfigError, get_dicom_archive_session_info +from lib.imaging_lib.nifti import add_nifti_spatial_file_parameters from lib.logging import log_error_exit, log_verbose from lib.util.crypto import compute_file_blake2b_hash, compute_file_md5_hash -__license__ = "GPLv3" - class NiftiInsertionPipeline(BasePipeline): """ @@ -75,7 +74,7 @@ def 
__init__(self, loris_getopt_obj, script_name): # Load the JSON file object with scan parameters if a JSON file was provided # --------------------------------------------------------------------------------------------- self.json_file_dict = self._load_json_sidecar_file() - self._add_step_and_space_params_to_json_file_dict() + add_nifti_spatial_file_parameters(self.nifti_path, self.json_file_dict) # --------------------------------------------------------------------------------- # Determine subject IDs based on DICOM headers and validate the IDs against the DB @@ -373,20 +372,6 @@ def _determine_acquisition_protocol(self): return protocol_info['scan_type_id'], protocol_info['mri_protocol_group_id'] - def _add_step_and_space_params_to_json_file_dict(self): - """ - Adds step and space information to the JSON file dictionary listing NIfTI file acquisition parameters. - """ - step_params = self.imaging_obj.get_nifti_image_step_parameters(self.nifti_path) - length_params = self.imaging_obj.get_nifti_image_length_parameters(self.nifti_path) - self.json_file_dict['xstep'] = step_params[0] - self.json_file_dict['ystep'] = step_params[1] - self.json_file_dict['zstep'] = step_params[2] - self.json_file_dict['xspace'] = length_params[0] - self.json_file_dict['yspace'] = length_params[1] - self.json_file_dict['zspace'] = length_params[2] - self.json_file_dict['time'] = length_params[3] if len(length_params) == 4 else None - def _move_to_assembly_and_insert_file_info(self): """ Determines where the NIfTI file and its associated files (.json, .bval, .bvec...) 
will go in the assembly_bids @@ -706,7 +691,7 @@ def _create_pic_image(self): 'cand_id': self.session.candidate.cand_id, 'data_dir_path': self.data_dir, 'file_rel_path': self.assembly_nifti_rel_path, - 'is_4D_dataset': True if self.json_file_dict['time'] else False, + 'is_4D_dataset': self.json_file_dict['time'] is not None, 'file_id': self.file_id } pic_rel_path = self.imaging_obj.create_imaging_pic(file_info) @@ -721,9 +706,10 @@ def _run_push_to_s3_pipeline(self): push_to_s3_cmd = [ "run_push_imaging_files_to_s3_pipeline.py", - "-p", self.options_dict["profile"]["value"], "-u", str(self.mri_upload.id), ] + if self.options_dict["profile"]["value"] is not None: + push_to_s3_cmd.extend(['-p', self.options_dict["profile"]["value"]]) if self.verbose: push_to_s3_cmd.append("-v") diff --git a/python/lib/dcm2bids_imaging_pipeline_lib/push_imaging_files_to_s3_pipeline.py b/python/lib/dcm2bids_imaging_pipeline_lib/push_imaging_files_to_s3_pipeline.py index f603a1a68..0483e0f14 100644 --- a/python/lib/dcm2bids_imaging_pipeline_lib/push_imaging_files_to_s3_pipeline.py +++ b/python/lib/dcm2bids_imaging_pipeline_lib/push_imaging_files_to_s3_pipeline.py @@ -7,8 +7,6 @@ from lib.logging import log_error_exit from lib.util.fs import remove_empty_directories -__license__ = "GPLv3" - class PushImagingFilesToS3Pipeline(BasePipeline): """ diff --git a/python/lib/dicom_archive.py b/python/lib/dicom_archive.py index 2f32035fb..16f7538da 100644 --- a/python/lib/dicom_archive.py +++ b/python/lib/dicom_archive.py @@ -6,8 +6,6 @@ from lib.database_lib.tarchive import Tarchive from lib.database_lib.tarchive_series import TarchiveSeries -__license__ = "GPLv3" - @deprecated('Use `lib.db.models.dicom_archive.DbDicomArchive` instead') class DicomArchive: diff --git a/python/lib/eeg.py b/python/lib/eeg.py index 96893ce5a..a909e0139 100644 --- a/python/lib/eeg.py +++ b/python/lib/eeg.py @@ -18,8 +18,6 @@ from lib.session import Session from lib.util.crypto import compute_file_blake2b_hash 
-__license__ = "GPLv3" - class Eeg: """ diff --git a/python/lib/exitcode.py b/python/lib/exitcode.py index e1556bdd3..39ba4bdec 100644 --- a/python/lib/exitcode.py +++ b/python/lib/exitcode.py @@ -2,9 +2,6 @@ Perl ones that are in uploadNeuroDB/NeuroDB/ExitCodes.pm""" -__license__ = "GPLv3" - - # -- Script ran successfully SUCCESS = 0 # yeah!! Success!! diff --git a/python/lib/get_session_info.py b/python/lib/get_session_info.py index 09785812a..9a5f11cb0 100644 --- a/python/lib/get_session_info.py +++ b/python/lib/get_session_info.py @@ -17,7 +17,7 @@ from lib.db.queries.site import try_get_site_with_alias from lib.db.queries.visit import try_get_visit_window_with_visit_label, try_get_visit_with_visit_label from lib.env import Env -from lib.scanner import MriScannerInfo, get_or_create_scanner +from lib.imaging_lib.mri_scanner import MriScannerInfo, get_or_create_scanner @dataclass @@ -259,7 +259,7 @@ def create_session( hardcopy_request = '-', mri_qc_status = '', mri_qc_pending = False, - mri_caveat = 'true', + mri_caveat = True, ) env.db.add(session) diff --git a/python/lib/imaging.py b/python/lib/imaging.py index 4a718fafd..7a83111ad 100644 --- a/python/lib/imaging.py +++ b/python/lib/imaging.py @@ -24,8 +24,6 @@ from lib.database_lib.parameter_type import ParameterType from lib.util.crypto import compute_file_blake2b_hash -__license__ = "GPLv3" - class Imaging: """ diff --git a/python/lib/imaging_io.py b/python/lib/imaging_io.py index 070141a9f..08f4c531f 100644 --- a/python/lib/imaging_io.py +++ b/python/lib/imaging_io.py @@ -11,8 +11,6 @@ """Set of io functions.""" -__license__ = "GPLv3" - @deprecated('Use `lib.logging` and `lib.util.fs` instead') class ImagingIO: diff --git a/python/lib/scanner.py b/python/lib/imaging_lib/mri_scanner.py similarity index 97% rename from python/lib/scanner.py rename to python/lib/imaging_lib/mri_scanner.py index 2fdf6d86b..4daa2e379 100644 --- a/python/lib/scanner.py +++ b/python/lib/imaging_lib/mri_scanner.py @@ -14,7 +14,7 
@@ @dataclass class MriScannerInfo: """ - Information about an MRI scanner extracted from DICOM data. + Information about an MRI scanner. """ manufacturer: str | None diff --git a/python/lib/imaging_lib/nifti.py b/python/lib/imaging_lib/nifti.py new file mode 100644 index 000000000..7d8c5697f --- /dev/null +++ b/python/lib/imaging_lib/nifti.py @@ -0,0 +1,41 @@ +import os +from collections.abc import Iterator +from typing import Any, cast + +import nibabel as nib + + +def add_nifti_spatial_file_parameters(nifti_path: str, file_parameters: dict[str, Any]): + """ + Read a NIfTI image and add its spatial metadata to the file parameters. + """ + + img = nib.load(nifti_path) # type: ignore + + # Add the voxel step parameters from the NIfTI file header. + zooms = cast(tuple[float, ...], img.header.get_zooms()) # type: ignore + file_parameters['xstep'] = zooms[0] + file_parameters['ystep'] = zooms[1] + file_parameters['zstep'] = zooms[2] + + # Add the time length parameters from the NIfTI file header. + shape = cast(tuple[int, ...], img.shape) # type: ignore + file_parameters['xspace'] = shape[0] + file_parameters['yspace'] = shape[1] + file_parameters['zspace'] = shape[2] + + # Add the time parameter if the image is a 4D dataset. + if len(shape) == 4: + file_parameters['time'] = shape[3] + else: + file_parameters['time'] = None + + +def find_dir_nifti_names(dir_path: str) -> Iterator[str]: + """ + Iterate over the names of the NIfTI files found in a directory. 
+ """ + + for file_name in os.listdir(dir_path): + if file_name.endswith(('.nii', '.nii.gz')): + yield file_name diff --git a/python/lib/imaging_upload.py b/python/lib/imaging_upload.py index 3e21587c8..a390c19b6 100644 --- a/python/lib/imaging_upload.py +++ b/python/lib/imaging_upload.py @@ -4,8 +4,6 @@ from lib.database_lib.mri_upload_db import MriUploadDB -__license__ = "GPLv3" - @deprecated('Use `lib.db.models.mri_upload.DbMriUpload` instead') class ImagingUpload: diff --git a/python/lib/import_dicom_study/summary_util.py b/python/lib/import_dicom_study/summary_util.py index d12368a20..fee4048d9 100644 --- a/python/lib/import_dicom_study/summary_util.py +++ b/python/lib/import_dicom_study/summary_util.py @@ -1,8 +1,8 @@ from lib.config import get_patient_id_dicom_header_config from lib.env import Env from lib.get_session_info import SessionInfo, get_session_info +from lib.imaging_lib.mri_scanner import MriScannerInfo from lib.import_dicom_study.summary_type import DicomStudySummary -from lib.scanner import MriScannerInfo def get_dicom_study_summary_scanner_info(dicom_summary: DicomStudySummary) -> MriScannerInfo: diff --git a/python/lib/log.py b/python/lib/log.py index 91a769b32..1efb885a0 100644 --- a/python/lib/log.py +++ b/python/lib/log.py @@ -6,8 +6,6 @@ from lib.database_lib.notification import Notification -__license__ = "GPLv3" - @deprecated('Use `lib.logging` instead') class Log: diff --git a/python/lib/lorisgetopt.py b/python/lib/lorisgetopt.py index 25bb58e4a..63b59d0f9 100644 --- a/python/lib/lorisgetopt.py +++ b/python/lib/lorisgetopt.py @@ -6,11 +6,10 @@ import lib.exitcode import lib.utilities from lib.aws_s3 import AwsS3 +from lib.config_file import load_config from lib.database import Database from lib.database_lib.config import Config -__license__ = "GPLv3" - class LorisGetOpt: """ @@ -33,12 +32,11 @@ class LorisGetOpt: "usage : example.py -p -f ...\n\n" "options: \n" - "\t-p, --profile : Name of the python database config file in 
dicom-archive/.loris_mri\n" + "\t-p, --profile : Name of the python database config file in config\n" "\t-n, --file_path : Absolute file path to process\n" "\t-v, --verbose : If set, be verbose\n\n" "required options are: \n" - "\t--profile\n" "\t--file_path\n" ) @@ -181,26 +179,7 @@ def load_config_file(self): with a proper error message. """ - profile_value = self.options_dict["profile"]["value"] - - if "LORIS_CONFIG" not in os.environ.keys(): - print("\n[ERROR ] Environment variable 'LORIS_CONFIG' not set\n") - sys.exit(lib.exitcode.INVALID_ENVIRONMENT_VAR) - - config_file = os.path.join(os.environ["LORIS_CONFIG"], ".loris_mri", profile_value) - if not config_file.endswith(".py"): - print( - f"\n[ERROR ] {config_file} does not appear to be the python configuration file." - f" Try using 'database_config.py' instead.\n" - ) - sys.exit(lib.exitcode.INVALID_ARG) - - if os.path.isfile(config_file): - sys.path.append(os.path.dirname(config_file)) - self.config_info = __import__(os.path.basename(config_file[:-3])) - else: - print(f"\n[ERROR ] {profile_value} does not exist in {os.environ['LORIS_CONFIG']}.") - sys.exit(lib.exitcode.INVALID_PATH) + self.config_info = load_config(self.options_dict["profile"]["value"]) def check_required_options_are_set(self): """ diff --git a/python/lib/mri.py b/python/lib/mri.py index 6afaaf2cf..03259b5ca 100644 --- a/python/lib/mri.py +++ b/python/lib/mri.py @@ -14,8 +14,6 @@ from lib.session import Session from lib.util.crypto import compute_file_blake2b_hash -__license__ = "GPLv3" - class Mri: """ diff --git a/python/lib/physiological.py b/python/lib/physiological.py index 2abef84ab..1398e8f75 100644 --- a/python/lib/physiological.py +++ b/python/lib/physiological.py @@ -21,8 +21,6 @@ from lib.database_lib.point_3d import Point3DDB from lib.point_3d import Point3D -__license__ = "GPLv3" - class Physiological: """ diff --git a/python/lib/point_3d.py b/python/lib/point_3d.py index e1fc8c6ec..d0d03b42a 100644 --- a/python/lib/point_3d.py 
+++ b/python/lib/point_3d.py @@ -1,7 +1,5 @@ """This class represents a Point with 3D coordinates""" -__license__ = "GPLv3" - class Point3D: def __init__(self, pid: int, x: float, y: float, z: float): diff --git a/python/lib/scanstsv.py b/python/lib/scanstsv.py index 34d1bf4df..5cb938615 100644 --- a/python/lib/scanstsv.py +++ b/python/lib/scanstsv.py @@ -7,8 +7,6 @@ import lib import lib.utilities as utilities -__license__ = "GPLv3" - class ScansTSV: """ diff --git a/python/lib/session.py b/python/lib/session.py index a727bc2fd..5d2e93793 100644 --- a/python/lib/session.py +++ b/python/lib/session.py @@ -7,8 +7,6 @@ from lib.database_lib.session_db import SessionDB from lib.database_lib.site import Site -__license__ = "GPLv3" - class Session: """ diff --git a/python/lib/util/fs.py b/python/lib/util/fs.py index df29841c2..126a7c217 100644 --- a/python/lib/util/fs.py +++ b/python/lib/util/fs.py @@ -1,8 +1,9 @@ import os +import re import shutil import tarfile import tempfile -from collections.abc import Generator +from collections.abc import Iterator from datetime import datetime import lib.exitcode @@ -25,7 +26,7 @@ def extract_archive(env: Env, tar_path: str, prefix: str, dir_path: str) -> str: return extract_path -def iter_all_dir_files(dir_path: str) -> Generator[str, None, None]: +def iter_all_dir_files(dir_path: str) -> Iterator[str]: """ Iterate through all the files in a directory recursively, and yield the path of each file relative to that directory. @@ -77,3 +78,39 @@ def remove_empty_directories(dir_path: str): for subdir_path, _, _ in os.walk(dir_path, topdown=False): if is_directory_empty(subdir_path): os.rmdir(subdir_path) + + +def get_file_extension(file_name: str) -> str: + """ + Get the extension (including multiple extensions) of a file name or path without the leading + dot. 
+ """ + + parts = file_name.split('.', maxsplit=1) + if len(parts) == 1: + return '' + + return parts[1] + + +def replace_file_extension(file_name: str, extension: str) -> str: + """ + Replace the extension (including multiple extensions) of a file name or path by another + extension. + """ + + parts = file_name.split('.') + return f'{parts[0]}.{extension}' + + +def search_dir_file_with_regex(dir_path: str, regex: str) -> str | None: + """ + Search for a file within a directory whose name matches a regular expression, or return `None` + if no such file is found. + """ + + for file in os.scandir(dir_path): + if re.search(regex, file.name): + return file.name + + return None diff --git a/python/lib/utilities.py b/python/lib/utilities.py index 71444e91b..dcf3c06bf 100644 --- a/python/lib/utilities.py +++ b/python/lib/utilities.py @@ -20,8 +20,6 @@ import lib.exitcode import lib.util.crypto -__license__ = "GPLv3" - def read_tsv_file(tsv_file): """ diff --git a/python/scripts/bids_import.py b/python/scripts/bids_import.py index 50f8e0f25..9f54eabac 100755 --- a/python/scripts/bids_import.py +++ b/python/scripts/bids_import.py @@ -13,6 +13,7 @@ import lib.utilities from lib.bidsreader import BidsReader from lib.candidate import Candidate +from lib.config_file import load_config from lib.database import Database from lib.database_lib.config import Config from lib.eeg import Eeg @@ -20,9 +21,6 @@ from lib.session import Session from lib.util.crypto import compute_file_blake2b_hash -__license__ = "GPLv3" - - sys.path.append('/home/user/python') @@ -37,7 +35,7 @@ def main(): idsvalidation = False nobidsvalidation = False type = None - profile = '' + profile = None nocopy = False long_options = [ @@ -50,7 +48,7 @@ def main(): '\n' 'usage : bids_import -d -p \n\n' 'options: \n' - '\t-p, --profile : name of the python database config file in dicom-archive/.loris-mri\n' + '\t-p, --profile : name of the python database config file in the config directory\n' '\t-d, --directory : 
BIDS directory to parse & insert into LORIS\n' 'If directory is within $data_dir/assembly_bids, no copy will be performed' '\t-c, --createcandidate : to create BIDS candidates in LORIS (optional)\n' @@ -75,7 +73,7 @@ def main(): print(usage) sys.exit() elif opt in ('-p', '--profile'): - profile = os.environ['LORIS_CONFIG'] + "/.loris_mri/" + arg + profile = arg elif opt in ('-d', '--directory'): bids_dir = arg elif opt in ('-v', '--verbose'): @@ -94,7 +92,8 @@ def main(): type = arg # input error checking and load config_file file - config_file = input_error_checking(profile, bids_dir, usage) + config_file = load_config(profile) + input_error_checking(bids_dir, usage) dataset_json = bids_dir + "/dataset_description.json" if not os.path.isfile(dataset_json) and not type: @@ -131,30 +130,16 @@ def main(): ) -def input_error_checking(profile, bids_dir, usage): +def input_error_checking(bids_dir, usage): """ - Checks whether the required inputs are set and that paths are valid. If - the path to the config_file file valid, then it will import the file as a - module so the database connection information can be used to connect. + Checks whether the required inputs are set and that paths are valid. 
- :param profile : path to the profile file with MySQL credentials - :type profile : str :param bids_dir: path to the BIDS directory to parse and insert into LORIS :type bids_dir: str :param usage : script usage to be displayed when encountering an error :type usage : st - - :return: config_file module with database credentials (config_file.mysql) - :rtype: module """ - if not profile: - message = '\n\tERROR: you must specify a profile file using -p or ' \ - '--profile option' - print(message) - print(usage) - sys.exit(lib.exitcode.MISSING_ARG) - if not bids_dir: message = '\n\tERROR: you must specify a BIDS directory using -d or ' \ '--directory option' @@ -162,16 +147,6 @@ def input_error_checking(profile, bids_dir, usage): print(usage) sys.exit(lib.exitcode.MISSING_ARG) - if os.path.isfile(profile): - sys.path.append(os.path.dirname(profile)) - config_file = __import__(os.path.basename(profile[:-3])) - else: - message = '\n\tERROR: you must specify a valid profile file.\n' + \ - profile + ' does not exist!' - print(message) - print(usage) - sys.exit(lib.exitcode.INVALID_PATH) - if not os.path.isdir(bids_dir): message = '\n\tERROR: you must specify a valid BIDS directory.\n' + \ bids_dir + ' does not exist!' 
@@ -179,8 +154,6 @@ def input_error_checking(profile, bids_dir, usage): print(usage) sys.exit(lib.exitcode.INVALID_PATH) - return config_file - def read_and_insert_bids( bids_dir, data_dir, verbose, createcand, createvisit, diff --git a/python/scripts/delete_physiological_file.py b/python/scripts/delete_physiological_file.py index 536c272c6..227a817ac 100755 --- a/python/scripts/delete_physiological_file.py +++ b/python/scripts/delete_physiological_file.py @@ -24,20 +24,19 @@ def main(): "usage : delete_physiological_file.py -f file_id -p ...\n\n" "options: \n" - "\t-p, --profile : Name of the python database config file in dicom-archive/.loris_mri\n" + "\t-p, --profile : Name of the python database config file in config\n" "\t-f, --fileid : Id of the file (PhysiologicalFileID) to delete\n" "\t-c, --confirm : After a trial run, perform the deletion\n" "\t-d, --deleteondisk : Delete files on disk\n" "\t-v, --verbose : If set, be verbose\n\n" "required options are: \n" - "\t--profile\n" "\t--fileid\n" ) options_dict = { "profile": { - "value": None, "required": True, "expect_arg": True, "short_opt": "p", "is_path": False + "value": None, "required": False, "expect_arg": True, "short_opt": "p", "is_path": False }, "fileid": { "value": None, "required": True, "expect_arg": True, "short_opt": "f", "is_path": False diff --git a/python/scripts/extract_eeg_bids_archive.py b/python/scripts/extract_eeg_bids_archive.py index d7c43bb44..7399b18f0 100755 --- a/python/scripts/extract_eeg_bids_archive.py +++ b/python/scripts/extract_eeg_bids_archive.py @@ -15,8 +15,6 @@ from lib.make_env import make_env from lib.util.fs import copy_file, extract_archive, remove_directory -__license__ = "GPLv3" - sys.path.append('/home/user/python') @@ -33,17 +31,14 @@ def main(): "usage : extract_eeg_bids_archive.py -p -u ...\n\n" "options: \n" - "\t-p, --profile : Name of the python database config file in dicom-archive/.loris_mri\n" + "\t-p, --profile : Name of the python database config file 
in config\n" "\t-u, --upload_id : ID of the upload (from electrophysiology_uploader) of the EEG archive\n" "\t-v, --verbose : If set, be verbose\n\n" - - "required options are: \n" - "\t--profile\n" ) options_dict = { "profile": { - "value": None, "required": True, "expect_arg": True, "short_opt": "p", "is_path": False + "value": None, "required": False, "expect_arg": True, "short_opt": "p", "is_path": False }, "upload_id": { "value": None, "required": False, "expect_arg": True, "short_opt": "u", "is_path": False diff --git a/python/scripts/import_dicom_study.py b/python/scripts/import_dicom_study.py index 63365332d..bec34dd11 100755 --- a/python/scripts/import_dicom_study.py +++ b/python/scripts/import_dicom_study.py @@ -60,8 +60,8 @@ def main() -> None: "Usage: import_dicom_study.py -p -s ...\n" "\n" "Options: \n" - "\t-p, --profile : Name of the LORIS Python configuration file (usually\n" - "\t 'database_config.py')\n" + "\t-p, --profile : Name of the LORIS Python configuration file (default:\n" + "\t 'config.py')\n" "\t-s, --source : Path of the source directory containing the DICOM files of the" "\t study.\n" "\t --overwrite : Overwrite the DICOM archive file if it already exists.\n" @@ -74,7 +74,6 @@ def main() -> None: "\t-v, --verbose : If set, be verbose\n" "\n" "Required options: \n" - "\t--profile\n" "\t--source\n" ) @@ -82,7 +81,7 @@ def main() -> None: # repeat the long names. 
options_dict = { "profile": { - "value": None, "required": True, "expect_arg": True, "short_opt": "p", "is_path": False + "value": None, "required": False, "expect_arg": True, "short_opt": "p", "is_path": False }, "source": { "value": None, "required": True, "expect_arg": True, "short_opt": "s", "is_path": True, diff --git a/python/scripts/ingest_eeg_bids_datasets.py b/python/scripts/ingest_eeg_bids_datasets.py index 9c762af9f..94c15daff 100755 --- a/python/scripts/ingest_eeg_bids_datasets.py +++ b/python/scripts/ingest_eeg_bids_datasets.py @@ -12,8 +12,6 @@ from lib.lorisgetopt import LorisGetOpt from scripts.delete_physiological_file import delete_physiological_file_in_db -__license__ = "GPLv3" - sys.path.append('/home/user/python') @@ -29,17 +27,14 @@ def main(): "usage : ingest_eeg_bids_datasets.py -p -d ...\n\n" "options: \n" - "\t-p, --profile : Name of the python database config file in dicom-archive/.loris_mri\n" + "\t-p, --profile : Name of the python database config file in config\n" "\t-u, --upload_id : ID of the upload (from electrophysiology_uploader) of the EEG dataset\n" "\t-v, --verbose : If set, be verbose\n\n" - - "required options are: \n" - "\t--profile\n" ) options_dict = { "profile": { - "value": None, "required": True, "expect_arg": True, "short_opt": "p", "is_path": False + "value": None, "required": False, "expect_arg": True, "short_opt": "p", "is_path": False }, "upload_id": { "value": None, "required": False, "expect_arg": True, "short_opt": "u", "is_path": False diff --git a/python/scripts/mass_electrophysiology_chunking.py b/python/scripts/mass_electrophysiology_chunking.py index 0a05a69d9..355a96ada 100755 --- a/python/scripts/mass_electrophysiology_chunking.py +++ b/python/scripts/mass_electrophysiology_chunking.py @@ -3,17 +3,14 @@ """Script to mass chunk electrophysiology datasets.""" import getopt -import os import sys import lib.exitcode +from lib.config_file import load_config from lib.database import Database from 
lib.database_lib.config import Config from lib.physiological import Physiological -__license__ = "GPLv3" - - sys.path.append('/home/user/python') @@ -21,7 +18,7 @@ # sys.tracebacklimit = 0 def main(): - profile = '' + profile = None verbose = False smallest_id = None largest_id = None @@ -35,8 +32,8 @@ def main(): 'usage : mass_electrophysiology_chunking.py -p -s ' '-l \n\n' 'options: \n' - '\t-p, --profile : name of the python database config file in ' - 'dicom-archive/.loris-mri\n' + '\t-p, --profile : name of the python database config file in the config' + ' directory\n' '\t-s, --smallest_id: smallest PhyiologicalFileID to chunk\n' '\t-l, --largest_id : largest PhysiologicalFileID to chunk\n' '\t-v, --verbose : be verbose\n' @@ -53,7 +50,7 @@ def main(): print(usage) sys.exit() elif opt in ('-p', '--profile'): - profile = os.environ['LORIS_CONFIG'] + "/.loris_mri/" + arg + profile = arg elif opt in ('-s', '--smallest_id'): smallest_id = int(arg) elif opt in ('-l', '--largest_id'): @@ -62,7 +59,8 @@ def main(): verbose = True # input error checking and load config_file file - config_file = input_error_checking(profile, smallest_id, largest_id, usage) + config_file = load_config(profile) + input_error_checking(smallest_id, largest_id, usage) # run chunking script on electrophysiology datasets with a PhysiologicalFileID # between smallest_id and largest_id @@ -73,32 +71,18 @@ def main(): make_chunks(file_id, config_file, verbose) -def input_error_checking(profile, smallest_id, largest_id, usage): +def input_error_checking(smallest_id, largest_id, usage): """ - Checks whether the required inputs are correctly set. If - the path to the config_file file valid, then it will import the file as a - module so the database connection information can be used to connect. + Checks whether the required inputs are correctly set. 
- :param profile : path to the profile file with MySQL credentials - :type profile : str :param smallest_id: smallest PhysiologicalFileID on which to run the chunking script :type smallest_id: int :param largest_id : largest PhysiologicalFileID on which to run the chunking script :type largest_id : int :param usage : script usage to be displayed when encountering an error :type usage : str - - :return: config_file module with database credentials (config_file.mysql) - :rtype: module """ - if not profile: - message = '\n\tERROR: you must specify a profile file using -p or ' \ - '--profile option' - print(message) - print(usage) - sys.exit(lib.exitcode.MISSING_ARG) - if not smallest_id: message = '\n\tERROR: you must specify a smallest PhysiologyFileID on ' \ 'which to run the chunking script using -s or --smallest_id option' @@ -120,18 +104,6 @@ def input_error_checking(profile, smallest_id, largest_id, usage): print(usage) sys.exit(lib.exitcode.INVALID_ARG) - if os.path.isfile(profile): - sys.path.append(os.path.dirname(profile)) - config_file = __import__(os.path.basename(profile[:-3])) - else: - message = '\n\tERROR: you must specify a valid profile file.\n' + \ - profile + ' does not exist!' 
- print(message) - print(usage) - sys.exit(lib.exitcode.INVALID_PATH) - - return config_file - def make_chunks(physiological_file_id, config_file, verbose): """ diff --git a/python/scripts/mass_nifti_pic.py b/python/scripts/mass_nifti_pic.py index 55da86190..d9d723e7c 100755 --- a/python/scripts/mass_nifti_pic.py +++ b/python/scripts/mass_nifti_pic.py @@ -8,13 +8,11 @@ import sys import lib.exitcode +from lib.config_file import load_config from lib.database import Database from lib.database_lib.config import Config from lib.imaging import Imaging -__license__ = "GPLv3" - - sys.path.append('/home/user/python') @@ -22,7 +20,7 @@ # sys.tracebacklimit = 0 def main(): - profile = '' + profile = None verbose = False force = False smallest_id = None @@ -36,8 +34,8 @@ def main(): '\n' 'usage : mass_nifti_pic.py -p -s -l \n\n' 'options: \n' - '\t-p, --profile : name of the python database config file in ' - 'dicom-archive/.loris-mri\n' + '\t-p, --profile : name of the python database config file in the config' + ' directory\n' '\t-s, --smallest_id: smallest FileID for which the pic will be created\n' '\t-l, --largest_id : largest FileID for which the pic will be created\n' '\t-f, --force : overwrite the pic already present in the filesystem with new pic\n' @@ -55,7 +53,7 @@ def main(): print(usage) sys.exit() elif opt in ('-p', '--profile'): - profile = os.environ['LORIS_CONFIG'] + "/.loris_mri/" + arg + profile = arg elif opt in ('-s', '--smallest_id'): smallest_id = int(arg) elif opt in ('-l', '--largest_id'): @@ -66,7 +64,8 @@ def main(): verbose = True # input error checking and load config_file file - config_file = input_error_checking(profile, smallest_id, largest_id, usage) + config_file = load_config(profile) + input_error_checking(smallest_id, largest_id, usage) # create pic for NIfTI files with a FileID between smallest_id and largest_id if (smallest_id == largest_id): @@ -76,41 +75,18 @@ def main(): make_pic(file_id, config_file, force, verbose) -def 
input_error_checking(profile, smallest_id, largest_id, usage): +def input_error_checking(smallest_id, largest_id, usage): """ - Checks whether the required inputs are correctly set. If - the path to the config_file file valid, then it will import the file as a - module so the database connection information can be used to connect. + Checks whether the required inputs are correctly set. - :param profile : path to the profile file with MySQL credentials - :type profile : str :param smallest_id: smallest FileID for which to create the pic :type smallest_id: int :param largest_id : largest FileID for which to create the pic :type largest_id : int :param usage : script usage to be displayed when encountering an error :type usage : str - - :return: config_file module with database credentials (config_file.mysql) - :rtype: module """ - if not profile: - message = '\n\tERROR: you must specify a profile file using -p or ' \ - '--profile option' - print(message) - print(usage) - sys.exit(lib.exitcode.MISSING_ARG) - - if os.path.isfile(profile): - sys.path.append(os.path.dirname(profile)) - config_file = __import__(os.path.basename(profile[:-3])) - else: - message = f'\n\tERROR: you must specify a valid profile file.\n{profile} does not exist!' 
- print(message) - print(usage) - sys.exit(lib.exitcode.INVALID_PATH) - if not smallest_id: message = '\n\tERROR: you must specify a smallest FileID on which to run the' \ ' mass_nifti_pic.py script using -s or --smallest_id option' @@ -132,8 +108,6 @@ def input_error_checking(profile, smallest_id, largest_id, usage): print(usage) sys.exit(lib.exitcode.INVALID_ARG) - return config_file - def make_pic(file_id, config_file, force, verbose): """ diff --git a/python/scripts/run_dicom_archive_loader.py b/python/scripts/run_dicom_archive_loader.py index fa18fbcff..67332ebd8 100755 --- a/python/scripts/run_dicom_archive_loader.py +++ b/python/scripts/run_dicom_archive_loader.py @@ -8,8 +8,6 @@ from lib.dcm2bids_imaging_pipeline_lib.dicom_archive_loader_pipeline import DicomArchiveLoaderPipeline from lib.lorisgetopt import LorisGetOpt -__license__ = "GPLv3" - sys.path.append('/home/user/python') @@ -30,7 +28,7 @@ def main(): "usage : run_dicom_archive_loader.py -p -u ...\n\n" "options: \n" - "\t-p, --profile : Name of the python database config file in dicom-archive/.loris_mri\n" + "\t-p, --profile : Name of the python database config file in config\n" "\t-t, --tarchive_path : Absolute path to the DICOM archive to process\n" "\t-u, --upload_id : ID of the upload (from mri_upload) related to the DICOM archive to process\n" "\t-s, --series_uid : Only insert the provided SeriesUID\n" @@ -38,13 +36,12 @@ def main(): "\t-v, --verbose : If set, be verbose\n\n" "required options are: \n" - "\t--profile\n" "\t--tarchive_path OR --upload_id\n" ) options_dict = { "profile": { - "value": None, "required": True, "expect_arg": True, "short_opt": "p", "is_path": False + "value": None, "required": False, "expect_arg": True, "short_opt": "p", "is_path": False }, "tarchive_path": { "value": None, "required": False, "expect_arg": True, "short_opt": "t", "is_path": True diff --git a/python/scripts/run_dicom_archive_validation.py b/python/scripts/run_dicom_archive_validation.py index 
11778838d..ec3e9f3ec 100755 --- a/python/scripts/run_dicom_archive_validation.py +++ b/python/scripts/run_dicom_archive_validation.py @@ -8,9 +8,6 @@ from lib.dcm2bids_imaging_pipeline_lib.dicom_validation_pipeline import DicomValidationPipeline from lib.lorisgetopt import LorisGetOpt -__license__ = "GPLv3" - - sys.path.append('/home/user/python') # to limit the traceback when raising exceptions. @@ -35,20 +32,19 @@ def main(): "usage : dicom_archive_validation -p -t -u \n\n" "options: \n" - "\t-p, --profile : Name of the python database config file in dicom-archive/.loris_mri\n" + "\t-p, --profile : Name of the python database config file in config\n" "\t-t, --tarchive_path: Absolute path to the DICOM archive to validate\n" "\t-u, --upload_id : ID of the upload (from mri_upload) associated with the DICOM archive to validate\n" "\t-v, --verbose : If set, be verbose\n\n" "required options are: \n" - "\t--profile\n" "\t--tarchive_path\n" "\t--upload_id\n\n" ) options_dict = { "profile": { - "value": None, "required": True, "expect_arg": True, "short_opt": "p", "is_path": False + "value": None, "required": False, "expect_arg": True, "short_opt": "p", "is_path": False }, "tarchive_path": { "value": None, "required": True, "expect_arg": True, "short_opt": "t", "is_path": True diff --git a/python/scripts/run_nifti_insertion.py b/python/scripts/run_nifti_insertion.py index fb3f8eb1c..fdea2effc 100755 --- a/python/scripts/run_nifti_insertion.py +++ b/python/scripts/run_nifti_insertion.py @@ -9,8 +9,6 @@ from lib.dcm2bids_imaging_pipeline_lib.nifti_insertion_pipeline import NiftiInsertionPipeline from lib.lorisgetopt import LorisGetOpt -__license__ = "GPLv3" - sys.path.append('/home/user/python') @@ -32,7 +30,7 @@ def main(): "usage : run_nifti_insertion.py -p -n -j ...\n\n" "options: \n" - "\t-p, --profile : Name of the python database config file in dicom-archive/.loris_mri\n" + "\t-p, --profile : Name of the python database config file in config\n" "\t-n, --nifti_path : 
Absolute path to the NIfTI file to insert\n" "\t-j, --json_path : Absolute path to the BIDS JSON sidecar file with scan parameters\n" "\t-l, --bval_path : Absolute path to the NIfTI BVAL file for DWI acquisitions\n" @@ -46,7 +44,6 @@ def main(): "\t-v, --verbose : If set, be verbose\n\n" "required options are: \n" - "\t--profile\n" "\t--nifti_path\n" "\t--json_path OR --loris_scan_type\n" "\t--tarchive_path OR --upload_id\n" @@ -55,7 +52,7 @@ def main(): options_dict = { "profile": { - "value": None, "required": True, "expect_arg": True, "short_opt": "p", "is_path": False + "value": None, "required": False, "expect_arg": True, "short_opt": "p", "is_path": False }, "nifti_path": { "value": None, "required": True, "expect_arg": True, "short_opt": "n", "is_path": True diff --git a/python/scripts/run_push_imaging_files_to_s3_pipeline.py b/python/scripts/run_push_imaging_files_to_s3_pipeline.py index a3891e7ec..917fd87ee 100755 --- a/python/scripts/run_push_imaging_files_to_s3_pipeline.py +++ b/python/scripts/run_push_imaging_files_to_s3_pipeline.py @@ -8,8 +8,6 @@ from lib.dcm2bids_imaging_pipeline_lib.push_imaging_files_to_s3_pipeline import PushImagingFilesToS3Pipeline from lib.lorisgetopt import LorisGetOpt -__license__ = "GPLv3" - sys.path.append('/home/user/python') @@ -28,18 +26,17 @@ def main(): "usage : run_push_imaging_files_to_s3_pipeline.py -p -u ...\n\n" "options: \n" - "\t-p, --profile : Name of the python database config file in dicom-archive/.loris_mri\n" + "\t-p, --profile : Name of the python database config file in config\n" "\t-u, --upload_id : ID of the upload (from mri_upload) related to the DICOM archive to process\n" "\t-v, --verbose : If set, be verbose\n\n" "required options are: \n" - "\t--profile\n" "\t--upload_id\n" ) options_dict = { "profile": { - "value": None, "required": True, "expect_arg": True, "short_opt": "p", "is_path": False + "value": None, "required": False, "expect_arg": True, "short_opt": "p", "is_path": False }, "upload_id": 
{ "value": None, "required": True, "expect_arg": True, "short_opt": "u", "is_path": False diff --git a/python/tests/integration/scripts/test_import_dicom_study.py b/python/tests/integration/scripts/test_import_dicom_study.py index ae9b9fd25..67eadbd86 100644 --- a/python/tests/integration/scripts/test_import_dicom_study.py +++ b/python/tests/integration/scripts/test_import_dicom_study.py @@ -10,7 +10,6 @@ def test_import_dicom_study(): process = run_integration_script([ 'import_dicom_study.py', - '--profile', 'database_config.py', '--source', '/data/loris/incoming/ROM168_400168_V2', '--insert', '--session', ]) diff --git a/python/tests/integration/scripts/test_mass_nifti_pic.py b/python/tests/integration/scripts/test_mass_nifti_pic.py index 948f7f87d..d21660ee0 100644 --- a/python/tests/integration/scripts/test_mass_nifti_pic.py +++ b/python/tests/integration/scripts/test_mass_nifti_pic.py @@ -1,5 +1,4 @@ import os -import time from datetime import datetime from lib.db.models.file import DbFile @@ -10,22 +9,6 @@ from tests.util.run_integration_script import run_integration_script -def test_missing_profile_arg(): - """ - Test running the script without the --profile argument. - """ - - process = run_integration_script([ - 'mass_nifti_pic.py', - ]) - - # Check return code, STDOUT and STDERR - message = 'ERROR: you must specify a profile file using -p or --profile option' - assert process.returncode == MISSING_ARG - assert message in process.stdout - assert process.stderr == "" - - def test_invalid_profile_arg(): """ Test running the script with an invalid --profile argument. 
@@ -37,10 +20,10 @@ def test_invalid_profile_arg(): ]) # Check return code, STDOUT and STDERR - message = 'ERROR: you must specify a valid profile file' + message = "ERROR: No configuration file 'invalid_profile.py' found in the '/opt/loris/bin/mri/config' directory.\n" assert process.returncode == INVALID_PATH - assert message in process.stdout - assert process.stderr == "" + assert process.stdout == "" + assert process.stderr == message def test_missing_smallest_id_arg(): @@ -50,7 +33,6 @@ def test_missing_smallest_id_arg(): process = run_integration_script([ 'mass_nifti_pic.py', - '--profile', 'database_config.py', ]) # Check return code, STDOUT and STDERR @@ -68,7 +50,6 @@ def test_missing_largest_id_arg(): process = run_integration_script([ 'mass_nifti_pic.py', - '--profile', 'database_config.py', '--smallest_id', '2', ]) @@ -87,7 +68,6 @@ def test_smallest_id_bigger_than_largest_id(): process = run_integration_script([ 'mass_nifti_pic.py', - '--profile', 'database_config.py', '--smallest_id', '6', '--largest_id', '2' ]) @@ -106,7 +86,6 @@ def test_on_invalid_file_id(): process = run_integration_script([ 'mass_nifti_pic.py', - '--profile', 'database_config.py', '--smallest_id', '999', '--largest_id', '999' ]) @@ -125,7 +104,6 @@ def test_on_file_id_that_already_has_a_pic(): process = run_integration_script([ 'mass_nifti_pic.py', - '--profile', 'database_config.py', '--smallest_id', '2', '--largest_id', '2' ]) @@ -155,7 +133,6 @@ def test_force_option(): process = run_integration_script([ 'mass_nifti_pic.py', - '--profile', 'database_config.py', '--smallest_id', '2', '--largest_id', '2', '--force' @@ -164,7 +141,7 @@ def test_force_option(): # Check return code, STDOUT and STDERR # The NIfTI file is printing a warning when the pic gets created so check that the # STDERR is exactly that error message. 
- message = '/opt/loris/bin/mri/python/lib/imaging.py:1179: UserWarning: Casting data from int32 to float32' \ + message = '/opt/loris/bin/mri/python/lib/imaging.py:1177: UserWarning: Casting data from int32 to float32' \ '\n plotting.plot_anat(\n' assert process.returncode == SUCCESS assert process.stdout == "" @@ -190,16 +167,16 @@ def test_running_on_a_text_file(): file_type = 'txt', session_id = 564, output_type = 'native', - insert_time = int(datetime.now().timestamp()), + insert_time = datetime.now(), inserted_by_user_id = 'test' ) + db.add(file) db.commit() # run NIfTI pic script on the inserted file process = run_integration_script([ 'mass_nifti_pic.py', - '--profile', 'database_config.py', '--smallest_id', str(file.id), '--largest_id', str(file.id) ]) @@ -239,11 +216,10 @@ def test_successful_run(): file_pic_data = try_get_parameter_value_with_file_id_parameter_name(db, 2, 'check_pic_filename') assert file_pic_data is None - current_time = time.time() + current_time = datetime.now() process = run_integration_script([ 'mass_nifti_pic.py', - '--profile', 'database_config.py', '--smallest_id', '2', '--largest_id', '2' ]) @@ -251,7 +227,7 @@ def test_successful_run(): # Check return code, STDOUT and STDERR # The NIfTI file is printing a warning when the pic gets created so check that the # STDERR is exactly that error message. 
- message = '/opt/loris/bin/mri/python/lib/imaging.py:1179: UserWarning: Casting data from int32 to float32' \ + message = '/opt/loris/bin/mri/python/lib/imaging.py:1177: UserWarning: Casting data from int32 to float32' \ '\n plotting.plot_anat(\n' assert process.returncode == SUCCESS assert process.stdout == "" diff --git a/python/tests/integration/scripts/test_run_dicom_archive_loader.py b/python/tests/integration/scripts/test_run_dicom_archive_loader.py index 80c165b22..5c8ab18cf 100644 --- a/python/tests/integration/scripts/test_run_dicom_archive_loader.py +++ b/python/tests/integration/scripts/test_run_dicom_archive_loader.py @@ -10,7 +10,6 @@ def test_invalid_arg(): process = run_integration_script([ 'run_dicom_archive_loader.py', - '--profile', 'database_config.py', '--invalid_arg', ]) @@ -27,7 +26,6 @@ def test_non_existent_upload_id(): # Run the script to test process = run_integration_script([ 'run_dicom_archive_loader.py', - '--profile', 'database_config.py', '--upload_id', invalid_upload_id, ]) @@ -45,7 +43,6 @@ def test_invalid_tarchive_path_arg(): # Run the script to test process = run_integration_script([ 'run_dicom_archive_loader.py', - '--profile', 'database_config.py', '--tarchive_path', invalid_tarchive_path, ]) @@ -67,7 +64,6 @@ def test_successful_run_on_valid_tarchive_path(): # Run the script to test process = run_integration_script([ 'run_dicom_archive_loader.py', - '--profile', 'database_config.py', '--tarchive_path', '/data/loris/tarchive/DCM_2015-07-07_MTL001_300001_V2_localizer_t1w.tar', ]) diff --git a/python/tests/integration/scripts/test_run_dicom_archive_validation.py b/python/tests/integration/scripts/test_run_dicom_archive_validation.py index 0351f4c44..5181a5210 100644 --- a/python/tests/integration/scripts/test_run_dicom_archive_validation.py +++ b/python/tests/integration/scripts/test_run_dicom_archive_validation.py @@ -15,7 +15,6 @@ def test_missing_upload_id_arg(): # Run the script to test process = run_integration_script([ 
'run_dicom_archive_validation.py', - '--profile', 'database_config.py', '--tarchive_path', VALID_TARCHIVE_PATH, ]) @@ -38,7 +37,6 @@ def test_missing_tarchive_path_arg(): # Run the script to test process = run_integration_script([ 'run_dicom_archive_validation.py', - '--profile', 'database_config.py', '--upload_id', VALID_UPLOAD_ID, ]) @@ -60,7 +58,6 @@ def test_invalid_arg(): process = run_integration_script([ 'run_dicom_archive_validation.py', - '--profile', 'database_config.py', '--invalid_arg', ]) @@ -83,7 +80,6 @@ def test_invalid_tarchive_path_arg(): # Run the script to test process = run_integration_script([ 'run_dicom_archive_validation.py', - '--profile', 'database_config.py', '--tarchive_path', INVALID_TARCHIVE_PATH, '--upload_id', VALID_UPLOAD_ID, ]) @@ -108,7 +104,6 @@ def test_non_existent_upload_id(): # Run the script to test process = run_integration_script([ 'run_dicom_archive_validation.py', - '--profile', 'database_config.py', '--tarchive_path', VALID_TARCHIVE_PATH, '--upload_id', INVALID_UPLOAD_ID, ]) @@ -125,7 +120,6 @@ def test_mixed_up_upload_id_tarchive_path(): # Run the script to test process = run_integration_script([ 'run_dicom_archive_validation.py', - '--profile', 'database_config.py', '--tarchive_path', VALID_TARCHIVE_PATH, '--upload_id', '126', ]) @@ -143,7 +137,6 @@ def test_successful_validation(): # Run the script to test process = run_integration_script([ 'run_dicom_archive_validation.py', - '--profile', 'database_config.py', '--tarchive_path', VALID_TARCHIVE_PATH, '--upload_id', VALID_UPLOAD_ID, ]) diff --git a/python/tests/integration/scripts/test_run_nifti_insertion.py b/python/tests/integration/scripts/test_run_nifti_insertion.py index 0387c8b2d..efe17c2ec 100644 --- a/python/tests/integration/scripts/test_run_nifti_insertion.py +++ b/python/tests/integration/scripts/test_run_nifti_insertion.py @@ -29,7 +29,6 @@ def test_invalid_arg(): process = run_integration_script([ 'run_nifti_insertion.py', - '--profile', 
'database_config.py', '--invalid_arg', ]) @@ -47,7 +46,6 @@ def test_missing_nifti_path_argument(): # Run the script to test process = run_integration_script([ 'run_nifti_insertion.py', - '--profile', 'database_config.py', ]) # Check return code, STDOUT and STDERR @@ -66,7 +64,6 @@ def test_invalid_nifti_path(): # Run the script to test process = run_integration_script([ 'run_nifti_insertion.py', - '--profile', 'database_config.py', '--nifti_path', nifti_path, ]) @@ -88,7 +85,6 @@ def test_missing_upload_id_or_tarchive_path(): # Run the script to test process = run_integration_script([ 'run_nifti_insertion.py', - '--profile', 'database_config.py', '--nifti_path', nifti_path, ]) @@ -112,7 +108,6 @@ def test_missing_json_path(): # Run the script to test process = run_integration_script([ 'run_nifti_insertion.py', - '--profile', 'database_config.py', '--nifti_path', nifti_path, '--upload_id', upload_id, ]) @@ -137,7 +132,6 @@ def test_invalid_json_path(): # Run the script to test process = run_integration_script([ 'run_nifti_insertion.py', - '--profile', 'database_config.py', '--nifti_path', nifti_path, '--upload_id', upload_id, '--json_path', json_path, @@ -162,7 +156,6 @@ def test_invalid_upload_id(): # Run the script to test process = run_integration_script([ 'run_nifti_insertion.py', - '--profile', 'database_config.py', '--nifti_path', nifti_path, '--upload_id', upload_id, '--json_path', json_path, @@ -187,7 +180,6 @@ def test_invalid_tarchive_path(): # Run the script to test process = run_integration_script([ 'run_nifti_insertion.py', - '--profile', 'database_config.py', '--nifti_path', nifti_path, '--tarchive_path', tarchive_path, '--json_path', json_path, @@ -213,7 +205,6 @@ def test_tarchive_path_and_upload_id_provided(): # Run the script to test process = run_integration_script([ 'run_nifti_insertion.py', - '--profile', 'database_config.py', '--nifti_path', nifti_path, '--tarchive_path', tarchive_path, '--upload_id', upload_id, @@ -241,7 +232,6 @@ def 
test_nifti_and_tarchive_patient_name_differ(): # Run the script to test process = run_integration_script([ 'run_nifti_insertion.py', - '--profile', 'database_config.py', '--nifti_path', nifti_path, '--upload_id', upload_id, '--json_path', json_path, @@ -267,7 +257,6 @@ def test_nifti_already_inserted(): # Run the script to test process = run_integration_script([ 'run_nifti_insertion.py', - '--profile', 'database_config.py', '--nifti_path', nifti_path, '--upload_id', upload_id, '--json_path', json_path, @@ -304,7 +293,6 @@ def test_nifti_mri_protocol_violated_scans_features(): # Run the script to test process = run_integration_script([ 'run_nifti_insertion.py', - '--profile', 'database_config.py', '--nifti_path', nifti_path, '--upload_id', upload_id, '--json_path', json_path, @@ -343,7 +331,6 @@ def test_nifti_mri_protocol_violated_scans_features(): shutil.copyfile(new_json_path, json_path) process = run_integration_script([ 'run_nifti_insertion.py', - '--profile', 'database_config.py', '--nifti_path', nifti_path, '--upload_id', upload_id, '--json_path', json_path, @@ -370,7 +357,6 @@ def test_nifti_mri_protocol_violated_scans_features(): shutil.copyfile(new_json_path, json_path) process = run_integration_script([ 'run_nifti_insertion.py', - '--profile', 'database_config.py', '--nifti_path', nifti_path, '--upload_id', upload_id, '--json_path', json_path, @@ -446,17 +432,14 @@ def test_nifti_mri_violations_log_exclude_features(): upload_id = '128' # Run the script to test - process = run_integration_script( - [ - 'run_nifti_insertion.py', - '--profile', 'database_config.py', - '--nifti_path', nifti_path, - '--upload_id', upload_id, - '--json_path', json_path, - '--bval_path', bval_path, - '--bvec_path', bvec_path - ] - ) + process = run_integration_script([ + 'run_nifti_insertion.py', + '--nifti_path', nifti_path, + '--upload_id', upload_id, + '--json_path', json_path, + '--bval_path', bval_path, + '--bvec_path', bvec_path + ]) # Check return code, STDOUT and STDERR 
expected_stderr = f"ERROR: {nifti_path} violates exclusionary checks listed in mri_protocol_checks." \ @@ -502,7 +485,6 @@ def test_nifti_mri_violations_log_exclude_features(): shutil.copyfile(new_bvec_path, bvec_path) process = run_integration_script([ 'run_nifti_insertion.py', - '--profile', 'database_config.py', '--nifti_path', nifti_path, '--upload_id', upload_id, '--json_path', json_path, @@ -532,7 +514,6 @@ def test_nifti_mri_violations_log_exclude_features(): shutil.copyfile(new_bvec_path, bvec_path) process = run_integration_script([ 'run_nifti_insertion.py', - '--profile', 'database_config.py', '--nifti_path', nifti_path, '--upload_id', upload_id, '--json_path', json_path, @@ -603,7 +584,6 @@ def test_dwi_insertion_with_mri_violations_log_warning(): # Run the script to test process = run_integration_script([ 'run_nifti_insertion.py', - '--profile', 'database_config.py', '--nifti_path', nifti_path, '--upload_id', upload_id, '--json_path', json_path, diff --git a/python/tests/integration/test_orm_sql_sync.py b/python/tests/integration/test_orm_sql_sync.py index 41803cdfc..8016b1665 100644 --- a/python/tests/integration/test_orm_sql_sync.py +++ b/python/tests/integration/test_orm_sql_sync.py @@ -3,7 +3,7 @@ from typing import Any from sqlalchemy import MetaData -from sqlalchemy.dialects.mysql.types import DOUBLE, TINYINT +from sqlalchemy.dialects.mysql.types import DOUBLE from sqlalchemy.types import TypeDecorator, TypeEngine from lib.db.base import Base @@ -59,9 +59,6 @@ def get_orm_python_type(orm_type: TypeEngine[Any]): def get_sql_python_type(sql_type: TypeEngine[Any]): - if isinstance(sql_type, TINYINT) and sql_type.display_width == 1: # type: ignore - return bool - if isinstance(sql_type, DOUBLE): return float diff --git a/python/tests/util/database.py b/python/tests/util/database.py index 880aa74e4..36d4b5b6e 100644 --- a/python/tests/util/database.py +++ b/python/tests/util/database.py @@ -1,9 +1,7 @@ -import os -import sys - from sqlalchemy import 
create_engine from sqlalchemy.orm import Session +from lib.config_file import load_config from lib.db.base import Base from lib.db.connect import get_database_engine @@ -24,9 +22,7 @@ def get_integration_database_engine(): Python configuration file. """ - config_file = os.path.join(os.environ['LORIS_CONFIG'], '.loris_mri', 'database_config.py') - sys.path.append(os.path.dirname(config_file)) - config = __import__(os.path.basename(config_file[:-3])) + config = load_config('config.py') return get_database_engine(config.mysql) diff --git a/test/imaging_install_test.sh b/test/imaging_install_test.sh index afb6ee33b..dc22a19c6 100644 --- a/test/imaging_install_test.sh +++ b/test/imaging_install_test.sh @@ -24,7 +24,7 @@ echo "Creating the data directories" sudo -S su $USER -c "mkdir -m 770 -p /data/$PROJ/assembly_bids" #holds the BIDS files derived from DICOMs sudo -S su $USER -c "mkdir -m 770 -p /data/$PROJ/batch_output" #contains the result of the SGE (queue) sudo -S su $USER -c "mkdir -m 770 -p /data/$PROJ/bids_imports" #contains imported BIDS studies - sudo -S su $USER -c "mkdir -m 770 -p $mridir/dicom-archive/.loris_mri" + sudo -S su $USER -c "mkdir -m 770 -p $mridir/config" echo ##################################################################################### @@ -96,17 +96,17 @@ echo ##################################################################################### echo "Creating MRI config file" -cp $mridir/install/templates/profileTemplate.pl $mridir/dicom-archive/.loris_mri/$prodfilename -sudo chmod 640 $mridir/dicom-archive/.loris_mri/$prodfilename -sudo chgrp $group $mridir/dicom-archive/.loris_mri/$prodfilename +cp $mridir/install/templates/profileTemplate.pl $mridir/config/$prodfilename +sudo chmod 640 $mridir/config/$prodfilename +sudo chgrp $group $mridir/config/$prodfilename -sed -e "s#DBNAME#$mysqldb#g" -e "s#DBUSER#$mysqluser#g" -e "s#DBPASS#$mysqlpass#g" -e "s#DBHOST#$mysqlhost#g" $mridir/install/templates/profileTemplate.pl > 
$mridir/dicom-archive/.loris_mri/$prodfilename -echo "config file is located at $mridir/dicom-archive/.loris_mri/$prodfilename" +sed -e "s#DBNAME#$mysqldb#g" -e "s#DBUSER#$mysqluser#g" -e "s#DBPASS#$mysqlpass#g" -e "s#DBHOST#$mysqlhost#g" $mridir/install/templates/profileTemplate.pl > $mridir/config/$prodfilename +echo "config file is located at $mridir/config/$prodfilename" echo echo "Creating python database config file with database credentials" -cp $mridir/install/templates/database_config_template.py $mridir/dicom-archive/.loris_mri/database_config.py -sudo chmod 640 $mridir/dicom-archive/.loris_mri/database_config.py -sudo chgrp $group $mridir/dicom-archive/.loris_mri/database_config.py -sed -e "s#DBNAME#$mysqldb#g" -e "s#DBUSER#$mysqluser#g" -e "s#DBPASS#$mysqlpass#g" -e "s#DBHOST#$mysqlhost#g" $mridir/install/templates/database_config_template.py > $mridir/dicom-archive/.loris_mri/database_config.py -echo "config file for python import scripts is located at $mridir/dicom-archive/.loris_mri/database_config.py" +cp $mridir/install/templates/config_template.py $mridir/config/config.py +sudo chmod 640 $mridir/config/config.py +sudo chgrp $group $mridir/config/config.py +sed -e "s#DBNAME#$mysqldb#g" -e "s#DBUSER#$mysqluser#g" -e "s#DBPASS#$mysqlpass#g" -e "s#DBHOST#$mysqlhost#g" $mridir/install/templates/config_template.py > $mridir/config/config.py +echo "config file for python import scripts is located at $mridir/config/config.py" diff --git a/test/mri.Dockerfile b/test/mri.Dockerfile index f73c427f2..e456b499c 100644 --- a/test/mri.Dockerfile +++ b/test/mri.Dockerfile @@ -69,18 +69,17 @@ RUN cpan App::cpanminus && \ cpanm --installdeps ./install/requirements/ && \ cpanm https://github.com/aces/Loris-MRI/raw/main/install/Digest-BLAKE2-0.02.tar.gz -# Install the Python libraries -COPY install/requirements/requirements.txt ./install/requirements/requirements.txt -RUN pip install --no-cache-dir -r ./install/requirements/requirements.txt - # Get the database 
credentials as parameters ARG DATABASE_NAME ARG DATABASE_USER ARG DATABASE_PASS -# Checkout the LORIS-MRI repository -COPY . /opt/loris/bin/mri +# Install LORIS-MRI Python WORKDIR /opt/loris/bin/mri +COPY . . +RUN pip install -e .[dev] + +# Run the test LORIS-MRI installer RUN bash ./test/imaging_install_test.sh $DATABASE_NAME $DATABASE_USER $DATABASE_PASS # Setup the LORIS-MRI environment variables @@ -89,9 +88,8 @@ ENV MINC_TOOLKIT_DIR=/opt/minc/1.9.18 ENV PATH=/opt/${PROJECT}/bin/mri:/opt/${PROJECT}/bin/mri/uploadNeuroDB:/opt/${PROJECT}/bin/mri/uploadNeuroDB/bin:/opt/${PROJECT}/bin/mri/dicom-archive:/opt/${PROJECT}/bin/mri/python/scripts:/opt/${PROJECT}/bin/mri/tools:/opt/${PROJECT}/bin/mri/python/react-series-data-viewer:${MINC_TOOLKIT_DIR}/bin:/usr/local/bin/tpcclib:$PATH ENV PERL5LIB=/opt/${PROJECT}/bin/mri/uploadNeuroDB:/opt/${PROJECT}/bin/mri/dicom-archive:$PERL5LIB ENV TMPDIR=/tmp -ENV LORIS_CONFIG=/opt/${PROJECT}/bin/mri/dicom-archive +ENV LORIS_CONFIG=/opt/${PROJECT}/bin/mri/config ENV LORIS_MRI=/opt/${PROJECT}/bin/mri -ENV PYTHONPATH=/opt/${PROJECT}/bin/mri/python:/opt/${PROJECT}/bin/mri/python/react-series-data-viewer ENV BEASTLIB=${MINC_TOOLKIT_DIR}/../share/beast-library-1.1 ENV MNI_MODELS=${MINC_TOOLKIT_DIR}/../share/icbm152_model_09c diff --git a/tools/BackPopulateSNRAndAcquisitionOrder.pl b/tools/BackPopulateSNRAndAcquisitionOrder.pl index 6294cb818..bae523085 100755 --- a/tools/BackPopulateSNRAndAcquisitionOrder.pl +++ b/tools/BackPopulateSNRAndAcquisitionOrder.pl @@ -16,7 +16,7 @@ =head1 SYNOPSIS Available options are: --profile : name of the config file in C<../dicom-archive/.loris_mri> +-profile : name of the config file in C<../config> -tarchive_id: ID of the DICOM archive (.tar file) to be processed from the C table @@ -66,12 +66,12 @@ =head1 DESCRIPTION my @opt_table = ( [ "-profile", "string", 1, \$profile, - "name of config file in ../dicom-archive/.loris_mri" + "name of config file in ../config" ], [ "-tarchive_id", "string", 1, 
\$TarchiveID, "tarchive_id of the DICOM archive (.tar files) to be processed from tarchive table" ] -); +); my $Help = < 1, CLEANUP => 1 + $template, TMPDIR => 1, CLEANUP => 1 ); -my @temp = split(/\//, $TmpDir); +my @temp = split(/\//, $TmpDir); my $templog = $temp[$#temp]; -my $LogDir = "$data_dir/logs"; -if (!-d $LogDir) { - mkdir($LogDir, 0770); +my $LogDir = "$data_dir/logs"; +if (!-d $LogDir) { + mkdir($LogDir, 0770); } my $logfile = "$LogDir/$templog.log"; @@ -187,10 +187,10 @@ =head1 DESCRIPTION my $sth = $dbh->prepare($query); $sth->execute(); - + if($sth->rows > 0) { # Create tarchive list hash with old and new location - while ( my $rowhr = $sth->fetchrow_hashref()) { + while ( my $rowhr = $sth->fetchrow_hashref()) { $TarchiveID = $rowhr->{'TarchiveID'}; my $ArchLoc = $rowhr->{'ArchiveLocation'}; my $SourceLocation = $rowhr->{'SourceLocation'}; @@ -199,9 +199,9 @@ =head1 DESCRIPTION $SourceLocation ); print "Currently updating the SNR for applicable files in parameter_file table ". - "for tarchiveID $TarchiveID at location $ArchLoc\n"; + "for tarchiveID $TarchiveID at location $ArchLoc\n"; $utility->computeSNR($TarchiveID, $upload_id); - print "Currently updating the Acquisition Order per modality in files table\n"; + print "Currently updating the Acquisition Order per modality in files table\n"; $utility->orderModalitiesByAcq($TarchiveID, $upload_id); print "Finished updating back-populating SNR and Acquisition Order ". 
@@ -209,7 +209,7 @@ =head1 DESCRIPTION } } else { - print "No tarchives to be updated \n"; + print "No tarchives to be updated \n"; } $db->disconnect(); diff --git a/tools/MakeArchiveLocationRelative.pl b/tools/MakeArchiveLocationRelative.pl index ad09e291e..9d0886161 100755 --- a/tools/MakeArchiveLocationRelative.pl +++ b/tools/MakeArchiveLocationRelative.pl @@ -14,7 +14,7 @@ =head1 SYNOPSIS Available option is: --profile: name of the config file in C<../dicom-archive/.loris_mri> +-profile: name of the config file in C<../config> =head1 DESCRIPTION @@ -45,14 +45,14 @@ =head2 Methods my @opt_table = ( [ "-profile", "string", 1, \$profile, - "name of config file in ../dicom-archive/.loris_mri" + "name of config file in ../config" ] -); +); my $Help = <prepare($query); $sth->execute(); - + # Create tarchive list hash with old and new location my %tarchive_list; while ( my $rowhr = $sth->fetchrow_hashref()) { - + my $TarchiveID = $rowhr->{'TarchiveID'}; my $ArchLoc = $rowhr->{'ArchiveLocation'}; my $newArchLoc = $ArchLoc; $newArchLoc =~ s/$tarchiveLibraryDir\/?//g; - + $tarchive_list{$TarchiveID}{'ArchiveLocation'} = $ArchLoc; $tarchive_list{$TarchiveID}{'NewArchiveLocation'} = $newArchLoc; - + } - + return %tarchive_list; } @@ -198,11 +198,11 @@ =head3 updateArchiveLocation($dbh, %tarchive_list) =cut sub updateArchiveLocation { - + my ( $dbh, %tarchive_list ) = @_; # Update query - (my $query = < + C<../config> =head1 DESCRIPTION @@ -45,7 +45,7 @@ =head2 Methods [ "Basic options", "section" ], [ "-profile", "string", 1, \$profile, - "name of config file in ../dicom-archive/.loris_mri" + "name of config file in ../config" ] ); @@ -81,10 +81,10 @@ =head2 Methods print STDERR "$Usage\n\tERROR: missing -profile argument\n\n"; exit $NeuroDB::ExitCodes::PROFILE_FAILURE; } -{ package Settings; do "$ENV{LORIS_CONFIG}/.loris_mri/$profile" } +{ package Settings; do "$ENV{LORIS_CONFIG}/$profile" } if ( !@Settings::db ) { print STDERR "\n\tERROR: You don't have a \@db setting in 
the file " - . "$ENV{LORIS_CONFIG}/.loris_mri/$profile \n\n"; + . "$ENV{LORIS_CONFIG}/$profile \n\n"; exit $NeuroDB::ExitCodes::DB_SETTINGS_FAILURE; } @@ -130,7 +130,7 @@ =head2 Methods my ($config_name, $config_value); - ## Populate the mri_upload table with necessary entries and get an upload_id + ## Populate the mri_upload table with necessary entries and get an upload_id for my $index (0 .. $#config_name_arr) { $config_name = $config_name_arr[$index]; ## This value was called if_sge in the default profileTemplate.pl, but should be called is_qsub @@ -177,7 +177,7 @@ sub updateConfigFromProd { my $config_select = $dbh->prepare($query_select); $config_select->execute($config_name); my $config_default = $config_select->fetchrow_array; - print "*** WARNING *** " . + print "*** WARNING *** " . "The Configuration Setting value for " . $config_name . " is kept at its default value of " . $config_default . " because " . $config_name . " is not found in the " . $profile . " file \n"; } diff --git a/tools/addSeriesAndFileRecords.pl b/tools/addSeriesAndFileRecords.pl index 53c6e093f..423a2fd45 100755 --- a/tools/addSeriesAndFileRecords.pl +++ b/tools/addSeriesAndFileRecords.pl @@ -1,4 +1,4 @@ -#!/usr/bin/perl +#!/usr/bin/perl # Jonathan Harlap # jharlap@bic.mni.mcgill.ca # Perl tool based on DCMSUM.pm and DICOM.pm to populate the series and file tables for a tarchive @@ -41,7 +41,7 @@ my @arg_table = ( ["Main options","section"], - ["-profile","string",1, \$profile, "Specify the name of the config file which resides in .loris_mri in the current directory."], + ["-profile","string",1, \$profile, "Specify the name of the config file which resides in the config directory."], ["General options", "section"], ["-verbose","boolean",1, \$verbose, "Be verbose."], @@ -54,8 +54,8 @@ if ($version) { print "$versionInfo\n"; exit; } # checking for profile settings -if($profile && -f "$ENV{LORIS_CONFIG}/.loris_mri/$profile") { { package Settings; do 
"$ENV{LORIS_CONFIG}/.loris_mri/$profile" } } -if ($profile && !@Settings::db) { print "\n\tERROR: You don't have a configuration file named '$profile' in: $ENV{LORIS_CONFIG}/.loris_mri/ \n\n"; exit 33; } +if($profile && -f "$ENV{LORIS_CONFIG}/$profile") { { package Settings; do "$ENV{LORIS_CONFIG}/$profile" } } +if ($profile && !@Settings::db) { print "\n\tERROR: You don't have a configuration file named '$profile' in: $ENV{LORIS_CONFIG}/ \n\n"; exit 33; } # basic error checking on tarchive @@ -66,7 +66,7 @@ my $dbh; $dbh = &NeuroDB::DBI::connect_to_db(@Settings::db); print "Testing for database connectivity. \n" if $verbose; $dbh->disconnect(); print "Database is available.\n\n" if $verbose; -####################### main ########################################### main ########################################### +####################### main ########################################### main ########################################### my ($studyUnique, $metaname, @metaFiles, $dcmdir, $sumTypeVersion); @@ -97,11 +97,11 @@ my $tarchiveID; my $tarchiveBasename = basename($tarchive); (my $query = <{acqu_List}}) { # insert the series my ($seriesNum, $sequName, $echoT, $repT, $invT, $seriesName, $sl_thickness, $phaseEncode, $seriesUID, $num) = split(':::',$acq); - my @values = + my @values = ( - $tarchiveID, $seriesNum, $seriesName, $sequName, - $echoT, $repT, $invT, $sl_thickness, + $tarchiveID, $seriesNum, $seriesName, $sequName, + $echoT, $repT, $invT, $sl_thickness, $phaseEncode, $num, $seriesUID ); $insert_series->execute(@values); @@ -166,15 +166,15 @@ # now create the tarchive_files records ($query = <[21]) { # file is dicom - @values = + @values = ( - $tarchiveID, $file->[1], $file->[3], $file->[2], + $tarchiveID, $file->[1], $file->[3], $file->[2], $file->[12], $file->[20], $filename ); $insert_file->execute(@values); } else { @values = ( - $tarchiveID, undef, undef, undef, - undef, $file->[20], $filename + $tarchiveID, undef, undef, undef, + undef, $file->[20], 
$filename ); $insert_file->execute(@values); } @@ -207,11 +207,11 @@ ######################################################################### end main #################### -=pod +=pod ################################################ Extract a tarchive into a temp dir ################################################ -=cut +=cut sub extract_tarchive { my ($tarchive, $tempdir) = @_; @@ -233,7 +233,6 @@ sub extract_tarchive { $dcmdir =~ s/\.tar\.gz$//; `cd $tempdir ; tar -xzf $dcmtar`; - + return $dcmdir; } - diff --git a/tools/batch_run_defacing_script.pl b/tools/batch_run_defacing_script.pl index fa67dbd55..de8a41702 100755 --- a/tools/batch_run_defacing_script.pl +++ b/tools/batch_run_defacing_script.pl @@ -12,7 +12,7 @@ =head1 SYNOPSIS Available options are: --profile: name of config file in ../dicom-archive/.loris_mri (typically called prod) +-profile: name of config file in ../config (typically called prod) =head1 DESCRIPTION @@ -60,7 +60,7 @@ =head1 AUTHORS my $profile; my @opt_table = ( - [ '-profile', 'string', 1, \$profile, 'name of config file in ../dicom-archive/.loris_mri' ] + [ '-profile', 'string', 1, \$profile, 'name of config file in ../config' ] ); my $Help = < +This script runs the defacing pipeline on multiple sessions. The list of +session IDs are provided through a text file (e.g. C with one sessionID per line). 
An example of what a C might contain for 3 session IDs @@ -100,19 +100,19 @@ =head1 AUTHORS if (!$ENV{LORIS_CONFIG}) { print STDERR "\n\tERROR: Environment variable 'LORIS_CONFIG' not set\n\n"; - exit $NeuroDB::ExitCodes::INVALID_ENVIRONMENT_VAR; + exit $NeuroDB::ExitCodes::INVALID_ENVIRONMENT_VAR; } -if ( !defined $profile || !-e "$ENV{LORIS_CONFIG}/.loris_mri/$profile") { +if ( !defined $profile || !-e "$ENV{LORIS_CONFIG}/$profile") { print $Help; print STDERR "$Usage\n\tERROR: You must specify a valid and existing profile.\n\n"; exit $NeuroDB::ExitCodes::PROFILE_FAILURE; } -{ package Settings; do "$ENV{LORIS_CONFIG}/.loris_mri/$profile" } +{ package Settings; do "$ENV{LORIS_CONFIG}/$profile" } if ( !@Settings::db ) { print STDERR "\n\tERROR: You don't have a \@db setting in the file " - . "$ENV{LORIS_CONFIG}/.loris_mri/$profile \n\n"; + . "$ENV{LORIS_CONFIG}/$profile \n\n"; exit $NeuroDB::ExitCodes::DB_SETTINGS_FAILURE; } @@ -166,7 +166,7 @@ =head1 AUTHORS ################################################################# my $counter = 0; -my $stdoutbase = "$data_dir/batch_output/defacingstdout.log"; +my $stdoutbase = "$data_dir/batch_output/defacingstdout.log"; my $stderrbase = "$data_dir/batch_output/defacingstderr.log"; foreach my $session_id (@session_ids_list) { @@ -184,11 +184,7 @@ =head1 AUTHORS } else { system($command); } -} +} exit $NeuroDB::ExitCodes::SUCCESS; - - - - diff --git a/tools/batch_run_pipeline_qc_face_script.pl b/tools/batch_run_pipeline_qc_face_script.pl index 894a4a21d..092b7c73e 100755 --- a/tools/batch_run_pipeline_qc_face_script.pl +++ b/tools/batch_run_pipeline_qc_face_script.pl @@ -12,7 +12,7 @@ =head1 SYNOPSIS Available options are: --profile: name of config file in ../dicom-archive/.loris_mri (typically called prod) +-profile: name of config file in ../config (typically called prod) -out_basedir: path to the output base directory where the jpg will be created @@ -65,8 +65,8 @@ =head1 AUTHORS my $out_basedir; my @opt_table = ( - [ 
'-profile', 'string', 1, \$profile, 'name of config file in ../dicom-archive/.loris_mri' ], - [ '-out_basedir', 'string', 1, \$out_basedir, 'path to the output base directory where the jpg will be created' ] + [ '-profile', 'string', 1, \$profile, 'name of config file in ../config' ], + [ '-out_basedir', 'string', 1, \$out_basedir, 'path to the output base directory where the jpg will be created' ] ); my $Help = < with one file path per line. -An example of what a C might contain for 3 files to use to +An example of what a C might contain for 3 files to use to create a 3D JPEG rendering of a scan to be defaced: /data/project/data/assembly/123456/V01/mri/processed/MINC_deface/project_123456_V01_t1w_001_t1w-defaced_001.mnc @@ -90,7 +90,7 @@ =head1 AUTHORS HELP my $Usage = < +-profile: name of the config file in C<../config> =head1 DESCRIPTION @@ -59,7 +59,7 @@ =head2 Methods #### Initiate program #### ############################## my $profile; -my $profile_desc = "name of config file in ../dicom-archive/.loris_mri"; +my $profile_desc = "name of config file in ../config"; my @opt_table = ( [ "-profile", "string", 1, \$profile, $profile_desc ] @@ -90,10 +90,10 @@ =head2 Methods print STDERR "$Usage\n\tERROR: missing -profile argument\n\n"; exit $NeuroDB::ExitCodes::PROFILE_FAILURE; } -{ package Settings; do "$ENV{LORIS_CONFIG}/.loris_mri/$profile" } +{ package Settings; do "$ENV{LORIS_CONFIG}/$profile" } if ( !@Settings::db ) { print STDERR "\n\tERROR: You don't have a \@db setting in the file " - . "$ENV{LORIS_CONFIG}/.loris_mri/$profile \n\n"; + . "$ENV{LORIS_CONFIG}/$profile \n\n"; exit $NeuroDB::ExitCodes::DB_SETTINGS_FAILURE; } @@ -160,7 +160,7 @@ =head2 Methods # Get tarchive basename my ($tarBasename_db) = &getTarchiveBasename($tarchive_db); - # Get the list of tarchives in the tarchive library folder that matches + # Get the list of tarchives in the tarchive library folder that matches # the basename of the tarchive stored in the year subfolder. 
my ($tarFileList) = &getTarList($tarchiveLibraryDir, $tarBasename_db); @@ -170,7 +170,7 @@ =head2 Methods . "to the database entry $tarchive_db\n"; next; } - + # Identify duplicate DICOM archives in the file system and remove them my ($duplicateTarFiles, $realTarFileFound) = &identifyDuplicates( $tarchive_db, $tarchivesList_db, $tarFileList @@ -206,7 +206,7 @@ =head3 readTarDir($tarDir, $match) sub readTarDir { my ($tarDir, $match) = @_; - # Read tarchive directory + # Read tarchive directory opendir (DIR, "$tarDir") || die "Cannot open $tarDir\n"; my @entries = readdir(DIR); closedir (DIR); @@ -214,7 +214,7 @@ sub readTarDir { ## Keep only files that match string stored in $match my @tar_list = grep(/^$match/i, @entries); @tar_list = map {"$tarDir/" . $_} @tar_list; - + return (\@tar_list); } @@ -254,12 +254,12 @@ sub getTarList { foreach my $YearDir (@$YearDirList) { my ($yearList) = readTarDir("$YearDir", $match); - ## Add year subfolder in front of each element (file) of the array + ## Add year subfolder in front of each element (file) of the array ## Push the list of tarchives in the year subfolder to the overall list of tarchives push (@$tar_list, @$yearList) if (@$yearList >= 0); - - } + + } return ($tar_list); } @@ -303,7 +303,7 @@ sub selectTarchives { exit $NeuroDB::ExitCodes::SELECT_FAILURE; } - return (\%tarchiveInfo); + return (\%tarchiveInfo); } @@ -389,16 +389,16 @@ sub identifyDuplicates { } } - # If no real tarchive file found return undef, + # If no real tarchive file found return undef, ## else return table with list of duplicates and real file found if (!$realTarFileFound) { print LOG "No tarchive file matching $tarchive_db was found in the filesystem\n"; return undef; } else { - print LOG "Duplicate tarchive(s) found for $tarchive_db.\n"; + print LOG "Duplicate tarchive(s) found for $tarchive_db.\n"; return (\@duplicateTarFiles, $realTarFileFound); } -} +} diff --git a/tools/cleanup_paths_of_violation_tables.pl 
b/tools/cleanup_paths_of_violation_tables.pl index 10e42619f..21e7b33d5 100755 --- a/tools/cleanup_paths_of_violation_tables.pl +++ b/tools/cleanup_paths_of_violation_tables.pl @@ -17,7 +17,7 @@ my $profile; -my $profile_desc = "name of config file in ../dicom-archive/.loris_mri"; +my $profile_desc = "name of config file in ../config"; my @opt_table = ( ["-profile", "string", 1, \$profile, $profile_desc] @@ -55,17 +55,17 @@ exit $NeuroDB::ExitCodes::INVALID_ENVIRONMENT_VAR; } -if (!defined $profile || !-e "$ENV{LORIS_CONFIG}/.loris_mri/$profile") { +if (!defined $profile || !-e "$ENV{LORIS_CONFIG}/$profile") { print $Help; print STDERR "$Usage\n\tERROR: You must specify a valid and existing profile.\n\n"; exit $NeuroDB::ExitCodes::PROFILE_FAILURE; } -{ package Settings; do "$ENV{LORIS_CONFIG}/.loris_mri/$profile" } +{ package Settings; do "$ENV{LORIS_CONFIG}/$profile" } if ( !@Settings::db ) { print STDERR "\n\tERROR: You don't have a \@db setting in the file " - . "$ENV{LORIS_CONFIG}/.loris_mri/$profile \n\n"; + . 
"$ENV{LORIS_CONFIG}/$profile \n\n"; exit $NeuroDB::ExitCodes::DB_SETTINGS_FAILURE; } diff --git a/tools/correct_blake2b_and_md5_hashes_in_database.py b/tools/correct_blake2b_and_md5_hashes_in_database.py index 9b5e45ad4..39e5c3118 100755 --- a/tools/correct_blake2b_and_md5_hashes_in_database.py +++ b/tools/correct_blake2b_and_md5_hashes_in_database.py @@ -10,8 +10,6 @@ from lib.database_lib.config import Config from lib.lorisgetopt import LorisGetOpt -__license__ = 'GPLv3' - def main(): usage = ( @@ -27,7 +25,7 @@ def main(): "usage : correct_blake2b_and_md5_hashes_in_database.py -p ...\n\n" "options: \n" - "\t-p, --profile : Name of the python database config file in dicom-archive/.loris_mri\n" + "\t-p, --profile : Name of the python database config file in config\n" "\t-v, --verbose : If set, be verbose\n\n" "required options are: \n" diff --git a/tools/correct_lists_incorrectly_saved_in_parameter_file.py b/tools/correct_lists_incorrectly_saved_in_parameter_file.py index 773d2cece..72626a267 100755 --- a/tools/correct_lists_incorrectly_saved_in_parameter_file.py +++ b/tools/correct_lists_incorrectly_saved_in_parameter_file.py @@ -6,9 +6,6 @@ from lib.lorisgetopt import LorisGetOpt -__license__ = 'GPLv3' - - def main(): usage = ( "\n" @@ -24,7 +21,7 @@ def main(): "usage : correct_lists_incorrectly_saved_in_parameter_file.py -p ...\n\n" "options: \n" - "\t-p, --profile : Name of the python database config file in dicom-archive/.loris_mri\n" + "\t-p, --profile : Name of the python database config file in config\n" "\t-v, --verbose : If set, be verbose\n\n" "required options are: \n" diff --git a/tools/create_nifti_bval_bvec.pl b/tools/create_nifti_bval_bvec.pl index 81bc56f27..d799af86d 100755 --- a/tools/create_nifti_bval_bvec.pl +++ b/tools/create_nifti_bval_bvec.pl @@ -14,7 +14,7 @@ =head1 SYNOPSIS Available options are: --profile: name of the config file in C<../dicom-archive/.loris_mri> +-profile: name of the config file in C<../config> -verbose: be verbose @@ 
-60,7 +60,7 @@ =head1 AUTHORS my $profile; my $verbose = 0; -my $profile_desc = "Name of the config file in ../dicom-archive/.loris_mri"; +my $profile_desc = "Name of the config file in ../config"; my @opt_table = ( [ "-profile", "string", 1, \$profile, $profile_desc ], @@ -101,17 +101,17 @@ =head1 AUTHORS exit $NeuroDB::ExitCodes::INVALID_ENVIRONMENT_VAR; } -if (!defined $profile || !-e "$ENV{LORIS_CONFIG}/.loris_mri/$profile") { +if (!defined $profile || !-e "$ENV{LORIS_CONFIG}/$profile") { print $Help; print STDERR "$Usage\n\tERROR: You must specify a valid and existing profile.\n\n"; exit $NeuroDB::ExitCodes::PROFILE_FAILURE; } -{ package Settings; do "$ENV{LORIS_CONFIG}/.loris_mri/$profile" } +{ package Settings; do "$ENV{LORIS_CONFIG}/$profile" } if ( !@Settings::db ) { print STDERR "\n\tERROR: You don't have a \@db setting in the file " - . "$ENV{LORIS_CONFIG}/.loris_mri/$profile \n\n"; + . "$ENV{LORIS_CONFIG}/$profile \n\n"; exit $NeuroDB::ExitCodes::DB_SETTINGS_FAILURE; } diff --git a/tools/database_files_update.pl b/tools/database_files_update.pl index 0fd07e3f0..a8284b19f 100755 --- a/tools/database_files_update.pl +++ b/tools/database_files_update.pl @@ -13,7 +13,7 @@ =head1 SYNOPSIS Available option is: --profile: name of the config file in C<../dicom-archive/.loris_mri> +-profile: name of the config file in C<../config> =head1 DESCRIPTION @@ -54,20 +54,20 @@ =head2 Methods USAGE -my @args_table = (["-profile", "string", 1, \$profile, "name of config file in ../dicom-archive/.loris_mri."] +my @args_table = (["-profile", "string", 1, \$profile, "name of config file in ../config."] ); Getopt::Tabular::SetHelp ($Usage, ''); GetOptions(\@args_table, \@ARGV, \@args) || exit 1; # Input option error checking -{ package Settings; do "$ENV{LORIS_CONFIG}/.loris_mri/$profile" } -if ($profile && !@Settings::db) { - print "\n\tERROR: You don't have a configuration file named '$profile' in: $ENV{LORIS_CONFIG}/.loris_mri/ \n\n"; - exit 33; +{ package Settings; do 
"$ENV{LORIS_CONFIG}/$profile" } +if ($profile && !@Settings::db) { + print "\n\tERROR: You don't have a configuration file named '$profile' in: $ENV{LORIS_CONFIG} \n\n"; + exit 33; } -if (!$profile) { - print "$Usage\n\tERROR: You must specify a profile.\n\n"; +if (!$profile) { + print "$Usage\n\tERROR: You must specify a profile.\n\n"; exit 33; } @@ -119,7 +119,7 @@ =head2 Methods my $new_minc_location = $minc_location_refs->{$fileID}; $new_minc_location =~ s/$data_dir\///i; my ($rows_affected) = update_minc_location($fileID, $new_minc_location, $dbh); # update minc location in files table. - if ($rows_affected == 1) { + if ($rows_affected == 1) { print LOG "Updated location of minc with $fileID FileID to $new_minc_location.\n"; } else { print LOG "ERROR: $rows_affected while updating minc with $fileID FileID to $new_minc_location.\n"; @@ -137,7 +137,7 @@ =head2 Methods my $new_pic_location = $pic_location_refs->{$fileID}; $new_pic_location =~ s/$data_dir\///i; my ($rows_affected) = update_parameter_file_location($fileID, $new_pic_location, 'check_pic_filename', $dbh); # update pic location in parameter_file table. - if ($rows_affected == 1) { + if ($rows_affected == 1) { print LOG "Updated pic location with $fileID FileID to $new_pic_location.\n"; } else { print LOG "ERROR: $rows_affected while updating pic location with $fileID FileID to $new_pic_location.\n"; @@ -156,7 +156,7 @@ =head2 Methods my $new_tarchive_location = $tarchive_location_refs->{$fileID}; $new_tarchive_location =~ s/$data_dir\///i; my ($rows_affected) = update_parameter_file_location($fileID, $new_tarchive_location, 'tarchiveLocation', $dbh); # update tarchive location in parameter_file table. 
- if ($rows_affected == 1) { + if ($rows_affected == 1) { print LOG "Updated tarchive location in parameter_file with $fileID FileID to $new_tarchive_location.\n"; } else { print LOG "ERROR: $rows_affected while updating tarchive location in parameter_file with $fileID FileID to $new_tarchive_location.\n"; @@ -197,9 +197,9 @@ sub get_minc_files { $sth->execute($like); if ($sth->rows > 0) { - while (my $row = $sth->fetchrow_hashref()) { + while (my $row = $sth->fetchrow_hashref()) { my $fileID = $row->{'FileID'}; - push (@fileIDs, $fileID); + push (@fileIDs, $fileID); $minc_locations{$fileID} = $row->{'File'}; } } else { @@ -300,13 +300,13 @@ =head3 update_parameter_file_location($fileID, $new_file_location, $parameter_ty =cut sub update_parameter_file_location { - my ($fileID, $new_file_location, $parameter_type, $dbh) = @_; + my ($fileID, $new_file_location, $parameter_type, $dbh) = @_; my $select = "SELECT ParameterTypeID " . "FROM parameter_type " . "WHERE Name=?"; my $sth = $dbh->prepare($select); - $sth->execute($parameter_type); + $sth->execute($parameter_type); my $ParameterTypeID; if ($sth->rows > 0) { @@ -321,7 +321,7 @@ sub update_parameter_file_location { my $sth_update = $dbh->prepare($query); my $rows_affected = $sth_update->execute($new_file_location,$fileID,$ParameterTypeID); - return ($rows_affected); + return ($rows_affected); } diff --git a/tools/dcmconvTarchive.pl b/tools/dcmconvTarchive.pl index 111ad822e..123fa00a0 100755 --- a/tools/dcmconvTarchive.pl +++ b/tools/dcmconvTarchive.pl @@ -1,4 +1,4 @@ -#!/usr/bin/perl +#!/usr/bin/perl # Jonathan Harlap 2006 # jharlap@bic.mni.mcgill.ca # Perl tool to run dcmconv on all the dicom files in a dicomTar archive. 
@@ -38,8 +38,8 @@ ( ["General options", "section"], ["-database", "boolean", 1, \$database, "Enable dicomTar's database features"], - ["-profile","string",1, \$profile, "Specify the name of the config file which resides in .loris_mri in the current directory"], - + ["-profile","string",1, \$profile, "Specify the name of the config file which resides in the config directory"], + ["-verbose", "boolean", 1, \$verbose, "Be verbose."], ["-version", "call", undef, \&handle_version_option, "Print version and revision number and exit"], ); @@ -61,10 +61,10 @@ print STDERR "$Usage\n\tERROR: missing -profile argument\n\n"; exit $NeuroDB::ExitCodes::PROFILE_FAILURE; } -{ package Settings; do "$ENV{LORIS_CONFIG}/.loris_mri/$profile" } +{ package Settings; do "$ENV{LORIS_CONFIG}/$profile" } if ( !@Settings::db ) { print STDERR "\n\tERROR: You don't have a \@db setting in the file " - . "$ENV{LORIS_CONFIG}/.loris_mri/$profile \n\n"; + . "$ENV{LORIS_CONFIG}/$profile \n\n"; exit $NeuroDB::ExitCodes::DB_SETTINGS_FAILURE; } @@ -121,12 +121,12 @@ $dicom->fill($file); my $fileIsDicom = 1; my $studyUID = $dicom->value('0020','000D'); - + # see if the file was really dicom if($studyUID eq "") { $fileIsDicom = 0; } - + if($fileIsDicom) { dcmconv($file); } @@ -180,13 +180,13 @@ sub extract_tarchive { $dcmdir =~ s/\.tar\.gz$//; `cd $tempdir ; tar -xzf $dcmtar`; - + return $dcmdir; } sub dcmconv { my ($file) = @_; - + my $cmd = "dcmconv '${file}' '${file}'"; `$cmd`; } diff --git a/tools/delete_imaging_upload.pl b/tools/delete_imaging_upload.pl index 5d12631b9..d0d75a6bd 100755 --- a/tools/delete_imaging_upload.pl +++ b/tools/delete_imaging_upload.pl @@ -14,7 +14,7 @@ =head1 SYNOPSIS Available options are: --profile : name of the config file in C<../dicom-archive/.loris_mri> (defaults to C). +-profile : name of the config file in C<../config> (defaults to C). -ignore : ignore files whose paths exist in the database but do not exist on the file system. 
Default is to abort if such a file is found, irrespective of whether a backup file will @@ -236,7 +236,7 @@ =head2 Methods my @opt_table = ( ['-profile' , 'string' , 1, \$options{'PROFILE'}, - 'Name of config file in ../dicom-archive/.loris_mri (defaults to "prod")'], + 'Name of config file in ../config (defaults to "prod")'], ['-backup_path', 'string' , 1, \$options{'BACKUP_PATH'}, 'Path of the backup file (defaults to "imaging_upload_backup", in the current directory)'], ['-ignore' , 'const' , 0, \$options{'DIE_ON_FILE_ERROR'}, @@ -356,18 +356,18 @@ =head2 Methods exit $NeuroDB::ExitCodes::INVALID_ENVIRONMENT_VAR; } -if (!-e "$ENV{LORIS_CONFIG}/.loris_mri/$options{'PROFILE'}") { +if (!-e "$ENV{LORIS_CONFIG}/$options{'PROFILE'}") { print $Help; - print STDERR "Cannot read profile file '$ENV{LORIS_CONFIG}/.loris_mri/$options{'PROFILE'}'\n"; + print STDERR "Cannot read profile file '$ENV{LORIS_CONFIG}/$options{'PROFILE'}'\n"; exit $NeuroDB::ExitCodes::PROFILE_FAILURE; } # Incorporate contents of profile file -{ package Settings; do "$ENV{LORIS_CONFIG}/.loris_mri/$options{'PROFILE'}" } +{ package Settings; do "$ENV{LORIS_CONFIG}/$options{'PROFILE'}" } if ( !@Settings::db ) { print STDERR "ERROR: You don't have a \@db setting in file " - . "$ENV{LORIS_CONFIG}/.loris_mri/$options{'PROFILE'}"; + . "$ENV{LORIS_CONFIG}/$options{'PROFILE'}"; exit $NeuroDB::ExitCodes::DB_SETTINGS_FAILURE; } diff --git a/tools/get_dicom_files.pl b/tools/get_dicom_files.pl index 285c1e45c..d67c393f7 100755 --- a/tools/get_dicom_files.pl +++ b/tools/get_dicom_files.pl @@ -14,7 +14,7 @@ =head1 SYNOPSIS Available options are: --profile : name of the config file in C<../dicom-archive/.loris_mri> (typically C) +-profile : name of the config file in C<../config> (typically C) -name : comma separated list of MySQL patterns for the patient names that a DICOM file has to have in order to be extracted. 
A DICOM file only has to match one of the @@ -100,7 +100,7 @@ =head1 DESCRIPTION my @opt_table = ( ["-profile", "string" , 1, \$profile, - "name of config file in ../dicom-archive/.loris_mri"], + "name of config file in ../config"], ["-name" , "string" , 1, \$patientNames, "comma-separated list of MySQL patterns for the patient name"], ["-type" , "string" , 1, \$scanTypes, @@ -165,9 +165,9 @@ =head1 DESCRIPTION exit $NeuroDB::ExitCodes::PROFILE_FAILURE; } -{ package Settings; do "$ENV{LORIS_CONFIG}/.loris_mri/$profile" } +{ package Settings; do "$ENV{LORIS_CONFIG}/$profile" } if(!@Settings::db) { - die "No database settings in file $ENV{LORIS_CONFIG}/.loris_mri/$profile\n"; + die "No database settings in file $ENV{LORIS_CONFIG}/$profile\n"; } my @patientNames = defined $patientNames ? split(',', $patientNames) : (); diff --git a/tools/gzip_nifti_files.pl b/tools/gzip_nifti_files.pl index 3c99d6ce8..61553d691 100755 --- a/tools/gzip_nifti_files.pl +++ b/tools/gzip_nifti_files.pl @@ -12,7 +12,7 @@ =head1 SYNOPSIS Available options are: --profile: name of the config file in C<../dicom-archive/.loris_mri> +-profile: name of the config file in C<../config> =head1 DESCRIPTION @@ -52,7 +52,7 @@ =head2 Methods my @opt_table = ( [ "-profile", "string", 1, \$profile, - "name of config file in ../dicom-archive/.loris_mri" + "name of config file in ../config" ] ); @@ -86,11 +86,11 @@ =head2 Methods exit $NeuroDB::ExitCodes::PROFILE_FAILURE; } -{ package Settings; do "$ENV{LORIS_CONFIG}/.loris_mri/$profile" } +{ package Settings; do "$ENV{LORIS_CONFIG}/$profile" } if ( !@Settings::db ) { print STDERR "\n\tERROR: You don't have a \@db setting in the file " - . "$ENV{LORIS_CONFIG}/.loris_mri/$profile \n\n"; + . 
"$ENV{LORIS_CONFIG}/$profile \n\n"; exit $NeuroDB::ExitCodes::DB_SETTINGS_FAILURE; } diff --git a/tools/mass_perldoc_md_creation.pl b/tools/mass_perldoc_md_creation.pl index 08e087e10..b77c0d15c 100755 --- a/tools/mass_perldoc_md_creation.pl +++ b/tools/mass_perldoc_md_creation.pl @@ -13,7 +13,7 @@ =head1 SYNOPSIS Available options are: --profile: name of the config file in C<../dicom-archive/.loris_mri> +-profile: name of the config file in C<../config> -verbose: be verbose (boolean) @@ -129,7 +129,7 @@ =head1 AUTHORS my $profile; my $verbose = 0; -my $profile_desc = "name of config file in ../dicom-archive/.loris_mri"; +my $profile_desc = "name of config file in ../config"; my @opt_table = ( [ "-profile", "string", 1, \$profile, $profile_desc ], @@ -165,10 +165,10 @@ =head1 AUTHORS print STDERR "$Usage\n\tERROR: missing -profile argument\n\n"; exit $NeuroDB::ExitCodes::PROFILE_FAILURE; } -{ package Settings; do "$ENV{LORIS_CONFIG}/.loris_mri/$profile" } +{ package Settings; do "$ENV{LORIS_CONFIG}/$profile" } if ( !@Settings::db ) { print STDERR "\n\tERROR: You don't have a \@db setting in the file " - . "$ENV{LORIS_CONFIG}/.loris_mri/$profile \n\n"; + . 
"$ENV{LORIS_CONFIG}/$profile \n\n"; exit $NeuroDB::ExitCodes::DB_SETTINGS_FAILURE; } diff --git a/tools/minc_to_bids_converter.pl b/tools/minc_to_bids_converter.pl index badb9521d..f126227cb 100755 --- a/tools/minc_to_bids_converter.pl +++ b/tools/minc_to_bids_converter.pl @@ -13,7 +13,7 @@ =head1 SYNOPSIS perl tools/minc_to_bids_converter.pl C<[options]> Available options are: --profile : name of the config file in C<../dicom-archive/.loris_mri> +-profile : name of the config file in C<../config> -tarchive_id : The ID of the DICOM archive to be converted into a BIDS dataset (optional, if not set, convert all DICOM archives) -dataset_name : Name/Description of the dataset to be generated in BIDS @@ -113,7 +113,7 @@ =head2 METHODS my $verbose; my $slice_order_philips = "Not Supplied"; -my $profile_desc = "Name of the config file in ../dicom-archive/.loris_mri (typically 'prod')"; +my $profile_desc = "Name of the config file in ../config (typically 'prod')"; my $tarchive_id_desc = "TarchiveID from the tarchive table of the .tar archive to be processed."; my $dataset_name_desc = "Name/Description of the BIDS dataset to be generated"; my $slice_order_desc = "Slice order for Philips acquisition: 'ascending', 'descending' or 'Not Supplied'"; @@ -190,10 +190,10 @@ =head2 METHODS exit $NeuroDB::ExitCodes::PROFILE_FAILURE; } -{ package Settings; do "$ENV{LORIS_CONFIG}/.loris_mri/$profile" } +{ package Settings; do "$ENV{LORIS_CONFIG}/$profile" } if ( !@Settings::db ) { print STDERR "\n\tERROR: You don't have a \@db setting in the file " - . "$ENV{LORIS_CONFIG}/.loris_mri/$profile \n\n"; + . 
"$ENV{LORIS_CONFIG}/$profile \n\n"; exit $NeuroDB::ExitCodes::DB_SETTINGS_FAILURE; } diff --git a/tools/remove_jiv_data_from_db_and_filesystem.pl b/tools/remove_jiv_data_from_db_and_filesystem.pl index 6b3861b49..8fe1564e3 100755 --- a/tools/remove_jiv_data_from_db_and_filesystem.pl +++ b/tools/remove_jiv_data_from_db_and_filesystem.pl @@ -14,7 +14,7 @@ =head1 SYNOPSIS Available option is: --profile: name of the config file in ../dicom-archive/.loris_mri +-profile: name of the config file in ../config =head1 DESCRIPTION @@ -46,11 +46,11 @@ =head2 Methods my $profile; -my $profile_desc = "name of config file in ../dicom-archive/.loris_mri"; +my $profile_desc = "name of config file in ../config"; my @opt_table = ( [ "-profile", "string", 1, \$profile, - "name of config file in ../dicom-archive/.loris_mri" + "name of config file in ../config" ] ); @@ -81,10 +81,10 @@ =head2 Methods print STDERR "$Usage\n\tERROR: missing -profile argument\n\n"; exit $NeuroDB::ExitCodes::PROFILE_FAILURE; } -{ package Settings; do "$ENV{LORIS_CONFIG}/.loris_mri/$profile" } +{ package Settings; do "$ENV{LORIS_CONFIG}/$profile" } if ( !@Settings::db ) { print STDERR "\n\tERROR: You don't have a \@db setting in the file " - . "$ENV{LORIS_CONFIG}/.loris_mri/$profile \n\n"; + . "$ENV{LORIS_CONFIG}/$profile \n\n"; exit $NeuroDB::ExitCodes::DB_SETTINGS_FAILURE; } @@ -173,7 +173,7 @@ =head2 Methods my $jiv_bkp = $data_dir . 
"/archive/bkp_jiv_produced_before_LORIS_20.0"; if (-d $jiv_dir) { move($jiv_dir, $jiv_bkp) or die "Cannot move $jiv_dir to $jiv_bkp: $!\n"; - print "\n==> Successfully backed up the jiv directory to $jiv_bkp.\n"; + print "\n==> Successfully backed up the jiv directory to $jiv_bkp.\n"; } diff --git a/tools/run_defacing_script.pl b/tools/run_defacing_script.pl index 5081dac3e..4507988cf 100755 --- a/tools/run_defacing_script.pl +++ b/tools/run_defacing_script.pl @@ -14,7 +14,7 @@ =head1 SYNOPSIS Available options are: -C<-profile> : name of the config file in C<../dicom-archive/.loris_mri> +C<-profile> : name of the config file in C<../config> C<-tarchive_ids>: comma-separated list of MySQL Cs @@ -95,7 +95,7 @@ =head1 METHODS my $profile; my $session_ids; my $verbose = 0; -my $profile_desc = "Name of the config file in ../dicom-archive/.loris_mri"; +my $profile_desc = "Name of the config file in ../config"; my $session_ids_desc = "Comma-separated list of SessionIDs on which to run the " . "defacing algorithm (if not set, will deface images for " . "all SessionIDs present in the database)"; @@ -142,17 +142,17 @@ =head1 METHODS exit $NeuroDB::ExitCodes::INVALID_ENVIRONMENT_VAR; } -if (!defined $profile || !-e "$ENV{LORIS_CONFIG}/.loris_mri/$profile") { +if (!defined $profile || !-e "$ENV{LORIS_CONFIG}/$profile") { print $Help; print STDERR "$Usage\n\tERROR: You must specify a valid and existing profile.\n\n"; exit $NeuroDB::ExitCodes::PROFILE_FAILURE; } -{ package Settings; do "$ENV{LORIS_CONFIG}/.loris_mri/$profile" } +{ package Settings; do "$ENV{LORIS_CONFIG}/$profile" } if ( !@Settings::db ) { print STDERR "\n\tERROR: You don't have a \@db setting in the file " - . "$ENV{LORIS_CONFIG}/.loris_mri/$profile \n\n"; + . 
"$ENV{LORIS_CONFIG}/$profile \n\n"; exit $NeuroDB::ExitCodes::DB_SETTINGS_FAILURE; } diff --git a/tools/seriesuid2fileid.pl b/tools/seriesuid2fileid.pl index 005a5526b..86045cd82 100755 --- a/tools/seriesuid2fileid.pl +++ b/tools/seriesuid2fileid.pl @@ -47,7 +47,7 @@ =head1 DESCRIPTION use NeuroDB::DBI; my $profile = undef; -my $profile_desc = "name of config file in ../dicom-archive/.loris_mri"; +my $profile_desc = "name of config file in ../config"; my @opt_table = ( [ "-profile", "string", 1, \$profile, $profile_desc ] @@ -98,11 +98,11 @@ =head1 DESCRIPTION exit 3; } -{ package Settings; do "$ENV{LORIS_CONFIG}/.loris_mri/$profile" } +{ package Settings; do "$ENV{LORIS_CONFIG}/$profile" } if ( $profile && !@Settings::db ) { print "\n\tERROR: You don't have a configuration file named '$profile' in: - $ENV{LORIS_CONFIG}/.loris_mri/ \n\n"; + $ENV{LORIS_CONFIG} \n\n"; exit 2; } @@ -210,7 +210,7 @@ =head1 DESCRIPTION printf ("%-16s",'| '. $FileID . $ZxT); printf ("%-36s",'| '. $FileName); print "|\n"; - + } diff --git a/tools/updateHeaders.pl b/tools/updateHeaders.pl index 19950bc32..8b1eb7db3 100755 --- a/tools/updateHeaders.pl +++ b/tools/updateHeaders.pl @@ -1,4 +1,4 @@ -#!/usr/bin/perl +#!/usr/bin/perl # Jonathan Harlap 2006 # jharlap@bic.mni.mcgill.ca # Perl tool to update headers in a dicomTar archive. 
@@ -26,7 +26,7 @@ =head1 SYNOPSIS -database: Enable C's database features --profile : Name of the config file in C<../dicom-archive/.loris_mri> +-profile : Name of the config file in C<../config> -verbose : Be verbose @@ -84,8 +84,8 @@ =head1 METHODS ["General options", "section"], ["-database", "boolean", 1, \$database, "Enable dicomTar's database features"], - ["-profile","string",1, \$profile, "Specify the name of the config file which resides in .loris_mri in the current directory."], - + ["-profile","string",1, \$profile, "Specify the name of the config file which resides in the config directory."], + ["-verbose", "boolean", 1, \$verbose, "Be verbose."], ["-version", "call", undef, \&handle_version_option, "Print version and revision number and exit"], ); @@ -108,10 +108,10 @@ =head1 METHODS print STDERR "$Usage\n\tERROR: missing -profile argument\n\n"; exit $NeuroDB::ExitCodes::PROFILE_FAILURE; } -{ package Settings; do "$ENV{LORIS_CONFIG}/.loris_mri/$profile" } +{ package Settings; do "$ENV{LORIS_CONFIG}/$profile" } if ( !@Settings::db ) { print STDERR "\n\tERROR: You don't have a \@db setting in the file " - . "$ENV{LORIS_CONFIG}/.loris_mri/$profile \n\n"; + . "$ENV{LORIS_CONFIG}/$profile \n\n"; exit $NeuroDB::ExitCodes::DB_SETTINGS_FAILURE; } @@ -173,12 +173,12 @@ =head1 METHODS $dicom->fill($file); my $fileIsDicom = 1; my $studyUID = $dicom->value('0020','000D'); - + # see if the file was really dicom if($studyUID eq "") { $fileIsDicom = 0; } - + if($fileIsDicom) { if(defined($targetSeriesNumber)) { my $series = trimwhitespace($dicom->value('0020','0011')) + 0; @@ -263,7 +263,7 @@ sub extract_tarchive { $dcmdir =~ s/\.tar\.gz$//; `cd $tempdir ; tar -xzf $dcmtar`; - + return $dcmdir; } @@ -281,7 +281,7 @@ =head3 update_file_headers($file, $setRef) sub update_file_headers { my ($file, $setRef) = @_; - + # if there was already a backup file, dcmodify would crush it... 
my $protectedFile; my $backupFile = "${file}.bak"; @@ -295,7 +295,7 @@ sub update_file_headers { $cmd .= " --insert '".$set->[0]."=".$set->[1]."' "; } $cmd .= "'${file}' 2>&1"; - + `$cmd`; if(defined($protectedFile)) { @@ -335,7 +335,7 @@ =head3 handle_set_options($opt, $args) =cut -sub handle_set_options { +sub handle_set_options { my ($opt, $args) = @_; warn ("$opt option requires two arguments\n"), return 0 unless scalar(@$args) > 1; diff --git a/tools/updateHeadersBatch.pl b/tools/updateHeadersBatch.pl index ccf19d244..6158d37d9 100755 --- a/tools/updateHeadersBatch.pl +++ b/tools/updateHeadersBatch.pl @@ -1,4 +1,4 @@ -#!/usr/bin/perl +#!/usr/bin/perl # Jonathan Harlap 2006 # jharlap@bic.mni.mcgill.ca # Perl tool to update headers in a dicomTar archive en masse. @@ -30,7 +30,7 @@ =head1 SYNOPSIS -database: Enable C's database features --profile : Name of the config file in C<../dicom-archive/.loris_mri> +-profile : Name of the config file in C<../config> -verbose : Be verbose @@ -100,8 +100,8 @@ =head1 AUTHORS ["General options", "section"], ["-database", "boolean", 1, \$database, "Enable dicomTar's database features"], - ["-profile","string",1, \$profile, "Specify the name of the config file which resides in .loris_mri in the current directory"], - + ["-profile","string",1, \$profile, "Specify the name of the config file which resides in the config directory"], + ["-verbose", "boolean", 1, \$verbose, "Be verbose."], ["-version", "call", undef, \&handle_version_option, "Print version and revision number and exit"], ); @@ -123,10 +123,10 @@ =head1 AUTHORS print STDERR "$Usage\n\tERROR: missing -profile argument\n\n"; exit $NeuroDB::ExitCodes::PROFILE_FAILURE; } -{ package Settings; do "$ENV{LORIS_CONFIG}/.loris_mri/$profile" } +{ package Settings; do "$ENV{LORIS_CONFIG}/$profile" } if ( !@Settings::db ) { print STDERR "\n\tERROR: You don't have a \@db setting in the file " - . "$ENV{LORIS_CONFIG}/.loris_mri/$profile \n\n"; + . 
"$ENV{LORIS_CONFIG}/$profile \n\n"; exit $NeuroDB::ExitCodes::DB_SETTINGS_FAILURE; } @@ -174,18 +174,18 @@ =head1 AUTHORS my $find_handler = sub { my $file = $File::Find::name; if(-f $file) { - + # read the file, assuming it is dicom my $dicom = DICOM->new(); $dicom->fill($file); my $fileIsDicom = 1; my $studyUID = $dicom->value('0020','000D'); - + # see if the file was really dicom if($studyUID eq "") { $fileIsDicom = 0; } - + if($fileIsDicom) { my $keyhash = ""; for(my $i = 0; $i < $keyCols; $i++) { @@ -245,7 +245,7 @@ sub parse_specfile { my @keyList = ($1, $2); push @$keyListRef, \@keyList; } - + $key .= $bits[$i+1] . "---"; } } @@ -258,7 +258,7 @@ sub parse_specfile { $setTableRef->{$key} = \@setList; } } - + sub extract_tarchive { my ($tarchive, $tempdir) = @_; @@ -281,13 +281,13 @@ sub extract_tarchive { $dcmdir =~ s/\.tar\.gz$//; `cd $tempdir ; tar -xzf $dcmtar`; - + return $dcmdir; } sub update_file_headers { my ($file, $setRef) = @_; - + # if there was already a backup file, dcmodify would crush it... 
my $protectedFile; my $backupFile = "${file}.bak"; @@ -301,7 +301,7 @@ sub update_file_headers { $cmd .= " --insert-tag '".$set->[0]."=".$set->[1]."' "; } $cmd .= "'${file}' 2>&1"; - + `$cmd`; if(defined($protectedFile)) { diff --git a/uploadNeuroDB/HRRT_PET_insertion.pl b/uploadNeuroDB/HRRT_PET_insertion.pl index 6e1c5a6a3..f3649b229 100755 --- a/uploadNeuroDB/HRRT_PET_insertion.pl +++ b/uploadNeuroDB/HRRT_PET_insertion.pl @@ -57,7 +57,7 @@ USAGE # Set the variable descriptions to be used by Getopt::Tabular -my $profile_desc = "Name of config file in ./dicom-archive/.loris_mri."; +my $profile_desc = "Name of config file in ./config."; my $upload_id_desc = "ID of the uploaded imaging archive"; my $bic_desc = "whether the datasets comes from the BIC HRRT scanner"; my $clobber_desc = "Use this option only if you want to replace the resulting tarball!"; @@ -96,10 +96,10 @@ exit $NeuroDB::ExitCodes::PROFILE_FAILURE; } -{ package Settings; do "$ENV{LORIS_CONFIG}/.loris_mri/$profile" } +{ package Settings; do "$ENV{LORIS_CONFIG}/$profile" } if ($profile && !@Settings::db) { print STDERR "\n\tERROR: You don't have a \@db setting in the file " - . "$ENV{LORIS_CONFIG}/.loris_mri/$profile \n\n"; + . 
"$ENV{LORIS_CONFIG}/$profile \n\n"; exit $NeuroDB::ExitCodes::DB_SETTINGS_FAILURE; } diff --git a/uploadNeuroDB/NeuroDB/ImagingUpload.pm b/uploadNeuroDB/NeuroDB/ImagingUpload.pm index 53d4c9b07..5a4a068de 100755 --- a/uploadNeuroDB/NeuroDB/ImagingUpload.pm +++ b/uploadNeuroDB/NeuroDB/ImagingUpload.pm @@ -109,7 +109,7 @@ sub new { # ---------------------------------------------------------- { package Settings; - do "$ENV{LORIS_CONFIG}/.loris_mri/$profile"; + do "$ENV{LORIS_CONFIG}/$profile"; } diff --git a/uploadNeuroDB/NeuroDB/MRIProcessingUtility.pm b/uploadNeuroDB/NeuroDB/MRIProcessingUtility.pm index afe38852e..90cc787ea 100755 --- a/uploadNeuroDB/NeuroDB/MRIProcessingUtility.pm +++ b/uploadNeuroDB/NeuroDB/MRIProcessingUtility.pm @@ -125,7 +125,7 @@ sub new { ############################################################ { package Settings; - do "$ENV{LORIS_CONFIG}/.loris_mri/$profile"; + do "$ENV{LORIS_CONFIG}/$profile"; } # ---------------------------------------------------------- diff --git a/uploadNeuroDB/imaging_non_minc_insertion.pl b/uploadNeuroDB/imaging_non_minc_insertion.pl index 7b5642e61..9083a3bf3 100755 --- a/uploadNeuroDB/imaging_non_minc_insertion.pl +++ b/uploadNeuroDB/imaging_non_minc_insertion.pl @@ -12,7 +12,7 @@ =head1 SYNOPSIS Available options are: --profile : name of the config file in C<../dicom-archive/.loris-mri> (required) +-profile : name of the config file in C<../config> (required) -file_path : file to register into the database (full path from the root directory is required) (required) @@ -121,7 +121,7 @@ =head2 Methods USAGE # Set the variable descriptions to be used by Getopt::Tabular -my $profile_desc = "name of config file in ./dicom-archive/.loris_mri."; +my $profile_desc = "name of config file in ./config."; my $file_path_desc = "file to register into the database (full path from " . 
"the root directory is required)"; my $upload_id_desc = "ID of the uploaded imaging archive containing the " @@ -174,10 +174,10 @@ =head2 Methods print STDERR "$Usage\n\tERROR: You must specify a profile.\n\n"; exit $NeuroDB::ExitCodes::PROFILE_FAILURE; } -{ package Settings; do "$ENV{LORIS_CONFIG}/.loris_mri/$profile" } +{ package Settings; do "$ENV{LORIS_CONFIG}/$profile" } if ( !@Settings::db ) { print STDERR "\n\tERROR: You don't have a \@db setting in the file " - . "$ENV{LORIS_CONFIG}/.loris_mri/$profile \n\n"; + . "$ENV{LORIS_CONFIG}/$profile \n\n"; exit $NeuroDB::ExitCodes::DB_SETTINGS_FAILURE; } @@ -593,4 +593,3 @@ =head1 AUTHORS LORIS community and McGill Centre for Integrative Neuroscience =cut - diff --git a/uploadNeuroDB/imaging_upload_file.pl b/uploadNeuroDB/imaging_upload_file.pl index 66c510818..252176981 100755 --- a/uploadNeuroDB/imaging_upload_file.pl +++ b/uploadNeuroDB/imaging_upload_file.pl @@ -13,7 +13,7 @@ =head1 SYNOPSIS Available options are: --profile : name of the config file in C<../dicom-archive/.loris_mri> +-profile : name of the config file in C<../config> -upload_id : The Upload ID of the given scan uploaded @@ -100,7 +100,7 @@ =head2 Methods [ "Basic options", "section" ], [ "-profile", "string", 1, \$profile, - "name of config file in ../dicom-archive/.loris_mri" + "name of config file in ../config" ], [ "-upload_id", "string", 1, \$upload_id, @@ -113,11 +113,11 @@ =head2 Methods my $Help = < on the file (set the dicomtar to true) 3) Run C on the file (set the minc-created to true) 4) Removes the uploaded file once the previous steps have completed - 5) Update the C table + 5) Update the C table Documentation: perldoc imaging_upload_file.pl @@ -155,10 +155,10 @@ =head2 Methods print STDERR "$Usage\n\tERROR: missing -profile argument\n\n"; exit $NeuroDB::ExitCodes::PROFILE_FAILURE; } -{ package Settings; do "$ENV{LORIS_CONFIG}/.loris_mri/$profile" } +{ package Settings; do "$ENV{LORIS_CONFIG}/$profile" } if ( !@Settings::db ) { 
print STDERR "\n\tERROR: You don't have a \@db setting in the file " - . "$ENV{LORIS_CONFIG}/.loris_mri/$profile \n\n"; + . "$ENV{LORIS_CONFIG}/$profile \n\n"; exit $NeuroDB::ExitCodes::DB_SETTINGS_FAILURE; } if ( !$ARGV[0] ) { @@ -237,7 +237,7 @@ =head2 Methods $mail_user, $mail_subject, $mail_message, - ); + ); exit $NeuroDB::ExitCodes::INVALID_ARG; } @@ -262,11 +262,11 @@ =head2 Methods my $imaging_upload = NeuroDB::ImagingUpload->new( \$dbh, $db, - $TmpDir_decompressed_folder, + $TmpDir_decompressed_folder, $upload_id, - $pname, + $pname, $profile, - $verbose + $verbose ); ################################################################ @@ -293,7 +293,7 @@ =head2 Methods $mail_user, $mail_subject, $mail_message, - ); + ); exit $NeuroDB::ExitCodes::INVALID_DICOM; } @@ -427,7 +427,7 @@ =head2 Methods $mail_user, $mail_subject, $mail_message, - ); + ); ################################################################ ############### getPnameUsingUploadID########################### @@ -566,9 +566,9 @@ =head3 spool() sub spool { my ( $message, $error, $verb) = @_; - $Notify->spool('mri upload runner', - $message, - 0, + $Notify->spool('mri upload runner', + $message, + 0, 'imaging_upload_file.pl', $upload_id,$error, $verb ); diff --git a/uploadNeuroDB/imaging_upload_file_cronjob.pl b/uploadNeuroDB/imaging_upload_file_cronjob.pl index 9f8139a8f..e4a918f5a 100755 --- a/uploadNeuroDB/imaging_upload_file_cronjob.pl +++ b/uploadNeuroDB/imaging_upload_file_cronjob.pl @@ -14,7 +14,7 @@ =head1 SYNOPSIS Available options are: --profile : Name of the config file in C<../dicom-archive/.loris_mri> +-profile : Name of the config file in C<../config> -verbose : If set, be verbose @@ -63,18 +63,18 @@ =head1 DESCRIPTION [ "Basic options", "section" ], [ "-profile", "string", 1, \$profile, - "name of config file in ../dicom-archive/.loris_mri" + "name of config file in ../config" ], ["-verbose", "boolean", 1, \$verbose, "Be verbose."] ); my $Help = < 1 +SELECT UploadID, 
UploadLocation FROM mri_upload + WHERE Inserting IS NULL AND InsertionComplete <> 1 AND (TarchiveID IS NULL AND number_of_mincInserted IS NULL); QUERY print "\n" . $query . "\n" if $debug; my $sth = $dbh->prepare($query); $sth->execute(); -while(@row = $sth->fetchrow_array()) { +while(@row = $sth->fetchrow_array()) { if ( -e $row[1] ) { my $command = "imaging_upload_file.pl -upload_id $row[0] -profile $profile $row[1]"; @@ -130,7 +130,7 @@ =head1 DESCRIPTION } else { print "\nERROR: Could not find the uploaded file $row[1] for uploadID $row[0] . \nPlease, make sure " - . "the path to the uploaded file exists. + . "the path to the uploaded file exists. Upload will exit now.\n\n\n"; } } diff --git a/uploadNeuroDB/mass_nii.pl b/uploadNeuroDB/mass_nii.pl index eb4dab19c..8bd1ea5ab 100755 --- a/uploadNeuroDB/mass_nii.pl +++ b/uploadNeuroDB/mass_nii.pl @@ -13,7 +13,7 @@ =head1 SYNOPSIS Available options are: --profile : name of the config file in C<../dicom-archive/.loris_mri> +-profile : name of the config file in C<../config> -minFileID: specifies the minimum C to operate on @@ -69,8 +69,8 @@ =head1 DESCRIPTION Date : 2015/07/28 Version : $versionInfo - This script generates NIfTI images - for the inserted MINC images that + This script generates NIfTI images + for the inserted MINC images that are missing NIfTIs. Documentation: perldoc mass_nii.pl @@ -85,15 +85,15 @@ =head1 DESCRIPTION my @arg_table = ( ["Database options", "section"], - ["-profile", "string", 1, \$profile, + ["-profile", "string", 1, \$profile, "Specify the name of the config file which resides " . 
- "in ../dicom-archive/.loris_mri"], + "in ../config"], ["File control", "section"], - ["-minFileID", "integer", 1, \$minFileID, - "Specify the minimum FileID to operate on."], - ["-maxFileID", "integer", 1, \$maxFileID, - "Specify the maximum FileID to operate on."], + ["-minFileID", "integer", 1, \$minFileID, + "Specify the minimum FileID to operate on."], + ["-maxFileID", "integer", 1, \$maxFileID, + "Specify the maximum FileID to operate on."], ["General options", "section"], ["-verbose", "boolean", 1, \$verbose, "Be verbose."] @@ -111,13 +111,13 @@ =head1 DESCRIPTION exit $NeuroDB::ExitCodes::PROFILE_FAILURE; } -if (-f "$ENV{LORIS_CONFIG}/.loris_mri/$profile") { - { package Settings; do "$ENV{LORIS_CONFIG}/.loris_mri/$profile" } +if (-f "$ENV{LORIS_CONFIG}/$profile") { + { package Settings; do "$ENV{LORIS_CONFIG}/$profile" } } if ( !@Settings::db ) { print STDERR "\n\tERROR: You don't have a \@db setting in the file " - . "$ENV{LORIS_CONFIG}/.loris_mri/$profile \n\n"; + . "$ENV{LORIS_CONFIG}/$profile \n\n"; exit $NeuroDB::ExitCodes::DB_SETTINGS_FAILURE; } @@ -159,17 +159,17 @@ =head1 DESCRIPTION # Base of the query my $query = < +-profile : name of the config file in C<../config> -mincFileID: integer, minimum C to operate on @@ -68,13 +68,13 @@ =head1 DESCRIPTION my @arg_table = ( ["Database options", "section"], - ["-profile","string",1, \$profile, "Specify the name of the ". - "config file which resides in ../dicom-archive/.loris_mri"], + ["-profile","string",1, \$profile, "Specify the name of the ". 
+ "config file which resides in ../config"], ["File control", "section"], - ["-minFileID", "integer", 1, \$minFileID, - "Specify the minimum FileID to operate on."], - ["-maxFileID", "integer", 1, \$maxFileID, - "Specify the maximum FileID to operate on."], + ["-minFileID", "integer", 1, \$minFileID, + "Specify the minimum FileID to operate on."], + ["-maxFileID", "integer", 1, \$maxFileID, + "Specify the maximum FileID to operate on."], ["General options", "section"], ["-verbose", "boolean", 1, \$verbose, "Be verbose."], ); @@ -88,14 +88,14 @@ =head1 DESCRIPTION print STDERR "$Usage\n\tERROR: missing -profile argument\n\n"; exit $NeuroDB::ExitCodes::PROFILE_FAILURE; } -if (-f "$ENV{LORIS_CONFIG}/.loris_mri/$profile") { - { package Settings; do "$ENV{LORIS_CONFIG}/.loris_mri/$profile" } +if (-f "$ENV{LORIS_CONFIG}/$profile") { + { package Settings; do "$ENV{LORIS_CONFIG}/$profile" } } if ( !@Settings::db ) { print STDERR "\n\tERROR: You don't have a \@db setting in the file " - . "$ENV{LORIS_CONFIG}/.loris_mri/$profile \n\n"; + . "$ENV{LORIS_CONFIG}/$profile \n\n"; exit $NeuroDB::ExitCodes::DB_SETTINGS_FAILURE; -} +} @@ -154,7 +154,7 @@ =head1 DESCRIPTION } $query = "INSERT INTO check_pic_filenames SELECT FileID, Value FROM ". - "parameter_file WHERE ParameterTypeID=\@checkPicID AND ". + "parameter_file WHERE ParameterTypeID=\@checkPicID AND ". 
"Value IS NOT NULL"; $dbh->do($query); @@ -184,7 +184,7 @@ =head1 DESCRIPTION unless( &NeuroDB::MRI::make_pics( - \$file, $data_dir, + \$file, $data_dir, $pic_dir, $horizontalPics, $db ) ) { @@ -211,4 +211,3 @@ =head1 AUTHORS LORIS community and McGill Centre for Integrative Neuroscience =cut - diff --git a/uploadNeuroDB/minc_insertion.pl b/uploadNeuroDB/minc_insertion.pl index cb888accc..6112a9ceb 100755 --- a/uploadNeuroDB/minc_insertion.pl +++ b/uploadNeuroDB/minc_insertion.pl @@ -12,7 +12,7 @@ =head1 SYNOPSIS Available options are: --profile : name of the config file in C<../dicom-archive/.loris_mri> +-profile : name of the config file in C<../config> -uploadID : The upload ID from which this MINC was created @@ -95,15 +95,15 @@ =head2 Methods use constant GET_COLUMNS => 0; -my $versionInfo = sprintf "%d revision %2d", q$Revision: 1.24 $ +my $versionInfo = sprintf "%d revision %2d", q$Revision: 1.24 $ =~ /: (\d+)\.(\d+)/; -my ($sec,$min,$hour,$mday,$mon,$year,$wday,$yday,$isdst) +my ($sec,$min,$hour,$mday,$mon,$year,$wday,$yday,$isdst) =localtime(time); my $date = sprintf( "%4d-%02d-%02d %02d:%02d:%02d", $year+1900,$mon+1,$mday,$hour,$min,$sec ); -my $debug = 0; +my $debug = 0; my $message = ''; my $upload_id; my $hrrt = 0; @@ -111,14 +111,14 @@ =head2 Methods my $verbose = 0; # default, overwritten if scripts are run with -verbose my $notify_detailed = 'Y'; # notification_spool message flag for messages to be displayed - # with DETAILED OPTION in the front-end/imaging_uploader -my $notify_notsummary = 'N'; # notification_spool message flag for messages to be displayed - # with SUMMARY Option in the front-end/imaging_uploader -my $profile = undef; # this should never be set unless you are in a + # with DETAILED OPTION in the front-end/imaging_uploader +my $notify_notsummary = 'N'; # notification_spool message flag for messages to be displayed + # with SUMMARY Option in the front-end/imaging_uploader +my $profile = undef; # this should never be set unless you 
are in a # stable production environment -my $reckless = 0; # this is only for playing and testing. Don't +my $reckless = 0; # this is only for playing and testing. Don't # set it to 1!!! -my $force = 0; # This is a flag to force the script to run +my $force = 0; # This is a flag to force the script to run # Even if the validation has failed my $xlog = 0; # default should be 0 my $bypass_extra_file_checks = 0; # If you need to bypass the extra_file_checks, set to 1. @@ -137,25 +137,25 @@ =head2 Methods my @opt_table = ( ["Basic options","section"], - ["-profile","string",1, \$profile, "name of config file". - " in ../dicom-archive/.loris_mri"], + ["-profile","string",1, \$profile, "name of config file". + " in ../config"], ["-uploadID", "string", 1, \$upload_id, "The upload ID " . "from which this MINC was created"], ["Advanced options","section"], - ["-reckless", "boolean", 1, \$reckless,"Upload data to". + ["-reckless", "boolean", 1, \$reckless,"Upload data to". " database even if study protocol is not ". "defined or violated."], - ["-force", "boolean", 1, \$force,"Forces the script to run". + ["-force", "boolean", 1, \$force,"Forces the script to run". " even if the DICOM archive validation has failed."], - - ["-mincPath","string",1, \$minc, "The absolute path". + + ["-mincPath","string",1, \$minc, "The absolute path". " to minc-file"], - ["-tarchivePath","string",1, \$tarchive, "The absolute path". + ["-tarchivePath","string",1, \$tarchive, "The absolute path". " to tarchive-file"], ["-uploadID", "string", 1, \$upload_id, "The upload ID " . 
@@ -189,11 +189,11 @@ =head2 Methods my $Help = <writeErrorLog($message,6,$logfile); + $utility->writeErrorLog($message,6,$logfile); $notifier->spool('tarchive validation', $message, 0, 'minc_insertion.pl', $upload_id, 'Y', $notify_notsummary); @@ -571,7 +571,7 @@ =head2 Methods ####### Get the $sessionID #################################### ################################################################ my($sessionRef, $errMsg) = NeuroDB::MRI::getSessionInformation( - $subjectIDsref, + $subjectIDsref, $studyInfo{'DateAcquired'}, $dbh, $db @@ -614,7 +614,7 @@ =head2 Methods $notify_notsummary ); exit $NeuroDB::ExitCodes::FILE_NOT_UNIQUE; -} +} ################################################################ ## at this point things will appear in the database ############ @@ -663,7 +663,7 @@ =head2 Methods print LOG $message; print $message; $notifier->spool('minc insertion', $message, 0, - 'minc_insertion.pl', $upload_id, 'Y', + 'minc_insertion.pl', $upload_id, 'Y', $notify_notsummary); exit $NeuroDB::ExitCodes::UNKNOWN_PROTOCOL; } diff --git a/uploadNeuroDB/register_processed_data.pl b/uploadNeuroDB/register_processed_data.pl index 11295c102..61bef0b58 100755 --- a/uploadNeuroDB/register_processed_data.pl +++ b/uploadNeuroDB/register_processed_data.pl @@ -13,7 +13,7 @@ =head1 SYNOPSIS Available options are: --profile : name of config file in C<../dicom-archive/.loris_mri> +-profile : name of config file in C<../config> -file : file that will be registered in the database (full path from the root directory is required) @@ -100,7 +100,7 @@ =head2 Methods USAGE my @args_table = ( - ["-profile", "string", 1, \$profile, "name of config file in ../dicom-archive/.loris_mri."], + ["-profile", "string", 1, \$profile, "name of config file in ../config."], ["-file", "string", 1, \$filename, "file that will be registered in the database (full path from the root directory is required)"], ["-sourceFileID", "string", 1, \$sourceFileID, "FileID of the raw input dataset that was 
processed to obtain the file to be registered in the database"], ["-sourcePipeline", "string", 1, \$sourcePipeline, "Pipeline name that was used to obtain the file to be registered (example: DTIPrep_pipeline)"], @@ -122,10 +122,10 @@ =head2 Methods print STDERR "$Usage\n\tERROR: missing -profile argument\n\n"; exit $NeuroDB::ExitCodes::PROFILE_FAILURE; } -{ package Settings; do "$ENV{LORIS_CONFIG}/.loris_mri/$profile" } +{ package Settings; do "$ENV{LORIS_CONFIG}/$profile" } if ( !@Settings::db ) { print STDERR "\n\tERROR: You don't have a \@db setting in the file " - . "$ENV{LORIS_CONFIG}/.loris_mri/$profile \n\n"; + . "$ENV{LORIS_CONFIG}/$profile \n\n"; exit $NeuroDB::ExitCodes::DB_SETTINGS_FAILURE; } diff --git a/uploadNeuroDB/tarchiveLoader.pl b/uploadNeuroDB/tarchiveLoader.pl index 9f0cf800e..5f878ec59 100755 --- a/uploadNeuroDB/tarchiveLoader.pl +++ b/uploadNeuroDB/tarchiveLoader.pl @@ -22,7 +22,7 @@ =head1 SYNOPSIS Available options are: --profile : Name of the config file in C<../dicom-archive/.loris_mri> +-profile : Name of the config file in C<../config> -uploadID : UploadID associated to this upload @@ -90,33 +90,33 @@ =head2 Methods -# Turn on autoflush for standard output buffer so that we immediately see +# Turn on autoflush for standard output buffer so that we immediately see #the results of print statements. 
$|++; ## Starting the program -my $versionInfo = sprintf "%d revision %2d", q$Revision: 1.24 $ +my $versionInfo = sprintf "%d revision %2d", q$Revision: 1.24 $ =~ /: (\d+)\.(\d+)/; ## needed for log and template -my ($sec,$min,$hour,$mday,$mon,$year,$wday,$yday,$isdst) +my ($sec,$min,$hour,$mday,$mon,$year,$wday,$yday,$isdst) =localtime(time); my $date = sprintf( "%4d-%02d-%02d %02d:%02d:%02d", $year+1900,$mon+1,$mday,$hour,$min,$sec ); -my $debug = 0; +my $debug = 0; my $message = ''; my $upload_id; my $verbose = 0; # default, overwritten if the scripts are run with -verbose -my $notify_detailed = 'Y'; # notification_spool message flag for messages to be displayed - # with DETAILED OPTION in the front-end/imaging_uploader -my $notify_notsummary = 'N'; # notification_spool message flag for messages to be displayed - # with SUMMARY Option in the front-end/imaging_uploader +my $notify_detailed = 'Y'; # notification_spool message flag for messages to be displayed + # with DETAILED OPTION in the front-end/imaging_uploader +my $notify_notsummary = 'N'; # notification_spool message flag for messages to be displayed + # with SUMMARY Option in the front-end/imaging_uploader my $profile = undef; # this should never be set unless you are in a # stable production environment -my $reckless = 0; # this is only for playing and testing. Don't +my $reckless = 0; # this is only for playing and testing. Don't #set it to 1!!! -my $force = 0; # This is a flag to force the script to run +my $force = 0; # This is a flag to force the script to run # Even if the validation has failed my $xlog = 0; # default should be 0 my $valid_study = 0; @@ -129,7 +129,7 @@ =head2 Methods my @opt_table = ( ["Basic options","section"], ["-profile ","string",1, \$profile, - "Name of config file in ../dicom-archive/.loris_mri" + "Name of config file in ../config" ], ["-uploadID", "string", 1, \$upload_id, "UploadID associated to ". 
"this upload."], @@ -156,22 +156,22 @@ =head2 Methods my $Help = <); -# fixme there are better ways +# fixme there are better ways my @progs = ("convert", "Mincinfo_wrapper.pl", "mincpik.pl", $converter); # create the temp dir my $TmpDir = tempdir( - $template, TMPDIR => 1, CLEANUP => 1 + $template, TMPDIR => 1, CLEANUP => 1 ); # create logdir(if !exists) and logfile -my @temp = split(/\//, $TmpDir); +my @temp = split(/\//, $TmpDir); my $templog = $temp[$#temp]; -my $LogDir = "$data_dir/logs"; -if (!-d $LogDir) { - mkdir($LogDir, 0770); +my $LogDir = "$data_dir/logs"; +if (!-d $LogDir) { + mkdir($LogDir, 0770); } my $logfile = "$LogDir/$templog.log"; open LOG, ">$logfile"; @@ -294,9 +294,9 @@ =head2 Methods ################################################################ ############### If xlog is set, fork a tail on log file. ####### ################################################################ -my $childPID; -if ($xlog) { - $childPID = fork(); +my $childPID; +if ($xlog) { + $childPID = fork(); if ($childPID == 0) { my $command = "xterm -geometry 130x70 -e tail -f $logfile"; exec($command) or die "Command $command failed: $!\n"; @@ -353,7 +353,7 @@ =head2 Methods ###### To the actual exit value, shift right by ################ ###### eight as done below ##################################### ################################################################ -my $output = system($script); +my $output = system($script); $output = $output >> 8; ################################################################ @@ -366,7 +366,7 @@ =head2 Methods $utility->writeErrorLog( $message, $NeuroDB::ExitCodes::PROGRAM_EXECUTION_FAILURE, $logfile ); - $notifier->spool('tarchive validation', $message, 0, + $notifier->spool('tarchive validation', $message, 0, 'tarchiveLoader.pl', $upload_id, 'Y', $notify_notsummary); exit $NeuroDB::ExitCodes::PROGRAM_EXECUTION_FAILURE; @@ -397,7 +397,7 @@ =head2 Methods ###### Extract the tarchive and feed the dicom data ############ ###### Dir to 
the uploader ##################################### ################################################################ -my ($ExtractSuffix,$study_dir,$header) = +my ($ExtractSuffix,$study_dir,$header) = $utility->extractAndParseTarchive($tarchive, $upload_id, $seriesuid); @@ -454,13 +454,13 @@ =head2 Methods # ($valid_study undefined)-> move the tarchive from the #### # inbox into the tarchive library ########################## ############################################################ - if ((!defined($tarchivePath)) || - (defined($tarchivePath) && - ($tarchive =~ m/$tarchivePath\/\d\d\d\d\//i))) { - $newTarchiveLocation = $tarchive; + if ((!defined($tarchivePath)) || + (defined($tarchivePath) && + ($tarchive =~ m/$tarchivePath\/\d\d\d\d\//i))) { + $newTarchiveLocation = $tarchive; } elsif (!$valid_study) { - $newTarchiveLocation = + $newTarchiveLocation = $utility->moveAndUpdateTarchive( $tarchive, \%tarchiveInfo, $upload_id ); @@ -528,7 +528,7 @@ =head2 Methods my $mri_upload_update = $dbh->prepare($query); $mri_upload_update->execute($newCount, $mcount, $upload_id); - + ############################################################ ############# Create minc-pics ############################# ############################################################ @@ -537,9 +537,9 @@ =head2 Methods $tarchiveInfo{TarchiveID}, $profile, 0, # minFileID $row[0], maxFileID $row[1] - $debug, + $debug, $verbose); - + ############################################################ # spool a new study message ################################ ############################################################ @@ -575,7 +575,7 @@ =head2 Methods ? $NeuroDB::ExitCodes::CREATE_SESSION_FAILURE : $NeuroDB::ExitCodes::GET_SESSION_ID_FAILURE); } - + $query = "UPDATE tarchive SET SessionID=? WHERE TarchiveID=?"; $sth = $dbh->prepare($query); print $query . 
"\n" if $debug; diff --git a/uploadNeuroDB/tarchive_validation.pl b/uploadNeuroDB/tarchive_validation.pl index 93526be13..343ad144c 100755 --- a/uploadNeuroDB/tarchive_validation.pl +++ b/uploadNeuroDB/tarchive_validation.pl @@ -13,7 +13,7 @@ =head1 SYNOPSIS Available options are: --profile : name of the config file in C<../dicom-archive/.loris-mri> +-profile : name of the config file in C<../config> -uploadID : UploadID associated to the DICOM archive to validate @@ -86,14 +86,14 @@ =head2 Methods use NeuroDB::objectBroker::ConfigOB; -my $versionInfo = sprintf "%d revision %2d", q$Revision: 1.24 $ +my $versionInfo = sprintf "%d revision %2d", q$Revision: 1.24 $ =~ /: (\d+)\.(\d+)/; my ($sec,$min,$hour,$mday,$mon,$year,$wday,$yday,$isdst) =localtime(time); my $date = sprintf( "%4d-%02d-%02d %02d:%02d:%02d", $year+1900,$mon+1,$mday,$hour,$min,$sec ); -my $debug = 0 ; +my $debug = 0 ; my $where = ''; my $sth = undef; my $query = ''; @@ -111,7 +111,7 @@ =head2 Methods my @opt_table = ( ["Basic options","section"], ["-profile","string",1, \$profile, - "name of config file in ../dicom-archive/.loris_mri"], + "name of config file in ../config"], ["-uploadID", "string", 1, \$upload_id, "UploadID associated to ". 
"the DICOM archive to validate."], ["-reckless", "boolean", 1, \$reckless, @@ -127,11 +127,11 @@ =head2 Methods my $Help = < 1, CLEANUP => 1 ); my @temp = split(/\//, $TmpDir); my $templog = $temp[$#temp]; -my $LogDir = "$data_dir/logs"; -if (!-d $LogDir) { - mkdir($LogDir, 0770); +my $LogDir = "$data_dir/logs"; +if (!-d $LogDir) { + mkdir($LogDir, 0770); } my $logfile = "$LogDir/$templog.log"; open LOG, ">>", $logfile or die "Error Opening $logfile"; @@ -279,14 +279,14 @@ =head2 Methods $utility->validateArchive($tarchive, \%tarchiveInfo, $upload_id); ################################################################ -### Verify PSC information using whatever field ################ +### Verify PSC information using whatever field ################ ### contains site string ####################################### ################################################################ my ($center_name, $centerID) = $utility->determinePSC(\%tarchiveInfo, 1, $upload_id); ################################################################ ################################################################ -### Determine the ScannerID (optionally create a ############### +### Determine the ScannerID (optionally create a ############### ### new one if necessary) ###################################### ################################################################ ################################################################ @@ -352,4 +352,3 @@ =head1 AUTHORS LORIS community and McGill Centre for Integrative Neuroscience =cut -