diff --git a/ci/Jenkinsfile4AWS b/ci/Jenkinsfile4AWS index 5efc18bf89f..6ac458ad5f8 100644 --- a/ci/Jenkinsfile4AWS +++ b/ci/Jenkinsfile4AWS @@ -4,10 +4,12 @@ def CUSTOM_WORKSPACE = 'none' def HOMEgfs = 'none' def CI_CASES = '' def GH = 'none' +//Trivial change // Location of the custom workspaces for each machine in the CI system. They are persistent for each iteration of the PR. -def NodeName = [hera: 'Hera-EMC', orion: 'Orion-EMC', hercules: 'Hercules-EMC', gaea: 'Gaea', noaacloud: 'awsepicglobalworkflow'] -def custom_workspace = [hera: '/scratch1/NCEPDEV/global/CI', orion: '/work2/noaa/stmp/CI/ORION', hercules: '/work2/noaa/global/CI/HERCULES', gaea: '/gpfs/f5/epic/proj-shared/global/CI', noaacloud: '/lustre/jenkins'] -def repo_url = 'git@github.com:NOAA-EMC/global-workflow.git' +def NodeName = [hera: 'Hera-EMC', orion: 'Orion-EMC', hercules: 'Hercules-EMC', gaea: 'Gaea', awsepicglobalworkflow: 'Awsepicglobalworkflow'] +def custom_workspace = [hera: '/scratch1/NCEPDEV/global/CI', orion: '/work2/noaa/stmp/CI/ORION', hercules: '/work2/noaa/global/CI/HERCULES', gaea: '/gpfs/f5/epic/proj-shared/global/CI', awsepicglobalworkflow: '/lustre/jenkins'] +def repo_url = 'https://github.com/NOAA-EMC/global-workflow.git' +def aws_gw_name = 'global-workflow' def STATUS = 'Passed' pipeline { @@ -44,7 +46,9 @@ pipeline { for (label in pullRequest.labels) { if (label.matches("CI-(.*?)-Ready")) { def machine_name = label.split('-')[1].toString().toLowerCase() + print machine_name jenkins.model.Jenkins.get().computers.each { c -> + print c.node.selfLabel.name if (c.node.selfLabel.name == NodeName[machine_name]) { run_nodes.add(c.node.selfLabel.name) } @@ -56,7 +60,7 @@ pipeline { run_nodes.init().each { node -> def machine_name = node.split('-')[0].toLowerCase() echo "Spawning job on node: ${node} with machine name: ${machine_name}" - build job: "/global-workflow/EMC-Global-Pipeline/PR-${env.CHANGE_ID}", parameters: [ + build job: 
"/${aws_gw_name}/EPIC-AWS-CI-Pipeline/PR-${env.CHANGE_ID}", parameters: [ string(name: 'machine', value: machine_name), string(name: 'Node', value: node) ], wait: false @@ -79,11 +83,11 @@ pipeline { Machine = machine[0].toUpperCase() + machine.substring(1) echo "Getting Common Workspace for ${Machine}" ws("${custom_workspace[machine]}/${env.CHANGE_ID}") { - properties([parameters([[$class: 'NodeParameterDefinition', allowedSlaves: ['built-in', 'Hercules-EMC', 'Hera-EMC', 'Orion-EMC', 'Gaea', 'awsepicglobalworkflow'], defaultSlaves: ['built-in'], name: '', nodeEligibility: [$class: 'AllNodeEligibility'], triggerIfResult: 'allCases']])]) + properties([parameters([[$class: 'NodeParameterDefinition', allowedSlaves: ['built-in', 'Hercules-EMC', 'Hera-EMC', 'Orion-EMC', 'Gaea', 'Awsepicglobalworkflow'], defaultSlaves: ['built-in'], name: '', nodeEligibility: [$class: 'AllNodeEligibility'], triggerIfResult: 'allCases']])]) GH = sh(script: "which gh || echo '~/bin/gh'", returnStdout: true).trim() - CUSTOM_WORKSPACE = "${WORKSPACE}" - HOMEgfs = "${CUSTOM_WORKSPACE}/global-workflow" - sh(script: "rm -Rf ${CUSTOM_WORKSPACE}/global-workflow; mkdir -p ${CUSTOM_WORKSPACE}/global-workflow") + CUSTOM_WORKSPACE = "${custom_workspace[machine]}/${env.CHANGE_ID}/${aws_gw_name}" + HOMEgfs = "${CUSTOM_WORKSPACE}/${aws_gw_name}" + sh(script: "rm -Rf ${CUSTOM_WORKSPACE}; mkdir -p ${CUSTOM_WORKSPACE}/${aws_gw_name}") sh(script: "rm -Rf ${CUSTOM_WORKSPACE}/RUNTESTS; mkdir -p ${CUSTOM_WORKSPACE}/RUNTESTS") sh(script: """${GH} pr edit ${env.CHANGE_ID} --repo ${repo_url} --add-label "CI-${Machine}-Building" --remove-label "CI-${Machine}-Ready" """) } @@ -95,17 +99,23 @@ pipeline { stage('3. 
Build System') { agent { label NodeName[machine].toLowerCase() } steps { + dir("${HOMEgfs}") { + checkout scm + } catchError(buildResult: 'UNSTABLE', stageResult: 'FAILURE') { script { ws(HOMEgfs) { echo "Checking out the code on ${Machine} using scm in ${HOMEgfs}" try { - checkout scm + //sh(script: "git clone --recursive ${repo_url}") + sh(script: "ls ${HOMEgfs}") } catch (Exception e) { echo "Failed to checkout the code on ${Machine} using scm in ${HOMEgfs}, try again ..." sleep time: 45, unit: 'SECONDS' try { - checkout scm + // checkout scm + //sh(script: "git clone --recursive ${repo_url}") + sh(script: "ls ${HOMEgfs}/sorc") } catch (Exception ee) { echo "Failed to checkout the code on ${Machine} using scm in ${HOMEgfs}: ${ee.getMessage()}" if (env.CHANGE_ID) { @@ -120,7 +130,9 @@ pipeline { def error_logs_message = "" dir("${HOMEgfs}/sorc") { try { - sh(script: './build_compute.sh all') // build the global-workflow executables + sh(script: 'ls ./build_compute.sh') // list files here to make sure all files exist. 
+ sh(script: './build_compute.sh gfs') // build the global-workflow executables + sh(script: './build_compute.sh gefs') // build the global-workflow executables } catch (Exception error_build) { echo "Failed to build global-workflow: ${error_build.getMessage()}" if ( fileExists("logs/error.logs") ) { diff --git a/ci/cases/pr/C48_S2SW.yaml b/ci/cases/pr/C48_S2SW.yaml index 63675645148..256299812dc 100644 --- a/ci/cases/pr/C48_S2SW.yaml +++ b/ci/cases/pr/C48_S2SW.yaml @@ -12,3 +12,4 @@ arguments: idate: 2021032312 edate: 2021032312 yaml: {{ HOMEgfs }}/ci/cases/yamls/gfs_defaults_ci.yaml + diff --git a/ci/cases/pr/C48_S2SW_extended.yaml b/ci/cases/pr/C48_S2SW_extended.yaml index 11e0af9cb94..06d3ffff4c0 100644 --- a/ci/cases/pr/C48_S2SW_extended.yaml +++ b/ci/cases/pr/C48_S2SW_extended.yaml @@ -19,4 +19,5 @@ skip_ci_on_hosts: - gaeac6 - orion - hercules + - awsepicglobalworkflow - wcoss2 # TODO run on WCOSS2 once the gfs_waveawipsbulls job is fixed diff --git a/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml b/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml index fcfb3caadc6..f3627b4c1a1 100644 --- a/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml +++ b/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml @@ -21,3 +21,4 @@ skip_ci_on_hosts: - gaeac6 - gaeac5 - orion + - awsepicglobalworkflow diff --git a/ci/cases/pr/C48mx500_hybAOWCDA.yaml b/ci/cases/pr/C48mx500_hybAOWCDA.yaml index 36ea62b2dfd..1d9a5c526b8 100644 --- a/ci/cases/pr/C48mx500_hybAOWCDA.yaml +++ b/ci/cases/pr/C48mx500_hybAOWCDA.yaml @@ -22,3 +22,4 @@ skip_ci_on_hosts: - gaeac5 - gaeac6 - orion + - awsepicglobalworkflow diff --git a/ci/cases/pr/C96C48_hybatmDA.yaml b/ci/cases/pr/C96C48_hybatmDA.yaml index c0833acf141..abb6c1b0c55 100644 --- a/ci/cases/pr/C96C48_hybatmDA.yaml +++ b/ci/cases/pr/C96C48_hybatmDA.yaml @@ -17,3 +17,6 @@ arguments: interval: 24 start: cold yaml: {{ HOMEgfs }}/ci/cases/yamls/gfs_defaults_ci.yaml + +skip_ci_on_hosts: + - awsepicglobalworkflow diff --git a/ci/cases/pr/C96C48_hybatmaerosnowDA.yaml 
b/ci/cases/pr/C96C48_hybatmaerosnowDA.yaml index e231f30b047..78106beb3e3 100644 --- a/ci/cases/pr/C96C48_hybatmaerosnowDA.yaml +++ b/ci/cases/pr/C96C48_hybatmaerosnowDA.yaml @@ -22,3 +22,4 @@ skip_ci_on_hosts: - gaeac5 - gaeac6 - hercules + - awsepicglobalworkflow diff --git a/ci/cases/pr/C96C48_ufs_hybatmDA.yaml b/ci/cases/pr/C96C48_ufs_hybatmDA.yaml index 18fe4168c4e..74f31a27940 100644 --- a/ci/cases/pr/C96C48_ufs_hybatmDA.yaml +++ b/ci/cases/pr/C96C48_ufs_hybatmDA.yaml @@ -22,3 +22,4 @@ skip_ci_on_hosts: - gaeac6 - orion - hercules + - awsepicglobalworkflow diff --git a/ci/cases/pr/C96_atm3DVar.yaml b/ci/cases/pr/C96_atm3DVar.yaml index 0b242fbfd38..74490f69aaa 100644 --- a/ci/cases/pr/C96_atm3DVar.yaml +++ b/ci/cases/pr/C96_atm3DVar.yaml @@ -18,3 +18,4 @@ arguments: skip_ci_on_hosts: - wcoss2 + - awsepicglobalworkflow diff --git a/ci/cases/pr/C96_atm3DVar_extended.yaml b/ci/cases/pr/C96_atm3DVar_extended.yaml index a76c87b5fad..27a839f076f 100644 --- a/ci/cases/pr/C96_atm3DVar_extended.yaml +++ b/ci/cases/pr/C96_atm3DVar_extended.yaml @@ -22,3 +22,4 @@ skip_ci_on_hosts: - gaeac6 - orion - hercules + - awsepicglobalworkflow diff --git a/ci/cases/pr/C96mx100_S2S.yaml b/ci/cases/pr/C96mx100_S2S.yaml index 0f61c48fe62..4ce7c5e206d 100644 --- a/ci/cases/pr/C96mx100_S2S.yaml +++ b/ci/cases/pr/C96mx100_S2S.yaml @@ -20,3 +20,4 @@ arguments: skip_ci_on_hosts: - gaeac6 - gaeac5 + - awsepicglobalworkflow diff --git a/ci/scripts/utils/ci_utils.sh b/ci/scripts/utils/ci_utils.sh index 5922243893b..5122ec68fc0 100755 --- a/ci/scripts/utils/ci_utils.sh +++ b/ci/scripts/utils/ci_utils.sh @@ -119,11 +119,20 @@ function create_experiment () { case=$(basename "${yaml_config}" .yaml) || true export pslot=${case}_${pr_sha} - source "${HOMEgfs}/ci/platforms/config.${MACHINE_ID}" + if [[ ${MACHINE_ID} == "noaacloud" ]]; then + source "${HOMEgfs}/ci/platforms/config.${PW_CSP}" + else + source "${HOMEgfs}/ci/platforms/config.${MACHINE_ID}" + fi + source 
"${HOMEgfs}/workflow/gw_setup.sh" # Remove RUNDIRS dir incase this is a retry (STMP now in host file) - STMP=$("${HOMEgfs}/ci/scripts/utils/parse_yaml.py" -y "${HOMEgfs}/workflow/hosts/${MACHINE_ID}.yaml" -k STMP -s) + if [[ ${MACHINE_ID} == "noaacloud" ]]; then + STMP=$("${HOMEgfs}/ci/scripts/utils/parse_yaml.py" -y "${HOMEgfs}/workflow/hosts/${PW_CSP}pw.yaml" -k STMP -s) + else + STMP=$("${HOMEgfs}/ci/scripts/utils/parse_yaml.py" -y "${HOMEgfs}/workflow/hosts/${MACHINE_ID}.yaml" -k STMP -s) + fi echo "Removing ${STMP}/RUNDIRS/${pslot} directory incase this is a retry" rm -Rf "${STMP}/RUNDIRS/${pslot}" diff --git a/ci/scripts/utils/launch_java_agent.sh b/ci/scripts/utils/launch_java_agent.sh index 9d9bd130d1f..725399158ad 100755 --- a/ci/scripts/utils/launch_java_agent.sh +++ b/ci/scripts/utils/launch_java_agent.sh @@ -76,6 +76,8 @@ source "${HOMEGFS_}/ush/detect_machine.sh" case ${MACHINE_ID} in hera | orion | hercules | wcoss2 | gaeac5 | gaeac6 ) echo "Launch Jenkins Java Controler on ${MACHINE_ID}";; + noaacloud ) + echo "Launch Jenkins Java Controler on ${PW_CSP}";; *) echo "Unsupported platform. Exiting with error." exit 1;; @@ -87,7 +89,12 @@ rm -f "${LOG}" HOMEgfs="${HOMEGFS_}" source "${HOMEGFS_}/ush/module-setup.sh" module use "${HOMEGFS_}/modulefiles" module load "module_gwsetup.${MACHINE_ID}" -source "${HOMEGFS_}/ci/platforms/config.${MACHINE_ID}" + +if [[ ${MACHINE_ID} == "noaacloud" ]]; then + source "${HOMEGFS_}/ci/platforms/config.${PW_CSP}" +else + source "${HOMEGFS_}/ci/platforms/config.${MACHINE_ID}" +fi JAVA_HOME="${JENKINS_AGENT_LAUNCH_DIR}/JAVA/jdk-17.0.10" if [[ !
-d "${JAVA_HOME}" ]]; then diff --git a/parm/config/gfs/config.resources.AWSPW b/parm/config/gfs/config.resources.AWSPW index d044c475fb3..0255a9adad3 100644 --- a/parm/config/gfs/config.resources.AWSPW +++ b/parm/config/gfs/config.resources.AWSPW @@ -4,13 +4,7 @@ export is_exclusive="True" unset memory - -# shellcheck disable=SC2312 -for mem_var in $(env | grep '^memory_' | cut -d= -f1); do - unset "${mem_var}" -done - -step=$1 +unset "memory_${RUN}" case ${step} in "fcst" | "efcs") @@ -24,6 +18,12 @@ case ${step} in ;; + "atmos_products" | "oceanice_products" | "wavepostsbs" ) + export PARTITION_BATCH="process" + max_tasks_per_node=24 + ;; + + *) export PARTITION_BATCH="process" max_tasks_per_node=24 diff --git a/ush/fetch-fix-data.py b/ush/fetch-fix-data.py new file mode 100644 index 00000000000..aa71f7e4f96 --- /dev/null +++ b/ush/fetch-fix-data.py @@ -0,0 +1,334 @@ +#!/usr/bin/env python +# cfetch-fix-data.py +# wei.huang@noaa.gov +# 2025-02-26 +# script to download a subset of FIX data to local machines. 
+import os +import time +import sys +import getopt +import argparse +import subprocess +from pathlib import Path +import logging + +# Create and configure logger +logging.basicConfig(filename="cfetch-fix-data.log", + format='%(asctime)s %(message)s', + filemode='w') + +# Creating an object +logger = logging.getLogger() + +# Setting the threshold of logger to DEBUG +logger.setLevel(logging.DEBUG) + +# ------------------------------------------------------------------------------ + + +class FetchFIXdata(): + + def __init__(self, atmgridarray=['C48'], ocngridarray=['500'], + fix_bucket=None, localdir=None, verbose=0): + + # self.aws_fix_bucket = f's3://noaa-nws-global-pds/fix' + self.aws_fix_bucket = fix_bucket + self.aws_cp = f'aws --no-sign-request s3 cp' + self.aws_sync = f'aws --no-sign-request s3 sync' + + self.atmgridarray = atmgridarray + self.ocngridarray = ocngridarray + self.localdir = localdir + self.verbose = verbose + + if (os.path.isdir(localdir)): + logger.info(f'Prepare to download FIX data for {atmgridarray} and {ocngridarray} to {localdir}') + else: + logger.info(f'local dir: <{localdir}> does not exist.
Stop') + sys.exit(-1) + + self.verdict = {} + self.s3dict = {} + self.s3dict['raworog'] = f'raw/orog' + + if (self.localdir.find('fix') < 0): + self.targetdir = f'{self.localdir}/fix.subset' + else: + self.targetdir = self.localdir + + # -------------------------------------------------------------------------- + def update_s3dict(self): + + self.update_s3dick_grid_independent() + self.add_grid_data() + + if (self.verbose): + self.printinfo() + + # -------------------------------------------------------------------------- + def update_s3dick_grid_independent(self): + + for key in self.fix_ver_dict.keys(): + val = self.fix_ver_dict[key] + if (key == 'aer_ver'): + self.s3dict['aer'] = f'aer/{val}' + elif (key == 'am_ver'): + self.s3dict['am'] = f'am/{val}' + elif (key == 'chem_ver'): + self.s3dict['fimdata_chem'] = f'chem/{val}/fimdata_chem' + self.s3dict['Emission_data'] = f'chem/{val}/Emission_data' + elif (key == 'datm_ver'): + self.s3dict['cfsr'] = f'datm/{val}/cfsr' + self.s3dict['gefs'] = f'datm/{val}/gefs' + self.s3dict['gfs'] = f'datm/{val}/gfs' + self.s3dict['mom6'] = f'datm/{val}/mom6' + elif (key == 'glwu_ver'): + self.s3dict['glwu'] = f'glwu/{val}' + elif (key == 'gsi_ver'): + self.s3dict['gsi'] = f'gsi/{val}' + elif (key == 'lut_ver'): + self.s3dict['lut'] = f'lut/{val}' + elif (key == 'mom6_ver'): + self.s3dict['mom6post'] = f'mom6/{val}/post' + elif (key == 'reg2grb2_ver'): + self.s3dict['reg2grb2'] = f'reg2grb2/{val}' + elif (key == 'sfc_climo_ver'): + self.s3dict['sfc_climo'] = f'sfc_climo/{val}' + elif (key == 'verif_ver'): + self.s3dict['verif'] = f'verif/{val}' + elif (key == 'wave_ver'): + self.s3dict['wave'] = f'wave/{val}' + + # -------------------------------------------------------------------------- + def add_grid_data(self): + + for key in self.fix_ver_dict.keys(): + val = self.fix_ver_dict[key] + if (key == 'orog_ver'): + self.add_atmgrid2s3dict('orog', key, val) + elif (key == 'ugwd_ver'): + self.add_atmgrid2s3dict('ugwd', key, val) +
elif (key == 'mom6_ver'): + self.add_ocngrid2s3dict('mom6', key, val) + elif (key == 'cice_ver'): + self.add_ocngrid2s3dict('cice', key, val) + elif (key == 'cpl_ver'): + self.add_cpl2s3dict('cpl', key, val) + + # -------------------------------------------------------------------------- + def add_atmgrid2s3dict(self, varname, key, val): + + for atmgrid in self.atmgridarray: + newkey = f'{key}_{atmgrid}' + self.s3dict[newkey] = f'{varname}/{val}/{atmgrid}' + + # ------------------------------------------------------------------------- + def add_ocngrid2s3dict(self, varname, key, val): + + for ocngrid in self.ocngridarray: + newkey = f'{key}_{ocngrid}' + self.s3dict[newkey] = f'{varname}/{val}/{ocngrid}' + + # ------------------------------------------------------------------------- + def add_cpl2s3dict(self, varname, key, val): + + for atmgrid in self.atmgridarray: + for ocngrid in self.ocngridarray: + newkey = f'{key}_a{atmgrid}o{ocngrid}' + self.s3dict[newkey] = f'{varname}/{val}/a{atmgrid}o{ocngrid}' + + # ------------------------------------------------------------------------- + def printinfo(self): + + logger.info(f'Preparing to fetch') + logger.info(f'ATM grid: {self.atmgridarray}') + logger.info(f'ONC grid: {self.ocngridarray}') + logger.info(f'From: {self.aws_fix_bucket}') + logger.info(f'To: {self.targetdir}') + for key in self.s3dict.keys(): + val = self.s3dict[key] + logger.info(f'{key}: {val}') + + # ------------------------------------------------------------------------- + def fetchdata(self): + + if (self.verbose): + logger.info(f'Create local fix dir: {self.targetdir}') + + path = Path(self.targetdir) + path.mkdir(parents=True, exist_ok=True) + + self.fetch_ugwp_limb_tau() + + for key in self.s3dict.keys(): + self.fetch_dir(self.s3dict[key]) + + # ------------------------------------------------------------------------- + def fetch_dir(self, dir): + + remotedir = f'{self.aws_fix_bucket}/{dir}' + localdir = f'{self.targetdir}/{dir}' + cmd =
f'{self.aws_sync} {remotedir} {localdir}' + self.download_dir(cmd, localdir) + + # -------------------------------------------------------------------------- + def download_dir(self, cmd, localdir): + + # returned_value = os.system(cmd) # returns the exit code in unix + # logger.info('returned value:', returned_value) + + if (os.path.isdir(localdir)): + logger.info(f'{localdir} already exist. skip') + else: + parentdir, dirname = os.path.split(localdir) + if (self.verbose): + logger.info(f'Create local {parentdir} dir:') + path = Path(parentdir) + path.mkdir(parents=True, exist_ok=True) + if (self.verbose): + logger.info(cmd) + logger.info(f'Downloading {localdir}') + returned_value = subprocess.call(cmd, shell=True) # returns the exit code in unix + if (self.verbose): + logger.info('returned value: %s', returned_value) + + # -------------------------------------------------------------------------- + def fetch_ugwp_limb_tau(self): + + ugwd_ver = self.fix_ver_dict['ugwd_ver'] + ugwp_limb_tau_remotepath = f'{self.aws_fix_bucket}/ugwd/{ugwd_ver}/ugwp_limb_tau.nc' + ugwp_limb_tau_localdir = f'{self.targetdir}/ugwd/{ugwd_ver}' + filename = f'{ugwp_limb_tau_localdir}/ugwp_limb_tau.nc' + path = Path(ugwp_limb_tau_localdir) + path.mkdir(parents=True, exist_ok=True) + cmd = f'{self.aws_cp} {ugwp_limb_tau_remotepath} {filename}' + self.download_file(cmd, filename) + + # ------------------------------------------------------------------------- + def download_file(self, cmd, filename): + + # returned_value = os.system(cmd) # returns the exit code in unix + # logger.info('returned value:', returned_value) + + if (os.path.isfile(filename)): + logger.info(f'{filename} already exist.
skip') + else: + if (self.verbose): + logger.info(cmd) + logger.info(f'Downloading {filename}') + returned_value = subprocess.call(cmd, shell=True) # returns the exit code in unix + if (self.verbose): + logger.info('returned value: %s', returned_value) + + # -------------------------------------------------------------------------- + def set_fix_ver_from_gwhome(self, gwhome, verdict): + + fix_ver_file = f'{gwhome}/versions/fix.ver' + self.fix_ver_dict = verdict + if (os.path.isfile(fix_ver_file)): + with open(fix_ver_file, "r") as file: + for line in file.readlines(): + if (line.find('export ') >= 0): + headstr, _, value = line.strip().partition('=') + exphead, _, key = headstr.partition(' ') + self.fix_ver_dict[key] = value + else: + logger.info(f'fix_ver_file: {fix_ver_file} does not exist.') + + # ------------------------------------------------------------------------ + def set_default_fix_ver(self, verdict): + + self.fix_ver_dict = verdict + +# ----------------------------------------------------------------------------- + + +def namespace_to_dict(namespace): + return { + k: namespace_to_dict(v) if isinstance(v, argparse.Namespace) else v + for k, v in vars(namespace).items() + } + +# ------------------------------------------------------------------------------ + + +if __name__ == '__main__': + + atmgridlist = ['C48', 'C96', 'C192', 'C384', 'C768', 'C1152'] + ocngridlist = ['500', '100', '050', '025'] + + parser = argparse.ArgumentParser() + parser.add_argument("-v", "--verbose", action="store_true", + help="increase output verbosity") + parser.add_argument("-a", "--atmgrid", type=str, required=True, + help="ATM grid, like: C48, C96, C192, C384, C768, C1152") + parser.add_argument("-o", "--ocngrid", type=str, required=True, + help="OCN grid, like: 500, 100, 050, 025") + parser.add_argument("--localdir", type=str, required=True, + help="local directory to store FIX data subset") + parser.add_argument("--gwhome", type=str, default='unknown', + help="GW home
diretory where can find fix.ver") + parser.add_argument("--fix_bucket", type=str, default='s3://noaa-nws-global-pds/fix', + help="S3 Bucket directory of FIX data") + parser.add_argument("--aer_ver", type=str, default='20220805', help="AER version") + parser.add_argument("--am_ver", type=str, default='20220805', help="AM version") + parser.add_argument("--chem_ver", type=str, default='20220805', help="chem version") + parser.add_argument("--cice_ver", type=str, default='20240416', help="cice version") + parser.add_argument("--cpl_ver", type=str, default='20230526', help="cpl version") + parser.add_argument("--datm_ver", type=str, default='20220805', help="datm version") + parser.add_argument("--glwu_ver", type=str, default='20220805', help="glwu version") + parser.add_argument("--gsi_ver", type=str, default='20240208', help="gsi version") + parser.add_argument("--lut_ver", type=str, default='20220805', help="lut version") + parser.add_argument("--mom6_ver", type=str, default='20240416', help="mom6 version") + parser.add_argument("--orog_ver", type=str, default='20231027', help="orog version") + parser.add_argument("--reg2grb2_ver", type=str, default='20220805', help="reg2grb2 version") + parser.add_argument("--sfc_climo_ver", type=str, default='20220805', help="sfc_climo version") + parser.add_argument("--ugwd_ver", type=str, default='20220805', help="ugwd version") + parser.add_argument("--verif_ver", type=str, default='20220805', help="verif version") + parser.add_argument("--wave_ver", type=str, default='20220805', help="wave version") + args = parser.parse_args() + + if args.verbose: + logger.info(f"the atmgrid is {args.atmgrid}") + else: + logger.info(f"the atmgrid is {args.atmgrid}") + + atmgrid = args.atmgrid + if (atmgrid.find(',') > 0): + atmgridarray = atmgrid.split(',') + else: + atmgridarray = [atmgrid] + + for grid in atmgridarray: + if (grid not in atmgridlist): + logger.info(f'atmgrid: {grid}') + logger.info(f'is not in supported grids: 
{atmgridlist}') + sys.exit(-1) + + ocngrid = args.ocngrid + if (ocngrid.find(',') > 0): + ocngridarray = ocngrid.split(',') + else: + ocngridarray = [ocngrid] + + for grid in ocngridarray: + if (grid not in ocngridlist): + logger.info(f'ocngrid: {grid}') + logger.info(f'is not in supported grids: {ocngridlist}') + sys.exit(-1) + + verdict = namespace_to_dict(args) + + # ------------------------------------------------------------------ + ffd = FetchFIXdata(atmgridarray=atmgridarray, + ocngridarray=ocngridarray, + fix_bucket=args.fix_bucket, + localdir=args.localdir, verbose=args.verbose) + + if (args.gwhome is None): + ffd.set_default_fix_ver(verdict) + else: + ffd.set_fix_ver_from_gwhome(args.gwhome, verdict) + + ffd.update_s3dict() + ffd.fetchdata()