From aadb0a87a8e326602de1e2f25c5fb63f969f2cbd Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Wed, 4 Feb 2026 15:35:51 -0500 Subject: [PATCH 01/71] update gfs ->global --- dev/ci/Jenkinsfile | 20 +-- dev/ci/Jenkinsfile4AWS | 64 +++---- dev/ci/scripts/driver_weekly.sh | 4 +- dev/ci/scripts/run_check_ci.sh | 10 +- dev/ci/scripts/run_check_gitlab_ci.sh | 18 +- .../unittests/test_create_experiment.py | 6 +- dev/ci/scripts/unittests/test_setup.py | 10 +- dev/ci/scripts/utils/get_host_case_list.py | 10 +- dev/ci/scripts/utils/launch_java_agent.sh | 2 +- dev/ci/scripts/utils/parse_yaml.py | 4 +- dev/ctests/scripts/execute.sh.in | 8 +- dev/job_cards/rocoto/aeroanlfinal.sh | 4 +- dev/job_cards/rocoto/aeroanlgenb.sh | 4 +- dev/job_cards/rocoto/aeroanlinit.sh | 4 +- dev/job_cards/rocoto/aeroanlvar.sh | 4 +- dev/job_cards/rocoto/aerosol_init.sh | 4 +- dev/job_cards/rocoto/anal.sh | 4 +- dev/job_cards/rocoto/analcalc.sh | 4 +- dev/job_cards/rocoto/analcalc_fv3jedi.sh | 4 +- dev/job_cards/rocoto/analdiag.sh | 4 +- dev/job_cards/rocoto/anlstat.sh | 4 +- dev/job_cards/rocoto/arch_tars.sh | 4 +- dev/job_cards/rocoto/arch_vrfy.sh | 4 +- dev/job_cards/rocoto/atmanlfinal.sh | 4 +- dev/job_cards/rocoto/atmanlfv3inc.sh | 4 +- dev/job_cards/rocoto/atmanlinit.sh | 4 +- dev/job_cards/rocoto/atmanlvar.sh | 4 +- dev/job_cards/rocoto/atmensanlfinal.sh | 4 +- dev/job_cards/rocoto/atmensanlfv3inc.sh | 4 +- dev/job_cards/rocoto/atmensanlinit.sh | 4 +- dev/job_cards/rocoto/atmensanlletkf.sh | 4 +- dev/job_cards/rocoto/atmensanlobs.sh | 4 +- dev/job_cards/rocoto/atmensanlsol.sh | 4 +- dev/job_cards/rocoto/atmos_ensstat.sh | 4 +- dev/job_cards/rocoto/atmos_products.sh | 4 +- dev/job_cards/rocoto/awips.sh | 4 +- dev/job_cards/rocoto/awips_20km_1p0deg.sh | 8 +- dev/job_cards/rocoto/cleanup.sh | 4 +- dev/job_cards/rocoto/earc_tars.sh | 4 +- dev/job_cards/rocoto/earc_vrfy.sh | 4 +- dev/job_cards/rocoto/ecen.sh | 4 +- dev/job_cards/rocoto/ecen_fv3jedi.sh | 4 +- dev/job_cards/rocoto/echgres.sh | 4 +- 
dev/job_cards/rocoto/ediag.sh | 4 +- dev/job_cards/rocoto/eobs.sh | 4 +- dev/job_cards/rocoto/epos.sh | 4 +- dev/job_cards/rocoto/esfc.sh | 4 +- dev/job_cards/rocoto/esnowanl.sh | 4 +- dev/job_cards/rocoto/eupd.sh | 4 +- dev/job_cards/rocoto/extractvars.sh | 4 +- dev/job_cards/rocoto/fbwind.sh | 4 +- dev/job_cards/rocoto/fcst.sh | 8 +- dev/job_cards/rocoto/fetch.sh | 4 +- dev/job_cards/rocoto/fit2obs.sh | 4 +- dev/job_cards/rocoto/gempak.sh | 4 +- dev/job_cards/rocoto/gempakgrb2spec.sh | 4 +- dev/job_cards/rocoto/gempakmeta.sh | 4 +- dev/job_cards/rocoto/gempakmetancdc.sh | 4 +- dev/job_cards/rocoto/gempakncdcupapgif.sh | 4 +- dev/job_cards/rocoto/gen_control_ic.sh | 4 +- dev/job_cards/rocoto/genesis.sh | 4 +- dev/job_cards/rocoto/genesis_fsu.sh | 4 +- dev/job_cards/rocoto/globus_arch.sh | 4 +- dev/job_cards/rocoto/globus_earc.sh | 4 +- dev/job_cards/rocoto/marineanlchkpt.sh | 4 +- dev/job_cards/rocoto/marineanlecen.sh | 6 +- dev/job_cards/rocoto/marineanlfinal.sh | 4 +- dev/job_cards/rocoto/marineanlinit.sh | 4 +- dev/job_cards/rocoto/marineanlletkf.sh | 4 +- dev/job_cards/rocoto/marineanlvar.sh | 4 +- dev/job_cards/rocoto/marinebmat.sh | 4 +- dev/job_cards/rocoto/marinebmatinit.sh | 4 +- dev/job_cards/rocoto/metp.sh | 4 +- dev/job_cards/rocoto/npoess.sh | 4 +- dev/job_cards/rocoto/oceanice_products.sh | 4 +- dev/job_cards/rocoto/offlineanl.sh | 4 +- dev/job_cards/rocoto/postsnd.sh | 4 +- dev/job_cards/rocoto/prep.sh | 14 +- dev/job_cards/rocoto/prep_emissions.sh | 4 +- dev/job_cards/rocoto/prep_sfc.sh | 4 +- dev/job_cards/rocoto/prepoceanobs.sh | 4 +- dev/job_cards/rocoto/sfcanl.sh | 4 +- dev/job_cards/rocoto/snowanl.sh | 4 +- dev/job_cards/rocoto/stage_ic.sh | 4 +- dev/job_cards/rocoto/tracker.sh | 4 +- dev/job_cards/rocoto/upp.sh | 8 +- dev/job_cards/rocoto/verfozn.sh | 4 +- dev/job_cards/rocoto/verfrad.sh | 4 +- dev/job_cards/rocoto/vminmon.sh | 4 +- dev/job_cards/rocoto/wave_stat.sh | 4 +- dev/job_cards/rocoto/wave_stat_pnt.sh | 4 +- 
dev/job_cards/rocoto/waveawipsbulls.sh | 4 +- dev/job_cards/rocoto/waveawipsgridded.sh | 4 +- dev/job_cards/rocoto/wavegempak.sh | 4 +- dev/job_cards/rocoto/waveinit.sh | 6 +- dev/job_cards/rocoto/wavepostbndpnt.sh | 6 +- dev/job_cards/rocoto/wavepostbndpntbll.sh | 6 +- dev/job_cards/rocoto/wavepostpnt.sh | 6 +- dev/job_cards/rocoto/wavepostsbs.sh | 6 +- dev/job_cards/rocoto/waveprep.sh | 6 +- dev/jobs/JGDAS_AERO_ANALYSIS_GENERATE_BMATRIX | 4 +- dev/jobs/JGDAS_ATMOS_CHGRES_FORENKF | 4 +- dev/jobs/JGDAS_ATMOS_GEMPAK | 6 +- dev/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC | 14 +- dev/jobs/JGDAS_ATMOS_VERFOZN | 4 +- dev/jobs/JGDAS_ATMOS_VERFRAD | 4 +- dev/jobs/JGDAS_ENKF_POST | 4 +- dev/jobs/JGDAS_FIT2OBS | 2 +- dev/jobs/JGEFS_WAVE_STAT | 6 +- dev/jobs/JGEFS_WAVE_STAT_PNT | 4 +- dev/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG | 4 +- dev/jobs/JGFS_ATMOS_CYCLONE_GENESIS | 2 +- dev/jobs/JGFS_ATMOS_CYCLONE_TRACKER | 2 +- dev/jobs/JGFS_ATMOS_FBWIND | 4 +- dev/jobs/JGFS_ATMOS_FSU_GENESIS | 2 +- dev/jobs/JGFS_ATMOS_GEMPAK | 24 +-- dev/jobs/JGFS_ATMOS_GEMPAK_META | 6 +- dev/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF | 4 +- dev/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC | 6 +- dev/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS | 4 +- dev/jobs/JGFS_ATMOS_POSTSND | 4 +- dev/jobs/JGFS_ATMOS_VERIFICATION | 4 +- dev/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE | 4 +- dev/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE | 4 +- dev/jobs/JGLOBAL_AERO_ANALYSIS_VARIATIONAL | 4 +- dev/jobs/JGLOBAL_ANALYSIS_STATS | 4 +- dev/jobs/JGLOBAL_ARCHIVE_TARS | 6 +- dev/jobs/JGLOBAL_ARCHIVE_VRFY | 4 +- dev/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE | 4 +- .../JGLOBAL_ATMENS_ANALYSIS_FV3_INCREMENT | 4 +- dev/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE | 4 +- dev/jobs/JGLOBAL_ATMENS_ANALYSIS_LETKF | 4 +- dev/jobs/JGLOBAL_ATMENS_ANALYSIS_OBS | 4 +- dev/jobs/JGLOBAL_ATMENS_ANALYSIS_SOL | 4 +- dev/jobs/JGLOBAL_ATMOS_ANALYSIS | 6 +- dev/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC | 4 +- dev/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_FV3JEDI | 4 +- dev/jobs/JGLOBAL_ATMOS_ANALYSIS_DIAG | 4 +- 
dev/jobs/JGLOBAL_ATMOS_CHGRES_GEN_CONTROL | 4 +- dev/jobs/JGLOBAL_ATMOS_ENSSTAT | 4 +- dev/jobs/JGLOBAL_ATMOS_POST_MANAGER | 4 +- dev/jobs/JGLOBAL_ATMOS_PREP_SFC | 4 +- dev/jobs/JGLOBAL_ATMOS_PRODUCTS | 4 +- dev/jobs/JGLOBAL_ATMOS_SFCANL | 4 +- dev/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC | 4 +- dev/jobs/JGLOBAL_ATMOS_UPP | 4 +- dev/jobs/JGLOBAL_ATMOS_VMINMON | 4 +- dev/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE | 4 +- dev/jobs/JGLOBAL_ATM_ANALYSIS_FV3_INCREMENT | 4 +- dev/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE | 4 +- dev/jobs/JGLOBAL_ATM_ANALYSIS_VARIATIONAL | 4 +- dev/jobs/JGLOBAL_CLEANUP | 4 +- dev/jobs/JGLOBAL_ENKF_ARCHIVE_TARS | 4 +- dev/jobs/JGLOBAL_ENKF_ARCHIVE_VRFY | 4 +- dev/jobs/JGLOBAL_ENKF_DIAG | 4 +- dev/jobs/JGLOBAL_ENKF_ECEN | 4 +- dev/jobs/JGLOBAL_ENKF_ECEN_FV3JEDI | 4 +- dev/jobs/JGLOBAL_ENKF_SELECT_OBS | 4 +- dev/jobs/JGLOBAL_ENKF_SFC | 4 +- dev/jobs/JGLOBAL_ENKF_UPDATE | 4 +- dev/jobs/JGLOBAL_ENS_GLOBUS_ARCH | 4 +- dev/jobs/JGLOBAL_EXTRACTVARS | 6 +- dev/jobs/JGLOBAL_FETCH | 4 +- dev/jobs/JGLOBAL_FORECAST | 7 +- dev/jobs/JGLOBAL_GLOBUS_ARCH | 4 +- dev/jobs/JGLOBAL_MARINE_ANALYSIS_CHECKPOINT | 4 +- dev/jobs/JGLOBAL_MARINE_ANALYSIS_ECEN | 4 +- dev/jobs/JGLOBAL_MARINE_ANALYSIS_FINALIZE | 4 +- dev/jobs/JGLOBAL_MARINE_ANALYSIS_INITIALIZE | 4 +- dev/jobs/JGLOBAL_MARINE_ANALYSIS_LETKF | 4 +- dev/jobs/JGLOBAL_MARINE_ANALYSIS_VARIATIONAL | 4 +- dev/jobs/JGLOBAL_MARINE_BMAT | 4 +- dev/jobs/JGLOBAL_MARINE_BMAT_INITIALIZE | 4 +- dev/jobs/JGLOBAL_OCEANICE_PRODUCTS | 4 +- dev/jobs/JGLOBAL_OFFLINE_ATMOS_ANALYSIS | 6 +- dev/jobs/JGLOBAL_PREP_EMISSIONS | 4 +- dev/jobs/JGLOBAL_PREP_OCEAN_OBS | 4 +- dev/jobs/JGLOBAL_SNOWENS_ANALYSIS | 4 +- dev/jobs/JGLOBAL_SNOW_ANALYSIS | 4 +- dev/jobs/JGLOBAL_STAGE_IC | 4 +- dev/jobs/JGLOBAL_WAVE_GEMPAK | 6 +- dev/jobs/JGLOBAL_WAVE_INIT | 4 +- dev/jobs/JGLOBAL_WAVE_POST_BNDPNT | 4 +- dev/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL | 4 +- dev/jobs/JGLOBAL_WAVE_POST_PNT | 4 +- dev/jobs/JGLOBAL_WAVE_POST_SBS | 6 +- dev/jobs/JGLOBAL_WAVE_PRDGEN_BULLS | 4 +- 
dev/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED | 6 +- dev/jobs/JGLOBAL_WAVE_PREP | 4 +- dev/scripts/exgdas_atmos_chgres_forenkf.sh | 10 +- dev/scripts/exgdas_atmos_gempak_gif_ncdc.sh | 4 +- dev/scripts/exgdas_atmos_nawips.sh | 4 +- dev/scripts/exgdas_atmos_verfozn.sh | 2 +- dev/scripts/exgdas_atmos_verfrad.sh | 16 +- dev/scripts/exgdas_enkf_post.sh | 6 +- dev/scripts/exgfs_aero_init_aerosol.py | 8 +- dev/scripts/exgfs_atmos_awips_20km_1p0deg.sh | 8 +- dev/scripts/exgfs_atmos_fbwind.sh | 8 +- .../exgfs_atmos_gempak_gif_ncdc_skew_t.sh | 12 +- dev/scripts/exgfs_atmos_gempak_meta.sh | 6 +- dev/scripts/exgfs_atmos_goes_nawips.sh | 6 +- .../exgfs_atmos_grib2_special_npoess.sh | 2 +- dev/scripts/exgfs_atmos_nawips.sh | 4 +- dev/scripts/exgfs_atmos_postsnd.sh | 10 +- dev/scripts/exgfs_wave_init.sh | 14 +- dev/scripts/exgfs_wave_nawips.sh | 6 +- dev/scripts/exgfs_wave_post_gridded_sbs.sh | 12 +- dev/scripts/exgfs_wave_post_pnt.sh | 36 ++-- dev/scripts/exgfs_wave_prdgen_bulls.sh | 4 +- dev/scripts/exgfs_wave_prdgen_gridded.sh | 4 +- dev/scripts/exgfs_wave_prep.sh | 8 +- dev/scripts/exglobal_atmos_analysis.sh | 66 +++---- dev/scripts/exglobal_atmos_analysis_calc.sh | 18 +- .../exglobal_atmos_chgres_gen_control.sh | 26 +-- dev/scripts/exglobal_atmos_ensstat.sh | 6 +- dev/scripts/exglobal_atmos_products.sh | 8 +- dev/scripts/exglobal_atmos_sfcanl.sh | 10 +- dev/scripts/exglobal_atmos_tropcy_qc_reloc.sh | 4 +- dev/scripts/exglobal_atmos_upp.py | 2 +- dev/scripts/exglobal_atmos_vminmon.sh | 6 +- dev/scripts/exglobal_diag.sh | 2 +- dev/scripts/exglobal_enkf_ecen.sh | 22 +-- dev/scripts/exglobal_enkf_select_obs.sh | 2 +- dev/scripts/exglobal_enkf_sfc.sh | 16 +- dev/scripts/exglobal_enkf_update.sh | 26 +-- dev/scripts/exglobal_extractvars.sh | 8 +- dev/scripts/exglobal_fetch.py | 2 +- dev/scripts/exglobal_forecast.sh | 30 ++-- dev/scripts/exglobal_globus_arch.py | 4 +- dev/scripts/exglobal_globus_earc.py | 4 +- dev/scripts/exglobal_oceanice_products.py | 2 +- dev/scripts/exglobal_prep_sfc.sh 
| 28 +-- dev/test/f90nmlcmp.sh | 10 +- dev/test/g2cmp.sh | 6 +- dev/test/nccmp.sh | 6 +- dev/ush/README_NET_CONVERSION.md | 132 ++++++++++++++ dev/ush/convert_from_net.sh | 57 ++++++ dev/ush/convert_to_net.sh | 62 +++++++ dev/ush/gw_setup.sh | 24 +-- dev/ush/load_modules.sh | 50 +++--- dev/workflow/create_experiment.py | 10 +- dev/workflow/generate_workflows.sh | 40 ++--- dev/workflow/rocoto/gcafs_tasks.py | 62 +++---- dev/workflow/rocoto/gefs_tasks.py | 38 ++-- dev/workflow/rocoto/gfs_tasks.py | 166 +++++++++--------- dev/workflow/rocoto/rocoto_xml.py | 2 +- dev/workflow/rocoto/sfs_tasks.py | 32 ++-- dev/workflow/rocoto/tasks.py | 4 +- dev/workflow/setup_buildxml.py | 10 +- dev/workflow/setup_expt.py | 2 +- dev/workflow/tests/test_configuration.py | 2 +- dev/workflow/workflow_suite.py | 2 +- .../analysis/create/jenkfgdas_diag.ecf | 2 +- .../analysis/create/jenkfgdas_select_obs.ecf | 2 +- .../analysis/create/jenkfgdas_update.ecf | 2 +- .../analysis/recenter/ecen/jenkfgdas_ecen.ecf | 2 +- .../analysis/recenter/jenkfgdas_sfc.ecf | 2 +- .../enkfgdas/forecast/jenkfgdas_fcst.ecf | 2 +- .../enkfgdas/post/jenkfgdas_post_master.ecf | 2 +- .../atmos/analysis/jgdas_atmos_analysis.ecf | 2 +- .../analysis/jgdas_atmos_analysis_calc.ecf | 2 +- .../analysis/jgdas_atmos_analysis_diag.ecf | 2 +- .../gdas/atmos/gempak/jgdas_atmos_gempak.ecf | 2 +- .../gempak/jgdas_atmos_gempak_meta_ncdc.ecf | 2 +- .../dump/jgdas_atmos_tropcy_qc_reloc.ecf | 2 +- .../obsproc/prep/jgdas_atmos_prep_sfc.ecf | 2 +- .../atmos/post/jgdas_atmos_post_manager.ecf | 2 +- .../atmos/post/jgdas_atmos_post_master.ecf | 2 +- .../jgdas_atmos_chgres_forenkf.ecf | 2 +- .../gdas/atmos/verf/jgdas_atmos_verfozn.ecf | 2 +- .../gdas/atmos/verf/jgdas_atmos_verfrad.ecf | 2 +- .../gdas/atmos/verf/jgdas_atmos_vminmon.ecf | 2 +- ecf/scripts/gdas/jgdas_forecast.ecf | 2 +- .../gdas/wave/init/jgdas_wave_init.ecf | 2 +- .../gdas/wave/post/jgdas_wave_postpnt.ecf | 2 +- .../gdas/wave/post/jgdas_wave_postsbs.ecf | 2 +- 
.../gdas/wave/prep/jgdas_wave_prep.ecf | 2 +- .../atmos/analysis/jgfs_atmos_analysis.ecf | 2 +- .../analysis/jgfs_atmos_analysis_calc.ecf | 2 +- .../gfs/atmos/gempak/jgfs_atmos_gempak.ecf | 2 +- .../atmos/gempak/jgfs_atmos_gempak_meta.ecf | 2 +- .../gempak/jgfs_atmos_gempak_ncdc_upapgif.ecf | 2 +- .../gempak/jgfs_atmos_npoess_pgrb2_0p5deg.ecf | 2 +- .../gempak/jgfs_atmos_pgrb2_spec_gempak.ecf | 2 +- .../dump/jgfs_atmos_tropcy_qc_reloc.ecf | 2 +- .../obsproc/prep/jgfs_atmos_prep_sfc.ecf | 2 +- .../atmos/post/jgfs_atmos_post_manager.ecf | 2 +- .../gfs/atmos/post/jgfs_atmos_post_master.ecf | 2 +- .../jgfs_atmos_awips_master.ecf | 2 +- .../bufr_sounding/jgfs_atmos_postsnd.ecf | 2 +- .../bulletins/jgfs_atmos_fbwind.ecf | 2 +- .../gfs/atmos/verf/jgfs_atmos_vminmon.ecf | 2 +- ecf/scripts/gfs/jgfs_forecast.ecf | 2 +- .../gfs/wave/gempak/jgfs_wave_gempak.ecf | 2 +- ecf/scripts/gfs/wave/init/jgfs_wave_init.ecf | 2 +- .../gfs/wave/post/jgfs_wave_post_bndpnt.ecf | 2 +- .../wave/post/jgfs_wave_post_bndpntbll.ecf | 2 +- .../gfs/wave/post/jgfs_wave_postpnt.ecf | 2 +- .../gfs/wave/post/jgfs_wave_postsbs.ecf | 2 +- .../gfs/wave/post/jgfs_wave_prdgen_bulls.ecf | 2 +- .../wave/post/jgfs_wave_prdgen_gridded.ecf | 2 +- ecf/scripts/gfs/wave/prep/jgfs_wave_prep.ecf | 2 +- gempak/ush/gdas_ecmwf_meta_ver.sh | 4 +- gempak/ush/gdas_meta_loop.sh | 2 +- gempak/ush/gdas_meta_na.sh | 12 +- gempak/ush/gdas_ukmet_meta_ver.sh | 4 +- gempak/ush/gempak_gdas_f000_gif.sh | 6 +- gempak/ush/gempak_gfs_f000_gif.sh | 8 +- gempak/ush/gempak_gfs_fhhh_gif.sh | 6 +- gempak/ush/gfs_meta_ak.sh | 4 +- gempak/ush/gfs_meta_bwx.sh | 4 +- gempak/ush/gfs_meta_comp.sh | 4 +- gempak/ush/gfs_meta_crb.sh | 4 +- gempak/ush/gfs_meta_hi.sh | 22 +-- gempak/ush/gfs_meta_hur.sh | 4 +- gempak/ush/gfs_meta_mar_atl.sh | 4 +- gempak/ush/gfs_meta_mar_comp.sh | 4 +- gempak/ush/gfs_meta_mar_pac.sh | 4 +- gempak/ush/gfs_meta_mar_ql.sh | 4 +- gempak/ush/gfs_meta_mar_skewt.sh | 4 +- gempak/ush/gfs_meta_mar_ver.sh | 4 +- 
gempak/ush/gfs_meta_nhsh.sh | 20 +-- gempak/ush/gfs_meta_opc_na_ver.sh | 4 +- gempak/ush/gfs_meta_opc_np_ver.sh | 4 +- gempak/ush/gfs_meta_precip.sh | 4 +- gempak/ush/gfs_meta_qpf.sh | 4 +- gempak/ush/gfs_meta_sa.sh | 4 +- gempak/ush/gfs_meta_sa2.sh | 4 +- gempak/ush/gfs_meta_trop.sh | 4 +- gempak/ush/gfs_meta_us.sh | 16 +- gempak/ush/gfs_meta_usext.sh | 20 +-- gempak/ush/gfs_meta_ver.sh | 4 +- ush/atmos_ensstat.sh | 2 +- ush/calcanl_gfs.py | 2 +- ush/forecast_postdet.sh | 16 +- ush/forecast_predet.sh | 110 ++++++------ ush/gaussian_sfcanl.sh | 8 +- ush/gfs_bufr.sh | 10 +- ush/gfs_sndp.sh | 4 +- ush/global_cycle.sh | 50 +++--- ush/interp_atmos_master.sh | 2 +- ush/jjob_header.sh | 10 +- ush/make_tif.sh | 2 +- ush/module-setup.sh | 2 +- ush/ozn_xtrct.sh | 4 +- ush/parsing_model_configure_FV3.sh | 4 +- ush/parsing_namelists_CICE.sh | 2 +- ush/parsing_namelists_FV3.sh | 10 +- ush/parsing_namelists_FV3_nest.sh | 8 +- ush/parsing_namelists_MOM6.sh | 4 +- ush/parsing_namelists_WW3.sh | 10 +- ush/parsing_ufs_configure.sh | 2 +- ush/preamble.sh | 2 +- ush/prep_sfc_ice_blend.sh | 4 +- ush/prep_sfc_snow.sh | 8 +- ush/python/pygfs/task/aero_analysis.py | 2 +- ush/python/pygfs/task/aero_bmatrix.py | 2 +- ush/python/pygfs/task/archive.py | 12 +- ush/python/pygfs/task/chem_fire_emission.py | 8 +- ush/python/pygfs/task/fetch.py | 2 +- ush/python/pygfs/task/globus_hpss.py | 2 +- ush/python/pygfs/task/marine_analysis.py | 2 +- ush/python/pygfs/task/marine_bmat.py | 4 +- ush/python/pygfs/task/marine_letkf.py | 2 +- ush/python/pygfs/task/marine_recenter.py | 2 +- ush/python/pygfs/task/nexus_emission.py | 6 +- ush/python/pygfs/task/offline_analysis.py | 2 +- ush/python/pygfs/task/snow_analysis.py | 2 +- ush/python/pygfs/task/snowens_analysis.py | 2 +- ush/python/pygfs/utils/archive_tar_vars.py | 6 +- ush/python/pygfs/utils/archive_vrfy_vars.py | 2 +- ush/python/pygfs/utils/marine_da_utils.py | 2 +- ush/radmon_verf_angle.sh | 6 +- ush/radmon_verf_bcoef.sh | 6 +- ush/radmon_verf_bcor.sh 
| 6 +- ush/radmon_verf_time.sh | 10 +- ush/regrid_gsiSfcIncr_to_tile.sh | 8 +- ush/run_mpmd.sh | 2 +- ush/syndat_getjtbul.sh | 4 +- ush/syndat_qctropcy.sh | 20 +-- ush/tropcy_relocate.sh | 36 ++-- ush/tropcy_relocate_extrkr.sh | 4 +- ush/wave_extractvars.sh | 2 +- ush/wave_grib2_sbs.sh | 4 +- ush/wave_grid_interp_sbs.sh | 12 +- ush/wave_grid_moddef.sh | 4 +- ush/wave_outp_spec.sh | 6 +- ush/wave_prnc_cur.sh | 8 +- ush/wave_prnc_ice.sh | 4 +- 389 files changed, 1588 insertions(+), 1336 deletions(-) create mode 100644 dev/ush/README_NET_CONVERSION.md create mode 100755 dev/ush/convert_from_net.sh create mode 100755 dev/ush/convert_to_net.sh diff --git a/dev/ci/Jenkinsfile b/dev/ci/Jenkinsfile index af508d63427..ab8cffe4dab 100644 --- a/dev/ci/Jenkinsfile +++ b/dev/ci/Jenkinsfile @@ -1,7 +1,7 @@ def Machine = 'none' def machine = 'none' def CUSTOM_WORKSPACE = 'none' -def HOMEgfs = 'none' +def HOMEglobal = 'none' def HOMEgfs_dev = 'none' def CI_CASES = '' def GH = 'none' @@ -90,7 +90,7 @@ pipeline { properties([parameters([[$class: 'NodeParameterDefinition', allowedSlaves: ['built-in', 'hercules-emc', 'hera-emc', 'ursa-emc', 'orion-emc', 'gaeaC5', 'gaeaC6-emc'], defaultSlaves: ['built-in'], name: '', nodeEligibility: [$class: 'AllNodeEligibility'], triggerIfResult: 'allCases']])]) GH = sh(script: "which gh || echo '~/bin/gh'", returnStdout: true).trim() CUSTOM_WORKSPACE = "${WORKSPACE}" - HOMEgfs = "${CUSTOM_WORKSPACE}/global-workflow" + HOMEglobal = "${CUSTOM_WORKSPACE}/global-workflow" HOMEgfs_dev = "${CUSTOM_WORKSPACE}/global-workflow/dev" sh(script: "rm -Rf ${CUSTOM_WORKSPACE}/global-workflow; mkdir -p ${CUSTOM_WORKSPACE}/global-workflow") sh(script: "rm -Rf ${CUSTOM_WORKSPACE}/RUNTESTS; mkdir -p ${CUSTOM_WORKSPACE}/RUNTESTS") @@ -106,17 +106,17 @@ pipeline { steps { catchError(buildResult: 'UNSTABLE', stageResult: 'FAILURE') { script { - ws(HOMEgfs) { - echo "Checking out the code on ${Machine} using scm in ${HOMEgfs}" + ws(HOMEglobal) { + echo "Checking out the 
code on ${Machine} using scm in ${HOMEglobal}" try { checkout scm } catch (Exception e) { - echo "Failed to checkout the code on ${Machine} using scm in ${HOMEgfs}, try again ..." + echo "Failed to checkout the code on ${Machine} using scm in ${HOMEglobal}, try again ..." sleep time: 45, unit: 'SECONDS' try { checkout scm } catch (Exception ee) { - echo "Failed to checkout the code on ${Machine} using scm in ${HOMEgfs}: ${ee.getMessage()}" + echo "Failed to checkout the code on ${Machine} using scm in ${HOMEglobal}: ${ee.getMessage()}" if (env.CHANGE_ID) { sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "Checkout **Failed** on ${Machine} in Build# ${env.BUILD_NUMBER}: ${ee.getMessage()}" """) } @@ -127,7 +127,7 @@ pipeline { def gist_url = "" def error_logs = "" def error_logs_message = "" - dir("${HOMEgfs}/sorc") { + dir("${HOMEglobal}/sorc") { try { sh(script: "${HOMEgfs_dev}/ci/scripts/utils/ci_utils.sh build") // build the global-workflow executables } catch (Exception error_build) { @@ -140,8 +140,8 @@ pipeline { if (fileExists("${line}") && readFile("${line}").length() > 0 ){ try { archiveArtifacts artifacts: "${line}", fingerprint: true - error_logs = error_logs + "${HOMEgfs}/sorc/${line} " - error_logs_message = error_logs_message + "${HOMEgfs}/sorc/${line}\n" + error_logs = error_logs + "${HOMEglobal}/sorc/${line} " + error_logs_message = error_logs_message + "${HOMEglobal}/sorc/${line}\n" } catch (Exception error_arch) { echo "Failed to archive error log ${line}: ${error_arch.getMessage()}" } } @@ -193,7 +193,7 @@ pipeline { agent { label NodeName[machine].toLowerCase() } steps { script { - ws(HOMEgfs) { + ws(HOMEglobal) { def parallelStages = CI_CASES.collectEntries { caseName -> ["${caseName}": { stage("Create ${caseName}") { diff --git a/dev/ci/Jenkinsfile4AWS b/dev/ci/Jenkinsfile4AWS index 62237587a18..4de4bc9ef0e 100644 --- a/dev/ci/Jenkinsfile4AWS +++ b/dev/ci/Jenkinsfile4AWS @@ -1,7 +1,7 @@ def Machine = 'none' def machine 
= 'none' def CUSTOM_WORKSPACE = 'none' -def HOMEgfs = 'none' +def HOMEglobal = 'none' def CI_CASES = '' def GH = 'none' //Trivial change @@ -89,7 +89,7 @@ pipeline { properties([parameters([[$class: 'NodeParameterDefinition', allowedSlaves: ['built-in', 'Hercules-EMC', 'Hera-EMC', 'Orion-EMC', 'GaeaC5', 'GaeaC6-EMC', 'Awsepicglobalworkflow'], defaultSlaves: ['built-in'], name: '', nodeEligibility: [$class: 'AllNodeEligibility'], triggerIfResult: 'allCases']])]) GH = sh(script: "which gh || echo '~/bin/gh'", returnStdout: true).trim() CUSTOM_WORKSPACE = "${custom_workspace[machine]}/${env.CHANGE_ID}/${aws_gw_name}" - HOMEgfs = "${CUSTOM_WORKSPACE}/${aws_gw_name}" + HOMEglobal = "${CUSTOM_WORKSPACE}/${aws_gw_name}" sh(script: "rm -Rf ${CUSTOM_WORKSPACE}; mkdir -p ${CUSTOM_WORKSPACE}/${aws_gw_name}") sh(script: "rm -Rf ${CUSTOM_WORKSPACE}/RUNTESTS; mkdir -p ${CUSTOM_WORKSPACE}/RUNTESTS") sh(script: """${GH} pr edit ${env.CHANGE_ID} --repo ${repo_url} --add-label "CI-${Machine}-Building" --remove-label "CI-${Machine}-Ready" """) @@ -102,26 +102,26 @@ pipeline { stage('3. Build System') { agent { label NodeName[machine].toLowerCase() } steps { - dir("${HOMEgfs}") { + dir("${HOMEglobal}") { checkout scm } catchError(buildResult: 'UNSTABLE', stageResult: 'FAILURE') { script { - ws(HOMEgfs) { - echo "Checking out the code on ${Machine} using scm in ${HOMEgfs}" + ws(HOMEglobal) { + echo "Checking out the code on ${Machine} using scm in ${HOMEglobal}" try { // checkout scm // sh(script: "git clone --recursive ${repo_url}") - sh(script: "ls ${HOMEgfs}") + sh(script: "ls ${HOMEglobal}") } catch (Exception e) { - echo "Failed to checkout the code on ${Machine} using scm in ${HOMEgfs}, try again ..." + echo "Failed to checkout the code on ${Machine} using scm in ${HOMEglobal}, try again ..." 
sleep time: 45, unit: 'SECONDS' try { // checkout scm // sh(script: "git clone --recursive ${repo_url}") - sh(script: "ls ${HOMEgfs}/sorc") + sh(script: "ls ${HOMEglobal}/sorc") } catch (Exception ee) { - echo "Failed to checkout the code on ${Machine} using scm in ${HOMEgfs}: ${ee.getMessage()}" + echo "Failed to checkout the code on ${Machine} using scm in ${HOMEglobal}: ${ee.getMessage()}" if (env.CHANGE_ID) { sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "Checkout **Failed** on ${Machine} in Build# ${env.BUILD_NUMBER}: ${ee.getMessage()}" """) } @@ -132,9 +132,9 @@ pipeline { def gist_url = "" def error_logs = "" def error_logs_message = "" - dir("${HOMEgfs}/sorc") { + dir("${HOMEglobal}/sorc") { try { - // sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils.sh build") // build the global-workflow executables + // sh(script: "${HOMEglobal}/ci/scripts/utils/ci_utils.sh build") // build the global-workflow executables sh(script: './build_compute.sh -A ${USER} gfs gefs sfs') // build the global-workflow executables } catch (Exception error_build) { echo "Failed to build global-workflow: ${error_build.getMessage()}" @@ -146,20 +146,20 @@ pipeline { if (fileExists("${line}") && readFile("${line}").length() > 0 ){ try { archiveArtifacts artifacts: "${line}", fingerprint: true - error_logs = error_logs + "${HOMEgfs}/sorc/${line} " - error_logs_message = error_logs_message + "${HOMEgfs}/sorc/${line}\n" + error_logs = error_logs + "${HOMEglobal}/sorc/${line} " + error_logs_message = error_logs_message + "${HOMEglobal}/sorc/${line}\n" } catch (Exception error_arch) { echo "Failed to archive error log ${line}: ${error_arch.getMessage()}" } } } try { sh(script: """ - source ${HOMEgfs}/workflow/gw_setup.sh - ${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --gist PR_BUILD_${env.CHANGE_ID} | tail -n 1 + source ${HOMEglobal}/workflow/gw_setup.sh + ${HOMEglobal}/ci/scripts/utils/publish_logs.py --file ${error_logs} --gist 
PR_BUILD_${env.CHANGE_ID} | tail -n 1 """) gist_url=sh(script: """ - source ${HOMEgfs}/workflow/gw_setup.sh - ${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --gist PR_BUILD_${env.CHANGE_ID} + source ${HOMEglobal}/workflow/gw_setup.sh + ${HOMEglobal}/ci/scripts/utils/publish_logs.py --file ${error_logs} --gist PR_BUILD_${env.CHANGE_ID} """, returnStdout: true).trim() sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body 'Build **FAILED** on **${Machine}** in Build# ${env.BUILD_NUMBER} with error logs:\n```\n${error_logs_message}```\n\nFollow link here to view the contents of the above file(s): [(link)](${gist_url})' """) } catch (Exception error_comment) { @@ -182,8 +182,8 @@ pipeline { } // Get a list of CI cases to run CI_CASES = sh(script: """ - source ${HOMEgfs}/workflow/gw_setup.sh - ${HOMEgfs}/ci/scripts/utils/get_host_case_list.py ${machine} + source ${HOMEglobal}/workflow/gw_setup.sh + ${HOMEglobal}/ci/scripts/utils/get_host_case_list.py ${machine} """, returnStdout: true).trim().split() echo "Cases to run: ${CI_CASES}" } @@ -206,8 +206,8 @@ pipeline { env.RUNTESTS = "${CUSTOM_WORKSPACE}/RUNTESTS" try { error_output = sh(script: \"\"\" - source ${HOMEgfs}/workflow/gw_setup.sh - ${HOMEgfs}/ci/scripts/utils/ci_utils.sh create_experiment ${HOMEgfs}/ci/cases/pr/${caseName}.yaml + source ${HOMEglobal}/workflow/gw_setup.sh + ${HOMEglobal}/ci/scripts/utils/ci_utils.sh create_experiment ${HOMEglobal}/ci/cases/pr/${caseName}.yaml \"\"\", returnStdout: true).trim() } catch (Exception error_create) { sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "${Case} **FAILED** to create experiment on ${Machine} in BUILD# ${env.BUILD_NUMBER}\n with the error:\n\\`\\`\\`\n${error_output}\\`\\`\\`" """) @@ -219,22 +219,22 @@ pipeline { stage("Running ${caseName}") { catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') { script { - def pslot = sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils.sh get_pslot 
${CUSTOM_WORKSPACE}/RUNTESTS ${caseName}", returnStdout: true).trim() + def pslot = sh(script: "${HOMEglobal}/ci/scripts/utils/ci_utils.sh get_pslot ${CUSTOM_WORKSPACE}/RUNTESTS ${caseName}", returnStdout: true).trim() def error_file = "${CUSTOM_WORKSPACE}/RUNTESTS/${pslot}_error.logs" sh(script: " rm -f ${error_file}") try { sh(script: """ - source ${HOMEgfs}/workflow/gw_setup.sh - ${HOMEgfs}/ci/scripts/run_check_ci.sh ${CUSTOM_WORKSPACE} ${pslot} 'global-workflow' + source ${HOMEglobal}/workflow/gw_setup.sh + ${HOMEglobal}/ci/scripts/run_check_ci.sh ${CUSTOM_WORKSPACE} ${pslot} 'global-workflow' """) sh(script: """ - source ${HOMEgfs}/workflow/gw_setup.sh - ${HOMEgfs}/ci/scripts/utils/ci_utils.sh cleanup_experiment ${CUSTOM_WORKSPACE}/RUNTESTS/EXPDIR/${pslot} + source ${HOMEglobal}/workflow/gw_setup.sh + ${HOMEglobal}/ci/scripts/utils/ci_utils.sh cleanup_experiment ${CUSTOM_WORKSPACE}/RUNTESTS/EXPDIR/${pslot} """) } catch (Exception error_experment) { sh(script: """ - source ${HOMEgfs}/workflow/gw_setup.sh - ${HOMEgfs}/ci/scripts/utils/ci_utils.sh cancel_batch_jobs ${pslot} + source ${HOMEglobal}/workflow/gw_setup.sh + ${HOMEglobal}/ci/scripts/utils/ci_utils.sh cancel_batch_jobs ${pslot} """) ws(CUSTOM_WORKSPACE) { def error_logs = "" @@ -256,13 +256,13 @@ pipeline { } try { gist_url = sh(script: """ - source ${HOMEgfs}/workflow/gw_setup.sh - ${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --gist PR_${env.CHANGE_ID} + source ${HOMEglobal}/workflow/gw_setup.sh + ${HOMEglobal}/ci/scripts/utils/publish_logs.py --file ${error_logs} --gist PR_${env.CHANGE_ID} """, returnStdout: true).trim() sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "Experiment ${caseName} **FAILED** on ${Machine} in Build# ${env.BUILD_NUMBER} with error logs:\n\\`\\`\\`\n${error_logs_message}\\`\\`\\`\n\nFollow link here to view the contents of the above file(s): [(link)](${gist_url})" """) sh(script: """ - source ${HOMEgfs}/workflow/gw_setup.sh - 
${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --repo PR_${env.CHANGE_ID} + source ${HOMEglobal}/workflow/gw_setup.sh + ${HOMEglobal}/ci/scripts/utils/publish_logs.py --file ${error_logs} --repo PR_${env.CHANGE_ID} """) } catch (Exception error_comment) { echo "Failed to comment on PR: ${error_comment.getMessage()}" diff --git a/dev/ci/scripts/driver_weekly.sh b/dev/ci/scripts/driver_weekly.sh index f2545f62661..edea6b4ed85 100755 --- a/dev/ci/scripts/driver_weekly.sh +++ b/dev/ci/scripts/driver_weekly.sh @@ -8,11 +8,11 @@ set -eux # # Abstract: # -# This script runs the high resolution cases found in ${HOMEgfs}/ci/cases/weekly +# This script runs the high resolution cases found in ${HOMEglobal}/ci/cases/weekly # from the develop branch for the global-workflow repo that are intended to run on a weekly basis # from a cron job. When run it will clone and build a new branch from the EMC's global-workflow and # and create a pr using GitHub CLI by moving and replacing the yaml case files in -# ${HOMEgfs}/ci/cases/weekly to {HOMEgfs}/ci/cases/pr. Then the requisite labels are added +# ${HOMEglobal}/ci/cases/weekly to {HOMEglobal}/ci/cases/pr. Then the requisite labels are added # so that the current BASH CI framework can then run these cases. Since this script # creates a PR with the CI-Ready labels, the BASH CI framework will automatically run these cases # from that point so it is only required to run this script once from a single machine. diff --git a/dev/ci/scripts/run_check_ci.sh b/dev/ci/scripts/run_check_ci.sh index f310cc7d059..31a3ab1c8f8 100755 --- a/dev/ci/scripts/run_check_ci.sh +++ b/dev/ci/scripts/run_check_ci.sh @@ -12,11 +12,11 @@ pslot=${2:-${pslot:-?}} # Name of the experiment being tested b SYSTEM_BUILD_DIR=${3:-"global-workflow"} # Name of the system build directory, default is "global-workflow # TEST_DIR contains 2 directories; -# 1. HOMEgfs: clone of the global-workflow +# 1. HOMEglobal: clone of the global-workflow # 2. 
RUNTESTS: A directory containing EXPDIR and COMROOT for experiments # # e.g. $> tree ./TEST_DIR # ./TEST_DIR -# ├── HOMEgfs +# ├── HOMEglobal # └── RUNTESTS # ├── COMROOT # │   └── ${pslot} @@ -24,13 +24,13 @@ SYSTEM_BUILD_DIR=${3:-"global-workflow"} # Name of the system build directory, d # └── ${pslot} # Two system build directories created at build time gfs, and gdas # TODO: Make this configurable (for now all scripts run from gfs for CI at runtime) -HOMEgfs="${TEST_DIR}/${SYSTEM_BUILD_DIR}" +HOMEglobal="${TEST_DIR}/${SYSTEM_BUILD_DIR}" RUNTESTS="${TEST_DIR}/RUNTESTS" run_check_logfile="${RUNTESTS}/ci-run_check.log" # Source modules and setup logging echo "Source modules." -source "${HOMEgfs}/dev/ush/gw_setup.sh" +source "${HOMEglobal}/dev/ush/gw_setup.sh" # cd into the experiment directory echo "cd ${RUNTESTS}/EXPDIR/${pslot}" @@ -71,7 +71,7 @@ while true; do # Get job statistics echo "Gather Rocoto statistics" - full_state=$("${HOMEgfs}/dev/ci/scripts/utils/rocotostat.py" -w "${xml}" -d "${db}" -v) + full_state=$("${HOMEglobal}/dev/ci/scripts/utils/rocotostat.py" -w "${xml}" -d "${db}" -v) error_stat=$? for state in CYCLES_TOTAL CYCLES_DONE SUCCEEDED FAIL DEAD; do diff --git a/dev/ci/scripts/run_check_gitlab_ci.sh b/dev/ci/scripts/run_check_gitlab_ci.sh index 69a6afe7079..f9da53f8db9 100755 --- a/dev/ci/scripts/run_check_gitlab_ci.sh +++ b/dev/ci/scripts/run_check_gitlab_ci.sh @@ -12,11 +12,11 @@ pslot=${2:-${pslot:-?}} # Name of the experiment being tested b SYSTEM_BUILD_DIR=${3:-"global-workflow"} # Name of the system build directory, default is "global-workflow # TEST_DIR contains 2 directories; -# 1. HOMEgfs: clone of the global-workflow +# 1. HOMEglobal: clone of the global-workflow # 2. RUNTESTS: A directory containing EXPDIR and COMROOT for experiments # # e.g. 
$> tree ./TEST_DIR # ./TEST_DIR -# ├── HOMEgfs +# ├── HOMEglobal # └── RUNTESTS # ├── COMROOT # │   └── ${pslot} @@ -41,7 +41,7 @@ SYSTEM_BUILD_DIR=${3:-"global-workflow"} # Name of the system build directory, d # relevant GitHub PR. If any are missing, PR reporting will be skipped for failed cases. # ----------------------------------------------------------------------------------- -HOMEgfs="${TEST_DIR}/${SYSTEM_BUILD_DIR}" +HOMEglobal="${TEST_DIR}/${SYSTEM_BUILD_DIR}" RUNTESTS="${TEST_DIR}/RUNTESTS" run_check_logfile="${RUNTESTS}/ci-run_check.log" @@ -79,15 +79,15 @@ report_failure_to_github() { if [[ -n "${error_logs_for_gist}" ]]; then # Generate gist URLs with formatted markdown links - source "${HOMEgfs}/dev/ush/gw_setup.sh" + source "${HOMEglobal}/dev/ush/gw_setup.sh" # shellcheck disable=SC2027,SC2086,SC2155 - local gist_links=$("${HOMEgfs}/dev/ci/scripts/utils/publish_logs.py" \ + local gist_links=$("${HOMEglobal}/dev/ci/scripts/utils/publish_logs.py" \ --file ${error_logs_for_gist} --multiple --format=github \ --gist "PR_${PR_NUMBER}_${caseName}" | tail -n 1) || true # Upload to repo as well for backup # shellcheck disable=SC2027,SC2086 - "${HOMEgfs}/dev/ci/scripts/utils/publish_logs.py" \ + "${HOMEglobal}/dev/ci/scripts/utils/publish_logs.py" \ --file ${error_logs_for_gist} --repo "PR_${PR_NUMBER}_${caseName}" || true # Prepare markdown section for files links to gist for GitHub comment @@ -120,7 +120,7 @@ EOF ) # Post GitHub comment - cd "${HOMEgfs}" + cd "${HOMEglobal}" "${GH}" pr comment "${PR_NUMBER}" --repo "${GW_REPO_URL}" --body "${comment_body}" || true # Move processed error log to prevent reprocessing @@ -134,7 +134,7 @@ EOF # Source modules and setup logging echo "Source modules." 
-source "${HOMEgfs}/dev/ush/gw_setup.sh" +source "${HOMEglobal}/dev/ush/gw_setup.sh" # TODO We need to add local python env to support PyGitHub PYTHONPATH="${PYTHONPATH}:$(python3 -m site --user-site)" || true echo "Updated PYTHONPATH: ${PYTHONPATH}" @@ -179,7 +179,7 @@ while true; do caseName="${pslot%_*-*}" # caseName recovered from pslot: (caseName_- (eg. C48_ATM_90f10fc1-3517) echo "Gather Rocoto statistics for (${caseName} on ${MACHINE_ID^})" export ROCOTOSTAT_LOG_FILE="${RUNTESTS}/EXPDIR/${pslot}/logs/${caseName}_rocotostat.log" - source <("${HOMEgfs}/dev/ci/scripts/utils/rocotostat.py" -w "${xml}" -d "${db}" --declare --thread-logging) || true + source <("${HOMEglobal}/dev/ci/scripts/utils/rocotostat.py" -w "${xml}" -d "${db}" --declare --thread-logging) || true echo -e "\tCompleted Cycles: ${CYCLES_DONE}/${CYCLES_TOTAL} \tCompleted Jobs : ${JOBS_DONE}/${JOBS_TOTAL} diff --git a/dev/ci/scripts/unittests/test_create_experiment.py b/dev/ci/scripts/unittests/test_create_experiment.py index fd82119a3b0..6f58385b9b6 100644 --- a/dev/ci/scripts/unittests/test_create_experiment.py +++ b/dev/ci/scripts/unittests/test_create_experiment.py @@ -3,7 +3,7 @@ from pathlib import Path from wxflow import Executable, find_upward -HOMEgfs = find_upward('.github') +HOMEglobal = find_upward('.github') current_dir = os.path.dirname(os.path.abspath(__file__)) RUNDIR_FAKE = os.path.join(current_dir, 'testdata/RUNTESTS') ICSDIR_FAKE = os.path.join(current_dir, 'testdata/ICSDIR') @@ -11,9 +11,9 @@ def test_create_experiment(): - create_experiment = Executable(f'{HOMEgfs}/dev/workflow/create_experiment.py') + create_experiment = Executable(f'{HOMEglobal}/dev/workflow/create_experiment.py') create_experiment.add_default_arg(['--overwrite']) - yaml_dir = yaml_dir = os.path.join(HOMEgfs, 'dev/ci/cases/pr') + yaml_dir = yaml_dir = os.path.join(HOMEglobal, 'dev/ci/cases/pr') env = os.environ.copy() env['RUNTESTS'] = RUNDIR_FAKE env['ICSDIR_ROOT'] = ICSDIR_FAKE diff --git 
a/dev/ci/scripts/unittests/test_setup.py b/dev/ci/scripts/unittests/test_setup.py index 3e8bd0212d5..1c8662cbf53 100755 --- a/dev/ci/scripts/unittests/test_setup.py +++ b/dev/ci/scripts/unittests/test_setup.py @@ -5,7 +5,7 @@ from wxflow import Executable, Configuration, ProcessError, find_upward -HOMEgfs = find_upward('.github') +HOMEglobal = find_upward('.github') current_dir = os.path.dirname(os.path.abspath(__file__)) RUNDIR = os.path.join(current_dir, 'testdata/RUNTESTS') pslot = "C48_ATM" @@ -36,7 +36,7 @@ def test_setup_expt(): "--idate", "2021032312", "--edate", "2021032312", "--overwrite", "--gwrc", temp_gwrc_path ] - setup_expt_script = Executable(os.path.join(HOMEgfs, "dev/workflow/setup_expt.py")) + setup_expt_script = Executable(os.path.join(HOMEglobal, "dev/workflow/setup_expt.py")) setup_expt_script.add_default_arg(arguments) setup_expt_script() assert (setup_expt_script.returncode == 0) @@ -60,7 +60,7 @@ def test_setup_expt(): def test_setup_workflow(): - setup_workflow_script = Executable(os.path.join(HOMEgfs, "dev/workflow/setup_workflow.py")) + setup_workflow_script = Executable(os.path.join(HOMEglobal, "dev/workflow/setup_workflow.py")) cmd_args = [f"{RUNDIR}/{pslot}", "rocoto"] setup_workflow_script(*cmd_args) assert (setup_workflow_script.returncode == 0) @@ -79,10 +79,10 @@ def test_setup_workflow(): def test_setup_workflow_fail_config_env_cornercase(tmp_path): - setup_workflow_script = Executable(os.path.join(HOMEgfs, "dev/workflow/setup_workflow.py")) + setup_workflow_script = Executable(os.path.join(HOMEglobal, "dev/workflow/setup_workflow.py")) cmd_args = [f"{RUNDIR}/{pslot}", "rocoto"] env = os.environ.copy() - env['HOMEgfs'] = 'foobar' # Intentionally incorrect to trigger failure + env['HOMEglobal'] = 'foobar' # Intentionally incorrect to trigger failure try: setup_workflow_script(*cmd_args, env=env) diff --git a/dev/ci/scripts/utils/get_host_case_list.py b/dev/ci/scripts/utils/get_host_case_list.py index 546ef6ac757..dfca8edc961 100755 
--- a/dev/ci/scripts/utils/get_host_case_list.py +++ b/dev/ci/scripts/utils/get_host_case_list.py @@ -6,26 +6,26 @@ from wxflow import AttrDict, parse_j2yaml, find_upward -def get_host_cases(host, HOMEgfs=None): +def get_host_cases(host, HOMEglobal=None): """ Get list of test cases supported on a host Args: host (str): Host name to check - HOMEgfs (str, optional): Path to the global-workflow repository root directory + HOMEglobal (str, optional): Path to the global-workflow repository root directory Returns: list: List of case names (without extension) supported on the host """ - HOMEgfs = HOMEgfs or find_upward('.github') + HOMEglobal = HOMEglobal or find_upward('.github') case_list = [] # Set up data for template rendering - data = AttrDict(HOMEgfs=HOMEgfs) + data = AttrDict(HOMEglobal=HOMEglobal) data.update(os.environ) # Get all case files - case_files = glob.glob(f'{HOMEgfs}/dev/ci/cases/pr/*.yaml') + case_files = glob.glob(f'{HOMEglobal}/dev/ci/cases/pr/*.yaml') for case_yaml in case_files: # Parse the case configuration diff --git a/dev/ci/scripts/utils/launch_java_agent.sh b/dev/ci/scripts/utils/launch_java_agent.sh index 5df17cf1784..1be50215a73 100755 --- a/dev/ci/scripts/utils/launch_java_agent.sh +++ b/dev/ci/scripts/utils/launch_java_agent.sh @@ -94,7 +94,7 @@ esac LOG=launched_agent-$(date +%Y%m%d%M).log rm -f "${LOG}" -HOMEgfs="${HOMEgfs_}" source "${HOMEgfs_}/ush/module-setup.sh" +HOMEglobal="${HOMEgfs_}" source "${HOMEgfs_}/ush/module-setup.sh" module use "${HOMEgfs_}/modulefiles" module load "gw_setup.${MACHINE_ID}" diff --git a/dev/ci/scripts/utils/parse_yaml.py b/dev/ci/scripts/utils/parse_yaml.py index 980ca1b1039..acc99968e41 100755 --- a/dev/ci/scripts/utils/parse_yaml.py +++ b/dev/ci/scripts/utils/parse_yaml.py @@ -42,8 +42,8 @@ def yq(yamlfile, key): The value of the specified key in the yaml file. 
""" - HOMEgfs = find_upward('.github') - ydict = parse_j2yaml(path=yamlfile, data={'HOMEgfs': HOMEgfs}) + HOMEglobal = find_upward('.github') + ydict = parse_j2yaml(path=yamlfile, data={'HOMEglobal': HOMEglobal}) if key == 'all': return ydict list_keys = key.split('.') diff --git a/dev/ctests/scripts/execute.sh.in b/dev/ctests/scripts/execute.sh.in index 2e18ca665fe..a8e87d28b25 100755 --- a/dev/ctests/scripts/execute.sh.in +++ b/dev/ctests/scripts/execute.sh.in @@ -2,9 +2,9 @@ set -xe -HOMEgfs="@PROJECT_SOURCE_DIR@" +HOMEglobal="@PROJECT_SOURCE_DIR@" TEST_NAME=${1:?"Name of the test is required"} -TEST_NAME=$("${HOMEgfs}/dev/ci/scripts/utils/ci_utils.sh" get_pslot "@CMAKE_CURRENT_BINARY_DIR@/RUNTESTS" "${TEST_NAME}") +TEST_NAME=$("${HOMEglobal}/dev/ci/scripts/utils/ci_utils.sh" get_pslot "@CMAKE_CURRENT_BINARY_DIR@/RUNTESTS" "${TEST_NAME}") JOB=${2:?"Job name is required"} idate=$3 @@ -29,8 +29,8 @@ fi # Use custom Rocoto build with dryrun feature for testing # When GFS_CI_ROCOTO_PATH is unset, the system Rocoto will be used -source "${HOMEgfs}/ush/detect_machine.sh" -source "${HOMEgfs}/dev/ci/platforms/config.${MACHINE_ID}" +source "${HOMEglobal}/ush/detect_machine.sh" +source "${HOMEglobal}/dev/ci/platforms/config.${MACHINE_ID}" if [[ -x "${GFS_CI_ROCOTO_PATH}/rocotoboot" ]]; then yes | "${GFS_CI_ROCOTO_PATH}/rocotoboot" --dryrun -d "${TEST_NAME}.db" -w "${TEST_NAME}.xml" -v 10 -c "${idate}00" -t "${JOB}" 2> jobcard || true diff --git a/dev/job_cards/rocoto/aeroanlfinal.sh b/dev/job_cards/rocoto/aeroanlfinal.sh index a002c5c1940..b6e758541ee 100755 --- a/dev/job_cards/rocoto/aeroanlfinal.sh +++ b/dev/job_cards/rocoto/aeroanlfinal.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source UFSDA workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" ufsda +source "${HOMEglobal}/dev/ush/load_modules.sh" ufsda status=$? 
if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,6 +15,6 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE" +"${HOMEglobal}/dev/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/aeroanlgenb.sh b/dev/job_cards/rocoto/aeroanlgenb.sh index af72ffe6b8b..29d30eea739 100755 --- a/dev/job_cards/rocoto/aeroanlgenb.sh +++ b/dev/job_cards/rocoto/aeroanlgenb.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source UFSDA workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" ufsda +source "${HOMEglobal}/dev/ush/load_modules.sh" ufsda status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -16,6 +16,6 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGDAS_AERO_ANALYSIS_GENERATE_BMATRIX" +"${HOMEglobal}/dev/jobs/JGDAS_AERO_ANALYSIS_GENERATE_BMATRIX" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/aeroanlinit.sh b/dev/job_cards/rocoto/aeroanlinit.sh index e1df69aabdf..1507a280a67 100755 --- a/dev/job_cards/rocoto/aeroanlinit.sh +++ b/dev/job_cards/rocoto/aeroanlinit.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source UFSDA workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" ufsda +source "${HOMEglobal}/dev/ush/load_modules.sh" ufsda status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,6 +15,6 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE" +"${HOMEglobal}/dev/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE" status=$? 
exit "${status}" diff --git a/dev/job_cards/rocoto/aeroanlvar.sh b/dev/job_cards/rocoto/aeroanlvar.sh index 681d30c9428..d379364d547 100755 --- a/dev/job_cards/rocoto/aeroanlvar.sh +++ b/dev/job_cards/rocoto/aeroanlvar.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source UFSDA workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" ufsda +source "${HOMEglobal}/dev/ush/load_modules.sh" ufsda status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,6 +15,6 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_AERO_ANALYSIS_VARIATIONAL" +"${HOMEglobal}/dev/jobs/JGLOBAL_AERO_ANALYSIS_VARIATIONAL" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/aerosol_init.sh b/dev/job_cards/rocoto/aerosol_init.sh index cfd9d3745aa..bc27fa6355d 100755 --- a/dev/job_cards/rocoto/aerosol_init.sh +++ b/dev/job_cards/rocoto/aerosol_init.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source FV3GFS workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -29,7 +29,7 @@ if [[ ${status} -ne 0 ]]; then exit "${status}" fi -"${HOMEgfs}/scripts/exgfs_aero_init_aerosol.py" +"${HOMEglobal}/scripts/exgfs_aero_init_aerosol.py" status=$? if [[ ${status} -ne 0 ]]; then diff --git a/dev/job_cards/rocoto/anal.sh b/dev/job_cards/rocoto/anal.sh index a2178f92334..b7d6b96f468 100755 --- a/dev/job_cards/rocoto/anal.sh +++ b/dev/job_cards/rocoto/anal.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source FV3GFS workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" gsi +source "${HOMEglobal}/dev/ush/load_modules.sh" gsi status=$? 
if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,7 +15,7 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_ATMOS_ANALYSIS" +"${HOMEglobal}/dev/jobs/JGLOBAL_ATMOS_ANALYSIS" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/analcalc.sh b/dev/job_cards/rocoto/analcalc.sh index 77e5f5d27a5..59d7f92aba3 100755 --- a/dev/job_cards/rocoto/analcalc.sh +++ b/dev/job_cards/rocoto/analcalc.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source GSI workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" gsi +source "${HOMEglobal}/dev/ush/load_modules.sh" gsi status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,7 +15,7 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC" +"${HOMEglobal}/dev/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/analcalc_fv3jedi.sh b/dev/job_cards/rocoto/analcalc_fv3jedi.sh index 4984ab0eab7..3f31e4c5992 100755 --- a/dev/job_cards/rocoto/analcalc_fv3jedi.sh +++ b/dev/job_cards/rocoto/analcalc_fv3jedi.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source UFSDA workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" ufsda +source "${HOMEglobal}/dev/ush/load_modules.sh" ufsda status=$? [[ ${status} -ne 0 ]] && exit "${status}" @@ -13,6 +13,6 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}"/dev/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_FV3JEDI +"${HOMEglobal}"/dev/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_FV3JEDI status=$? 
exit "${status}" diff --git a/dev/job_cards/rocoto/analdiag.sh b/dev/job_cards/rocoto/analdiag.sh index 005e49471a8..5c739830c89 100755 --- a/dev/job_cards/rocoto/analdiag.sh +++ b/dev/job_cards/rocoto/analdiag.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source FV3GFS workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" gsi +source "${HOMEglobal}/dev/ush/load_modules.sh" gsi status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,7 +15,7 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_ATMOS_ANALYSIS_DIAG" +"${HOMEglobal}/dev/jobs/JGLOBAL_ATMOS_ANALYSIS_DIAG" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/anlstat.sh b/dev/job_cards/rocoto/anlstat.sh index 55a4c00a293..e9a61fd3487 100755 --- a/dev/job_cards/rocoto/anlstat.sh +++ b/dev/job_cards/rocoto/anlstat.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source UFSDA workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" ufsda +source "${HOMEglobal}/dev/ush/load_modules.sh" ufsda err=$? if [[ ${err} -ne 0 ]]; then echo "FATAL ERROR Failed to load UFSDA modules!" @@ -16,5 +16,5 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_ANALYSIS_STATS" +"${HOMEglobal}/dev/jobs/JGLOBAL_ANALYSIS_STATS" exit $? diff --git a/dev/job_cards/rocoto/arch_tars.sh b/dev/job_cards/rocoto/arch_tars.sh index 24f0ab9c688..e1fd39e25bd 100755 --- a/dev/job_cards/rocoto/arch_tars.sh +++ b/dev/job_cards/rocoto/arch_tars.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source FV3GFS workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? 
if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,7 +15,7 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_ARCHIVE_TARS" +"${HOMEglobal}/dev/jobs/JGLOBAL_ARCHIVE_TARS" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/arch_vrfy.sh b/dev/job_cards/rocoto/arch_vrfy.sh index f289ea5ecf4..408328e11cc 100755 --- a/dev/job_cards/rocoto/arch_vrfy.sh +++ b/dev/job_cards/rocoto/arch_vrfy.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source FV3GFS workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,7 +15,7 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_ARCHIVE_VRFY" +"${HOMEglobal}/dev/jobs/JGLOBAL_ARCHIVE_VRFY" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/atmanlfinal.sh b/dev/job_cards/rocoto/atmanlfinal.sh index 2dc364af25e..96f0e29fdb2 100755 --- a/dev/job_cards/rocoto/atmanlfinal.sh +++ b/dev/job_cards/rocoto/atmanlfinal.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source UFSDA workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" ufsda +source "${HOMEglobal}/dev/ush/load_modules.sh" ufsda status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,6 +15,6 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE" +"${HOMEglobal}/dev/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE" status=$? 
exit "${status}" diff --git a/dev/job_cards/rocoto/atmanlfv3inc.sh b/dev/job_cards/rocoto/atmanlfv3inc.sh index 5e6c9d4cbbb..c747f9bd35f 100755 --- a/dev/job_cards/rocoto/atmanlfv3inc.sh +++ b/dev/job_cards/rocoto/atmanlfv3inc.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source UFSDA workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" ufsda +source "${HOMEglobal}/dev/ush/load_modules.sh" ufsda status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,6 +15,6 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_ATM_ANALYSIS_FV3_INCREMENT" +"${HOMEglobal}/dev/jobs/JGLOBAL_ATM_ANALYSIS_FV3_INCREMENT" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/atmanlinit.sh b/dev/job_cards/rocoto/atmanlinit.sh index 8ec82291a7f..d4cad7be18a 100755 --- a/dev/job_cards/rocoto/atmanlinit.sh +++ b/dev/job_cards/rocoto/atmanlinit.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source UFSDA workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" ufsda +source "${HOMEglobal}/dev/ush/load_modules.sh" ufsda status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,6 +15,6 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE" +"${HOMEglobal}/dev/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/atmanlvar.sh b/dev/job_cards/rocoto/atmanlvar.sh index 98b1063c985..a978d47cb25 100755 --- a/dev/job_cards/rocoto/atmanlvar.sh +++ b/dev/job_cards/rocoto/atmanlvar.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source UFSDA workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" ufsda +source "${HOMEglobal}/dev/ush/load_modules.sh" ufsda status=$? 
if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,6 +15,6 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_ATM_ANALYSIS_VARIATIONAL" +"${HOMEglobal}/dev/jobs/JGLOBAL_ATM_ANALYSIS_VARIATIONAL" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/atmensanlfinal.sh b/dev/job_cards/rocoto/atmensanlfinal.sh index 42b51412ced..44d2776a427 100755 --- a/dev/job_cards/rocoto/atmensanlfinal.sh +++ b/dev/job_cards/rocoto/atmensanlfinal.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source UFSDA workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" ufsda +source "${HOMEglobal}/dev/ush/load_modules.sh" ufsda status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,6 +15,6 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE" +"${HOMEglobal}/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/atmensanlfv3inc.sh b/dev/job_cards/rocoto/atmensanlfv3inc.sh index 43cb11cc7a5..65b94ecb01a 100755 --- a/dev/job_cards/rocoto/atmensanlfv3inc.sh +++ b/dev/job_cards/rocoto/atmensanlfv3inc.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source UFSDA workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" ufsda +source "${HOMEglobal}/dev/ush/load_modules.sh" ufsda status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,6 +15,6 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_FV3_INCREMENT" +"${HOMEglobal}/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_FV3_INCREMENT" status=$? 
exit "${status}" diff --git a/dev/job_cards/rocoto/atmensanlinit.sh b/dev/job_cards/rocoto/atmensanlinit.sh index 08786874a4a..e07e9e83508 100755 --- a/dev/job_cards/rocoto/atmensanlinit.sh +++ b/dev/job_cards/rocoto/atmensanlinit.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source UFSDA workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" ufsda +source "${HOMEglobal}/dev/ush/load_modules.sh" ufsda status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,6 +15,6 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE" +"${HOMEglobal}/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/atmensanlletkf.sh b/dev/job_cards/rocoto/atmensanlletkf.sh index e33f2d03206..8d9af743379 100755 --- a/dev/job_cards/rocoto/atmensanlletkf.sh +++ b/dev/job_cards/rocoto/atmensanlletkf.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source UFSDA workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" ufsda +source "${HOMEglobal}/dev/ush/load_modules.sh" ufsda status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,6 +15,6 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_LETKF" +"${HOMEglobal}/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_LETKF" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/atmensanlobs.sh b/dev/job_cards/rocoto/atmensanlobs.sh index fbf45a07b3d..0c7c6f4b77b 100755 --- a/dev/job_cards/rocoto/atmensanlobs.sh +++ b/dev/job_cards/rocoto/atmensanlobs.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source UFSDA workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" ufsda +source "${HOMEglobal}/dev/ush/load_modules.sh" ufsda status=$? 
if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,6 +15,6 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_OBS" +"${HOMEglobal}/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_OBS" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/atmensanlsol.sh b/dev/job_cards/rocoto/atmensanlsol.sh index 81bf908c068..3305025513c 100755 --- a/dev/job_cards/rocoto/atmensanlsol.sh +++ b/dev/job_cards/rocoto/atmensanlsol.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source UFSDA workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" ufsda +source "${HOMEglobal}/dev/ush/load_modules.sh" ufsda status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,6 +15,6 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_SOL" +"${HOMEglobal}/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_SOL" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/atmos_ensstat.sh b/dev/job_cards/rocoto/atmos_ensstat.sh index ed64b5f79c3..a8d0c9b77a9 100755 --- a/dev/job_cards/rocoto/atmos_ensstat.sh +++ b/dev/job_cards/rocoto/atmos_ensstat.sh @@ -8,7 +8,7 @@ set -x ############################################################### # Source FV3GFS workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? if ((status != 0)); then exit "${status}"; fi @@ -24,7 +24,7 @@ for FORECAST_HOUR in "${fhr_list[@]}"; do ############################################################### # Execute the JJOB ############################################################### - "${HOMEgfs}/dev/jobs/JGLOBAL_ATMOS_ENSSTAT" + "${HOMEglobal}/dev/jobs/JGLOBAL_ATMOS_ENSSTAT" status=$? 
[[ ${status} -ne 0 ]] && exit "${status}" done diff --git a/dev/job_cards/rocoto/atmos_products.sh b/dev/job_cards/rocoto/atmos_products.sh index ab2952c5977..2c077726fcb 100755 --- a/dev/job_cards/rocoto/atmos_products.sh +++ b/dev/job_cards/rocoto/atmos_products.sh @@ -8,7 +8,7 @@ set -x ############################################################### # Source FV3GFS workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? if ((status != 0)); then exit "${status}"; fi @@ -24,7 +24,7 @@ for FORECAST_HOUR in "${fhr_list[@]}"; do ############################################################### # Execute the JJOB ############################################################### - "${HOMEgfs}/dev/jobs/JGLOBAL_ATMOS_PRODUCTS" + "${HOMEglobal}/dev/jobs/JGLOBAL_ATMOS_PRODUCTS" status=$? [[ ${status} -ne 0 ]] && exit "${status}" done diff --git a/dev/job_cards/rocoto/awips.sh b/dev/job_cards/rocoto/awips.sh index 158407387ed..84633c9c55e 100755 --- a/dev/job_cards/rocoto/awips.sh +++ b/dev/job_cards/rocoto/awips.sh @@ -1,7 +1,7 @@ #! /usr/bin/env bash set -x -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -19,7 +19,7 @@ for fhr in "${fhr_list[@]}"; do ############################################################### # Execute the JJOB # TODO ############################################################### - # "${HOMEgfs}/dev/jobs/J${RUN^^}_ATMOS_AWIPS" + # "${HOMEglobal}/dev/jobs/J${RUN^^}_ATMOS_AWIPS" err=$? 
if [[ ${err} -ne 0 ]]; then exit "${err}" diff --git a/dev/job_cards/rocoto/awips_20km_1p0deg.sh b/dev/job_cards/rocoto/awips_20km_1p0deg.sh index 233e2f02b01..fa60ee6ecb9 100755 --- a/dev/job_cards/rocoto/awips_20km_1p0deg.sh +++ b/dev/job_cards/rocoto/awips_20km_1p0deg.sh @@ -5,7 +5,7 @@ set -x ############################################################### ## Abstract: ## Inline awips driver script -## HOMEgfs : /full/path/to/workflow +## HOMEglobal : /full/path/to/workflow ## EXPDIR : /full/path/to/config/files ## RUN : cycle name (gdas / gfs) ## PDY : current date (YYYYMMDD) @@ -14,7 +14,7 @@ set -x ############################################################### # Source FV3GFS workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -38,14 +38,14 @@ for fhr3 in ${fhrlst}; do if ((fhr % 3 == 0)); then export fcsthr="${fhr3}" export DATA="${DATAROOT}/${jobid}.${fcsthr}" - "${HOMEgfs}/dev/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG" + "${HOMEglobal}/dev/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG" fi # Process every 6 hrs from hour 90 up to hour 240 elif [[ ${fhr} -ge 90 ]] && [[ ${fhr} -le 240 ]]; then if ((fhr % 6 == 0)); then export fcsthr="${fhr3}" export DATA="${DATAROOT}/${jobid}.${fcsthr}" - "${HOMEgfs}/dev/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG" + "${HOMEglobal}/dev/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG" fi fi done diff --git a/dev/job_cards/rocoto/cleanup.sh b/dev/job_cards/rocoto/cleanup.sh index 7af2d3eb942..2b01031b49a 100755 --- a/dev/job_cards/rocoto/cleanup.sh +++ b/dev/job_cards/rocoto/cleanup.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source FV3GFS workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? 
if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,7 +15,7 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_CLEANUP" +"${HOMEglobal}/dev/jobs/JGLOBAL_CLEANUP" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/earc_tars.sh b/dev/job_cards/rocoto/earc_tars.sh index e0cecadf1c5..5544bae7320 100755 --- a/dev/job_cards/rocoto/earc_tars.sh +++ b/dev/job_cards/rocoto/earc_tars.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source FV3GFS workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,7 +15,7 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_ENKF_ARCHIVE_TARS" +"${HOMEglobal}/dev/jobs/JGLOBAL_ENKF_ARCHIVE_TARS" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/earc_vrfy.sh b/dev/job_cards/rocoto/earc_vrfy.sh index 25e0d31ad41..3143fb0ec0b 100755 --- a/dev/job_cards/rocoto/earc_vrfy.sh +++ b/dev/job_cards/rocoto/earc_vrfy.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source FV3GFS workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,7 +15,7 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_ENKF_ARCHIVE_VRFY" +"${HOMEglobal}/dev/jobs/JGLOBAL_ENKF_ARCHIVE_VRFY" status=$? 
exit "${status}" diff --git a/dev/job_cards/rocoto/ecen.sh b/dev/job_cards/rocoto/ecen.sh index 7e267ca0ed2..c1c8b67dd00 100755 --- a/dev/job_cards/rocoto/ecen.sh +++ b/dev/job_cards/rocoto/ecen.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source FV3GFS workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" gsi +source "${HOMEglobal}/dev/ush/load_modules.sh" gsi status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -22,7 +22,7 @@ for fhr in ${fhrlst}; do export job=ecen export jobid="${job}.$$" - "${HOMEgfs}/dev/jobs/JGLOBAL_ENKF_ECEN" + "${HOMEglobal}/dev/jobs/JGLOBAL_ENKF_ECEN" status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" diff --git a/dev/job_cards/rocoto/ecen_fv3jedi.sh b/dev/job_cards/rocoto/ecen_fv3jedi.sh index 9e84069279c..614755f6baa 100755 --- a/dev/job_cards/rocoto/ecen_fv3jedi.sh +++ b/dev/job_cards/rocoto/ecen_fv3jedi.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source UFSDA workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" ufsda +source "${HOMEglobal}/dev/ush/load_modules.sh" ufsda status=$? [[ ${status} -ne 0 ]] && exit "${status}" @@ -13,6 +13,6 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}"/dev/jobs/JGLOBAL_ENKF_ECEN_FV3JEDI +"${HOMEglobal}"/dev/jobs/JGLOBAL_ENKF_ECEN_FV3JEDI status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/echgres.sh b/dev/job_cards/rocoto/echgres.sh index 0b14cf7f311..ef98e4282e8 100755 --- a/dev/job_cards/rocoto/echgres.sh +++ b/dev/job_cards/rocoto/echgres.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source FV3GFS workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" gsi +source "${HOMEglobal}/dev/ush/load_modules.sh" gsi status=$? 
if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,7 +15,7 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGDAS_ATMOS_CHGRES_FORENKF" +"${HOMEglobal}/dev/jobs/JGDAS_ATMOS_CHGRES_FORENKF" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/ediag.sh b/dev/job_cards/rocoto/ediag.sh index c0710d0acdb..7c49111b5c5 100755 --- a/dev/job_cards/rocoto/ediag.sh +++ b/dev/job_cards/rocoto/ediag.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source FV3GFS workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" gsi +source "${HOMEglobal}/dev/ush/load_modules.sh" gsi status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,7 +15,7 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_ENKF_DIAG" +"${HOMEglobal}/dev/jobs/JGLOBAL_ENKF_DIAG" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/eobs.sh b/dev/job_cards/rocoto/eobs.sh index 517a6a64402..87506421aaf 100755 --- a/dev/job_cards/rocoto/eobs.sh +++ b/dev/job_cards/rocoto/eobs.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source FV3GFS workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" gsi +source "${HOMEglobal}/dev/ush/load_modules.sh" gsi status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,7 +15,7 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_ENKF_SELECT_OBS" +"${HOMEglobal}/dev/jobs/JGLOBAL_ENKF_SELECT_OBS" status=$? 
exit "${status}" diff --git a/dev/job_cards/rocoto/epos.sh b/dev/job_cards/rocoto/epos.sh index 6e2dfa0b6ef..a4bc3fc8df2 100755 --- a/dev/job_cards/rocoto/epos.sh +++ b/dev/job_cards/rocoto/epos.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source GSI workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" gsi +source "${HOMEglobal}/dev/ush/load_modules.sh" gsi status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -23,7 +23,7 @@ for fhr in ${fhrlst}; do export FHMIN_EPOS=${fhr} export FHMAX_EPOS=${fhr} export FHOUT_EPOS=${fhr} - "${HOMEgfs}/dev/jobs/JGDAS_ENKF_POST" + "${HOMEglobal}/dev/jobs/JGDAS_ENKF_POST" status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" diff --git a/dev/job_cards/rocoto/esfc.sh b/dev/job_cards/rocoto/esfc.sh index b052431d296..05d95a21dca 100755 --- a/dev/job_cards/rocoto/esfc.sh +++ b/dev/job_cards/rocoto/esfc.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source GSI workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" gsi +source "${HOMEglobal}/dev/ush/load_modules.sh" gsi status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,7 +15,7 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_ENKF_SFC" +"${HOMEglobal}/dev/jobs/JGLOBAL_ENKF_SFC" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/esnowanl.sh b/dev/job_cards/rocoto/esnowanl.sh index d1112b26c7e..2d17f6b77f5 100755 --- a/dev/job_cards/rocoto/esnowanl.sh +++ b/dev/job_cards/rocoto/esnowanl.sh @@ -2,7 +2,7 @@ ############################################################### # Source UFSDA workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" ufsda +source "${HOMEglobal}/dev/ush/load_modules.sh" ufsda status=$? 
if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -13,6 +13,6 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_SNOWENS_ANALYSIS" +"${HOMEglobal}/dev/jobs/JGLOBAL_SNOWENS_ANALYSIS" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/eupd.sh b/dev/job_cards/rocoto/eupd.sh index 58bc59c1c28..41f3c8bd22e 100755 --- a/dev/job_cards/rocoto/eupd.sh +++ b/dev/job_cards/rocoto/eupd.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source FV3GFS workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" gsi +source "${HOMEglobal}/dev/ush/load_modules.sh" gsi status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,7 +15,7 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_ENKF_UPDATE" +"${HOMEglobal}/dev/jobs/JGLOBAL_ENKF_UPDATE" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/extractvars.sh b/dev/job_cards/rocoto/extractvars.sh index 125ff8394f5..ff9f7162230 100755 --- a/dev/job_cards/rocoto/extractvars.sh +++ b/dev/job_cards/rocoto/extractvars.sh @@ -5,7 +5,7 @@ set -x ############################################################### echo echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ===============" -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? if [[ "${status}" -ne 0 ]]; then exit "${status}" @@ -18,7 +18,7 @@ export jobid="${job}.$$" echo echo "=============== START TO RUN EXTRACTVARS ===============" # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_EXTRACTVARS" +"${HOMEglobal}/dev/jobs/JGLOBAL_EXTRACTVARS" status=$? 
if [[ "${status}" -ne 0 ]]; then exit "${status}" diff --git a/dev/job_cards/rocoto/fbwind.sh b/dev/job_cards/rocoto/fbwind.sh index e2a253cb0ee..1de7c5cf454 100755 --- a/dev/job_cards/rocoto/fbwind.sh +++ b/dev/job_cards/rocoto/fbwind.sh @@ -3,7 +3,7 @@ set -x ############################################################### -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? if ((status != 0)); then exit "${status}"; fi @@ -11,7 +11,7 @@ export job="fbwind" export jobid="${job}.$$" # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGFS_ATMOS_FBWIND" +"${HOMEglobal}/dev/jobs/JGFS_ATMOS_FBWIND" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/fcst.sh b/dev/job_cards/rocoto/fcst.sh index 57cd65bd798..ef76816c1c3 100755 --- a/dev/job_cards/rocoto/fcst.sh +++ b/dev/job_cards/rocoto/fcst.sh @@ -5,11 +5,11 @@ set -x ############################################################### # Source FV3GFS workflow modules # TODO clean this up once ncdiag/1.1.2 is installed on WCOSS2 -source "${HOMEgfs}/ush/detect_machine.sh" +source "${HOMEglobal}/ush/detect_machine.sh" if [[ "${MACHINE_ID}" == "wcoss2" ]]; then - source "${HOMEgfs}/dev/ush/load_modules.sh" ufswm + source "${HOMEglobal}/dev/ush/load_modules.sh" ufswm else - source "${HOMEgfs}/dev/ush/load_modules.sh" run + source "${HOMEglobal}/dev/ush/load_modules.sh" run fi status=$? if [[ ${status} -ne 0 ]]; then @@ -20,7 +20,7 @@ export job="fcst" export jobid="${job}.$$" # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_FORECAST" +"${HOMEglobal}/dev/jobs/JGLOBAL_FORECAST" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/fetch.sh b/dev/job_cards/rocoto/fetch.sh index 4c55258de75..c8de8913d91 100755 --- a/dev/job_cards/rocoto/fetch.sh +++ b/dev/job_cards/rocoto/fetch.sh @@ -3,7 +3,7 @@ set -x # Source FV3GFS workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? 
if [[ "${status}" -ne 0 ]]; then exit "${status}" @@ -13,7 +13,7 @@ export job="fetch" export jobid="${job}.$$" # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_FETCH" +"${HOMEglobal}/dev/jobs/JGLOBAL_FETCH" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/fit2obs.sh b/dev/job_cards/rocoto/fit2obs.sh index 77fd9731fce..1b51ea9477e 100755 --- a/dev/job_cards/rocoto/fit2obs.sh +++ b/dev/job_cards/rocoto/fit2obs.sh @@ -5,7 +5,7 @@ set -x ############################################################### echo echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ===============" -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -18,7 +18,7 @@ export jobid="${job}.$$" echo echo "=============== START TO RUN FIT2OBS ===============" # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGDAS_FIT2OBS" +"${HOMEglobal}/dev/jobs/JGDAS_FIT2OBS" status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" diff --git a/dev/job_cards/rocoto/gempak.sh b/dev/job_cards/rocoto/gempak.sh index c20b3a121f7..48b87f1c03a 100755 --- a/dev/job_cards/rocoto/gempak.sh +++ b/dev/job_cards/rocoto/gempak.sh @@ -1,7 +1,7 @@ #! /usr/bin/env bash set -x -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? if ((status != 0)); then exit "${status}"; fi @@ -17,7 +17,7 @@ for fhr in "${fhr_list[@]}"; do ############################################################### # Execute the JJOB ############################################################### - "${HOMEgfs}/dev/jobs/J${RUN^^}_ATMOS_GEMPAK" + "${HOMEglobal}/dev/jobs/J${RUN^^}_ATMOS_GEMPAK" err=$? [[ ${err} -ne 0 ]] && exit "${err}" done diff --git a/dev/job_cards/rocoto/gempakgrb2spec.sh b/dev/job_cards/rocoto/gempakgrb2spec.sh index e0594ec8bde..1f245fe49e9 100755 --- a/dev/job_cards/rocoto/gempakgrb2spec.sh +++ b/dev/job_cards/rocoto/gempakgrb2spec.sh @@ -1,7 +1,7 @@ #! 
/usr/bin/env bash set -x -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? if ((status != 0)); then exit "${status}"; fi @@ -9,7 +9,7 @@ export job="gempakpgrb2spec" export jobid="${job}.$$" # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC" +"${HOMEglobal}/dev/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/gempakmeta.sh b/dev/job_cards/rocoto/gempakmeta.sh index d980d9e8bdd..4faa94f9100 100755 --- a/dev/job_cards/rocoto/gempakmeta.sh +++ b/dev/job_cards/rocoto/gempakmeta.sh @@ -3,7 +3,7 @@ set -x ############################################################### -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? if ((status != 0)); then exit "${status}"; fi @@ -11,7 +11,7 @@ export job="gempakmeta" export jobid="${job}.$$" # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGFS_ATMOS_GEMPAK_META" +"${HOMEglobal}/dev/jobs/JGFS_ATMOS_GEMPAK_META" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/gempakmetancdc.sh b/dev/job_cards/rocoto/gempakmetancdc.sh index 423e48e7c24..0e0a6daf9f6 100755 --- a/dev/job_cards/rocoto/gempakmetancdc.sh +++ b/dev/job_cards/rocoto/gempakmetancdc.sh @@ -3,7 +3,7 @@ set -x ############################################################### -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? if ((status != 0)); then exit "${status}"; fi @@ -11,7 +11,7 @@ export job="gempakmetancdc" export jobid="${job}.$$" # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC" +"${HOMEglobal}/dev/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC" status=$? 
exit "${status}" diff --git a/dev/job_cards/rocoto/gempakncdcupapgif.sh b/dev/job_cards/rocoto/gempakncdcupapgif.sh index 8e95860300a..8650c429311 100755 --- a/dev/job_cards/rocoto/gempakncdcupapgif.sh +++ b/dev/job_cards/rocoto/gempakncdcupapgif.sh @@ -3,7 +3,7 @@ set -x ############################################################### -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? if ((status != 0)); then exit "${status}"; fi @@ -11,7 +11,7 @@ export job="gempakncdcupapgif" export jobid="${job}.$$" # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF" +"${HOMEglobal}/dev/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/gen_control_ic.sh b/dev/job_cards/rocoto/gen_control_ic.sh index cfe6299501e..aca1efa7d6e 100755 --- a/dev/job_cards/rocoto/gen_control_ic.sh +++ b/dev/job_cards/rocoto/gen_control_ic.sh @@ -1,5 +1,5 @@ #! /usr/bin/env bash -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -10,7 +10,7 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_ATMOS_CHGRES_GEN_CONTROL" +"${HOMEglobal}/dev/jobs/JGLOBAL_ATMOS_CHGRES_GEN_CONTROL" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/genesis.sh b/dev/job_cards/rocoto/genesis.sh index 0f3102faa1c..943d46daa2e 100755 --- a/dev/job_cards/rocoto/genesis.sh +++ b/dev/job_cards/rocoto/genesis.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source FV3GFS workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? 
((status != 0)) && exit "${status}" @@ -14,7 +14,7 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +"${HOMEglobal}/dev/jobs/JGFS_ATMOS_CYCLONE_GENESIS" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/genesis_fsu.sh b/dev/job_cards/rocoto/genesis_fsu.sh index 0290ef1659b..714fe98e92c 100755 --- a/dev/job_cards/rocoto/genesis_fsu.sh +++ b/dev/job_cards/rocoto/genesis_fsu.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source FV3GFS workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? ((status != 0)) && exit "${status}" @@ -14,7 +14,7 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGFS_ATMOS_FSU_GENESIS" +"${HOMEglobal}/dev/jobs/JGFS_ATMOS_FSU_GENESIS" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/globus_arch.sh b/dev/job_cards/rocoto/globus_arch.sh index 2d33c047678..cf4dcd8694a 100755 --- a/dev/job_cards/rocoto/globus_arch.sh +++ b/dev/job_cards/rocoto/globus_arch.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source FV3GFS workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? [[ ${status} -ne 0 ]] && exit "${status}" @@ -13,7 +13,7 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_GLOBUS_ARCH" +"${HOMEglobal}/dev/jobs/JGLOBAL_GLOBUS_ARCH" status=$? 
exit "${status}" diff --git a/dev/job_cards/rocoto/globus_earc.sh b/dev/job_cards/rocoto/globus_earc.sh index 61f7d5f6f49..12001f0bd4c 100755 --- a/dev/job_cards/rocoto/globus_earc.sh +++ b/dev/job_cards/rocoto/globus_earc.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source FV3GFS workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? if [[ ${status} -ne 0 ]]; then exit "${status}"; fi @@ -13,7 +13,7 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_ENS_GLOBUS_ARCH" +"${HOMEglobal}/dev/jobs/JGLOBAL_ENS_GLOBUS_ARCH" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/marineanlchkpt.sh b/dev/job_cards/rocoto/marineanlchkpt.sh index 7734d330de4..3d23107816f 100755 --- a/dev/job_cards/rocoto/marineanlchkpt.sh +++ b/dev/job_cards/rocoto/marineanlchkpt.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source UFSDA workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" ufsda +source "${HOMEglobal}/dev/ush/load_modules.sh" ufsda status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,6 +15,6 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}"/dev/jobs/JGLOBAL_MARINE_ANALYSIS_CHECKPOINT +"${HOMEglobal}"/dev/jobs/JGLOBAL_MARINE_ANALYSIS_CHECKPOINT status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/marineanlecen.sh b/dev/job_cards/rocoto/marineanlecen.sh index 8c1f633b97a..17b49a54355 100755 --- a/dev/job_cards/rocoto/marineanlecen.sh +++ b/dev/job_cards/rocoto/marineanlecen.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source UFSDA workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" ufsda +source "${HOMEglobal}/dev/ush/load_modules.sh" ufsda status=$? 
if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,11 +15,11 @@ export jobid="${job}.$$" ############################################################### # Setup Python path for GDASApp ush -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEgfs}/sorc/gdas.cd/ush" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEglobal}/sorc/gdas.cd/ush" export PYTHONPATH ############################################################### # Execute the JJOB -"${HOMEgfs}"/dev/jobs/JGLOBAL_MARINE_ANALYSIS_ECEN +"${HOMEglobal}"/dev/jobs/JGLOBAL_MARINE_ANALYSIS_ECEN status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/marineanlfinal.sh b/dev/job_cards/rocoto/marineanlfinal.sh index 991cd7cda49..661c1d177c1 100755 --- a/dev/job_cards/rocoto/marineanlfinal.sh +++ b/dev/job_cards/rocoto/marineanlfinal.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source UFSDA workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" ufsda +source "${HOMEglobal}/dev/ush/load_modules.sh" ufsda status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,6 +15,6 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}"/dev/jobs/JGLOBAL_MARINE_ANALYSIS_FINALIZE +"${HOMEglobal}"/dev/jobs/JGLOBAL_MARINE_ANALYSIS_FINALIZE status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/marineanlinit.sh b/dev/job_cards/rocoto/marineanlinit.sh index 5357a8f24c3..e69f91e9194 100755 --- a/dev/job_cards/rocoto/marineanlinit.sh +++ b/dev/job_cards/rocoto/marineanlinit.sh @@ -5,7 +5,7 @@ set -x ############################################################### # Source UFSDA workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" ufsda +source "${HOMEglobal}/dev/ush/load_modules.sh" ufsda status=$? 
if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -16,6 +16,6 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}"/dev/jobs/JGLOBAL_MARINE_ANALYSIS_INITIALIZE +"${HOMEglobal}"/dev/jobs/JGLOBAL_MARINE_ANALYSIS_INITIALIZE status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/marineanlletkf.sh b/dev/job_cards/rocoto/marineanlletkf.sh index 70add214186..97ae4fb8ef5 100755 --- a/dev/job_cards/rocoto/marineanlletkf.sh +++ b/dev/job_cards/rocoto/marineanlletkf.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source UFSDA workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" ufsda +source "${HOMEglobal}/dev/ush/load_modules.sh" ufsda status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,6 +15,6 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_MARINE_ANALYSIS_LETKF" +"${HOMEglobal}/dev/jobs/JGLOBAL_MARINE_ANALYSIS_LETKF" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/marineanlvar.sh b/dev/job_cards/rocoto/marineanlvar.sh index facad6c38ad..475db9e568f 100755 --- a/dev/job_cards/rocoto/marineanlvar.sh +++ b/dev/job_cards/rocoto/marineanlvar.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source UFSDA workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" ufsda +source "${HOMEglobal}/dev/ush/load_modules.sh" ufsda status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,6 +15,6 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_MARINE_ANALYSIS_VARIATIONAL" +"${HOMEglobal}/dev/jobs/JGLOBAL_MARINE_ANALYSIS_VARIATIONAL" status=$? 
exit "${status}" diff --git a/dev/job_cards/rocoto/marinebmat.sh b/dev/job_cards/rocoto/marinebmat.sh index d2b898ce4ad..ea891aade17 100755 --- a/dev/job_cards/rocoto/marinebmat.sh +++ b/dev/job_cards/rocoto/marinebmat.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source UFSDA workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" ufsda +source "${HOMEglobal}/dev/ush/load_modules.sh" ufsda status=$? if [[ "${status}" -ne 0 ]]; then exit "${status}" @@ -15,6 +15,6 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}"/dev/jobs/JGLOBAL_MARINE_BMAT +"${HOMEglobal}"/dev/jobs/JGLOBAL_MARINE_BMAT status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/marinebmatinit.sh b/dev/job_cards/rocoto/marinebmatinit.sh index db9685d6c04..2ae394072d2 100755 --- a/dev/job_cards/rocoto/marinebmatinit.sh +++ b/dev/job_cards/rocoto/marinebmatinit.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source UFSDA workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" ufsda +source "${HOMEglobal}/dev/ush/load_modules.sh" ufsda status=$? if [[ "${status}" -ne 0 ]]; then exit "${status}" @@ -15,6 +15,6 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}"/dev/jobs/JGLOBAL_MARINE_BMAT_INITIALIZE +"${HOMEglobal}"/dev/jobs/JGLOBAL_MARINE_BMAT_INITIALIZE status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/metp.sh b/dev/job_cards/rocoto/metp.sh index 38fd4272d45..f833281e7b3 100755 --- a/dev/job_cards/rocoto/metp.sh +++ b/dev/job_cards/rocoto/metp.sh @@ -3,13 +3,13 @@ set -x ############################################################### -source "${HOMEgfs}/dev/ush/load_modules.sh" verif +source "${HOMEglobal}/dev/ush/load_modules.sh" verif status=$? 
if ((status != 0)); then exit "${status}"; fi export job="metp${METPCASE}" export jobid="${job}.$$" -"${HOMEgfs}/dev/jobs/JGFS_ATMOS_VERIFICATION" +"${HOMEglobal}/dev/jobs/JGFS_ATMOS_VERIFICATION" exit $? diff --git a/dev/job_cards/rocoto/npoess.sh b/dev/job_cards/rocoto/npoess.sh index 7c5d59b8abb..06d23dbef9e 100755 --- a/dev/job_cards/rocoto/npoess.sh +++ b/dev/job_cards/rocoto/npoess.sh @@ -3,7 +3,7 @@ set -x ############################################################### -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? if ((status != 0)); then exit "${status}"; fi @@ -11,7 +11,7 @@ export job="npoess_pgrb2_0p5deg" export jobid="${job}.$$" # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS" +"${HOMEglobal}/dev/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/oceanice_products.sh b/dev/job_cards/rocoto/oceanice_products.sh index b8564ec4f4c..3dc14f6b6cd 100755 --- a/dev/job_cards/rocoto/oceanice_products.sh +++ b/dev/job_cards/rocoto/oceanice_products.sh @@ -8,7 +8,7 @@ set -x ############################################################### # Source FV3GFS workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? if ((status != 0)); then exit "${status}"; fi @@ -24,7 +24,7 @@ for FORECAST_HOUR in "${fhr_list[@]}"; do ############################################################### # Execute the JJOB ############################################################### - "${HOMEgfs}/dev/jobs/JGLOBAL_OCEANICE_PRODUCTS" + "${HOMEglobal}/dev/jobs/JGLOBAL_OCEANICE_PRODUCTS" status=$? 
[[ ${status} -ne 0 ]] && exit "${status}" done diff --git a/dev/job_cards/rocoto/offlineanl.sh b/dev/job_cards/rocoto/offlineanl.sh index 74a8da33182..7eb6864364a 100755 --- a/dev/job_cards/rocoto/offlineanl.sh +++ b/dev/job_cards/rocoto/offlineanl.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source FV3GFS workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,6 +15,6 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_OFFLINE_ATMOS_ANALYSIS" +"${HOMEglobal}/dev/jobs/JGLOBAL_OFFLINE_ATMOS_ANALYSIS" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/postsnd.sh b/dev/job_cards/rocoto/postsnd.sh index f8d5e15e65a..a0581af1fa1 100755 --- a/dev/job_cards/rocoto/postsnd.sh +++ b/dev/job_cards/rocoto/postsnd.sh @@ -2,7 +2,7 @@ set -x -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -13,6 +13,6 @@ export jobid="${job}.$$" ################################################################ # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGFS_ATMOS_POSTSND" +"${HOMEglobal}/dev/jobs/JGFS_ATMOS_POSTSND" err=$? exit "${err}" diff --git a/dev/job_cards/rocoto/prep.sh b/dev/job_cards/rocoto/prep.sh index 578b5bbf0f6..1ef51167a2d 100755 --- a/dev/job_cards/rocoto/prep.sh +++ b/dev/job_cards/rocoto/prep.sh @@ -2,7 +2,7 @@ ############################################################### # Source FV3GFS workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? 
if [[ ${status} -ne 0 ]]; then err_exit "${status}" @@ -11,7 +11,7 @@ fi ############################################################### export job="prep" export jobid="${job}.$$" -source "${HOMEgfs}/ush/jjob_header.sh" -e "prep" -c "base prep" +source "${HOMEglobal}/ush/jjob_header.sh" -e "prep" -c "base prep" # Strip 'enkf' from RUN for pulling data RUN_local="${RUN/enkf/}" @@ -46,7 +46,7 @@ mkdir -p "${COMOUT_OBS}" ############################################################### # Copy IODA files to ROTDIR if [[ ${USE_IODADIR:-"NO"} == "YES" ]]; then - "${HOMEgfs}/ush/getioda.sh" "${PDY}" "${cyc}" "${RUN_local}" "${COMINobsforge}" "${COMOUT_OBS}" + "${HOMEglobal}/ush/getioda.sh" "${PDY}" "${cyc}" "${RUN_local}" "${COMINobsforge}" "${COMOUT_OBS}" status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -59,7 +59,7 @@ if [[ "${RUN_local}" == "gcdas" ]]; then fi # Copy dump files to ROTDIR -"${HOMEgfs}/ush/getdump.sh" "${PDY}" "${cyc}" "${RUN_local}" "${COMINobsproc}" "${COMOUT_OBS}" +"${HOMEglobal}/ush/getdump.sh" "${PDY}" "${cyc}" "${RUN_local}" "${COMINobsproc}" "${COMOUT_OBS}" status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -67,7 +67,7 @@ fi # Ensure previous cycle gdas dumps are available (used by cycle & downstream) if [[ ! -s "${COMINobsproc_PREV}/${GDUMP}.t${gcyc}z.updated.status.tm00.bufr_d" ]]; then - "${HOMEgfs}/ush/getdump.sh" "${gPDY}" "${gcyc}" "${GDUMP}" "${COMINobsproc_PREV}" "${COMOUT_OBS_PREV}" + "${HOMEglobal}/ush/getdump.sh" "${gPDY}" "${gcyc}" "${GDUMP}" "${COMINobsproc_PREV}" "${COMOUT_OBS_PREV}" status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -100,7 +100,7 @@ if [[ ${PROCESS_TROPCY} == "YES" ]]; then rm -f "${COMOUT_OBS}/${RUN_local}.t${cyc}z.syndata.tcvitals.tm00" - "${HOMEgfs}/dev/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC" + "${HOMEglobal}/dev/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC" status=$? 
if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -200,7 +200,7 @@ fi # If requested, create radiance bias correction files for JEDI if [[ ${RUN} == "gdas" && ${CONVERT_BIASCOR:-"NO"} == "YES" ]]; then cd "${DATAROOT}" || true - "${HOMEgfs}/ush/gsi_satbias2ioda_all.sh" + "${HOMEglobal}/ush/gsi_satbias2ioda_all.sh" export err=$? if [[ ${err} -ne 0 ]]; then err_exit "gsi_satbias2ioda failed, ABORT!" diff --git a/dev/job_cards/rocoto/prep_emissions.sh b/dev/job_cards/rocoto/prep_emissions.sh index 6410a1e0bd0..b438175bcb3 100755 --- a/dev/job_cards/rocoto/prep_emissions.sh +++ b/dev/job_cards/rocoto/prep_emissions.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source UFSDA workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? ((status != 0)) && exit "${status}" @@ -24,6 +24,6 @@ done ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_PREP_EMISSIONS" +"${HOMEglobal}/dev/jobs/JGLOBAL_PREP_EMISSIONS" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/prep_sfc.sh b/dev/job_cards/rocoto/prep_sfc.sh index fd524f04fc5..e530893e5bb 100755 --- a/dev/job_cards/rocoto/prep_sfc.sh +++ b/dev/job_cards/rocoto/prep_sfc.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source FV3GFS workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? if [[ "${status}" -ne 0 ]]; then exit "${status}" @@ -15,7 +15,7 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_ATMOS_PREP_SFC" +"${HOMEglobal}/dev/jobs/JGLOBAL_ATMOS_PREP_SFC" status=$? 
exit "${status}" diff --git a/dev/job_cards/rocoto/prepoceanobs.sh b/dev/job_cards/rocoto/prepoceanobs.sh index ee68a3464c6..5bf8d4cc9bd 100755 --- a/dev/job_cards/rocoto/prepoceanobs.sh +++ b/dev/job_cards/rocoto/prepoceanobs.sh @@ -4,7 +4,7 @@ export STRICT="NO" ############################################################### # Source UFSDA workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" ufsda +source "${HOMEglobal}/dev/ush/load_modules.sh" ufsda status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,6 +15,6 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}"/dev/jobs/JGLOBAL_PREP_OCEAN_OBS +"${HOMEglobal}"/dev/jobs/JGLOBAL_PREP_OCEAN_OBS status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/sfcanl.sh b/dev/job_cards/rocoto/sfcanl.sh index d6738fe3a11..46f9b3f68a7 100755 --- a/dev/job_cards/rocoto/sfcanl.sh +++ b/dev/job_cards/rocoto/sfcanl.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source FV3GFS workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,7 +15,7 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_ATMOS_SFCANL" +"${HOMEglobal}/dev/jobs/JGLOBAL_ATMOS_SFCANL" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/snowanl.sh b/dev/job_cards/rocoto/snowanl.sh index eb4c7a92f98..118ef76bc3a 100755 --- a/dev/job_cards/rocoto/snowanl.sh +++ b/dev/job_cards/rocoto/snowanl.sh @@ -2,7 +2,7 @@ ############################################################### # Source UFSDA workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" ufsda +source "${HOMEglobal}/dev/ush/load_modules.sh" ufsda status=$? 
if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -13,6 +13,6 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_SNOW_ANALYSIS" +"${HOMEglobal}/dev/jobs/JGLOBAL_SNOW_ANALYSIS" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/stage_ic.sh b/dev/job_cards/rocoto/stage_ic.sh index cb5c215f438..8d9ddf476cf 100755 --- a/dev/job_cards/rocoto/stage_ic.sh +++ b/dev/job_cards/rocoto/stage_ic.sh @@ -3,7 +3,7 @@ set -x # Source FV3GFS workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? if [[ "${status}" -ne 0 ]]; then exit "${status}" @@ -13,7 +13,7 @@ export job="stage_ic" export jobid="${job}.$$" # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_STAGE_IC" +"${HOMEglobal}/dev/jobs/JGLOBAL_STAGE_IC" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/tracker.sh b/dev/job_cards/rocoto/tracker.sh index c13de1848fe..b125e8529a8 100755 --- a/dev/job_cards/rocoto/tracker.sh +++ b/dev/job_cards/rocoto/tracker.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source FV3GFS workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? ((status != 0)) && exit "${status}" @@ -14,7 +14,7 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGFS_ATMOS_CYCLONE_TRACKER" +"${HOMEglobal}/dev/jobs/JGFS_ATMOS_CYCLONE_TRACKER" status=$? 
exit "${status}" diff --git a/dev/job_cards/rocoto/upp.sh b/dev/job_cards/rocoto/upp.sh index ca6dd65a23c..2b265b2a8a5 100755 --- a/dev/job_cards/rocoto/upp.sh +++ b/dev/job_cards/rocoto/upp.sh @@ -9,12 +9,12 @@ set -x ############################################################### # Source FV3GFS workflow modules -#source "${HOMEgfs}/dev/ush/load_modules.sh" run +#source "${HOMEglobal}/dev/ush/load_modules.sh" run #status=$? #if (( status != 0 )); then exit "${status}"; fi # Temporarily load modules from UPP on WCOSS2 -source "${HOMEgfs}/ush/detect_machine.sh" -source "${HOMEgfs}/dev/ush/load_modules.sh" upp +source "${HOMEglobal}/ush/detect_machine.sh" +source "${HOMEglobal}/dev/ush/load_modules.sh" upp status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -28,6 +28,6 @@ export FORECAST_HOUR=$((10#${FHR3})) ############################################################### # Execute the JJOB ############################################################### -"${HOMEgfs}/dev/jobs/JGLOBAL_ATMOS_UPP" +"${HOMEglobal}/dev/jobs/JGLOBAL_ATMOS_UPP" exit $? diff --git a/dev/job_cards/rocoto/verfozn.sh b/dev/job_cards/rocoto/verfozn.sh index 018be19a002..945f0f7dfbf 100755 --- a/dev/job_cards/rocoto/verfozn.sh +++ b/dev/job_cards/rocoto/verfozn.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source GSI workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" gsi +source "${HOMEglobal}/dev/ush/load_modules.sh" gsi status=$? if [[ status -ne 0 ]]; then exit "${status}" @@ -18,7 +18,7 @@ export jobid="${job}.$$" echo echo "=============== START TO RUN OZMON DATA EXTRACTION ===============" -"${HOMEgfs}/dev/jobs/JGDAS_ATMOS_VERFOZN" +"${HOMEglobal}/dev/jobs/JGDAS_ATMOS_VERFOZN" status=$? 
exit "${status}" diff --git a/dev/job_cards/rocoto/verfrad.sh b/dev/job_cards/rocoto/verfrad.sh index 6e8b13049f0..0feb55da471 100755 --- a/dev/job_cards/rocoto/verfrad.sh +++ b/dev/job_cards/rocoto/verfrad.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source GSI workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" gsi +source "${HOMEglobal}/dev/ush/load_modules.sh" gsi status=$? if [[ status -ne 0 ]]; then exit "${status}" @@ -18,7 +18,7 @@ export jobid="${job}.$$" echo echo "=============== START TO RUN RADMON DATA EXTRACTION ===============" -"${HOMEgfs}/dev/jobs/JGDAS_ATMOS_VERFRAD" +"${HOMEglobal}/dev/jobs/JGDAS_ATMOS_VERFRAD" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/vminmon.sh b/dev/job_cards/rocoto/vminmon.sh index b05c2963859..2321ebafd1c 100755 --- a/dev/job_cards/rocoto/vminmon.sh +++ b/dev/job_cards/rocoto/vminmon.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source FV3GFS workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" gsi +source "${HOMEglobal}/dev/ush/load_modules.sh" gsi status=$? if [[ status -ne 0 ]]; then exit "${status}" @@ -18,7 +18,7 @@ export jobid="${job}.$$" echo echo "=============== START TO RUN MINMON ===============" -"${HOMEgfs}/dev/jobs/JGLOBAL_ATMOS_VMINMON" +"${HOMEglobal}/dev/jobs/JGLOBAL_ATMOS_VMINMON" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/wave_stat.sh b/dev/job_cards/rocoto/wave_stat.sh index a2b8260d5f5..baa772dadc2 100755 --- a/dev/job_cards/rocoto/wave_stat.sh +++ b/dev/job_cards/rocoto/wave_stat.sh @@ -1,7 +1,7 @@ #! /usr/bin/env bash ############################################################### -source "${HOMEgfs}/dev/ush/load_modules.sh" ufswm +source "${HOMEglobal}/dev/ush/load_modules.sh" ufswm err=$? 
if [[ "${err}" -ne 0 ]]; then exit "${err}" @@ -22,7 +22,7 @@ for FORECAST_HOUR in "${fhr_list[@]}"; do FHR3=$(printf '%03d' "${FORECAST_HOUR}") jobid="${job}_f${FHR3}.$$" # Execute the JJOB - "${HOMEgfs}/dev/jobs/JGEFS_WAVE_STAT" + "${HOMEglobal}/dev/jobs/JGEFS_WAVE_STAT" err=$? if [[ "${err}" -ne 0 ]]; then exit "${err}" diff --git a/dev/job_cards/rocoto/wave_stat_pnt.sh b/dev/job_cards/rocoto/wave_stat_pnt.sh index 71c3fbab146..d084fbf679f 100755 --- a/dev/job_cards/rocoto/wave_stat_pnt.sh +++ b/dev/job_cards/rocoto/wave_stat_pnt.sh @@ -1,7 +1,7 @@ #! /usr/bin/env bash ############################################################### -source "${HOMEgfs}/dev/ush/load_modules.sh" ufswm +source "${HOMEglobal}/dev/ush/load_modules.sh" ufswm err=$? if [[ "${err}" -ne 0 ]]; then exit "${err}" @@ -14,7 +14,7 @@ export jobid="${job}.$$" echo echo "=============== START TO RUN WAVESTAT PNT ===============" # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGEFS_WAVE_STAT_PNT" +"${HOMEglobal}/dev/jobs/JGEFS_WAVE_STAT_PNT" err=$? if [[ "${err}" -ne 0 ]]; then exit "${err}" diff --git a/dev/job_cards/rocoto/waveawipsbulls.sh b/dev/job_cards/rocoto/waveawipsbulls.sh index 68304158c0c..83972e8acb2 100755 --- a/dev/job_cards/rocoto/waveawipsbulls.sh +++ b/dev/job_cards/rocoto/waveawipsbulls.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source FV3GFS workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,7 +15,7 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_WAVE_PRDGEN_BULLS" +"${HOMEglobal}/dev/jobs/JGLOBAL_WAVE_PRDGEN_BULLS" status=$? 
exit "${status}" diff --git a/dev/job_cards/rocoto/waveawipsgridded.sh b/dev/job_cards/rocoto/waveawipsgridded.sh index 100bf545d31..d52311f3019 100755 --- a/dev/job_cards/rocoto/waveawipsgridded.sh +++ b/dev/job_cards/rocoto/waveawipsgridded.sh @@ -4,7 +4,7 @@ set -x ############################################################### # Source FV3GFS workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,7 +15,7 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED" +"${HOMEglobal}/dev/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/wavegempak.sh b/dev/job_cards/rocoto/wavegempak.sh index 60a66e749eb..c84f1db2599 100755 --- a/dev/job_cards/rocoto/wavegempak.sh +++ b/dev/job_cards/rocoto/wavegempak.sh @@ -3,7 +3,7 @@ set -x ############################################################### -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run err=$? if [[ "${err}" -ne 0 ]]; then exit "${err}" @@ -21,7 +21,7 @@ for FORECAST_HOUR in "${fhr_list[@]}"; do fhr3=$(printf '%03d' "${FORECAST_HOUR}") jobid="${job}_f${fhr3}.$$" # Execute the JJOB - "${HOMEgfs}/dev/jobs/JGLOBAL_WAVE_GEMPAK" + "${HOMEglobal}/dev/jobs/JGLOBAL_WAVE_GEMPAK" err=$? 
if [[ "${err}" -ne 0 ]]; then exit "${err}" diff --git a/dev/job_cards/rocoto/waveinit.sh b/dev/job_cards/rocoto/waveinit.sh index 4770c56356a..7c3dab5cbfa 100755 --- a/dev/job_cards/rocoto/waveinit.sh +++ b/dev/job_cards/rocoto/waveinit.sh @@ -3,8 +3,8 @@ set -x ############################################################### -#source "${HOMEgfs}/dev/ush/load_modules.sh" run -source "${HOMEgfs}/dev/ush/load_modules.sh" ufswm +#source "${HOMEglobal}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" ufswm status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,7 +15,7 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_WAVE_INIT" +"${HOMEglobal}/dev/jobs/JGLOBAL_WAVE_INIT" status=$? exit "${status}" diff --git a/dev/job_cards/rocoto/wavepostbndpnt.sh b/dev/job_cards/rocoto/wavepostbndpnt.sh index 8bd2969a178..e4b4815b561 100755 --- a/dev/job_cards/rocoto/wavepostbndpnt.sh +++ b/dev/job_cards/rocoto/wavepostbndpnt.sh @@ -5,8 +5,8 @@ set -x ############################################################### echo echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ===============" -#source "${HOMEgfs}/dev/ush/load_modules.sh" run -source "${HOMEgfs}/dev/ush/load_modules.sh" ufswm +#source "${HOMEglobal}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" ufswm err=$? if [[ "${err}" -ne 0 ]]; then exit "${err}" @@ -19,7 +19,7 @@ export jobid="${job}.$$" echo echo "=============== START TO RUN WAVE_POST_BNDPNT ===============" # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_WAVE_POST_BNDPNT" +"${HOMEglobal}/dev/jobs/JGLOBAL_WAVE_POST_BNDPNT" err=$? 
if [[ "${err}" -ne 0 ]]; then exit "${err}" diff --git a/dev/job_cards/rocoto/wavepostbndpntbll.sh b/dev/job_cards/rocoto/wavepostbndpntbll.sh index 91e34d691b3..f70d53cc885 100755 --- a/dev/job_cards/rocoto/wavepostbndpntbll.sh +++ b/dev/job_cards/rocoto/wavepostbndpntbll.sh @@ -5,8 +5,8 @@ set -x ############################################################### echo echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ===============" -#source "${HOMEgfs}/dev/ush/load_modules.sh" run -source "${HOMEgfs}/dev/ush/load_modules.sh" ufswm +#source "${HOMEglobal}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" ufswm err=$? if [[ "${err}" -ne 0 ]]; then exit "${err}" @@ -19,7 +19,7 @@ export jobid="${job}.$$" echo echo "=============== START TO RUN WAVE_POST_BNDPNT ===============" # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL" +"${HOMEglobal}/dev/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL" err=$? if [[ "${err}" -ne 0 ]]; then exit "${err}" diff --git a/dev/job_cards/rocoto/wavepostpnt.sh b/dev/job_cards/rocoto/wavepostpnt.sh index ad18783810d..e7d9d77ead0 100755 --- a/dev/job_cards/rocoto/wavepostpnt.sh +++ b/dev/job_cards/rocoto/wavepostpnt.sh @@ -5,8 +5,8 @@ set -x ############################################################### echo echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ===============" -#source "${HOMEgfs}/dev/ush/load_modules.sh" run -source "${HOMEgfs}/dev/ush/load_modules.sh" ufswm +#source "${HOMEglobal}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" ufswm status=$? if [[ "${status}" -ne 0 ]]; then exit "${status}"; fi @@ -17,7 +17,7 @@ export jobid="${job}.$$" echo echo "=============== START TO RUN WAVE_POST_PNT ===============" # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_WAVE_POST_PNT" +"${HOMEglobal}/dev/jobs/JGLOBAL_WAVE_POST_PNT" err=$? 
if [[ "${err}" -ne 0 ]]; then exit "${err}" diff --git a/dev/job_cards/rocoto/wavepostsbs.sh b/dev/job_cards/rocoto/wavepostsbs.sh index 4acfe335914..beee0cb74b6 100755 --- a/dev/job_cards/rocoto/wavepostsbs.sh +++ b/dev/job_cards/rocoto/wavepostsbs.sh @@ -4,8 +4,8 @@ set -x ############################################################### # Source FV3GFS workflow modules -#source "${HOMEgfs}/dev/ush/load_modules.sh" run -source "${HOMEgfs}/dev/ush/load_modules.sh" ufswm +#source "${HOMEglobal}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" ufswm err=$? if [[ "${err}" -ne 0 ]]; then exit "${err}" @@ -22,7 +22,7 @@ for FORECAST_HOUR in "${fhr_list[@]}"; do fhr3=$(printf '%03d' "${FORECAST_HOUR}") jobid="${job}_f${fhr3}.$$" # Execute the JJOB - "${HOMEgfs}/dev/jobs/JGLOBAL_WAVE_POST_SBS" + "${HOMEglobal}/dev/jobs/JGLOBAL_WAVE_POST_SBS" err=$? if [[ "${err}" -ne 0 ]]; then exit "${err}" diff --git a/dev/job_cards/rocoto/waveprep.sh b/dev/job_cards/rocoto/waveprep.sh index d24086f93a4..d819a1c0578 100755 --- a/dev/job_cards/rocoto/waveprep.sh +++ b/dev/job_cards/rocoto/waveprep.sh @@ -3,8 +3,8 @@ set -x ############################################################### -#source "${HOMEgfs}/dev/ush/load_modules.sh" run -source "${HOMEgfs}/dev/ush/load_modules.sh" ufswm +#source "${HOMEglobal}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" ufswm status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -15,7 +15,7 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_WAVE_PREP" +"${HOMEglobal}/dev/jobs/JGLOBAL_WAVE_PREP" status=$? 
if [[ ${status} -ne 0 ]]; then exit "${status}" diff --git a/dev/jobs/JGDAS_AERO_ANALYSIS_GENERATE_BMATRIX b/dev/jobs/JGDAS_AERO_ANALYSIS_GENERATE_BMATRIX index 11e35d01590..fdd3b992566 100755 --- a/dev/jobs/JGDAS_AERO_ANALYSIS_GENERATE_BMATRIX +++ b/dev/jobs/JGDAS_AERO_ANALYSIS_GENERATE_BMATRIX @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "aeroanlgenb" -c "base aeroanl aeroanlgenb" +source "${HOMEglobal}/ush/jjob_header.sh" -e "aeroanlgenb" -c "base aeroanl aeroanlgenb" ############################################## # Set variables used in the script @@ -22,7 +22,7 @@ mkdir -p "${COMOUT_CONF}" ############################################################### # Run relevant script -EXSCRIPT=${GDASAEROBMATPY:-${SCRgfs}/exgdas_aero_analysis_generate_bmatrix.py} +EXSCRIPT=${GDASAEROBMATPY:-${SCRglobal}/exgdas_aero_analysis_generate_bmatrix.py} ${EXSCRIPT} && true export err=$? if [[ ${err} -ne 0 ]]; then diff --git a/dev/jobs/JGDAS_ATMOS_CHGRES_FORENKF b/dev/jobs/JGDAS_ATMOS_CHGRES_FORENKF index ad6fa3c3f4b..7b7ae8e6c11 100755 --- a/dev/jobs/JGDAS_ATMOS_CHGRES_FORENKF +++ b/dev/jobs/JGDAS_ATMOS_CHGRES_FORENKF @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "anal" -c "base anal echgres" +source "${HOMEglobal}/ush/jjob_header.sh" -e "anal" -c "base anal echgres" ############################################## # Set variables used in the script @@ -23,7 +23,7 @@ MEMDIR="mem001" YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ ############################################################### # Run relevant script -${CHGRESFCSTSH:-${SCRgfs}/exgdas_atmos_chgres_forenkf.sh} && true +${CHGRESFCSTSH:-${SCRglobal}/exgdas_atmos_chgres_forenkf.sh} && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/jobs/JGDAS_ATMOS_GEMPAK b/dev/jobs/JGDAS_ATMOS_GEMPAK index f0b7cec2c8f..4650e226d6d 100755 --- a/dev/jobs/JGDAS_ATMOS_GEMPAK +++ b/dev/jobs/JGDAS_ATMOS_GEMPAK @@ -1,6 +1,6 @@ #! 
/usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "gempak" -c "base gempak" +source "${HOMEglobal}/ush/jjob_header.sh" -e "gempak" -c "base gempak" export GRIB=${GRIB:-pgrb2f} export EXT="" @@ -26,7 +26,7 @@ done # Execute the script for one degree grib ######################################################## -"${SCRgfs}/exgdas_atmos_nawips.sh" "1p00" "${FHR3}" "GDAS_GEMPAK" "${COMOUT_ATMOS_GEMPAK_1p00}" && true +"${SCRglobal}/exgdas_atmos_nawips.sh" "1p00" "${FHR3}" "GDAS_GEMPAK" "${COMOUT_ATMOS_GEMPAK_1p00}" && true export err=$? if [[ ${err} -ne 0 ]]; then @@ -37,7 +37,7 @@ fi # Execute the script for quater-degree grib ######################################################## -"${SCRgfs}/exgdas_atmos_nawips.sh" "0p25" "${FHR3}" "GDAS_GEMPAK" "${COMOUT_ATMOS_GEMPAK_0p25}" && true +"${SCRglobal}/exgdas_atmos_nawips.sh" "0p25" "${FHR3}" "GDAS_GEMPAK" "${COMOUT_ATMOS_GEMPAK_0p25}" && true export err=$? if [[ ${err} -ne 0 ]]; then diff --git a/dev/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC b/dev/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC index 6286f837696..e52c61d4961 100755 --- a/dev/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC +++ b/dev/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC @@ -4,12 +4,12 @@ # GDAS GEMPAK META NCDC PRODUCT GENERATION ############################################ -source "${HOMEgfs}/ush/jjob_header.sh" -e "gempak_meta" -c "base gempak" +source "${HOMEglobal}/ush/jjob_header.sh" -e "gempak_meta" -c "base gempak" # Now set up GEMPAK/NTRANS environment # datatype.tbl specifies the paths and filenames of files -cpreq "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl +cpreq "${HOMEglobal}/gempak/fix/datatype.tbl" datatype.tbl ################################### # Specify NET and RUN Name and model @@ -51,26 +51,26 @@ export pgmout=OUTPUT.$$ ######################################################## # Execute the script. -"${HOMEgfs}/gempak/ush/gdas_meta_na.sh" && true +"${HOMEglobal}/gempak/ush/gdas_meta_na.sh" && true export err=$? 
if [[ ${err} -ne 0 ]]; then err_exit fi -"${HOMEgfs}/gempak/ush/gdas_meta_loop.sh" && true +"${HOMEglobal}/gempak/ush/gdas_meta_loop.sh" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit fi if [[ "${cyc}" == '06' ]]; then - "${HOMEgfs}/gempak/ush/gdas_ecmwf_meta_ver.sh" && true + "${HOMEglobal}/gempak/ush/gdas_ecmwf_meta_ver.sh" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit fi - "${HOMEgfs}/gempak/ush/gdas_ukmet_meta_ver.sh" && true + "${HOMEglobal}/gempak/ush/gdas_ukmet_meta_ver.sh" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit @@ -85,7 +85,7 @@ fi ######################################################## # Execute the script. if ((cyc % 12 == 0)); then - "${SCRgfs}/exgdas_atmos_gempak_gif_ncdc.sh" && true + "${SCRglobal}/exgdas_atmos_gempak_gif_ncdc.sh" && true fi export err=$? if [[ ${err} -ne 0 ]]; then diff --git a/dev/jobs/JGDAS_ATMOS_VERFOZN b/dev/jobs/JGDAS_ATMOS_VERFOZN index 764c2454014..bfac31b90c6 100755 --- a/dev/jobs/JGDAS_ATMOS_VERFOZN +++ b/dev/jobs/JGDAS_ATMOS_VERFOZN @@ -3,7 +3,7 @@ ############################################################# # Set up environment for GDAS Ozone Monitor job ############################################################# -source "${HOMEgfs}/ush/jjob_header.sh" -e "verfozn" -c "base verfozn" +source "${HOMEglobal}/ush/jjob_header.sh" -e "verfozn" -c "base verfozn" ############################################# # determine PDY and cyc for previous cycle @@ -29,7 +29,7 @@ if [[ ! -d ${TANKverf_ozn} ]]; then mkdir -p "${TANKverf_ozn}"; fi #------------------------------------------------------- # Execute the script. # -"${SCRgfs}/exgdas_atmos_verfozn.sh" && true +"${SCRglobal}/exgdas_atmos_verfozn.sh" && true export err=$? 
if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/jobs/JGDAS_ATMOS_VERFRAD b/dev/jobs/JGDAS_ATMOS_VERFRAD index 9268afd37d4..35bf51feb8d 100755 --- a/dev/jobs/JGDAS_ATMOS_VERFRAD +++ b/dev/jobs/JGDAS_ATMOS_VERFRAD @@ -3,7 +3,7 @@ ############################################################# # Set up environment for GDAS Radiance Monitor job ############################################################# -source "${HOMEgfs}/ush/jjob_header.sh" -e "verfrad" -c "base verfrad" +source "${HOMEglobal}/ush/jjob_header.sh" -e "verfrad" -c "base verfrad" ############################################# # determine PDY and cyc for previous cycle @@ -33,7 +33,7 @@ if [[ ! -d ${TANKverf_rad} ]]; then mkdir -p "${TANKverf_rad}"; fi ######################################################## # Execute the script. -"${SCRgfs}/exgdas_atmos_verfrad.sh" && true +"${SCRglobal}/exgdas_atmos_verfrad.sh" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/jobs/JGDAS_ENKF_POST b/dev/jobs/JGDAS_ENKF_POST index 1184c82cf7d..bdf0eba0c4a 100755 --- a/dev/jobs/JGDAS_ENKF_POST +++ b/dev/jobs/JGDAS_ENKF_POST @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "epos" -c "base epos" +source "${HOMEglobal}/ush/jjob_header.sh" -e "epos" -c "base epos" ############################################## # Set variables used in the script @@ -18,7 +18,7 @@ export LEVS=$((LEVS - 1)) ############################################################### # Run relevant script -${ENKFPOSTSH:-${SCRgfs}/exgdas_enkf_post.sh} && true +${ENKFPOSTSH:-${SCRglobal}/exgdas_enkf_post.sh} && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/jobs/JGDAS_FIT2OBS b/dev/jobs/JGDAS_FIT2OBS index c07b20dd14c..87aa8649dd0 100755 --- a/dev/jobs/JGDAS_FIT2OBS +++ b/dev/jobs/JGDAS_FIT2OBS @@ -1,6 +1,6 @@ #! 
/usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "fit2obs" -c "base fit2obs" +source "${HOMEglobal}/ush/jjob_header.sh" -e "fit2obs" -c "base fit2obs" ############################################## # Set variables used in the script diff --git a/dev/jobs/JGEFS_WAVE_STAT b/dev/jobs/JGEFS_WAVE_STAT index f03a917cc28..49a8b5fb8d5 100755 --- a/dev/jobs/JGEFS_WAVE_STAT +++ b/dev/jobs/JGEFS_WAVE_STAT @@ -1,7 +1,7 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "wave_stat" -c "base wave wave_stat" -source "${USHgfs}/wave_domain_grid.sh" +source "${HOMEglobal}/ush/jjob_header.sh" -e "wave_stat" -c "base wave wave_stat" +source "${USHglobal}/wave_domain_grid.sh" # Set COM Paths @@ -21,7 +21,7 @@ if [[ ! -d "${COMOUT_WAVE_STATION_ENS}" ]]; then fi # Execute the Script -#"${SCRgfs}/exgefs_wave_stat.sh" +#"${SCRglobal}/exgefs_wave_stat.sh" export err=$? err_chk ########################################## diff --git a/dev/jobs/JGEFS_WAVE_STAT_PNT b/dev/jobs/JGEFS_WAVE_STAT_PNT index ff7e51a3ae3..2e2fc8ee42b 100755 --- a/dev/jobs/JGEFS_WAVE_STAT_PNT +++ b/dev/jobs/JGEFS_WAVE_STAT_PNT @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "wave_stat_pnt" -c "base wave wave_stat_pnt" +source "${HOMEglobal}/ush/jjob_header.sh" -e "wave_stat_pnt" -c "base wave wave_stat_pnt" # Set COM Paths @@ -11,7 +11,7 @@ if [[ ! -d "${COMOUT_WAVE_STATION_ENS}" ]]; then fi # Execute the Script -#"${SCRgfs}/exgefs_wave_stat_pnt.sh" +#"${SCRglobal}/exgefs_wave_stat_pnt.sh" export err=$? err_chk ########################################## diff --git a/dev/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG b/dev/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG index 3f8fd1f9f17..52388492eb7 100755 --- a/dev/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +++ b/dev/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG @@ -1,6 +1,6 @@ #! 
/usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "awips" -c "base awips" +source "${HOMEglobal}/ush/jjob_header.sh" -e "awips" -c "base awips" export OMP_NUM_THREADS=${OMP_NUM_THREADS:-1} @@ -29,7 +29,7 @@ export pgmout=OUTPUT.$$ ######################################################## # Execute the script. -"${SCRgfs}/exgfs_atmos_awips_20km_1p0deg.sh" "${fcsthr}" && true +"${SCRglobal}/exgfs_atmos_awips_20km_1p0deg.sh" "${fcsthr}" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/jobs/JGFS_ATMOS_CYCLONE_GENESIS b/dev/jobs/JGFS_ATMOS_CYCLONE_GENESIS index c5cddebce08..35a1289c6ae 100755 --- a/dev/jobs/JGFS_ATMOS_CYCLONE_GENESIS +++ b/dev/jobs/JGFS_ATMOS_CYCLONE_GENESIS @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "genesis" -c "base genesis" +source "${HOMEglobal}/ush/jjob_header.sh" -e "genesis" -c "base genesis" ############################################## # Set variables used in the exglobal script diff --git a/dev/jobs/JGFS_ATMOS_CYCLONE_TRACKER b/dev/jobs/JGFS_ATMOS_CYCLONE_TRACKER index 3b69021ab29..385af2889a7 100755 --- a/dev/jobs/JGFS_ATMOS_CYCLONE_TRACKER +++ b/dev/jobs/JGFS_ATMOS_CYCLONE_TRACKER @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "tracker" -c "base tracker" +source "${HOMEglobal}/ush/jjob_header.sh" -e "tracker" -c "base tracker" export COMPONENT="atmos" diff --git a/dev/jobs/JGFS_ATMOS_FBWIND b/dev/jobs/JGFS_ATMOS_FBWIND index 5dd539447aa..5e3784a99b0 100755 --- a/dev/jobs/JGFS_ATMOS_FBWIND +++ b/dev/jobs/JGFS_ATMOS_FBWIND @@ -5,7 +5,7 @@ ############################################ # GFS FBWIND PRODUCT GENERATION ############################################ -source "${HOMEgfs}/ush/jjob_header.sh" -e "fbwind" -c "base fbwind" +source "${HOMEglobal}/ush/jjob_header.sh" -e "fbwind" -c "base fbwind" export COMPONENT="atmos" @@ -21,7 +21,7 @@ fi ######################################################## # Execute the script. 
-"${SCRgfs}/exgfs_atmos_fbwind.sh" && true +"${SCRglobal}/exgfs_atmos_fbwind.sh" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/jobs/JGFS_ATMOS_FSU_GENESIS b/dev/jobs/JGFS_ATMOS_FSU_GENESIS index a52a8c24bb9..e1d05041cb4 100755 --- a/dev/jobs/JGFS_ATMOS_FSU_GENESIS +++ b/dev/jobs/JGFS_ATMOS_FSU_GENESIS @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "genesis_fsu" -c "base genesis_fsu" +source "${HOMEglobal}/ush/jjob_header.sh" -e "genesis_fsu" -c "base genesis_fsu" ############################################## # Define COM and Data directories diff --git a/dev/jobs/JGFS_ATMOS_GEMPAK b/dev/jobs/JGFS_ATMOS_GEMPAK index 29fd97b5faf..a5495bc226b 100755 --- a/dev/jobs/JGFS_ATMOS_GEMPAK +++ b/dev/jobs/JGFS_ATMOS_GEMPAK @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "gempak" -c "base gempak" +source "${HOMEglobal}/ush/jjob_header.sh" -e "gempak" -c "base gempak" ############################################ # Set up model and cycle specific variables @@ -41,7 +41,7 @@ fhmin=0 fhmax=240 if [[ "${fhr}" -ge "${fhmin}" ]] && [[ "${fhr}" -le "${fhmax}" ]]; then if ((fhr % 3 == 0)); then - "${SCRgfs}/exgfs_atmos_nawips.sh" "1p00" "${FHR3}" "GFS_GEMPAK" "${COMOUT_ATMOS_GEMPAK_1p00}" && true + "${SCRglobal}/exgfs_atmos_nawips.sh" "1p00" "${FHR3}" "GFS_GEMPAK" "${COMOUT_ATMOS_GEMPAK_1p00}" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit @@ -53,7 +53,7 @@ fhmin=252 fhmax=384 if [[ "${fhr}" -ge "${fhmin}" ]] && [[ "${fhr}" -le "${fhmax}" ]]; then if ((fhr % 12 == 0)); then - "${SCRgfs}/exgfs_atmos_nawips.sh" "1p00" "${FHR3}" "GFS_GEMPAK" "${COMOUT_ATMOS_GEMPAK_1p00}" && true + "${SCRglobal}/exgfs_atmos_nawips.sh" "1p00" "${FHR3}" "GFS_GEMPAK" "${COMOUT_ATMOS_GEMPAK_1p00}" && true export err=$? 
if [[ ${err} -ne 0 ]]; then err_exit @@ -68,7 +68,7 @@ fhmin=0 fhmax=240 if [[ "${fhr}" -ge "${fhmin}" ]] && [[ "${fhr}" -le "${fhmax}" ]]; then if ((fhr % 3 == 0)); then - "${SCRgfs}/exgfs_atmos_nawips.sh" "0p50" "${FHR3}" "GFS_GEMPAK" "${COMOUT_ATMOS_GEMPAK_0p50}" && true + "${SCRglobal}/exgfs_atmos_nawips.sh" "0p50" "${FHR3}" "GFS_GEMPAK" "${COMOUT_ATMOS_GEMPAK_0p50}" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit @@ -80,7 +80,7 @@ fhmin=246 fhmax=276 if [[ "${fhr}" -ge "${fhmin}" ]] && [[ "${fhr}" -le "${fhmax}" ]]; then if ((fhr % 6 == 0)); then - "${SCRgfs}/exgfs_atmos_nawips.sh" "0p50" "${FHR3}" "GFS_GEMPAK" "${COMOUT_ATMOS_GEMPAK_0p50}" && true + "${SCRglobal}/exgfs_atmos_nawips.sh" "0p50" "${FHR3}" "GFS_GEMPAK" "${COMOUT_ATMOS_GEMPAK_0p50}" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit @@ -92,7 +92,7 @@ fhmin=288 fhmax=384 if [[ "${fhr}" -ge "${fhmin}" ]] && [[ "${fhr}" -le "${fhmax}" ]]; then if ((fhr % 12 == 0)); then - "${SCRgfs}/exgfs_atmos_nawips.sh" "0p50" "${FHR3}" "GFS_GEMPAK" "${COMOUT_ATMOS_GEMPAK_0p50}" && true + "${SCRglobal}/exgfs_atmos_nawips.sh" "0p50" "${FHR3}" "GFS_GEMPAK" "${COMOUT_ATMOS_GEMPAK_0p50}" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit @@ -107,7 +107,7 @@ fhmin=0 fhmax=120 if [[ "${fhr}" -ge "${fhmin}" ]] && [[ "${fhr}" -le "${fhmax}" ]]; then if ((fhr % 1 == 0)); then - "${SCRgfs}/exgfs_atmos_nawips.sh" "0p25" "${FHR3}" "GFS_GEMPAK" "${COMOUT_ATMOS_GEMPAK_0p25}" && true + "${SCRglobal}/exgfs_atmos_nawips.sh" "0p25" "${FHR3}" "GFS_GEMPAK" "${COMOUT_ATMOS_GEMPAK_0p25}" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit @@ -119,7 +119,7 @@ fhmin=123 fhmax=240 if [[ "${fhr}" -ge "${fhmin}" ]] && [[ "${fhr}" -le "${fhmax}" ]]; then if ((fhr % 3 == 0)); then - "${SCRgfs}/exgfs_atmos_nawips.sh" "0p25" "${FHR3}" "GFS_GEMPAK" "${COMOUT_ATMOS_GEMPAK_0p25}" && true + "${SCRglobal}/exgfs_atmos_nawips.sh" "0p25" "${FHR3}" "GFS_GEMPAK" "${COMOUT_ATMOS_GEMPAK_0p25}" && true export err=$? 
if [[ ${err} -ne 0 ]]; then err_exit @@ -131,7 +131,7 @@ fhmin=252 fhmax=384 if [[ "${fhr}" -ge "${fhmin}" ]] && [[ "${fhr}" -le "${fhmax}" ]]; then if ((fhr % 12 == 0)); then - "${SCRgfs}/exgfs_atmos_nawips.sh" "0p25" "${FHR3}" "GFS_GEMPAK" "${COMOUT_ATMOS_GEMPAK_0p25}" && true + "${SCRglobal}/exgfs_atmos_nawips.sh" "0p25" "${FHR3}" "GFS_GEMPAK" "${COMOUT_ATMOS_GEMPAK_0p25}" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit @@ -146,19 +146,19 @@ fhmin=0 fhmax="${ocean_domain_max}" if [[ "${fhr}" -ge "${fhmin}" ]] && [[ "${fhr}" -le "${fhmax}" ]]; then if ((fhr % 3 == 0)); then - "${SCRgfs}/exgfs_atmos_nawips.sh" "35km_pac" "${FHR3}" "GFS_GEMPAK_WWB" "${COMOUT_ATMOS_GEMPAK_35km_pac}" && true + "${SCRglobal}/exgfs_atmos_nawips.sh" "35km_pac" "${FHR3}" "GFS_GEMPAK_WWB" "${COMOUT_ATMOS_GEMPAK_35km_pac}" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit fi - "${SCRgfs}/exgfs_atmos_nawips.sh" "35km_atl" "${FHR3}" "GFS_GEMPAK_WWB" "${COMOUT_ATMOS_GEMPAK_35km_atl}" && true + "${SCRglobal}/exgfs_atmos_nawips.sh" "35km_atl" "${FHR3}" "GFS_GEMPAK_WWB" "${COMOUT_ATMOS_GEMPAK_35km_atl}" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit fi - "${SCRgfs}/exgfs_atmos_nawips.sh" "40km" "${FHR3}" "GFS_GEMPAK_WWB" "${COMOUT_ATMOS_GEMPAK_40km}" && true + "${SCRglobal}/exgfs_atmos_nawips.sh" "40km" "${FHR3}" "GFS_GEMPAK_WWB" "${COMOUT_ATMOS_GEMPAK_40km}" && true export err=$? 
if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/jobs/JGFS_ATMOS_GEMPAK_META b/dev/jobs/JGFS_ATMOS_GEMPAK_META index 3537ecf93f4..da9908cdd78 100755 --- a/dev/jobs/JGFS_ATMOS_GEMPAK_META +++ b/dev/jobs/JGFS_ATMOS_GEMPAK_META @@ -5,7 +5,7 @@ ############################################ # GFS GEMPAK META PRODUCT GENERATION ############################################ -source "${HOMEgfs}/ush/jjob_header.sh" -e "gempak_meta" -c "base gempak" +source "${HOMEglobal}/ush/jjob_header.sh" -e "gempak_meta" -c "base gempak" ############################################### # Set MP variables @@ -16,7 +16,7 @@ export MP_LABELIO=yes export MP_PULSE=0 export MP_DEBUG_NOTIMEOUT=yes -cpreq "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl +cpreq "${HOMEglobal}/gempak/fix/datatype.tbl" datatype.tbl ############################################# #set the fcst hrs for all the cycles @@ -50,7 +50,7 @@ mkdir -m 775 -p "${COMOUT_ATMOS_GEMPAK_META}" ######################################################## # Execute the script. -"${SCRgfs}/exgfs_atmos_gempak_meta.sh" && true +"${SCRglobal}/exgfs_atmos_gempak_meta.sh" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF b/dev/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF index 064a3447bf1..79a7f4a8f14 100755 --- a/dev/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF +++ b/dev/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF @@ -3,7 +3,7 @@ ############################################ # GFS GEMPAK NCDC PRODUCT GENERATION ############################################ -source "${HOMEgfs}/ush/jjob_header.sh" -e "gempak_gif" -c "base gempak" +source "${HOMEglobal}/ush/jjob_header.sh" -e "gempak_gif" -c "base gempak" export MP_PULSE=0 export MP_TIMEOUT=2000 @@ -42,7 +42,7 @@ export pgmout=OUTPUT.$$ ######################################################## # Execute the script. -"${SCRgfs}/exgfs_atmos_gempak_gif_ncdc_skew_t.sh" +"${SCRglobal}/exgfs_atmos_gempak_gif_ncdc_skew_t.sh" export err=$? 
if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC b/dev/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC index 373bdb50870..701e4e559a3 100755 --- a/dev/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC +++ b/dev/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "gempak_spec" -c "base gempak" +source "${HOMEglobal}/ush/jjob_header.sh" -e "gempak_spec" -c "base gempak" export COMPONENT="atmos" export EXT="" @@ -26,7 +26,7 @@ export RUN2=gfs_goessim export GRIB=goessimpgrb2.0p25.f export EXT="" -"${SCRgfs}/exgfs_atmos_goes_nawips.sh" "${FHR3}" +"${SCRglobal}/exgfs_atmos_goes_nawips.sh" "${FHR3}" ################################################################# # Execute the script for the 221 grib @@ -40,7 +40,7 @@ export RUN2=gfs_goessim221 export GRIB=goessimpgrb2f export EXT=".grd221" -"${SCRgfs}/exgfs_atmos_goes_nawips.sh" "${FHR3}" && true +"${SCRglobal}/exgfs_atmos_goes_nawips.sh" "${FHR3}" && true export err=$? if [[ ${err} -ne 0 ]]; then diff --git a/dev/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS b/dev/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS index ea20ffd2d59..68bc4ca9d18 100755 --- a/dev/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS +++ b/dev/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS @@ -5,7 +5,7 @@ ############################################ # GFS PGRB2_SPECIAL_POST PRODUCT GENERATION ############################################ -source "${HOMEgfs}/ush/jjob_header.sh" -e "npoess" -c "base npoess" +source "${HOMEglobal}/ush/jjob_header.sh" -e "npoess" -c "base npoess" export OMP_NUM_THREADS=${OMP_NUM_THREADS:-1} @@ -23,7 +23,7 @@ mkdir -m 775 -p "${COMOUT_ATMOS_GOES}" ############################################################# # Execute the script -"${SCRgfs}/exgfs_atmos_grib2_special_npoess.sh" && true +"${SCRglobal}/exgfs_atmos_grib2_special_npoess.sh" && true export err=$? 
if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/jobs/JGFS_ATMOS_POSTSND b/dev/jobs/JGFS_ATMOS_POSTSND index 10f6248081e..a45b61ff728 100755 --- a/dev/jobs/JGFS_ATMOS_POSTSND +++ b/dev/jobs/JGFS_ATMOS_POSTSND @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "postsnd" -c "base postsnd" +source "${HOMEglobal}/ush/jjob_header.sh" -e "postsnd" -c "base postsnd" ######################################## # Runs GFS BUFR SOUNDINGS @@ -29,7 +29,7 @@ fi ######################################################## # Execute the script. -"${SCRgfs}/exgfs_atmos_postsnd.sh" && true +"${SCRglobal}/exgfs_atmos_postsnd.sh" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/jobs/JGFS_ATMOS_VERIFICATION b/dev/jobs/JGFS_ATMOS_VERIFICATION index 61d31700da8..993708c2d63 100755 --- a/dev/jobs/JGFS_ATMOS_VERIFICATION +++ b/dev/jobs/JGFS_ATMOS_VERIFICATION @@ -1,11 +1,11 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "metp" -c "base metp" +source "${HOMEglobal}/ush/jjob_header.sh" -e "metp" -c "base metp" ############################################################### ## Abstract: ## Inline METplus verification and diagnostics driver script -## HOMEgfs : /full/path/to/workflow +## HOMEglobal : /full/path/to/workflow ## EXPDIR : /full/path/to/config/files ## PDY : current date (YYYYMMDD) ## RUN : cycle name (gdas / gfs) diff --git a/dev/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE b/dev/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE index e2e2f065967..091886f60d3 100755 --- a/dev/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE +++ b/dev/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE @@ -2,7 +2,7 @@ export WIPE_DATA="NO" export DATA=${DATA:-${DATAROOT}/${RUN}aeroanl_${cyc}} -source "${HOMEgfs}/ush/jjob_header.sh" -e "aeroanlfinal" -c "base aeroanl aeroanlfinal" +source "${HOMEglobal}/ush/jjob_header.sh" -e "aeroanlfinal" -c "base aeroanl aeroanlfinal" ############################################## # Set variables used in the script @@ -25,7 +25,7 @@ mkdir 
-m 755 -p "${COMOUT_CONF}" ############################################################### # Run relevant script -EXSCRIPT=${GDASAEROFINALPY:-${SCRgfs}/exglobal_aero_analysis_finalize.py} +EXSCRIPT=${GDASAEROFINALPY:-${SCRglobal}/exglobal_aero_analysis_finalize.py} ${EXSCRIPT} && true export err=$? if [[ ${err} -ne 0 ]]; then diff --git a/dev/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE b/dev/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE index fca06c3b8cb..de52664a3b9 100755 --- a/dev/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE +++ b/dev/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE @@ -1,7 +1,7 @@ #! /usr/bin/env bash export DATA=${DATA:-${DATAROOT}/${RUN}aeroanl_${cyc}} -source "${HOMEgfs}/ush/jjob_header.sh" -e "aeroanlinit" -c "base aeroanl aeroanlinit" +source "${HOMEglobal}/ush/jjob_header.sh" -e "aeroanlinit" -c "base aeroanl aeroanlinit" ############################################## # Set variables used in the script @@ -34,7 +34,7 @@ RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \ ############################################################### # Run relevant script -EXSCRIPT=${GDASAEROINITPY:-${SCRgfs}/exglobal_aero_analysis_initialize.py} +EXSCRIPT=${GDASAEROINITPY:-${SCRglobal}/exglobal_aero_analysis_initialize.py} ${EXSCRIPT} && true export err=$? 
if [[ ${err} -ne 0 ]]; then diff --git a/dev/jobs/JGLOBAL_AERO_ANALYSIS_VARIATIONAL b/dev/jobs/JGLOBAL_AERO_ANALYSIS_VARIATIONAL index 4c3aafca453..4b063a6cbc7 100755 --- a/dev/jobs/JGLOBAL_AERO_ANALYSIS_VARIATIONAL +++ b/dev/jobs/JGLOBAL_AERO_ANALYSIS_VARIATIONAL @@ -2,7 +2,7 @@ export WIPE_DATA="NO" export DATA=${DATA:-${DATAROOT}/${RUN}aeroanl_${cyc}} -source "${HOMEgfs}/ush/jjob_header.sh" -e "aeroanlvar" -c "base aeroanl aeroanlvar" +source "${HOMEglobal}/ush/jjob_header.sh" -e "aeroanlvar" -c "base aeroanl aeroanlvar" ############################################## # Set variables used in the script @@ -15,7 +15,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "aeroanlvar" -c "base aeroanl aeroanlv ############################################################### # Run relevant script -EXSCRIPT=${GDASAEROVARSH:-${SCRgfs}/exglobal_aero_analysis_variational.py} +EXSCRIPT=${GDASAEROVARSH:-${SCRglobal}/exglobal_aero_analysis_variational.py} ${EXSCRIPT} && true export err=$? if [[ ${err} -ne 0 ]]; then diff --git a/dev/jobs/JGLOBAL_ANALYSIS_STATS b/dev/jobs/JGLOBAL_ANALYSIS_STATS index 21d624d4f8e..f0f7f28045e 100755 --- a/dev/jobs/JGLOBAL_ANALYSIS_STATS +++ b/dev/jobs/JGLOBAL_ANALYSIS_STATS @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "anlstat" -c "base anlstat" +source "${HOMEglobal}/ush/jjob_header.sh" -e "anlstat" -c "base anlstat" ############################################## # Set variables used in the script @@ -43,7 +43,7 @@ fi ############################################################### # Run relevant script -EXSCRIPT=${ANLSTATSPY:-${SCRgfs}/exglobal_analysis_stats.py} +EXSCRIPT=${ANLSTATSPY:-${SCRglobal}/exglobal_analysis_stats.py} ${EXSCRIPT} && true export err=$? if [[ ${err} -ne 0 ]]; then diff --git a/dev/jobs/JGLOBAL_ARCHIVE_TARS b/dev/jobs/JGLOBAL_ARCHIVE_TARS index 4b059b930d2..acf65a55546 100755 --- a/dev/jobs/JGLOBAL_ARCHIVE_TARS +++ b/dev/jobs/JGLOBAL_ARCHIVE_TARS @@ -1,9 +1,9 @@ #! 
/usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "arch_vrfy" -c "base arch_tars" +source "${HOMEglobal}/ush/jjob_header.sh" -e "arch_vrfy" -c "base arch_tars" if [[ "${DO_WAVE}" == "YES" ]]; then source "${EXPDIR}/config.wave" - source "${USHgfs}/wave_domain_grid.sh" + source "${USHglobal}/wave_domain_grid.sh" fi ############################################## @@ -83,7 +83,7 @@ fi # Do not export shell opts to the bash scripts in the htar/hsi wrappers export -n SHELLOPTS unset_strict -${GLOBALARCHIVESH:-${SCRgfs}/exglobal_archive_tars.py} +${GLOBALARCHIVESH:-${SCRglobal}/exglobal_archive_tars.py} export err=$? set_strict if [[ ${err} -ne 0 ]]; then diff --git a/dev/jobs/JGLOBAL_ARCHIVE_VRFY b/dev/jobs/JGLOBAL_ARCHIVE_VRFY index 2e6e9a42d37..49513e064f7 100755 --- a/dev/jobs/JGLOBAL_ARCHIVE_VRFY +++ b/dev/jobs/JGLOBAL_ARCHIVE_VRFY @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "arch_vrfy" -c "base arch_vrfy" +source "${HOMEglobal}/ush/jjob_header.sh" -e "arch_vrfy" -c "base arch_vrfy" ############################################## # Set variables used in the script @@ -30,7 +30,7 @@ fi # Run archive script ############################################################### -${GLOBALARCHIVESH:-${SCRgfs}/exglobal_archive_vrfy.py} +${GLOBALARCHIVESH:-${SCRglobal}/exglobal_archive_vrfy.py} err=$? 
if [[ ${err} -ne 0 ]]; then exit "${err}" diff --git a/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE b/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE index 6e39de57c88..791ed93564f 100755 --- a/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE +++ b/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE @@ -2,7 +2,7 @@ export WIPE_DATA="NO" export DATA=${DATA:-${DATAROOT}/${RUN}atmensanl_${cyc}} -source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanlfinal" -c "base atmensanl atmensanlfinal" +source "${HOMEglobal}/ush/jjob_header.sh" -e "atmensanlfinal" -c "base atmensanl atmensanlfinal" ############################################## # Set variables used in the script @@ -24,7 +24,7 @@ mkdir -m 755 -p "${COMOUT_CONF}" ############################################################### # Run relevant script -EXSCRIPT=${GDASATMENSFINALPY:-${SCRgfs}/exglobal_atmens_analysis_finalize.py} +EXSCRIPT=${GDASATMENSFINALPY:-${SCRglobal}/exglobal_atmens_analysis_finalize.py} ${EXSCRIPT} && true export err=$? if [[ ${err} -ne 0 ]]; then diff --git a/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_FV3_INCREMENT b/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_FV3_INCREMENT index e0fac0e3292..d2abdfcd30d 100755 --- a/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_FV3_INCREMENT +++ b/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_FV3_INCREMENT @@ -2,7 +2,7 @@ export WIPE_DATA="NO" export DATA=${DATA:-${DATAROOT}/${RUN}atmensanl_${cyc}} -source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanlfv3inc" -c "base atmensanl atmensanlfv3inc" +source "${HOMEglobal}/ush/jjob_header.sh" -e "atmensanlfv3inc" -c "base atmensanl atmensanlfv3inc" ############################################## # Set variables used in the script @@ -15,7 +15,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanlfv3inc" -c "base atmensanl a ############################################################### # Run relevant script -EXSCRIPT=${GDASATMENSRUNSH:-${SCRgfs}/exglobal_atmens_analysis_fv3_increment.py} +EXSCRIPT=${GDASATMENSRUNSH:-${SCRglobal}/exglobal_atmens_analysis_fv3_increment.py} ${EXSCRIPT} && 
true export err=$? if [[ ${err} -ne 0 ]]; then diff --git a/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE b/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE index 35e2b21611c..99f655fb97f 100755 --- a/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE +++ b/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE @@ -1,7 +1,7 @@ #! /usr/bin/env bash export DATA=${DATA:-${DATAROOT}/${RUN}atmensanl_${cyc}} -source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanlinit" -c "base atmensanl atmensanlinit" +source "${HOMEglobal}/ush/jjob_header.sh" -e "atmensanlinit" -c "base atmensanl atmensanlinit" ############################################## # Set variables used in the script @@ -25,7 +25,7 @@ RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \ ############################################################### # Run relevant script -EXSCRIPT=${GDASATMENSINITPY:-${SCRgfs}/exglobal_atmens_analysis_initialize.py} +EXSCRIPT=${GDASATMENSINITPY:-${SCRglobal}/exglobal_atmens_analysis_initialize.py} ${EXSCRIPT} && true export err=$? 
if [[ ${err} -ne 0 ]]; then diff --git a/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_LETKF b/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_LETKF index b6bc77a4000..033138fc41b 100755 --- a/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_LETKF +++ b/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_LETKF @@ -2,7 +2,7 @@ export WIPE_DATA="NO" export DATA=${DATA:-${DATAROOT}/${RUN}atmensanl_${cyc}} -source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanlletkf" -c "base atmensanl atmensanlletkf" +source "${HOMEglobal}/ush/jjob_header.sh" -e "atmensanlletkf" -c "base atmensanl atmensanlletkf" ############################################## # Set variables used in the script @@ -15,7 +15,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanlletkf" -c "base atmensanl at ############################################################### # Run relevant script -EXSCRIPT=${GDASATMENSRUNSH:-${SCRgfs}/exglobal_atmens_analysis_letkf.py} +EXSCRIPT=${GDASATMENSRUNSH:-${SCRglobal}/exglobal_atmens_analysis_letkf.py} ${EXSCRIPT} && true export err=$? if [[ ${err} -ne 0 ]]; then diff --git a/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_OBS b/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_OBS index 1b71ad578a3..1cca171f7b6 100755 --- a/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_OBS +++ b/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_OBS @@ -2,7 +2,7 @@ export WIPE_DATA="NO" export DATA=${DATA:-${DATAROOT}/${RUN}atmensanl_${cyc}} -source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanlobs" -c "base atmensanl atmensanlobs" +source "${HOMEglobal}/ush/jjob_header.sh" -e "atmensanlobs" -c "base atmensanl atmensanlobs" ############################################## # Set variables used in the script @@ -15,7 +15,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanlobs" -c "base atmensanl atme ############################################################### # Run relevant script -EXSCRIPT=${GDASATMENSOBSSH:-${SCRgfs}/exglobal_atmens_analysis_obs.py} +EXSCRIPT=${GDASATMENSOBSSH:-${SCRglobal}/exglobal_atmens_analysis_obs.py} ${EXSCRIPT} && true export err=$? 
if [[ ${err} -ne 0 ]]; then diff --git a/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_SOL b/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_SOL index ac2291ffe7d..9fedf9dc5b7 100755 --- a/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_SOL +++ b/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_SOL @@ -2,7 +2,7 @@ export WIPE_DATA="NO" export DATA=${DATA:-${DATAROOT}/${RUN}atmensanl_${cyc}} -source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanlsol" -c "base atmensanl atmensanlsol" +source "${HOMEglobal}/ush/jjob_header.sh" -e "atmensanlsol" -c "base atmensanl atmensanlsol" ############################################## # Set variables used in the script @@ -15,7 +15,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanlsol" -c "base atmensanl atme ############################################################### # Run relevant script -EXSCRIPT=${GDASATMENSSOLSH:-${SCRgfs}/exglobal_atmens_analysis_sol.py} +EXSCRIPT=${GDASATMENSSOLSH:-${SCRglobal}/exglobal_atmens_analysis_sol.py} ${EXSCRIPT} && true export err=$? if [[ ${err} -ne 0 ]]; then diff --git a/dev/jobs/JGLOBAL_ATMOS_ANALYSIS b/dev/jobs/JGLOBAL_ATMOS_ANALYSIS index 88dc666d4b6..90171cd04af 100755 --- a/dev/jobs/JGLOBAL_ATMOS_ANALYSIS +++ b/dev/jobs/JGLOBAL_ATMOS_ANALYSIS @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "anal" -c "base anal" +source "${HOMEglobal}/ush/jjob_header.sh" -e "anal" -c "base anal" ############################################## # Set variables used in the script @@ -75,13 +75,13 @@ export PREPQCPF="${COMIN_OBS}/${OPREFIX}prepbufr.acft_profiles" # Copy fix file for obsproc # TODO: Why is this necessary? 
if [[ ${RUN} == "gfs" ]]; then mkdir -p "${ROTDIR}/fix" - cpreq "${FIXgfs}/gsi/prepobs_errtable.global" "${ROTDIR}/fix/" + cpreq "${FIXglobal}/gsi/prepobs_errtable.global" "${ROTDIR}/fix/" fi ############################################################### # Run relevant script -${ANALYSISSH:-${SCRgfs}/exglobal_atmos_analysis.sh} && true +${ANALYSISSH:-${SCRglobal}/exglobal_atmos_analysis.sh} && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC b/dev/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC index 33ca4018636..4dc1f380bb3 100755 --- a/dev/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC +++ b/dev/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "analcalc" -c "base anal analcalc" +source "${HOMEglobal}/ush/jjob_header.sh" -e "analcalc" -c "base anal analcalc" ############################################## # Set variables used in the script @@ -60,7 +60,7 @@ export DOGAUSFCANL=${DOGAUSFCANL:-"YES"} ############################################################### # Run relevant script -${ANALCALCSH:-${SCRgfs}/exglobal_atmos_analysis_calc.sh} && true +${ANALCALCSH:-${SCRglobal}/exglobal_atmos_analysis_calc.sh} && true export err=$? 
if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_FV3JEDI b/dev/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_FV3JEDI index 1af6a422b3d..2def665f5dd 100755 --- a/dev/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_FV3JEDI +++ b/dev/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_FV3JEDI @@ -3,7 +3,7 @@ # Ignore possible spelling error (nothing is misspelled) # shellcheck disable=SC2153 -source "${HOMEgfs}/ush/jjob_header.sh" -e "analcalc_fv3jedi" -c "base analcalc_fv3jedi" +source "${HOMEglobal}/ush/jjob_header.sh" -e "analcalc_fv3jedi" -c "base analcalc_fv3jedi" ############################################## # Set variables used in the script @@ -35,7 +35,7 @@ mkdir -m 775 -p "${COMOUT_CONF}" # Run relevant script ############################################## -EXSCRIPT=${GDASATMRUNPY:-${SCRgfs}/exglobal_atmos_analysis_calc_fv3jedi.py} +EXSCRIPT=${GDASATMRUNPY:-${SCRglobal}/exglobal_atmos_analysis_calc_fv3jedi.py} ${EXSCRIPT} && true export err=$? if [[ ${err} -ne 0 ]]; then diff --git a/dev/jobs/JGLOBAL_ATMOS_ANALYSIS_DIAG b/dev/jobs/JGLOBAL_ATMOS_ANALYSIS_DIAG index 553a58e1160..929df55484a 100755 --- a/dev/jobs/JGLOBAL_ATMOS_ANALYSIS_DIAG +++ b/dev/jobs/JGLOBAL_ATMOS_ANALYSIS_DIAG @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "analdiag" -c "base anal analdiag" +source "${HOMEglobal}/ush/jjob_header.sh" -e "analdiag" -c "base anal analdiag" ############################################## # Set variables used in the script @@ -28,7 +28,7 @@ mkdir -m 775 -p "${COMOUT_ATMOS_ANALYSIS}" ############################################################### # Run relevant script -${ANALDIAGSH:-${SCRgfs}/exglobal_diag.sh} && true +${ANALDIAGSH:-${SCRglobal}/exglobal_diag.sh} && true export err=$? 
if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/jobs/JGLOBAL_ATMOS_CHGRES_GEN_CONTROL b/dev/jobs/JGLOBAL_ATMOS_CHGRES_GEN_CONTROL index 1d9c8f922d0..2298ba5784d 100755 --- a/dev/jobs/JGLOBAL_ATMOS_CHGRES_GEN_CONTROL +++ b/dev/jobs/JGLOBAL_ATMOS_CHGRES_GEN_CONTROL @@ -1,6 +1,6 @@ #! /usr/bin/env bash # shellcheck disable=SC2153 # PDY is always set in environment -source "${HOMEgfs}/ush/jjob_header.sh" -e "gen_control_ic" -c "base gen_control_ic" +source "${HOMEglobal}/ush/jjob_header.sh" -e "gen_control_ic" -c "base gen_control_ic" # Initial conditions are from previous GFS cycle, but valid at the end of the cycle GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") @@ -23,7 +23,7 @@ export ATM_FILE="${COMINgfs:-}/gfs.${GDATE:0:8}/${GDATE:8:2}/atmos/history/gfs.t export SFC_FILE="${COMINgfs:-}/gfs.${GDATE:0:8}/${GDATE:8:2}/atmos/history/gfs.t${GDATE:8:2}.sfc.f003.nc" ############################################################### # Run relevant script -${CHGRESFCSTSH:-${SCRgfs}/exglobal_atmos_chgres_gen_control.sh} && true +${CHGRESFCSTSH:-${SCRglobal}/exglobal_atmos_chgres_gen_control.sh} && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit "Failed to run chgres_cube, ABORT!" diff --git a/dev/jobs/JGLOBAL_ATMOS_ENSSTAT b/dev/jobs/JGLOBAL_ATMOS_ENSSTAT index e11a9db67bc..aee58515c2f 100755 --- a/dev/jobs/JGLOBAL_ATMOS_ENSSTAT +++ b/dev/jobs/JGLOBAL_ATMOS_ENSSTAT @@ -4,7 +4,7 @@ # Caculate the mean, spread, and other probabilistic fields. # -source "${HOMEgfs}/ush/jjob_header.sh" -e "atmos_ensstat" -c "base atmos_ensstat" +source "${HOMEglobal}/ush/jjob_header.sh" -e "atmos_ensstat" -c "base atmos_ensstat" ############################################## # Begin JOB SPECIFIC work @@ -21,7 +21,7 @@ done ############################################################### # Run exglobal script -"${SCRgfs}/exglobal_atmos_ensstat.sh" && true +"${SCRglobal}/exglobal_atmos_ensstat.sh" && true export err=$? 
if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/jobs/JGLOBAL_ATMOS_POST_MANAGER b/dev/jobs/JGLOBAL_ATMOS_POST_MANAGER index d03e527135d..fb27cb88091 100755 --- a/dev/jobs/JGLOBAL_ATMOS_POST_MANAGER +++ b/dev/jobs/JGLOBAL_ATMOS_POST_MANAGER @@ -2,7 +2,7 @@ # TODO (#1227) This job is not used in the rocoto suite -source "${HOMEgfs}/ush/jjob_header.sh" -e "post" -c "base post" +source "${HOMEglobal}/ush/jjob_header.sh" -e "post" -c "base post" #################################### # Specify NET and RUN Name and model @@ -19,7 +19,7 @@ YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COMIN_ATMOS_HISTORY:COM_ATMOS_HISTORY ######################################################## # Execute the script. -"${SCRgfs}/exglobal_atmos_pmgr.sh" && true +"${SCRglobal}/exglobal_atmos_pmgr.sh" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/jobs/JGLOBAL_ATMOS_PREP_SFC b/dev/jobs/JGLOBAL_ATMOS_PREP_SFC index 4f1aec6ff73..f6a4b432910 100755 --- a/dev/jobs/JGLOBAL_ATMOS_PREP_SFC +++ b/dev/jobs/JGLOBAL_ATMOS_PREP_SFC @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "prep_sfc" -c "base prep_sfc" +source "${HOMEglobal}/ush/jjob_header.sh" -e "prep_sfc" -c "base prep_sfc" ############################################## # Set date and cycle @@ -25,7 +25,7 @@ mkdir -p "${COMOUT_OBS}" # Run relevant script ############################################################### -"${SCRgfs}/exglobal_prep_sfc.sh" +"${SCRglobal}/exglobal_prep_sfc.sh" export err=$? if [[ ${err} -ne 0 ]]; then err_exit "Surface observation preparation failed!" diff --git a/dev/jobs/JGLOBAL_ATMOS_PRODUCTS b/dev/jobs/JGLOBAL_ATMOS_PRODUCTS index 8625d8e9a3d..1849c072756 100755 --- a/dev/jobs/JGLOBAL_ATMOS_PRODUCTS +++ b/dev/jobs/JGLOBAL_ATMOS_PRODUCTS @@ -1,6 +1,6 @@ #! 
/usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "atmos_products" -c "base atmos_products" +source "${HOMEglobal}/ush/jjob_header.sh" -e "atmos_products" -c "base atmos_products" ############################################## # Begin JOB SPECIFIC work @@ -23,7 +23,7 @@ export PREFIX="${RUN}.t${cyc}z." ############################################################### # Run exglobal script -"${SCRgfs}/exglobal_atmos_products.sh" && true +"${SCRglobal}/exglobal_atmos_products.sh" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/jobs/JGLOBAL_ATMOS_SFCANL b/dev/jobs/JGLOBAL_ATMOS_SFCANL index 59f542b729d..f8f5fad506a 100755 --- a/dev/jobs/JGLOBAL_ATMOS_SFCANL +++ b/dev/jobs/JGLOBAL_ATMOS_SFCANL @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "sfcanl" -c "base sfcanl" +source "${HOMEglobal}/ush/jjob_header.sh" -e "sfcanl" -c "base sfcanl" ############################################## # Begin JOB SPECIFIC work @@ -38,7 +38,7 @@ export USE_CFP=YES ############################################################### # Run relevant script -${SFCANALSH:-${SCRgfs}/exglobal_atmos_sfcanl.sh} && true +${SFCANALSH:-${SCRglobal}/exglobal_atmos_sfcanl.sh} && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC b/dev/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC index 21d948c3ca2..8941cbd9443 100755 --- a/dev/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC +++ b/dev/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC @@ -1,6 +1,6 @@ #! 
/usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "prep" -c "base prep" +source "${HOMEglobal}/ush/jjob_header.sh" -e "prep" -c "base prep" ############################################## # Begin JOB SPECIFIC work @@ -32,7 +32,7 @@ export BKGFREQ=1 # for hourly relocation # Run relevant script ############################################## -${TROPCYQCRELOSH:-${SCRgfs}/exglobal_atmos_tropcy_qc_reloc.sh} +${TROPCYQCRELOSH:-${SCRglobal}/exglobal_atmos_tropcy_qc_reloc.sh} export err=$? if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/jobs/JGLOBAL_ATMOS_UPP b/dev/jobs/JGLOBAL_ATMOS_UPP index 8452e69ec25..87252622d38 100755 --- a/dev/jobs/JGLOBAL_ATMOS_UPP +++ b/dev/jobs/JGLOBAL_ATMOS_UPP @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "upp" -c "base upp" +source "${HOMEglobal}/ush/jjob_header.sh" -e "upp" -c "base upp" ############################################## # Set variables used in the exglobal script @@ -20,7 +20,7 @@ mkdir -p "${COMOUT_ATMOS_MASTER}" ############################################################### # Run relevant exglobal script -"${SCRgfs}/exglobal_atmos_upp.py" && true +"${SCRglobal}/exglobal_atmos_upp.py" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/jobs/JGLOBAL_ATMOS_VMINMON b/dev/jobs/JGLOBAL_ATMOS_VMINMON index 4c859bfeb9f..4e4e90074e7 100755 --- a/dev/jobs/JGLOBAL_ATMOS_VMINMON +++ b/dev/jobs/JGLOBAL_ATMOS_VMINMON @@ -3,7 +3,7 @@ ########################################################### # Global Minimization Monitor (MinMon) job ########################################################### -source "${HOMEgfs}/ush/jjob_header.sh" -e "vminmon" -c "base vminmon" +source "${HOMEglobal}/ush/jjob_header.sh" -e "vminmon" -c "base vminmon" ############################################# # Determine PDY and cyc for previous cycle @@ -31,7 +31,7 @@ mkdir -p "${M_TANKverf}" ######################################################## # Execute the script. 
-"${SCRgfs}/exglobal_atmos_vminmon.sh" && true +"${SCRglobal}/exglobal_atmos_vminmon.sh" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE b/dev/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE index cdb8726127b..fab630702d0 100755 --- a/dev/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE +++ b/dev/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE @@ -2,7 +2,7 @@ export WIPE_DATA="NO" export DATA=${DATA:-${DATAROOT}/${RUN}atmanl_${cyc}} -source "${HOMEgfs}/ush/jjob_header.sh" -e "atmanlfinal" -c "base atmanl atmanlfinal" +source "${HOMEglobal}/ush/jjob_header.sh" -e "atmanlfinal" -c "base atmanl atmanlfinal" ############################################## # Set variables used in the script @@ -23,7 +23,7 @@ mkdir -m 775 -p "${COMOUT_CONF}" ############################################################### # Run relevant script -EXSCRIPT=${GDASATMFINALPY:-${SCRgfs}/exglobal_atm_analysis_finalize.py} +EXSCRIPT=${GDASATMFINALPY:-${SCRglobal}/exglobal_atm_analysis_finalize.py} ${EXSCRIPT} && true export err=$? 
if [[ ${err} -ne 0 ]]; then diff --git a/dev/jobs/JGLOBAL_ATM_ANALYSIS_FV3_INCREMENT b/dev/jobs/JGLOBAL_ATM_ANALYSIS_FV3_INCREMENT index 16c10a29067..dd000c5c075 100755 --- a/dev/jobs/JGLOBAL_ATM_ANALYSIS_FV3_INCREMENT +++ b/dev/jobs/JGLOBAL_ATM_ANALYSIS_FV3_INCREMENT @@ -2,7 +2,7 @@ export WIPE_DATA="NO" export DATA=${DATA:-${DATAROOT}/${RUN}atmanl_${cyc}} -source "${HOMEgfs}/ush/jjob_header.sh" -e "atmanlfv3inc" -c "base atmanl atmanlfv3inc" +source "${HOMEglobal}/ush/jjob_header.sh" -e "atmanlfv3inc" -c "base atmanl atmanlfv3inc" ############################################## # Set variables used in the script @@ -15,7 +15,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "atmanlfv3inc" -c "base atmanl atmanlf ############################################################### # Run relevant script -EXSCRIPT=${GDASATMRUNSH:-${SCRgfs}/exglobal_atm_analysis_fv3_increment.py} +EXSCRIPT=${GDASATMRUNSH:-${SCRglobal}/exglobal_atm_analysis_fv3_increment.py} ${EXSCRIPT} && true export err=$? if [[ ${err} -ne 0 ]]; then diff --git a/dev/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE b/dev/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE index 8419405f040..0fd0d478206 100755 --- a/dev/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE +++ b/dev/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE @@ -1,7 +1,7 @@ #! /usr/bin/env bash export DATA=${DATA:-${DATAROOT}/${RUN}atmanl_${cyc}} -source "${HOMEgfs}/ush/jjob_header.sh" -e "atmanlinit" -c "base atmanl atmanlinit" +source "${HOMEglobal}/ush/jjob_header.sh" -e "atmanlinit" -c "base atmanl atmanlinit" ############################################## # Set variables used in the script @@ -27,7 +27,7 @@ RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \ ############################################################### # Run relevant script -EXSCRIPT=${GDASATMINITPY:-${SCRgfs}/exglobal_atm_analysis_initialize.py} +EXSCRIPT=${GDASATMINITPY:-${SCRglobal}/exglobal_atm_analysis_initialize.py} ${EXSCRIPT} && true export err=$? 
if [[ ${err} -ne 0 ]]; then diff --git a/dev/jobs/JGLOBAL_ATM_ANALYSIS_VARIATIONAL b/dev/jobs/JGLOBAL_ATM_ANALYSIS_VARIATIONAL index 5be049694e3..cbd3c965b5c 100755 --- a/dev/jobs/JGLOBAL_ATM_ANALYSIS_VARIATIONAL +++ b/dev/jobs/JGLOBAL_ATM_ANALYSIS_VARIATIONAL @@ -2,7 +2,7 @@ export WIPE_DATA="NO" export DATA=${DATA:-${DATAROOT}/${RUN}atmanl_${cyc}} -source "${HOMEgfs}/ush/jjob_header.sh" -e "atmanlvar" -c "base atmanl atmanlvar" +source "${HOMEglobal}/ush/jjob_header.sh" -e "atmanlvar" -c "base atmanl atmanlvar" ############################################## # Set variables used in the script @@ -15,7 +15,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "atmanlvar" -c "base atmanl atmanlvar" ############################################################### # Run relevant script -EXSCRIPT=${GDASATMRUNSH:-${SCRgfs}/exglobal_atm_analysis_variational.py} +EXSCRIPT=${GDASATMRUNSH:-${SCRglobal}/exglobal_atm_analysis_variational.py} ${EXSCRIPT} && true export err=$? if [[ ${err} -ne 0 ]]; then diff --git a/dev/jobs/JGLOBAL_CLEANUP b/dev/jobs/JGLOBAL_CLEANUP index 1325d1f9a32..6557bd14dff 100755 --- a/dev/jobs/JGLOBAL_CLEANUP +++ b/dev/jobs/JGLOBAL_CLEANUP @@ -1,8 +1,8 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "cleanup" -c "base cleanup" +source "${HOMEglobal}/ush/jjob_header.sh" -e "cleanup" -c "base cleanup" -"${SCRgfs}/exglobal_cleanup.sh" && true +"${SCRglobal}/exglobal_cleanup.sh" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/jobs/JGLOBAL_ENKF_ARCHIVE_TARS b/dev/jobs/JGLOBAL_ENKF_ARCHIVE_TARS index bad4e6a9633..9da47ddc254 100755 --- a/dev/jobs/JGLOBAL_ENKF_ARCHIVE_TARS +++ b/dev/jobs/JGLOBAL_ENKF_ARCHIVE_TARS @@ -1,6 +1,6 @@ #! 
/usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "earc_tars" -c "base arch_tars earc_tars earc_groups" +source "${HOMEglobal}/ush/jjob_header.sh" -e "earc_tars" -c "base arch_tars earc_tars earc_groups" ############################################## # Set variables used in the script @@ -25,7 +25,7 @@ mkdir -p "${COMIN_CONF}" # Calls an external bash command; do not fail on unassigned/error export -n SHELLOPTS unset_strict -"${SCRgfs}/exglobal_enkf_earc_tars.py" +"${SCRglobal}/exglobal_enkf_earc_tars.py" export err=$? set_strict if [[ ${err} -ne 0 ]]; then diff --git a/dev/jobs/JGLOBAL_ENKF_ARCHIVE_VRFY b/dev/jobs/JGLOBAL_ENKF_ARCHIVE_VRFY index 77c3d1973fc..a6adb522fc3 100755 --- a/dev/jobs/JGLOBAL_ENKF_ARCHIVE_VRFY +++ b/dev/jobs/JGLOBAL_ENKF_ARCHIVE_VRFY @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "earc_vrfy" -c "base earc_vrfy" +source "${HOMEglobal}/ush/jjob_header.sh" -e "earc_vrfy" -c "base earc_vrfy" ############################################## # Set variables used in the script @@ -14,7 +14,7 @@ MEMDIR="ensstat" YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ # Run archive script ############################################################### -"${SCRgfs}/exglobal_enkf_earc_vrfy.py" +"${SCRglobal}/exglobal_enkf_earc_vrfy.py" err=$? if [[ ${err} -ne 0 ]]; then exit "${err}" diff --git a/dev/jobs/JGLOBAL_ENKF_DIAG b/dev/jobs/JGLOBAL_ENKF_DIAG index fa494dfb847..4688ca8967c 100755 --- a/dev/jobs/JGLOBAL_ENKF_DIAG +++ b/dev/jobs/JGLOBAL_ENKF_DIAG @@ -1,6 +1,6 @@ #! 
/usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "ediag" -c "base anal eobs analdiag ediag" +source "${HOMEglobal}/ush/jjob_header.sh" -e "ediag" -c "base anal eobs analdiag ediag" ############################################## # Set variables used in the script @@ -84,7 +84,7 @@ export DIAG_COMPRESS="NO" ############################################################### # Run relevant script -${ANALDIAGSH:-${SCRgfs}/exglobal_diag.sh} && true +${ANALDIAGSH:-${SCRglobal}/exglobal_diag.sh} && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/jobs/JGLOBAL_ENKF_ECEN b/dev/jobs/JGLOBAL_ENKF_ECEN index 0d8a67fc324..e507e8267b6 100755 --- a/dev/jobs/JGLOBAL_ENKF_ECEN +++ b/dev/jobs/JGLOBAL_ENKF_ECEN @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "ecen" -c "base ecen" +source "${HOMEglobal}/ush/jjob_header.sh" -e "ecen" -c "base ecen" ############################################## # Set variables used in the script @@ -36,7 +36,7 @@ MEMDIR="ensstat" RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \ ############################################################### # Run relevant script -${ENKFRECENSH:-${SCRgfs}/exglobal_enkf_ecen.sh} && true +${ENKFRECENSH:-${SCRglobal}/exglobal_enkf_ecen.sh} && true export err=$? 
if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/jobs/JGLOBAL_ENKF_ECEN_FV3JEDI b/dev/jobs/JGLOBAL_ENKF_ECEN_FV3JEDI index 12fe96146db..9ce01a29861 100755 --- a/dev/jobs/JGLOBAL_ENKF_ECEN_FV3JEDI +++ b/dev/jobs/JGLOBAL_ENKF_ECEN_FV3JEDI @@ -3,7 +3,7 @@ # Ignore possible spelling error (nothing is misspelled) # shellcheck disable=SC2153 -source "${HOMEgfs}/ush/jjob_header.sh" -e "ecen_fv3jedi" -c "base ecen_fv3jedi" +source "${HOMEglobal}/ush/jjob_header.sh" -e "ecen_fv3jedi" -c "base ecen_fv3jedi" ############################################## # Set variables used in the script @@ -36,7 +36,7 @@ mkdir -m 755 -p "${COMOUT_CONF}" # Run relevant script ############################################## -EXSCRIPT=${GDASATMRUNPY:-${SCRgfs}/exglobal_enkf_ecen_fv3jedi.py} +EXSCRIPT=${GDASATMRUNPY:-${SCRglobal}/exglobal_enkf_ecen_fv3jedi.py} ${EXSCRIPT} && true export err=$? if [[ ${err} -ne 0 ]]; then diff --git a/dev/jobs/JGLOBAL_ENKF_SELECT_OBS b/dev/jobs/JGLOBAL_ENKF_SELECT_OBS index af264f7d9c6..9d3de80d0bd 100755 --- a/dev/jobs/JGLOBAL_ENKF_SELECT_OBS +++ b/dev/jobs/JGLOBAL_ENKF_SELECT_OBS @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "eobs" -c "base anal eobs" +source "${HOMEglobal}/ush/jjob_header.sh" -e "eobs" -c "base anal eobs" ############################################## # Set variables used in the script @@ -109,7 +109,7 @@ done ############################################################### # Run relevant script -${INVOBSSH:-${SCRgfs}/exglobal_enkf_select_obs.sh} && true +${INVOBSSH:-${SCRglobal}/exglobal_enkf_select_obs.sh} && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/jobs/JGLOBAL_ENKF_SFC b/dev/jobs/JGLOBAL_ENKF_SFC index 0e33d9e4dac..417b49fa7ef 100755 --- a/dev/jobs/JGLOBAL_ENKF_SFC +++ b/dev/jobs/JGLOBAL_ENKF_SFC @@ -1,6 +1,6 @@ #! 
/usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "esfc" -c "base esfc" +source "${HOMEglobal}/ush/jjob_header.sh" -e "esfc" -c "base esfc" ############################################## # Set variables used in the script @@ -42,7 +42,7 @@ export USE_CFP=YES ############################################################### # Run relevant script -${ENKFRESFCSH:-${SCRgfs}/exglobal_enkf_sfc.sh} && true +${ENKFRESFCSH:-${SCRglobal}/exglobal_enkf_sfc.sh} && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/jobs/JGLOBAL_ENKF_UPDATE b/dev/jobs/JGLOBAL_ENKF_UPDATE index 47d2fb0640b..381f54cc60b 100755 --- a/dev/jobs/JGLOBAL_ENKF_UPDATE +++ b/dev/jobs/JGLOBAL_ENKF_UPDATE @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "eupd" -c "base anal eupd" +source "${HOMEglobal}/ush/jjob_header.sh" -e "eupd" -c "base anal eupd" ############################################## # Set variables used in the script @@ -28,7 +28,7 @@ MEMDIR="ensstat" RUN="enkfgdas" YMD=${GDATE:0:8} HH=${GDATE:8:2} declare_from_tm ############################################################### # Run relevant script -${ENKFUPDSH:-${SCRgfs}/exglobal_enkf_update.sh} && true +${ENKFUPDSH:-${SCRglobal}/exglobal_enkf_update.sh} && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/jobs/JGLOBAL_ENS_GLOBUS_ARCH b/dev/jobs/JGLOBAL_ENS_GLOBUS_ARCH index 6a6924383dd..198e1077952 100755 --- a/dev/jobs/JGLOBAL_ENS_GLOBUS_ARCH +++ b/dev/jobs/JGLOBAL_ENS_GLOBUS_ARCH @@ -1,6 +1,6 @@ #! 
/usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "globus_earc" -c "base globus earc_groups" +source "${HOMEglobal}/ush/jjob_header.sh" -e "globus_earc" -c "base globus earc_groups" ############################################## # Set variables used in the script @@ -13,7 +13,7 @@ MEMDIR="ensstat" YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ ############################################################### export -n SHELLOPTS -${GLOBALGLOBUSARCHSH:-${SCRgfs}/exglobal_globus_earc.py} +${GLOBALGLOBUSARCHSH:-${SCRglobal}/exglobal_globus_earc.py} err=$? if [[ ${err} -ne 0 ]]; then exit "${err}" diff --git a/dev/jobs/JGLOBAL_EXTRACTVARS b/dev/jobs/JGLOBAL_EXTRACTVARS index 12aee592ad5..8b0a9d2d2bd 100755 --- a/dev/jobs/JGLOBAL_EXTRACTVARS +++ b/dev/jobs/JGLOBAL_EXTRACTVARS @@ -1,7 +1,7 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "extractvars" -c "base extractvars" -source "${USHgfs}/wave_domain_grid.sh" +source "${HOMEglobal}/ush/jjob_header.sh" -e "extractvars" -c "base extractvars" +source "${USHglobal}/wave_domain_grid.sh" # Set COM Paths for grid in '0p25' '0p50' '1p00'; do @@ -38,7 +38,7 @@ if [[ "${DO_WAVE}" == "YES" ]]; then fi # Execute the Script -"${SCRgfs}/exglobal_extractvars.sh" +"${SCRglobal}/exglobal_extractvars.sh" err=$? if [[ ${err} -ne 0 ]]; then exit "${err}" diff --git a/dev/jobs/JGLOBAL_FETCH b/dev/jobs/JGLOBAL_FETCH index 3648bd269da..50692408211 100755 --- a/dev/jobs/JGLOBAL_FETCH +++ b/dev/jobs/JGLOBAL_FETCH @@ -1,11 +1,11 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "fetch" -c "base fetch" +source "${HOMEglobal}/ush/jjob_header.sh" -e "fetch" -c "base fetch" # Execute fetching # Do not export shell opts to the bash scripts in the htar/hsi wrappers export -n SHELLOPTS -"${SCRgfs}/exglobal_fetch.py" +"${SCRglobal}/exglobal_fetch.py" err=$? 
############################################################### diff --git a/dev/jobs/JGLOBAL_FORECAST b/dev/jobs/JGLOBAL_FORECAST index 816803d6ee2..44f1357447a 100755 --- a/dev/jobs/JGLOBAL_FORECAST +++ b/dev/jobs/JGLOBAL_FORECAST @@ -3,11 +3,11 @@ if ((10#${ENSMEM:-0} > 0)); then export DATAjob="${DATAROOT}/${RUN}efcs${ENSMEM}.${PDY:-}${cyc}" export DATA="${DATAjob}/${jobid}" - source "${HOMEgfs}/ush/jjob_header.sh" -e "efcs" -c "base fcst efcs" + source "${HOMEglobal}/ush/jjob_header.sh" -e "efcs" -c "base fcst efcs" else export DATAjob="${DATAROOT}/${RUN}fcst.${PDY:-}${cyc}" export DATA="${DATAjob}/${jobid}" - source "${HOMEgfs}/ush/jjob_header.sh" -e "fcst" -c "base fcst" + source "${HOMEglobal}/ush/jjob_header.sh" -e "fcst" -c "base fcst" fi # Create the directory to hold restarts and output from the model in stmp @@ -98,7 +98,8 @@ fi ############################################################### # Run relevant exglobal script ############################################################### -"${FORECASTSH:-${SCRgfs}/exglobal_forecast.sh}" && true # The && true prevents the shell from exiting when set -e +: "${FORECASTSH:=${SCRglobal}/exglobal_forecast.sh}" +"${FORECASTSH}" && true # The && true prevents the shell from exiting when set -e export err=$? if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/jobs/JGLOBAL_GLOBUS_ARCH b/dev/jobs/JGLOBAL_GLOBUS_ARCH index 61bb7bdb8cc..f9965081f21 100755 --- a/dev/jobs/JGLOBAL_GLOBUS_ARCH +++ b/dev/jobs/JGLOBAL_GLOBUS_ARCH @@ -1,6 +1,6 @@ #! 
/usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "globus_arch" -c "base globus" +source "${HOMEglobal}/ush/jjob_header.sh" -e "globus_arch" -c "base globus" ############################################## # Set variables used in the script @@ -13,7 +13,7 @@ YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ ############################################################### export -n SHELLOPTS -${GLOBALGLOBUSARCHSH:-${SCRgfs}/exglobal_globus_arch.py} +${GLOBALGLOBUSARCHSH:-${SCRglobal}/exglobal_globus_arch.py} err=$? [[ ${err} -ne 0 ]] && exit "${err}" diff --git a/dev/jobs/JGLOBAL_MARINE_ANALYSIS_CHECKPOINT b/dev/jobs/JGLOBAL_MARINE_ANALYSIS_CHECKPOINT index 28491144b2c..0f24bdef1f3 100755 --- a/dev/jobs/JGLOBAL_MARINE_ANALYSIS_CHECKPOINT +++ b/dev/jobs/JGLOBAL_MARINE_ANALYSIS_CHECKPOINT @@ -3,7 +3,7 @@ export WIPE_DATA="NO" export DATAjob="${DATAROOT}/marineanalysis.${PDY:-}${cyc}" export DATAens="${DATAjob}/ensdata" export DATA="${DATAjob}/marineanlvar" -source "${HOMEgfs}/ush/jjob_header.sh" -e "marineanlchkpt" -c "base marineanl marineanlchkpt" +source "${HOMEglobal}/ush/jjob_header.sh" -e "marineanlchkpt" -c "base marineanl marineanlchkpt" ############################################## # Set variables used in the script @@ -16,7 +16,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "marineanlchkpt" -c "base marineanl ma ############################################################### # Run relevant script -EXSCRIPT=${GDASMARINEANALYSIS:-${SCRgfs}/exglobal_marine_analysis_checkpoint.py} +EXSCRIPT=${GDASMARINEANALYSIS:-${SCRglobal}/exglobal_marine_analysis_checkpoint.py} ${EXSCRIPT} && true export err=$? 
if [[ ${err} -ne 0 ]]; then diff --git a/dev/jobs/JGLOBAL_MARINE_ANALYSIS_ECEN b/dev/jobs/JGLOBAL_MARINE_ANALYSIS_ECEN index e85cd6666c8..74d826678b9 100755 --- a/dev/jobs/JGLOBAL_MARINE_ANALYSIS_ECEN +++ b/dev/jobs/JGLOBAL_MARINE_ANALYSIS_ECEN @@ -4,7 +4,7 @@ export DATAjob="${DATAROOT/enkf/}/marineanalysis.${PDY:-}${cyc}" export DATA="${DATAjob}/marineanlecen" export DATAens="${DATAjob}/ensdata" -source "${HOMEgfs}/ush/jjob_header.sh" -e "marineanlecen" -c "base marineanlecen" +source "${HOMEglobal}/ush/jjob_header.sh" -e "marineanlecen" -c "base marineanlecen" ############################################## # Set variables used in the script @@ -41,7 +41,7 @@ fi ############################################################### # Run relevant script -EXSCRIPT=${GDASOCNCENPY:-${HOMEgfs}/scripts/exglobal_marine_analysis_ecen.py} +EXSCRIPT=${GDASOCNCENPY:-${HOMEglobal}/scripts/exglobal_marine_analysis_ecen.py} ${EXSCRIPT} && true export err=$? if [[ ${err} -ne 0 ]]; then diff --git a/dev/jobs/JGLOBAL_MARINE_ANALYSIS_FINALIZE b/dev/jobs/JGLOBAL_MARINE_ANALYSIS_FINALIZE index c5ce8515a00..01265e48507 100755 --- a/dev/jobs/JGLOBAL_MARINE_ANALYSIS_FINALIZE +++ b/dev/jobs/JGLOBAL_MARINE_ANALYSIS_FINALIZE @@ -3,7 +3,7 @@ export WIPE_DATA="NO" export DATAjob="${DATAROOT}/marineanalysis.${PDY:-}${cyc}" export DATAens="${DATAjob}/ensdata" export DATA="${DATAjob}/marineanlvar" -source "${HOMEgfs}/ush/jjob_header.sh" -e "marineanlfinal" -c "base marineanl marineanlfinal" +source "${HOMEglobal}/ush/jjob_header.sh" -e "marineanlfinal" -c "base marineanl marineanlfinal" ############################################## # Set variables used in the script @@ -31,7 +31,7 @@ mkdir -m 775 -p "${COMOUT_CONF}" # Run relevant script ############################################################### -EXSCRIPT=${GDASMARINEANALYSIS:-${SCRgfs}/exglobal_marine_analysis_finalize.py} +EXSCRIPT=${GDASMARINEANALYSIS:-${SCRglobal}/exglobal_marine_analysis_finalize.py} ${EXSCRIPT} && true export err=$? 
if [[ ${err} -ne 0 ]]; then diff --git a/dev/jobs/JGLOBAL_MARINE_ANALYSIS_INITIALIZE b/dev/jobs/JGLOBAL_MARINE_ANALYSIS_INITIALIZE index de8beb20afc..55c725c6d00 100755 --- a/dev/jobs/JGLOBAL_MARINE_ANALYSIS_INITIALIZE +++ b/dev/jobs/JGLOBAL_MARINE_ANALYSIS_INITIALIZE @@ -3,7 +3,7 @@ export DATAjob="${DATAROOT}/marineanalysis.${PDY:-}${cyc}" export DATAens="${DATAjob}/ensdata" export DATA="${DATAjob}/marineanlvar" -source "${HOMEgfs}/ush/jjob_header.sh" -e "marineanlinit" -c "base marineanl marineanlinit" +source "${HOMEglobal}/ush/jjob_header.sh" -e "marineanlinit" -c "base marineanl marineanlinit" ############################################## # Set variables used in the script @@ -35,7 +35,7 @@ YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ ############################################################### # Run relevant script -EXSCRIPT=${GDASMARINEANALYSIS:-${SCRgfs}/exglobal_marine_analysis_initialize.py} +EXSCRIPT=${GDASMARINEANALYSIS:-${SCRglobal}/exglobal_marine_analysis_initialize.py} ${EXSCRIPT} && true export err=$? if [[ ${err} -ne 0 ]]; then diff --git a/dev/jobs/JGLOBAL_MARINE_ANALYSIS_LETKF b/dev/jobs/JGLOBAL_MARINE_ANALYSIS_LETKF index 2b6552b78b6..5a4af70e202 100755 --- a/dev/jobs/JGLOBAL_MARINE_ANALYSIS_LETKF +++ b/dev/jobs/JGLOBAL_MARINE_ANALYSIS_LETKF @@ -6,7 +6,7 @@ export DATA="${DATAjob}/marineanlletkf" export DATAens="${DATAjob}/ensdata" if [[ ! -d "${DATAens}" ]]; then mkdir -p "${DATAens}"; fi -source "${HOMEgfs}/ush/jjob_header.sh" -e "marineanlletkf" -c "base marineanlletkf" +source "${HOMEglobal}/ush/jjob_header.sh" -e "marineanlletkf" -c "base marineanlletkf" ############################################## # Set variables used in the script @@ -53,7 +53,7 @@ if [[ ! 
-d ${COMOUT_CONF} ]]; then mkdir -p "${COMOUT_CONF}"; fi ############################################################### # Run relevant script -EXSCRIPT=${GDASOCNLETKFPY:-${HOMEgfs}/scripts/exglobal_marine_analysis_letkf.py} +EXSCRIPT=${GDASOCNLETKFPY:-${HOMEglobal}/scripts/exglobal_marine_analysis_letkf.py} ${EXSCRIPT} && true export err=$? if [[ ${err} -ne 0 ]]; then diff --git a/dev/jobs/JGLOBAL_MARINE_ANALYSIS_VARIATIONAL b/dev/jobs/JGLOBAL_MARINE_ANALYSIS_VARIATIONAL index 6cb9030f851..ead13713e05 100755 --- a/dev/jobs/JGLOBAL_MARINE_ANALYSIS_VARIATIONAL +++ b/dev/jobs/JGLOBAL_MARINE_ANALYSIS_VARIATIONAL @@ -4,7 +4,7 @@ export WIPE_DATA="NO" export DATAjob="${DATAROOT}/marineanalysis.${PDY:-}${cyc}" export DATAens="${DATAjob}/ensdata" export DATA="${DATAjob}/marineanlvar" -source "${HOMEgfs}/ush/jjob_header.sh" -e "marineanlvar" -c "base marineanl marineanlvar" +source "${HOMEglobal}/ush/jjob_header.sh" -e "marineanlvar" -c "base marineanl marineanlvar" ############################################## # Set variables used in the script @@ -17,7 +17,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "marineanlvar" -c "base marineanl mari ############################################################### # Run relevant script -EXSCRIPT=${GDASMARINERUNSH:-${SCRgfs}/exglobal_marine_analysis_variational.py} +EXSCRIPT=${GDASMARINERUNSH:-${SCRglobal}/exglobal_marine_analysis_variational.py} ${EXSCRIPT} && true export err=$? if [[ ${err} -ne 0 ]]; then diff --git a/dev/jobs/JGLOBAL_MARINE_BMAT b/dev/jobs/JGLOBAL_MARINE_BMAT index e4cb3ef9bb3..b0dedbfbe07 100755 --- a/dev/jobs/JGLOBAL_MARINE_BMAT +++ b/dev/jobs/JGLOBAL_MARINE_BMAT @@ -12,7 +12,7 @@ if [[ ! 
-d "${DATAstaticb}" ]]; then mkdir -p "${DATAstaticb}"; fi # source config.base, config.ocnanal and config.marinebmat # and pass marinebmat to ${machine}.env -source "${HOMEgfs}/ush/jjob_header.sh" -e "marinebmat" -c "base marinebmat" +source "${HOMEglobal}/ush/jjob_header.sh" -e "marinebmat" -c "base marinebmat" ############################################## # Set variables used in the script @@ -58,7 +58,7 @@ mkdir -m 775 -p "${COMOUT_CONF}" ############################################################### # Run relevant script -EXSCRIPT=${GDASMARINEBMATRUNPY:-${SCRgfs}/exglobal_marinebmat.py} +EXSCRIPT=${GDASMARINEBMATRUNPY:-${SCRglobal}/exglobal_marinebmat.py} ${EXSCRIPT} && true export err=$? if [[ ${err} -ne 0 ]]; then diff --git a/dev/jobs/JGLOBAL_MARINE_BMAT_INITIALIZE b/dev/jobs/JGLOBAL_MARINE_BMAT_INITIALIZE index ed6e74599dc..a1ccb4dd118 100755 --- a/dev/jobs/JGLOBAL_MARINE_BMAT_INITIALIZE +++ b/dev/jobs/JGLOBAL_MARINE_BMAT_INITIALIZE @@ -11,7 +11,7 @@ if [[ ! -d "${DATAstaticb}" ]]; then mkdir -p "${DATAstaticb}"; fi # source config.base, config.ocnanal and config.marinebmatinit # and pass marinebmat to ${machine}.env -source "${HOMEgfs}/ush/jjob_header.sh" -e "marinebmatinit" -c "base marinebmat marinebmatinit" +source "${HOMEglobal}/ush/jjob_header.sh" -e "marinebmatinit" -c "base marinebmat marinebmatinit" ############################################## # Set variables used in the script @@ -55,7 +55,7 @@ mkdir -p "${COMOUT_ICE_BMATRIX}" ############################################################### # Run relevant script -EXSCRIPT=${GDASMARINEBMATRUNPY:-${SCRgfs}/exglobal_marinebmat_initialize.py} +EXSCRIPT=${GDASMARINEBMATRUNPY:-${SCRglobal}/exglobal_marinebmat_initialize.py} ${EXSCRIPT} && true export err=$? 
if [[ ${err} -ne 0 ]]; then diff --git a/dev/jobs/JGLOBAL_OCEANICE_PRODUCTS b/dev/jobs/JGLOBAL_OCEANICE_PRODUCTS index 2d7718017ca..76857354966 100755 --- a/dev/jobs/JGLOBAL_OCEANICE_PRODUCTS +++ b/dev/jobs/JGLOBAL_OCEANICE_PRODUCTS @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "oceanice_products" -c "base oceanice_products" +source "${HOMEglobal}/ush/jjob_header.sh" -e "oceanice_products" -c "base oceanice_products" ############################################## # Begin JOB SPECIFIC work @@ -13,7 +13,7 @@ YMD="${PDY}" HH="${cyc}" declare_from_tmpl -rx "COMOUT_${COMPONENT^^}_NETCDF":"C ############################################################### # Run exglobal script -"${SCRgfs}/exglobal_oceanice_products.py" && true +"${SCRglobal}/exglobal_oceanice_products.py" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/jobs/JGLOBAL_OFFLINE_ATMOS_ANALYSIS b/dev/jobs/JGLOBAL_OFFLINE_ATMOS_ANALYSIS index 4e457b185d7..9a2e1b029d8 100755 --- a/dev/jobs/JGLOBAL_OFFLINE_ATMOS_ANALYSIS +++ b/dev/jobs/JGLOBAL_OFFLINE_ATMOS_ANALYSIS @@ -1,7 +1,7 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "offlineanl" -c "base offlineanl" +source "${HOMEglobal}/ush/preamble.sh" +source "${HOMEglobal}/ush/jjob_header.sh" -e "offlineanl" -c "base offlineanl" ############################################## # Set variables used in the script @@ -35,7 +35,7 @@ mkdir -p "${COMOUT_ATMOS_ANALYSIS}" mkdir -p "${COMOUT_OBS}" # Execute generation of increments from offline analysis -EXSCRIPT=${OFFLINEANLPY:-${SCRgfs}/exglobal_offline_atmos_analysis.py} +EXSCRIPT=${OFFLINEANLPY:-${SCRglobal}/exglobal_offline_atmos_analysis.py} ${EXSCRIPT} export err=$? diff --git a/dev/jobs/JGLOBAL_PREP_EMISSIONS b/dev/jobs/JGLOBAL_PREP_EMISSIONS index 0d843a68ce7..49505b8162a 100755 --- a/dev/jobs/JGLOBAL_PREP_EMISSIONS +++ b/dev/jobs/JGLOBAL_PREP_EMISSIONS @@ -1,6 +1,6 @@ #! 
/usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "prep_emissions" -c "base prep_emissions" +source "${HOMEglobal}/ush/jjob_header.sh" -e "prep_emissions" -c "base prep_emissions" ############################################## # Set variables used in the script @@ -17,7 +17,7 @@ mkdir -p "${COMOUT_CHEM_INPUT}" ############################################################### # Run relevant script -EXSCRIPT=${PREP_EMISSIONS_PY:-${SCRgfs}/exglobal_prep_emissions.py} +EXSCRIPT=${PREP_EMISSIONS_PY:-${SCRglobal}/exglobal_prep_emissions.py} ${EXSCRIPT} && true export err=$? if [[ ${err} -ne 0 ]]; then diff --git a/dev/jobs/JGLOBAL_PREP_OCEAN_OBS b/dev/jobs/JGLOBAL_PREP_OCEAN_OBS index fd51214b4cc..544be20c4db 100755 --- a/dev/jobs/JGLOBAL_PREP_OCEAN_OBS +++ b/dev/jobs/JGLOBAL_PREP_OCEAN_OBS @@ -1,5 +1,5 @@ #!/bin/bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "prepoceanobs" -c "base marineanl prepoceanobs" +source "${HOMEglobal}/ush/jjob_header.sh" -e "prepoceanobs" -c "base marineanl prepoceanobs" ############################################## # Set variables used in the script @@ -14,7 +14,7 @@ YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COMOUT_OBS:COM_OBS_TMPL ############################################################### # Run relevant script -EXSCRIPT=${GDASPREPOCNOBSPY:-${SCRgfs}/exglobal_prep_ocean_obs.py} +EXSCRIPT=${GDASPREPOCNOBSPY:-${SCRglobal}/exglobal_prep_ocean_obs.py} ${EXSCRIPT} && true export err=$? if [[ ${err} -ne 0 ]]; then diff --git a/dev/jobs/JGLOBAL_SNOWENS_ANALYSIS b/dev/jobs/JGLOBAL_SNOWENS_ANALYSIS index fef9a4aa92c..48bb690afdf 100755 --- a/dev/jobs/JGLOBAL_SNOWENS_ANALYSIS +++ b/dev/jobs/JGLOBAL_SNOWENS_ANALYSIS @@ -1,6 +1,6 @@ #! 
/usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "esnowanl" -c "base esnowanl" +source "${HOMEglobal}/ush/jjob_header.sh" -e "esnowanl" -c "base esnowanl" ############################################## # Set variables used in the script @@ -43,7 +43,7 @@ mkdir -p "${COMOUT_SNOW_ANALYSIS}" "${COMOUT_CONF}" ############################################################### # Run relevant script -EXSCRIPT=${SNOWANLPY:-${SCRgfs}/exglobal_snowens_analysis.py} +EXSCRIPT=${SNOWANLPY:-${SCRglobal}/exglobal_snowens_analysis.py} ${EXSCRIPT} && true export err=$? if [[ ${err} -ne 0 ]]; then diff --git a/dev/jobs/JGLOBAL_SNOW_ANALYSIS b/dev/jobs/JGLOBAL_SNOW_ANALYSIS index 5bd19608a16..eed4b3f0294 100755 --- a/dev/jobs/JGLOBAL_SNOW_ANALYSIS +++ b/dev/jobs/JGLOBAL_SNOW_ANALYSIS @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "snowanl" -c "base snowanl" +source "${HOMEglobal}/ush/jjob_header.sh" -e "snowanl" -c "base snowanl" ############################################## # Set variables used in the script @@ -29,7 +29,7 @@ mkdir -m 775 -p "${COMOUT_SNOW_ANALYSIS}" "${COMOUT_CONF}" ############################################################### # Run relevant script -EXSCRIPT=${SNOWANLPY:-${SCRgfs}/exglobal_snow_analysis.py} +EXSCRIPT=${SNOWANLPY:-${SCRglobal}/exglobal_snow_analysis.py} ${EXSCRIPT} && true export err=$? if [[ ${err} -ne 0 ]]; then diff --git a/dev/jobs/JGLOBAL_STAGE_IC b/dev/jobs/JGLOBAL_STAGE_IC index 6a2f9571166..5fcde67a278 100755 --- a/dev/jobs/JGLOBAL_STAGE_IC +++ b/dev/jobs/JGLOBAL_STAGE_IC @@ -1,9 +1,9 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "stage_ic" -c "base stage_ic" +source "${HOMEglobal}/ush/jjob_header.sh" -e "stage_ic" -c "base stage_ic" # Execute staging -"${SCRgfs}/exglobal_stage_ic.py" +"${SCRglobal}/exglobal_stage_ic.py" err=$? 
############################################################### diff --git a/dev/jobs/JGLOBAL_WAVE_GEMPAK b/dev/jobs/JGLOBAL_WAVE_GEMPAK index 288a2643a38..fe817c88922 100755 --- a/dev/jobs/JGLOBAL_WAVE_GEMPAK +++ b/dev/jobs/JGLOBAL_WAVE_GEMPAK @@ -1,7 +1,7 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "wavegempak" -c "base wave wavegempak" -source "${USHgfs}/wave_domain_grid.sh" +source "${HOMEglobal}/ush/jjob_header.sh" -e "wavegempak" -c "base wave wavegempak" +source "${USHglobal}/wave_domain_grid.sh" ################################### # Set COM Paths @@ -25,7 +25,7 @@ if [[ ! -d ${COMOUT_WAVE_GEMPAK} ]]; then mkdir -p "${COMOUT_WAVE_GEMPAK}"; fi ######################################################## # Execute the script. -"${SCRgfs}/exgfs_wave_nawips.sh" && true +"${SCRglobal}/exgfs_wave_nawips.sh" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/jobs/JGLOBAL_WAVE_INIT b/dev/jobs/JGLOBAL_WAVE_INIT index d97d6887a88..810c69c40fa 100755 --- a/dev/jobs/JGLOBAL_WAVE_INIT +++ b/dev/jobs/JGLOBAL_WAVE_INIT @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "waveinit" -c "base wave waveinit" +source "${HOMEglobal}/ush/jjob_header.sh" -e "waveinit" -c "base wave waveinit" export MP_PULSE=0 @@ -15,7 +15,7 @@ export wavempexec=${wavempexec:-"mpirun -n"} export wave_mpmd=${wave_mpmd:-"cfp"} # Execute the Script -"${SCRgfs}/exgfs_wave_init.sh" && true +"${SCRglobal}/exgfs_wave_init.sh" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/jobs/JGLOBAL_WAVE_POST_BNDPNT b/dev/jobs/JGLOBAL_WAVE_POST_BNDPNT index 93a428dcd27..21089583af1 100755 --- a/dev/jobs/JGLOBAL_WAVE_POST_BNDPNT +++ b/dev/jobs/JGLOBAL_WAVE_POST_BNDPNT @@ -1,6 +1,6 @@ #! 
/usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "wavepostbndpnt" -c "base wave wavepostsbs wavepostbndpnt" +source "${HOMEglobal}/ush/jjob_header.sh" -e "wavepostbndpnt" -c "base wave wavepostsbs wavepostbndpnt" export MP_PULSE=0 @@ -20,7 +20,7 @@ export DOBLL_WAV='NO' # Bulletin post export DOBNDPNT_WAV='YES' # Boundary points # Execute the Script -"${SCRgfs}/exgfs_wave_post_pnt.sh" && true +"${SCRglobal}/exgfs_wave_post_pnt.sh" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit "ex-script of JGLOBAL_WAVE_POST_BNDPNT failed!" diff --git a/dev/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL b/dev/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL index 756ebe37299..2e4734ef8a7 100755 --- a/dev/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL +++ b/dev/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "wavepostbndpntbll" -c "base wave wavepostsbs wavepostbndpntbll" +source "${HOMEglobal}/ush/jjob_header.sh" -e "wavepostbndpntbll" -c "base wave wavepostsbs wavepostbndpntbll" export COMPONENT="wave" @@ -22,7 +22,7 @@ export DOBLL_WAV='YES' # Bulletin post export DOBNDPNT_WAV='YES' # Boundary points # Execute the Script -"${SCRgfs}/exgfs_wave_post_pnt.sh" && true +"${SCRglobal}/exgfs_wave_post_pnt.sh" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/jobs/JGLOBAL_WAVE_POST_PNT b/dev/jobs/JGLOBAL_WAVE_POST_PNT index 39f29acb1ed..37a82c19e28 100755 --- a/dev/jobs/JGLOBAL_WAVE_POST_PNT +++ b/dev/jobs/JGLOBAL_WAVE_POST_PNT @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "wavepostpnt" -c "base wave wavepostsbs wavepostpnt" +source "${HOMEglobal}/ush/jjob_header.sh" -e "wavepostpnt" -c "base wave wavepostsbs wavepostpnt" export MP_PULSE=0 @@ -20,7 +20,7 @@ export DOBLL_WAV='YES' # Bulletin post export DOBNDPNT_WAV='NO' #not boundary points # Execute the Script -"${SCRgfs}/exgfs_wave_post_pnt.sh" && true +"${SCRglobal}/exgfs_wave_post_pnt.sh" && true export err=$? 
if [[ ${err} -ne 0 ]]; then err_exit "ex-script of JGLOBAL_WAVE_POST_PNT failed!" diff --git a/dev/jobs/JGLOBAL_WAVE_POST_SBS b/dev/jobs/JGLOBAL_WAVE_POST_SBS index ff92d88b450..df5d3ee6c1f 100755 --- a/dev/jobs/JGLOBAL_WAVE_POST_SBS +++ b/dev/jobs/JGLOBAL_WAVE_POST_SBS @@ -1,7 +1,7 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "wavepostsbs" -c "base wave wavepostsbs" -source "${USHgfs}/wave_domain_grid.sh" +source "${HOMEglobal}/ush/jjob_header.sh" -e "wavepostsbs" -c "base wave wavepostsbs" +source "${USHglobal}/wave_domain_grid.sh" # Set COM Paths YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ @@ -25,7 +25,7 @@ else fi # Execute the Script -"${SCRgfs}/exgfs_wave_post_gridded_sbs.sh" && true +"${SCRglobal}/exgfs_wave_post_gridded_sbs.sh" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit "ex-script of gridded wave post failed!" diff --git a/dev/jobs/JGLOBAL_WAVE_PRDGEN_BULLS b/dev/jobs/JGLOBAL_WAVE_PRDGEN_BULLS index d7f2ec152ef..e60df88b1bd 100755 --- a/dev/jobs/JGLOBAL_WAVE_PRDGEN_BULLS +++ b/dev/jobs/JGLOBAL_WAVE_PRDGEN_BULLS @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "waveawipsbulls" -c "base wave waveawipsbulls" +source "${HOMEglobal}/ush/jjob_header.sh" -e "waveawipsbulls" -c "base wave waveawipsbulls" ################################### # Set COM Paths @@ -15,7 +15,7 @@ mkdir -p "${COMOUT_WAVE_WMO}" ################################### # Execute the Script -"${SCRgfs}/exgfs_wave_prdgen_bulls.sh" && true +"${SCRglobal}/exgfs_wave_prdgen_bulls.sh" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED b/dev/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED index 2dacae9efba..a15a7da8f8c 100755 --- a/dev/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED +++ b/dev/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED @@ -1,7 +1,7 @@ #! 
/usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "waveawipsgridded" -c "base wave waveawipsgridded" -source "${USHgfs}/wave_domain_grid.sh" +source "${HOMEglobal}/ush/jjob_header.sh" -e "waveawipsgridded" -c "base wave waveawipsgridded" +source "${USHglobal}/wave_domain_grid.sh" ################################### # Set COM Paths @@ -26,7 +26,7 @@ fi ################################### # Execute the Script ################################### -"${SCRgfs}/exgfs_wave_prdgen_gridded.sh" && true +"${SCRglobal}/exgfs_wave_prdgen_gridded.sh" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/jobs/JGLOBAL_WAVE_PREP b/dev/jobs/JGLOBAL_WAVE_PREP index 7ecf936a73c..c58541a0a46 100755 --- a/dev/jobs/JGLOBAL_WAVE_PREP +++ b/dev/jobs/JGLOBAL_WAVE_PREP @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/jjob_header.sh" -e "waveprep" -c "base wave waveprep" +source "${HOMEglobal}/ush/jjob_header.sh" -e "waveprep" -c "base wave waveprep" # Set rtofs PDY # shellcheck disable=SC2153 @@ -20,7 +20,7 @@ YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ if [[ ! -d ${COMOUT_WAVE_PREP} ]]; then mkdir -p "${COMOUT_WAVE_PREP}"; fi # Execute the Script -"${SCRgfs}/exgfs_wave_prep.sh" && true +"${SCRglobal}/exgfs_wave_prep.sh" && true export err=$? 
if [[ ${err} -ne 0 ]]; then diff --git a/dev/scripts/exgdas_atmos_chgres_forenkf.sh b/dev/scripts/exgdas_atmos_chgres_forenkf.sh index f51be67b285..3c9d54d4de8 100755 --- a/dev/scripts/exgdas_atmos_chgres_forenkf.sh +++ b/dev/scripts/exgdas_atmos_chgres_forenkf.sh @@ -33,7 +33,7 @@ export bcyc=${BDATE:8:2} # Utilities export CHGRP_CMD=${CHGRP_CMD:-"chgrp ${group_name:-rstprod}"} -export NCLEN=${NCLEN:-${USHgfs}/getncdimlen} +export NCLEN=${NCLEN:-${USHglobal}/getncdimlen} # IAU DOIAU=${DOIAU:-"NO"} @@ -41,12 +41,12 @@ export IAUFHRS=${IAUFHRS:-"6,"} # Dependent Scripts and Executables export APRUN_CHGRES=${APRUN_CHGRES:-${APRUN:-""}} -export CHGRESNCEXEC=${CHGRESNCEXEC:-${EXECgfs}/enkf_chgres_recenter_nc.x} +export CHGRESNCEXEC=${CHGRESNCEXEC:-${EXECglobal}/enkf_chgres_recenter_nc.x} export NTHREADS_CHGRES=${NTHREADS_CHGRES:-1} APRUNCFP=${APRUNCFP:-""} # level info file -SIGLEVEL=${SIGLEVEL:-${FIXgfs}/am/global_hyblev.l${LEVS}.txt} +SIGLEVEL=${SIGLEVEL:-${FIXglobal}/am/global_hyblev.l${LEVS}.txt} # forecast files APREFIX=${APREFIX:-""} @@ -113,7 +113,7 @@ if [[ ${DO_CALC_ANALYSIS} == "YES" ]]; then ${NLN} "${ATMF09ENS}" fcst.ensres.09 fi export OMP_NUM_THREADS=${NTHREADS_CHGRES} - SIGLEVEL=${SIGLEVEL:-${FIXgfs}/am/global_hyblev.l${LEVS_ENKF}.txt} + SIGLEVEL=${SIGLEVEL:-${FIXglobal}/am/global_hyblev.l${LEVS_ENKF}.txt} if [[ "${USE_CFP}" == "YES" ]]; then rm -f "${DATA}/mp_chgres.sh" @@ -139,7 +139,7 @@ EOF done # Run with MPMD - "${USHgfs}/run_mpmd.sh" "${DATA}/mp_chgres.sh" && true + "${USHglobal}/run_mpmd.sh" "${DATA}/mp_chgres.sh" && true export err=$? 
if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/scripts/exgdas_atmos_gempak_gif_ncdc.sh b/dev/scripts/exgdas_atmos_gempak_gif_ncdc.sh index aaa4072cc7f..8ce2628c72e 100755 --- a/dev/scripts/exgdas_atmos_gempak_gif_ncdc.sh +++ b/dev/scripts/exgdas_atmos_gempak_gif_ncdc.sh @@ -8,7 +8,7 @@ cd "${DATA}" || exit 2 -export NTS="${HOMEgfs}/gempak/ush/restore" +export NTS="${HOMEglobal}/gempak/ush/restore" if [[ ${MODEL} == GDAS ]]; then fcsthrs="000" @@ -34,7 +34,7 @@ if [[ ${MODEL} == GDAS ]]; then cpreq "${gempak_file}" "gem_grids${fhr3}.gem" - "${HOMEgfs}/gempak/ush/gempak_${RUN}_f${fhr3}_gif.sh" && true + "${HOMEglobal}/gempak/ush/gempak_${RUN}_f${fhr3}_gif.sh" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit diff --git a/dev/scripts/exgdas_atmos_nawips.sh b/dev/scripts/exgdas_atmos_nawips.sh index 7bebb7766df..171b1a15919 100755 --- a/dev/scripts/exgdas_atmos_nawips.sh +++ b/dev/scripts/exgdas_atmos_nawips.sh @@ -15,10 +15,10 @@ mkdir -p "${DATA_RUN}" cd "${DATA_RUN}" || exit 1 # "Import" functions used in this script -source "${USHgfs}/product_functions.sh" +source "${USHglobal}/product_functions.sh" for table in g2varswmo2.tbl g2vcrdwmo2.tbl g2varsncep1.tbl g2vcrdncep1.tbl; do - source_table="${HOMEgfs}/gempak/fix/${table}" + source_table="${HOMEglobal}/gempak/fix/${table}" if [[ ! -f "${source_table}" ]]; then err_exit "${table} is missing" fi diff --git a/dev/scripts/exgdas_atmos_verfozn.sh b/dev/scripts/exgdas_atmos_verfozn.sh index e6f3a369da8..a4c0c46a3a3 100755 --- a/dev/scripts/exgdas_atmos_verfozn.sh +++ b/dev/scripts/exgdas_atmos_verfozn.sh @@ -29,7 +29,7 @@ if [[ -s "${oznstat}" ]]; then export OZNMON_NETCDF=${netcdf} - "${USHgfs}/ozn_xtrct.sh" && true + "${USHglobal}/ozn_xtrct.sh" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit "ozn_xtrct.sh failed!" 
diff --git a/dev/scripts/exgdas_atmos_verfrad.sh b/dev/scripts/exgdas_atmos_verfrad.sh index 034d7067720..58684072a67 100755 --- a/dev/scripts/exgdas_atmos_verfrad.sh +++ b/dev/scripts/exgdas_atmos_verfrad.sh @@ -126,34 +126,34 @@ export RADMON_NETCDF=${netcdf} # Run the child scripts. #------------------------------------------------------------------ -"${USHgfs}/radmon_verf_angle.sh" && true +"${USHglobal}/radmon_verf_angle.sh" && true rc_angle=$? -"${USHgfs}/rstprod.sh" +"${USHglobal}/rstprod.sh" # Allow all scripts to run. Call err_exit at the end, after files are restricted. if [[ ${rc_angle} -ne 0 ]]; then echo "FATAL ERROR: radmon_verf_angle.sh failed!" fi -"${USHgfs}/radmon_verf_bcoef.sh" && true +"${USHglobal}/radmon_verf_bcoef.sh" && true rc_bcoef=$? -"${USHgfs}/rstprod.sh" +"${USHglobal}/rstprod.sh" if [[ ${rc_bcoef} -ne 0 ]]; then echo "FATAL ERROR: radmon_verf_bcoef.sh failed!" fi -"${USHgfs}/radmon_verf_bcor.sh" && true +"${USHglobal}/radmon_verf_bcor.sh" && true rc_bcor=$? -"${USHgfs}/rstprod.sh" +"${USHglobal}/rstprod.sh" if [[ ${rc_bcoef} -ne 0 ]]; then echo "FATAL ERROR: radmon_verf_bcor.sh failed!" fi -"${USHgfs}/radmon_verf_time.sh" && true +"${USHglobal}/radmon_verf_time.sh" && true rc_time=$? -"${USHgfs}/rstprod.sh" +"${USHglobal}/rstprod.sh" if [[ ${rc_bcoef} -ne 0 ]]; then echo "FATAL ERROR: radmon_verf_time.sh failed!" diff --git a/dev/scripts/exgdas_enkf_post.sh b/dev/scripts/exgdas_enkf_post.sh index 7dc00d1d236..689ae0d3043 100755 --- a/dev/scripts/exgdas_enkf_post.sh +++ b/dev/scripts/exgdas_enkf_post.sh @@ -25,11 +25,11 @@ NTHREADS_EPOS=${NTHREADS_EPOS:-1} # Fix files LEVS=${LEVS:-64} -HYBENSMOOTH=${HYBENSMOOTH:-${FIXgfs}/gsi/global_hybens_smoothinfo.l${LEVS}.txt} +HYBENSMOOTH=${HYBENSMOOTH:-${FIXglobal}/gsi/global_hybens_smoothinfo.l${LEVS}.txt} # Executables. 
-GETATMENSMEANEXEC=${GETATMENSMEANEXEC:-${EXECgfs}/getsigensmeanp_smooth.x} -GETSFCENSMEANEXEC=${GETSFCENSMEANEXEC:-${EXECgfs}/getsfcensmeanp.x} +GETATMENSMEANEXEC=${GETATMENSMEANEXEC:-${EXECglobal}/getsigensmeanp_smooth.x} +GETSFCENSMEANEXEC=${GETSFCENSMEANEXEC:-${EXECglobal}/getsfcensmeanp.x} # Other variables. PREFIX=${PREFIX:-""} diff --git a/dev/scripts/exgfs_aero_init_aerosol.py b/dev/scripts/exgfs_aero_init_aerosol.py index 3a33e27a9ed..c4a887b2997 100755 --- a/dev/scripts/exgfs_aero_init_aerosol.py +++ b/dev/scripts/exgfs_aero_init_aerosol.py @@ -17,8 +17,8 @@ FHMAX_GFS: Forecast length in hours RUN: Forecast phase (gfs or gdas). Currently always expected to be gfs. ROTDIR: Rotating (COM) directory -USHgfs: Path to global-workflow `ush` directory -PARMgfs: Path to global-workflow `parm` directory +USHglobal: Path to global-workflow `ush` directory +PARMglobal: Path to global-workflow `parm` directory Additionally, the following data files are used: @@ -82,8 +82,8 @@ def main() -> None: fcst_length = int(get_env_var('FHMAX_GFS')) run = get_env_var("RUN") rot_dir = get_env_var("ROTDIR") - ush_gfs = get_env_var("USHgfs") - parm_gfs = get_env_var("PARMgfs") + ush_gfs = get_env_var("USHglobal") + parm_gfs = get_env_var("PARMglobal") # os.chdir(data) diff --git a/dev/scripts/exgfs_atmos_awips_20km_1p0deg.sh b/dev/scripts/exgfs_atmos_awips_20km_1p0deg.sh index 9989b0db0b3..98e5110e0ad 100755 --- a/dev/scripts/exgfs_atmos_awips_20km_1p0deg.sh +++ b/dev/scripts/exgfs_atmos_awips_20km_1p0deg.sh @@ -36,7 +36,7 @@ fi cd "${DATA}" || exit 2 # "Import" functions used in this script -source "${USHgfs}/product_functions.sh" +source "${USHglobal}/product_functions.sh" ############################################### # Wait for the availability of the pgrb file @@ -82,7 +82,7 @@ export opt28=' -new_grid_interpolation budget -fi ' cpreq "${COMIN_ATMOS_GRIB_0p25}/gfs.t${cyc}z.pres_a.0p25.f${fcsthr}.grib2" "tmpfile2${fcsthr}" cpreq 
"${COMIN_ATMOS_GRIB_0p25}/gfs.t${cyc}z.pres_b.0p25.f${fcsthr}.grib2" "tmpfile2b${fcsthr}" cat "tmpfile2${fcsthr}" "tmpfile2b${fcsthr}" > "tmpfile${fcsthr}" -${WGRIB2} "tmpfile${fcsthr}" | grep -F -f "${PARMgfs}/product/gfs_awips_parmlist_g2" | +${WGRIB2} "tmpfile${fcsthr}" | grep -F -f "${PARMglobal}/product/gfs_awips_parmlist_g2" | ${WGRIB2} -i -grib masterfile "tmpfile${fcsthr}" && true export err=$? if [[ ${err} -ne 0 ]]; then @@ -174,7 +174,7 @@ for GRID in conus ak prico pac 003; do export FORT31="awps_file_fi${fcsthr}_${GRID}" export FORT51="grib2.awpgfs${fcsthr}.${GRID}" - cpreq "${PARMgfs}/wmo/grib2_awpgfs${fcsthr}.${GRID}" "parm_list" + cpreq "${PARMglobal}/wmo/grib2_awpgfs${fcsthr}.${GRID}" "parm_list" ${TOCGRIB2} < "parm_list" >> "${pgmout}" 2> errfile && true export err=$? @@ -204,7 +204,7 @@ for GRID in conus ak prico pac 003; do export FORT31="awps_file_fi${fcsthr}_${GRID}" export FORT51="grib2.awpgfs_20km_${GRID}_f${fcsthr}" - cpreq "${PARMgfs}/wmo/grib2_awpgfs_20km_${GRID}f${fcsthr}" "parm_list" + cpreq "${PARMglobal}/wmo/grib2_awpgfs_20km_${GRID}f${fcsthr}" "parm_list" ${TOCGRIB2} < "parm_list" >> "${pgmout}" 2> errfile && true export err=$? 
diff --git a/dev/scripts/exgfs_atmos_fbwind.sh b/dev/scripts/exgfs_atmos_fbwind.sh index 33aa30c90c0..7233db9a057 100755 --- a/dev/scripts/exgfs_atmos_fbwind.sh +++ b/dev/scripts/exgfs_atmos_fbwind.sh @@ -38,7 +38,7 @@ for fhr3 in 006 012 024; do cpreq "${COMIN_ATMOS_GRIB_0p25}/gfs.${cycle}.pres_a.0p25.f${fhr3}.grib2" "tmp_pgrb2_0p25${fhr3}" cpreq "${COMIN_ATMOS_GRIB_0p25}/gfs.${cycle}.pres_b.0p25.f${fhr3}.grib2" "tmp_pgrb2b_0p25${fhr3}" cat "tmp_pgrb2_0p25${fhr3}" "tmp_pgrb2b_0p25${fhr3}" > "tmp0p25filef${fhr3}" - ${WGRIB2} "tmp0p25filef${fhr3}" | grep -F -f "${PARMgfs}/product/gfs_fbwnd_parmlist_g2" | + ${WGRIB2} "tmp0p25filef${fhr3}" | grep -F -f "${PARMglobal}/product/gfs_fbwnd_parmlist_g2" | ${WGRIB2} -i -grib "tmpfilef${fhr3}" "tmp0p25filef${fhr3}" ${CNVGRIB} -g21 "tmpfilef${fhr3}" "gfs.t${cyc}z.grbf${fhr3}_grb1" ${GRBINDEX} "gfs.t${cyc}z.grbf${fhr3}_grb1" "gfs.t${cyc}z.grbf${fhr3}_grb1.idx" @@ -60,14 +60,14 @@ export FORT33="gfs.t${cyc}z.grbf024_grb1.idx" export FORT51="tran.fbwnd_pacific" -cpreq "${PARMgfs}/product/fbwnd_pacific.stnlist" fbwnd_pacific.stnlist +cpreq "${PARMglobal}/product/fbwnd_pacific.stnlist" fbwnd_pacific.stnlist -"${EXECgfs}/fbwndgfs.x" < fbwnd_pacific.stnlist >> "${pgmout}" 2> errfile && true +"${EXECglobal}/fbwndgfs.x" < fbwnd_pacific.stnlist >> "${pgmout}" 2> errfile && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit "Failed to run fbwnd for the Pacific!" 
fi -"${USHgfs}/make_ntc_bull.pl" WMOBH NONE KWNO NONE tran.fbwnd_pacific "${outfile_name}" +"${USHglobal}/make_ntc_bull.pl" WMOBH NONE KWNO NONE tran.fbwnd_pacific "${outfile_name}" ############################### END OF SCRIPT ####################### diff --git a/dev/scripts/exgfs_atmos_gempak_gif_ncdc_skew_t.sh b/dev/scripts/exgfs_atmos_gempak_gif_ncdc_skew_t.sh index 0e84508a149..ca509b4f607 100755 --- a/dev/scripts/exgfs_atmos_gempak_gif_ncdc_skew_t.sh +++ b/dev/scripts/exgfs_atmos_gempak_gif_ncdc_skew_t.sh @@ -9,7 +9,7 @@ cd "${DATA}" || exit 1 -export NTS="${HOMEgfs}/gempak/ush/restore" +export NTS="${HOMEglobal}/gempak/ush/restore" if [[ "${MODEL}" == GDAS ]] || [[ "${MODEL}" == GFS ]]; then case "${MODEL}" in @@ -34,9 +34,9 @@ if [[ "${MODEL}" == GDAS ]] || [[ "${MODEL}" == GFS ]]; then cpreq "${GRIBFILE}" "gem_grids${fhr3}.gem" export fhr3 if [[ ${fhr} -eq 0 ]]; then - "${HOMEgfs}/gempak/ush/gempak_${RUN}_f000_gif.sh" + "${HOMEglobal}/gempak/ush/gempak_${RUN}_f000_gif.sh" else - "${HOMEgfs}/gempak/ush/gempak_${RUN}_fhhh_gif.sh" + "${HOMEglobal}/gempak/ush/gempak_${RUN}_fhhh_gif.sh" fi done fi @@ -45,12 +45,12 @@ cd "${DATA}" || exit 1 export RSHPDY="${PDY:4}${PDY:2:2}" -cpreq "${HOMEgfs}/gempak/dictionaries/sonde.land.tbl" sonde.land.tbl -cpreq "${HOMEgfs}/gempak/dictionaries/metar.tbl" metar.tbl +cpreq "${HOMEglobal}/gempak/dictionaries/sonde.land.tbl" sonde.land.tbl +cpreq "${HOMEglobal}/gempak/dictionaries/metar.tbl" metar.tbl sort -k 2n,2 metar.tbl > metar_stnm.tbl cpreq "${COMIN_OBS}/${RUN}.${cycle}.adpupa.tm00.bufr_d" fort.40 -"${HOMEgfs}/exec/rdbfmsua.x" >> "${pgmout}" 2> errfile +"${HOMEglobal}/exec/rdbfmsua.x" >> "${pgmout}" 2> errfile export err=$? if [[ ${err} -ne 0 ]]; then err_exit "Failed to run rdbfmsua!" 
diff --git a/dev/scripts/exgfs_atmos_gempak_meta.sh b/dev/scripts/exgfs_atmos_gempak_meta.sh index 87f411d38b7..d219010b64a 100755 --- a/dev/scripts/exgfs_atmos_gempak_meta.sh +++ b/dev/scripts/exgfs_atmos_gempak_meta.sh @@ -49,14 +49,14 @@ while [[ ${fhr} -le ${fhend} ]]; do if [[ ${do_all} -eq 1 ]]; then do_all=0 - awk '{print $1}' "${HOMEgfs}/gempak/fix/gfs_meta" | envsubst > "poescript" + awk '{print $1}' "${HOMEglobal}/gempak/fix/gfs_meta" | envsubst > "poescript" else # # Do not try to grep out 12, it will grab the 12 from 126. # This will work as long as we don't need 12 fhr metafiles # if [[ ${fhr} -ne 12 ]]; then - grep "${fhr}" "${HOMEgfs}/gempak/fix/gfs_meta" | awk -F" [0-9]" '{print $1}' | envsubst > "poescript" + grep "${fhr}" "${HOMEglobal}/gempak/fix/gfs_meta" | awk -F" [0-9]" '{print $1}' | envsubst > "poescript" fi fi @@ -71,7 +71,7 @@ while [[ ${fhr} -le ${fhend} ]]; do cat poescript - "${HOMEgfs}/ush/run_mpmd.sh" poescript && true + "${HOMEglobal}/ush/run_mpmd.sh" poescript && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit "Failed to generate one or more gempak meta plots!" diff --git a/dev/scripts/exgfs_atmos_goes_nawips.sh b/dev/scripts/exgfs_atmos_goes_nawips.sh index 323f8f7701b..0f1fa3c50e4 100755 --- a/dev/scripts/exgfs_atmos_goes_nawips.sh +++ b/dev/scripts/exgfs_atmos_goes_nawips.sh @@ -8,17 +8,17 @@ cd "${DATA}" || exit 1 fhr3=$1 # "Import" functions used in this script -source "${USHgfs}/product_functions.sh" +source "${USHglobal}/product_functions.sh" for table in g2varswmo2.tbl g2vcrdwmo2.tbl g2varsncep1.tbl g2vcrdncep1.tbl; do - source_table="${HOMEgfs}/gempak/fix/${table}" + source_table="${HOMEglobal}/gempak/fix/${table}" if [[ ! 
-f "${source_table}" ]]; then err_exit "FATAL ERROR: ${table} is missing" fi cpreq "${source_table}" "${table}" done -NAGRIB_TABLE="${HOMEgfs}/gempak/fix/nagrib.tbl" +NAGRIB_TABLE="${HOMEglobal}/gempak/fix/nagrib.tbl" NAGRIB="${GEMEXE}/nagrib2" entry=$(grep "^${RUN2} " "${NAGRIB_TABLE}" | awk 'index($1,"#") != 1 {print $0}' || echo "") diff --git a/dev/scripts/exgfs_atmos_grib2_special_npoess.sh b/dev/scripts/exgfs_atmos_grib2_special_npoess.sh index 51ce9674539..b472cdc7e6f 100755 --- a/dev/scripts/exgfs_atmos_grib2_special_npoess.sh +++ b/dev/scripts/exgfs_atmos_grib2_special_npoess.sh @@ -106,7 +106,7 @@ for ((fhr = SHOUR; fhr <= FHOUR; fhr = fhr + FHINC)); do ###################################################################### # Process Global NPOESS 0.50 GFS GRID PRODUCTS IN GRIB2 F000 - F024 # ###################################################################### - paramlist="${PARMgfs}/product/global_npoess_paramlist_g2" + paramlist="${PARMglobal}/product/global_npoess_paramlist_g2" cpreq "${COMIN_ATMOS_GRIB_0p50}/gfs.t${cyc}z.pres_a.0p50.f${fhr3}.grib2" tmpfile2 cpreq "${COMIN_ATMOS_GRIB_0p50}/gfs.t${cyc}z.pres_b.0p50.f${fhr3}.grib2" tmpfile2b cat tmpfile2 tmpfile2b > tmpfile diff --git a/dev/scripts/exgfs_atmos_nawips.sh b/dev/scripts/exgfs_atmos_nawips.sh index 66cbfa2d8d0..f6039628b5f 100755 --- a/dev/scripts/exgfs_atmos_nawips.sh +++ b/dev/scripts/exgfs_atmos_nawips.sh @@ -20,7 +20,7 @@ mkdir -p "${DATA_RUN}" cd "${DATA_RUN}" || exit 1 # "Import" functions used in this script -source "${USHgfs}/product_functions.sh" +source "${USHglobal}/product_functions.sh" NAGRIB="${GEMEXE}/nagrib2" @@ -41,7 +41,7 @@ mkdir -p "lock.${fhr3}" cd "lock.${fhr3}" || exit 1 for table in g2varswmo2.tbl g2vcrdwmo2.tbl g2varsncep1.tbl g2vcrdncep1.tbl; do - source_table="${HOMEgfs}/gempak/fix/${table}" + source_table="${HOMEglobal}/gempak/fix/${table}" if [[ ! 
-f "${source_table}" ]]; then err_exit "FATAL ERROR: ${table} is missing" fi diff --git a/dev/scripts/exgfs_atmos_postsnd.sh b/dev/scripts/exgfs_atmos_postsnd.sh index 725acf5548b..596c9ed287f 100755 --- a/dev/scripts/exgfs_atmos_postsnd.sh +++ b/dev/scripts/exgfs_atmos_postsnd.sh @@ -23,7 +23,7 @@ # it requires 7 nodes & allocate 21 processes per node(num_ppn=21) ################################################################ -runscript="${USHgfs}/gfs_bufr.sh" +runscript="${USHglobal}/gfs_bufr.sh" cd "${DATA}" || exit 2 @@ -45,7 +45,7 @@ export NINT1=${FHOUT_HF_GFS:-1} export NEND1=${FHMAX_HF_GFS:-120} export NINT3=${FHOUT_GFS:-3} -GETDIM="${USHgfs}/getncdimlen" +GETDIM="${USHglobal}/getncdimlen" LEVS=$(${GETDIM} "${COMIN_ATMOS_HISTORY}/${RUN}.${cycle}.atm.f000.${atmfm}" pfull) declare -x LEVS @@ -114,7 +114,7 @@ for fhr in "${hour_list[@]}"; do done # Run with MPMD -"${USHgfs}/run_mpmd.sh" "${DATA}/poescript_bufr" && true +"${USHglobal}/run_mpmd.sh" "${DATA}/poescript_bufr" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit "One or more BUFR MPMD tasks failed!" 
@@ -172,7 +172,7 @@ fi ######################################## rm -rf poe_col for ((m = 1; m <= NUM_SND_COLLECTIVES; m++)); do - echo "${USHgfs}/gfs_sndp.sh ${m} " >> poe_col + echo "${USHglobal}/gfs_sndp.sh ${m} " >> poe_col done if [[ "${CFP_MP:-"NO"}" == "YES" ]]; then @@ -191,7 +191,7 @@ ${APRUN_POSTSNDCFP} cmdfile # GEMPAK surface and sounding data files ######################################## if [[ "${DO_GEMPAK:-"NO"}" == "YES" ]]; then - sh "${USHgfs}/gfs_bfr2gpk.sh" + sh "${USHglobal}/gfs_bfr2gpk.sh" fi ############## END OF SCRIPT ####################### diff --git a/dev/scripts/exgfs_wave_init.sh b/dev/scripts/exgfs_wave_init.sh index 19f3bc6992f..cf8f059f707 100755 --- a/dev/scripts/exgfs_wave_init.sh +++ b/dev/scripts/exgfs_wave_init.sh @@ -38,24 +38,24 @@ done for grdID in "${grdALL[@]}"; do echo "INFO: Setting up to generate mod_def file for ${grdID}" - if [[ -f "${FIXgfs}/wave/ww3_grid.inp.${grdID}" ]]; then - cpreq "${FIXgfs}/wave/ww3_grid.inp.${grdID}" "ww3_grid.inp.${grdID}" - echo "INFO: ww3_grid.inp.${grdID} copied (${FIXgfs}/wave/ww3_grid.inp.${grdID})." + if [[ -f "${FIXglobal}/wave/ww3_grid.inp.${grdID}" ]]; then + cpreq "${FIXglobal}/wave/ww3_grid.inp.${grdID}" "ww3_grid.inp.${grdID}" + echo "INFO: ww3_grid.inp.${grdID} copied (${FIXglobal}/wave/ww3_grid.inp.${grdID})." 
else export err=2 err_exit "No inp file for model definition file for grid ${grdID}" fi - if [[ -f "${FIXgfs}/wave/${grdID}.msh" ]]; then - cpreq "${FIXgfs}/wave/${grdID}.msh" "${grdID}.msh" + if [[ -f "${FIXglobal}/wave/${grdID}.msh" ]]; then + cpreq "${FIXglobal}/wave/${grdID}.msh" "${grdID}.msh" fi #TODO: how do we say "it's unstructured, and therefore need to have error check here" - echo "${USHgfs}/wave_grid_moddef.sh ${grdID}" >> mpmd_script + echo "${USHglobal}/wave_grid_moddef.sh ${grdID}" >> mpmd_script done # 1.a.1 Execute MPMD or process serially -"${USHgfs}/run_mpmd.sh" "${DATA}/mpmd_script" && true +"${USHglobal}/run_mpmd.sh" "${DATA}/mpmd_script" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit "run_mpmd.sh failed!" diff --git a/dev/scripts/exgfs_wave_nawips.sh b/dev/scripts/exgfs_wave_nawips.sh index 5400a29962d..8ab6bcd2b07 100755 --- a/dev/scripts/exgfs_wave_nawips.sh +++ b/dev/scripts/exgfs_wave_nawips.sh @@ -10,13 +10,13 @@ # March-2020 Roberto.Padilla@noaa.gov ##################################################################### -source "${USHgfs}/wave_domain_grid.sh" -source "${USHgfs}/atparse.bash" +source "${USHglobal}/wave_domain_grid.sh" +source "${USHglobal}/atparse.bash" NAGRIB="nagrib2" fhr3=$(printf "%03d" "${FORECAST_HOUR}") -cpreq "${HOMEgfs}/gempak/fix/g2varswmo2.tbl" "${DATA}/" +cpreq "${HOMEglobal}/gempak/fix/g2varswmo2.tbl" "${DATA}/" grids=${GEMPAK_GRIDS:-${waveinterpGRD:-'aoc_9km gnh_10m gsh_15m'}} diff --git a/dev/scripts/exgfs_wave_post_gridded_sbs.sh b/dev/scripts/exgfs_wave_post_gridded_sbs.sh index 0d12e48075c..5ec5b806b9e 100755 --- a/dev/scripts/exgfs_wave_post_gridded_sbs.sh +++ b/dev/scripts/exgfs_wave_post_gridded_sbs.sh @@ -21,7 +21,7 @@ # ############################################################################### -source "${USHgfs}/wave_domain_grid.sh" +source "${USHglobal}/wave_domain_grid.sh" DOGRI_WAV=${DOGRI_WAV:-"NO"} # Interpolate to a grid DOGRB_WAV=${DOGRB_WAV:-"YES"} # Create grib2 files @@ 
-51,7 +51,7 @@ cpreq "${COMIN_WAVE_HISTORY}/${RUN}.t${cyc}z.${waveGRD}.f${fhr3}.bin" "./out_grd # Check for input templates for grib2 products (copying will be done in the grib2 script) if [[ "${DOGRB_WAV}" == "YES" ]]; then for grbGRD in ${waveinterpGRD} ${wavepostGRD}; do - if [[ ! -f "${PARMgfs}/wave/ww3_grib2.${grbGRD}.inp.tmpl" ]]; then + if [[ ! -f "${PARMglobal}/wave/ww3_grib2.${grbGRD}.inp.tmpl" ]]; then export err=1 err_exit "No template for grib generation" fi @@ -84,11 +84,11 @@ if [[ "${DOGRI_WAV}" == "YES" ]]; then for grdID in ${waveinterpGRD}; do count=$((count + 1)) echo "#!/bin/bash" > "cmdfile.${count}" - echo "${USHgfs}/wave_grid_interp_sbs.sh ${grdID} ${ymdh_int} ${dt_int} ${n_int} > ${DATA}/grid_interp_${grdID}.out 2>&1" >> "${DATA}/cmdfile.${count}" + echo "${USHglobal}/wave_grid_interp_sbs.sh ${grdID} ${ymdh_int} ${dt_int} ${n_int} > ${DATA}/grid_interp_${grdID}.out 2>&1" >> "${DATA}/cmdfile.${count}" echo "cat ${DATA}/grid_interp_${grdID}.out" >> "cmdfile.${count}" if [[ "${DOGRB_WAV}" == "YES" ]]; then process_grdID "${grdID}" - echo "${USHgfs}/wave_grib2_sbs.sh ${grdID} ${GRIDNR} ${MODNR} ${valid_time} ${FORECAST_HOUR} ${GRDREGION} ${GRDRES} '${OUTPARS_WAV}' > ${DATA}/grib2_${grdID}.out 2>&1" >> "${DATA}/cmdfile.${count}" + echo "${USHglobal}/wave_grib2_sbs.sh ${grdID} ${GRIDNR} ${MODNR} ${valid_time} ${FORECAST_HOUR} ${GRDREGION} ${GRDRES} '${OUTPARS_WAV}' > ${DATA}/grib2_${grdID}.out 2>&1" >> "${DATA}/cmdfile.${count}" echo "cat ${DATA}/grib2_${grdID}.out" >> "${DATA}/cmdfile.${count}" fi chmod 755 "cmdfile.${count}" @@ -102,7 +102,7 @@ if [[ "${DOGRB_WAV}" == "YES" ]]; then count=$((count + 1)) process_grdID "${grdID}" echo "#!/bin/bash" > "cmdfile.${count}" - echo "${USHgfs}/wave_grib2_sbs.sh ${grdID} ${GRIDNR} ${MODNR} ${valid_time} ${FORECAST_HOUR} ${GRDREGION} ${GRDRES} '${OUTPARS_WAV}' > grib2_${grdID}.out 2>&1" >> "${DATA}/cmdfile.${count}" + echo "${USHglobal}/wave_grib2_sbs.sh ${grdID} ${GRIDNR} ${MODNR} ${valid_time} 
${FORECAST_HOUR} ${GRDREGION} ${GRDRES} '${OUTPARS_WAV}' > grib2_${grdID}.out 2>&1" >> "${DATA}/cmdfile.${count}" echo "cat ${DATA}/grib2_${grdID}.out" >> "${DATA}/cmdfile.${count}" chmod 755 "cmdfile.${count}" echo "${DATA}/cmdfile.${count}" >> "${DATA}/cmdfile" @@ -119,7 +119,7 @@ fi # Execute command file echo "INFO: Running MPMD job with ${count} commands" -"${USHgfs}/run_mpmd.sh" "${DATA}/cmdfile" && true +"${USHglobal}/run_mpmd.sh" "${DATA}/cmdfile" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit "run_mpmd.sh failed!" diff --git a/dev/scripts/exgfs_wave_post_pnt.sh b/dev/scripts/exgfs_wave_post_pnt.sh index 7f5ebfd3c7d..435b6c45dfc 100755 --- a/dev/scripts/exgfs_wave_post_pnt.sh +++ b/dev/scripts/exgfs_wave_post_pnt.sh @@ -82,12 +82,12 @@ done # 1.b Output locations file rm -f buoy.loc -if [[ -f "${PARMgfs}/wave/wave_${NET}.buoys" ]]; then - cpreq -f "${PARMgfs}/wave/wave_${NET}.buoys" buoy.loc.temp +if [[ -f "${PARMglobal}/wave/wave_${NET}.buoys" ]]; then + cpreq -f "${PARMglobal}/wave/wave_${NET}.buoys" buoy.loc.temp if [[ "${DOBNDPNT_WAV}" == "YES" ]]; then #only do boundary points sed -n '/^\$.*/!p' buoy.loc.temp | grep IBP > buoy.loc || { - echo "WARNING: No boundary points found in buoy file ${PARMgfs}/wave/wave_${NET}.buoys" + echo "WARNING: No boundary points found in buoy file ${PARMglobal}/wave/wave_${NET}.buoys" echo " Ending job without doing anything." exit 0 } @@ -98,7 +98,7 @@ if [[ -f "${PARMgfs}/wave/wave_${NET}.buoys" ]]; then fi if [[ -s buoy.loc ]]; then - echo " buoy.loc and buoy.ibp copied and processed (${PARMgfs}/wave/wave_${NET}.buoys)." + echo " buoy.loc and buoy.ibp copied and processed (${PARMglobal}/wave/wave_${NET}.buoys)." 
else export err=3 err_exit 'NO BUOY LOCATION FILE' @@ -106,8 +106,8 @@ fi # 1.c Input template files -if [[ -f "${PARMgfs}/wave/ww3_outp_spec.inp.tmpl" ]]; then - cpreq -f "${PARMgfs}/wave/ww3_outp_spec.inp.tmpl" ww3_outp_spec.inp.tmpl +if [[ -f "${PARMglobal}/wave/ww3_outp_spec.inp.tmpl" ]]; then + cpreq -f "${PARMglobal}/wave/ww3_outp_spec.inp.tmpl" ww3_outp_spec.inp.tmpl fi if [[ -f ww3_outp_spec.inp.tmpl ]]; then @@ -117,8 +117,8 @@ else err_exit "NO TEMPLATE FOR SPEC INPUT FILE" fi -if [[ -f "${PARMgfs}/wave/ww3_outp_bull.inp.tmpl" ]]; then - cpreq "${PARMgfs}/wave/ww3_outp_bull.inp.tmpl" ww3_outp_bull.inp.tmpl +if [[ -f "${PARMglobal}/wave/ww3_outp_bull.inp.tmpl" ]]; then + cpreq "${PARMglobal}/wave/ww3_outp_bull.inp.tmpl" ww3_outp_bull.inp.tmpl fi if [[ -f ww3_outp_bull.inp.tmpl ]]; then @@ -171,7 +171,7 @@ ${NLN} "./mod_def.${waveuoutpGRD}" ./mod_def.ww3 export pgm="${NET,,}_ww3_outp.x" source prep_step -"${EXECgfs}/${pgm}" > buoy_lst.loc 2>&1 +"${EXECglobal}/${pgm}" > buoy_lst.loc 2>&1 export err=$? if [[ ${err} -ne 0 && ! -f buoy_log.ww3 ]]; then cat buoy_tmp.loc || true @@ -232,7 +232,7 @@ if [[ "${DOSPC_WAV}" == "YES" ]]; then ww3_outp_spec.inp.tmpl > ww3_outp.inp export pgm="${NET,,}_ww3_outp.x" - "${EXECgfs}/${pgm}" + "${EXECglobal}/${pgm}" fi if [[ "${DOBLL_WAV}" == "YES" ]]; then @@ -244,7 +244,7 @@ if [[ "${DOBLL_WAV}" == "YES" ]]; then -e "s/REFT/${truntime}/g" \ ww3_outp_bull.inp.tmpl > ww3_outp.inp export pgm="${NET,,}_ww3_outp.x" - "${EXECgfs}/${pgm}" + "${EXECglobal}/${pgm}" fi # --------------------------------------------------------------------------- # @@ -262,19 +262,19 @@ printf "\n Making command file for taring all point output files." 
if [[ "${DOBNDPNT_WAV}" == "YES" ]]; then if [[ "${DOSPC_WAV}" == "YES" ]]; then - echo "${USHgfs}/wave_tar.sh ${WAV_MOD_TAG} ibp ${Nb} 2>&1 | tee ${WAV_MOD_TAG}_ibp_tar.out" >> cmdtarfile + echo "${USHglobal}/wave_tar.sh ${WAV_MOD_TAG} ibp ${Nb} 2>&1 | tee ${WAV_MOD_TAG}_ibp_tar.out" >> cmdtarfile fi if [[ "${DOBLL_WAV}" == "YES" ]]; then - echo "${USHgfs}/wave_tar.sh ${WAV_MOD_TAG} ibpbull ${Nb} 2>&1 | tee ${WAV_MOD_TAG}_ibpbull_tar.out" >> cmdtarfile - echo "${USHgfs}/wave_tar.sh ${WAV_MOD_TAG} ibpcbull ${Nb} 2>&1 | tee ${WAV_MOD_TAG}_ibpcbull_tar.out" >> cmdtarfile + echo "${USHglobal}/wave_tar.sh ${WAV_MOD_TAG} ibpbull ${Nb} 2>&1 | tee ${WAV_MOD_TAG}_ibpbull_tar.out" >> cmdtarfile + echo "${USHglobal}/wave_tar.sh ${WAV_MOD_TAG} ibpcbull ${Nb} 2>&1 | tee ${WAV_MOD_TAG}_ibpcbull_tar.out" >> cmdtarfile fi else if [[ "${DOSPC_WAV}" == "YES" ]]; then - echo "${USHgfs}/wave_tar.sh ${WAV_MOD_TAG} spec ${Nb} 2>&1 | tee ${WAV_MOD_TAG}_spec_tar.out" >> cmdtarfile + echo "${USHglobal}/wave_tar.sh ${WAV_MOD_TAG} spec ${Nb} 2>&1 | tee ${WAV_MOD_TAG}_spec_tar.out" >> cmdtarfile fi if [[ "${DOBLL_WAV}" == "YES" ]]; then - echo "${USHgfs}/wave_tar.sh ${WAV_MOD_TAG} bull ${Nb} 2>&1 | tee ${WAV_MOD_TAG}_bull_tar.out" >> cmdtarfile - echo "${USHgfs}/wave_tar.sh ${WAV_MOD_TAG} cbull ${Nb} 2>&1 | tee ${WAV_MOD_TAG}_cbull_tar.out" >> cmdtarfile + echo "${USHglobal}/wave_tar.sh ${WAV_MOD_TAG} bull ${Nb} 2>&1 | tee ${WAV_MOD_TAG}_bull_tar.out" >> cmdtarfile + echo "${USHglobal}/wave_tar.sh ${WAV_MOD_TAG} cbull ${Nb} 2>&1 | tee ${WAV_MOD_TAG}_cbull_tar.out" >> cmdtarfile fi fi @@ -287,7 +287,7 @@ if [[ ${NTASKS} -lt ${ncmds} ]]; then fi fi -"${USHgfs}/run_mpmd.sh" "${DATA}/cmdtarfile" && true +"${USHglobal}/run_mpmd.sh" "${DATA}/cmdtarfile" && true export err=$? 
if [[ ${err} -ne 0 ]]; then export pgm="run_mpmd.sh" diff --git a/dev/scripts/exgfs_wave_prdgen_bulls.sh b/dev/scripts/exgfs_wave_prdgen_bulls.sh index 9d6140979e5..1a3795c3e01 100755 --- a/dev/scripts/exgfs_wave_prdgen_bulls.sh +++ b/dev/scripts/exgfs_wave_prdgen_bulls.sh @@ -63,8 +63,8 @@ echo " Number of bulletin files : ${Nb}" echo ' --------------------------' echo ' ' # 1.c Get the datat cards -if [[ -f "${PARMgfs}/wave/bull_awips_gfswave.${waveGRD}" ]]; then - cpreq "${PARMgfs}/wave/bull_awips_gfswave.${waveGRD}" "awipsbull.data" +if [[ -f "${PARMglobal}/wave/bull_awips_gfswave.${waveGRD}" ]]; then + cpreq "${PARMglobal}/wave/bull_awips_gfswave.${waveGRD}" "awipsbull.data" else export err=3 err_exit "Bulletin header data file missing." diff --git a/dev/scripts/exgfs_wave_prdgen_gridded.sh b/dev/scripts/exgfs_wave_prdgen_gridded.sh index 31c0c57c670..8bc34954e6d 100755 --- a/dev/scripts/exgfs_wave_prdgen_gridded.sh +++ b/dev/scripts/exgfs_wave_prdgen_gridded.sh @@ -21,7 +21,7 @@ # --------------------------------------------------------------------------- # # 0. 
Preparations -source "${USHgfs}/wave_domain_grid.sh" +source "${USHglobal}/wave_domain_grid.sh" # 0.a Basic modes of operation @@ -113,7 +113,7 @@ while [[ "${fhcnt}" -le "${FHMAX_WAV}" ]]; do cpreq "${GRIBIN}" "gribfile.${grdID}.f${fhr}" # 1.d Input template files - parmfile="${PARMgfs}/wave/grib2_${RUN}wave.${grdOut}.f${fhr}" + parmfile="${PARMglobal}/wave/grib2_${RUN}wave.${grdOut}.f${fhr}" if [[ -f "${parmfile}" ]]; then ${NLN} "${parmfile}" "awipsgrb.${grdID}.f${fhr}" else diff --git a/dev/scripts/exgfs_wave_prep.sh b/dev/scripts/exgfs_wave_prep.sh index bb139147b35..ae532763c0f 100755 --- a/dev/scripts/exgfs_wave_prep.sh +++ b/dev/scripts/exgfs_wave_prep.sh @@ -167,7 +167,7 @@ for type in "${itype[@]}"; do err_exit 'Input type not yet implemented' ;; esac - cpreq "${PARMgfs}/wave/ww3_prnc.${type}.${grdID}.inp.tmpl" ./ + cpreq "${PARMglobal}/wave/ww3_prnc.${type}.${grdID}.inp.tmpl" ./ done # --------------------------------------------------------------------------- # @@ -182,7 +182,7 @@ if [[ "${WW3ICEINP}" == 'YES' ]]; then # ensemble members if [[ "${RUNMEM}" == "-1" || "${WW3ICEIENS}" == "T" || "${waveMEMB}" == "00" ]]; then - "${USHgfs}/wave_prnc_ice.sh" > wave_prnc_ice.out && true + "${USHglobal}/wave_prnc_ice.sh" > wave_prnc_ice.out && true ERR=$? if [[ -d ice || ${ERR} -ne 0 ]]; then @@ -300,7 +300,7 @@ if [[ "${WW3CURINP}" == 'YES' ]]; then err_exit "NO CURRENT FILE (RTOFS): ${curfile}" fi - echo "${USHgfs}/wave_prnc_cur.sh ${ymdh_rtofs} ${curfile} ${fhr_rtofs} ${FLGFIRST} > cur_${ymdh_rtofs}.out 2>&1" >> cmdfile + echo "${USHglobal}/wave_prnc_cur.sh ${ymdh_rtofs} ${curfile} ${fhr_rtofs} ${FLGFIRST} > cur_${ymdh_rtofs}.out 2>&1" >> cmdfile if [[ "${FLGFIRST}" == "T" ]]; then FLGFIRST='F' @@ -312,7 +312,7 @@ if [[ "${WW3CURINP}" == 'YES' ]]; then ymdh_rtofs=$(date --utc +%Y%m%d%H -d "${ymdh_rtofs} + ${DATE_DT} hours") done - "${USHgfs}/run_mpmd.sh" "${DATA}/cmdfile" && true + "${USHglobal}/run_mpmd.sh" "${DATA}/cmdfile" && true export err=$? 
if [[ ${err} -ne 0 ]]; then export pgm="run_mpmd.sh" diff --git a/dev/scripts/exglobal_atmos_analysis.sh b/dev/scripts/exglobal_atmos_analysis.sh index 7ce7ede9db7..9b4305289b5 100755 --- a/dev/scripts/exglobal_atmos_analysis.sh +++ b/dev/scripts/exglobal_atmos_analysis.sh @@ -37,7 +37,7 @@ export bcyc=${BDATE:8:2} # Utilities export CHGRP_CMD=${CHGRP_CMD:-"chgrp ${group_name:-rstprod}"} -export NCLEN=${NCLEN:-${USHgfs}/getncdimlen} +export NCLEN=${NCLEN:-${USHglobal}/getncdimlen} COMPRESS=${COMPRESS:-gzip} UNCOMPRESS=${UNCOMPRESS:-gunzip} APRUN_GSI=${APRUN_GSI:-${APRUN:-""}} @@ -68,19 +68,19 @@ DOIAU=${DOIAU:-"NO"} export IAUFHRS=${IAUFHRS:-"6,"} # Dependent Scripts and Executables -GSIEXEC=${GSIEXEC:-${EXECgfs}/gsi.x} +GSIEXEC=${GSIEXEC:-${EXECglobal}/gsi.x} export NTHREADS_CALCINC=${NTHREADS_CALCINC:-1} export APRUN_CALCINC=${APRUN_CALCINC:-${APRUN:-""}} export APRUN_CALCANL=${APRUN_CALCANL:-${APRUN:-""}} export APRUN_CHGRES=${APRUN_CALCANL:-${APRUN:-""}} -export CALCINCEXEC=${CALCINCEXEC:-${EXECgfs}/calc_increment_ens.x} -export CALCINCNCEXEC=${CALCINCNCEXEC:-${EXECgfs}/calc_increment_ens_ncio.x} -export CALCANLEXEC=${CALCANLEXEC:-${EXECgfs}/calc_analysis.x} -export CHGRESNCEXEC=${CHGRESNCEXEC:-${EXECgfs}/enkf_chgres_recenter_nc.x} -export CHGRESINCEXEC=${CHGRESINCEXEC:-${EXECgfs}/interp_inc.x} -CHGRESEXEC=${CHGRESEXEC:-${EXECgfs}/enkf_chgres_recenter.x} +export CALCINCEXEC=${CALCINCEXEC:-${EXECglobal}/calc_increment_ens.x} +export CALCINCNCEXEC=${CALCINCNCEXEC:-${EXECglobal}/calc_increment_ens_ncio.x} +export CALCANLEXEC=${CALCANLEXEC:-${EXECglobal}/calc_analysis.x} +export CHGRESNCEXEC=${CHGRESNCEXEC:-${EXECglobal}/enkf_chgres_recenter_nc.x} +export CHGRESINCEXEC=${CHGRESINCEXEC:-${EXECglobal}/interp_inc.x} +CHGRESEXEC=${CHGRESEXEC:-${EXECglobal}/enkf_chgres_recenter.x} export NTHREADS_CHGRES=${NTHREADS_CHGRES:-24} -CALCINCPY=${CALCINCPY:-${USHgfs}/calcinc_gfs.py} +CALCINCPY=${CALCINCPY:-${USHglobal}/calcinc_gfs.py} export 
hofx_2m_sfcfile=${hofx_2m_sfcfile:-".false."} export ignore_2mQM=${ignore_2mQM:-".false."} @@ -285,24 +285,24 @@ else fi # GSI Fix files -BERROR=${BERROR:-${FIXgfs}/gsi/Big_Endian/global_berror.l${LEVS}y${NLAT_A}.f77} -SATANGL=${SATANGL:-${FIXgfs}/gsi/global_satangbias.txt} -SATINFO=${SATINFO:-${FIXgfs}/gsi/global_satinfo.txt} -RADCLOUDINFO=${RADCLOUDINFO:-${FIXgfs}/gsi/cloudy_radiance_info.txt} -ATMSFILTER=${ATMSFILTER:-${FIXgfs}/gsi/atms_beamwidth.txt} -ANAVINFO=${ANAVINFO:-${FIXgfs}/gsi/global_anavinfo.l${LEVS}.txt} -CONVINFO=${CONVINFO:-${FIXgfs}/gsi/global_convinfo.txt} -vqcdat=${vqcdat:-${FIXgfs}/gsi/vqctp001.dat} -INSITUINFO=${INSITUINFO:-${FIXgfs}/gsi/global_insituinfo.txt} -OZINFO=${OZINFO:-${FIXgfs}/gsi/global_ozinfo.txt} -PCPINFO=${PCPINFO:-${FIXgfs}/gsi/global_pcpinfo.txt} -AEROINFO=${AEROINFO:-${FIXgfs}/gsi/global_aeroinfo.txt} -SCANINFO=${SCANINFO:-${FIXgfs}/gsi/global_scaninfo.txt} -HYBENSINFO=${HYBENSINFO:-${FIXgfs}/gsi/global_hybens_info.l${LEVS}.txt} -OBERROR=${OBERROR:-${FIXgfs}/gsi/prepobs_errtable.global} +BERROR=${BERROR:-${FIXglobal}/gsi/Big_Endian/global_berror.l${LEVS}y${NLAT_A}.f77} +SATANGL=${SATANGL:-${FIXglobal}/gsi/global_satangbias.txt} +SATINFO=${SATINFO:-${FIXglobal}/gsi/global_satinfo.txt} +RADCLOUDINFO=${RADCLOUDINFO:-${FIXglobal}/gsi/cloudy_radiance_info.txt} +ATMSFILTER=${ATMSFILTER:-${FIXglobal}/gsi/atms_beamwidth.txt} +ANAVINFO=${ANAVINFO:-${FIXglobal}/gsi/global_anavinfo.l${LEVS}.txt} +CONVINFO=${CONVINFO:-${FIXglobal}/gsi/global_convinfo.txt} +vqcdat=${vqcdat:-${FIXglobal}/gsi/vqctp001.dat} +INSITUINFO=${INSITUINFO:-${FIXglobal}/gsi/global_insituinfo.txt} +OZINFO=${OZINFO:-${FIXglobal}/gsi/global_ozinfo.txt} +PCPINFO=${PCPINFO:-${FIXglobal}/gsi/global_pcpinfo.txt} +AEROINFO=${AEROINFO:-${FIXglobal}/gsi/global_aeroinfo.txt} +SCANINFO=${SCANINFO:-${FIXglobal}/gsi/global_scaninfo.txt} +HYBENSINFO=${HYBENSINFO:-${FIXglobal}/gsi/global_hybens_info.l${LEVS}.txt} +OBERROR=${OBERROR:-${FIXglobal}/gsi/prepobs_errtable.global} 
OBS_INPUT=${OBS_INPUT:-${BUILD_GSINFO_DIR}/obs_input/obs_input_ops.txt} HIRS_FIX=${HIRS_FIX:-${CRTM_FIX}} -BLACKLST=${BLACKLST:-${FIXgfs}/gsi/rejectlist_global.txt} +BLACKLST=${BLACKLST:-${FIXglobal}/gsi/rejectlist_global.txt} # GSI namelist SETUP=${SETUP:-""} @@ -354,7 +354,7 @@ ${NLN} "${BERROR}" berror_stats ${NLN} "${SATANGL}" satbias_angle if [[ "${SATINFO}" == "generate" ]]; then # shellcheck disable=SC2153 - "${USHgfs}/create_gsi_info.sh" sat "${PDY}${cyc}" "${DATA}" + "${USHglobal}/create_gsi_info.sh" sat "${PDY}${cyc}" "${DATA}" else ${NLN} "${SATINFO}" satinfo fi @@ -362,14 +362,14 @@ ${NLN} "${RADCLOUDINFO}" cloudy_radiance_info.txt ${NLN} "${ATMSFILTER}" atms_beamwidth.txt ${NLN} "${ANAVINFO}" anavinfo if [[ "${CONVINFO}" == "generate" ]]; then - "${USHgfs}/create_gsi_info.sh" conv "${PDY}${cyc}" "${DATA}" "${USE_2M_OBS}" + "${USHglobal}/create_gsi_info.sh" conv "${PDY}${cyc}" "${DATA}" "${USE_2M_OBS}" else ${NLN} "${CONVINFO}" convinfo fi ${NLN} "${vqcdat}" vqctp001.dat ${NLN} "${INSITUINFO}" insituinfo if [[ "${OZINFO}" == "generate" ]]; then - "${USHgfs}/create_gsi_info.sh" oz "${PDY}${cyc}" "${DATA}" + "${USHglobal}/create_gsi_info.sh" oz "${PDY}${cyc}" "${DATA}" else ${NLN} "${OZINFO}" ozinfo fi @@ -380,14 +380,14 @@ ${NLN} "${HYBENSINFO}" hybens_info ${NLN} "${OBERROR}" errtable ${NLN} "${BLACKLST}" blacklist -${NLN} "${FIXgfs}/gsi/AIRS_CLDDET.NL" AIRS_CLDDET.NL -${NLN} "${FIXgfs}/gsi/CRIS_CLDDET.NL" CRIS_CLDDET.NL -${NLN} "${FIXgfs}/gsi/IASI_CLDDET.NL" IASI_CLDDET.NL +${NLN} "${FIXglobal}/gsi/AIRS_CLDDET.NL" AIRS_CLDDET.NL +${NLN} "${FIXglobal}/gsi/CRIS_CLDDET.NL" CRIS_CLDDET.NL +${NLN} "${FIXglobal}/gsi/IASI_CLDDET.NL" IASI_CLDDET.NL #If using correlated error, link to the covariance files if [[ "${USE_CORRELATED_OBERRS}" == "YES" ]]; then if grep -q "Rcov" "${ANAVINFO}"; then - mapfile -t covfile_array < <(find "${FIXgfs}/gsi/" -name "Rcov*") + mapfile -t covfile_array < <(find "${FIXglobal}/gsi/" -name "Rcov*") if ((${#covfile_array[@]} > 0)); 
then for covfile in "${covfile_array[@]}"; do covfile_base=$(basename "${covfile}") @@ -685,7 +685,7 @@ EOF echo "${DATA}/unzip_diag.sh ${diag_file} ${DIAG_SUFFIX:-}.nc4" >> "${DATA}/cmdfile" done - "${USHgfs}/run_mpmd.sh" "${DATA}/cmdfile" && true + "${USHglobal}/run_mpmd.sh" "${DATA}/cmdfile" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit "Failed to unzip rad diag file!" diff --git a/dev/scripts/exglobal_atmos_analysis_calc.sh b/dev/scripts/exglobal_atmos_analysis_calc.sh index 0ce814e624c..af7f62b85ad 100755 --- a/dev/scripts/exglobal_atmos_analysis_calc.sh +++ b/dev/scripts/exglobal_atmos_analysis_calc.sh @@ -28,7 +28,7 @@ GDUMP=${GDUMP:-"gdas"} # Utilities export CHGRP_CMD=${CHGRP_CMD:-"chgrp ${group_name:-rstprod}"} -export NCLEN=${NCLEN:-"${USHgfs}/getncdimlen"} +export NCLEN=${NCLEN:-"${USHglobal}/getncdimlen"} COMPRESS=${COMPRESS:-gzip} UNCOMPRESS=${UNCOMPRESS:-gunzip} @@ -46,19 +46,19 @@ export APRUN_CALCINC=${APRUN_CALCINC:-${APRUN:-""}} export APRUN_CALCANL=${APRUN_CALCANL:-${APRUN:-""}} export APRUN_CHGRES=${APRUN_CALCANL:-${APRUN:-""}} -export CALCANLEXEC=${CALCANLEXEC:-"${EXECgfs}/calc_analysis.x"} -export CHGRESNCEXEC=${CHGRESNCEXEC:-"${EXECgfs}/enkf_chgres_recenter_nc.x"} -export CHGRESINCEXEC=${CHGRESINCEXEC:-"${EXECgfs}/interp_inc.x"} +export CALCANLEXEC=${CALCANLEXEC:-"${EXECglobal}/calc_analysis.x"} +export CHGRESNCEXEC=${CHGRESNCEXEC:-"${EXECglobal}/enkf_chgres_recenter_nc.x"} +export CHGRESINCEXEC=${CHGRESINCEXEC:-"${EXECglobal}/interp_inc.x"} export NTHREADS_CHGRES=${NTHREADS_CHGRES:-1} -CALCINCPY=${CALCINCPY:-"${USHgfs}/calcinc_gfs.py"} +CALCINCPY=${CALCINCPY:-"${USHglobal}/calcinc_gfs.py"} if [[ "${RUN}" == "gcdas" ]]; then - CALCANLPY=${CALCANLPY:-"${USHgfs}/calcanl_gcafs.py"} + CALCANLPY=${CALCANLPY:-"${USHglobal}/calcanl_gcafs.py"} else - CALCANLPY=${CALCANLPY:-"${USHgfs}/calcanl_gfs.py"} + CALCANLPY=${CALCANLPY:-"${USHglobal}/calcanl_gfs.py"} fi DOGAUSFCANL=${DOGAUSFCANL-"NO"} 
-GAUSFCANLSH=${GAUSFCANLSH:-"${USHgfs}/gaussian_sfcanl.sh"} -export GAUSFCANLEXE=${GAUSFCANLEXE:-"${EXECgfs}/gaussian_sfcanl.x"} +GAUSFCANLSH=${GAUSFCANLSH:-"${USHglobal}/gaussian_sfcanl.sh"} +export GAUSFCANLEXE=${GAUSFCANLEXE:-"${EXECglobal}/gaussian_sfcanl.x"} NTHREADS_GAUSFCANL=${NTHREADS_GAUSFCANL:-1} APRUN_GAUSFCANL=${APRUN_GAUSFCANL:-${APRUN:-""}} diff --git a/dev/scripts/exglobal_atmos_chgres_gen_control.sh b/dev/scripts/exglobal_atmos_chgres_gen_control.sh index 368c86096cf..41eff5181da 100755 --- a/dev/scripts/exglobal_atmos_chgres_gen_control.sh +++ b/dev/scripts/exglobal_atmos_chgres_gen_control.sh @@ -6,23 +6,23 @@ # Script description: Runs chgres on changing resolution of GEFS stage ic control member ################################################################################ # copy input files to DATA from the source directory -cpreq "${FIXgfs}/am/global_hyblev.l${LEVS}.txt" "${DATA}/" -cpreq "${FIXgfs}/orog/${CASE}/${CASE}_mosaic.nc" "${DATA}/" +cpreq "${FIXglobal}/am/global_hyblev.l${LEVS}.txt" "${DATA}/" +cpreq "${FIXglobal}/orog/${CASE}/${CASE}_mosaic.nc" "${DATA}/" cpreq "${ATM_FILE}" "${DATA}/atm_input.nc" cpreq "${SFC_FILE}" "${DATA}/sfc_input.nc" ############################################################################### # copy orography,surface, and ancillary files to DATA from the source directory for i in {1..6}; do - cpreq "${FIXgfs}/orog/${CASE}/${CASE}_grid.tile${i}.nc" "${DATA}/" - cpreq "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${i}.nc" "${DATA}/" - cpreq "${FIXgfs}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.slope_type.tile${i}.nc" "${DATA}/" - cpreq "${FIXgfs}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.maximum_snow_albedo.tile${i}.nc" "${DATA}/" - cpreq "${FIXgfs}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.snowfree_albedo.tile${i}.nc" "${DATA}/" - cpreq "${FIXgfs}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.soil_type.tile${i}.nc" "${DATA}/" - cpreq "${FIXgfs}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.vegetation_type.tile${i}.nc" "${DATA}/" 
- cpreq "${FIXgfs}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.substrate_temperature.tile${i}.nc" "${DATA}/" - cpreq "${FIXgfs}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.vegetation_greenness.tile${i}.nc" "${DATA}/" - cpreq "${FIXgfs}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.facsf.tile${i}.nc" "${DATA}/" + cpreq "${FIXglobal}/orog/${CASE}/${CASE}_grid.tile${i}.nc" "${DATA}/" + cpreq "${FIXglobal}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${i}.nc" "${DATA}/" + cpreq "${FIXglobal}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.slope_type.tile${i}.nc" "${DATA}/" + cpreq "${FIXglobal}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.maximum_snow_albedo.tile${i}.nc" "${DATA}/" + cpreq "${FIXglobal}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.snowfree_albedo.tile${i}.nc" "${DATA}/" + cpreq "${FIXglobal}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.soil_type.tile${i}.nc" "${DATA}/" + cpreq "${FIXglobal}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.vegetation_type.tile${i}.nc" "${DATA}/" + cpreq "${FIXglobal}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.substrate_temperature.tile${i}.nc" "${DATA}/" + cpreq "${FIXglobal}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.vegetation_greenness.tile${i}.nc" "${DATA}/" + cpreq "${FIXglobal}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.facsf.tile${i}.nc" "${DATA}/" done ################################################################################ # add the namelist and run chgres @@ -77,7 +77,7 @@ cat << EOF > ./fort.41 / EOF -${APRUN_CHGRES} "${HOMEgfs}/sorc/ufs_utils.fd/exec/chgres_cube" +${APRUN_CHGRES} "${HOMEglobal}/sorc/ufs_utils.fd/exec/chgres_cube" export err=$? if [[ ${err} -ne 0 ]]; then err_exit "chgres_cube failed to create cold start ICs, ABORT!" 
diff --git a/dev/scripts/exglobal_atmos_ensstat.sh b/dev/scripts/exglobal_atmos_ensstat.sh index e6bccaae101..d8e893418d8 100755 --- a/dev/scripts/exglobal_atmos_ensstat.sh +++ b/dev/scripts/exglobal_atmos_ensstat.sh @@ -8,12 +8,12 @@ fi { for grid in '0p25' '0p50' '1p00'; do - echo "${USHgfs}/atmos_ensstat.sh ${grid} ${fhr3}" - # echo "${USHgfs}/atmos_ensstat.sh ${grid} ${fhr3} b" + echo "${USHglobal}/atmos_ensstat.sh ${grid} ${fhr3}" + # echo "${USHglobal}/atmos_ensstat.sh ${grid} ${fhr3} b" done } > mpmd_script -"${USHgfs}/run_mpmd.sh" mpmd_script +"${USHglobal}/run_mpmd.sh" mpmd_script export err=$? if [[ ${err} -ne 0 ]]; then err_exit "One or more MPMD jobs failed to calculate ensemble statistics!" diff --git a/dev/scripts/exglobal_atmos_products.sh b/dev/scripts/exglobal_atmos_products.sh index 8ba75e0c40e..190b997ff7a 100755 --- a/dev/scripts/exglobal_atmos_products.sh +++ b/dev/scripts/exglobal_atmos_products.sh @@ -37,7 +37,7 @@ fi # Also transform the ${grid_string} into an array for processing IFS=':' read -ra grids <<< "${grid_string}" -# Files needed by ${USHgfs}/interp_atmos_master.sh +# Files needed by ${USHglobal}/interp_atmos_master.sh MASTER_FILE="${COMIN_ATMOS_MASTER}/${PREFIX}master.${fhr3}.grib2" for ((nset = 1; nset <= downset; nset++)); do @@ -106,7 +106,7 @@ for ((nset = 1; nset <= downset; nset++)); do fi input_file="${tmpfile}_${iproc}" output_file_prefix="pgb2${grp}file_${fhr3}_${iproc}" - echo "${USHgfs}/interp_atmos_master.sh ${input_file} ${output_file_prefix} ${grid_string}" >> "${DATA}/cmdfile" + echo "${USHglobal}/interp_atmos_master.sh ${input_file} ${output_file_prefix} ${grid_string}" >> "${DATA}/cmdfile" # if at final record and have not reached the final processor then write echo's to # cmdfile for remaining processors @@ -119,7 +119,7 @@ for ((nset = 1; nset <= downset; nset++)); do done # for (( iproc = 1 ; iproc <= nproc ; iproc++ )); do # Run with MPMD or serial - "${USHgfs}/run_mpmd.sh" "${DATA}/cmdfile" && true + 
"${USHglobal}/run_mpmd.sh" "${DATA}/cmdfile" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit "FATAL ERROR: Some or all interpolations of the master grib file failed during MPMD execution!" @@ -170,7 +170,7 @@ if [[ "${FLXGF:-}" == "YES" ]]; then input_file="${FLUX_FILE}" output_file_prefix="sflux_${fhr3}" grid_string="1p00" - "${USHgfs}/interp_atmos_sflux.sh" "${input_file}" "${output_file_prefix}" "${grid_string}" && true + "${USHglobal}/interp_atmos_sflux.sh" "${input_file}" "${output_file_prefix}" "${grid_string}" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit "FATAL ERROR: Unable to interpolate the surface flux grib2 files!" diff --git a/dev/scripts/exglobal_atmos_sfcanl.sh b/dev/scripts/exglobal_atmos_sfcanl.sh index d0e7fdf2d1f..48134ef627c 100755 --- a/dev/scripts/exglobal_atmos_sfcanl.sh +++ b/dev/scripts/exglobal_atmos_sfcanl.sh @@ -22,9 +22,9 @@ # Derived base variables # Dependent Scripts and Executables -CYCLESH=${CYCLESH:-"${USHgfs}/global_cycle.sh"} -REGRIDSH=${REGRIDSH:-"${USHgfs}/regrid_gsiSfcIncr_to_tile.sh"} -export CYCLEXEC=${CYCLEXEC:-"${EXECgfs}/global_cycle"} +CYCLESH=${CYCLESH:-"${USHglobal}/global_cycle.sh"} +REGRIDSH=${REGRIDSH:-"${USHglobal}/regrid_gsiSfcIncr_to_tile.sh"} +export CYCLEXEC=${CYCLEXEC:-"${EXECglobal}/global_cycle"} NTHREADS_CYCLE=${NTHREADS_CYCLE:-24} APRUN_CYCLE=${APRUN_CYCLE:-${APRUN:-""}} @@ -103,8 +103,8 @@ export MAX_TASKS_CY="${ntiles}" # Copy fix files required by global_cycle to DATA just once for ((nn = 1; nn <= ntiles; nn++)); do - cpreq "${FIXgfs}/orog/${CASE}/${CASE}_grid.tile${nn}.nc" "${DATA}/fngrid.00${nn}" - cpreq "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${nn}.nc" "${DATA}/fnorog.00${nn}" + cpreq "${FIXglobal}/orog/${CASE}/${CASE}_grid.tile${nn}.nc" "${DATA}/fngrid.00${nn}" + cpreq "${FIXglobal}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${nn}.nc" "${DATA}/fnorog.00${nn}" done # Copy the NSST analysis file for global_cycle diff --git 
a/dev/scripts/exglobal_atmos_tropcy_qc_reloc.sh b/dev/scripts/exglobal_atmos_tropcy_qc_reloc.sh index f85bb2adc43..d84216ef874 100755 --- a/dev/scripts/exglobal_atmos_tropcy_qc_reloc.sh +++ b/dev/scripts/exglobal_atmos_tropcy_qc_reloc.sh @@ -43,7 +43,7 @@ if [[ "${PROCESS_TROPCY}" = 'YES' ]]; then #echo $PDY - "${USHgfs}/syndat_qctropcy.sh" "${cdate10}" + "${USHglobal}/syndat_qctropcy.sh" "${cdate10}" errsc=$? if [[ ${errsc} -ne 0 ]]; then echo "syndat_qctropcy.sh failed. exit" @@ -90,7 +90,7 @@ if [[ "${DO_RELOCATE}" = 'YES' ]]; then ################################################### export MP_LABELIO=${MP_LABELIO:-yes} - "${USHgfs}/tropcy_relocate.sh" "${cdate10}" + "${USHglobal}/tropcy_relocate.sh" "${cdate10}" export err=$? if [[ ${err} -ne 0 ]]; then diff --git a/dev/scripts/exglobal_atmos_upp.py b/dev/scripts/exglobal_atmos_upp.py index f87f06d2c95..531579241cc 100755 --- a/dev/scripts/exglobal_atmos_upp.py +++ b/dev/scripts/exglobal_atmos_upp.py @@ -18,7 +18,7 @@ def main(): upp = UPP(config) # Pull out all the configuration keys needed to run the rest of UPP steps - keys = ['HOMEgfs', 'DATA', 'current_cycle', 'RUN', 'NET', + keys = ['HOMEglobal', 'DATA', 'current_cycle', 'RUN', 'NET', 'COMIN_ATMOS_ANALYSIS', 'COMIN_ATMOS_HISTORY', 'COMOUT_ATMOS_MASTER', 'upp_run', 'APRUN_UPP', diff --git a/dev/scripts/exglobal_atmos_vminmon.sh b/dev/scripts/exglobal_atmos_vminmon.sh index aa399f58a3f..002384c30f1 100755 --- a/dev/scripts/exglobal_atmos_vminmon.sh +++ b/dev/scripts/exglobal_atmos_vminmon.sh @@ -42,15 +42,15 @@ fi #------------------------------------------------------------------ # Run the child sccripts. #------------------------------------------------------------------ -"${USHgfs}/minmon_xtrct_costs.pl" "${MINMON_SUFFIX}" "${PDY}" "${cyc}" "${gsistat}" +"${USHglobal}/minmon_xtrct_costs.pl" "${MINMON_SUFFIX}" "${PDY}" "${cyc}" "${gsistat}" rc_costs=$? 
echo "rc_costs = ${rc_costs}" -"${USHgfs}/minmon_xtrct_gnorms.pl" "${MINMON_SUFFIX}" "${PDY}" "${cyc}" "${gsistat}" +"${USHglobal}/minmon_xtrct_gnorms.pl" "${MINMON_SUFFIX}" "${PDY}" "${cyc}" "${gsistat}" rc_gnorms=$? echo "rc_gnorms = ${rc_gnorms}" -"${USHgfs}/minmon_xtrct_reduct.pl" "${MINMON_SUFFIX}" "${PDY}" "${cyc}" "${gsistat}" +"${USHglobal}/minmon_xtrct_reduct.pl" "${MINMON_SUFFIX}" "${PDY}" "${cyc}" "${gsistat}" rc_reduct=$? echo "rc_reduct = ${rc_reduct}" diff --git a/dev/scripts/exglobal_diag.sh b/dev/scripts/exglobal_diag.sh index 4d9aa5de94e..0ded405d0df 100755 --- a/dev/scripts/exglobal_diag.sh +++ b/dev/scripts/exglobal_diag.sh @@ -177,7 +177,7 @@ fi split -l "${tasks_per_node}" ./cmdfile cmdfile_part_ cmdfile_parts=$(ls cmdfile_part_*) for partfile in ${cmdfile_parts}; do - "${USHgfs}/run_mpmd.sh" "${partfile}" && true + "${USHglobal}/run_mpmd.sh" "${partfile}" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit "Failed to create one or more observation diagnostic files for ${partfile}!" diff --git a/dev/scripts/exglobal_enkf_ecen.sh b/dev/scripts/exglobal_enkf_ecen.sh index 556addce04d..9afb65a3ace 100755 --- a/dev/scripts/exglobal_enkf_ecen.sh +++ b/dev/scripts/exglobal_enkf_ecen.sh @@ -24,16 +24,16 @@ pwd=$(pwd) ntiles=${ntiles:-6} # Utilities -NCLEN=${NCLEN:-${USHgfs}/getncdimlen} +NCLEN=${NCLEN:-${USHglobal}/getncdimlen} # Scripts # Executables. 
-GETATMENSMEANEXEC=${GETATMENSMEANEXEC:-${EXECgfs}/getsigensmeanp_smooth.x} -GETSFCENSMEANEXEC=${GETSFCENSMEANEXEC:-${EXECgfs}/getsfcensmeanp.x} -RECENATMEXEC=${RECENATMEXEC:-${EXECgfs}/recentersigp.x} -CALCINCNEMSEXEC=${CALCINCNEMSEXEC:-${EXECgfs}/calc_increment_ens.x} -CALCINCNCEXEC=${CALCINCEXEC:-${EXECgfs}/calc_increment_ens_ncio.x} +GETATMENSMEANEXEC=${GETATMENSMEANEXEC:-${EXECglobal}/getsigensmeanp_smooth.x} +GETSFCENSMEANEXEC=${GETSFCENSMEANEXEC:-${EXECglobal}/getsfcensmeanp.x} +RECENATMEXEC=${RECENATMEXEC:-${EXECglobal}/recentersigp.x} +CALCINCNEMSEXEC=${CALCINCNEMSEXEC:-${EXECglobal}/calc_increment_ens.x} +CALCINCNCEXEC=${CALCINCEXEC:-${EXECglobal}/calc_increment_ens_ncio.x} # Files. OPREFIX=${OPREFIX:-""} @@ -64,14 +64,14 @@ else fi # global_chgres stuff -CHGRESNEMS=${CHGRESNEMS:-${EXECgfs}/enkf_chgres_recenter.x} -CHGRESNC=${CHGRESNC:-${EXECgfs}/enkf_chgres_recenter_nc.x} +CHGRESNEMS=${CHGRESNEMS:-${EXECglobal}/enkf_chgres_recenter.x} +CHGRESNC=${CHGRESNC:-${EXECglobal}/enkf_chgres_recenter_nc.x} NTHREADS_CHGRES=${NTHREADS_CHGRES:-24} APRUN_CHGRES=${APRUN_CHGRES:-""} # global_cycle stuff -CYCLESH=${CYCLESH:-${USHgfs}/global_cycle.sh} -export CYCLEXEC=${CYCLEXEC:-${EXECgfs}/global_cycle} +CYCLESH=${CYCLESH:-${USHglobal}/global_cycle.sh} +export CYCLEXEC=${CYCLEXEC:-${EXECglobal}/global_cycle} APRUN_CYCLE=${APRUN_CYCLE:-${APRUN:-""}} NTHREADS_CYCLE=${NTHREADS_CYCLE:-${NTHREADS:-1}} export CYCLVARS=${CYCLVARS:-"FSNOL=-2.,FSNOS=99999.,"} @@ -219,7 +219,7 @@ for FHR in $(seq "${FHMIN}" "${FHOUT}" "${FHMAX}"); do else cpreq "${ATMANL_GSI}" atmanl_gsi cpreq "${ATMANL_GSI_ENSRES}" atmanl_gsi_ensres - SIGLEVEL="${SIGLEVEL:-"${FIXgfs}/am/global_hyblev.l${LEVS}.txt"}" + SIGLEVEL="${SIGLEVEL:-"${FIXglobal}/am/global_hyblev.l${LEVS}.txt"}" ${NLN} "${CHGRESNC}" chgres.x chgresnml=chgres_nc_gauss.nml nmltitle=chgres diff --git a/dev/scripts/exglobal_enkf_select_obs.sh b/dev/scripts/exglobal_enkf_select_obs.sh index 921057bb3f9..744ab05f157 100755 --- 
a/dev/scripts/exglobal_enkf_select_obs.sh +++ b/dev/scripts/exglobal_enkf_select_obs.sh @@ -21,7 +21,7 @@ pwd=$(pwd) # Scripts. -ANALYSISSH=${ANALYSISSH:-${SCRgfs}/exglobal_atmos_analysis.sh} +ANALYSISSH=${ANALYSISSH:-${SCRglobal}/exglobal_atmos_analysis.sh} # Select obs export RUN_SELECT=${RUN_SELECT:-"YES"} diff --git a/dev/scripts/exglobal_enkf_sfc.sh b/dev/scripts/exglobal_enkf_sfc.sh index 50affb233ea..f78e3beb9dd 100755 --- a/dev/scripts/exglobal_enkf_sfc.sh +++ b/dev/scripts/exglobal_enkf_sfc.sh @@ -28,7 +28,7 @@ export CASE=${CASE:-384} ntiles=${ntiles:-6} # Utilities -NCLEN=${NCLEN:-${USHgfs}/getncdimlen} +NCLEN=${NCLEN:-${USHglobal}/getncdimlen} # Scripts @@ -55,9 +55,9 @@ fi DOIAU=${DOIAU_ENKF:-"NO"} # Global_cycle stuff -CYCLESH=${CYCLESH:-${USHgfs}/global_cycle.sh} -REGRIDSH=${REGRIDSH:-"${USHgfs}/regrid_gsiSfcIncr_to_tile.sh"} -export CYCLEXEC=${CYCLEXEC:-${EXECgfs}/global_cycle} +CYCLESH=${CYCLESH:-${USHglobal}/global_cycle.sh} +REGRIDSH=${REGRIDSH:-"${USHglobal}/regrid_gsiSfcIncr_to_tile.sh"} +export CYCLEXEC=${CYCLEXEC:-${EXECglobal}/global_cycle} APRUN_CYCLE=${APRUN_CYCLE:-${APRUN:-""}} NTHREADS_CYCLE=${NTHREADS_CYCLE:-${NTHREADS:-1}} export CYCLVARS=${CYCLVARS:-"FSNOL=-2.,FSNOS=99999.,"} @@ -200,8 +200,8 @@ if [[ "${DOIAU}" == "YES" ]]; then cpreq "${sfcdata_dir}/${bPDY}.${bcyc}0000.${snow_prefix}sfc_data.tile${n}.nc" \ "${DATA}/fnbgsi.${cmem}" cpreq "${DATA}/fnbgsi.${cmem}" "${DATA}/fnbgso.${cmem}" - cpreq "${FIXgfs}/orog/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" - cpreq "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" + cpreq "${FIXglobal}/orog/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" + cpreq "${FIXglobal}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" if [[ "${DO_GSISOILDA}" == "YES" ]] && [[ "${GCYCLE_DO_SOILINCR}" == ".true." 
]]; then cpreq "${COMIN_ATMOS_ANALYSIS_MEM}/increment.sfc.i00${LFHR}.tile${n}.nc" \ @@ -279,8 +279,8 @@ if [[ "${DOSFCANL_ENKF}" == "YES" ]]; then cpreq "${sfcdata_dir}/${PDY}.${cyc}0000.${snow_prefix}sfc_data.tile${n}.nc" \ "${DATA}/fnbgsi.${cmem}" cpreq "${DATA}/fnbgsi.${cmem}" "${DATA}/fnbgso.${cmem}" - cpreq "${FIXgfs}/orog/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" - cpreq "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" + cpreq "${FIXglobal}/orog/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" + cpreq "${FIXglobal}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" if [[ "${DO_GSISOILDA}" == "YES" ]] && [[ "${GCYCLE_DO_SOILINCR}" == ".true." ]]; then cpreq "${COMIN_ATMOS_ANALYSIS_MEM}/${APREFIX}increment.sfc.i00${LFHR}.tile${n}.nc" \ diff --git a/dev/scripts/exglobal_enkf_update.sh b/dev/scripts/exglobal_enkf_update.sh index 32f39a9b34c..b8b4cecf2b6 100755 --- a/dev/scripts/exglobal_enkf_update.sh +++ b/dev/scripts/exglobal_enkf_update.sh @@ -21,12 +21,12 @@ cd "${DATA}" || exit 1 # Utilities -NCLEN=${NCLEN:-${USHgfs}/getncdimlen} +NCLEN=${NCLEN:-${USHglobal}/getncdimlen} APRUN_ENKF=${APRUN_ENKF:-${APRUN:-""}} NTHREADS_ENKF=${NTHREADS_ENKF:-${NTHREADS:-1}} # Executables -ENKFEXEC=${ENKFEXEC:-${EXECgfs}/enkf.x} +ENKFEXEC=${ENKFEXEC:-${EXECglobal}/enkf.x} APREFIX=${APREFIX:-${RUN}.t${cyc}z.} GPREFIX=${GPREFIX:-${RUN}.t${GDATE:8:2}z.} @@ -74,14 +74,14 @@ else fi LATA_ENKF=${LATA_ENKF:-${LATB_ENKF}} LONA_ENKF=${LONA_ENKF:-${LONB_ENKF}} -SATANGL=${SATANGL:-${FIXgfs}/gsi/global_satangbias.txt} -SATINFO=${SATINFO:-${FIXgfs}/gsi/global_satinfo.txt} -CONVINFO=${CONVINFO:-${FIXgfs}/gsi/global_convinfo.txt} -OZINFO=${OZINFO:-${FIXgfs}/gsi/global_ozinfo.txt} -SCANINFO=${SCANINFO:-${FIXgfs}/gsi/global_scaninfo.txt} -HYBENSINFO=${HYBENSINFO:-${FIXgfs}/gsi/global_hybens_info.l${LEVS_ENKF}.txt} -ANAVINFO=${ANAVINFO:-${FIXgfs}/gsi/global_anavinfo.l${LEVS_ENKF}.txt} 
-VLOCALEIG=${VLOCALEIG:-${FIXgfs}/gsi/vlocal_eig_l${LEVS_ENKF}.dat} +SATANGL=${SATANGL:-${FIXglobal}/gsi/global_satangbias.txt} +SATINFO=${SATINFO:-${FIXglobal}/gsi/global_satinfo.txt} +CONVINFO=${CONVINFO:-${FIXglobal}/gsi/global_convinfo.txt} +OZINFO=${OZINFO:-${FIXglobal}/gsi/global_ozinfo.txt} +SCANINFO=${SCANINFO:-${FIXglobal}/gsi/global_scaninfo.txt} +HYBENSINFO=${HYBENSINFO:-${FIXglobal}/gsi/global_hybens_info.l${LEVS_ENKF}.txt} +ANAVINFO=${ANAVINFO:-${FIXglobal}/gsi/global_anavinfo.l${LEVS_ENKF}.txt} +VLOCALEIG=${VLOCALEIG:-${FIXglobal}/gsi/vlocal_eig_l${LEVS_ENKF}.dat} ENKF_SUFFIX="s" if [[ "${SMOOTH_ENKF:-YES}" == "NO" ]]; then ENKF_SUFFIX="" @@ -95,17 +95,17 @@ cpreq "${HYBENSINFO}" hybens_info cpreq "${ANAVINFO}" anavinfo cpreq "${VLOCALEIG}" vlocal_eig.dat if [[ "${SATINFO}" == "generate" ]]; then - "${USHgfs}/create_gsi_info.sh" sat "${PDY}${cyc}" "${DATA}" + "${USHglobal}/create_gsi_info.sh" sat "${PDY}${cyc}" "${DATA}" else cpreq "${SATINFO}" satinfo fi if [[ "${CONVINFO}" == "generate" ]]; then - "${USHgfs}/create_gsi_info.sh" conv "${PDY}${cyc}" "${DATA}" "${USE_2M_OBS}" + "${USHglobal}/create_gsi_info.sh" conv "${PDY}${cyc}" "${DATA}" "${USE_2M_OBS}" else cpreq "${CONVINFO}" convinfo fi if [[ "${OZINFO}" == "generate" ]]; then - "${USHgfs}/create_gsi_info.sh" oz "${PDY}${cyc}" "${DATA}" + "${USHglobal}/create_gsi_info.sh" oz "${PDY}${cyc}" "${DATA}" else cpreq "${OZINFO}" ozinfo fi diff --git a/dev/scripts/exglobal_extractvars.sh b/dev/scripts/exglobal_extractvars.sh index 952bf83f694..cb0514dc9e2 100755 --- a/dev/scripts/exglobal_extractvars.sh +++ b/dev/scripts/exglobal_extractvars.sh @@ -9,12 +9,12 @@ # Main body starts here ####################### -source "${USHgfs}/extractvars_tools.sh" +source "${USHglobal}/extractvars_tools.sh" # Scripts used -EXTRCTVARA="${USHgfs}/atmos_extractvars.sh" -EXTRCTVARO="${USHgfs}/ocnice_extractvars.sh" -EXTRCTVARW="${USHgfs}/wave_extractvars.sh" +EXTRCTVARA="${USHglobal}/atmos_extractvars.sh" 
+EXTRCTVARO="${USHglobal}/ocnice_extractvars.sh" +EXTRCTVARW="${USHglobal}/wave_extractvars.sh" # Set FHMAX_HF_GFS equal to FHMAX_GFS if FHMAX_HF_GFS is greater than FHMAX_GFS if ((FHMAX_GFS < FHMAX_HF_GFS)); then diff --git a/dev/scripts/exglobal_fetch.py b/dev/scripts/exglobal_fetch.py index 161851c05ee..68af12cf92f 100755 --- a/dev/scripts/exglobal_fetch.py +++ b/dev/scripts/exglobal_fetch.py @@ -27,7 +27,7 @@ def main(): fetch = Fetch(config) # Pull out all the configuration keys needed to run the fetch step - keys = ['current_cycle', 'previous_cycle', 'RUN', 'PDY', 'PARMgfs', 'PSLOT', 'ROTDIR', + keys = ['current_cycle', 'previous_cycle', 'RUN', 'PDY', 'PARMglobal', 'PSLOT', 'ROTDIR', 'FETCH_YAML_TMPL', 'FETCHDIR', 'ntiles', 'DATA', 'DATAROOT', 'waveGRD', 'gdas_version'] fetch_dict = AttrDict() diff --git a/dev/scripts/exglobal_forecast.sh b/dev/scripts/exglobal_forecast.sh index 6367cefdb62..fcb0b8b44fb 100755 --- a/dev/scripts/exglobal_forecast.sh +++ b/dev/scripts/exglobal_forecast.sh @@ -41,16 +41,16 @@ ## 1. computing grid, ${FIXorog}/$CASE/${CASE}_grid.tile${n}.nc ## 2. orography data, ${FIXorog}/$CASE/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc ## 3. mosaic data, ${FIXorog}/$CASE/${CASE}_mosaic.nc -## 4. Global O3 data, ${FIXgfs}/am/${O3FORC} -## 5. Global H2O data, ${FIXgfs}/am/${H2OFORC} -## 6. Global solar constant data, ${FIXgfs}/am/global_solarconstant_noaa_an.txt -## 7. Global surface emissivity, ${FIXgfs}/am/global_sfc_emissivity_idx.txt -## 8. Global CO2 historical data, ${FIXgfs}/am/global_co2historicaldata_glob.txt -## 8. Global CO2 monthly data, ${FIXgfs}/am/co2monthlycyc.txt -## 10. Additional global CO2 data, ${FIXgfs}/am/fix_co2_proj/global_co2historicaldata +## 4. Global O3 data, ${FIXglobal}/am/${O3FORC} +## 5. Global H2O data, ${FIXglobal}/am/${H2OFORC} +## 6. Global solar constant data, ${FIXglobal}/am/global_solarconstant_noaa_an.txt +## 7. Global surface emissivity, ${FIXglobal}/am/global_sfc_emissivity_idx.txt +## 8. 
+## 9. Global CO2 monthly data, ${FIXglobal}/am/co2monthlycyc.txt
if [[ ${err} -ne 0 ]]; then diff --git a/dev/scripts/exglobal_globus_arch.py b/dev/scripts/exglobal_globus_arch.py index 19698faa4c8..42b6b7cc38f 100755 --- a/dev/scripts/exglobal_globus_arch.py +++ b/dev/scripts/exglobal_globus_arch.py @@ -17,8 +17,8 @@ def main(): # Instantiate the globus object globus = GlobusHpss(config) - keys = ['STAGE_DIR', 'current_cycle', 'RUN', 'PDY', 'HOMEgfs', 'sven_dropbox', - 'doorman_gendel', 'DATASETS_YAML', 'PARMgfs', 'COMIN_CONF', 'KEEPDATA', + keys = ['STAGE_DIR', 'current_cycle', 'RUN', 'PDY', 'HOMEglobal', 'sven_dropbox', + 'doorman_gendel', 'DATASETS_YAML', 'PARMglobal', 'COMIN_CONF', 'KEEPDATA', 'jobid', 'hpss_target_dir', 'server_home', 'SERVER_NAME', 'DOORMAN_ROOT', 'CLIENT_GLOBUS_UUID', 'SERVER_GLOBUS_UUID', 'PSLOT'] diff --git a/dev/scripts/exglobal_globus_earc.py b/dev/scripts/exglobal_globus_earc.py index e528483c575..6667c381b73 100755 --- a/dev/scripts/exglobal_globus_earc.py +++ b/dev/scripts/exglobal_globus_earc.py @@ -17,8 +17,8 @@ def main(): # Instantiate the globus object globus = GlobusHpss(config) - keys = ['STAGE_DIR', 'current_cycle', 'RUN', 'PDY', 'HOMEgfs', 'sven_dropbox', - 'doorman_gendel', 'DATASETS_YAML', 'PARMgfs', 'COMIN_CONF', 'KEEPDATA', + keys = ['STAGE_DIR', 'current_cycle', 'RUN', 'PDY', 'HOMEglobal', 'sven_dropbox', + 'doorman_gendel', 'DATASETS_YAML', 'PARMglobal', 'COMIN_CONF', 'KEEPDATA', 'jobid', 'hpss_target_dir', 'server_home', 'SERVER_NAME', 'DOORMAN_ROOT', 'CLIENT_GLOBUS_UUID', 'SERVER_GLOBUS_UUID', 'PSLOT', 'ENSGRP'] diff --git a/dev/scripts/exglobal_oceanice_products.py b/dev/scripts/exglobal_oceanice_products.py index bb03840842a..4a6e784bb2e 100755 --- a/dev/scripts/exglobal_oceanice_products.py +++ b/dev/scripts/exglobal_oceanice_products.py @@ -18,7 +18,7 @@ def main(): oceanice = OceanIceProducts(config) # Pull out all the configuration keys needed to run the rest of steps - keys = ['HOMEgfs', 'DATA', 'current_cycle', 'RUN', 'NET', + keys = ['HOMEglobal', 'DATA', 'current_cycle', 
'RUN', 'NET', f'COMIN_{oceanice.task_config.component.upper()}_HISTORY', f'COMOUT_{oceanice.task_config.component.upper()}_GRIB', 'APRUN_OCNICEPOST', diff --git a/dev/scripts/exglobal_prep_sfc.sh b/dev/scripts/exglobal_prep_sfc.sh index ad0fdfa92a1..9bb8ee3edb9 100755 --- a/dev/scripts/exglobal_prep_sfc.sh +++ b/dev/scripts/exglobal_prep_sfc.sh @@ -67,7 +67,7 @@ export IMS_FILE="${COMINobsproc}/${RUN}.t${cyc}z.imssnow96.grib2" export FIVE_MIN_ICE_FILE="${COMINobsproc}/${RUN}.t${cyc}z.seaice.5min.grib2" # landmask file for global 5-minute data (grib 2) -export FIVE_MIN_ICE_MASK_FILE="${FIXgfs}/am/emcsfc_gland5min.grib2" +export FIVE_MIN_ICE_MASK_FILE="${FIXglobal}/am/emcsfc_gland5min.grib2" # afwa snow depth data export AFWA_GLOBAL_FILE="${COMINobsproc}/${RUN}.t${cyc}z.snow.usaf.grib2" @@ -81,7 +81,7 @@ export BLENDED_ICE_FILE="${RUN}.t${cyc}z.seaice.5min.blend.grb" export BLENDED_ICE_FILE_PREV="${COMINobsproc_PREV}/${RUN}.t${gcyc}z.seaice.5min.blend.grb" # the emcsfc_ice_blend executable -export BLENDICEEXEC=${BLENDICEEXEC:-${EXECgfs}/emcsfc_ice_blend} +export BLENDICEEXEC=${BLENDICEEXEC:-${EXECglobal}/emcsfc_ice_blend} # standard output file export pgmout=${pgmout:-OUTPUT} @@ -91,7 +91,7 @@ export pgmout=${pgmout:-OUTPUT} #----------------------------------------------------------------------- echo "Create blended ice data." -"${USHgfs}/prep_sfc_ice_blend.sh" +"${USHglobal}/prep_sfc_ice_blend.sh" export err=$? #----------------------------------------------------------------------- @@ -120,20 +120,20 @@ fi # and (if a gdas run) enkf gaussian grid. 
#----------------------------------------------------------------------- -export SNOW2MDLEXEC="${EXECgfs}/emcsfc_snow2mdl" +export SNOW2MDLEXEC="${EXECglobal}/emcsfc_snow2mdl" LONB_CASE=$((4 * ${CASE:1})) LATB_CASE=$((2 * ${CASE:1})) -export MODEL_SLMASK_FILE=${SLMASK:-${FIXgfs}/am/global_slmask.t${CASE:1}.${LONB_CASE}.${LATB_CASE}.grb} -export MODEL_LATITUDE_FILE=${MDL_LATS:-${FIXgfs}/am/global_latitudes.t${CASE:1}.${LONB_CASE}.${LATB_CASE}.grb} -export MODEL_LONGITUDE_FILE=${MDL_LONS:-${FIXgfs}/am/global_longitudes.t${CASE:1}.${LONB_CASE}.${LATB_CASE}.grb} -export GFS_LONSPERLAT_FILE=${LONSPERLAT:-${FIXgfs}/am/global_lonsperlat.t${CASE:1}.${LONB_CASE}.${LATB_CASE}.txt} +export MODEL_SLMASK_FILE=${SLMASK:-${FIXglobal}/am/global_slmask.t${CASE:1}.${LONB_CASE}.${LATB_CASE}.grb} +export MODEL_LATITUDE_FILE=${MDL_LATS:-${FIXglobal}/am/global_latitudes.t${CASE:1}.${LONB_CASE}.${LATB_CASE}.grb} +export MODEL_LONGITUDE_FILE=${MDL_LONS:-${FIXglobal}/am/global_longitudes.t${CASE:1}.${LONB_CASE}.${LATB_CASE}.grb} +export GFS_LONSPERLAT_FILE=${LONSPERLAT:-${FIXglobal}/am/global_lonsperlat.t${CASE:1}.${LONB_CASE}.${LATB_CASE}.txt} export MODEL_SNOW_FILE=${RUN}.t${cyc}z.snogrb_t${CASE:1}.${LONB_CASE}.${LATB_CASE} export MODEL_SNOW_FILE_PREV=${COMINobsproc_PREV}/${RUN}.t${gcyc}z.snogrb_t${CASE:1}.${LONB_CASE}.${LATB_CASE} echo "Create ${CASE} snow data." -"${USHgfs}/prep_sfc_snow.sh" +"${USHglobal}/prep_sfc_snow.sh" export err=$? 
#---------------------------------------------------------------------- @@ -166,15 +166,15 @@ if [[ "${EUPD_CYC}" = "${RUN}" ]] || [[ "${EUPD_CYC^^}" = "BOTH" ]]; then LONB_CASE_ENS=$((4 * ${CASE_ENS:1})) LATB_CASE_ENS=$((2 * ${CASE_ENS:1})) - export MODEL_SLMASK_FILE=${SLMASK_ENKF:-${FIXgfs}/am/global_slmask.t${CASE_ENS:1}.${LONB_CASE_ENS}.${LATB_CASE_ENS}.grb} - export MODEL_LATITUDE_FILE=${MDL_LATS_ENKF:-${FIXgfs}/am/global_latitudes.t${CASE_ENS:1}.${LONB_CASE_ENS}.${LATB_CASE_ENS}.grb} - export MODEL_LONGITUDE_FILE=${MDL_LONS_ENKF:-${FIXgfs}/am/global_longitudes.t${CASE_ENS:1}.${LONB_CASE_ENS}.${LATB_CASE_ENS}.grb} - export GFS_LONSPERLAT_FILE=${LONSPERLAT_ENKF:-${FIXgfs}/am/global_lonsperlat.t${CASE_ENS:1}.${LONB_CASE_ENS}.${LATB_CASE_ENS}.txt} + export MODEL_SLMASK_FILE=${SLMASK_ENKF:-${FIXglobal}/am/global_slmask.t${CASE_ENS:1}.${LONB_CASE_ENS}.${LATB_CASE_ENS}.grb} + export MODEL_LATITUDE_FILE=${MDL_LATS_ENKF:-${FIXglobal}/am/global_latitudes.t${CASE_ENS:1}.${LONB_CASE_ENS}.${LATB_CASE_ENS}.grb} + export MODEL_LONGITUDE_FILE=${MDL_LONS_ENKF:-${FIXglobal}/am/global_longitudes.t${CASE_ENS:1}.${LONB_CASE_ENS}.${LATB_CASE_ENS}.grb} + export GFS_LONSPERLAT_FILE=${LONSPERLAT_ENKF:-${FIXglobal}/am/global_lonsperlat.t${CASE_ENS:1}.${LONB_CASE_ENS}.${LATB_CASE_ENS}.txt} export MODEL_SNOW_FILE=${RUN}.t${cyc}z.snogrb_t${CASE_ENS:1}.${LONB_CASE_ENS}.${LATB_CASE_ENS} export MODEL_SNOW_FILE_PREV=${COMINobsproc_PREV}/${RUN}.t${gcyc}z.snogrb_t${CASE_ENS:1}.${LONB_CASE_ENS}.${LATB_CASE_ENS} echo "Create enkf snow data." - "${USHgfs}/prep_sfc_snow.sh" + "${USHglobal}/prep_sfc_snow.sh" export err=$? 
#----------------------------------------------------------------------- diff --git a/dev/test/f90nmlcmp.sh b/dev/test/f90nmlcmp.sh index 14671e876b9..db298cd2a07 100755 --- a/dev/test/f90nmlcmp.sh +++ b/dev/test/f90nmlcmp.sh @@ -2,15 +2,15 @@ # Compare two F90 namelists (forward and backward) -HOMEgfs=$(cd "$(dirname "${BASH_SOURCE[0]}")" && git rev-parse --show-toplevel) -declare -rx HOMEgfs +HOMEglobal=$(cd "$(dirname "${BASH_SOURCE[0]}")" && git rev-parse --show-toplevel) +declare -rx HOMEglobal -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run file1=${1:?} file2=${2:?} -"${HOMEgfs}/dev/ush/compare_f90nml.py" "${file1}" "${file2}" +"${HOMEglobal}/dev/ush/compare_f90nml.py" "${file1}" "${file2}" echo " " -"${HOMEgfs}/dev/ush/compare_f90nml.py" "${file2}" "${file1}" +"${HOMEglobal}/dev/ush/compare_f90nml.py" "${file2}" "${file1}" echo " " diff --git a/dev/test/g2cmp.sh b/dev/test/g2cmp.sh index e8ff0cc5e59..b2069b06e4a 100755 --- a/dev/test/g2cmp.sh +++ b/dev/test/g2cmp.sh @@ -3,13 +3,13 @@ # Compare two grib2 files with wgrib2 # The files must have the same fields in the same order -HOMEgfs=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}")")" && git rev-parse --show-toplevel) -declare -rx HOMEgfs +HOMEglobal=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}")")" && git rev-parse --show-toplevel) +declare -rx HOMEglobal file1=${1:?} file2=${2:?} -source "${HOMEgfs}/dev/ush/load_modules.sh" run +source "${HOMEglobal}/dev/ush/load_modules.sh" run # Use wgrib2 to compute correlations and print any record that does not have corr=1 for mismatch wgrib2 "${file2}" -var -lev -rpn "sto_1" -import_grib "${file1}" -rpn "rcl_1:print_corr:print_rms" | grep -v "rpn_corr=1" diff --git a/dev/test/nccmp.sh b/dev/test/nccmp.sh index 6e77cd314fb..eb3fa834cbf 100755 --- a/dev/test/nccmp.sh +++ b/dev/test/nccmp.sh @@ -1,9 +1,9 @@ #! 
+./convert_to_net.sh <net> [target_directory]
+./convert_from_net.sh <net> [target_directory]
+# Usage: convert_from_net.sh <net> [target_directory]
-d "${TARGET_DIR}" ]]; then
+  echo "ERROR: Target directory ${TARGET_DIR} does not exist"
+  exit 1
+fi
+
+echo "Converting ${NET}-specific variables back to global-workflow standard in ${TARGET_DIR}"
+
+# List of patterns to convert (reverse of convert_to_net.sh)
+declare -A patterns=(
+  ["HOME${NET}"]="HOMEglobal"
+  ["PARM${NET}"]="PARMglobal"
+  ["USH${NET}"]="USHglobal"
+  ["SCR${NET}"]="SCRglobal"
+  ["EXEC${NET}"]="EXECglobal"
+  ["FIX${NET}"]="FIXglobal"
+)
+
+# Find all relevant files
+file_list=$(find "${TARGET_DIR}" \
+  -type d \( -name .git -o -name sorc -o -name exec -o -name lib -o -name fix \) -prune -o \
+  -type f \( -name "*.sh" -o -name "*.bash" -o -name "*.py" -o -name "*.env" -o -name "*.config" -o -name "*.ecf" -o -name "J*" -o -name "ex*" \) -print)
+
+# Perform the replacements
+for file in ${file_list}; do
+  if [[ -f "${file}" ]]; then
+    for pattern in "${!patterns[@]}"; do
+      replacement="${patterns[$pattern]}"
+      sed -i "s/\b${pattern}\b/${replacement}/g" "${file}"
+    done
+    echo "Processed: ${file}"
+  fi
+done
+
+echo "Conversion complete!"
+echo "Restored global-workflow development variables"
diff --git a/dev/ush/convert_to_net.sh b/dev/ush/convert_to_net.sh
new file mode 100755
index 00000000000..2ad0d1f434e
--- /dev/null
+++ b/dev/ush/convert_to_net.sh
@@ -0,0 +1,62 @@
+#!/bin/bash
+# convert_to_net.sh
+# Script to convert HOMEglobal, PARMglobal, etc. to HOME${NET}, PARM${NET}, etc.
+# for NCO operational handoff
+#
+# Usage: convert_to_net.sh NET [target_directory]
+#
+# Example: convert_to_net.sh gfs /path/to/deployment
+#
+# This script performs selective search/replace of standard EE2 variables:
+# HOMEglobal -> HOME${NET}
+# PARMglobal -> PARM${NET}
+# USHglobal -> USH${NET}
+# SCRglobal -> SCR${NET}
+# EXECglobal -> EXEC${NET}
+# FIXglobal -> FIX${NET}
+
+set -eux
+
+if [[ $# -lt 1 ]]; then
+  echo "ERROR: NET value required"
+  echo "Usage: $0 NET [target_directory]"
+  exit 1
+fi
+
+NET="$1"
+TARGET_DIR="${2:-.}"
+
+if [[ ! 
-d "${TARGET_DIR}" ]]; then + echo "ERROR: Target directory ${TARGET_DIR} does not exist" + exit 1 +fi + +echo "Converting global-workflow standard variables to ${NET} in ${TARGET_DIR}" + +# Find all shell scripts, Python files, and configuration files +# Exclude .git, sorc/*, exec/*, lib/*, fix/* to avoid modifying compiled/static content +file_list=$(find "${TARGET_DIR}" \ + -type d \( -name .git -o -name sorc -o -name exec -o -name lib -o -name fix \) -prune -o \ + -type f \( -name "*.sh" -o -name "*.bash" -o -name "*.py" -o -name "*.env" -o -name "*.config" -o -name "*.ecf" -o -name "J*" -o -name "ex*" \) -print) + +# Perform the replacements using word boundaries +echo "Performing replacements..." +for file in ${file_list}; do + if [[ -f "${file}" ]]; then + sed -i "s/\bHOMEglobal\b/HOME${NET}/g" "${file}" + sed -i "s/\bPARMglobal\b/PARM${NET}/g" "${file}" + sed -i "s/\bUSHglobal\b/USH${NET}/g" "${file}" + sed -i "s/\bSCRglobal\b/SCR${NET}/g" "${file}" + sed -i "s/\bEXECglobal\b/EXEC${NET}/g" "${file}" + sed -i "s/\bFIXglobal\b/FIX${NET}/g" "${file}" + fi +done + +echo "Conversion complete!" +echo "Converted global-workflow variables to ${NET}-specific variables" +echo "" +echo "Files processed: $(echo "${file_list}" | wc -w)" +echo "" +echo "IMPORTANT: Before handoff to NCO, verify the changes with:" +echo " git diff | head -100" +echo " bash -n " diff --git a/dev/ush/gw_setup.sh b/dev/ush/gw_setup.sh index 261e27a2adc..0dcdf2a35d7 100755 --- a/dev/ush/gw_setup.sh +++ b/dev/ush/gw_setup.sh @@ -8,18 +8,18 @@ # This script should be SOURCED to properly setup the environment. 
# -# Determine if HOMEgfs is already set +# Determine if HOMEglobal is already set unset_homegfs=NO -if [[ -z "${HOMEgfs+x}" ]]; then +if [[ -z "${HOMEglobal+x}" ]]; then script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)" - HOMEgfs=$(cd "${script_dir}" && git rev-parse --show-toplevel) - export HOMEgfs + HOMEglobal=$(cd "${script_dir}" && git rev-parse --show-toplevel) + export HOMEglobal unset_homegfs=YES fi -source "${HOMEgfs}/ush/detect_machine.sh" -source "${HOMEgfs}/ush/module-setup.sh" +source "${HOMEglobal}/ush/detect_machine.sh" +source "${HOMEglobal}/ush/module-setup.sh" -module use "${HOMEgfs}/modulefiles" +module use "${HOMEglobal}/modulefiles" module load "gw_setup.${MACHINE_ID}" err=$? if [[ "${err}" -ne 0 ]]; then @@ -27,15 +27,15 @@ if [[ "${err}" -ne 0 ]]; then exit 1 fi -# Set up the PYTHONPATH to include wxflow from HOMEgfs -if [[ -d "${HOMEgfs}/sorc/wxflow/src" ]]; then - PYTHONPATH="${HOMEgfs}/sorc/wxflow/src${PYTHONPATH:+:${PYTHONPATH}}" +# Set up the PYTHONPATH to include wxflow from HOMEglobal +if [[ -d "${HOMEglobal}/sorc/wxflow/src" ]]; then + PYTHONPATH="${HOMEglobal}/sorc/wxflow/src${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH fi # Source rocoto helper functions for use in the global-workflow -source "${HOMEgfs}/dev/ush/rocoto_helpers.sh" +source "${HOMEglobal}/dev/ush/rocoto_helpers.sh" if [[ ${unset_homegfs} == "YES" ]]; then - unset HOMEgfs + unset HOMEglobal fi diff --git a/dev/ush/load_modules.sh b/dev/ush/load_modules.sh index 673122d95d0..9cb32dfe157 100755 --- a/dev/ush/load_modules.sh +++ b/dev/ush/load_modules.sh @@ -34,16 +34,16 @@ fi # Setup runtime environment by loading modules ulimit_s=$(ulimit -S -s) -# Test if HOMEgfs is defined. If not, then try to determine it with git rev-parse +# Test if HOMEglobal is defined. If not, then try to determine it with git rev-parse _unset_homegfs="NO" -if [[ -z ${HOMEgfs+x} ]]; then - echo "INFO: HOMEgfs is not defined. 
Attempting to find the global-workflow root directory" - # HOMEgfs will be removed from the environment at the end of this script +if [[ -z ${HOMEglobal+x} ]]; then + echo "INFO: HOMEglobal is not defined. Attempting to find the global-workflow root directory" + # HOMEglobal will be removed from the environment at the end of this script _unset_homegfs="YES" script_dir=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &> /dev/null && pwd) - HOMEgfs=$(cd "${script_dir}" && git rev-parse --show-toplevel) - export HOMEgfs + HOMEglobal=$(cd "${script_dir}" && git rev-parse --show-toplevel) + export HOMEglobal err=$? if [[ ${err} -ne 0 ]]; then is_git_dir=$(cd -- "${script_dir}" &> /dev/null && git rev-parse --is-inside-work-tree) @@ -59,14 +59,14 @@ if [[ -z ${HOMEgfs+x} ]]; then fi # Find module command and purge: -source "${HOMEgfs}/ush/detect_machine.sh" -source "${HOMEgfs}/ush/module-setup.sh" +source "${HOMEglobal}/ush/detect_machine.sh" +source "${HOMEglobal}/ush/module-setup.sh" # Handle different module types case "${MODULE_TYPE}" in "ufswm") # UFS Weather Model modules - special handling - module use "${HOMEgfs}/sorc/ufs_model.fd/modulefiles" + module use "${HOMEglobal}/sorc/ufs_model.fd/modulefiles" module load "ufs_${MACHINE_ID}.intel" export err=$? 
if [[ ${err} -ne 0 ]]; then @@ -84,7 +84,7 @@ case "${MODULE_TYPE}" in module load wgrib2 else export UTILROOT=${prod_util_ROOT} - source "${HOMEgfs}/versions/run.ver" + source "${HOMEglobal}/versions/run.ver" module load "wgrib2/${wgrib2_ver}" fi export WGRIB2=wgrib2 @@ -95,7 +95,7 @@ case "${MODULE_TYPE}" in "ufsda") # UFSDA modules - special handling - module use "${HOMEgfs}/sorc/gdas.cd/modulefiles" + module use "${HOMEglobal}/sorc/gdas.cd/modulefiles" case "${MACHINE_ID}" in "hera" | "orion" | "hercules" | "wcoss2" | "gaeac5" | "gaeac6" | "ursa" | "noaacloud") @@ -103,7 +103,7 @@ case "${MODULE_TYPE}" in if [[ "${MACHINE_ID}" == "wcoss2" ]]; then export LMOD_TMOD_FIND_FIRST=yes # TODO: Add path to GDASApp libraries and cray-mpich as temporary patches - export LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:${HOMEgfs}/sorc/gdas.cd/build/lib" + export LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:${HOMEglobal}/sorc/gdas.cd/build/lib" # TODO: Remove LD_LIBRARY_PATH line as soon as permanent solution is available export LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:/opt/cray/pe/mpich/8.1.29/ofi/intel/2022.1/lib" fi @@ -149,8 +149,8 @@ case "${MODULE_TYPE}" in # setup python path for ioda utilities # TODO: a better solution should be created for setting paths to package python scripts # shellcheck disable=SC2311 - pyiodaPATH="${HOMEgfs}/sorc/gdas.cd/build/lib/python${PYTHON_VERSION}/" - pybufrPATH="${HOMEgfs}/sorc/gdas.cd/build/lib/python${PYTHON_VERSION}/site-packages/" + pyiodaPATH="${HOMEglobal}/sorc/gdas.cd/build/lib/python${PYTHON_VERSION}/" + pybufrPATH="${HOMEglobal}/sorc/gdas.cd/build/lib/python${PYTHON_VERSION}/site-packages/" PYTHONPATH="${pyiodaPATH}:${pybufrPATH}${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH ;; @@ -158,14 +158,14 @@ case "${MODULE_TYPE}" in "run" | "gsi" | "verif" | "setup" | "upp") # Test that the version file exists - if [[ ! -f "${HOMEgfs}/versions/run.ver" ]]; then - echo "FATAL ERROR: ${HOMEgfs}/versions/run.ver does not exist!" + if [[ ! 
-f "${HOMEglobal}/versions/run.ver" ]]; then + echo "FATAL ERROR: ${HOMEglobal}/versions/run.ver does not exist!" echo "HINT: Run link_workflow.sh first." exit 1 fi # Load our modules: - module use "${HOMEgfs}/modulefiles" + module use "${HOMEglobal}/modulefiles" # Determine target module based on type and machine target_module="gw_${MODULE_TYPE}.${MACHINE_ID}" @@ -183,7 +183,7 @@ case "${MODULE_TYPE}" in # Source versions file (except for upp) if [[ "${mod_type}" != "upp" ]]; then - source "${HOMEgfs}/versions/run.ver" + source "${HOMEglobal}/versions/run.ver" fi if [[ -n "${target_module}" ]]; then @@ -216,20 +216,20 @@ case "${MODULE_TYPE}" in esac -# Set up the PYTHONPATH to include wxflow from HOMEgfs -if [[ -d "${HOMEgfs}/sorc/wxflow/src" ]]; then - PYTHONPATH="${HOMEgfs}/sorc/wxflow/src${PYTHONPATH:+:${PYTHONPATH}}" +# Set up the PYTHONPATH to include wxflow from HOMEglobal +if [[ -d "${HOMEglobal}/sorc/wxflow/src" ]]; then + PYTHONPATH="${HOMEglobal}/sorc/wxflow/src${PYTHONPATH:+:${PYTHONPATH}}" fi -# Add HOMEgfs/ush/python to PYTHONPATH -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEgfs}/ush/python" +# Add HOMEglobal/ush/python to PYTHONPATH +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEglobal}/ush/python" export PYTHONPATH # Restore stack soft limit: ulimit -S -s "${ulimit_s}" unset ulimit_s -# Unset HOMEgfs if it was not set at the beginning of this script +# Unset HOMEglobal if it was not set at the beginning of this script if [[ ${_unset_homegfs} == "YES" ]]; then - unset HOMEgfs + unset HOMEglobal fi diff --git a/dev/workflow/create_experiment.py b/dev/workflow/create_experiment.py index f3e502f455a..584ddab6f27 100755 --- a/dev/workflow/create_experiment.py +++ b/dev/workflow/create_experiment.py @@ -2,11 +2,11 @@ """ Basic python script to create an experiment directory on the fly from a given -yaml file for the arguments to the two scripts below in ${HOMEgfs}/dev/workflow -where ${HOMEgfs} is determined from the location of this script. 
+yaml file for the arguments to the two scripts below in ${HOMEglobal}/dev/workflow +where ${HOMEglobal} is determined from the location of this script. - ${HOMEgfs}/dev/workflow/setup_expt.py - ${HOMEgfs}/dev/workflow/setup_workflow.py + ${HOMEglobal}/dev/workflow/setup_expt.py + ${HOMEglobal}/dev/workflow/setup_workflow.py The yaml file are simply the arguments for these two scripts. After this scripts runs the experiment is ready for launch. @@ -85,7 +85,7 @@ def input_args(): user_inputs = input_args() # Create a dictionary to pass to parse_j2yaml for parsing the yaml file - data = AttrDict(HOMEgfs=_top) + data = AttrDict(HOMEglobal=_top) data.update(os.environ) testconf = parse_j2yaml(path=user_inputs.yaml, data=data) diff --git a/dev/workflow/generate_workflows.sh b/dev/workflow/generate_workflows.sh index ae63779b5c5..b2cfd404ec2 100755 --- a/dev/workflow/generate_workflows.sh +++ b/dev/workflow/generate_workflows.sh @@ -32,7 +32,7 @@ function _usage() { -D Delete the RUNTESTS and DATAROOT directories if they already exist -Y /path/to/directory/with/YAMLs - If this option is not specified, then the \${HOMEgfs}/dev/ci/cases/pr + If this option is not specified, then the \${HOMEglobal}/dev/ci/cases/pr directory is used. -G Run all valid GFS cases in the specified YAML directory. @@ -54,7 +54,7 @@ function _usage() { -A "HPC account name" Set the HPC account name. If this is not set, the default in - \$HOMEgfs/dev/ci/platform/config.\$machine + \$HOMEglobal/dev/ci/platform/config.\$machine will be used. 
-c Append the chosen set of tests to your existing crontab @@ -84,7 +84,7 @@ EOF set -eu # Set default options -HOMEgfs="" +HOMEglobal="" _specified_home=false _build=false _compute_build=false @@ -92,7 +92,7 @@ _build_flags="" _update_submods=false declare -a _yaml_list=("C48_ATM") _specified_yaml_list=false -_yaml_dir="" # Will be set based off of HOMEgfs if not specified explicitly +_yaml_dir="" # Will be set based off of HOMEglobal if not specified explicitly _specified_yaml_dir=false _run_all_gfs=false _run_all_gefs=false @@ -117,10 +117,10 @@ while [[ $# -gt 0 && "$1" != "--" ]]; do while getopts ":H:bBDuy:Y:GESCA:ce:t:vVdh" option; do case "${option}" in H) - HOMEgfs="${OPTARG}" + HOMEglobal="${OPTARG}" _specified_home=true - if [[ ! -d "${HOMEgfs}" ]]; then - echo "Specified HOMEgfs directory (${HOMEgfs}) does not exist" + if [[ ! -d "${HOMEglobal}" ]]; then + echo "Specified HOMEglobal directory (${HOMEglobal}) does not exist" exit 1 fi ;; @@ -279,18 +279,18 @@ if [[ "${_run_all_gfs}" == "true" || _yaml_list=() fi -# Set HOMEgfs if it wasn't set by the user +# Set HOMEglobal if it wasn't set by the user if [[ "${_specified_home}" == "false" ]]; then script_relpath="$(dirname "${BASH_SOURCE[0]}")" - HOMEgfs="$(cd "${script_relpath}" && git rev-parse --show-toplevel)" + HOMEglobal="$(cd "${script_relpath}" && git rev-parse --show-toplevel)" if [[ "${_verbose}" == "true" ]]; then - printf "Setting HOMEgfs to %s\n\n" "${HOMEgfs}" + printf "Setting HOMEglobal to %s\n\n" "${HOMEglobal}" fi fi -# Set the _yaml_dir to HOMEgfs/dev/ci/cases/pr if not explicitly set +# Set the _yaml_dir to HOMEglobal/dev/ci/cases/pr if not explicitly set if [[ "${_specified_yaml_dir}" == false ]]; then - _yaml_dir="${HOMEgfs}/dev/ci/cases/pr" + _yaml_dir="${HOMEglobal}/dev/ci/cases/pr" fi function select_all_yamls() { @@ -426,9 +426,9 @@ fi if [[ "${_debug}" == "true" ]]; then set +x fi -if ! source "${HOMEgfs}/dev/ush/gw_setup.sh" >&stdout; then +if ! 
source "${HOMEglobal}/dev/ush/gw_setup.sh" >&stdout; then cat stdout - echo "Failed to source ${HOMEgfs}/dev/ush/gw_setup.sh!" + echo "Failed to source ${HOMEglobal}/dev/ush/gw_setup.sh!" exit 7 fi if [[ "${_verbose}" == "true" ]]; then @@ -440,9 +440,9 @@ if [[ "${_debug}" == "true" ]]; then fi set -u machine=${MACHINE_ID} -platform_config="${HOMEgfs}/dev/ci/platforms/config.${machine}" +platform_config="${HOMEglobal}/dev/ci/platforms/config.${machine}" if [[ -f "${platform_config}" ]]; then - source "${HOMEgfs}/dev/ci/platforms/config.${machine}" + source "${HOMEglobal}/dev/ci/platforms/config.${machine}" else if [[ "${_set_account}" == "false" ]]; then echo "ERROR Unknown HPC account! Please use the -A option to specify." @@ -450,9 +450,9 @@ else fi fi -# If _yaml_dir is not set, set it to $HOMEgfs/dev/ci/cases/pr +# If _yaml_dir is not set, set it to $HOMEglobal/dev/ci/cases/pr if [[ -z ${_yaml_dir} ]]; then - _yaml_dir="${HOMEgfs}/dev/ci/cases/pr" + _yaml_dir="${HOMEglobal}/dev/ci/cases/pr" fi # Build the system if requested @@ -463,14 +463,14 @@ if [[ "${_build}" == "true" ]]; then _compute_build_flag="-c -A ${HPC_ACCOUNT}" fi #shellcheck disable=SC2086,SC2248 - ${HOMEgfs}/sorc/build_all.sh ${_compute_build_flag:-} ${_verbose_flag} ${_build_flags} + ${HOMEglobal}/sorc/build_all.sh ${_compute_build_flag:-} ${_verbose_flag} ${_build_flags} fi # Link the workflow silently unless there's an error if [[ "${_verbose}" == true ]]; then printf "Linking the workflow\n\n" fi -if ! "${HOMEgfs}/sorc/link_workflow.sh" >&stdout; then +if ! "${HOMEglobal}/sorc/link_workflow.sh" >&stdout; then cat stdout echo "link_workflow.sh failed!" 
if [[ "${_set_email}" == true ]]; then diff --git a/dev/workflow/rocoto/gcafs_tasks.py b/dev/workflow/rocoto/gcafs_tasks.py index dc19040409d..2b04a22e29e 100644 --- a/dev/workflow/rocoto/gcafs_tasks.py +++ b/dev/workflow/rocoto/gcafs_tasks.py @@ -61,7 +61,7 @@ def fetch(self): 'resources': resources, 'envars': self.envars, 'cycledef': cycledef, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/fetch.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/fetch.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -101,7 +101,7 @@ def stage_ic(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': cycledef, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/stage_ic.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/stage_ic.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -144,7 +144,7 @@ def prep(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': cycledef, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/prep.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/prep.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -173,7 +173,7 @@ def prep_emissions(self): 'resources': resources, 'envars': self.envars, 'cycledef': cycledef, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/prep_emissions.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/prep_emissions.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -209,7 +209,7 @@ def offlineanl(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': 'gcdas', - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/offlineanl.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/offlineanl.sh', 'job_name': 
f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -244,7 +244,7 @@ def sfcanl(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': 'gcdas', - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/sfcanl.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/sfcanl.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -289,7 +289,7 @@ def atmanlinit(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': cycledef, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/atmanlinit.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/atmanlinit.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -322,7 +322,7 @@ def atmanlvar(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/atmanlvar.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/atmanlvar.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -355,7 +355,7 @@ def atmanlfv3inc(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/atmanlfv3inc.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/atmanlfv3inc.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -388,7 +388,7 @@ def atmanlfinal(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/atmanlfinal.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/atmanlfinal.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': 
f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -421,7 +421,7 @@ def aeroanlgenb(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': 'gcdas_half,gcdas', - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/aeroanlgenb.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/aeroanlgenb.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -456,7 +456,7 @@ def aeroanlinit(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/aeroanlinit.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/aeroanlinit.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -491,7 +491,7 @@ def aeroanlvar(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/aeroanlvar.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/aeroanlvar.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -524,7 +524,7 @@ def aeroanlfinal(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/aeroanlfinal.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/aeroanlfinal.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -552,7 +552,7 @@ def analcalc(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/analcalc.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/analcalc.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': 
f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -614,7 +614,7 @@ def aerosol_init(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': cycledef, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/aerosol_init.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/aerosol_init.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -691,7 +691,7 @@ def _fcst_forecast_only(self): 'dependency': dependencies, 'envars': fcst_vars, 'cycledef': self.run, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/fcst.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/fcst.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -760,7 +760,7 @@ def _fcst_cycled(self): 'dependency': dependencies, 'envars': fcst_vars, 'cycledef': cycledef, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/fcst.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/fcst.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -829,7 +829,7 @@ def efcs(self): 'dependency': dependencies, 'envars': efcsenvars, 'cycledef': self.run, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/fcst.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/fcst.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -889,7 +889,7 @@ def atmanlupp(self): 'dependency': dependencies, 'envars': postenvars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/upp.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/upp.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -928,7 +928,7 @@ def atmanlprod(self): 'dependency': dependencies, 
'envars': postenvars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/atmos_products.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/atmos_products.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1015,7 +1015,7 @@ def _upptask(self, upp_run="forecast", task_id="atmupp"): 'dependency': dependencies, 'envars': postenvars, 'cycledef': cycledef, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/upp.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/upp.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1111,7 +1111,7 @@ def _atmosoceaniceprod(self, component: str): 'dependency': dependencies, 'envars': postenvars, 'cycledef': cycledef, - 'command': f"{self.HOMEgfs}/dev/job_cards/rocoto/{config}.sh", + 'command': f"{self.HOMEglobal}/dev/job_cards/rocoto/{config}.sh", 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1167,7 +1167,7 @@ def atmos_ensstat(self): 'dependency': dependencies, 'envars': postenvars, 'cycledef': self.run, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/atmos_ensstat.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/atmos_ensstat.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;'} @@ -1231,7 +1231,7 @@ def metp(self): 'dependency': dependencies, 'envars': metpenvars, 'cycledef': 'metp,last_gfs', - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/metp.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/metp.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1272,7 +1272,7 @@ def anlstat(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run, - 
'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/anlstat.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/anlstat.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1321,7 +1321,7 @@ def extractvars(self): 'dependency': dependencies, 'envars': extractvars_envars, 'cycledef': self.run, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/extractvars.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/extractvars.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1382,7 +1382,7 @@ def arch_vrfy(self): 'envars': self.envars, 'cycledef': self.run, 'dependency': dependencies, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/arch_vrfy.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/arch_vrfy.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1435,7 +1435,7 @@ def arch_tars(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/arch_tars.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/arch_tars.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1468,7 +1468,7 @@ def globus(self): 'envars': self.envars, 'cycledef': 'gefs', 'dependency': dependencies, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/globus_arch.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/globus_arch.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1511,7 +1511,7 @@ def cleanup(self): 'envars': self.envars, 'cycledef': self.run, 'dependency': dependencies, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/cleanup.sh', + 'command': 
f'{self.HOMEglobal}/dev/job_cards/rocoto/cleanup.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' diff --git a/dev/workflow/rocoto/gefs_tasks.py b/dev/workflow/rocoto/gefs_tasks.py index dcede32b627..24c959afb32 100644 --- a/dev/workflow/rocoto/gefs_tasks.py +++ b/dev/workflow/rocoto/gefs_tasks.py @@ -16,7 +16,7 @@ def stage_ic(self): 'resources': resources, 'envars': self.envars, 'cycledef': self.run, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/stage_ic.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/stage_ic.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -37,7 +37,7 @@ def gen_control_ic(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/gen_control_ic.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/gen_control_ic.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -54,7 +54,7 @@ def waveinit(self): 'resources': resources, 'envars': self.envars, 'cycledef': self.run, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/waveinit.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/waveinit.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -71,7 +71,7 @@ def prep_emissions(self): 'resources': resources, 'envars': self.envars, 'cycledef': self.run, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/prep_emissions.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/prep_emissions.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -112,7 +112,7 @@ def fcst(self): 'dependency': dependencies, 'envars': fcst_vars, 'cycledef': self.run, - 'command': 
f'{self.HOMEgfs}/dev/job_cards/rocoto/fcst.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/fcst.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -167,7 +167,7 @@ def efcs(self): 'dependency': dependencies, 'envars': efcsenvars, 'cycledef': self.run, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/fcst.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/fcst.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -263,7 +263,7 @@ def _atmosoceaniceprod(self, component: str): 'dependency': dependencies, 'envars': postenvars, 'cycledef': self.run, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/{config}.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/{config}.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;'} @@ -305,7 +305,7 @@ def postsnd(self): 'dependency': dependencies, 'envars': postsnd_envars, 'cycledef': self.run, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/postsnd.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/postsnd.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -355,7 +355,7 @@ def gempak(self): 'dependency': dependencies, 'envars': postenvars, 'cycledef': self.run, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/gempak.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/gempak.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -407,7 +407,7 @@ def atmos_ensstat(self): 'dependency': dependencies, 'envars': postenvars, 'cycledef': self.run, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/atmos_ensstat.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/atmos_ensstat.sh', 'job_name': 
f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;'} @@ -449,7 +449,7 @@ def awips(self): 'dependency': dependencies, 'envars': postenvars, 'cycledef': self.run, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/awips.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/awips.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;'} @@ -498,7 +498,7 @@ def wavepostsbs(self): 'dependency': dependencies, 'envars': wave_post_envars, 'cycledef': self.run, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/wavepostsbs.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/wavepostsbs.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -556,7 +556,7 @@ def wave_stat(self): 'dependency': dependencies, 'envars': wave_stat_envars, 'cycledef': self.run, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/wave_stat.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/wave_stat.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -585,7 +585,7 @@ def wave_stat_pnt(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/wave_stat_pnt.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/wave_stat_pnt.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -624,7 +624,7 @@ def extractvars(self): 'dependency': dependencies, 'envars': extractvars_envars, 'cycledef': self.run, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/extractvars.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/extractvars.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': 
'&MAXTRIES;' @@ -669,7 +669,7 @@ def arch_vrfy(self): 'envars': self.envars, 'cycledef': self.run, 'dependency': dependencies, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/arch_vrfy.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/arch_vrfy.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -708,7 +708,7 @@ def arch_tars(self): 'envars': self.envars, 'cycledef': self.run, 'dependency': dependencies, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/arch_tars.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/arch_tars.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -731,7 +731,7 @@ def globus(self): 'envars': self.envars, 'cycledef': self.run, 'dependency': dependencies, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/globus_arch.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/globus_arch.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -762,7 +762,7 @@ def cleanup(self): 'envars': self.envars, 'cycledef': self.run, 'dependency': dependencies, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/cleanup.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/cleanup.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' diff --git a/dev/workflow/rocoto/gfs_tasks.py b/dev/workflow/rocoto/gfs_tasks.py index d8c89e84058..f1a7392affc 100644 --- a/dev/workflow/rocoto/gfs_tasks.py +++ b/dev/workflow/rocoto/gfs_tasks.py @@ -26,7 +26,7 @@ def fetch(self): 'resources': resources, 'envars': self.envars, 'cycledef': cycledef, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/fetch.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/fetch.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': 
f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -55,7 +55,7 @@ def stage_ic(self): 'resources': resources, 'envars': self.envars, 'cycledef': cycledef, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/stage_ic.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/stage_ic.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;', @@ -84,7 +84,7 @@ def prep_sfc(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/prep_sfc.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/prep_sfc.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -148,7 +148,7 @@ def prep(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': cycledef, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/prep.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/prep.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -178,7 +178,7 @@ def waveinit(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': cycledef, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/waveinit.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/waveinit.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -202,7 +202,7 @@ def waveprep(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': cycledef, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/waveprep.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/waveprep.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -254,7 +254,7 @@ def aerosol_init(self): 'dependency': dependencies, 'envars': self.envars, 
'cycledef': cycledef, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/aerosol_init.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/aerosol_init.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -282,7 +282,7 @@ def anal(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/anal.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/anal.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -318,7 +318,7 @@ def sfcanl(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/sfcanl.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/sfcanl.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -350,7 +350,7 @@ def analcalc(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/analcalc.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/analcalc.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -374,7 +374,7 @@ def analdiag(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/analdiag.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/analdiag.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -410,7 +410,7 @@ def atmanlinit(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': cycledef, - 'command': 
f'{self.HOMEgfs}/dev/job_cards/rocoto/atmanlinit.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/atmanlinit.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -434,7 +434,7 @@ def atmanlvar(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/atmanlvar.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/atmanlvar.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -458,7 +458,7 @@ def atmanlfv3inc(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/atmanlfv3inc.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/atmanlfv3inc.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -482,7 +482,7 @@ def atmanlfinal(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/atmanlfinal.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/atmanlfinal.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -506,7 +506,7 @@ def aeroanlgenb(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': 'gdas_half,gdas', - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/aeroanlgenb.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/aeroanlgenb.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -533,7 +533,7 @@ def aeroanlinit(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': 
f'{self.HOMEgfs}/dev/job_cards/rocoto/aeroanlinit.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/aeroanlinit.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -564,7 +564,7 @@ def aeroanlvar(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/aeroanlvar.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/aeroanlvar.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -588,7 +588,7 @@ def aeroanlfinal(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/aeroanlfinal.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/aeroanlfinal.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -612,7 +612,7 @@ def snowanl(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/snowanl.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/snowanl.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -637,7 +637,7 @@ def esnowanl(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/esnowanl.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/esnowanl.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -670,7 +670,7 @@ def prepoceanobs(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': 
f'{self.HOMEgfs}/dev/job_cards/rocoto/prepoceanobs.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/prepoceanobs.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -696,7 +696,7 @@ def marineanlletkf(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/marineanlletkf.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/marineanlletkf.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -726,7 +726,7 @@ def marinebmatinit(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/marinebmatinit.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/marinebmatinit.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -750,7 +750,7 @@ def marinebmat(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/marinebmat.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/marinebmat.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -778,7 +778,7 @@ def marineanlinit(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/marineanlinit.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/marineanlinit.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -802,7 +802,7 @@ def marineanlvar(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': 
self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/marineanlvar.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/marineanlvar.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -827,7 +827,7 @@ def marineanlecen(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/marineanlecen.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/marineanlecen.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -862,7 +862,7 @@ def marineanlchkpt(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/marineanlchkpt.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/marineanlchkpt.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -886,7 +886,7 @@ def marineanlfinal(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/marineanlfinal.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/marineanlfinal.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -955,7 +955,7 @@ def _fcst_forecast_only(self): 'dependency': dependencies, 'envars': fcst_vars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/fcst.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/fcst.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1035,7 +1035,7 @@ def _fcst_cycled(self): 'dependency': dependencies, 'envars': 
fcst_vars, 'cycledef': cycledef, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/fcst.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/fcst.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1084,7 +1084,7 @@ def atmanlupp(self): 'dependency': dependencies, 'envars': postenvars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/upp.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/upp.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1113,7 +1113,7 @@ def atmanlprod(self): 'dependency': dependencies, 'envars': postenvars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/atmos_products.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/atmos_products.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1162,7 +1162,7 @@ def _upptask(self, upp_run="forecast", task_id="atmupp"): 'dependency': dependencies, 'envars': postenvars, 'cycledef': cycledef, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/upp.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/upp.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1250,7 +1250,7 @@ def _atmosoceaniceprod(self, component: str): 'dependency': dependencies, 'envars': postenvars, 'cycledef': cycledef, - 'command': f"{self.HOMEgfs}/dev/job_cards/rocoto/{config}.sh", + 'command': f"{self.HOMEglobal}/dev/job_cards/rocoto/{config}.sh", 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1295,7 +1295,7 @@ def wavepostsbs(self): 'dependency': dependencies, 'envars': wave_post_envars, 'cycledef': 
self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/wavepostsbs.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/wavepostsbs.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1334,7 +1334,7 @@ def _wavepostbndpnt(self, name_in): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/{name_in}.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/{name_in}.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1357,7 +1357,7 @@ def wavepostpnt(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/wavepostpnt.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/wavepostpnt.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1407,7 +1407,7 @@ def wavegempak(self): 'dependency': dependencies, 'envars': wave_post_envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/wavegempak.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/wavegempak.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1436,7 +1436,7 @@ def waveawipsbulls(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/waveawipsbulls.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/waveawipsbulls.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1459,7 +1459,7 @@ def waveawipsgridded(self): 'dependency': dependencies, 
'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/waveawipsgridded.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/waveawipsgridded.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1482,7 +1482,7 @@ def postsnd(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/postsnd.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/postsnd.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1519,7 +1519,7 @@ def fbwind(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/fbwind.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/fbwind.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1591,7 +1591,7 @@ def awips_20km_1p0deg(self): 'dependency': dependencies, 'envars': awipsenvars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/awips_20km_1p0deg.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/awips_20km_1p0deg.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1646,7 +1646,7 @@ def gempak(self): 'dependency': dependencies, 'envars': gempak_vars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/gempak.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/gempak.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1673,7 +1673,7 @@ def gempakmeta(self): 'dependency': dependencies, 'envars': 
self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/gempakmeta.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/gempakmeta.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1696,7 +1696,7 @@ def gempakmetancdc(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/gempakmetancdc.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/gempakmetancdc.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1719,7 +1719,7 @@ def gempakncdcupapgif(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/gempakncdcupapgif.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/gempakncdcupapgif.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1747,7 +1747,7 @@ def gempakpgrb2spec(self): 'dependency': dependencies, 'envars': gempak_vars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/gempakgrb2spec.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/gempakgrb2spec.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1789,7 +1789,7 @@ def npoess_pgrb2_0p5deg(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/npoess.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/npoess.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1812,7 +1812,7 @@ def verfozn(self): 
'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/verfozn.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/verfozn.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1835,7 +1835,7 @@ def verfrad(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/verfrad.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/verfrad.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1858,7 +1858,7 @@ def vminmon(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/vminmon.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/vminmon.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1895,7 +1895,7 @@ def anlstat(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/anlstat.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/anlstat.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1918,7 +1918,7 @@ def tracker(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/tracker.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/tracker.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1941,7 +1941,7 @@ def genesis(self): 'dependency': dependencies, 'envars': self.envars, 
'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/genesis.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/genesis.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1964,7 +1964,7 @@ def genesis_fsu(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/genesis_fsu.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/genesis_fsu.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -1987,7 +1987,7 @@ def fit2obs(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/fit2obs.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/fit2obs.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -2065,7 +2065,7 @@ def metp(self): 'dependency': dependencies, 'envars': metpenvars, 'cycledef': 'metp,last_gfs', - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/metp.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/metp.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -2132,7 +2132,7 @@ def arch_vrfy(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/arch_vrfy.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/arch_vrfy.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -2318,7 +2318,7 @@ def arch_tars(self): 'dependency': dependencies, 'envars': archenvars, 'cycledef': self.run.replace('enkf', ''), - 'command': 
f'{self.HOMEgfs}/dev/job_cards/rocoto/arch_tars.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/arch_tars.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -2347,7 +2347,7 @@ def globus_arch(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/globus_arch.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/globus_arch.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -2380,7 +2380,7 @@ def globus_earc(self): 'dependency': dependencies, 'envars': earcenvars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/globus_earc.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/globus_earc.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -2627,7 +2627,7 @@ def cleanup(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': cycledef, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/cleanup.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/cleanup.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -2653,7 +2653,7 @@ def eobs(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/eobs.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/eobs.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -2676,7 +2676,7 @@ def ediag(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/ediag.sh', + 
'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/ediag.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -2699,7 +2699,7 @@ def eupd(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/eupd.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/eupd.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -2725,7 +2725,7 @@ def atmensanlinit(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': cycledef, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/atmensanlinit.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/atmensanlinit.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -2751,7 +2751,7 @@ def atmensanlobs(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/atmensanlobs.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/atmensanlobs.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -2777,7 +2777,7 @@ def atmensanlsol(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/atmensanlsol.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/atmensanlsol.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -2803,7 +2803,7 @@ def atmensanlletkf(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/atmensanlletkf.sh', + 'command': 
f'{self.HOMEglobal}/dev/job_cards/rocoto/atmensanlletkf.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -2832,7 +2832,7 @@ def atmensanlfv3inc(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/atmensanlfv3inc.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/atmensanlfv3inc.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -2856,7 +2856,7 @@ def atmensanlfinal(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/atmensanlfinal.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/atmensanlfinal.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -2916,7 +2916,7 @@ def _get_ecengroups(): 'dependency': dependencies, 'envars': ecenenvars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/ecen.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/ecen.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -2946,7 +2946,7 @@ def ecen_fv3jedi(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/ecen_fv3jedi.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/ecen_fv3jedi.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -2976,7 +2976,7 @@ def analcalc_fv3jedi(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': 
f'{self.HOMEgfs}/dev/job_cards/rocoto/analcalc_fv3jedi.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/analcalc_fv3jedi.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -3009,7 +3009,7 @@ def esfc(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/esfc.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/esfc.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -3054,7 +3054,7 @@ def efcs(self): 'dependency': dependencies, 'envars': efcsenvars, 'cycledef': cycledef, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/fcst.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/fcst.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -3090,7 +3090,7 @@ def echgres(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': cycledef, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/echgres.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/echgres.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -3149,7 +3149,7 @@ def _get_eposgroups(epos): 'dependency': dependencies, 'envars': eposenvars, 'cycledef': cycledef, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/epos.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/epos.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -3189,7 +3189,7 @@ def earc_vrfy(self): 'dependency': dependencies, 'envars': earcenvars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/earc_vrfy.sh', + 'command': 
f'{self.HOMEglobal}/dev/job_cards/rocoto/earc_vrfy.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -3244,7 +3244,7 @@ def earc_tars(self): 'dependency': dependencies, 'envars': earcenvars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/earc_tars.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/earc_tars.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' diff --git a/dev/workflow/rocoto/rocoto_xml.py b/dev/workflow/rocoto/rocoto_xml.py index cdf85c1a0d7..3aaf04e95af 100644 --- a/dev/workflow/rocoto/rocoto_xml.py +++ b/dev/workflow/rocoto/rocoto_xml.py @@ -143,7 +143,7 @@ def _get_scron_script_content(self, rocotorunstr: str, replyto: str) -> str: # Format the template with experiment-specific values context = { - 'HOMEgfs': self.HOMEgfs, + 'HOMEglobal': self.HOMEglobal, 'rocotorunstr': rocotorunstr, 'expdir': self.expdir, 'pslot': self.pslot, diff --git a/dev/workflow/rocoto/sfs_tasks.py b/dev/workflow/rocoto/sfs_tasks.py index 0d29390e945..b58adc40131 100644 --- a/dev/workflow/rocoto/sfs_tasks.py +++ b/dev/workflow/rocoto/sfs_tasks.py @@ -16,7 +16,7 @@ def stage_ic(self): 'resources': resources, 'envars': self.envars, 'cycledef': self.run, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/stage_ic.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/stage_ic.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -33,7 +33,7 @@ def waveinit(self): 'resources': resources, 'envars': self.envars, 'cycledef': self.run, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/waveinit.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/waveinit.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ 
-50,7 +50,7 @@ def prep_emissions(self): 'resources': resources, 'envars': self.envars, 'cycledef': self.run, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/prep_emissions.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/prep_emissions.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -88,7 +88,7 @@ def fcst(self): 'dependency': dependencies, 'envars': fcst_vars, 'cycledef': self.run, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/fcst.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/fcst.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -145,7 +145,7 @@ def efcs(self): 'dependency': dependencies, 'envars': efcsenvars, 'cycledef': self.run, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/fcst.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/fcst.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -241,7 +241,7 @@ def _atmosoceaniceprod(self, component: str): 'dependency': dependencies, 'envars': postenvars, 'cycledef': self.run, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/{config}.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/{config}.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;'} @@ -291,7 +291,7 @@ def atmos_ensstat(self): 'dependency': dependencies, 'envars': postenvars, 'cycledef': self.run, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/atmos_ensstat.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/atmos_ensstat.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;'} @@ -340,7 +340,7 @@ def wavepostsbs(self): 'dependency': dependencies, 'envars': wave_post_envars, 'cycledef': self.run, - 
'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/wavepostsbs.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/wavepostsbs.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -379,7 +379,7 @@ def wavepostbndpnt(self): 'dependency': dependencies, 'envars': wave_post_bndpnt_envars, 'cycledef': self.run, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/wavepostbndpnt.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/wavepostbndpnt.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -424,7 +424,7 @@ def wavepostbndpntbll(self): 'dependency': dependencies, 'envars': wave_post_bndpnt_bull_envars, 'cycledef': self.run, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/wavepostbndpntbll.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/wavepostbndpntbll.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -463,7 +463,7 @@ def wavepostpnt(self): 'dependency': dependencies, 'envars': wave_post_pnt_envars, 'cycledef': self.run, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/wavepostpnt.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/wavepostpnt.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -508,7 +508,7 @@ def extractvars(self): 'dependency': dependencies, 'envars': extractvars_envars, 'cycledef': self.run, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/extractvars.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/extractvars.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -558,7 +558,7 @@ def arch_vrfy(self): 'envars': self.envars, 'cycledef': self.run, 'dependency': dependencies, - 'command': 
f'{self.HOMEgfs}/dev/job_cards/rocoto/arch_vrfy.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/arch_vrfy.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -602,7 +602,7 @@ def arch_tars(self): 'envars': self.envars, 'cycledef': self.run, 'dependency': dependencies, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/arch_tars.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/arch_tars.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -626,7 +626,7 @@ def globus(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run, - 'command': f'{self.HOMEgfs}/dev/jobs/globus_arch.sh', + 'command': f'{self.HOMEglobal}/dev/jobs/globus_arch.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -676,7 +676,7 @@ def cleanup(self): 'envars': self.envars, 'cycledef': self.run, 'dependency': dependencies, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/cleanup.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/cleanup.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' diff --git a/dev/workflow/rocoto/tasks.py b/dev/workflow/rocoto/tasks.py index b5d48e994bb..ef2ad39c166 100644 --- a/dev/workflow/rocoto/tasks.py +++ b/dev/workflow/rocoto/tasks.py @@ -54,7 +54,7 @@ def __init__(self, app_config: AppConfig, run: str) -> None: # Save base in the internal state (never know where it may be needed) self._base = self._configs['base'] - self.HOMEgfs = self._base['HOMEgfs'] + self.HOMEglobal = self._base['HOMEglobal'] self.rotdir = self._base['ROTDIR'] self.pslot = self._base['PSLOT'] if self.run == "enkfgfs": @@ -67,7 +67,7 @@ def __init__(self, app_config: AppConfig, run: str) -> None: # DATAROOT is set by prod_envir in ops. 
Here, we use `STMP` to construct DATAROOT dataroot_str = f"{self._base.get('STMP')}/RUNDIRS/{self._base.get('PSLOT')}/{self.run}.@Y@m@d@H" envar_dict = {'RUN_ENVIR': self._base.get('RUN_ENVIR', 'emc'), - 'HOMEgfs': self.HOMEgfs, + 'HOMEglobal': self.HOMEglobal, 'EXPDIR': self._base.get('EXPDIR'), 'NET': self._base.get('NET'), 'RUN': self.run, diff --git a/dev/workflow/setup_buildxml.py b/dev/workflow/setup_buildxml.py index 03617b7482c..76597d03f58 100755 --- a/dev/workflow/setup_buildxml.py +++ b/dev/workflow/setup_buildxml.py @@ -15,7 +15,7 @@ _here = os.path.dirname(__file__) -HOMEgfs = os.path.abspath(os.path.join(os.path.abspath(_here), '../..')) +HOMEglobal = os.path.abspath(os.path.join(os.path.abspath(_here), '../..')) def input_args(*argv): @@ -63,9 +63,9 @@ def get_task_spec(task_name: str, task_spec: Dict, host_spec: Dict) -> Dict: task_dict.task_name = task_name task_dict.cycledef = "build" task_dict.maxtries = 1 - task_dict.command = f"cd {HOMEgfs}/sorc/; {task_spec.command} -j {task_spec.cores}" + task_dict.command = f"cd {HOMEglobal}/sorc/; {task_spec.command} -j {task_spec.cores}" task_dict.job_name = task_name - task_dict.log = f"{HOMEgfs}/sorc/logs/{task_name}.log" + task_dict.log = f"{HOMEglobal}/sorc/logs/{task_name}.log" task_dict.resources = AttrDict() task_dict.resources.account = host_spec.account @@ -232,7 +232,7 @@ def main(*argv): strings = ['', '', ']>', f'', - f'\t{HOMEgfs}/sorc/logs/build.log', + f'\t{HOMEglobal}/sorc/logs/build.log', '\t190001010000 190001010000 24:00:00', '\n'] xml_header = '\n'.join(strings) @@ -244,7 +244,7 @@ def main(*argv): xml_tasks = '\n'.join(task_list) xml = ''.join([xml_header, xml_tasks, xml_footer]) - xml_file = f"{HOMEgfs}/sorc/build.xml" + xml_file = f"{HOMEglobal}/sorc/build.xml" with open(xml_file, 'w') as fh: fh.write(xml) diff --git a/dev/workflow/setup_expt.py b/dev/workflow/setup_expt.py index d668ea415a5..03e15e79449 100755 --- a/dev/workflow/setup_expt.py +++ b/dev/workflow/setup_expt.py @@ 
-84,7 +84,7 @@ def _update_defaults(dict_in: dict) -> dict: # Combine host.info and inputs_dict into a single dict, add some additional keys host_plus_inputs_dict = AttrDict(host.info, **inputs_dict_remapped) - host_plus_inputs_dict.HOMEgfs = _top + host_plus_inputs_dict.HOMEglobal = _top host_plus_inputs_dict.MACHINE = str(host).upper() # Read in the YAML file diff --git a/dev/workflow/tests/test_configuration.py b/dev/workflow/tests/test_configuration.py index 32e40c67af0..40455853343 100644 --- a/dev/workflow/tests/test_configuration.py +++ b/dev/workflow/tests/test_configuration.py @@ -19,7 +19,7 @@ base = cfg.parse_config('config.base') cfg.print_config('config.base') print(type(base)) -print(base.HOMEgfs) +print(base.HOMEglobal) print('*' * 80) print('config.anal...') diff --git a/dev/workflow/workflow_suite.py b/dev/workflow/workflow_suite.py index 495bfc5d5b6..5333f0a28e6 100644 --- a/dev/workflow/workflow_suite.py +++ b/dev/workflow/workflow_suite.py @@ -55,7 +55,7 @@ def __init__(self, app_config: AppConfig, workflow_config: Dict) -> None: self.use_scrontab = self.host_info.get("USE_SCRONTAB", False) # Add ACCOUNT to host_info, with that from config.base self.host_info.ACCOUNT = self._base['ACCOUNT'] - self.HOMEgfs = self._base['HOMEgfs'] + self.HOMEglobal = self._base['HOMEglobal'] self.expdir = self._base['EXPDIR'] self.pslot = self._base['PSLOT'] diff --git a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_diag.ecf b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_diag.ecf index 03835172b9c..38d0b47122b 100755 --- a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_diag.ecf +++ b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_diag.ecf @@ -41,7 +41,7 @@ export USE_CFP=YES ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGDAS_ENKF_DIAG +${HOMEglobal}/jobs/JGDAS_ENKF_DIAG if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_select_obs.ecf b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_select_obs.ecf index bc289b8006b..65a0e3fd9e8 100755 --- a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_select_obs.ecf +++ b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_select_obs.ecf @@ -41,7 +41,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGDAS_ENKF_SELECT_OBS +${HOMEglobal}/jobs/JGDAS_ENKF_SELECT_OBS if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_update.ecf b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_update.ecf index 91eef7fb74b..0c59f75b1de 100755 --- a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_update.ecf +++ b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_update.ecf @@ -42,7 +42,7 @@ export USE_CFP=YES ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGDAS_ENKF_UPDATE +${HOMEglobal}/jobs/JGDAS_ENKF_UPDATE if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/enkfgdas/analysis/recenter/ecen/jenkfgdas_ecen.ecf b/ecf/scripts/enkfgdas/analysis/recenter/ecen/jenkfgdas_ecen.ecf index 044a65c8433..591621b6e5d 100755 --- a/ecf/scripts/enkfgdas/analysis/recenter/ecen/jenkfgdas_ecen.ecf +++ b/ecf/scripts/enkfgdas/analysis/recenter/ecen/jenkfgdas_ecen.ecf @@ -44,7 +44,7 @@ export FHOUT_ECEN=$FHRGRP ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGDAS_ENKF_ECEN +${HOMEglobal}/jobs/JGDAS_ENKF_ECEN if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/enkfgdas/analysis/recenter/jenkfgdas_sfc.ecf b/ecf/scripts/enkfgdas/analysis/recenter/jenkfgdas_sfc.ecf index 954ca495331..a78112af576 100755 --- a/ecf/scripts/enkfgdas/analysis/recenter/jenkfgdas_sfc.ecf +++ b/ecf/scripts/enkfgdas/analysis/recenter/jenkfgdas_sfc.ecf @@ -42,7 +42,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGDAS_ENKF_SFC +${HOMEglobal}/jobs/JGDAS_ENKF_SFC if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/enkfgdas/forecast/jenkfgdas_fcst.ecf b/ecf/scripts/enkfgdas/forecast/jenkfgdas_fcst.ecf index 2fd692d1dfe..f9083772777 100755 --- a/ecf/scripts/enkfgdas/forecast/jenkfgdas_fcst.ecf +++ b/ecf/scripts/enkfgdas/forecast/jenkfgdas_fcst.ecf @@ -40,7 +40,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGDAS_ENKF_FCST +${HOMEglobal}/jobs/JGDAS_ENKF_FCST if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/enkfgdas/post/jenkfgdas_post_master.ecf b/ecf/scripts/enkfgdas/post/jenkfgdas_post_master.ecf index 4f682a7a0a8..3273945b650 100755 --- a/ecf/scripts/enkfgdas/post/jenkfgdas_post_master.ecf +++ b/ecf/scripts/enkfgdas/post/jenkfgdas_post_master.ecf @@ -42,7 +42,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -$HOMEgfs/jobs/JGDAS_ENKF_POST +$HOMEglobal/jobs/JGDAS_ENKF_POST if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis.ecf b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis.ecf index cc6eee326d1..0934c56127c 100755 --- a/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis.ecf +++ b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis.ecf @@ -50,7 +50,7 @@ export PREINP='' ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGLOBAL_ATMOS_ANALYSIS +${HOMEglobal}/jobs/JGLOBAL_ATMOS_ANALYSIS if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" ecflow_client --abort diff --git a/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf index 92c8c0551ef..e4e38a29c7d 100755 --- a/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf +++ b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf @@ -40,7 +40,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC +${HOMEglobal}/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" ecflow_client --abort diff --git a/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_diag.ecf b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_diag.ecf index 53d9daf734e..a1cceef6ecd 100755 --- a/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_diag.ecf +++ b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_diag.ecf @@ -41,7 +41,7 @@ export USE_CFP=YES ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGDAS_ATMOS_ANALYSIS_DIAG +${HOMEglobal}/jobs/JGDAS_ATMOS_ANALYSIS_DIAG if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" ecflow_client --abort diff --git a/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf b/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf index b3bb579ca33..d83b25c38e4 100755 --- a/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf +++ b/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf @@ -43,7 +43,7 @@ export FHR3=%FHR3% ############################################################ export model=gdas -${HOMEgfs}/jobs/JGDAS_ATMOS_GEMPAK +${HOMEglobal}/jobs/JGDAS_ATMOS_GEMPAK if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak_meta_ncdc.ecf b/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak_meta_ncdc.ecf index 312d3dcdaa6..2870519efbb 100755 --- a/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak_meta_ncdc.ecf +++ b/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak_meta_ncdc.ecf @@ -34,7 +34,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC +${HOMEglobal}/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gdas/atmos/obsproc/dump/jgdas_atmos_tropcy_qc_reloc.ecf b/ecf/scripts/gdas/atmos/obsproc/dump/jgdas_atmos_tropcy_qc_reloc.ecf index c5f838fb5f9..30e5d0cad44 100755 --- a/ecf/scripts/gdas/atmos/obsproc/dump/jgdas_atmos_tropcy_qc_reloc.ecf +++ b/ecf/scripts/gdas/atmos/obsproc/dump/jgdas_atmos_tropcy_qc_reloc.ecf @@ -37,7 +37,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC +${HOMEglobal}/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gdas/atmos/obsproc/prep/jgdas_atmos_prep_sfc.ecf b/ecf/scripts/gdas/atmos/obsproc/prep/jgdas_atmos_prep_sfc.ecf index 2fea72a8e7c..ab3cab62c82 100755 --- a/ecf/scripts/gdas/atmos/obsproc/prep/jgdas_atmos_prep_sfc.ecf +++ b/ecf/scripts/gdas/atmos/obsproc/prep/jgdas_atmos_prep_sfc.ecf @@ -35,7 +35,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGLOBAL_ATMOS_PREP_SFC +${HOMEglobal}/jobs/JGLOBAL_ATMOS_PREP_SFC if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_manager.ecf b/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_manager.ecf index 9792253ec88..ce94f1cbcd0 100755 --- a/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_manager.ecf +++ b/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_manager.ecf @@ -29,7 +29,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGLOBAL_ATMOS_POST_MANAGER +${HOMEglobal}/jobs/JGLOBAL_ATMOS_POST_MANAGER if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_master.ecf b/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_master.ecf index b65be6586ee..bf090e26e01 100755 --- a/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_master.ecf +++ b/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_master.ecf @@ -51,7 +51,7 @@ export g2tmpl_ver=v${g2tmpl_ver} ############################################################ # CALL executable job script here ############################################################ -$HOMEgfs/jobs/JGLOBAL_ATMOS_NCEPPOST +$HOMEglobal/jobs/JGLOBAL_ATMOS_NCEPPOST if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gdas/atmos/post_processing/jgdas_atmos_chgres_forenkf.ecf b/ecf/scripts/gdas/atmos/post_processing/jgdas_atmos_chgres_forenkf.ecf index 32b024f6632..b73a40d7800 100755 --- a/ecf/scripts/gdas/atmos/post_processing/jgdas_atmos_chgres_forenkf.ecf +++ b/ecf/scripts/gdas/atmos/post_processing/jgdas_atmos_chgres_forenkf.ecf @@ -42,7 +42,7 @@ export USE_CFP=YES ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGDAS_ATMOS_CHGRES_FORENKF +${HOMEglobal}/jobs/JGDAS_ATMOS_CHGRES_FORENKF if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfozn.ecf b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfozn.ecf index 938611b4bce..69d71c18e96 100755 --- a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfozn.ecf +++ b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfozn.ecf @@ -40,7 +40,7 @@ export VERBOSE=YES ############################################################ # CALL executable job script here ############################################################ -$HOMEgfs/jobs/JGDAS_ATMOS_VERFOZN +$HOMEglobal/jobs/JGDAS_ATMOS_VERFOZN if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfrad.ecf b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfrad.ecf index dd0c19d6f09..cd7c6ce3106 100755 --- a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfrad.ecf +++ b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfrad.ecf @@ -40,7 +40,7 @@ export VERBOSE=YES ############################################################ # CALL executable job script here ############################################################ -$HOMEgfs/jobs/JGDAS_ATMOS_VERFRAD +$HOMEglobal/jobs/JGDAS_ATMOS_VERFRAD if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_vminmon.ecf b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_vminmon.ecf index b538a18a3dc..6a217efca78 100755 --- a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_vminmon.ecf +++ b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_vminmon.ecf @@ -38,7 +38,7 @@ export VERBOSE=YES ############################################################ # CALL executable job script here ############################################################ -$HOMEgfs/jobs/JGDAS_ATMOS_VMINMON +$HOMEglobal/jobs/JGDAS_ATMOS_VMINMON if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gdas/jgdas_forecast.ecf b/ecf/scripts/gdas/jgdas_forecast.ecf index 392d5f362fa..a58855db35c 100755 --- a/ecf/scripts/gdas/jgdas_forecast.ecf +++ b/ecf/scripts/gdas/jgdas_forecast.ecf @@ -39,7 +39,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGLOBAL_FORECAST +${HOMEglobal}/jobs/JGLOBAL_FORECAST if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gdas/wave/init/jgdas_wave_init.ecf b/ecf/scripts/gdas/wave/init/jgdas_wave_init.ecf index 1f73e43eb11..30c59ea1641 100755 --- a/ecf/scripts/gdas/wave/init/jgdas_wave_init.ecf +++ b/ecf/scripts/gdas/wave/init/jgdas_wave_init.ecf @@ -37,7 +37,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGLOBAL_WAVE_INIT +${HOMEglobal}/jobs/JGLOBAL_WAVE_INIT if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gdas/wave/post/jgdas_wave_postpnt.ecf b/ecf/scripts/gdas/wave/post/jgdas_wave_postpnt.ecf index fb45d8fda54..e00f11a1d50 100755 --- a/ecf/scripts/gdas/wave/post/jgdas_wave_postpnt.ecf +++ b/ecf/scripts/gdas/wave/post/jgdas_wave_postpnt.ecf @@ -38,7 +38,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGLOBAL_WAVE_POST_PNT +${HOMEglobal}/jobs/JGLOBAL_WAVE_POST_PNT if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gdas/wave/post/jgdas_wave_postsbs.ecf b/ecf/scripts/gdas/wave/post/jgdas_wave_postsbs.ecf index 5212a026d93..2f297ab8f9e 100755 --- a/ecf/scripts/gdas/wave/post/jgdas_wave_postsbs.ecf +++ b/ecf/scripts/gdas/wave/post/jgdas_wave_postsbs.ecf @@ -41,7 +41,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGLOBAL_WAVE_POST_SBS +${HOMEglobal}/jobs/JGLOBAL_WAVE_POST_SBS if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gdas/wave/prep/jgdas_wave_prep.ecf b/ecf/scripts/gdas/wave/prep/jgdas_wave_prep.ecf index b1fd9fe32e3..622fdc96ed3 100755 --- a/ecf/scripts/gdas/wave/prep/jgdas_wave_prep.ecf +++ b/ecf/scripts/gdas/wave/prep/jgdas_wave_prep.ecf @@ -45,7 +45,7 @@ export USE_CFP=YES ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGLOBAL_WAVE_PREP +${HOMEglobal}/jobs/JGLOBAL_WAVE_PREP if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis.ecf b/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis.ecf index 12653d0e95d..7f9793131cb 100755 --- a/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis.ecf +++ b/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis.ecf @@ -51,7 +51,7 @@ export PREINP='' ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGLOBAL_ATMOS_ANALYSIS +${HOMEglobal}/jobs/JGLOBAL_ATMOS_ANALYSIS if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis_calc.ecf b/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis_calc.ecf index 92c8c0551ef..e4e38a29c7d 100755 --- a/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis_calc.ecf +++ b/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis_calc.ecf @@ -40,7 +40,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC +${HOMEglobal}/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" ecflow_client --abort diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak.ecf index 5f56e7ac17f..8cfb574a962 100755 --- a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak.ecf +++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak.ecf @@ -41,7 +41,7 @@ export FHR3=%FHR3% ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGFS_ATMOS_GEMPAK +${HOMEglobal}/jobs/JGFS_ATMOS_GEMPAK if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_meta.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_meta.ecf index 4798e2a06ac..515b25e16bf 100755 --- a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_meta.ecf +++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_meta.ecf @@ -40,7 +40,7 @@ export USE_CFP=YES ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGFS_ATMOS_GEMPAK_META +${HOMEglobal}/jobs/JGFS_ATMOS_GEMPAK_META if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_ncdc_upapgif.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_ncdc_upapgif.ecf index 25659058f8c..506d1adb14e 100755 --- a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_ncdc_upapgif.ecf +++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_ncdc_upapgif.ecf @@ -37,7 +37,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF +${HOMEglobal}/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_npoess_pgrb2_0p5deg.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_npoess_pgrb2_0p5deg.ecf index da66dfe7f6d..7a9d1b71f21 100755 --- a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_npoess_pgrb2_0p5deg.ecf +++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_npoess_pgrb2_0p5deg.ecf @@ -36,7 +36,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS +${HOMEglobal}/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_pgrb2_spec_gempak.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_pgrb2_spec_gempak.ecf index df0f9f90f18..d18daca1739 100755 --- a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_pgrb2_spec_gempak.ecf +++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_pgrb2_spec_gempak.ecf @@ -39,7 +39,7 @@ export FHR3=%FHR3% ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC +${HOMEglobal}/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/atmos/obsproc/dump/jgfs_atmos_tropcy_qc_reloc.ecf b/ecf/scripts/gfs/atmos/obsproc/dump/jgfs_atmos_tropcy_qc_reloc.ecf index c5f838fb5f9..30e5d0cad44 100755 --- a/ecf/scripts/gfs/atmos/obsproc/dump/jgfs_atmos_tropcy_qc_reloc.ecf +++ b/ecf/scripts/gfs/atmos/obsproc/dump/jgfs_atmos_tropcy_qc_reloc.ecf @@ -37,7 +37,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC +${HOMEglobal}/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/atmos/obsproc/prep/jgfs_atmos_prep_sfc.ecf b/ecf/scripts/gfs/atmos/obsproc/prep/jgfs_atmos_prep_sfc.ecf index 9e677316bf6..6397bbc4ec5 100755 --- a/ecf/scripts/gfs/atmos/obsproc/prep/jgfs_atmos_prep_sfc.ecf +++ b/ecf/scripts/gfs/atmos/obsproc/prep/jgfs_atmos_prep_sfc.ecf @@ -35,7 +35,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGLOBAL_ATMOS_PREP_SFC +${HOMEglobal}/jobs/JGLOBAL_ATMOS_PREP_SFC if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_manager.ecf b/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_manager.ecf index 50a71a44ba6..16dad405575 100755 --- a/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_manager.ecf +++ b/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_manager.ecf @@ -29,7 +29,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGLOBAL_ATMOS_POST_MANAGER +${HOMEglobal}/jobs/JGLOBAL_ATMOS_POST_MANAGER if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_master.ecf b/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_master.ecf index d8b1e2b5310..8f7d3785291 100755 --- a/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_master.ecf +++ b/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_master.ecf @@ -56,7 +56,7 @@ export g2tmpl_ver=v${g2tmpl_ver} ############################################################ # CALL executable job script here ############################################################ -$HOMEgfs/jobs/JGLOBAL_ATMOS_NCEPPOST +$HOMEglobal/jobs/JGLOBAL_ATMOS_NCEPPOST if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/atmos/post_processing/awips_20km_1p0/jgfs_atmos_awips_master.ecf b/ecf/scripts/gfs/atmos/post_processing/awips_20km_1p0/jgfs_atmos_awips_master.ecf index c559de3394b..464a74cd74f 100755 --- a/ecf/scripts/gfs/atmos/post_processing/awips_20km_1p0/jgfs_atmos_awips_master.ecf +++ b/ecf/scripts/gfs/atmos/post_processing/awips_20km_1p0/jgfs_atmos_awips_master.ecf @@ -40,7 +40,7 @@ export FHRGRP=%FHRGRP% FHRLST=%FHRLST% FCSTHR=%FCSTHR% TRDRUN=%TRDRUN% fcsthr=%F ############################################################ # CALL executable job script here ############################################################ -$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +$HOMEglobal/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/atmos/post_processing/bufr_sounding/jgfs_atmos_postsnd.ecf b/ecf/scripts/gfs/atmos/post_processing/bufr_sounding/jgfs_atmos_postsnd.ecf index 3322aceeb1f..26928b3543e 100755 --- a/ecf/scripts/gfs/atmos/post_processing/bufr_sounding/jgfs_atmos_postsnd.ecf +++ b/ecf/scripts/gfs/atmos/post_processing/bufr_sounding/jgfs_atmos_postsnd.ecf @@ -44,7 +44,7 @@ export OMP_NUM_THREADS=1 ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGFS_ATMOS_POSTSND +${HOMEglobal}/jobs/JGFS_ATMOS_POSTSND if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/atmos/post_processing/bulletins/jgfs_atmos_fbwind.ecf b/ecf/scripts/gfs/atmos/post_processing/bulletins/jgfs_atmos_fbwind.ecf index 4afac0d2734..cb5ea471d8d 100755 --- a/ecf/scripts/gfs/atmos/post_processing/bulletins/jgfs_atmos_fbwind.ecf +++ b/ecf/scripts/gfs/atmos/post_processing/bulletins/jgfs_atmos_fbwind.ecf @@ -38,7 +38,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGFS_ATMOS_FBWIND +${HOMEglobal}/jobs/JGFS_ATMOS_FBWIND if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/atmos/verf/jgfs_atmos_vminmon.ecf b/ecf/scripts/gfs/atmos/verf/jgfs_atmos_vminmon.ecf index 2d9e8814ab2..2682194d8da 100755 --- a/ecf/scripts/gfs/atmos/verf/jgfs_atmos_vminmon.ecf +++ b/ecf/scripts/gfs/atmos/verf/jgfs_atmos_vminmon.ecf @@ -38,7 +38,7 @@ export VERBOSE=YES ############################################################ # CALL executable job script here ############################################################ -$HOMEgfs/jobs/JGFS_ATMOS_VMINMON +$HOMEglobal/jobs/JGFS_ATMOS_VMINMON if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/jgfs_forecast.ecf b/ecf/scripts/gfs/jgfs_forecast.ecf index 26d0c3b80d0..81bd5c9cbb5 100755 --- a/ecf/scripts/gfs/jgfs_forecast.ecf +++ b/ecf/scripts/gfs/jgfs_forecast.ecf @@ -39,7 +39,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGLOBAL_FORECAST +${HOMEglobal}/jobs/JGLOBAL_FORECAST if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" ecflow_client --abort diff --git a/ecf/scripts/gfs/wave/gempak/jgfs_wave_gempak.ecf b/ecf/scripts/gfs/wave/gempak/jgfs_wave_gempak.ecf index 8406f0449c9..55579fa9d9f 100755 --- a/ecf/scripts/gfs/wave/gempak/jgfs_wave_gempak.ecf +++ b/ecf/scripts/gfs/wave/gempak/jgfs_wave_gempak.ecf @@ -36,7 +36,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGLOBAL_WAVE_GEMPAK +${HOMEglobal}/jobs/JGLOBAL_WAVE_GEMPAK if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/wave/init/jgfs_wave_init.ecf b/ecf/scripts/gfs/wave/init/jgfs_wave_init.ecf index 1f73e43eb11..30c59ea1641 100755 --- a/ecf/scripts/gfs/wave/init/jgfs_wave_init.ecf +++ b/ecf/scripts/gfs/wave/init/jgfs_wave_init.ecf @@ -37,7 +37,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGLOBAL_WAVE_INIT +${HOMEglobal}/jobs/JGLOBAL_WAVE_INIT if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpnt.ecf b/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpnt.ecf index d4de0a97251..1e54b042374 100755 --- a/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpnt.ecf +++ b/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpnt.ecf @@ -38,7 +38,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGLOBAL_WAVE_POST_BNDPNT +${HOMEglobal}/jobs/JGLOBAL_WAVE_POST_BNDPNT if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpntbll.ecf b/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpntbll.ecf index 528068f0574..d1e3abd0bac 100755 --- a/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpntbll.ecf +++ b/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpntbll.ecf @@ -36,7 +36,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL +${HOMEglobal}/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/wave/post/jgfs_wave_postpnt.ecf b/ecf/scripts/gfs/wave/post/jgfs_wave_postpnt.ecf index d09204cb2d0..ac0b43748c9 100755 --- a/ecf/scripts/gfs/wave/post/jgfs_wave_postpnt.ecf +++ b/ecf/scripts/gfs/wave/post/jgfs_wave_postpnt.ecf @@ -38,7 +38,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGLOBAL_WAVE_POST_PNT +${HOMEglobal}/jobs/JGLOBAL_WAVE_POST_PNT if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/wave/post/jgfs_wave_postsbs.ecf b/ecf/scripts/gfs/wave/post/jgfs_wave_postsbs.ecf index 52179a56e2f..c09c3d53342 100755 --- a/ecf/scripts/gfs/wave/post/jgfs_wave_postsbs.ecf +++ b/ecf/scripts/gfs/wave/post/jgfs_wave_postsbs.ecf @@ -41,7 +41,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGLOBAL_WAVE_POST_SBS +${HOMEglobal}/jobs/JGLOBAL_WAVE_POST_SBS if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_bulls.ecf b/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_bulls.ecf index f7d0ea1be7a..17f73e92c13 100755 --- a/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_bulls.ecf +++ b/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_bulls.ecf @@ -36,7 +36,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGLOBAL_WAVE_PRDGEN_BULLS +${HOMEglobal}/jobs/JGLOBAL_WAVE_PRDGEN_BULLS if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_gridded.ecf b/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_gridded.ecf index 1c6ba47c93e..4b316932ab0 100755 --- a/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_gridded.ecf +++ b/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_gridded.ecf @@ -38,7 +38,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED +${HOMEglobal}/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/wave/prep/jgfs_wave_prep.ecf b/ecf/scripts/gfs/wave/prep/jgfs_wave_prep.ecf index 171e737692c..899cf327f98 100755 --- a/ecf/scripts/gfs/wave/prep/jgfs_wave_prep.ecf +++ b/ecf/scripts/gfs/wave/prep/jgfs_wave_prep.ecf @@ -45,7 +45,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEgfs}/jobs/JGLOBAL_WAVE_PREP +${HOMEglobal}/jobs/JGLOBAL_WAVE_PREP if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" ecflow_client --abort diff --git a/gempak/ush/gdas_ecmwf_meta_ver.sh b/gempak/ush/gdas_ecmwf_meta_ver.sh index e1ddc3c575e..1198577a145 100755 --- a/gempak/ush/gdas_ecmwf_meta_ver.sh +++ b/gempak/ush/gdas_ecmwf_meta_ver.sh @@ -4,7 +4,7 @@ # of ecmwf fcsts # -source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEglobal}/ush/preamble.sh" export pgm=gdplot2_nc source prep_step @@ -16,7 +16,7 @@ device="nc | ecmwfver.meta" # Copy in datatype table to define gdfile type # -cpreq "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl +cpreq "${HOMEglobal}/gempak/fix/datatype.tbl" datatype.tbl # TODO: Add only necessary files and remove unneeded ones to minimize data volume # TODO: remove live links and refer https://github.com/NOAA-EMC/global-workflow/issues/4406 diff --git a/gempak/ush/gdas_meta_loop.sh b/gempak/ush/gdas_meta_loop.sh index eecc0ed9f0e..41a5c93cc1b 100755 --- a/gempak/ush/gdas_meta_loop.sh +++ b/gempak/ush/gdas_meta_loop.sh @@ -3,7 +3,7 @@ # Metafile Script : gdas_meta_loop # -source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEglobal}/ush/preamble.sh" device="nc | gdasloop.meta" diff --git a/gempak/ush/gdas_meta_na.sh b/gempak/ush/gdas_meta_na.sh index 61d043d345f..52973b004d6 100755 --- a/gempak/ush/gdas_meta_na.sh +++ b/gempak/ush/gdas_meta_na.sh @@ -3,7 +3,7 @@ # Metafile Script : gdas_meta_na # -source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEglobal}/ush/preamble.sh" device="nc | gdas.meta" @@ -44,7 +44,7 @@ PROJ = str/90;-105;0 LATLON = 1 -restore ${HOMEgfs}/gempak/ush/restore/pmsl_thkn.2.nts +restore ${HOMEglobal}/gempak/ush/restore/pmsl_thkn.2.nts CLRBAR = 1 HLSYM = 2;1.5//21//hw TEXT = 1/21//hw @@ -53,7 +53,7 @@ l ru -restore ${HOMEgfs}/gempak/ush/restore/850mb_hght_tmpc.2.nts +restore ${HOMEglobal}/gempak/ush/restore/850mb_hght_tmpc.2.nts CLRBAR = 1 TEXT = 1/21//hw SKIP = 0 !0 !0 !0 !/3 @@ -63,7 +63,7 @@ l ru -restore 
${HOMEgfs}/gempak/ush/restore/700mb_hght_relh_omeg.2.nts +restore ${HOMEglobal}/gempak/ush/restore/700mb_hght_relh_omeg.2.nts CLRBAR = 1 TEXT = 1/21//hw TITLE = 5/-2/~ ? ${m_title} @ HGT, REL HUMIDITY AND OMEGA|~@ HGT, RH AND OMEGA!0 @@ -71,7 +71,7 @@ l ru -restore ${HOMEgfs}/gempak/ush/restore/500mb_hght_absv.2.nts +restore ${HOMEglobal}/gempak/ush/restore/500mb_hght_absv.2.nts CLRBAR = 1 TEXT = 1/21//hw TITLE = 5/-2/~ ? ${m_title} @ HGT AND VORTICITY|~@ HGT AND VORTICITY!0 @@ -79,7 +79,7 @@ l ru -restore ${HOMEgfs}/gempak/ush/restore/250mb_hght_wnd.2.nts +restore ${HOMEglobal}/gempak/ush/restore/250mb_hght_wnd.2.nts CLRBAR = 1 TEXT = 1/21//hw TITLE = 5/-2/~ ? ${m_title} @ HGT, ISOTACHS AND WIND (KTS)|~@ HGT AND WIND!0 diff --git a/gempak/ush/gdas_ukmet_meta_ver.sh b/gempak/ush/gdas_ukmet_meta_ver.sh index ab3f5c1d6eb..3d475e9a7b4 100755 --- a/gempak/ush/gdas_ukmet_meta_ver.sh +++ b/gempak/ush/gdas_ukmet_meta_ver.sh @@ -13,13 +13,13 @@ # M. Klein/HPC 11/2006 Modify to run in production. # -source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEglobal}/ush/preamble.sh" export pgm=gdplot2_nc source prep_step device="nc | ukmetver_12.meta" -cpreq "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl +cpreq "${HOMEglobal}/gempak/fix/datatype.tbl" datatype.tbl # SET CURRENT CYCLE AS THE VERIFICATION GRIDDED FILE. 
# TODO: Add only necessary files and remove unneeded ones to minimize data volume diff --git a/gempak/ush/gempak_gdas_f000_gif.sh b/gempak/ush/gempak_gdas_f000_gif.sh index 0d2ec15dc61..65424425a06 100755 --- a/gempak/ush/gempak_gdas_f000_gif.sh +++ b/gempak/ush/gempak_gdas_f000_gif.sh @@ -7,13 +7,13 @@ # ######################################################################### -source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEglobal}/ush/preamble.sh" LATVAL="1/1/1/1/5;5" LATSOUTH="1/1/1/1;4/5;5" pixels="1728;1472" -cpreq "${HOMEgfs}/gempak/fix/coltbl.spc" coltbl.xwp +cpreq "${HOMEglobal}/gempak/fix/coltbl.spc" coltbl.xwp ################################################################# # NORTHERN HEMISPHERE ANALYSIS CHARTS # @@ -23,7 +23,7 @@ cpreq "${HOMEgfs}/gempak/fix/coltbl.spc" coltbl.xwp echo "0000${PDY}${cyc}" > dates export FORT55="title.output" -"${HOMEgfs}/exec/webtitle.x" < dates +"${HOMEglobal}/exec/webtitle.x" < dates TITLE="$(cat title.output)" echo "TITLE = ${TITLE}" diff --git a/gempak/ush/gempak_gfs_f000_gif.sh b/gempak/ush/gempak_gfs_f000_gif.sh index d44782a864f..1b4cd06520a 100755 --- a/gempak/ush/gempak_gfs_f000_gif.sh +++ b/gempak/ush/gempak_gfs_f000_gif.sh @@ -15,11 +15,11 @@ # ######################################################################### -source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEglobal}/ush/preamble.sh" LATVAL="1/1/1/1/5;5" pixels="1728;1472" -cpreq "${HOMEgfs}/gempak/fix/coltbl.spc" coltbl.xwp +cpreq "${HOMEglobal}/gempak/fix/coltbl.spc" coltbl.xwp ################################################################# # ANALYSIS CHARTS # @@ -29,7 +29,7 @@ cpreq "${HOMEgfs}/gempak/fix/coltbl.spc" coltbl.xwp echo "0000${PDY}${cyc}" > dates export FORT55="title.output" -"${HOMEgfs}/exec/webtitle.x" < dates +"${HOMEglobal}/exec/webtitle.x" < dates TITLE="$(cat title.output)" echo "TITLE = ${TITLE}" @@ -576,6 +576,6 @@ fi export input=${COMOUT_ATMOS_GEMPAK_GIF}/${hgttmp500dev} export HEADER=YES export 
OUTPATH=${DATA}/gfs_500_hgt_tmp_nh_anl_${cyc}.tif -"${USHgfs}/make_tif.sh" +"${USHglobal}/make_tif.sh" exit diff --git a/gempak/ush/gempak_gfs_fhhh_gif.sh b/gempak/ush/gempak_gfs_fhhh_gif.sh index 34225a18bfd..30a0a2c7340 100755 --- a/gempak/ush/gempak_gfs_fhhh_gif.sh +++ b/gempak/ush/gempak_gfs_fhhh_gif.sh @@ -7,11 +7,11 @@ # ######################################################################### -source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEglobal}/ush/preamble.sh" LATVAL="1/1/1/1/5;5" pixels="1728;1472" -cpreq "${HOMEgfs}/gempak/fix/coltbl.spc" coltbl.xwp +cpreq "${HOMEglobal}/gempak/fix/coltbl.spc" coltbl.xwp ########################################################## # FORECAST CHARTS # @@ -21,7 +21,7 @@ cpreq "${HOMEgfs}/gempak/fix/coltbl.spc" coltbl.xwp echo "0${fhr3}${PDY}${cyc}" > dates export FORT55="title.output" -"${HOMEgfs}/exec/webtitle.x" < dates +"${HOMEglobal}/exec/webtitle.x" < dates TITLE="$(cat title.output)" echo "TITLE = ${TITLE}" diff --git a/gempak/ush/gfs_meta_ak.sh b/gempak/ush/gfs_meta_ak.sh index 4dda590b96d..ea15c06c601 100755 --- a/gempak/ush/gfs_meta_ak.sh +++ b/gempak/ush/gfs_meta_ak.sh @@ -3,14 +3,14 @@ # Metafile Script : gfs_meta_ak.sh # -source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEglobal}/ush/preamble.sh" cd "${DATA}" || exit 2 rm -rf "${DATA}/ak" mkdir -p -m 775 "${DATA}/ak" cd "${DATA}/ak" || exit 2 -cpreq "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl +cpreq "${HOMEglobal}/gempak/fix/datatype.tbl" datatype.tbl device="nc | gfs.meta.ak" diff --git a/gempak/ush/gfs_meta_bwx.sh b/gempak/ush/gfs_meta_bwx.sh index c80abadb58c..59336fb6a45 100755 --- a/gempak/ush/gfs_meta_bwx.sh +++ b/gempak/ush/gfs_meta_bwx.sh @@ -5,11 +5,11 @@ # Set up Local Variables # -source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEglobal}/ush/preamble.sh" mkdir -p -m 775 "${DATA}/BWX" cd "${DATA}/BWX" || exit 2 -cpreq "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl +cpreq "${HOMEglobal}/gempak/fix/datatype.tbl" datatype.tbl metatype="bwx" 
metaname="${RUN}_${PDY}_${cyc}_us_${metatype}" diff --git a/gempak/ush/gfs_meta_comp.sh b/gempak/ush/gfs_meta_comp.sh index f1ceef46fe8..94957442ba0 100755 --- a/gempak/ush/gfs_meta_comp.sh +++ b/gempak/ush/gfs_meta_comp.sh @@ -9,12 +9,12 @@ # Set up Local Variables # -source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEglobal}/ush/preamble.sh" rm -Rf "${DATA}/COMP" "${DATA}/GEMPAK_META_COMP" mkdir -p -m 775 "${DATA}/COMP" "${DATA}/GEMPAK_META_COMP" cd "${DATA}/COMP" || exit 2 -cpreq "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl +cpreq "${HOMEglobal}/gempak/fix/datatype.tbl" datatype.tbl mdl=gfs MDL=GFS diff --git a/gempak/ush/gfs_meta_crb.sh b/gempak/ush/gfs_meta_crb.sh index afd3532ae9a..387188413e4 100755 --- a/gempak/ush/gfs_meta_crb.sh +++ b/gempak/ush/gfs_meta_crb.sh @@ -5,11 +5,11 @@ # Set Up Local Variables # -source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEglobal}/ush/preamble.sh" mkdir -p -m 775 "${DATA}/crb" cd "${DATA}/crb" || exit 2 -cpreq "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl +cpreq "${HOMEglobal}/gempak/fix/datatype.tbl" datatype.tbl # mdl=gfs MDL=GFS diff --git a/gempak/ush/gfs_meta_hi.sh b/gempak/ush/gfs_meta_hi.sh index 3461db4a9bd..3e445d0705b 100755 --- a/gempak/ush/gfs_meta_hi.sh +++ b/gempak/ush/gfs_meta_hi.sh @@ -3,11 +3,11 @@ # Metafile Script : gfs_meta_hi.sh # -source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEglobal}/ush/preamble.sh" mkdir -p -m 775 "${DATA}/mrfhi" cd "${DATA}/mrfhi" || exit 2 -cpreq "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl +cpreq "${HOMEglobal}/gempak/fix/datatype.tbl" datatype.tbl device="nc | mrfhi.meta" @@ -40,9 +40,9 @@ MAP = 1 CLEAR = yes CLRBAR = 1 -restore ${HOMEgfs}/gempak/ush/restore/garea_hi.nts +restore ${HOMEglobal}/gempak/ush/restore/garea_hi.nts -restore ${HOMEgfs}/gempak/ush/restore/pmsl_thkn.2.nts +restore ${HOMEglobal}/gempak/ush/restore/pmsl_thkn.2.nts CLRBAR = 1 HLSYM = 2;1.5//21//hw TEXT = 1/21//hw @@ -51,7 +51,7 @@ l ru -restore 
${HOMEgfs}/gempak/ush/restore/850mb_hght_tmpc.2.nts +restore ${HOMEglobal}/gempak/ush/restore/850mb_hght_tmpc.2.nts CLRBAR = 1 TEXT = 1/21//hw TITLE = 5/-2/~ ? ${m_title} @ HGHTS, TEMPERATURE AND WIND (KTS)|~@ HGHT, TMP, WIND!0!0!0 @@ -59,7 +59,7 @@ l ru -restore ${HOMEgfs}/gempak/ush/restore/700mb_hght_relh_omeg.2.nts +restore ${HOMEglobal}/gempak/ush/restore/700mb_hght_relh_omeg.2.nts CLRBAR = 1 TEXT = 1/21//hw TITLE = 5/-2/~ ? ${m_title} @ HGHTS, REL HUMIDITY AND OMEGA|~@ HGHT, RH AND OMEGA!0 @@ -67,7 +67,7 @@ l ru -restore ${HOMEgfs}/gempak/ush/restore/500mb_hght_absv.2.nts +restore ${HOMEglobal}/gempak/ush/restore/500mb_hght_absv.2.nts CLRBAR = 1 TEXT = 1/21//hw TITLE = 5/-2/~ ? ${m_title} @ HEIGHTS AND VORTICITY|~@ HGHT AND VORTICITY!0 @@ -75,7 +75,7 @@ l ru -restore ${HOMEgfs}/gempak/ush/restore/200mb_hght_wnd.2.nts +restore ${HOMEglobal}/gempak/ush/restore/200mb_hght_wnd.2.nts CLRBAR = 1 TEXT = 1/21//hw TITLE = 5/-2/~ ? ${m_title} @ HEIGHTS, ISOTACHS AND WIND (KTS)|~@ HGHT AND WIND!0 @@ -83,7 +83,7 @@ l ru -restore ${HOMEgfs}/gempak/ush/restore/250mb_hght_wnd.2.nts +restore ${HOMEglobal}/gempak/ush/restore/250mb_hght_wnd.2.nts CLRBAR = 1 TEXT = 1/21//hw TITLE = 5/-2/~ ? ${m_title} @ HEIGHTS, ISOTACHS AND WIND (KTS)|~@ HGHT AND WIND!0 @@ -91,7 +91,7 @@ l ru -restore ${HOMEgfs}/gempak/ush/restore/300mb_hght_wnd.2.nts +restore ${HOMEglobal}/gempak/ush/restore/300mb_hght_wnd.2.nts CLRBAR = 1 TEXT = 1/21//hw TITLE = 5/-2/~ ? 
${m_title} @ HEIGHTS, ISOTACHS AND WIND (KTS)|~@ HGHT AND WIND!0 @@ -144,7 +144,7 @@ CLRBAR = 1 r -restore ${HOMEgfs}/gempak/ush/restore/precip.2.nts +restore ${HOMEglobal}/gempak/ush/restore/precip.2.nts CLRBAR = 1 TEXT = 1/21//hw GDATTIM = F12-F192-06; F214-F384-12 diff --git a/gempak/ush/gfs_meta_hur.sh b/gempak/ush/gfs_meta_hur.sh index b2bef3d33ce..a0f3e1d512e 100755 --- a/gempak/ush/gfs_meta_hur.sh +++ b/gempak/ush/gfs_meta_hur.sh @@ -5,11 +5,11 @@ # Set up Local Variables # -source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEglobal}/ush/preamble.sh" mkdir -p -m 775 "${DATA}/hur" cd "${DATA}/hur" || exit 2 -cpreq "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl +cpreq "${HOMEglobal}/gempak/fix/datatype.tbl" datatype.tbl mdl=gfs MDL=GFS diff --git a/gempak/ush/gfs_meta_mar_atl.sh b/gempak/ush/gfs_meta_mar_atl.sh index 14af5cc29ea..e03947e3855 100755 --- a/gempak/ush/gfs_meta_mar_atl.sh +++ b/gempak/ush/gfs_meta_mar_atl.sh @@ -5,11 +5,11 @@ # Set up Local Variables # -source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEglobal}/ush/preamble.sh" mkdir -p -m 775 "${DATA}/MAR_ATL" cd "${DATA}/MAR_ATL" || exit 2 -cpreq "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl +cpreq "${HOMEglobal}/gempak/fix/datatype.tbl" datatype.tbl # # Link data into DATA to sidestep gempak path limits diff --git a/gempak/ush/gfs_meta_mar_comp.sh b/gempak/ush/gfs_meta_mar_comp.sh index 4471e97d473..d91711865ee 100755 --- a/gempak/ush/gfs_meta_mar_comp.sh +++ b/gempak/ush/gfs_meta_mar_comp.sh @@ -5,13 +5,13 @@ # Set up Local Variables # -source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEglobal}/ush/preamble.sh" rm -Rf "${DATA}/GEMPAK_META_MAR" mkdir -p -m 775 "${DATA}/GEMPAK_META_MAR" "${DATA}/MAR_COMP" cd "${DATA}/MAR_COMP" || exit 2 -cpreq "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl +cpreq "${HOMEglobal}/gempak/fix/datatype.tbl" datatype.tbl export COMIN="gfs.multi" mkdir -p "${COMIN}" diff --git a/gempak/ush/gfs_meta_mar_pac.sh b/gempak/ush/gfs_meta_mar_pac.sh index 
fb630607458..44c20d75a75 100755 --- a/gempak/ush/gfs_meta_mar_pac.sh +++ b/gempak/ush/gfs_meta_mar_pac.sh @@ -5,11 +5,11 @@ # Set up Local Variables # -source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEglobal}/ush/preamble.sh" mkdir -p -m 775 "${DATA}/MAR_PAC" cd "${DATA}/MAR_PAC" || exit 2 -cpreq "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl +cpreq "${HOMEglobal}/gempak/fix/datatype.tbl" datatype.tbl # # Link data into DATA to sidestep gempak path limits diff --git a/gempak/ush/gfs_meta_mar_ql.sh b/gempak/ush/gfs_meta_mar_ql.sh index 737edb84414..69f116a79e2 100755 --- a/gempak/ush/gfs_meta_mar_ql.sh +++ b/gempak/ush/gfs_meta_mar_ql.sh @@ -5,11 +5,11 @@ # Set up Local Variables # -source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEglobal}/ush/preamble.sh" mkdir -p -m 775 "${DATA}/MAR_QL" cd "${DATA}/MAR_QL" || exit 2 -cpreq "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl +cpreq "${HOMEglobal}/gempak/fix/datatype.tbl" datatype.tbl # # Link data into DATA to sidestep gempak path limits diff --git a/gempak/ush/gfs_meta_mar_skewt.sh b/gempak/ush/gfs_meta_mar_skewt.sh index c39854f03fe..a0da07942b2 100755 --- a/gempak/ush/gfs_meta_mar_skewt.sh +++ b/gempak/ush/gfs_meta_mar_skewt.sh @@ -5,11 +5,11 @@ # Set up Local Variables # -source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEglobal}/ush/preamble.sh" mkdir -p -m 775 "${DATA}/MAR_SKEWT" cd "${DATA}/MAR_SKEWT" || exit 2 -cpreq "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl +cpreq "${HOMEglobal}/gempak/fix/datatype.tbl" datatype.tbl # # Link data into DATA to sidestep gempak path limits diff --git a/gempak/ush/gfs_meta_mar_ver.sh b/gempak/ush/gfs_meta_mar_ver.sh index 849475746d8..b35f0459db5 100755 --- a/gempak/ush/gfs_meta_mar_ver.sh +++ b/gempak/ush/gfs_meta_mar_ver.sh @@ -5,11 +5,11 @@ # Set up Local Variables # -source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEglobal}/ush/preamble.sh" mkdir -p -m 775 "${DATA}/MAR_VER" cd "${DATA}/MAR_VER" || exit 2 -cpreq "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl 
+cpreq "${HOMEglobal}/gempak/fix/datatype.tbl" datatype.tbl # # Link data into DATA to sidestep gempak path limits diff --git a/gempak/ush/gfs_meta_nhsh.sh b/gempak/ush/gfs_meta_nhsh.sh index 9e6cddaaa71..93654b71a1d 100755 --- a/gempak/ush/gfs_meta_nhsh.sh +++ b/gempak/ush/gfs_meta_nhsh.sh @@ -3,11 +3,11 @@ # Metafile Script : mrf_meta_nhsh # -source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEglobal}/ush/preamble.sh" mkdir -p -m 775 "${DATA}/mrfnhsh" cd "${DATA}/mrfnhsh" || exit 2 -cpreq "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl +cpreq "${HOMEglobal}/gempak/fix/datatype.tbl" datatype.tbl # # Link data into DATA to sidestep gempak path limits @@ -39,9 +39,9 @@ MAP = 1 CLEAR = yes CLRBAR = 1 -restore ${HOMEgfs}/gempak/ush/restore/garea_nh.nts +restore ${HOMEglobal}/gempak/ush/restore/garea_nh.nts -restore ${HOMEgfs}/gempak/ush/restore/500mb_hght_absv.2.nts +restore ${HOMEglobal}/gempak/ush/restore/500mb_hght_absv.2.nts CLRBAR = 1 TEXT = 1/21//hw SKIP = 0 !0 !1 @@ -61,7 +61,7 @@ l ru -restore ${HOMEgfs}/gempak/ush/restore/garea_sh.nts +restore ${HOMEglobal}/gempak/ush/restore/garea_sh.nts DEVICE = nc | Nmeta_sh TITLE = 5//~ ? ${m_title} @ HEIGHTS AND VORTICITY|~ @ HGHT AND VORTICITY!0 @@ -69,10 +69,10 @@ l ru -restore ${HOMEgfs}/gempak/ush/restore/garea_nh.nts +restore ${HOMEglobal}/gempak/ush/restore/garea_nh.nts DEVICE = nc | Nmeta_nh -restore ${HOMEgfs}/gempak/ush/restore/250mb_hght_wnd.2.nts +restore ${HOMEglobal}/gempak/ush/restore/250mb_hght_wnd.2.nts CLRBAR = 1 TEXT = 1/21//hw GDPFUN = knts((mag(wnd))) !sm9s(hght) @@ -81,11 +81,11 @@ l ru -restore ${HOMEgfs}/gempak/ush/restore/garea_sh.nts +restore ${HOMEglobal}/gempak/ush/restore/garea_sh.nts DEVICE = nc | Nmeta_sh ru -restore ${HOMEgfs}/gempak/ush/restore/precip.2.nts +restore ${HOMEglobal}/gempak/ush/restore/precip.2.nts CLRBAR = 1 TEXT = 1/21//hw GDATTIM = F12-F240-12 @@ -95,7 +95,7 @@ TITLE = 5//~ ? 
${m_title} 12-HOUR TOTAL PRECIPITATION (IN)|~ 12-HOURLY TOTAL P l r -restore ${HOMEgfs}/gempak/ush/restore/garea_sh.nts +restore ${HOMEglobal}/gempak/ush/restore/garea_sh.nts DEVICE = nc | Nmeta_sh ru diff --git a/gempak/ush/gfs_meta_opc_na_ver.sh b/gempak/ush/gfs_meta_opc_na_ver.sh index a38342ed1ab..a7b83fef28f 100755 --- a/gempak/ush/gfs_meta_opc_na_ver.sh +++ b/gempak/ush/gfs_meta_opc_na_ver.sh @@ -5,11 +5,11 @@ # Set up Local Variables # -source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEglobal}/ush/preamble.sh" mkdir -p -m 775 "${DATA}/OPC_NA_VER_F${fend}" cd "${DATA}/OPC_NA_VER_F${fend}" || exit 2 -cpreq "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl +cpreq "${HOMEglobal}/gempak/fix/datatype.tbl" datatype.tbl # # Link data into DATA to sidestep gempak path limits diff --git a/gempak/ush/gfs_meta_opc_np_ver.sh b/gempak/ush/gfs_meta_opc_np_ver.sh index fbd3f8ce653..1a446fde543 100755 --- a/gempak/ush/gfs_meta_opc_np_ver.sh +++ b/gempak/ush/gfs_meta_opc_np_ver.sh @@ -5,11 +5,11 @@ # Set up Local Variables # -source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEglobal}/ush/preamble.sh" mkdir -p -m 775 "${DATA}/OPC_NP_VER_F${fend}" cd "${DATA}/OPC_NP_VER_F${fend}" || exit 2 -cpreq "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl +cpreq "${HOMEglobal}/gempak/fix/datatype.tbl" datatype.tbl # # Link data into DATA to sidestep gempak path limits diff --git a/gempak/ush/gfs_meta_precip.sh b/gempak/ush/gfs_meta_precip.sh index 3cb39251a93..659280df260 100755 --- a/gempak/ush/gfs_meta_precip.sh +++ b/gempak/ush/gfs_meta_precip.sh @@ -5,11 +5,11 @@ # Set up Local Variables # -source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEglobal}/ush/preamble.sh" mkdir -p -m 775 "${DATA}/precip" cd "${DATA}/precip" || exit 2 -cpreq "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl +cpreq "${HOMEglobal}/gempak/fix/datatype.tbl" datatype.tbl # # Link data into DATA to sidestep gempak path limits diff --git a/gempak/ush/gfs_meta_qpf.sh b/gempak/ush/gfs_meta_qpf.sh index 
3b29621269d..e03e1f53a10 100755 --- a/gempak/ush/gfs_meta_qpf.sh +++ b/gempak/ush/gfs_meta_qpf.sh @@ -5,11 +5,11 @@ # Set up Local Variables # -source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEglobal}/ush/preamble.sh" mkdir -p -m 775 "${DATA}/qpf" cd "${DATA}/qpf" || exit 2 -cpreq "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl +cpreq "${HOMEglobal}/gempak/fix/datatype.tbl" datatype.tbl # # Link data into DATA to sidestep gempak path limits diff --git a/gempak/ush/gfs_meta_sa.sh b/gempak/ush/gfs_meta_sa.sh index 7725727519a..62659cbdebc 100755 --- a/gempak/ush/gfs_meta_sa.sh +++ b/gempak/ush/gfs_meta_sa.sh @@ -5,11 +5,11 @@ # Set Up Local Variables # -source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEglobal}/ush/preamble.sh" mkdir -p -m 775 "${DATA}/SA" cd "${DATA}/SA" || exit 2 -cpreq "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl +cpreq "${HOMEglobal}/gempak/fix/datatype.tbl" datatype.tbl # # Link data into DATA to sidestep gempak path limits diff --git a/gempak/ush/gfs_meta_sa2.sh b/gempak/ush/gfs_meta_sa2.sh index 8332b535613..de15cf2139c 100755 --- a/gempak/ush/gfs_meta_sa2.sh +++ b/gempak/ush/gfs_meta_sa2.sh @@ -6,12 +6,12 @@ # comparisons to the ecmwf and ukmet # -source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEglobal}/ush/preamble.sh" mkdir SA2 cd SA2 || exit 1 -cpreq "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl +cpreq "${HOMEglobal}/gempak/fix/datatype.tbl" datatype.tbl # # Link data into DATA to sidestep gempak path limits diff --git a/gempak/ush/gfs_meta_trop.sh b/gempak/ush/gfs_meta_trop.sh index f29d7fb0b98..7d10063a164 100755 --- a/gempak/ush/gfs_meta_trop.sh +++ b/gempak/ush/gfs_meta_trop.sh @@ -5,11 +5,11 @@ # Set Up Local Variables # -source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEglobal}/ush/preamble.sh" mkdir -p -m 775 "${DATA}/TROP" cd "${DATA}/TROP" || exit 2 -cpreq "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl +cpreq "${HOMEglobal}/gempak/fix/datatype.tbl" datatype.tbl # # Link data into DATA to sidestep gempak path 
limits diff --git a/gempak/ush/gfs_meta_us.sh b/gempak/ush/gfs_meta_us.sh index 9cd24601201..6c9c68d5c0a 100755 --- a/gempak/ush/gfs_meta_us.sh +++ b/gempak/ush/gfs_meta_us.sh @@ -3,13 +3,13 @@ # Metafile Script : gfs_meta_us.sh # -source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEglobal}/ush/preamble.sh" cd "${DATA}" || exit 2 rm -rf "${DATA}/us" mkdir -p -m 775 "${DATA}/us" cd "${DATA}/us" || exit 2 -cpreq "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl +cpreq "${HOMEglobal}/gempak/fix/datatype.tbl" datatype.tbl # # Link data into DATA to sidestep gempak path limits @@ -49,7 +49,7 @@ GAREA = 17.529;-129.296;53.771;-22.374 PROJ = str/90;-105;0 LATLON = 0 -restore ${HOMEgfs}/gempak/ush/restore/pmsl_thkn.2.nts +restore ${HOMEglobal}/gempak/ush/restore/pmsl_thkn.2.nts CLRBAR = 1 HLSYM = 2;1.5//21//hw TEXT = 1/21//hw @@ -58,7 +58,7 @@ l run -restore ${HOMEgfs}/gempak/ush/restore/850mb_hght_tmpc.2.nts +restore ${HOMEglobal}/gempak/ush/restore/850mb_hght_tmpc.2.nts CLRBAR = 1 HLSYM = 2;1.5//21//hw TEXT = 1/21//hw @@ -67,7 +67,7 @@ l run -restore ${HOMEgfs}/gempak/ush/restore/700mb_hght_relh_omeg.2.nts +restore ${HOMEglobal}/gempak/ush/restore/700mb_hght_relh_omeg.2.nts CLRBAR = 1 HLSYM = 2;1.5//21//hw TEXT = 1/21//hw @@ -76,7 +76,7 @@ l run -restore ${HOMEgfs}/gempak/ush/restore/500mb_hght_absv.2.nts +restore ${HOMEglobal}/gempak/ush/restore/500mb_hght_absv.2.nts CLRBAR = 1 HLSYM = 2;1.5//21//hw TEXT = 1/21//hw @@ -85,7 +85,7 @@ l run -restore ${HOMEgfs}/gempak/ush/restore/250mb_hght_wnd.2.nts +restore ${HOMEglobal}/gempak/ush/restore/250mb_hght_wnd.2.nts CLRBAR = 1 HLSYM = 2;1.5//21//hw TEXT = 1/21//hw @@ -95,7 +95,7 @@ run -restore ${HOMEgfs}/gempak/ush/restore/p06m_pmsl.2.nts +restore ${HOMEglobal}/gempak/ush/restore/p06m_pmsl.2.nts CLRBAR = 1 HLSYM = 2;1.5//21//hw HLSYM = 2;1.5//21//hw diff --git a/gempak/ush/gfs_meta_usext.sh b/gempak/ush/gfs_meta_usext.sh index dbba2f5142d..abc7e178dae 100755 --- a/gempak/ush/gfs_meta_usext.sh +++ b/gempak/ush/gfs_meta_usext.sh 
@@ -3,12 +3,12 @@ # Metafile Script : gfs_meta_usext.sh # -source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEglobal}/ush/preamble.sh" mkdir -p -m 775 "${DATA}/mrfus" cd "${DATA}/mrfus" || exit 2 -cpreq "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl -cpreq "${HOMEgfs}/gempak/fix/ak_sfstns.tbl" alaska.tbl +cpreq "${HOMEglobal}/gempak/fix/datatype.tbl" datatype.tbl +cpreq "${HOMEglobal}/gempak/fix/ak_sfstns.tbl" alaska.tbl # # Link data into DATA to sidestep gempak path limits @@ -57,7 +57,7 @@ GAREA = 17.529;-129.296;53.771;-22.374 PROJ = str/90;-105;0 LATLON = 18/2 -restore ${HOMEgfs}/gempak/ush/restore/pmsl_thkn.2.nts +restore ${HOMEglobal}/gempak/ush/restore/pmsl_thkn.2.nts CLRBAR = 1 HLSYM = 2;1.5//21//hw TEXT = 1/21//hw @@ -65,35 +65,35 @@ TITLE = 1/-2/~ ? ${m_title} PMSL, 1000-500 MB THICKNESS|~MSLP, 1000-500 THKN!0 l run -restore ${HOMEgfs}/gempak/ush/restore/850mb_hght_tmpc.2.nts +restore ${HOMEglobal}/gempak/ush/restore/850mb_hght_tmpc.2.nts CLRBAR = 1 TEXT = 1/21//hw TITLE = 1/-2/~ ? ${m_title} @ HGT, TEMPERATURE AND WIND (KTS)|~@ HGT, TMP, WIND!0 l run -restore ${HOMEgfs}/gempak/ush/restore/700mb_hght_relh_omeg.2.nts +restore ${HOMEglobal}/gempak/ush/restore/700mb_hght_relh_omeg.2.nts CLRBAR = 1 TEXT = 1/21//hw TITLE = 1/-2/~ ? ${m_title} @ HGT, REL HUMIDITY AND OMEGA|~@ HGT, RH AND OMEGA!0 l run -restore ${HOMEgfs}/gempak/ush/restore/500mb_hght_absv.2.nts +restore ${HOMEglobal}/gempak/ush/restore/500mb_hght_absv.2.nts CLRBAR = 1 TEXT = 1/21//hw TITLE = 1/-2/~ ? ${m_title} @ HGT AND VORTICITY|~@ HGT AND VORTICITY!0 l run -restore ${HOMEgfs}/gempak/ush/restore/500mb_hght_gabsv.2.nts +restore ${HOMEglobal}/gempak/ush/restore/500mb_hght_gabsv.2.nts CLRBAR = 1 TEXT = 1/21//hw TITLE = 1/-2/~ ? ${m_title} @ HGT AND GEO ABS VORT|~@ HGT, GEO ABS VORT!0 l run -restore ${HOMEgfs}/gempak/ush/restore/250mb_hght_wnd.2.nts +restore ${HOMEglobal}/gempak/ush/restore/250mb_hght_wnd.2.nts CLRBAR = 1 TEXT = 1/21//hw TITLE = 1/-2/~ ? 
${m_title} @ HGT, ISOTACHS AND WIND (KTS)|~@ HGT AND WIND!0 @@ -123,7 +123,7 @@ CLEAR = YES l run -restore ${HOMEgfs}/gempak/ush/restore/precip.2.nts +restore ${HOMEglobal}/gempak/ush/restore/precip.2.nts CLRBAR = 1 TEXT = 1/21//hw HILO = 31;0/x#2/.25-10///y diff --git a/gempak/ush/gfs_meta_ver.sh b/gempak/ush/gfs_meta_ver.sh index 1209592f200..b31c434147e 100755 --- a/gempak/ush/gfs_meta_ver.sh +++ b/gempak/ush/gfs_meta_ver.sh @@ -5,11 +5,11 @@ # Set up Local Variables # -source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEglobal}/ush/preamble.sh" mkdir -p -m 775 "${DATA}/VER" cd "${DATA}/VER" || exit 2 -cpreq "${HOMEgfs}/gempak/fix/datatype.tbl" datatype.tbl +cpreq "${HOMEglobal}/gempak/fix/datatype.tbl" datatype.tbl MDL=GFS metaname="gfsver_${cyc}.meta" diff --git a/ush/atmos_ensstat.sh b/ush/atmos_ensstat.sh index 4be0a96d56b..256a5a1ef11 100755 --- a/ush/atmos_ensstat.sh +++ b/ush/atmos_ensstat.sh @@ -58,7 +58,7 @@ EOF cat input.nml # Run ensstat -"${EXECgfs}/ensstat.x" < input.nml +"${EXECglobal}/ensstat.x" < input.nml export err=$? if [[ "${err}" -ne 0 ]]; then diff --git a/ush/calcanl_gfs.py b/ush/calcanl_gfs.py index d4f4b4c419f..82bd1a30cf2 100755 --- a/ush/calcanl_gfs.py +++ b/ush/calcanl_gfs.py @@ -344,7 +344,7 @@ def calcanl_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix, ComOut = os.getenv('COMOUT_ATMOS_ANALYSIS', './') APrefix = os.getenv('APREFIX', '') NThreads = os.getenv('NTHREADS_CHGRES', 1) - FixDir = os.path.join(os.getenv('FIXgfs', './'), 'am') + FixDir = os.path.join(os.getenv('FIXglobal', './'), 'am') atmges_ens_mean = os.getenv('ATMGES_ENSMEAN', './atmges_ensmean') RunDir = os.getenv('DATA', './') ExecCMD = os.getenv('APRUN_CALCANL', '') diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh index 2d6a4cb378c..e745ee06d54 100755 --- a/ush/forecast_postdet.sh +++ b/ush/forecast_postdet.sh @@ -107,7 +107,7 @@ FV3_postdet() { # Check for consistency # TODO: the checker has a --fatal option, which is not used here. 
This needs to be decided how to handle. if [[ "${CHECK_LAND_RESTART_OROG:-NO}" == "YES" ]]; then - "${USHgfs}/check_land_input_orography.py" \ + "${USHglobal}/check_land_input_orography.py" \ --input_dir "${DATA}/INPUT" --orog_dir "${DATA}/INPUT" err=$? if [[ ${err} -ne 0 ]]; then @@ -348,12 +348,12 @@ FV3_nml() { # namelist output for a certain component echo "SUB ${FUNCNAME[0]}: Creating name lists and model configure file for FV3" - source "${USHgfs}/parsing_namelists_FV3.sh" - source "${USHgfs}/parsing_model_configure_FV3.sh" + source "${USHglobal}/parsing_namelists_FV3.sh" + source "${USHglobal}/parsing_model_configure_FV3.sh" # Call the appropriate namelist functions if [[ "${DO_NEST:-NO}" == "YES" ]]; then - source "${USHgfs}/parsing_namelists_FV3_nest.sh" + source "${USHglobal}/parsing_namelists_FV3_nest.sh" FV3_namelists_nest global FV3_namelists_nest nest else @@ -521,7 +521,7 @@ WW3_postdet() { WW3_nml() { echo "SUB ${FUNCNAME[0]}: Copying input files for WW3" - source "${USHgfs}/parsing_namelists_WW3.sh" + source "${USHglobal}/parsing_namelists_WW3.sh" WW3_namelists } @@ -664,7 +664,7 @@ MOM6_postdet() { MOM6_nml() { echo "SUB ${FUNCNAME[0]}: Creating name list for MOM6" - source "${USHgfs}/parsing_namelists_MOM6.sh" + source "${USHglobal}/parsing_namelists_MOM6.sh" MOM6_namelists } @@ -799,7 +799,7 @@ CICE_postdet() { CICE_nml() { echo "SUB ${FUNCNAME[0]}: Creating name list for CICE" - source "${USHgfs}/parsing_namelists_CICE.sh" + source "${USHglobal}/parsing_namelists_CICE.sh" CICE_namelists } @@ -853,7 +853,7 @@ GOCART_rc() { fi fi - source "${USHgfs}/parsing_namelists_GOCART.sh" + source "${USHglobal}/parsing_namelists_GOCART.sh" GOCART_namelists } diff --git a/ush/forecast_predet.sh b/ush/forecast_predet.sh index 00bb2b43aa3..dcce5927145 100755 --- a/ush/forecast_predet.sh +++ b/ush/forecast_predet.sh @@ -245,7 +245,7 @@ FV3_predet() { # <0 means older adiabatic pre-conditioning na_init=${na_init:-1} - local 
suite_file="${HOMEgfs}/sorc/ufs_model.fd/UFSATM/ccpp/suites/suite_${CCPP_SUITE}.xml" + local suite_file="${HOMEglobal}/sorc/ufs_model.fd/UFSATM/ccpp/suites/suite_${CCPP_SUITE}.xml" if [[ ! -f "${suite_file}" ]]; then echo "FATAL ERROR: CCPP Suite file ${suite_file} does not exist, ABORT!" exit 2 @@ -407,16 +407,16 @@ FV3_predet() { #-------------------------------------------------------------------------- # Fix files - FNGLAC=${FNGLAC:-"${FIXgfs}/am/global_glacier.2x2.grb"} - FNMXIC=${FNMXIC:-"${FIXgfs}/am/global_maxice.2x2.grb"} - FNTSFC=${FNTSFC:-"${FIXgfs}/am/RTGSST.1982.2012.monthly.clim.grb"} - FNSNOC=${FNSNOC:-"${FIXgfs}/am/global_snoclim.1.875.grb"} + FNGLAC=${FNGLAC:-"${FIXglobal}/am/global_glacier.2x2.grb"} + FNMXIC=${FNMXIC:-"${FIXglobal}/am/global_maxice.2x2.grb"} + FNTSFC=${FNTSFC:-"${FIXglobal}/am/RTGSST.1982.2012.monthly.clim.grb"} + FNSNOC=${FNSNOC:-"${FIXglobal}/am/global_snoclim.1.875.grb"} FNZORC=${FNZORC:-"igbp"} - FNAISC=${FNAISC:-"${FIXgfs}/am/IMS-NIC.blended.ice.monthly.clim.grb"} + FNAISC=${FNAISC:-"${FIXglobal}/am/IMS-NIC.blended.ice.monthly.clim.grb"} FNALBC2=${FNALBC2:-"${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.facsf.tileX.nc"} FNTG3C=${FNTG3C:-"${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.substrate_temperature.tileX.nc"} FNVEGC=${FNVEGC:-"${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.vegetation_greenness.tileX.nc"} - FNMSKH=${FNMSKH:-"${FIXgfs}/am/global_slmask.t1534.3072.1536.grb"} + FNMSKH=${FNMSKH:-"${FIXglobal}/am/global_slmask.t1534.3072.1536.grb"} FNVMNC=${FNVMNC:-"${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.vegetation_greenness.tileX.nc"} FNVMXC=${FNVMXC:-"${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.vegetation_greenness.tileX.nc"} FNSLPC=${FNSLPC:-"${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.slope_type.tileX.nc"} @@ -425,11 +425,11 @@ FV3_predet() { FNSOTC=${FNSOTC:-"${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.soil_type.tileX.nc"} FNSOCC=${FNSOCC:-"${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.soil_color.tileX.nc"} 
FNABSC=${FNABSC:-"${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.maximum_snow_albedo.tileX.nc"} - FNSMCC=${FNSMCC:-"${FIXgfs}/am/global_soilmgldas.statsgo.t${JCAP}.${LONB}.${LATB}.grb"} + FNSMCC=${FNSMCC:-"${FIXglobal}/am/global_soilmgldas.statsgo.t${JCAP}.${LONB}.${LATB}.grb"} # If the appropriate resolution fix file is not present, use the highest resolution available (T1534) if [[ ! -f "${FNSMCC}" ]]; then - FNSMCC="${FIXgfs}/am/global_soilmgldas.statsgo.t1534.3072.1536.grb" + FNSMCC="${FIXglobal}/am/global_soilmgldas.statsgo.t1534.3072.1536.grb" fi # Grid and orography data @@ -459,15 +459,15 @@ FV3_predet() { fi # NoahMP table - local noahmptablefile="${PARMgfs}/ufs/noahmptable.tbl" + local noahmptablefile="${PARMglobal}/ufs/noahmptable.tbl" cpreq "${noahmptablefile}" "${DATA}/noahmptable.tbl" # Thompson microphysics fix files if ((imp_physics == 8)); then - cpreq "${FIXgfs}/am/CCN_ACTIVATE.BIN" "${DATA}/CCN_ACTIVATE.BIN" - cpreq "${FIXgfs}/am/freezeH2O.dat" "${DATA}/freezeH2O.dat" - cpreq "${FIXgfs}/am/qr_acr_qgV2.dat" "${DATA}/qr_acr_qgV2.dat" - cpreq "${FIXgfs}/am/qr_acr_qsV2.dat" "${DATA}/qr_acr_qsV2.dat" + cpreq "${FIXglobal}/am/CCN_ACTIVATE.BIN" "${DATA}/CCN_ACTIVATE.BIN" + cpreq "${FIXglobal}/am/freezeH2O.dat" "${DATA}/freezeH2O.dat" + cpreq "${FIXglobal}/am/qr_acr_qgV2.dat" "${DATA}/qr_acr_qgV2.dat" + cpreq "${FIXglobal}/am/qr_acr_qsV2.dat" "${DATA}/qr_acr_qsV2.dat" fi if [[ "${new_o3forc:-YES}" == "YES" ]]; then @@ -480,15 +480,15 @@ FV3_predet() { O3FORC="global_o3prdlos.f77" fi H2OFORC=${H2OFORC:-"global_h2o_pltc.f77"} - cpreq "${FIXgfs}/am/${O3FORC}" "${DATA}/global_o3prdlos.f77" - cpreq "${FIXgfs}/am/${H2OFORC}" "${DATA}/global_h2oprdlos.f77" + cpreq "${FIXglobal}/am/${O3FORC}" "${DATA}/global_o3prdlos.f77" + cpreq "${FIXglobal}/am/${H2OFORC}" "${DATA}/global_h2oprdlos.f77" # GFS standard input data ISOL=${ISOL:-2} - cpreq "${FIXgfs}/am/global_solarconstant_noaa_an.txt" "${DATA}/solarconstant_noaa_an.txt" - cpreq 
"${FIXgfs}/am/global_sfc_emissivity_idx.txt" "${DATA}/sfc_emissivity_idx.txt" + cpreq "${FIXglobal}/am/global_solarconstant_noaa_an.txt" "${DATA}/solarconstant_noaa_an.txt" + cpreq "${FIXglobal}/am/global_sfc_emissivity_idx.txt" "${DATA}/sfc_emissivity_idx.txt" # Aerosol options IAER=${IAER:-1011} @@ -499,7 +499,7 @@ FV3_predet() { # local month mm for ((month = 1; month <= 12; month++)); do mm=$(printf %02d "${month}") - cpreq "${FIXgfs}/aer/merra2.aerclim.2014-2023.m${mm}.nc" "aeroclim.m${mm}.nc" + cpreq "${FIXglobal}/aer/merra2.aerclim.2014-2023.m${mm}.nc" "aeroclim.m${mm}.nc" done elif [[ "${MERRA2_6ym}" == ".true." ]]; then year=${current_cycle:0:4} @@ -512,30 +512,30 @@ FV3_predet() { done for ((month = 1; month <= 12; month++)); do mm=$(printf %02d "${month}") - cpreq "${FIXgfs}/aer/y${Syear}-${Eyear}/merra2_${Syear}-${Eyear}_${mm}.nc" "aeroclim.m${mm}.nc" + cpreq "${FIXglobal}/aer/y${Syear}-${Eyear}/merra2_${Syear}-${Eyear}_${mm}.nc" "aeroclim.m${mm}.nc" done fi # if [[ "${MERRA2_6ym}" == ".true." 
]]; fi # if (( IAER == 1011 )) - cpreq "${FIXgfs}/am/global_climaeropac_global.txt" "${DATA}/aerosol.dat" + cpreq "${FIXglobal}/am/global_climaeropac_global.txt" "${DATA}/aerosol.dat" if ((IAER > 0)); then local file - for file in "${FIXgfs}/am/global_volcanic_aerosols"*; do + for file in "${FIXglobal}/am/global_volcanic_aerosols"*; do cpreq "${file}" "${DATA}/$(basename "${file//global_/}")" done fi - cpreq "${FIXgfs}/lut/optics_BC.v1_3.dat" "${DATA}/optics_BC.dat" - cpreq "${FIXgfs}/lut/optics_OC.v1_3.dat" "${DATA}/optics_OC.dat" - cpreq "${FIXgfs}/lut/optics_DU.v15_3.dat" "${DATA}/optics_DU.dat" - cpreq "${FIXgfs}/lut/optics_SS.v3_3.dat" "${DATA}/optics_SS.dat" - cpreq "${FIXgfs}/lut/optics_SU.v1_3.dat" "${DATA}/optics_SU.dat" + cpreq "${FIXglobal}/lut/optics_BC.v1_3.dat" "${DATA}/optics_BC.dat" + cpreq "${FIXglobal}/lut/optics_OC.v1_3.dat" "${DATA}/optics_OC.dat" + cpreq "${FIXglobal}/lut/optics_DU.v15_3.dat" "${DATA}/optics_DU.dat" + cpreq "${FIXglobal}/lut/optics_SS.v3_3.dat" "${DATA}/optics_SS.dat" + cpreq "${FIXglobal}/lut/optics_SU.v1_3.dat" "${DATA}/optics_SU.dat" # CO2 options ICO2=${ICO2:-2} - cpreq "${FIXgfs}/am/global_co2historicaldata_glob.txt" "${DATA}/co2historicaldata_glob.txt" - cpreq "${FIXgfs}/am/co2monthlycyc.txt" "${DATA}/co2monthlycyc.txt" + cpreq "${FIXglobal}/am/global_co2historicaldata_glob.txt" "${DATA}/co2historicaldata_glob.txt" + cpreq "${FIXglobal}/am/co2monthlycyc.txt" "${DATA}/co2monthlycyc.txt" # Set historical CO2 values based on whether this is a reforecast run or not # Ref. issue 2403 local co2dir @@ -545,35 +545,35 @@ FV3_predet() { fi if ((ICO2 > 0)); then local file - for file in "${FIXgfs}/am/${co2dir}/global_co2historicaldata"*; do + for file in "${FIXglobal}/am/${co2dir}/global_co2historicaldata"*; do cpreq "${file}" "${DATA}/$(basename "${file//global_/}")" done fi # Inline UPP fix files if [[ "${WRITE_DOPOST:-}" == ".true." 
]]; then - cpreq "${POSTGRB2TBL:-${PARMgfs}/post/params_grib2_tbl_new}" "${DATA}/params_grib2_tbl_new" - cpreq "${PARMgfs}/ufs/post_itag_gfs" "${DATA}/itag" # TODO: Need a GEFS version when available in the UFS-weather-model + cpreq "${POSTGRB2TBL:-${PARMglobal}/post/params_grib2_tbl_new}" "${DATA}/params_grib2_tbl_new" + cpreq "${PARMglobal}/ufs/post_itag_gfs" "${DATA}/itag" # TODO: Need a GEFS version when available in the UFS-weather-model # TODO: These should be replaced with ones from the ufs-weather-model when available there case ${NET} in gfs) - cpreq "${PARMgfs}/post/gfs/postxconfig-NT-gfs-two.txt" "${DATA}/postxconfig-NT.txt" - cpreq "${PARMgfs}/post/gfs/postxconfig-NT-gfs-f00-two.txt" "${DATA}/postxconfig-NT_FH00.txt" + cpreq "${PARMglobal}/post/gfs/postxconfig-NT-gfs-two.txt" "${DATA}/postxconfig-NT.txt" + cpreq "${PARMglobal}/post/gfs/postxconfig-NT-gfs-f00-two.txt" "${DATA}/postxconfig-NT_FH00.txt" ;; gcafs) - cpreq "${PARMgfs}/post/gcafs/postxconfig-NT-gcafs.txt" "${DATA}/postxconfig-NT.txt" - cpreq "${PARMgfs}/post/gcafs/postxconfig-NT-gcafs.txt" "${DATA}/postxconfig-NT_FH00.txt" - cpreq "${PARMgfs}/ufs/post_itag_gcafs" "${DATA}/itag" - cpreq "${PARMgfs}/post/optics_luts_DUST_nasa.dat" "${DATA}/." - cpreq "${PARMgfs}/post/optics_luts_NITR_nasa.dat" "${DATA}/." - cpreq "${PARMgfs}/post/optics_luts_SALT_nasa.dat" "${DATA}/." - cpreq "${PARMgfs}/post/optics_luts_SOOT_nasa.dat" "${DATA}/." - cpreq "${PARMgfs}/post/optics_luts_SUSO_nasa.dat" "${DATA}/." - cpreq "${PARMgfs}/post/optics_luts_WASO_nasa.dat" "${DATA}/." + cpreq "${PARMglobal}/post/gcafs/postxconfig-NT-gcafs.txt" "${DATA}/postxconfig-NT.txt" + cpreq "${PARMglobal}/post/gcafs/postxconfig-NT-gcafs.txt" "${DATA}/postxconfig-NT_FH00.txt" + cpreq "${PARMglobal}/ufs/post_itag_gcafs" "${DATA}/itag" + cpreq "${PARMglobal}/post/optics_luts_DUST_nasa.dat" "${DATA}/." + cpreq "${PARMglobal}/post/optics_luts_NITR_nasa.dat" "${DATA}/." + cpreq "${PARMglobal}/post/optics_luts_SALT_nasa.dat" "${DATA}/." 
+ cpreq "${PARMglobal}/post/optics_luts_SOOT_nasa.dat" "${DATA}/." + cpreq "${PARMglobal}/post/optics_luts_SUSO_nasa.dat" "${DATA}/." + cpreq "${PARMglobal}/post/optics_luts_WASO_nasa.dat" "${DATA}/." ;; gefs) - cpreq "${PARMgfs}/post/gefs/postxconfig-NT-gefs.txt" "${DATA}/postxconfig-NT.txt" - cpreq "${PARMgfs}/post/gefs/postxconfig-NT-gefs-f00.txt" "${DATA}/postxconfig-NT_FH00.txt" + cpreq "${PARMglobal}/post/gefs/postxconfig-NT-gefs.txt" "${DATA}/postxconfig-NT.txt" + cpreq "${PARMglobal}/post/gefs/postxconfig-NT-gefs-f00.txt" "${DATA}/postxconfig-NT_FH00.txt" # Provide ensemble header information for GEFS if [[ "${ENSMEM}" == "000" ]]; then export e1=1 @@ -584,8 +584,8 @@ FV3_predet() { export e3="${NMEM_ENS}" ;; sfs) - cpreq "${PARMgfs}/post/sfs/postxconfig-NT-sfs.txt" "${DATA}/postxconfig-NT.txt" - cpreq "${PARMgfs}/post/sfs/postxconfig-NT-sfs.txt" "${DATA}/postxconfig-NT_FH00.txt" + cpreq "${PARMglobal}/post/sfs/postxconfig-NT-sfs.txt" "${DATA}/postxconfig-NT.txt" + cpreq "${PARMglobal}/post/sfs/postxconfig-NT-sfs.txt" "${DATA}/postxconfig-NT_FH00.txt" # Provide ensemble header information for SFS if [[ "${ENSMEM}" == "000" ]]; then export e1=1 @@ -625,8 +625,8 @@ WW3_predet() { #If pnt_wght file exists, use it to speed up initialization for unstructured grids # this file does not exist for structured, and the model can run without it (just slower init) - if [[ -f "${FIXgfs}/wave/pnt_wght.${waveGRD}.nc" ]]; then - cpreq "${FIXgfs}/wave/pnt_wght.${waveGRD}.nc" "${DATA}/pnt_wght.ww3.nc" + if [[ -f "${FIXglobal}/wave/pnt_wght.${waveGRD}.nc" ]]; then + cpreq "${FIXglobal}/wave/pnt_wght.${waveGRD}.nc" "${DATA}/pnt_wght.ww3.nc" fi # TODO: These are generated by waveprep job, but that job is not used in v17 @@ -656,7 +656,7 @@ WW3_predet() { echo "Wave is on the same mesh as ocean" else echo "Wave is NOT on the same mesh as ocean" - cpreq "${FIXgfs}/wave/${MESH_WAV}" "${DATA}/" + cpreq "${FIXglobal}/wave/${MESH_WAV}" "${DATA}/" fi } @@ -679,9 +679,9 @@ 
CICE_predet() { mapfile -t CICE_OUTPUT_FH < <(seq "${FHMIN}" "${FHOUT_ICE}" "${FHMAX}") || exit 10 # Fix files - cpreq "${FIXgfs}/cice/${ICERES}/${CICE_GRID}" "${DATA}/" - cpreq "${FIXgfs}/cice/${ICERES}/${CICE_MASK}" "${DATA}/" - cpreq "${FIXgfs}/cice/${ICERES}/${MESH_ICE}" "${DATA}/" + cpreq "${FIXglobal}/cice/${ICERES}/${CICE_GRID}" "${DATA}/" + cpreq "${FIXglobal}/cice/${ICERES}/${CICE_MASK}" "${DATA}/" + cpreq "${FIXglobal}/cice/${ICERES}/${MESH_ICE}" "${DATA}/" } @@ -719,7 +719,7 @@ MOM6_predet() { # Fix files for ocean; ocean_hgrid, ocean_mosaic, ocean_mask, etc. # MOM_channels is configurable based on resolution, but is treated as a fix file # MOM_override is a template that allows user to override default namelist settings, but is also treated as a fix file - cpreq "${FIXgfs}/mom6/${OCNRES}/"* "${DATA}/INPUT/" # TODO: These need to be explicit + cpreq "${FIXglobal}/mom6/${OCNRES}/"* "${DATA}/INPUT/" # TODO: These need to be explicit # Add to the MOM_override file, to have ISO timestamp cat >> "${DATA}/INPUT/MOM_override" << EOF @@ -730,7 +730,7 @@ EOF # Copy coupled grid_spec local spec_file - spec_file="${FIXgfs}/cpl/a${CASE}o${OCNRES}/grid_spec.nc" + spec_file="${FIXglobal}/cpl/a${CASE}o${OCNRES}/grid_spec.nc" # Test that the file exists and is not zero-sized if [[ -s "${spec_file}" ]]; then cpreq "${spec_file}" "${DATA}/INPUT/" diff --git a/ush/gaussian_sfcanl.sh b/ush/gaussian_sfcanl.sh index ba692404fe5..43c57563bef 100755 --- a/ush/gaussian_sfcanl.sh +++ b/ush/gaussian_sfcanl.sh @@ -28,7 +28,7 @@ # (if nonexistent will be made) # defaults to current working directory # GAUSFCANLEXE Program executable. -# Defaults to $EXECgfs/gaussian_sfcanl.x +# Defaults to $EXECglobal/gaussian_sfcanl.x # gfs_ver Version number of gfs directory. Default is # v15.0.0. 
# OMP_NUM_ @@ -47,7 +47,7 @@ # # fixed data : ${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile*.nc # ${FIXWGTS} -# ${FIXgfs}/am/global_hyblev.l65.txt +# ${FIXglobal}/am/global_hyblev.l65.txt # # input data : ${COMIN_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile*.nc # ${COMIN_ATMOS_ANALYSIS}/increment.sfc.i006.tile${i}.nc" @@ -83,8 +83,8 @@ LEVSP1=$((LEVS + 1)) FIXWGTS=${FIXWGTS:-${FIXorog}/${CASE}/fv3_SCRIP_${CASE}_GRIDSPEC_lon${LONB_SFC}_lat${LATB_SFC}.gaussian.neareststod.nc} # Filenames. -GAUSFCANLEXE=${GAUSFCANLEXE:-${EXECgfs}/gaussian_sfcanl.x} -SIGLEVEL=${SIGLEVEL:-${FIXgfs}/am/global_hyblev.l${LEVSP1}.txt} +GAUSFCANLEXE=${GAUSFCANLEXE:-${EXECglobal}/gaussian_sfcanl.x} +SIGLEVEL=${SIGLEVEL:-${FIXglobal}/am/global_hyblev.l${LEVSP1}.txt} # Other variables. diff --git a/ush/gfs_bufr.sh b/ush/gfs_bufr.sh index b2dad96aebb..ca6f588ff08 100755 --- a/ush/gfs_bufr.sh +++ b/ush/gfs_bufr.sh @@ -49,9 +49,9 @@ fi export CASE=${CASE_HIST:-${CASE}} -if [[ -s "${PARMgfs}/product/bufr_ij_gfs_${CASE}.txt" ]]; then +if [[ -s "${PARMglobal}/product/bufr_ij_gfs_${CASE}.txt" ]]; then # use predetermined grid point(i,j) in bufr_gfs_${CASE}.txt - ${NLN} "${PARMgfs}/product/bufr_ij_gfs_${CASE}.txt" fort.7 + ${NLN} "${PARMglobal}/product/bufr_ij_gfs_${CASE}.txt" fort.7 np1=0 else # find the nearest neighbor grid point(i,j) in the code @@ -97,11 +97,11 @@ cpreq "${COMIN_ATMOS_HISTORY}/${RUN}.${cycle}.sfc.f${fhr}.${atmfm}" "flxf${fhr}" cpreq "${COMIN_ATMOS_HISTORY}/${RUN}.${cycle}.sfc.f${fhr_p}.${atmfm}" "flxf${fhr_p}" # define input BUFR table file. 
-${NLN} "${PARMgfs}/product/bufr_gfs_${CLASS}.tbl" fort.1 -${NLN} "${STNLIST:-${PARMgfs}/product/bufr_stalist.meteo.gfs}" fort.8 +${NLN} "${PARMglobal}/product/bufr_gfs_${CLASS}.tbl" fort.1 +${NLN} "${STNLIST:-${PARMglobal}/product/bufr_stalist.meteo.gfs}" fort.8 #------------------------------------------------------------------ -"${EXECgfs}/${pgm}" < gfsparm > "out_gfs_bufr_${fhr}" +"${EXECglobal}/${pgm}" < gfsparm > "out_gfs_bufr_${fhr}" export err=$? diff --git a/ush/gfs_sndp.sh b/ush/gfs_sndp.sh index 5ebd81c121c..99b13a4f2ac 100755 --- a/ush/gfs_sndp.sh +++ b/ush/gfs_sndp.sh @@ -14,7 +14,7 @@ export m=$1 mkdir -p "${DATA}/${m}" cd "${DATA}/${m}" || exit 2 -cpreq "${FIXgfs}/product/gfs_collective${m}.list" "${DATA}/${m}/" +cpreq "${FIXglobal}/product/gfs_collective${m}.list" "${DATA}/${m}/" CCCC=KWBC file_list=gfs_collective${m}.list @@ -32,7 +32,7 @@ while IFS= read -r stn; do #. prep_step export FORT11="${DATA}/${m}/bufrin" export FORT51=./bufrout - "${EXECgfs}/${pgm}" << EOF + "${EXECglobal}/${pgm}" << EOF &INPUT BULHED="${WMOHEAD}",KWBX="${CCCC}", NCEP2STD=.TRUE., diff --git a/ush/global_cycle.sh b/ush/global_cycle.sh index b0685f3d969..019b8fd6e71 100755 --- a/ush/global_cycle.sh +++ b/ush/global_cycle.sh @@ -27,13 +27,13 @@ # LONB_CASE j-dimension of the global climatology files. NOT the # j-dimension of the model grid. Computed from CASE by default. # OCNRES Ocean grid resolution. '100' is one degree. -# HOMEgfs Directory for gfs. Default is +# HOMEglobal Directory for gfs. Default is # PACKAGEROOT/gfs.v15.0.0. # PACKAGEROOT Location of gfs package. -# FIXgfs Directory for fixed data. Default is $HOMEgfs/fix. -# FIXorog Directory for fixed orography data. Default is $FIXgfs/orog -# EXECgfs Directory of the program executable. Defaults to -# $HOMEgfs/exec +# FIXglobal Directory for fixed data. Default is $HOMEglobal/fix. +# FIXorog Directory for fixed orography data. Default is $FIXglobal/orog +# EXECglobal Directory of the program executable. 
Defaults to +# $HOMEglobal/exec # DATA Working directory # (if nonexistent will be made, used and deleted) # Defaults to current working directory @@ -46,27 +46,27 @@ # SUFINP Suffix to add to input analysis files. # Defaults to none. # CYCLEXEC Program executable. -# Defaults to ${EXECgfs}/global_cycle$XC +# Defaults to ${EXECglobal}/global_cycle$XC # FNGLAC Input glacier climatology GRIB file. -# Defaults to ${FIXgfs}/am/global_glacier.2x2.grb +# Defaults to ${FIXglobal}/am/global_glacier.2x2.grb # FNMXIC Input maximum sea ice climatology GRIB file. -# Defaults to ${FIXgfs}/am/global_maxice.2x2.grb +# Defaults to ${FIXglobal}/am/global_maxice.2x2.grb # FNTSFC Input SST climatology GRIB file. -# Defaults to ${FIXgfs}/am/RTGSST.1982.2012.monthly.clim.grb +# Defaults to ${FIXglobal}/am/RTGSST.1982.2012.monthly.clim.grb # FNSALC Input Salinity climatology netcdf file. -# Defaults to ${FIXgfs}/am/global_salclm.t1534.3072.1536.nc +# Defaults to ${FIXglobal}/am/global_salclm.t1534.3072.1536.nc # FNSNOC Input snow climatology GRIB file. -# Defaults to ${FIXgfs}/am/global_snoclim.1.875.grb +# Defaults to ${FIXglobal}/am/global_snoclim.1.875.grb # FNZORC Input roughness climatology. # Defaults to igbp vegetation type-based lookup table # FNVETC must be set to igbp file: -# ${FIXgfs}/am/global_vegtype.igbp.t$JCAP_CASE.$LONB_CASE.$LATB_CASE.rg.grb +# ${FIXglobal}/am/global_vegtype.igbp.t$JCAP_CASE.$LONB_CASE.$LATB_CASE.rg.grb # FNALBC Input 4-component albedo climatology GRIB file. # defaults to ${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.snowfree_albedo.tileX.nc # FNALBC2 Input 'facsf' and 'facwf' albedo climatology GRIB file. # Defaults to ${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.facsf.tileX.nc # FNAISC Input sea ice climatology GRIB file. -# Defaults to ${FIXgfs}/am/IMS-NIC.blended.ice.monthly.clim.grb +# Defaults to ${FIXglobal}/am/IMS-NIC.blended.ice.monthly.clim.grb # FNTG3C Input deep soil temperature climatology GRIB file. 
# Defaults to ${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.substrate_temperature.tileX.nc # FNVEGC Input vegetation fraction climatology GRIB file. @@ -76,7 +76,7 @@ # FNSOTC Input soil type climatology GRIB file. # Defaults to ${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.soil_type.tileX.nc # FNSMCC Input soil moisture climatology GRIB file. -# Defaults to ${FIXgfs}/am/global_soilmgldas.statsgo.t$JCAP_CASE.$LONB_CASE.$LATB_CASE.grb +# Defaults to ${FIXglobal}/am/global_soilmgldas.statsgo.t$JCAP_CASE.$LONB_CASE.$LATB_CASE.grb # FNVMNC Input min veg frac climatology GRIB file. # Defaults to ${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.vegetation_greenness.tileX.nc # FNVMXC Input max veg frac climatology GRIB file. @@ -87,7 +87,7 @@ # Defaults to ${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.maximum_snow_albedo.tileX.nc # FNMSKH Input high resolution land mask GRIB file. Use to set mask for # some of the input climatology fields. This is NOT the model mask. -# Defaults to ${FIXgfs}/am/global_slmask.t1534.3072.1536.grb +# Defaults to ${FIXglobal}/am/global_slmask.t1534.3072.1536.grb # NST_FILE GSI file on the gaussian grid containing NST increments. # Defaults to NULL (no file). # FNTSFA Input SST analysis GRIB file. 
@@ -200,7 +200,7 @@ XC=${XC:-" "} PREINP=${PREINP:-" "} SUFINP=${SUFINP:-" "} -CYCLEXEC=${CYCLEXEC:-${EXECgfs}/global_cycle${XC}} +CYCLEXEC=${CYCLEXEC:-${EXECglobal}/global_cycle${XC}} FHOUR=${FHOUR:-00} @@ -237,14 +237,14 @@ MAX_TASKS_CY=${MAX_TASKS_CY:-99999} FRAC_GRID=${FRAC_GRID:-.false.} COUPLED=${COUPLED:-.false.} -FNGLAC=${FNGLAC:-${FIXgfs}/am/global_glacier.2x2.grb} -FNMXIC=${FNMXIC:-${FIXgfs}/am/global_maxice.2x2.grb} -FNTSFC=${FNTSFC:-${FIXgfs}/am/RTGSST.1982.2012.monthly.clim.grb} -FNSALC=${FNSALC:-${FIXgfs}/am/global_salclm.t1534.3072.1536.nc} -FNSNOC=${FNSNOC:-${FIXgfs}/am/global_snoclim.1.875.grb} +FNGLAC=${FNGLAC:-${FIXglobal}/am/global_glacier.2x2.grb} +FNMXIC=${FNMXIC:-${FIXglobal}/am/global_maxice.2x2.grb} +FNTSFC=${FNTSFC:-${FIXglobal}/am/RTGSST.1982.2012.monthly.clim.grb} +FNSALC=${FNSALC:-${FIXglobal}/am/global_salclm.t1534.3072.1536.nc} +FNSNOC=${FNSNOC:-${FIXglobal}/am/global_snoclim.1.875.grb} FNZORC=${FNZORC:-igbp} -FNAISC=${FNAISC:-${FIXgfs}/am/IMS-NIC.blended.ice.monthly.clim.grb} -FNSMCC=${FNSMCC:-${FIXgfs}/am/global_soilmgldas.statsgo.t${JCAP_CASE}.${LONB_CASE}.${LATB_CASE}.grb} +FNAISC=${FNAISC:-${FIXglobal}/am/IMS-NIC.blended.ice.monthly.clim.grb} +FNSMCC=${FNSMCC:-${FIXglobal}/am/global_soilmgldas.statsgo.t${JCAP_CASE}.${LONB_CASE}.${LATB_CASE}.grb} FNALBC2=${FNALBC2:-${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.facsf.tileX.nc} FNTG3C=${FNTG3C:-${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.substrate_temperature.tileX.nc} FNVEGC=${FNVEGC:-${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.vegetation_greenness.tileX.nc} @@ -255,7 +255,7 @@ FNABSC=${FNABSC:-${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.maximum_snow_albedo. 
FNVMNC=${FNVMNC:-${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.vegetation_greenness.tileX.nc} FNVMXC=${FNVMXC:-${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.vegetation_greenness.tileX.nc} FNSLPC=${FNSLPC:-${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.slope_type.tileX.nc} -FNMSKH=${FNMSKH:-${FIXgfs}/am/global_slmask.t1534.3072.1536.grb} +FNMSKH=${FNMSKH:-${FIXglobal}/am/global_slmask.t1534.3072.1536.grb} NST_FILE=${NST_FILE:-"NULL"} FNTSFA=${FNTSFA:-${COMIN_OBS}/${PREINP}sstgrb${SUFINP}} FNACNA=${FNACNA:-${COMIN_OBS}/${PREINP}engicegrb${SUFINP}} @@ -271,7 +271,7 @@ ln -fs "${FNTSFC}" sstclm ln -fs "${FNSALC}" salclm # If the appropriate resolution fix file is not present, use the highest resolution available (T1534) -[[ ! -f ${FNSMCC} ]] && FNSMCC="${FIXgfs}/am/global_soilmgldas.statsgo.t1534.3072.1536.grb" +[[ ! -f ${FNSMCC} ]] && FNSMCC="${FIXglobal}/am/global_soilmgldas.statsgo.t1534.3072.1536.grb" ################################################################################ # Make surface analysis diff --git a/ush/interp_atmos_master.sh b/ush/interp_atmos_master.sh index d592fbfb18a..d7a3bd8e2c7 100755 --- a/ush/interp_atmos_master.sh +++ b/ush/interp_atmos_master.sh @@ -25,7 +25,7 @@ grid0p50="latlon 0:720:0.5 90:361:-0.5" grid1p00="latlon 0:360:1.0 90:181:-1.0" # "Import" functions used in this script -source "${USHgfs}/product_functions.sh" +source "${USHglobal}/product_functions.sh" # Transform the input ${grid_string} into an array for processing IFS=':' read -ra grids <<< "${grid_string}" diff --git a/ush/jjob_header.sh b/ush/jjob_header.sh index f69524b732a..f68d5722ca3 100755 --- a/ush/jjob_header.sh +++ b/ush/jjob_header.sh @@ -22,7 +22,7 @@ # # Script requires the following variables to already be # defined in the environment: -# - $HOMEgfs +# - $HOMEglobal # - $DATAROOT (unless $DATA is overriden) # - $jobid # - $PDY @@ -32,7 +32,7 @@ # Additionally, there are a couple of optional settings that # can be set before calling the script: # - $EXPDIR : Override 
the default $EXPDIR -# [default: ${HOMEgfs}/dev/parm/config] +# [default: ${HOMEglobal}/dev/parm/config] # - $DATA : Override the default $DATA location # [default: ${DATAROOT}/${jobid}] # - $WIPE_DATA : Set whether to delete any existing $DATA @@ -41,7 +41,7 @@ # [default: $$] _calling_script="${BASH_SOURCE[1]}" -source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEglobal}/ush/preamble.sh" OPTIND=1 while getopts "c:e:" option; do @@ -98,7 +98,7 @@ source ./PDY || true ############################# # Source relevant config files ############################# -export EXPDIR="${EXPDIR:-${HOMEgfs}/dev/parm/config}" +export EXPDIR="${EXPDIR:-${HOMEglobal}/dev/parm/config}" for config in "${configs[@]:-''}"; do source "${EXPDIR}/config.${config}" && true export err=$? @@ -110,7 +110,7 @@ done ########################################## # Source machine runtime environment ########################################## -source "${HOMEgfs}/env/${machine}.env" "${env_job}" && true +source "${HOMEglobal}/env/${machine}.env" "${env_job}" && true export err=$? 
if [[ ${err} -ne 0 ]]; then err_exit "[${BASH_SOURCE[0]}]: Error while sourcing machine environment ${machine}.env for job ${env_job}" diff --git a/ush/make_tif.sh b/ush/make_tif.sh index 70987cf51ec..c61ff67c223 100755 --- a/ush/make_tif.sh +++ b/ush/make_tif.sh @@ -17,7 +17,7 @@ PDYHH="${PDY}${cyc}" if [[ "${HEADER}" == "YES" ]]; then INPATH="${DATA}/${outname}" SUB=DFAX1064 - "${HOMEgfs}/ush/make_NTC_file.pl" "${WMO}" "${ORIG}" "${PDYHH}" "${SUB}" "${INPATH}" "${OUTPATH}" + "${HOMEglobal}/ush/make_NTC_file.pl" "${WMO}" "${ORIG}" "${PDYHH}" "${SUB}" "${INPATH}" "${OUTPATH}" # # Send the graphic to TOC diff --git a/ush/module-setup.sh b/ush/module-setup.sh index 13e41879804..c07162bd7c2 100755 --- a/ush/module-setup.sh +++ b/ush/module-setup.sh @@ -1,7 +1,7 @@ #!/bin/bash set -u -source "${HOMEgfs}/ush/detect_machine.sh" +source "${HOMEglobal}/ush/detect_machine.sh" if [[ ${MACHINE_ID} = hera* ]]; then # We are on NOAA Hera diff --git a/ush/ozn_xtrct.sh b/ush/ozn_xtrct.sh index d45674bdb12..b3595e45574 100755 --- a/ush/ozn_xtrct.sh +++ b/ush/ozn_xtrct.sh @@ -130,8 +130,8 @@ else #-------------------------------------------------------------------- # Copy extraction programs to working directory # - cpreq "${EXECgfs}/oznmon_time.x" ./oznmon_time.x - cpreq "${EXECgfs}/oznmon_horiz.x" ./oznmon_horiz.x + cpreq "${EXECglobal}/oznmon_time.x" ./oznmon_time.x + cpreq "${EXECglobal}/oznmon_horiz.x" ./oznmon_horiz.x #--------------------------------------------------------------------------- # Outer loop over $ozn_ptype (default values 'ges', 'anl') diff --git a/ush/parsing_model_configure_FV3.sh b/ush/parsing_model_configure_FV3.sh index a5f141cd253..e7d15073e8a 100755 --- a/ush/parsing_model_configure_FV3.sh +++ b/ush/parsing_model_configure_FV3.sh @@ -64,9 +64,9 @@ FV3_model_configure() { if [[ "${DO_NEST:-NO}" == "YES" ]]; then local NEST_IMO=${npx_nest} local NEST_JMO=${npy_nest} - template="${PARMgfs}/ufs/input_global_nest.nml.IN" + 
template="${PARMglobal}/ufs/input_global_nest.nml.IN" else - template="${PARMgfs}/ufs/model_configure.IN" + template="${PARMglobal}/ufs/model_configure.IN" fi if [[ ! -f ${template} ]]; then echo "FATAL ERROR: template '${template}' does not exist, ABORT!" diff --git a/ush/parsing_namelists_CICE.sh b/ush/parsing_namelists_CICE.sh index 6bcb52cc10a..99ccb2e3428 100755 --- a/ush/parsing_namelists_CICE.sh +++ b/ush/parsing_namelists_CICE.sh @@ -131,7 +131,7 @@ CICE_namelists() { local stream_files_dice="none" # Ensure the template exists - local template=${CICE_TEMPLATE:-"${PARMgfs}/ufs/ice_in.IN"} + local template=${CICE_TEMPLATE:-"${PARMglobal}/ufs/ice_in.IN"} if [[ ! -f "${template}" ]]; then echo "FATAL ERROR: template '${template}' does not exist, ABORT!" exit 1 diff --git a/ush/parsing_namelists_FV3.sh b/ush/parsing_namelists_FV3.sh index 0d871ccbe17..ef29f4d3529 100755 --- a/ush/parsing_namelists_FV3.sh +++ b/ush/parsing_namelists_FV3.sh @@ -7,10 +7,10 @@ FV3_namelists() { # setup the tables - DIAG_TABLE=${DIAG_TABLE:-${PARMgfs}/ufs/fv3/diag_table} - DIAG_TABLE_APPEND=${DIAG_TABLE_APPEND:-${PARMgfs}/ufs/fv3/diag_table_aod} - DATA_TABLE=${DATA_TABLE:-${PARMgfs}/ufs/MOM6_data_table.IN} - FIELD_TABLE=${FIELD_TABLE:-${PARMgfs}/ufs/fv3/field_table} + DIAG_TABLE=${DIAG_TABLE:-${PARMglobal}/ufs/fv3/diag_table} + DIAG_TABLE_APPEND=${DIAG_TABLE_APPEND:-${PARMglobal}/ufs/fv3/diag_table_aod} + DATA_TABLE=${DATA_TABLE:-${PARMglobal}/ufs/MOM6_data_table.IN} + FIELD_TABLE=${FIELD_TABLE:-${PARMglobal}/ufs/fv3/field_table} # set cdmbgwd if ((gwd_opt == 2)) && [[ ${do_gsl_drag_ls_bl} == ".true." ]]; then @@ -491,7 +491,7 @@ FV3_namelists() { local HIDE_LIAU="!" 
fi - local global_template="${HOMEgfs}/parm/ufs/global_control.nml.IN" + local global_template="${HOMEglobal}/parm/ufs/global_control.nml.IN" atparse < "${global_template}" >> "input.nml" } diff --git a/ush/parsing_namelists_FV3_nest.sh b/ush/parsing_namelists_FV3_nest.sh index 268c600516a..9ba8a30b795 100755 --- a/ush/parsing_namelists_FV3_nest.sh +++ b/ush/parsing_namelists_FV3_nest.sh @@ -20,10 +20,10 @@ FV3_namelists_nest() { fi # setup the tables - DIAG_TABLE=${DIAG_TABLE:-${PARMgfs}/ufs/fv3/diag_table} - DIAG_TABLE_APPEND=${DIAG_TABLE_APPEND:-${PARMgfs}/ufs/fv3/diag_table_aod} - DATA_TABLE=${DATA_TABLE:-${PARMgfs}/ufs/MOM6_data_table.IN} - FIELD_TABLE=${FIELD_TABLE:-${PARMgfs}/ufs/fv3/field_table} + DIAG_TABLE=${DIAG_TABLE:-${PARMglobal}/ufs/fv3/diag_table} + DIAG_TABLE_APPEND=${DIAG_TABLE_APPEND:-${PARMglobal}/ufs/fv3/diag_table_aod} + DATA_TABLE=${DATA_TABLE:-${PARMglobal}/ufs/MOM6_data_table.IN} + FIELD_TABLE=${FIELD_TABLE:-${PARMglobal}/ufs/fv3/field_table} # set cdmbgwd if ((gwd_opt == 2)) && [[ ${do_gsl_drag_ls_bl} == ".true." ]]; then diff --git a/ush/parsing_namelists_MOM6.sh b/ush/parsing_namelists_MOM6.sh index b8c7293681c..8f4dbd4ed38 100755 --- a/ush/parsing_namelists_MOM6.sh +++ b/ush/parsing_namelists_MOM6.sh @@ -61,7 +61,7 @@ MOM6_namelists() { fi local MOM6_HFREEZE=20.0 # Ensure the template exists - local template=${MOM6_INPUT_TEMPLATE:-"${PARMgfs}/ufs/MOM_input_${OCNRES}.IN"} + local template=${MOM6_INPUT_TEMPLATE:-"${PARMglobal}/ufs/MOM_input_${OCNRES}.IN"} if [[ ! -f "${template}" ]]; then echo "FATAL ERROR: template '${template}' does not exist, ABORT!" exit 1 @@ -78,7 +78,7 @@ MOM6_namelists() { local MOM6_FRUNOFF=${FRUNOFF} # Ensure the template exists - local template=${MOM6_DATA_TABLE_TEMPLATE:-"${PARMgfs}/ufs/MOM6_data_table.IN"} + local template=${MOM6_DATA_TABLE_TEMPLATE:-"${PARMglobal}/ufs/MOM6_data_table.IN"} if [[ ! -f "${template}" ]]; then echo "FATAL ERROR: template '${template}' does not exist, ABORT!" 
exit 1 diff --git a/ush/parsing_namelists_WW3.sh b/ush/parsing_namelists_WW3.sh index 5f19fdfc58a..27133bbfc77 100755 --- a/ush/parsing_namelists_WW3.sh +++ b/ush/parsing_namelists_WW3.sh @@ -9,16 +9,16 @@ WW3_namelists() { # --------------------------------------------------------------------------- # # Buoy location file - if [[ -f "${PARMgfs}/wave/wave_${NET}.buoys" ]]; then - cpreq "${PARMgfs}/wave/wave_${NET}.buoys" "${DATA}/ww3_points.list" + if [[ -f "${PARMglobal}/wave/wave_${NET}.buoys" ]]; then + cpreq "${PARMglobal}/wave/wave_${NET}.buoys" "${DATA}/ww3_points.list" fi if [[ -f "${DATA}/ww3_points.list" ]]; then set +x - echo "ww3_points.list copied (${PARMgfs}/wave/wave_${NET}.buoys)." + echo "ww3_points.list copied (${PARMglobal}/wave/wave_${NET}.buoys)." set_trace else - echo "FATAL ERROR : ww3_points.list (${PARMgfs}/wave/wave_${NET}.buoys) NOT FOUND" + echo "FATAL ERROR : ww3_points.list (${PARMglobal}/wave/wave_${NET}.buoys) NOT FOUND" exit 12 fi @@ -69,7 +69,7 @@ WW3_namelists() { export WW3_RST_OUTDIR="./WW3_RESTART/" # Ensure the template exists - local template=${WW3_INPUT_TEMPLATE:-"${PARMgfs}/ufs/ww3_shel.nml.IN"} + local template=${WW3_INPUT_TEMPLATE:-"${PARMglobal}/ufs/ww3_shel.nml.IN"} if [[ ! -f "${template}" ]]; then echo "FATAL ERROR: template '${template}' does not exist, ABORT!" 
exit 1 diff --git a/ush/parsing_ufs_configure.sh b/ush/parsing_ufs_configure.sh index 64c7e9576da..26b75a7de84 100755 --- a/ush/parsing_ufs_configure.sh +++ b/ush/parsing_ufs_configure.sh @@ -128,7 +128,7 @@ UFS_configure() { echo "Rendered ufs.configure:" cat ufs.configure - cpreq "${HOMEgfs}/sorc/ufs_model.fd/tests/parm/fd_ufs.yaml" fd_ufs.yaml + cpreq "${HOMEglobal}/sorc/ufs_model.fd/tests/parm/fd_ufs.yaml" fd_ufs.yaml echo "SUB ${FUNCNAME[0]}: ufs.configure ends" diff --git a/ush/preamble.sh b/ush/preamble.sh index 8e96496e911..4028c4b5cb0 100755 --- a/ush/preamble.sh +++ b/ush/preamble.sh @@ -182,7 +182,7 @@ function err_exit() { trap "postamble ${_calling_script} ${start_time} \$?" EXIT # shellcheck disable= -source "${HOMEgfs}/ush/bash_utils.sh" +source "${HOMEglobal}/ush/bash_utils.sh" # Turn on our settings shopt -s nullglob # Allow null globs instead of treating * as literal diff --git a/ush/prep_sfc_ice_blend.sh b/ush/prep_sfc_ice_blend.sh index 19a411312d3..d0ca5ed8720 100755 --- a/ush/prep_sfc_ice_blend.sh +++ b/ush/prep_sfc_ice_blend.sh @@ -48,13 +48,13 @@ export pgm=emcsfc_ice_blend # the input data. ims may be grib1 or grib2. five_min files are grib 2. 
IMS_FILE=${IMS_FILE:-"ims.grib2"} FIVE_MIN_ICE_FILE=${FIVE_MIN_ICE_FILE:-"seaice.5min.grib2"} -FIVE_MIN_ICE_MASK_FILE=${FIVE_MIN_ICE_MASK_FILE:-"${FIXgfs}/am/emcsfc_gland5min.grib2"} +FIVE_MIN_ICE_MASK_FILE=${FIVE_MIN_ICE_MASK_FILE:-"${FIXglobal}/am/emcsfc_gland5min.grib2"} # the output ice blend data (grib) BLENDED_ICE_FILE=${BLENDED_ICE_FILE:-"seaice.5min.blend"} # the program executable -BLENDICEEXEC=${BLENDICEEXEC:-"${EXECgfs}/emcsfc_ice_blend"} +BLENDICEEXEC=${BLENDICEEXEC:-"${EXECglobal}/emcsfc_ice_blend"} # standard output file pgmout=${pgmout:-"OUTPUT"} diff --git a/ush/prep_sfc_snow.sh b/ush/prep_sfc_snow.sh index edd523abc17..83098717af3 100755 --- a/ush/prep_sfc_snow.sh +++ b/ush/prep_sfc_snow.sh @@ -44,14 +44,14 @@ # ######################################################################### -source "${USHgfs}/atparse.bash" # include function atparse for parsing @[XYZ] templated files +source "${USHglobal}/atparse.bash" # include function atparse for parsing @[XYZ] templated files #------------------------------------------------------------------------ # The snow2mdl executable and namelist #------------------------------------------------------------------------ -SNOW2MDLEXEC=${SNOW2MDLEXEC:-"${EXECgfs}/emcsfc_snow2mdl"} -SNOW2MDLNMLTMPL=${SNOW2MDLNMLTMPL:-"${PARMgfs}/prep_sfc/snow2mdl.nml.tmpl"} +SNOW2MDLEXEC=${SNOW2MDLEXEC:-"${EXECglobal}/emcsfc_snow2mdl"} +SNOW2MDLNMLTMPL=${SNOW2MDLNMLTMPL:-"${PARMglobal}/prep_sfc/snow2mdl.nml.tmpl"} #------------------------------------------------------------------------ # Fixed files that describe the model grid: landmask, latitudes/longitudes. 
@@ -77,7 +77,7 @@ IMS_FILE=${IMS_FILE:-"imssnow96.grib2"} # File of snow cover climo used to qc the input snow data #------------------------------------------------------------------------ -CLIMO_QC=${CLIMO_QC:-"${FIXgfs}/am/emcsfc_snow_cover_climo.grib2"} +CLIMO_QC=${CLIMO_QC:-"${FIXglobal}/am/emcsfc_snow_cover_climo.grib2"} #------------------------------------------------------------------------ # Output snow analysis on model grid diff --git a/ush/python/pygfs/task/aero_analysis.py b/ush/python/pygfs/task/aero_analysis.py index 1b9e7b4926f..6a3695ed171 100644 --- a/ush/python/pygfs/task/aero_analysis.py +++ b/ush/python/pygfs/task/aero_analysis.py @@ -170,7 +170,7 @@ def _add_fms_cube_sphere_increments(self) -> None: inc_template = os.path.join(self.task_config.DATA, 'anl', 'aeroinc.' + increment_template) bkg_template = os.path.join(self.task_config.DATA, 'anl', restart_template) # get list of increment vars - incvars_list_path = os.path.join(self.task_config['PARMgfs'], 'gdas', 'aero', 'aero_det_inc_vars.yaml') + incvars_list_path = os.path.join(self.task_config['PARMglobal'], 'gdas', 'aero', 'aero_det_inc_vars.yaml') incvars = YAMLFile(path=incvars_list_path)['incvars'] self.add_fv3_increments(inc_template, bkg_template, incvars) diff --git a/ush/python/pygfs/task/aero_bmatrix.py b/ush/python/pygfs/task/aero_bmatrix.py index 2b8332deb73..50d3ff93807 100644 --- a/ush/python/pygfs/task/aero_bmatrix.py +++ b/ush/python/pygfs/task/aero_bmatrix.py @@ -47,7 +47,7 @@ def __init__(self, config): 'npz_anl': self.task_config['LEVS'] - 1, 'npz': self.task_config.LEVS - 1, 'BERROR_YAML': f'aero_background_error_static_{self.task_config.STATICB_TYPE}', - 'BERROR_DATA_DIR': f'{self.task_config.FIXgfs}/gdas/aero/clim_b', + 'BERROR_DATA_DIR': f'{self.task_config.FIXglobal}/gdas/aero/clim_b', 'AERO_BMATRIX_RESCALE_YAML': 'aero_gen_bmatrix_rescale_default.yaml.j2', } )) diff --git a/ush/python/pygfs/task/archive.py b/ush/python/pygfs/task/archive.py index 
5fef14ffe99..3e9c317905c 100644 --- a/ush/python/pygfs/task/archive.py +++ b/ush/python/pygfs/task/archive.py @@ -59,7 +59,7 @@ def configure_vrfy(self, arch_dict: AttrDict) -> (AttrDict): # TODO This really doesn't belong in archiving and should be moved elsewhere Archive._rename_cyclone_expt(arch_dict) - archive_parm = os.path.join(arch_dict.PARMgfs, "archive") + archive_parm = os.path.join(arch_dict.PARMglobal, "archive") # Collect the dataset to archive locally # Select template based on RUN type: ensemble (enkf) or deterministic (NET) @@ -124,7 +124,7 @@ def configure_tars(self, arch_dict: AttrDict) -> (List[AttrDict]): f"as no warm restarts or warm ICs are to be archived.") return [] - archive_parm = os.path.join(arch_dict.PARMgfs, "archive") + archive_parm = os.path.join(arch_dict.PARMglobal, "archive") # Add the glob.glob function for capturing log filenames # TODO remove this kludge once log filenames are explicit @@ -688,8 +688,8 @@ def _pop_git_info(self, arch_dict: AttrDict) -> None: EXPDIR: str Location of the EXPDIR - HOMEgfs: str - Location of the HOMEgfs (the global workflow) + HOMEglobal: str + Location of the HOMEglobal (the global workflow) ARCH_HASHES: bool Whether to archive git hashes of the workflow and submodules ARCH_DIFFS: bool @@ -699,7 +699,7 @@ def _pop_git_info(self, arch_dict: AttrDict) -> None: # Get commonly used variables arch_hashes = arch_dict.ARCH_HASHES arch_diffs = arch_dict.ARCH_DIFFS - homegfs = arch_dict.HOMEgfs + homegfs = arch_dict.HOMEglobal expdir = arch_dict.EXPDIR # Find the git command @@ -708,7 +708,7 @@ def _pop_git_info(self, arch_dict: AttrDict) -> None: raise FileNotFoundError("FATAL ERROR: the git command could not be found!") output = "" - # Navigate to HOMEgfs to run the git commands + # Navigate to HOMEglobal to run the git commands with chdir(homegfs): # Are we running git to get hashes? 
diff --git a/ush/python/pygfs/task/chem_fire_emission.py b/ush/python/pygfs/task/chem_fire_emission.py index 75cdb4b07ac..86d36e3a141 100644 --- a/ush/python/pygfs/task/chem_fire_emission.py +++ b/ush/python/pygfs/task/chem_fire_emission.py @@ -94,7 +94,7 @@ def initialize(self) -> None: Notes ----- The method expects the following configuration to be available: - - HOMEgfs : str + - HOMEglobal : str Base directory containing workflow configuration - DATA : str Working directory path @@ -121,7 +121,7 @@ def initialize(self) -> None: logger.info(f'Using AERO_EMIS_FIRE: {aero_emis_fire}') logger.info(f'Using AERO_EMIS_FIRE_VERSION: {aero_emis_fire_version}') - fire_emission_template = os.path.join(self.task_config.HOMEgfs, 'parm', 'chem', 'fire_emission.yaml.j2') + fire_emission_template = os.path.join(self.task_config.HOMEglobal, 'parm', 'chem', 'fire_emission.yaml.j2') if not os.path.exists(fire_emission_template): raise WorkflowException(f"Fire emission template file not found: {fire_emission_template}") @@ -215,7 +215,7 @@ def initialize(self) -> None: } # Parse template and update task configuration - yaml_template = os.path.join(self.task_config.HOMEgfs, 'parm', 'chem', 'fire_emission.yaml.j2') + yaml_template = os.path.join(self.task_config.HOMEglobal, 'parm', 'chem', 'fire_emission.yaml.j2') if not os.path.exists(yaml_template): logger.warning(f"Template file not found: {yaml_template}, using default configuration") yaml_config = {'fire_emission': {}} @@ -937,7 +937,7 @@ def render_template(self, tmpl_dict: Dict[str, Any]) -> None: """ logger.info("Rendering YAML template") # Parse template and update task configuration - yaml_template = os.path.join(self.task_config.HOMEgfs, 'parm', 'chem', 'fire_emission.yaml.j2') + yaml_template = os.path.join(self.task_config.HOMEglobal, 'parm', 'chem', 'fire_emission.yaml.j2') if not os.path.exists(yaml_template): logger.warning(f"Template file not found: {yaml_template}, using default configuration") yaml_config = 
{'fire_emission': {}} diff --git a/ush/python/pygfs/task/fetch.py b/ush/python/pygfs/task/fetch.py index cedebc9abaa..a33b2316996 100755 --- a/ush/python/pygfs/task/fetch.py +++ b/ush/python/pygfs/task/fetch.py @@ -50,7 +50,7 @@ def configure(self, fetch_dict: Dict[str, Any]): """ fetch_yaml = fetch_dict.FETCH_YAML_TMPL - fetch_parm = os.path.join(fetch_dict.PARMgfs, "fetch") + fetch_parm = os.path.join(fetch_dict.PARMglobal, "fetch") parsed_fetch = parse_j2yaml(os.path.join(fetch_parm, fetch_yaml), fetch_dict) diff --git a/ush/python/pygfs/task/globus_hpss.py b/ush/python/pygfs/task/globus_hpss.py index f147e44ea78..fa7018c1a37 100644 --- a/ush/python/pygfs/task/globus_hpss.py +++ b/ush/python/pygfs/task/globus_hpss.py @@ -123,7 +123,7 @@ def configure(self, globus_dict: Dict[str, Any]) -> (Dict[str, Any], List[Dict[s Sets of tarballs and instructions for sending them to HPSS via Globus """ - globus_parm = os.path.join(globus_dict.PARMgfs, "globus") + globus_parm = os.path.join(globus_dict.PARMglobal, "globus") com_conf = globus_dict.COMIN_CONF diff --git a/ush/python/pygfs/task/marine_analysis.py b/ush/python/pygfs/task/marine_analysis.py index 6f96014ed0a..5ba5e5b0304 100644 --- a/ush/python/pygfs/task/marine_analysis.py +++ b/ush/python/pygfs/task/marine_analysis.py @@ -77,7 +77,7 @@ def __init__(self, config): # Create a local dictionary that is repeatedly used across this class self.task_config.update(AttrDict( { - 'PARMmarine': os.path.join(self.task_config.PARMgfs, 'gdas', 'marine'), + 'PARMmarine': os.path.join(self.task_config.PARMglobal, 'gdas', 'marine'), 'ENSPERT_RELPATH': _enspert_relpath, 'berror_model': _berror_model, 'rst_date': _rst_date, diff --git a/ush/python/pygfs/task/marine_bmat.py b/ush/python/pygfs/task/marine_bmat.py index 62adf797af8..4f82f320385 100644 --- a/ush/python/pygfs/task/marine_bmat.py +++ b/ush/python/pygfs/task/marine_bmat.py @@ -40,7 +40,7 @@ def __init__(self, config): """ super().__init__(config) - _calc_scale_exec = 
os.path.join(self.task_config.HOMEgfs, 'ush', 'python', 'soca', 'calc_scales.py') + _calc_scale_exec = os.path.join(self.task_config.HOMEglobal, 'ush', 'python', 'soca', 'calc_scales.py') # compute the relative path from self.task_config.DATA to self.task_config.DATAenspert _enspert_relpath = os.path.relpath(self.task_config.DATAens, self.task_config.DATA) @@ -48,7 +48,7 @@ def __init__(self, config): # Create a local dictionary that is repeatedly used across this class self.task_config.update(AttrDict( { - 'PARMmarine': os.path.join(self.task_config.PARMgfs, 'gdas', 'marine'), + 'PARMmarine': os.path.join(self.task_config.PARMglobal, 'gdas', 'marine'), 'CALC_SCALE_EXEC': _calc_scale_exec, 'ENSPERT_RELPATH': _enspert_relpath, 'CALC_SCALE_EXEC': _calc_scale_exec, diff --git a/ush/python/pygfs/task/marine_letkf.py b/ush/python/pygfs/task/marine_letkf.py index 7732d5f1d4e..8b057f19015 100644 --- a/ush/python/pygfs/task/marine_letkf.py +++ b/ush/python/pygfs/task/marine_letkf.py @@ -46,7 +46,7 @@ def __init__(self, config: Dict) -> None: # Create a local dictionary that is repeatedly used across this class self.task_config.update(AttrDict( { - 'PARMmarine': os.path.join(self.task_config.PARMgfs, 'gdas', 'marine'), + 'PARMmarine': os.path.join(self.task_config.PARMglobal, 'gdas', 'marine'), 'ENSPERT_RELPATH': _enspert_relpath, 'letkf_app': 'true', 'DIST_HALO_SIZE': 3500000, diff --git a/ush/python/pygfs/task/marine_recenter.py b/ush/python/pygfs/task/marine_recenter.py index 418aafb463b..9b2f757aef7 100644 --- a/ush/python/pygfs/task/marine_recenter.py +++ b/ush/python/pygfs/task/marine_recenter.py @@ -51,7 +51,7 @@ def __init__(self, config: Dict) -> None: # Create a local dictionary that is repeatedly used across this class self.task_config.update(AttrDict( { - 'PARMmarine': os.path.join(self.task_config.PARMgfs, 'gdas', 'marine'), + 'PARMmarine': os.path.join(self.task_config.PARMglobal, 'gdas', 'marine'), 'ENSPERT_RELPATH': _enspert_relpath, 'cice_rst_date': 
_cice_rst_date, } diff --git a/ush/python/pygfs/task/nexus_emission.py b/ush/python/pygfs/task/nexus_emission.py index dfc4a3ef360..4a079fc96c8 100644 --- a/ush/python/pygfs/task/nexus_emission.py +++ b/ush/python/pygfs/task/nexus_emission.py @@ -116,7 +116,7 @@ def initialize(self) -> None: Notes ----- The method expects the following configuration to be available: - - HOMEgfs : str + - HOMEglobal : str Base directory containing workflow configuration - DATA : str Working directory path @@ -206,7 +206,7 @@ def initialize(self) -> None: 'NEXUS_YMIN': self.task_config.NEXUS_YMIN, 'NEXUS_YMAX': self.task_config.NEXUS_YMAX, 'LOCAL_INPUT_DIR': os.path.join(self.task_config.DATA, 'INPUT'), - 'NEXUS_EXECUTABLE': os.path.join(self.task_config.get('HOMEgfs', None), "exec/nexus.x"), + 'NEXUS_EXECUTABLE': os.path.join(self.task_config.get('HOMEglobal', None), "exec/nexus.x"), "DATA": self.task_config.DATA, "NEXUS_DO_MEGAN": self.task_config.get('NEXUS_DO_MEGAN', False), "NEXUS_DO_CEDS2019": self.task_config.get('NEXUS_DO_CEDS2019', True), @@ -293,7 +293,7 @@ def initialize(self) -> None: dest_dir = os.path.dirname(dest_file) os.makedirs(dest_dir, exist_ok=True) - yaml_template = os.path.join(self.task_config.HOMEgfs, 'parm', 'chem', 'nexus_emission.yaml.j2') + yaml_template = os.path.join(self.task_config.HOMEglobal, 'parm', 'chem', 'nexus_emission.yaml.j2') if not os.path.exists(yaml_template): logger.warning(f"Template file not found: {yaml_template}, using default configuration") yaml_config = {'nexus_emission': {}} diff --git a/ush/python/pygfs/task/offline_analysis.py b/ush/python/pygfs/task/offline_analysis.py index b75f1570cd2..33c90bec7ae 100644 --- a/ush/python/pygfs/task/offline_analysis.py +++ b/ush/python/pygfs/task/offline_analysis.py @@ -153,7 +153,7 @@ def initialize(self) -> None: executables_to_copy = [] executable_list = ['enkf_chgres_recenter_nc.x', 'calc_increment_ens_ncio.x', 'tref_calc.x'] for exec_name in executable_list: - 
executables_to_copy.append([os.path.join(self.task_config.EXECgfs, exec_name), + executables_to_copy.append([os.path.join(self.task_config.EXECglobal, exec_name), os.path.join(self.task_config.DATA, exec_name)]) FileHandler({'copy': executables_to_copy}).sync() diff --git a/ush/python/pygfs/task/snow_analysis.py b/ush/python/pygfs/task/snow_analysis.py index 9c31421bcd4..efc74a916d2 100644 --- a/ush/python/pygfs/task/snow_analysis.py +++ b/ush/python/pygfs/task/snow_analysis.py @@ -372,7 +372,7 @@ def add_increments(self) -> None: 'current_cycle': bkgtime, 'CASE': self.task_config.CASE, 'DATA': self.task_config.DATA, - 'HOMEgfs': self.task_config.HOMEgfs, + 'HOMEglobal': self.task_config.HOMEglobal, 'OCNRES': self.task_config.OCNRES, 'ens_size': self.task_config.ens_size, 'ntiles': self.task_config.ntiles, diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py index 5f289aff01c..f3d2937f712 100644 --- a/ush/python/pygfs/task/snowens_analysis.py +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -367,7 +367,7 @@ def add_increments(self) -> None: 'current_cycle': bkgtime, 'CASE': self.task_config.CASE, 'DATA': self.task_config.DATA, - 'HOMEgfs': self.task_config.HOMEgfs, + 'HOMEglobal': self.task_config.HOMEglobal, 'OCNRES': self.task_config.OCNRES, 'MYMEM': f"{mem:03d}", 'CASE_ENS': self.task_config.CASE_ENS, diff --git a/ush/python/pygfs/utils/archive_tar_vars.py b/ush/python/pygfs/utils/archive_tar_vars.py index b88a50878f3..3372d32cc79 100644 --- a/ush/python/pygfs/utils/archive_tar_vars.py +++ b/ush/python/pygfs/utils/archive_tar_vars.py @@ -157,7 +157,7 @@ def add_config_vars(config_dict: AttrDict) -> AttrDict: Configuration keys extracted (if present): - Basic: ATARDIR, current_cycle, IAUFHRS, RUN, PDY, PSLOT - - Archive control: DO_ARCHCOM, ARCHCOM_TO, ROTDIR, PARMgfs, ARCDIR, SDATE, MODE + - Archive control: DO_ARCHCOM, ARCHCOM_TO, ROTDIR, PARMglobal, ARCDIR, SDATE, MODE - Ensemble: ENSGRP, NMEM_EARCGRP, NMEM_ENS, 
NMEM_ENS_GFS - EnKF operations: DO_CALC_INCREMENT_ENKF_GFS, DO_JEDIATMENS, DO_JEDIATMENS_SPLIT_OBSSOL - Forecast: FHMIN_ENKF, FHMAX_ENKF_GFS, FHOUT_ENKF_GFS, FHMAX_ENKF, FHOUT_ENKF @@ -187,7 +187,7 @@ def add_config_vars(config_dict: AttrDict) -> AttrDict: config_keys = [ # Basic configuration 'ATARDIR', 'current_cycle', 'RUN', 'PDY', 'PSLOT', 'NET', 'MODE', - 'PARMgfs', 'ROTDIR', 'SDATE', + 'PARMglobal', 'ROTDIR', 'SDATE', # Archive control 'DO_ARCHCOM', 'ARCHCOM_TO', 'ARCDIR', # Data assimilation @@ -237,7 +237,7 @@ def add_config_vars(config_dict: AttrDict) -> AttrDict: 'OCNRES', 'ICERES', 'waveGRD', 'WAVE_OUT_GRIDS', # Other 'DO_BUFRSND', 'NUM_SND_COLLECTIVES', 'DOBNDPNT_WAVE', - 'OFFSET_START_HOUR', 'EXPDIR', 'EDATE', 'HOMEgfs', + 'OFFSET_START_HOUR', 'EXPDIR', 'EDATE', 'HOMEglobal', 'DO_GEMPAK', 'DATASETS_YAML', 'TARBALL_TYPE', ]) diff --git a/ush/python/pygfs/utils/archive_vrfy_vars.py b/ush/python/pygfs/utils/archive_vrfy_vars.py index 621e0012d08..b9388b703da 100644 --- a/ush/python/pygfs/utils/archive_vrfy_vars.py +++ b/ush/python/pygfs/utils/archive_vrfy_vars.py @@ -121,7 +121,7 @@ def add_config_vars(config_dict: AttrDict) -> AttrDict: config_dict[key] = f"{config_dict[key]:03d}" # Configuration keys to extract (if present) - config_keys = ['current_cycle', 'RUN', 'PSLOT', 'ROTDIR', 'PARMgfs', + config_keys = ['current_cycle', 'RUN', 'PSLOT', 'ROTDIR', 'PARMglobal', 'ARCDIR', 'MODE', 'DO_JEDIATMENS', 'DO_FIT2OBS', 'DO_JEDIATMVAR', 'DO_JEDISNOWDA', 'DO_AERO_ANL', 'DO_PREP_OBS_AERO', 'NET', 'FHOUT_GFS', 'FHMAX_HF_GFS', 'FHMAX_FITS', 'FHMAX', 'FHOUT', diff --git a/ush/python/pygfs/utils/marine_da_utils.py b/ush/python/pygfs/utils/marine_da_utils.py index e185c43d578..a42f473b1f6 100644 --- a/ush/python/pygfs/utils/marine_da_utils.py +++ b/ush/python/pygfs/utils/marine_da_utils.py @@ -37,7 +37,7 @@ def link_executable(task_config: AttrDict, exe_name: str) -> None: """ logger.info(f"Link executable {exe_name}") logger.warn("WARNING: Linking is not permitted 
per EE2.") - exe_src = os.path.join(task_config.EXECgfs, exe_name) + exe_src = os.path.join(task_config.EXECglobal, exe_name) exe_dest = os.path.join(task_config.DATA, exe_name) if os.path.exists(exe_dest): os.remove(exe_dest) diff --git a/ush/radmon_verf_angle.sh b/ush/radmon_verf_angle.sh index 1a4d8d5aa3c..fb30dd9555c 100755 --- a/ush/radmon_verf_angle.sh +++ b/ush/radmon_verf_angle.sh @@ -90,13 +90,13 @@ else fi angle_exec=radmon_angle.x -shared_scaninfo="${shared_scaninfo:-${PARMgfs}/monitor/gdas_radmon_scaninfo.txt}" +shared_scaninfo="${shared_scaninfo:-${PARMglobal}/monitor/gdas_radmon_scaninfo.txt}" scaninfo=scaninfo.txt #-------------------------------------------------------------------- # Copy extraction program and supporting files to working directory -cpreq "${EXECgfs}/${angle_exec}" ./ +cpreq "${EXECglobal}/${angle_exec}" ./ cpreq "${shared_scaninfo}" "./${scaninfo}" #-------------------------------------------------------------------- @@ -183,7 +183,7 @@ EOF done # for type in ${SATYPE} loop -"${USHgfs}/rstprod.sh" +"${USHglobal}/rstprod.sh" tar_file=radmon_angle.tar if compgen -G "angle*.ieee_d*" > /dev/null || compgen -G "angle*.ctl*" > /dev/null; then diff --git a/ush/radmon_verf_bcoef.sh b/ush/radmon_verf_bcoef.sh index fa92f0d0432..99d0750b0ee 100755 --- a/ush/radmon_verf_bcoef.sh +++ b/ush/radmon_verf_bcoef.sh @@ -27,7 +27,7 @@ # Imported Shell Variables: # RADMON_SUFFIX data source suffix # defauls to opr -# EXECgfs executable directory +# EXECglobal executable directory # RAD_AREA global or regional flag # defaults to global # TANKverf_rad data repository @@ -88,7 +88,7 @@ fi #-------------------------------------------------------------------- # Copy extraction program and supporting files to working directory -cpreq "${EXECgfs}/${bcoef_exec}" "./${bcoef_exec}" +cpreq "${EXECglobal}/${bcoef_exec}" "./${bcoef_exec}" cpreq "${biascr}" ./biascr.txt #-------------------------------------------------------------------- @@ -175,7 +175,7 @@ EOF 
done # dtype in $gesanl loop done # type in $SATYPE loop -"${USHgfs}/rstprod.sh" +"${USHglobal}/rstprod.sh" if compgen -G "bcoef*.ieee_d*" > /dev/null || compgen -G "bcoef*.ctl*" > /dev/null; then tar_file=radmon_bcoef.tar diff --git a/ush/radmon_verf_bcor.sh b/ush/radmon_verf_bcor.sh index dcf9e2d2ec0..f07e07ad36e 100755 --- a/ush/radmon_verf_bcor.sh +++ b/ush/radmon_verf_bcor.sh @@ -27,7 +27,7 @@ # Imported Shell Variables: # RADMON_SUFFIX data source suffix # defauls to opr -# EXECgfs executable directory +# EXECglobal executable directory # defaults to current directory # RAD_AREA global or regional flag # defaults to global @@ -86,7 +86,7 @@ fi #-------------------------------------------------------------------- # Copy extraction program to working directory -cpreq "${EXECgfs}/${bcor_exec}" "./${bcor_exec}" +cpreq "${EXECglobal}/${bcor_exec}" "./${bcor_exec}" #-------------------------------------------------------------------- # Run program for given time @@ -172,7 +172,7 @@ EOF done # dtype in $gesanl loop done # type in $SATYPE loop -"${USHgfs}/rstprod.sh" +"${USHglobal}/rstprod.sh" tar_file=radmon_bcor.tar if compgen -G "bcor*.ieee_d*" > /dev/null || compgen -G "bcor*.ctl*" > /dev/null; then diff --git a/ush/radmon_verf_time.sh b/ush/radmon_verf_time.sh index 9f9ab5471df..5cc248ebc33 100755 --- a/ush/radmon_verf_time.sh +++ b/ush/radmon_verf_time.sh @@ -71,8 +71,8 @@ # File names -radmon_err_rpt=${radmon_err_rpt:-${USHgfs}/radmon_err_rpt.sh} -base_file=${base_file:-${PARMgfs}/monitor/gdas_radmon_base.tar} +radmon_err_rpt=${radmon_err_rpt:-${USHglobal}/radmon_err_rpt.sh} +base_file=${base_file:-${PARMglobal}/monitor/gdas_radmon_base.tar} report=report.txt disclaimer=disclaimer.txt @@ -116,7 +116,7 @@ fi #-------------------------------------------------------------------- # Copy extraction program and base files to working directory #------------------------------------------------------------------- -cpreq "${EXECgfs}/${time_exec}" ./ +cpreq 
"${EXECglobal}/${time_exec}" ./ iyy="${PDY:0:4}" imm="${PDY:4:2}" @@ -223,7 +223,7 @@ EOF done done -"${USHgfs}/rstprod.sh" +"${USHglobal}/rstprod.sh" if compgen -G "time*.ieee_d*" > /dev/null || compgen -G "time*.ctl*" > /dev/null; then tar_file=radmon_time.tar @@ -265,7 +265,7 @@ EOF # tmp_satype="./tmp_satype.txt" echo "${SATYPE}" > "${tmp_satype}" - "${USHgfs}/radmon_diag_ck.sh" --rad "${radstat}" --sat "${tmp_satype}" --out "${diag}" + "${USHglobal}/radmon_diag_ck.sh" --rad "${radstat}" --sat "${tmp_satype}" --out "${diag}" if [[ -s "${diag}" ]]; then cat << EOF > "${diag_hdr}" diff --git a/ush/regrid_gsiSfcIncr_to_tile.sh b/ush/regrid_gsiSfcIncr_to_tile.sh index ffc3d9b8200..9243d7b5e89 100755 --- a/ush/regrid_gsiSfcIncr_to_tile.sh +++ b/ush/regrid_gsiSfcIncr_to_tile.sh @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${HOMEgfs}/ush/atparse.bash" +source "${HOMEglobal}/ush/atparse.bash" #------------------------------------------------------------------------------------------------- # Script to regrid surface increment from GSI grid @@ -42,7 +42,7 @@ export jres=${LATB_CASE_IN} export ireso=${CASE_OUT:1} export jreso=${CASE_OUT:1} -regrid_nml_tmpl="${PARMgfs}/regrid_sfc/regrid.nml_tmpl" +regrid_nml_tmpl="${PARMglobal}/regrid_sfc/regrid.nml_tmpl" if [[ "${LFHR}" -ge 0 ]]; then soilinc_fhrs=("${LFHR}") @@ -211,7 +211,7 @@ if [[ "${NMEM_REGRID}" -gt 1 ]]; then fi # Run MPMD to stage input files -"${USHgfs}/run_mpmd.sh" "cmdfile_in" && true +"${USHglobal}/run_mpmd.sh" "cmdfile_in" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit "run_mpmd.sh failed to copy input and fix data!" @@ -268,7 +268,7 @@ if [[ "${DO_LAND_IAU}" = ".true." ]]; then fi # Run MPMD to save output files -"${USHgfs}/run_mpmd.sh" "cmdfile_out" && true +"${USHglobal}/run_mpmd.sh" "cmdfile_out" && true export err=$? if [[ ${err} -ne 0 ]]; then err_exit "run_mpmd.sh failed to copy output files to COMOUT, ABORT!" 
diff --git a/ush/run_mpmd.sh b/ush/run_mpmd.sh index 773f8042d34..dba1f0f5415 100755 --- a/ush/run_mpmd.sh +++ b/ush/run_mpmd.sh @@ -31,7 +31,7 @@ # ################################################################################ -source "${USHgfs}/preamble.sh" +source "${USHglobal}/preamble.sh" cmdfile=${1:?"run_mpmd requires an input file containing commands to execute in MPMD/serial mode"} diff --git a/ush/syndat_getjtbul.sh b/ush/syndat_getjtbul.sh index 78337be415c..0c8b8fdf047 100755 --- a/ush/syndat_getjtbul.sh +++ b/ush/syndat_getjtbul.sh @@ -99,7 +99,7 @@ if [[ -s jtwcbul ]]; then echo "Processing JTWC bulletin halfs into tcvitals records" fi -pgm=$(basename "${EXECgfs}/syndat_getjtbul.x") +pgm=$(basename "${EXECglobal}/syndat_getjtbul.x") export pgm if [[ -s prep_step ]]; then unset_strict @@ -115,7 +115,7 @@ rm -f fnoc export FORT11=jtwcbul export FORT51=fnoc -time -p "${EXECgfs}/${pgm}" 2> errfile +time -p "${EXECglobal}/${pgm}" 2> errfile errget=$? cat errfile rm errfile diff --git a/ush/syndat_qctropcy.sh b/ush/syndat_qctropcy.sh index 74db606b0a0..77496960a49 100755 --- a/ush/syndat_qctropcy.sh +++ b/ush/syndat_qctropcy.sh @@ -55,7 +55,7 @@ # data base # (Default: /dcom/us007003) # slmask - path to t126 32-bit gaussian land/sea mask file -# (Default: ${FIXgfs}/am/syndat_slmask.t126.gaussian) +# (Default: ${FIXglobal}/am/syndat_slmask.t126.gaussian) # copy_back - switch to copy updated files back to archive directory and # to tcvitals directory # (Default: YES) @@ -67,7 +67,7 @@ ARCHSYND=${ARCHSYND:-${COMROOTp3}/gfs/prod/syndat} HOMENHC=${HOMENHC:-/gpfs/dell2/nhc/save/guidance/storm-data/ncep} TANK_TROPCY=${TANK_TROPCY:-${DCOMROOT}/us007003} -slmask=${slmask:-${FIXgfs}/am/syndat_slmask.t126.gaussian} +slmask=${slmask:-${FIXglobal}/am/syndat_slmask.t126.gaussian} copy_back=${copy_back:-YES} files_override=${files_override:-""} @@ -173,12 +173,12 @@ if [[ -n "${files_override}" ]]; then # for testing, typically want FILES=F fi echo " &INPUT RUNID = 
'${net}_${tmmark}_${cyc}', FILES = ${files} " > vitchk.inp -cat "${PARMgfs}/relo/syndat_qctropcy.${RUN}.parm" >> vitchk.inp +cat "${PARMglobal}/relo/syndat_qctropcy.${RUN}.parm" >> vitchk.inp # Copy the fixed fields -cpreq "${FIXgfs}/am/syndat_fildef.vit" fildef.vit -cpreq "${FIXgfs}/am/syndat_stmnames" stmnames +cpreq "${FIXglobal}/am/syndat_fildef.vit" fildef.vit +cpreq "${FIXglobal}/am/syndat_stmnames" stmnames rm -f nhc fnoc lthistry @@ -202,19 +202,19 @@ if [[ "${copy_back}" == 'YES' ]]; then fi mv -f nhc nhc1 -"${USHgfs}/parse-storm-type.pl" nhc1 > nhc +"${USHglobal}/parse-storm-type.pl" nhc1 > nhc cpreq -p nhc nhc.ORIG # JTWC/FNOC ... execute syndat_getjtbul script to write into working directory # as fnoc; copy to archive -"${USHgfs}/syndat_getjtbul.sh" "${run_date}" +"${USHglobal}/syndat_getjtbul.sh" "${run_date}" touch fnoc if [[ "${copy_back}" == 'YES' ]]; then cat fnoc >> "${ARCHSYND}/syndat_tcvitals.${year}" fi mv -f fnoc fnoc1 -"${USHgfs}/parse-storm-type.pl" fnoc1 > fnoc +"${USHglobal}/parse-storm-type.pl" fnoc1 > fnoc if [[ "${SENDDBN}" == "YES" ]]; then "${DBNROOT}/bin/dbn_alert" MODEL SYNDAT_TCVITALS "${job}" "${ARCHSYND}/syndat_tcvitals.${year}" @@ -226,7 +226,7 @@ cpreq "${slmask}" slmask.126 # Execute program syndat_qctropcy -pgm=$(basename "${EXECgfs}/syndat_qctropcy.x") +pgm=$(basename "${EXECglobal}/syndat_qctropcy.x") export pgm if [[ -s prep_step ]]; then unset_strict @@ -241,7 +241,7 @@ fi echo "${run_date}" > run_date.dat export FORT11=slmask.126 export FORT12=run_date.dat -"${EXECgfs}/${pgm}" +"${EXECglobal}/${pgm}" errqct=$? 
set +x echo diff --git a/ush/tropcy_relocate.sh b/ush/tropcy_relocate.sh index d7b21daba56..17c2645c302 100755 --- a/ush/tropcy_relocate.sh +++ b/ush/tropcy_relocate.sh @@ -81,12 +81,12 @@ # or 'test') # Default is "prod" # envir_getges String indicating environment under which GETGES utility -# ush runs (see documentation in ${USHgfs}/getges.sh for +# ush runs (see documentation in ${USHglobal}/getges.sh for # more information) # Default is "$envir" # network_getges # String indicating job network under which GETGES utility -# ush runs (see documentation in ${USHgfs}/getges.sh for +# ush runs (see documentation in ${USHglobal}/getges.sh for # more information) # Default is "global" unless the center relocation processing # date/time is not a multiple of 3-hrs, then the default is @@ -113,13 +113,13 @@ # -stdoutmode ordered" # RELOX String indicating executable path for RELOCATE_MV_NVORTEX # program -# Default is "${EXECgfs}/relocate_mv_nvortex" +# Default is "${EXECglobal}/relocate_mv_nvortex" # SUPVX String indicating executable path for SUPVIT utility # program -# Default is "${EXECgfs}/supvit.x" +# Default is "${EXECglobal}/supvit.x" # GETTX String indicating executable path for GETTRK utility # program -# Default is "${EXECgfs}/gettrk" +# Default is "${EXECglobal}/gettrk" # BKGFREQ Frequency of background files for relocation # Default is "3" # SENDDBN String when set to "YES" alerts output files to $COMSP @@ -139,12 +139,12 @@ # # Modules and files referenced: # Herefile: RELOCATE_GES -# ${USHgfs}/tropcy_relocate_extrkr.sh -# ${USHgfs}/getges.sh +# ${USHglobal}/tropcy_relocate_extrkr.sh +# ${USHglobal}/getges.sh # /usr/bin/poe # postmsg # $DATA/prep_step (here and in child script -# ${USHgfs}/tropcy_relocate_extrkr.sh) +# ${USHglobal}/tropcy_relocate_extrkr.sh) # NOTE: The last script above is not a required utility. # If $DATA/prep_step not found, a scaled down version of it is # executed in-line. 
@@ -152,7 +152,7 @@ # programs : # RELOCATE_MV_NVORTEX - executable $RELOX # T126 GRIB global land/sea mask: -# ${FIXgfs}/am/global_slmask.t126.grb +# ${FIXglobal}/am/global_slmask.t126.grb # SUPVIT - executable $SUPVX # GETTRK - executable $GETTX # @@ -214,10 +214,10 @@ GRIBVERSION=${GRIBVERSION:-"grib2"} pgmout=${pgmout:-/dev/null} tstsp=${tstsp:-/tmp/null/} tmmark=${tmmark:-tm00} -RELOX=${RELOX:-${EXECgfs}/relocate_mv_nvortex} +RELOX=${RELOX:-${EXECglobal}/relocate_mv_nvortex} export BKGFREQ=${BKGFREQ:-1} -SUPVX=${SUPVX:-${EXECgfs}/supvit.x} -GETTX=${GETTX:-${EXECgfs}/gettrk} +SUPVX=${SUPVX:-${EXECglobal}/supvit.x} +GETTX=${GETTX:-${EXECglobal}/gettrk} ################################################ # EXECUTE TROPICAL CYCLONE RELOCATION PROCESSING @@ -253,7 +253,7 @@ VVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVV AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA EOF - "${USHgfs}/getges.sh" -e "${envir_getges}" -n "${network_getges}" \ + "${USHglobal}/getges.sh" -e "${envir_getges}" -n "${network_getges}" \ -v "${run_date}" -f "${fhr}" -t tcvges "tcvitals.m${fhr}" cat << EOF @@ -307,7 +307,7 @@ VVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVV AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA EOF - "${USHgfs}/getges.sh" -e "${envir_getges}" -n "${network_getges}" \ + "${USHglobal}/getges.sh" -e "${envir_getges}" -n "${network_getges}" \ -v "${run_date}" -t "${stype}" "${sges}" errges=$? 
if [[ "${errges}" -ne 0 ]]; then @@ -330,7 +330,7 @@ EOF # ---------------------------------------------------------------------------- if [[ ${fhr} -eq 0 ]]; then - "${USHgfs}/getges.sh" -e "${envir_getges}" -n "${network_getges}" -v "${run_date}" \ + "${USHglobal}/getges.sh" -e "${envir_getges}" -n "${network_getges}" -v "${run_date}" \ -t "${stype}" > "${COMOUT_OBS}/${RUN}.${cycle}.sgesprep_pre-relocate_pathname.${tmmark}" cpfs "${COMOUT_OBS}/${RUN}.${cycle}.sgesprep_pre-relocate_pathname.${tmmark}" \ "${COMOUT_OBS}/${RUN}.${cycle}.sgesprep_pathname.${tmmark}" @@ -350,7 +350,7 @@ VVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVV AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA EOF - "${USHgfs}/getges.sh" -e "${envir_getges}" -n "${network_getges}" \ + "${USHglobal}/getges.sh" -e "${envir_getges}" -n "${network_getges}" \ -v "${run_date}" -t "${ptype}" "${pges}" errges=$? if [[ "${errges}" -ne 0 ]]; then @@ -421,14 +421,14 @@ else # $DATA/$RUN.$cycle.relocate.model_track.tm00 # -------------------------------------------- - "${USHgfs}/tropcy_relocate_extrkr.sh" + "${USHglobal}/tropcy_relocate_extrkr.sh" err=$? 
if [[ "${err}" -ne 0 ]]; then # problem: script tropcy_relocate_extrkr.sh failed # ------------------------------------------------ export err - echo "FATAL ERROR: ${USHgfs}/tropcy_relocate_extrkr.sh failed" + echo "FATAL ERROR: ${USHglobal}/tropcy_relocate_extrkr.sh failed" err_exit "${msg}" fi diff --git a/ush/tropcy_relocate_extrkr.sh b/ush/tropcy_relocate_extrkr.sh index 33539aee36f..4ec91f07fcc 100755 --- a/ush/tropcy_relocate_extrkr.sh +++ b/ush/tropcy_relocate_extrkr.sh @@ -1436,9 +1436,9 @@ ${NLN} "${vdir}/trak.${cmodel}.radii.${symdh}" fort.63 ${NLN} "${vdir}/trak.${cmodel}.atcfunix.${symdh}" fort.64 if [[ "${BKGFREQ}" -eq 1 ]]; then - ${NLN} "${FIXgfs}/am/${cmodel}.tracker_leadtimes_hrly" "fort.15" + ${NLN} "${FIXglobal}/am/${cmodel}.tracker_leadtimes_hrly" "fort.15" elif [[ "${BKGFREQ}" -eq 3 ]]; then - ${NLN} "${FIXgfs}/am/${cmodel}.tracker_leadtimes" "fort.15" + ${NLN} "${FIXglobal}/am/${cmodel}.tracker_leadtimes" "fort.15" fi ${TIMEIT} "${APRNGETTX}" "${GETTX}" < "${namelist}" > outout 2> errfile diff --git a/ush/wave_extractvars.sh b/ush/wave_extractvars.sh index 2bef8368813..eb278b9b1ec 100755 --- a/ush/wave_extractvars.sh +++ b/ush/wave_extractvars.sh @@ -9,7 +9,7 @@ # Main body starts here ####################### -source "${USHgfs}/wave_domain_grid.sh" +source "${USHglobal}/wave_domain_grid.sh" process_grdID "${waveGRD}" com_varname="COMIN_WAVE_GRID_${GRDREGION}_${GRDRES}" diff --git a/ush/wave_grib2_sbs.sh b/ush/wave_grib2_sbs.sh index d36b7d25632..0ae11b059f7 100755 --- a/ush/wave_grib2_sbs.sh +++ b/ush/wave_grib2_sbs.sh @@ -60,7 +60,7 @@ if [[ -s "${com_dir}/${outfile}" ]] && [[ -s "${com_dir}/${outfile}.idx" ]]; the fi # Copy template files to grib_DATA (required for ww3_grib.x) -cpreq "${PARMgfs}/wave/ww3_grib2.${grdID}.inp.tmpl" "./ww3_grib2.${grdID}.inp.tmpl" +cpreq "${PARMglobal}/wave/ww3_grib2.${grdID}.inp.tmpl" "./ww3_grib2.${grdID}.inp.tmpl" # Link mod_def files from DATA into grib_DATA ${NLN} "${DATA}/mod_def.${grdID}" "./mod_def.ww3" 
@@ -85,7 +85,7 @@ cat ww3_grib.inp # Run the ww3_grib generation code export pgm="${NET,,}_ww3_grib.x" source prep_step -"${EXECgfs}/${pgm}" > "grib2_${grid_region}_${FH3}.out" 2>&1 +"${EXECglobal}/${pgm}" > "grib2_${grid_region}_${FH3}.out" 2>&1 export err=$? if [[ ${err} -ne 0 ]]; then echo "FATAL ERROR: ${pgm} returned non-zero status: ${err}; exiting!" diff --git a/ush/wave_grid_interp_sbs.sh b/ush/wave_grid_interp_sbs.sh index 3e4fc585687..cac4603fd0a 100755 --- a/ush/wave_grid_interp_sbs.sh +++ b/ush/wave_grid_interp_sbs.sh @@ -34,7 +34,7 @@ mkdir -p "${interp_DATA}" cd "${interp_DATA}" || exit 99 # Copy template files to interp_DATA (required for interpolation) -cpreq "${PARMgfs}/wave/ww3_gint.inp.tmpl" "ww3_gint.inp.tmpl" +cpreq "${PARMglobal}/wave/ww3_gint.inp.tmpl" "ww3_gint.inp.tmpl" # Link input files (WW3 output) from DATA into interp_DATA ${NLN} "${DATA}/out_grd.${waveGRD}" "./out_grd.${waveGRD}" @@ -45,12 +45,12 @@ for ID in ${waveGRD} ${grdID}; do done # Check if there is an interpolation weights file available, and copy it if so -if [[ -f "${FIXgfs}/wave/ww3_gint.WHTGRIDINT.bin.${waveGRD}.${grdID}" ]]; then - echo "INFO: Interpolation weights found at: '${FIXgfs}/wave/ww3_gint.WHTGRIDINT.bin.${waveGRD}.${grdID}'" - cpreq "${FIXgfs}/wave/ww3_gint.WHTGRIDINT.bin.${waveGRD}.${grdID}" "./WHTGRIDINT.bin" +if [[ -f "${FIXglobal}/wave/ww3_gint.WHTGRIDINT.bin.${waveGRD}.${grdID}" ]]; then + echo "INFO: Interpolation weights found at: '${FIXglobal}/wave/ww3_gint.WHTGRIDINT.bin.${waveGRD}.${grdID}'" + cpreq "${FIXglobal}/wave/ww3_gint.WHTGRIDINT.bin.${waveGRD}.${grdID}" "./WHTGRIDINT.bin" weights_found=1 else - echo "WARNING: No weights file found at: '${FIXgfs}/wave/ww3_gint.WHTGRIDINT.bin.${waveGRD}.${grdID}'" + echo "WARNING: No weights file found at: '${FIXglobal}/wave/ww3_gint.WHTGRIDINT.bin.${waveGRD}.${grdID}'" echo "INFO: Interpolation will create a new weights file" weights_found=0 fi @@ -69,7 +69,7 @@ cat ww3_gint.inp export 
pgm="${NET,,}_ww3_gint.x" source prep_step echo "INFO: Executing '${pgm}'" -"${EXECgfs}/${pgm}" > "grid_interp.${grdID}.out" 2>&1 +"${EXECglobal}/${pgm}" > "grid_interp.${grdID}.out" 2>&1 cat "grid_interp.${grdID}.out" if [[ ${err} -ne 0 ]]; then echo "FATAL ERROR: '${pgm}' failed!" diff --git a/ush/wave_grid_moddef.sh b/ush/wave_grid_moddef.sh index 65ee1550337..c422bb73204 100755 --- a/ush/wave_grid_moddef.sh +++ b/ush/wave_grid_moddef.sh @@ -41,9 +41,9 @@ fi export pgm="${NET,,}_ww3_grid.x" -echo "INFO: Executing ${EXECgfs}/${NET,,}_ww3_grid.x" +echo "INFO: Executing ${EXECglobal}/${NET,,}_ww3_grid.x" -"${EXECgfs}/${pgm}" +"${EXECglobal}/${pgm}" export err=$? if [[ "${err}" != '0' ]]; then diff --git a/ush/wave_outp_spec.sh b/ush/wave_outp_spec.sh index 96bfc196d9c..5a3bd3053fd 100755 --- a/ush/wave_outp_spec.sh +++ b/ush/wave_outp_spec.sh @@ -46,7 +46,7 @@ fi # 0.c Define directories and the search path. # The tested variables should be exported by the postprocessor script. -if [[ -z "${PDY+0}" || -z "${cyc+0}" || -z "${dtspec+0}" || -z "${EXECgfs+0}" || -z "${WAV_MOD_TAG+0}" || -z "${STA_DIR+0}" ]]; then +if [[ -z "${PDY+0}" || -z "${cyc+0}" || -z "${dtspec+0}" || -z "${EXECglobal+0}" || -z "${WAV_MOD_TAG+0}" || -z "${STA_DIR+0}" ]]; then echo 'FATAL ERROR: EXPORTED VARIABLES IN ww3_outp_spec.sh NOT SET' exit 3 fi @@ -109,9 +109,9 @@ fi export pgm="${NET,,}_ww3_outp.x" source prep_step -echo " Executing ${EXECgfs}/${pgm}" +echo " Executing ${EXECglobal}/${pgm}" -"${EXECgfs}/${pgm}" 1> "outp_${specdir}_${buoy}.out" 2>&1 +"${EXECglobal}/${pgm}" 1> "outp_${specdir}_${buoy}.out" 2>&1 export err=$? 
if [[ ${err} -ne 0 ]]; then echo "FATAL ERROR : ERROR IN ${pgm} *** " diff --git a/ush/wave_prnc_cur.sh b/ush/wave_prnc_cur.sh index a52b9336dc6..cc12f788d5e 100755 --- a/ush/wave_prnc_cur.sh +++ b/ush/wave_prnc_cur.sh @@ -44,7 +44,7 @@ mv -f "cur_temp3.nc" "cur_uv_${PDY}_${fext}${fh3}_flat.nc" # Convert to regular lat lon file # If weights need to be regenerated due to CDO ver change, use: # $CDO genbil,r4320x2160 rtofs_glo_2ds_f000_3hrly_prog.nc weights.nc -cpreq "${FIXgfs}/wave/weights_rtofs_to_r4320x2160.nc" ./weights.nc +cpreq "${FIXglobal}/wave/weights_rtofs_to_r4320x2160.nc" ./weights.nc # Interpolate to regular 5 min grid ${CDO} remap,r4320x2160,weights.nc "cur_uv_${PDY}_${fext}${fh3}_flat.nc" "cur_5min_01.nc" @@ -62,9 +62,9 @@ fi rm -f cur_temp[123].nc cur_5min_??.nc "cur_glo_uv_${PDY}_${fext}${fh3}.nc weights.nc" if [[ "${flagfirst}" = "T" ]]; then - sed -e "s/HDRFL/T/g" "${PARMgfs}/wave/ww3_prnc.cur.${WAVECUR_FID}.inp.tmpl" > ww3_prnc.inp + sed -e "s/HDRFL/T/g" "${PARMglobal}/wave/ww3_prnc.cur.${WAVECUR_FID}.inp.tmpl" > ww3_prnc.inp else - sed -e "s/HDRFL/F/g" "${PARMgfs}/wave/ww3_prnc.cur.${WAVECUR_FID}.inp.tmpl" > ww3_prnc.inp + sed -e "s/HDRFL/F/g" "${PARMglobal}/wave/ww3_prnc.cur.${WAVECUR_FID}.inp.tmpl" > ww3_prnc.inp fi rm -f cur.nc @@ -74,7 +74,7 @@ ${NLN} "${DATA}/mod_def.${WAVECUR_FID}" ./mod_def.ww3 export pgm="${NET,,}_ww3_prnc.x" source prep_step -"${EXECgfs}/${pgm}" 1> "prnc_${WAVECUR_FID}_${ymdh_rtofs}.out" 2>&1 +"${EXECglobal}/${pgm}" 1> "prnc_${WAVECUR_FID}_${ymdh_rtofs}.out" 2>&1 export err=$? err_chk if [[ "${err}" -ne 0 ]]; then diff --git a/ush/wave_prnc_ice.sh b/ush/wave_prnc_ice.sh index caeba3a0d05..a1ba739daf1 100755 --- a/ush/wave_prnc_ice.sh +++ b/ush/wave_prnc_ice.sh @@ -52,7 +52,7 @@ Making ice fields. 
EOF if [[ -z "${YMDH}" ]] || [[ -z "${cycle}" ]] || - [[ -z "${COMOUT_WAVE_PREP}" ]] || [[ -z "${FIXgfs}" ]] || [[ -z "${EXECgfs}" ]] || + [[ -z "${COMOUT_WAVE_PREP}" ]] || [[ -z "${FIXglobal}" ]] || [[ -z "${EXECglobal}" ]] || [[ -z "${WAV_MOD_TAG}" ]] || [[ -z "${WAVEICE_FID}" ]] || [[ -z "${COMIN_OBS}" ]]; then echo 'ERROR: EXPORTED VARIABLES IN preprocessor NOT SET ***' @@ -110,7 +110,7 @@ cpreq -f "${DATA}/ww3_prnc.ice.${WAVEICE_FID}.inp.tmpl" ww3_prnc.inp export pgm="${NET,,}_ww3_prnc.x" source prep_step -"${EXECgfs}/${pgm}" 1> "prnc_${WAVEICE_FID}_${cycle}.out" 2>&1 +"${EXECglobal}/${pgm}" 1> "prnc_${WAVEICE_FID}_${cycle}.out" 2>&1 export err=$? if [[ ${err} -ne 0 ]]; then cat "prnc_${WAVEICE_FID}_${cycle}.out" From db3689010333841dbb85deeb7b52a382cd2960ea Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Thu, 5 Feb 2026 11:17:07 -0500 Subject: [PATCH 02/71] second batch --- .github/copilot-instructions.md | 10 +- .github/workflows/pw_aws_ci.yaml | 16 +- dev/ci/Jenkinsfile | 44 ++-- dev/ci/cases/gcafsv1/C384_gcafs_cycled.yaml | 2 +- .../cases/gcafsv1/C384_gcafs_cycled_noDA.yaml | 2 +- .../gcafsv1/C384_gcafs_cycled_noDA_dev.yaml | 2 +- dev/ci/cases/gcafsv1/C96_gcafs_cycled.yaml | 2 +- .../cases/gcafsv1/C96_gcafs_cycled_noDA.yaml | 2 +- .../gcafsv1/C96_gcafs_cycled_noDA_dev.yaml | 2 +- dev/ci/cases/gfsv17/C1152mx025_S2SW.yaml | 2 +- .../cases/gfsv17/C1152mx025_S2SW_rdhpcs.yaml | 2 +- .../cases/gfsv17/C384mx025_3DVarAOWCDA.yaml | 2 +- dev/ci/cases/gfsv17/C384mx025_hybAOWCDA.yaml | 2 +- dev/ci/cases/gfsv17/marine3dvar.yaml | 12 +- dev/ci/cases/gfsv17/marinehyb.yaml | 22 +- dev/ci/cases/gfsv17/retrov17_realtime.yaml | 2 +- dev/ci/cases/gfsv17/retrov17_stream1a.yaml | 2 +- dev/ci/cases/gfsv17/retrov17_stream1b.yaml | 2 +- dev/ci/cases/gfsv17/retrov17_stream2.yaml | 2 +- dev/ci/cases/gfsv17/retrov17_stream3.yaml | 2 +- dev/ci/cases/gfsv17/retrov17_stream4.yaml | 2 +- dev/ci/cases/gfsv17/s2sw.yaml | 22 +- dev/ci/cases/gfsv17/s2sw_rdhpcs.yaml | 22 +- 
dev/ci/cases/gfsv17/s2sw_realtime.yaml | 22 +- dev/ci/cases/gfsv17/s2sw_stream1a.yaml | 22 +- dev/ci/cases/gfsv17/s2sw_stream1b.yaml | 22 +- dev/ci/cases/gfsv17/s2sw_stream2.yaml | 22 +- dev/ci/cases/gfsv17/s2sw_stream3.yaml | 22 +- dev/ci/cases/gfsv17/s2sw_stream4.yaml | 22 +- dev/ci/cases/hires/C1152_S2SW.yaml | 2 +- dev/ci/cases/hires/C768_S2SW.yaml | 2 +- dev/ci/cases/pr/C48_ATM.yaml | 2 +- dev/ci/cases/pr/C48_ATM_ecflow.yaml | 2 +- dev/ci/cases/pr/C48_S2SW.yaml | 2 +- dev/ci/cases/pr/C48_S2SWA_gefs.yaml | 2 +- dev/ci/cases/pr/C48_S2SWA_gefs_RT.yaml | 2 +- dev/ci/cases/pr/C48_S2SW_extended.yaml | 2 +- dev/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml | 2 +- dev/ci/cases/pr/C48mx500_hybAOWCDA.yaml | 2 +- dev/ci/cases/pr/C96C48_hybatmDA.yaml | 2 +- dev/ci/cases/pr/C96C48_hybatmsnowDA.yaml | 2 +- dev/ci/cases/pr/C96C48_hybatmsoilDA.yaml | 2 +- dev/ci/cases/pr/C96C48_ufs_hybatmDA.yaml | 2 +- dev/ci/cases/pr/C96C48_ufsgsi_hybatmDA.yaml | 2 +- dev/ci/cases/pr/C96C48mx500_S2SW_cyc_gfs.yaml | 2 +- dev/ci/cases/pr/C96_atm3DVar.yaml | 2 +- dev/ci/cases/pr/C96_atm3DVar_extended.yaml | 2 +- dev/ci/cases/pr/C96_gcafs_cycled.yaml | 2 +- dev/ci/cases/pr/C96_gcafs_cycled_noDA.yaml | 2 +- dev/ci/cases/pr/C96mx100_S2S.yaml | 2 +- dev/ci/cases/sfsv1/C96mx025_S2S.yaml | 2 +- dev/ci/cases/sfsv1/C96mx100_S2S.yaml | 2 +- dev/ci/cases/weekly/C384C192_hybatmda.yaml | 2 +- dev/ci/cases/weekly/C384_atm3DVar.yaml | 2 +- dev/ci/cases/yamls/atmsnowDA_defaults_ci.yaml | 2 +- dev/ci/cases/yamls/atmsoilDA_defaults_ci.yaml | 2 +- .../cases/yamls/gcafs_cycled_defaults_ci.yaml | 2 +- .../yamls/gcafs_cycled_noDA_defaults_ci.yaml | 2 +- .../yamls/gcafs_cycled_noDA_defaults_dev.yaml | 2 +- dev/ci/cases/yamls/gcafs_defaults_ci.yaml | 2 +- dev/ci/cases/yamls/gefs_defaults_ci.yaml | 2 +- dev/ci/cases/yamls/gfs_cyc_defaults_ci.yaml | 2 +- dev/ci/cases/yamls/gfs_defaults_ci.yaml | 2 +- dev/ci/cases/yamls/gfs_extended_ci.yaml | 2 +- dev/ci/cases/yamls/gfs_hyb_defaults_ci.yaml | 2 +- 
dev/ci/cases/yamls/gfs_metp_ci.yaml | 2 +- dev/ci/cases/yamls/sfs_CPC_defaults.yaml | 2 +- dev/ci/cases/yamls/sfs_defaults.yaml | 2 +- dev/ci/cases/yamls/sfs_full.yaml | 2 +- dev/ci/cases/yamls/soca_gfs_defaults_ci.yaml | 4 +- .../cases/yamls/soca_hyb_gfs_defaults_ci.yaml | 2 +- .../cases/yamls/ufs_hybatmDA_defaults.ci.yaml | 18 +- .../yamls/ufsgsi_hybatmDA_defaults.ci.yaml | 8 +- dev/ci/platforms/config.gaeac6 | 2 +- dev/ci/platforms/config.hera | 2 +- dev/ci/platforms/config.ursa | 2 +- .../unittests/test_data/test_config.yaml | 2 +- dev/ci/scripts/unittests/test_parse_yaml.py | 2 +- dev/ci/scripts/utils/ci_utils.sh | 40 ++-- .../utils/gitlab/launch_gitlab_runner.sh | 8 +- dev/ci/scripts/utils/launch_java_agent.sh | 14 +- dev/ctests/CMakeLists.txt | 10 +- dev/ctests/README.md | 14 +- dev/ctests/cases/C48_S2SW-gfs_waveinit.yaml | 2 +- dev/ctests/scripts/setup.sh.in | 6 +- dev/ctests/scripts/stage.sh.in | 14 +- dev/ctests/scripts/validate.sh.in | 14 +- dev/parm/config/gcafs/config.aeroanl.j2 | 4 +- dev/parm/config/gcafs/config.aeroanlgenb | 4 +- dev/parm/config/gcafs/config.anlstat | 2 +- dev/parm/config/gcafs/config.atmos_products | 12 +- dev/parm/config/gcafs/config.base.j2 | 50 ++--- dev/parm/config/gcafs/config.fcst.j2 | 18 +- dev/parm/config/gcafs/config.fetch | 6 +- dev/parm/config/gcafs/config.metp | 2 +- dev/parm/config/gcafs/config.upp | 2 +- dev/parm/config/gcafs/yaml/defaults.yaml | 24 +-- dev/parm/config/gcafs/yaml/test_ci.yaml | 2 +- dev/parm/config/gefs/config.atmos_products | 8 +- dev/parm/config/gefs/config.base.j2 | 24 +-- dev/parm/config/gefs/config.extractvars | 12 +- dev/parm/config/gefs/config.fcst.j2 | 14 +- dev/parm/config/gefs/config.oceanice_products | 2 +- dev/parm/config/gefs/config.stage_ic.j2 | 8 +- dev/parm/config/gefs/config.ufs | 14 +- dev/parm/config/gfs/config.aero.j2 | 14 +- dev/parm/config/gfs/config.aeroanl.j2 | 4 +- dev/parm/config/gfs/config.aeroanlgenb | 4 +- dev/parm/config/gfs/config.anal | 50 ++--- 
dev/parm/config/gfs/config.analcalc_fv3jedi | 2 +- dev/parm/config/gfs/config.anlstat | 2 +- dev/parm/config/gfs/config.atmanl.j2 | 2 +- dev/parm/config/gfs/config.atmensanl.j2 | 2 +- dev/parm/config/gfs/config.atmos_products | 8 +- dev/parm/config/gfs/config.base.j2 | 50 ++--- dev/parm/config/gfs/config.ecen_fv3jedi | 2 +- dev/parm/config/gfs/config.efcs | 4 +- dev/parm/config/gfs/config.esfc | 10 +- dev/parm/config/gfs/config.esnowanl.j2 | 16 +- dev/parm/config/gfs/config.fcst.j2 | 18 +- dev/parm/config/gfs/config.fetch | 2 +- dev/parm/config/gfs/config.fit2obs | 4 +- dev/parm/config/gfs/config.marineanl.j2 | 2 +- dev/parm/config/gfs/config.marineanlecen.j2 | 2 +- dev/parm/config/gfs/config.marineanlletkf.j2 | 2 +- dev/parm/config/gfs/config.marinebmat.j2 | 2 +- dev/parm/config/gfs/config.metp | 2 +- dev/parm/config/gfs/config.oceanice_products | 2 +- dev/parm/config/gfs/config.prep.j2 | 8 +- dev/parm/config/gfs/config.prepoceanobs.j2 | 2 +- dev/parm/config/gfs/config.sfcanl | 4 +- dev/parm/config/gfs/config.snowanl.j2 | 16 +- dev/parm/config/gfs/config.stage_ic.j2 | 4 +- dev/parm/config/gfs/config.ufs | 14 +- dev/parm/config/gfs/config.upp | 2 +- dev/parm/config/gfs/config.verfozn | 6 +- dev/parm/config/gfs/config.verfrad | 4 +- dev/parm/config/gfs/config.vminmon | 4 +- dev/parm/config/gfs/yaml/defaults.yaml | 24 +-- dev/parm/config/sfs/config.base.j2 | 24 +-- dev/parm/config/sfs/config.fcst.j2 | 14 +- dev/ush/README_NET_CONVERSION.md | 12 +- dev/workflow/README_ecflow.md | 4 +- dev/workflow/rocoto/rocoto_scron.sh.j2 | 2 +- docs/source/errors_faq.rst | 2 +- docs/source/gcafs.rst | 2 +- docs/source/testing.rst | 76 +++---- gempak/fix/gfs_meta | 46 ++-- modulefiles/gw_run.gaeac6.lua | 6 +- modulefiles/gw_run.hera.lua | 6 +- modulefiles/gw_run.hercules.lua | 6 +- modulefiles/gw_run.noaacloud.lua | 6 +- modulefiles/gw_run.orion.lua | 6 +- modulefiles/gw_run.ursa.lua | 6 +- modulefiles/gw_upp.wcoss2.lua | 4 +- .../gdas/staging/snow_ims_scf_to_ioda.yaml.j2 | 2 +- 
parm/post/oceanice_products_gefs.yaml | 18 +- parm/post/oceanice_products_gfs.yaml | 18 +- parm/post/oceanice_products_sfs.yaml | 18 +- parm/post/upp.yaml | 14 +- parm/post/upp_gcafs.yaml | 20 +- parm/ufs/fix/gfs/atmos.fixed_files.yaml | 96 ++++----- sorc/build_all.sh | 16 +- sorc/build_gdas.sh | 6 +- sorc/build_gfs_utils.sh | 6 +- sorc/build_gsi_enkf.sh | 6 +- sorc/build_gsi_monitor.sh | 6 +- sorc/build_gsi_utils.sh | 6 +- sorc/build_nexus.sh | 4 +- sorc/build_ufs.sh | 12 +- sorc/build_ufs_utils.sh | 6 +- sorc/build_upp.sh | 8 +- sorc/build_ww3prepost.sh | 12 +- sorc/link_workflow.sh | 202 +++++++++--------- ush/python/pygfs/task/archive.py | 4 +- versions/build.gaeac6.ver | 2 +- versions/build.hera.ver | 2 +- versions/build.hercules.ver | 2 +- versions/build.noaacloud.ver | 2 +- versions/build.orion.ver | 2 +- versions/build.ursa.ver | 2 +- versions/run.gaeac6.ver | 2 +- versions/run.hera.ver | 2 +- versions/run.hercules.ver | 2 +- versions/run.noaacloud.ver | 2 +- versions/run.orion.ver | 2 +- versions/run.ursa.ver | 2 +- 187 files changed, 892 insertions(+), 892 deletions(-) diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md index 1141d52b82e..e2b4450130b 100644 --- a/.github/copilot-instructions.md +++ b/.github/copilot-instructions.md @@ -241,8 +241,8 @@ def get_resource(self, task_name): ### Environment Setup ```bash # From gw_setup.sh - CRITICAL for Python imports -if [[ -d "${HOMEgfs}/sorc/wxflow/src" ]]; then - PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEgfs}/sorc/wxflow/src" +if [[ -d "${HOMEglobal}/sorc/wxflow/src" ]]; then + PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEglobal}/sorc/wxflow/src" export PYTHONPATH fi ``` @@ -335,13 +335,13 @@ meta_tasks_state = {} # State tracking per metatask ### Machine Detection ```bash -source "${HOMEgfs}/ush/detect_machine.sh" +source "${HOMEglobal}/ush/detect_machine.sh" # Sets MACHINE_ID for host-specific configurations ``` ### Module Loading ```bash -module use 
"${HOMEgfs}/modulefiles" +module use "${HOMEglobal}/modulefiles" module load "module_gwsetup.${MACHINE_ID}" ``` @@ -364,7 +364,7 @@ module load "module_gwsetup.${MACHINE_ID}" # Standard environment setup in tasks envar_dict = { 'RUN_ENVIR': 'emc', - 'HOMEgfs': self.HOMEgfs, + 'HOMEglobal': self.HOMEglobal, 'EXPDIR': self._base.get('EXPDIR'), 'NET': self._base.get('NET'), 'RUN': self.run, diff --git a/.github/workflows/pw_aws_ci.yaml b/.github/workflows/pw_aws_ci.yaml index e7c05f3e296..ed422f2b343 100644 --- a/.github/workflows/pw_aws_ci.yaml +++ b/.github/workflows/pw_aws_ci.yaml @@ -1,10 +1,10 @@ name: gw-ci-aws # TEST_DIR contains 2 directories; -# 1. HOMEgfs: clone of the global-workflow +# 1. HOMEglobal: clone of the global-workflow # 2. RUNTESTS: A directory containing EXPDIR and COMROT for experiments # e.g. $> tree ./TEST_DIR # ./TEST_DIR -# ├── HOMEgfs +# ├── HOMEglobal # └── RUNTESTS # ├── COMROT # │ └── ${pslot} @@ -98,7 +98,7 @@ jobs: - name: Checkout global-workflow uses: actions/checkout@v4 with: - path: ${{ github.run_id }}/HOMEgfs + path: ${{ github.run_id }}/HOMEglobal submodules: 'recursive' repository: ${{ needs.fetch-branch.outputs.repo }} ref: ${{ needs.fetch-branch.outputs.branch }} @@ -113,12 +113,12 @@ jobs: steps: - name: Build components run: | - cd ${{ env.TEST_DIR }}/HOMEgfs/sorc + cd ${{ env.TEST_DIR }}/HOMEglobal/sorc ./build_all.sh -j 8 - name: Link artifacts run: | - cd ${{ env.TEST_DIR }}/HOMEgfs/sorc + cd ${{ env.TEST_DIR }}/HOMEglobal/sorc ./link_workflow.sh create-experiments: @@ -138,7 +138,7 @@ jobs: pslot: ${{ matrix.case }}.${{ github.run_id }} run: | mkdir -p ${{ env.RUNTESTS }} - cd ${{ env.TEST_DIR }}/HOMEgfs + cd ${{ env.TEST_DIR }}/HOMEglobal source dev/ush/gw_setup.sh source dev/ci/platforms/config.noaacloud ./dev/workflow/create_experiment.py --yaml dev/ci/cases/pr/${{ matrix.case }}.yaml --overwrite @@ -156,8 +156,8 @@ jobs: steps: - name: Run Experiment ${{ matrix.case }} run: | - cd ${{ env.TEST_DIR }}/HOMEgfs - 
./dev/ci/scripts/run_check_ci.sh ${{ env.TEST_DIR }} ${{ matrix.case }}.${{ github.run_id }} HOMEgfs + cd ${{ env.TEST_DIR }}/HOMEglobal + ./dev/ci/scripts/run_check_ci.sh ${{ env.TEST_DIR }} ${{ matrix.case }}.${{ github.run_id }} HOMEglobal clean-up: needs: run-experiments diff --git a/dev/ci/Jenkinsfile b/dev/ci/Jenkinsfile index ab8cffe4dab..29af1a8a158 100644 --- a/dev/ci/Jenkinsfile +++ b/dev/ci/Jenkinsfile @@ -2,7 +2,7 @@ def Machine = 'none' def machine = 'none' def CUSTOM_WORKSPACE = 'none' def HOMEglobal = 'none' -def HOMEgfs_dev = 'none' +def HOMEglobal_dev = 'none' def CI_CASES = '' def GH = 'none' // Map of the machine names (MACHINE_ID) to the Jenkins Node names @@ -91,7 +91,7 @@ pipeline { GH = sh(script: "which gh || echo '~/bin/gh'", returnStdout: true).trim() CUSTOM_WORKSPACE = "${WORKSPACE}" HOMEglobal = "${CUSTOM_WORKSPACE}/global-workflow" - HOMEgfs_dev = "${CUSTOM_WORKSPACE}/global-workflow/dev" + HOMEglobal_dev = "${CUSTOM_WORKSPACE}/global-workflow/dev" sh(script: "rm -Rf ${CUSTOM_WORKSPACE}/global-workflow; mkdir -p ${CUSTOM_WORKSPACE}/global-workflow") sh(script: "rm -Rf ${CUSTOM_WORKSPACE}/RUNTESTS; mkdir -p ${CUSTOM_WORKSPACE}/RUNTESTS") sh(script: """${GH} pr edit ${env.CHANGE_ID} --repo ${repo_url} --add-label "CI-${Machine}-Building" --remove-label "CI-${Machine}-Ready" """) @@ -129,7 +129,7 @@ pipeline { def error_logs_message = "" dir("${HOMEglobal}/sorc") { try { - sh(script: "${HOMEgfs_dev}/ci/scripts/utils/ci_utils.sh build") // build the global-workflow executables + sh(script: "${HOMEglobal_dev}/ci/scripts/utils/ci_utils.sh build") // build the global-workflow executables } catch (Exception error_build) { echo "Failed to build global-workflow: ${error_build.getMessage()}" if ( fileExists("logs/error.logs") ) { @@ -148,12 +148,12 @@ pipeline { } try { sh(script: """ - source ${HOMEgfs_dev}/ush/gw_setup.sh - ${HOMEgfs_dev}/ci/scripts/utils/publish_logs.py --file ${error_logs} --repo PR_BUILD_${env.CHANGE_ID} + source 
${HOMEglobal_dev}/ush/gw_setup.sh + ${HOMEglobal_dev}/ci/scripts/utils/publish_logs.py --file ${error_logs} --repo PR_BUILD_${env.CHANGE_ID} """) gist_url=sh(script: """ - source ${HOMEgfs_dev}/ush/gw_setup.sh - ${HOMEgfs_dev}/ci/scripts/utils/publish_logs.py --file ${error_logs} --multiple --format=github --gist PR_BUILD_${env.CHANGE_ID} | tail -n 1 + source ${HOMEglobal_dev}/ush/gw_setup.sh + ${HOMEglobal_dev}/ci/scripts/utils/publish_logs.py --file ${error_logs} --multiple --format=github --gist PR_BUILD_${env.CHANGE_ID} | tail -n 1 """, returnStdout: true).trim() sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body 'Build **FAILED** on **${Machine}** in Build# ${env.BUILD_NUMBER} with error logs:\n```\n${error_logs_message}```\n\nFollow link here to view the contents of the above file(s): ${gist_url}' """) } catch (Exception error_comment) { @@ -176,8 +176,8 @@ pipeline { } // Get a list of CI cases to run CI_CASES = sh(script: """ - source ${HOMEgfs_dev}/ush/gw_setup.sh - ${HOMEgfs_dev}/ci/scripts/utils/get_host_case_list.py ${machine} + source ${HOMEglobal_dev}/ush/gw_setup.sh + ${HOMEglobal_dev}/ci/scripts/utils/get_host_case_list.py ${machine} """, returnStdout: true).trim().split() echo "Cases to run: ${CI_CASES}" } @@ -202,8 +202,8 @@ pipeline { def error_output = "" try { error_output = sh(script: """ - source ${HOMEgfs_dev}/ush/gw_setup.sh - ${HOMEgfs_dev}/ci/scripts/utils/ci_utils.sh create_experiment ${HOMEgfs_dev}/ci/cases/pr/${caseName}.yaml + source ${HOMEglobal_dev}/ush/gw_setup.sh + ${HOMEglobal_dev}/ci/scripts/utils/ci_utils.sh create_experiment ${HOMEglobal_dev}/ci/cases/pr/${caseName}.yaml """, returnStdout: true).trim() } catch (Exception error_create) { sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body '${caseName} **FAILED** to create experiment on ${Machine} in BUILD# ${env.BUILD_NUMBER}\n with the error:\n```\n${error_output}```' """) @@ -215,22 +215,22 @@ pipeline { stage("Running 
${caseName}") { catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') { script { - def pslot = sh(script: "${HOMEgfs_dev}/ci/scripts/utils/ci_utils.sh get_pslot ${CUSTOM_WORKSPACE}/RUNTESTS ${caseName}", returnStdout: true).trim() + def pslot = sh(script: "${HOMEglobal_dev}/ci/scripts/utils/ci_utils.sh get_pslot ${CUSTOM_WORKSPACE}/RUNTESTS ${caseName}", returnStdout: true).trim() def error_file = "${CUSTOM_WORKSPACE}/RUNTESTS/EXPDIR/${pslot}/${pslot}_error.logs" sh(script: " rm -f ${error_file}") try { sh(script: """ - source ${HOMEgfs_dev}/ush/gw_setup.sh - ${HOMEgfs_dev}/ci/scripts/run_check_ci.sh ${CUSTOM_WORKSPACE} ${pslot} 'global-workflow' + source ${HOMEglobal_dev}/ush/gw_setup.sh + ${HOMEglobal_dev}/ci/scripts/run_check_ci.sh ${CUSTOM_WORKSPACE} ${pslot} 'global-workflow' """) sh(script: """ - source ${HOMEgfs_dev}/ush/gw_setup.sh - ${HOMEgfs_dev}/ci/scripts/utils/ci_utils.sh cleanup_experiment ${CUSTOM_WORKSPACE}/RUNTESTS/EXPDIR/${pslot} + source ${HOMEglobal_dev}/ush/gw_setup.sh + ${HOMEglobal_dev}/ci/scripts/utils/ci_utils.sh cleanup_experiment ${CUSTOM_WORKSPACE}/RUNTESTS/EXPDIR/${pslot} """) } catch (Exception error_experment) { sh(script: """ - source ${HOMEgfs_dev}/ush/gw_setup.sh - ${HOMEgfs_dev}/ci/scripts/utils/ci_utils.sh cancel_batch_jobs ${pslot} + source ${HOMEglobal_dev}/ush/gw_setup.sh + ${HOMEglobal_dev}/ci/scripts/utils/ci_utils.sh cancel_batch_jobs ${pslot} """) ws(CUSTOM_WORKSPACE) { def error_logs = "" @@ -252,13 +252,13 @@ pipeline { } try { gist_url = sh(script: """ - source ${HOMEgfs_dev}/ush/gw_setup.sh - ${HOMEgfs_dev}/ci/scripts/utils/publish_logs.py --file ${error_logs} --multiple --format=github --gist PR_${env.CHANGE_ID} | tail -n 1 + source ${HOMEglobal_dev}/ush/gw_setup.sh + ${HOMEglobal_dev}/ci/scripts/utils/publish_logs.py --file ${error_logs} --multiple --format=github --gist PR_${env.CHANGE_ID} | tail -n 1 """, returnStdout: true).trim() sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body 
'Experiment ${caseName} **FAILED** on ${Machine} in Build# ${env.BUILD_NUMBER} with error logs:\n```\n${error_logs_message}```\n\nFollow link here to view the contents of the above file(s): ${gist_url}' """) sh(script: """ - source ${HOMEgfs_dev}/ush/gw_setup.sh - ${HOMEgfs_dev}/ci/scripts/utils/publish_logs.py --file ${error_logs} --repo PR_${env.CHANGE_ID} + source ${HOMEglobal_dev}/ush/gw_setup.sh + ${HOMEglobal_dev}/ci/scripts/utils/publish_logs.py --file ${error_logs} --repo PR_${env.CHANGE_ID} """) } catch (Exception error_comment) { echo "Failed to comment on PR: ${error_comment.getMessage()}" diff --git a/dev/ci/cases/gcafsv1/C384_gcafs_cycled.yaml b/dev/ci/cases/gcafsv1/C384_gcafs_cycled.yaml index e67af8d6c73..2a2677fa08d 100644 --- a/dev/ci/cases/gcafsv1/C384_gcafs_cycled.yaml +++ b/dev/ci/cases/gcafsv1/C384_gcafs_cycled.yaml @@ -12,7 +12,7 @@ experiment: nens: 0 interval: 12 start: cold - yaml: {{ HOMEgfs }}/dev/ci/cases/yamls/gcafs_cycled_defaults_ci.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/yamls/gcafs_cycled_defaults_ci.yaml workflow: engine: rocoto diff --git a/dev/ci/cases/gcafsv1/C384_gcafs_cycled_noDA.yaml b/dev/ci/cases/gcafsv1/C384_gcafs_cycled_noDA.yaml index 3accc2f5ed9..ecb8ec678f1 100644 --- a/dev/ci/cases/gcafsv1/C384_gcafs_cycled_noDA.yaml +++ b/dev/ci/cases/gcafsv1/C384_gcafs_cycled_noDA.yaml @@ -12,7 +12,7 @@ experiment: nens: 0 interval: 12 start: cold - yaml: {{ HOMEgfs }}/dev/ci/cases/yamls/gcafs_cycled_noDA_defaults_ci.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/yamls/gcafs_cycled_noDA_defaults_ci.yaml workflow: engine: rocoto diff --git a/dev/ci/cases/gcafsv1/C384_gcafs_cycled_noDA_dev.yaml b/dev/ci/cases/gcafsv1/C384_gcafs_cycled_noDA_dev.yaml index bfda39b9624..773258b85fb 100644 --- a/dev/ci/cases/gcafsv1/C384_gcafs_cycled_noDA_dev.yaml +++ b/dev/ci/cases/gcafsv1/C384_gcafs_cycled_noDA_dev.yaml @@ -12,7 +12,7 @@ experiment: nens: 0 interval: 0 start: cold - yaml: {{ HOMEgfs 
}}/dev/ci/cases/yamls/gcafs_cycled_noDA_defaults_dev.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/yamls/gcafs_cycled_noDA_defaults_dev.yaml workflow: engine: rocoto diff --git a/dev/ci/cases/gcafsv1/C96_gcafs_cycled.yaml b/dev/ci/cases/gcafsv1/C96_gcafs_cycled.yaml index 138fd7c356b..937c1263397 100644 --- a/dev/ci/cases/gcafsv1/C96_gcafs_cycled.yaml +++ b/dev/ci/cases/gcafsv1/C96_gcafs_cycled.yaml @@ -12,7 +12,7 @@ experiment: nens: 0 interval: 12 start: cold - yaml: {{ HOMEgfs }}/dev/ci/cases/yamls/gcafs_cycled_defaults_ci.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/yamls/gcafs_cycled_defaults_ci.yaml workflow: engine: rocoto diff --git a/dev/ci/cases/gcafsv1/C96_gcafs_cycled_noDA.yaml b/dev/ci/cases/gcafsv1/C96_gcafs_cycled_noDA.yaml index 65164ae4297..1e70b22bc75 100644 --- a/dev/ci/cases/gcafsv1/C96_gcafs_cycled_noDA.yaml +++ b/dev/ci/cases/gcafsv1/C96_gcafs_cycled_noDA.yaml @@ -12,7 +12,7 @@ experiment: nens: 0 interval: 12 start: cold - yaml: {{ HOMEgfs }}/dev/ci/cases/yamls/gcafs_cycled_noDA_defaults_ci.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/yamls/gcafs_cycled_noDA_defaults_ci.yaml workflow: engine: rocoto diff --git a/dev/ci/cases/gcafsv1/C96_gcafs_cycled_noDA_dev.yaml b/dev/ci/cases/gcafsv1/C96_gcafs_cycled_noDA_dev.yaml index f22670f274d..20fde3e50c1 100644 --- a/dev/ci/cases/gcafsv1/C96_gcafs_cycled_noDA_dev.yaml +++ b/dev/ci/cases/gcafsv1/C96_gcafs_cycled_noDA_dev.yaml @@ -12,7 +12,7 @@ experiment: nens: 0 interval: 0 start: cold - yaml: {{ HOMEgfs }}/dev/ci/cases/yamls/gcafs_cycled_noDA_defaults_dev.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/yamls/gcafs_cycled_noDA_defaults_dev.yaml workflow: engine: rocoto diff --git a/dev/ci/cases/gfsv17/C1152mx025_S2SW.yaml b/dev/ci/cases/gfsv17/C1152mx025_S2SW.yaml index 67375d8d3ee..7b67f086dea 100644 --- a/dev/ci/cases/gfsv17/C1152mx025_S2SW.yaml +++ b/dev/ci/cases/gfsv17/C1152mx025_S2SW.yaml @@ -14,7 +14,7 @@ experiment: idate: 2024111506 edate: 2025011600 icsdir: /lfs/h2/emc/ptmp/emc.global/RETRO_GFSv17/IC/v2 
- yaml: {{ HOMEgfs }}/dev/ci/cases/gfsv17/s2sw.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/gfsv17/s2sw.yaml workflow: engine: rocoto diff --git a/dev/ci/cases/gfsv17/C1152mx025_S2SW_rdhpcs.yaml b/dev/ci/cases/gfsv17/C1152mx025_S2SW_rdhpcs.yaml index 09e7be8e8c9..51f7d9cfdd9 100644 --- a/dev/ci/cases/gfsv17/C1152mx025_S2SW_rdhpcs.yaml +++ b/dev/ci/cases/gfsv17/C1152mx025_S2SW_rdhpcs.yaml @@ -17,7 +17,7 @@ experiment: icsdir: /gpfs/f6/drsa-precip3/world-shared/role.glopara/data/ICSDIR/retro_ICs #msu #icsdir: /work2/noaa/global/role-global/data/ICSDIR/retro_ICs - yaml: {{ HOMEgfs }}/dev/ci/cases/gfsv17/s2sw_rdhpcs.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/gfsv17/s2sw_rdhpcs.yaml workflow: engine: rocoto rocoto: diff --git a/dev/ci/cases/gfsv17/C384mx025_3DVarAOWCDA.yaml b/dev/ci/cases/gfsv17/C384mx025_3DVarAOWCDA.yaml index cbd4c74ed20..b3b884c3a1d 100644 --- a/dev/ci/cases/gfsv17/C384mx025_3DVarAOWCDA.yaml +++ b/dev/ci/cases/gfsv17/C384mx025_3DVarAOWCDA.yaml @@ -14,7 +14,7 @@ experiment: edate: 2021070306 # NOTE: this directory is read-only icsdir: - yaml: {{ HOMEgfs }}/dev/ci/cases/gfsv17/marine3dvar.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/gfsv17/marine3dvar.yaml workflow: engine: rocoto diff --git a/dev/ci/cases/gfsv17/C384mx025_hybAOWCDA.yaml b/dev/ci/cases/gfsv17/C384mx025_hybAOWCDA.yaml index 41060e89975..a5fb719f2fb 100644 --- a/dev/ci/cases/gfsv17/C384mx025_hybAOWCDA.yaml +++ b/dev/ci/cases/gfsv17/C384mx025_hybAOWCDA.yaml @@ -14,7 +14,7 @@ experiment: idate: 2021063018 edate: 2021070306 icsdir: - yaml: {{ HOMEgfs }}/dev/ci/cases/gfsv17/marinehyb.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/gfsv17/marinehyb.yaml workflow: engine: rocoto diff --git a/dev/ci/cases/gfsv17/marine3dvar.yaml b/dev/ci/cases/gfsv17/marine3dvar.yaml index 54f6aaee611..3516a7c1fdb 100644 --- a/dev/ci/cases/gfsv17/marine3dvar.yaml +++ b/dev/ci/cases/gfsv17/marine3dvar.yaml @@ -1,5 +1,5 @@ defaults: - !INC {{ HOMEgfs }}/dev/parm/config/gfs/yaml/defaults.yaml + !INC {{ HOMEglobal 
}}/dev/parm/config/gfs/yaml/defaults.yaml base: DOIAU: "YES" @@ -21,11 +21,11 @@ prepoceanobs: dmpdir_exp: "${BASE_DATA}/experimental_obs" marinebmat: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca marineanl: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca - SOCA_OBS_LIST: {{ HOMEgfs }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca + SOCA_OBS_LIST: {{ HOMEglobal }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 SOCA_NINNER: 100 diff --git a/dev/ci/cases/gfsv17/marinehyb.yaml b/dev/ci/cases/gfsv17/marinehyb.yaml index 6559dd96e66..17c18c0b08a 100644 --- a/dev/ci/cases/gfsv17/marinehyb.yaml +++ b/dev/ci/cases/gfsv17/marinehyb.yaml @@ -1,5 +1,5 @@ defaults: - !INC {{ HOMEgfs }}/dev/parm/config/gfs/yaml/defaults.yaml + !INC {{ HOMEglobal }}/dev/parm/config/gfs/yaml/defaults.yaml base: DOIAU: "YES" @@ -21,20 +21,20 @@ prepoceanobs: dmpdir_exp: "${BASE_DATA}/experimental_obs" marinebmat: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca marineanl: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca - SOCA_OBS_LIST: {{ HOMEgfs }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca + SOCA_OBS_LIST: {{ HOMEglobal 
}}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 SOCA_NINNER: 100 marineanlecen: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca marineanlletkf: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca - SOCA_OBS_LIST: {{ HOMEgfs }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca + SOCA_OBS_LIST: {{ HOMEglobal }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 diff --git a/dev/ci/cases/gfsv17/retrov17_realtime.yaml b/dev/ci/cases/gfsv17/retrov17_realtime.yaml index 52d7745525c..b7a5e415d99 100644 --- a/dev/ci/cases/gfsv17/retrov17_realtime.yaml +++ b/dev/ci/cases/gfsv17/retrov17_realtime.yaml @@ -14,7 +14,7 @@ experiment: idate: 2025111306 edate: 2027011600 icsdir: /lfs/h2/emc/gfstemp/emc.global/comroot/rt17_upd03_realtime - yaml: {{ HOMEgfs }}/dev/ci/cases/gfsv17/s2sw_realtime.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/gfsv17/s2sw_realtime.yaml workflow: engine: rocoto diff --git a/dev/ci/cases/gfsv17/retrov17_stream1a.yaml b/dev/ci/cases/gfsv17/retrov17_stream1a.yaml index f5f1459c6a0..09ad67b9a0b 100644 --- a/dev/ci/cases/gfsv17/retrov17_stream1a.yaml +++ b/dev/ci/cases/gfsv17/retrov17_stream1a.yaml @@ -14,7 +14,7 @@ experiment: idate: 2022081506 edate: 2022101518 icsdir: /gpfs/f6/gfs-cpu/world-shared/role.glopara/RETRO_GFSv17/IC/v3 - yaml: {{ HOMEgfs }}/dev/ci/cases/gfsv17/s2sw_stream1a.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/gfsv17/s2sw_stream1a.yaml workflow: engine: rocoto diff --git a/dev/ci/cases/gfsv17/retrov17_stream1b.yaml b/dev/ci/cases/gfsv17/retrov17_stream1b.yaml index 68e8e794d0c..bf769360d38 100644 --- 
a/dev/ci/cases/gfsv17/retrov17_stream1b.yaml +++ b/dev/ci/cases/gfsv17/retrov17_stream1b.yaml @@ -14,7 +14,7 @@ experiment: idate: 2024021506 edate: 2024053118 icsdir: /gpfs/f6/gfs-cpu/world-shared/role.glopara/RETRO_GFSv17/IC/v3 - yaml: {{ HOMEgfs }}/dev/ci/cases/gfsv17/s2sw_stream1b.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/gfsv17/s2sw_stream1b.yaml workflow: engine: rocoto diff --git a/dev/ci/cases/gfsv17/retrov17_stream2.yaml b/dev/ci/cases/gfsv17/retrov17_stream2.yaml index 9a624e1fdfa..6c810f5a89a 100644 --- a/dev/ci/cases/gfsv17/retrov17_stream2.yaml +++ b/dev/ci/cases/gfsv17/retrov17_stream2.yaml @@ -14,7 +14,7 @@ experiment: idate: 2024051506 edate: 2024113018 icsdir: /lfs/h2/emc/gfstemp/emc.global/IC/v3 - yaml: {{ HOMEgfs }}/dev/ci/cases/gfsv17/s2sw_stream2.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/gfsv17/s2sw_stream2.yaml workflow: engine: rocoto diff --git a/dev/ci/cases/gfsv17/retrov17_stream3.yaml b/dev/ci/cases/gfsv17/retrov17_stream3.yaml index 60718c817dc..e56220c7cef 100644 --- a/dev/ci/cases/gfsv17/retrov17_stream3.yaml +++ b/dev/ci/cases/gfsv17/retrov17_stream3.yaml @@ -14,7 +14,7 @@ experiment: idate: 2024111506 edate: 2025053118 icsdir: /lfs/h2/emc/gfstemp/emc.global/IC/v3 - yaml: {{ HOMEgfs }}/dev/ci/cases/gfsv17/s2sw_stream3.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/gfsv17/s2sw_stream3.yaml workflow: engine: rocoto diff --git a/dev/ci/cases/gfsv17/retrov17_stream4.yaml b/dev/ci/cases/gfsv17/retrov17_stream4.yaml index 0c7055a483b..d87deaeb107 100644 --- a/dev/ci/cases/gfsv17/retrov17_stream4.yaml +++ b/dev/ci/cases/gfsv17/retrov17_stream4.yaml @@ -14,7 +14,7 @@ experiment: idate: 2025051506 edate: 2025093018 icsdir: /gpfs/f6/gfs-cpu/world-shared/role.glopara/RETRO_GFSv17/IC/v3 - yaml: {{ HOMEgfs }}/dev/ci/cases/gfsv17/s2sw_stream4.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/gfsv17/s2sw_stream4.yaml workflow: engine: rocoto diff --git a/dev/ci/cases/gfsv17/s2sw.yaml b/dev/ci/cases/gfsv17/s2sw.yaml index 7b1bc248c21..ec6d46fc1c8 100644 --- 
a/dev/ci/cases/gfsv17/s2sw.yaml +++ b/dev/ci/cases/gfsv17/s2sw.yaml @@ -1,5 +1,5 @@ defaults: - !INC {{ HOMEgfs }}/dev/parm/config/gfs/yaml/defaults.yaml + !INC {{ HOMEglobal }}/dev/parm/config/gfs/yaml/defaults.yaml base: DOIAU: "YES" @@ -32,20 +32,20 @@ prepoceanobs: dmpdir_exp: /lfs/h2/emc/da/noscrub/mindo.choi/MARINE_obs/COMROOT/realtime marinebmat: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca marineanl: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca - SOCA_OBS_LIST: {{ HOMEgfs }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca + SOCA_OBS_LIST: {{ HOMEglobal }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 SOCA_NINNER: 100 marineanlecen: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca marineanlletkf: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca - SOCA_OBS_LIST: {{ HOMEgfs }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca + SOCA_OBS_LIST: {{ HOMEglobal }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 diff --git a/dev/ci/cases/gfsv17/s2sw_rdhpcs.yaml b/dev/ci/cases/gfsv17/s2sw_rdhpcs.yaml index 78a7782c02b..d65a42f9632 100644 --- a/dev/ci/cases/gfsv17/s2sw_rdhpcs.yaml +++ b/dev/ci/cases/gfsv17/s2sw_rdhpcs.yaml @@ 
-1,5 +1,5 @@ defaults: - !INC {{ HOMEgfs }}/dev/parm/config/gfs/yaml/defaults.yaml + !INC {{ HOMEglobal }}/dev/parm/config/gfs/yaml/defaults.yaml base: DOIAU: "YES" @@ -32,20 +32,20 @@ prepoceanobs: # dmpdir_exp: /scratch3/NCEPDEV/da/common_obsForge marinebmat: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca marineanl: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca - SOCA_OBS_LIST: {{ HOMEgfs }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca + SOCA_OBS_LIST: {{ HOMEglobal }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 SOCA_NINNER: 100 marineanlecen: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca marineanlletkf: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca - SOCA_OBS_LIST: {{ HOMEgfs }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca + SOCA_OBS_LIST: {{ HOMEglobal }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 diff --git a/dev/ci/cases/gfsv17/s2sw_realtime.yaml b/dev/ci/cases/gfsv17/s2sw_realtime.yaml index dbd82361add..89281f0e848 100644 --- a/dev/ci/cases/gfsv17/s2sw_realtime.yaml +++ b/dev/ci/cases/gfsv17/s2sw_realtime.yaml @@ -1,5 +1,5 @@ defaults: - !INC {{ HOMEgfs }}/dev/parm/config/gfs/yaml/defaults.yaml + 
!INC {{ HOMEglobal }}/dev/parm/config/gfs/yaml/defaults.yaml base: DOIAU: "YES" @@ -32,20 +32,20 @@ prepoceanobs: dmpdir_exp: /lfs/h2/emc/da/noscrub/emc.da/obsForge/COMROOT/realtime marinebmat: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca marineanl: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca - SOCA_OBS_LIST: {{ HOMEgfs }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca + SOCA_OBS_LIST: {{ HOMEglobal }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 SOCA_NINNER: 100 marineanlecen: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca marineanlletkf: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca - SOCA_OBS_LIST: {{ HOMEgfs }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca + SOCA_OBS_LIST: {{ HOMEglobal }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 diff --git a/dev/ci/cases/gfsv17/s2sw_stream1a.yaml b/dev/ci/cases/gfsv17/s2sw_stream1a.yaml index 2078dbd1ca3..d4838f84038 100644 --- a/dev/ci/cases/gfsv17/s2sw_stream1a.yaml +++ b/dev/ci/cases/gfsv17/s2sw_stream1a.yaml @@ -1,5 +1,5 @@ defaults: - !INC {{ HOMEgfs }}/dev/parm/config/gfs/yaml/defaults.yaml + !INC {{ HOMEglobal }}/dev/parm/config/gfs/yaml/defaults.yaml base: 
DOIAU: "YES" @@ -32,20 +32,20 @@ prepoceanobs: # dmpdir_exp: /scratch3/NCEPDEV/da/common_obsForge marinebmat: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca marineanl: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca - SOCA_OBS_LIST: {{ HOMEgfs }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca + SOCA_OBS_LIST: {{ HOMEglobal }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 SOCA_NINNER: 100 marineanlecen: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca marineanlletkf: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca - SOCA_OBS_LIST: {{ HOMEgfs }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca + SOCA_OBS_LIST: {{ HOMEglobal }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 diff --git a/dev/ci/cases/gfsv17/s2sw_stream1b.yaml b/dev/ci/cases/gfsv17/s2sw_stream1b.yaml index 1c414a9a44f..398e48bc4c7 100644 --- a/dev/ci/cases/gfsv17/s2sw_stream1b.yaml +++ b/dev/ci/cases/gfsv17/s2sw_stream1b.yaml @@ -1,5 +1,5 @@ defaults: - !INC {{ HOMEgfs }}/dev/parm/config/gfs/yaml/defaults.yaml + !INC {{ HOMEglobal }}/dev/parm/config/gfs/yaml/defaults.yaml base: DOIAU: "YES" @@ -32,20 +32,20 @@ prepoceanobs: # dmpdir_exp: 
/scratch3/NCEPDEV/da/common_obsForge marinebmat: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca marineanl: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca - SOCA_OBS_LIST: {{ HOMEgfs }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca + SOCA_OBS_LIST: {{ HOMEglobal }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 SOCA_NINNER: 100 marineanlecen: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca marineanlletkf: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca - SOCA_OBS_LIST: {{ HOMEgfs }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca + SOCA_OBS_LIST: {{ HOMEglobal }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 diff --git a/dev/ci/cases/gfsv17/s2sw_stream2.yaml b/dev/ci/cases/gfsv17/s2sw_stream2.yaml index 9fd4ebd8ec5..dbb468ffcf9 100644 --- a/dev/ci/cases/gfsv17/s2sw_stream2.yaml +++ b/dev/ci/cases/gfsv17/s2sw_stream2.yaml @@ -1,5 +1,5 @@ defaults: - !INC {{ HOMEgfs }}/dev/parm/config/gfs/yaml/defaults.yaml + !INC {{ HOMEglobal }}/dev/parm/config/gfs/yaml/defaults.yaml base: DOIAU: "YES" @@ -32,20 +32,20 @@ prepoceanobs: # dmpdir_exp: /lfs/h2/emc/da/noscrub/mindo.choi/MARINE_obs/COMROOT/realtime marinebmat: - 
SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca marineanl: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca - SOCA_OBS_LIST: {{ HOMEgfs }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca + SOCA_OBS_LIST: {{ HOMEglobal }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 SOCA_NINNER: 100 marineanlecen: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca marineanlletkf: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca - SOCA_OBS_LIST: {{ HOMEgfs }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca + SOCA_OBS_LIST: {{ HOMEglobal }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 diff --git a/dev/ci/cases/gfsv17/s2sw_stream3.yaml b/dev/ci/cases/gfsv17/s2sw_stream3.yaml index f13c44b9637..9e13585b5c9 100644 --- a/dev/ci/cases/gfsv17/s2sw_stream3.yaml +++ b/dev/ci/cases/gfsv17/s2sw_stream3.yaml @@ -1,5 +1,5 @@ defaults: - !INC {{ HOMEgfs }}/dev/parm/config/gfs/yaml/defaults.yaml + !INC {{ HOMEglobal }}/dev/parm/config/gfs/yaml/defaults.yaml base: DOIAU: "YES" @@ -32,20 +32,20 @@ prepoceanobs: # dmpdir_exp: /lfs/h2/emc/da/noscrub/mindo.choi/MARINE_obs/COMROOT/realtime marinebmat: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - 
SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca marineanl: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca - SOCA_OBS_LIST: {{ HOMEgfs }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca + SOCA_OBS_LIST: {{ HOMEglobal }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 SOCA_NINNER: 100 marineanlecen: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca marineanlletkf: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca - SOCA_OBS_LIST: {{ HOMEgfs }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca + SOCA_OBS_LIST: {{ HOMEglobal }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 diff --git a/dev/ci/cases/gfsv17/s2sw_stream4.yaml b/dev/ci/cases/gfsv17/s2sw_stream4.yaml index af79fc60217..15e7184aaa0 100644 --- a/dev/ci/cases/gfsv17/s2sw_stream4.yaml +++ b/dev/ci/cases/gfsv17/s2sw_stream4.yaml @@ -1,5 +1,5 @@ defaults: - !INC {{ HOMEgfs }}/dev/parm/config/gfs/yaml/defaults.yaml + !INC {{ HOMEglobal }}/dev/parm/config/gfs/yaml/defaults.yaml base: DOIAU: "YES" @@ -32,20 +32,20 @@ prepoceanobs: # dmpdir_exp: /scratch3/NCEPDEV/da/common_obsForge marinebmat: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca + SOCA_INPUT_FIX_DIR: {{ 
HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca marineanl: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca - SOCA_OBS_LIST: {{ HOMEgfs }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca + SOCA_OBS_LIST: {{ HOMEglobal }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 SOCA_NINNER: 100 marineanlecen: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca marineanlletkf: - SOCA_INPUT_FIX_DIR: {{ HOMEgfs }}/fix/gdas/soca/1440x1080x75/soca - SOCA_ANL_GEOM: {{ HOMEgfs }}/fix/gdas/soca/720x540x75/soca - SOCA_OBS_LIST: {{ HOMEgfs }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 + SOCA_INPUT_FIX_DIR: {{ HOMEglobal }}/fix/gdas/soca/1440x1080x75/soca + SOCA_ANL_GEOM: {{ HOMEglobal }}/fix/gdas/soca/720x540x75/soca + SOCA_OBS_LIST: {{ HOMEglobal }}/parm/gdas/marine/obs/obs_list_gfsv17.yaml.j2 diff --git a/dev/ci/cases/hires/C1152_S2SW.yaml b/dev/ci/cases/hires/C1152_S2SW.yaml index cb7dfd8f87b..a264644fa48 100644 --- a/dev/ci/cases/hires/C1152_S2SW.yaml +++ b/dev/ci/cases/hires/C1152_S2SW.yaml @@ -9,7 +9,7 @@ experiment: expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR idate: 2019120300 edate: 2019120300 - yaml: {{ HOMEgfs }}/dev/ci/cases/yamls/gfs_defaults_ci.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/yamls/gfs_defaults_ci.yaml workflow: engine: rocoto diff --git a/dev/ci/cases/hires/C768_S2SW.yaml b/dev/ci/cases/hires/C768_S2SW.yaml index 02963759188..aa8d39693c6 100644 --- a/dev/ci/cases/hires/C768_S2SW.yaml +++ b/dev/ci/cases/hires/C768_S2SW.yaml @@ -9,7 +9,7 @@ experiment: expdir: {{ 'RUNTESTS' | getenv 
}}/EXPDIR idate: 2019120300 edate: 2019120300 - yaml: {{ HOMEgfs }}/dev/ci/cases/yamls/gfs_defaults_ci.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/yamls/gfs_defaults_ci.yaml workflow: engine: rocoto diff --git a/dev/ci/cases/pr/C48_ATM.yaml b/dev/ci/cases/pr/C48_ATM.yaml index abee60a924f..e85cd034142 100644 --- a/dev/ci/cases/pr/C48_ATM.yaml +++ b/dev/ci/cases/pr/C48_ATM.yaml @@ -8,7 +8,7 @@ experiment: expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR idate: 2021032312 edate: 2021032312 - yaml: {{ HOMEgfs }}/dev/ci/cases/yamls/gfs_defaults_ci.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/yamls/gfs_defaults_ci.yaml workflow: engine: rocoto diff --git a/dev/ci/cases/pr/C48_ATM_ecflow.yaml b/dev/ci/cases/pr/C48_ATM_ecflow.yaml index eae20d5a591..5ccb3709a9f 100644 --- a/dev/ci/cases/pr/C48_ATM_ecflow.yaml +++ b/dev/ci/cases/pr/C48_ATM_ecflow.yaml @@ -8,7 +8,7 @@ experiment: expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR idate: 2021032312 edate: 2021032312 - yaml: {{ HOMEgfs }}/dev/ci/cases/yamls/gfs_defaults_ci.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/yamls/gfs_defaults_ci.yaml skip_ci_on_hosts: - wcoss2 diff --git a/dev/ci/cases/pr/C48_S2SW.yaml b/dev/ci/cases/pr/C48_S2SW.yaml index cd7e5875903..555850e5683 100644 --- a/dev/ci/cases/pr/C48_S2SW.yaml +++ b/dev/ci/cases/pr/C48_S2SW.yaml @@ -9,7 +9,7 @@ experiment: expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR idate: 2021032312 edate: 2021032312 - yaml: {{ HOMEgfs }}/dev/ci/cases/yamls/gfs_defaults_ci.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/yamls/gfs_defaults_ci.yaml skip_ci_on_hosts: - wcoss2 diff --git a/dev/ci/cases/pr/C48_S2SWA_gefs.yaml b/dev/ci/cases/pr/C48_S2SWA_gefs.yaml index 6b69bdd3222..471bd063fd0 100644 --- a/dev/ci/cases/pr/C48_S2SWA_gefs.yaml +++ b/dev/ci/cases/pr/C48_S2SWA_gefs.yaml @@ -14,7 +14,7 @@ experiment: expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR idate: 2021032312 edate: 2021032312 - yaml: {{ HOMEgfs }}/dev/ci/cases/yamls/gefs_defaults_ci.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/yamls/gefs_defaults_ci.yaml 
skip_ci_on_hosts: - None diff --git a/dev/ci/cases/pr/C48_S2SWA_gefs_RT.yaml b/dev/ci/cases/pr/C48_S2SWA_gefs_RT.yaml index ba6749b1e03..93f84c1040e 100644 --- a/dev/ci/cases/pr/C48_S2SWA_gefs_RT.yaml +++ b/dev/ci/cases/pr/C48_S2SWA_gefs_RT.yaml @@ -14,7 +14,7 @@ experiment: expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR idate: 2024112500 edate: 2024112500 - yaml: {{ HOMEgfs }}/dev/ci/cases/yamls/gefs_defaults_ci.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/yamls/gefs_defaults_ci.yaml icsdir: /lfs/h2/emc/ens/noscrub/eric.sinsky/RETRO_ICS # TODO run on supported platforms once the gefs forecast and subsequent tasks can succeed with RETRO ICs diff --git a/dev/ci/cases/pr/C48_S2SW_extended.yaml b/dev/ci/cases/pr/C48_S2SW_extended.yaml index c6e0c29f25c..53a0e6b9c16 100644 --- a/dev/ci/cases/pr/C48_S2SW_extended.yaml +++ b/dev/ci/cases/pr/C48_S2SW_extended.yaml @@ -9,7 +9,7 @@ experiment: expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR idate: 2021032312 edate: 2021032312 - yaml: {{ HOMEgfs }}/dev/ci/cases/yamls/gfs_extended_ci.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/yamls/gfs_extended_ci.yaml skip_ci_on_hosts: - hera diff --git a/dev/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml b/dev/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml index 720bac1f691..91bb8038260 100644 --- a/dev/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml +++ b/dev/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml @@ -13,7 +13,7 @@ experiment: nens: 0 interval: 6 start: warm - yaml: {{ HOMEgfs }}/dev/ci/cases/yamls/soca_gfs_defaults_ci.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/yamls/soca_gfs_defaults_ci.yaml skip_ci_on_hosts: - gaeac5 diff --git a/dev/ci/cases/pr/C48mx500_hybAOWCDA.yaml b/dev/ci/cases/pr/C48mx500_hybAOWCDA.yaml index 905ed594f54..cc8bf2f3298 100644 --- a/dev/ci/cases/pr/C48mx500_hybAOWCDA.yaml +++ b/dev/ci/cases/pr/C48mx500_hybAOWCDA.yaml @@ -14,7 +14,7 @@ experiment: nens: 2 interval: 0 start: warm - yaml: {{ HOMEgfs }}/dev/ci/cases/yamls/soca_hyb_gfs_defaults_ci.yaml + yaml: {{ HOMEglobal 
}}/dev/ci/cases/yamls/soca_hyb_gfs_defaults_ci.yaml skip_ci_on_hosts: - gaeac5 diff --git a/dev/ci/cases/pr/C96C48_hybatmDA.yaml b/dev/ci/cases/pr/C96C48_hybatmDA.yaml index 66cb2168658..f0595904e91 100644 --- a/dev/ci/cases/pr/C96C48_hybatmDA.yaml +++ b/dev/ci/cases/pr/C96C48_hybatmDA.yaml @@ -14,7 +14,7 @@ experiment: nens: 2 interval: 24 start: cold - yaml: {{ HOMEgfs }}/dev/ci/cases/yamls/gfs_hyb_defaults_ci.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/yamls/gfs_hyb_defaults_ci.yaml skip_ci_on_hosts: - awsepicglobalworkflow diff --git a/dev/ci/cases/pr/C96C48_hybatmsnowDA.yaml b/dev/ci/cases/pr/C96C48_hybatmsnowDA.yaml index 59ea35b979e..d0e01c70098 100644 --- a/dev/ci/cases/pr/C96C48_hybatmsnowDA.yaml +++ b/dev/ci/cases/pr/C96C48_hybatmsnowDA.yaml @@ -13,7 +13,7 @@ experiment: nens: 2 interval: 24 start: cold - yaml: {{ HOMEgfs }}/dev/ci/cases/yamls/atmsnowDA_defaults_ci.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/yamls/atmsnowDA_defaults_ci.yaml skip_ci_on_hosts: - orion diff --git a/dev/ci/cases/pr/C96C48_hybatmsoilDA.yaml b/dev/ci/cases/pr/C96C48_hybatmsoilDA.yaml index 155e67a414c..d73e96a1179 100644 --- a/dev/ci/cases/pr/C96C48_hybatmsoilDA.yaml +++ b/dev/ci/cases/pr/C96C48_hybatmsoilDA.yaml @@ -13,7 +13,7 @@ experiment: nens: 2 interval: 6 start: warm - yaml: {{ HOMEgfs }}/dev/ci/cases/yamls/atmsoilDA_defaults_ci.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/yamls/atmsoilDA_defaults_ci.yaml skip_ci_on_hosts: - orion diff --git a/dev/ci/cases/pr/C96C48_ufs_hybatmDA.yaml b/dev/ci/cases/pr/C96C48_ufs_hybatmDA.yaml index eaafc4b3853..cfd312579db 100644 --- a/dev/ci/cases/pr/C96C48_ufs_hybatmDA.yaml +++ b/dev/ci/cases/pr/C96C48_ufs_hybatmDA.yaml @@ -13,7 +13,7 @@ experiment: nens: 2 interval: 24 start: warm - yaml: {{ HOMEgfs }}/dev/ci/cases/yamls/ufs_hybatmDA_defaults.ci.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/yamls/ufs_hybatmDA_defaults.ci.yaml skip_ci_on_hosts: - gaeac5 diff --git a/dev/ci/cases/pr/C96C48_ufsgsi_hybatmDA.yaml 
b/dev/ci/cases/pr/C96C48_ufsgsi_hybatmDA.yaml index 14cc30db214..f61bc09c45c 100644 --- a/dev/ci/cases/pr/C96C48_ufsgsi_hybatmDA.yaml +++ b/dev/ci/cases/pr/C96C48_ufsgsi_hybatmDA.yaml @@ -13,7 +13,7 @@ experiment: nens: 2 interval: 24 start: warm - yaml: {{ HOMEgfs }}/dev/ci/cases/yamls/ufsgsi_hybatmDA_defaults.ci.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/yamls/ufsgsi_hybatmDA_defaults.ci.yaml skip_ci_on_hosts: - gaeac5 diff --git a/dev/ci/cases/pr/C96C48mx500_S2SW_cyc_gfs.yaml b/dev/ci/cases/pr/C96C48mx500_S2SW_cyc_gfs.yaml index f7184a4b228..6deb23ead08 100644 --- a/dev/ci/cases/pr/C96C48mx500_S2SW_cyc_gfs.yaml +++ b/dev/ci/cases/pr/C96C48mx500_S2SW_cyc_gfs.yaml @@ -14,7 +14,7 @@ experiment: nens: 2 interval: 6 start: cold - yaml: {{ HOMEgfs }}/dev/ci/cases/yamls/gfs_cyc_defaults_ci.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/yamls/gfs_cyc_defaults_ci.yaml skip_ci_on_hosts: - gaeac5 diff --git a/dev/ci/cases/pr/C96_atm3DVar.yaml b/dev/ci/cases/pr/C96_atm3DVar.yaml index 63568627a34..d0cdde49cac 100644 --- a/dev/ci/cases/pr/C96_atm3DVar.yaml +++ b/dev/ci/cases/pr/C96_atm3DVar.yaml @@ -12,7 +12,7 @@ experiment: nens: 0 interval: 24 start: cold - yaml: {{ HOMEgfs }}/dev/ci/cases/yamls/gfs_metp_ci.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/yamls/gfs_metp_ci.yaml skip_ci_on_hosts: - gaeac5 diff --git a/dev/ci/cases/pr/C96_atm3DVar_extended.yaml b/dev/ci/cases/pr/C96_atm3DVar_extended.yaml index fda4f62d1c6..0a78e810628 100644 --- a/dev/ci/cases/pr/C96_atm3DVar_extended.yaml +++ b/dev/ci/cases/pr/C96_atm3DVar_extended.yaml @@ -12,7 +12,7 @@ experiment: nens: 0 interval: 6 start: cold - yaml: {{ HOMEgfs }}/dev/ci/cases/yamls/gfs_extended_ci.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/yamls/gfs_extended_ci.yaml skip_ci_on_hosts: - hera diff --git a/dev/ci/cases/pr/C96_gcafs_cycled.yaml b/dev/ci/cases/pr/C96_gcafs_cycled.yaml index 3a261feb198..4f7435b7342 100644 --- a/dev/ci/cases/pr/C96_gcafs_cycled.yaml +++ b/dev/ci/cases/pr/C96_gcafs_cycled.yaml @@ -12,7 +12,7 @@ 
experiment: nens: 0 interval: 12 start: cold - yaml: {{ HOMEgfs }}/dev/ci/cases/yamls/gcafs_cycled_defaults_ci.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/yamls/gcafs_cycled_defaults_ci.yaml skip_ci_on_hosts: - gaeac5 diff --git a/dev/ci/cases/pr/C96_gcafs_cycled_noDA.yaml b/dev/ci/cases/pr/C96_gcafs_cycled_noDA.yaml index dbe69605e7f..76bcd4528a0 100644 --- a/dev/ci/cases/pr/C96_gcafs_cycled_noDA.yaml +++ b/dev/ci/cases/pr/C96_gcafs_cycled_noDA.yaml @@ -12,7 +12,7 @@ experiment: nens: 0 interval: 12 start: cold - yaml: {{ HOMEgfs }}/dev/ci/cases/yamls/gcafs_cycled_noDA_defaults_ci.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/yamls/gcafs_cycled_noDA_defaults_ci.yaml skip_ci_on_hosts: - orion diff --git a/dev/ci/cases/pr/C96mx100_S2S.yaml b/dev/ci/cases/pr/C96mx100_S2S.yaml index 6049ccdd43e..ed3754284e3 100644 --- a/dev/ci/cases/pr/C96mx100_S2S.yaml +++ b/dev/ci/cases/pr/C96mx100_S2S.yaml @@ -13,7 +13,7 @@ experiment: comroot: {{ 'RUNTESTS' | getenv }}/COMROOT expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR icsdir: {{ 'ICSDIR_ROOT' | getenv }}/C96mx100/20250808 - yaml: {{ HOMEgfs }}/dev/ci/cases/yamls/sfs_defaults.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/yamls/sfs_defaults.yaml skip_ci_on_hosts: - gaeac5 diff --git a/dev/ci/cases/sfsv1/C96mx025_S2S.yaml b/dev/ci/cases/sfsv1/C96mx025_S2S.yaml index 04510e59ce6..ab81ba21b9d 100644 --- a/dev/ci/cases/sfsv1/C96mx025_S2S.yaml +++ b/dev/ci/cases/sfsv1/C96mx025_S2S.yaml @@ -13,7 +13,7 @@ experiment: comroot: {{ 'RUNTESTS' | getenv }}/COMROOT expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR icsdir: {{ 'ICSDIR_ROOT' | getenv }}/C96mx025/20251217 - yaml: {{ HOMEgfs }}/dev/ci/cases/yamls/sfs_CPC_defaults.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/yamls/sfs_CPC_defaults.yaml skip_ci_on_hosts: - gaeac5 diff --git a/dev/ci/cases/sfsv1/C96mx100_S2S.yaml b/dev/ci/cases/sfsv1/C96mx100_S2S.yaml index 85cec125b55..5f68fc1ac46 100644 --- a/dev/ci/cases/sfsv1/C96mx100_S2S.yaml +++ b/dev/ci/cases/sfsv1/C96mx100_S2S.yaml @@ -13,7 +13,7 @@ experiment: 
comroot: {{ 'RUNTESTS' | getenv }}/COMROOT expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR icsdir: {{ 'ICSDIR_ROOT' | getenv }}/C96mx100/20240610 - yaml: {{ HOMEgfs }}/dev/ci/cases/yamls/sfs_full.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/yamls/sfs_full.yaml workflow: engine: rocoto diff --git a/dev/ci/cases/weekly/C384C192_hybatmda.yaml b/dev/ci/cases/weekly/C384C192_hybatmda.yaml index 30ac6797615..2b85317114b 100644 --- a/dev/ci/cases/weekly/C384C192_hybatmda.yaml +++ b/dev/ci/cases/weekly/C384C192_hybatmda.yaml @@ -14,7 +14,7 @@ experiment: nens: 2 interval: 24 start: cold - yaml: {{ HOMEgfs }}/dev/ci/cases/yamls/gfs_defaults_ci.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/yamls/gfs_defaults_ci.yaml workflow: engine: rocoto diff --git a/dev/ci/cases/weekly/C384_atm3DVar.yaml b/dev/ci/cases/weekly/C384_atm3DVar.yaml index 7ef7c52ff11..740e7f99825 100644 --- a/dev/ci/cases/weekly/C384_atm3DVar.yaml +++ b/dev/ci/cases/weekly/C384_atm3DVar.yaml @@ -14,7 +14,7 @@ experiment: nens: 0 interval: 24 start: cold - yaml: {{ HOMEgfs }}/dev/ci/cases/yamls/gfs_defaults_ci.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/yamls/gfs_defaults_ci.yaml workflow: engine: rocoto diff --git a/dev/ci/cases/yamls/atmsnowDA_defaults_ci.yaml b/dev/ci/cases/yamls/atmsnowDA_defaults_ci.yaml index 8cd14335b89..2d4ac41c2cd 100644 --- a/dev/ci/cases/yamls/atmsnowDA_defaults_ci.yaml +++ b/dev/ci/cases/yamls/atmsnowDA_defaults_ci.yaml @@ -1,5 +1,5 @@ defaults: - !INC {{ HOMEgfs }}/dev/parm/config/gfs/yaml/defaults.yaml + !INC {{ HOMEglobal }}/dev/parm/config/gfs/yaml/defaults.yaml base: DO_JEDISNOWDA: "YES" DO_TEST_MODE: "NO" diff --git a/dev/ci/cases/yamls/atmsoilDA_defaults_ci.yaml b/dev/ci/cases/yamls/atmsoilDA_defaults_ci.yaml index 7c1a5c5857a..3337ca7d99e 100644 --- a/dev/ci/cases/yamls/atmsoilDA_defaults_ci.yaml +++ b/dev/ci/cases/yamls/atmsoilDA_defaults_ci.yaml @@ -1,5 +1,5 @@ defaults: - !INC {{ HOMEgfs }}/dev/parm/config/gfs/yaml/defaults.yaml + !INC {{ HOMEglobal 
}}/dev/parm/config/gfs/yaml/defaults.yaml base: DOIAU: "YES" DO_JEDISNOWDA: "YES" diff --git a/dev/ci/cases/yamls/gcafs_cycled_defaults_ci.yaml b/dev/ci/cases/yamls/gcafs_cycled_defaults_ci.yaml index 8b6fa75db3a..00beebd6a47 100644 --- a/dev/ci/cases/yamls/gcafs_cycled_defaults_ci.yaml +++ b/dev/ci/cases/yamls/gcafs_cycled_defaults_ci.yaml @@ -1,5 +1,5 @@ defaults: - !INC {{ HOMEgfs }}/dev/parm/config/gcafs/yaml/defaults.yaml + !INC {{ HOMEglobal }}/dev/parm/config/gcafs/yaml/defaults.yaml base: DO_TEST_MODE: "NO" AERO_ANL_RUNS: "gcdas" diff --git a/dev/ci/cases/yamls/gcafs_cycled_noDA_defaults_ci.yaml b/dev/ci/cases/yamls/gcafs_cycled_noDA_defaults_ci.yaml index e0941b6d549..070a6d3f54c 100644 --- a/dev/ci/cases/yamls/gcafs_cycled_noDA_defaults_ci.yaml +++ b/dev/ci/cases/yamls/gcafs_cycled_noDA_defaults_ci.yaml @@ -1,5 +1,5 @@ defaults: - !INC {{ HOMEgfs }}/dev/parm/config/gcafs/yaml/defaults.yaml + !INC {{ HOMEglobal }}/dev/parm/config/gcafs/yaml/defaults.yaml base: DO_TEST_MODE: "NO" USE_AERO_ANL: "NO" diff --git a/dev/ci/cases/yamls/gcafs_cycled_noDA_defaults_dev.yaml b/dev/ci/cases/yamls/gcafs_cycled_noDA_defaults_dev.yaml index d895b6ae5f5..b1c54d01f42 100644 --- a/dev/ci/cases/yamls/gcafs_cycled_noDA_defaults_dev.yaml +++ b/dev/ci/cases/yamls/gcafs_cycled_noDA_defaults_dev.yaml @@ -1,5 +1,5 @@ defaults: - !INC {{ HOMEgfs }}/dev/parm/config/gcafs/yaml/defaults.yaml + !INC {{ HOMEglobal }}/dev/parm/config/gcafs/yaml/defaults.yaml base: DO_TEST_MODE: "NO" USE_AERO_ANL: "NO" diff --git a/dev/ci/cases/yamls/gcafs_defaults_ci.yaml b/dev/ci/cases/yamls/gcafs_defaults_ci.yaml index c03466e4b7f..aac8f2c265f 100644 --- a/dev/ci/cases/yamls/gcafs_defaults_ci.yaml +++ b/dev/ci/cases/yamls/gcafs_defaults_ci.yaml @@ -1,4 +1,4 @@ defaults: - !INC {{ HOMEgfs }}/dev/parm/config/gcafs/yaml/defaults.yaml + !INC {{ HOMEglobal }}/dev/parm/config/gcafs/yaml/defaults.yaml base: DO_TEST_MODE: "NO" diff --git a/dev/ci/cases/yamls/gefs_defaults_ci.yaml 
b/dev/ci/cases/yamls/gefs_defaults_ci.yaml index 5919e9ad4e8..ed52cb82b5a 100644 --- a/dev/ci/cases/yamls/gefs_defaults_ci.yaml +++ b/dev/ci/cases/yamls/gefs_defaults_ci.yaml @@ -1,5 +1,5 @@ defaults: - !INC {{ HOMEgfs }}/dev/parm/config/gefs/yaml/defaults.yaml + !INC {{ HOMEglobal }}/dev/parm/config/gefs/yaml/defaults.yaml base: SFS_POST: "NO" FHOUT_GFS: 6 diff --git a/dev/ci/cases/yamls/gfs_cyc_defaults_ci.yaml b/dev/ci/cases/yamls/gfs_cyc_defaults_ci.yaml index f26a0372b62..263d91d9f13 100644 --- a/dev/ci/cases/yamls/gfs_cyc_defaults_ci.yaml +++ b/dev/ci/cases/yamls/gfs_cyc_defaults_ci.yaml @@ -1,5 +1,5 @@ defaults: - !INC {{ HOMEgfs }}/dev/parm/config/gfs/yaml/defaults.yaml + !INC {{ HOMEglobal }}/dev/parm/config/gfs/yaml/defaults.yaml base: DOIAU: "YES" DO_JEDIATMVAR: "NO" diff --git a/dev/ci/cases/yamls/gfs_defaults_ci.yaml b/dev/ci/cases/yamls/gfs_defaults_ci.yaml index f5c0910f61b..c7ec59eb9fb 100644 --- a/dev/ci/cases/yamls/gfs_defaults_ci.yaml +++ b/dev/ci/cases/yamls/gfs_defaults_ci.yaml @@ -1,5 +1,5 @@ defaults: - !INC {{ HOMEgfs }}/dev/parm/config/gfs/yaml/defaults.yaml + !INC {{ HOMEglobal }}/dev/parm/config/gfs/yaml/defaults.yaml base: DO_TEST_MODE: "YES" FETCHDIR: "/NCEPDEV/emc-global/1year/David.Grumm/test_data" diff --git a/dev/ci/cases/yamls/gfs_extended_ci.yaml b/dev/ci/cases/yamls/gfs_extended_ci.yaml index b45ac7d935a..cc2b4e53199 100644 --- a/dev/ci/cases/yamls/gfs_extended_ci.yaml +++ b/dev/ci/cases/yamls/gfs_extended_ci.yaml @@ -1,5 +1,5 @@ defaults: - !INC {{ HOMEgfs }}/dev/parm/config/gfs/yaml/defaults.yaml + !INC {{ HOMEglobal }}/dev/parm/config/gfs/yaml/defaults.yaml base: DO_GOES: "YES" diff --git a/dev/ci/cases/yamls/gfs_hyb_defaults_ci.yaml b/dev/ci/cases/yamls/gfs_hyb_defaults_ci.yaml index 56759c015b3..442be491f23 100644 --- a/dev/ci/cases/yamls/gfs_hyb_defaults_ci.yaml +++ b/dev/ci/cases/yamls/gfs_hyb_defaults_ci.yaml @@ -1,5 +1,5 @@ defaults: - !INC {{ HOMEgfs }}/dev/parm/config/gfs/yaml/defaults.yaml + !INC {{ HOMEglobal 
}}/dev/parm/config/gfs/yaml/defaults.yaml base: DO_GSISOILDA: "YES" DO_LAND_IAU: ".true." diff --git a/dev/ci/cases/yamls/gfs_metp_ci.yaml b/dev/ci/cases/yamls/gfs_metp_ci.yaml index 36ad40cf67a..0e37f459740 100644 --- a/dev/ci/cases/yamls/gfs_metp_ci.yaml +++ b/dev/ci/cases/yamls/gfs_metp_ci.yaml @@ -1,5 +1,5 @@ defaults: - !INC {{ HOMEgfs }}/dev/parm/config/gfs/yaml/defaults.yaml + !INC {{ HOMEglobal }}/dev/parm/config/gfs/yaml/defaults.yaml base: DO_TEST_MODE: "NO" DO_METP: "YES" diff --git a/dev/ci/cases/yamls/sfs_CPC_defaults.yaml b/dev/ci/cases/yamls/sfs_CPC_defaults.yaml index f977aa6c0ba..175716cec1f 100644 --- a/dev/ci/cases/yamls/sfs_CPC_defaults.yaml +++ b/dev/ci/cases/yamls/sfs_CPC_defaults.yaml @@ -1,5 +1,5 @@ defaults: - !INC {{ HOMEgfs }}/dev/parm/config/sfs/yaml/defaults.yaml + !INC {{ HOMEglobal }}/dev/parm/config/sfs/yaml/defaults.yaml base: FHMAX_GFS: 144 FCST_BREAKPOINTS: "48,96" diff --git a/dev/ci/cases/yamls/sfs_defaults.yaml b/dev/ci/cases/yamls/sfs_defaults.yaml index 9251f55fb55..68efa0a27d5 100644 --- a/dev/ci/cases/yamls/sfs_defaults.yaml +++ b/dev/ci/cases/yamls/sfs_defaults.yaml @@ -1,5 +1,5 @@ defaults: - !INC {{ HOMEgfs }}/dev/parm/config/sfs/yaml/defaults.yaml + !INC {{ HOMEglobal }}/dev/parm/config/sfs/yaml/defaults.yaml base: FHMAX_GFS: 144 FCST_BREAKPOINTS: "48,96" diff --git a/dev/ci/cases/yamls/sfs_full.yaml b/dev/ci/cases/yamls/sfs_full.yaml index 9426dd42e78..76442e8ae36 100644 --- a/dev/ci/cases/yamls/sfs_full.yaml +++ b/dev/ci/cases/yamls/sfs_full.yaml @@ -1,5 +1,5 @@ defaults: - !INC {{ HOMEgfs }}/dev/parm/config/sfs/yaml/defaults.yaml + !INC {{ HOMEglobal }}/dev/parm/config/sfs/yaml/defaults.yaml base: FHMAX_GFS: 2976 FCST_BREAKPOINTS: "" diff --git a/dev/ci/cases/yamls/soca_gfs_defaults_ci.yaml b/dev/ci/cases/yamls/soca_gfs_defaults_ci.yaml index 8725b61b0df..e050852da48 100644 --- a/dev/ci/cases/yamls/soca_gfs_defaults_ci.yaml +++ b/dev/ci/cases/yamls/soca_gfs_defaults_ci.yaml @@ -1,5 +1,5 @@ defaults: - !INC {{ HOMEgfs 
}}/dev/parm/config/gfs/yaml/defaults.yaml + !INC {{ HOMEglobal }}/dev/parm/config/gfs/yaml/defaults.yaml base: DO_JEDIOCNVAR: "YES" DO_METP: "NO" @@ -8,4 +8,4 @@ marineanl: SOCA_NINNER: 1 DO_TEST_MODE: "NO" marineanl: - JCB_ALGO_YAML_VAR: "{{ HOMEgfs }}/sorc/gdas.cd/test/gw-ci/soca/jcb-prototype_3dfgat_3DVarAOWCDA.yaml.j2" + JCB_ALGO_YAML_VAR: "{{ HOMEglobal }}/sorc/gdas.cd/test/gw-ci/soca/jcb-prototype_3dfgat_3DVarAOWCDA.yaml.j2" diff --git a/dev/ci/cases/yamls/soca_hyb_gfs_defaults_ci.yaml b/dev/ci/cases/yamls/soca_hyb_gfs_defaults_ci.yaml index cab6674c9ad..5163afbd329 100644 --- a/dev/ci/cases/yamls/soca_hyb_gfs_defaults_ci.yaml +++ b/dev/ci/cases/yamls/soca_hyb_gfs_defaults_ci.yaml @@ -1,5 +1,5 @@ defaults: - !INC {{ HOMEgfs }}/dev/parm/config/gfs/yaml/defaults.yaml + !INC {{ HOMEglobal }}/dev/parm/config/gfs/yaml/defaults.yaml base: DO_JEDIOCNVAR: "YES" DOHYBVAR_OCN: "YES" diff --git a/dev/ci/cases/yamls/ufs_hybatmDA_defaults.ci.yaml b/dev/ci/cases/yamls/ufs_hybatmDA_defaults.ci.yaml index bdcc3d90bec..d6676f5683d 100644 --- a/dev/ci/cases/yamls/ufs_hybatmDA_defaults.ci.yaml +++ b/dev/ci/cases/yamls/ufs_hybatmDA_defaults.ci.yaml @@ -1,5 +1,5 @@ defaults: - !INC {{ HOMEgfs }}/dev/parm/config/gfs/yaml/defaults.yaml + !INC {{ HOMEglobal }}/dev/parm/config/gfs/yaml/defaults.yaml base: DOIAU: "NO" DO_JEDIATMVAR: "YES" @@ -10,17 +10,17 @@ base: atmanl: LAYOUT_X_ATMANL: 4 LAYOUT_Y_ATMANL: 4 - OBS_LIST_YAML: "${HOMEgfs}/sorc/gdas.cd/test/gw-ci/atm/atm_obs_list_ufs_hybatmDA.yaml.j2" - VAR_JEDI_TEST_YAML: "${HOMEgfs}/sorc/gdas.cd/test/gw-ci/atm/jedi-test_3dvar_ufs_hybatmDA.yaml.j2" - FV3INC_JEDI_TEST_YAML: "${HOMEgfs}/sorc/gdas.cd/test/gw-ci/atm/jedi-test_3dvar-fv3inc_ufs_hybatmDA.yaml.j2" + OBS_LIST_YAML: "${HOMEglobal}/sorc/gdas.cd/test/gw-ci/atm/atm_obs_list_ufs_hybatmDA.yaml.j2" + VAR_JEDI_TEST_YAML: "${HOMEglobal}/sorc/gdas.cd/test/gw-ci/atm/jedi-test_3dvar_ufs_hybatmDA.yaml.j2" + FV3INC_JEDI_TEST_YAML: 
"${HOMEglobal}/sorc/gdas.cd/test/gw-ci/atm/jedi-test_3dvar-fv3inc_ufs_hybatmDA.yaml.j2" atmensanl: LAYOUT_X_ATMENSANL: 4 LAYOUT_Y_ATMENSANL: 4 - OBS_LIST_YAML: "${HOMEgfs}/sorc/gdas.cd/test/gw-ci/atm/atm_obs_list_ufs_hybatmDA.yaml.j2" - LETKF_JEDI_TEST_YAML: "${HOMEgfs}/sorc/gdas.cd/test/gw-ci/atm/jedi-test_lgetkf_ufs_hybatmDA.yaml.j2" - OBS_JEDI_TEST_YAML: "${HOMEgfs}/sorc/gdas.cd/test/gw-ci/atm/jedi-test_lgetkf-observer_ufs_hybatmDA.yaml.j2" - SOL_JEDI_TEST_YAML: "${HOMEgfs}/sorc/gdas.cd/test/gw-ci/atm/jedi-test_lgetkf-solver_ufs_hybatmDA.yaml.j2" - FV3INC_JEDI_TEST_YAML: "${HOMEgfs}/sorc/gdas.cd/test/gw-ci/atm/jedi-test_lgetkf-fv3inc_ufs_hybatmDA.yaml.j2" + OBS_LIST_YAML: "${HOMEglobal}/sorc/gdas.cd/test/gw-ci/atm/atm_obs_list_ufs_hybatmDA.yaml.j2" + LETKF_JEDI_TEST_YAML: "${HOMEglobal}/sorc/gdas.cd/test/gw-ci/atm/jedi-test_lgetkf_ufs_hybatmDA.yaml.j2" + OBS_JEDI_TEST_YAML: "${HOMEglobal}/sorc/gdas.cd/test/gw-ci/atm/jedi-test_lgetkf-observer_ufs_hybatmDA.yaml.j2" + SOL_JEDI_TEST_YAML: "${HOMEglobal}/sorc/gdas.cd/test/gw-ci/atm/jedi-test_lgetkf-solver_ufs_hybatmDA.yaml.j2" + FV3INC_JEDI_TEST_YAML: "${HOMEglobal}/sorc/gdas.cd/test/gw-ci/atm/jedi-test_lgetkf-fv3inc_ufs_hybatmDA.yaml.j2" esfc: DONST: "NO" nsst: diff --git a/dev/ci/cases/yamls/ufsgsi_hybatmDA_defaults.ci.yaml b/dev/ci/cases/yamls/ufsgsi_hybatmDA_defaults.ci.yaml index ab813714613..aac20ff313a 100644 --- a/dev/ci/cases/yamls/ufsgsi_hybatmDA_defaults.ci.yaml +++ b/dev/ci/cases/yamls/ufsgsi_hybatmDA_defaults.ci.yaml @@ -1,5 +1,5 @@ defaults: - !INC {{ HOMEgfs }}/dev/parm/config/gfs/yaml/defaults.yaml + !INC {{ HOMEglobal }}/dev/parm/config/gfs/yaml/defaults.yaml base: DOIAU: "NO" DO_JEDIATMVAR: "YES" @@ -12,9 +12,9 @@ base: atmanl: LAYOUT_X_ATMANL: 4 LAYOUT_Y_ATMANL: 4 - OBS_LIST_YAML: "${HOMEgfs}/sorc/gdas.cd/test/gw-ci/atm/atm_obs_list_ufs_hybatmDA.yaml.j2" - VAR_JEDI_TEST_YAML: "${HOMEgfs}/sorc/gdas.cd/test/gw-ci/atm/jedi-test_3dvar_ufs_hybatmDA.yaml.j2" - FV3INC_JEDI_TEST_YAML: 
"${HOMEgfs}/sorc/gdas.cd/test/gw-ci/atm/jedi-test_3dvar-fv3inc_ufs_hybatmDA.yaml.j2" + OBS_LIST_YAML: "${HOMEglobal}/sorc/gdas.cd/test/gw-ci/atm/atm_obs_list_ufs_hybatmDA.yaml.j2" + VAR_JEDI_TEST_YAML: "${HOMEglobal}/sorc/gdas.cd/test/gw-ci/atm/jedi-test_3dvar_ufs_hybatmDA.yaml.j2" + FV3INC_JEDI_TEST_YAML: "${HOMEglobal}/sorc/gdas.cd/test/gw-ci/atm/jedi-test_3dvar-fv3inc_ufs_hybatmDA.yaml.j2" NUMBER_OUTER_LOOPS: 2 NINNER_LOOP1: 2 NINNER_LOOP2: 4 diff --git a/dev/ci/platforms/config.gaeac6 b/dev/ci/platforms/config.gaeac6 index a1ae82994f6..58bec97b68e 100644 --- a/dev/ci/platforms/config.gaeac6 +++ b/dev/ci/platforms/config.gaeac6 @@ -27,7 +27,7 @@ export JENKINS_WORK_DIR="${GFS_CI_ROOT}/Jenkins/workspace" # NOTE: JENKINS custom_workspace directory where CI jobs are run # /gpfs/f6/drsa-precip3/proj-shared/global/CI -# and is defined in $HOMEgfs/dev/ci/Jenkinsfile +# and is defined in $HOMEglobal/dev/ci/Jenkinsfile ######################################################################### diff --git a/dev/ci/platforms/config.hera b/dev/ci/platforms/config.hera index d2226c9baf2..b7b9de9d36a 100644 --- a/dev/ci/platforms/config.hera +++ b/dev/ci/platforms/config.hera @@ -29,7 +29,7 @@ export JENKINS_AGENT_LAUNCH_DIR=${GFS_CI_ROOT}/Jenkins/agent export JENKINS_WORK_DIR=${GFS_CI_ROOT}/Jenkins/workspace # NOTE: JENKINS custom_workspace directory where CI jobs are run -# and is defined in $HOMEgfs/dev/ci/Jenkinsfile as custom_workspace +# and is defined in $HOMEglobal/dev/ci/Jenkinsfile as custom_workspace # /scratch3/NCEPDEV/global/role.glopara/CI diff --git a/dev/ci/platforms/config.ursa b/dev/ci/platforms/config.ursa index 057866d589c..7585008c3c8 100644 --- a/dev/ci/platforms/config.ursa +++ b/dev/ci/platforms/config.ursa @@ -29,7 +29,7 @@ export JENKINS_AGENT_LAUNCH_DIR=${GFS_CI_ROOT}/Jenkins/agent export JENKINS_WORK_DIR=${GFS_CI_ROOT}/Jenkins/workspace # NOTE: JENKINS custom_workspace directory where CI jobs are run -# and is defined in $HOMEgfs/dev/ci/Jenkinsfile 
as custom_workspace +# and is defined in $HOMEglobal/dev/ci/Jenkinsfile as custom_workspace # /scratch3/NCEPDEV/global/role.glopara/CI diff --git a/dev/ci/scripts/unittests/test_data/test_config.yaml b/dev/ci/scripts/unittests/test_data/test_config.yaml index 797538bf44c..f19804e7229 100644 --- a/dev/ci/scripts/unittests/test_data/test_config.yaml +++ b/dev/ci/scripts/unittests/test_data/test_config.yaml @@ -18,4 +18,4 @@ complex: value: 1 - name: second value: 2 -template_value: "{{ HOMEgfs }}/some/path" +template_value: "{{ HOMEglobal }}/some/path" diff --git a/dev/ci/scripts/unittests/test_parse_yaml.py b/dev/ci/scripts/unittests/test_parse_yaml.py index ca704e568ca..fed9effe345 100644 --- a/dev/ci/scripts/unittests/test_parse_yaml.py +++ b/dev/ci/scripts/unittests/test_parse_yaml.py @@ -47,7 +47,7 @@ def setUpClass(cls): value: 1 - name: second value: 2 -template_value: "/path/to/homegfs/some/path"''') +template_value: "/path/to/homeglobal/some/path"''') def test_cli_basic(self): """Test the command-line interface with basic options""" diff --git a/dev/ci/scripts/utils/ci_utils.sh b/dev/ci/scripts/utils/ci_utils.sh index a2df2624505..d31c7fea868 100755 --- a/dev/ci/scripts/utils/ci_utils.sh +++ b/dev/ci/scripts/utils/ci_utils.sh @@ -1,10 +1,10 @@ #!/usr/bin/env bash -# Determine HOMEgfs_ and source machine detection early -if [[ -z "${HOMEgfs_}" ]]; then - HOMEgfs_="$(cd "$(dirname "${BASH_SOURCE[0]}")" && git rev-parse --show-toplevel)" +# Determine HOMEglobal_ and source machine detection early +if [[ -z "${HOMEglobal_}" ]]; then + HOMEglobal_="$(cd "$(dirname "${BASH_SOURCE[0]}")" && git rev-parse --show-toplevel)" fi -source "${HOMEgfs_}/ush/detect_machine.sh" +source "${HOMEglobal_}/ush/detect_machine.sh" # --- Existing functions --- @@ -68,7 +68,7 @@ function get_pr_case_list() { # loop over every yaml file in the PR's ci/cases # and create an run directory for each one for this PR loop ############################################################# - for 
yaml_config in "${HOMEgfs_}/dev/ci/cases/pr/"*.yaml; do + for yaml_config in "${HOMEglobal_}/dev/ci/cases/pr/"*.yaml; do case=$(basename "${yaml_config}" .yaml) || true echo "${case}" done @@ -123,30 +123,30 @@ function create_experiment() { local yaml_config="${1}" pr_sha=$(git rev-parse --short HEAD) local TAG="${2:-${pr_sha}}" - cd "${HOMEgfs_}" || exit 1 + cd "${HOMEglobal_}" || exit 1 case=$(basename "${yaml_config}" .yaml) || true echo "Using provided TAG: ${TAG} for pslot" export pslot=${case}_${TAG} if [[ ${MACHINE_ID} == "noaacloud" ]]; then - source "${HOMEgfs_}/dev/ci/platforms/config.${PW_CSP}" + source "${HOMEglobal_}/dev/ci/platforms/config.${PW_CSP}" else - source "${HOMEgfs_}/dev/ci/platforms/config.${MACHINE_ID}" + source "${HOMEglobal_}/dev/ci/platforms/config.${MACHINE_ID}" fi - source "${HOMEgfs_}/dev/ush/gw_setup.sh" + source "${HOMEglobal_}/dev/ush/gw_setup.sh" # Remove RUNDIRS dir incase this is a retry (STMP now in host file) if [[ ${MACHINE_ID} == "noaacloud" ]]; then - STMP=$("${HOMEgfs_}/dev/ci/scripts/utils/parse_yaml.py" -y "${HOMEgfs_}/dev/workflow/hosts/${PW_CSP}pw.yaml" -k STMP -s) + STMP=$("${HOMEglobal_}/dev/ci/scripts/utils/parse_yaml.py" -y "${HOMEglobal_}/dev/workflow/hosts/${PW_CSP}pw.yaml" -k STMP -s) else - STMP=$("${HOMEgfs_}/dev/ci/scripts/utils/parse_yaml.py" -y "${HOMEgfs_}/dev/workflow/hosts/${MACHINE_ID}.yaml" -k STMP -s) + STMP=$("${HOMEglobal_}/dev/ci/scripts/utils/parse_yaml.py" -y "${HOMEglobal_}/dev/workflow/hosts/${MACHINE_ID}.yaml" -k STMP -s) fi echo "Removing ${STMP}/RUNDIRS/${pslot} directory incase this is a retry" rm -Rf "${STMP}/RUNDIRS/${pslot}" - "${HOMEgfs_}/${system}/dev/workflow/create_experiment.py" --overwrite --yaml "${yaml_config}" + "${HOMEglobal_}/${system}/dev/workflow/create_experiment.py" --overwrite --yaml "${yaml_config}" } @@ -170,8 +170,8 @@ function publish_logs() { if [[ -n "${full_paths}" ]]; then # shellcheck disable=SC2027,SC2086 - ${HOMEgfs_}/dev/ci/scripts/utils/publish_logs.py 
--file ${full_paths} --repo ${PR_header} > /dev/null - URL="$("${HOMEgfs_}/dev/ci/scripts/utils/publish_logs.py" --file "${full_paths}" --gist "${PR_header}")" + ${HOMEglobal_}/dev/ci/scripts/utils/publish_logs.py --file ${full_paths} --repo ${PR_header} > /dev/null + URL="$("${HOMEglobal_}/dev/ci/scripts/utils/publish_logs.py" --file "${full_paths}" --gist "${PR_header}")" fi echo "${URL}" } @@ -203,13 +203,13 @@ function publish_logs_from_file() { local URL="" if ((${#files[@]} > 0)); then # First, upload to repo (retain original behavior) if desired - "${HOMEgfs_}/dev/ci/scripts/utils/publish_logs.py" --file "${files[@]}" --repo "${PR_header}" > /dev/null || true + "${HOMEglobal_}/dev/ci/scripts/utils/publish_logs.py" --file "${files[@]}" --repo "${PR_header}" > /dev/null || true # For gist, if more than one file use --multiple --format github if ((${#files[@]} > 1)); then cmd_args="--multiple --format github" fi - URL="$("${HOMEgfs_}/dev/ci/scripts/utils/publish_logs.py" --file "${files[0]}" "${cmd_args:-}" --gist "${PR_header}")" + URL="$("${HOMEglobal_}/dev/ci/scripts/utils/publish_logs.py" --file "${files[0]}" "${cmd_args:-}" --gist "${PR_header}")" fi echo "${URL}" @@ -227,7 +227,7 @@ function cleanup_experiment() { pslot=$(basename "${EXPDIR}") # Use the Python utility to get the required variables - read -r ARCDIR ATARDIR STMP COMROOT < <("${HOMEgfs_}/dev/ci/scripts/utils/get_config_var.py" ARCDIR ATARDIR STMP COMROOT "${EXPDIR}") || true + read -r ARCDIR ATARDIR STMP COMROOT < <("${HOMEglobal_}/dev/ci/scripts/utils/get_config_var.py" ARCDIR ATARDIR STMP COMROOT "${EXPDIR}") || true rm -Rf "${ARCDIR:?}" rm -Rf "${ATARDIR:?}" @@ -238,13 +238,13 @@ function cleanup_experiment() { function build() { - source "${HOMEgfs_}/dev/ci/platforms/config.${MACHINE_ID}" - logs_dir="${HOMEgfs_}/sorc/logs" + source "${HOMEglobal_}/dev/ci/platforms/config.${MACHINE_ID}" + logs_dir="${HOMEglobal_}/sorc/logs" if [[ ! 
-d "${logs_dir}" ]]; then echo "Creating logs folder" mkdir -p "${logs_dir}" || exit 1 fi - "${HOMEgfs_}/sorc/build_all.sh" -c -A "${HPC_ACCOUNT}" all + "${HOMEglobal_}/sorc/build_all.sh" -c -A "${HPC_ACCOUNT}" all } diff --git a/dev/ci/scripts/utils/gitlab/launch_gitlab_runner.sh b/dev/ci/scripts/utils/gitlab/launch_gitlab_runner.sh index 93eb80e1fb6..1eb6dd50b2e 100755 --- a/dev/ci/scripts/utils/gitlab/launch_gitlab_runner.sh +++ b/dev/ci/scripts/utils/gitlab/launch_gitlab_runner.sh @@ -13,8 +13,8 @@ set -e # Usage: ./launch_gitlab_runner.sh [register|run|unregister] [token] ######################################################################### -# Set the HOMEgfs_ variable to the root directory of the global workflow -HOMEgfs_="$(cd "$(dirname "${BASH_SOURCE[0]}")" && git rev-parse --show-toplevel)" +# Set the HOMEglobal_ variable to the root directory of the global workflow +HOMEglobal_="$(cd "$(dirname "${BASH_SOURCE[0]}")" && git rev-parse --show-toplevel)" # Get the hostname of the current machine host="$(hostname)" @@ -23,7 +23,7 @@ host="$(hostname)" ######################################################################### # Source the detect_machine.sh script to determine the MACHINE_ID -source "${HOMEgfs_}/ush/detect_machine.sh" +source "${HOMEglobal_}/ush/detect_machine.sh" # Check the MACHINE_ID and set up the environment accordingly case "${MACHINE_ID}" in hera | orion | hercules | wcoss2 | gaeac5 | gaeac6) @@ -41,7 +41,7 @@ esac # Source the platform-specific configuration file # This file contains platform-specific variables such as GITLAB_URL, GITLAB_CI_BUILDS_DIR, # and GITLAB_RUNNER_DIR which are required for runner registration and execution -source "${HOMEgfs_}/dev/ci/platforms/config.${MACHINE_ID}" +source "${HOMEglobal_}/dev/ci/platforms/config.${MACHINE_ID}" # Change to the GitLab runner directory defined in the platform config cd "${GITLAB_RUNNER_DIR}" || exit 1 diff --git a/dev/ci/scripts/utils/launch_java_agent.sh 
b/dev/ci/scripts/utils/launch_java_agent.sh index 1be50215a73..cf0f7297107 100755 --- a/dev/ci/scripts/utils/launch_java_agent.sh +++ b/dev/ci/scripts/utils/launch_java_agent.sh @@ -69,15 +69,15 @@ controller_url="https://jenkins.epic.oarcloud.noaa.gov" controller_user=${controller_user:-"terry.mcguinness"} controller_user_auth_token="jenkins_token" -# Set the HOMEgfs_ variable to the root directory of the global workflow -HOMEgfs_="$(cd "$(dirname "${BASH_SOURCE[0]}")" && git rev-parse --show-toplevel)" +# Set the HOMEglobal_ variable to the root directory of the global workflow +HOMEglobal_="$(cd "$(dirname "${BASH_SOURCE[0]}")" && git rev-parse --show-toplevel)" host=$(hostname) ######################################################################### # Set up runtime environment varibles for accounts on supproted machines ######################################################################### -source "${HOMEgfs_}/ush/detect_machine.sh" +source "${HOMEglobal_}/ush/detect_machine.sh" case ${MACHINE_ID} in hera | orion | hercules | wcoss2 | gaeac5 | gaeac6) echo "Launch Jenkins Java Controler on ${MACHINE_ID}" @@ -94,14 +94,14 @@ esac LOG=launched_agent-$(date +%Y%m%d%M).log rm -f "${LOG}" -HOMEglobal="${HOMEgfs_}" source "${HOMEgfs_}/ush/module-setup.sh" -module use "${HOMEgfs_}/modulefiles" +HOMEglobal="${HOMEglobal_}" source "${HOMEglobal_}/ush/module-setup.sh" +module use "${HOMEglobal_}/modulefiles" module load "gw_setup.${MACHINE_ID}" if [[ ${MACHINE_ID} == "noaacloud" ]]; then - source "${HOMEgfs_}/dev/ci/platforms/config.${PW_CSP}" + source "${HOMEglobal_}/dev/ci/platforms/config.${PW_CSP}" else - source "${HOMEgfs_}/dev/ci/platforms/config.${MACHINE_ID}" + source "${HOMEglobal_}/dev/ci/platforms/config.${MACHINE_ID}" fi JAVA_HOME="${JENKINS_AGENT_LAUNCH_DIR}/JAVA/jdk-17.0.10" diff --git a/dev/ctests/CMakeLists.txt b/dev/ctests/CMakeLists.txt index 8b43c00aa0e..62d25f8ace3 100644 --- a/dev/ctests/CMakeLists.txt +++ b/dev/ctests/CMakeLists.txt @@ -17,9 
+17,9 @@ function(set_from_env_or_default VAR_NAME ENV_VAR DEFAULT_VALUE) endif() endfunction() -# Set HOMEgfs -if (NOT DEFINED HOMEgfs) - set(HOMEgfs ${PROJECT_SOURCE_DIR}) +# Set HOMEglobal +if (NOT DEFINED HOMEglobal) + set(HOMEglobal ${PROJECT_SOURCE_DIR}) endif() # Set RUNTESTS @@ -46,7 +46,7 @@ if (NOT DEFINED ICSDIR_ROOT) return() endif() -message(STATUS "gw: global-workflow baselines will be used from: '${HOMEgfs}'") +message(STATUS "gw: global-workflow baselines will be used from: '${HOMEglobal}'") message(STATUS "gw: global-workflow tests will be run at: '${RUNTESTS}'") message(STATUS "gw: global-workflow tests will use the allocation: '${HPC_ACCOUNT}'") message(STATUS "gw: global-workflow tests will use ICSDIR_ROOT: '${ICSDIR_ROOT}'") @@ -73,7 +73,7 @@ function(AddJJOBTest) ${ARGN}) set(TEST_NAME ${ARG_CASE}-${ARG_JOB}) - set(CASE_PATH ${HOMEgfs}/dev/ci/cases/pr) + set(CASE_PATH ${HOMEglobal}/dev/ci/cases/pr) set(CASE_YAML ${CASE_PATH}/${ARG_CASE}.yaml) # Labels use binomial nomenclature: CASE-JOB (e.g., C48_S2SW-gfs_fcst_seg0) diff --git a/dev/ctests/README.md b/dev/ctests/README.md index 26f3311d1ca..e4654ac25fe 100644 --- a/dev/ctests/README.md +++ b/dev/ctests/README.md @@ -16,12 +16,12 @@ STAGED_CTESTS # Path to nightly baseline COMROOT ICSDIR_ROOT # Path to initial condition files ``` -These are typically defined in `$HOMEgfs/dev/ci/platforms/config.$MACHINE_ID`. +These are typically defined in `$HOMEglobal/dev/ci/platforms/config.$MACHINE_ID`. 
### Configure and Build ```bash -cd $HOMEgfs/dev/ctests +cd $HOMEglobal/dev/ctests mkdir -p build cd build @@ -220,13 +220,13 @@ cd build/scripts ## Key Directories ``` -$HOMEgfs/dev/ctests/ # Framework root +$HOMEglobal/dev/ctests/ # Framework root ├── build/ # CMake build directory (create this) ├── cases/ # YAML test definitions ├── scripts/ # Test phase scripts └── CMakeLists.txt # Test configuration -$HOMEgfs/dev/ci/platforms/ # Platform-specific configuration +$HOMEglobal/dev/ci/platforms/ # Platform-specific configuration └── config.$MACHINE_ID # Machine settings (STAGED_CTESTS, HPC_ACCOUNT, etc.) ${STAGED_CTESTS}/COMROOT/ # Nightly baseline outputs (input source) @@ -235,7 +235,7 @@ ${RUNTESTS}/COMROOT/ # Test execution environments (created by test ## Platform Configuration -Platform-specific settings are in `$HOMEgfs/dev/ci/platforms/config.$MACHINE_ID`: +Platform-specific settings are in `$HOMEglobal/dev/ci/platforms/config.$MACHINE_ID`: ```bash # Example from config.hera @@ -249,8 +249,8 @@ export HPC_ACCOUNT=nems Source the appropriate configuration before running CMake: ```bash -source $HOMEgfs/ush/detect_machine.sh -source $HOMEgfs/dev/ci/platforms/config.$MACHINE_ID +source $HOMEglobal/ush/detect_machine.sh +source $HOMEglobal/dev/ci/platforms/config.$MACHINE_ID ``` ## Additional Resources diff --git a/dev/ctests/cases/C48_S2SW-gfs_waveinit.yaml b/dev/ctests/cases/C48_S2SW-gfs_waveinit.yaml index 8dab05f27b8..a1a92583369 100644 --- a/dev/ctests/cases/C48_S2SW-gfs_waveinit.yaml +++ b/dev/ctests/cases/C48_S2SW-gfs_waveinit.yaml @@ -33,7 +33,7 @@ input_files: copy: # No input files required from COMROOT for waveinit - # The job reads from FIXgfs wave grids and creates mod_def files + # The job reads from FIXglobal wave grids and creates mod_def files # This is a generation job, not a processing job output_files: diff --git a/dev/ctests/scripts/setup.sh.in b/dev/ctests/scripts/setup.sh.in index 637e0f2d3bc..ed6e7ab014c 100755 --- 
a/dev/ctests/scripts/setup.sh.in +++ b/dev/ctests/scripts/setup.sh.in @@ -6,16 +6,16 @@ TEST_NAME=${1:?"Name of the test is required"} YAML_FILE=${2:?"Name of the CI yaml file for the test"} # CMake to fill these variables -HOMEgfs="@PROJECT_SOURCE_DIR@" +HOMEglobal="@PROJECT_SOURCE_DIR@" RUNTESTS="@RUNTESTS@" ICSDIR_ROOT="@ICSDIR_ROOT@" HPC_ACCOUNT="@HPC_ACCOUNT@" set +x -source "${HOMEgfs}/dev/ush/gw_setup.sh" +source "${HOMEglobal}/dev/ush/gw_setup.sh" set -x -cd "${HOMEgfs}/dev/workflow" || exit 1 +cd "${HOMEglobal}/dev/workflow" || exit 1 # We need to tag the pslot with the current git commit hash # for a unique path in the RUNDIR directory for the use case diff --git a/dev/ctests/scripts/stage.sh.in b/dev/ctests/scripts/stage.sh.in index 9394ab96df8..9a24697de95 100755 --- a/dev/ctests/scripts/stage.sh.in +++ b/dev/ctests/scripts/stage.sh.in @@ -7,31 +7,31 @@ TEST_NAME=${2:?"Name of the test is required"} TEST_DATE=${3:?"Date of the test is required"} # CMake to fill these variables RUNTESTS="@RUNTESTS@" -HOMEgfs="@PROJECT_SOURCE_DIR@" +HOMEglobal="@PROJECT_SOURCE_DIR@" STAGED_CTESTS="@STAGED_CTESTS@" # Load the runtime environment for this script (needs wxflow and its dependencies) set +x -source "${HOMEgfs}/dev/ush/gw_setup.sh" +source "${HOMEglobal}/dev/ush/gw_setup.sh" rc=$? 
if [[ "${rc}" -ne 0 ]]; then exit "${rc}" fi set -x -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEgfs}/sorc/wxflow/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEglobal}/sorc/wxflow/src" export PYTHONPATH -YAML_FILE="${HOMEgfs}/dev/ctests/cases/${TEST_NAME}.yaml" +YAML_FILE="${HOMEglobal}/dev/ctests/cases/${TEST_NAME}.yaml" # Find pslot with pattern CASE_NAME_ where suffix is typically a hash -PSLOT=$("${HOMEgfs}/dev/ci/scripts/utils/ci_utils.sh" get_pslot "${STAGED_CTESTS}" "${CASE_NAME}") -TEST_NAME=$("${HOMEgfs}/dev/ci/scripts/utils/ci_utils.sh" get_pslot "${RUNTESTS}" "${TEST_NAME}") +PSLOT=$("${HOMEglobal}/dev/ci/scripts/utils/ci_utils.sh" get_pslot "${STAGED_CTESTS}" "${CASE_NAME}") +TEST_NAME=$("${HOMEglobal}/dev/ci/scripts/utils/ci_utils.sh" get_pslot "${RUNTESTS}" "${TEST_NAME}") STAGED_CTESTS="${STAGED_CTESTS}" \ PSLOT="${PSLOT}" \ RUNTESTS="${RUNTESTS}" \ TEST_NAME="${TEST_NAME}" \ -"${HOMEgfs}/dev/ctests/scripts/stage.py" --yaml "${YAML_FILE}" --test_date "${TEST_DATE}" +"${HOMEglobal}/dev/ctests/scripts/stage.py" --yaml "${YAML_FILE}" --test_date "${TEST_DATE}" rc=$? if [[ "${rc}" -ne 0 ]]; then set +x diff --git a/dev/ctests/scripts/validate.sh.in b/dev/ctests/scripts/validate.sh.in index 13ac6366adf..871c982e8d1 100755 --- a/dev/ctests/scripts/validate.sh.in +++ b/dev/ctests/scripts/validate.sh.in @@ -7,25 +7,25 @@ TEST_DATE=${3:?"idate of the test is required"} # CMake to fill these variables RUNTESTS="@RUNTESTS@" -HOMEgfs="@PROJECT_SOURCE_DIR@" +HOMEglobal="@PROJECT_SOURCE_DIR@" STAGED_CTESTS="@STAGED_CTESTS@" # Load the runtime environment for this script (needs wxflow and its dependencies) set +x -source "${HOMEgfs}/dev/ush/gw_setup.sh" +source "${HOMEglobal}/dev/ush/gw_setup.sh" rc=$? 
if [[ "${rc}" -ne 0 ]]; then exit "${rc}" fi set -x -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEgfs}/sorc/wxflow/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEglobal}/sorc/wxflow/src" export PYTHONPATH -YAML_FILE="${HOMEgfs}/dev/ctests/cases/${TEST_NAME}.yaml" +YAML_FILE="${HOMEglobal}/dev/ctests/cases/${TEST_NAME}.yaml" # Find pslot with pattern CASE_NAME_ where suffix was added to induce unique paths in RUNDIR -PSLOT=$("${HOMEgfs}/dev/ci/scripts/utils/ci_utils.sh" get_pslot "${STAGED_CTESTS}" "${CASE_NAME}") -TEST_NAME=$("${HOMEgfs}/dev/ci/scripts/utils/ci_utils.sh" get_pslot "${RUNTESTS}" "${TEST_NAME}") +PSLOT=$("${HOMEglobal}/dev/ci/scripts/utils/ci_utils.sh" get_pslot "${STAGED_CTESTS}" "${CASE_NAME}") +TEST_NAME=$("${HOMEglobal}/dev/ci/scripts/utils/ci_utils.sh" get_pslot "${RUNTESTS}" "${TEST_NAME}") echo "validating '${TEST_NAME}' with yaml file '${YAML_FILE}'" @@ -33,7 +33,7 @@ STAGED_CTESTS="${STAGED_CTESTS}" \ PSLOT="${PSLOT}" \ TEST_NAME="${TEST_NAME}" \ RUNTESTS="${RUNTESTS}" \ -"${HOMEgfs}/dev/ctests/scripts/validate.py" --yaml "${YAML_FILE}" --test_date "${TEST_DATE}" +"${HOMEglobal}/dev/ctests/scripts/validate.py" --yaml "${YAML_FILE}" --test_date "${TEST_DATE}" rc=$? 
if [[ "${rc}" -ne 0 ]]; then set +x diff --git a/dev/parm/config/gcafs/config.aeroanl.j2 b/dev/parm/config/gcafs/config.aeroanl.j2 index 09ca53ee0d8..f26ec3b4dfb 100644 --- a/dev/parm/config/gcafs/config.aeroanl.j2 +++ b/dev/parm/config/gcafs/config.aeroanl.j2 @@ -21,8 +21,8 @@ export CASE_ANL export STATICB_TYPE='diffusion' -export TASK_CONFIG_YAML="${PARMgfs}/gdas/aero/aero_det_config.yaml.j2" -export OBS_LIST_YAML="${PARMgfs}/gdas/aero/aero_obs_list.yaml.j2" +export TASK_CONFIG_YAML="${PARMglobal}/gdas/aero/aero_det_config.yaml.j2" +export OBS_LIST_YAML="${PARMglobal}/gdas/aero/aero_obs_list.yaml.j2" export io_layout_x="{{ IO_LAYOUT_X }}" export io_layout_y="{{ IO_LAYOUT_Y }}" diff --git a/dev/parm/config/gcafs/config.aeroanlgenb b/dev/parm/config/gcafs/config.aeroanlgenb index 8b656a7c4d2..0b01d7525d5 100644 --- a/dev/parm/config/gcafs/config.aeroanlgenb +++ b/dev/parm/config/gcafs/config.aeroanlgenb @@ -8,8 +8,8 @@ echo "BEGIN: config.aeroanlgenb" # Get task specific resources source "${EXPDIR}/config.resources" aeroanlgenb -export TASK_CONFIG_YAML="${PARMgfs}/gdas/aero/aero_bmat_config.yaml.j2" -export OBS_LIST_YAML="${PARMgfs}/gdas/aero/aero_obs_list.yaml.j2" +export TASK_CONFIG_YAML="${PARMglobal}/gdas/aero/aero_bmat_config.yaml.j2" +export OBS_LIST_YAML="${PARMglobal}/gdas/aero/aero_obs_list.yaml.j2" export aero_diffusion_iter=200 export aero_diffusion_horiz_len=300e3 diff --git a/dev/parm/config/gcafs/config.anlstat b/dev/parm/config/gcafs/config.anlstat index c8afee9d95c..9ff4216bc93 100644 --- a/dev/parm/config/gcafs/config.anlstat +++ b/dev/parm/config/gcafs/config.anlstat @@ -8,6 +8,6 @@ echo "BEGIN: config.anlstat" # Get task specific resources source "${EXPDIR}/config.resources" anlstat -export TASK_CONFIG_YAML="${PARMgfs}/gdas/anlstat/anlstat_config.yaml.j2" +export TASK_CONFIG_YAML="${PARMglobal}/gdas/anlstat/anlstat_config.yaml.j2" echo "END: config.anlstat" diff --git a/dev/parm/config/gcafs/config.atmos_products 
b/dev/parm/config/gcafs/config.atmos_products index 0e535a08731..3f9ecab56cb 100644 --- a/dev/parm/config/gcafs/config.atmos_products +++ b/dev/parm/config/gcafs/config.atmos_products @@ -26,11 +26,11 @@ elif [[ "${RUN:-}" == "gcafs" ]]; then fi # paramlist files for the different forecast hours and downsets -export paramlista="${PARMgfs}/product/gcafs.fFFF.paramlist.a.txt" -export paramlista_anl="${PARMgfs}/product/gcafs.anl.paramlist.a.txt" -export paramlista_f000="${PARMgfs}/product/gcafs.f000.paramlist.a.txt" -export paramlistb="${PARMgfs}/product/gcafs.fFFF.paramlist.b.txt" -export paramlistb_anl="${PARMgfs}/product/gcafs.anl.paramlist.b.txt" -export paramlistb_f000="${PARMgfs}/product/gcafs.f000.paramlist.b.txt" +export paramlista="${PARMglobal}/product/gcafs.fFFF.paramlist.a.txt" +export paramlista_anl="${PARMglobal}/product/gcafs.anl.paramlist.a.txt" +export paramlista_f000="${PARMglobal}/product/gcafs.f000.paramlist.a.txt" +export paramlistb="${PARMglobal}/product/gcafs.fFFF.paramlist.b.txt" +export paramlistb_anl="${PARMglobal}/product/gcafs.anl.paramlist.b.txt" +export paramlistb_f000="${PARMglobal}/product/gcafs.f000.paramlist.b.txt" echo "END: config.atmos_products" diff --git a/dev/parm/config/gcafs/config.base.j2 b/dev/parm/config/gcafs/config.base.j2 index 4d0e82a4775..6f5c6882016 100644 --- a/dev/parm/config/gcafs/config.base.j2 +++ b/dev/parm/config/gcafs/config.base.j2 @@ -28,21 +28,21 @@ export CLUSTERS_DTN="{{ CLUSTERS_DTN | default('${CLUSTERS_SERVICE}') }}" export HPSS_PROJECT="{{ HPSS_PROJECT }}" # Directories relative to installation areas: -export HOMEgfs="{{ HOMEgfs }}" -export EXECgfs="${HOMEgfs}/exec" -export FIXgfs="${HOMEgfs}/fix" -export PARMgfs="${HOMEgfs}/parm" -export SCRgfs="${HOMEgfs}/scripts" -export USHgfs="${HOMEgfs}/ush" - -export FIXam="${FIXgfs}/am" -export FIXaer="${FIXgfs}/aer" -export FIXcpl="${FIXgfs}/cpl" -export FIXlut="${FIXgfs}/lut" -export FIXcice="${FIXgfs}/cice" -export FIXmom="${FIXgfs}/mom6" -export 
FIXreg2grb2="${FIXgfs}/reg2grb2" -export FIXgdas="${FIXgfs}/gdas" +export HOMEglobal="{{ HOMEglobal }}" +export EXECglobal="${HOMEglobal}/exec" +export FIXglobal="${HOMEglobal}/fix" +export PARMglobal="${HOMEglobal}/parm" +export SCRglobal="${HOMEglobal}/scripts" +export USHglobal="${HOMEglobal}/ush" + +export FIXam="${FIXglobal}/am" +export FIXaer="${FIXglobal}/aer" +export FIXcpl="${FIXglobal}/cpl" +export FIXlut="${FIXglobal}/lut" +export FIXcice="${FIXglobal}/cice" +export FIXmom="${FIXglobal}/mom6" +export FIXreg2grb2="${FIXglobal}/reg2grb2" +export FIXgdas="${FIXglobal}/gdas" ######################################################################## @@ -103,9 +103,9 @@ export DO_TEST_MODE="{{ DO_TEST_MODE }}" # option to change configuration for au # FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT # CLEAR #################################################### -# Build paths relative to $HOMEgfs -export FIXgsi="${HOMEgfs}/fix/gsi" -export HOMEpost="${HOMEgfs}" +# Build paths relative to $HOMEglobal +export FIXgsi="${HOMEglobal}/fix/gsi" +export HOMEpost="${HOMEglobal}" export HOMEobsproc="${BASE_GIT:-}/obsproc/v${obsproc_run_ver:-}" # CONVENIENT utility scripts and other environment parameters @@ -117,10 +117,10 @@ export DEBUG_POSTSCRIPT="NO" # PBS only; sets debug=true export CHGRP_RSTPROD="{{ CHGRP_RSTPROD }}" export CHGRP_CMD="{{ CHGRP_CMD }}" export NCDUMP="${NETCDF:-${netcdf_c_ROOT:-}}/bin/ncdump" -export NCLEN="${HOMEgfs}/ush/getncdimlen" +export NCLEN="${HOMEglobal}/ush/getncdimlen" # Machine environment, jobs, and other utility scripts -export BASE_ENV="${HOMEgfs}/env" +export BASE_ENV="${HOMEglobal}/env" # EXPERIMENT specific environment parameters export SDATE="{{ SDATE }}" @@ -189,12 +189,12 @@ export DO_NEST="NO" # Whether to run a global-nested domain if [[ "${DO_NEST:-NO}" == "YES" ]] ; then export ntiles=7 export NEST_OUTPUT_GRID="regional_latlon" - export FIXugwd="${FIXgfs}/ugwd_nest" - export FIXorog="${FIXgfs}/orog_nest" + export 
FIXugwd="${FIXglobal}/ugwd_nest" + export FIXorog="${FIXglobal}/orog_nest" else export ntiles=6 - export FIXugwd="${FIXgfs}/ugwd" - export FIXorog="${FIXgfs}/orog" + export FIXugwd="${FIXglobal}/ugwd" + export FIXorog="${FIXglobal}/orog" fi # Set operational resolution @@ -380,7 +380,7 @@ export l4densvar=".true." export lwrite4danl=".true." export DO_CALC_INCREMENT="NO" export USE_BUILD_GSINFO="NO" -export BUILD_GSINFO_DIR="${PARMgfs}/gsinfo" +export BUILD_GSINFO_DIR="${PARMglobal}/gsinfo" # Early-cycle EnKF parameters export NMEM_ENS_GFS="{{ NMEM_ENS_GFS }}" diff --git a/dev/parm/config/gcafs/config.fcst.j2 b/dev/parm/config/gcafs/config.fcst.j2 index e95c21a886c..92a4fc1d712 100644 --- a/dev/parm/config/gcafs/config.fcst.j2 +++ b/dev/parm/config/gcafs/config.fcst.j2 @@ -76,8 +76,8 @@ export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_O ####################################################################### -export FORECASTSH="${SCRgfs}/exglobal_forecast.sh" -#export FORECASTSH="${SCRgfs}/exglobal_forecast.py" # Temp. while this is worked on +export FORECASTSH="${SCRglobal}/exglobal_forecast.sh" +#export FORECASTSH="${SCRglobal}/exglobal_forecast.py" # Temp. while this is worked on export FCSTEXEC="gcafs_model.x" ####################################################################### @@ -196,17 +196,17 @@ export random_clds=".true." 
case ${imp_physics} in 99) # ZhaoCarr export ncld=1 - export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_zhaocarr${tbf}${tbp}" + export FIELD_TABLE="${PARMglobal}/ufs/fv3/field_table_zhaocarr${tbf}${tbp}" export nwat=2 ;; 6) # WSM6 export ncld=2 - export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_wsm6${tbf}${tbp}" + export FIELD_TABLE="${PARMglobal}/ufs/fv3/field_table_wsm6${tbf}${tbp}" export nwat=6 ;; 8) # Thompson export ncld=2 - export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_thompson_noaero_tke${tbp}" + export FIELD_TABLE="${PARMglobal}/ufs/fv3/field_table_thompson_noaero_tke${tbp}" export nwat=6 export cal_pre=".false." @@ -230,7 +230,7 @@ case ${imp_physics} in export d4_bg=0.12 if [[ "${CCPP_SUITE}" == "FV3_global_nest"* ]]; then - export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_thompson_aero_tke${tbp}" + export FIELD_TABLE="${PARMglobal}/ufs/fv3/field_table_thompson_aero_tke${tbp}" export ltaerosol=".true." export lcnorm=".true." export do_mynnedmf=".true." @@ -245,7 +245,7 @@ case ${imp_physics} in ;; 11) # GFDL export ncld=5 - export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_gfdl${tbf}${tbp}" + export FIELD_TABLE="${PARMglobal}/ufs/fv3/field_table_gfdl${tbf}${tbp}" export nwat=6 export dnats=1 export cal_pre=".false." 
@@ -292,7 +292,7 @@ export FSICS="0" if [[ "${RUN}" =~ "gdas" || "${RUN}" =~ "gcdas" ]] ; then # GDAS cycle specific parameters # Variables used in DA cycling - export DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table_da" + export DIAG_TABLE="${PARMglobal}/ufs/fv3/diag_table_da" # Write gfs restart files to rerun fcst from any break point export restart_interval=${restart_interval_gdas:-6} @@ -303,7 +303,7 @@ if [[ "${RUN}" =~ "gdas" || "${RUN}" =~ "gcdas" ]] ; then # GDAS cycle specific elif [[ "${RUN}" = "gfs" || "${RUN}" = "gcafs" ]] ; then # GFS or GCAFS cycle specific parameters # Write more variables to output - export DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table" + export DIAG_TABLE="${PARMglobal}/ufs/fv3/diag_table" # Write gfs restart files to rerun fcst from any break point export restart_interval=${restart_interval_gfs:-12} diff --git a/dev/parm/config/gcafs/config.fetch b/dev/parm/config/gcafs/config.fetch index fbe9162afda..6bfc874e7a0 100644 --- a/dev/parm/config/gcafs/config.fetch +++ b/dev/parm/config/gcafs/config.fetch @@ -18,9 +18,9 @@ if [[ "${PDY}${cyc}" -gt "${SDATE}" ]]; then fi export gdas_version if [[ "${machine}" == "ORION" || "${machine}" == "HERCULES" ]]; then - FETCH_YAML_TMPL_LIST="${PARMgfs}/fetch/${NET}_${APP}_gdas-anl_msu.yaml.j2," + FETCH_YAML_TMPL_LIST="${PARMglobal}/fetch/${NET}_${APP}_gdas-anl_msu.yaml.j2," else - FETCH_YAML_TMPL_LIST="${PARMgfs}/fetch/${NET}_${APP}_gdas-anl.yaml.j2," + FETCH_YAML_TMPL_LIST="${PARMglobal}/fetch/${NET}_${APP}_gdas-anl.yaml.j2," fi export FETCH_YAML_TMPL_LIST else @@ -31,7 +31,7 @@ else else ic_type="warm" fi - export FETCH_YAML_TMPL="${PARMgfs}/fetch/${NET}_${APP}_${ic_type}_${MODE}.yaml.j2" + export FETCH_YAML_TMPL="${PARMglobal}/fetch/${NET}_${APP}_${ic_type}_${MODE}.yaml.j2" fi echo "END: config.fetch" diff --git a/dev/parm/config/gcafs/config.metp b/dev/parm/config/gcafs/config.metp index 564966fd6d2..b15622eaf77 100644 --- a/dev/parm/config/gcafs/config.metp +++ b/dev/parm/config/gcafs/config.metp @@ 
-19,7 +19,7 @@ export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus # METplus: Verify grid-to-grid, grid-to-obs, precipitation options #---------------------------------------------------------- ## EMC_VERIF_GLOBAL SETTINGS -export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export HOMEverif_global=${HOMEglobal}/sorc/verif-global.fd export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh ## INPUT DATA SETTINGS export model=${PSLOT} diff --git a/dev/parm/config/gcafs/config.upp b/dev/parm/config/gcafs/config.upp index ec7180612b9..6b8d8252078 100644 --- a/dev/parm/config/gcafs/config.upp +++ b/dev/parm/config/gcafs/config.upp @@ -8,7 +8,7 @@ echo "BEGIN: config.upp" # Get task specific resources . "${EXPDIR}/config.resources" upp -export UPP_CONFIG="${PARMgfs}/post/upp_gcafs.yaml" +export UPP_CONFIG="${PARMglobal}/post/upp_gcafs.yaml" # No. of forecast hours to process in a single job export NFHRS_PER_GROUP=3 diff --git a/dev/parm/config/gcafs/yaml/defaults.yaml b/dev/parm/config/gcafs/yaml/defaults.yaml index 87eb2491a4b..004374f7175 100644 --- a/dev/parm/config/gcafs/yaml/defaults.yaml +++ b/dev/parm/config/gcafs/yaml/defaults.yaml @@ -42,7 +42,7 @@ atmanl: LAYOUT_Y_ATMANL: 8 IO_LAYOUT_X: 1 IO_LAYOUT_Y: 1 - OBS_LIST_YAML: "${PARMgfs}/gdas/atm/atm_obs_list.yaml.j2" + OBS_LIST_YAML: "${PARMglobal}/gdas/atm/atm_obs_list.yaml.j2" VAR_JEDI_TEST_YAML: "" FV3INC_JEDI_TEST_YAML: "" @@ -51,7 +51,7 @@ atmensanl: LAYOUT_Y_ATMENSANL: 8 IO_LAYOUT_X: 1 IO_LAYOUT_Y: 1 - OBS_LIST_YAML: "${PARMgfs}/gdas/atm/atm_obs_list.yaml.j2" + OBS_LIST_YAML: "${PARMglobal}/gdas/atm/atm_obs_list.yaml.j2" LETKF_JEDI_TEST_YAML: "" OBS_JEDI_TEST_YAML: "" SOL_JEDI_TEST_YAML: "" @@ -66,23 +66,23 @@ snowanl: IO_LAYOUT_Y: 1 marinebmat: - SOCA_INPUT_FIX_DIR: "${FIXgfs}/gdas/soca/72x35x25/soca" - SOCA_ANL_GEOM: "${FIXgfs}/gdas/soca/72x35x25/soca" + SOCA_INPUT_FIX_DIR: "${FIXglobal}/gdas/soca/72x35x25/soca" + SOCA_ANL_GEOM: 
"${FIXglobal}/gdas/soca/72x35x25/soca" marineanl: - SOCA_INPUT_FIX_DIR: "${FIXgfs}/gdas/soca/72x35x25/soca" - SOCA_ANL_GEOM: "${FIXgfs}/gdas/soca/72x35x25/soca" - SOCA_OBS_LIST: "${PARMgfs}/gdas/marine/obs/obs_list.yaml" # TODO: This is also repeated in oceanprepobs + SOCA_INPUT_FIX_DIR: "${FIXglobal}/gdas/soca/72x35x25/soca" + SOCA_ANL_GEOM: "${FIXglobal}/gdas/soca/72x35x25/soca" + SOCA_OBS_LIST: "${PARMglobal}/gdas/marine/obs/obs_list.yaml" # TODO: This is also repeated in oceanprepobs SOCA_NINNER: 100 marineanlecen: - SOCA_INPUT_FIX_DIR: "${FIXgfs}/gdas/soca/72x35x25/soca" - SOCA_ANL_GEOM: "${FIXgfs}/gdas/soca/72x35x25/soca" + SOCA_INPUT_FIX_DIR: "${FIXglobal}/gdas/soca/72x35x25/soca" + SOCA_ANL_GEOM: "${FIXglobal}/gdas/soca/72x35x25/soca" marineanlletkf: - SOCA_INPUT_FIX_DIR: "${FIXgfs}/gdas/soca/72x35x25/soca" - SOCA_ANL_GEOM: "${FIXgfs}/gdas/soca/72x35x25/soca" - SOCA_OBS_LIST: "${PARMgfs}/gdas/marine/obs/obs_list.yaml.j2" + SOCA_INPUT_FIX_DIR: "${FIXglobal}/gdas/soca/72x35x25/soca" + SOCA_ANL_GEOM: "${FIXglobal}/gdas/soca/72x35x25/soca" + SOCA_OBS_LIST: "${PARMglobal}/gdas/marine/obs/obs_list.yaml.j2" fcst: CHECK_LAND_RESTART_OROG: "YES" diff --git a/dev/parm/config/gcafs/yaml/test_ci.yaml b/dev/parm/config/gcafs/yaml/test_ci.yaml index 85320122524..17651356e34 100644 --- a/dev/parm/config/gcafs/yaml/test_ci.yaml +++ b/dev/parm/config/gcafs/yaml/test_ci.yaml @@ -1,4 +1,4 @@ defaults: - !INC {{ PARMgfs }}/config/gcafs/yaml/defaults.yaml + !INC {{ PARMglobal }}/config/gcafs/yaml/defaults.yaml base: ACCOUNT: "nems" diff --git a/dev/parm/config/gefs/config.atmos_products b/dev/parm/config/gefs/config.atmos_products index edc38f389c1..a50c3134703 100644 --- a/dev/parm/config/gefs/config.atmos_products +++ b/dev/parm/config/gefs/config.atmos_products @@ -20,9 +20,9 @@ export FLXGF="NO" # Create interpolated sflux.1p00 file # paramlist files for the different forecast hours and downsets export downset=2 -export 
paramlista="${PARMgfs}/product/${NET}.0p25.fFFF.paramlist.a.txt" -export paramlista_anl="${PARMgfs}/product/${NET}.0p25.anl.paramlist.a.txt" -export paramlista_f000="${PARMgfs}/product/${NET}.0p25.f000.paramlist.a.txt" -export paramlistb="${PARMgfs}/product/${NET}.0p25.fFFF.paramlist.b.txt" +export paramlista="${PARMglobal}/product/${NET}.0p25.fFFF.paramlist.a.txt" +export paramlista_anl="${PARMglobal}/product/${NET}.0p25.anl.paramlist.a.txt" +export paramlista_f000="${PARMglobal}/product/${NET}.0p25.f000.paramlist.a.txt" +export paramlistb="${PARMglobal}/product/${NET}.0p25.fFFF.paramlist.b.txt" echo "END: config.atmos_products" diff --git a/dev/parm/config/gefs/config.base.j2 b/dev/parm/config/gefs/config.base.j2 index e6be151924f..87d42f544b9 100644 --- a/dev/parm/config/gefs/config.base.j2 +++ b/dev/parm/config/gefs/config.base.j2 @@ -28,14 +28,14 @@ export CLUSTERS_DTN="{{ CLUSTERS_DTN | default('${CLUSTERS_SERVICE}') }}" export HPSS_PROJECT="{{ HPSS_PROJECT }}" # Directories relative to installation areas: -export HOMEgfs="{{ HOMEgfs }}" -export EXECgfs=${HOMEgfs}/exec -export FIXgfs=${HOMEgfs}/fix -export PARMgfs=${HOMEgfs}/parm -export SCRgfs=${HOMEgfs}/scripts -export USHgfs=${HOMEgfs}/ush -export FIXorog=${FIXgfs}/orog -export FIXugwd=${FIXgfs}/ugwd +export HOMEglobal="{{ HOMEglobal }}" +export EXECglobal=${HOMEglobal}/exec +export FIXglobal=${HOMEglobal}/fix +export PARMglobal=${HOMEglobal}/parm +export SCRglobal=${HOMEglobal}/scripts +export USHglobal=${HOMEglobal}/ush +export FIXorog=${FIXglobal}/orog +export FIXugwd=${FIXglobal}/ugwd ######################################################################## @@ -71,8 +71,8 @@ export GEFSTYPE="{{ GEFSTYPE }}" # gefs-real-time/gefs-offline # FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT # CLEAR #################################################### -# Build paths relative to $HOMEgfs -export HOMEpost="${HOMEgfs}" +# Build paths relative to $HOMEglobal +export HOMEpost="${HOMEglobal}" # CONVENIENT utility 
scripts and other environment parameters export NMV="/bin/mv" @@ -83,10 +83,10 @@ export DEBUG_POSTSCRIPT="NO" # PBS only; sets debug=true export CHGRP_RSTPROD="{{ CHGRP_RSTPROD }}" export CHGRP_CMD="{{ CHGRP_CMD }}" export NCDUMP="${NETCDF:-${netcdf_c_ROOT:-}}/bin/ncdump" -export NCLEN="${HOMEgfs}/ush/getncdimlen" +export NCLEN="${HOMEglobal}/ush/getncdimlen" # Machine environment, jobs, and other utility scripts -export BASE_ENV="${HOMEgfs}/env" +export BASE_ENV="${HOMEglobal}/env" # EXPERIMENT specific environment parameters export SDATE="{{ SDATE }}" diff --git a/dev/parm/config/gefs/config.extractvars b/dev/parm/config/gefs/config.extractvars index 7f1166a8690..3bf7de34eaa 100644 --- a/dev/parm/config/gefs/config.extractvars +++ b/dev/parm/config/gefs/config.extractvars @@ -23,12 +23,12 @@ export zmax="300." # Maximum depth to extract from NetCDF ocean products export FHOUT_WAV_EXTRACT=6 # Frequency of wave output to be saved on disk #Paramater Tables used -export varlist_2d="${PARMgfs}/product/gefs_shortparmlist_2d.parm" # Parameter table for surface variables -export varlist_3d="${PARMgfs}/product/gefs_shortparmlist_3d_h.parm" # Parameter table for upper air instantaneous variables -export varlist_3d_d="${PARMgfs}/product/gefs_shortparmlist_3d_d.parm" # Parameter table for upper air daily-averaged variables -export varlist_wav="${PARMgfs}/product/gefs_wav_shortparmlist.parm" # Parameter table for wave variables -export varlist_ocn_netcdf="${PARMgfs}/product/gefs_ocn_shortparmlist.parm" # Parameter table for ocean netcdf variables -export varlist_ice_netcdf="${PARMgfs}/product/gefs_ice_shortparmlist.parm" # Parameter table for ice netcdf variables +export varlist_2d="${PARMglobal}/product/gefs_shortparmlist_2d.parm" # Parameter table for surface variables +export varlist_3d="${PARMglobal}/product/gefs_shortparmlist_3d_h.parm" # Parameter table for upper air instantaneous variables +export varlist_3d_d="${PARMglobal}/product/gefs_shortparmlist_3d_d.parm" # 
Parameter table for upper air daily-averaged variables +export varlist_wav="${PARMglobal}/product/gefs_wav_shortparmlist.parm" # Parameter table for wave variables +export varlist_ocn_netcdf="${PARMglobal}/product/gefs_ocn_shortparmlist.parm" # Parameter table for ocean netcdf variables +export varlist_ice_netcdf="${PARMglobal}/product/gefs_ice_shortparmlist.parm" # Parameter table for ice netcdf variables #Directory to save extracted variables export ARC_RFCST_PROD="${ARCDIR}/rfcst/${PDY:0:4}/${PDY:0:6}/${PDY:0:8}/mem${ENSMEM}" diff --git a/dev/parm/config/gefs/config.fcst.j2 b/dev/parm/config/gefs/config.fcst.j2 index b44f259794e..589231fceab 100644 --- a/dev/parm/config/gefs/config.fcst.j2 +++ b/dev/parm/config/gefs/config.fcst.j2 @@ -60,8 +60,8 @@ export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_O ####################################################################### -export FORECASTSH="${SCRgfs}/exglobal_forecast.sh" -#export FORECASTSH="${SCRgfs}/exglobal_forecast.py" # Temp. while this is worked on +export FORECASTSH="${SCRglobal}/exglobal_forecast.sh" +#export FORECASTSH="${SCRglobal}/exglobal_forecast.py" # Temp. while this is worked on export FCSTEXEC="${NET}_model.x" ####################################################################### @@ -172,17 +172,17 @@ export random_clds=".true." case ${imp_physics} in 99) # ZhaoCarr export ncld=1 - export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_zhaocarr${tbf}${tbp}" + export FIELD_TABLE="${PARMglobal}/ufs/fv3/field_table_zhaocarr${tbf}${tbp}" export nwat=2 ;; 6) # WSM6 export ncld=2 - export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_wsm6${tbf}${tbp}" + export FIELD_TABLE="${PARMglobal}/ufs/fv3/field_table_wsm6${tbf}${tbp}" export nwat=6 ;; 8) # Thompson export ncld=2 - export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_thompson_noaero_tke${tbp}" + export FIELD_TABLE="${PARMglobal}/ufs/fv3/field_table_thompson_noaero_tke${tbp}" export nwat=6 export cal_pre=".false." 
@@ -211,7 +211,7 @@ case ${imp_physics} in ;; 11) # GFDL export ncld=5 - export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_gfdl${tbf}${tbp}" + export FIELD_TABLE="${PARMglobal}/ufs/fv3/field_table_gfdl${tbf}${tbp}" export nwat=6 export dnats=1 export cal_pre=".false." @@ -260,7 +260,7 @@ export FSICS="0" #--------------------------------------------------------------------- # Write more variables to output -export DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table" +export DIAG_TABLE="${PARMglobal}/ufs/fv3/diag_table" # Write gfs restart files to rerun fcst from any break point export restart_interval=${restart_interval_gfs:-12} diff --git a/dev/parm/config/gefs/config.oceanice_products b/dev/parm/config/gefs/config.oceanice_products index 7b30de84bc5..8a33141cfd9 100644 --- a/dev/parm/config/gefs/config.oceanice_products +++ b/dev/parm/config/gefs/config.oceanice_products @@ -9,7 +9,7 @@ source "${EXPDIR}/config.resources" oceanice_products export write_grib2=False export write_netcdf=False -export OCEANICEPRODUCTS_CONFIG="${PARMgfs}/post/oceanice_products_${NET}.yaml" +export OCEANICEPRODUCTS_CONFIG="${PARMglobal}/post/oceanice_products_${NET}.yaml" # Maximum number of rocoto tasks per member export MAX_TASKS=25 diff --git a/dev/parm/config/gefs/config.stage_ic.j2 b/dev/parm/config/gefs/config.stage_ic.j2 index 8693fc170c8..f92c6b44c74 100644 --- a/dev/parm/config/gefs/config.stage_ic.j2 +++ b/dev/parm/config/gefs/config.stage_ic.j2 @@ -12,17 +12,17 @@ export BASE_IC="{{ BASE_IC }}" # Platform home for staged ICs if [[ ${RUN} == "gefs" ]] ; then if [[ ${GEFSTYPE} = "gefs-real-time" ]] ; then - export STAGE_IC_YAML_TMPL="${PARMgfs}/stage/master_gefs_RT.yaml.j2" + export STAGE_IC_YAML_TMPL="${PARMglobal}/stage/master_gefs_RT.yaml.j2" elif [[ ${GEFSTYPE} = "gefs-offline" ]] ; then - export STAGE_IC_YAML_TMPL="${PARMgfs}/stage/master_gefs.yaml.j2" + export STAGE_IC_YAML_TMPL="${PARMglobal}/stage/master_gefs.yaml.j2" else echo "ERROR: Invalid GEFSTYPE=${GEFSTYPE} for 
config.stage_ic" fi else - export STAGE_IC_YAML_TMPL="${PARMgfs}/stage/master_gefs.yaml.j2" + export STAGE_IC_YAML_TMPL="${PARMglobal}/stage/master_gefs.yaml.j2" fi -source "${HOMEgfs}/versions/ic.ver" +source "${HOMEglobal}/versions/ic.ver" if [[ ${EXP_WARM_START} = ".false." ]] ; then export DOIAU="NO" # Turn off for staging diff --git a/dev/parm/config/gefs/config.ufs b/dev/parm/config/gefs/config.ufs index 09b0ba2ef0e..bf5eacf1c37 100644 --- a/dev/parm/config/gefs/config.ufs +++ b/dev/parm/config/gefs/config.ufs @@ -493,26 +493,26 @@ fi # WW3 restart field variable is different for slow vs fast loop. Add WW3_RSTFLDS="ice" for slow loop variables based on coupling scheme. case "${model_list}" in atm) - default_template="${PARMgfs}/ufs/ufs.configure.atm${tmpl_suffix:-}.IN" + default_template="${PARMglobal}/ufs/ufs.configure.atm${tmpl_suffix:-}.IN" ;; atm.aero) - default_template="${PARMgfs}/ufs/ufs.configure.atmaero${tmpl_suffix:-}.IN" + default_template="${PARMglobal}/ufs/ufs.configure.atmaero${tmpl_suffix:-}.IN" ;; atm.wave) - default_template="${PARMgfs}/ufs/ufs.configure.leapfrog_atm_wav${tmpl_suffix:-}.IN" + default_template="${PARMglobal}/ufs/ufs.configure.leapfrog_atm_wav${tmpl_suffix:-}.IN" ;; atm.ocean.ice) - default_template="${PARMgfs}/ufs/ufs.configure.s2s${tmpl_suffix:-}.IN" + default_template="${PARMglobal}/ufs/ufs.configure.s2s${tmpl_suffix:-}.IN" ;; atm.ocean.ice.aero) - default_template="${PARMgfs}/ufs/ufs.configure.s2sa${tmpl_suffix:-}.IN" + default_template="${PARMglobal}/ufs/ufs.configure.s2sa${tmpl_suffix:-}.IN" ;; atm.ocean.ice.wave) - default_template="${PARMgfs}/ufs/ufs.configure.s2sw${tmpl_suffix:-}.IN" + default_template="${PARMglobal}/ufs/ufs.configure.s2sw${tmpl_suffix:-}.IN" WW3_RSTFLDS="ice" ;; atm.ocean.ice.wave.aero) - default_template="${PARMgfs}/ufs/ufs.configure.s2swa${tmpl_suffix:-}.IN" + default_template="${PARMglobal}/ufs/ufs.configure.s2swa${tmpl_suffix:-}.IN" WW3_RSTFLDS="ice" ;; *) diff --git 
a/dev/parm/config/gfs/config.aero.j2 b/dev/parm/config/gfs/config.aero.j2 index 2722212c01e..e4f958ae44f 100644 --- a/dev/parm/config/gfs/config.aero.j2 +++ b/dev/parm/config/gfs/config.aero.j2 @@ -17,7 +17,7 @@ echo "BEGIN: config.aero" # Base directory for aerosol input data files (e.g., initial conditions, climatologies). # This path is mounted or staged in the workflow and referenced by the model for reading aerosol fields. #--------------------------------------------------------------------------------------------------- -export AERO_INPUTS_DIR="${HOMEgfs}/fix/chem/Emission_data" +export AERO_INPUTS_DIR="${HOMEglobal}/fix/chem/Emission_data" #------------------------------------------------- # Diag Table and Field Table for GOCART aerosols @@ -27,8 +27,8 @@ export AERO_INPUTS_DIR="${HOMEgfs}/fix/chem/Emission_data" # diag_table.aero: Specifies which aerosol fields to output and at what frequency (used by FMS diagnostics). # field_table.aero: Registers prognostic/diagnostic tracers with the FV3 dynamical core (e.g., for advection, diffusion). #--------------------------------------------------------------------------------------------------- -export AERO_DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table.aero" -export AERO_FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table.aero" +export AERO_DIAG_TABLE="${PARMglobal}/ufs/fv3/diag_table.aero" +export AERO_FIELD_TABLE="${PARMglobal}/ufs/fv3/field_table.aero" #================================================================================ # Aerosol configuration @@ -37,7 +37,7 @@ export AERO_FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table.aero" # Directory containing GOCART-specific namelists, parameters, and runtime configs (e.g., namelist.aero). # Loaded during model initialization to set aerosol scheme parameters like time steps, vertical levels. 
#--------------------------------------------------------------------------------------------------- -export AERO_CONFIG_DIR="${PARMgfs}/ufs/gocart" +export AERO_CONFIG_DIR="${PARMglobal}/ufs/gocart" # Aerosol convective scavenging factors (list of string array elements) # Element syntax: ':'. Use = * to set default factor for all aerosol tracers @@ -81,7 +81,7 @@ export AERO_EMIS_FIRE_HIST=1 # Use historical fire emissions | 1 = true 0 = fals #--------------------------------------------------------------------------------------------------- export FIRE_EMIS_NRT_DIR="" #TODO: set to DCOM for WCOSS2 "${DCOMROOT}/YYYYMMDD/firewx" # Directory containing NRT fire emissions -export FIRE_EMIS_DIR="${HOMEgfs}/fix/chem/Emission_data/fires_data/GBBEPx/v4" # Directory containing historical fire emissions +export FIRE_EMIS_DIR="${HOMEglobal}/fix/chem/Emission_data/fires_data/GBBEPx/v4" # Directory containing historical fire emissions #=============================================================================== @@ -101,7 +101,7 @@ export NEXUS_CONFIG="{{ NEXUS_CONFIG | default('gocart') }}" # Options: gocart, # Runtime choice of NEXUS config variant; defaults to gocart for standard aerosol tracers. # Overrides via Jinja2 templating in workflow (e.g., for different chemistry schemes). #--------------------------------------------------------------------------------------------------- -export NEXUS_CONFIG_DIR="${PARMgfs}/chem/nexus/${NEXUS_CONFIG}" # Directory containing NEXUS configuration files +export NEXUS_CONFIG_DIR="${PARMglobal}/chem/nexus/${NEXUS_CONFIG}" # Directory containing NEXUS configuration files # NEXUS Inputs #--------------- @@ -112,7 +112,7 @@ export NEXUS_CONFIG_DIR="${PARMgfs}/chem/nexus/${NEXUS_CONFIG}" # Directory cont # Specific path for GCAFS external data on this filesystem. # Contains emission datasets (e.g., CEDS2019/2024, HTAPv2, CAMS) processed by NEXUS. 
#--------------------------------------------------------------------------------------------------- -export NEXUS_INPUT_DIR="${HOMEgfs}/fix/chem/Emission_data/nexus" +export NEXUS_INPUT_DIR="${HOMEglobal}/fix/chem/Emission_data/nexus" diff --git a/dev/parm/config/gfs/config.aeroanl.j2 b/dev/parm/config/gfs/config.aeroanl.j2 index 9ab7d302a7e..472d31954e3 100644 --- a/dev/parm/config/gfs/config.aeroanl.j2 +++ b/dev/parm/config/gfs/config.aeroanl.j2 @@ -21,8 +21,8 @@ export CASE_ANL export STATICB_TYPE='diffusion' -export TASK_CONFIG_YAML="${PARMgfs}/gdas/aero/aero_det_config.yaml.j2" -export OBS_LIST_YAML="${PARMgfs}/gdas/aero/aero_obs_list.yaml.j2" +export TASK_CONFIG_YAML="${PARMglobal}/gdas/aero/aero_det_config.yaml.j2" +export OBS_LIST_YAML="${PARMglobal}/gdas/aero/aero_obs_list.yaml.j2" export io_layout_x="{{ IO_LAYOUT_X }}" export io_layout_y="{{ IO_LAYOUT_Y }}" diff --git a/dev/parm/config/gfs/config.aeroanlgenb b/dev/parm/config/gfs/config.aeroanlgenb index 87353213761..78249242c83 100644 --- a/dev/parm/config/gfs/config.aeroanlgenb +++ b/dev/parm/config/gfs/config.aeroanlgenb @@ -8,8 +8,8 @@ echo "BEGIN: config.aeroanlgenb" # Get task specific resources source "${EXPDIR}/config.resources" aeroanlgenb -export TASK_CONFIG_YAML="${PARMgfs}/gdas/aero/aero_bmat_config.yaml.j2" -export OBS_LIST_YAML="${PARMgfs}/gdas/aero/aero_obs_list.yaml.j2" +export TASK_CONFIG_YAML="${PARMglobal}/gdas/aero/aero_bmat_config.yaml.j2" +export OBS_LIST_YAML="${PARMglobal}/gdas/aero/aero_obs_list.yaml.j2" export aero_diffusion_iter=200 export aero_diffusion_horiz_len=300e3 diff --git a/dev/parm/config/gfs/config.anal b/dev/parm/config/gfs/config.anal index 3b23931e7df..fe1beb02ad2 100644 --- a/dev/parm/config/gfs/config.anal +++ b/dev/parm/config/gfs/config.anal @@ -39,10 +39,10 @@ export AMSR2BF=${AMSR2BF:-/dev/null} # Set default values for info files and observation error # NOTE: Remember to set PRVT in config.prep as OBERROR is set below -export 
CONVINFO=${FIXgfs}/gsi/global_convinfo.txt -export OZINFO=${FIXgfs}/gsi/global_ozinfo.txt -export SATINFO=${FIXgfs}/gsi/global_satinfo.txt -export OBERROR=${FIXgfs}/gsi/prepobs_errtable.global +export CONVINFO=${FIXglobal}/gsi/global_convinfo.txt +export OZINFO=${FIXglobal}/gsi/global_ozinfo.txt +export SATINFO=${FIXglobal}/gsi/global_satinfo.txt +export OBERROR=${FIXglobal}/gsi/prepobs_errtable.global # Use 2m diagnostic for screen level obs export hofx_2m_sfcfile=".true." @@ -50,49 +50,49 @@ export hofx_2m_sfcfile=".true." if [[ ${DO_GSISOILDA} = "YES" ]]; then export reducedgrid=".false." # not possible for sfc analysis, Jeff Whitaker says it's not useful anyway # NOTE: convinfo here will be over-written by date-specific files below. - export CONVINFO=${FIXgfs}/gsi/global_convinfo_2mObs.txt - export ANAVINFO=${FIXgfs}/gsi/global_anavinfo_soilanal.l127.txt + export CONVINFO=${FIXglobal}/gsi/global_convinfo_2mObs.txt + export ANAVINFO=${FIXglobal}/gsi/global_anavinfo_soilanal.l127.txt fi # Use experimental dumps in EMC GFS v16 parallels if [[ ${RUN_ENVIR} == "emc" ]]; then # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then - export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2019021900 - export OBERROR=${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2019021900 + export CONVINFO=${FIXglobal}/gsi/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXglobal}/gsi/gfsv16_historical/prepobs_errtable.global.2019021900 fi # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then - export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2019110706 - export OBERROR=${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2019110706 + export 
CONVINFO=${FIXglobal}/gsi/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXglobal}/gsi/gfsv16_historical/prepobs_errtable.global.2019110706 fi # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "2020052612" ]]; then - export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2020040718 - export OBERROR=${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2020040718 + export CONVINFO=${FIXglobal}/gsi/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXglobal}/gsi/gfsv16_historical/prepobs_errtable.global.2020040718 fi # Assimilate COSMIC-2 if [[ "${PDY}${cyc}" -ge "2020052612" && "${PDY}${cyc}" -lt "2020082412" ]]; then - export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2020052612 - export OBERROR=${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2020040718 + export CONVINFO=${FIXglobal}/gsi/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXglobal}/gsi/gfsv16_historical/prepobs_errtable.global.2020040718 fi # Assimilate HDOB if [[ "${PDY}${cyc}" -ge "2020082412" && "${PDY}${cyc}" -lt "2020091612" ]]; then - export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2020082412 + export CONVINFO=${FIXglobal}/gsi/gfsv16_historical/global_convinfo.txt.2020082412 fi # Assimilate Metop-C GNSSRO if [[ "${PDY}${cyc}" -ge "2020091612" && "${PDY}${cyc}" -lt "2021031712" ]]; then - export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2020091612 + export CONVINFO=${FIXglobal}/gsi/gfsv16_historical/global_convinfo.txt.2020091612 fi # Assimilate DO-2 GeoOptics if [[ "${PDY}${cyc}" -ge "2021031712" && "${PDY}${cyc}" -lt "2021091612" ]]; then - export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2021031712 + export CONVINFO=${FIXglobal}/gsi/gfsv16_historical/global_convinfo.txt.2021031712 fi # NOTE: @@ -101,38 +101,38 @@ if [[ ${RUN_ENVIR} == "emc" ]]; then # needed at this 
time. # Assimilate COSMIC-2 GPS # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then - # export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2021110312 + # export CONVINFO=${FIXglobal}/gsi/gfsv16_historical/global_convinfo.txt.2021110312 # fi # Turn off assmilation of OMPS during period of bad data if [[ "${PDY}${cyc}" -ge "2020011600" && "${PDY}${cyc}" -lt "2020011806" ]]; then - export OZINFO=${FIXgfs}/gsi/gfsv16_historical/global_ozinfo.txt.2020011600 + export OZINFO=${FIXglobal}/gsi/gfsv16_historical/global_ozinfo.txt.2020011600 fi # Set satinfo for start of GFS v16 parallels if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then - export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2019021900 + export SATINFO=${FIXglobal}/gsi/gfsv16_historical/global_satinfo.txt.2019021900 fi # Turn on assimilation of Metop-C AMSUA and MHS if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020022012" ]]; then - export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2019110706 + export SATINFO=${FIXglobal}/gsi/gfsv16_historical/global_satinfo.txt.2019110706 fi # Turn off assimilation of Metop-A MHS if [[ "${PDY}${cyc}" -ge "2020022012" && "${PDY}${cyc}" -lt "2021052118" ]]; then - export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2020022012 + export SATINFO=${FIXglobal}/gsi/gfsv16_historical/global_satinfo.txt.2020022012 fi # Turn off assimilation of S-NPP CrIS if [[ "${PDY}${cyc}" -ge "2021052118" && "${PDY}${cyc}" -lt "2021092206" ]]; then - export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2021052118 + export SATINFO=${FIXglobal}/gsi/gfsv16_historical/global_satinfo.txt.2021052118 fi # Turn off assimilation of MetOp-A IASI if [[ "${PDY}${cyc}" -ge "2021092206" && "${PDY}${cyc}" -lt "2021102612" ]]; then - export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2021092206 + export 
SATINFO=${FIXglobal}/gsi/gfsv16_historical/global_satinfo.txt.2021092206 fi # NOTE: @@ -142,7 +142,7 @@ if [[ ${RUN_ENVIR} == "emc" ]]; then # # Turn off assmilation of all Metop-A MHS # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then - # export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2021110312 + # export SATINFO=${FIXglobal}/gsi/gfsv16_historical/global_satinfo.txt.2021110312 # fi fi diff --git a/dev/parm/config/gfs/config.analcalc_fv3jedi b/dev/parm/config/gfs/config.analcalc_fv3jedi index 233a5f694ac..431328cfca5 100644 --- a/dev/parm/config/gfs/config.analcalc_fv3jedi +++ b/dev/parm/config/gfs/config.analcalc_fv3jedi @@ -14,7 +14,7 @@ export layout_y_analcalc_fv3jedi=2 # Get task specific resources source "${EXPDIR}/config.resources" analcalc_fv3jedi -export TASK_CONFIG_YAML="${PARMgfs}/gdas/analcalc/analcalc_config.yaml.j2" +export TASK_CONFIG_YAML="${PARMglobal}/gdas/analcalc/analcalc_config.yaml.j2" if [[ ${DOHYBVAR} = "YES" ]]; then export CASE_ANL=${CASE_ENS} diff --git a/dev/parm/config/gfs/config.anlstat b/dev/parm/config/gfs/config.anlstat index c8afee9d95c..9ff4216bc93 100644 --- a/dev/parm/config/gfs/config.anlstat +++ b/dev/parm/config/gfs/config.anlstat @@ -8,6 +8,6 @@ echo "BEGIN: config.anlstat" # Get task specific resources source "${EXPDIR}/config.resources" anlstat -export TASK_CONFIG_YAML="${PARMgfs}/gdas/anlstat/anlstat_config.yaml.j2" +export TASK_CONFIG_YAML="${PARMglobal}/gdas/anlstat/anlstat_config.yaml.j2" echo "END: config.anlstat" diff --git a/dev/parm/config/gfs/config.atmanl.j2 b/dev/parm/config/gfs/config.atmanl.j2 index f7a6511d4e7..da2ee0f7f58 100644 --- a/dev/parm/config/gfs/config.atmanl.j2 +++ b/dev/parm/config/gfs/config.atmanl.j2 @@ -14,7 +14,7 @@ export layout_y_atmanl="{{ LAYOUT_Y_ATMANL }}" export io_layout_x="{{ IO_LAYOUT_X }}" export io_layout_y="{{ IO_LAYOUT_Y }}" -export TASK_CONFIG_YAML="${PARMgfs}/gdas/atm/atm_det_config.yaml.j2" +export 
TASK_CONFIG_YAML="${PARMglobal}/gdas/atm/atm_det_config.yaml.j2" export LOCALIZATION_TYPE="bump" diff --git a/dev/parm/config/gfs/config.atmensanl.j2 b/dev/parm/config/gfs/config.atmensanl.j2 index adb1108267f..25a194ee180 100644 --- a/dev/parm/config/gfs/config.atmensanl.j2 +++ b/dev/parm/config/gfs/config.atmensanl.j2 @@ -15,6 +15,6 @@ export layout_y_atmensanl="{{ LAYOUT_Y_ATMENSANL }}" export io_layout_x="{{ IO_LAYOUT_X }}" export io_layout_y="{{ IO_LAYOUT_Y }}" -export TASK_CONFIG_YAML="${PARMgfs}/gdas/atm/atm_ens_config.yaml.j2" +export TASK_CONFIG_YAML="${PARMglobal}/gdas/atm/atm_ens_config.yaml.j2" echo "END: config.atmensanl" diff --git a/dev/parm/config/gfs/config.atmos_products b/dev/parm/config/gfs/config.atmos_products index e234d61e13c..38a6f707915 100644 --- a/dev/parm/config/gfs/config.atmos_products +++ b/dev/parm/config/gfs/config.atmos_products @@ -25,9 +25,9 @@ fi # paramlist files for the different forecast hours and downsets (a, b) export downset=2 -export paramlista="${PARMgfs}/product/gfs.fFFF.paramlist.a.txt" -export paramlista_anl="${PARMgfs}/product/gfs.anl.paramlist.a.txt" -export paramlista_f000="${PARMgfs}/product/gfs.f000.paramlist.a.txt" -export paramlistb="${PARMgfs}/product/gfs.fFFF.paramlist.b.txt" +export paramlista="${PARMglobal}/product/gfs.fFFF.paramlist.a.txt" +export paramlista_anl="${PARMglobal}/product/gfs.anl.paramlist.a.txt" +export paramlista_f000="${PARMglobal}/product/gfs.f000.paramlist.a.txt" +export paramlistb="${PARMglobal}/product/gfs.fFFF.paramlist.b.txt" echo "END: config.atmos_products" diff --git a/dev/parm/config/gfs/config.base.j2 b/dev/parm/config/gfs/config.base.j2 index 31231939575..a487739c77a 100644 --- a/dev/parm/config/gfs/config.base.j2 +++ b/dev/parm/config/gfs/config.base.j2 @@ -28,21 +28,21 @@ export CLUSTERS_DTN="{{ CLUSTERS_DTN | default('${CLUSTERS_SERVICE}') }}" export HPSS_PROJECT="{{ HPSS_PROJECT }}" # Directories relative to installation areas: -export HOMEgfs="{{ HOMEgfs }}" -export 
EXECgfs="${HOMEgfs}/exec" -export FIXgfs="${HOMEgfs}/fix" -export PARMgfs="${HOMEgfs}/parm" -export SCRgfs="${HOMEgfs}/scripts" -export USHgfs="${HOMEgfs}/ush" - -export FIXam="${FIXgfs}/am" -export FIXaer="${FIXgfs}/aer" -export FIXcpl="${FIXgfs}/cpl" -export FIXlut="${FIXgfs}/lut" -export FIXcice="${FIXgfs}/cice" -export FIXmom="${FIXgfs}/mom6" -export FIXreg2grb2="${FIXgfs}/reg2grb2" -export FIXgdas="${FIXgfs}/gdas" +export HOMEglobal="{{ HOMEglobal }}" +export EXECglobal="${HOMEglobal}/exec" +export FIXglobal="${HOMEglobal}/fix" +export PARMglobal="${HOMEglobal}/parm" +export SCRglobal="${HOMEglobal}/scripts" +export USHglobal="${HOMEglobal}/ush" + +export FIXam="${FIXglobal}/am" +export FIXaer="${FIXglobal}/aer" +export FIXcpl="${FIXglobal}/cpl" +export FIXlut="${FIXglobal}/lut" +export FIXcice="${FIXglobal}/cice" +export FIXmom="${FIXglobal}/mom6" +export FIXreg2grb2="${FIXglobal}/reg2grb2" +export FIXgdas="${FIXglobal}/gdas" ######################################################################## @@ -99,9 +99,9 @@ export DO_TEST_MODE="{{ DO_TEST_MODE }}" # option to change configuration for au # FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT # CLEAR #################################################### -# Build paths relative to $HOMEgfs -export FIXgsi="${HOMEgfs}/fix/gsi" -export HOMEpost="${HOMEgfs}" +# Build paths relative to $HOMEglobal +export FIXgsi="${HOMEglobal}/fix/gsi" +export HOMEpost="${HOMEglobal}" export HOMEobsproc="${BASE_GIT:-}/obsproc/v${obsproc_run_ver:-}" # CONVENIENT utility scripts and other environment parameters @@ -113,10 +113,10 @@ export DEBUG_POSTSCRIPT="NO" # PBS only; sets debug=true export CHGRP_RSTPROD="{{ CHGRP_RSTPROD }}" export CHGRP_CMD="{{ CHGRP_CMD }}" export NCDUMP="${NETCDF:-${netcdf_c_ROOT:-}}/bin/ncdump" -export NCLEN="${HOMEgfs}/ush/getncdimlen" +export NCLEN="${HOMEglobal}/ush/getncdimlen" # Machine environment, jobs, and other utility scripts -export BASE_ENV="${HOMEgfs}/env" +export BASE_ENV="${HOMEglobal}/env" 
# EXPERIMENT specific environment parameters export SDATE="{{ SDATE }}" @@ -190,12 +190,12 @@ export DO_NEST="NO" # Whether to run a global-nested domain if [[ "${DO_NEST:-NO}" == "YES" ]] ; then export ntiles=7 export NEST_OUTPUT_GRID="regional_latlon" - export FIXugwd="${FIXgfs}/ugwd_nest" - export FIXorog="${FIXgfs}/orog_nest" + export FIXugwd="${FIXglobal}/ugwd_nest" + export FIXorog="${FIXglobal}/orog_nest" else export ntiles=6 - export FIXugwd="${FIXgfs}/ugwd" - export FIXorog="${FIXgfs}/orog" + export FIXugwd="${FIXglobal}/ugwd" + export FIXorog="${FIXglobal}/orog" fi # Set operational resolution @@ -390,7 +390,7 @@ export l4densvar=".true." export lwrite4danl=".true." export DO_CALC_INCREMENT="NO" export USE_BUILD_GSINFO="YES" -export BUILD_GSINFO_DIR="${PARMgfs}/gsinfo" +export BUILD_GSINFO_DIR="${PARMglobal}/gsinfo" # Early-cycle EnKF parameters export NMEM_ENS_GFS="{{ NMEM_ENS_GFS }}" diff --git a/dev/parm/config/gfs/config.ecen_fv3jedi b/dev/parm/config/gfs/config.ecen_fv3jedi index 768140b2654..0550d8600ad 100644 --- a/dev/parm/config/gfs/config.ecen_fv3jedi +++ b/dev/parm/config/gfs/config.ecen_fv3jedi @@ -14,7 +14,7 @@ export layout_y_ecen_fv3jedi=1 # Get task specific resources source "${EXPDIR}/config.resources" ecen_fv3jedi -export TASK_CONFIG_YAML="${PARMgfs}/gdas/atm/atm_ecen_config.yaml.j2" +export TASK_CONFIG_YAML="${PARMglobal}/gdas/atm/atm_ecen_config.yaml.j2" if [[ ${DOHYBVAR} = "YES" ]]; then export CASE_ANL=${CASE_ENS} diff --git a/dev/parm/config/gfs/config.efcs b/dev/parm/config/gfs/config.efcs index 013188a074e..cbf260b2508 100644 --- a/dev/parm/config/gfs/config.efcs +++ b/dev/parm/config/gfs/config.efcs @@ -70,9 +70,9 @@ export SPPT_LOGIT=".true." export SPPT_SFCLIMIT=".true." if [[ "${QUILTING}" == ".true." 
]] && [[ "${OUTPUT_GRID}" == "gaussian_grid" ]]; then - export DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table_da" + export DIAG_TABLE="${PARMglobal}/ufs/fv3/diag_table_da" else - export DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table_da_orig" + export DIAG_TABLE="${PARMglobal}/ufs/fv3/diag_table_da_orig" fi # Model config option for Ensemble diff --git a/dev/parm/config/gfs/config.esfc b/dev/parm/config/gfs/config.esfc index 0c35240dc31..80e6877a9c0 100644 --- a/dev/parm/config/gfs/config.esfc +++ b/dev/parm/config/gfs/config.esfc @@ -14,8 +14,8 @@ if [[ "${DO_OCN}" == "YES" && "${DO_ICE}" == "YES" ]]; then fi # With IAU only need surface analysis at start of IAU window. -# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at -# center of analysis window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. if [[ ${DOIAU_ENKF} = "YES" ]]; then export DOSFCANL_ENKF="NO" @@ -26,20 +26,20 @@ if [[ "${DO_JEDIATMENS}" == "YES" || "${DO_JEDIATMVAR}" == "YES" ]]; then export DONST="NO" fi -if [[ "${RUN/enkf}" == "gfs" ]]; then +if [[ "${RUN/enkf}" == "gfs" ]]; then echo "turning off gsi soilda for gfs run" DO_GSISOILDA="NO" fi # set up soil analysis if [[ ${DO_GSISOILDA} == "YES" ]]; then - if [[ "${DO_LAND_IAU}" = ".true." ]]; then + if [[ "${DO_LAND_IAU}" = ".true." ]]; then export GCYCLE_DO_SOILINCR=".false." else export GCYCLE_DO_SOILINCR=".true." fi export GCYCLE_INTERP_LANDINCR=".false." 
- export REGRID_EXEC="${HOMEgfs}/exec/regridStates.x" + export REGRID_EXEC="${HOMEglobal}/exec/regridStates.x" fi echo "END: config.esfc" diff --git a/dev/parm/config/gfs/config.esnowanl.j2 b/dev/parm/config/gfs/config.esnowanl.j2 index 086b3620e90..5d04cfb06ea 100644 --- a/dev/parm/config/gfs/config.esnowanl.j2 +++ b/dev/parm/config/gfs/config.esnowanl.j2 @@ -9,18 +9,18 @@ echo "BEGIN: config.esnowanl" source "${EXPDIR}/config.resources" esnowanl # Name of the executable that applies increment to bkg and its namelist template -export APPLY_INCR_EXE="${EXECgfs}/gdas_apply_incr.x" -export ENS_APPLY_INCR_NML_TMPL="${PARMgfs}/gdas/snow/ens_apply_incr_nml.j2" +export APPLY_INCR_EXE="${EXECglobal}/gdas_apply_incr.x" +export ENS_APPLY_INCR_NML_TMPL="${PARMglobal}/gdas/snow/ens_apply_incr_nml.j2" -export TASK_CONFIG_YAML="${PARMgfs}/gdas/snow/snow_ens_config.yaml.j2" -export OBS_LIST_YAML="${PARMgfs}/gdas/snow/snow_obs_list.yaml.j2" +export TASK_CONFIG_YAML="${PARMglobal}/gdas/snow/snow_ens_config.yaml.j2" +export OBS_LIST_YAML="${PARMglobal}/gdas/snow/snow_obs_list.yaml.j2" export ims_scf_obs_suffix="asc" # asc-ascii; nc-netcdf export fail_on_missing_snowobs=False # False: just warn; True: fail & exit -export PREP_SNOCVR_SNOMAD_YAML="${PARMgfs}/gdas/snow/prep/prep_snocvr_snomad.yaml.j2" -export OBSBUILDER="${USHgfs}/bufr_snocvr_snomad.py" -export PREP_GHCN_YAML="${PARMgfs}/gdas/snow/prep/prep_ghcn.yaml.j2" -export GHCN2IODACONV="${USHgfs}/ghcn_snod2ioda.py" +export PREP_SNOCVR_SNOMAD_YAML="${PARMglobal}/gdas/snow/prep/prep_snocvr_snomad.yaml.j2" +export OBSBUILDER="${USHglobal}/bufr_snocvr_snomad.py" +export PREP_GHCN_YAML="${PARMglobal}/gdas/snow/prep/prep_ghcn.yaml.j2" +export GHCN2IODACONV="${USHglobal}/ghcn_snod2ioda.py" export io_layout_x="{{ IO_LAYOUT_X }}" export io_layout_y="{{ IO_LAYOUT_Y }}" diff --git a/dev/parm/config/gfs/config.fcst.j2 b/dev/parm/config/gfs/config.fcst.j2 index c362eae36fa..edb5c4c02c2 100644 --- a/dev/parm/config/gfs/config.fcst.j2 +++ 
b/dev/parm/config/gfs/config.fcst.j2 @@ -72,8 +72,8 @@ export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_O ####################################################################### -export FORECASTSH="${SCRgfs}/exglobal_forecast.sh" -#export FORECASTSH="${SCRgfs}/exglobal_forecast.py" # Temp. while this is worked on +export FORECASTSH="${SCRglobal}/exglobal_forecast.sh" +#export FORECASTSH="${SCRglobal}/exglobal_forecast.py" # Temp. while this is worked on export FCSTEXEC="gfs_model.x" ####################################################################### @@ -188,17 +188,17 @@ export random_clds=".true." case ${imp_physics} in 99) # ZhaoCarr export ncld=1 - export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_zhaocarr${tbf}${tbp}" + export FIELD_TABLE="${PARMglobal}/ufs/fv3/field_table_zhaocarr${tbf}${tbp}" export nwat=2 ;; 6) # WSM6 export ncld=2 - export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_wsm6${tbf}${tbp}" + export FIELD_TABLE="${PARMglobal}/ufs/fv3/field_table_wsm6${tbf}${tbp}" export nwat=6 ;; 8) # Thompson export ncld=2 - export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_thompson_noaero_tke${tbp}" + export FIELD_TABLE="${PARMglobal}/ufs/fv3/field_table_thompson_noaero_tke${tbp}" export nwat=6 export cal_pre=".false." @@ -222,7 +222,7 @@ case ${imp_physics} in export d4_bg=0.12 if [[ "${CCPP_SUITE}" == "FV3_global_nest"* ]]; then - export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_thompson_aero_tke${tbp}" + export FIELD_TABLE="${PARMglobal}/ufs/fv3/field_table_thompson_aero_tke${tbp}" export ltaerosol=".true." export lcnorm=".true." export do_mynnedmf=".true." @@ -237,7 +237,7 @@ case ${imp_physics} in ;; 11) # GFDL export ncld=5 - export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_gfdl${tbf}${tbp}" + export FIELD_TABLE="${PARMglobal}/ufs/fv3/field_table_gfdl${tbf}${tbp}" export nwat=6 export dnats=1 export cal_pre=".false." 
@@ -286,7 +286,7 @@ export pbl_taper="0,0,0,0.1,0.2,0.4,0.6" if [[ "${RUN}" =~ "gdas" ]] ; then # GDAS cycle specific parameters # Variables used in DA cycling - export DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table_da" + export DIAG_TABLE="${PARMglobal}/ufs/fv3/diag_table_da" # Write gfs restart files to rerun fcst from any break point export restart_interval=${restart_interval_gdas:-6} @@ -297,7 +297,7 @@ if [[ "${RUN}" =~ "gdas" ]] ; then # GDAS cycle specific parameters elif [[ "${RUN}" =~ "gfs" ]] ; then # GFS cycle specific parameters # Write more variables to output - export DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table" + export DIAG_TABLE="${PARMglobal}/ufs/fv3/diag_table" # Write gfs restart files to rerun fcst from any break point export restart_interval=${restart_interval_gfs:-12} diff --git a/dev/parm/config/gfs/config.fetch b/dev/parm/config/gfs/config.fetch index 86ab5e3e2f8..78ad7f4a7cd 100644 --- a/dev/parm/config/gfs/config.fetch +++ b/dev/parm/config/gfs/config.fetch @@ -14,6 +14,6 @@ else ic_type="warm" fi -export FETCH_YAML_TMPL="${PARMgfs}/fetch/${NET}_${APP}_${ic_type}_${MODE}.yaml.j2" +export FETCH_YAML_TMPL="${PARMglobal}/fetch/${NET}_${APP}_${ic_type}_${MODE}.yaml.j2" echo "END: config.fetch" diff --git a/dev/parm/config/gfs/config.fit2obs b/dev/parm/config/gfs/config.fit2obs index 7d9e3aad4a0..52cbe9413ab 100644 --- a/dev/parm/config/gfs/config.fit2obs +++ b/dev/parm/config/gfs/config.fit2obs @@ -8,8 +8,8 @@ echo "BEGIN: config.fit2obs" # Get task specific resources . 
"${EXPDIR}/config.resources" fit2obs -export PRVT=${FIXgfs}/gsi/prepobs_errtable.global -export HYBLEVS=${FIXgfs}/am/global_hyblev.l${LEVS}.txt +export PRVT=${FIXglobal}/gsi/prepobs_errtable.global +export HYBLEVS=${FIXglobal}/am/global_hyblev.l${LEVS}.txt export COM_VRFYARCH="${ROTDIR}/vrfyarch" diff --git a/dev/parm/config/gfs/config.marineanl.j2 b/dev/parm/config/gfs/config.marineanl.j2 index 04fa53411c0..60f71bbc4a9 100644 --- a/dev/parm/config/gfs/config.marineanl.j2 +++ b/dev/parm/config/gfs/config.marineanl.j2 @@ -13,6 +13,6 @@ export ANL_GEOM="{{ SOCA_ANL_GEOM }}" export NINNER="{{ SOCA_NINNER }}" export OBS_LIST_YAML="{{ SOCA_OBS_LIST }}" -export TASK_CONFIG_YAML="${PARMgfs}/gdas/marine/marine_det_config.yaml.j2" +export TASK_CONFIG_YAML="${PARMglobal}/gdas/marine/marine_det_config.yaml.j2" echo "END: config.marineanl" diff --git a/dev/parm/config/gfs/config.marineanlecen.j2 b/dev/parm/config/gfs/config.marineanlecen.j2 index 30c0d056804..84415562b28 100644 --- a/dev/parm/config/gfs/config.marineanlecen.j2 +++ b/dev/parm/config/gfs/config.marineanlecen.j2 @@ -11,6 +11,6 @@ source "${EXPDIR}/config.resources" marineanlecen export INPUT_FIX_DIR="{{ SOCA_INPUT_FIX_DIR }}" export ANL_GEOM="{{ SOCA_ANL_GEOM }}" -export TASK_CONFIG_YAML="${PARMgfs}/gdas/marine/marine_ecen_config.yaml.j2" +export TASK_CONFIG_YAML="${PARMglobal}/gdas/marine/marine_ecen_config.yaml.j2" echo "END: config.marineanlecen" diff --git a/dev/parm/config/gfs/config.marineanlletkf.j2 b/dev/parm/config/gfs/config.marineanlletkf.j2 index 19e238e51e1..07ac5d7a75e 100644 --- a/dev/parm/config/gfs/config.marineanlletkf.j2 +++ b/dev/parm/config/gfs/config.marineanlletkf.j2 @@ -12,6 +12,6 @@ export INPUT_FIX_DIR="{{ SOCA_INPUT_FIX_DIR }}" export ANL_GEOM="{{ SOCA_ANL_GEOM }}" export OBS_LIST_YAML="{{ SOCA_OBS_LIST }}" -export TASK_CONFIG_YAML="${PARMgfs}/gdas/marine/marine_ens_config.yaml.j2" +export TASK_CONFIG_YAML="${PARMglobal}/gdas/marine/marine_ens_config.yaml.j2" echo "END: 
config.marineanlletkf" diff --git a/dev/parm/config/gfs/config.marinebmat.j2 b/dev/parm/config/gfs/config.marinebmat.j2 index ea342c0764d..7c9676dab29 100644 --- a/dev/parm/config/gfs/config.marinebmat.j2 +++ b/dev/parm/config/gfs/config.marinebmat.j2 @@ -11,6 +11,6 @@ source "${EXPDIR}/config.resources" marinebmat export INPUT_FIX_DIR="{{ SOCA_INPUT_FIX_DIR }}" export ANL_GEOM="{{ SOCA_ANL_GEOM }}" -export TASK_CONFIG_YAML="${PARMgfs}/gdas/marine/marine_bmat_config.yaml.j2" +export TASK_CONFIG_YAML="${PARMglobal}/gdas/marine/marine_bmat_config.yaml.j2" echo "END: config.marinebmat" diff --git a/dev/parm/config/gfs/config.metp b/dev/parm/config/gfs/config.metp index 564966fd6d2..b15622eaf77 100644 --- a/dev/parm/config/gfs/config.metp +++ b/dev/parm/config/gfs/config.metp @@ -19,7 +19,7 @@ export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus # METplus: Verify grid-to-grid, grid-to-obs, precipitation options #---------------------------------------------------------- ## EMC_VERIF_GLOBAL SETTINGS -export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export HOMEverif_global=${HOMEglobal}/sorc/verif-global.fd export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh ## INPUT DATA SETTINGS export model=${PSLOT} diff --git a/dev/parm/config/gfs/config.oceanice_products b/dev/parm/config/gfs/config.oceanice_products index 7f1d94bf0b9..7b51d8e9afb 100644 --- a/dev/parm/config/gfs/config.oceanice_products +++ b/dev/parm/config/gfs/config.oceanice_products @@ -12,7 +12,7 @@ export MAX_TASKS=25 export write_grib2=False export write_netcdf=False -export OCEANICEPRODUCTS_CONFIG="${PARMgfs}/post/oceanice_products_gfs.yaml" +export OCEANICEPRODUCTS_CONFIG="${PARMglobal}/post/oceanice_products_gfs.yaml" # No. 
of forecast hours to process in a single job export NFHRS_PER_GROUP=3 diff --git a/dev/parm/config/gfs/config.prep.j2 b/dev/parm/config/gfs/config.prep.j2 index a8b8f0c619a..235c866f283 100644 --- a/dev/parm/config/gfs/config.prep.j2 +++ b/dev/parm/config/gfs/config.prep.j2 @@ -12,7 +12,7 @@ export cdate10=${PDY}${cyc} # Relocation and syndata QC export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} -export TROPCYQCRELOSH="${SCRgfs}/exglobal_atmos_tropcy_qc_reloc.sh" +export TROPCYQCRELOSH="${SCRglobal}/exglobal_atmos_tropcy_qc_reloc.sh" export COMINsyn=${COMINsyn:-$(compath.py "${envir}/com/gfs/${gfs_ver}")/syndat} # Allow users to control the generation or use of either operational or @@ -31,17 +31,17 @@ fi # NOTE: Remember to set OBERROR in config.anal as PRVT is set below # # Set default prepobs_errtable.global -export PRVT="${FIXgfs}/gsi/prepobs_errtable.global" +export PRVT="${FIXglobal}/gsi/prepobs_errtable.global" # Set prepobs.errtable.global for GFS v16 retrospective parallels if [[ ${RUN_ENVIR} == "emc" ]]; then if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then - export PRVT="${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2019021900" + export PRVT="${FIXglobal}/gsi/gfsv16_historical/prepobs_errtable.global.2019021900" fi # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then - export PRVT="${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2019110706" + export PRVT="${FIXglobal}/gsi/gfsv16_historical/prepobs_errtable.global.2019110706" fi # NOTE: diff --git a/dev/parm/config/gfs/config.prepoceanobs.j2 b/dev/parm/config/gfs/config.prepoceanobs.j2 index 59ac7dbe8ae..ebbe8e14aa9 100644 --- a/dev/parm/config/gfs/config.prepoceanobs.j2 +++ b/dev/parm/config/gfs/config.prepoceanobs.j2 @@ -12,7 +12,7 @@ fi export DMPDIR="${dmpdir_exp:-${DMPDIR}}" -export 
MARINE_JCB_GDAS_OBS="${PARMgfs}/gdas/jcb-gdas/observations/marine" +export MARINE_JCB_GDAS_OBS="${PARMglobal}/gdas/jcb-gdas/observations/marine" # Get task specific resources . "${EXPDIR}/config.resources" prepoceanobs diff --git a/dev/parm/config/gfs/config.sfcanl b/dev/parm/config/gfs/config.sfcanl index 563804500ac..891c2155c12 100644 --- a/dev/parm/config/gfs/config.sfcanl +++ b/dev/parm/config/gfs/config.sfcanl @@ -18,7 +18,7 @@ if [[ "${DO_JEDIATMVAR}" == "YES" ]]; then export DONST="NO" fi -if [[ "${RUN/enkf}" == "gfs" ]]; then +if [[ "${RUN/enkf}" == "gfs" ]]; then echo "turning off gsi soilda for gfs run" DO_GSISOILDA="NO" fi @@ -30,7 +30,7 @@ if [[ "${DO_GSISOILDA}" == "YES" ]]; then export GCYCLE_DO_SOILINCR=".true." fi export GCYCLE_INTERP_LANDINCR=".false." - export REGRID_EXEC="${HOMEgfs}/exec/regridStates.x" + export REGRID_EXEC="${HOMEglobal}/exec/regridStates.x" fi echo "END: config.sfcanl" diff --git a/dev/parm/config/gfs/config.snowanl.j2 b/dev/parm/config/gfs/config.snowanl.j2 index a76e5842464..0dce9b878ec 100644 --- a/dev/parm/config/gfs/config.snowanl.j2 +++ b/dev/parm/config/gfs/config.snowanl.j2 @@ -9,18 +9,18 @@ echo "BEGIN: config.snowanl" source "${EXPDIR}/config.resources" snowanl # Name of the executable that applies increment to bkg and its namelist template -export APPLY_INCR_EXE="${EXECgfs}/gdas_apply_incr.x" -export APPLY_INCR_NML_TMPL="${PARMgfs}/gdas/snow/apply_incr_nml.j2" +export APPLY_INCR_EXE="${EXECglobal}/gdas_apply_incr.x" +export APPLY_INCR_NML_TMPL="${PARMglobal}/gdas/snow/apply_incr_nml.j2" -export TASK_CONFIG_YAML="${PARMgfs}/gdas/snow/snow_det_config.yaml.j2" -export OBS_LIST_YAML="${PARMgfs}/gdas/snow/snow_obs_list.yaml.j2" +export TASK_CONFIG_YAML="${PARMglobal}/gdas/snow/snow_det_config.yaml.j2" +export OBS_LIST_YAML="${PARMglobal}/gdas/snow/snow_obs_list.yaml.j2" export ims_scf_obs_suffix="asc" # asc-ascii; nc-netcdf export fail_on_missing_snowobs=False # False: just warn; True: fail & exit -export 
PREP_SNOCVR_SNOMAD_YAML="${PARMgfs}/gdas/snow/prep/prep_snocvr_snomad.yaml.j2" -export OBSBUILDER="${USHgfs}/bufr_snocvr_snomad.py" -export PREP_GHCN_YAML="${PARMgfs}/gdas/snow/prep/prep_ghcn.yaml.j2" -export GHCN2IODACONV="${USHgfs}/ghcn_snod2ioda.py" +export PREP_SNOCVR_SNOMAD_YAML="${PARMglobal}/gdas/snow/prep/prep_snocvr_snomad.yaml.j2" +export OBSBUILDER="${USHglobal}/bufr_snocvr_snomad.py" +export PREP_GHCN_YAML="${PARMglobal}/gdas/snow/prep/prep_ghcn.yaml.j2" +export GHCN2IODACONV="${USHglobal}/ghcn_snod2ioda.py" export io_layout_x="{{ IO_LAYOUT_X }}" export io_layout_y="{{ IO_LAYOUT_Y }}" diff --git a/dev/parm/config/gfs/config.stage_ic.j2 b/dev/parm/config/gfs/config.stage_ic.j2 index 66b2d375332..e2da453c082 100644 --- a/dev/parm/config/gfs/config.stage_ic.j2 +++ b/dev/parm/config/gfs/config.stage_ic.j2 @@ -15,9 +15,9 @@ fi export BASE_IC="{{ BASE_IC }}" # Platform home for staged ICs -export STAGE_IC_YAML_TMPL="${PARMgfs}/stage/master_${NET}.yaml.j2" +export STAGE_IC_YAML_TMPL="${PARMglobal}/stage/master_${NET}.yaml.j2" -source "${HOMEgfs}/versions/ic.ver" +source "${HOMEglobal}/versions/ic.ver" if [[ ${EXP_WARM_START} = ".false." ]] ; then export DOIAU="NO" # Turn off for staging diff --git a/dev/parm/config/gfs/config.ufs b/dev/parm/config/gfs/config.ufs index 411b0461b46..43c1e6ed4ba 100644 --- a/dev/parm/config/gfs/config.ufs +++ b/dev/parm/config/gfs/config.ufs @@ -653,26 +653,26 @@ fi # WW3 restart field variable is different for slow vs fast loop. Add WW3_RSTFLDS="ice" for slow loop variables based on coupling scheme. 
case "${model_list}" in atm) - default_template="${PARMgfs}/ufs/ufs.configure.atm${tmpl_suffix:-}.IN" + default_template="${PARMglobal}/ufs/ufs.configure.atm${tmpl_suffix:-}.IN" ;; atm.aero) - default_template="${PARMgfs}/ufs/ufs.configure.atmaero${tmpl_suffix:-}.IN" + default_template="${PARMglobal}/ufs/ufs.configure.atmaero${tmpl_suffix:-}.IN" ;; atm.wave) - default_template="${PARMgfs}/ufs/ufs.configure.leapfrog_atm_wav${tmpl_suffix:-}.IN" + default_template="${PARMglobal}/ufs/ufs.configure.leapfrog_atm_wav${tmpl_suffix:-}.IN" ;; atm.ocean.ice) - default_template="${PARMgfs}/ufs/ufs.configure.s2s${tmpl_suffix:-}.IN" + default_template="${PARMglobal}/ufs/ufs.configure.s2s${tmpl_suffix:-}.IN" ;; atm.ocean.ice.aero) - default_template="${PARMgfs}/ufs/ufs.configure.s2sa${tmpl_suffix:-}.IN" + default_template="${PARMglobal}/ufs/ufs.configure.s2sa${tmpl_suffix:-}.IN" ;; atm.ocean.ice.wave) - default_template="${PARMgfs}/ufs/ufs.configure.s2sw${tmpl_suffix:-}.IN" + default_template="${PARMglobal}/ufs/ufs.configure.s2sw${tmpl_suffix:-}.IN" WW3_RSTFLDS="ice" ;; atm.ocean.ice.wave.aero) - default_template="${PARMgfs}/ufs/ufs.configure.s2swa${tmpl_suffix:-}.IN" + default_template="${PARMglobal}/ufs/ufs.configure.s2swa${tmpl_suffix:-}.IN" WW3_RSTFLDS="ice" ;; *) diff --git a/dev/parm/config/gfs/config.upp b/dev/parm/config/gfs/config.upp index 41015c2feee..41e5dcd231f 100644 --- a/dev/parm/config/gfs/config.upp +++ b/dev/parm/config/gfs/config.upp @@ -8,7 +8,7 @@ echo "BEGIN: config.upp" # Get task specific resources . "${EXPDIR}/config.resources" upp -export UPP_CONFIG="${PARMgfs}/post/upp.yaml" +export UPP_CONFIG="${PARMglobal}/post/upp.yaml" # No. 
of forecast hours to process in a single job export NFHRS_PER_GROUP=3 diff --git a/dev/parm/config/gfs/config.verfozn b/dev/parm/config/gfs/config.verfozn index df7d18012de..e0f01af9228 100644 --- a/dev/parm/config/gfs/config.verfozn +++ b/dev/parm/config/gfs/config.verfozn @@ -9,14 +9,14 @@ echo "BEGIN: config.verfozn" export DO_DATA_RPT=1 export OZN_AREA="glb" export OZNMON_SUFFIX=${NET} -export SATYPE_FILE=${PARMgfs}/monitor/gdas_oznmon_satype.txt +export SATYPE_FILE=${PARMglobal}/monitor/gdas_oznmon_satype.txt # Source the parm file -. "${PARMgfs}/monitor/gdas_oznmon.parm" +. "${PARMglobal}/monitor/gdas_oznmon.parm" # Set up validation file if [[ ${VALIDATE_DATA} -eq 1 ]]; then - export ozn_val_file=${PARMgfs}/monitor/gdas_oznmon_base.tar + export ozn_val_file=${PARMglobal}/monitor/gdas_oznmon_base.tar fi echo "END: config.verfozn" diff --git a/dev/parm/config/gfs/config.verfrad b/dev/parm/config/gfs/config.verfrad index 506ce50b4f6..e90a897fe6a 100644 --- a/dev/parm/config/gfs/config.verfrad +++ b/dev/parm/config/gfs/config.verfrad @@ -6,10 +6,10 @@ echo "BEGIN: config.verfrad" # Get task specific resources . "${EXPDIR}/config.resources" verfrad -export satype_file=${PARMgfs}/monitor/gdas_radmon_satype.txt +export satype_file=${PARMglobal}/monitor/gdas_radmon_satype.txt # Source the parm file -. "${PARMgfs}/monitor/da_mon.parm" +. 
"${PARMglobal}/monitor/da_mon.parm" # Other variables export RAD_AREA="glb" diff --git a/dev/parm/config/gfs/config.vminmon b/dev/parm/config/gfs/config.vminmon index 7c7d362161f..29b9e20dced 100644 --- a/dev/parm/config/gfs/config.vminmon +++ b/dev/parm/config/gfs/config.vminmon @@ -9,7 +9,7 @@ echo "BEGIN: config.vminmon" export MINMON_SUFFIX=${MINMON_SUFFIX:-${NET}} export CYCLE_INTERVAL=${assim_freq:-6} -export mm_gnormfile=${PARMgfs}/monitor/${RUN}_minmon_gnorm.txt -export mm_costfile=${PARMgfs}/monitor/${RUN}_minmon_cost.txt +export mm_gnormfile=${PARMglobal}/monitor/${RUN}_minmon_gnorm.txt +export mm_costfile=${PARMglobal}/monitor/${RUN}_minmon_cost.txt echo "END: config.vminmon" diff --git a/dev/parm/config/gfs/yaml/defaults.yaml b/dev/parm/config/gfs/yaml/defaults.yaml index 684d45133d0..9ad0dee5b57 100644 --- a/dev/parm/config/gfs/yaml/defaults.yaml +++ b/dev/parm/config/gfs/yaml/defaults.yaml @@ -35,7 +35,7 @@ atmanl: LAYOUT_Y_ATMANL: 8 IO_LAYOUT_X: 1 IO_LAYOUT_Y: 1 - OBS_LIST_YAML: "${PARMgfs}/gdas/atm/atm_obs_list.yaml.j2" + OBS_LIST_YAML: "${PARMglobal}/gdas/atm/atm_obs_list.yaml.j2" VAR_JEDI_TEST_YAML: "" FV3INC_JEDI_TEST_YAML: "" @@ -44,7 +44,7 @@ atmensanl: LAYOUT_Y_ATMENSANL: 8 IO_LAYOUT_X: 1 IO_LAYOUT_Y: 1 - OBS_LIST_YAML: "${PARMgfs}/gdas/atm/atm_obs_list.yaml.j2" + OBS_LIST_YAML: "${PARMglobal}/gdas/atm/atm_obs_list.yaml.j2" LETKF_JEDI_TEST_YAML: "" OBS_JEDI_TEST_YAML: "" SOL_JEDI_TEST_YAML: "" @@ -59,23 +59,23 @@ snowanl: IO_LAYOUT_Y: 1 marinebmat: - SOCA_INPUT_FIX_DIR: "${FIXgfs}/gdas/soca/72x35x25/soca" - SOCA_ANL_GEOM: "${FIXgfs}/gdas/soca/72x35x25/soca" + SOCA_INPUT_FIX_DIR: "${FIXglobal}/gdas/soca/72x35x25/soca" + SOCA_ANL_GEOM: "${FIXglobal}/gdas/soca/72x35x25/soca" marineanl: - SOCA_INPUT_FIX_DIR: "${FIXgfs}/gdas/soca/72x35x25/soca" - SOCA_ANL_GEOM: "${FIXgfs}/gdas/soca/72x35x25/soca" - SOCA_OBS_LIST: "${PARMgfs}/gdas/marine/obs/obs_list.yaml.j2" # TODO: This is also repeated in oceanprepobs + SOCA_INPUT_FIX_DIR: 
"${FIXglobal}/gdas/soca/72x35x25/soca" + SOCA_ANL_GEOM: "${FIXglobal}/gdas/soca/72x35x25/soca" + SOCA_OBS_LIST: "${PARMglobal}/gdas/marine/obs/obs_list.yaml.j2" # TODO: This is also repeated in oceanprepobs SOCA_NINNER: 100 marineanlecen: - SOCA_INPUT_FIX_DIR: "${FIXgfs}/gdas/soca/72x35x25/soca" - SOCA_ANL_GEOM: "${FIXgfs}/gdas/soca/72x35x25/soca" + SOCA_INPUT_FIX_DIR: "${FIXglobal}/gdas/soca/72x35x25/soca" + SOCA_ANL_GEOM: "${FIXglobal}/gdas/soca/72x35x25/soca" marineanlletkf: - SOCA_INPUT_FIX_DIR: "${FIXgfs}/gdas/soca/72x35x25/soca" - SOCA_ANL_GEOM: "${FIXgfs}/gdas/soca/72x35x25/soca" - SOCA_OBS_LIST: "${PARMgfs}/gdas/marine/obs/obs_list.yaml.j2" + SOCA_INPUT_FIX_DIR: "${FIXglobal}/gdas/soca/72x35x25/soca" + SOCA_ANL_GEOM: "${FIXglobal}/gdas/soca/72x35x25/soca" + SOCA_OBS_LIST: "${PARMglobal}/gdas/marine/obs/obs_list.yaml.j2" prepoceanobs: use_exp_obs: "YES" diff --git a/dev/parm/config/sfs/config.base.j2 b/dev/parm/config/sfs/config.base.j2 index 3103f6deb37..216e5ce93df 100644 --- a/dev/parm/config/sfs/config.base.j2 +++ b/dev/parm/config/sfs/config.base.j2 @@ -28,14 +28,14 @@ export CLUSTERS_DTN="{{ CLUSTERS_DTN | default('${CLUSTERS_SERVICE}') }}" export HPSS_PROJECT="{{ HPSS_PROJECT }}" # Directories relative to installation areas: -export HOMEgfs="{{ HOMEgfs }}" -export EXECgfs=${HOMEgfs}/exec -export FIXgfs=${HOMEgfs}/fix -export PARMgfs=${HOMEgfs}/parm -export SCRgfs=${HOMEgfs}/scripts -export USHgfs=${HOMEgfs}/ush -export FIXorog=${FIXgfs}/orog -export FIXugwd=${FIXgfs}/ugwd +export HOMEglobal="{{ HOMEglobal }}" +export EXECglobal=${HOMEglobal}/exec +export FIXglobal=${HOMEglobal}/fix +export PARMglobal=${HOMEglobal}/parm +export SCRglobal=${HOMEglobal}/scripts +export USHglobal=${HOMEglobal}/ush +export FIXorog=${FIXglobal}/orog +export FIXugwd=${FIXglobal}/ugwd ######################################################################## @@ -71,8 +71,8 @@ export DO_TEST_MODE="{{ DO_TEST_MODE }}" # option to change configuration for au # FEEL FREE TO MOVE 
THEM ABOVE THIS LINE TO KEEP IT # CLEAR #################################################### -# Build paths relative to $HOMEgfs -export HOMEpost="${HOMEgfs}" +# Build paths relative to $HOMEglobal +export HOMEpost="${HOMEglobal}" # CONVENIENT utility scripts and other environment parameters export NMV="/bin/mv" @@ -83,10 +83,10 @@ export DEBUG_POSTSCRIPT="NO" # PBS only; sets debug=true export CHGRP_RSTPROD="{{ CHGRP_RSTPROD }}" export CHGRP_CMD="{{ CHGRP_CMD }}" export NCDUMP="${NETCDF:-${netcdf_c_ROOT:-}}/bin/ncdump" -export NCLEN="${HOMEgfs}/ush/getncdimlen" +export NCLEN="${HOMEglobal}/ush/getncdimlen" # Machine environment, jobs, and other utility scripts -export BASE_ENV="${HOMEgfs}/env" +export BASE_ENV="${HOMEglobal}/env" # EXPERIMENT specific environment parameters export SDATE="{{ SDATE }}" diff --git a/dev/parm/config/sfs/config.fcst.j2 b/dev/parm/config/sfs/config.fcst.j2 index b2b718c2a3a..b651e58b928 100644 --- a/dev/parm/config/sfs/config.fcst.j2 +++ b/dev/parm/config/sfs/config.fcst.j2 @@ -52,8 +52,8 @@ export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_O ####################################################################### -export FORECASTSH="${SCRgfs}/exglobal_forecast.sh" -#export FORECASTSH="${SCRgfs}/exglobal_forecast.py" # Temp. while this is worked on +export FORECASTSH="${SCRglobal}/exglobal_forecast.sh" +#export FORECASTSH="${SCRglobal}/exglobal_forecast.py" # Temp. while this is worked on export FCSTEXEC="${NET}_model.x" ####################################################################### @@ -164,17 +164,17 @@ export random_clds=".true." 
case ${imp_physics} in 99) # ZhaoCarr export ncld=1 - export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_zhaocarr${tbf}${tbp}" + export FIELD_TABLE="${PARMglobal}/ufs/fv3/field_table_zhaocarr${tbf}${tbp}" export nwat=2 ;; 6) # WSM6 export ncld=2 - export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_wsm6${tbf}${tbp}" + export FIELD_TABLE="${PARMglobal}/ufs/fv3/field_table_wsm6${tbf}${tbp}" export nwat=6 ;; 8) # Thompson export ncld=2 - export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_thompson_noaero_tke${tbp}" + export FIELD_TABLE="${PARMglobal}/ufs/fv3/field_table_thompson_noaero_tke${tbp}" export nwat=6 export cal_pre=".false." @@ -203,7 +203,7 @@ case ${imp_physics} in ;; 11) # GFDL export ncld=5 - export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_gfdl${tbf}${tbp}" + export FIELD_TABLE="${PARMglobal}/ufs/fv3/field_table_gfdl${tbf}${tbp}" export nwat=6 export dnats=1 export cal_pre=".false." @@ -252,7 +252,7 @@ export FSICS="0" #--------------------------------------------------------------------- # Write more variables to output -export DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table" +export DIAG_TABLE="${PARMglobal}/ufs/fv3/diag_table" # Write gfs restart files to rerun fcst from any break point export restart_interval=${restart_interval_gfs:-12} diff --git a/dev/ush/README_NET_CONVERSION.md b/dev/ush/README_NET_CONVERSION.md index 98c89ce877f..05a9dc5351a 100644 --- a/dev/ush/README_NET_CONVERSION.md +++ b/dev/ush/README_NET_CONVERSION.md @@ -7,12 +7,12 @@ These scripts facilitate the conversion between development and operational vari ## Variable Mapping Development (global-workflow) → Operational (NCO): -- `HOMEglobal` → `HOME${NET}` (e.g., `HOMEgfs`) -- `PARMglobal` → `PARM${NET}` (e.g., `PARMgfs`) -- `USHglobal` → `USH${NET}` (e.g., `USHgfs`) -- `SCRglobal` → `SCR${NET}` (e.g., `SCRgfs`) -- `EXECglobal` → `EXEC${NET}` (e.g., `EXECgfs`) -- `FIXglobal` → `FIX${NET}` (e.g., `FIXgfs`) +- `HOMEglobal` → `HOME${NET}` (e.g., `HOMEgfs`) +- `PARMglobal` → 
`PARM${NET}` (e.g., `PARMgfs`) +- `USHglobal` → `USH${NET}` (e.g., `USHgfs`) +- `SCRglobal` → `SCR${NET}` (e.g., `SCRgfs`) +- `EXECglobal` → `EXEC${NET}` (e.g., `EXECgfs`) +- `FIXglobal` → `FIX${NET}` (e.g., `FIXgfs`) ## Scripts diff --git a/dev/workflow/README_ecflow.md b/dev/workflow/README_ecflow.md index bce9429ce13..c0236a2df7b 100644 --- a/dev/workflow/README_ecflow.md +++ b/dev/workflow/README_ecflow.md @@ -72,12 +72,12 @@ parameter is set, it will look in the `$ECFgfs/scripts` folder for the scripts associated with the tasks or templates defined within the YAML file. If not already in the environment, it is suggested to add the following to the config.base file: -`export ECFgfs=$HOMEgfs/ecf` +`export ECFgfs=$HOMEglobal/ecf` * **NOTE**: Older versions of the `config.base` may not contain this export so it will be important to add as the application does rely on some pieces of information from that folder. -* **NOTE**: In the examples provided below and their output, the `HOMEgfs` parameter +* **NOTE**: In the examples provided below and their output, the `HOMEglobal` parameter is set to `/usr1/knevins/global-workflow` so you can make the associated reference in yours to match the output that you are looking to accomplish. diff --git a/dev/workflow/rocoto/rocoto_scron.sh.j2 b/dev/workflow/rocoto/rocoto_scron.sh.j2 index eab9828b3c9..5c812622501 100644 --- a/dev/workflow/rocoto/rocoto_scron.sh.j2 +++ b/dev/workflow/rocoto/rocoto_scron.sh.j2 @@ -1,5 +1,5 @@ #! 
/usr/bin/env bash -source "{{ HOMEgfs }}/dev/ush/gw_setup.sh" +source "{{ HOMEglobal }}/dev/ush/gw_setup.sh" # Run rocotorun bash -c "{{ rocotorunstr }}" diff --git a/docs/source/errors_faq.rst b/docs/source/errors_faq.rst index ff5926adff6..26a9652772f 100644 --- a/docs/source/errors_faq.rst +++ b/docs/source/errors_faq.rst @@ -7,7 +7,7 @@ Error: Reserved Variables Causing Workflow Issues ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Several variables are reserved in the workflow and should not be used as environment variables in your shell. Some of the common ones include (but are not limited to): -``HOMEgfs``, ``machine``, ``ROTDIR``, ``COMROT``, ``COMROOT``, ``COMOUT``, ``COMIN``, ``STMP``, ``PTMP``, ``DATAROOT``, ``DATA``, ``ACCOUNT``, ``PDY``, ``cyc``, ``RUN``, etc. +``HOMEglobal``, ``machine``, ``ROTDIR``, ``COMROT``, ``COMROOT``, ``COMOUT``, ``COMIN``, ``STMP``, ``PTMP``, ``DATAROOT``, ``DATA``, ``ACCOUNT``, ``PDY``, ``cyc``, ``RUN``, etc. If you are using any of these variables in your shell, you may encounter unexpected behavior in the workflow. ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/docs/source/gcafs.rst b/docs/source/gcafs.rst index ba4be17c0b3..a1faf022167 100644 --- a/docs/source/gcafs.rst +++ b/docs/source/gcafs.rst @@ -78,7 +78,7 @@ The primary configuration file for aerosol settings, containing: .. code-block:: bash export AERO_INPUTS_DIR="/path/to/aerosol/data" # Base directory for aerosol input data - export AERO_CONFIG_DIR="${PARMgfs}/ufs/gocart" # GOCART configuration files + export AERO_CONFIG_DIR="${PARMglobal}/ufs/gocart" # GOCART configuration files export fscav_aero="'*:0.3','so2:0.0',..." 
# Convective scavenging factors export dnats_aero=2 # Number of diagnostic tracers diff --git a/docs/source/testing.rst b/docs/source/testing.rst index d2cb9c9964e..6408efbb063 100644 --- a/docs/source/testing.rst +++ b/docs/source/testing.rst @@ -68,7 +68,7 @@ Configuration Management Platform-specific configuration is defined in:: - $HOMEgfs/dev/ci/platforms/config.$MACHINE_ID + $HOMEglobal/dev/ci/platforms/config.$MACHINE_ID **Key Variables**: @@ -90,7 +90,7 @@ These variables define: CMake Integration ================= -The framework uses CMake to configure and manage test execution. The main CMakeLists.txt file is located at ``$HOMEgfs/dev/ctests/CMakeLists.txt``. +The framework uses CMake to configure and manage test execution. The main CMakeLists.txt file is located at ``$HOMEglobal/dev/ctests/CMakeLists.txt``. **AddJJOBTest Function**: @@ -166,7 +166,7 @@ Each test case is defined by a YAML file using Jinja2 templating. The YAML file mkdir: - {{ DST_DIR }}/gfs.{{ PDY }}/{{ cyc }}/model/atmos/input - {{ DST_DIR }}/gfs.{{ PDY }}/{{ cyc }}/model/atmos/history - + copy: - [{{ SRC_DIR }}/gfs.{{ PDY }}/{{ cyc }}/model/atmos/input/gfs_ctrl.nc, {{ DST_DIR }}/gfs.{{ PDY }}/{{ cyc }}/model/atmos/input/gfs_ctrl.nc] @@ -211,15 +211,15 @@ Some tests require data from multiple cycles (e.g., coupled forecasts needing re mkdir: # Current cycle (12Z) - {{ DST_DIR }}/gefs.{{ PDY }}/{{ cyc }}/mem001/model/atmos/input - + # Previous cycle (06Z) - {{ DST_DIR }}/gefs.{{ PDY }}/{{ cyc_offset }}/mem001/model/ocean/restart - + copy: # Current cycle ICs - [{{ SRC_DIR }}/gefs.{{ PDY }}/{{ cyc }}/mem001/model/atmos/input/gfs_ctrl.nc, {{ DST_DIR }}/gefs.{{ PDY }}/{{ cyc }}/mem001/model/atmos/input/gfs_ctrl.nc] - + # Previous cycle restarts - [{{ SRC_DIR }}/gefs.{{ PDY }}/{{ cyc_offset }}/mem001/model/ocean/restart/MOM.res.nc, {{ DST_DIR }}/gefs.{{ PDY }}/{{ cyc_offset }}/mem001/model/ocean/restart/MOM.res.nc] @@ -324,7 +324,7 @@ Environment Setup Before running tests, ensure the 
required environment variables are set. These can be provided via: -1. Platform configuration files (``$HOMEgfs/dev/ci/platforms/config.$MACHINE_ID``) +1. Platform configuration files (``$HOMEglobal/dev/ci/platforms/config.$MACHINE_ID``) 2. Command-line CMake options (``-DVARIABLE=value``) 3. Environment variables exported in shell @@ -337,7 +337,7 @@ Before running tests, ensure the required environment variables are set. These c **Optional Variables**: * ``RUNTESTS``: Test execution directory (defaults to ``${CMAKE_BINARY_DIR}/RUNTESTS``) -* ``HOMEgfs``: Global workflow root (defaults to ``${PROJECT_SOURCE_DIR}``) +* ``HOMEglobal``: Global workflow root (defaults to ``${PROJECT_SOURCE_DIR}``) Configuration ============= @@ -346,13 +346,13 @@ Configure the CTest framework using CMake from the ctests directory: .. code-block:: bash - cd $HOMEgfs/dev/ctests + cd $HOMEglobal/dev/ctests mkdir -p build cd build - + # Configure with environment variables cmake ../.. - + # Or configure with command-line options cmake -DHPC_ACCOUNT=myaccount \ -DSTAGED_CTESTS=/path/to/baselines/RUNTESTS \ @@ -374,7 +374,7 @@ Running Tests .. code-block:: bash - cd $HOMEgfs/dev/ctests/build + cd $HOMEglobal/dev/ctests/build ctest **Run Tests by Label** (all tests for a specific case): @@ -383,16 +383,16 @@ Running Tests # Run all C48_ATM tests (trailing hyphen prevents partial matches) ctest -L "C48_ATM-" - + # Run all C48_S2SW tests (excludes C48_S2SWA_gefs tests) ctest -L "C48_S2SW-" - + # Run all C48_S2SWA_gefs ensemble tests ctest -L "C48_S2SWA_gefs-" .. note:: - The trailing hyphen in the label pattern is important! Labels use binomial + The trailing hyphen in the label pattern is important! Labels use binomial nomenclature (``CASE-JOB``), and the hyphen acts as a natural delimiter. Without it, ``ctest -L C48_S2SW`` would also match ``C48_S2SWA_gefs`` tests due to CTest's regex-based label matching. 
@@ -403,7 +403,7 @@ Running Tests # Run specific test with detailed logging ctest -R test_C48_ATM-gfs_fcst_seg0_execute -V - + # Run entire test sequence for one case ctest -R C48_S2SW-gfs_fcst_seg0 -V @@ -413,7 +413,7 @@ Running Tests # Run only setup phase ctest -R test_C48_ATM-gfs_fcst_seg0_setup - + # Run only validation phase ctest -R test_C48_ATM-gfs_fcst_seg0_validate @@ -468,7 +468,7 @@ Step-by-Step Procedure **Step 1: Add Test Definition to CMakeLists.txt** -Add the test at the end of ``$HOMEgfs/dev/ctests/CMakeLists.txt``: +Add the test at the end of ``$HOMEglobal/dev/ctests/CMakeLists.txt``: .. code-block:: cmake @@ -480,7 +480,7 @@ Add the test at the end of ``$HOMEgfs/dev/ctests/CMakeLists.txt``: **Step 2: Create YAML Case File** -Create a YAML file following the naming convention in ``$HOMEgfs/dev/ctests/cases/``: +Create a YAML file following the naming convention in ``$HOMEglobal/dev/ctests/cases/``: **Filename**: ``${CASE}-${JOB}.yaml`` @@ -494,10 +494,10 @@ For the example above: ``C48_ATM-gfs_analysis.yaml`` # Navigate to stable baseline COMROOT cd ${STAGED_CTESTS}/COMROOT/${PSLOT} - + # List atmosphere input files ls gfs.20210323/12/model/atmos/input/ - + # List restart files from previous cycle ls gfs.20210323/06/model/ocean/restart/ @@ -526,7 +526,7 @@ Create the YAML file with proper input staging configuration: # Create all necessary directory structures - {{ DST_DIR }}/gfs.{{ PDY }}/{{ cyc }}/model/atmos/input - {{ DST_DIR }}/gfs.{{ PDY }}/{{ cyc }}/model/atmos/analysis - + copy: # Stage all required input files from baseline - [{{ SRC_DIR }}/gfs.{{ PDY }}/{{ cyc }}/model/atmos/input/gfs_ctrl.nc, @@ -544,7 +544,7 @@ Create the YAML file with proper input staging configuration: .. code-block:: bash - cd $HOMEgfs/dev/ctests/build + cd $HOMEglobal/dev/ctests/build cmake ../.. 
ctest -R test_C48_ATM-gfs_analysis_execute -V @@ -641,7 +641,7 @@ Missing Input Files # Compare stable baseline ls ${STAGED_CTESTS}/COMROOT/${PSLOT}/gfs.${PDY}/${cyc}/ - + # With test environment ls ${RUNTESTS}/COMROOT/${TEST_NAME}/gfs.${PDY}/${cyc}/ @@ -678,7 +678,7 @@ Cycle Offset Issues {% set H_offset = '-6H' %} {% set TEST_DATE_offset = TEST_DATE + H_offset %} {% set cyc_offset = TEST_DATE_offset | strftime('%H') %} - + # Stage files using cyc_offset for previous cycle paths - {{ SRC_DIR }}/gfs.{{ PDY }}/{{ cyc_offset }}/model/ocean/restart/... @@ -703,7 +703,7 @@ Missing Baseline Data **Diagnosis**: Nightly baseline runs incomplete or configuration incorrect -**Solution**: +**Solution**: 1. Verify ``STAGED_CTESTS`` path in ``config.$MACHINE_ID`` 2. Check nightly CI runs completed successfully @@ -723,7 +723,7 @@ HPC Account Issues # Set correct account cmake -DHPC_ACCOUNT=correct_account ../.. - + # Or export before cmake export HPC_ACCOUNT=correct_account cmake ../.. @@ -745,7 +745,7 @@ Debugging Strategies # Test experiment directory cd ${RUNTESTS}/COMROOT/${TEST_NAME}_${HASH} - + # Check logs tail -f EXPDIR/logs/test_name.log @@ -754,7 +754,7 @@ Debugging Strategies .. 
code-block:: bash # Run test phases manually - cd $HOMEgfs/dev/ctests/build/scripts + cd $HOMEglobal/dev/ctests/build/scripts ./setup.sh TEST_NAME CASE_YAML TEST_DATE ./stage.sh CASE_NAME TEST_NAME TEST_DATE ./execute.sh TEST_NAME JOB_NAME TEST_DATE @@ -766,7 +766,7 @@ Debugging Strategies # Test YAML parsing from wxflow import parse_j2yaml, to_datetime - + data = { 'TEST_DATE': to_datetime('2021032312'), 'STAGED_CTESTS': '/path/to/baselines', @@ -774,7 +774,7 @@ Debugging Strategies 'PSLOT': 'C48_ATM_baseline', 'TEST_NAME': 'C48_ATM-gfs_fcst_seg0_hash' } - + config = parse_j2yaml('cases/C48_ATM-gfs_fcst_seg0.yaml', data) print(config) @@ -794,18 +794,18 @@ Directory Reference **Key Directories**: -* ``$HOMEgfs/dev/ctests/`` - CTest framework root -* ``$HOMEgfs/dev/ctests/cases/`` - YAML test case definitions -* ``$HOMEgfs/dev/ctests/build/`` - CMake build directory +* ``$HOMEglobal/dev/ctests/`` - CTest framework root +* ``$HOMEglobal/dev/ctests/cases/`` - YAML test case definitions +* ``$HOMEglobal/dev/ctests/build/`` - CMake build directory * ``${STAGED_CTESTS}/COMROOT/`` - Stable baseline outputs * ``${RUNTESTS}/COMROOT/`` - Test execution environments -* ``$HOMEgfs/jobs/JGLOBAL_*`` - Production job scripts +* ``$HOMEglobal/jobs/JGLOBAL_*`` - Production job scripts **Configuration Files**: -* ``$HOMEgfs/dev/ci/platforms/config.$MACHINE_ID`` - Platform settings -* ``$HOMEgfs/dev/ctests/CMakeLists.txt`` - Test definitions -* ``$HOMEgfs/dev/ci/gitlab-ci-hosts.yml`` - CI/CD pipeline +* ``$HOMEglobal/dev/ci/platforms/config.$MACHINE_ID`` - Platform settings +* ``$HOMEglobal/dev/ctests/CMakeLists.txt`` - Test definitions +* ``$HOMEglobal/dev/ci/gitlab-ci-hosts.yml`` - CI/CD pipeline Development History =================== diff --git a/gempak/fix/gfs_meta b/gempak/fix/gfs_meta index b57042fde2e..68ab9f08b37 100755 --- a/gempak/fix/gfs_meta +++ b/gempak/fix/gfs_meta @@ -1,23 +1,23 @@ -${HOMEgfs}/gempak/ush/gfs_meta_us.sh 36 84 126 216 
-${HOMEgfs}/gempak/ush/gfs_meta_bwx.sh 36 84 126 180 -${HOMEgfs}/gempak/ush/gfs_meta_comp.sh 36 84 126 -${HOMEgfs}/gempak/ush/gfs_meta_ak.sh 36 84 132 216 -${HOMEgfs}/gempak/ush/gfs_meta_crb.sh 126 -${HOMEgfs}/gempak/ush/gfs_meta_hur.sh 36 84 126 -${HOMEgfs}/gempak/ush/gfs_meta_qpf.sh 36 84 132 216 -${HOMEgfs}/gempak/ush/gfs_meta_precip.sh 36 84 132 216 384 -${HOMEgfs}/gempak/ush/gfs_meta_sa.sh 126 -${HOMEgfs}/gempak/ush/gfs_meta_ver.sh 126 -${HOMEgfs}/gempak/ush/gfs_meta_hi.sh 384 -${HOMEgfs}/gempak/ush/gfs_meta_nhsh.sh 384 -${HOMEgfs}/gempak/ush/gfs_meta_trop.sh 384 -${HOMEgfs}/gempak/ush/gfs_meta_usext.sh 384 -${HOMEgfs}/gempak/ush/gfs_meta_mar_ql.sh 24 48 96 180 -${HOMEgfs}/gempak/ush/gfs_meta_mar_comp.sh 126 -${HOMEgfs}/gempak/ush/gfs_meta_opc_na_ver.sh 126 -${HOMEgfs}/gempak/ush/gfs_meta_opc_np_ver.sh 126 -${HOMEgfs}/gempak/ush/gfs_meta_mar_atl.sh 180 -${HOMEgfs}/gempak/ush/gfs_meta_mar_pac.sh 180 -${HOMEgfs}/gempak/ush/gfs_meta_mar_ver.sh 48 -${HOMEgfs}/gempak/ush/gfs_meta_mar_skewt.sh 72 -${HOMEgfs}/gempak/ush/gfs_meta_sa2.sh 144 +${HOMEglobal}/gempak/ush/gfs_meta_us.sh 36 84 126 216 +${HOMEglobal}/gempak/ush/gfs_meta_bwx.sh 36 84 126 180 +${HOMEglobal}/gempak/ush/gfs_meta_comp.sh 36 84 126 +${HOMEglobal}/gempak/ush/gfs_meta_ak.sh 36 84 132 216 +${HOMEglobal}/gempak/ush/gfs_meta_crb.sh 126 +${HOMEglobal}/gempak/ush/gfs_meta_hur.sh 36 84 126 +${HOMEglobal}/gempak/ush/gfs_meta_qpf.sh 36 84 132 216 +${HOMEglobal}/gempak/ush/gfs_meta_precip.sh 36 84 132 216 384 +${HOMEglobal}/gempak/ush/gfs_meta_sa.sh 126 +${HOMEglobal}/gempak/ush/gfs_meta_ver.sh 126 +${HOMEglobal}/gempak/ush/gfs_meta_hi.sh 384 +${HOMEglobal}/gempak/ush/gfs_meta_nhsh.sh 384 +${HOMEglobal}/gempak/ush/gfs_meta_trop.sh 384 +${HOMEglobal}/gempak/ush/gfs_meta_usext.sh 384 +${HOMEglobal}/gempak/ush/gfs_meta_mar_ql.sh 24 48 96 180 +${HOMEglobal}/gempak/ush/gfs_meta_mar_comp.sh 126 +${HOMEglobal}/gempak/ush/gfs_meta_opc_na_ver.sh 126 +${HOMEglobal}/gempak/ush/gfs_meta_opc_np_ver.sh 126 
+${HOMEglobal}/gempak/ush/gfs_meta_mar_atl.sh 180 +${HOMEglobal}/gempak/ush/gfs_meta_mar_pac.sh 180 +${HOMEglobal}/gempak/ush/gfs_meta_mar_ver.sh 48 +${HOMEglobal}/gempak/ush/gfs_meta_mar_skewt.sh 72 +${HOMEglobal}/gempak/ush/gfs_meta_sa2.sh 144 diff --git a/modulefiles/gw_run.gaeac6.lua b/modulefiles/gw_run.gaeac6.lua index dfef1d452f6..82be4745e03 100644 --- a/modulefiles/gw_run.gaeac6.lua +++ b/modulefiles/gw_run.gaeac6.lua @@ -2,11 +2,11 @@ help([[ Load environment to run GFS on Gaea C6 ]]) --- Test that HOMEgfs is set. +-- Test that HOMEglobal is set. -- If not, load_modules.sh was not sourced to load this module. -local homegfssdir=os.getenv("HOMEgfs") or "None" +local homegfssdir=os.getenv("HOMEglobal") or "None" if (homegfssdir == "None") then - LmodError("FATAL ERROR HOMEgfs variable is unset.\n" .. + LmodError("FATAL ERROR HOMEglobal variable is unset.\n" .. "Please \"source dev/ush/load_modules.sh\" rather than loading this module directly.\n") end diff --git a/modulefiles/gw_run.hera.lua b/modulefiles/gw_run.hera.lua index f068821ca5c..c151c62f5a8 100644 --- a/modulefiles/gw_run.hera.lua +++ b/modulefiles/gw_run.hera.lua @@ -2,11 +2,11 @@ help([[ Load environment to run GFS on Hera ]]) --- Test that HOMEgfs is set. +-- Test that HOMEglobal is set. -- If not, load_modules.sh was not sourced to load this module. -local homegfssdir=os.getenv("HOMEgfs") or "None" +local homegfssdir=os.getenv("HOMEglobal") or "None" if (homegfssdir == "None") then - LmodError("FATAL ERROR HOMEgfs variable is unset.\n" .. + LmodError("FATAL ERROR HOMEglobal variable is unset.\n" .. "Please \"source dev/ush/load_modules.sh\" rather than loading this module directly.\n") end diff --git a/modulefiles/gw_run.hercules.lua b/modulefiles/gw_run.hercules.lua index e5baba8a936..2942e96822d 100644 --- a/modulefiles/gw_run.hercules.lua +++ b/modulefiles/gw_run.hercules.lua @@ -2,11 +2,11 @@ help([[ Load environment to run GFS on Hercules ]]) --- Test that HOMEgfs is set. 
+-- Test that HOMEglobal is set. -- If not, load_modules.sh was not sourced to load this module. -local homegfssdir=os.getenv("HOMEgfs") or "None" +local homegfssdir=os.getenv("HOMEglobal") or "None" if (homegfssdir == "None") then - LmodError("FATAL ERROR HOMEgfs variable is unset.\n" .. + LmodError("FATAL ERROR HOMEglobal variable is unset.\n" .. "Please \"source dev/ush/load_modules.sh\" rather than loading this module directly.\n") end diff --git a/modulefiles/gw_run.noaacloud.lua b/modulefiles/gw_run.noaacloud.lua index 99eb924bed1..2508d6de0f5 100644 --- a/modulefiles/gw_run.noaacloud.lua +++ b/modulefiles/gw_run.noaacloud.lua @@ -2,11 +2,11 @@ help([[ Load environment to run GFS on NOAA cloud ]]) --- Test that HOMEgfs is set. +-- Test that HOMEglobal is set. -- If not, load_modules.sh was not sourced to load this module. -local homegfssdir=os.getenv("HOMEgfs") or "None" +local homegfssdir=os.getenv("HOMEglobal") or "None" if (homegfssdir == "None") then - LmodError("FATAL ERROR HOMEgfs variable is unset.\n" .. + LmodError("FATAL ERROR HOMEglobal variable is unset.\n" .. "Please \"source dev/ush/load_modules.sh\" rather than loading this module directly.\n") end diff --git a/modulefiles/gw_run.orion.lua b/modulefiles/gw_run.orion.lua index 6c4a709805f..9e4a32defc2 100644 --- a/modulefiles/gw_run.orion.lua +++ b/modulefiles/gw_run.orion.lua @@ -2,11 +2,11 @@ help([[ Load environment to run GFS on Orion ]]) --- Test that HOMEgfs is set. +-- Test that HOMEglobal is set. -- If not, load_modules.sh was not sourced to load this module. -local homegfssdir=os.getenv("HOMEgfs") or "None" +local homegfssdir=os.getenv("HOMEglobal") or "None" if (homegfssdir == "None") then - LmodError("FATAL ERROR HOMEgfs variable is unset.\n" .. + LmodError("FATAL ERROR HOMEglobal variable is unset.\n" .. 
"Please \"source dev/ush/load_modules.sh\" rather than loading this module directly.\n") end diff --git a/modulefiles/gw_run.ursa.lua b/modulefiles/gw_run.ursa.lua index b2a69828642..0fe3847f1a0 100644 --- a/modulefiles/gw_run.ursa.lua +++ b/modulefiles/gw_run.ursa.lua @@ -2,11 +2,11 @@ help([[ Load environment to run GFS on Ursa ]]) --- Test that HOMEgfs is set. +-- Test that HOMEglobal is set. -- If not, load_modules.sh was not sourced to load this module. -local homegfssdir=os.getenv("HOMEgfs") or "None" +local homegfssdir=os.getenv("HOMEglobal") or "None" if (homegfssdir == "None") then - LmodError("FATAL ERROR HOMEgfs variable is unset.\n" .. + LmodError("FATAL ERROR HOMEglobal variable is unset.\n" .. "Please \"source dev/ush/load_modules.sh\" rather than loading this module directly.\n") end diff --git a/modulefiles/gw_upp.wcoss2.lua b/modulefiles/gw_upp.wcoss2.lua index 79b01210939..3b85f481645 100644 --- a/modulefiles/gw_upp.wcoss2.lua +++ b/modulefiles/gw_upp.wcoss2.lua @@ -2,8 +2,8 @@ help([[ Load environment to run the UPP on WCOSS2 ]]) -local homegfs=os.getenv("HOMEgfs") or "" -prepend_path("MODULEPATH", pathJoin(homegfs,"/sorc/ufs_model.fd/UFSATM/upp/modulefiles")) +local homeglobal=os.getenv("HOMEglobal") or "" +prepend_path("MODULEPATH", pathJoin(homeglobal,"/sorc/ufs_model.fd/UFSATM/upp/modulefiles")) -- Load UPP modules load("wcoss2_intel") diff --git a/parm/gdas/staging/snow_ims_scf_to_ioda.yaml.j2 b/parm/gdas/staging/snow_ims_scf_to_ioda.yaml.j2 index 30ff3a3ed63..c6e805823b6 100644 --- a/parm/gdas/staging/snow_ims_scf_to_ioda.yaml.j2 +++ b/parm/gdas/staging/snow_ims_scf_to_ioda.yaml.j2 @@ -1,3 +1,3 @@ copy: - ['{{ COMIN_OBS }}/{{OPREFIX}}imssnow96.asc', '{{ DATA }}/obs/ims{{ current_cycle | to_julian }}_4km_v1.3.asc'] -- ['{{ FIXgfs }}/gdas/obs/ims/IMS_4km_to_{{ CASE }}.mx{{ OCNRES }}.nc', '{{ DATA }}/obs/IMS4km_to_FV3_mapping.{{ CASE }}.mx{{ OCNRES }}_oro_data.nc'] +- ['{{ FIXglobal }}/gdas/obs/ims/IMS_4km_to_{{ CASE }}.mx{{ OCNRES }}.nc', '{{ 
DATA }}/obs/IMS4km_to_FV3_mapping.{{ CASE }}.mx{{ OCNRES }}_oro_data.nc'] diff --git a/parm/post/oceanice_products_gefs.yaml b/parm/post/oceanice_products_gefs.yaml index a797efa6364..f1e514705e9 100644 --- a/parm/post/oceanice_products_gefs.yaml +++ b/parm/post/oceanice_products_gefs.yaml @@ -8,17 +8,17 @@ ocnicepost: mkdir: - "{{ DATA }}" copy: - - ["{{ PARMgfs }}/post/ocnicepost.nml.jinja2", "{{ DATA }}/"] + - ["{{ PARMglobal }}/post/ocnicepost.nml.jinja2", "{{ DATA }}/"] {% if write_grib2 or write_netcdf %} - - ["{{ PARMgfs }}/post/{{ component }}_{{ RUN }}.csv", "{{ DATA }}/{{ component }}.csv"] - - ["{{ EXECgfs }}/ocnicepost.x", "{{ DATA }}/"] - - ["{{ FIXgfs }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Bu.to.Ct.bilinear.nc", "{{ DATA }}/"] - - ["{{ FIXgfs }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Cu.to.Ct.bilinear.nc", "{{ DATA }}/"] - - ["{{ FIXgfs }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Cv.to.Ct.bilinear.nc", "{{ DATA }}/"] + - ["{{ PARMglobal }}/post/{{ component }}_{{ RUN }}.csv", "{{ DATA }}/{{ component }}.csv"] + - ["{{ EXECglobal }}/ocnicepost.x", "{{ DATA }}/"] + - ["{{ FIXglobal }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Bu.to.Ct.bilinear.nc", "{{ DATA }}/"] + - ["{{ FIXglobal }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Cu.to.Ct.bilinear.nc", "{{ DATA }}/"] + - ["{{ FIXglobal }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Cv.to.Ct.bilinear.nc", "{{ DATA }}/"] {% for grid in product_grids %} - - ["{{ FIXgfs }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Ct.to.rect.{{ grid }}.bilinear.nc", "{{ DATA }}/"] - - ["{{ FIXgfs }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Ct.to.rect.{{ grid }}.conserve.nc", "{{ DATA }}/"] - - ["{{ FIXgfs }}/mom6/post/template.global.{{ grid }}.gb2", "{{ DATA }}/"] + - ["{{ FIXglobal }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Ct.to.rect.{{ grid }}.bilinear.nc", "{{ DATA }}/"] + - ["{{ FIXglobal }}/mom6/post/{{ model_grid 
}}/tripole.{{ model_grid }}.Ct.to.rect.{{ grid }}.conserve.nc", "{{ DATA }}/"] + - ["{{ FIXglobal }}/mom6/post/template.global.{{ grid }}.gb2", "{{ DATA }}/"] {% endfor %} {% endif %} diff --git a/parm/post/oceanice_products_gfs.yaml b/parm/post/oceanice_products_gfs.yaml index 6d8355fb491..21cb9ee694d 100644 --- a/parm/post/oceanice_products_gfs.yaml +++ b/parm/post/oceanice_products_gfs.yaml @@ -8,17 +8,17 @@ ocnicepost: mkdir: - "{{ DATA }}" copy: - - ["{{ PARMgfs }}/post/ocnicepost.nml.jinja2", "{{ DATA }}/"] + - ["{{ PARMglobal }}/post/ocnicepost.nml.jinja2", "{{ DATA }}/"] {% if write_grib2 or write_netcdf %} - - ["{{ PARMgfs }}/post/{{ component }}_{{ RUN }}.csv", "{{ DATA }}/{{ component }}.csv"] - - ["{{ EXECgfs }}/ocnicepost.x", "{{ DATA }}/"] - - ["{{ FIXgfs }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Bu.to.Ct.bilinear.nc", "{{ DATA }}/"] - - ["{{ FIXgfs }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Cu.to.Ct.bilinear.nc", "{{ DATA }}/"] - - ["{{ FIXgfs }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Cv.to.Ct.bilinear.nc", "{{ DATA }}/"] + - ["{{ PARMglobal }}/post/{{ component }}_{{ RUN }}.csv", "{{ DATA }}/{{ component }}.csv"] + - ["{{ EXECglobal }}/ocnicepost.x", "{{ DATA }}/"] + - ["{{ FIXglobal }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Bu.to.Ct.bilinear.nc", "{{ DATA }}/"] + - ["{{ FIXglobal }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Cu.to.Ct.bilinear.nc", "{{ DATA }}/"] + - ["{{ FIXglobal }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Cv.to.Ct.bilinear.nc", "{{ DATA }}/"] {% for grid in product_grids %} - - ["{{ FIXgfs }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Ct.to.rect.{{ grid }}.bilinear.nc", "{{ DATA }}/"] - - ["{{ FIXgfs }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Ct.to.rect.{{ grid }}.conserve.nc", "{{ DATA }}/"] - - ["{{ FIXgfs }}/mom6/post/template.global.{{ grid }}.gb2", "{{ DATA }}/"] + - ["{{ FIXglobal }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid 
}}.Ct.to.rect.{{ grid }}.bilinear.nc", "{{ DATA }}/"] + - ["{{ FIXglobal }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Ct.to.rect.{{ grid }}.conserve.nc", "{{ DATA }}/"] + - ["{{ FIXglobal }}/mom6/post/template.global.{{ grid }}.gb2", "{{ DATA }}/"] {% endfor %} {% endif %} diff --git a/parm/post/oceanice_products_sfs.yaml b/parm/post/oceanice_products_sfs.yaml index 086af354844..d509c258895 100644 --- a/parm/post/oceanice_products_sfs.yaml +++ b/parm/post/oceanice_products_sfs.yaml @@ -8,17 +8,17 @@ ocnicepost: mkdir: - "{{ DATA }}" copy: - - ["{{ PARMgfs }}/post/ocnicepost.nml.jinja2", "{{ DATA }}/"] + - ["{{ PARMglobal }}/post/ocnicepost.nml.jinja2", "{{ DATA }}/"] {% if write_grib2 or write_netcdf %} - - ["{{ PARMgfs }}/post/{{ component }}_{{ RUN }}.csv", "{{ DATA }}/{{ component }}.csv"] - - ["{{ EXECgfs }}/ocnicepost.x", "{{ DATA }}/"] - - ["{{ FIXgfs }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Bu.to.Ct.bilinear.nc", "{{ DATA }}/"] - - ["{{ FIXgfs }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Cu.to.Ct.bilinear.nc", "{{ DATA }}/"] - - ["{{ FIXgfs }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Cv.to.Ct.bilinear.nc", "{{ DATA }}/"] + - ["{{ PARMglobal }}/post/{{ component }}_{{ RUN }}.csv", "{{ DATA }}/{{ component }}.csv"] + - ["{{ EXECglobal }}/ocnicepost.x", "{{ DATA }}/"] + - ["{{ FIXglobal }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Bu.to.Ct.bilinear.nc", "{{ DATA }}/"] + - ["{{ FIXglobal }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Cu.to.Ct.bilinear.nc", "{{ DATA }}/"] + - ["{{ FIXglobal }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Cv.to.Ct.bilinear.nc", "{{ DATA }}/"] {% for grid in product_grids %} - - ["{{ FIXgfs }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Ct.to.rect.{{ grid }}.bilinear.nc", "{{ DATA }}/"] - - ["{{ FIXgfs }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Ct.to.rect.{{ grid }}.conserve.nc", "{{ DATA }}/"] - - ["{{ FIXgfs }}/mom6/post/template.global.{{ 
grid }}.gb2", "{{ DATA }}/"] + - ["{{ FIXglobal }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Ct.to.rect.{{ grid }}.bilinear.nc", "{{ DATA }}/"] + - ["{{ FIXglobal }}/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Ct.to.rect.{{ grid }}.conserve.nc", "{{ DATA }}/"] + - ["{{ FIXglobal }}/mom6/post/template.global.{{ grid }}.gb2", "{{ DATA }}/"] {% endfor %} {% endif %} diff --git a/parm/post/upp.yaml b/parm/post/upp.yaml index 4c1bd6d889b..f8e4d9c951a 100644 --- a/parm/post/upp.yaml +++ b/parm/post/upp.yaml @@ -9,16 +9,16 @@ upp: - "{{ DATA }}" copy: - ["{{ 'g2tmpl_ROOT' | getenv }}/share/params_grib2_tbl_new", "{{ DATA }}/params_grib2_tbl_new"] - - ["{{ PARMgfs }}/post/nam_micro_lookup.dat", "{{ DATA }}/eta_micro_lookup.dat"] - - ["{{ EXECgfs }}/upp.x", "{{ DATA }}/"] - - ["{{ PARMgfs }}/post/itag.jinja", "{{ DATA }}/"] + - ["{{ PARMglobal }}/post/nam_micro_lookup.dat", "{{ DATA }}/eta_micro_lookup.dat"] + - ["{{ EXECglobal }}/upp.x", "{{ DATA }}/"] + - ["{{ PARMglobal }}/post/itag.jinja", "{{ DATA }}/"] analysis: config: rdaod: True data_in: copy: - - ["{{ PARMgfs }}/post/gfs/postxconfig-NT-gfs-anl.txt", "{{ DATA }}/postxconfig-NT.txt"] + - ["{{ PARMglobal }}/post/gfs/postxconfig-NT-gfs-anl.txt", "{{ DATA }}/postxconfig-NT.txt"] {% if DO_JEDIATMVAR %} {% if ATMINC_GRID == 'gaussian' %} - ["{{ COMIN_ATMOS_ANALYSIS }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.analysis.atm.a006.nc", "{{ DATA }}/{{ atmos_filename }}"] @@ -43,9 +43,9 @@ forecast: data_in: copy: {% if forecast_hour == 0 %} - - ["{{ PARMgfs }}/post/gfs/postxconfig-NT-gfs-f00-two.txt", "{{ DATA }}/postxconfig-NT.txt"] + - ["{{ PARMglobal }}/post/gfs/postxconfig-NT-gfs-f00-two.txt", "{{ DATA }}/postxconfig-NT.txt"] {% else %} - - ["{{ PARMgfs }}/post/gfs/postxconfig-NT-gfs-two.txt", "{{ DATA }}/postxconfig-NT.txt"] + - ["{{ PARMglobal }}/post/gfs/postxconfig-NT-gfs-two.txt", "{{ DATA }}/postxconfig-NT.txt"] {% endif %} - ["{{ COMIN_ATMOS_HISTORY }}/{{ RUN }}.t{{ current_cycle | 
strftime('%H') }}z.atm.f{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/{{ atmos_filename }}"] - ["{{ COMIN_ATMOS_HISTORY }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.sfc.f{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/{{ flux_filename }}"] @@ -92,7 +92,7 @@ goes: {% endfor %} - ["{{ 'CRTM_FIX' | getenv }}/AerosolCoeff.bin", "{{ DATA }}/"] - ["{{ 'CRTM_FIX' | getenv }}/CloudCoeff.bin", "{{ DATA }}/"] - - ["{{ PARMgfs }}/post/gfs/postxconfig-NT-gfs-goes.txt", "{{ DATA }}/postxconfig-NT.txt"] + - ["{{ PARMglobal }}/post/gfs/postxconfig-NT-gfs-goes.txt", "{{ DATA }}/postxconfig-NT.txt"] - ["{{ COMIN_ATMOS_HISTORY }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.atm.f{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/{{ atmos_filename }}"] - ["{{ COMIN_ATMOS_HISTORY }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.sfc.f{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/{{ flux_filename }}"] data_out: diff --git a/parm/post/upp_gcafs.yaml b/parm/post/upp_gcafs.yaml index 8e131c7d283..67b50909257 100644 --- a/parm/post/upp_gcafs.yaml +++ b/parm/post/upp_gcafs.yaml @@ -10,15 +10,15 @@ upp: - "{{ DATA }}" copy: - ["{{ 'g2tmpl_ROOT' | getenv }}/share/params_grib2_tbl_new", "{{ DATA }}/params_grib2_tbl_new"] - - ["{{ PARMgfs }}/post/nam_micro_lookup.dat", "{{ DATA }}/eta_micro_lookup.dat"] - - ["{{ EXECgfs }}/upp.x", "{{ DATA }}/"] - - ["{{ PARMgfs }}/post/itag.jinja", "{{ DATA }}/"] - - ["{{ PARMgfs }}/post/optics_luts_DUST_nasa.dat", "{{ DATA }}/optics_luts_DUST_nasa.dat"] - - ["{{ PARMgfs }}/post/optics_luts_NITR_nasa.dat", "{{ DATA }}/optics_luts_NITR_nasa.dat"] - - ["{{ PARMgfs }}/post/optics_luts_SALT_nasa.dat", "{{ DATA }}/optics_luts_SALT_nasa.dat"] - - ["{{ PARMgfs }}/post/optics_luts_SOOT_nasa.dat", "{{ DATA }}/optics_luts_SOOT_nasa.dat"] - - ["{{ PARMgfs }}/post/optics_luts_SUSO_nasa.dat", "{{ DATA }}/optics_luts_SUSO_nasa.dat"] - - ["{{ PARMgfs }}/post/optics_luts_WASO_nasa.dat", "{{ DATA }}/optics_luts_WASO_nasa.dat"] + - ["{{ PARMglobal 
}}/post/nam_micro_lookup.dat", "{{ DATA }}/eta_micro_lookup.dat"] + - ["{{ EXECglobal }}/upp.x", "{{ DATA }}/"] + - ["{{ PARMglobal }}/post/itag.jinja", "{{ DATA }}/"] + - ["{{ PARMglobal }}/post/optics_luts_DUST_nasa.dat", "{{ DATA }}/optics_luts_DUST_nasa.dat"] + - ["{{ PARMglobal }}/post/optics_luts_NITR_nasa.dat", "{{ DATA }}/optics_luts_NITR_nasa.dat"] + - ["{{ PARMglobal }}/post/optics_luts_SALT_nasa.dat", "{{ DATA }}/optics_luts_SALT_nasa.dat"] + - ["{{ PARMglobal }}/post/optics_luts_SOOT_nasa.dat", "{{ DATA }}/optics_luts_SOOT_nasa.dat"] + - ["{{ PARMglobal }}/post/optics_luts_SUSO_nasa.dat", "{{ DATA }}/optics_luts_SUSO_nasa.dat"] + - ["{{ PARMglobal }}/post/optics_luts_WASO_nasa.dat", "{{ DATA }}/optics_luts_WASO_nasa.dat"] analysis: config: @@ -27,7 +27,7 @@ analysis: NET: GFS # upp doesn't work with GCAFS,set to GFS instead data_in: copy: - - ["{{ PARMgfs }}/post/gcafs/postxconfig-NT-gcafs.txt", "{{ DATA }}/postxconfig-NT.txt"] + - ["{{ PARMglobal }}/post/gcafs/postxconfig-NT-gcafs.txt", "{{ DATA }}/postxconfig-NT.txt"] - ["{{ COMIN_ATMOS_ANALYSIS }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.analysis.atm.a006.nc", "{{ DATA }}/{{ atmos_filename }}"] - ["{{ COMIN_ATMOS_ANALYSIS }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.analysis.sfc.a006.nc", "{{ DATA }}/{{ flux_filename }}"] data_out: diff --git a/parm/ufs/fix/gfs/atmos.fixed_files.yaml b/parm/ufs/fix/gfs/atmos.fixed_files.yaml index 374c26873e5..df93279ee6e 100644 --- a/parm/ufs/fix/gfs/atmos.fixed_files.yaml +++ b/parm/ufs/fix/gfs/atmos.fixed_files.yaml @@ -28,58 +28,58 @@ copy: - [$(FIXugwd)/ugwp_limb_tau.nc, $(DATA)/ugwp_limb_tau.nc] # CO2 climatology - - [$(FIXgfs)/am/co2monthlycyc.txt, $(DATA)/co2monthlycyc.txt] - - [$(FIXgfs)/am/global_co2historicaldata_glob.txt, $(DATA)/co2historicaldata_glob.txt] - - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2009.txt, $(DATA)/co2historicaldata_2009.txt] - - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2010.txt, 
$(DATA)/co2historicaldata_2010.txt] - - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2011.txt, $(DATA)/co2historicaldata_2011.txt] - - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2012.txt, $(DATA)/co2historicaldata_2012.txt] - - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2013.txt, $(DATA)/co2historicaldata_2013.txt] - - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2014.txt, $(DATA)/co2historicaldata_2014.txt] - - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2015.txt, $(DATA)/co2historicaldata_2015.txt] - - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2016.txt, $(DATA)/co2historicaldata_2016.txt] - - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2017.txt, $(DATA)/co2historicaldata_2017.txt] - - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2018.txt, $(DATA)/co2historicaldata_2018.txt] - - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2019.txt, $(DATA)/co2historicaldata_2019.txt] - - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2020.txt, $(DATA)/co2historicaldata_2020.txt] - - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2021.txt, $(DATA)/co2historicaldata_2021.txt] - - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2022.txt, $(DATA)/co2historicaldata_2022.txt] - - [$(FIXgfs)/am/fix_co2_proj/global_co2historicaldata_2023.txt, $(DATA)/co2historicaldata_2023.txt] + - [$(FIXglobal)/am/co2monthlycyc.txt, $(DATA)/co2monthlycyc.txt] + - [$(FIXglobal)/am/global_co2historicaldata_glob.txt, $(DATA)/co2historicaldata_glob.txt] + - [$(FIXglobal)/am/fix_co2_proj/global_co2historicaldata_2009.txt, $(DATA)/co2historicaldata_2009.txt] + - [$(FIXglobal)/am/fix_co2_proj/global_co2historicaldata_2010.txt, $(DATA)/co2historicaldata_2010.txt] + - [$(FIXglobal)/am/fix_co2_proj/global_co2historicaldata_2011.txt, $(DATA)/co2historicaldata_2011.txt] + - [$(FIXglobal)/am/fix_co2_proj/global_co2historicaldata_2012.txt, $(DATA)/co2historicaldata_2012.txt] + - 
[$(FIXglobal)/am/fix_co2_proj/global_co2historicaldata_2013.txt, $(DATA)/co2historicaldata_2013.txt] + - [$(FIXglobal)/am/fix_co2_proj/global_co2historicaldata_2014.txt, $(DATA)/co2historicaldata_2014.txt] + - [$(FIXglobal)/am/fix_co2_proj/global_co2historicaldata_2015.txt, $(DATA)/co2historicaldata_2015.txt] + - [$(FIXglobal)/am/fix_co2_proj/global_co2historicaldata_2016.txt, $(DATA)/co2historicaldata_2016.txt] + - [$(FIXglobal)/am/fix_co2_proj/global_co2historicaldata_2017.txt, $(DATA)/co2historicaldata_2017.txt] + - [$(FIXglobal)/am/fix_co2_proj/global_co2historicaldata_2018.txt, $(DATA)/co2historicaldata_2018.txt] + - [$(FIXglobal)/am/fix_co2_proj/global_co2historicaldata_2019.txt, $(DATA)/co2historicaldata_2019.txt] + - [$(FIXglobal)/am/fix_co2_proj/global_co2historicaldata_2020.txt, $(DATA)/co2historicaldata_2020.txt] + - [$(FIXglobal)/am/fix_co2_proj/global_co2historicaldata_2021.txt, $(DATA)/co2historicaldata_2021.txt] + - [$(FIXglobal)/am/fix_co2_proj/global_co2historicaldata_2022.txt, $(DATA)/co2historicaldata_2022.txt] + - [$(FIXglobal)/am/fix_co2_proj/global_co2historicaldata_2023.txt, $(DATA)/co2historicaldata_2023.txt] - # FIXgfs/am files - - [$(FIXgfs)/am/global_climaeropac_global.txt, $(DATA)/aerosol.dat] - - [$(FIXgfs)/am/ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77, $(DATA)/global_o3prdlos.f77] - - [$(FIXgfs)/am/global_h2o_pltc.f77, $(DATA)/global_h2oprdlos.f77] - - [$(FIXgfs)/am/global_glacier.2x2.grb, $(DATA)/global_glacier.2x2.grb] - - [$(FIXgfs)/am/global_maxice.2x2.grb, $(DATA)/global_maxice.2x2.grb] - - [$(FIXgfs)/am/global_snoclim.1.875.grb, $(DATA)/global_snoclim.1.875.grb] - - [$(FIXgfs)/am/global_slmask.t1534.3072.1536.grb, $(DATA)/global_slmask.t1534.3072.1536.grb] - - [$(FIXgfs)/am/global_soilmgldas.statsgo.t1534.3072.1536.grb, $(DATA)/global_soilmgldas.statsgo.t1534.3072.1536.grb] - - [$(FIXgfs)/am/global_solarconstant_noaa_an.txt, $(DATA)/solarconstant_noaa_an.txt] - - [$(FIXgfs)/am/global_sfc_emissivity_idx.txt, 
$(DATA)/sfc_emissivity_idx.txt] - - [$(FIXgfs)/am/RTGSST.1982.2012.monthly.clim.grb, $(DATA)/RTGSST.1982.2012.monthly.clim.grb] - - [$(FIXgfs)/am/IMS-NIC.blended.ice.monthly.clim.grb, $(DATA)/IMS-NIC.blended.ice.monthly.clim.grb] + # FIXglobal/am files + - [$(FIXglobal)/am/global_climaeropac_global.txt, $(DATA)/aerosol.dat] + - [$(FIXglobal)/am/ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77, $(DATA)/global_o3prdlos.f77] + - [$(FIXglobal)/am/global_h2o_pltc.f77, $(DATA)/global_h2oprdlos.f77] + - [$(FIXglobal)/am/global_glacier.2x2.grb, $(DATA)/global_glacier.2x2.grb] + - [$(FIXglobal)/am/global_maxice.2x2.grb, $(DATA)/global_maxice.2x2.grb] + - [$(FIXglobal)/am/global_snoclim.1.875.grb, $(DATA)/global_snoclim.1.875.grb] + - [$(FIXglobal)/am/global_slmask.t1534.3072.1536.grb, $(DATA)/global_slmask.t1534.3072.1536.grb] + - [$(FIXglobal)/am/global_soilmgldas.statsgo.t1534.3072.1536.grb, $(DATA)/global_soilmgldas.statsgo.t1534.3072.1536.grb] + - [$(FIXglobal)/am/global_solarconstant_noaa_an.txt, $(DATA)/solarconstant_noaa_an.txt] + - [$(FIXglobal)/am/global_sfc_emissivity_idx.txt, $(DATA)/sfc_emissivity_idx.txt] + - [$(FIXglobal)/am/RTGSST.1982.2012.monthly.clim.grb, $(DATA)/RTGSST.1982.2012.monthly.clim.grb] + - [$(FIXglobal)/am/IMS-NIC.blended.ice.monthly.clim.grb, $(DATA)/IMS-NIC.blended.ice.monthly.clim.grb] # MERRA2 Aerosol Climatology - - [$(FIXgfs)/aer/merra2.aerclim.2003-2014.m01.nc, $(DATA)/aeroclim.m01.nc] - - [$(FIXgfs)/aer/merra2.aerclim.2003-2014.m02.nc, $(DATA)/aeroclim.m02.nc] - - [$(FIXgfs)/aer/merra2.aerclim.2003-2014.m03.nc, $(DATA)/aeroclim.m03.nc] - - [$(FIXgfs)/aer/merra2.aerclim.2003-2014.m04.nc, $(DATA)/aeroclim.m04.nc] - - [$(FIXgfs)/aer/merra2.aerclim.2003-2014.m05.nc, $(DATA)/aeroclim.m05.nc] - - [$(FIXgfs)/aer/merra2.aerclim.2003-2014.m06.nc, $(DATA)/aeroclim.m06.nc] - - [$(FIXgfs)/aer/merra2.aerclim.2003-2014.m07.nc, $(DATA)/aeroclim.m07.nc] - - [$(FIXgfs)/aer/merra2.aerclim.2003-2014.m08.nc, $(DATA)/aeroclim.m08.nc] - - 
[$(FIXgfs)/aer/merra2.aerclim.2003-2014.m09.nc, $(DATA)/aeroclim.m09.nc] - - [$(FIXgfs)/aer/merra2.aerclim.2003-2014.m10.nc, $(DATA)/aeroclim.m10.nc] - - [$(FIXgfs)/aer/merra2.aerclim.2003-2014.m11.nc, $(DATA)/aeroclim.m11.nc] - - [$(FIXgfs)/aer/merra2.aerclim.2003-2014.m12.nc, $(DATA)/aeroclim.m12.nc] + - [$(FIXglobal)/aer/merra2.aerclim.2003-2014.m01.nc, $(DATA)/aeroclim.m01.nc] + - [$(FIXglobal)/aer/merra2.aerclim.2003-2014.m02.nc, $(DATA)/aeroclim.m02.nc] + - [$(FIXglobal)/aer/merra2.aerclim.2003-2014.m03.nc, $(DATA)/aeroclim.m03.nc] + - [$(FIXglobal)/aer/merra2.aerclim.2003-2014.m04.nc, $(DATA)/aeroclim.m04.nc] + - [$(FIXglobal)/aer/merra2.aerclim.2003-2014.m05.nc, $(DATA)/aeroclim.m05.nc] + - [$(FIXglobal)/aer/merra2.aerclim.2003-2014.m06.nc, $(DATA)/aeroclim.m06.nc] + - [$(FIXglobal)/aer/merra2.aerclim.2003-2014.m07.nc, $(DATA)/aeroclim.m07.nc] + - [$(FIXglobal)/aer/merra2.aerclim.2003-2014.m08.nc, $(DATA)/aeroclim.m08.nc] + - [$(FIXglobal)/aer/merra2.aerclim.2003-2014.m09.nc, $(DATA)/aeroclim.m09.nc] + - [$(FIXglobal)/aer/merra2.aerclim.2003-2014.m10.nc, $(DATA)/aeroclim.m10.nc] + - [$(FIXglobal)/aer/merra2.aerclim.2003-2014.m11.nc, $(DATA)/aeroclim.m11.nc] + - [$(FIXglobal)/aer/merra2.aerclim.2003-2014.m12.nc, $(DATA)/aeroclim.m12.nc] # Optical depth - - [$(FIXgfs)/lut/optics_BC.v1_3.dat, $(DATA)/optics_BC.dat] - - [$(FIXgfs)/lut/optics_DU.v15_3.dat, $(DATA)/optics_DU.dat] - - [$(FIXgfs)/lut/optics_OC.v1_3.dat, $(DATA)/optics_OC.dat] - - [$(FIXgfs)/lut/optics_SS.v3_3.dat, $(DATA)/optics_SS.dat] - - [$(FIXgfs)/lut/optics_SU.v1_3.dat, $(DATA)/optics_SU.dat] + - [$(FIXglobal)/lut/optics_BC.v1_3.dat, $(DATA)/optics_BC.dat] + - [$(FIXglobal)/lut/optics_DU.v15_3.dat, $(DATA)/optics_DU.dat] + - [$(FIXglobal)/lut/optics_OC.v1_3.dat, $(DATA)/optics_OC.dat] + - [$(FIXglobal)/lut/optics_SS.v3_3.dat, $(DATA)/optics_SS.dat] + - [$(FIXglobal)/lut/optics_SU.v1_3.dat, $(DATA)/optics_SU.dat] # fd_ufs.yaml file - - [$(HOMEgfs)/sorc/ufs_model.fd/tests/parm/fd_ufs.yaml, 
$(DATA)/] + - [$(HOMEglobal)/sorc/ufs_model.fd/tests/parm/fd_ufs.yaml, $(DATA)/] diff --git a/sorc/build_all.sh b/sorc/build_all.sh index cc87b04f890..73ea09f94b1 100755 --- a/sorc/build_all.sh +++ b/sorc/build_all.sh @@ -70,25 +70,25 @@ if [[ "${verbose}" == "YES" ]]; then fi script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)" -HOMEgfs=$(cd "${script_dir}" && git rev-parse --show-toplevel) +HOMEglobal=$(cd "${script_dir}" && git rev-parse --show-toplevel) # Needs to be exported for gw_setup.sh -export HOMEgfs +export HOMEglobal echo "Sourcing global-workflow modules ..." -source "${HOMEgfs}/dev/ush/gw_setup.sh" +source "${HOMEglobal}/dev/ush/gw_setup.sh" # Un-export after gw_setup.sh -export -n HOMEgfs +export -n HOMEglobal -cd "${HOMEgfs}/sorc" || exit 1 -mkdir -p "${HOMEgfs}/sorc/logs" || exit 1 +cd "${HOMEglobal}/sorc" || exit 1 +mkdir -p "${HOMEglobal}/sorc/logs" || exit 1 # Delete the rocoto XML and database if they exist rm -f "${build_xml}" "${build_db}" "${build_lock_db}" echo "Generating build.xml for building global-workflow programs ..." -yaml="${HOMEgfs}/sorc/build_opts.yaml" -"${HOMEgfs}/dev/workflow/setup_buildxml.py" --account "${HPC_ACCOUNT}" --yaml "${yaml}" --systems "${systems}" +yaml="${HOMEglobal}/sorc/build_opts.yaml" +"${HOMEglobal}/dev/workflow/setup_buildxml.py" --account "${HPC_ACCOUNT}" --yaml "${yaml}" --systems "${systems}" rc=$? 
if [[ "${rc}" -ne 0 ]]; then echo "FATAL ERROR: ${BASH_SOURCE[0]} failed to create 'build.xml' with error code ${rc}" diff --git a/sorc/build_gdas.sh b/sorc/build_gdas.sh index 0e1c5a083a8..9ad71c8febb 100755 --- a/sorc/build_gdas.sh +++ b/sorc/build_gdas.sh @@ -2,7 +2,7 @@ set -eux # shellcheck disable=SC2155 -readonly HOMEgfs_=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}")")" && git rev-parse --show-toplevel) +readonly HOMEglobal_=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}")")" && git rev-parse --show-toplevel) OPTIND=1 _opts="-f " # forces a clean build @@ -21,13 +21,13 @@ while getopts ":j:dv" option; do done shift $((OPTIND - 1)) -source "${HOMEgfs_}/ush/detect_machine.sh" +source "${HOMEglobal_}/ush/detect_machine.sh" # double quoting opts will not work since it is a string of options # shellcheck disable=SC2086 BUILD_JOBS="${BUILD_JOBS:-8}" \ WORKFLOW_BUILD="${WORKFLOW_BUILD:-"ON"}" \ WORKFLOW_TESTS="${WORKFLOW_TESTS:-"OFF"}" \ - "${HOMEgfs_}/sorc/gdas.cd/build.sh" ${_opts} -w ${HOMEgfs_} + "${HOMEglobal_}/sorc/gdas.cd/build.sh" ${_opts} -w ${HOMEglobal_} exit diff --git a/sorc/build_gfs_utils.sh b/sorc/build_gfs_utils.sh index 0fadf21069c..641d0715601 100755 --- a/sorc/build_gfs_utils.sh +++ b/sorc/build_gfs_utils.sh @@ -40,13 +40,13 @@ done shift $((OPTIND - 1)) # shellcheck disable=SC2155 -readonly HOMEgfs_=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}")")" && git rev-parse --show-toplevel) +readonly HOMEglobal_=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}")")" && git rev-parse --show-toplevel) -source "${HOMEgfs_}/ush/detect_machine.sh" +source "${HOMEglobal_}/ush/detect_machine.sh" BUILD_TYPE=${BUILD_TYPE:-"Release"} \ BUILD_VERBOSE=${BUILD_VERBOSE:-"NO"} \ BUILD_JOBS=${BUILD_JOBS:-8} \ - "${HOMEgfs_}/sorc/gfs_utils.fd/ush/build.sh" + "${HOMEglobal_}/sorc/gfs_utils.fd/ush/build.sh" exit diff --git a/sorc/build_gsi_enkf.sh b/sorc/build_gsi_enkf.sh index 79d5b3d4f1e..8a6cb1031f9 100755 --- a/sorc/build_gsi_enkf.sh +++ 
b/sorc/build_gsi_enkf.sh @@ -18,9 +18,9 @@ done shift $((OPTIND - 1)) # shellcheck disable=SC2155 -readonly HOMEgfs_=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}")")" && git rev-parse --show-toplevel) +readonly HOMEglobal_=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}")")" && git rev-parse --show-toplevel) -source "${HOMEgfs_}/ush/detect_machine.sh" +source "${HOMEglobal_}/ush/detect_machine.sh" BUILD_TYPE=${BUILD_TYPE:-"Release"} \ BUILD_VERBOSE=${BUILD_VERBOSE:-"NO"} \ @@ -28,6 +28,6 @@ BUILD_TYPE=${BUILD_TYPE:-"Release"} \ GSI_MODE=GFS \ ENKF_MODE=GFS \ REGRESSION_TESTS=NO \ - "${HOMEgfs_}/sorc/gsi_enkf.fd/ush/build.sh" + "${HOMEglobal_}/sorc/gsi_enkf.fd/ush/build.sh" exit diff --git a/sorc/build_gsi_monitor.sh b/sorc/build_gsi_monitor.sh index 98c1eebd3b9..e3939937dbc 100755 --- a/sorc/build_gsi_monitor.sh +++ b/sorc/build_gsi_monitor.sh @@ -2,7 +2,7 @@ set -eux # shellcheck disable=SC2155 -readonly HOMEgfs_=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}")")" && git rev-parse --show-toplevel) +readonly HOMEglobal_=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}")")" && git rev-parse --show-toplevel) OPTIND=1 while getopts ":j:dv" option; do @@ -20,11 +20,11 @@ while getopts ":j:dv" option; do done shift $((OPTIND - 1)) -source "${HOMEgfs_}/ush/detect_machine.sh" +source "${HOMEglobal_}/ush/detect_machine.sh" BUILD_TYPE=${BUILD_TYPE:-"Release"} \ BUILD_VERBOSE=${BUILD_VERBOSE:-"NO"} \ BUILD_JOBS=${BUILD_JOBS:-8} \ - "${HOMEgfs_}/sorc/gsi_monitor.fd/ush/build.sh" + "${HOMEglobal_}/sorc/gsi_monitor.fd/ush/build.sh" exit diff --git a/sorc/build_gsi_utils.sh b/sorc/build_gsi_utils.sh index 3341a6a1a47..1c30269d4bd 100755 --- a/sorc/build_gsi_utils.sh +++ b/sorc/build_gsi_utils.sh @@ -2,7 +2,7 @@ set -eux # shellcheck disable=SC2155 -readonly HOMEgfs_=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}")")" && git rev-parse --show-toplevel) +readonly HOMEglobal_=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}")")" && git rev-parse 
--show-toplevel) OPTIND=1 while getopts ":j:dv" option; do @@ -22,12 +22,12 @@ while getopts ":j:dv" option; do done shift $((OPTIND - 1)) -source "${HOMEgfs_}/ush/detect_machine.sh" +source "${HOMEglobal_}/ush/detect_machine.sh" BUILD_TYPE=${BUILD_TYPE:-"Release"} \ BUILD_VERBOSE=${BUILD_VERBOSE:-"NO"} \ BUILD_JOBS=${BUILD_JOBS:-8} \ UTIL_OPTS="-DBUILD_UTIL_ENKF_GFS=ON -DBUILD_UTIL_NCIO=ON" \ - "${HOMEgfs_}/sorc/gsi_utils.fd/ush/build.sh" + "${HOMEglobal_}/sorc/gsi_utils.fd/ush/build.sh" exit diff --git a/sorc/build_nexus.sh b/sorc/build_nexus.sh index 2b2297d7009..40b92d0e415 100755 --- a/sorc/build_nexus.sh +++ b/sorc/build_nexus.sh @@ -10,7 +10,7 @@ usage() { } # shellcheck disable=SC2155 -readonly HOMEgfs_=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}")")" && git rev-parse --show-toplevel) +readonly HOMEglobal_=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}")")" && git rev-parse --show-toplevel) OPTIND=1 _opts="-f " # forces a clean build @@ -34,6 +34,6 @@ shift $((OPTIND - 1)) # double quoting opts will not work since it is a string of options # shellcheck disable=SC2086 BUILD_JOBS="${BUILD_JOBS:-1}" \ - ./nexus.fd/build.sh ${_opts} -f -w ${HOMEgfs_} + ./nexus.fd/build.sh ${_opts} -f -w ${HOMEglobal_} exit diff --git a/sorc/build_ufs.sh b/sorc/build_ufs.sh index e55987a19e7..835de9ae801 100755 --- a/sorc/build_ufs.sh +++ b/sorc/build_ufs.sh @@ -2,7 +2,7 @@ set -eux # shellcheck disable=SC2155 -readonly HOMEgfs_=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}")")" && git rev-parse --show-toplevel) +readonly HOMEglobal_=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}")")" && git rev-parse --show-toplevel) # Default settings APP="S2SWA" @@ -34,10 +34,10 @@ while getopts ":da:fj:e:pvwy" option; do esac done -cd "${HOMEgfs_}/sorc/ufs_model.fd" +cd "${HOMEglobal_}/sorc/ufs_model.fd" -source "${HOMEgfs_}/ush/detect_machine.sh" -source "${HOMEgfs_}/sorc/ufs_model.fd/tests/module-setup.sh" +source "${HOMEglobal_}/ush/detect_machine.sh" +source 
"${HOMEglobal_}/sorc/ufs_model.fd/tests/module-setup.sh" MAKE_OPT="-DAPP=${APP} -D32BIT=ON -DCCPP_SUITES=${CCPP_SUITES}" if [[ ${PDLIB:-"OFF"} = "ON" ]]; then @@ -70,7 +70,7 @@ CLEAN_AFTER=NO # TODO: when ufs-weather-model#2716 is fixed, return to using tests/compile.sh if [[ "${MACHINE_ID}" == "wcoss2" && "${PARALLEL_RESTART}" == "NO" ]]; then set +x - module use "${HOMEgfs_}/sorc/ufs_model.fd/modulefiles" + module use "${HOMEglobal_}/sorc/ufs_model.fd/modulefiles" module load "ufs_wcoss2.intel" module list set -x @@ -97,7 +97,7 @@ if [[ "${MACHINE_ID}" == "wcoss2" && "${PARALLEL_RESTART}" == "NO" ]]; then rm -rf "${BUILD_DIR}" fi else - BUILD_JOBS=${BUILD_JOBS:-8} "${HOMEgfs_}/sorc/ufs_model.fd/tests/compile.sh" "${MACHINE_ID}" "${MAKE_OPT}" "${COMPILE_ID}" "intel" "${CLEAN_BEFORE}" "${CLEAN_AFTER}" + BUILD_JOBS=${BUILD_JOBS:-8} "${HOMEglobal_}/sorc/ufs_model.fd/tests/compile.sh" "${MACHINE_ID}" "${MAKE_OPT}" "${COMPILE_ID}" "intel" "${CLEAN_BEFORE}" "${CLEAN_AFTER}" fi mv "./tests/fv3_${COMPILE_ID}.exe" "./tests/${EXEC_NAME}" if [[ ! 
-f "./tests/modules.ufs_model.lua" ]]; then mv "./tests/modules.fv3_${COMPILE_ID}.lua" "./tests/modules.ufs_model.lua"; fi diff --git a/sorc/build_ufs_utils.sh b/sorc/build_ufs_utils.sh index 822734700b2..78511c1a438 100755 --- a/sorc/build_ufs_utils.sh +++ b/sorc/build_ufs_utils.sh @@ -2,7 +2,7 @@ set -eux # shellcheck disable=SC2155 -readonly HOMEgfs_=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}")")" && git rev-parse --show-toplevel) +readonly HOMEglobal_=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}")")" && git rev-parse --show-toplevel) OPTIND=1 while getopts ":j:dv" option; do @@ -20,12 +20,12 @@ while getopts ":j:dv" option; do done shift $((OPTIND - 1)) -source "${HOMEgfs_}/ush/detect_machine.sh" +source "${HOMEglobal_}/ush/detect_machine.sh" CMAKE_OPTS="-DGFS=ON" \ BUILD_TYPE=${BUILD_TYPE:-"Release"} \ BUILD_JOBS=${BUILD_JOBS:-8} \ BUILD_VERBOSE=${BUILD_VERBOSE:-} \ - "${HOMEgfs_}/sorc/ufs_utils.fd/build_all.sh" + "${HOMEglobal_}/sorc/ufs_utils.fd/build_all.sh" exit diff --git a/sorc/build_upp.sh b/sorc/build_upp.sh index 4bcc0427f62..ea8b5adac4c 100755 --- a/sorc/build_upp.sh +++ b/sorc/build_upp.sh @@ -2,7 +2,7 @@ set -eux # shellcheck disable=SC2155 -readonly HOMEgfs_=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}")")" && git rev-parse --show-toplevel) +readonly HOMEglobal_=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}")")" && git rev-parse --show-toplevel) script_dir=$(dirname "${BASH_SOURCE[0]}") cd "${script_dir}" || exit 1 @@ -24,10 +24,10 @@ while getopts ":dj:v" option; do done shift $((OPTIND - 1)) -source "${HOMEgfs_}/ush/detect_machine.sh" +source "${HOMEglobal_}/ush/detect_machine.sh" -cd "${HOMEgfs_}/sorc/ufs_model.fd/UFSATM/upp/tests" +cd "${HOMEglobal_}/sorc/ufs_model.fd/UFSATM/upp/tests" # shellcheck disable=SC2086 -BUILD_JOBS=${BUILD_JOBS:-8} bash -x "${HOMEgfs_}/sorc/ufs_model.fd/UFSATM/upp/tests/compile_upp.sh" ${_opts} +BUILD_JOBS=${BUILD_JOBS:-8} bash -x 
"${HOMEglobal_}/sorc/ufs_model.fd/UFSATM/upp/tests/compile_upp.sh" ${_opts} exit 0 diff --git a/sorc/build_ww3prepost.sh b/sorc/build_ww3prepost.sh index e9aa26d4a41..5814757304b 100755 --- a/sorc/build_ww3prepost.sh +++ b/sorc/build_ww3prepost.sh @@ -2,8 +2,8 @@ set -x # shellcheck disable=SC2155 -readonly HOMEgfs_=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}")")" && git rev-parse --show-toplevel) -cd "${HOMEgfs_}/sorc" || exit 1 +readonly HOMEglobal_=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}")")" && git rev-parse --show-toplevel) +cd "${HOMEglobal_}/sorc" || exit 1 # Default settings PDLIB="ON" @@ -24,15 +24,15 @@ while getopts ":j:a:dvw" option; do done # Determine machine and load modules -source "${HOMEgfs_}/ush/detect_machine.sh" +source "${HOMEglobal_}/ush/detect_machine.sh" set +x -source "${HOMEgfs_}/sorc/ufs_model.fd/tests/module-setup.sh" -module use "${HOMEgfs_}/sorc/ufs_model.fd/modulefiles" +source "${HOMEglobal_}/sorc/ufs_model.fd/tests/module-setup.sh" +module use "${HOMEglobal_}/sorc/ufs_model.fd/modulefiles" module load "ufs_${MACHINE_ID}.intel" set -x #Set WW3 directory -cd "${HOMEgfs_}/sorc/ufs_model.fd/WW3" || exit 1 +cd "${HOMEglobal_}/sorc/ufs_model.fd/WW3" || exit 1 WW3_DIR=$(pwd -P) export WW3_DIR diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index c75d9ca605b..763f7e7108b 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -2,8 +2,8 @@ #--make symbolic links for EMC installation and hardcopies for NCO delivery -HOMEgfs=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}")")" > /dev/null 2>&1 && git rev-parse --show-toplevel) -TRACE=NO source "${HOMEgfs}/ush/preamble.sh" +HOMEglobal=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}")")" > /dev/null 2>&1 && git rev-parse --show-toplevel) +TRACE=NO source "${HOMEglobal}/ush/preamble.sh" function usage() { cat << EOF @@ -56,15 +56,15 @@ else fi # shellcheck disable=SC1091 -COMPILER="intel" source "${HOMEgfs}/ush/detect_machine.sh" # (sets 
MACHINE_ID) +COMPILER="intel" source "${HOMEglobal}/ush/detect_machine.sh" # (sets MACHINE_ID) # shellcheck disable= machine=$(echo "${MACHINE_ID}" | cut -d. -f1) #------------------------------ #--Set up build.ver and run.ver #------------------------------ -${LINK_OR_COPY} "${HOMEgfs}/versions/build.${machine}.ver" "${HOMEgfs}/versions/build.ver" -${LINK_OR_COPY} "${HOMEgfs}/versions/run.${machine}.ver" "${HOMEgfs}/versions/run.ver" +${LINK_OR_COPY} "${HOMEglobal}/versions/build.${machine}.ver" "${HOMEglobal}/versions/build.ver" +${LINK_OR_COPY} "${HOMEglobal}/versions/run.${machine}.ver" "${HOMEglobal}/versions/run.ver" #------------------------------ #--model fix fields @@ -84,23 +84,23 @@ case "${machine}" in esac # Source fix version file -source "${HOMEgfs}/versions/fix.ver" +source "${HOMEglobal}/versions/fix.ver" # Link GDASapp python packages in ush/python packages=("jcb") for package in "${packages[@]}"; do - cd "${HOMEgfs}/ush/python" || exit 1 + cd "${HOMEglobal}/ush/python" || exit 1 if [[ -s "${package}" ]]; then rm -f "${package}" fi - ${LINK} "${HOMEgfs}/sorc/gdas.cd/sorc/${package}/src/${package}" . + ${LINK} "${HOMEglobal}/sorc/gdas.cd/sorc/${package}/src/${package}" . 
done # Link fix directories if [[ -n "${FIX_DIR}" ]]; then - mkdir -p "${HOMEgfs}/fix" || exit 1 + mkdir -p "${HOMEglobal}/fix" || exit 1 fi -cd "${HOMEgfs}/fix" || exit 1 +cd "${HOMEglobal}/fix" || exit 1 for dir in aer \ am \ chem \ @@ -143,7 +143,7 @@ fi #--------------------------------------- #--link sorc/upp.fd before referencing files within it #--------------------------------------- -cd "${HOMEgfs}/sorc" || exit 8 +cd "${HOMEglobal}/sorc" || exit 8 if [[ -d ufs_model.fd ]]; then if [[ -d upp.fd ]]; then rm -rf upp.fd @@ -154,37 +154,37 @@ fi #--add files from external repositories #--------------------------------------- #--copy/link NoahMp table form ccpp-physics repository -cd "${HOMEgfs}/parm/ufs" || exit 1 -${LINK_OR_COPY} "${HOMEgfs}/sorc/ufs_model.fd/tests/parm/noahmptable.tbl" . +cd "${HOMEglobal}/parm/ufs" || exit 1 +${LINK_OR_COPY} "${HOMEglobal}/sorc/ufs_model.fd/tests/parm/noahmptable.tbl" . -cd "${HOMEgfs}/parm/post" || exit 1 -${LINK_OR_COPY} "${HOMEgfs}/sorc/upp.fd/parm/params_grib2_tbl_new" . -${LINK_OR_COPY} "${HOMEgfs}/sorc/upp.fd/fix/nam_micro_lookup.dat" . +cd "${HOMEglobal}/parm/post" || exit 1 +${LINK_OR_COPY} "${HOMEglobal}/sorc/upp.fd/parm/params_grib2_tbl_new" . +${LINK_OR_COPY} "${HOMEglobal}/sorc/upp.fd/fix/nam_micro_lookup.dat" . for dir in gfs gcafs gefs sfs; do - ${LINK_OR_COPY} "${HOMEgfs}/sorc/upp.fd/parm/${dir}" . + ${LINK_OR_COPY} "${HOMEglobal}/sorc/upp.fd/parm/${dir}" . done for file in optics_luts_DUST.dat optics_luts_DUST_nasa.dat optics_luts_NITR_nasa.dat \ optics_luts_SALT.dat optics_luts_SALT_nasa.dat optics_luts_SOOT.dat optics_luts_SOOT_nasa.dat \ optics_luts_SUSO.dat optics_luts_SUSO_nasa.dat optics_luts_WASO.dat optics_luts_WASO_nasa.dat; do - ${LINK_OR_COPY} "${HOMEgfs}/sorc/upp.fd/fix/chem/${file}" . + ${LINK_OR_COPY} "${HOMEglobal}/sorc/upp.fd/fix/chem/${file}" . 
done for file in ice_gfs.csv ice_gefs.csv ocean_gfs.csv ocean_gefs.csv ocnicepost.nml.jinja2; do - ${LINK_OR_COPY} "${HOMEgfs}/sorc/gfs_utils.fd/parm/ocnicepost/${file}" . + ${LINK_OR_COPY} "${HOMEglobal}/sorc/gfs_utils.fd/parm/ocnicepost/${file}" . done -cd "${HOMEgfs}/scripts" || exit 8 -if [[ -d "${HOMEgfs}/sorc/gdas.cd" ]]; then +cd "${HOMEglobal}/scripts" || exit 8 +if [[ -d "${HOMEglobal}/sorc/gdas.cd" ]]; then declare -a gdas_scripts=(exglobal_prep_ocean_obs.py) for gdas_script in "${gdas_scripts[@]}"; do - ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/scripts/${gdas_script}" . + ${LINK_OR_COPY} "${HOMEglobal}/sorc/gdas.cd/scripts/${gdas_script}" . done fi # Link these templates from ufs-weather-model -cd "${HOMEgfs}/parm/ufs" || exit 1 +cd "${HOMEglobal}/parm/ufs" || exit 1 declare -a ufs_templates=("model_configure.IN" "input_global_nest.nml.IN" "MOM_input_025.IN" "MOM_input_050.IN" "MOM_input_100.IN" "MOM_input_500.IN" "MOM6_data_table.IN" @@ -205,29 +205,29 @@ for file in "${ufs_templates[@]}"; do if [[ -s "${file}" ]]; then rm -f "${file}" fi - ${LINK_OR_COPY} "${HOMEgfs}/sorc/ufs_model.fd/tests/parm/${file}" . + ${LINK_OR_COPY} "${HOMEglobal}/sorc/ufs_model.fd/tests/parm/${file}" . done # Link the script from ufs-weather-model that parses the templates -cd "${HOMEgfs}/ush" || exit 1 +cd "${HOMEglobal}/ush" || exit 1 if [[ -s "atparse.bash" ]]; then rm -f "atparse.bash" fi -${LINK_OR_COPY} "${HOMEgfs}/sorc/ufs_model.fd/tests/atparse.bash" . +${LINK_OR_COPY} "${HOMEglobal}/sorc/ufs_model.fd/tests/atparse.bash" . # add ufs_utils parm dir -if [[ -d "${HOMEgfs}/sorc/ufs_utils.fd" ]]; then - cd "${HOMEgfs}/parm" || exit 1 +if [[ -d "${HOMEglobal}/sorc/ufs_utils.fd" ]]; then + cd "${HOMEglobal}/parm" || exit 1 mkdir -p regrid_sfc cd regrid_sfc || exit 1 - ${LINK_OR_COPY} "${HOMEgfs}/sorc/ufs_utils.fd/parm/regrid_sfc/regrid.nml_tmpl" . + ${LINK_OR_COPY} "${HOMEglobal}/sorc/ufs_utils.fd/parm/regrid_sfc/regrid.nml_tmpl" . 
fi #------------------------------ #--add GDASApp fix directory #------------------------------ -if [[ -d "${HOMEgfs}/sorc/gdas.cd" ]]; then - cd "${HOMEgfs}/fix" || exit 1 +if [[ -d "${HOMEglobal}/sorc/gdas.cd" ]]; then + cd "${HOMEglobal}/fix" || exit 1 mkdir -p gdas cd gdas || exit 1 for gdas_sub in fv3jedi gsibec obs soca aero snow; do @@ -242,8 +242,8 @@ fi #------------------------------ #--add GDASApp parm directory #------------------------------ -if [[ -d "${HOMEgfs}/sorc/gdas.cd" ]]; then - cd "${HOMEgfs}/parm" || exit 1 +if [[ -d "${HOMEglobal}/sorc/gdas.cd" ]]; then + cd "${HOMEglobal}/parm" || exit 1 mkdir -p gdas cd gdas || exit 1 declare -a gdasapp_comps=("aero" "atm" "io" "ioda" "snow" "marine" "jcb-gdas" "jcb-algorithms" "anlstat" "analcalc") @@ -251,7 +251,7 @@ if [[ -d "${HOMEgfs}/sorc/gdas.cd" ]]; then if [[ -d "${comp}" ]]; then rm -rf "${comp}" fi - ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/parm/${comp}" . + ${LINK_OR_COPY} "${HOMEglobal}/sorc/gdas.cd/parm/${comp}" . done fi @@ -261,8 +261,8 @@ fi sources=("config" "scripts") targets=("parm/gdas" "ush") for i in "${!sources[@]}"; do - src="${HOMEgfs}/sorc/gdas.cd/sorc/spoc/dump/${sources[${i}]}" - dst="${HOMEgfs}/${targets[${i}]}" + src="${HOMEglobal}/sorc/gdas.cd/sorc/spoc/dump/${sources[${i}]}" + dst="${HOMEglobal}/${targets[${i}]}" if [[ -d "${src}" ]]; then cd "${dst}" || exit 1 @@ -273,54 +273,54 @@ done #------------------------------ #--add GDASApp files #------------------------------ -if [[ -d "${HOMEgfs}/sorc/gdas.cd/build" ]]; then - cd "${HOMEgfs}/ush/python" || exit 1 - ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/ush/soca" . - ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/ush/ufsda" . - ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/ush/ioda/bufr2ioda/gen_bufr2ioda_json.py" . - ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/ush/ioda/bufr2ioda/gen_bufr2ioda_yaml.py" . - ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/ush/ioda/bufr2ioda/run_bufr2ioda.py" . 
- ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/sorc/da-utils/ush/gsincdiag_to_ioda" . - ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/sorc/da-utils/ush/pyiodaconv" . - cd "${HOMEgfs}/ush" || exit 1 - ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/ush/gsi_satbias2ioda_all.sh" . - ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/ush/snow/bufr_snocvr_snomad.py" . - ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/ush/snow/ghcn_snod2ioda.py" . +if [[ -d "${HOMEglobal}/sorc/gdas.cd/build" ]]; then + cd "${HOMEglobal}/ush/python" || exit 1 + ${LINK_OR_COPY} "${HOMEglobal}/sorc/gdas.cd/ush/soca" . + ${LINK_OR_COPY} "${HOMEglobal}/sorc/gdas.cd/ush/ufsda" . + ${LINK_OR_COPY} "${HOMEglobal}/sorc/gdas.cd/ush/ioda/bufr2ioda/gen_bufr2ioda_json.py" . + ${LINK_OR_COPY} "${HOMEglobal}/sorc/gdas.cd/ush/ioda/bufr2ioda/gen_bufr2ioda_yaml.py" . + ${LINK_OR_COPY} "${HOMEglobal}/sorc/gdas.cd/ush/ioda/bufr2ioda/run_bufr2ioda.py" . + ${LINK_OR_COPY} "${HOMEglobal}/sorc/gdas.cd/sorc/da-utils/ush/gsincdiag_to_ioda" . + ${LINK_OR_COPY} "${HOMEglobal}/sorc/gdas.cd/sorc/da-utils/ush/pyiodaconv" . + cd "${HOMEglobal}/ush" || exit 1 + ${LINK_OR_COPY} "${HOMEglobal}/sorc/gdas.cd/ush/gsi_satbias2ioda_all.sh" . + ${LINK_OR_COPY} "${HOMEglobal}/sorc/gdas.cd/ush/snow/bufr_snocvr_snomad.py" . + ${LINK_OR_COPY} "${HOMEglobal}/sorc/gdas.cd/ush/snow/ghcn_snod2ioda.py" . fi #------------------------------ #--add DA Monitor file (NOTE: ensure to use correct version) #------------------------------ -if [[ -d "${HOMEgfs}/sorc/gsi_monitor.fd" ]]; then +if [[ -d "${HOMEglobal}/sorc/gsi_monitor.fd" ]]; then - cd "${HOMEgfs}/parm" || exit 1 + cd "${HOMEglobal}/parm" || exit 1 if [[ -d monitor ]]; then rm -rf monitor fi mkdir -p monitor cd monitor || exit 1 - ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gdas/fix/gdas_minmon_cost.txt" . - ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gdas/fix/gdas_minmon_gnorm.txt" . 
- ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gfs/fix/gfs_minmon_cost.txt" . - ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gfs/fix/gfs_minmon_gnorm.txt" . - ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_monitor.fd/src/Ozone_Monitor/nwprod/gdas_oznmon/fix/gdas_oznmon_base.tar" . - ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_monitor.fd/src/Ozone_Monitor/nwprod/gdas_oznmon/fix/gdas_oznmon_satype.txt" . - ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_base.tar" . - ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_satype.txt" . - ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_scaninfo.txt" . - ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/parm/gdas_radmon.parm" da_mon.parm - # ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gdas/parm/gdas_minmon.parm" . - # ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gfs/parm/gfs_minmon.parm" . - ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_monitor.fd/src/Ozone_Monitor/nwprod/gdas_oznmon/parm/gdas_oznmon.parm" . - # ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/parm/gdas_radmon.parm" . + ${LINK_OR_COPY} "${HOMEglobal}/sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gdas/fix/gdas_minmon_cost.txt" . + ${LINK_OR_COPY} "${HOMEglobal}/sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gdas/fix/gdas_minmon_gnorm.txt" . + ${LINK_OR_COPY} "${HOMEglobal}/sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gfs/fix/gfs_minmon_cost.txt" . + ${LINK_OR_COPY} "${HOMEglobal}/sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gfs/fix/gfs_minmon_gnorm.txt" . + ${LINK_OR_COPY} "${HOMEglobal}/sorc/gsi_monitor.fd/src/Ozone_Monitor/nwprod/gdas_oznmon/fix/gdas_oznmon_base.tar" . 
+ ${LINK_OR_COPY} "${HOMEglobal}/sorc/gsi_monitor.fd/src/Ozone_Monitor/nwprod/gdas_oznmon/fix/gdas_oznmon_satype.txt" . + ${LINK_OR_COPY} "${HOMEglobal}/sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_base.tar" . + ${LINK_OR_COPY} "${HOMEglobal}/sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_satype.txt" . + ${LINK_OR_COPY} "${HOMEglobal}/sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_scaninfo.txt" . + ${LINK_OR_COPY} "${HOMEglobal}/sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/parm/gdas_radmon.parm" da_mon.parm + # ${LINK_OR_COPY} "${HOMEglobal}/sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gdas/parm/gdas_minmon.parm" . + # ${LINK_OR_COPY} "${HOMEglobal}/sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gfs/parm/gfs_minmon.parm" . + ${LINK_OR_COPY} "${HOMEglobal}/sorc/gsi_monitor.fd/src/Ozone_Monitor/nwprod/gdas_oznmon/parm/gdas_oznmon.parm" . + # ${LINK_OR_COPY} "${HOMEglobal}/sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/parm/gdas_radmon.parm" . 
fi #------------------------------------------- #--Add GSI conv, sat, and oz info parm files #------------------------------------------- -if [[ -d "${HOMEgfs}/sorc/gsi_enkf.fd/fix/build_gsinfo" ]]; then +if [[ -d "${HOMEglobal}/sorc/gsi_enkf.fd/fix/build_gsinfo" ]]; then - cd "${HOMEgfs}/parm" || exit 1 + cd "${HOMEglobal}/parm" || exit 1 mkdir -p gsinfo @@ -330,34 +330,34 @@ if [[ -d "${HOMEgfs}/sorc/gsi_enkf.fd/fix/build_gsinfo" ]]; then if [[ -d "${dir}" ]]; then rm -rf "${dir}" fi - ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_enkf.fd/fix/build_gsinfo/${dir}" "${dir}" + ${LINK_OR_COPY} "${HOMEglobal}/sorc/gsi_enkf.fd/fix/build_gsinfo/${dir}" "${dir}" done fi #------------------------------ #--add NEXUS files #------------------------------ -if [[ -d "${HOMEgfs}/sorc/nexus.fd" ]]; then - cd "${HOMEgfs}/parm/chem" || exit 1 +if [[ -d "${HOMEglobal}/sorc/nexus.fd" ]]; then + cd "${HOMEglobal}/parm/chem" || exit 1 if [[ -d nexus ]]; then rm -rf nexus fi mkdir -p nexus/gocart cd nexus/gocart || exit 1 - ${LINK_OR_COPY} "${HOMEgfs}/sorc/nexus.fd/config/gocart/NEXUS_Config.rc.j2" . - ${LINK_OR_COPY} "${HOMEgfs}/sorc/nexus.fd/config/gocart/HEMCO_sa_Grid.rc.j2" . - ${LINK_OR_COPY} "${HOMEgfs}/sorc/nexus.fd/config/gocart/HEMCO_sa_Time.rc.j2" . - ${LINK_OR_COPY} "${HOMEgfs}/sorc/nexus.fd/config/gocart/HEMCO_sa_Diag.rc.j2" . - ${LINK_OR_COPY} "${HOMEgfs}/sorc/nexus.fd/config/gocart/HEMCO_sa_Spec.rc.j2" . + ${LINK_OR_COPY} "${HOMEglobal}/sorc/nexus.fd/config/gocart/NEXUS_Config.rc.j2" . + ${LINK_OR_COPY} "${HOMEglobal}/sorc/nexus.fd/config/gocart/HEMCO_sa_Grid.rc.j2" . + ${LINK_OR_COPY} "${HOMEglobal}/sorc/nexus.fd/config/gocart/HEMCO_sa_Time.rc.j2" . + ${LINK_OR_COPY} "${HOMEglobal}/sorc/nexus.fd/config/gocart/HEMCO_sa_Diag.rc.j2" . + ${LINK_OR_COPY} "${HOMEglobal}/sorc/nexus.fd/config/gocart/HEMCO_sa_Spec.rc.j2" . 
fi #------------------------------ #--link executables #------------------------------ -mkdir -p "${HOMEgfs}/exec" || exit 1 +mkdir -p "${HOMEglobal}/exec" || exit 1 -cd "${HOMEgfs}/exec" || exit 1 +cd "${HOMEglobal}/exec" || exit 1 for utilexe in fbwndgfs.x gaussian_sfcanl.x gfs_bufr.x supvit.x syndat_getjtbul.x \ syndat_maksynrc.x syndat_qctropcy.x tocsbufr.x overgridid.x rdbfmsua.x \ @@ -366,7 +366,7 @@ for utilexe in fbwndgfs.x gaussian_sfcanl.x gfs_bufr.x supvit.x syndat_getjtbul. if [[ -s "${utilexe}" ]]; then rm -f "${utilexe}" fi - ${LINK_OR_COPY} "${HOMEgfs}/sorc/gfs_utils.fd/install/bin/${utilexe}" . + ${LINK_OR_COPY} "${HOMEglobal}/sorc/gfs_utils.fd/install/bin/${utilexe}" . done declare -a model_systems=("gfs" "gefs" "sfs" "gcafs") @@ -375,8 +375,8 @@ for sys in "${model_systems[@]}"; do if [[ -s "${model_exe}" ]]; then rm -f "${model_exe}" fi - if [[ -f "${HOMEgfs}/sorc/ufs_model.fd/tests/${model_exe}" ]]; then - ${LINK_OR_COPY} "${HOMEgfs}/sorc/ufs_model.fd/tests/${model_exe}" "${model_exe}" + if [[ -f "${HOMEglobal}/sorc/ufs_model.fd/tests/${model_exe}" ]]; then + ${LINK_OR_COPY} "${HOMEglobal}/sorc/ufs_model.fd/tests/${model_exe}" "${model_exe}" fi done @@ -390,13 +390,13 @@ wave_systems["sfs"]="pdlib_OFF" for sys in "${!wave_systems[@]}"; do build_loc="${wave_systems[${sys}]}" - if [[ -d "${HOMEgfs}/sorc/ufs_model.fd/WW3/install/${build_loc}" ]]; then + if [[ -d "${HOMEglobal}/sorc/ufs_model.fd/WW3/install/${build_loc}" ]]; then for ww3exe in "${ww3_exes[@]}"; do target_ww3_exe="${sys}_${ww3exe}.x" if [[ -s "${target_ww3_exe}" ]]; then rm -f "${target_ww3_exe}" fi - ${LINK_OR_COPY} "${HOMEgfs}/sorc/ufs_model.fd/WW3/install/${build_loc}/bin/${ww3exe}" "${HOMEgfs}/exec/${target_ww3_exe}" + ${LINK_OR_COPY} "${HOMEglobal}/sorc/ufs_model.fd/WW3/install/${build_loc}/bin/${ww3exe}" "${HOMEglobal}/exec/${target_ww3_exe}" done fi done @@ -404,71 +404,71 @@ done if [[ -s "upp.x" ]]; then rm -f upp.x fi -${LINK_OR_COPY} "${HOMEgfs}/sorc/upp.fd/exec/upp.x" . 
+${LINK_OR_COPY} "${HOMEglobal}/sorc/upp.fd/exec/upp.x" . for ufs_utilsexe in emcsfc_ice_blend emcsfc_snow2mdl global_cycle fregrid regridStates.x; do if [[ -s "${ufs_utilsexe}" ]]; then rm -f "${ufs_utilsexe}" fi - ${LINK_OR_COPY} "${HOMEgfs}/sorc/ufs_utils.fd/exec/${ufs_utilsexe}" . + ${LINK_OR_COPY} "${HOMEglobal}/sorc/ufs_utils.fd/exec/${ufs_utilsexe}" . done # GSI -if [[ -d "${HOMEgfs}/sorc/gsi_enkf.fd/install" ]]; then +if [[ -d "${HOMEglobal}/sorc/gsi_enkf.fd/install" ]]; then for gsiexe in enkf.x gsi.x; do if [[ -s "${gsiexe}" ]]; then rm -f "${gsiexe}" fi - ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_enkf.fd/install/bin/${gsiexe}" . + ${LINK_OR_COPY} "${HOMEglobal}/sorc/gsi_enkf.fd/install/bin/${gsiexe}" . done fi # GSI Utils -if [[ -d "${HOMEgfs}/sorc/gsi_utils.fd/install" ]]; then +if [[ -d "${HOMEglobal}/sorc/gsi_utils.fd/install" ]]; then for exe in calc_analysis.x calc_increment_ens_ncio.x calc_increment_ens.x \ getsfcensmeanp.x getsigensmeanp_smooth.x getsigensstatp.x \ interp_inc.x recentersigp.x; do if [[ -s "${exe}" ]]; then rm -f "${exe}" fi - ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_utils.fd/install/bin/${exe}" . + ${LINK_OR_COPY} "${HOMEglobal}/sorc/gsi_utils.fd/install/bin/${exe}" . done fi # GSI Monitor -if [[ -d "${HOMEgfs}/sorc/gsi_monitor.fd/install" ]]; then +if [[ -d "${HOMEglobal}/sorc/gsi_monitor.fd/install" ]]; then for exe in oznmon_horiz.x oznmon_time.x radmon_angle.x \ radmon_bcoef.x radmon_bcor.x radmon_time.x; do if [[ -s "${exe}" ]]; then rm -f "${exe}" fi - ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_monitor.fd/install/bin/${exe}" . + ${LINK_OR_COPY} "${HOMEglobal}/sorc/gsi_monitor.fd/install/bin/${exe}" . 
done fi # GDASApp executables -if [[ -d "${HOMEgfs}/sorc/gdas.cd/install" ]]; then - cp -f "${HOMEgfs}/sorc/gdas.cd/install/bin"/gdas* ./ - cp -f "${HOMEgfs}/sorc/gdas.cd/install/bin/apply_incr.exe" ./gdas_apply_incr.x +if [[ -d "${HOMEglobal}/sorc/gdas.cd/install" ]]; then + cp -f "${HOMEglobal}/sorc/gdas.cd/install/bin"/gdas* ./ + cp -f "${HOMEglobal}/sorc/gdas.cd/install/bin/apply_incr.exe" ./gdas_apply_incr.x fi # GDASApp libraries -if [[ -d "${HOMEgfs}/sorc/gdas.cd/install" ]]; then - mkdir -p "${HOMEgfs}/lib" || exit 1 - cd "${HOMEgfs}/lib" || exit 1 - cp -af "${HOMEgfs}/sorc/gdas.cd/install/lib/." ./ +if [[ -d "${HOMEglobal}/sorc/gdas.cd/install" ]]; then + mkdir -p "${HOMEglobal}/lib" || exit 1 + cd "${HOMEglobal}/lib" || exit 1 + cp -af "${HOMEglobal}/sorc/gdas.cd/install/lib/." ./ fi # NEXUS executable -if [[ -d "${HOMEgfs}/sorc/nexus.fd/build/bin" ]]; then - cd "${HOMEgfs}/exec" || exit 1 - ${LINK_OR_COPY} "${HOMEgfs}/sorc/nexus.fd/build/bin/nexus" nexus.x +if [[ -d "${HOMEglobal}/sorc/nexus.fd/build/bin" ]]; then + cd "${HOMEglobal}/exec" || exit 1 + ${LINK_OR_COPY} "${HOMEglobal}/sorc/nexus.fd/build/bin/nexus" nexus.x fi #------------------------------ #--link source code directories #------------------------------ -cd "${HOMEgfs}/sorc" || exit 8 +cd "${HOMEglobal}/sorc" || exit 8 if [[ -d gsi_enkf.fd ]]; then if [[ -d gsi.fd ]]; then diff --git a/ush/python/pygfs/task/archive.py b/ush/python/pygfs/task/archive.py index 3e9c317905c..3a8165c5389 100644 --- a/ush/python/pygfs/task/archive.py +++ b/ush/python/pygfs/task/archive.py @@ -699,7 +699,7 @@ def _pop_git_info(self, arch_dict: AttrDict) -> None: # Get commonly used variables arch_hashes = arch_dict.ARCH_HASHES arch_diffs = arch_dict.ARCH_DIFFS - homegfs = arch_dict.HOMEglobal + homeglobal = arch_dict.HOMEglobal expdir = arch_dict.EXPDIR # Find the git command @@ -709,7 +709,7 @@ def _pop_git_info(self, arch_dict: AttrDict) -> None: output = "" # Navigate to HOMEglobal to run the git commands - 
with chdir(homegfs): + with chdir(homeglobal): # Are we running git to get hashes? if arch_hashes: diff --git a/versions/build.gaeac6.ver b/versions/build.gaeac6.ver index d8c859809bd..1aebe367669 100644 --- a/versions/build.gaeac6.ver +++ b/versions/build.gaeac6.ver @@ -7,4 +7,4 @@ export stack_mpi_ver=8.1.30 spack_stack_root="/ncrc/proj/epic/spack-stack" -source "${HOMEgfs:-}/versions/spack.ver" +source "${HOMEglobal:-}/versions/spack.ver" diff --git a/versions/build.hera.ver b/versions/build.hera.ver index b35f8ee5d9a..509805acc9b 100644 --- a/versions/build.hera.ver +++ b/versions/build.hera.ver @@ -7,4 +7,4 @@ export stack_mpi_ver=2021.13 spack_stack_root="/contrib/spack-stack" -source "${HOMEgfs:-}/versions/spack.ver" +source "${HOMEglobal:-}/versions/spack.ver" diff --git a/versions/build.hercules.ver b/versions/build.hercules.ver index 0ca97fa8cfc..ba695efb48d 100644 --- a/versions/build.hercules.ver +++ b/versions/build.hercules.ver @@ -8,6 +8,6 @@ export stack_mpi_ver=2021.13 spack_stack_env="ue-${stack_compiler}-2024.1.0" spack_stack_root="/apps/contrib/spack-stack" -source "${HOMEgfs:-}/versions/spack.ver" +source "${HOMEglobal:-}/versions/spack.ver" export intel_mkl_ver=2023.1.0 diff --git a/versions/build.noaacloud.ver b/versions/build.noaacloud.ver index a8f16cdfd4d..c92bf7e2e14 100644 --- a/versions/build.noaacloud.ver +++ b/versions/build.noaacloud.ver @@ -10,4 +10,4 @@ export gnu_ver=13.2.0 spack_stack_root="/contrib/spack-stack-rocky8" -source "${HOMEgfs:-}/versions/spack.ver" +source "${HOMEglobal:-}/versions/spack.ver" diff --git a/versions/build.orion.ver b/versions/build.orion.ver index 0ca97fa8cfc..ba695efb48d 100644 --- a/versions/build.orion.ver +++ b/versions/build.orion.ver @@ -8,6 +8,6 @@ export stack_mpi_ver=2021.13 spack_stack_env="ue-${stack_compiler}-2024.1.0" spack_stack_root="/apps/contrib/spack-stack" -source "${HOMEgfs:-}/versions/spack.ver" +source "${HOMEglobal:-}/versions/spack.ver" export intel_mkl_ver=2023.1.0 diff --git 
a/versions/build.ursa.ver b/versions/build.ursa.ver index b35f8ee5d9a..509805acc9b 100644 --- a/versions/build.ursa.ver +++ b/versions/build.ursa.ver @@ -7,4 +7,4 @@ export stack_mpi_ver=2021.13 spack_stack_root="/contrib/spack-stack" -source "${HOMEgfs:-}/versions/spack.ver" +source "${HOMEglobal:-}/versions/spack.ver" diff --git a/versions/run.gaeac6.ver b/versions/run.gaeac6.ver index 68446906e38..121119beeb3 100644 --- a/versions/run.gaeac6.ver +++ b/versions/run.gaeac6.ver @@ -8,7 +8,7 @@ export stack_mpi_ver=8.1.30 spack_stack_root="/ncrc/proj/epic/spack-stack/c6" -source "${HOMEgfs:-}/versions/spack.ver" +source "${HOMEglobal:-}/versions/spack.ver" # Gaea uses a slgihtly older version of cdo export cdo_ver=2.3.0 diff --git a/versions/run.hera.ver b/versions/run.hera.ver index 1e228c55092..ff602b4d18e 100644 --- a/versions/run.hera.ver +++ b/versions/run.hera.ver @@ -7,7 +7,7 @@ export stack_mpi_ver=2021.13 spack_stack_root="/contrib/spack-stack" -source "${HOMEgfs:-}/versions/spack.ver" +source "${HOMEglobal:-}/versions/spack.ver" # Hera has an older version of CDO installed export cdo_ver=2.3.0 diff --git a/versions/run.hercules.ver b/versions/run.hercules.ver index 7b5ad7ea3c6..59b59e76f58 100644 --- a/versions/run.hercules.ver +++ b/versions/run.hercules.ver @@ -8,7 +8,7 @@ export stack_mpi_ver=2021.13 spack_stack_env="ue-${stack_compiler}-2024.1.0" spack_stack_root="/apps/contrib/spack-stack" -source "${HOMEgfs:-}/versions/spack.ver" +source "${HOMEglobal:-}/versions/spack.ver" export sven_root_path="/home/gfekete/sven" export intel_mkl_ver=2023.1.0 diff --git a/versions/run.noaacloud.ver b/versions/run.noaacloud.ver index 2c94eb051a9..66a7a5033f6 100644 --- a/versions/run.noaacloud.ver +++ b/versions/run.noaacloud.ver @@ -7,7 +7,7 @@ export stack_mpi_ver=2021.13 spack_stack_root="/contrib/spack-stack-rocky8" -source "${HOMEgfs:-}/versions/spack.ver" +source "${HOMEglobal:-}/versions/spack.ver" export cdo_ver=2.3.0 export perl_ver=5.38.0 diff --git 
a/versions/run.orion.ver b/versions/run.orion.ver index 7b5ad7ea3c6..59b59e76f58 100644 --- a/versions/run.orion.ver +++ b/versions/run.orion.ver @@ -8,7 +8,7 @@ export stack_mpi_ver=2021.13 spack_stack_env="ue-${stack_compiler}-2024.1.0" spack_stack_root="/apps/contrib/spack-stack" -source "${HOMEgfs:-}/versions/spack.ver" +source "${HOMEglobal:-}/versions/spack.ver" export sven_root_path="/home/gfekete/sven" export intel_mkl_ver=2023.1.0 diff --git a/versions/run.ursa.ver b/versions/run.ursa.ver index 9b7d3c9502c..723775bd04f 100644 --- a/versions/run.ursa.ver +++ b/versions/run.ursa.ver @@ -7,6 +7,6 @@ export stack_mpi_ver=2021.13 spack_stack_root="/contrib/spack-stack" -source "${HOMEgfs:-}/versions/spack.ver" +source "${HOMEglobal:-}/versions/spack.ver" export imagemagick_ver=7.1.1-29 From 8f21cb3aca35be84e126ac04a8144157a793aa65 Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Thu, 5 Feb 2026 16:16:29 -0500 Subject: [PATCH 03/71] update converting scripts --- dev/ush/convert_from_global_to_net.sh | 243 +++++++++++++++++++++++++ dev/ush/convert_from_net_to_global.sh | 251 ++++++++++++++++++++++++++ dev/ush/test.sh | 3 + 3 files changed, 497 insertions(+) create mode 100755 dev/ush/convert_from_global_to_net.sh create mode 100755 dev/ush/convert_from_net_to_global.sh create mode 100755 dev/ush/test.sh diff --git a/dev/ush/convert_from_global_to_net.sh b/dev/ush/convert_from_global_to_net.sh new file mode 100755 index 00000000000..84bf947775a --- /dev/null +++ b/dev/ush/convert_from_global_to_net.sh @@ -0,0 +1,243 @@ +#!/bin/bash +# convert_from_global_to_net.sh +# Script to convert HOMEglobal, PARMglobal, etc. to HOME${NET}, PARM${NET}, etc. +# for operational deployment +# +# Usage: convert_from_global_to_net.sh [--exclude dir1 dir2 dir3 ...] 
+# +# NET_value can be: gfs, gefs, sfs, or gcafs (NOT 'all' - use specific NET) +# target_path can be a file or directory +# +# Example: convert_from_global_to_net.sh gfs /path/to/deployment --exclude sorc dev parm/archive +# +# This script performs selective search/replace for deployment variables + +set -eu + +# Color codes for output +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +BLUE='\033[0;34m' +CYAN='\033[0;36m' +NC='\033[0m' # No Color + +# Cleanup function for temporary files +cleanup() { + local exit_code=$? + if [[ -f "/tmp/convert_files_$$.txt" ]]; then + rm -f "/tmp/convert_files_$$.txt" + fi + if [[ ${exit_code} -ne 0 ]]; then + echo -e "${RED}Error: Script failed with exit code ${exit_code}${NC}" >&2 + fi +} + +# Set trap to ensure cleanup on exit +trap cleanup EXIT ERR + +# Parse arguments +if [[ $# -lt 2 ]]; then + echo "ERROR: NET value and target path required" + echo "Usage: $0 [--exclude dir1 dir2 dir3 ...]" + echo "NET_value must be one of: gfs, gefs, sfs, gcafs" + exit 1 +fi + +NET="$1" +shift + +# Define all possible NET values for validation +ALL_NET_VALUES=("gefs" "gfs" "gcafs" "sfs") + +# Validate NET value - must be specific, NOT 'all' +if [[ "${NET}" == "all" ]]; then + echo -e "${RED}ERROR: 'all' is not supported for convert_from_global_to_net.sh${NC}" >&2 + echo -e "${YELLOW}You must specify a single NET value: gfs, gefs, sfs, or gcafs${NC}" >&2 + echo -e "${YELLOW}Use convert_from_net_to_global.sh with 'all' to revert changes${NC}" >&2 + exit 1 +fi + +# Validate NET is one of the allowed values +valid_net=false +for net_val in "${ALL_NET_VALUES[@]}"; do + if [[ "${NET}" == "${net_val}" ]]; then + valid_net=true + break + fi +done + +if ! 
${valid_net}; then + echo -e "${RED}ERROR: Invalid NET value '${NET}'${NC}" >&2 + echo -e "${YELLOW}Must be one of: ${ALL_NET_VALUES[*]}${NC}" >&2 + exit 1 +fi + +# Initialize target path +TARGET_PATH="" +EXCLUDE_DIRS=() + +# Parse remaining arguments +while [[ $# -gt 0 ]]; do + case "$1" in + --exclude) + shift + # Collect all remaining arguments as exclude directories + while [[ $# -gt 0 ]]; do + EXCLUDE_DIRS+=("$1") + shift + done + ;; + *) + if [[ -z "${TARGET_PATH}" ]]; then + TARGET_PATH="$1" + else + echo "ERROR: Unexpected argument: $1" + exit 1 + fi + shift + ;; + esac +done + +# Check if target path exists +if [[ ! -e "${TARGET_PATH}" ]]; then + echo -e "${RED}ERROR: Target path ${TARGET_PATH} does not exist${NC}" >&2 + exit 1 +fi + +# Display processing header +echo -e "${CYAN}=========================================${NC}" +echo -e "${YELLOW}Processing: Converting ${RED}global${NC}${YELLOW}-workflow variables to ${GREEN}${NET}${NC}${YELLOW}-specific variables${NC}" +echo -e "${BLUE}Target: ${TARGET_PATH}${NC}" +if [[ ${#EXCLUDE_DIRS[@]} -gt 0 ]]; then + echo -e "${BLUE}Excluding directories: ${EXCLUDE_DIRS[*]}${NC}" +fi +echo -e "${CYAN}=========================================${NC}" + +echo "" +echo -e "${YELLOW}Converting for: ${RED}global${NC} ${YELLOW}→${NC} ${GREEN}${NET}${NC}" + +# List of patterns to convert +declare -A patterns=( + ["HOMEglobal"]="HOME${NET}" + ["PARMglobal"]="PARM${NET}" + ["USHglobal"]="USH${NET}" + ["SCRglobal"]="SCR${NET}" + ["EXECglobal"]="EXEC${NET}" + ["FIXglobal"]="FIX${NET}" +) + +# If target is a single file, process it directly +if [[ -f "${TARGET_PATH}" ]]; then + # Pre-check: Skip if ANY NET-specific variable already exists + if grep -qE '\b(HOME|PARM|USH|SCR|EXEC|FIX)(gfs|gefs|sfs|gcafs)\b' "${TARGET_PATH}" 2>/dev/null; then + echo -e "${YELLOW}⚠ File already has NET-specific variables - skipped${NC}" + exit 0 + fi + + file_modified=false + for pattern in "${!patterns[@]}"; do + replacement="${patterns[$pattern]}" 
+ if grep -q "\\b${pattern}\\b" "${TARGET_PATH}" 2>/dev/null; then + if ! sed -i "s/\\b${pattern}\\b/${replacement}/g" "${TARGET_PATH}"; then + echo -e "${RED}ERROR: Failed to process ${TARGET_PATH}${NC}" >&2 + exit 1 + fi + file_modified=true + fi + done + + if ${file_modified}; then + echo -e "${GREEN}✓ Processed 1 file for NET=${NET}${NC}" + else + echo -e "${YELLOW}⚠ No patterns found for NET=${NET}${NC}" + fi +else + # Build find command with exclusions for directory + find_cmd="find \"${TARGET_PATH}\"" + + # Add excluded directories to find command + for exclude_dir in "${EXCLUDE_DIRS[@]}"; do + # Remove leading ./ if present + exclude_dir="${exclude_dir#./}" + + # Check if path is absolute (starts with /) + if [[ "${exclude_dir:0:1}" == "/" ]]; then + # Use absolute path as-is + find_cmd+=" -path \"${exclude_dir}\" -prune -o" + else + # Treat as relative to TARGET_PATH + find_cmd+=" -path \"${TARGET_PATH}/${exclude_dir}\" -prune -o" + fi + done + + # Complete find command + find_cmd+=" -type f -print" + + # Execute find and get file list + if ! 
eval "${find_cmd}" > /tmp/convert_files_$$.txt; then + echo -e "${RED}ERROR: Failed to find files in ${TARGET_PATH}${NC}" >&2 + exit 1 + fi + + # Count files to process + file_count=$(wc -l < /tmp/convert_files_$$.txt) + + if [[ ${file_count} -eq 0 ]]; then + echo -e "${YELLOW}Warning: No files found to process${NC}" + exit 0 + fi + + echo -e "${BLUE}Processing ${file_count} files...${NC}" + + # Perform the replacements + failed_files=0 + skipped_files=0 + while IFS= read -r file; do + if [[ -f "${file}" ]]; then + # Pre-check: Skip file if it contains ANY NET-specific variable (gfs, gefs, sfs, gcafs) + if grep -qE '\b(HOME|PARM|USH|SCR|EXEC|FIX)(gfs|gefs|sfs|gcafs)\b' "${file}" 2>/dev/null; then + skipped_files=$((skipped_files + 1)) + continue + fi + + # Proceed with conversion only if no NET-specific vars found + file_modified=false + file_failed=false + for pattern in "${!patterns[@]}"; do + replacement="${patterns[$pattern]}" + if grep -q "\\b${pattern}\\b" "${file}" 2>/dev/null; then + if ! sed -i "s/\\b${pattern}\\b/${replacement}/g" "${file}"; then + echo -e "${RED}ERROR: sed failed on ${file}${NC}" >&2 + failed_files=$((failed_files + 1)) + file_failed=true + break + fi + file_modified=true + fi + done + + if ! ${file_modified} && ! 
${file_failed}; then + skipped_files=$((skipped_files + 1)) + fi + fi + done < /tmp/convert_files_$$.txt + + # Clean up + rm -f /tmp/convert_files_$$.txt + + if [[ ${failed_files} -gt 0 ]]; then + echo -e "${YELLOW}⚠ Processed $((file_count - failed_files - skipped_files))/${file_count} files (${failed_files} failed, ${skipped_files} skipped - has NET vars) for NET=${NET}${NC}" + elif [[ ${skipped_files} -gt 0 ]]; then + echo -e "${GREEN}✓ Processed $((file_count - skipped_files))/${file_count} files (${skipped_files} skipped - has NET vars) for NET=${NET}${NC}" + else + echo -e "${GREEN}✓ Processed ${file_count} files for NET=${NET}${NC}" + fi +fi + +echo -e "${GREEN}Completed!${NC}" + +echo "" +echo -e "${CYAN}=========================================${NC}" +echo -e "${GREEN}Conversion to NET=${NET} completed successfully!${NC}" diff --git a/dev/ush/convert_from_net_to_global.sh b/dev/ush/convert_from_net_to_global.sh new file mode 100755 index 00000000000..69bbc431e2b --- /dev/null +++ b/dev/ush/convert_from_net_to_global.sh @@ -0,0 +1,251 @@ +#!/bin/bash +# convert_from_net_to_global.sh +# Script to convert HOME${NET}, PARM${NET}, etc. back to HOMEglobal, PARMglobal, etc. +# for development +# +# Usage: convert_from_net_to_global.sh [--exclude dir1 dir2 dir3 ...] +# +# NET_value can be: gfs, gefs, sfs, gcafs, or all (for all NET values) +# target_path can be a file or directory +# +# Example: convert_from_net_to_global.sh gfs /path/to/development --exclude sorc dev parm/archive +# Example: convert_from_net_to_global.sh all /path/to/development +# +# This script performs selective search/replace to revert deployment variables + +set -eu + +# Color codes for output +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +BLUE='\033[0;34m' +CYAN='\033[0;36m' +NC='\033[0m' # No Color + +# Cleanup function for temporary files +cleanup() { + local exit_code=$? 
+ if [[ -f "/tmp/convert_files_$$.txt" ]]; then + rm -f "/tmp/convert_files_$$.txt" + fi + if [[ ${exit_code} -ne 0 ]]; then + echo -e "${RED}Error: Script failed with exit code ${exit_code}${NC}" >&2 + fi +} + +# Set trap to ensure cleanup on exit +trap cleanup EXIT ERR + +# Parse arguments +if [[ $# -lt 2 ]]; then + echo "ERROR: NET value and target path required" + echo "Usage: $0 [--exclude dir1 dir2 dir3 ...]" + echo "NET_value can be: gfs, gefs, sfs, gcafs, or all" + exit 1 +fi + +NET="$1" +shift + +# Define all possible NET values +ALL_NET_VALUES=("gefs" "gfs" "gcafs" "sfs") + +# If NET is "all", use all values; otherwise use the single value +if [[ "${NET}" == "all" ]]; then + NET_LIST=("${ALL_NET_VALUES[@]}") +else + NET_LIST=("${NET}") +fi + +# Initialize target path +TARGET_PATH="" +EXCLUDE_DIRS=() + +# Parse remaining arguments +while [[ $# -gt 0 ]]; do + case "$1" in + --exclude) + shift + # Collect all remaining arguments as exclude directories + while [[ $# -gt 0 ]]; do + EXCLUDE_DIRS+=("$1") + shift + done + ;; + *) + if [[ -z "${TARGET_PATH}" ]]; then + TARGET_PATH="$1" + else + echo "ERROR: Unexpected argument: $1" + exit 1 + fi + shift + ;; + esac +done + +# Check if target path exists +if [[ ! 
-e "${TARGET_PATH}" ]]; then + echo -e "${RED}ERROR: Target path ${TARGET_PATH} does not exist${NC}" >&2 + exit 1 +fi + +# Display processing header +echo -e "${CYAN}=========================================${NC}" +if [[ "${NET}" == "all" ]]; then + echo -e "${YELLOW}Processing: Converting NET-specific variables to ${RED}global${NC}${YELLOW}-workflow variables for: ${GREEN}${NET_LIST[*]}${NC}" +else + echo -e "${YELLOW}Processing: Converting ${GREEN}${NET}${NC}${YELLOW}-specific variables to ${RED}global${NC}${YELLOW}-workflow variables${NC}" +fi +echo -e "${BLUE}Target: ${TARGET_PATH}${NC}" +if [[ ${#EXCLUDE_DIRS[@]} -gt 0 ]]; then + echo -e "${BLUE}Excluding directories: ${EXCLUDE_DIRS[*]}${NC}" +fi +echo -e "${CYAN}=========================================${NC}" + +# Process each NET value +for current_net in "${NET_LIST[@]}"; do + echo "" + echo -e "${YELLOW}Converting for: ${GREEN}${current_net}${NC} ${YELLOW}→${NC} ${RED}global${NC}" + + # List of patterns to convert (reverse of convert_from_global_to_net.sh) + declare -A patterns=( + ["HOME${current_net}"]="HOMEglobal" + ["PARM${current_net}"]="PARMglobal" + ["USH${current_net}"]="USHglobal" + ["SCR${current_net}"]="SCRglobal" + ["EXEC${current_net}"]="EXECglobal" + ["FIX${current_net}"]="FIXglobal" + ) + + # If target is a single file, process it directly + if [[ -f "${TARGET_PATH}" ]]; then + file_modified=false + for pattern in "${!patterns[@]}"; do + replacement="${patterns[$pattern]}" + # Unconditional replacement - convert all occurrences + if grep -q "\\b${pattern}\\b" "${TARGET_PATH}" 2>/dev/null; then + if ! 
sed -i "s/\\b${pattern}\\b/${replacement}/g" "${TARGET_PATH}"; then + echo -e "${RED}ERROR: Failed to process ${TARGET_PATH}${NC}" >&2 + exit 1 + fi + file_modified=true + fi + done + if ${file_modified}; then + echo -e "${GREEN}✓ Processed 1 file for NET=${current_net}${NC}" + else + echo -e "${YELLOW}⚠ No patterns found for NET=${current_net}${NC}" + fi + else + # Build find command with exclusions for directory + find_cmd="find \"${TARGET_PATH}\"" + + # Add excluded directories to find command + for exclude_dir in "${EXCLUDE_DIRS[@]}"; do + # Remove leading ./ if present + exclude_dir="${exclude_dir#./}" + + # Check if path is absolute (starts with /) + if [[ "${exclude_dir:0:1}" == "/" ]]; then + # Use absolute path as-is + find_cmd+=" -path \"${exclude_dir}\" -prune -o" + else + # Treat as relative to TARGET_PATH + find_cmd+=" -path \"${TARGET_PATH}/${exclude_dir}\" -prune -o" + fi + done + + # Complete find command - commented out file type filtering to process all files + # Uncomment the line below and comment out the next line to filter by file type: + # find_cmd+=" -type f \\( -name \"*.sh\" -o -name \"*.bash\" -o -name \"*.py\" -o -name \"*.env\" -o -name \"*.config\" -o -name \"*.ecf\" -o -name \"J*\" -o -name \"ex*\" \\) -print" + find_cmd+=" -type f -print" + + # Execute find and get file list + if ! 
eval "${find_cmd}" > /tmp/convert_files_$$.txt; then + echo -e "${RED}ERROR: Failed to find files in ${TARGET_PATH}${NC}" >&2 + exit 1 + fi + + # Count files to process + file_count=$(wc -l < /tmp/convert_files_$$.txt) + + if [[ ${file_count} -eq 0 ]]; then + echo -e "${YELLOW}Warning: No files found to process${NC}" + continue + fi + + echo -e "${BLUE}Processing ${file_count} files...${NC}" + + # Perform the replacements + failed_files=0 + skipped_files=0 + while IFS= read -r file; do + if [[ -f "${file}" ]]; then + # Pre-check: Skip file if ANY global variable already exists + should_skip=false + + # Build list of global patterns to check for + declare -a global_patterns=( + "HOMEglobal" + "PARMglobal" + "USHglobal" + "SCRglobal" + "EXECglobal" + "FIXglobal" + ) + + # Check if any global pattern already exists in file + for global_pattern in "${global_patterns[@]}"; do + if grep -q "\\b${global_pattern}\\b" "${file}" 2>/dev/null; then + should_skip=true + break + fi + done + + if ${should_skip}; then + skipped_files=$((skipped_files + 1)) + continue + fi + + # Proceed with conversion only if no global vars found + file_modified=false + file_failed=false + for pattern in "${!patterns[@]}"; do + replacement="${patterns[$pattern]}" + if grep -q "\\b${pattern}\\b" "${file}" 2>/dev/null; then + if ! sed -i "s/\\b${pattern}\\b/${replacement}/g" "${file}"; then + echo -e "${RED}ERROR: sed failed on ${file}${NC}" >&2 + failed_files=$((failed_files + 1)) + file_failed=true + break + fi + file_modified=true + fi + done + + if ! ${file_modified} && ! 
${file_failed}; then + skipped_files=$((skipped_files + 1)) + fi + fi + done < /tmp/convert_files_$$.txt + + # Clean up + rm -f /tmp/convert_files_$$.txt + + if [[ ${failed_files} -gt 0 ]]; then + echo -e "${YELLOW}⚠ Processed $((file_count - failed_files - skipped_files))/${file_count} files (${failed_files} failed, ${skipped_files} skipped - already converted) for NET=${current_net}${NC}" + elif [[ ${skipped_files} -gt 0 ]]; then + echo -e "${GREEN}✓ Processed $((file_count - skipped_files))/${file_count} files (${skipped_files} already converted) for NET=${current_net}${NC}" + else + echo -e "${GREEN}✓ Processed ${file_count} files for NET=${current_net}${NC}" + fi + fi + + echo -e "${GREEN}Completed!${NC}" +done + +echo "" +echo -e "${CYAN}=========================================${NC}" +echo -e "${GREEN}All conversions completed successfully!${NC}" diff --git a/dev/ush/test.sh b/dev/ush/test.sh new file mode 100755 index 00000000000..96553f22230 --- /dev/null +++ b/dev/ush/test.sh @@ -0,0 +1,3 @@ +#!/usr/bin/env bash +DIR=$1 +find "$DIR" -type f -exec sed -i -e 's/HOMEgfs/HOMEglobal/g' -e 's/PARMgfs/PARMglobal/g' -e 's/USHgfs/USHglobal/g' -e 's/SCRgfs/SCRglobal/g' {} + \ No newline at end of file From 69414c8b7c710c41932fd15ffa43e0af42fd02e7 Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Thu, 5 Feb 2026 21:43:58 +0000 Subject: [PATCH 04/71] update HOMEglobal for new commit --- dev/workflow/setup_buildxml.py | 2 +- sorc/build_all.sh | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/dev/workflow/setup_buildxml.py b/dev/workflow/setup_buildxml.py index 3d24beb6f97..524ff67ce8f 100755 --- a/dev/workflow/setup_buildxml.py +++ b/dev/workflow/setup_buildxml.py @@ -64,7 +64,7 @@ def get_task_spec(task_name: str, task_spec: Dict, host_spec: Dict) -> Dict: task_dict.task_name = task_name task_dict.cycledef = "build" task_dict.maxtries = 1 - task_dict.command = f"cd {HOMEgfs}/sorc/; {task_spec.command} -j {task_spec.cores}" + task_dict.command = 
f"cd {HOMEglobal}/sorc/; {task_spec.command} -j {task_spec.cores}" if host_spec.debug: task_dict.command = f"{task_dict.command} -d" task_dict.job_name = task_name diff --git a/sorc/build_all.sh b/sorc/build_all.sh index a3eb6248a6a..7661d6be3d6 100755 --- a/sorc/build_all.sh +++ b/sorc/build_all.sh @@ -90,9 +90,9 @@ mkdir -p "${HOMEglobal}/sorc/logs" || exit 1 rm -f "${build_xml}" "${build_db}" "${build_lock_db}" echo "Generating build.xml for building global-workflow programs ..." -yaml="${HOMEgfs}/sorc/build_opts.yaml" +yaml="${HOMEglobal}/sorc/build_opts.yaml" # shellcheck disable=SC2086,SC2248 -"${HOMEgfs}/dev/workflow/setup_buildxml.py" --account "${HPC_ACCOUNT}" --yaml "${yaml}" --systems "${systems}" ${debug_opt:-} +"${HOMEglobal}/dev/workflow/setup_buildxml.py" --account "${HPC_ACCOUNT}" --yaml "${yaml}" --systems "${systems}" ${debug_opt:-} rc=$? if [[ "${rc}" -ne 0 ]]; then echo "FATAL ERROR: ${BASH_SOURCE[0]} failed to create 'build.xml' with error code ${rc}" From 07cf584be118cd5e50fe4734c70cd2646c383438 Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Tue, 10 Feb 2026 12:37:01 -0500 Subject: [PATCH 05/71] update scripts --- dev/ush/convert_from_global_to_net.sh | 58 ++++++++++---------------- dev/ush/convert_from_net_to_global.sh | 60 +++++++++++++++------------ 2 files changed, 55 insertions(+), 63 deletions(-) diff --git a/dev/ush/convert_from_global_to_net.sh b/dev/ush/convert_from_global_to_net.sh index 84bf947775a..1ac2d83faa2 100755 --- a/dev/ush/convert_from_global_to_net.sh +++ b/dev/ush/convert_from_global_to_net.sh @@ -151,44 +151,30 @@ if [[ -f "${TARGET_PATH}" ]]; then if ${file_modified}; then echo -e "${GREEN}✓ Processed 1 file for NET=${NET}${NC}" else - echo -e "${YELLOW}⚠ No patterns found for NET=${NET}${NC}" + echo -e "${YELLOW}No files to convert for NET=${current_net}${NC}" fi else # Build find command with exclusions for directory - find_cmd="find \"${TARGET_PATH}\"" - - # Add excluded directories to find command - for 
exclude_dir in "${EXCLUDE_DIRS[@]}"; do - # Remove leading ./ if present - exclude_dir="${exclude_dir#./}" - - # Check if path is absolute (starts with /) - if [[ "${exclude_dir:0:1}" == "/" ]]; then - # Use absolute path as-is - find_cmd+=" -path \"${exclude_dir}\" -prune -o" + # Build find command with excluded directories (properly handle subdirectories) + if [[ ${#EXCLUDE_DIRS[@]} -gt 0 ]]; then + exclude_args="" + for exclude_dir in "${EXCLUDE_DIRS[@]}"; do + exclude_args+="-name \"$(basename ${exclude_dir})\" -o " + done + exclude_args="${exclude_args% -o }" + eval "find \"${TARGET_PATH}\" -type d \( ${exclude_args} \) -prune -o -type f -print" > /tmp/convert_files_$$.txt else - # Treat as relative to TARGET_PATH - find_cmd+=" -path \"${TARGET_PATH}/${exclude_dir}\" -prune -o" + find "${TARGET_PATH}" -type f > /tmp/convert_files_$$.txt fi - done - - # Complete find command - find_cmd+=" -type f -print" - # Execute find and get file list - if ! eval "${find_cmd}" > /tmp/convert_files_$$.txt; then - echo -e "${RED}ERROR: Failed to find files in ${TARGET_PATH}${NC}" >&2 - exit 1 - fi + file_count=$(wc -l < /tmp/convert_files_$$.txt) + if [[ ${file_count} -eq 0 ]]; then + echo -e "${YELLOW}No files to convert for NET=${current_net}${NC}" + rm -f /tmp/convert_files_$$.txt + continue + fi # Count files to process - file_count=$(wc -l < /tmp/convert_files_$$.txt) - - if [[ ${file_count} -eq 0 ]]; then - echo -e "${YELLOW}Warning: No files found to process${NC}" - exit 0 - fi - echo -e "${BLUE}Processing ${file_count} files...${NC}" # Perform the replacements @@ -227,15 +213,15 @@ else # Clean up rm -f /tmp/convert_files_$$.txt - if [[ ${failed_files} -gt 0 ]]; then - echo -e "${YELLOW}⚠ Processed $((file_count - failed_files - skipped_files))/${file_count} files (${failed_files} failed, ${skipped_files} skipped - has NET vars) for NET=${NET}${NC}" - elif [[ ${skipped_files} -gt 0 ]]; then - echo -e "${GREEN}✓ Processed $((file_count - skipped_files))/${file_count} 
files (${skipped_files} skipped - has NET vars) for NET=${NET}${NC}" + files_converted=$((file_count - failed_files - skipped_files)) + if [[ ${files_converted} -eq 0 ]]; then + echo -e "${YELLOW}No files to convert for NET=${NET}${NC}" + elif [[ ${failed_files} -gt 0 ]]; then + echo -e "${YELLOW}⚠ Converted ${files_converted}/${file_count} files (${failed_files} failed) for NET=${NET}${NC}" else - echo -e "${GREEN}✓ Processed ${file_count} files for NET=${NET}${NC}" + echo -e "${GREEN}✓ Converted ${files_converted}/${file_count} files for NET=${NET}${NC}" fi fi - echo -e "${GREEN}Completed!${NC}" echo "" diff --git a/dev/ush/convert_from_net_to_global.sh b/dev/ush/convert_from_net_to_global.sh index 69bbc431e2b..34810678cd1 100755 --- a/dev/ush/convert_from_net_to_global.sh +++ b/dev/ush/convert_from_net_to_global.sh @@ -94,9 +94,9 @@ fi # Display processing header echo -e "${CYAN}=========================================${NC}" if [[ "${NET}" == "all" ]]; then - echo -e "${YELLOW}Processing: Converting NET-specific variables to ${RED}global${NC}${YELLOW}-workflow variables for: ${GREEN}${NET_LIST[*]}${NC}" + echo -e "${YELLOW}Processing: Converting NET-specific variables to ${GREEN}global${NC}${YELLOW}-workflow variables from: ${RED}${NET_LIST[*]}${NC}" else - echo -e "${YELLOW}Processing: Converting ${GREEN}${NET}${NC}${YELLOW}-specific variables to ${RED}global${NC}${YELLOW}-workflow variables${NC}" + echo -e "${YELLOW}Processing: Converting ${RED}${NET}${NC}${YELLOW}-specific variables to ${GREEN}global${NC}${YELLOW}-workflow variables${NC}" fi echo -e "${BLUE}Target: ${TARGET_PATH}${NC}" if [[ ${#EXCLUDE_DIRS[@]} -gt 0 ]]; then @@ -107,7 +107,7 @@ echo -e "${CYAN}=========================================${NC}" # Process each NET value for current_net in "${NET_LIST[@]}"; do echo "" - echo -e "${YELLOW}Converting for: ${GREEN}${current_net}${NC} ${YELLOW}→${NC} ${RED}global${NC}" + echo -e "${YELLOW}Converting for: ${RED}${current_net}${NC} ${YELLOW}→${NC} 
${GREEN}global${NC}" # List of patterns to convert (reverse of convert_from_global_to_net.sh) declare -A patterns=( @@ -136,32 +136,37 @@ for current_net in "${NET_LIST[@]}"; do if ${file_modified}; then echo -e "${GREEN}✓ Processed 1 file for NET=${current_net}${NC}" else - echo -e "${YELLOW}⚠ No patterns found for NET=${current_net}${NC}" + echo -e "${YELLOW}No files to convert for NET=${current_net}${NC}" fi else # Build find command with exclusions for directory find_cmd="find \"${TARGET_PATH}\"" - # Add excluded directories to find command - for exclude_dir in "${EXCLUDE_DIRS[@]}"; do - # Remove leading ./ if present - exclude_dir="${exclude_dir#./}" - - # Check if path is absolute (starts with /) - if [[ "${exclude_dir:0:1}" == "/" ]]; then - # Use absolute path as-is - find_cmd+=" -path \"${exclude_dir}\" -prune -o" - else - # Treat as relative to TARGET_PATH - find_cmd+=" -path \"${TARGET_PATH}/${exclude_dir}\" -prune -o" - fi - done + # Build exclusion list for directories + if [[ ${#EXCLUDE_DIRS[@]} -gt 0 ]]; then + find_cmd+=" -type d \\(" + first=true + for exclude_dir in "${EXCLUDE_DIRS[@]}"; do + # Remove leading ./ if present + exclude_dir="${exclude_dir#./}" + + # Extract just the directory name (last component of path) + dir_name=$(basename "${exclude_dir}") + + if ${first}; then + find_cmd+=" -name \"${dir_name}\"" + first=false + else + find_cmd+=" -o -name \"${dir_name}\"" + fi + done + find_cmd+=" \\) -prune -o" + fi - # Complete find command - commented out file type filtering to process all files - # Uncomment the line below and comment out the next line to filter by file type: - # find_cmd+=" -type f \\( -name \"*.sh\" -o -name \"*.bash\" -o -name \"*.py\" -o -name \"*.env\" -o -name \"*.config\" -o -name \"*.ecf\" -o -name \"J*\" -o -name \"ex*\" \\) -print" + # Complete find command to get files find_cmd+=" -type f -print" + # Execute find and get file list if ! 
eval "${find_cmd}" > /tmp/convert_files_$$.txt; then echo -e "${RED}ERROR: Failed to find files in ${TARGET_PATH}${NC}" >&2 @@ -172,7 +177,7 @@ for current_net in "${NET_LIST[@]}"; do file_count=$(wc -l < /tmp/convert_files_$$.txt) if [[ ${file_count} -eq 0 ]]; then - echo -e "${YELLOW}Warning: No files found to process${NC}" + echo -e "${YELLOW}No files to convert${NC}" continue fi @@ -234,12 +239,13 @@ for current_net in "${NET_LIST[@]}"; do # Clean up rm -f /tmp/convert_files_$$.txt - if [[ ${failed_files} -gt 0 ]]; then - echo -e "${YELLOW}⚠ Processed $((file_count - failed_files - skipped_files))/${file_count} files (${failed_files} failed, ${skipped_files} skipped - already converted) for NET=${current_net}${NC}" - elif [[ ${skipped_files} -gt 0 ]]; then - echo -e "${GREEN}✓ Processed $((file_count - skipped_files))/${file_count} files (${skipped_files} already converted) for NET=${current_net}${NC}" + files_converted=$((file_count - failed_files - skipped_files)) + if [[ ${files_converted} -eq 0 ]]; then + echo -e "${YELLOW}No files to convert for NET=${current_net}${NC}" + elif [[ ${failed_files} -gt 0 ]]; then + echo -e "${YELLOW}⚠ Converted ${files_converted}/${file_count} files (${failed_files} failed) for NET=${current_net}${NC}" else - echo -e "${GREEN}✓ Processed ${file_count} files for NET=${current_net}${NC}" + echo -e "${GREEN}✓ Converted ${files_converted}/${file_count} files for NET=${current_net}${NC}" fi fi From 079c734ed3d998c81872547a3b47f8bb1592287f Mon Sep 17 00:00:00 2001 From: AntonMFernando-NOAA <167725623+AntonMFernando-NOAA@users.noreply.github.com> Date: Tue, 10 Feb 2026 14:34:14 -0500 Subject: [PATCH 06/71] Update dev/ush/convert_from_global_to_net.sh Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- dev/ush/convert_from_global_to_net.sh | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/dev/ush/convert_from_global_to_net.sh 
b/dev/ush/convert_from_global_to_net.sh index 1ac2d83faa2..e5c26379ddb 100755 --- a/dev/ush/convert_from_global_to_net.sh +++ b/dev/ush/convert_from_global_to_net.sh @@ -24,13 +24,13 @@ NC='\033[0m' # No Color # Cleanup function for temporary files cleanup() { - local exit_code=$? - if [[ -f "/tmp/convert_files_$$.txt" ]]; then - rm -f "/tmp/convert_files_$$.txt" - fi - if [[ ${exit_code} -ne 0 ]]; then - echo -e "${RED}Error: Script failed with exit code ${exit_code}${NC}" >&2 - fi + local exit_code=$? + if [[ -f "/tmp/convert_files_$$.txt" ]]; then + rm -f "/tmp/convert_files_$$.txt" + fi + if [[ ${exit_code} -ne 0 ]]; then + echo -e "${RED}Error: Script failed with exit code ${exit_code}${NC}" >&2 + fi } # Set trap to ensure cleanup on exit From a97f2190532370450509c2cab92680ae9fa7d6d1 Mon Sep 17 00:00:00 2001 From: AntonMFernando-NOAA <167725623+AntonMFernando-NOAA@users.noreply.github.com> Date: Tue, 10 Feb 2026 14:34:23 -0500 Subject: [PATCH 07/71] Update dev/ush/convert_from_global_to_net.sh Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- dev/ush/convert_from_global_to_net.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev/ush/convert_from_global_to_net.sh b/dev/ush/convert_from_global_to_net.sh index e5c26379ddb..d258db6d6ef 100755 --- a/dev/ush/convert_from_global_to_net.sh +++ b/dev/ush/convert_from_global_to_net.sh @@ -131,7 +131,7 @@ declare -A patterns=( # If target is a single file, process it directly if [[ -f "${TARGET_PATH}" ]]; then # Pre-check: Skip if ANY NET-specific variable already exists - if grep -qE '\b(HOME|PARM|USH|SCR|EXEC|FIX)(gfs|gefs|sfs|gcafs)\b' "${TARGET_PATH}" 2>/dev/null; then + if grep -qE '\b(HOME|PARM|USH|SCR|EXEC|FIX)(gfs|gefs|sfs|gcafs)\b' "${TARGET_PATH}" 2> /dev/null; then echo -e "${YELLOW}⚠ File already has NET-specific variables - skipped${NC}" exit 0 fi From 88839daac144870557582f049953aac832a62fa3 Mon Sep 17 00:00:00 2001 From: AntonMFernando-NOAA 
<167725623+AntonMFernando-NOAA@users.noreply.github.com> Date: Tue, 10 Feb 2026 14:34:32 -0500 Subject: [PATCH 08/71] Update dev/ush/convert_from_global_to_net.sh Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- dev/ush/convert_from_global_to_net.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev/ush/convert_from_global_to_net.sh b/dev/ush/convert_from_global_to_net.sh index d258db6d6ef..555292ef741 100755 --- a/dev/ush/convert_from_global_to_net.sh +++ b/dev/ush/convert_from_global_to_net.sh @@ -139,7 +139,7 @@ if [[ -f "${TARGET_PATH}" ]]; then file_modified=false for pattern in "${!patterns[@]}"; do replacement="${patterns[$pattern]}" - if grep -q "\\b${pattern}\\b" "${TARGET_PATH}" 2>/dev/null; then + if grep -q "\\b${pattern}\\b" "${TARGET_PATH}" 2> /dev/null; then if ! sed -i "s/\\b${pattern}\\b/${replacement}/g" "${TARGET_PATH}"; then echo -e "${RED}ERROR: Failed to process ${TARGET_PATH}${NC}" >&2 exit 1 From 1c021735df2b8850b722d6ac1fcd7e1efe04c871 Mon Sep 17 00:00:00 2001 From: AntonMFernando-NOAA <167725623+AntonMFernando-NOAA@users.noreply.github.com> Date: Tue, 10 Feb 2026 14:34:45 -0500 Subject: [PATCH 09/71] Update dev/ush/convert_from_global_to_net.sh Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- dev/ush/convert_from_global_to_net.sh | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/dev/ush/convert_from_global_to_net.sh b/dev/ush/convert_from_global_to_net.sh index 555292ef741..d65dafc1926 100755 --- a/dev/ush/convert_from_global_to_net.sh +++ b/dev/ush/convert_from_global_to_net.sh @@ -155,17 +155,17 @@ if [[ -f "${TARGET_PATH}" ]]; then fi else # Build find command with exclusions for directory - # Build find command with excluded directories (properly handle subdirectories) - if [[ ${#EXCLUDE_DIRS[@]} -gt 0 ]]; then - exclude_args="" - for exclude_dir in "${EXCLUDE_DIRS[@]}"; do - 
exclude_args+="-name \"$(basename ${exclude_dir})\" -o " - done - exclude_args="${exclude_args% -o }" - eval "find \"${TARGET_PATH}\" -type d \( ${exclude_args} \) -prune -o -type f -print" > /tmp/convert_files_$$.txt - else - find "${TARGET_PATH}" -type f > /tmp/convert_files_$$.txt - fi + # Build find command with excluded directories (properly handle subdirectories) + if [[ ${#EXCLUDE_DIRS[@]} -gt 0 ]]; then + exclude_args="" + for exclude_dir in "${EXCLUDE_DIRS[@]}"; do + exclude_args+="-name \"$(basename ${exclude_dir})\" -o " + done + exclude_args="${exclude_args% -o }" + eval "find \"${TARGET_PATH}\" -type d \( ${exclude_args} \) -prune -o -type f -print" > /tmp/convert_files_$$.txt + else + find "${TARGET_PATH}" -type f > /tmp/convert_files_$$.txt + fi file_count=$(wc -l < /tmp/convert_files_$$.txt) if [[ ${file_count} -eq 0 ]]; then From 61e5e903f903303f56aea16325b9c240ab987c0b Mon Sep 17 00:00:00 2001 From: AntonMFernando-NOAA <167725623+AntonMFernando-NOAA@users.noreply.github.com> Date: Tue, 10 Feb 2026 14:34:56 -0500 Subject: [PATCH 10/71] Update dev/ush/convert_from_global_to_net.sh Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- dev/ush/convert_from_global_to_net.sh | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/dev/ush/convert_from_global_to_net.sh b/dev/ush/convert_from_global_to_net.sh index d65dafc1926..51162fda9b2 100755 --- a/dev/ush/convert_from_global_to_net.sh +++ b/dev/ush/convert_from_global_to_net.sh @@ -167,12 +167,12 @@ else find "${TARGET_PATH}" -type f > /tmp/convert_files_$$.txt fi - file_count=$(wc -l < /tmp/convert_files_$$.txt) - if [[ ${file_count} -eq 0 ]]; then - echo -e "${YELLOW}No files to convert for NET=${current_net}${NC}" - rm -f /tmp/convert_files_$$.txt - continue - fi + file_count=$(wc -l < /tmp/convert_files_$$.txt) + if [[ ${file_count} -eq 0 ]]; then + echo -e "${YELLOW}No files to convert for NET=${current_net}${NC}" + rm -f 
/tmp/convert_files_$$.txt + continue + fi # Count files to process echo -e "${BLUE}Processing ${file_count} files...${NC}" From 3e156213b5ce599d9531f78bccb9251db06b7afa Mon Sep 17 00:00:00 2001 From: AntonMFernando-NOAA <167725623+AntonMFernando-NOAA@users.noreply.github.com> Date: Tue, 10 Feb 2026 14:35:06 -0500 Subject: [PATCH 11/71] Update dev/ush/convert_from_global_to_net.sh Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- dev/ush/convert_from_global_to_net.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev/ush/convert_from_global_to_net.sh b/dev/ush/convert_from_global_to_net.sh index 51162fda9b2..6027346c9d0 100755 --- a/dev/ush/convert_from_global_to_net.sh +++ b/dev/ush/convert_from_global_to_net.sh @@ -183,7 +183,7 @@ else while IFS= read -r file; do if [[ -f "${file}" ]]; then # Pre-check: Skip file if it contains ANY NET-specific variable (gfs, gefs, sfs, gcafs) - if grep -qE '\b(HOME|PARM|USH|SCR|EXEC|FIX)(gfs|gefs|sfs|gcafs)\b' "${file}" 2>/dev/null; then + if grep -qE '\b(HOME|PARM|USH|SCR|EXEC|FIX)(gfs|gefs|sfs|gcafs)\b' "${file}" 2> /dev/null; then skipped_files=$((skipped_files + 1)) continue fi From b615d71eb7dfa54be9071f4d87f0c9fe0bbd7e64 Mon Sep 17 00:00:00 2001 From: AntonMFernando-NOAA <167725623+AntonMFernando-NOAA@users.noreply.github.com> Date: Tue, 10 Feb 2026 14:35:15 -0500 Subject: [PATCH 12/71] Update dev/ush/convert_from_global_to_net.sh Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- dev/ush/convert_from_global_to_net.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev/ush/convert_from_global_to_net.sh b/dev/ush/convert_from_global_to_net.sh index 6027346c9d0..bce35c518cf 100755 --- a/dev/ush/convert_from_global_to_net.sh +++ b/dev/ush/convert_from_global_to_net.sh @@ -193,7 +193,7 @@ else file_failed=false for pattern in "${!patterns[@]}"; do replacement="${patterns[$pattern]}" - if grep -q 
"\\b${pattern}\\b" "${file}" 2>/dev/null; then + if grep -q "\\b${pattern}\\b" "${file}" 2> /dev/null; then if ! sed -i "s/\\b${pattern}\\b/${replacement}/g" "${file}"; then echo -e "${RED}ERROR: sed failed on ${file}${NC}" >&2 failed_files=$((failed_files + 1)) From 731c51290364e838463ba38dbcf8ec98d629befb Mon Sep 17 00:00:00 2001 From: AntonMFernando-NOAA <167725623+AntonMFernando-NOAA@users.noreply.github.com> Date: Tue, 10 Feb 2026 14:35:24 -0500 Subject: [PATCH 13/71] Update dev/ush/convert_from_net_to_global.sh Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- dev/ush/convert_from_net_to_global.sh | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/dev/ush/convert_from_net_to_global.sh b/dev/ush/convert_from_net_to_global.sh index 34810678cd1..796ad9b7100 100755 --- a/dev/ush/convert_from_net_to_global.sh +++ b/dev/ush/convert_from_net_to_global.sh @@ -25,13 +25,13 @@ NC='\033[0m' # No Color # Cleanup function for temporary files cleanup() { - local exit_code=$? - if [[ -f "/tmp/convert_files_$$.txt" ]]; then - rm -f "/tmp/convert_files_$$.txt" - fi - if [[ ${exit_code} -ne 0 ]]; then - echo -e "${RED}Error: Script failed with exit code ${exit_code}${NC}" >&2 - fi + local exit_code=$? 
+ if [[ -f "/tmp/convert_files_$$.txt" ]]; then + rm -f "/tmp/convert_files_$$.txt" + fi + if [[ ${exit_code} -ne 0 ]]; then + echo -e "${RED}Error: Script failed with exit code ${exit_code}${NC}" >&2 + fi } # Set trap to ensure cleanup on exit From 0d3a19dfd26a5c7f780411dc056c0141327b7f85 Mon Sep 17 00:00:00 2001 From: AntonMFernando-NOAA <167725623+AntonMFernando-NOAA@users.noreply.github.com> Date: Tue, 10 Feb 2026 14:35:32 -0500 Subject: [PATCH 14/71] Update dev/ush/convert_from_net_to_global.sh Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- dev/ush/convert_from_net_to_global.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/dev/ush/convert_from_net_to_global.sh b/dev/ush/convert_from_net_to_global.sh index 796ad9b7100..5090c8b3d4f 100755 --- a/dev/ush/convert_from_net_to_global.sh +++ b/dev/ush/convert_from_net_to_global.sh @@ -166,7 +166,6 @@ for current_net in "${NET_LIST[@]}"; do # Complete find command to get files find_cmd+=" -type f -print" - # Execute find and get file list if ! 
eval "${find_cmd}" > /tmp/convert_files_$$.txt; then echo -e "${RED}ERROR: Failed to find files in ${TARGET_PATH}${NC}" >&2 From 0a45bdfb55f35f99bb887b279f49062ad90a3b6b Mon Sep 17 00:00:00 2001 From: AntonMFernando-NOAA <167725623+AntonMFernando-NOAA@users.noreply.github.com> Date: Tue, 10 Feb 2026 14:35:41 -0500 Subject: [PATCH 15/71] Update dev/ush/convert_from_net_to_global.sh Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- dev/ush/convert_from_net_to_global.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev/ush/convert_from_net_to_global.sh b/dev/ush/convert_from_net_to_global.sh index 5090c8b3d4f..d79f38fba07 100755 --- a/dev/ush/convert_from_net_to_global.sh +++ b/dev/ush/convert_from_net_to_global.sh @@ -125,7 +125,7 @@ for current_net in "${NET_LIST[@]}"; do for pattern in "${!patterns[@]}"; do replacement="${patterns[$pattern]}" # Unconditional replacement - convert all occurrences - if grep -q "\\b${pattern}\\b" "${TARGET_PATH}" 2>/dev/null; then + if grep -q "\\b${pattern}\\b" "${TARGET_PATH}" 2> /dev/null; then if ! 
sed -i "s/\\b${pattern}\\b/${replacement}/g" "${TARGET_PATH}"; then echo -e "${RED}ERROR: Failed to process ${TARGET_PATH}${NC}" >&2 exit 1 From 037ec1ca36593cedb31a888df7dc11d355d0b681 Mon Sep 17 00:00:00 2001 From: AntonMFernando-NOAA <167725623+AntonMFernando-NOAA@users.noreply.github.com> Date: Tue, 10 Feb 2026 14:36:04 -0500 Subject: [PATCH 16/71] Update dev/ush/convert_from_net_to_global.sh Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- dev/ush/convert_from_net_to_global.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev/ush/convert_from_net_to_global.sh b/dev/ush/convert_from_net_to_global.sh index d79f38fba07..e70a35c480b 100755 --- a/dev/ush/convert_from_net_to_global.sh +++ b/dev/ush/convert_from_net_to_global.sh @@ -202,7 +202,7 @@ for current_net in "${NET_LIST[@]}"; do # Check if any global pattern already exists in file for global_pattern in "${global_patterns[@]}"; do - if grep -q "\\b${global_pattern}\\b" "${file}" 2>/dev/null; then + if grep -q "\\b${global_pattern}\\b" "${file}" 2> /dev/null; then should_skip=true break fi From bd64d0ada548d0df72b6f3f475fce63157b236ec Mon Sep 17 00:00:00 2001 From: AntonMFernando-NOAA <167725623+AntonMFernando-NOAA@users.noreply.github.com> Date: Tue, 10 Feb 2026 14:36:12 -0500 Subject: [PATCH 17/71] Update dev/ush/convert_from_net_to_global.sh Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- dev/ush/convert_from_net_to_global.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev/ush/convert_from_net_to_global.sh b/dev/ush/convert_from_net_to_global.sh index e70a35c480b..02f9d81679c 100755 --- a/dev/ush/convert_from_net_to_global.sh +++ b/dev/ush/convert_from_net_to_global.sh @@ -218,7 +218,7 @@ for current_net in "${NET_LIST[@]}"; do file_failed=false for pattern in "${!patterns[@]}"; do replacement="${patterns[$pattern]}" - if grep -q "\\b${pattern}\\b" "${file}" 2>/dev/null; 
then + if grep -q "\\b${pattern}\\b" "${file}" 2> /dev/null; then if ! sed -i "s/\\b${pattern}\\b/${replacement}/g" "${file}"; then echo -e "${RED}ERROR: sed failed on ${file}${NC}" >&2 failed_files=$((failed_files + 1)) From c26f8f46fdcc086fe92d7c6f5c2efa5c344acf60 Mon Sep 17 00:00:00 2001 From: AntonMFernando-NOAA <167725623+AntonMFernando-NOAA@users.noreply.github.com> Date: Tue, 10 Feb 2026 14:37:59 -0500 Subject: [PATCH 18/71] Update dev/ush/convert_from_global_to_net.sh Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- dev/ush/convert_from_global_to_net.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev/ush/convert_from_global_to_net.sh b/dev/ush/convert_from_global_to_net.sh index bce35c518cf..90e5f710344 100755 --- a/dev/ush/convert_from_global_to_net.sh +++ b/dev/ush/convert_from_global_to_net.sh @@ -159,7 +159,7 @@ else if [[ ${#EXCLUDE_DIRS[@]} -gt 0 ]]; then exclude_args="" for exclude_dir in "${EXCLUDE_DIRS[@]}"; do - exclude_args+="-name \"$(basename ${exclude_dir})\" -o " + exclude_args+="-name \"$(basename "${exclude_dir}")\" -o " done exclude_args="${exclude_args% -o }" eval "find \"${TARGET_PATH}\" -type d \( ${exclude_args} \) -prune -o -type f -print" > /tmp/convert_files_$$.txt From 4d492c238b274bf7084fc9a424d2c2422d3d7295 Mon Sep 17 00:00:00 2001 From: AntonMFernando-NOAA <167725623+AntonMFernando-NOAA@users.noreply.github.com> Date: Tue, 10 Feb 2026 14:38:19 -0500 Subject: [PATCH 19/71] Update dev/ush/convert_from_global_to_net.sh Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- dev/ush/convert_from_global_to_net.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev/ush/convert_from_global_to_net.sh b/dev/ush/convert_from_global_to_net.sh index 90e5f710344..3c9ca727db0 100755 --- a/dev/ush/convert_from_global_to_net.sh +++ b/dev/ush/convert_from_global_to_net.sh @@ -192,7 +192,7 @@ else file_modified=false 
file_failed=false for pattern in "${!patterns[@]}"; do - replacement="${patterns[$pattern]}" + replacement="${patterns[${pattern}]}" if grep -q "\\b${pattern}\\b" "${file}" 2> /dev/null; then if ! sed -i "s/\\b${pattern}\\b/${replacement}/g" "${file}"; then echo -e "${RED}ERROR: sed failed on ${file}${NC}" >&2 From 08cc6694d28faa22577e78f7bbb90daad0159ce0 Mon Sep 17 00:00:00 2001 From: AntonMFernando-NOAA <167725623+AntonMFernando-NOAA@users.noreply.github.com> Date: Tue, 10 Feb 2026 14:38:28 -0500 Subject: [PATCH 20/71] Update dev/ush/convert_from_global_to_net.sh Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- dev/ush/convert_from_global_to_net.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev/ush/convert_from_global_to_net.sh b/dev/ush/convert_from_global_to_net.sh index 3c9ca727db0..1efefd968c0 100755 --- a/dev/ush/convert_from_global_to_net.sh +++ b/dev/ush/convert_from_global_to_net.sh @@ -138,7 +138,7 @@ if [[ -f "${TARGET_PATH}" ]]; then file_modified=false for pattern in "${!patterns[@]}"; do - replacement="${patterns[$pattern]}" + replacement="${patterns[${pattern}]}" if grep -q "\\b${pattern}\\b" "${TARGET_PATH}" 2> /dev/null; then if ! 
sed -i "s/\\b${pattern}\\b/${replacement}/g" "${TARGET_PATH}"; then echo -e "${RED}ERROR: Failed to process ${TARGET_PATH}${NC}" >&2 From 3ca9cd82cd55d6d60919eb7c5a2845c1459cb4fe Mon Sep 17 00:00:00 2001 From: AntonMFernando-NOAA <167725623+AntonMFernando-NOAA@users.noreply.github.com> Date: Tue, 10 Feb 2026 14:41:44 -0500 Subject: [PATCH 21/71] Update dev/ush/convert_from_net_to_global.sh Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- dev/ush/convert_from_net_to_global.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev/ush/convert_from_net_to_global.sh b/dev/ush/convert_from_net_to_global.sh index 02f9d81679c..5f4f6a7fdd7 100755 --- a/dev/ush/convert_from_net_to_global.sh +++ b/dev/ush/convert_from_net_to_global.sh @@ -217,7 +217,7 @@ for current_net in "${NET_LIST[@]}"; do file_modified=false file_failed=false for pattern in "${!patterns[@]}"; do - replacement="${patterns[$pattern]}" + replacement="${patterns[${pattern}]}" if grep -q "\\b${pattern}\\b" "${file}" 2> /dev/null; then if ! 
sed -i "s/\\b${pattern}\\b/${replacement}/g" "${file}"; then echo -e "${RED}ERROR: sed failed on ${file}${NC}" >&2 From a433ea925d7ad6800e0006efbe954e5b612fa250 Mon Sep 17 00:00:00 2001 From: AntonMFernando-NOAA <167725623+AntonMFernando-NOAA@users.noreply.github.com> Date: Tue, 10 Feb 2026 14:41:51 -0500 Subject: [PATCH 22/71] Update dev/ush/convert_from_net_to_global.sh Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- dev/ush/convert_from_net_to_global.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev/ush/convert_from_net_to_global.sh b/dev/ush/convert_from_net_to_global.sh index 5f4f6a7fdd7..7c2a66f1079 100755 --- a/dev/ush/convert_from_net_to_global.sh +++ b/dev/ush/convert_from_net_to_global.sh @@ -123,7 +123,7 @@ for current_net in "${NET_LIST[@]}"; do if [[ -f "${TARGET_PATH}" ]]; then file_modified=false for pattern in "${!patterns[@]}"; do - replacement="${patterns[$pattern]}" + replacement="${patterns[${pattern}]}" # Unconditional replacement - convert all occurrences if grep -q "\\b${pattern}\\b" "${TARGET_PATH}" 2> /dev/null; then if ! 
sed -i "s/\\b${pattern}\\b/${replacement}/g" "${TARGET_PATH}"; then From 94a8838cdb06492ad86557241ca66b1dcff141f9 Mon Sep 17 00:00:00 2001 From: AntonMFernando-NOAA <167725623+AntonMFernando-NOAA@users.noreply.github.com> Date: Tue, 10 Feb 2026 14:59:58 -0500 Subject: [PATCH 23/71] Update dev/ush/convert_from_net.sh Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- dev/ush/convert_from_net.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev/ush/convert_from_net.sh b/dev/ush/convert_from_net.sh index 30e38253a83..ae84caa9fd5 100755 --- a/dev/ush/convert_from_net.sh +++ b/dev/ush/convert_from_net.sh @@ -46,7 +46,7 @@ file_list=$(find "${TARGET_DIR}" \ for file in ${file_list}; do if [[ -f "${file}" ]]; then for pattern in "${!patterns[@]}"; do - replacement="${patterns[$pattern]}" + replacement="${patterns[${pattern}]}" sed -i "s/\b${pattern}\b/${replacement}/g" "${file}" done echo "Processed: ${file}" From 4fe0b81784eebcfa642c7e68d15825535bf21f0e Mon Sep 17 00:00:00 2001 From: AntonMFernando-NOAA <167725623+AntonMFernando-NOAA@users.noreply.github.com> Date: Tue, 10 Feb 2026 15:12:11 -0500 Subject: [PATCH 24/71] Update dev/ush/test.sh Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- dev/ush/test.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev/ush/test.sh b/dev/ush/test.sh index 96553f22230..3b2875202de 100755 --- a/dev/ush/test.sh +++ b/dev/ush/test.sh @@ -1,3 +1,3 @@ #!/usr/bin/env bash DIR=$1 -find "$DIR" -type f -exec sed -i -e 's/HOMEgfs/HOMEglobal/g' -e 's/PARMgfs/PARMglobal/g' -e 's/USHgfs/USHglobal/g' -e 's/SCRgfs/SCRglobal/g' {} + \ No newline at end of file +find "${DIR}" -type f -exec sed -i -e 's/HOMEgfs/HOMEglobal/g' -e 's/PARMgfs/PARMglobal/g' -e 's/USHgfs/USHglobal/g' -e 's/SCRgfs/SCRglobal/g' {} + From 360e4033af299cc48afbaa691e88784e3748b7dc Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Tue, 10 Feb 2026 
20:17:33 +0000 Subject: [PATCH 25/71] update scripts --- dev/ush/README_NET_CONVERSION.md | 132 -------------------------- dev/ush/convert_from_global_to_net.sh | 2 +- dev/ush/convert_from_net.sh | 57 ----------- dev/ush/convert_to_net.sh | 62 ------------ dev/ush/test.sh | 3 - 5 files changed, 1 insertion(+), 255 deletions(-) delete mode 100644 dev/ush/README_NET_CONVERSION.md delete mode 100755 dev/ush/convert_from_net.sh delete mode 100755 dev/ush/convert_to_net.sh delete mode 100755 dev/ush/test.sh diff --git a/dev/ush/README_NET_CONVERSION.md b/dev/ush/README_NET_CONVERSION.md deleted file mode 100644 index 05a9dc5351a..00000000000 --- a/dev/ush/README_NET_CONVERSION.md +++ /dev/null @@ -1,132 +0,0 @@ -# NET Variable Conversion Scripts - -## Overview - -These scripts facilitate the conversion between development and operational variable naming conventions for NOAA's global-workflow system, in accordance with EE2 standards. - -## Variable Mapping - -Development (global-workflow) → Operational (NCO): -- `HOMEglobal` → `HOME${NET}` (e.g., `HOMEglobal`) -- `PARMglobal` → `PARM${NET}` (e.g., `PARMglobal`) -- `USHglobal` → `USH${NET}` (e.g., `USHglobal`) -- `SCRglobal` → `SCR${NET}` (e.g., `SCRglobal`) -- `EXECglobal` → `EXEC${NET}` (e.g., `EXECglobal`) -- `FIXglobal` → `FIX${NET}` (e.g., `FIXglobal`) - -## Scripts - -### convert_to_net.sh - -Converts development variables (global) to NET-specific variables for operational handoff. - -**Usage:** -```bash -./convert_to_net.sh [target_directory] -``` - -**Example:** -```bash -# Convert for GFS operational deployment -./convert_to_net.sh gfs /path/to/deployment - -# Convert for GEFS -./convert_to_net.sh gefs /path/to/deployment -``` - -### convert_from_net.sh - -Converts NET-specific variables back to development variables (global). 
- -**Usage:** -```bash -./convert_from_net.sh [target_directory] -``` - -**Example:** -```bash -# Restore development variables -./convert_from_net.sh gfs /path/to/code -``` - -## Scope - -The scripts process the following file types: -- Shell scripts (*.sh, *.bash) -- Python files (*.py) -- Environment files (*.env) -- Configuration files (*.config) -- ecFlow files (*.ecf) -- Job scripts (J*) -- Execution scripts (ex*) - -The scripts **exclude** these directories: -- .git (version control) -- sorc/ (source code - uses build-time substitution) -- exec/ (compiled executables) -- lib/ (libraries) -- fix/ (static data files) - -## Verification - -After conversion, always verify: - -1. **Check file changes:** - ```bash - git diff | head -100 - ``` - -2. **Syntax validation:** - ```bash - bash -n dev/jobs/JGLOBAL_FORECAST - bash -n scripts/exglobal_forecast.sh - ``` - -3. **Count changes:** - ```bash - git diff --shortstat - ``` - -## Workflow Integration - -### Pre-Operational Handoff (Development → NCO) - -1. Complete all development and testing in global-workflow -2. Create a deployment branch -3. Run convert_to_net.sh on deployment directory -4. Verify all syntax checks pass -5. Test the converted package -6. Hand off to NCO SPAs for installation - -### Post-Operational Update (NCO → Development) - -1. Receive operational code from NCO -2. Run convert_from_net.sh to restore development variables -3. 
Integrate changes back into global-workflow repository - -## CI/CD Integration - -These scripts support continuous integration by: -- Using word boundary matching (`\b`) to prevent partial replacements -- Excluding compiled/static content -- Providing verification guidance -- Supporting automated testing workflows - -## EE2 Compliance - -This approach aligns with EE2 standards by: -- Separating development from operational naming -- Providing clear conversion paths -- Maintaining consistency across the codebase -- Supporting multiple NET values (gfs, gefs, sfs, gcafs) - -## Notes - -- The conversion is **selective**, not a blanket search/replace -- Word boundaries ensure `globalworkflow` is NOT changed to `gfsworkflow` -- Scripts are idempotent - running twice produces the same result -- Always create backups before conversion - -## Support - -For issues or questions, contact the global-workflow development team. diff --git a/dev/ush/convert_from_global_to_net.sh b/dev/ush/convert_from_global_to_net.sh index 1ac2d83faa2..7dbd296459d 100755 --- a/dev/ush/convert_from_global_to_net.sh +++ b/dev/ush/convert_from_global_to_net.sh @@ -185,7 +185,7 @@ else # Pre-check: Skip file if it contains ANY NET-specific variable (gfs, gefs, sfs, gcafs) if grep -qE '\b(HOME|PARM|USH|SCR|EXEC|FIX)(gfs|gefs|sfs|gcafs)\b' "${file}" 2>/dev/null; then skipped_files=$((skipped_files + 1)) - continue + return fi # Proceed with conversion only if no NET-specific vars found diff --git a/dev/ush/convert_from_net.sh b/dev/ush/convert_from_net.sh deleted file mode 100755 index 30e38253a83..00000000000 --- a/dev/ush/convert_from_net.sh +++ /dev/null @@ -1,57 +0,0 @@ -#!/bin/bash -# convert_from_net.sh -# Script to convert HOME${NET}, PARM${NET}, etc. back to HOMEglobal, PARMglobal, etc. 
-# for development -# -# Usage: convert_from_net.sh -# -# Example: convert_from_net.sh gfs /path/to/code -# -# This script performs selective search/replace to restore development variables - -set -eux - -if [[ $# -lt 1 ]]; then - echo "ERROR: NET value required" - echo "Usage: $0 [target_directory]" - exit 1 -fi - -NET="$1" -TARGET_DIR="${2:-.}" - -if [[ ! -d "${TARGET_DIR}" ]]; then - echo "ERROR: Target directory ${TARGET_DIR} does not exist" - exit 1 -fi - -echo "Converting ${NET}-specific variables back to global-workflow standard in ${TARGET_DIR}" - -# List of patterns to convert (reverse of convert_to_net.sh) -declare -A patterns=( - ["HOME${NET}"]="HOMEglobal" - ["PARM${NET}"]="PARMglobal" - ["USH${NET}"]="USHglobal" - ["SCR${NET}"]="SCRglobal" - ["EXEC${NET}"]="EXECglobal" - ["FIX${NET}"]="FIXglobal" -) - -# Find all relevant files -file_list=$(find "${TARGET_DIR}" \ - -type d \( -name .git -o -name sorc -o -name exec -o -name lib -o -name fix \) -prune -o \ - -type f \( -name "*.sh" -o -name "*.bash" -o -name "*.py" -o -name "*.env" -o -name "*.config" -o -name "*.ecf" -o -name "J*" -o -name "ex*" \) -print) - -# Perform the replacements -for file in ${file_list}; do - if [[ -f "${file}" ]]; then - for pattern in "${!patterns[@]}"; do - replacement="${patterns[$pattern]}" - sed -i "s/\b${pattern}\b/${replacement}/g" "${file}" - done - echo "Processed: ${file}" - fi -done - -echo "Conversion complete!" -echo "Restored global-workflow development variables" diff --git a/dev/ush/convert_to_net.sh b/dev/ush/convert_to_net.sh deleted file mode 100755 index 2ad0d1f434e..00000000000 --- a/dev/ush/convert_to_net.sh +++ /dev/null @@ -1,62 +0,0 @@ -#!/bin/bash -# convert_to_net.sh -# Script to convert HOMEglobal, PARMglobal, etc. to HOME${NET}, PARM${NET}, etc. 
-# for NCO operational handoff -# -# Usage: convert_to_net.sh [target_directory] -# -# Example: convert_to_net.sh gfs /path/to/deployment -# -# This script performs selective search/replace of standard EE2 variables: -# HOMEglobal -> HOME${NET} -# PARMglobal -> PARM${NET} -# USHglobal -> USH${NET} -# SCRglobal -> SCR${NET} -# EXECglobal -> EXEC${NET} -# FIXglobal -> FIX${NET} - -set -eux - -if [[ $# -lt 1 ]]; then - echo "ERROR: NET value required" - echo "Usage: $0 [target_directory]" - exit 1 -fi - -NET="$1" -TARGET_DIR="${2:-.}" - -if [[ ! -d "${TARGET_DIR}" ]]; then - echo "ERROR: Target directory ${TARGET_DIR} does not exist" - exit 1 -fi - -echo "Converting global-workflow standard variables to ${NET} in ${TARGET_DIR}" - -# Find all shell scripts, Python files, and configuration files -# Exclude .git, sorc/*, exec/*, lib/*, fix/* to avoid modifying compiled/static content -file_list=$(find "${TARGET_DIR}" \ - -type d \( -name .git -o -name sorc -o -name exec -o -name lib -o -name fix \) -prune -o \ - -type f \( -name "*.sh" -o -name "*.bash" -o -name "*.py" -o -name "*.env" -o -name "*.config" -o -name "*.ecf" -o -name "J*" -o -name "ex*" \) -print) - -# Perform the replacements using word boundaries -echo "Performing replacements..." -for file in ${file_list}; do - if [[ -f "${file}" ]]; then - sed -i "s/\bHOMEglobal\b/HOME${NET}/g" "${file}" - sed -i "s/\bPARMglobal\b/PARM${NET}/g" "${file}" - sed -i "s/\bUSHglobal\b/USH${NET}/g" "${file}" - sed -i "s/\bSCRglobal\b/SCR${NET}/g" "${file}" - sed -i "s/\bEXECglobal\b/EXEC${NET}/g" "${file}" - sed -i "s/\bFIXglobal\b/FIX${NET}/g" "${file}" - fi -done - -echo "Conversion complete!" 
-echo "Converted global-workflow variables to ${NET}-specific variables" -echo "" -echo "Files processed: $(echo "${file_list}" | wc -w)" -echo "" -echo "IMPORTANT: Before handoff to NCO, verify the changes with:" -echo " git diff | head -100" -echo " bash -n " diff --git a/dev/ush/test.sh b/dev/ush/test.sh deleted file mode 100755 index 96553f22230..00000000000 --- a/dev/ush/test.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env bash -DIR=$1 -find "$DIR" -type f -exec sed -i -e 's/HOMEgfs/HOMEglobal/g' -e 's/PARMgfs/PARMglobal/g' -e 's/USHgfs/USHglobal/g' -e 's/SCRgfs/SCRglobal/g' {} + \ No newline at end of file From 4f3aab821ecffb0c1292f5567117e23b34ce68e3 Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Tue, 10 Feb 2026 20:26:03 +0000 Subject: [PATCH 26/71] add return --- dev/ush/convert_from_global_to_net.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev/ush/convert_from_global_to_net.sh b/dev/ush/convert_from_global_to_net.sh index 09eff181430..ac0458d6ec2 100755 --- a/dev/ush/convert_from_global_to_net.sh +++ b/dev/ush/convert_from_global_to_net.sh @@ -171,7 +171,7 @@ else if [[ ${file_count} -eq 0 ]]; then echo -e "${YELLOW}No files to convert for NET=${current_net}${NC}" rm -f /tmp/convert_files_$$.txt - continue + return fi # Count files to process From db8510c666d9f5c485a782be955ecd471ec58947 Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Wed, 11 Feb 2026 05:21:38 +0000 Subject: [PATCH 27/71] update scripts --- dev/ush/convert_from_net_to_global.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/dev/ush/convert_from_net_to_global.sh b/dev/ush/convert_from_net_to_global.sh index 7c2a66f1079..01b88b75db0 100755 --- a/dev/ush/convert_from_net_to_global.sh +++ b/dev/ush/convert_from_net_to_global.sh @@ -60,7 +60,8 @@ fi # Initialize target path TARGET_PATH="" -EXCLUDE_DIRS=() +# Default exclusions: Always exclude the conversion scripts themselves +EXCLUDE_DIRS=("dev/ush/convert_from_net_to_global.sh" 
"dev/ush/convert_from_global_to_net.sh") # Parse remaining arguments while [[ $# -gt 0 ]]; do From 7d9bdb61d0e0f43eb4faf24224db44022de7cfb2 Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Wed, 11 Feb 2026 05:43:37 +0000 Subject: [PATCH 28/71] update script --- dev/ush/convert_from_global_to_net.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dev/ush/convert_from_global_to_net.sh b/dev/ush/convert_from_global_to_net.sh index ac0458d6ec2..1efefd968c0 100755 --- a/dev/ush/convert_from_global_to_net.sh +++ b/dev/ush/convert_from_global_to_net.sh @@ -171,7 +171,7 @@ else if [[ ${file_count} -eq 0 ]]; then echo -e "${YELLOW}No files to convert for NET=${current_net}${NC}" rm -f /tmp/convert_files_$$.txt - return + continue fi # Count files to process @@ -185,7 +185,7 @@ else # Pre-check: Skip file if it contains ANY NET-specific variable (gfs, gefs, sfs, gcafs) if grep -qE '\b(HOME|PARM|USH|SCR|EXEC|FIX)(gfs|gefs|sfs|gcafs)\b' "${file}" 2> /dev/null; then skipped_files=$((skipped_files + 1)) - return + continue fi # Proceed with conversion only if no NET-specific vars found From bf085fc58299197c212fde50e14ac5d2449c9955 Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Wed, 11 Feb 2026 05:50:43 +0000 Subject: [PATCH 29/71] update scripts --- dev/ush/convert_from_global_to_net.sh | 42 ++++++++++++++++++++++----- dev/ush/convert_from_net_to_global.sh | 30 ++++++++++++++++++- 2 files changed, 64 insertions(+), 8 deletions(-) diff --git a/dev/ush/convert_from_global_to_net.sh b/dev/ush/convert_from_global_to_net.sh index 1efefd968c0..f92079316bd 100755 --- a/dev/ush/convert_from_global_to_net.sh +++ b/dev/ush/convert_from_global_to_net.sh @@ -1,6 +1,6 @@ #!/bin/bash # convert_from_global_to_net.sh -# Script to convert HOMEglobal, PARMglobal, etc. to HOME${NET}, PARM${NET}, etc. +# Script to convert HOMEgfs, PARMgfs, etc. to HOME${NET}, PARM${NET}, etc. 
# for operational deployment # # Usage: convert_from_global_to_net.sh [--exclude dir1 dir2 dir3 ...] @@ -106,6 +106,34 @@ if [[ ! -e "${TARGET_PATH}" ]]; then exit 1 fi +# List of directories and files to exclude from processing +exclude_items=( + "sorc" + "dev/ush/convert_from_net_to_global.sh" + "dev/ush/convert_from_global_to_net.sh" +) + +# Build grep exclusion pattern (includes all items) +exclude_pattern="" +for item in "${exclude_items[@]}"; do + if [[ -n "${exclude_pattern}" ]]; then + exclude_pattern="${exclude_pattern}|" + fi + exclude_pattern="${exclude_pattern}${item}" +done + +# Display what we're excluding (filter out conversion scripts from display) +display_exclude=() +for item in "${exclude_items[@]}"; do + if [[ "${item}" != "dev/ush/convert_from_net_to_global.sh" && "${item}" != "dev/ush/convert_from_global_to_net.sh" ]]; then + display_exclude+=("${item}") + fi +done + +if [[ ${#display_exclude[@]} -gt 0 ]]; then + echo "Excluding directories: ${display_exclude[*]}" +fi + # Display processing header echo -e "${CYAN}=========================================${NC}" echo -e "${YELLOW}Processing: Converting ${RED}global${NC}${YELLOW}-workflow variables to ${GREEN}${NET}${NC}${YELLOW}-specific variables${NC}" @@ -120,12 +148,12 @@ echo -e "${YELLOW}Converting for: ${RED}global${NC} ${YELLOW}→${NC} ${GREEN}${ # List of patterns to convert declare -A patterns=( - ["HOMEglobal"]="HOME${NET}" - ["PARMglobal"]="PARM${NET}" - ["USHglobal"]="USH${NET}" - ["SCRglobal"]="SCR${NET}" - ["EXECglobal"]="EXEC${NET}" - ["FIXglobal"]="FIX${NET}" + ["HOMEgfs"]="HOME${NET}" + ["PARMgfs"]="PARM${NET}" + ["USHgfs"]="USH${NET}" + ["SCRgfs"]="SCR${NET}" + ["EXECgfs"]="EXEC${NET}" + ["FIXgfs"]="FIX${NET}" ) # If target is a single file, process it directly diff --git a/dev/ush/convert_from_net_to_global.sh b/dev/ush/convert_from_net_to_global.sh index 01b88b75db0..bd38fb00223 100755 --- a/dev/ush/convert_from_net_to_global.sh +++ b/dev/ush/convert_from_net_to_global.sh @@ 
-1,6 +1,6 @@ #!/bin/bash # convert_from_net_to_global.sh -# Script to convert HOME${NET}, PARM${NET}, etc. back to HOMEglobal, PARMglobal, etc. +# Script to convert HOME${NET}, PARM${NET}, etc. back to HOMEgfs, PARMgfs, etc. # for development # # Usage: convert_from_net_to_global.sh [--exclude dir1 dir2 dir3 ...] @@ -63,6 +63,34 @@ TARGET_PATH="" # Default exclusions: Always exclude the conversion scripts themselves EXCLUDE_DIRS=("dev/ush/convert_from_net_to_global.sh" "dev/ush/convert_from_global_to_net.sh") +# List of directories and files to exclude from processing +exclude_items=( + "sorc" + "dev/ush/convert_from_net_to_global.sh" + "dev/ush/convert_from_global_to_net.sh" +) + +# Build grep exclusion pattern (includes all items) +exclude_pattern="" +for item in "${exclude_items[@]}"; do + if [[ -n "${exclude_pattern}" ]]; then + exclude_pattern="${exclude_pattern}|" + fi + exclude_pattern="${exclude_pattern}${item}" +done + +# Display what we're excluding (filter out conversion scripts from display) +display_exclude=() +for item in "${exclude_items[@]}"; do + if [[ "${item}" != "dev/ush/convert_from_net_to_global.sh" && "${item}" != "dev/ush/convert_from_global_to_net.sh" ]]; then + display_exclude+=("${item}") + fi +done + +if [[ ${#display_exclude[@]} -gt 0 ]]; then + echo "Excluding directories: ${display_exclude[*]}" +fi + # Parse remaining arguments while [[ $# -gt 0 ]]; do case "$1" in From 96715b7aeb7e05f79d208aca2dc1a738c7090c2e Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Wed, 11 Feb 2026 05:54:13 +0000 Subject: [PATCH 30/71] script update --- dev/ush/convert_from_global_to_net.sh | 14 +++++++------- dev/ush/convert_from_net_to_global.sh | 4 ++-- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/dev/ush/convert_from_global_to_net.sh b/dev/ush/convert_from_global_to_net.sh index f92079316bd..ff76893a47a 100755 --- a/dev/ush/convert_from_global_to_net.sh +++ b/dev/ush/convert_from_global_to_net.sh @@ -1,6 +1,6 @@ #!/bin/bash # 
convert_from_global_to_net.sh -# Script to convert HOMEgfs, PARMgfs, etc. to HOME${NET}, PARM${NET}, etc. +# Script to convert HOMEglobal, PARMglobal, etc. to HOME${NET}, PARM${NET}, etc. # for operational deployment # # Usage: convert_from_global_to_net.sh [--exclude dir1 dir2 dir3 ...] @@ -148,12 +148,12 @@ echo -e "${YELLOW}Converting for: ${RED}global${NC} ${YELLOW}→${NC} ${GREEN}${ # List of patterns to convert declare -A patterns=( - ["HOMEgfs"]="HOME${NET}" - ["PARMgfs"]="PARM${NET}" - ["USHgfs"]="USH${NET}" - ["SCRgfs"]="SCR${NET}" - ["EXECgfs"]="EXEC${NET}" - ["FIXgfs"]="FIX${NET}" + ["HOMEglobal"]="HOME${NET}" + ["PARMglobal"]="PARM${NET}" + ["USHglobal"]="USH${NET}" + ["SCRglobal"]="SCR${NET}" + ["EXECglobal"]="EXEC${NET}" + ["FIXglobal"]="FIX${NET}" ) # If target is a single file, process it directly diff --git a/dev/ush/convert_from_net_to_global.sh b/dev/ush/convert_from_net_to_global.sh index bd38fb00223..05a7cb67a1d 100755 --- a/dev/ush/convert_from_net_to_global.sh +++ b/dev/ush/convert_from_net_to_global.sh @@ -128,8 +128,8 @@ else echo -e "${YELLOW}Processing: Converting ${RED}${NET}${NC}${YELLOW}-specific variables to ${GREEN}global${NC}${YELLOW}-workflow variables${NC}" fi echo -e "${BLUE}Target: ${TARGET_PATH}${NC}" -if [[ ${#EXCLUDE_DIRS[@]} -gt 0 ]]; then - echo -e "${BLUE}Excluding directories: ${EXCLUDE_DIRS[*]}${NC}" +if [[ ${#display_exclude[@]} -gt 0 ]]; then + echo -e "${BLUE}Excluding directories: ${display_exclude[*]}${NC}" fi echo -e "${CYAN}=========================================${NC}" From d4765d0da90f72e43648aae0b2b4725588fdcf5f Mon Sep 17 00:00:00 2001 From: AntonMFernando-NOAA <167725623+AntonMFernando-NOAA@users.noreply.github.com> Date: Wed, 11 Feb 2026 09:38:25 -0500 Subject: [PATCH 31/71] Update dev/ush/convert_from_net_to_global.sh Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- dev/ush/convert_from_net_to_global.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff 
--git a/dev/ush/convert_from_net_to_global.sh b/dev/ush/convert_from_net_to_global.sh index 05a7cb67a1d..b1944fce595 100755 --- a/dev/ush/convert_from_net_to_global.sh +++ b/dev/ush/convert_from_net_to_global.sh @@ -88,7 +88,7 @@ for item in "${exclude_items[@]}"; do done if [[ ${#display_exclude[@]} -gt 0 ]]; then - echo "Excluding directories: ${display_exclude[*]}" + echo "Excluding directories: ${display_exclude[*]}" fi # Parse remaining arguments From 7117efb6b1042f727e8829110746a4635e6760b3 Mon Sep 17 00:00:00 2001 From: AntonMFernando-NOAA <167725623+AntonMFernando-NOAA@users.noreply.github.com> Date: Wed, 11 Feb 2026 09:38:36 -0500 Subject: [PATCH 32/71] Update dev/ush/convert_from_net_to_global.sh Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- dev/ush/convert_from_net_to_global.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/dev/ush/convert_from_net_to_global.sh b/dev/ush/convert_from_net_to_global.sh index b1944fce595..902944d15f4 100755 --- a/dev/ush/convert_from_net_to_global.sh +++ b/dev/ush/convert_from_net_to_global.sh @@ -73,10 +73,10 @@ exclude_items=( # Build grep exclusion pattern (includes all items) exclude_pattern="" for item in "${exclude_items[@]}"; do - if [[ -n "${exclude_pattern}" ]]; then - exclude_pattern="${exclude_pattern}|" - fi - exclude_pattern="${exclude_pattern}${item}" + if [[ -n "${exclude_pattern}" ]]; then + exclude_pattern="${exclude_pattern}|" + fi + exclude_pattern="${exclude_pattern}${item}" done # Display what we're excluding (filter out conversion scripts from display) From 60fa4820b87e5da154139bc164583ac2204b0849 Mon Sep 17 00:00:00 2001 From: AntonMFernando-NOAA <167725623+AntonMFernando-NOAA@users.noreply.github.com> Date: Wed, 11 Feb 2026 09:38:44 -0500 Subject: [PATCH 33/71] Update dev/ush/convert_from_net_to_global.sh Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- 
dev/ush/convert_from_net_to_global.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/dev/ush/convert_from_net_to_global.sh b/dev/ush/convert_from_net_to_global.sh index 902944d15f4..9eb3c7fe58d 100755 --- a/dev/ush/convert_from_net_to_global.sh +++ b/dev/ush/convert_from_net_to_global.sh @@ -65,9 +65,9 @@ EXCLUDE_DIRS=("dev/ush/convert_from_net_to_global.sh" "dev/ush/convert_from_glob # List of directories and files to exclude from processing exclude_items=( - "sorc" - "dev/ush/convert_from_net_to_global.sh" - "dev/ush/convert_from_global_to_net.sh" + "sorc" + "dev/ush/convert_from_net_to_global.sh" + "dev/ush/convert_from_global_to_net.sh" ) # Build grep exclusion pattern (includes all items) From 32641875d188948a84872d1d77208c23b6b50852 Mon Sep 17 00:00:00 2001 From: AntonMFernando-NOAA <167725623+AntonMFernando-NOAA@users.noreply.github.com> Date: Wed, 11 Feb 2026 09:38:53 -0500 Subject: [PATCH 34/71] Update dev/ush/convert_from_net_to_global.sh Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- dev/ush/convert_from_net_to_global.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/dev/ush/convert_from_net_to_global.sh b/dev/ush/convert_from_net_to_global.sh index 9eb3c7fe58d..aad903797fd 100755 --- a/dev/ush/convert_from_net_to_global.sh +++ b/dev/ush/convert_from_net_to_global.sh @@ -82,9 +82,9 @@ done # Display what we're excluding (filter out conversion scripts from display) display_exclude=() for item in "${exclude_items[@]}"; do - if [[ "${item}" != "dev/ush/convert_from_net_to_global.sh" && "${item}" != "dev/ush/convert_from_global_to_net.sh" ]]; then - display_exclude+=("${item}") - fi + if [[ "${item}" != "dev/ush/convert_from_net_to_global.sh" && "${item}" != "dev/ush/convert_from_global_to_net.sh" ]]; then + display_exclude+=("${item}") + fi done if [[ ${#display_exclude[@]} -gt 0 ]]; then From b37ecd82281b1b0d5eff9d2f1c12c66ffc25e3d9 Mon Sep 17 00:00:00 2001 
From: AntonMFernando-NOAA <167725623+AntonMFernando-NOAA@users.noreply.github.com> Date: Wed, 11 Feb 2026 09:39:01 -0500 Subject: [PATCH 35/71] Update dev/ush/convert_from_global_to_net.sh Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- dev/ush/convert_from_global_to_net.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev/ush/convert_from_global_to_net.sh b/dev/ush/convert_from_global_to_net.sh index ff76893a47a..3eb0bd18c10 100755 --- a/dev/ush/convert_from_global_to_net.sh +++ b/dev/ush/convert_from_global_to_net.sh @@ -131,7 +131,7 @@ for item in "${exclude_items[@]}"; do done if [[ ${#display_exclude[@]} -gt 0 ]]; then - echo "Excluding directories: ${display_exclude[*]}" + echo "Excluding directories: ${display_exclude[*]}" fi # Display processing header From 8d3dc37950b10817d19803cdad7793d80cc1d900 Mon Sep 17 00:00:00 2001 From: AntonMFernando-NOAA <167725623+AntonMFernando-NOAA@users.noreply.github.com> Date: Wed, 11 Feb 2026 09:39:09 -0500 Subject: [PATCH 36/71] Update dev/ush/convert_from_global_to_net.sh Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- dev/ush/convert_from_global_to_net.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/dev/ush/convert_from_global_to_net.sh b/dev/ush/convert_from_global_to_net.sh index 3eb0bd18c10..e016064894e 100755 --- a/dev/ush/convert_from_global_to_net.sh +++ b/dev/ush/convert_from_global_to_net.sh @@ -125,9 +125,9 @@ done # Display what we're excluding (filter out conversion scripts from display) display_exclude=() for item in "${exclude_items[@]}"; do - if [[ "${item}" != "dev/ush/convert_from_net_to_global.sh" && "${item}" != "dev/ush/convert_from_global_to_net.sh" ]]; then - display_exclude+=("${item}") - fi + if [[ "${item}" != "dev/ush/convert_from_net_to_global.sh" && "${item}" != "dev/ush/convert_from_global_to_net.sh" ]]; then + display_exclude+=("${item}") + fi done if 
[[ ${#display_exclude[@]} -gt 0 ]]; then From ebdb201664dbac7676d9914954f3215e792c37fa Mon Sep 17 00:00:00 2001 From: AntonMFernando-NOAA <167725623+AntonMFernando-NOAA@users.noreply.github.com> Date: Wed, 11 Feb 2026 09:39:18 -0500 Subject: [PATCH 37/71] Update dev/ush/convert_from_global_to_net.sh Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- dev/ush/convert_from_global_to_net.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/dev/ush/convert_from_global_to_net.sh b/dev/ush/convert_from_global_to_net.sh index e016064894e..f9c2f9e44f6 100755 --- a/dev/ush/convert_from_global_to_net.sh +++ b/dev/ush/convert_from_global_to_net.sh @@ -108,9 +108,9 @@ fi # List of directories and files to exclude from processing exclude_items=( - "sorc" - "dev/ush/convert_from_net_to_global.sh" - "dev/ush/convert_from_global_to_net.sh" + "sorc" + "dev/ush/convert_from_net_to_global.sh" + "dev/ush/convert_from_global_to_net.sh" ) # Build grep exclusion pattern (includes all items) From 912c71ed542c01cdc82aba23b490e7f2ee6c6161 Mon Sep 17 00:00:00 2001 From: AntonMFernando-NOAA <167725623+AntonMFernando-NOAA@users.noreply.github.com> Date: Wed, 11 Feb 2026 09:56:39 -0500 Subject: [PATCH 38/71] Update dev/ush/convert_from_global_to_net.sh Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- dev/ush/convert_from_global_to_net.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/dev/ush/convert_from_global_to_net.sh b/dev/ush/convert_from_global_to_net.sh index f9c2f9e44f6..a0f2399b25a 100755 --- a/dev/ush/convert_from_global_to_net.sh +++ b/dev/ush/convert_from_global_to_net.sh @@ -116,10 +116,10 @@ exclude_items=( # Build grep exclusion pattern (includes all items) exclude_pattern="" for item in "${exclude_items[@]}"; do - if [[ -n "${exclude_pattern}" ]]; then - exclude_pattern="${exclude_pattern}|" - fi - exclude_pattern="${exclude_pattern}${item}" 
+ if [[ -n "${exclude_pattern}" ]]; then + exclude_pattern="${exclude_pattern}|" + fi + exclude_pattern="${exclude_pattern}${item}" done # Display what we're excluding (filter out conversion scripts from display) From 2ed47f9fba72016c8ec34f42a5bc41fd7dc7f92d Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Wed, 11 Feb 2026 14:58:27 +0000 Subject: [PATCH 39/71] update dev/ush/convert_from_net_to_global.sh --- dev/ush/convert_from_net_to_global.sh | 141 +++++++++++++------------- 1 file changed, 70 insertions(+), 71 deletions(-) diff --git a/dev/ush/convert_from_net_to_global.sh b/dev/ush/convert_from_net_to_global.sh index aad903797fd..a8632e74c88 100755 --- a/dev/ush/convert_from_net_to_global.sh +++ b/dev/ush/convert_from_net_to_global.sh @@ -65,30 +65,30 @@ EXCLUDE_DIRS=("dev/ush/convert_from_net_to_global.sh" "dev/ush/convert_from_glob # List of directories and files to exclude from processing exclude_items=( - "sorc" - "dev/ush/convert_from_net_to_global.sh" - "dev/ush/convert_from_global_to_net.sh" + "sorc" + "dev/ush/convert_from_net_to_global.sh" + "dev/ush/convert_from_global_to_net.sh" ) # Build grep exclusion pattern (includes all items) exclude_pattern="" for item in "${exclude_items[@]}"; do - if [[ -n "${exclude_pattern}" ]]; then - exclude_pattern="${exclude_pattern}|" - fi - exclude_pattern="${exclude_pattern}${item}" + if [[ -n "${exclude_pattern}" ]]; then + exclude_pattern="${exclude_pattern}|" + fi + exclude_pattern="${exclude_pattern}${item}" done # Display what we're excluding (filter out conversion scripts from display) display_exclude=() for item in "${exclude_items[@]}"; do - if [[ "${item}" != "dev/ush/convert_from_net_to_global.sh" && "${item}" != "dev/ush/convert_from_global_to_net.sh" ]]; then - display_exclude+=("${item}") - fi + if [[ "${item}" != "dev/ush/convert_from_net_to_global.sh" && "${item}" != "dev/ush/convert_from_global_to_net.sh" ]]; then + display_exclude+=("${item}") + fi done if [[ ${#display_exclude[@]} -gt 0 ]]; 
then - echo "Excluding directories: ${display_exclude[*]}" + echo "Excluding directories: ${display_exclude[*]}" fi # Parse remaining arguments @@ -206,74 +206,73 @@ for current_net in "${NET_LIST[@]}"; do if [[ ${file_count} -eq 0 ]]; then echo -e "${YELLOW}No files to convert${NC}" - continue - fi + else + echo -e "${BLUE}Processing ${file_count} files...${NC}" + + # Perform the replacements + failed_files=0 + skipped_files=0 + while IFS= read -r file; do + if [[ -f "${file}" ]]; then + # Pre-check: Skip file if ANY global variable already exists + should_skip=false + + # Build list of global patterns to check for + declare -a global_patterns=( + "HOMEglobal" + "PARMglobal" + "USHglobal" + "SCRglobal" + "EXECglobal" + "FIXglobal" + ) + + # Check if any global pattern already exists in file + for global_pattern in "${global_patterns[@]}"; do + if grep -q "\\b${global_pattern}\\b" "${file}" 2> /dev/null; then + should_skip=true + break + fi + done - echo -e "${BLUE}Processing ${file_count} files...${NC}" - - # Perform the replacements - failed_files=0 - skipped_files=0 - while IFS= read -r file; do - if [[ -f "${file}" ]]; then - # Pre-check: Skip file if ANY global variable already exists - should_skip=false - - # Build list of global patterns to check for - declare -a global_patterns=( - "HOMEglobal" - "PARMglobal" - "USHglobal" - "SCRglobal" - "EXECglobal" - "FIXglobal" - ) - - # Check if any global pattern already exists in file - for global_pattern in "${global_patterns[@]}"; do - if grep -q "\\b${global_pattern}\\b" "${file}" 2> /dev/null; then - should_skip=true - break + if ${should_skip}; then + skipped_files=$((skipped_files + 1)) + continue fi - done - - if ${should_skip}; then - skipped_files=$((skipped_files + 1)) - continue - fi - # Proceed with conversion only if no global vars found - file_modified=false - file_failed=false - for pattern in "${!patterns[@]}"; do - replacement="${patterns[${pattern}]}" - if grep -q "\\b${pattern}\\b" "${file}" 2> 
/dev/null; then - if ! sed -i "s/\\b${pattern}\\b/${replacement}/g" "${file}"; then - echo -e "${RED}ERROR: sed failed on ${file}${NC}" >&2 - failed_files=$((failed_files + 1)) - file_failed=true - break + # Proceed with conversion only if no global vars found + file_modified=false + file_failed=false + for pattern in "${!patterns[@]}"; do + replacement="${patterns[${pattern}]}" + if grep -q "\\b${pattern}\\b" "${file}" 2> /dev/null; then + if ! sed -i "s/\\b${pattern}\\b/${replacement}/g" "${file}"; then + echo -e "${RED}ERROR: sed failed on ${file}${NC}" >&2 + failed_files=$((failed_files + 1)) + file_failed=true + break + fi + file_modified=true fi - file_modified=true - fi - done + done - if ! ${file_modified} && ! ${file_failed}; then - skipped_files=$((skipped_files + 1)) + if ! ${file_modified} && ! ${file_failed}; then + skipped_files=$((skipped_files + 1)) + fi fi - fi - done < /tmp/convert_files_$$.txt + done < /tmp/convert_files_$$.txt - # Clean up - rm -f /tmp/convert_files_$$.txt + # Clean up + rm -f /tmp/convert_files_$$.txt - files_converted=$((file_count - failed_files - skipped_files)) - if [[ ${files_converted} -eq 0 ]]; then - echo -e "${YELLOW}No files to convert for NET=${current_net}${NC}" - elif [[ ${failed_files} -gt 0 ]]; then - echo -e "${YELLOW}⚠ Converted ${files_converted}/${file_count} files (${failed_files} failed) for NET=${current_net}${NC}" - else - echo -e "${GREEN}✓ Converted ${files_converted}/${file_count} files for NET=${current_net}${NC}" + files_converted=$((file_count - failed_files - skipped_files)) + if [[ ${files_converted} -eq 0 ]]; then + echo -e "${YELLOW}No files to convert for NET=${current_net}${NC}" + elif [[ ${failed_files} -gt 0 ]]; then + echo -e "${YELLOW}⚠ Converted ${files_converted}/${file_count} files (${failed_files} failed) for NET=${current_net}${NC}" + else + echo -e "${GREEN}✓ Converted ${files_converted}/${file_count} files for NET=${current_net}${NC}" + fi fi fi From 
9ec74dcfc960636c2ff99d3861ce8f7f18d6fe67 Mon Sep 17 00:00:00 2001 From: AntonMFernando-NOAA <167725623+AntonMFernando-NOAA@users.noreply.github.com> Date: Wed, 11 Feb 2026 09:59:13 -0500 Subject: [PATCH 40/71] Update dev/ush/convert_from_net_to_global.sh Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- dev/ush/convert_from_net_to_global.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/dev/ush/convert_from_net_to_global.sh b/dev/ush/convert_from_net_to_global.sh index a8632e74c88..5983856d795 100755 --- a/dev/ush/convert_from_net_to_global.sh +++ b/dev/ush/convert_from_net_to_global.sh @@ -65,9 +65,9 @@ EXCLUDE_DIRS=("dev/ush/convert_from_net_to_global.sh" "dev/ush/convert_from_glob # List of directories and files to exclude from processing exclude_items=( - "sorc" - "dev/ush/convert_from_net_to_global.sh" - "dev/ush/convert_from_global_to_net.sh" + "sorc" + "dev/ush/convert_from_net_to_global.sh" + "dev/ush/convert_from_global_to_net.sh" ) # Build grep exclusion pattern (includes all items) From 7cee89aa467e87aeaf6752e265c68de5e28a422b Mon Sep 17 00:00:00 2001 From: AntonMFernando-NOAA <167725623+AntonMFernando-NOAA@users.noreply.github.com> Date: Wed, 11 Feb 2026 09:59:21 -0500 Subject: [PATCH 41/71] Update dev/ush/convert_from_net_to_global.sh Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- dev/ush/convert_from_net_to_global.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/dev/ush/convert_from_net_to_global.sh b/dev/ush/convert_from_net_to_global.sh index 5983856d795..333ca91586f 100755 --- a/dev/ush/convert_from_net_to_global.sh +++ b/dev/ush/convert_from_net_to_global.sh @@ -73,10 +73,10 @@ exclude_items=( # Build grep exclusion pattern (includes all items) exclude_pattern="" for item in "${exclude_items[@]}"; do - if [[ -n "${exclude_pattern}" ]]; then - exclude_pattern="${exclude_pattern}|" - fi - 
exclude_pattern="${exclude_pattern}${item}" + if [[ -n "${exclude_pattern}" ]]; then + exclude_pattern="${exclude_pattern}|" + fi + exclude_pattern="${exclude_pattern}${item}" done # Display what we're excluding (filter out conversion scripts from display) From 9417721d8fee8d5ba8ce954b9c437885006d9c9a Mon Sep 17 00:00:00 2001 From: AntonMFernando-NOAA <167725623+AntonMFernando-NOAA@users.noreply.github.com> Date: Wed, 11 Feb 2026 09:59:29 -0500 Subject: [PATCH 42/71] Update dev/ush/convert_from_net_to_global.sh Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- dev/ush/convert_from_net_to_global.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/dev/ush/convert_from_net_to_global.sh b/dev/ush/convert_from_net_to_global.sh index 333ca91586f..a38afcc77a4 100755 --- a/dev/ush/convert_from_net_to_global.sh +++ b/dev/ush/convert_from_net_to_global.sh @@ -82,9 +82,9 @@ done # Display what we're excluding (filter out conversion scripts from display) display_exclude=() for item in "${exclude_items[@]}"; do - if [[ "${item}" != "dev/ush/convert_from_net_to_global.sh" && "${item}" != "dev/ush/convert_from_global_to_net.sh" ]]; then - display_exclude+=("${item}") - fi + if [[ "${item}" != "dev/ush/convert_from_net_to_global.sh" && "${item}" != "dev/ush/convert_from_global_to_net.sh" ]]; then + display_exclude+=("${item}") + fi done if [[ ${#display_exclude[@]} -gt 0 ]]; then From 6469c8433f42dcca90074148c4c62fd47377d742 Mon Sep 17 00:00:00 2001 From: AntonMFernando-NOAA <167725623+AntonMFernando-NOAA@users.noreply.github.com> Date: Wed, 11 Feb 2026 09:59:50 -0500 Subject: [PATCH 43/71] Update dev/ush/convert_from_net_to_global.sh Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- dev/ush/convert_from_net_to_global.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev/ush/convert_from_net_to_global.sh b/dev/ush/convert_from_net_to_global.sh index 
a38afcc77a4..9fe13a150e7 100755 --- a/dev/ush/convert_from_net_to_global.sh +++ b/dev/ush/convert_from_net_to_global.sh @@ -88,7 +88,7 @@ for item in "${exclude_items[@]}"; do done if [[ ${#display_exclude[@]} -gt 0 ]]; then - echo "Excluding directories: ${display_exclude[*]}" + echo "Excluding directories: ${display_exclude[*]}" fi # Parse remaining arguments From 24a1ccb807156d2763d442e46527d0c76c9ed676 Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Wed, 11 Feb 2026 15:10:44 +0000 Subject: [PATCH 44/71] update scripts --- dev/ush/convert_from_global_to_net.sh | 1 + dev/ush/convert_from_net_to_global.sh | 1 + 2 files changed, 2 insertions(+) diff --git a/dev/ush/convert_from_global_to_net.sh b/dev/ush/convert_from_global_to_net.sh index a0f2399b25a..8c1c8bb31b3 100755 --- a/dev/ush/convert_from_global_to_net.sh +++ b/dev/ush/convert_from_global_to_net.sh @@ -208,6 +208,7 @@ else # Perform the replacements failed_files=0 skipped_files=0 + # shellcheck disable=SC2162 while IFS= read -r file; do if [[ -f "${file}" ]]; then # Pre-check: Skip file if it contains ANY NET-specific variable (gfs, gefs, sfs, gcafs) diff --git a/dev/ush/convert_from_net_to_global.sh b/dev/ush/convert_from_net_to_global.sh index 9fe13a150e7..1c057a2a984 100755 --- a/dev/ush/convert_from_net_to_global.sh +++ b/dev/ush/convert_from_net_to_global.sh @@ -212,6 +212,7 @@ for current_net in "${NET_LIST[@]}"; do # Perform the replacements failed_files=0 skipped_files=0 + # shellcheck disable=SC2162 while IFS= read -r file; do if [[ -f "${file}" ]]; then # Pre-check: Skip file if ANY global variable already exists From 7d1ca6684fe4f81ba7a228c1fbb94fdc5b0e2576 Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Wed, 11 Feb 2026 15:20:44 +0000 Subject: [PATCH 45/71] update scripts --- dev/ush/convert_from_global_to_net.sh | 38 +++++++++++++-------------- dev/ush/convert_from_net_to_global.sh | 38 +++++++++++++-------------- 2 files changed, 36 insertions(+), 40 deletions(-) diff --git 
a/dev/ush/convert_from_global_to_net.sh b/dev/ush/convert_from_global_to_net.sh index 8c1c8bb31b3..69cf585c246 100755 --- a/dev/ush/convert_from_global_to_net.sh +++ b/dev/ush/convert_from_global_to_net.sh @@ -208,33 +208,31 @@ else # Perform the replacements failed_files=0 skipped_files=0 - # shellcheck disable=SC2162 while IFS= read -r file; do if [[ -f "${file}" ]]; then # Pre-check: Skip file if it contains ANY NET-specific variable (gfs, gefs, sfs, gcafs) if grep -qE '\b(HOME|PARM|USH|SCR|EXEC|FIX)(gfs|gefs|sfs|gcafs)\b' "${file}" 2> /dev/null; then skipped_files=$((skipped_files + 1)) - continue - fi - - # Proceed with conversion only if no NET-specific vars found - file_modified=false - file_failed=false - for pattern in "${!patterns[@]}"; do - replacement="${patterns[${pattern}]}" - if grep -q "\\b${pattern}\\b" "${file}" 2> /dev/null; then - if ! sed -i "s/\\b${pattern}\\b/${replacement}/g" "${file}"; then - echo -e "${RED}ERROR: sed failed on ${file}${NC}" >&2 - failed_files=$((failed_files + 1)) - file_failed=true - break + else + # Proceed with conversion only if no NET-specific vars found + file_modified=false + file_failed=false + for pattern in "${!patterns[@]}"; do + replacement="${patterns[${pattern}]}" + if grep -q "\\b${pattern}\\b" "${file}" 2> /dev/null; then + if ! sed -i "s/\\b${pattern}\\b/${replacement}/g" "${file}"; then + echo -e "${RED}ERROR: sed failed on ${file}${NC}" >&2 + failed_files=$((failed_files + 1)) + file_failed=true + break + fi + file_modified=true fi - file_modified=true - fi - done + done - if ! ${file_modified} && ! ${file_failed}; then - skipped_files=$((skipped_files + 1)) + if ! ${file_modified} && ! 
${file_failed}; then + skipped_files=$((skipped_files + 1)) + fi fi fi done < /tmp/convert_files_$$.txt diff --git a/dev/ush/convert_from_net_to_global.sh b/dev/ush/convert_from_net_to_global.sh index 1c057a2a984..1688bc0fe08 100755 --- a/dev/ush/convert_from_net_to_global.sh +++ b/dev/ush/convert_from_net_to_global.sh @@ -212,7 +212,6 @@ for current_net in "${NET_LIST[@]}"; do # Perform the replacements failed_files=0 skipped_files=0 - # shellcheck disable=SC2162 while IFS= read -r file; do if [[ -f "${file}" ]]; then # Pre-check: Skip file if ANY global variable already exists @@ -238,27 +237,26 @@ for current_net in "${NET_LIST[@]}"; do if ${should_skip}; then skipped_files=$((skipped_files + 1)) - continue - fi - - # Proceed with conversion only if no global vars found - file_modified=false - file_failed=false - for pattern in "${!patterns[@]}"; do - replacement="${patterns[${pattern}]}" - if grep -q "\\b${pattern}\\b" "${file}" 2> /dev/null; then - if ! sed -i "s/\\b${pattern}\\b/${replacement}/g" "${file}"; then - echo -e "${RED}ERROR: sed failed on ${file}${NC}" >&2 - failed_files=$((failed_files + 1)) - file_failed=true - break + else + # Proceed with conversion only if no global vars found + file_modified=false + file_failed=false + for pattern in "${!patterns[@]}"; do + replacement="${patterns[${pattern}]}" + if grep -q "\\b${pattern}\\b" "${file}" 2> /dev/null; then + if ! sed -i "s/\\b${pattern}\\b/${replacement}/g" "${file}"; then + echo -e "${RED}ERROR: sed failed on ${file}${NC}" >&2 + failed_files=$((failed_files + 1)) + file_failed=true + break + fi + file_modified=true fi - file_modified=true - fi - done + done - if ! ${file_modified} && ! ${file_failed}; then - skipped_files=$((skipped_files + 1)) + if ! ${file_modified} && ! 
${file_failed}; then + skipped_files=$((skipped_files + 1)) + fi fi fi done < /tmp/convert_files_$$.txt From d530da046061cf02f7aed02e01ac826992ef1a5e Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Wed, 11 Feb 2026 15:23:32 +0000 Subject: [PATCH 46/71] update scripts --- dev/ush/convert_from_global_to_net.sh | 84 +++++++++++++-------------- 1 file changed, 42 insertions(+), 42 deletions(-) diff --git a/dev/ush/convert_from_global_to_net.sh b/dev/ush/convert_from_global_to_net.sh index 69cf585c246..9b37cff8a8d 100755 --- a/dev/ush/convert_from_global_to_net.sh +++ b/dev/ush/convert_from_global_to_net.sh @@ -196,57 +196,57 @@ else fi file_count=$(wc -l < /tmp/convert_files_$$.txt) + if [[ ${file_count} -eq 0 ]]; then - echo -e "${YELLOW}No files to convert for NET=${current_net}${NC}" + echo -e "${YELLOW}No files to convert for NET=${NET}${NC}" rm -f /tmp/convert_files_$$.txt - continue - fi - - # Count files to process - echo -e "${BLUE}Processing ${file_count} files...${NC}" + else + # Count files to process + echo -e "${BLUE}Processing ${file_count} files...${NC}" - # Perform the replacements - failed_files=0 - skipped_files=0 - while IFS= read -r file; do - if [[ -f "${file}" ]]; then - # Pre-check: Skip file if it contains ANY NET-specific variable (gfs, gefs, sfs, gcafs) - if grep -qE '\b(HOME|PARM|USH|SCR|EXEC|FIX)(gfs|gefs|sfs|gcafs)\b' "${file}" 2> /dev/null; then - skipped_files=$((skipped_files + 1)) - else - # Proceed with conversion only if no NET-specific vars found - file_modified=false - file_failed=false - for pattern in "${!patterns[@]}"; do - replacement="${patterns[${pattern}]}" - if grep -q "\\b${pattern}\\b" "${file}" 2> /dev/null; then - if ! 
sed -i "s/\\b${pattern}\\b/${replacement}/g" "${file}"; then - echo -e "${RED}ERROR: sed failed on ${file}${NC}" >&2 - failed_files=$((failed_files + 1)) - file_failed=true - break + # Perform the replacements + failed_files=0 + skipped_files=0 + while IFS= read -r file; do + if [[ -f "${file}" ]]; then + # Pre-check: Skip file if it contains ANY NET-specific variable (gfs, gefs, sfs, gcafs) + if grep -qE '\b(HOME|PARM|USH|SCR|EXEC|FIX)(gfs|gefs|sfs|gcafs)\b' "${file}" 2> /dev/null; then + skipped_files=$((skipped_files + 1)) + else + # Proceed with conversion only if no NET-specific vars found + file_modified=false + file_failed=false + for pattern in "${!patterns[@]}"; do + replacement="${patterns[${pattern}]}" + if grep -q "\\b${pattern}\\b" "${file}" 2> /dev/null; then + if ! sed -i "s/\\b${pattern}\\b/${replacement}/g" "${file}"; then + echo -e "${RED}ERROR: sed failed on ${file}${NC}" >&2 + failed_files=$((failed_files + 1)) + file_failed=true + break + fi + file_modified=true fi - file_modified=true - fi - done + done - if ! ${file_modified} && ! ${file_failed}; then - skipped_files=$((skipped_files + 1)) + if ! ${file_modified} && ! 
${file_failed}; then + skipped_files=$((skipped_files + 1)) + fi fi fi - fi - done < /tmp/convert_files_$$.txt + done < /tmp/convert_files_$$.txt - # Clean up - rm -f /tmp/convert_files_$$.txt + # Clean up + rm -f /tmp/convert_files_$$.txt - files_converted=$((file_count - failed_files - skipped_files)) - if [[ ${files_converted} -eq 0 ]]; then - echo -e "${YELLOW}No files to convert for NET=${NET}${NC}" - elif [[ ${failed_files} -gt 0 ]]; then - echo -e "${YELLOW}⚠ Converted ${files_converted}/${file_count} files (${failed_files} failed) for NET=${NET}${NC}" - else - echo -e "${GREEN}✓ Converted ${files_converted}/${file_count} files for NET=${NET}${NC}" + files_converted=$((file_count - failed_files - skipped_files)) + if [[ ${files_converted} -eq 0 ]]; then + echo -e "${YELLOW}No files to convert for NET=${NET}${NC}" + elif [[ ${failed_files} -gt 0 ]]; then + echo -e "${YELLOW}⚠ Converted ${files_converted}/${file_count} files (${failed_files} failed) for NET=${NET}${NC}" + else + echo -e "${GREEN}✓ Converted ${files_converted}/${file_count} files for NET=${NET}${NC}" + fi fi fi echo -e "${GREEN}Completed!${NC}" From 9dfefeb7ec2698752256a6f5a8ea8fcbe4376fb3 Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Thu, 12 Feb 2026 12:50:15 -0500 Subject: [PATCH 47/71] kept ecf files as NET --- ecf/scripts/enkfgdas/analysis/create/jenkfgdas_diag.ecf | 2 +- ecf/scripts/enkfgdas/analysis/create/jenkfgdas_select_obs.ecf | 2 +- ecf/scripts/enkfgdas/analysis/create/jenkfgdas_update.ecf | 2 +- ecf/scripts/enkfgdas/analysis/recenter/ecen/jenkfgdas_ecen.ecf | 2 +- ecf/scripts/enkfgdas/analysis/recenter/jenkfgdas_sfc.ecf | 2 +- ecf/scripts/enkfgdas/forecast/jenkfgdas_fcst.ecf | 2 +- ecf/scripts/enkfgdas/post/jenkfgdas_post_master.ecf | 2 +- ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis.ecf | 2 +- ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf | 2 +- ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_diag.ecf | 2 +- 
ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf | 2 +- ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak_meta_ncdc.ecf | 2 +- .../gdas/atmos/obsproc/dump/jgdas_atmos_tropcy_qc_reloc.ecf | 2 +- ecf/scripts/gdas/atmos/obsproc/prep/jgdas_atmos_prep_sfc.ecf | 2 +- ecf/scripts/gdas/atmos/post/jgdas_atmos_post_manager.ecf | 2 +- ecf/scripts/gdas/atmos/post/jgdas_atmos_post_master.ecf | 2 +- .../gdas/atmos/post_processing/jgdas_atmos_chgres_forenkf.ecf | 2 +- ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfozn.ecf | 2 +- ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfrad.ecf | 2 +- ecf/scripts/gdas/atmos/verf/jgdas_atmos_vminmon.ecf | 2 +- ecf/scripts/gdas/jgdas_forecast.ecf | 2 +- ecf/scripts/gdas/wave/init/jgdas_wave_init.ecf | 2 +- ecf/scripts/gdas/wave/post/jgdas_wave_postpnt.ecf | 2 +- ecf/scripts/gdas/wave/post/jgdas_wave_postsbs.ecf | 2 +- ecf/scripts/gdas/wave/prep/jgdas_wave_prep.ecf | 2 +- ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis.ecf | 2 +- ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis_calc.ecf | 2 +- ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak.ecf | 2 +- ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_meta.ecf | 2 +- ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_ncdc_upapgif.ecf | 2 +- ecf/scripts/gfs/atmos/gempak/jgfs_atmos_npoess_pgrb2_0p5deg.ecf | 2 +- ecf/scripts/gfs/atmos/gempak/jgfs_atmos_pgrb2_spec_gempak.ecf | 2 +- .../gfs/atmos/obsproc/dump/jgfs_atmos_tropcy_qc_reloc.ecf | 2 +- ecf/scripts/gfs/atmos/obsproc/prep/jgfs_atmos_prep_sfc.ecf | 2 +- ecf/scripts/gfs/atmos/post/jgfs_atmos_post_manager.ecf | 2 +- ecf/scripts/gfs/atmos/post/jgfs_atmos_post_master.ecf | 2 +- .../post_processing/awips_20km_1p0/jgfs_atmos_awips_master.ecf | 2 +- .../atmos/post_processing/bufr_sounding/jgfs_atmos_postsnd.ecf | 2 +- .../gfs/atmos/post_processing/bulletins/jgfs_atmos_fbwind.ecf | 2 +- ecf/scripts/gfs/atmos/verf/jgfs_atmos_vminmon.ecf | 2 +- ecf/scripts/gfs/jgfs_forecast.ecf | 2 +- ecf/scripts/gfs/wave/gempak/jgfs_wave_gempak.ecf | 2 +- 
ecf/scripts/gfs/wave/init/jgfs_wave_init.ecf | 2 +- ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpnt.ecf | 2 +- ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpntbll.ecf | 2 +- ecf/scripts/gfs/wave/post/jgfs_wave_postpnt.ecf | 2 +- ecf/scripts/gfs/wave/post/jgfs_wave_postsbs.ecf | 2 +- ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_bulls.ecf | 2 +- ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_gridded.ecf | 2 +- ecf/scripts/gfs/wave/prep/jgfs_wave_prep.ecf | 2 +- 50 files changed, 50 insertions(+), 50 deletions(-) diff --git a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_diag.ecf b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_diag.ecf index 38d0b47122b..03835172b9c 100755 --- a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_diag.ecf +++ b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_diag.ecf @@ -41,7 +41,7 @@ export USE_CFP=YES ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGDAS_ENKF_DIAG +${HOMEgfs}/jobs/JGDAS_ENKF_DIAG if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_select_obs.ecf b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_select_obs.ecf index 65a0e3fd9e8..bc289b8006b 100755 --- a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_select_obs.ecf +++ b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_select_obs.ecf @@ -41,7 +41,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGDAS_ENKF_SELECT_OBS +${HOMEgfs}/jobs/JGDAS_ENKF_SELECT_OBS if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_update.ecf b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_update.ecf index 0c59f75b1de..91eef7fb74b 100755 --- a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_update.ecf +++ b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_update.ecf @@ -42,7 +42,7 @@ export USE_CFP=YES ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGDAS_ENKF_UPDATE +${HOMEgfs}/jobs/JGDAS_ENKF_UPDATE if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/enkfgdas/analysis/recenter/ecen/jenkfgdas_ecen.ecf b/ecf/scripts/enkfgdas/analysis/recenter/ecen/jenkfgdas_ecen.ecf index 591621b6e5d..044a65c8433 100755 --- a/ecf/scripts/enkfgdas/analysis/recenter/ecen/jenkfgdas_ecen.ecf +++ b/ecf/scripts/enkfgdas/analysis/recenter/ecen/jenkfgdas_ecen.ecf @@ -44,7 +44,7 @@ export FHOUT_ECEN=$FHRGRP ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGDAS_ENKF_ECEN +${HOMEgfs}/jobs/JGDAS_ENKF_ECEN if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/enkfgdas/analysis/recenter/jenkfgdas_sfc.ecf b/ecf/scripts/enkfgdas/analysis/recenter/jenkfgdas_sfc.ecf index a78112af576..954ca495331 100755 --- a/ecf/scripts/enkfgdas/analysis/recenter/jenkfgdas_sfc.ecf +++ b/ecf/scripts/enkfgdas/analysis/recenter/jenkfgdas_sfc.ecf @@ -42,7 +42,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGDAS_ENKF_SFC +${HOMEgfs}/jobs/JGDAS_ENKF_SFC if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/enkfgdas/forecast/jenkfgdas_fcst.ecf b/ecf/scripts/enkfgdas/forecast/jenkfgdas_fcst.ecf index f9083772777..2fd692d1dfe 100755 --- a/ecf/scripts/enkfgdas/forecast/jenkfgdas_fcst.ecf +++ b/ecf/scripts/enkfgdas/forecast/jenkfgdas_fcst.ecf @@ -40,7 +40,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGDAS_ENKF_FCST +${HOMEgfs}/jobs/JGDAS_ENKF_FCST if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/enkfgdas/post/jenkfgdas_post_master.ecf b/ecf/scripts/enkfgdas/post/jenkfgdas_post_master.ecf index 3273945b650..4f682a7a0a8 100755 --- a/ecf/scripts/enkfgdas/post/jenkfgdas_post_master.ecf +++ b/ecf/scripts/enkfgdas/post/jenkfgdas_post_master.ecf @@ -42,7 +42,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -$HOMEglobal/jobs/JGDAS_ENKF_POST +$HOMEgfs/jobs/JGDAS_ENKF_POST if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis.ecf b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis.ecf index 0934c56127c..cc6eee326d1 100755 --- a/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis.ecf +++ b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis.ecf @@ -50,7 +50,7 @@ export PREINP='' ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGLOBAL_ATMOS_ANALYSIS +${HOMEgfs}/jobs/JGLOBAL_ATMOS_ANALYSIS if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" ecflow_client --abort diff --git a/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf index e4e38a29c7d..92c8c0551ef 100755 --- a/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf +++ b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf @@ -40,7 +40,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC +${HOMEgfs}/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" ecflow_client --abort diff --git a/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_diag.ecf b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_diag.ecf index a1cceef6ecd..53d9daf734e 100755 --- a/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_diag.ecf +++ b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_diag.ecf @@ -41,7 +41,7 @@ export USE_CFP=YES ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGDAS_ATMOS_ANALYSIS_DIAG +${HOMEgfs}/jobs/JGDAS_ATMOS_ANALYSIS_DIAG if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" ecflow_client --abort diff --git a/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf b/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf index d83b25c38e4..b3bb579ca33 100755 --- a/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf +++ b/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf @@ -43,7 +43,7 @@ export FHR3=%FHR3% ############################################################ export model=gdas -${HOMEglobal}/jobs/JGDAS_ATMOS_GEMPAK +${HOMEgfs}/jobs/JGDAS_ATMOS_GEMPAK if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak_meta_ncdc.ecf b/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak_meta_ncdc.ecf index 2870519efbb..312d3dcdaa6 100755 --- a/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak_meta_ncdc.ecf +++ b/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak_meta_ncdc.ecf @@ -34,7 +34,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC +${HOMEgfs}/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gdas/atmos/obsproc/dump/jgdas_atmos_tropcy_qc_reloc.ecf b/ecf/scripts/gdas/atmos/obsproc/dump/jgdas_atmos_tropcy_qc_reloc.ecf index 30e5d0cad44..c5f838fb5f9 100755 --- a/ecf/scripts/gdas/atmos/obsproc/dump/jgdas_atmos_tropcy_qc_reloc.ecf +++ b/ecf/scripts/gdas/atmos/obsproc/dump/jgdas_atmos_tropcy_qc_reloc.ecf @@ -37,7 +37,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC +${HOMEgfs}/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gdas/atmos/obsproc/prep/jgdas_atmos_prep_sfc.ecf b/ecf/scripts/gdas/atmos/obsproc/prep/jgdas_atmos_prep_sfc.ecf index ab3cab62c82..2fea72a8e7c 100755 --- a/ecf/scripts/gdas/atmos/obsproc/prep/jgdas_atmos_prep_sfc.ecf +++ b/ecf/scripts/gdas/atmos/obsproc/prep/jgdas_atmos_prep_sfc.ecf @@ -35,7 +35,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGLOBAL_ATMOS_PREP_SFC +${HOMEgfs}/jobs/JGLOBAL_ATMOS_PREP_SFC if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_manager.ecf b/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_manager.ecf index ce94f1cbcd0..9792253ec88 100755 --- a/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_manager.ecf +++ b/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_manager.ecf @@ -29,7 +29,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGLOBAL_ATMOS_POST_MANAGER +${HOMEgfs}/jobs/JGLOBAL_ATMOS_POST_MANAGER if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_master.ecf b/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_master.ecf index bf090e26e01..b65be6586ee 100755 --- a/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_master.ecf +++ b/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_master.ecf @@ -51,7 +51,7 @@ export g2tmpl_ver=v${g2tmpl_ver} ############################################################ # CALL executable job script here ############################################################ -$HOMEglobal/jobs/JGLOBAL_ATMOS_NCEPPOST +$HOMEgfs/jobs/JGLOBAL_ATMOS_NCEPPOST if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gdas/atmos/post_processing/jgdas_atmos_chgres_forenkf.ecf b/ecf/scripts/gdas/atmos/post_processing/jgdas_atmos_chgres_forenkf.ecf index b73a40d7800..32b024f6632 100755 --- a/ecf/scripts/gdas/atmos/post_processing/jgdas_atmos_chgres_forenkf.ecf +++ b/ecf/scripts/gdas/atmos/post_processing/jgdas_atmos_chgres_forenkf.ecf @@ -42,7 +42,7 @@ export USE_CFP=YES ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGDAS_ATMOS_CHGRES_FORENKF +${HOMEgfs}/jobs/JGDAS_ATMOS_CHGRES_FORENKF if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfozn.ecf b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfozn.ecf index 69d71c18e96..938611b4bce 100755 --- a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfozn.ecf +++ b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfozn.ecf @@ -40,7 +40,7 @@ export VERBOSE=YES ############################################################ # CALL executable job script here ############################################################ -$HOMEglobal/jobs/JGDAS_ATMOS_VERFOZN +$HOMEgfs/jobs/JGDAS_ATMOS_VERFOZN if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfrad.ecf b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfrad.ecf index cd7c6ce3106..dd0c19d6f09 100755 --- a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfrad.ecf +++ b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfrad.ecf @@ -40,7 +40,7 @@ export VERBOSE=YES ############################################################ # CALL executable job script here ############################################################ -$HOMEglobal/jobs/JGDAS_ATMOS_VERFRAD +$HOMEgfs/jobs/JGDAS_ATMOS_VERFRAD if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_vminmon.ecf b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_vminmon.ecf index 6a217efca78..b538a18a3dc 100755 --- a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_vminmon.ecf +++ b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_vminmon.ecf @@ -38,7 +38,7 @@ export VERBOSE=YES ############################################################ # CALL executable job script here ############################################################ -$HOMEglobal/jobs/JGDAS_ATMOS_VMINMON +$HOMEgfs/jobs/JGDAS_ATMOS_VMINMON if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gdas/jgdas_forecast.ecf b/ecf/scripts/gdas/jgdas_forecast.ecf index a58855db35c..392d5f362fa 100755 --- a/ecf/scripts/gdas/jgdas_forecast.ecf +++ b/ecf/scripts/gdas/jgdas_forecast.ecf @@ -39,7 +39,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGLOBAL_FORECAST +${HOMEgfs}/jobs/JGLOBAL_FORECAST if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gdas/wave/init/jgdas_wave_init.ecf b/ecf/scripts/gdas/wave/init/jgdas_wave_init.ecf index 30c59ea1641..1f73e43eb11 100755 --- a/ecf/scripts/gdas/wave/init/jgdas_wave_init.ecf +++ b/ecf/scripts/gdas/wave/init/jgdas_wave_init.ecf @@ -37,7 +37,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGLOBAL_WAVE_INIT +${HOMEgfs}/jobs/JGLOBAL_WAVE_INIT if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gdas/wave/post/jgdas_wave_postpnt.ecf b/ecf/scripts/gdas/wave/post/jgdas_wave_postpnt.ecf index e00f11a1d50..fb45d8fda54 100755 --- a/ecf/scripts/gdas/wave/post/jgdas_wave_postpnt.ecf +++ b/ecf/scripts/gdas/wave/post/jgdas_wave_postpnt.ecf @@ -38,7 +38,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGLOBAL_WAVE_POST_PNT +${HOMEgfs}/jobs/JGLOBAL_WAVE_POST_PNT if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gdas/wave/post/jgdas_wave_postsbs.ecf b/ecf/scripts/gdas/wave/post/jgdas_wave_postsbs.ecf index 2f297ab8f9e..5212a026d93 100755 --- a/ecf/scripts/gdas/wave/post/jgdas_wave_postsbs.ecf +++ b/ecf/scripts/gdas/wave/post/jgdas_wave_postsbs.ecf @@ -41,7 +41,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGLOBAL_WAVE_POST_SBS +${HOMEgfs}/jobs/JGLOBAL_WAVE_POST_SBS if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gdas/wave/prep/jgdas_wave_prep.ecf b/ecf/scripts/gdas/wave/prep/jgdas_wave_prep.ecf index 622fdc96ed3..b1fd9fe32e3 100755 --- a/ecf/scripts/gdas/wave/prep/jgdas_wave_prep.ecf +++ b/ecf/scripts/gdas/wave/prep/jgdas_wave_prep.ecf @@ -45,7 +45,7 @@ export USE_CFP=YES ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGLOBAL_WAVE_PREP +${HOMEgfs}/jobs/JGLOBAL_WAVE_PREP if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis.ecf b/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis.ecf index 7f9793131cb..12653d0e95d 100755 --- a/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis.ecf +++ b/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis.ecf @@ -51,7 +51,7 @@ export PREINP='' ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGLOBAL_ATMOS_ANALYSIS +${HOMEgfs}/jobs/JGLOBAL_ATMOS_ANALYSIS if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis_calc.ecf b/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis_calc.ecf index e4e38a29c7d..92c8c0551ef 100755 --- a/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis_calc.ecf +++ b/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis_calc.ecf @@ -40,7 +40,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC +${HOMEgfs}/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" ecflow_client --abort diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak.ecf index 8cfb574a962..5f56e7ac17f 100755 --- a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak.ecf +++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak.ecf @@ -41,7 +41,7 @@ export FHR3=%FHR3% ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGFS_ATMOS_GEMPAK +${HOMEgfs}/jobs/JGFS_ATMOS_GEMPAK if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_meta.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_meta.ecf index 515b25e16bf..4798e2a06ac 100755 --- a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_meta.ecf +++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_meta.ecf @@ -40,7 +40,7 @@ export USE_CFP=YES ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGFS_ATMOS_GEMPAK_META +${HOMEgfs}/jobs/JGFS_ATMOS_GEMPAK_META if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_ncdc_upapgif.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_ncdc_upapgif.ecf index 506d1adb14e..25659058f8c 100755 --- a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_ncdc_upapgif.ecf +++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_ncdc_upapgif.ecf @@ -37,7 +37,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF +${HOMEgfs}/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_npoess_pgrb2_0p5deg.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_npoess_pgrb2_0p5deg.ecf index 7a9d1b71f21..da66dfe7f6d 100755 --- a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_npoess_pgrb2_0p5deg.ecf +++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_npoess_pgrb2_0p5deg.ecf @@ -36,7 +36,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS +${HOMEgfs}/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_pgrb2_spec_gempak.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_pgrb2_spec_gempak.ecf index d18daca1739..df0f9f90f18 100755 --- a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_pgrb2_spec_gempak.ecf +++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_pgrb2_spec_gempak.ecf @@ -39,7 +39,7 @@ export FHR3=%FHR3% ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC +${HOMEgfs}/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/atmos/obsproc/dump/jgfs_atmos_tropcy_qc_reloc.ecf b/ecf/scripts/gfs/atmos/obsproc/dump/jgfs_atmos_tropcy_qc_reloc.ecf index 30e5d0cad44..c5f838fb5f9 100755 --- a/ecf/scripts/gfs/atmos/obsproc/dump/jgfs_atmos_tropcy_qc_reloc.ecf +++ b/ecf/scripts/gfs/atmos/obsproc/dump/jgfs_atmos_tropcy_qc_reloc.ecf @@ -37,7 +37,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC +${HOMEgfs}/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/atmos/obsproc/prep/jgfs_atmos_prep_sfc.ecf b/ecf/scripts/gfs/atmos/obsproc/prep/jgfs_atmos_prep_sfc.ecf index 6397bbc4ec5..9e677316bf6 100755 --- a/ecf/scripts/gfs/atmos/obsproc/prep/jgfs_atmos_prep_sfc.ecf +++ b/ecf/scripts/gfs/atmos/obsproc/prep/jgfs_atmos_prep_sfc.ecf @@ -35,7 +35,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGLOBAL_ATMOS_PREP_SFC +${HOMEgfs}/jobs/JGLOBAL_ATMOS_PREP_SFC if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_manager.ecf b/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_manager.ecf index 16dad405575..50a71a44ba6 100755 --- a/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_manager.ecf +++ b/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_manager.ecf @@ -29,7 +29,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGLOBAL_ATMOS_POST_MANAGER +${HOMEgfs}/jobs/JGLOBAL_ATMOS_POST_MANAGER if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_master.ecf b/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_master.ecf index 8f7d3785291..d8b1e2b5310 100755 --- a/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_master.ecf +++ b/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_master.ecf @@ -56,7 +56,7 @@ export g2tmpl_ver=v${g2tmpl_ver} ############################################################ # CALL executable job script here ############################################################ -$HOMEglobal/jobs/JGLOBAL_ATMOS_NCEPPOST +$HOMEgfs/jobs/JGLOBAL_ATMOS_NCEPPOST if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/atmos/post_processing/awips_20km_1p0/jgfs_atmos_awips_master.ecf b/ecf/scripts/gfs/atmos/post_processing/awips_20km_1p0/jgfs_atmos_awips_master.ecf index 464a74cd74f..c559de3394b 100755 --- a/ecf/scripts/gfs/atmos/post_processing/awips_20km_1p0/jgfs_atmos_awips_master.ecf +++ b/ecf/scripts/gfs/atmos/post_processing/awips_20km_1p0/jgfs_atmos_awips_master.ecf @@ -40,7 +40,7 @@ export FHRGRP=%FHRGRP% FHRLST=%FHRLST% FCSTHR=%FCSTHR% TRDRUN=%TRDRUN% fcsthr=%F ############################################################ # CALL executable job script here ############################################################ -$HOMEglobal/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/atmos/post_processing/bufr_sounding/jgfs_atmos_postsnd.ecf b/ecf/scripts/gfs/atmos/post_processing/bufr_sounding/jgfs_atmos_postsnd.ecf index 26928b3543e..3322aceeb1f 100755 --- a/ecf/scripts/gfs/atmos/post_processing/bufr_sounding/jgfs_atmos_postsnd.ecf +++ b/ecf/scripts/gfs/atmos/post_processing/bufr_sounding/jgfs_atmos_postsnd.ecf @@ -44,7 +44,7 @@ export OMP_NUM_THREADS=1 ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGFS_ATMOS_POSTSND +${HOMEgfs}/jobs/JGFS_ATMOS_POSTSND if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/atmos/post_processing/bulletins/jgfs_atmos_fbwind.ecf b/ecf/scripts/gfs/atmos/post_processing/bulletins/jgfs_atmos_fbwind.ecf index cb5ea471d8d..4afac0d2734 100755 --- a/ecf/scripts/gfs/atmos/post_processing/bulletins/jgfs_atmos_fbwind.ecf +++ b/ecf/scripts/gfs/atmos/post_processing/bulletins/jgfs_atmos_fbwind.ecf @@ -38,7 +38,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGFS_ATMOS_FBWIND +${HOMEgfs}/jobs/JGFS_ATMOS_FBWIND if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/atmos/verf/jgfs_atmos_vminmon.ecf b/ecf/scripts/gfs/atmos/verf/jgfs_atmos_vminmon.ecf index 2682194d8da..2d9e8814ab2 100755 --- a/ecf/scripts/gfs/atmos/verf/jgfs_atmos_vminmon.ecf +++ b/ecf/scripts/gfs/atmos/verf/jgfs_atmos_vminmon.ecf @@ -38,7 +38,7 @@ export VERBOSE=YES ############################################################ # CALL executable job script here ############################################################ -$HOMEglobal/jobs/JGFS_ATMOS_VMINMON +$HOMEgfs/jobs/JGFS_ATMOS_VMINMON if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/jgfs_forecast.ecf b/ecf/scripts/gfs/jgfs_forecast.ecf index 81bd5c9cbb5..26d0c3b80d0 100755 --- a/ecf/scripts/gfs/jgfs_forecast.ecf +++ b/ecf/scripts/gfs/jgfs_forecast.ecf @@ -39,7 +39,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGLOBAL_FORECAST +${HOMEgfs}/jobs/JGLOBAL_FORECAST if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" ecflow_client --abort diff --git a/ecf/scripts/gfs/wave/gempak/jgfs_wave_gempak.ecf b/ecf/scripts/gfs/wave/gempak/jgfs_wave_gempak.ecf index 55579fa9d9f..8406f0449c9 100755 --- a/ecf/scripts/gfs/wave/gempak/jgfs_wave_gempak.ecf +++ b/ecf/scripts/gfs/wave/gempak/jgfs_wave_gempak.ecf @@ -36,7 +36,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGLOBAL_WAVE_GEMPAK +${HOMEgfs}/jobs/JGLOBAL_WAVE_GEMPAK if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/wave/init/jgfs_wave_init.ecf b/ecf/scripts/gfs/wave/init/jgfs_wave_init.ecf index 30c59ea1641..1f73e43eb11 100755 --- a/ecf/scripts/gfs/wave/init/jgfs_wave_init.ecf +++ b/ecf/scripts/gfs/wave/init/jgfs_wave_init.ecf @@ -37,7 +37,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGLOBAL_WAVE_INIT +${HOMEgfs}/jobs/JGLOBAL_WAVE_INIT if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpnt.ecf b/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpnt.ecf index 1e54b042374..d4de0a97251 100755 --- a/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpnt.ecf +++ b/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpnt.ecf @@ -38,7 +38,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGLOBAL_WAVE_POST_BNDPNT +${HOMEgfs}/jobs/JGLOBAL_WAVE_POST_BNDPNT if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpntbll.ecf b/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpntbll.ecf index d1e3abd0bac..528068f0574 100755 --- a/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpntbll.ecf +++ b/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpntbll.ecf @@ -36,7 +36,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL +${HOMEgfs}/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/wave/post/jgfs_wave_postpnt.ecf b/ecf/scripts/gfs/wave/post/jgfs_wave_postpnt.ecf index ac0b43748c9..d09204cb2d0 100755 --- a/ecf/scripts/gfs/wave/post/jgfs_wave_postpnt.ecf +++ b/ecf/scripts/gfs/wave/post/jgfs_wave_postpnt.ecf @@ -38,7 +38,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGLOBAL_WAVE_POST_PNT +${HOMEgfs}/jobs/JGLOBAL_WAVE_POST_PNT if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/wave/post/jgfs_wave_postsbs.ecf b/ecf/scripts/gfs/wave/post/jgfs_wave_postsbs.ecf index c09c3d53342..52179a56e2f 100755 --- a/ecf/scripts/gfs/wave/post/jgfs_wave_postsbs.ecf +++ b/ecf/scripts/gfs/wave/post/jgfs_wave_postsbs.ecf @@ -41,7 +41,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGLOBAL_WAVE_POST_SBS +${HOMEgfs}/jobs/JGLOBAL_WAVE_POST_SBS if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_bulls.ecf b/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_bulls.ecf index 17f73e92c13..f7d0ea1be7a 100755 --- a/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_bulls.ecf +++ b/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_bulls.ecf @@ -36,7 +36,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGLOBAL_WAVE_PRDGEN_BULLS +${HOMEgfs}/jobs/JGLOBAL_WAVE_PRDGEN_BULLS if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_gridded.ecf b/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_gridded.ecf index 4b316932ab0..1c6ba47c93e 100755 --- a/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_gridded.ecf +++ b/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_gridded.ecf @@ -38,7 +38,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED +${HOMEgfs}/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED if [ $? -ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" diff --git a/ecf/scripts/gfs/wave/prep/jgfs_wave_prep.ecf b/ecf/scripts/gfs/wave/prep/jgfs_wave_prep.ecf index 899cf327f98..171e737692c 100755 --- a/ecf/scripts/gfs/wave/prep/jgfs_wave_prep.ecf +++ b/ecf/scripts/gfs/wave/prep/jgfs_wave_prep.ecf @@ -45,7 +45,7 @@ export cycle=t%CYC%z ############################################################ # CALL executable job script here ############################################################ -${HOMEglobal}/jobs/JGLOBAL_WAVE_PREP +${HOMEgfs}/jobs/JGLOBAL_WAVE_PREP if [ $? 
-ne 0 ]; then ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" ecflow_client --abort From af1f6729d150b45334342a5f1134545e87ecff67 Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Thu, 12 Feb 2026 18:18:43 +0000 Subject: [PATCH 48/71] submodule updates --- sorc/gdas.cd | 2 +- sorc/gfs_utils.fd | 2 +- sorc/gsi_monitor.fd | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 8fa33c87860..5272fcae678 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 8fa33c87860fa2a9913bcc9a8cfd44c28f46e4ec +Subproject commit 5272fcae678f918fe257ed7a0f02ae2e274caa67 diff --git a/sorc/gfs_utils.fd b/sorc/gfs_utils.fd index 3240ee02827..721f5db221d 160000 --- a/sorc/gfs_utils.fd +++ b/sorc/gfs_utils.fd @@ -1 +1 @@ -Subproject commit 3240ee02827933e65766f6507da0186a885d79ae +Subproject commit 721f5db221d0566db9f87cce7ee56a4e61704f62 diff --git a/sorc/gsi_monitor.fd b/sorc/gsi_monitor.fd index 74df9c1b954..fd742904f2c 160000 --- a/sorc/gsi_monitor.fd +++ b/sorc/gsi_monitor.fd @@ -1 +1 @@ -Subproject commit 74df9c1b954f52c170c61ac8d4f9836804bc091b +Subproject commit fd742904f2c66f504f8e28bf702fa93d8a398bbe From 9e1c43d8dea51825f6546142fc18e449d749ab33 Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Thu, 12 Feb 2026 14:09:35 -0500 Subject: [PATCH 49/71] add paths --- .gitmodules | 6 +++--- sorc/gfs_utils.fd | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.gitmodules b/.gitmodules index fc87e5f3655..3176c5569dc 100644 --- a/.gitmodules +++ b/.gitmodules @@ -7,10 +7,10 @@ url = https://github.com/NOAA-EMC/wxflow [submodule "sorc/gfs_utils.fd"] path = sorc/gfs_utils.fd - url = https://github.com/NOAA-EMC/gfs-utils + url = https://github.com/AntonMFernando-NOAA/gfs-utils [submodule "sorc/ufs_utils.fd"] path = sorc/ufs_utils.fd - url = https://github.com/ufs-community/UFS_UTILS.git + url = https://github.com/AntonMFernando-NOAA/UFS_UTILS [submodule "sorc/verif-global.fd"] path 
= sorc/verif-global.fd url = https://github.com/NOAA-EMC/EMC_verif-global.git @@ -19,7 +19,7 @@ url = https://github.com/NOAA-EMC/GSI.git [submodule "sorc/gdas.cd"] path = sorc/gdas.cd - url = https://github.com/NOAA-EMC/GDASApp.git + url = https://github.com/AntonMFernando-NOAA/GDASApp ignore = dirty [submodule "sorc/gsi_utils.fd"] path = sorc/gsi_utils.fd diff --git a/sorc/gfs_utils.fd b/sorc/gfs_utils.fd index 721f5db221d..3240ee02827 160000 --- a/sorc/gfs_utils.fd +++ b/sorc/gfs_utils.fd @@ -1 +1 @@ -Subproject commit 721f5db221d0566db9f87cce7ee56a4e61704f62 +Subproject commit 3240ee02827933e65766f6507da0186a885d79ae From fbf23d65dcc2952e2d203d87b4014d2c354b21ff Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Thu, 12 Feb 2026 14:37:26 -0500 Subject: [PATCH 50/71] update submodules --- .gitmodules | 2 +- sorc/ufs_utils.fd | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.gitmodules b/.gitmodules index 3176c5569dc..0164bcc7190 100644 --- a/.gitmodules +++ b/.gitmodules @@ -26,7 +26,7 @@ url = https://github.com/NOAA-EMC/GSI-Utils.git [submodule "sorc/gsi_monitor.fd"] path = sorc/gsi_monitor.fd - url = https://github.com/NOAA-EMC/GSI-Monitor.git + url = https://github.com/AntonMFernando-NOAA/GSI-Monitor [submodule "sorc/nexus.fd"] path = sorc/nexus.fd url = https://github.com/NOAA-OAR-ARL/NEXUS.git diff --git a/sorc/ufs_utils.fd b/sorc/ufs_utils.fd index cdfd24c051c..721f5db221d 160000 --- a/sorc/ufs_utils.fd +++ b/sorc/ufs_utils.fd @@ -1 +1 @@ -Subproject commit cdfd24c051c3545099f3ca7f1fc20f8d2c15349c +Subproject commit 721f5db221d0566db9f87cce7ee56a4e61704f62 From 7784b8f0ff4e8d9802aa4a652c6ff31b6bd6de3e Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Thu, 12 Feb 2026 17:24:36 -0500 Subject: [PATCH 51/71] update verif-global.fd module --- sorc/verif-global.fd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/verif-global.fd b/sorc/verif-global.fd index 743d14a8046..a696b889235 160000 --- a/sorc/verif-global.fd 
+++ b/sorc/verif-global.fd @@ -1 +1 @@ -Subproject commit 743d14a804640092177739da5ca0b8d9445d14d1 +Subproject commit a696b8892359b1614a720742af46231e42f36e0b From a1c6753213eefe5f834bf6aa405d1944c694672c Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Thu, 12 Feb 2026 17:30:50 -0500 Subject: [PATCH 52/71] update gitmodules --- .gitmodules | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitmodules b/.gitmodules index 0164bcc7190..8968a0cd91f 100644 --- a/.gitmodules +++ b/.gitmodules @@ -13,7 +13,7 @@ url = https://github.com/AntonMFernando-NOAA/UFS_UTILS [submodule "sorc/verif-global.fd"] path = sorc/verif-global.fd - url = https://github.com/NOAA-EMC/EMC_verif-global.git + url = https://github.com/AntonMFernando-NOAA/EMC_verif-global [submodule "sorc/gsi_enkf.fd"] path = sorc/gsi_enkf.fd url = https://github.com/NOAA-EMC/GSI.git From 24e1d69e2ef2a9538b710567ff55526ab99e90f8 Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Fri, 13 Feb 2026 16:02:07 -0500 Subject: [PATCH 53/71] upadte gdas --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 5272fcae678..ce78b570c3f 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 5272fcae678f918fe257ed7a0f02ae2e274caa67 +Subproject commit ce78b570c3f104d5ad6f2c2bdc10d0558cfbea0c From e41e46aba3311ff88ed2e97b8f1679e07d2eb19b Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Fri, 13 Feb 2026 23:16:48 -0500 Subject: [PATCH 54/71] update .gitmodules --- .gitmodules | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitmodules b/.gitmodules index 8968a0cd91f..8ff6635b5a7 100644 --- a/.gitmodules +++ b/.gitmodules @@ -7,7 +7,7 @@ url = https://github.com/NOAA-EMC/wxflow [submodule "sorc/gfs_utils.fd"] path = sorc/gfs_utils.fd - url = https://github.com/AntonMFernando-NOAA/gfs-utils + url = https://github.com/NOAA-EMC/gfs-utils [submodule "sorc/ufs_utils.fd"] path = sorc/ufs_utils.fd url = 
https://github.com/AntonMFernando-NOAA/UFS_UTILS From 48788edbf886734259a02a7180dcf127b80a2629 Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Sat, 14 Feb 2026 15:52:37 -0500 Subject: [PATCH 55/71] update ufs_utils module --- sorc/ufs_utils.fd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/ufs_utils.fd b/sorc/ufs_utils.fd index 721f5db221d..02fbab14b1f 160000 --- a/sorc/ufs_utils.fd +++ b/sorc/ufs_utils.fd @@ -1 +1 @@ -Subproject commit 721f5db221d0566db9f87cce7ee56a4e61704f62 +Subproject commit 02fbab14b1f00c3e1b796052826c07de976e79b7 From 90b90a9581cac6e513e2b57ceea215f4a5bc64a8 Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Tue, 17 Feb 2026 13:07:57 -0500 Subject: [PATCH 56/71] update ufs_utils.fd --- sorc/ufs_utils.fd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/ufs_utils.fd b/sorc/ufs_utils.fd index 02fbab14b1f..721f5db221d 160000 --- a/sorc/ufs_utils.fd +++ b/sorc/ufs_utils.fd @@ -1 +1 @@ -Subproject commit 02fbab14b1f00c3e1b796052826c07de976e79b7 +Subproject commit 721f5db221d0566db9f87cce7ee56a4e61704f62 From 46973b2aab758c3e238e571e4588a20d0117a110 Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Tue, 17 Feb 2026 21:46:11 +0000 Subject: [PATCH 57/71] update convert scripts --- dev/ush/convert_from_global_to_net.sh | 1 - dev/ush/convert_from_net_to_global.sh | 1 - 2 files changed, 2 deletions(-) diff --git a/dev/ush/convert_from_global_to_net.sh b/dev/ush/convert_from_global_to_net.sh index 9b37cff8a8d..09e9762c810 100755 --- a/dev/ush/convert_from_global_to_net.sh +++ b/dev/ush/convert_from_global_to_net.sh @@ -108,7 +108,6 @@ fi # List of directories and files to exclude from processing exclude_items=( - "sorc" "dev/ush/convert_from_net_to_global.sh" "dev/ush/convert_from_global_to_net.sh" ) diff --git a/dev/ush/convert_from_net_to_global.sh b/dev/ush/convert_from_net_to_global.sh index 1688bc0fe08..fdfd21ca62e 100755 --- a/dev/ush/convert_from_net_to_global.sh +++ 
b/dev/ush/convert_from_net_to_global.sh @@ -65,7 +65,6 @@ EXCLUDE_DIRS=("dev/ush/convert_from_net_to_global.sh" "dev/ush/convert_from_glob # List of directories and files to exclude from processing exclude_items=( - "sorc" "dev/ush/convert_from_net_to_global.sh" "dev/ush/convert_from_global_to_net.sh" ) From 148794b58ea9580315e6ba2ccfdf1881729ac207 Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Thu, 19 Feb 2026 16:28:16 +0000 Subject: [PATCH 58/71] update exglobal_scripts --- dev/scripts/exglobal_atmos_sfcanl.sh | 4 ++-- ush/global_cycle.sh | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/dev/scripts/exglobal_atmos_sfcanl.sh b/dev/scripts/exglobal_atmos_sfcanl.sh index 2d9b154cdb8..711a0dd5345 100755 --- a/dev/scripts/exglobal_atmos_sfcanl.sh +++ b/dev/scripts/exglobal_atmos_sfcanl.sh @@ -171,7 +171,7 @@ for hr in "${!gcycle_dates[@]}"; do # Copy inputs from COMIN to DATA for ((nn = 1; nn <= ntiles; nn++)); do cpreq "${sfcdata_dir}/${datestr}.${snow_prefix}sfc_data.tile${nn}.nc" "${DATA}/fnbgsi.00${nn}" - cpreq "${DATA}/fnbgsi.00${nn}" "${DATA}/fnbgso.00${nn}" + cpreq "${DATA}/fnbgsi.00${nn}" "${DATA}/sfc_data_cycle.00${nn}" done "${CYCLESH}" && true @@ -182,7 +182,7 @@ for hr in "${!gcycle_dates[@]}"; do # Copy outputs from DATA to COMOUT for ((nn = 1; nn <= ntiles; nn++)); do - cpfs "${DATA}/fnbgso.00${nn}" "${COMOUT_ATMOS_RESTART}/${datestr}.sfcanl_data.tile${nn}.nc" + cpfs "${DATA}/sfc_data_cycle.00${nn}" "${COMOUT_ATMOS_RESTART}/${datestr}.sfcanl_data.tile${nn}.nc" done done diff --git a/ush/global_cycle.sh b/ush/global_cycle.sh index 019b8fd6e71..0a22902e4f2 100755 --- a/ush/global_cycle.sh +++ b/ush/global_cycle.sh @@ -335,7 +335,6 @@ cat << EOF > fort.37 NST_FILE="${NST_FILE}", DO_SOILINCR=${GCYCLE_DO_SOILINCR}, DO_SNOWINCR=${GCYCLE_DO_SNOWINCR}, - INTERP_LANDINCR=${GCYCLE_INTERP_LANDINCR}, lsoil_incr=${LSOIL_INCR}, / EOF From 2e5e8bd8de0035bacba8f02ca758a911d53c8b37 Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Thu, 19 Feb 
2026 16:54:27 +0000 Subject: [PATCH 59/71] update sorc/link_workflow.sh --- sorc/link_workflow.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index da19b790c56..305277a5e71 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -96,12 +96,12 @@ for package in "${packages[@]}"; do done # Link wxflow to ush/python -cd "${HOMEgfs}/ush/python" || exit 1 -if [[ -d "${HOMEgfs}/sorc/wxflow/src/wxflow" ]]; then +cd "${HOMEglobal}/ush/python" || exit 1 +if [[ -d "${HOMEglobal}/sorc/wxflow/src/wxflow" ]]; then if [[ -s "wxflow" ]]; then rm -f "wxflow" fi - ${LINK} "${HOMEgfs}/sorc/wxflow/src/wxflow" . + ${LINK} "${HOMEglobal}/sorc/wxflow/src/wxflow" . fi # Link fix directories From f3b96d6682b62da67ed6dabfac34d742ab986625 Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Thu, 19 Feb 2026 20:44:47 +0000 Subject: [PATCH 60/71] update convert scripts --- dev/jobs/JGDAS_AERO_ANALYSIS_GENERATE_BMATRIX | 2 +- dev/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE | 2 +- dev/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE | 2 +- dev/jobs/JGLOBAL_AERO_ANALYSIS_VARIATIONAL | 2 +- dev/jobs/JGLOBAL_ANALYSIS_STATS | 2 +- dev/jobs/JGLOBAL_ARCHIVE_TARS | 2 +- dev/jobs/JGLOBAL_ARCHIVE_VRFY | 2 +- dev/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE | 2 +- .../JGLOBAL_ATMENS_ANALYSIS_FV3_INCREMENT | 2 +- dev/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE | 2 +- dev/jobs/JGLOBAL_ATMENS_ANALYSIS_LETKF | 2 +- dev/jobs/JGLOBAL_ATMENS_ANALYSIS_OBS | 2 +- dev/jobs/JGLOBAL_ATMENS_ANALYSIS_SOL | 2 +- dev/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC | 2 +- dev/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_FV3JEDI | 2 +- dev/jobs/JGLOBAL_ATMOS_UPP | 2 +- dev/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE | 2 +- dev/jobs/JGLOBAL_ATM_ANALYSIS_FV3_INCREMENT | 2 +- dev/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE | 2 +- dev/jobs/JGLOBAL_ATM_ANALYSIS_VARIATIONAL | 2 +- dev/jobs/JGLOBAL_ENKF_ARCHIVE_TARS | 2 +- dev/jobs/JGLOBAL_ENKF_ARCHIVE_VRFY | 2 +- dev/jobs/JGLOBAL_ENKF_ECEN_FV3JEDI | 2 +- 
dev/jobs/JGLOBAL_ENS_GLOBUS_ARCH | 2 +- dev/jobs/JGLOBAL_FETCH | 2 +- dev/jobs/JGLOBAL_FORECAST | 2 +- dev/jobs/JGLOBAL_GLOBUS_ARCH | 2 +- dev/jobs/JGLOBAL_MARINE_ANALYSIS_CHECKPOINT | 2 +- dev/jobs/JGLOBAL_MARINE_ANALYSIS_ECEN | 2 +- dev/jobs/JGLOBAL_MARINE_ANALYSIS_FINALIZE | 2 +- dev/jobs/JGLOBAL_MARINE_ANALYSIS_INITIALIZE | 2 +- dev/jobs/JGLOBAL_MARINE_ANALYSIS_LETKF | 2 +- dev/jobs/JGLOBAL_MARINE_ANALYSIS_VARIATIONAL | 2 +- dev/jobs/JGLOBAL_MARINE_BMAT | 2 +- dev/jobs/JGLOBAL_MARINE_BMAT_INITIALIZE | 2 +- dev/jobs/JGLOBAL_OCEANICE_PRODUCTS | 2 +- dev/jobs/JGLOBAL_OFFLINE_ATMOS_ANALYSIS | 2 +- dev/jobs/JGLOBAL_PREP_EMISSIONS | 2 +- dev/jobs/JGLOBAL_PREP_OCEAN_OBS | 2 +- dev/jobs/JGLOBAL_SNOWENS_ANALYSIS | 2 +- dev/jobs/JGLOBAL_SNOW_ANALYSIS | 2 +- dev/jobs/JGLOBAL_STAGE_IC | 2 +- dev/ush/convert_from_global_to_net.sh | 175 ++++++++---------- dev/ush/convert_from_net_to_global.sh | 106 +++-------- 44 files changed, 146 insertions(+), 219 deletions(-) diff --git a/dev/jobs/JGDAS_AERO_ANALYSIS_GENERATE_BMATRIX b/dev/jobs/JGDAS_AERO_ANALYSIS_GENERATE_BMATRIX index db9515e641c..8ecc001c8eb 100755 --- a/dev/jobs/JGDAS_AERO_ANALYSIS_GENERATE_BMATRIX +++ b/dev/jobs/JGDAS_AERO_ANALYSIS_GENERATE_BMATRIX @@ -6,7 +6,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "aeroanlgenb" -c "base aeroanl aero # Set variables used in the script ############################################## # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH ############################################## diff --git a/dev/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE b/dev/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE index 44696aaef19..9298217ceda 100755 --- a/dev/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE +++ b/dev/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE @@ -8,7 +8,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "aeroanlfinal" -c "base aeroanl aer # Set variables used in the script 
############################################## # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH ############################################## diff --git a/dev/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE b/dev/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE index 9e88745d7cf..fa89f6bca0d 100755 --- a/dev/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE +++ b/dev/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE @@ -7,7 +7,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "aeroanlinit" -c "base aeroanl aero # Set variables used in the script ############################################## # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH # shellcheck disable=SC2153 diff --git a/dev/jobs/JGLOBAL_AERO_ANALYSIS_VARIATIONAL b/dev/jobs/JGLOBAL_AERO_ANALYSIS_VARIATIONAL index 949b86ecc8a..e8214b06a7f 100755 --- a/dev/jobs/JGLOBAL_AERO_ANALYSIS_VARIATIONAL +++ b/dev/jobs/JGLOBAL_AERO_ANALYSIS_VARIATIONAL @@ -8,7 +8,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "aeroanlvar" -c "base aeroanl aeroa # Set variables used in the script ############################################## # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH ############################################## diff --git a/dev/jobs/JGLOBAL_ANALYSIS_STATS b/dev/jobs/JGLOBAL_ANALYSIS_STATS index 20796f156c7..df5460401d5 100755 --- a/dev/jobs/JGLOBAL_ANALYSIS_STATS +++ b/dev/jobs/JGLOBAL_ANALYSIS_STATS @@ -6,7 +6,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "anlstat" -c "base anlstat" # Set variables used in the script ############################################## # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" 
+PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH ############################################## diff --git a/dev/jobs/JGLOBAL_ARCHIVE_TARS b/dev/jobs/JGLOBAL_ARCHIVE_TARS index f2b4112517c..650696169b2 100755 --- a/dev/jobs/JGLOBAL_ARCHIVE_TARS +++ b/dev/jobs/JGLOBAL_ARCHIVE_TARS @@ -10,7 +10,7 @@ fi # Set variables used in the script ############################################## # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ diff --git a/dev/jobs/JGLOBAL_ARCHIVE_VRFY b/dev/jobs/JGLOBAL_ARCHIVE_VRFY index a9ef7f08c6b..bbd79570a02 100755 --- a/dev/jobs/JGLOBAL_ARCHIVE_VRFY +++ b/dev/jobs/JGLOBAL_ARCHIVE_VRFY @@ -6,7 +6,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "arch_vrfy" -c "base arch_vrfy" # Set variables used in the script ############################################## # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ diff --git a/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE b/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE index 85589352cfd..64abd0da340 100755 --- a/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE +++ b/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE @@ -8,7 +8,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "atmensanlfinal" -c "base atmensanl # Set variables used in the script ############################################## # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH GDUMP="gdas" diff --git a/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_FV3_INCREMENT b/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_FV3_INCREMENT index 1c181e35384..a0c86a56b5d 100755 --- 
a/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_FV3_INCREMENT +++ b/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_FV3_INCREMENT @@ -8,7 +8,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "atmensanlfv3inc" -c "base atmensan # Set variables used in the script ############################################## # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH ############################################## diff --git a/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE b/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE index 5e133131dda..7799a06148f 100755 --- a/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE +++ b/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE @@ -7,7 +7,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "atmensanlinit" -c "base atmensanl # Set variables used in the script ############################################## # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH # shellcheck disable=SC2153 diff --git a/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_LETKF b/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_LETKF index 887f7625970..b30c2953d21 100755 --- a/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_LETKF +++ b/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_LETKF @@ -8,7 +8,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "atmensanlletkf" -c "base atmensanl # Set variables used in the script ############################################## # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH ############################################## diff --git a/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_OBS b/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_OBS index 339e15a890d..551a9ed6c5e 100755 --- a/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_OBS +++ b/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_OBS @@ -8,7 +8,7 @@ 
source "${HOMEglobal}/ush/jjob_header.sh" -e "atmensanlobs" -c "base atmensanl a # Set variables used in the script ############################################## # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH ############################################## diff --git a/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_SOL b/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_SOL index f703a141381..2ace56c324e 100755 --- a/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_SOL +++ b/dev/jobs/JGLOBAL_ATMENS_ANALYSIS_SOL @@ -8,7 +8,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "atmensanlsol" -c "base atmensanl a # Set variables used in the script ############################################## # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH ############################################## diff --git a/dev/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC b/dev/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC index 34b890c3dd3..bc416b2325f 100755 --- a/dev/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC +++ b/dev/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC @@ -3,7 +3,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "analcalc" -c "base anal analcalc" # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH ############################################## diff --git a/dev/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_FV3JEDI b/dev/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_FV3JEDI index 16dc84f9ed4..f698616b466 100755 --- a/dev/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_FV3JEDI +++ b/dev/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_FV3JEDI @@ -9,7 +9,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "analcalc_fv3jedi" -c "base analcal # Set variables used in the script ############################################## # Setup Python path for 
pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") diff --git a/dev/jobs/JGLOBAL_ATMOS_UPP b/dev/jobs/JGLOBAL_ATMOS_UPP index 23e4b422566..7922f358a85 100755 --- a/dev/jobs/JGLOBAL_ATMOS_UPP +++ b/dev/jobs/JGLOBAL_ATMOS_UPP @@ -6,7 +6,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "upp" -c "base upp" # Set variables used in the exglobal script ############################################## # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH ############################################## diff --git a/dev/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE b/dev/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE index 312a276a5cb..6ab77c1d3ee 100755 --- a/dev/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE +++ b/dev/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE @@ -8,7 +8,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "atmanlfinal" -c "base atmanl atman # Set variables used in the script ############################################## # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH ############################################## diff --git a/dev/jobs/JGLOBAL_ATM_ANALYSIS_FV3_INCREMENT b/dev/jobs/JGLOBAL_ATM_ANALYSIS_FV3_INCREMENT index fe1164e11cb..1b095b710d4 100755 --- a/dev/jobs/JGLOBAL_ATM_ANALYSIS_FV3_INCREMENT +++ b/dev/jobs/JGLOBAL_ATM_ANALYSIS_FV3_INCREMENT @@ -8,7 +8,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "atmanlfv3inc" -c "base atmanl atma # Set variables used in the script ############################################## # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" 
+PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH ############################################## diff --git a/dev/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE b/dev/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE index 0917b87fbdf..dff93e26928 100755 --- a/dev/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE +++ b/dev/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE @@ -7,7 +7,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "atmanlinit" -c "base atmanl atmanl # Set variables used in the script ############################################## # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH # shellcheck disable=SC2153 diff --git a/dev/jobs/JGLOBAL_ATM_ANALYSIS_VARIATIONAL b/dev/jobs/JGLOBAL_ATM_ANALYSIS_VARIATIONAL index a02fb87c1c3..af8c6429e2b 100755 --- a/dev/jobs/JGLOBAL_ATM_ANALYSIS_VARIATIONAL +++ b/dev/jobs/JGLOBAL_ATM_ANALYSIS_VARIATIONAL @@ -8,7 +8,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "atmanlvar" -c "base atmanl atmanlv # Set variables used in the script ############################################## # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH ############################################## diff --git a/dev/jobs/JGLOBAL_ENKF_ARCHIVE_TARS b/dev/jobs/JGLOBAL_ENKF_ARCHIVE_TARS index 1e267693a11..81b1925c042 100755 --- a/dev/jobs/JGLOBAL_ENKF_ARCHIVE_TARS +++ b/dev/jobs/JGLOBAL_ENKF_ARCHIVE_TARS @@ -6,7 +6,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "earc_tars" -c "base arch_tars earc # Set variables used in the script ############################################## # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH MEMDIR="ensstat" YMD=${PDY} HH=${cyc} 
declare_from_tmpl -rx \ diff --git a/dev/jobs/JGLOBAL_ENKF_ARCHIVE_VRFY b/dev/jobs/JGLOBAL_ENKF_ARCHIVE_VRFY index 2c61afa745b..9db683d0235 100755 --- a/dev/jobs/JGLOBAL_ENKF_ARCHIVE_VRFY +++ b/dev/jobs/JGLOBAL_ENKF_ARCHIVE_VRFY @@ -6,7 +6,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "earc_vrfy" -c "base earc_vrfy" # Set variables used in the script ############################################## # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH MEMDIR="ensstat" YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ diff --git a/dev/jobs/JGLOBAL_ENKF_ECEN_FV3JEDI b/dev/jobs/JGLOBAL_ENKF_ECEN_FV3JEDI index 4990b553680..1eb15fef60e 100755 --- a/dev/jobs/JGLOBAL_ENKF_ECEN_FV3JEDI +++ b/dev/jobs/JGLOBAL_ENKF_ECEN_FV3JEDI @@ -9,7 +9,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "ecen_fv3jedi" -c "base ecen_fv3jed # Set variables used in the script ############################################## # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") diff --git a/dev/jobs/JGLOBAL_ENS_GLOBUS_ARCH b/dev/jobs/JGLOBAL_ENS_GLOBUS_ARCH index d362af31788..7b1a7d0a4a6 100755 --- a/dev/jobs/JGLOBAL_ENS_GLOBUS_ARCH +++ b/dev/jobs/JGLOBAL_ENS_GLOBUS_ARCH @@ -6,7 +6,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "globus_earc" -c "base globus earc_ # Set variables used in the script ############################################## # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH MEMDIR="ensstat" YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ diff --git a/dev/jobs/JGLOBAL_FETCH b/dev/jobs/JGLOBAL_FETCH index 657777e08f7..fc6d587e342 
100755 --- a/dev/jobs/JGLOBAL_FETCH +++ b/dev/jobs/JGLOBAL_FETCH @@ -3,7 +3,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "fetch" -c "base fetch" # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH # Execute fetching diff --git a/dev/jobs/JGLOBAL_FORECAST b/dev/jobs/JGLOBAL_FORECAST index 36c06c2117e..926a64038cf 100755 --- a/dev/jobs/JGLOBAL_FORECAST +++ b/dev/jobs/JGLOBAL_FORECAST @@ -11,7 +11,7 @@ else fi # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH # Create the directory to hold restarts and output from the model in stmp diff --git a/dev/jobs/JGLOBAL_GLOBUS_ARCH b/dev/jobs/JGLOBAL_GLOBUS_ARCH index 238707a1fe6..5ed7082fc49 100755 --- a/dev/jobs/JGLOBAL_GLOBUS_ARCH +++ b/dev/jobs/JGLOBAL_GLOBUS_ARCH @@ -6,7 +6,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "globus_arch" -c "base globus" # Set variables used in the script ############################################## # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ diff --git a/dev/jobs/JGLOBAL_MARINE_ANALYSIS_CHECKPOINT b/dev/jobs/JGLOBAL_MARINE_ANALYSIS_CHECKPOINT index aca6c756977..87ec96a01c1 100755 --- a/dev/jobs/JGLOBAL_MARINE_ANALYSIS_CHECKPOINT +++ b/dev/jobs/JGLOBAL_MARINE_ANALYSIS_CHECKPOINT @@ -9,7 +9,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "marineanlchkpt" -c "base marineanl # Set variables used in the script ############################################## # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH 
############################################## diff --git a/dev/jobs/JGLOBAL_MARINE_ANALYSIS_ECEN b/dev/jobs/JGLOBAL_MARINE_ANALYSIS_ECEN index d4cef593221..13fdc37fe8c 100755 --- a/dev/jobs/JGLOBAL_MARINE_ANALYSIS_ECEN +++ b/dev/jobs/JGLOBAL_MARINE_ANALYSIS_ECEN @@ -10,7 +10,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "marineanlecen" -c "base marineanle # Set variables used in the script ############################################## # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH # Ignore possible spelling error (nothing is misspelled) diff --git a/dev/jobs/JGLOBAL_MARINE_ANALYSIS_FINALIZE b/dev/jobs/JGLOBAL_MARINE_ANALYSIS_FINALIZE index 8298e0aad5e..cd68d3180f8 100755 --- a/dev/jobs/JGLOBAL_MARINE_ANALYSIS_FINALIZE +++ b/dev/jobs/JGLOBAL_MARINE_ANALYSIS_FINALIZE @@ -9,7 +9,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "marineanlfinal" -c "base marineanl # Set variables used in the script ############################################## # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH ############################################## diff --git a/dev/jobs/JGLOBAL_MARINE_ANALYSIS_INITIALIZE b/dev/jobs/JGLOBAL_MARINE_ANALYSIS_INITIALIZE index 9aeed602859..825f82a829f 100755 --- a/dev/jobs/JGLOBAL_MARINE_ANALYSIS_INITIALIZE +++ b/dev/jobs/JGLOBAL_MARINE_ANALYSIS_INITIALIZE @@ -9,7 +9,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "marineanlinit" -c "base marineanl # Set variables used in the script ############################################## # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH # Ignore possible spelling error (nothing is misspelled) diff --git 
a/dev/jobs/JGLOBAL_MARINE_ANALYSIS_LETKF b/dev/jobs/JGLOBAL_MARINE_ANALYSIS_LETKF index 668474fc1a0..a3ddd7eb279 100755 --- a/dev/jobs/JGLOBAL_MARINE_ANALYSIS_LETKF +++ b/dev/jobs/JGLOBAL_MARINE_ANALYSIS_LETKF @@ -12,7 +12,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "marineanlletkf" -c "base marineanl # Set variables used in the script ############################################## # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH # Ignore possible spelling error (nothing is misspelled) diff --git a/dev/jobs/JGLOBAL_MARINE_ANALYSIS_VARIATIONAL b/dev/jobs/JGLOBAL_MARINE_ANALYSIS_VARIATIONAL index eeecac72d5f..dbf26bc6e4d 100755 --- a/dev/jobs/JGLOBAL_MARINE_ANALYSIS_VARIATIONAL +++ b/dev/jobs/JGLOBAL_MARINE_ANALYSIS_VARIATIONAL @@ -10,7 +10,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "marineanlvar" -c "base marineanl m # Set variables used in the script ############################################## # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH ############################################## diff --git a/dev/jobs/JGLOBAL_MARINE_BMAT b/dev/jobs/JGLOBAL_MARINE_BMAT index 75e84faa2f8..3a9158c1f9d 100755 --- a/dev/jobs/JGLOBAL_MARINE_BMAT +++ b/dev/jobs/JGLOBAL_MARINE_BMAT @@ -18,7 +18,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "marinebmat" -c "base marinebmat" # Set variables used in the script ############################################## # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH # shellcheck disable=SC2153 diff --git a/dev/jobs/JGLOBAL_MARINE_BMAT_INITIALIZE b/dev/jobs/JGLOBAL_MARINE_BMAT_INITIALIZE index c86e91cb9ba..75dede4a8c9 100755 --- 
a/dev/jobs/JGLOBAL_MARINE_BMAT_INITIALIZE +++ b/dev/jobs/JGLOBAL_MARINE_BMAT_INITIALIZE @@ -17,7 +17,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "marinebmatinit" -c "base marinebma # Set variables used in the script ############################################## # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH # shellcheck disable=SC2153 diff --git a/dev/jobs/JGLOBAL_OCEANICE_PRODUCTS b/dev/jobs/JGLOBAL_OCEANICE_PRODUCTS index 68e07588759..6ac6de25f7f 100755 --- a/dev/jobs/JGLOBAL_OCEANICE_PRODUCTS +++ b/dev/jobs/JGLOBAL_OCEANICE_PRODUCTS @@ -3,7 +3,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "oceanice_products" -c "base oceanice_products" # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH ############################################## diff --git a/dev/jobs/JGLOBAL_OFFLINE_ATMOS_ANALYSIS b/dev/jobs/JGLOBAL_OFFLINE_ATMOS_ANALYSIS index bda0911d614..fd43fea65ae 100755 --- a/dev/jobs/JGLOBAL_OFFLINE_ATMOS_ANALYSIS +++ b/dev/jobs/JGLOBAL_OFFLINE_ATMOS_ANALYSIS @@ -7,7 +7,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "offlineanl" -c "base offlineanl" # Set variables used in the script ############################################## # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH ############################################## diff --git a/dev/jobs/JGLOBAL_PREP_EMISSIONS b/dev/jobs/JGLOBAL_PREP_EMISSIONS index f00a1cd1583..35796ba6b9f 100755 --- a/dev/jobs/JGLOBAL_PREP_EMISSIONS +++ b/dev/jobs/JGLOBAL_PREP_EMISSIONS @@ -6,7 +6,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "prep_emissions" -c "base prep_emis # Set variables used in the script 
############################################## # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH # TODO: Set local variables used in this script e.g. GDATE may be needed for previous cycle diff --git a/dev/jobs/JGLOBAL_PREP_OCEAN_OBS b/dev/jobs/JGLOBAL_PREP_OCEAN_OBS index fffbeabf244..a3b83b1d114 100755 --- a/dev/jobs/JGLOBAL_PREP_OCEAN_OBS +++ b/dev/jobs/JGLOBAL_PREP_OCEAN_OBS @@ -5,7 +5,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "prepoceanobs" -c "base marineanl p # Set variables used in the script ############################################## # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COMOUT_OBS:COM_OBS_TMPL diff --git a/dev/jobs/JGLOBAL_SNOWENS_ANALYSIS b/dev/jobs/JGLOBAL_SNOWENS_ANALYSIS index b568360baeb..c6090477dc9 100755 --- a/dev/jobs/JGLOBAL_SNOWENS_ANALYSIS +++ b/dev/jobs/JGLOBAL_SNOWENS_ANALYSIS @@ -6,7 +6,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "esnowanl" -c "base esnowanl" # Set variables used in the script ############################################## # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH # Ignore possible spelling error (nothing is misspelled) diff --git a/dev/jobs/JGLOBAL_SNOW_ANALYSIS b/dev/jobs/JGLOBAL_SNOW_ANALYSIS index f29a674cc1f..f7b03750056 100755 --- a/dev/jobs/JGLOBAL_SNOW_ANALYSIS +++ b/dev/jobs/JGLOBAL_SNOW_ANALYSIS @@ -6,7 +6,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "snowanl" -c "base snowanl" # Set variables used in the script ############################################## # Setup Python path for pygfs 
-PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH # Ignore possible spelling error (nothing is misspelled) diff --git a/dev/jobs/JGLOBAL_STAGE_IC b/dev/jobs/JGLOBAL_STAGE_IC index eb23da86bbe..92615f1d736 100755 --- a/dev/jobs/JGLOBAL_STAGE_IC +++ b/dev/jobs/JGLOBAL_STAGE_IC @@ -3,7 +3,7 @@ source "${HOMEglobal}/ush/jjob_header.sh" -e "stage_ic" -c "base stage_ic" # Setup Python path for pygfs -PYTHONPATH="${HOMEgfs}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" +PYTHONPATH="${HOMEglobal}/ush/python${PYTHONPATH:+:${PYTHONPATH}}" export PYTHONPATH # Execute staging diff --git a/dev/ush/convert_from_global_to_net.sh b/dev/ush/convert_from_global_to_net.sh index 09e9762c810..3e51760b50c 100755 --- a/dev/ush/convert_from_global_to_net.sh +++ b/dev/ush/convert_from_global_to_net.sh @@ -1,16 +1,15 @@ #!/bin/bash # convert_from_global_to_net.sh # Script to convert HOMEglobal, PARMglobal, etc. to HOME${NET}, PARM${NET}, etc. -# for operational deployment +# for operations/development # # Usage: convert_from_global_to_net.sh [--exclude dir1 dir2 dir3 ...] 
-# -# NET_value can be: gfs, gefs, sfs, or gcafs (NOT 'all' - use specific NET) +# NET_value can be: gfs, gefs, sfs, gcafs # target_path can be a file or directory # -# Example: convert_from_global_to_net.sh gfs /path/to/deployment --exclude sorc dev parm/archive +# Example: convert_from_global_to_net.sh gfs /path/to/development --exclude sorc dev parm/archive # -# This script performs selective search/replace for deployment variables +# This script performs selective search/replace to convert global variables to NET-specific ones set -eu @@ -40,42 +39,27 @@ trap cleanup EXIT ERR if [[ $# -lt 2 ]]; then echo "ERROR: NET value and target path required" echo "Usage: $0 [--exclude dir1 dir2 dir3 ...]" - echo "NET_value must be one of: gfs, gefs, sfs, gcafs" + echo "NET_value can be: gfs, gefs, sfs, gcafs" exit 1 fi NET="$1" shift -# Define all possible NET values for validation -ALL_NET_VALUES=("gefs" "gfs" "gcafs" "sfs") - -# Validate NET value - must be specific, NOT 'all' -if [[ "${NET}" == "all" ]]; then - echo -e "${RED}ERROR: 'all' is not supported for convert_from_global_to_net.sh${NC}" >&2 - echo -e "${YELLOW}You must specify a single NET value: gfs, gefs, sfs, or gcafs${NC}" >&2 - echo -e "${YELLOW}Use convert_from_net_to_global.sh with 'all' to revert changes${NC}" >&2 - exit 1 -fi - -# Validate NET is one of the allowed values -valid_net=false -for net_val in "${ALL_NET_VALUES[@]}"; do - if [[ "${NET}" == "${net_val}" ]]; then - valid_net=true - break - fi -done - -if ! ${valid_net}; then - echo -e "${RED}ERROR: Invalid NET value '${NET}'${NC}" >&2 - echo -e "${YELLOW}Must be one of: ${ALL_NET_VALUES[*]}${NC}" >&2 - exit 1 -fi +# Validate NET value +case "${NET}" in + gfs|gefs|sfs|gcafs) + ;; + *) + echo "ERROR: Invalid NET value '${NET}'. 
Must be one of: gfs, gefs, sfs, gcafs" + exit 1 + ;; +esac # Initialize target path TARGET_PATH="" -EXCLUDE_DIRS=() +# Default exclusions: Always exclude the conversion scripts themselves +EXCLUDE_DIRS=("dev/ush/convert_from_net_to_global.sh" "dev/ush/convert_from_global_to_net.sh") # Parse remaining arguments while [[ $# -gt 0 ]]; do @@ -106,44 +90,24 @@ if [[ ! -e "${TARGET_PATH}" ]]; then exit 1 fi -# List of directories and files to exclude from processing -exclude_items=( - "dev/ush/convert_from_net_to_global.sh" - "dev/ush/convert_from_global_to_net.sh" -) - -# Build grep exclusion pattern (includes all items) -exclude_pattern="" -for item in "${exclude_items[@]}"; do - if [[ -n "${exclude_pattern}" ]]; then - exclude_pattern="${exclude_pattern}|" - fi - exclude_pattern="${exclude_pattern}${item}" -done - -# Display what we're excluding (filter out conversion scripts from display) +# Build display exclude list (filter out conversion scripts) display_exclude=() -for item in "${exclude_items[@]}"; do +for item in "${EXCLUDE_DIRS[@]}"; do if [[ "${item}" != "dev/ush/convert_from_net_to_global.sh" && "${item}" != "dev/ush/convert_from_global_to_net.sh" ]]; then display_exclude+=("${item}") fi done -if [[ ${#display_exclude[@]} -gt 0 ]]; then - echo "Excluding directories: ${display_exclude[*]}" -fi - # Display processing header echo -e "${CYAN}=========================================${NC}" -echo -e "${YELLOW}Processing: Converting ${RED}global${NC}${YELLOW}-workflow variables to ${GREEN}${NET}${NC}${YELLOW}-specific variables${NC}" +echo -e "${YELLOW}Processing: Converting ${GREEN}global${NC}${YELLOW}-workflow variables to ${RED}${NET}${NC}${YELLOW}-specific variables${NC}" echo -e "${BLUE}Target: ${TARGET_PATH}${NC}" -if [[ ${#EXCLUDE_DIRS[@]} -gt 0 ]]; then - echo -e "${BLUE}Excluding directories: ${EXCLUDE_DIRS[*]}${NC}" +if [[ ${#display_exclude[@]} -gt 0 ]]; then + echo -e "${BLUE}Excluding directories: ${display_exclude[*]}${NC}" fi echo -e 
"${CYAN}=========================================${NC}" - echo "" -echo -e "${YELLOW}Converting for: ${RED}global${NC} ${YELLOW}→${NC} ${GREEN}${NET}${NC}" +echo -e "${YELLOW}Converting: ${GREEN}global${NC} ${YELLOW}→${NC} ${RED}${NET}${NC}" # List of patterns to convert declare -A patterns=( @@ -157,12 +121,6 @@ declare -A patterns=( # If target is a single file, process it directly if [[ -f "${TARGET_PATH}" ]]; then - # Pre-check: Skip if ANY NET-specific variable already exists - if grep -qE '\b(HOME|PARM|USH|SCR|EXEC|FIX)(gfs|gefs|sfs|gcafs)\b' "${TARGET_PATH}" 2> /dev/null; then - echo -e "${YELLOW}⚠ File already has NET-specific variables - skipped${NC}" - exit 0 - fi - file_modified=false for pattern in "${!patterns[@]}"; do replacement="${patterns[${pattern}]}" @@ -178,59 +136,75 @@ if [[ -f "${TARGET_PATH}" ]]; then if ${file_modified}; then echo -e "${GREEN}✓ Processed 1 file for NET=${NET}${NC}" else - echo -e "${YELLOW}No files to convert for NET=${current_net}${NC}" + echo -e "${YELLOW}No files to convert for NET=${NET}${NC}" fi else # Build find command with exclusions for directory - # Build find command with excluded directories (properly handle subdirectories) + find_cmd="find \"${TARGET_PATH}\"" + + # Build exclusion list for directories if [[ ${#EXCLUDE_DIRS[@]} -gt 0 ]]; then - exclude_args="" + find_cmd+=" -type d \\(" + first=true for exclude_dir in "${EXCLUDE_DIRS[@]}"; do - exclude_args+="-name \"$(basename "${exclude_dir}")\" -o " + # Remove leading ./ if present + exclude_dir="${exclude_dir#./}" + + # Extract just the directory name (last component of path) + dir_name=$(basename "${exclude_dir}") + + if ${first}; then + find_cmd+=" -name \"${dir_name}\"" + first=false + else + find_cmd+=" -o -name \"${dir_name}\"" + fi done - exclude_args="${exclude_args% -o }" - eval "find \"${TARGET_PATH}\" -type d \( ${exclude_args} \) -prune -o -type f -print" > /tmp/convert_files_$$.txt - else - find "${TARGET_PATH}" -type f > 
/tmp/convert_files_$$.txt + find_cmd+=" \\) -prune -o" + fi + + # Complete find command to get files + find_cmd+=" -type f -print" + + # Execute find and get file list + if ! eval "${find_cmd}" > /tmp/convert_files_$$.txt; then + echo -e "${RED}ERROR: Failed to find files in ${TARGET_PATH}${NC}" >&2 + exit 1 fi + # Count files to process file_count=$(wc -l < /tmp/convert_files_$$.txt) if [[ ${file_count} -eq 0 ]]; then echo -e "${YELLOW}No files to convert for NET=${NET}${NC}" rm -f /tmp/convert_files_$$.txt else - # Count files to process echo -e "${BLUE}Processing ${file_count} files...${NC}" # Perform the replacements failed_files=0 - skipped_files=0 + converted_files=0 while IFS= read -r file; do if [[ -f "${file}" ]]; then - # Pre-check: Skip file if it contains ANY NET-specific variable (gfs, gefs, sfs, gcafs) - if grep -qE '\b(HOME|PARM|USH|SCR|EXEC|FIX)(gfs|gefs|sfs|gcafs)\b' "${file}" 2> /dev/null; then - skipped_files=$((skipped_files + 1)) - else - # Proceed with conversion only if no NET-specific vars found - file_modified=false - file_failed=false - for pattern in "${!patterns[@]}"; do - replacement="${patterns[${pattern}]}" - if grep -q "\\b${pattern}\\b" "${file}" 2> /dev/null; then - if ! sed -i "s/\\b${pattern}\\b/${replacement}/g" "${file}"; then - echo -e "${RED}ERROR: sed failed on ${file}${NC}" >&2 - failed_files=$((failed_files + 1)) - file_failed=true - break - fi - file_modified=true + # Process file regardless of whether it has NET-specific vars + # (this allows converting files with mixed global/NET vars) + file_modified=false + file_failed=false + for pattern in "${!patterns[@]}"; do + replacement="${patterns[${pattern}]}" + if grep -q "\\b${pattern}\\b" "${file}" 2> /dev/null; then + if ! sed -i "s/\\b${pattern}\\b/${replacement}/g" "${file}"; then + echo -e "${RED}ERROR: sed failed on ${file}${NC}" >&2 + failed_files=$((failed_files + 1)) + file_failed=true + break fi - done - - if ! ${file_modified} && ! 
${file_failed}; then - skipped_files=$((skipped_files + 1)) + file_modified=true fi + done + + if ${file_modified} && ! ${file_failed}; then + converted_files=$((converted_files + 1)) fi fi done < /tmp/convert_files_$$.txt @@ -238,18 +212,17 @@ else # Clean up rm -f /tmp/convert_files_$$.txt - files_converted=$((file_count - failed_files - skipped_files)) - if [[ ${files_converted} -eq 0 ]]; then + if [[ ${converted_files} -eq 0 ]]; then echo -e "${YELLOW}No files to convert for NET=${NET}${NC}" elif [[ ${failed_files} -gt 0 ]]; then - echo -e "${YELLOW}⚠ Converted ${files_converted}/${file_count} files (${failed_files} failed) for NET=${NET}${NC}" + echo -e "${YELLOW}⚠ Converted ${converted_files} files (${failed_files} failed) for NET=${NET}${NC}" else - echo -e "${GREEN}✓ Converted ${files_converted}/${file_count} files for NET=${NET}${NC}" + echo -e "${GREEN}✓ Converted ${converted_files} files for NET=${NET}${NC}" fi fi fi -echo -e "${GREEN}Completed!${NC}" echo "" echo -e "${CYAN}=========================================${NC}" -echo -e "${GREEN}Conversion to NET=${NET} completed successfully!${NC}" +echo -e "${GREEN}Conversion completed successfully!${NC}" +echo -e "${CYAN}=========================================${NC}" diff --git a/dev/ush/convert_from_net_to_global.sh b/dev/ush/convert_from_net_to_global.sh index fdfd21ca62e..8226e13d368 100755 --- a/dev/ush/convert_from_net_to_global.sh +++ b/dev/ush/convert_from_net_to_global.sh @@ -1,10 +1,9 @@ #!/bin/bash # convert_from_net_to_global.sh -# Script to convert HOME${NET}, PARM${NET}, etc. back to HOMEgfs, PARMgfs, etc. +# Script to convert HOME${NET}, PARM${NET}, etc. back to HOMEglobal, PARMglobal, etc. # for development # # Usage: convert_from_net_to_global.sh [--exclude dir1 dir2 dir3 ...] 
-# # NET_value can be: gfs, gefs, sfs, gcafs, or all (for all NET values) # target_path can be a file or directory # @@ -63,33 +62,6 @@ TARGET_PATH="" # Default exclusions: Always exclude the conversion scripts themselves EXCLUDE_DIRS=("dev/ush/convert_from_net_to_global.sh" "dev/ush/convert_from_global_to_net.sh") -# List of directories and files to exclude from processing -exclude_items=( - "dev/ush/convert_from_net_to_global.sh" - "dev/ush/convert_from_global_to_net.sh" -) - -# Build grep exclusion pattern (includes all items) -exclude_pattern="" -for item in "${exclude_items[@]}"; do - if [[ -n "${exclude_pattern}" ]]; then - exclude_pattern="${exclude_pattern}|" - fi - exclude_pattern="${exclude_pattern}${item}" -done - -# Display what we're excluding (filter out conversion scripts from display) -display_exclude=() -for item in "${exclude_items[@]}"; do - if [[ "${item}" != "dev/ush/convert_from_net_to_global.sh" && "${item}" != "dev/ush/convert_from_global_to_net.sh" ]]; then - display_exclude+=("${item}") - fi -done - -if [[ ${#display_exclude[@]} -gt 0 ]]; then - echo "Excluding directories: ${display_exclude[*]}" -fi - # Parse remaining arguments while [[ $# -gt 0 ]]; do case "$1" in @@ -119,6 +91,14 @@ if [[ ! 
-e "${TARGET_PATH}" ]]; then exit 1 fi +# Build display exclude list (filter out conversion scripts) +display_exclude=() +for item in "${EXCLUDE_DIRS[@]}"; do + if [[ "${item}" != "dev/ush/convert_from_net_to_global.sh" && "${item}" != "dev/ush/convert_from_global_to_net.sh" ]]; then + display_exclude+=("${item}") + fi +done + # Display processing header echo -e "${CYAN}=========================================${NC}" if [[ "${NET}" == "all" ]]; then @@ -210,52 +190,28 @@ for current_net in "${NET_LIST[@]}"; do # Perform the replacements failed_files=0 - skipped_files=0 + converted_files=0 while IFS= read -r file; do if [[ -f "${file}" ]]; then - # Pre-check: Skip file if ANY global variable already exists - should_skip=false - - # Build list of global patterns to check for - declare -a global_patterns=( - "HOMEglobal" - "PARMglobal" - "USHglobal" - "SCRglobal" - "EXECglobal" - "FIXglobal" - ) - - # Check if any global pattern already exists in file - for global_pattern in "${global_patterns[@]}"; do - if grep -q "\\b${global_pattern}\\b" "${file}" 2> /dev/null; then - should_skip=true - break + # Process file regardless of whether it has global vars + # (this allows converting files with mixed NET/global vars) + file_modified=false + file_failed=false + for pattern in "${!patterns[@]}"; do + replacement="${patterns[${pattern}]}" + if grep -q "\\b${pattern}\\b" "${file}" 2> /dev/null; then + if ! sed -i "s/\\b${pattern}\\b/${replacement}/g" "${file}"; then + echo -e "${RED}ERROR: sed failed on ${file}${NC}" >&2 + failed_files=$((failed_files + 1)) + file_failed=true + break + fi + file_modified=true fi done - if ${should_skip}; then - skipped_files=$((skipped_files + 1)) - else - # Proceed with conversion only if no global vars found - file_modified=false - file_failed=false - for pattern in "${!patterns[@]}"; do - replacement="${patterns[${pattern}]}" - if grep -q "\\b${pattern}\\b" "${file}" 2> /dev/null; then - if ! 
sed -i "s/\\b${pattern}\\b/${replacement}/g" "${file}"; then - echo -e "${RED}ERROR: sed failed on ${file}${NC}" >&2 - failed_files=$((failed_files + 1)) - file_failed=true - break - fi - file_modified=true - fi - done - - if ! ${file_modified} && ! ${file_failed}; then - skipped_files=$((skipped_files + 1)) - fi + if ${file_modified} && ! ${file_failed}; then + converted_files=$((converted_files + 1)) fi fi done < /tmp/convert_files_$$.txt @@ -263,20 +219,18 @@ for current_net in "${NET_LIST[@]}"; do # Clean up rm -f /tmp/convert_files_$$.txt - files_converted=$((file_count - failed_files - skipped_files)) - if [[ ${files_converted} -eq 0 ]]; then + if [[ ${converted_files} -eq 0 ]]; then echo -e "${YELLOW}No files to convert for NET=${current_net}${NC}" elif [[ ${failed_files} -gt 0 ]]; then - echo -e "${YELLOW}⚠ Converted ${files_converted}/${file_count} files (${failed_files} failed) for NET=${current_net}${NC}" + echo -e "${YELLOW}⚠ Converted ${converted_files} files (${failed_files} failed) for NET=${current_net}${NC}" else - echo -e "${GREEN}✓ Converted ${files_converted}/${file_count} files for NET=${current_net}${NC}" + echo -e "${GREEN}✓ Converted ${converted_files} files for NET=${current_net}${NC}" fi fi fi - - echo -e "${GREEN}Completed!${NC}" done echo "" echo -e "${CYAN}=========================================${NC}" echo -e "${GREEN}All conversions completed successfully!${NC}" +echo -e "${CYAN}=========================================${NC}" From 4c194f3c45650e0c3e148f8b95701d28739cabc7 Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Thu, 19 Feb 2026 20:52:05 +0000 Subject: [PATCH 61/71] Fix shellcheck formatting in convert_from_global_to_net.sh --- dev/ush/convert_from_global_to_net.sh | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/dev/ush/convert_from_global_to_net.sh b/dev/ush/convert_from_global_to_net.sh index 3e51760b50c..8dcfe47775f 100755 --- a/dev/ush/convert_from_global_to_net.sh +++ 
b/dev/ush/convert_from_global_to_net.sh @@ -48,8 +48,7 @@ shift # Validate NET value case "${NET}" in - gfs|gefs|sfs|gcafs) - ;; + gfs | gefs | sfs | gcafs) ;; *) echo "ERROR: Invalid NET value '${NET}'. Must be one of: gfs, gefs, sfs, gcafs" exit 1 From cdc06ed2929e4703dc2e8cd31304795f34e6f9d1 Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Thu, 19 Feb 2026 22:47:07 +0000 Subject: [PATCH 62/71] update gdas to current hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index ce78b570c3f..13888d8dc5c 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit ce78b570c3f104d5ad6f2c2bdc10d0558cfbea0c +Subproject commit 13888d8dc5c32f71d6cc46ad8dde964d5bde586e From 2fabbf989c39728efe0cefc04de9da0dfdc0d342 Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Fri, 20 Feb 2026 15:50:52 +0000 Subject: [PATCH 63/71] update HOMEgfs->HOMEglobal --- dev/ci/cases/pr/C48_gsienkf_atmDA.yaml | 2 +- dev/ci/cases/pr/C48_ufsenkf_atmDA.yaml | 2 +- dev/ci/cases/yamls/gsienkf_atmDA_defaults.ci.yaml | 2 +- dev/ci/cases/yamls/ufsenkf_atmDA_defaults.ci.yaml | 2 +- dev/job_cards/rocoto/prepatmanlbias.sh | 4 ++-- dev/jobs/JGLOBAL_ATM_PREP_ANL_BIAS | 4 ++-- dev/workflow/rocoto/gfs_tasks.py | 4 ++-- 7 files changed, 10 insertions(+), 10 deletions(-) diff --git a/dev/ci/cases/pr/C48_gsienkf_atmDA.yaml b/dev/ci/cases/pr/C48_gsienkf_atmDA.yaml index 3e173cee86c..fd1f2d12ac2 100644 --- a/dev/ci/cases/pr/C48_gsienkf_atmDA.yaml +++ b/dev/ci/cases/pr/C48_gsienkf_atmDA.yaml @@ -13,7 +13,7 @@ experiment: nens: 2 interval: 0 start: warm - yaml: {{ HOMEgfs }}/dev/ci/cases/yamls/gsienkf_atmDA_defaults.ci.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/yamls/gsienkf_atmDA_defaults.ci.yaml skip_ci_on_hosts: - gaeac5 diff --git a/dev/ci/cases/pr/C48_ufsenkf_atmDA.yaml b/dev/ci/cases/pr/C48_ufsenkf_atmDA.yaml index b55f1de57b2..2067ccbb956 100644 --- a/dev/ci/cases/pr/C48_ufsenkf_atmDA.yaml +++ 
b/dev/ci/cases/pr/C48_ufsenkf_atmDA.yaml @@ -13,7 +13,7 @@ experiment: nens: 2 interval: 0 start: warm - yaml: {{ HOMEgfs }}/dev/ci/cases/yamls/ufsenkf_atmDA_defaults.ci.yaml + yaml: {{ HOMEglobal }}/dev/ci/cases/yamls/ufsenkf_atmDA_defaults.ci.yaml skip_ci_on_hosts: - gaeac5 diff --git a/dev/ci/cases/yamls/gsienkf_atmDA_defaults.ci.yaml b/dev/ci/cases/yamls/gsienkf_atmDA_defaults.ci.yaml index 6f27588162b..97da14a8608 100644 --- a/dev/ci/cases/yamls/gsienkf_atmDA_defaults.ci.yaml +++ b/dev/ci/cases/yamls/gsienkf_atmDA_defaults.ci.yaml @@ -1,5 +1,5 @@ defaults: - !INC {{ HOMEgfs }}/dev/parm/config/gfs/yaml/defaults.yaml + !INC {{ HOMEglobal }}/dev/parm/config/gfs/yaml/defaults.yaml base: DOIAU: "NO" DO_JEDIATMVAR: "NO" diff --git a/dev/ci/cases/yamls/ufsenkf_atmDA_defaults.ci.yaml b/dev/ci/cases/yamls/ufsenkf_atmDA_defaults.ci.yaml index 0a76c23e6ec..ab55a4c1f01 100644 --- a/dev/ci/cases/yamls/ufsenkf_atmDA_defaults.ci.yaml +++ b/dev/ci/cases/yamls/ufsenkf_atmDA_defaults.ci.yaml @@ -1,5 +1,5 @@ defaults: - !INC {{ HOMEgfs }}/dev/parm/config/gfs/yaml/defaults.yaml + !INC {{ HOMEglobal }}/dev/parm/config/gfs/yaml/defaults.yaml base: DOIAU: "NO" DO_JEDIATMVAR: "NO" diff --git a/dev/job_cards/rocoto/prepatmanlbias.sh b/dev/job_cards/rocoto/prepatmanlbias.sh index 2d576aa68ba..7376fca14d6 100755 --- a/dev/job_cards/rocoto/prepatmanlbias.sh +++ b/dev/job_cards/rocoto/prepatmanlbias.sh @@ -2,7 +2,7 @@ ############################################################### # Source UFSDA workflow modules -source "${HOMEgfs}/dev/ush/load_modules.sh" ufsda +source "${HOMEglobal}/dev/ush/load_modules.sh" ufsda status=$? if [[ ${status} -ne 0 ]]; then exit "${status}" @@ -13,6 +13,6 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/dev/jobs/JGLOBAL_ATM_PREP_ANL_BIAS" +"${HOMEglobal}/dev/jobs/JGLOBAL_ATM_PREP_ANL_BIAS" status=$? 
exit "${status}" diff --git a/dev/jobs/JGLOBAL_ATM_PREP_ANL_BIAS b/dev/jobs/JGLOBAL_ATM_PREP_ANL_BIAS index 386ff87421f..d9c9c553d1f 100755 --- a/dev/jobs/JGLOBAL_ATM_PREP_ANL_BIAS +++ b/dev/jobs/JGLOBAL_ATM_PREP_ANL_BIAS @@ -1,7 +1,7 @@ #! /usr/bin/env bash export WIPE_DATA="NO" export DATA=${DATA:-${DATAROOT}/${RUN}fetch_${cyc}} -source "${HOMEgfs}/ush/jjob_header.sh" -e "prepatmanlbias" -c "base prepatmanlbias" +source "${HOMEglobal}/ush/jjob_header.sh" -e "prepatmanlbias" -c "base prepatmanlbias" ############################################## # Set variables used in the script @@ -34,7 +34,7 @@ ABIAS_SAT_JEDI_TAR=${COMOUT_ATMOS_ANALYSIS_PREV}/${GDUMP}.t${gcyc}z.rad_varbc_pa ABIAS_AIR_JEDI_TAR=${COMOUT_ATMOS_ANALYSIS_PREV}/${GDUMP}.t${gcyc}z.air_varbc_params.tar SATBIAS2IODAX=${SATBIAS2IODAX:-${EXECgfs}/satbias2ioda.x} -SATBIAS2IODAY=${HOMEgfs}/sorc/gdas.cd/ush/satbias_converter.yaml.tmpl +SATBIAS2IODAY=${HOMEglobal}/sorc/gdas.cd/ush/satbias_converter.yaml.tmpl OUTPUT=${DATA}/output mkdir -p "${COMOUT_ATMOS_ANALYSIS_PREV}" diff --git a/dev/workflow/rocoto/gfs_tasks.py b/dev/workflow/rocoto/gfs_tasks.py index 8ca781d03ce..522991cedf2 100644 --- a/dev/workflow/rocoto/gfs_tasks.py +++ b/dev/workflow/rocoto/gfs_tasks.py @@ -51,7 +51,7 @@ def fetchatmanlbias(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': cycledef, - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/fetch.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/fetch.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -199,7 +199,7 @@ def prepatmanlbias(self): 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/dev/job_cards/rocoto/prepatmanlbias.sh', + 'command': f'{self.HOMEglobal}/dev/job_cards/rocoto/prepatmanlbias.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 
'maxtries': '&MAXTRIES;' From 1fad4ae312c860173599e83186c93a931bd9f1b8 Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Fri, 20 Feb 2026 16:56:57 +0000 Subject: [PATCH 64/71] update gdas app --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 13888d8dc5c..5ea6e5749f3 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 13888d8dc5c32f71d6cc46ad8dde964d5bde586e +Subproject commit 5ea6e5749f3555656580ab9332f8f714cfaab603 From eba9292e60c0a29353a9f92137e62301e9a46cf2 Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Fri, 20 Feb 2026 21:52:06 +0000 Subject: [PATCH 65/71] update global_cycle scripts --- dev/scripts/exglobal_enkf_sfc.sh | 5 ++--- ush/global_cycle.sh | 3 +-- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/dev/scripts/exglobal_enkf_sfc.sh b/dev/scripts/exglobal_enkf_sfc.sh index f78e3beb9dd..8dad1c52bbf 100755 --- a/dev/scripts/exglobal_enkf_sfc.sh +++ b/dev/scripts/exglobal_enkf_sfc.sh @@ -235,7 +235,7 @@ if [[ "${DOIAU}" == "YES" ]]; then mkdir -p "${COMOUT_ATMOS_RESTART_MEM}" fi - cpfs "${DATA}/fnbgso.${cmem}" "${COMOUT_ATMOS_RESTART_MEM}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc" + cpfs "${DATA}/sfc_data_cycle.${cmem}" "${COMOUT_ATMOS_RESTART_MEM}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc" done # ensembles @@ -275,7 +275,6 @@ if [[ "${DOSFCANL_ENKF}" == "YES" ]]; then else sfcdata_dir="${COMIN_ATMOS_RESTART_MEM_PREV}" fi - cpreq "${sfcdata_dir}/${PDY}.${cyc}0000.${snow_prefix}sfc_data.tile${n}.nc" \ "${DATA}/fnbgsi.${cmem}" cpreq "${DATA}/fnbgsi.${cmem}" "${DATA}/fnbgso.${cmem}" @@ -313,7 +312,7 @@ if [[ "${DOSFCANL_ENKF}" == "YES" ]]; then mkdir -p "${COMOUT_ATMOS_RESTART_MEM}" fi - cpfs "${DATA}/fnbgso.${cmem}" "${COMOUT_ATMOS_RESTART_MEM}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc" + cpfs "${DATA}/sfc_data_cycle.${cmem}" "${COMOUT_ATMOS_RESTART_MEM}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc" done diff --git a/ush/global_cycle.sh 
b/ush/global_cycle.sh index 0a22902e4f2..cee44f23cec 100755 --- a/ush/global_cycle.sh +++ b/ush/global_cycle.sh @@ -127,7 +127,6 @@ # DO_SFCCYCLE Call sfcsub routine # GCYCLE_DO_SOILINCR Call routine to add soil increments # GCYCLE_DO_SNOWINCR Call routine to add snow inrcements -# GCYCLE_INTERP_LANDINCR Flag to regrid input land increment from Gaus to native model # grid inside gcycle # # zsea1/zsea2 When running with NST model, this is the lower/upper bound @@ -230,7 +229,7 @@ if [[ "${GCYCLE_DO_SOILINCR}" == ".true." ]] || [[ "${GCYCLE_DO_SNOWINCR}" == ". else DO_LANDINCR=".false." fi -GCYCLE_INTERP_LANDINCR=${GCYCLE_INTERP_LANDINCR:-.false.} + zsea1=${zsea1:-0} zsea2=${zsea2:-0} MAX_TASKS_CY=${MAX_TASKS_CY:-99999} From 7431e649f0c25ef6e5a2e895d71784c913b5d8eb Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Fri, 20 Feb 2026 22:02:05 +0000 Subject: [PATCH 66/71] update fnbgso --- dev/scripts/exglobal_enkf_sfc.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dev/scripts/exglobal_enkf_sfc.sh b/dev/scripts/exglobal_enkf_sfc.sh index 8dad1c52bbf..23e4f8d09b3 100755 --- a/dev/scripts/exglobal_enkf_sfc.sh +++ b/dev/scripts/exglobal_enkf_sfc.sh @@ -199,7 +199,7 @@ if [[ "${DOIAU}" == "YES" ]]; then fi cpreq "${sfcdata_dir}/${bPDY}.${bcyc}0000.${snow_prefix}sfc_data.tile${n}.nc" \ "${DATA}/fnbgsi.${cmem}" - cpreq "${DATA}/fnbgsi.${cmem}" "${DATA}/fnbgso.${cmem}" + cpreq "${DATA}/fnbgsi.${cmem}" "${DATA}/sfc_data_cycle.${cmem}" cpreq "${FIXglobal}/orog/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" cpreq "${FIXglobal}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" @@ -277,7 +277,7 @@ if [[ "${DOSFCANL_ENKF}" == "YES" ]]; then fi cpreq "${sfcdata_dir}/${PDY}.${cyc}0000.${snow_prefix}sfc_data.tile${n}.nc" \ "${DATA}/fnbgsi.${cmem}" - cpreq "${DATA}/fnbgsi.${cmem}" "${DATA}/fnbgso.${cmem}" + cpreq "${DATA}/fnbgsi.${cmem}" "${DATA}/sfc_data_cycle.${cmem}" cpreq 
"${FIXglobal}/orog/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" cpreq "${FIXglobal}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" From 3fd67f14afb629e8967d6b85cfc255683b2e8bfc Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Tue, 24 Feb 2026 14:47:43 +0000 Subject: [PATCH 67/71] update gdas app --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 5ea6e5749f3..b3fc0c79119 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 5ea6e5749f3555656580ab9332f8f714cfaab603 +Subproject commit b3fc0c791198370fa0b09c4cd9c19998c1bd91a2 From b585b2e5b7b824aa808bf24cae036653ba6b484e Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Thu, 26 Feb 2026 18:50:41 +0000 Subject: [PATCH 68/71] update gsi monitor --- sorc/gsi_monitor.fd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gsi_monitor.fd b/sorc/gsi_monitor.fd index fd742904f2c..59f45983606 160000 --- a/sorc/gsi_monitor.fd +++ b/sorc/gsi_monitor.fd @@ -1 +1 @@ -Subproject commit fd742904f2c66f504f8e28bf702fa93d8a398bbe +Subproject commit 59f45983606fd228a165806f78f832de033e7478 From 0ca4a1e2d11b670403e91f6b07e0ef0ec19dc8c7 Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Thu, 26 Feb 2026 20:40:28 +0000 Subject: [PATCH 69/71] update global-cycle scripts --- dev/scripts/exglobal_enkf_sfc.sh | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/dev/scripts/exglobal_enkf_sfc.sh b/dev/scripts/exglobal_enkf_sfc.sh index 23e4f8d09b3..3b7952ab57d 100755 --- a/dev/scripts/exglobal_enkf_sfc.sh +++ b/dev/scripts/exglobal_enkf_sfc.sh @@ -198,8 +198,7 @@ if [[ "${DOIAU}" == "YES" ]]; then mkdir -p "${COMOUT_ATMOS_RESTART_MEM}" fi cpreq "${sfcdata_dir}/${bPDY}.${bcyc}0000.${snow_prefix}sfc_data.tile${n}.nc" \ - "${DATA}/fnbgsi.${cmem}" - cpreq "${DATA}/fnbgsi.${cmem}" "${DATA}/sfc_data_cycle.${cmem}" + "${DATA}/sfc_data_cycle.${cmem}" cpreq 
"${FIXglobal}/orog/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" cpreq "${FIXglobal}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" @@ -322,7 +321,5 @@ fi ################################################################################ ################################################################################ -# Postprocessing -cd "${pwd}" || exit 1 exit "${err}" From 3fa3b974c870f6cf9fb18ee6e631bf5a8c0d27fe Mon Sep 17 00:00:00 2001 From: AntonMFernando-NOAA <167725623+AntonMFernando-NOAA@users.noreply.github.com> Date: Thu, 26 Feb 2026 15:46:30 -0500 Subject: [PATCH 70/71] Remove pwd assignment from exglobal_enkf_sfc.sh Removed unnecessary assignment of the current working directory. --- dev/scripts/exglobal_enkf_sfc.sh | 3 --- 1 file changed, 3 deletions(-) diff --git a/dev/scripts/exglobal_enkf_sfc.sh b/dev/scripts/exglobal_enkf_sfc.sh index 3b7952ab57d..3e46e4e138c 100755 --- a/dev/scripts/exglobal_enkf_sfc.sh +++ b/dev/scripts/exglobal_enkf_sfc.sh @@ -17,9 +17,6 @@ # ################################################################################ -# Directories. 
-pwd=$(pwd) - # Base variables DONST=${DONST:-"NO"} DO_GSISOILDA=${DO_GSISOILDA:-"NO"} From bc5ccc38a6cf0b7ce8dc249c3a48a8e3be6d1fc3 Mon Sep 17 00:00:00 2001 From: Anton Fernando Date: Thu, 26 Feb 2026 21:01:20 +0000 Subject: [PATCH 71/71] update gsi_monitor --- sorc/gsi_monitor.fd | 2 +- sorc/verif-global.fd | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/sorc/gsi_monitor.fd b/sorc/gsi_monitor.fd index 59f45983606..004068715e2 160000 --- a/sorc/gsi_monitor.fd +++ b/sorc/gsi_monitor.fd @@ -1 +1 @@ -Subproject commit 59f45983606fd228a165806f78f832de033e7478 +Subproject commit 004068715e2fbffef745353416e20277c5214728 diff --git a/sorc/verif-global.fd b/sorc/verif-global.fd index a696b889235..d20c05d4098 160000 --- a/sorc/verif-global.fd +++ b/sorc/verif-global.fd @@ -1 +1 @@ -Subproject commit a696b8892359b1614a720742af46231e42f36e0b +Subproject commit d20c05d40988b63093d12e7f5f80fafef5f2fa44