diff --git a/Config/WCOSS/v3.1/template_forcing_engine_Hawaii_RRFS_Analysis.config b/Config/WCOSS/v3.1/template_forcing_engine_Hawaii_RRFS_Analysis.config new file mode 100755 index 0000000..1fd99b5 --- /dev/null +++ b/Config/WCOSS/v3.1/template_forcing_engine_Hawaii_RRFS_Analysis.config @@ -0,0 +1,350 @@ +#-------------------------------------------------------------------- +# WRF-Hydro Forcing Engine Configuration File +# +# Input options to the forcing engine include: +# 1.) Choices for input forcing files to use. +# 2.) Options for specifying date ranges and forecast intervals +# for input files. +# 3.) Choices for ESMF regridding techniques. +# 4.) Choices for optional downscaling techniques. +# 5.) Choices for optional bias correction techniques. +# 6.) Choices for optional supplemental precipitation products. +# 7.) Choices for optional ensemble member variations. +# 8.) Choices for output directories to place final output files. + +[Input] +# Choose a set of value(s) of forcing variables to be processed for +# WRF-Hydro. Please be advised that the order of which the values are +# chosen below are the order that the final products will be layered +# into the final LDASIN files. See documentation for additional +# information and examples. +# The following is a global set of key values to map forcing files +# to variables within LDASIN files for WRF-Hydro. The forcing engine +# will map files to external variable names internally. For custom +# external native forcing files (see documenation), the code will +# expect a set of named variables to process. 
The following is a +# mapping of numeric values to external input native forcing files: +# 1 - NLDAS GRIB retrospective files +# 2 - NARR GRIB retrospective files +# 3 - GFS GRIB2 Global production files on the full gaussian grid +# 4 - NAM Nest GRIB2 Conus production files +# 5 - HRRR GRIB2 Conus production files +# 6 - RAP GRIB2 Conus 13km production files +# 7 - CFSv2 6-hourly GRIB2 Global production files +# 8 - WRF-ARW - GRIB2 Hawaii nest files +# 9 - GFS GRIB2 Global production files on 0.25 degree lat/lon grids. +# 10 - Custom NetCDF hourly forcing files +# 11 - Custom NetCDF hourly forcing files +# 12 - Custom NetCDF hourly forcing files +# 13 - Hawaii 3-km NAM Nest. +# 14 - Puerto Rico 3-km NAM Nest. +# 15 - Alaska 3-km Alaska Nest +# 16 - NAM_Nest_3km_Hawaii_Radiation-Only +# 17 - NAM_Nest_3km_PuertoRico_Radiation-Only +# 18 - WRF-ARW GRIB2 PuertoRico +# 19 - HRRR GRIB2 Alaska production files +# 20 - ExtAna HRRR AK FE output +# 22 NDFD +# 23 Regrid_Conus_HRRR +# 24 RRFS NA +# 25 RRFS HI +# 26 RRFS PR +InputForcings = [25] + +# Specify the file type for each forcing (comma separated) +# Valid types are GRIB1, GRIB2, and NETCDF +# (GRIB files will be converted internally with WGRIB[2]) +InputForcingTypes = GRIB2 + +# Specify the input directories for each forcing product. +InputForcingDirectories = /lfs/h1/ops/prod/com/rrfs/v1.0 + +# Specify whether the input forcings listed above are mandatory, or optional. +# This is important for layering contingencies if a product is missing, +# but forcing files are still desired. +# 0 - Not mandatory +# 1 - Mandatory +# NOTE!!! If not files are found for any products, code will error out indicating +# the final field is all missing values. +InputMandatory = [1] + +[Output] +# Specify the output frequency in minutes. +# Note that any frequencies at higher intervals +# than what is provided as input will entail input +# forcing data being temporally interpolated. 
If no SubOutputHour/SubOutFreq, set it to 0 +OutputFrequency = 60 +SubOutputHour = 0 +SubOutFreq = 0 + +# Specify a top level output directory. For re-forecasts +# and forecasts, sub-directories for each forecast cycle +# will be generated. For retrospective processing, final +# output files will be placed in this directory. +OutDir = /lfs/h1/owp/ptmp/donald.johnson/test/tmp/nwm_forcing_hawaii_short_rrfs_test/HIANA + +# Specify a scratch directory that will be used +# for storage of temporary files. These files +# will be removed automatically by the program. +ScratchDir = /lfs/h1/owp/ptmp/donald.johnson/test/tmp/nwm_forcing_hawaii_ana_rrfs_test + +# Flag to activate scale_factor / add_offset byte packing in +# the output files. +# 0 - Deactivate compression +# 1 - Activate compression +compressOutput = 0 + +# Flag to use floating point output vs scale_factor / add_offset byte packing in +# the output files (the default) +# 0 - Use scale/offset encoding +# 1 - Use floating-point encoding +floatOutput = 0 + +[Retrospective] +# Specify to process forcings in retrosective mode +# 0 - No +# 1 - Yes +RetroFlag = 0 + +# Choose the beginning date of processing forcing files. +# NOTE - Dates are given in YYYYMMDDHHMM format +# If in real-time forecasting mode, leave as -9999. +# These dates get over-ridden in lookBackHours. +BDateProc = 202004152300 +EDateProc = 202004200000 + +[Forecast] +# ONLY for realtime forecasting. +# - Specify a lookback period in minutes to process data. +# This overrides any BDateProc/EDateProc options passed above. +# If no LookBack specified, please specify -9999. +LookBack = 180 + +# If running reforecasts, specify a window below. This will override +# using the LookBack value to calculate a processing window. +RefcstBDateProc = 202412230500 +RefcstEDateProc = 202412230600 + +# Specify a forecast frequency in minutes. This value specifies how often +# to generate a set of forecast forcings. 
If generating hourly retrospective +# forcings, specify this value to be 60. +ForecastFrequency = 60 + +# Forecast cycles are determined by splitting up a day by equal +# ForecastFrequency interval. If there is a desire to shift the +# cycles to a different time step, ForecastShift will shift forecast +# cycles ahead by a determined set of minutes. For example, ForecastFrequency +# of 6 hours will produce forecasts cycles at 00, 06, 12, and 18 UTC. However, +# a ForecastShift of 1 hour will produce forecast cycles at 01, 07, +# 13, and 18 UTC. NOTE - This is only used by the realtime instance +# to calculate forecast cycles accordingly. Re-forecasts will use the beginning +# and ending dates specified in conjunction with the forecast frequency +# to determine forecast cycle dates. +ForecastShift = 0 + +# Specify how much (in minutes) of each input forcing is desires for each +# forecast cycle. See documentation for examples. The length of +# this array must match the input forcing choices. +ForecastInputHorizons = [60] + +# This option is for applying an offset to input forcings to use a different +# forecasted interval. For example, a user may wish to use 4-5 hour forecasted +# fields from an NWP grid from one of their input forcings. In that instance +# the offset would be 4 hours, but 0 for other remaining forcings. +# +# In AnA runs, this value is the offset from the available forecast and 00z +# For example, if forecast are available at 06z and 18z, set this value to 6 +ForecastInputOffsets = [0] + +[Geospatial] +# Specify a geogrid file that defines the WRF-Hydro (or NWM) domain to which +# the forcings are being processed to. +GeogridIn = /lfs/h1/owp/nwm/noscrub/cham.pham/test1/packages/nwm.v3.1.0/parm/domain_hawaii/geo_em_HI.nc + +# Specify the optional land spatial metadata file. If found, coordinate projection information +# and coordinate will be translated from to the final output file. 
+SpatialMetaIn = /lfs/h1/owp/nwm/noscrub/cham.pham/test1/packages/nwm.v3.1.0/parm/domain_hawaii/GEOGRID_LDASOUT_Spatial_Metadata_HI.nc + +[Regridding] +# Choose regridding options for each input forcing files being used. Options available are: +# 1 - ESMF Bilinear +# 2 - ESMF Nearest Neighbor +# 3 - ESMF Conservative Bilinear +RegridOpt = [1] + +[Interpolation] +# Specify an temporal interpolation for the forcing variables. +# Interpolation will be done between the two neighboring +# input forcing states that exist. If only one nearest +# state exist (I.E. only a state forward in time, or behind), +# then that state will be used as a "nearest neighbor". +# NOTE - All input options here must be of the same length +# of the input forcing number. Also note all temporal interpolation +# occurs BEFORE downscaling and bias correction. +# 0 - No temporal interpolation. +# 1 - Nearest Neighbor +# 2 - Linear weighted average +ForcingTemporalInterpolation = [0] + +[BiasCorrection] +# Choose bias correction options for each of the input forcing files. Length of each option +# must match the length of input forcings. + +AnAFlag = 1 + +# Specify a temperature bias correction method. +# 0 - No bias correction +# 1 - CFSv2 - NLDAS2 Parametric Distribution - NWM ONLY +# 2 - Custom NCAR bias-correction based on HRRRv3 analysis - based on hour of day (USE WITH CAUTION). +# 3 - NCAR parametric GFS bias correction +# 4 - NCAR parametric HRRR bias correction +TemperatureBiasCorrection = [0] + +# Specify a surface pressure bias correction method. +# 0 - No bias correction. +# 1 - CFSv2 - NLDAS2 Parametric Distribution - NWM ONLY +PressureBiasCorrection = [0] + +# Specify a specific humidity bias correction method. +# 0 - No bias correction. +# 1 - CFSv2 - NLDAS2 Parametric Distribution - NWM ONLY +# 2 - Custom NCAR bias-correction based on HRRRv3 analysis - based on hour of day (USE WITH CAUTION). +HumidityBiasCorrection = [0] + +# Specify a wind bias correction. 
+# 0 - No bias correction. +# 1 - CFSv2 - NLDAS2 Parametric Distribution - NWM ONLY +# 2 - Custom NCAR bias-correction based on HRRRv3 analysis - based on hour of day (USE WITH CAUTION). +# 3 - NCAR parametric GFS bias correction +# 4 - NCAR parametric HRRR bias correction +WindBiasCorrection = [0] + +# Specify a bias correction for incoming short wave radiation flux. +# 0 - No bias correction. +# 1 - CFSv2 - NLDAS2 Parametric Distribution - NWM ONLY +# 2 - Custom NCAR bias-correction based on HRRRv3 analysis (USE WITH CAUTION). +SwBiasCorrection = [0] + +# Specify a bias correction for incoming long wave radiation flux. +# 1 - CFSv2 - NLDAS2 Parametric Distribution - NWM ONLY +# 2 - Custom NCAR bias-correction based on HRRRv3 analysis, blanket adjustment (USE WITH CAUTION). +# 3 - NCAR parametric GFS bias correction +LwBiasCorrection = [0] + +# Specify a bias correction for precipitation. +# 0 - No bias correction. +# 1 - CFSv2 - NLDAS2 Parametric Distribution - NWM ONLY +PrecipBiasCorrection = [0] + +[Downscaling] +# Choose downscaling options for each of the input forcing files. Length of each option +# must match the length of input forcings. + +# Specify a temperature downscaling method: +# 0 - No downscaling. +# 1 - Use a simple lapse rate of 6.75 degrees Celsius to get from the model elevation +# to the WRF-Hydro elevation. +# 2 - Use a pre-calculated lapse rate regridded to the WRF-Hydro domain. +TemperatureDownscaling = [1] + +# Specify a surface pressure downscaling method: +# 0 - No downscaling. +# 1 - Use input elevation and WRF-Hydro elevation to downscale +# surface pressure. +PressureDownscaling = [1] + +# Specify a shortwave radiation downscaling routine. +# 0 - No downscaling +# 1 - Run a topographic adjustment using the WRF-Hydro elevation +ShortwaveDownscaling = [1] + +# Specify a precipitation downscaling routine. 
+# 0 - No downscaling +# 1 - Use monthly PRISM climatology regridded to the WRF-Hydro domain to +# downscale precipitation via mountain mapper. +PrecipDownscaling = [0] + +# Specify a specific humidity downscaling routine. +# 0 - No downscaling +# 1 - Use regridded humidity, along with downscaled temperature/pressure +# to extrapolate a downscaled surface specific humidty. +HumidityDownscaling = [1] + +# Specify the input parameter directory containing necessary downscaling grids. +DownscalingParamDirs = /lfs/h1/owp/nwm/noscrub/cham.pham/test1/packages/nwm.v3.1.0/parm/forcingParam/Hawaii + +[SuppForcing] +# Choose a set of supplemental precipitation file(s) to layer +# into the final LDASIN forcing files processed from +# the options above. The following is a mapping of +# numeric values to external input native forcing files: +# 1 - CONUS MRMS GRIB2 hourly radar-only QPE +# 2 - CONUS MRMS GRIB2 hourly MultiSensor QPE +# 3 - WRF-ARW 2.5 km 48-hr Hawaii nest precipitation. +# 4 - WRF-ARW 2.5 km 48-hr Puerto Rico nest precipitation. +# 5 - CONUS MRMS GRIB2 hourly MultiSensor QPE (Pass 2 or Pass 1) +# 6 - Hawaii MRMS GRIB2 hourly MultiSensor QPE (Pass 2 or Pass 1) +# 7 - MRMS SBCv2 Liquid Water Fraction (netCDF only) +# 8 - NBM Conus MR +# 9 - NBM Alaska MR +# 10 - Alaska MRMS (no liquid water fraction) +# 11 - Alaska Stage IV NWS Precip +# 12 - MRMS Precip Flag +# +SuppPcp = [6] + +# Specify the file type for each supplemental precipitation file (comma separated) +# Valid types are GRIB1, GRIB2, and NETCDF +# (GRIB files will be converted internally with WGRIB[2]) +SuppPcpForcingTypes = GRIB2 + +# Specify the correponding supplemental precipitation directories +# that will be searched for input files. +SuppPcpDirectories = /lfs/h1/owp/ptmp/cham.pham/test1/tmp/nwm_forcing_analysis_assim_hawaii_05_1504.275359 + +# Specify whether the Supplemental Precips listed above are mandatory, or optional. 
+# This is important for layering contingencies if a product is missing, +# but forcing files are still desired. +# 0 - Not mandatory +# 1 - Mandatory +SuppPcpMandatory = [0] + +# Specify regridding options for the supplemental precipitation products. +RegridOptSuppPcp = [1] + +# Specify the time interpretation methods for the supplemental precipitation +# products. +SuppPcpTemporalInterpolation = [0] + +# In AnA runs, this value is the offset from the available forecast and 00z +# For example, if forecast are available at 06z and 18z, set this value to 6 +SuppPcpInputOffsets = [0] + +# Optional RQI method for radar-based data. +# 0 - Do not use any RQI filtering. Use all radar-based estimates. +# 1 - Use hourly MRMS Radar Quality Index grids. +# 2 - Use NWM monthly climatology grids (NWM only!!!!) +RqiMethod = 0 + +# Optional RQI threshold to be used to mask out. Currently used for MRMS products. +# Please choose a value from 0.0-1.0. Associated radar quality index files will be expected +# from MRMS data. +RqiThreshold = 0.0 + +# Specify an optional directory that contains supplemental precipitation parameter fields, +# I.E monthly RQI climatology +SuppPcpParamDir = /lfs/h1/owp/nwm/noscrub/cham.pham/test1/packages/nwm.v3.1.0/parm/forcingParam/Hawaii + +[Ensembles] +# Choose ensemble options for each input forcing file being used. Ensemble options include: +# FILL IN ENSEMBLE OPTIONS HERE..... +# Choose the CFS ensemble member number to process +cfsEnsNumber = 1 + +[Custom] +# These are options for specifying custom input NetCDF forcing files (in minutes). +# Choose the input frequency of files that are being processed. I.E., are the +# input files every 15 minutes, 60 minutes, 3-hours, etc. Please specify the +# length of custom input frequencies to match the number of custom NetCDF inputs +# selected above in the Logistics section. 
+custom_input_fcst_freq = [] diff --git a/Config/WCOSS/v3.1/template_forcing_engine_Hawaii_RRFS_Short.config b/Config/WCOSS/v3.1/template_forcing_engine_Hawaii_RRFS_Short.config new file mode 100755 index 0000000..81caf84 --- /dev/null +++ b/Config/WCOSS/v3.1/template_forcing_engine_Hawaii_RRFS_Short.config @@ -0,0 +1,349 @@ +#-------------------------------------------------------------------- +# WRF-Hydro Forcing Engine Configuration File +# +# Input options to the forcing engine include: +# 1.) Choices for input forcing files to use. +# 2.) Options for specifying date ranges and forecast intervals +# for input files. +# 3.) Choices for ESMF regridding techniques. +# 4.) Choices for optional downscaling techniques. +# 5.) Choices for optional bias correction techniques. +# 6.) Choices for optional supplemental precipitation products. +# 7.) Choices for optional ensemble member variations. +# 8.) Choices for output directories to place final output files. + +[Input] +# Choose a set of value(s) of forcing variables to be processed for +# WRF-Hydro. Please be advised that the order of which the values are +# chosen below are the order that the final products will be layered +# into the final LDASIN files. See documentation for additional +# information and examples. +# The following is a global set of key values to map forcing files +# to variables within LDASIN files for WRF-Hydro. The forcing engine +# will map files to external variable names internally. For custom +# external native forcing files (see documenation), the code will +# expect a set of named variables to process. 
The following is a +# mapping of numeric values to external input native forcing files: +# 1 - NLDAS GRIB retrospective files +# 2 - NARR GRIB retrospective files +# 3 - GFS GRIB2 Global production files on the full gaussian grid +# 4 - NAM Nest GRIB2 Conus production files +# 5 - HRRR GRIB2 Conus production files +# 6 - RAP GRIB2 Conus 13km production files +# 7 - CFSv2 6-hourly GRIB2 Global production files +# 8 - WRF-ARW - GRIB2 Hawaii nest files +# 9 - GFS GRIB2 Global production files on 0.25 degree lat/lon grids. +# 10 - Custom NetCDF hourly forcing files +# 11 - Custom NetCDF hourly forcing files +# 12 - Custom NetCDF hourly forcing files +# 13 - Hawaii 3-km NAM Nest. +# 14 - Puerto Rico 3-km NAM Nest. +# 15 - Alaska 3-km Alaska Nest +# 16 - NAM_Nest_3km_Hawaii_Radiation-Only +# 17 - NAM_Nest_3km_PuertoRico_Radiation-Only +# 18 - WRF-ARW GRIB2 PuertoRico +# 19 - HRRR GRIB2 Alaska production files +# 20 - ExtAna HRRR AK FE output +# 22 - NDFD +# 23 - Regrid_Conus_HRRR +# 24 - RRFS NA +# 25 - RRFS Hi +# 26 - RRFS PR +# +InputForcings = [25] + +# Specify the file type for each forcing (comma separated) +# Valid types are GRIB1, GRIB2, and NETCDF +# (GRIB files will be converted internally with WGRIB[2]) +InputForcingTypes = GRIB2 + +# Specify the input directories for each forcing product. +InputForcingDirectories = + +# Specify whether the input forcings listed above are mandatory, or optional. +# This is important for layering contingencies if a product is missing, +# but forcing files are still desired. +# 0 - Not mandatory +# 1 - Mandatory +# NOTE!!! If not files are found for any products, code will error out indicating +# the final field is all missing values. +InputMandatory = [1] + +[Output] +# Specify the output frequency in minutes. +# Note that any frequencies at higher intervals +# than what is provided as input will entail input +# forcing data being temporally interpolated. 
If no SubOutputHour/SubOutFreq, set it to 0 +OutputFrequency = 60 +SubOutputHour = 0 +SubOutFreq = 0 + +# Specify a top level output directory. For re-forecasts +# and forecasts, sub-directories for each forecast cycle +# will be generated. For retrospective processing, final +# output files will be placed in this directory. +OutDir = /lfs/h1/owp/ptmp/donald.johnson/test/tmp/nwm_forcing_hawaii_short_rrfs_test/hawaii + +# Specify a scratch directory that will be used +# for storage of temporary files. These files +# will be removed automatically by the program. +ScratchDir = /lfs/h1/owp/ptmp/donald.johnson/test/tmp/nwm_forcing_hawaii_short_rrfs_test + +# Flag to activate scale_factor / add_offset byte packing in +# the output files. +# 0 - Deactivate compression +# 1 - Activate compression +compressOutput = 0 + +# Flag to use floating point output vs scale_factor / add_offset byte packing in +# the output files (the default) +# 0 - Use scale/offset encoding +# 1 - Use floating-point encoding +floatOutput = 0 + +[Retrospective] +# Specify to process forcings in retrosective mode +# 0 - No +# 1 - Yes +RetroFlag = 0 + +# Choose the beginning date of processing forcing files. +# NOTE - Dates are given in YYYYMMDDHHMM format +# If in real-time forecasting mode, leave as -9999. +# These dates get over-ridden in lookBackHours. +BDateProc = syyyymmdy0000 +EDateProc = eyyyymmdy0000 + +[Forecast] +# ONLY for realtime forecasting. +# - Specify a lookback period in minutes to process data. +# This overrides any BDateProc/EDateProc options passed above. +# If no LookBack specified, please specify -9999. +LookBack = -9999 + +# If running reforecasts, specify a window below. This will override +# using the LookBack value to calculate a processing window. +RefcstBDateProc = 202412230000 +RefcstEDateProc = 202412231200 + +# Specify a forecast frequency in minutes. This value specifies how often +# to generate a set of forecast forcings. 
If generating hourly retrospective +# forcings, specify this value to be 60. +ForecastFrequency = 720 + +# Forecast cycles are determined by splitting up a day by equal +# ForecastFrequency interval. If there is a desire to shift the +# cycles to a different time step, ForecastShift will shift forecast +# cycles ahead by a determined set of minutes. For example, ForecastFrequency +# of 6 hours will produce forecasts cycles at 00, 06, 12, and 18 UTC. However, +# a ForecastShift of 1 hour will produce forecast cycles at 01, 07, +# 13, and 18 UTC. NOTE - This is only used by the realtime instance +# to calculate forecast cycles accordingly. Re-forecasts will use the beginning +# and ending dates specified in conjunction with the forecast frequency +# to determine forecast cycle dates. +ForecastShift = 0 + +# Specify how much (in minutes) of each input forcing is desires for each +# forecast cycle. See documentation for examples. The length of +# this array must match the input forcing choices. +ForecastInputHorizons = [2880] + +# This option is for applying an offset to input forcings to use a different +# forecasted interval. For example, a user may wish to use 4-5 hour forecasted +# fields from an NWP grid from one of their input forcings. In that instance +# the offset would be 4 hours, but 0 for other remaining forcings. +# +# In AnA runs, this value is the offset from the available forecast and 00z +# For example, if forecast are available at 06z and 18z, set this value to 6 +ForecastInputOffsets = [0] + +[Geospatial] +# Specify a geogrid file that defines the WRF-Hydro (or NWM) domain to which +# the forcings are being processed to. +GeogridIn = /lfs/h1/owp/nwm/noscrub/cham.pham/test1/packages/nwm.v3.1.0/parm/domain_hawaii/geo_em_HI.nc + +# Specify the optional land spatial metadata file. If found, coordinate projection information +# and coordinate will be translated from to the final output file. 
+SpatialMetaIn = /lfs/h1/owp/nwm/noscrub/cham.pham/test1/packages/nwm.v3.1.0/parm/domain_hawaii/GEOGRID_LDASOUT_Spatial_Metadata_HI.nc + +[Regridding] +# Choose regridding options for each input forcing files being used. Options available are: +# 1 - ESMF Bilinear +# 2 - ESMF Nearest Neighbor +# 3 - ESMF Conservative Bilinear +RegridOpt = [1] + +[Interpolation] +# Specify an temporal interpolation for the forcing variables. +# Interpolation will be done between the two neighboring +# input forcing states that exist. If only one nearest +# state exist (I.E. only a state forward in time, or behind), +# then that state will be used as a "nearest neighbor". +# NOTE - All input options here must be of the same length +# of the input forcing number. Also note all temporal interpolation +# occurs BEFORE downscaling and bias correction. +# 0 - No temporal interpolation. +# 1 - Nearest Neighbor +# 2 - Linear weighted average +ForcingTemporalInterpolation = [0] + +[BiasCorrection] +# Choose bias correction options for each of the input forcing files. Length of each option +# must match the length of input forcings. + +AnAFlag = 0 + +# Specify a temperature bias correction method. +# 0 - No bias correction +# 1 - CFSv2 - NLDAS2 Parametric Distribution - NWM ONLY +# 2 - Custom NCAR bias-correction based on HRRRv3 analysis - based on hour of day (USE WITH CAUTION). +# 3 - NCAR parametric GFS bias correction +# 4 - NCAR parametric HRRR bias correction +TemperatureBiasCorrection = [0] + +# Specify a surface pressure bias correction method. +# 0 - No bias correction. +# 1 - CFSv2 - NLDAS2 Parametric Distribution - NWM ONLY +PressureBiasCorrection = [0] + +# Specify a specific humidity bias correction method. +# 0 - No bias correction. +# 1 - CFSv2 - NLDAS2 Parametric Distribution - NWM ONLY +# 2 - Custom NCAR bias-correction based on HRRRv3 analysis - based on hour of day (USE WITH CAUTION). +HumidityBiasCorrection = [0] + +# Specify a wind bias correction. 
+# 0 - No bias correction. +# 1 - CFSv2 - NLDAS2 Parametric Distribution - NWM ONLY +# 2 - Custom NCAR bias-correction based on HRRRv3 analysis - based on hour of day (USE WITH CAUTION). +# 3 - NCAR parametric GFS bias correction +# 4 - NCAR parametric HRRR bias correction +WindBiasCorrection = [0] + +# Specify a bias correction for incoming short wave radiation flux. +# 0 - No bias correction. +# 1 - CFSv2 - NLDAS2 Parametric Distribution - NWM ONLY +# 2 - Custom NCAR bias-correction based on HRRRv3 analysis (USE WITH CAUTION). +SwBiasCorrection = [0] + +# Specify a bias correction for incoming long wave radiation flux. +# 1 - CFSv2 - NLDAS2 Parametric Distribution - NWM ONLY +# 2 - Custom NCAR bias-correction based on HRRRv3 analysis, blanket adjustment (USE WITH CAUTION). +# 3 - NCAR parametric GFS bias correction +LwBiasCorrection = [0] + +# Specify a bias correction for precipitation. +# 0 - No bias correction. +# 1 - CFSv2 - NLDAS2 Parametric Distribution - NWM ONLY +PrecipBiasCorrection = [0] + +[Downscaling] +# Choose downscaling options for each of the input forcing files. Length of each option +# must match the length of input forcings. + +# Specify a temperature downscaling method: +# 0 - No downscaling. +# 1 - Use a simple lapse rate of 6.75 degrees Celsius to get from the model elevation +# to the WRF-Hydro elevation. +# 2 - Use a pre-calculated lapse rate regridded to the WRF-Hydro domain. +TemperatureDownscaling = [1] + +# Specify a surface pressure downscaling method: +# 0 - No downscaling. +# 1 - Use input elevation and WRF-Hydro elevation to downscale +# surface pressure. +PressureDownscaling = [1] + +# Specify a shortwave radiation downscaling routine. +# 0 - No downscaling +# 1 - Run a topographic adjustment using the WRF-Hydro elevation +ShortwaveDownscaling = [1] + +# Specify a precipitation downscaling routine. 
+# 0 - No downscaling +# 1 - Use monthly PRISM climatology regridded to the WRF-Hydro domain to +# downscale precipitation via mountain mapper. +PrecipDownscaling = [0] + +# Specify a specific humidity downscaling routine. +# 0 - No downscaling +# 1 - Use regridded humidity, along with downscaled temperature/pressure +# to extrapolate a downscaled surface specific humidty. +HumidityDownscaling = [1] + +# Specify the input parameter directory containing necessary downscaling grids. +DownscalingParamDirs = /lfs/h1/owp/nwm/noscrub/cham.pham/test1/packages/nwm.v3.1.0/parm/forcingParam/Hawaii, /lfs/h1/owp/nwm/noscrub/cham.pham/test1/packages/nwm.v3.1.0/parm/forcingParam/Hawaii + +[SuppForcing] +# Choose a set of supplemental precipitation file(s) to layer +# into the final LDASIN forcing files processed from +# the options above. The following is a mapping of +# numeric values to external input native forcing files: +# 1 - CONUS MRMS GRIB2 hourly radar-only QPE +# 2 - CONUS MRMS GRIB2 hourly MultiSensor QPE +# 3 - WRF-ARW 2.5 km 48-hr Hawaii nest precipitation. +# 4 - WRF-ARW 2.5 km 48-hr Puerto Rico nest precipitation. +# 5 - Hawaii MRMS GRIB2 hourly MultiSensor QPE +# 6 - Hawaii MRMS GRIB2 hourly MultiSensor QPE (Pass 2 or Pass 1) +# 7 - MRMS SBCv2 Liquid Water Fraction (netCDF only) +# 8 - NBM Conus MR +# 9 - NBM Alaska MR +# 10 - Alaska MRMS (no liquid water fraction) +# 11 - Alaska Stage IV NWS Precip +SuppPcp = [ ] + +# Specify the file type for each supplemental precipitation file (comma separated) +# Valid types are GRIB1, GRIB2, and NETCDF +# (GRIB files will be converted internally with WGRIB[2]) +SuppPcpForcingTypes = + +# Specify the correponding supplemental precipitation directories +# that will be searched for input files. +SuppPcpDirectories = /lfs/h1/ops/prod/com/hiresw/v8.1 + +# Specify whether the Supplemental Precips listed above are mandatory, or optional. 
+# This is important for layering contingencies if a product is missing, +# but forcing files are still desired. +# 0 - Not mandatory +# 1 - Mandatory +SuppPcpMandatory = [0] + +# Specify regridding options for the supplemental precipitation products. +RegridOptSuppPcp = [1] + +# Specify the time interpretation methods for the supplemental precipitation +# products. +SuppPcpTemporalInterpolation = [0] + +# In AnA runs, this value is the offset from the available forecast and 00z +# For example, if forecast are available at 06z and 18z, set this value to 6 +SuppPcpInputOffsets = [0] + +# Optional RQI method for radar-based data. +# 0 - Do not use any RQI filtering. Use all radar-based estimates. +# 1 - Use hourly MRMS Radar Quality Index grids. +# 2 - Use NWM monthly climatology grids (NWM only!!!!) +RqiMethod = 0 + +# Optional RQI threshold to be used to mask out. Currently used for MRMS products. +# Please choose a value from 0.0-1.0. Associated radar quality index files will be expected +# from MRMS data. +RqiThreshold = 0.9 + +# Specify an optional directory that contains supplemental precipitation parameter fields, +# I.E monthly RQI climatology +SuppPcpParamDir = /lfs/h1/owp/nwm/noscrub/cham.pham/test1/packages/nwm.v3.1.0/parm/forcingParam/Hawaii + +[Ensembles] +# Choose ensemble options for each input forcing file being used. Ensemble options include: +# FILL IN ENSEMBLE OPTIONS HERE..... +# Choose the CFS ensemble member number to process +cfsEnsNumber = 1 + +[Custom] +# These are options for specifying custom input NetCDF forcing files (in minutes). +# Choose the input frequency of files that are being processed. I.E., are the +# input files every 15 minutes, 60 minutes, 3-hours, etc. Please specify the +# length of custom input frequencies to match the number of custom NetCDF inputs +# selected above in the Logistics section. 
+custom_input_fcst_freq = [] diff --git a/Config/WCOSS/v3.1/template_forcing_engine_Medium_GFS-NDFD.config b/Config/WCOSS/v3.1/template_forcing_engine_Medium_GFS-NDFD.config index c0a755f..89e8501 100755 --- a/Config/WCOSS/v3.1/template_forcing_engine_Medium_GFS-NDFD.config +++ b/Config/WCOSS/v3.1/template_forcing_engine_Medium_GFS-NDFD.config @@ -175,6 +175,8 @@ IgnoredBorderWidths = [0, 0] # 3 - ESMF Conservative Bilinear RegridOpt = [1,2] +RegridWeightsDir = + [Interpolation] # Specify an temporal interpolation for the forcing variables. # Interpolation will be done between the two neighboring diff --git a/Config/WCOSS/v3.1/template_forcing_engine_PuertoRico_RRFS_Analysis.config b/Config/WCOSS/v3.1/template_forcing_engine_PuertoRico_RRFS_Analysis.config new file mode 100755 index 0000000..41ce2bc --- /dev/null +++ b/Config/WCOSS/v3.1/template_forcing_engine_PuertoRico_RRFS_Analysis.config @@ -0,0 +1,349 @@ +#-------------------------------------------------------------------- +# WRF-Hydro Forcing Engine Configuration File +# +# Input options to the forcing engine include: +# 1.) Choices for input forcing files to use. +# 2.) Options for specifying date ranges and forecast intervals +# for input files. +# 3.) Choices for ESMF regridding techniques. +# 4.) Choices for optional downscaling techniques. +# 5.) Choices for optional bias correction techniques. +# 6.) Choices for optional supplemental precipitation products. +# 7.) Choices for optional ensemble member variations. +# 8.) Choices for output directories to place final output files. + +[Input] +# Choose a set of value(s) of forcing variables to be processed for +# WRF-Hydro. Please be advised that the order of which the values are +# chosen below are the order that the final products will be layered +# into the final LDASIN files. See documentation for additional +# information and examples. 
+# The following is a global set of key values to map forcing files +# to variables within LDASIN files for WRF-Hydro. The forcing engine +# will map files to external variable names internally. For custom +# external native forcing files (see documentation), the code will +# expect a set of named variables to process. The following is a +# mapping of numeric values to external input native forcing files: +# 1 - NLDAS GRIB retrospective files +# 2 - NARR GRIB retrospective files +# 3 - GFS GRIB2 Global production files on the full gaussian grid +# 4 - NAM Nest GRIB2 Conus production files +# 5 - HRRR GRIB2 Conus production files +# 6 - RAP GRIB2 Conus 13km production files +# 7 - CFSv2 6-hourly GRIB2 Global production files +# 8 - WRF-ARW - GRIB2 Hawaii nest files +# 9 - GFS GRIB2 Global production files on 0.25 degree lat/lon grids. +# 10 - Custom NetCDF hourly forcing files +# 11 - Custom NetCDF hourly forcing files +# 12 - Custom NetCDF hourly forcing files +# 13 - Hawaii 3-km NAM Nest. +# 14 - Puerto Rico 3-km NAM Nest. +# 15 - Alaska 3-km Alaska Nest +# 16 - NAM_Nest_3km_Hawaii_Radiation-Only +# 17 - NAM_Nest_3km_PuertoRico_Radiation-Only +# 18 - WRF-ARW GRIB2 PuertoRico +# 19 - HRRR GRIB2 Alaska production files +# 20 - ExtAna HRRR AK FE output +# 24 - RRFS NA +# 25 - RRFS HI +# 26 - RRFS PR +InputForcings = [26] + +# Specify the file type for each forcing (comma separated) +# Valid types are GRIB1, GRIB2, and NETCDF +# (GRIB files will be converted internally with WGRIB[2]) +InputForcingTypes = GRIB2 + +# Specify the input directories for each forcing product. +InputForcingDirectories = /lfs/h1/ops/prod/com/rrfs/v1.0 + +# Specify whether the input forcings listed above are mandatory, or optional. +# This is important for layering contingencies if a product is missing, +# but forcing files are still desired. +# 0 - Not mandatory +# 1 - Mandatory +# NOTE!!! 
If not files are found for any products, code will error out indicating +# the final field is all missing values. +InputMandatory = [1] + +[Output] +# Specify the output frequency in minutes. +# Note that any frequencies at higher intervals +# than what is provided as input will entail input +# forcing data being temporally interpolated. If no SubOutputHour/SubOutFreq, set it to 0 +OutputFrequency = 60 +SubOutputHour = 0 +SubOutFreq = 0 + +# Specify a top level output directory. For re-forecasts +# and forecasts, sub-directories for each forecast cycle +# will be generated. For retrospective processing, final +# output files will be placed in this directory. +OutDir = /lfs/h1/owp/ptmp/cham.pham/test1/tmp/nwm_forcing_analysis_assim_puertorico_05_1504.275360/PRAnA + +# Specify a scratch directory that will be used +# for storage of temporary files. These files +# will be removed automatically by the program. +ScratchDir = /lfs/h1/owp/ptmp/donald.johnson/test/tmp/nwm_forcing_puerto_rico_ana_rrfs_test + +# Flag to activate scale_factor / add_offset byte packing in +# the output files. +# 0 - Deactivate compression +# 1 - Activate compression +compressOutput = 0 + +# Flag to use floating point output vs scale_factor / add_offset byte packing in +# the output files (the default) +# 0 - Use scale/offset encoding +# 1 - Use floating-point encoding +floatOutput = 0 + +[Retrospective] +# Specify to process forcings in retrosective mode +# 0 - No +# 1 - Yes +RetroFlag = 0 + +# Choose the beginning date of processing forcing files. +# NOTE - Dates are given in YYYYMMDDHHMM format +# If in real-time forecasting mode, leave as -9999. +# These dates get over-ridden in lookBackHours. +BDateProc = 202004152300 +EDateProc = 202004200000 + +[Forecast] +# ONLY for realtime forecasting. +# - Specify a lookback period in minutes to process data. +# This overrides any BDateProc/EDateProc options passed above. +# If no LookBack specified, please specify -9999. 
+LookBack = 180 + +# If running reforecasts, specify a window below. This will override +# using the LookBack value to calculate a processing window. +RefcstBDateProc = 202412230500 +RefcstEDateProc = 202412230600 + +# Specify a forecast frequency in minutes. This value specifies how often +# to generate a set of forecast forcings. If generating hourly retrospective +# forcings, specify this value to be 60. +ForecastFrequency = 60 + +# Forecast cycles are determined by splitting up a day by equal +# ForecastFrequency interval. If there is a desire to shift the +# cycles to a different time step, ForecastShift will shift forecast +# cycles ahead by a determined set of minutes. For example, ForecastFrequency +# of 6 hours will produce forecasts cycles at 00, 06, 12, and 18 UTC. However, +# a ForecastShift of 1 hour will produce forecast cycles at 01, 07, +# 13, and 18 UTC. NOTE - This is only used by the realtime instance +# to calculate forecast cycles accordingly. Re-forecasts will use the beginning +# and ending dates specified in conjunction with the forecast frequency +# to determine forecast cycle dates. +ForecastShift = 0 + +# Specify how much (in minutes) of each input forcing is desires for each +# forecast cycle. See documentation for examples. The length of +# this array must match the input forcing choices. +ForecastInputHorizons = [60] + +# This option is for applying an offset to input forcings to use a different +# forecasted interval. For example, a user may wish to use 4-5 hour forecasted +# fields from an NWP grid from one of their input forcings. In that instance +# the offset would be 4 hours, but 0 for other remaining forcings. +# +# In AnA runs, this value is the offset from the available forecast and 00z +# For example, if forecast are available at 06z and 18z, set this value to 6 +ForecastInputOffsets = [0] + +[Geospatial] +# Specify a geogrid file that defines the WRF-Hydro (or NWM) domain to which +# the forcings are being processed to. 
+GeogridIn = /lfs/h1/owp/nwm/noscrub/cham.pham/test1/packages/nwm.v3.1.0/parm/domain_puertorico/geo_em_PRVI.nc + +# Specify the optional land spatial metadata file. If found, coordinate projection information +# and coordinate will be translated from to the final output file. +SpatialMetaIn = /lfs/h1/owp/nwm/noscrub/cham.pham/test1/packages/nwm.v3.1.0/parm/domain_puertorico/GEOGRID_LDASOUT_Spatial_Metadata_PRVI.nc + +[Regridding] +# Choose regridding options for each input forcing files being used. Options available are: +# 1 - ESMF Bilinear +# 2 - ESMF Nearest Neighbor +# 3 - ESMF Conservative Bilinear +RegridOpt = [1] + +[Interpolation] +# Specify an temporal interpolation for the forcing variables. +# Interpolation will be done between the two neighboring +# input forcing states that exist. If only one nearest +# state exist (I.E. only a state forward in time, or behind), +# then that state will be used as a "nearest neighbor". +# NOTE - All input options here must be of the same length +# of the input forcing number. Also note all temporal interpolation +# occurs BEFORE downscaling and bias correction. +# 0 - No temporal interpolation. +# 1 - Nearest Neighbor +# 2 - Linear weighted average +ForcingTemporalInterpolation = [0] + +[BiasCorrection] +# Choose bias correction options for each of the input forcing files. Length of each option +# must match the length of input forcings. + +AnAFlag = 1 + +# Specify a temperature bias correction method. +# 0 - No bias correction +# 1 - CFSv2 - NLDAS2 Parametric Distribution - NWM ONLY +# 2 - Custom NCAR bias-correction based on HRRRv3 analysis - based on hour of day (USE WITH CAUTION). +# 3 - NCAR parametric GFS bias correction +# 4 - NCAR parametric HRRR bias correction +TemperatureBiasCorrection = [0] + +# Specify a surface pressure bias correction method. +# 0 - No bias correction. 
+# 1 - CFSv2 - NLDAS2 Parametric Distribution - NWM ONLY +PressureBiasCorrection = [0] + +# Specify a specific humidity bias correction method. +# 0 - No bias correction. +# 1 - CFSv2 - NLDAS2 Parametric Distribution - NWM ONLY +# 2 - Custom NCAR bias-correction based on HRRRv3 analysis - based on hour of day (USE WITH CAUTION). +HumidityBiasCorrection = [0] + +# Specify a wind bias correction. +# 0 - No bias correction. +# 1 - CFSv2 - NLDAS2 Parametric Distribution - NWM ONLY +# 2 - Custom NCAR bias-correction based on HRRRv3 analysis - based on hour of day (USE WITH CAUTION). +# 3 - NCAR parametric GFS bias correction +# 4 - NCAR parametric HRRR bias correction +WindBiasCorrection = [0] + +# Specify a bias correction for incoming short wave radiation flux. +# 0 - No bias correction. +# 1 - CFSv2 - NLDAS2 Parametric Distribution - NWM ONLY +# 2 - Custom NCAR bias-correction based on HRRRv3 analysis (USE WITH CAUTION). +SwBiasCorrection = [0] + +# Specify a bias correction for incoming long wave radiation flux. +# 1 - CFSv2 - NLDAS2 Parametric Distribution - NWM ONLY +# 2 - Custom NCAR bias-correction based on HRRRv3 analysis, blanket adjustment (USE WITH CAUTION). +# 3 - NCAR parametric GFS bias correction +LwBiasCorrection = [0] + +# Specify a bias correction for precipitation. +# 0 - No bias correction. +# 1 - CFSv2 - NLDAS2 Parametric Distribution - NWM ONLY +PrecipBiasCorrection = [0] + +[Downscaling] +# Choose downscaling options for each of the input forcing files. Length of each option +# must match the length of input forcings. + +# Specify a temperature downscaling method: +# 0 - No downscaling. +# 1 - Use a simple lapse rate of 6.75 degrees Celsius to get from the model elevation +# to the WRF-Hydro elevation. +# 2 - Use a pre-calculated lapse rate regridded to the WRF-Hydro domain. +TemperatureDownscaling = [1] + +# Specify a surface pressure downscaling method: +# 0 - No downscaling. 
+# 1 - Use input elevation and WRF-Hydro elevation to downscale +# surface pressure. +PressureDownscaling = [1] + +# Specify a shortwave radiation downscaling routine. +# 0 - No downscaling +# 1 - Run a topographic adjustment using the WRF-Hydro elevation +ShortwaveDownscaling = [1] + +# Specify a precipitation downscaling routine. +# 0 - No downscaling +# 1 - Use monthly PRISM climatology regridded to the WRF-Hydro domain to +# downscale precipitation via mountain mapper. +PrecipDownscaling = [0] + +# Specify a specific humidity downscaling routine. +# 0 - No downscaling +# 1 - Use regridded humidity, along with downscaled temperature/pressure +# to extrapolate a downscaled surface specific humidty. +HumidityDownscaling = [1] + +# Specify the input parameter directory containing necessary downscaling grids. +DownscalingParamDirs = /lfs/h1/owp/nwm/noscrub/cham.pham/test1/packages/nwm.v3.1.0/parm/forcingParam/PuertoRico + +[SuppForcing] +# Choose a set of supplemental precipitation file(s) to layer +# into the final LDASIN forcing files processed from +# the options above. The following is a mapping of +# numeric values to external input native forcing files: +# 1 - MRMS GRIB2 hourly radar-only QPE +# 2 - MRMS GRIB2 hourly MultiSensor QPE +# 3 - WRF-ARW 2.5 km 48-hr Hawaii nest precipitation. +# 4 - WRF-ARW 2.5 km 48-hr Puerto Rico nest precipitation. 
+# 5 - CONUS/PR MRMS GRIB2 hourly MultiSensor QPE
+# 6 - Hawaii MRMS GRIB2 hourly MultiSensor QPE
+# 7 - MRMS Liquid Water Fraction
+# 8 - NBM Core CONUS APCP
+# 9 - NBM Core Alaska PCP
+# 10 - AK MRMS
+# 11 - AK Stage IV Precip MRMS
+# 12 - MRMS Precip Flag
+# 13 - Custom Frequency Supp Pcp
+# 14 - NBM Core PR APCP
+SuppPcp = [5]
+
+# Specify the file type for each supplemental precipitation file (comma separated)
+# Valid types are GRIB1, GRIB2, and NETCDF
+# (GRIB files will be converted internally with WGRIB[2])
+SuppPcpForcingTypes = GRIB2
+
+# Specify the corresponding supplemental precipitation directories
+# that will be searched for input files.
+SuppPcpDirectories = /lfs/h1/owp/ptmp/cham.pham/test1/tmp/nwm_forcing_analysis_assim_puertorico_05_1504.275360
+
+# Specify whether the Supplemental Precips listed above are mandatory, or optional.
+# This is important for layering contingencies if a product is missing,
+# but forcing files are still desired.
+# 0 - Not mandatory
+# 1 - Mandatory
+SuppPcpMandatory = [0]
+
+# Specify regridding options for the supplemental precipitation products.
+RegridOptSuppPcp = [1]
+
+# Specify the time interpretation methods for the supplemental precipitation
+# products.
+SuppPcpTemporalInterpolation = [0]
+
+# In AnA runs, this value is the offset from the available forecast and 00z
+# For example, if forecasts are available at 06z and 18z, set this value to 6
+SuppPcpInputOffsets = [6]
+
+# Optional RQI method for radar-based data.
+# 0 - Do not use any RQI filtering. Use all radar-based estimates.
+# 1 - Use hourly MRMS Radar Quality Index grids.
+# 2 - Use NWM monthly climatology grids (NWM only!!!!)
+RqiMethod = 0
+
+# Optional RQI threshold to be used to mask out. Currently used for MRMS products.
+# Please choose a value from 0.0-1.0. Associated radar quality index files will be expected
+# from MRMS data. 
+RqiThreshold = 0.0 + +# Specify an optional directory that contains supplemental precipitation parameter fields, +# I.E monthly RQI climatology +SuppPcpParamDir = /lfs/h1/owp/nwm/noscrub/cham.pham/test1/packages/nwm.v3.1.0/parm/forcingParam/PuertoRico + +[Ensembles] +# Choose ensemble options for each input forcing file being used. Ensemble options include: +# FILL IN ENSEMBLE OPTIONS HERE..... +# Choose the CFS ensemble member number to process +cfsEnsNumber = 1 + +[Custom] +# These are options for specifying custom input NetCDF forcing files (in minutes). +# Choose the input frequency of files that are being processed. I.E., are the +# input files every 15 minutes, 60 minutes, 3-hours, etc. Please specify the +# length of custom input frequencies to match the number of custom NetCDF inputs +# selected above in the Logistics section. +custom_input_fcst_freq = [] diff --git a/Config/WCOSS/v3.1/template_forcing_engine_PuertoRico_RRFS_Short.config b/Config/WCOSS/v3.1/template_forcing_engine_PuertoRico_RRFS_Short.config new file mode 100755 index 0000000..994ba01 --- /dev/null +++ b/Config/WCOSS/v3.1/template_forcing_engine_PuertoRico_RRFS_Short.config @@ -0,0 +1,347 @@ +#-------------------------------------------------------------------- +# WRF-Hydro Forcing Engine Configuration File +# +# Input options to the forcing engine include: +# 1.) Choices for input forcing files to use. +# 2.) Options for specifying date ranges and forecast intervals +# for input files. +# 3.) Choices for ESMF regridding techniques. +# 4.) Choices for optional downscaling techniques. +# 5.) Choices for optional bias correction techniques. +# 6.) Choices for optional supplemental precipitation products. +# 7.) Choices for optional ensemble member variations. +# 8.) Choices for output directories to place final output files. + +[Input] +# Choose a set of value(s) of forcing variables to be processed for +# WRF-Hydro. 
Please be advised that the order of which the values are +# chosen below are the order that the final products will be layered +# into the final LDASIN files. See documentation for additional +# information and examples. +# The following is a global set of key values to map forcing files +# to variables within LDASIN files for WRF-Hydro. The forcing engine +# will map files to external variable names internally. For custom +# external native forcing files (see documenation), the code will +# expect a set of named variables to process. The following is a +# mapping of numeric values to external input native forcing files: +# 1 - NLDAS GRIB retrospective files +# 2 - NARR GRIB retrospective files +# 3 - GFS GRIB2 Global production files on the full gaussian grid +# 4 - NAM Nest GRIB2 Conus production files +# 5 - HRRR GRIB2 Conus production files +# 6 - RAP GRIB2 Conus 13km production files +# 7 - CFSv2 6-hourly GRIB2 Global production files +# 8 - WRF-ARW - GRIB2 PRVI nest files +# 9 - GFS GRIB2 Global production files on 0.25 degree lat/lon grids. +# 10 - Custom NetCDF hourly forcing files +# 11 - Custom NetCDF hourly forcing files +# 12 - Custom NetCDF hourly forcing files +# 13 - PRVI 3-km NAM Nest. +# 14 - Puerto Rico 3-km NAM Nest. +# 15 - Alaska 3-km Alaska Nest +# 16 - NAM_Nest_3km_PRVI_Radiation-Only +# 17 - NAM_Nest_3km_PuertoRico_Radiation-Only +# 18 - WRF-ARW GRIB2 PuertoRico +# 19 - HRRR GRIB2 Alaska production files +# 20 - ExtAna HRRR AK FE output +# 24 - RRFS NA +# 25 - RRFA HI +# 26 - RRFS PR +InputForcings = [26] + +# Specify the file type for each forcing (comma separated) +# Valid types are GRIB1, GRIB2, and NETCDF +# (GRIB files will be converted internally with WGRIB[2]) +InputForcingTypes = GRIB2 + +# Specify the input directories for each forcing product. +InputForcingDirectories = + +# Specify whether the input forcings listed above are mandatory, or optional. 
+# This is important for layering contingencies if a product is missing, +# but forcing files are still desired. +# 0 - Not mandatory +# 1 - Mandatory +# NOTE!!! If not files are found for any products, code will error out indicating +# the final field is all missing values. +InputMandatory = [1] + +[Output] +# Specify the output frequency in minutes. +# Note that any frequencies at higher intervals +# than what is provided as input will entail input +# forcing data being temporally interpolated. If no SubOutputHour/SubOutFreq, set it to 0 +OutputFrequency = 60 +SubOutputHour = 0 +SubOutFreq = 0 + +# Specify a top level output directory. For re-forecasts +# and forecasts, sub-directories for each forecast cycle +# will be generated. For retrospective processing, final +# output files will be placed in this directory. +OutDir = /lfs/h1/owp/ptmp/donald.johnson/test/tmp/nwm_forcing_short_pr_rrfs_test/puertorica + +# Specify a scratch directory that will be used +# for storage of temporary files. These files +# will be removed automatically by the program. +ScratchDir = /lfs/h1/owp/ptmp/donald.johnson/test/tmp/nwm_forcing_short_pr_rrfs_test/puertorica +# Flag to activate scale_factor / add_offset byte packing in +# the output files. +# 0 - Deactivate compression +# 1 - Activate compression +compressOutput = 0 + +# Flag to use floating point output vs scale_factor / add_offset byte packing in +# the output files (the default) +# 0 - Use scale/offset encoding +# 1 - Use floating-point encoding +floatOutput = 0 + +[Retrospective] +# Specify to process forcings in retrosective mode +# 0 - No +# 1 - Yes +RetroFlag = 0 + +# Choose the beginning date of processing forcing files. +# NOTE - Dates are given in YYYYMMDDHHMM format +# If in real-time forecasting mode, leave as -9999. +# These dates get over-ridden in lookBackHours. +BDateProc = 202408060000 +EDateProc = 202408080000 + +[Forecast] +# ONLY for realtime forecasting. 
+# - Specify a lookback period in minutes to process data. +# This overrides any BDateProc/EDateProc options passed above. +# If no LookBack specified, please specify -9999. +LookBack = -9999 + +# If running reforecasts, specify a window below. This will override +# using the LookBack value to calculate a processing window. +RefcstBDateProc = 202412230000 +RefcstEDateProc = 202412230600 + +# Specify a forecast frequency in minutes. This value specifies how often +# to generate a set of forecast forcings. If generating hourly retrospective +# forcings, specify this value to be 60. +#ForecastFrequency = 720 +ForecastFrequency = 360 + +# Forecast cycles are determined by splitting up a day by equal +# ForecastFrequency interval. If there is a desire to shift the +# cycles to a different time step, ForecastShift will shift forecast +# cycles ahead by a determined set of minutes. For example, ForecastFrequency +# of 6 hours will produce forecasts cycles at 00, 06, 12, and 18 UTC. However, +# a ForecastShift of 1 hour will produce forecast cycles at 01, 07, +# 13, and 18 UTC. NOTE - This is only used by the realtime instance +# to calculate forecast cycles accordingly. Re-forecasts will use the beginning +# and ending dates specified in conjunction with the forecast frequency +# to determine forecast cycle dates. +ForecastShift = 0 + +# Specify how much (in minutes) of each input forcing is desires for each +# forecast cycle. See documentation for examples. The length of +# this array must match the input forcing choices. +ForecastInputHorizons = [2880] + +# This option is for applying an offset to input forcings to use a different +# forecasted interval. For example, a user may wish to use 4-5 hour forecasted +# fields from an NWP grid from one of their input forcings. In that instance +# the offset would be 4 hours, but 0 for other remaining forcings. 
+# +# In AnA runs, this value is the offset from the available forecast and 00z +# For example, if forecast are available at 06z and 18z, set this value to 6 +ForecastInputOffsets = [0] + +[Geospatial] +# Specify a geogrid file that defines the WRF-Hydro (or NWM) domain to which +# the forcings are being processed to. +GeogridIn = /lfs/h1/owp/nwm/noscrub/cham.pham/test1/packages/nwm.v3.1.0/parm/domain_puertorico/geo_em_PRVI.nc + +# Specify the optional land spatial metadata file. If found, coordinate projection information +# and coordinate will be translated from to the final output file. +SpatialMetaIn = /lfs/h1/owp/nwm/noscrub/cham.pham/test1/packages/nwm.v3.1.0/parm/domain_puertorico/GEOGRID_LDASOUT_Spatial_Metadata_PRVI.nc + +[Regridding] +# Choose regridding options for each input forcing files being used. Options available are: +# 1 - ESMF Bilinear +# 2 - ESMF Nearest Neighbor +# 3 - ESMF Conservative Bilinear +RegridOpt = [1] + +[Interpolation] +# Specify an temporal interpolation for the forcing variables. +# Interpolation will be done between the two neighboring +# input forcing states that exist. If only one nearest +# state exist (I.E. only a state forward in time, or behind), +# then that state will be used as a "nearest neighbor". +# NOTE - All input options here must be of the same length +# of the input forcing number. Also note all temporal interpolation +# occurs BEFORE downscaling and bias correction. +# 0 - No temporal interpolation. +# 1 - Nearest Neighbor +# 2 - Linear weighted average +ForcingTemporalInterpolation = [0] + +[BiasCorrection] +# Choose bias correction options for each of the input forcing files. Length of each option +# must match the length of input forcings. + +AnAFlag = 0 + +# Specify a temperature bias correction method. +# 0 - No bias correction +# 1 - CFSv2 - NLDAS2 Parametric Distribution - NWM ONLY +# 2 - Custom NCAR bias-correction based on HRRRv3 analysis - based on hour of day (USE WITH CAUTION). 
+# 3 - NCAR parametric GFS bias correction +# 4 - NCAR parametric HRRR bias correction +TemperatureBiasCorrection = [0] + +# Specify a surface pressure bias correction method. +# 0 - No bias correction. +# 1 - CFSv2 - NLDAS2 Parametric Distribution - NWM ONLY +PressureBiasCorrection = [0] + +# Specify a specific humidity bias correction method. +# 0 - No bias correction. +# 1 - CFSv2 - NLDAS2 Parametric Distribution - NWM ONLY +# 2 - Custom NCAR bias-correction based on HRRRv3 analysis - based on hour of day (USE WITH CAUTION). +HumidityBiasCorrection = [0] + +# Specify a wind bias correction. +# 0 - No bias correction. +# 1 - CFSv2 - NLDAS2 Parametric Distribution - NWM ONLY +# 2 - Custom NCAR bias-correction based on HRRRv3 analysis - based on hour of day (USE WITH CAUTION). +# 3 - NCAR parametric GFS bias correction +# 4 - NCAR parametric HRRR bias correction +WindBiasCorrection = [0] + +# Specify a bias correction for incoming short wave radiation flux. +# 0 - No bias correction. +# 1 - CFSv2 - NLDAS2 Parametric Distribution - NWM ONLY +# 2 - Custom NCAR bias-correction based on HRRRv3 analysis (USE WITH CAUTION). +SwBiasCorrection = [0] + +# Specify a bias correction for incoming long wave radiation flux. +# 1 - CFSv2 - NLDAS2 Parametric Distribution - NWM ONLY +# 2 - Custom NCAR bias-correction based on HRRRv3 analysis, blanket adjustment (USE WITH CAUTION). +# 3 - NCAR parametric GFS bias correction +LwBiasCorrection = [0] + +# Specify a bias correction for precipitation. +# 0 - No bias correction. +# 1 - CFSv2 - NLDAS2 Parametric Distribution - NWM ONLY +PrecipBiasCorrection = [0] + +[Downscaling] +# Choose downscaling options for each of the input forcing files. Length of each option +# must match the length of input forcings. + +# Specify a temperature downscaling method: +# 0 - No downscaling. +# 1 - Use a simple lapse rate of 6.75 degrees Celsius to get from the model elevation +# to the WRF-Hydro elevation. 
+# 2 - Use a pre-calculated lapse rate regridded to the WRF-Hydro domain. +TemperatureDownscaling = [1] + +# Specify a surface pressure downscaling method: +# 0 - No downscaling. +# 1 - Use input elevation and WRF-Hydro elevation to downscale +# surface pressure. +PressureDownscaling = [1] + +# Specify a shortwave radiation downscaling routine. +# 0 - No downscaling +# 1 - Run a topographic adjustment using the WRF-Hydro elevation +ShortwaveDownscaling = [1] + +# Specify a precipitation downscaling routine. +# 0 - No downscaling +# 1 - Use monthly PRISM climatology regridded to the WRF-Hydro domain to +# downscale precipitation via mountain mapper. +PrecipDownscaling = [0] + +# Specify a specific humidity downscaling routine. +# 0 - No downscaling +# 1 - Use regridded humidity, along with downscaled temperature/pressure +# to extrapolate a downscaled surface specific humidty. +HumidityDownscaling = [1] + +# Specify the input parameter directory containing necessary downscaling grids. +DownscalingParamDirs = /lfs/h1/owp/nwm/noscrub/cham.pham/test1/packages/nwm.v3.1.0/parm/forcingParam/PuertoRico, /lfs/h1/owp/nwm/noscrub/cham.pham/test1/packages/nwm.v3.1.0/parm/forcingParam/PuertoRico + +[SuppForcing] +# Choose a set of supplemental precipitation file(s) to layer +# into the final LDASIN forcing files processed from +# the options above. The following is a mapping of +# numeric values to external input native forcing files: +# 1 - CONUS MRMS GRIB2 hourly radar-only QPE +# 2 - CONUS MRMS GRIB2 hourly MultiSensor QPE +# 3 - WRF-ARW 2.5 km 48-hr PRVI nest precipitation. +# 4 - WRF-ARW 2.5 km 48-hr Puerto Rico nest precipitation. 
+# 5 - PRVI MRMS GRIB2 hourly MultiSensor QPE
+# 6 - Hawaii MRMS GRIB2 hourly MultiSensor QPE (Pass 2 or Pass 1)
+# 7 - MRMS SBCv2 Liquid Water Fraction (netCDF only)
+# 8 - NBM Conus MR
+# 9 - NBM Alaska MR
+# 10 - Alaska MRMS (no liquid water fraction)
+# 11 - Alaska Stage IV NWS Precip
+# 14 - NBM - PRVI
+SuppPcp = [14]
+
+# Specify the file type for each supplemental precipitation file (comma separated)
+# Valid types are GRIB1, GRIB2, and NETCDF
+# (GRIB files will be converted internally with WGRIB[2])
+SuppPcpForcingTypes = [GRIB2]
+
+# Specify the corresponding supplemental precipitation directories
+# that will be searched for input files.
+SuppPcpDirectories = /lfs/h1/ops/prod/com/blend/v4.2
+
+# Specify whether the Supplemental Precips listed above are mandatory, or optional.
+# This is important for layering contingencies if a product is missing,
+# but forcing files are still desired.
+# 0 - Not mandatory
+# 1 - Mandatory
+SuppPcpMandatory = [0]
+
+# Specify regridding options for the supplemental precipitation products.
+RegridOptSuppPcp = [1]
+
+# Specify the time interpretation methods for the supplemental precipitation
+# products.
+SuppPcpTemporalInterpolation = [0]
+
+# In AnA runs, this value is the offset from the available forecast and 00z
+# For example, if forecasts are available at 06z and 18z, set this value to 6
+SuppPcpInputOffsets = [0]
+
+# Optional RQI method for radar-based data.
+# 0 - Do not use any RQI filtering. Use all radar-based estimates.
+# 1 - Use hourly MRMS Radar Quality Index grids.
+# 2 - Use NWM monthly climatology grids (NWM only!!!!)
+RqiMethod = 0
+
+# Optional RQI threshold to be used to mask out. Currently used for MRMS products.
+# Please choose a value from 0.0-1.0. Associated radar quality index files will be expected
+# from MRMS data. 
+RqiThreshold = 0.9 + +# Specify an optional directory that contains supplemental precipitation parameter fields, +# I.E monthly RQI climatology +SuppPcpParamDir = /lfs/h1/owp/nwm/noscrub/cham.pham/test1/packages/nwm.v3.1.0/parm/forcingParam/PuertoRico + +[Ensembles] +# Choose ensemble options for each input forcing file being used. Ensemble options include: +# FILL IN ENSEMBLE OPTIONS HERE..... +# Choose the CFS ensemble member number to process +cfsEnsNumber = 1 + +[Custom] +# These are options for specifying custom input NetCDF forcing files (in minutes). +# Choose the input frequency of files that are being processed. I.E., are the +# input files every 15 minutes, 60 minutes, 3-hours, etc. Please specify the +# length of custom input frequencies to match the number of custom NetCDF inputs +# selected above in the Logistics section. +custom_input_fcst_freq = [] diff --git a/Template/template_forcing_engine.config b/Template/template_forcing_engine.config index 63ad00c..3e654a5 100755 --- a/Template/template_forcing_engine.config +++ b/Template/template_forcing_engine.config @@ -46,6 +46,9 @@ # 20 - Alaska Extended AnA # 21 - National Blend of Models (NBM) # 22 - National Digital Forecast Database (NDFD) +# 24 - RRFS NA (CONUS) +# 25 - RRFS PuertoRico +# 26 - RRFS Hawaii InputForcings = [3] # Specify the file type for each forcing (comma separated) diff --git a/core/downscale.py b/core/downscale.py old mode 100644 new mode 100755 diff --git a/core/forcingInputMod.py b/core/forcingInputMod.py index 3aedbd9..536b9c5 100755 --- a/core/forcingInputMod.py +++ b/core/forcingInputMod.py @@ -249,11 +249,11 @@ def define_product(self): 23: ['TMP', 'SPFH', 'UGRD', 'VGRD', 'APCP', 'DSWRF', 'DLWRF', 'PRES'], 24: ['TMP', 'SPFH', 'UGRD', 'VGRD', 'PRATE', 'DSWRF', - 'DLWRF', 'PRES'], + 'DLWRF', 'PRES', 'CPOFP'], 25: ['TMP', 'SPFH', 'UGRD', 'VGRD', 'PRATE', 'DSWRF', - 'DLWRF', 'PRES'], + 'DLWRF', 'PRES', 'CPOFP'], 26: ['TMP', 'SPFH', 'UGRD', 'VGRD', 'PRATE', 'DSWRF', - 'DLWRF', 
'PRES'], + 'DLWRF', 'PRES', 'CPOFP'], } self.grib_vars = grib_vars_in[self.keyValue] @@ -307,16 +307,16 @@ def define_product(self): '10 m above ground', 'surface'], 23: ['2 m above ground', '2 m above ground', '10 m above ground', '10 m above ground', - 'surface', 'surface', 'surface', 'surface'], + 'surface', 'surface', 'surface','surface'], 24: ['2 m above ground', '2 m above ground', '10 m above ground', '10 m above ground', - 'surface', 'surface', 'surface', 'surface'], + 'surface', 'surface', 'surface', 'surface', 'surface'], 25: ['2 m above ground', '2 m above ground', '10 m above ground', '10 m above ground', - 'surface', 'surface', 'surface', 'surface'], + 'surface', 'surface', 'surface', 'surface', 'surface'], 26: ['2 m above ground', '2 m above ground', '10 m above ground', '10 m above ground', - 'surface', 'surface', 'surface', 'surface'] + 'surface', 'surface', 'surface', 'surface', 'surface'] } self.grib_levels = grib_levels_in[self.keyValue] @@ -387,19 +387,20 @@ def define_product(self): 'APCP_surface'], 23: ['TMP_2maboveground', 'SPFH_2maboveground', 'UGRD_10maboveground', 'VGRD_10maboveground', - 'APCP_surface', 'DSWRF_surface', 'DLWRF_surface'], + 'APCP_surface', 'DSWRF_surface', 'DLWRF_surface', + 'PRES_surface'], 24: ['TMP_2maboveground', 'SPFH_2maboveground', 'UGRD_10maboveground', 'VGRD_10maboveground', 'PRATE_surface', 'DSWRF_surface', 'DLWRF_surface', - 'PRES_surface'], + 'PRES_surface', 'CPOFP_surface'], 25: ['TMP_2maboveground', 'SPFH_2maboveground', 'UGRD_10maboveground', 'VGRD_10maboveground', 'PRATE_surface', 'DSWRF_surface', 'DLWRF_surface', - 'PRES_surface'], + 'PRES_surface', 'CPOFP_surface'], 26: ['TMP_2maboveground', 'SPFH_2maboveground', 'UGRD_10maboveground', 'VGRD_10maboveground', 'PRATE_surface', 'DSWRF_surface', 'DLWRF_surface', - 'PRES_surface'] + 'PRES_surface', 'CPOFP_surface'] } self.netcdf_var_names = netcdf_variables[self.keyValue] @@ -460,9 +461,9 @@ def define_product(self): 21: [4, 3], 22: [4,0,1,3], 23: 
[4,5,0,1,3,7,2,6], - 24: [4,5,0,1,3,7,2,6], #['TMP', 'SPFH', 'UGRD', 'VGRD','PRATE', 'DSWRF', 'DLWRF','PRES', 'CPOFP'], - 25: [4,5,0,1,3,7,2,6], - 26: [4,5,0,1,3,7,2,6] + 24: [4,5,0,1,3,7,2,6, 8], #['TMP', 'SPFH', 'UGRD', 'VGRD','PRATE', 'DSWRF', 'DLWRF','PRES', 'CPOFP'], + 25: [4,5,0,1,3,7,2,6, 8], + 26: [4,5,0,1,3,7,2,6, 8] } self.input_map_output = input_map_to_outputs[self.keyValue] @@ -489,7 +490,10 @@ def define_product(self): 20: None, 21: None, 22: None, - 23: [18, 18, 18, 18, 18, 18, 36, 18, 18, 18, 18, 18, 36, 18, 18, 18, 18, 18, 36, 18, 18, 18, 18, 18] + 23: [18, 18, 18, 18, 18, 18, 36, 18, 18, 18, 18, 18, 36, 18, 18, 18, 18, 18, 36, 18, 18, 18, 18, 18], + 24: [84, 18, 18, 18, 18, 18, 84, 18, 18, 18, 18, 18, 84, 18, 18, 18, 18, 18, 84, 18, 18, 18, 18, 18], + 25: None, + 26: None } self.forecast_horizons = forecast_horizons[self.keyValue] diff --git a/core/regrid.py b/core/regrid.py index 48bfc29..7a93e7d 100755 --- a/core/regrid.py +++ b/core/regrid.py @@ -4642,3 +4642,715 @@ def calculate_supp_pcp_weights(supplemental_precip, id_tmp, tmp_file, config_opt supplemental_precip.esmf_field_out = supplemental_precip.regridObj(supplemental_precip.esmf_field_in, supplemental_precip.esmf_field_out) supplemental_precip.regridded_mask[:] = supplemental_precip.esmf_field_out.data[:] + + + +def regrid_rrfs_na(input_forcings, config_options, wrf_hydro_geo_meta, mpi_config): + """ + Function for handling regridding of HRRR data. + :param input_forcings: + :param config_options: + :param wrf_hydro_geo_meta: + :param mpi_config: + :return: + """ + # If the expected file is missing, this means we are allowing missing files, simply + # exit out of this routine as the regridded fields have already been set to NDV. + if not os.path.isfile(input_forcings.file_in2): + if mpi_config.rank == 0: + config_options.statusMsg = "No RAP regridding required for this timestep." 
+ err_handler.log_msg(config_options, mpi_config) + return + + # Create a path for a temporary NetCDF file + input_forcings.tmpFile = config_options.scratch_dir + "/" + "RAP_CONUS_TMP-{}.nc".format(mkfilename()) + err_handler.check_program_status(config_options, mpi_config) + + if input_forcings.fileType != NETCDF: + # This file shouldn't exist.... but if it does (previously failed + # execution of the program), remove it..... + if mpi_config.rank == 0: + if os.path.isfile(input_forcings.tmpFile): + config_options.statusMsg = "Found old temporary file: " + \ + input_forcings.tmpFile + ", removing." + err_handler.log_warning(config_options, mpi_config) + try: + os.remove(input_forcings.tmpFile) + except OSError: + config_options.errMsg = "Unable to remove file: " + input_forcings.tmpFile + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + fields = [] + for force_count, grib_var in enumerate(input_forcings.grib_vars): + if mpi_config.rank == 0: + config_options.statusMsg = "Converting CONUS RRFS Variable: " + grib_var + err_handler.log_msg(config_options, mpi_config) + time_str = "{}-{} hour acc fcst".format(input_forcings.fcst_hour1, input_forcings.fcst_hour2) \ + if grib_var in ("APCP",) else str(input_forcings.fcst_hour2) + " hour fcst" + fields.append(':' + grib_var + ':' + + input_forcings.grib_levels[force_count] + ':' + + time_str + ":") + fields.append(":(HGT):(surface):") + + # categorical precip for liquid fraction + fields.append(":(CFRZR):(surface):") + fields.append(":(CICEP):(surface):") + fields.append(":(CSNOW):(surface):") + fields.append(":(CRAIN):(surface):") + + if input_forcings.t2dDownscaleOpt == 3: # dynamic lapse rate + fields.append(":(HGT):(12 hybrid level):") + fields.append(":(TMP):(12 hybrid level):") + + # Create a temporary NetCDF file from the bgrb/pgrb GRIB2 files. 
+ cmd = f'$WGRIB2 -match "(' + '|'.join(fields) + f')" -netcdf {input_forcings.tmpFile} -' + id_tmp = ioMod.open_grib2(input_forcings.file_in2, input_forcings.tmpFile, cmd, + config_options, mpi_config, inputVar=None) + err_handler.check_program_status(config_options, mpi_config) + else: + create_link("RRFS", input_forcings.file_in2, input_forcings.tmpFile, config_options, mpi_config) + id_tmp = ioMod.open_netcdf_forcing(input_forcings.tmpFile, config_options, mpi_config) + + for force_count, grib_var in enumerate(input_forcings.grib_vars): + if mpi_config.rank == 0: + config_options.statusMsg = "Processing Conus RRFS Variable: " + grib_var + err_handler.log_msg(config_options, mpi_config) + + if grib_var != "LQFRAC": + calc_regrid_flag = check_regrid_status(id_tmp, force_count, input_forcings, + config_options, wrf_hydro_geo_meta, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + else: + calc_regrid_flag = False + + if calc_regrid_flag: + if mpi_config.rank == 0: + config_options.statusMsg = "Calculating RRFS regridding weights." + err_handler.log_msg(config_options, mpi_config) + calculate_weights(id_tmp, force_count, input_forcings, config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + # Regrid the height variable. 
+ hgt_tmp = None + if mpi_config.rank == 0: + try: + hgt_tmp = id_tmp.variables['HGT_surface'][0, :, :] + except (ValueError, KeyError, AttributeError) as err: + config_options.errMsg = "Unable to extract HGT_surface from : " + id_tmp + \ + " (" + str(err) + ")" + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + var_sub_tmp = mpi_config.scatter_array(input_forcings, hgt_tmp, config_options) + err_handler.check_program_status(config_options, mpi_config) + + try: + input_forcings.esmf_field_in.data[:, :] = var_sub_tmp + except (ValueError, KeyError, AttributeError) as err: + config_options.errMsg = "Unable to place temporary RRFS elevation variable into ESMF field: " + str(err) + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + if mpi_config.rank == 0: + config_options.statusMsg = "Regridding RRFS surface elevation data to the WRF-Hydro domain." + err_handler.log_msg(config_options, mpi_config) + try: + input_forcings.esmf_field_out = input_forcings.regridObj(input_forcings.esmf_field_in, + input_forcings.esmf_field_out) + except ValueError as ve: + config_options.errMsg = "Unable to regrid RRFS elevation data using ESMF: " + str(ve) + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + # Set any pixel cells outside the input domain to the global missing value. 
+ try: + input_forcings.esmf_field_out.data[np.where(input_forcings.regridded_mask == 0)] = \ + config_options.globalNdv + except (ValueError, ArithmeticError) as npe: + config_options.errMsg = "Unable to perform mask search on RRFS elevation data: " + str(npe) + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + try: + input_forcings.height[:, :] = input_forcings.esmf_field_out.data + except (ValueError, KeyError, AttributeError) as err: + config_options.errMsg = "Unable to place RRFS ESMF elevation field into local array: " + str(err) + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + # Regrid the input variables. + var_tmp = None + if mpi_config.rank == 0: + try: + if grib_var == "LQFRAC": + var_tmp_CFRZR = id_tmp.variables['CFRZR_surface'][0, :, :] + var_tmp_CICEP = id_tmp.variables['CICEP_surface'][0, :, :] + var_tmp_CSNOW = id_tmp.variables['CSNOW_surface'][0, :, :] + var_tmp_CRAIN = id_tmp.variables['CRAIN_surface'][0, :, :] + + var_tmp = var_tmp_CRAIN / (var_tmp_CFRZR+var_tmp_CSNOW+var_tmp_CICEP+1) + var_tmp = np.where(var_tmp_CFRZR+var_tmp_CSNOW+var_tmp_CICEP+var_tmp_CRAIN == 0, np.nan, var_tmp) # flag for temperature partitioning + else: + var_tmp = id_tmp.variables[input_forcings.netcdf_var_names[force_count]][0, :, :] + if grib_var in ("APCP",): + var_tmp /= 3600 # convert hourly accumulated precip to instantaneous rate + except (ValueError, KeyError, AttributeError) as err: + config_options.errMsg = "Unable to extract: " + input_forcings.netcdf_var_names[force_count] + \ + " from: " + input_forcings.tmpFile + \ + " (" + str(err) + ")" + err_handler.log_critical(config_options, mpi_config) + + err_handler.check_program_status(config_options, mpi_config) + + var_sub_tmp = mpi_config.scatter_array(input_forcings, var_tmp, config_options) + err_handler.check_program_status(config_options, mpi_config) + + # mask out 
missing frozen fraction + mask = input_forcings.esmf_grid_in.get_item(ESMF.GridItem.MASK) + prev_mask = np.copy(mask) + if grib_var == 'LQFRAC': + mask[np.where(np.isnan(var_sub_tmp))] = 0 + + if grib_var == 'TMP' and input_forcings.t2dDownscaleOpt == 3: # dynamic lapse rate + dyn_lapse = None + if input_forcings.lapseGrid is None: + input_forcings.lapseGrid = np.empty([wrf_hydro_geo_meta.ny_local, wrf_hydro_geo_meta.nx_local],np.float32) + if mpi_config.rank == 0: + # read HGT,0,12 and TMP,12 + # TODO: parameterize the level + try: + hgt_top = id_tmp.variables['HGT_12hybridlevel'][0, :, :] + hgt_bot = id_tmp.variables['HGT_surface'][0, :, :] + tmp_top = id_tmp.variables['TMP_12hybridlevel'][0, :, :] + + hgt_delta = hgt_top - hgt_bot + tmp_delta = tmp_top - var_tmp + dyn_lapse = -1000 * (tmp_delta / hgt_delta) + + # limit the dyn_lapse to the range (-10,10) TODO: this could be parameterized + dyn_lapse = np.clip(dyn_lapse, -10, 10) + + except (ValueError, KeyError, AttributeError) as err: + config_options.errMsg = "Unable to extract RRFS HGT or TMP at hybrid level 12 from: " + id_tmp + \ + " (" + str(err) + ")" + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + dyn_sub_tmp = mpi_config.scatter_array(input_forcings, dyn_lapse, config_options) + try: + input_forcings.esmf_field_in.data[:, :] = dyn_sub_tmp + except (ValueError, KeyError, AttributeError) as err: + config_options.errMsg = "Unable to place temporary RRFS lapse rate into ESMF field: " + str(err) + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + if mpi_config.rank == 0: + config_options.statusMsg = "Regridding RRFS lapse rate to the WRF-Hydro domain." 
+ err_handler.log_msg(config_options, mpi_config) + try: + input_forcings.esmf_field_out = input_forcings.regridObj(input_forcings.esmf_field_in, + input_forcings.esmf_field_out) + except ValueError as ve: + config_options.errMsg = "Unable to regrid RRFS lapse rate using ESMF: " + str(ve) + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + # Set any pixel cells outside the input domain to the global missing value. + try: + input_forcings.esmf_field_out.data[np.where(input_forcings.regridded_mask == 0)] = \ + config_options.globalNdv + except (ValueError, ArithmeticError) as npe: + config_options.errMsg = "Unable to perform mask search on RRFS lapse rate: " + str(npe) + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + try: + input_forcings.lapseGrid[:, :] = input_forcings.esmf_field_out.data + except (ValueError, KeyError, AttributeError) as err: + config_options.errMsg = "Unable to place RRFS ESMF lapse rate into local array: " + str(err) + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + dyn_lapse = None + dyn_sub_tmp = None + + try: + input_forcings.esmf_field_in.data[:, :] = var_sub_tmp + except (ValueError, KeyError, AttributeError) as err: + config_options.errMsg = "Unable to place local RRFS array into ESMF field: " + str(err) + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + if mpi_config.rank == 0: + config_options.statusMsg = "Regridding Input RRFS Field: " + input_forcings.netcdf_var_names[force_count] + err_handler.log_msg(config_options, mpi_config) + try: + input_forcings.esmf_field_out = input_forcings.regridObj(input_forcings.esmf_field_in, + input_forcings.esmf_field_out) + except ValueError as ve: + config_options.errMsg = "Unable to regrid RRFS variable: " + 
input_forcings.netcdf_var_names[force_count] \ + + str(ve) + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + # Set any pixel cells outside the input domain to the global missing value. + try: + input_forcings.esmf_field_out.data[np.isnan(input_forcings.esmf_field_out.data)] = -50 + input_forcings.esmf_field_out.data[np.where(input_forcings.regridded_mask == 0)] = \ + config_options.globalNdv + except (ValueError, ArithmeticError) as npe: + config_options.errMsg = "Unable to run mask calculation on RRFS variable: " + \ + input_forcings.netcdf_var_names[force_count] + " (" + str(npe) + ")" + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + try: + input_forcings.regridded_forcings2[input_forcings.input_map_output[force_count], :, :] = \ + input_forcings.esmf_field_out.data + except (ValueError, KeyError, AttributeError) as err: + config_options.errMsg = "Unable to place RRFS ESMF data into local array: " + str(err) + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + # If we are on the first timestep, set the previous regridded field to be + # the latest as there are no states for time 0. + if config_options.current_output_step == 1: + input_forcings.regridded_forcings1[input_forcings.input_map_output[force_count], :, :] = \ + input_forcings.regridded_forcings2[input_forcings.input_map_output[force_count], :, :] + err_handler.check_program_status(config_options, mpi_config) + + # If we are on the first timestep, set the previous regridded field to be + # the latest as there are no states for time 0. 
+ if config_options.current_output_step == 1: + input_forcings.regridded_forcings1[input_forcings.input_map_output[force_count], :, :] = \ + input_forcings.regridded_forcings2[input_forcings.input_map_output[force_count], :, :] + err_handler.check_program_status(config_options, mpi_config) + + # reset mask + mask[:] = prev_mask + + # Close the temporary NetCDF file and remove it. + if mpi_config.rank == 0: + try: + id_tmp.close() + except OSError: + config_options.errMsg = "Unable to close NetCDF file: " + input_forcings.tmpFile + err_handler.log_critical(config_options, mpi_config) + + try: + os.remove(input_forcings.tmpFile) + except OSError: + config_options.errMsg = "Unable to remove NetCDF file: " + input_forcings.tmpFile + err_handler.log_critical(config_options, mpi_config) + + err_handler.check_program_status(config_options, mpi_config) + err_handler.log_msg(config_options, mpi_config) + # err_handler.log_msg(config_options, mpi_config) + return + + +def regrid_rrfs_pr(input_forcings, config_options, wrf_hydro_geo_meta, mpi_config): + """ + Function for handing regridding of input NAM nest data + fro GRIB2 files. + :param mpi_config: + :param wrf_hydro_geo_meta: + :param input_forcings: + :param config_options: + :return: + """ + # If the expected file is missing, this means we are allowing missing files, simply + # exit out of this routine as the regridded fields have already been set to NDV. + if not os.path.isfile(input_forcings.file_in2): + return + + # Check to see if the regrid complete flag for this + # output time step is true. This entails the necessary + # inputs have already been regridded and we can move on. + if input_forcings.regridComplete: + config_options.statusMsg = "No regridding of RRFS data necessary for this timestep - already completed." 
+ err_handler.log_msg(config_options, mpi_config) + return + + # Create a path for a temporary NetCDF file + input_forcings.tmpFile = config_options.scratch_dir + "/" + "RRFS_PR_TMP-{}.nc".format(mkfilename()) + err_handler.check_program_status(config_options, mpi_config) + if input_forcings.fileType != NETCDF: + + # This file shouldn't exist.... but if it does (previously failed + # execution of the program), remove it..... + if mpi_config.rank == 0: + if os.path.isfile(input_forcings.tmpFile): + config_options.statusMsg = "Found old temporary file: " + \ + input_forcings.tmpFile + ", removing." + err_handler.log_warning(config_options, mpi_config) + try: + os.remove(input_forcings.tmpFile) + except OSError: + err_handler.err_out(config_options) + err_handler.check_program_status(config_options, mpi_config) + + fields = [] + for force_count, grib_var in enumerate(input_forcings.grib_vars): + if mpi_config.rank == 0: + config_options.statusMsg = "Converting RRFS Variable: " + grib_var + err_handler.log_msg(config_options, mpi_config) + fields.append(':' + grib_var + ':' + + input_forcings.grib_levels[force_count] + ':' + + str(input_forcings.fcst_hour2) + " hour fcst:") + fields.append(":(HGT):(surface):") + + # Create a temporary NetCDF file from the GRIB2 file. + cmd = '$WGRIB2 -match "(' + '|'.join(fields) + ')" ' + input_forcings.file_in2 + \ + " -netcdf " + input_forcings.tmpFile + id_tmp = ioMod.open_grib2(input_forcings.file_in2, input_forcings.tmpFile, cmd, + config_options, mpi_config, inputVar=None) + err_handler.check_program_status(config_options, mpi_config) + + #config_options.statusMsg = cmd + #err_handler.log_msg(config_options, mpi_config) + else: + create_link("RRFS-Pr", input_forcings.file_in2, input_forcings.tmpFile, config_options, mpi_config) + id_tmp = ioMod.open_netcdf_forcing(input_forcings.tmpFile, config_options, mpi_config) + + # Loop through all of the input forcings in NAM nest data. 
Convert the GRIB2 files + # to NetCDF, read in the data, regrid it, then map it to the appropriate + # array slice in the output arrays. + for force_count, grib_var in enumerate(input_forcings.grib_vars): + if mpi_config.rank == 0: + config_options.statusMsg = "Processing RRFS Variable: " + grib_var + err_handler.log_msg(config_options, mpi_config) + + calc_regrid_flag = check_regrid_status(id_tmp, force_count, input_forcings, + config_options, wrf_hydro_geo_meta, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + if calc_regrid_flag: + if mpi_config.rank == 0: + config_options.statusMsg = "Calculating RRFS PR regridding weights...." + err_handler.log_msg(config_options, mpi_config) + calculate_weights(id_tmp, force_count, input_forcings, config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + # Regrid the height variable. + if mpi_config.rank == 0: + var_tmp = id_tmp.variables['HGT_surface'][0, :, :] + else: + var_tmp = None + err_handler.check_program_status(config_options, mpi_config) + + var_sub_tmp = mpi_config.scatter_array(input_forcings, var_tmp, config_options) + err_handler.check_program_status(config_options, mpi_config) + + try: + input_forcings.esmf_field_in.data[:, :] = var_sub_tmp + except (ValueError, KeyError, AttributeError) as err: + config_options.errMsg = "Unable to place NetCDF RRFS PR elevation data into the ESMF field object: " \ + + str(err) + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + if mpi_config.rank == 0: + config_options.statusMsg = "Regridding RRFS PR elevation data to the WRF-Hydro domain." 
+ err_handler.log_msg(config_options, mpi_config) + try: + input_forcings.esmf_field_out = input_forcings.regridObj(input_forcings.esmf_field_in, + input_forcings.esmf_field_out) + except ValueError as ve: + config_options.errMsg = "Unable to regrid RRFS PR elevation data to the WRF-Hydro domain " \ + "using ESMF: " + str(ve) + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + # Set any pixel cells outside the input domain to the global missing value. + try: + input_forcings.esmf_field_out.data[np.where(input_forcings.regridded_mask == 0)] = \ + config_options.globalNdv + except (ValueError, ArithmeticError) as npe: + config_options.errMsg = "Unable to compute mask on RRFS PR elevation data: " + str(npe) + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + try: + input_forcings.height[:, :] = input_forcings.esmf_field_out.data + except (ValueError, KeyError, AttributeError) as err: + config_options.errMsg = "Unable to extract ESMF regridded RRFS PR elevation data to a local " \ + "array: " + str(err) + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + err_handler.check_program_status(config_options, mpi_config) + + # Regrid the input variables. 
+ var_tmp = None + if mpi_config.rank == 0: + config_options.statusMsg = "Regridding RRFS input variable: " + \ + input_forcings.netcdf_var_names[force_count] + err_handler.log_msg(config_options, mpi_config) + try: + var_tmp = id_tmp.variables[input_forcings.netcdf_var_names[force_count]][0, :, :] + except (ValueError, KeyError, AttributeError) as err: + config_options.errMsg = "Unable to extract " + input_forcings.netcdf_var_names[force_count] + \ + " from: " + input_forcings.tmpFile + " (" + str(err) + ")" + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + var_sub_tmp = mpi_config.scatter_array(input_forcings, var_tmp, config_options) + err_handler.check_program_status(config_options, mpi_config) + + try: + input_forcings.esmf_field_in.data[:, :] = var_sub_tmp + except (ValueError, KeyError, AttributeError) as err: + config_options.errMsg = "Unable to place local array into local ESMF field: " + str(err) + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + try: + input_forcings.esmf_field_out = input_forcings.regridObj(input_forcings.esmf_field_in, + input_forcings.esmf_field_out) + except ValueError as ve: + config_options.errMsg = "Unable to regrid input RRFS PR forcing variables using ESMF: " + str(ve) + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + # Set any pixel cells outside the input domain to the global missing value. 
+ try: + input_forcings.esmf_field_out.data[np.where(input_forcings.regridded_mask == 0)] = \ + config_options.globalNdv + except (ValueError, ArithmeticError) as npe: + config_options.errMsg = "Unable to calculate mask from input RRFS PR regridded forcings: " + str(npe) + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + try: + input_forcings.regridded_forcings2[input_forcings.input_map_output[force_count], :, :] = \ + input_forcings.esmf_field_out.data + except (ValueError, KeyError, AttributeError) as err: + config_options.errMsg = "Unable to place local ESMF regridded data into local array: " + str(err) + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + # If we are on the first timestep, set the previous regridded field to be + # the latest as there are no states for time 0. + if config_options.current_output_step == 1: + input_forcings.regridded_forcings1[input_forcings.input_map_output[force_count], :, :] = \ + input_forcings.regridded_forcings2[input_forcings.input_map_output[force_count], :, :] + err_handler.check_program_status(config_options, mpi_config) + + # Close the temporary NetCDF file and remove it. + if mpi_config.rank == 0: + try: + id_tmp.close() + except OSError: + config_options.errMsg = "Unable to close NetCDF file: " + input_forcings.tmpFile + err_handler.log_critical(config_options, mpi_config) + try: + os.remove(input_forcings.tmpFile) + except OSError: + config_options.errMsg = "Unable to remove NetCDF file: " + input_forcings.tmpFile + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + + + +def regrid_rrfs_hi(input_forcings, config_options, wrf_hydro_geo_meta, mpi_config): + """ + Function for handing regridding of input NAM nest data + fro GRIB2 files. 
+ :param mpi_config: + :param wrf_hydro_geo_meta: + :param input_forcings: + :param config_options: + :return: + """ + # If the expected file is missing, this means we are allowing missing files, simply + # exit out of this routine as the regridded fields have already been set to NDV. + if not os.path.isfile(input_forcings.file_in2): + return + + # Check to see if the regrid complete flag for this + # output time step is true. This entails the necessary + # inputs have already been regridded and we can move on. + if input_forcings.regridComplete: + config_options.statusMsg = "No regridding of NAM nest data necessary for this timestep - already completed." + err_handler.log_msg(config_options, mpi_config) + return + + # Create a path for a temporary NetCDF file + input_forcings.tmpFile = config_options.scratch_dir + "/" + "NAM_NEST_TMP-{}.nc".format(mkfilename()) + err_handler.check_program_status(config_options, mpi_config) + if input_forcings.fileType != NETCDF: + + # This file shouldn't exist.... but if it does (previously failed + # execution of the program), remove it..... + if mpi_config.rank == 0: + if os.path.isfile(input_forcings.tmpFile): + config_options.statusMsg = "Found old temporary file: " + \ + input_forcings.tmpFile + ", removing." + err_handler.log_warning(config_options, mpi_config) + try: + os.remove(input_forcings.tmpFile) + except OSError: + err_handler.err_out(config_options) + err_handler.check_program_status(config_options, mpi_config) + + fields = [] + for force_count, grib_var in enumerate(input_forcings.grib_vars): + if mpi_config.rank == 0: + config_options.statusMsg = "Converting RRFS Variable: " + grib_var + err_handler.log_msg(config_options, mpi_config) + fields.append(':' + grib_var + ':' + + input_forcings.grib_levels[force_count] + ':' + + str(input_forcings.fcst_hour2) + " hour fcst:") + fields.append(":(HGT):(surface):") + + # Create a temporary NetCDF file from the GRIB2 file. 
+ cmd = '$WGRIB2 -match "(' + '|'.join(fields) + ')" ' + input_forcings.file_in2 + \ + " -netcdf " + input_forcings.tmpFile + id_tmp = ioMod.open_grib2(input_forcings.file_in2, input_forcings.tmpFile, cmd, + config_options, mpi_config, inputVar=None) + err_handler.check_program_status(config_options, mpi_config) + else: + create_link("RRFS", input_forcings.file_in2, input_forcings.tmpFile, config_options, mpi_config) + id_tmp = ioMod.open_netcdf_forcing(input_forcings.tmpFile, config_options, mpi_config) + + # Loop through all of the input forcings in NAM nest data. Convert the GRIB2 files + # to NetCDF, read in the data, regrid it, then map it to the appropriate + # array slice in the output arrays. + for force_count, grib_var in enumerate(input_forcings.grib_vars): + if mpi_config.rank == 0: + config_options.statusMsg = "Processing RRFS Variable: " + grib_var + err_handler.log_msg(config_options, mpi_config) + + calc_regrid_flag = check_regrid_status(id_tmp, force_count, input_forcings, + config_options, wrf_hydro_geo_meta, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + if calc_regrid_flag: + if mpi_config.rank == 0: + config_options.statusMsg = "Calculating RRFS HI regridding weights...." + err_handler.log_msg(config_options, mpi_config) + calculate_weights(id_tmp, force_count, input_forcings, config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + # Regrid the height variable. 
+ if mpi_config.rank == 0: + var_tmp = id_tmp.variables['HGT_surface'][0, :, :] + else: + var_tmp = None + err_handler.check_program_status(config_options, mpi_config) + + var_sub_tmp = mpi_config.scatter_array(input_forcings, var_tmp, config_options) + err_handler.check_program_status(config_options, mpi_config) + + try: + input_forcings.esmf_field_in.data[:, :] = var_sub_tmp + except (ValueError, KeyError, AttributeError) as err: + config_options.errMsg = "Unable to place NetCDF RRFS HI elevation data into the ESMF field object: " \ + + str(err) + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + if mpi_config.rank == 0: + config_options.statusMsg = "Regridding RRFS HI elevation data to the WRF-Hydro domain." + err_handler.log_msg(config_options, mpi_config) + try: + input_forcings.esmf_field_out = input_forcings.regridObj(input_forcings.esmf_field_in, + input_forcings.esmf_field_out) + except ValueError as ve: + config_options.errMsg = "Unable to regrid RRFS HI elevation data to the WRF-Hydro domain " \ + "using ESMF: " + str(ve) + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + # Set any pixel cells outside the input domain to the global missing value. 
+ try: + input_forcings.esmf_field_out.data[np.where(input_forcings.regridded_mask == 0)] = \ + config_options.globalNdv + except (ValueError, ArithmeticError) as npe: + config_options.errMsg = "Unable to compute mask on RRFS HI elevation data: " + str(npe) + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + try: + input_forcings.height[:, :] = input_forcings.esmf_field_out.data + except (ValueError, KeyError, AttributeError) as err: + config_options.errMsg = "Unable to extract ESMF regridded RRFS HI elevation data to a local " \ + "array: " + str(err) + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + err_handler.check_program_status(config_options, mpi_config) + + # Regrid the input variables. + var_tmp = None + if mpi_config.rank == 0: + config_options.statusMsg = "Regridding RRFS input variable: " + \ + input_forcings.netcdf_var_names[force_count] + err_handler.log_msg(config_options, mpi_config) + try: + var_tmp = id_tmp.variables[input_forcings.netcdf_var_names[force_count]][0, :, :] + except (ValueError, KeyError, AttributeError) as err: + config_options.errMsg = "Unable to extract " + input_forcings.netcdf_var_names[force_count] + \ + " from: " + input_forcings.tmpFile + " (" + str(err) + ")" + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + var_sub_tmp = mpi_config.scatter_array(input_forcings, var_tmp, config_options) + err_handler.check_program_status(config_options, mpi_config) + + try: + input_forcings.esmf_field_in.data[:, :] = var_sub_tmp + except (ValueError, KeyError, AttributeError) as err: + config_options.errMsg = "Unable to place local array into local ESMF field: " + str(err) + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + try: + input_forcings.esmf_field_out = 
input_forcings.regridObj(input_forcings.esmf_field_in, + input_forcings.esmf_field_out) + except ValueError as ve: + config_options.errMsg = "Unable to regrid input RRFS HI forcing variables using ESMF: " + str(ve) + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + # Set any pixel cells outside the input domain to the global missing value. + try: + input_forcings.esmf_field_out.data[np.where(input_forcings.regridded_mask == 0)] = \ + config_options.globalNdv + except (ValueError, ArithmeticError) as npe: + config_options.errMsg = "Unable to calculate mask from input RRFS HI regridded forcings: " + str(npe) + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + try: + input_forcings.regridded_forcings2[input_forcings.input_map_output[force_count], :, :] = \ + input_forcings.esmf_field_out.data + except (ValueError, KeyError, AttributeError) as err: + config_options.errMsg = "Unable to place local ESMF regridded data into local array: " + str(err) + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + # If we are on the first timestep, set the previous regridded field to be + # the latest as there are no states for time 0. + if config_options.current_output_step == 1: + input_forcings.regridded_forcings1[input_forcings.input_map_output[force_count], :, :] = \ + input_forcings.regridded_forcings2[input_forcings.input_map_output[force_count], :, :] + err_handler.check_program_status(config_options, mpi_config) + + # Close the temporary NetCDF file and remove it. 
+ if mpi_config.rank == 0: + try: + id_tmp.close() + except OSError: + config_options.errMsg = "Unable to close NetCDF file: " + input_forcings.tmpFile + err_handler.log_critical(config_options, mpi_config) + try: + os.remove(input_forcings.tmpFile) + except OSError: + config_options.errMsg = "Unable to remove NetCDF file: " + input_forcings.tmpFile + err_handler.log_critical(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + diff --git a/core/time_handling.py b/core/time_handling.py index fed4aad..b58e87b 100755 --- a/core/time_handling.py +++ b/core/time_handling.py @@ -711,6 +711,412 @@ def find_ak_hrrr_neighbors(input_forcings, config_options, d_current, mpi_config if input_forcings.regridded_forcings2 is not None: input_forcings.regridded_forcings2[:, :, :] = config_options.globalNdv +def find_rrfs_na_neighbors(input_forcings, config_options, d_current, mpi_config): + """ + Function to calculate the previous and after HRRR conus cycles based on the current timestep. + :param input_forcings: + :param config_options: + :param d_current: + :param mpi_config: + :return: + """ + if mpi_config.rank == 0: + config_options.statusMsg = "Processing Conus HRRR Data. Calculating neighboring " \ + "files for this output timestep" + err_handler.log_msg(config_options, mpi_config) + + default_horizon = 18 # 18-hour forecasts. + six_hr_horizon = 84 # 84-hour forecasts every six hours. + + # First find the current HRRR forecast cycle that we are using. + ana_offset = 1 if config_options.ana_flag else 0 + current_rrfs_cycle = config_options.current_fcst_cycle - datetime.timedelta( + seconds=(ana_offset + input_forcings.userCycleOffset) * 60.0) + if current_rrfs_cycle.hour % 6 != 0: + rrfs_horizon = default_horizon + else: + rrfs_horizon = six_hr_horizon + + # If the user has specified a forcing horizon that is greater than what is available + # for this time period, throw an error. 
+    if (input_forcings.userFcstHorizon + input_forcings.userCycleOffset) / 60.0 > rrfs_horizon:
+        config_options.errMsg = "User has specified RRFS NA forecast horizon " + \
+                                "that is greater than the maximum allowed hours of: " + str(rrfs_horizon)
+        err_handler.log_critical(config_options, mpi_config)
+    err_handler.check_program_status(config_options, mpi_config)
+
+    # Calculate the current forecast hour within this RRFS cycle.
+    dt_tmp = d_current - current_rrfs_cycle
+    current_rrfs_hour = int(dt_tmp.days*24) + float(dt_tmp.seconds/3600.0)
+
+    # Calculate the previous file to process.
+    min_since_last_output = (current_rrfs_hour * 60) % 60
+    current_rrfs_hour = int(current_rrfs_hour)
+
+    if min_since_last_output == 0:
+        min_since_last_output = 60
+    prev_rrfs_date = d_current - datetime.timedelta(seconds=min_since_last_output * 60)
+    input_forcings.fcst_date1 = prev_rrfs_date
+    if min_since_last_output == 60:
+        min_until_next_output = 0
+    else:
+        min_until_next_output = 60 - min_since_last_output
+    next_rrfs_date = d_current + datetime.timedelta(seconds=min_until_next_output * 60)
+    input_forcings.fcst_date2 = next_rrfs_date
+
+    # Calculate the output forecast hours needed based on the prev/next dates.
+    dt_tmp = next_rrfs_date - current_rrfs_cycle
+    next_rrfs_forecast_hour = int(dt_tmp.days * 24.0) + int(dt_tmp.seconds / 3600.0)
+    input_forcings.fcst_hour2 = next_rrfs_forecast_hour
+    dt_tmp = prev_rrfs_date - current_rrfs_cycle
+    prev_rrfs_forecast_hour = int(dt_tmp.days * 24.0) + int(dt_tmp.seconds / 3600.0)
+    input_forcings.fcst_hour1 = prev_rrfs_forecast_hour
+    err_handler.check_program_status(config_options, mpi_config)
+
+    # If we are on the first RRFS forecast hour (1), and we have calculated the previous forecast
+    # hour to be 0, simply set both hours to be 1. Hour 0 will not produce the fields we need, and
+    # no interpolation is required.
+    if prev_rrfs_forecast_hour == 0:
+        prev_rrfs_forecast_hour = 1
+
+    # Calculate expected file paths.
+ tmp_file1 = \ + f"{input_forcings.inDir}/rrfs.{current_rrfs_cycle.strftime('%Y%m%d')}/{prev_rrfs_cycle.strftime('%H')}/rrfs.t{current_rrfs_cycle.strftime('%H')}z.prslev.3km.f{str(prev_rrfs_forecast_hour).zfill(3)}.na.grib2" + if mpi_config.rank == 0: + config_options.statusMsg = "Previous RRFS file being used: " + tmp_file1 + err_handler.log_msg(config_options, mpi_config) + + tmp_file2 = \ + f"{input_forcings.inDir}/rrfs.{current_rrfs_cycle.strftime('%Y%m%d')}/{current_rrfs_cycle.strftime('%H')}/rrfs.t{current_rrfs_cycle.strftime('%H')}z.prslev.3km.f{str(current_rrfs_forecast_hour).zfill(3)}.na.grib2" + if mpi_config.rank == 0: + if mpi_config.rank == 0: + config_options.statusMsg = "Next RRFS file being used: " + tmp_file2 + err_handler.log_msg(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + # Check to see if files are already set. If not, then reset, grids and + # regridding objects to communicate things need to be re-established. + if input_forcings.file_in1 != tmp_file1 or input_forcings.file_in2 != tmp_file2: + if config_options.current_output_step == 1: + input_forcings.regridded_forcings1 = input_forcings.regridded_forcings1 + input_forcings.regridded_forcings2 = input_forcings.regridded_forcings2 + input_forcings.file_in1 = tmp_file1 + input_forcings.file_in2 = tmp_file2 + else: + # Check to see if we are restarting from a previously failed instance. In this case, + # We are not on the first timestep, but no previous forcings have been processed. + # We need to process the previous input timestep for temporal interpolation purposes. + if input_forcings.regridded_forcings1 is None: + # if not np.any(input_forcings.regridded_forcings1): + if mpi_config.rank == 0: + config_options.statusMsg = "Restarting forecast cycle. 
Will regrid previous: " + \ + input_forcings.productName + err_handler.log_msg(config_options, mpi_config) + input_forcings.rstFlag = 1 + input_forcings.regridded_forcings1 = input_forcings.regridded_forcings1 + input_forcings.regridded_forcings2 = input_forcings.regridded_forcings2 + input_forcings.file_in2 = tmp_file1 + input_forcings.file_in1 = tmp_file1 + input_forcings.fcst_date2 = input_forcings.fcst_date1 + input_forcings.fcst_hour2 = input_forcings.fcst_hour1 + else: + # The HRRR window has shifted. Reset fields 2 to + # be fields 1. + input_forcings.regridded_forcings1[:, :, :] = input_forcings.regridded_forcings2[:, :, :] + input_forcings.file_in1 = tmp_file1 + input_forcings.file_in2 = tmp_file2 + input_forcings.regridComplete = False + err_handler.check_program_status(config_options, mpi_config) + + # Ensure we have the necessary new file + if mpi_config.rank == 0: + if not os.path.exists(input_forcings.file_in2): + if input_forcings.enforce == 1: + config_options.errMsg = "Expected input RRFS file: " + input_forcings.file_in2 + " not found." + err_handler.log_critical(config_options, mpi_config) + else: + config_options.statusMsg = "Expected input RRFS file: " + input_forcings.file_in2 + " not found. " \ + "Will not use in " \ + "final layering." + err_handler.log_warning(config_options, mpi_config) + err_handler.check_program_status(config_options, mpi_config) + + # If the file is missing, set the local slab of arrays to missing. + if not os.path.exists(input_forcings.file_in2): + if input_forcings.regridded_forcings2 is not None: + input_forcings.regridded_forcings2[:, :, :] = config_options.globalNdv + + +def find_rrfs_pr_neighbors(input_forcings, config_options, d_current, mpi_config): + """ + Function to calculate the previous and after HRRR conus cycles based on the current timestep. 
def find_rrfs_pr_neighbors(input_forcings, config_options, d_current, mpi_config):
    """
    Calculate the previous and next RRFS Puerto Rico (PR) cycle files that
    bracket the current output timestep, for temporal interpolation.

    Results are stored on input_forcings (file_in1/2, fcst_date1/2,
    fcst_hour1/2, regridded_forcings1/2, regridComplete, rstFlag).

    :param input_forcings: input forcings object holding file/forecast-hour state.
    :param config_options: configuration options object.
    :param d_current: current output timestep (datetime).
    :param mpi_config: MPI configuration object.
    :return: None
    """
    if mpi_config.rank == 0:
        config_options.statusMsg = "Processing RRFS PR Data. Calculating neighboring " \
                                   "files for this output timestep"
        err_handler.log_msg(config_options, mpi_config)

    rrfs_horizon = 84

    # First find the current RRFS forecast cycle that we are using.
    ana_offset = 1 if config_options.ana_flag else 0
    current_rrfs_cycle = config_options.current_fcst_cycle - datetime.timedelta(
        seconds=(ana_offset + input_forcings.userCycleOffset) * 60.0)

    # If the user has specified a forcing horizon that is greater than what is available
    # for this time period, throw an error.
    if (input_forcings.userFcstHorizon + input_forcings.userCycleOffset) / 60.0 > rrfs_horizon:
        config_options.errMsg = "User has specified RRFS PR forecast horizon " + \
                                "that is greater than the maximum allowed hours of: " + str(rrfs_horizon)
        err_handler.log_critical(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)

    # Calculate the current forecast hour within this RRFS cycle.
    dt_tmp = d_current - current_rrfs_cycle
    current_rrfs_hour = int(dt_tmp.days * 24) + float(dt_tmp.seconds / 3600.0)

    # Calculate the previous file to process.
    min_since_last_output = (current_rrfs_hour * 60) % 60
    current_rrfs_hour = int(current_rrfs_hour)

    if min_since_last_output == 0:
        min_since_last_output = 60
    prev_rrfs_date = d_current - datetime.timedelta(seconds=min_since_last_output * 60)
    input_forcings.fcst_date1 = prev_rrfs_date
    if min_since_last_output == 60:
        min_until_next_output = 0
    else:
        min_until_next_output = 60 - min_since_last_output
    next_rrfs_date = d_current + datetime.timedelta(seconds=min_until_next_output * 60)
    input_forcings.fcst_date2 = next_rrfs_date

    # Calculate the output forecast hours needed based on the prev/next dates.
    dt_tmp = next_rrfs_date - current_rrfs_cycle
    next_rrfs_forecast_hour = int(dt_tmp.days * 24.0) + int(dt_tmp.seconds / 3600.0)
    if config_options.ana_flag:
        next_rrfs_forecast_hour -= 1  # for analysis vs forecast
    input_forcings.fcst_hour2 = next_rrfs_forecast_hour
    dt_tmp = prev_rrfs_date - current_rrfs_cycle
    prev_rrfs_forecast_hour = int(dt_tmp.days * 24.0) + int(dt_tmp.seconds / 3600.0)
    if config_options.ana_flag:
        prev_rrfs_forecast_hour -= 1  # for analysis vs forecast
    input_forcings.fcst_hour1 = prev_rrfs_forecast_hour
    err_handler.check_program_status(config_options, mpi_config)

    # If we are on the first RRFS forecast hour (1), and we have calculated the previous forecast
    # hour to be 0, simply set both hours to be 1. Hour 0 will not produce the fields we need, and
    # no interpolation is required.
    # NOTE(review): fcst_hour1 is left at 0 while the file path below uses hour 1 —
    # this matches the other *_neighbors functions in this module; confirm intended.
    if prev_rrfs_forecast_hour == 0:
        prev_rrfs_forecast_hour = 1

    # Calculate expected file paths.
    tmp_file1 = \
        f"{input_forcings.inDir}/rrfs.{current_rrfs_cycle.strftime('%Y%m%d')}/{current_rrfs_cycle.strftime('%H')}/rrfs.t{current_rrfs_cycle.strftime('%H')}z.prslev.2p5km.f{str(prev_rrfs_forecast_hour).zfill(3)}.pr.grib2"
    if mpi_config.rank == 0:
        config_options.statusMsg = "Previous RRFS file being used: " + tmp_file1
        err_handler.log_msg(config_options, mpi_config)

    tmp_file2 = \
        f"{input_forcings.inDir}/rrfs.{current_rrfs_cycle.strftime('%Y%m%d')}/{current_rrfs_cycle.strftime('%H')}/rrfs.t{current_rrfs_cycle.strftime('%H')}z.prslev.2p5km.f{str(next_rrfs_forecast_hour).zfill(3)}.pr.grib2"
    # FIX: removed a doubled "if mpi_config.rank == 0:" nesting (redundant).
    if mpi_config.rank == 0:
        config_options.statusMsg = "Next RRFS file being used: " + tmp_file2
        err_handler.log_msg(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)

    # Check to see if files are already set. If not, then reset, grids and
    # regridding objects to communicate things need to be re-established.
    if input_forcings.file_in1 != tmp_file1 or input_forcings.file_in2 != tmp_file2:
        if config_options.current_output_step == 1:
            # No-op self-assignments retained from the template code; kept in case
            # these attributes are properties with side effects — TODO confirm.
            input_forcings.regridded_forcings1 = input_forcings.regridded_forcings1
            input_forcings.regridded_forcings2 = input_forcings.regridded_forcings2
            input_forcings.file_in1 = tmp_file1
            input_forcings.file_in2 = tmp_file2
        else:
            # Check to see if we are restarting from a previously failed instance. In this case,
            # We are not on the first timestep, but no previous forcings have been processed.
            # We need to process the previous input timestep for temporal interpolation purposes.
            if input_forcings.regridded_forcings1 is None:
                if mpi_config.rank == 0:
                    config_options.statusMsg = "Restarting forecast cycle. Will regrid previous: " + \
                                               input_forcings.productName
                    err_handler.log_msg(config_options, mpi_config)
                input_forcings.rstFlag = 1
                input_forcings.regridded_forcings1 = input_forcings.regridded_forcings1
                input_forcings.regridded_forcings2 = input_forcings.regridded_forcings2
                input_forcings.file_in2 = tmp_file1
                input_forcings.file_in1 = tmp_file1
                input_forcings.fcst_date2 = input_forcings.fcst_date1
                input_forcings.fcst_hour2 = input_forcings.fcst_hour1
            else:
                # The RRFS window has shifted. Reset fields 2 to
                # be fields 1.
                input_forcings.regridded_forcings1[:, :, :] = input_forcings.regridded_forcings2[:, :, :]
                input_forcings.file_in1 = tmp_file1
                input_forcings.file_in2 = tmp_file2
        input_forcings.regridComplete = False
    err_handler.check_program_status(config_options, mpi_config)

    # Ensure we have the necessary new file
    if mpi_config.rank == 0:
        if not os.path.exists(input_forcings.file_in2):
            if input_forcings.enforce == 1:
                config_options.errMsg = "Expected input RRFS file: " + input_forcings.file_in2 + " not found."
                err_handler.log_critical(config_options, mpi_config)
            else:
                config_options.statusMsg = "Expected input RRFS file: " + input_forcings.file_in2 + " not found. " \
                                           "Will not use in " \
                                           "final layering."
                err_handler.log_warning(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)

    # If the file is missing, set the local slab of arrays to missing.
    if not os.path.exists(input_forcings.file_in2):
        if input_forcings.regridded_forcings2 is not None:
            input_forcings.regridded_forcings2[:, :, :] = config_options.globalNdv
def find_rrfs_hi_neighbors(input_forcings, config_options, d_current, mpi_config):
    """
    Calculate the previous and next RRFS Hawaii (HI) cycle files that
    bracket the current output timestep, for temporal interpolation.

    Results are stored on input_forcings (file_in1/2, fcst_date1/2,
    fcst_hour1/2, regridded_forcings1/2, regridComplete, rstFlag).

    :param input_forcings: input forcings object holding file/forecast-hour state.
    :param config_options: configuration options object.
    :param d_current: current output timestep (datetime).
    :param mpi_config: MPI configuration object.
    :return: None
    """
    if mpi_config.rank == 0:
        config_options.statusMsg = "Processing Hawaii RRFS Data. Calculating neighboring " \
                                   "files for this output timestep"
        err_handler.log_msg(config_options, mpi_config)

    # First find the current RRFS forecast cycle that we are using.
    ana_offset = 1 if config_options.ana_flag else 0
    current_rrfs_cycle = config_options.current_fcst_cycle - datetime.timedelta(
        seconds=(ana_offset + input_forcings.userCycleOffset) * 60.0)
    rrfs_horizon = 48

    # If the user has specified a forcing horizon that is greater than what is available
    # for this time period, throw an error.
    if (input_forcings.userFcstHorizon + input_forcings.userCycleOffset) / 60.0 > rrfs_horizon:
        config_options.errMsg = "User has specified RRFS HI forecast horizon " + \
                                "that is greater than the maximum allowed hours of: " + str(rrfs_horizon)
        err_handler.log_critical(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)

    # Calculate the current forecast hour within this RRFS cycle.
    dt_tmp = d_current - current_rrfs_cycle
    current_rrfs_hour = int(dt_tmp.days * 24) + float(dt_tmp.seconds / 3600.0)

    # Calculate the previous file to process.
    min_since_last_output = (current_rrfs_hour * 60) % 60
    current_rrfs_hour = int(current_rrfs_hour)

    if min_since_last_output == 0:
        min_since_last_output = 60
    prev_rrfs_date = d_current - datetime.timedelta(seconds=min_since_last_output * 60)
    input_forcings.fcst_date1 = prev_rrfs_date
    if min_since_last_output == 60:
        min_until_next_output = 0
    else:
        min_until_next_output = 60 - min_since_last_output
    next_rrfs_date = d_current + datetime.timedelta(seconds=min_until_next_output * 60)
    input_forcings.fcst_date2 = next_rrfs_date

    # Calculate the output forecast hours needed based on the prev/next dates.
    dt_tmp = next_rrfs_date - current_rrfs_cycle
    next_rrfs_forecast_hour = int(dt_tmp.days * 24.0) + int(dt_tmp.seconds / 3600.0)
    if config_options.ana_flag:
        next_rrfs_forecast_hour -= 1  # for analysis vs forecast
    input_forcings.fcst_hour2 = next_rrfs_forecast_hour
    dt_tmp = prev_rrfs_date - current_rrfs_cycle
    prev_rrfs_forecast_hour = int(dt_tmp.days * 24.0) + int(dt_tmp.seconds / 3600.0)
    if config_options.ana_flag:
        prev_rrfs_forecast_hour -= 1  # for analysis vs forecast
    input_forcings.fcst_hour1 = prev_rrfs_forecast_hour
    err_handler.check_program_status(config_options, mpi_config)

    # If we are on the first RRFS forecast hour (1), and we have calculated the previous forecast
    # hour to be 0, simply set both hours to be 1. Hour 0 will not produce the fields we need, and
    # no interpolation is required.
    # NOTE(review): fcst_hour1 is left at 0 while the file path below uses hour 1 —
    # this matches the other *_neighbors functions in this module; confirm intended.
    if prev_rrfs_forecast_hour == 0:
        prev_rrfs_forecast_hour = 1

    # Calculate expected file paths.
    # BUG FIX: original built the cycle directory and t{HH}z token from
    # prev_rrfs_date (a valid time), which points at a nonexistent cycle
    # directory for any hour past cycle start; the NA/PR variants use
    # current_rrfs_cycle, which is what the file layout requires.
    tmp_file1 = \
        f"{input_forcings.inDir}/rrfs.{current_rrfs_cycle.strftime('%Y%m%d')}/{current_rrfs_cycle.strftime('%H')}/rrfs.t{current_rrfs_cycle.strftime('%H')}z.prslev.2p5km.f{str(prev_rrfs_forecast_hour).zfill(3)}.hi.grib2"
    if mpi_config.rank == 0:
        config_options.statusMsg = "Previous RRFS file being used: " + tmp_file1
        err_handler.log_msg(config_options, mpi_config)

    # BUG FIX: original used current_rrfs_hour for the next file, which skips
    # the ana_flag adjustment applied above; next_rrfs_forecast_hour is the
    # bracketing hour actually recorded in fcst_hour2 (matches PR variant).
    # Also removed a doubled "if mpi_config.rank == 0:" nesting.
    tmp_file2 = \
        f"{input_forcings.inDir}/rrfs.{current_rrfs_cycle.strftime('%Y%m%d')}/{current_rrfs_cycle.strftime('%H')}/rrfs.t{current_rrfs_cycle.strftime('%H')}z.prslev.2p5km.f{str(next_rrfs_forecast_hour).zfill(3)}.hi.grib2"
    if mpi_config.rank == 0:
        config_options.statusMsg = "Next RRFS file being used: " + tmp_file2
        err_handler.log_msg(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)

    # Check to see if files are already set. If not, then reset, grids and
    # regridding objects to communicate things need to be re-established.
    if input_forcings.file_in1 != tmp_file1 or input_forcings.file_in2 != tmp_file2:
        if config_options.current_output_step == 1:
            # No-op self-assignments retained from the template code; kept in case
            # these attributes are properties with side effects — TODO confirm.
            input_forcings.regridded_forcings1 = input_forcings.regridded_forcings1
            input_forcings.regridded_forcings2 = input_forcings.regridded_forcings2
            input_forcings.file_in1 = tmp_file1
            input_forcings.file_in2 = tmp_file2
        else:
            # Check to see if we are restarting from a previously failed instance. In this case,
            # We are not on the first timestep, but no previous forcings have been processed.
            # We need to process the previous input timestep for temporal interpolation purposes.
            if input_forcings.regridded_forcings1 is None:
                if mpi_config.rank == 0:
                    config_options.statusMsg = "Restarting forecast cycle. Will regrid previous: " + \
                                               input_forcings.productName
                    err_handler.log_msg(config_options, mpi_config)
                input_forcings.rstFlag = 1
                input_forcings.regridded_forcings1 = input_forcings.regridded_forcings1
                input_forcings.regridded_forcings2 = input_forcings.regridded_forcings2
                input_forcings.file_in2 = tmp_file1
                input_forcings.file_in1 = tmp_file1
                input_forcings.fcst_date2 = input_forcings.fcst_date1
                input_forcings.fcst_hour2 = input_forcings.fcst_hour1
            else:
                # The RRFS window has shifted. Reset fields 2 to
                # be fields 1.
                input_forcings.regridded_forcings1[:, :, :] = input_forcings.regridded_forcings2[:, :, :]
                input_forcings.file_in1 = tmp_file1
                input_forcings.file_in2 = tmp_file2
        input_forcings.regridComplete = False
    err_handler.check_program_status(config_options, mpi_config)

    # Ensure we have the necessary new file
    if mpi_config.rank == 0:
        if not os.path.exists(input_forcings.file_in2):
            if input_forcings.enforce == 1:
                config_options.errMsg = "Expected input RRFS file: " + input_forcings.file_in2 + " not found."
                err_handler.log_critical(config_options, mpi_config)
            else:
                config_options.statusMsg = "Expected input RRFS file: " + input_forcings.file_in2 + " not found. " \
                                           "Will not use in " \
                                           "final layering."
                err_handler.log_warning(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)

    # If the file is missing, set the local slab of arrays to missing.
    # NOTE(review): a second "def find_rrfs_na_neighbors" follows this function
    # in the diff; if both land in the module, the later definition silently
    # shadows the earlier one — confirm the duplicate is removed upstream.
    if not os.path.exists(input_forcings.file_in2):
        if input_forcings.regridded_forcings2 is not None:
            input_forcings.regridded_forcings2[:, :, :] = config_options.globalNdv