diff --git a/.circleci/config.yml b/.circleci/config.yml index 5c89ea615..7c7d0d6a4 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,11 +1,11 @@ version: 2.1 # Anchors in case we need to override the defaults from the orb -#baselibs_version: &baselibs_version v7.17.0 -#bcs_version: &bcs_version v11.4.0 +#baselibs_version: &baselibs_version v7.27.0 +#bcs_version: &bcs_version v11.6.0 orbs: - ci: geos-esm/circleci-tools@2 + ci: geos-esm/circleci-tools@4 workflows: build-test: diff --git a/CMakeLists.txt b/CMakeLists.txt index a3b379668..bba3fcd40 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -8,6 +8,11 @@ set (alldirs GEOSwgcm_GridComp ) +option(BUILD_WITH_GIGATRAJ "Build GEOSgcm with Gigatraj" OFF) + +if (BUILD_WITH_GIGATRAJ) + list(APPEND alldirs GEOSgigatraj_GridComp) +endif() if (EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/GEOS_GcmGridComp.F90) @@ -17,6 +22,8 @@ if (EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/GEOS_GcmGridComp.F90) SUBCOMPONENTS ${alldirs} DEPENDENCIES MAPL ESMF::ESMF) + target_compile_definitions (${this} PRIVATE $<$:HAS_GIGATRAJ>) + ecbuild_install_project( NAME GEOSgcm_GridComp) else () diff --git a/GEOS_GcmGridComp.F90 b/GEOS_GcmGridComp.F90 index f9e242d48..96d183e14 100644 --- a/GEOS_GcmGridComp.F90 +++ b/GEOS_GcmGridComp.F90 @@ -20,6 +20,10 @@ module GEOS_GcmGridCompMod use GEOS_AgcmGridCompMod, only: AGCM_SetServices => SetServices use GEOS_mkiauGridCompMod, only: AIAU_SetServices => SetServices use DFI_GridCompMod, only: ADFI_SetServices => SetServices +#ifdef HAS_GIGATRAJ + use GEOS_GigatrajGridCompMod, only: GigaTraj_SetServices => SetServices +#endif + use GEOS_OgcmGridCompMod, only: OGCM_SetServices => SetServices use GEOS_WgcmGridCompMod, only: WGCM_SetServices => SetServices use MAPL_HistoryGridCompMod, only: Hist_SetServices => SetServices @@ -58,6 +62,7 @@ module GEOS_GcmGridCompMod integer :: ADFI integer :: WGCM integer :: hist +integer :: gigatraj integer :: bypass_ogcm integer :: k @@ -251,6 +256,10 @@ subroutine 
SetServices ( GC, RC ) else AGCM = MAPL_AddChild(GC, NAME='AGCM', SS=Agcm_SetServices, RC=STATUS) VERIFY_(STATUS) +#ifdef HAS_GIGATRAJ + gigatraj = MAPL_AddChild(GC, NAME='GIGATRAJ', SS=GigaTraj_SetServices, RC=STATUS) + VERIFY_(STATUS) +#endif AIAU = MAPL_AddChild(GC, NAME='AIAU', SS=AIAU_SetServices, RC=STATUS) VERIFY_(STATUS) ADFI = MAPL_AddChild(GC, NAME='ADFI', SS=ADFI_SetServices, RC=STATUS) @@ -955,6 +964,10 @@ subroutine Initialize ( GC, IMPORT, EXPORT, CLOCK, RC ) ! Recursive setup of grids (should be disabled) call ESMF_GridCompSet(GCS(AGCM), grid=agrid, rc=status) VERIFY_(STATUS) +#ifdef HAS_GIGATRAJ + call ESMF_GridCompSet(GCS(gigatraj), grid=agrid, rc=status) + VERIFY_(STATUS) +#endif call ESMF_GridCompSet(GCS(OGCM), grid=ogrid, rc=status) VERIFY_(STATUS) if(.not. DO_DATA_ATM4OCN) then @@ -2019,10 +2032,23 @@ subroutine Run ( GC, IMPORT, EXPORT, CLOCK, RC ) else call MAPL_TimerOn(MAPL,"AGCM" ) endif + +#ifdef HAS_GIGATRAJ + ! use agcm export as gigatraj's import to get the initial state. + ! it only runs at the begining of the first time step + call ESMF_GridCompRun ( GCS(gigatraj), importState=GEX(AGCM), exportState=GEX(gigatraj), clock=clock, phase=1, userRC=status ) + VERIFY_(STATUS) +#endif call ESMF_GridCompRun ( GCS(AGCM), importState=GIM(AGCM), exportState=GEX(AGCM), clock=clock, userRC=status ) VERIFY_(STATUS) +#ifdef HAS_GIGATRAJ + ! 
use agcm export as gigatraj's import + call ESMF_GridCompRun ( GCS(gigatraj), importState=GEX(AGCM), exportState=GEX(gigatraj), clock=clock, phase=2, userRC=status ) + VERIFY_(STATUS) +#endif + if(DO_DATA_ATM4OCN) then call MAPL_TimerOff(MAPL,"DATAATM" ) else diff --git a/GEOSagcm_GridComp/CMakeLists.txt b/GEOSagcm_GridComp/CMakeLists.txt index 7e0783090..f28aaff0c 100644 --- a/GEOSagcm_GridComp/CMakeLists.txt +++ b/GEOSagcm_GridComp/CMakeLists.txt @@ -20,6 +20,8 @@ elseif (EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/GEOS_AgcmGridComp.F90) SUBCOMPONENTS ${alldirs} DEPENDENCIES MAPL GEOS_Shared Chem_Shared ESMF::ESMF) + target_compile_definitions (${this} PRIVATE $<$:HAS_GIGATRAJ>) + else () esma_add_subdirectories (${alldirs}) diff --git a/GEOSagcm_GridComp/GEOS_AgcmGridComp.F90 b/GEOSagcm_GridComp/GEOS_AgcmGridComp.F90 index 3a3bee32b..26d96fdc2 100644 --- a/GEOSagcm_GridComp/GEOS_AgcmGridComp.F90 +++ b/GEOSagcm_GridComp/GEOS_AgcmGridComp.F90 @@ -790,6 +790,34 @@ subroutine SetServices ( GC, RC ) RC = STATUS) VERIFY_(STATUS) +#ifdef HAS_GIGATRAJ + call MAPL_AddExportSpec(GC, & + SHORT_NAME = 'PL', & + CHILD_ID = SDYN, & + RC = STATUS) + VERIFY_(STATUS) + call MAPL_AddExportSpec(GC, & + SHORT_NAME = 'OMEGA', & + CHILD_ID = SDYN, & + RC = STATUS) + VERIFY_(STATUS) + call MAPL_AddExportSpec(GC, & + SHORT_NAME = 'TH', & + CHILD_ID = SDYN, & + RC = STATUS) + VERIFY_(STATUS) + call MAPL_AddExportSpec(GC, & + SHORT_NAME = 'DTDTDYN', & + CHILD_ID = SDYN, & + RC = STATUS) + VERIFY_(STATUS) + call MAPL_AddExportSpec(GC, & + SHORT_NAME = 'ZL', & + CHILD_ID = SDYN, & + RC = STATUS) + VERIFY_(STATUS) +#endif + call MAPL_AddExportSpec( GC, & SHORT_NAME = 'PS', & CHILD_ID = SDYN, & diff --git a/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOS_PhysicsGridComp.F90 b/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOS_PhysicsGridComp.F90 index 36dd7c3f6..068af3a61 100644 --- a/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOS_PhysicsGridComp.F90 +++ 
b/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOS_PhysicsGridComp.F90 @@ -1215,8 +1215,8 @@ subroutine SetServices ( GC, RC ) call MAPL_AddConnectivity ( GC, & SHORT_NAME = (/ 'RL ', 'QL ', 'QLTOT ', 'DQLDT ', & 'RI ', 'QI ', 'QITOT ', 'DQIDT ', & - 'QLCN ', 'PFL_CN ', 'PFL_LSAN', & - 'QICN ', 'PFI_CN ', 'PFI_LSAN', & + 'QLCN ', 'PFL_CN ', 'PFL_LSAN', 'ZLCL ', & + 'QICN ', 'PFI_CN ', 'PFI_LSAN', 'ZLFC ', & 'FCLD ', 'QCTOT ', 'CNV_QC ', & 'REV_LS ', 'REV_AN ', 'REV_CN ', 'TPREC ', & 'Q ', 'DQDT ', 'DQRL ', 'DQRC ', & diff --git a/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSmoist_GridComp/CMakeLists.txt b/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSmoist_GridComp/CMakeLists.txt index 1366abb01..7992821cb 100644 --- a/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSmoist_GridComp/CMakeLists.txt +++ b/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSmoist_GridComp/CMakeLists.txt @@ -36,6 +36,12 @@ esma_add_library (${this} SRCS ${srcs} DEPENDENCIES GEOS_Shared GMAO_mpeu MAPL Chem_Shared Chem_Base ESMF::ESMF) +# We need to add_dependencies for fms_r4 because CMake doesn't know we +# need it for include purposes. In R4R8, we only ever link against +# fms_r8, so it doesn't know to build the target fms_r4 +# NOTE NOTE NOTE: This should *not* be included in GEOSgcm v12 +# because FMS is pre-built library in that case. 
+add_dependencies (${this} fms_r4) get_target_property (extra_incs fms_r4 INCLUDE_DIRECTORIES) target_include_directories(${this} PRIVATE $ diff --git a/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOS_SurfaceGridComp.F90 b/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOS_SurfaceGridComp.F90 index 191bd636e..a5aa010e1 100644 --- a/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOS_SurfaceGridComp.F90 +++ b/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOS_SurfaceGridComp.F90 @@ -552,7 +552,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddImportSpec(GC, & SHORT_NAME = 'DRPARN', & - LONG_NAME = 'normalized_surface_downwelling_par_beam_flux', & + LONG_NAME = 'normalized_surface_downwelling_PAR_beam_flux', & UNITS = '1', & DIMS = MAPL_DimsHorzOnly, & VLOCATION = MAPL_VLocationNone, & @@ -561,7 +561,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddImportSpec(GC, & SHORT_NAME = 'DFPARN', & - LONG_NAME = 'normalized_surface_downwelling_par_diffuse_flux', & + LONG_NAME = 'normalized_surface_downwelling_PAR_diffuse_flux', & UNITS = '1', & DIMS = MAPL_DimsHorzOnly, & VLOCATION = MAPL_VLocationNone, & @@ -683,7 +683,7 @@ subroutine SetServices ( GC, RC ) ! 
!EXPORT STATE: call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_albedo_for_visible_beam', & + LONG_NAME = 'surface_reflectivity_for_visible_beam', & UNITS = '1', & SHORT_NAME = 'ALBVR', & DIMS = MAPL_DimsHorzOnly, & @@ -692,7 +692,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_albedo_for_visible_diffuse',& + LONG_NAME = 'surface_reflectivity_for_visible_diffuse',& UNITS = '1', & SHORT_NAME = 'ALBVF', & DIMS = MAPL_DimsHorzOnly, & @@ -701,7 +701,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_albedo_for_nearinfrared_beam', & + LONG_NAME = 'surface_reflectivity_for_near_infrared_beam', & UNITS = '1', & SHORT_NAME = 'ALBNR', & DIMS = MAPL_DimsHorzOnly, & @@ -710,7 +710,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_albedo_for_nearinfraed_diffuse', & + LONG_NAME = 'surface_reflectivity_for_near_infrared_diffuse', & UNITS = '1', & SHORT_NAME = 'ALBNF', & DIMS = MAPL_DimsHorzOnly, & @@ -766,7 +766,16 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'FRACI', & - LONG_NAME = 'ice_covered_fraction_of_tile', & + LONG_NAME = 'ice_covered_fraction_of_grid_cell', & + UNITS = '1', & + DIMS = MAPL_DimsHorzOnly, & + VLOCATION = MAPL_VLocationNone, & + RC=STATUS ) + VERIFY_(STATUS) + + call MAPL_AddExportSpec(GC, & + SHORT_NAME = 'OFRACI', & + LONG_NAME = 'ice_covered_fraction_of_ocean_area',& UNITS = '1', & DIMS = MAPL_DimsHorzOnly, & VLOCATION = MAPL_VLocationNone, & @@ -938,7 +947,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'TSOIL1', & - LONG_NAME = 'soil_temperatures_layer_1' ,& + LONG_NAME = 'soil_temperature_layer_1' ,& UNITS = 'K', & DIMS = MAPL_DimsHorzOnly, & VLOCATION = MAPL_VLocationNone, & @@ -947,7 +956,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'TSOIL2', & - LONG_NAME = 
'soil_temperatures_layer_2' ,& + LONG_NAME = 'soil_temperature_layer_2' ,& UNITS = 'K', & DIMS = MAPL_DimsHorzOnly, & VLOCATION = MAPL_VLocationNone, & @@ -956,7 +965,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'TSOIL3', & - LONG_NAME = 'soil_temperatures_layer_3' ,& + LONG_NAME = 'soil_temperature_layer_3' ,& UNITS = 'K', & DIMS = MAPL_DimsHorzOnly, & VLOCATION = MAPL_VLocationNone, & @@ -965,7 +974,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'TSOIL4', & - LONG_NAME = 'soil_temperatures_layer_4' ,& + LONG_NAME = 'soil_temperature_layer_4' ,& UNITS = 'K', & DIMS = MAPL_DimsHorzOnly, & VLOCATION = MAPL_VLocationNone, & @@ -974,7 +983,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'TSOIL5', & - LONG_NAME = 'soil_temperatures_layer_5' ,& + LONG_NAME = 'soil_temperature_layer_5' ,& UNITS = 'K', & DIMS = MAPL_DimsHorzOnly, & VLOCATION = MAPL_VLocationNone, & @@ -983,7 +992,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'TSOIL6', & - LONG_NAME = 'soil_temperatures_layer_6' ,& + LONG_NAME = 'soil_temperature_layer_6' ,& UNITS = 'K', & DIMS = MAPL_DimsHorzOnly, & VLOCATION = MAPL_VLocationNone, & @@ -992,7 +1001,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'fractional_area_of_land_snowcover',& + LONG_NAME = 'fractional_area_of_snow_on_land',& UNITS = '1' ,& SHORT_NAME = 'ASNOW' ,& DIMS = MAPL_DimsHorzOnly ,& @@ -1019,7 +1028,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_temperature_of_snow',& + LONG_NAME = 'surface_temperature_of_snow_on_land',& UNITS = 'K' ,& SHORT_NAME = 'TPSNOW' ,& DIMS = MAPL_DimsHorzOnly ,& @@ -1046,7 +1055,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_temperature_of_wilted_zone',& + LONG_NAME = 'surface_temperature_of_wilting_zone',& UNITS = 'K' ,& 
SHORT_NAME = 'TPWLT' ,& DIMS = MAPL_DimsHorzOnly ,& @@ -1100,7 +1109,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC ,& - LONG_NAME = 'surface_soil_wetness' ,& + LONG_NAME = 'soil_wetness_surface' ,& UNITS = '1' ,& SHORT_NAME = 'WET1' ,& DIMS = MAPL_DimsHorzOnly ,& @@ -1109,7 +1118,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC ,& - LONG_NAME = 'surface_soil_wetness_for_chem' ,& + LONG_NAME = 'soil_wetness_surface_for_chem' ,& UNITS = '1' ,& SHORT_NAME = 'WET1_FOR_CHEM' ,& DIMS = MAPL_DimsHorzOnly ,& @@ -1118,7 +1127,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC ,& - LONG_NAME = 'root_zone_soil_wetness' ,& + LONG_NAME = 'soil_wetness_rootzone' ,& UNITS = '1' ,& SHORT_NAME = 'WET2' ,& DIMS = MAPL_DimsHorzOnly ,& @@ -1127,7 +1136,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC ,& - LONG_NAME = 'ave_prof_soil_moisture' ,& + LONG_NAME = 'soil_wetness_profile' ,& UNITS = '1' ,& SHORT_NAME = 'WET3' ,& DIMS = MAPL_DimsHorzOnly ,& @@ -1136,7 +1145,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC ,& - LONG_NAME = 'water_surface_layer' ,& + LONG_NAME = 'soil_moisture_surface' ,& UNITS = 'm3 m-3' ,& SHORT_NAME = 'WCSF' ,& DIMS = MAPL_DimsHorzOnly ,& @@ -1145,7 +1154,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC ,& - LONG_NAME = 'water_root_zone' ,& + LONG_NAME = 'soil_moisture_rootzone' ,& UNITS = 'm3 m-3' ,& SHORT_NAME = 'WCRZ' ,& DIMS = MAPL_DimsHorzOnly ,& @@ -1154,7 +1163,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC ,& - LONG_NAME = 'water_profile' ,& + LONG_NAME = 'soil_moisture_profile' ,& UNITS = 'm3 m-3' ,& SHORT_NAME = 'WCPR' ,& DIMS = MAPL_DimsHorzOnly ,& @@ -1172,7 +1181,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC ,& - LONG_NAME = 'greenness_fraction' ,& + LONG_NAME = 
'vegetation_greenness_fraction' ,& UNITS = '1' ,& SHORT_NAME = 'GRN' ,& DIMS = MAPL_DimsHorzOnly ,& @@ -1418,7 +1427,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC ,& - LONG_NAME = 'snow_depth_within_snow_covered_area_fraction' ,& + LONG_NAME = 'snow_depth_within_snow_covered_area_fraction_on_land' ,& UNITS = 'm' ,& SHORT_NAME = 'SNOWDP' ,& DIMS = MAPL_DimsHorzOnly ,& @@ -1603,7 +1612,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'runoff_flux',& + LONG_NAME = 'runoff_total_flux' ,& UNITS = 'kg m-2 s-1' ,& SHORT_NAME = 'RUNOFF' ,& DIMS = MAPL_DimsHorzOnly ,& @@ -1693,7 +1702,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'baseflow_flux' ,& + LONG_NAME = 'baseflow_flux_land' ,& UNITS = 'kg m-2 s-1' ,& SHORT_NAME = 'BASEFLOW' ,& DIMS = MAPL_DimsHorzOnly ,& @@ -1712,7 +1721,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'EVLAND', & - LONG_NAME = 'Total_evapotranspiration_land', & + LONG_NAME = 'total_evapotranspiration_land', & UNITS = 'kg m-2 s-1', & DIMS = MAPL_DimsHorzOnly, & VLOCATION = MAPL_VLocationNone, & @@ -1739,7 +1748,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'DRPARLAND', & - LONG_NAME = 'surface_downwelling_par_beam_flux', & + LONG_NAME = 'surface_downwelling_PAR_beam_flux', & UNITS = 'W m-2', & DIMS = MAPL_DimsHorzOnly, & VLOCATION = MAPL_VLocationNone, & @@ -1748,7 +1757,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'DFPARLAND', & - LONG_NAME = 'surface_downwelling_par_diffuse_flux', & + LONG_NAME = 'surface_downwelling_PAR_diffuse_flux', & UNITS = 'W m-2', & DIMS = MAPL_DimsHorzOnly, & VLOCATION = MAPL_VLocationNone, & @@ -1793,7 +1802,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'SWNETSNOW', & - LONG_NAME = 'Net_shortwave_snow', & + LONG_NAME = 'Net_shortwave_flux_snow', 
& UNITS = 'W m-2', & DIMS = MAPL_DimsHorzOnly, & VLOCATION = MAPL_VLocationNone, & @@ -1820,7 +1829,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'GHSNOW', & - LONG_NAME = 'Ground_heating_snow', & + LONG_NAME = 'Ground_heating_flux_snow', & UNITS = 'W m-2', & DIMS = MAPL_DimsHorzOnly, & VLOCATION = MAPL_VLocationNone, & @@ -1847,7 +1856,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'SWLAND', & - LONG_NAME = 'Net_shortwave_land', & + LONG_NAME = 'Net_shortwave_flux_land', & UNITS = 'W m-2', & DIMS = MAPL_DimsHorzOnly, & VLOCATION = MAPL_VLocationNone, & @@ -1856,7 +1865,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'SWDOWNLAND', & - LONG_NAME = 'Incident_shortwave_land', & + LONG_NAME = 'Incident_shortwave_flux_land', & UNITS = 'W m-2', & DIMS = MAPL_DimsHorzOnly, & VLOCATION = MAPL_VLocationNone, & @@ -1865,7 +1874,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'LWLAND', & - LONG_NAME = 'Net_longwave_land', & + LONG_NAME = 'Net_longwave_flux_land', & UNITS = 'W m-2', & DIMS = MAPL_DimsHorzOnly, & VLOCATION = MAPL_VLocationNone, & @@ -1874,7 +1883,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'GHLAND', & - LONG_NAME = 'Ground_heating_land', & + LONG_NAME = 'Ground_heating_flux_land', & UNITS = 'W m-2', & DIMS = MAPL_DimsHorzOnly, & VLOCATION = MAPL_VLocationNone, & @@ -1883,7 +1892,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'GHTSKIN', & - LONG_NAME = 'Ground_heating_for_skin_temp',& + LONG_NAME = 'Ground_heating_flux_for_skin_temp',& UNITS = 'W m-2', & DIMS = MAPL_DimsHorzOnly, & VLOCATION = MAPL_VLocationNone, & @@ -1910,7 +1919,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'TWLAND', & - LONG_NAME = 'Avail_water_storage_land', & + LONG_NAME = 'total_water_storage_land', & UNITS = 'kg m-2', & DIMS = 
MAPL_DimsHorzOnly, & VLOCATION = MAPL_VLocationNone, & @@ -2395,7 +2404,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'interception_reservoir_capac', & + LONG_NAME = 'vegetation_interception_water_storage', & UNITS = 'kg m-2', & SHORT_NAME = 'CAPAC', & DIMS = MAPL_DimsHorzOnly, & @@ -3343,7 +3352,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddInternalSpec(GC, & SHORT_NAME = 'TS', & - LONG_NAME = 'surface_skin_temperature', & + LONG_NAME = 'surface_temperature', & UNITS = 'K', & FriendlyTO = trim(COMP_NAME), & DIMS = MAPL_DimsHorzOnly, & @@ -5384,6 +5393,7 @@ subroutine RUN2 ( GC, IMPORT, EXPORT, CLOCK, RC ) real, pointer, dimension(:,:) :: LST => NULL() real, pointer, dimension(:,:) :: FRI => NULL() + real, pointer, dimension(:,:) :: OFRI => NULL() real, pointer, dimension(:,:) :: EMISS => NULL() real, pointer, dimension(:,:) :: ALBVR => NULL() real, pointer, dimension(:,:) :: ALBVF => NULL() @@ -5738,6 +5748,7 @@ subroutine RUN2 ( GC, IMPORT, EXPORT, CLOCK, RC ) real, pointer, dimension(:) :: LSTTILE => NULL() real, pointer, dimension(:) :: FRTILE => NULL() + real, pointer, dimension(:) :: OFRTILE => NULL() real, pointer, dimension(:) :: EMISSTILE => NULL() real, pointer, dimension(:) :: ALBVRTILE => NULL() real, pointer, dimension(:) :: ALBVFTILE => NULL() @@ -6041,6 +6052,7 @@ subroutine RUN2 ( GC, IMPORT, EXPORT, CLOCK, RC ) real, allocatable :: PRECSUM(:,:) character(len=ESMF_MAXPATHLEN) :: SolCycFileName logical :: PersistSolar + logical :: allocateRunoff !============================================================================= @@ -6425,7 +6437,7 @@ subroutine RUN2 ( GC, IMPORT, EXPORT, CLOCK, RC ) ! call MAPL_CFIORead( PRECIP_FILE, CurrentTime, Bundle, RC=STATUS) ! 
VERIFY_(STATUS) - call MAPL_read_bundle( Bundle,PRECIP_FILE, CurrentTime, RC=status) + call MAPL_read_bundle( Bundle, PRECIP_FILE, CurrentTime, regrid_method=REGRID_METHOD_CONSERVE, RC=status) VERIFY_(STATUS) call ESMFL_BundleGetPointerToData(Bundle,'PRECTOT',PTTe, RC=STATUS) VERIFY_(STATUS) @@ -6879,6 +6891,10 @@ subroutine RUN2 ( GC, IMPORT, EXPORT, CLOCK, RC ) call MAPL_GetPointer(EXPORT , FRI , 'FRACI' , alloc=associated(LWI) , rC=STATUS) VERIFY_(STATUS) +! Not sure about why alloc of FRI depends on LWI, but copy the logic anyway + call MAPL_GetPointer(EXPORT , OFRI , 'OFRACI' , alloc=associated(LWI) , rC=STATUS) + VERIFY_(STATUS) + ! FRI = max(min(FRI,1.0),0.0) ! RiverRouting: force allocations of RUNOFF from continental components, @@ -7249,6 +7265,7 @@ subroutine RUN2 ( GC, IMPORT, EXPORT, CLOCK, RC ) call MKTILE(ALBNF ,ALBNFTILE ,NT,RC=STATUS); VERIFY_(STATUS) call MKTILE(EMISS ,EMISSTILE ,NT,RC=STATUS); VERIFY_(STATUS) call MKTILE(FRI ,FRTILE ,NT,RC=STATUS); VERIFY_(STATUS) + call MKTILE(OFRI ,OFRTILE ,NT,RC=STATUS); VERIFY_(STATUS) call MKTILE(TSOIL1 ,TSOIL1TILE ,NT,RC=STATUS); VERIFY_(STATUS) call MKTILE(TSOIL2 ,TSOIL2TILE ,NT,RC=STATUS); VERIFY_(STATUS) call MKTILE(TSOIL3 ,TSOIL3TILE ,NT,RC=STATUS); VERIFY_(STATUS) @@ -7350,12 +7367,19 @@ subroutine RUN2 ( GC, IMPORT, EXPORT, CLOCK, RC ) call MKTILE(LWNDSRF ,LWNDSRFTILE ,NT,RC=STATUS); VERIFY_(STATUS) call MKTILE(SWNDSRF ,SWNDSRFTILE ,NT,RC=STATUS); VERIFY_(STATUS) + allocateRunoff = .false. + if (associated(RUNOFF)) allocateRunoff = .true. + if (associated(SURF_INTERNAL_STATE%RoutingType) .or. DO_DATA_ATM4OCN) then ! routing file exists or we run DataAtm allocate(DISCHARGETILE(NT),stat=STATUS); VERIFY_(STATUS) DISCHARGETILE=MAPL_Undef + allocateRunoff = .true. 
+ end if + if (allocateRunoff) then allocate(RUNOFFTILE(NT),stat=STATUS); VERIFY_(STATUS) - RUNOFFTILE=MAPL_Undef + RUNOFFTILE = 0.0 end if + call MKTILE(RUNSURF ,RUNSURFTILE ,NT,RC=STATUS); VERIFY_(STATUS) call MKTILE(BASEFLOW,BASEFLOWTILE,NT,RC=STATUS); VERIFY_(STATUS) call MKTILE(ACCUM ,ACCUMTILE ,NT,RC=STATUS); VERIFY_(STATUS) @@ -7492,13 +7516,12 @@ subroutine RUN2 ( GC, IMPORT, EXPORT, CLOCK, RC ) end if - FRTILE = 0.0 + FRTILE = 0.0 + OFRTILE = MAPL_UNDEF ! Cycle through all continental children (skip ocean), ! collecting RUNOFFTILE exports. - if (associated(RUNOFFTILE)) RUNOFFTILE = 0.0 - do I = 1, NUM_CHILDREN if (I == OCEAN) cycle call DOTYPE(I,RC=STATUS) @@ -7677,7 +7700,11 @@ subroutine RUN2 ( GC, IMPORT, EXPORT, CLOCK, RC ) VERIFY_(STATUS) endif if(associated( FRI )) then - call MAPL_LocStreamTransform( LOCSTREAM, FRI , FRTILE, RC=STATUS) + call MAPL_LocStreamTransform( LOCSTREAM, FRI , FRTILE, RC=STATUS) + VERIFY_(STATUS) + endif + if(associated( OFRI )) then + call MAPL_LocStreamTransform( LOCSTREAM, OFRI , OFRTILE, RC=STATUS) VERIFY_(STATUS) endif if(associated(TSOIL1)) then @@ -9030,6 +9057,7 @@ subroutine RUN2 ( GC, IMPORT, EXPORT, CLOCK, RC ) if(associated(ALBVRTILE )) deallocate(ALBVRTILE) if(associated(EMISSTILE )) deallocate(EMISSTILE) if(associated(FRTILE )) deallocate(FRTILE ) + if(associated(OFRTILE )) deallocate(OFRTILE ) if(associated(DUDP)) deallocate( DUDP ) if(associated(DUWT)) deallocate( DUWT ) @@ -9300,6 +9328,9 @@ subroutine DOTYPE(type,RC) VERIFY_(STATUS) call MAPL_GetPointer(GEX(type), dum, 'FRACI' , ALLOC=associated( FRTILE ), notFoundOK=.true., RC=STATUS) VERIFY_(STATUS) +! 
in case FRACI removed in future + call MAPL_GetPointer(GEX(type), dum, 'FRACI' , ALLOC=associated( OFRTILE ), notFoundOK=.true., RC=STATUS) + VERIFY_(STATUS) call MAPL_GetPointer(GEX(type), dum, 'RDU001' , ALLOC=associated(RDU001TILE ), notFoundOK=.true., RC=STATUS) VERIFY_(STATUS) call MAPL_GetPointer(GEX(type), dum, 'RDU002' , ALLOC=associated(RDU002TILE ), notFoundOK=.true., RC=STATUS) @@ -9680,6 +9711,10 @@ subroutine DOTYPE(type,RC) call FILLOUT_TILE(GEX(type), 'FRACI', FRTILE, XFORM, RC=STATUS) VERIFY_(STATUS) end if + if(associated( OFRTILE)) then + call FILLOUT_TILE(GEX(type), 'FRACI', OFRTILE, XFORM, RC=STATUS) + VERIFY_(STATUS) + end if if(associated(TSOIL1TILE)) then call FILLOUT_TILE(GEX(type), 'TP1', TSOIL1TILE, XFORM, RC=STATUS) VERIFY_(STATUS) diff --git a/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSlake_GridComp/GEOS_LakeGridComp.F90 b/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSlake_GridComp/GEOS_LakeGridComp.F90 index 564beefe7..8af0fc45a 100644 --- a/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSlake_GridComp/GEOS_LakeGridComp.F90 +++ b/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSlake_GridComp/GEOS_LakeGridComp.F90 @@ -137,7 +137,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_albedo_for_visible_beam', & + LONG_NAME = 'surface_reflectivity_for_visible_beam', & UNITS = '1', & SHORT_NAME = 'ALBVR', & DIMS = MAPL_DimsTileOnly, & @@ -146,7 +146,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_albedo_for_visible_diffuse',& + LONG_NAME = 'surface_reflectivity_for_visible_diffuse',& UNITS = '1', & SHORT_NAME = 'ALBVF', & DIMS = MAPL_DimsTileOnly, & @@ -155,7 +155,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_albedo_for_near_infrared_beam', & + LONG_NAME = 
'surface_reflectivity_for_near_infrared_beam', & UNITS = '1', & SHORT_NAME = 'ALBNR', & DIMS = MAPL_DimsTileOnly, & @@ -164,7 +164,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_albedo_for_near_infrared_diffuse', & + LONG_NAME = 'surface_reflectivity_for_near_infrared_diffuse', & UNITS = '1', & SHORT_NAME = 'ALBNF', & DIMS = MAPL_DimsTileOnly, & @@ -192,7 +192,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'runoff_flux',& + LONG_NAME = 'runoff_total_flux' ,& UNITS = 'kg m-2 s-1' ,& SHORT_NAME = 'RUNOFF' ,& DIMS = MAPL_DimsTileOnly ,& diff --git a/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSland_GridComp/GEOScatchCN_GridComp/GEOS_CatchCNGridComp.F90 b/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSland_GridComp/GEOScatchCN_GridComp/GEOS_CatchCNGridComp.F90 index 96d970073..05b20561d 100644 --- a/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSland_GridComp/GEOScatchCN_GridComp/GEOS_CatchCNGridComp.F90 +++ b/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSland_GridComp/GEOScatchCN_GridComp/GEOS_CatchCNGridComp.F90 @@ -245,7 +245,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddImportSpec(GC ,& - LONG_NAME = 'surface_downwelling_par_beam_flux',& + LONG_NAME = 'surface_downwelling_PAR_beam_flux',& UNITS = 'W m-2' ,& SHORT_NAME = 'DRPAR' ,& DIMS = MAPL_DimsTileOnly ,& @@ -254,7 +254,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddImportSpec(GC ,& - LONG_NAME = 'surface_downwelling_par_diffuse_flux',& + LONG_NAME = 'surface_downwelling_PAR_diffuse_flux',& UNITS = 'W m-2' ,& SHORT_NAME = 'DFPAR' ,& DIMS = MAPL_DimsTileOnly ,& @@ -346,7 +346,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddImportSpec(GC ,& - LONG_NAME = 'greeness_fraction' ,& + LONG_NAME = 'vegetation_greenness_fraction',& UNITS = '1' ,& SHORT_NAME = 'GRN' ,& DIMS = 
MAPL_DimsTileOnly ,& diff --git a/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSland_GridComp/GEOScatchCN_GridComp/GEOScatchCNCLM40_GridComp/GEOS_CatchCNCLM40GridComp.F90 b/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSland_GridComp/GEOScatchCN_GridComp/GEOScatchCNCLM40_GridComp/GEOS_CatchCNCLM40GridComp.F90 index 775e2de57..96639fe1d 100644 --- a/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSland_GridComp/GEOScatchCN_GridComp/GEOScatchCNCLM40_GridComp/GEOS_CatchCNCLM40GridComp.F90 +++ b/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSland_GridComp/GEOScatchCN_GridComp/GEOScatchCNCLM40_GridComp/GEOS_CatchCNCLM40GridComp.F90 @@ -344,7 +344,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddImportSpec(GC ,& - LONG_NAME = 'surface_downwelling_par_beam_flux',& + LONG_NAME = 'surface_downwelling_PAR_beam_flux',& UNITS = 'W m-2' ,& SHORT_NAME = 'DRPAR' ,& DIMS = MAPL_DimsTileOnly ,& @@ -353,7 +353,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddImportSpec(GC ,& - LONG_NAME = 'surface_downwelling_par_diffuse_flux',& + LONG_NAME = 'surface_downwelling_PAR_diffuse_flux',& UNITS = 'W m-2' ,& SHORT_NAME = 'DFPAR' ,& DIMS = MAPL_DimsTileOnly ,& @@ -445,7 +445,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddImportSpec(GC ,& - LONG_NAME = 'greeness_fraction' ,& + LONG_NAME = 'vegetation_greenness_fraction' ,& UNITS = '1' ,& SHORT_NAME = 'GRN' ,& DIMS = MAPL_DimsTileOnly ,& @@ -1161,7 +1161,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddInternalSpec(GC ,& - LONG_NAME = 'interception_reservoir_capac',& + LONG_NAME = 'vegetation_interception_water_storage',& UNITS = 'kg m-2' ,& SHORT_NAME = 'CAPAC' ,& FRIENDLYTO = trim(COMP_NAME) ,& @@ -1381,7 +1381,7 @@ subroutine SetServices ( GC, RC ) if (SNOW_ALBEDO_INFO == 1) then call MAPL_AddInternalSpec(GC ,& - LONG_NAME = 'effective_snow_albedo' ,& + LONG_NAME = 'effective_snow_reflectivity',& 
UNITS = '1' ,& SHORT_NAME = 'SNOWALB' ,& FRIENDLYTO = trim(COMP_NAME) ,& @@ -2172,7 +2172,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'runoff_flux' ,& + LONG_NAME = 'runoff_total_flux' ,& UNITS = 'kg m-2 s-1' ,& SHORT_NAME = 'RUNOFF' ,& DIMS = MAPL_DimsTileOnly ,& @@ -2208,7 +2208,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'snow_ice_evaporation_energy_flux',& + LONG_NAME = 'snow_ice_evaporation_energy_flux_on_land',& UNITS = 'W m-2' ,& SHORT_NAME = 'EVPICE' ,& DIMS = MAPL_DimsTileOnly ,& @@ -2227,7 +2227,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'totoal soil moisture' ,& + LONG_NAME = 'total_soil_moisture' ,& UNITS = 'kg m-2' ,& SHORT_NAME = 'WATSOI' ,& DIMS = MAPL_DimsTileOnly ,& @@ -2253,7 +2253,7 @@ subroutine SetServices ( GC, RC ) RC=STATUS ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'baseflow_flux' ,& + LONG_NAME = 'baseflow_flux_land' ,& UNITS = 'kg m-2 s-1' ,& SHORT_NAME = 'BASEFLOW' ,& DIMS = MAPL_DimsTileOnly ,& @@ -2389,7 +2389,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'ave_catchment_temp_incl_snw',& + LONG_NAME = 'surface_temperature_of_land_incl_snow',& UNITS = 'K' ,& SHORT_NAME = 'TPSURF' ,& DIMS = MAPL_DimsTileOnly ,& @@ -2398,7 +2398,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'temperature_top_snow_layer',& + LONG_NAME = 'surface_temperature_of_snow_on_land',& UNITS = 'K' ,& SHORT_NAME = 'TPSNOW' ,& DIMS = MAPL_DimsTileOnly ,& @@ -2407,7 +2407,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'temperature_unsaturated_zone',& + LONG_NAME = 'surface_temperature_of_unsaturated_zone',& UNITS = 'K' ,& SHORT_NAME = 'TPUNST' ,& DIMS = MAPL_DimsTileOnly ,& @@ -2416,7 +2416,7 @@ subroutine SetServices ( GC, RC ) 
VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'temperature_saturated_zone',& + LONG_NAME = 'surface_temperature_of_saturated_zone',& UNITS = 'K' ,& SHORT_NAME = 'TPSAT' ,& DIMS = MAPL_DimsTileOnly ,& @@ -2425,7 +2425,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'temperature_wilted_zone' ,& + LONG_NAME = 'surface_temperature_of_wilting_zone' ,& UNITS = 'K' ,& SHORT_NAME = 'TPWLT' ,& DIMS = MAPL_DimsTileOnly ,& @@ -2434,7 +2434,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'fractional_area_of_land_snowcover',& + LONG_NAME = 'fractional_area_of_snow_on_land',& UNITS = '1' ,& SHORT_NAME = 'ASNOW' ,& DIMS = MAPL_DimsTileOnly ,& @@ -2497,7 +2497,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'snow_depth_within_snow_covered_area_fraction' ,& + LONG_NAME = 'snow_depth_within_snow_covered_area_fraction_on_land' ,& UNITS = 'm' ,& SHORT_NAME = 'SNOWDP' ,& DIMS = MAPL_DimsTileOnly ,& @@ -2506,7 +2506,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_soil_wetness' ,& + LONG_NAME = 'soil_wetness_surface' ,& UNITS = '1' ,& SHORT_NAME = 'WET1' ,& DIMS = MAPL_DimsTileOnly ,& @@ -2515,7 +2515,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'root_zone_soil_wetness' ,& + LONG_NAME = 'soil_wetness_rootzone' ,& UNITS = '1' ,& SHORT_NAME = 'WET2' ,& DIMS = MAPL_DimsTileOnly ,& @@ -2524,7 +2524,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'ave_prof_soil__moisture' ,& + LONG_NAME = 'soil_wetness_profile' ,& UNITS = '1' ,& SHORT_NAME = 'WET3' ,& DIMS = MAPL_DimsTileOnly ,& @@ -2533,7 +2533,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'water_surface_layer' ,& + LONG_NAME = 'soil_moisture_surface' ,& UNITS = 
'm3 m-3' ,& SHORT_NAME = 'WCSF' ,& DIMS = MAPL_DimsTileOnly ,& @@ -2542,7 +2542,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'water_root_zone' ,& + LONG_NAME = 'soil_moisture_rootzone' ,& UNITS = 'm3 m-3' ,& SHORT_NAME = 'WCRZ' ,& DIMS = MAPL_DimsTileOnly ,& @@ -2551,7 +2551,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'water_ave_prof' ,& + LONG_NAME = 'soil_moisture_profile' ,& UNITS = 'm3 m-3' ,& SHORT_NAME = 'WCPR' ,& DIMS = MAPL_DimsTileOnly ,& @@ -2560,7 +2560,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'soil_temperatures_layer_1' ,& + LONG_NAME = 'soil_temperature_layer_1' ,& UNITS = 'K' ,& ! units now K, rreichle & borescan, 6 Nov 2020 SHORT_NAME = 'TP1' ,& DIMS = MAPL_DimsTileOnly ,& @@ -2569,7 +2569,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'soil_temperatures_layer_2' ,& + LONG_NAME = 'soil_temperature_layer_2' ,& UNITS = 'K' ,& ! units now K, rreichle & borescan, 6 Nov 2020 SHORT_NAME = 'TP2' ,& DIMS = MAPL_DimsTileOnly ,& @@ -2578,7 +2578,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'soil_temperatures_layer_3' ,& + LONG_NAME = 'soil_temperature_layer_3' ,& UNITS = 'K' ,& ! units now K, rreichle & borescan, 6 Nov 2020 SHORT_NAME = 'TP3' ,& DIMS = MAPL_DimsTileOnly ,& @@ -2587,7 +2587,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'soil_temperatures_layer_4' ,& + LONG_NAME = 'soil_temperature_layer_4' ,& UNITS = 'K' ,& ! units now K, rreichle & borescan, 6 Nov 2020 SHORT_NAME = 'TP4' ,& DIMS = MAPL_DimsTileOnly ,& @@ -2596,7 +2596,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'soil_temperatures_layer_5' ,& + LONG_NAME = 'soil_temperature_layer_5' ,& UNITS = 'K' ,& ! 
units now K, rreichle & borescan, 6 Nov 2020 SHORT_NAME = 'TP5' ,& DIMS = MAPL_DimsTileOnly ,& @@ -2605,7 +2605,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'soil_temperatures_layer_6' ,& + LONG_NAME = 'soil_temperature_layer_6' ,& UNITS = 'K' ,& ! units now K, rreichle & borescan, 6 Nov 2020 SHORT_NAME = 'TP6' ,& DIMS = MAPL_DimsTileOnly ,& @@ -2623,7 +2623,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_albedo_visible_beam',& + LONG_NAME = 'surface_reflectivity_visible_beam',& UNITS = '1' ,& SHORT_NAME = 'ALBVR' ,& DIMS = MAPL_DimsTileOnly ,& @@ -2632,7 +2632,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_albedo_visible_diffuse',& + LONG_NAME = 'surface_reflectivity_visible_diffuse',& UNITS = '1' ,& SHORT_NAME = 'ALBVF' ,& DIMS = MAPL_DimsTileOnly ,& @@ -2641,7 +2641,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_albedo_near_infrared_beam',& + LONG_NAME = 'surface_reflectivity_near_infrared_beam',& UNITS = '1' ,& SHORT_NAME = 'ALBNR' ,& DIMS = MAPL_DimsTileOnly ,& @@ -2650,7 +2650,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_albedo_near_infrared_diffuse',& + LONG_NAME = 'surface_reflectivity_near_infrared_diffuse',& UNITS = '1' ,& SHORT_NAME = 'ALBNF' ,& DIMS = MAPL_DimsTileOnly ,& @@ -2932,7 +2932,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'EVLAND', & - LONG_NAME = 'Evaporation_land', & + LONG_NAME = 'total_evapotranspiration_land', & UNITS = 'kg m-2 s-1', & DIMS = MAPL_DimsTileOnly, & VLOCATION = MAPL_VLocationNone, & @@ -2959,7 +2959,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'DRPARLAND', & - LONG_NAME = 'surface_downwelling_par_beam_flux', & + LONG_NAME = 
'surface_downwelling_PAR_beam_flux', & UNITS = 'W m-2', & DIMS = MAPL_DimsTileOnly, & VLOCATION = MAPL_VLocationNone, & @@ -2968,7 +2968,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'DFPARLAND', & - LONG_NAME = 'surface_downwelling_par_diffuse_flux', & + LONG_NAME = 'surface_downwelling_PAR_diffuse_flux', & UNITS = 'W m-2', & DIMS = MAPL_DimsTileOnly, & VLOCATION = MAPL_VLocationNone, & @@ -2987,7 +2987,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'SWNETSNOW', & - LONG_NAME = 'Net_shortwave_snow', & + LONG_NAME = 'Net_shortwave_flux_snow', & UNITS = 'W m-2', & DIMS = MAPL_DimsTileOnly, & VLOCATION = MAPL_VLocationNone, & @@ -3071,7 +3071,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'SWLAND', & - LONG_NAME = 'Net_shortwave_land', & + LONG_NAME = 'Net_shortwave_flux_land', & UNITS = 'W m-2', & DIMS = MAPL_DimsTileOnly, & VLOCATION = MAPL_VLocationNone, & @@ -3080,7 +3080,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'SWDOWNLAND', & - LONG_NAME = 'Incident_shortwave_land', & + LONG_NAME = 'Incident_shortwave_flux_land', & UNITS = 'W m-2', & DIMS = MAPL_DimsTileOnly, & VLOCATION = MAPL_VLocationNone, & @@ -3090,7 +3090,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'LWLAND', & - LONG_NAME = 'Net_longwave_land', & + LONG_NAME = 'Net_longwave_flux_land', & UNITS = 'W m-2', & DIMS = MAPL_DimsTileOnly, & VLOCATION = MAPL_VLocationNone, & @@ -3100,7 +3100,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'GHLAND', & - LONG_NAME = 'Ground_heating_land', & + LONG_NAME = 'Ground_heating_flux_land', & UNITS = 'W m-2', & DIMS = MAPL_DimsTileOnly, & VLOCATION = MAPL_VLocationNone, & @@ -3109,7 +3109,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'GHTSKIN', & - LONG_NAME = 'Ground_heating_skin_temp', & + LONG_NAME = 
'Ground_heating_flux_for_skin_temp_land', & UNITS = 'W m-2', & DIMS = MAPL_DimsTileOnly, & VLOCATION = MAPL_VLocationNone, & @@ -3128,7 +3128,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'TWLAND', & - LONG_NAME = 'Avail_water_storage_land', & + LONG_NAME = 'total_water_storage_land', & UNITS = 'kg m-2', & DIMS = MAPL_DimsTileOnly, & VLOCATION = MAPL_VLocationNone, & @@ -3174,7 +3174,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'SPLAND', & - LONG_NAME = 'rate_of_spurious_land_energy_source',& + LONG_NAME = 'rate_of_spurious_energy_source_land',& UNITS = 'W m-2', & DIMS = MAPL_DimsTileOnly, & VLOCATION = MAPL_VLocationNone, & @@ -3183,7 +3183,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'SPWATR', & - LONG_NAME = 'rate_of_spurious_land_water_source',& + LONG_NAME = 'rate_of_spurious_water_source_land',& UNITS = 'kg m-2 s-1', & DIMS = MAPL_DimsTileOnly, & VLOCATION = MAPL_VLocationNone, & @@ -3192,7 +3192,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'SPSNOW', & - LONG_NAME = 'rate_of_spurious_snow_energy',& + LONG_NAME = 'rate_of_spurious_snow_energy_source_land',& UNITS = 'W m-2', & DIMS = MAPL_DimsTileOnly, & VLOCATION = MAPL_VLocationNone, & diff --git a/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSland_GridComp/GEOScatch_GridComp/GEOS_CatchGridComp.F90 b/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSland_GridComp/GEOScatch_GridComp/GEOS_CatchGridComp.F90 index 99fd7f8d3..df829ab53 100644 --- a/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSland_GridComp/GEOScatch_GridComp/GEOS_CatchGridComp.F90 +++ b/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSland_GridComp/GEOScatch_GridComp/GEOS_CatchGridComp.F90 @@ -321,7 +321,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddImportSpec(GC ,& - LONG_NAME = 'surface_downwelling_par_beam_flux',& 
+ LONG_NAME = 'surface_downwelling_PAR_beam_flux',& UNITS = 'W m-2' ,& SHORT_NAME = 'DRPAR' ,& DIMS = MAPL_DimsTileOnly ,& @@ -330,7 +330,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddImportSpec(GC ,& - LONG_NAME = 'surface_downwelling_par_diffuse_flux',& + LONG_NAME = 'surface_downwelling_PAR_diffuse_flux',& UNITS = 'W m-2' ,& SHORT_NAME = 'DFPAR' ,& DIMS = MAPL_DimsTileOnly ,& @@ -411,7 +411,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddImportSpec(GC ,& - LONG_NAME = 'greeness_fraction' ,& + LONG_NAME = 'vegetation_greenness_fraction',& UNITS = '1' ,& SHORT_NAME = 'GRN' ,& DIMS = MAPL_DimsTileOnly ,& @@ -1112,7 +1112,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddInternalSpec(GC ,& - LONG_NAME = 'interception_reservoir_capac',& + LONG_NAME = 'vegetation_interception_water_storage',& UNITS = 'kg m-2' ,& SHORT_NAME = 'CAPAC' ,& FRIENDLYTO = trim(COMP_NAME) ,& @@ -1332,7 +1332,7 @@ subroutine SetServices ( GC, RC ) if (CATCH_INTERNAL_STATE%SNOW_ALBEDO_INFO == 1) then call MAPL_AddInternalSpec(GC ,& - LONG_NAME = 'effective_snow_albedo' ,& + LONG_NAME = 'effective_snow_reflectivity' ,& UNITS = '1' ,& SHORT_NAME = 'SNOWALB' ,& FRIENDLYTO = trim(COMP_NAME) ,& @@ -1564,7 +1564,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'runoff_flux' ,& + LONG_NAME = 'runoff_total_flux' ,& UNITS = 'kg m-2 s-1' ,& SHORT_NAME = 'RUNOFF' ,& DIMS = MAPL_DimsTileOnly ,& @@ -1619,7 +1619,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'totoal soil moisture' ,& + LONG_NAME = 'total_soil_moisture' ,& UNITS = 'kg m-2' ,& SHORT_NAME = 'WATSOI' ,& DIMS = MAPL_DimsTileOnly ,& @@ -1645,7 +1645,7 @@ subroutine SetServices ( GC, RC ) RC=STATUS ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'baseflow_flux' ,& + LONG_NAME = 'baseflow_flux_land' ,& UNITS = 'kg m-2 s-1' ,& SHORT_NAME = 'BASEFLOW' ,& DIMS = 
MAPL_DimsTileOnly ,& @@ -1780,7 +1780,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'ave_catchment_temp_incl_snw',& + LONG_NAME = 'surface_temperature_of_land_incl_snow',& UNITS = 'K' ,& SHORT_NAME = 'TPSURF' ,& DIMS = MAPL_DimsTileOnly ,& @@ -1789,7 +1789,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'temperature_top_snow_layer',& + LONG_NAME = 'surface_temperature_of_snow_on_land',& UNITS = 'K' ,& SHORT_NAME = 'TPSNOW' ,& DIMS = MAPL_DimsTileOnly ,& @@ -1798,7 +1798,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'temperature_unsaturated_zone',& + LONG_NAME = 'surface_temperature_of_unsaturated_zone',& UNITS = 'K' ,& SHORT_NAME = 'TPUNST' ,& DIMS = MAPL_DimsTileOnly ,& @@ -1807,7 +1807,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'temperature_saturated_zone',& + LONG_NAME = 'surface_temperature_of_saturated_zone',& UNITS = 'K' ,& SHORT_NAME = 'TPSAT' ,& DIMS = MAPL_DimsTileOnly ,& @@ -1816,7 +1816,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'temperature_wilted_zone' ,& + LONG_NAME = 'surface_temperature_of_wilting_zone' ,& UNITS = 'K' ,& SHORT_NAME = 'TPWLT' ,& DIMS = MAPL_DimsTileOnly ,& @@ -1825,7 +1825,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'fractional_area_of_land_snowcover',& + LONG_NAME = 'fractional_area_of_snow_on_land',& UNITS = '1' ,& SHORT_NAME = 'ASNOW' ,& DIMS = MAPL_DimsTileOnly ,& @@ -1888,7 +1888,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'snow_depth_within_snow_covered_area_fraction' ,& + LONG_NAME = 'snow_depth_within_snow_covered_area_fraction_on_land' ,& UNITS = 'm' ,& SHORT_NAME = 'SNOWDP' ,& DIMS = MAPL_DimsTileOnly ,& @@ -1897,7 +1897,7 @@ subroutine SetServices ( 
GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_soil_wetness' ,& + LONG_NAME = 'soil_wetness_surface' ,& UNITS = '1' ,& SHORT_NAME = 'WET1' ,& DIMS = MAPL_DimsTileOnly ,& @@ -1906,7 +1906,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'root_zone_soil_wetness' ,& + LONG_NAME = 'soil_wetness_rootzone' ,& UNITS = '1' ,& SHORT_NAME = 'WET2' ,& DIMS = MAPL_DimsTileOnly ,& @@ -1915,7 +1915,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'ave_prof_soil__moisture' ,& + LONG_NAME = 'soil_wetness_profile' ,& UNITS = '1' ,& SHORT_NAME = 'WET3' ,& DIMS = MAPL_DimsTileOnly ,& @@ -1924,7 +1924,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'water_surface_layer' ,& + LONG_NAME = 'soil_moisture_surface' ,& UNITS = 'm3 m-3' ,& SHORT_NAME = 'WCSF' ,& DIMS = MAPL_DimsTileOnly ,& @@ -1933,7 +1933,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'water_root_zone' ,& + LONG_NAME = 'soil_moisture_rootzone' ,& UNITS = 'm3 m-3' ,& SHORT_NAME = 'WCRZ' ,& DIMS = MAPL_DimsTileOnly ,& @@ -1942,8 +1942,8 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'water_ave_prof' ,& - UNITS = 'm3 m-3' ,& + LONG_NAME = 'soil_moisture_profile' ,& + UNITS = 'm3 m-3' ,& SHORT_NAME = 'WCPR' ,& DIMS = MAPL_DimsTileOnly ,& VLOCATION = MAPL_VLocationNone ,& @@ -1951,7 +1951,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'soil_temperatures_layer_1' ,& + LONG_NAME = 'soil_temperature_layer_1' ,& UNITS = 'K' ,& ! 
units now K, rreichle & borescan, 6 Nov 2020 SHORT_NAME = 'TP1' ,& DIMS = MAPL_DimsTileOnly ,& @@ -1960,7 +1960,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'soil_temperatures_layer_2' ,& + LONG_NAME = 'soil_temperature_layer_2' ,& UNITS = 'K' ,& ! units now K, rreichle & borescan, 6 Nov 2020 SHORT_NAME = 'TP2' ,& DIMS = MAPL_DimsTileOnly ,& @@ -1969,7 +1969,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'soil_temperatures_layer_3' ,& + LONG_NAME = 'soil_temperature_layer_3' ,& UNITS = 'K' ,& ! units now K, rreichle & borescan, 6 Nov 2020 SHORT_NAME = 'TP3' ,& DIMS = MAPL_DimsTileOnly ,& @@ -1978,7 +1978,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'soil_temperatures_layer_4' ,& + LONG_NAME = 'soil_temperature_layer_4' ,& UNITS = 'K' ,& ! units now K, rreichle & borescan, 6 Nov 2020 SHORT_NAME = 'TP4' ,& DIMS = MAPL_DimsTileOnly ,& @@ -1987,7 +1987,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'soil_temperatures_layer_5' ,& + LONG_NAME = 'soil_temperature_layer_5' ,& UNITS = 'K' ,& ! units now K, rreichle & borescan, 6 Nov 2020 SHORT_NAME = 'TP5' ,& DIMS = MAPL_DimsTileOnly ,& @@ -1996,7 +1996,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'soil_temperatures_layer_6' ,& + LONG_NAME = 'soil_temperature_layer_6' ,& UNITS = 'K' ,& ! 
units now K, rreichle & borescan, 6 Nov 2020 SHORT_NAME = 'TP6' ,& DIMS = MAPL_DimsTileOnly ,& @@ -2014,7 +2014,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_albedo_visible_beam',& + LONG_NAME = 'surface_reflectivity_visible_beam',& UNITS = '1' ,& SHORT_NAME = 'ALBVR' ,& DIMS = MAPL_DimsTileOnly ,& @@ -2023,7 +2023,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_albedo_visible_diffuse',& + LONG_NAME = 'surface_reflectivity_visible_diffuse',& UNITS = '1' ,& SHORT_NAME = 'ALBVF' ,& DIMS = MAPL_DimsTileOnly ,& @@ -2032,7 +2032,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_albedo_near_infrared_beam',& + LONG_NAME = 'surface_reflectivity_near_infrared_beam',& UNITS = '1' ,& SHORT_NAME = 'ALBNR' ,& DIMS = MAPL_DimsTileOnly ,& @@ -2041,7 +2041,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_albedo_near_infrared_diffuse',& + LONG_NAME = 'surface_reflectivity_near_infrared_diffuse',& UNITS = '1' ,& SHORT_NAME = 'ALBNF' ,& DIMS = MAPL_DimsTileOnly ,& @@ -2323,7 +2323,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'EVLAND', & - LONG_NAME = 'Total_evapotranspiration_land', & + LONG_NAME = 'total_evapotranspiration_land', & UNITS = 'kg m-2 s-1', & DIMS = MAPL_DimsTileOnly, & VLOCATION = MAPL_VLocationNone, & @@ -2350,7 +2350,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'DRPARLAND', & - LONG_NAME = 'surface_downwelling_par_beam_flux', & + LONG_NAME = 'surface_downwelling_PAR_beam_flux', & UNITS = 'W m-2', & DIMS = MAPL_DimsTileOnly, & VLOCATION = MAPL_VLocationNone, & @@ -2359,7 +2359,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'DFPARLAND', & - LONG_NAME = 'surface_downwelling_par_diffuse_flux', & + LONG_NAME = 
'surface_downwelling_PAR_diffuse_flux', & UNITS = 'W m-2', & DIMS = MAPL_DimsTileOnly, & VLOCATION = MAPL_VLocationNone, & @@ -2378,7 +2378,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'SWNETSNOW', & - LONG_NAME = 'Net_shortwave_snow', & + LONG_NAME = 'Net_shortwave_flux_snow', & UNITS = 'W m-2', & DIMS = MAPL_DimsTileOnly, & VLOCATION = MAPL_VLocationNone, & @@ -2462,7 +2462,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'SWLAND', & - LONG_NAME = 'Net_shortwave_land', & + LONG_NAME = 'Net_shortwave_flux_land', & UNITS = 'W m-2', & DIMS = MAPL_DimsTileOnly, & VLOCATION = MAPL_VLocationNone, & @@ -2471,7 +2471,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'SWDOWNLAND', & - LONG_NAME = 'Incident_shortwave_land', & + LONG_NAME = 'Incident_shortwave_flux_land', & UNITS = 'W m-2', & DIMS = MAPL_DimsTileOnly, & VLOCATION = MAPL_VLocationNone, & @@ -2481,7 +2481,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'LWLAND', & - LONG_NAME = 'Net_longwave_land', & + LONG_NAME = 'Net_longwave_flux_land', & UNITS = 'W m-2', & DIMS = MAPL_DimsTileOnly, & VLOCATION = MAPL_VLocationNone, & @@ -2491,7 +2491,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'GHLAND', & - LONG_NAME = 'Ground_heating_land', & + LONG_NAME = 'Ground_heating_flux_land', & UNITS = 'W m-2', & DIMS = MAPL_DimsTileOnly, & VLOCATION = MAPL_VLocationNone, & @@ -2500,7 +2500,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'GHTSKIN', & - LONG_NAME = 'Ground_heating_skin_temp', & + LONG_NAME = 'Ground_heating_flux_for_skin_temp_land', & UNITS = 'W m-2', & DIMS = MAPL_DimsTileOnly, & VLOCATION = MAPL_VLocationNone, & @@ -2519,7 +2519,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'TWLAND', & - LONG_NAME = 'Avail_water_storage_land', & + LONG_NAME = 'total_water_storage_land', & 
UNITS = 'kg m-2', & DIMS = MAPL_DimsTileOnly, & VLOCATION = MAPL_VLocationNone, & @@ -2921,7 +2921,7 @@ subroutine Initialize ( GC, IMPORT, EXPORT, CLOCK, RC ) call MAPL_TimerOn(MAPL,"INITIALIZE") - ! retrieve interal state + ! retrieve internal state call ESMF_UserCompGetInternalState ( GC, 'CatchInternal',wrap,status ) VERIFY_(STATUS) @@ -5159,15 +5159,15 @@ subroutine Driver ( RC ) QC(:,FSNW) = QSAT(:,FSNW) -! -------------------------------------------------------------------------- + ! -------------------------------------------------------------------------- ! get total solid precip ! -------------------------------------------------------------------------- SLDTOT = SNO+ICE+FRZR -! protect the forcing from unsavory values, as per practice in offline -! driver -! -------------------------------------------------------------------------- + ! protect the forcing from unsavory values, as per practice in offline + ! driver + ! -------------------------------------------------------------------------- _ASSERT(count(PLS<0.)==0,'needs informative message') _ASSERT(count(PCU<0.)==0,'needs informative message') diff --git a/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSland_GridComp/GEOSroute_GridComp/GEOS_RouteGridComp.F90 b/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSland_GridComp/GEOSroute_GridComp/GEOS_RouteGridComp.F90 index ad2be4db2..4a28c0da2 100644 --- a/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSland_GridComp/GEOSroute_GridComp/GEOS_RouteGridComp.F90 +++ b/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSland_GridComp/GEOSroute_GridComp/GEOS_RouteGridComp.F90 @@ -178,7 +178,7 @@ subroutine SetServices ( GC, RC ) ! 
----------------------------------------------------------- call MAPL_AddImportSpec(GC, & - LONG_NAME = 'runoff_flux' ,& + LONG_NAME = 'runoff_total_flux' ,& UNITS = 'kg m-2 s-1' ,& SHORT_NAME = 'RUNOFF' ,& DIMS = MAPL_DimsTileOnly ,& diff --git a/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSland_GridComp/GEOSvegdyn_GridComp/GEOS_VegdynGridComp.F90 b/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSland_GridComp/GEOSvegdyn_GridComp/GEOS_VegdynGridComp.F90 index 83e575269..063d9ab13 100644 --- a/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSland_GridComp/GEOSvegdyn_GridComp/GEOS_VegdynGridComp.F90 +++ b/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSland_GridComp/GEOSvegdyn_GridComp/GEOS_VegdynGridComp.F90 @@ -217,7 +217,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC ,& SHORT_NAME = 'GRN' ,& - LONG_NAME = 'greeness_fraction' ,& + LONG_NAME = 'vegetation_greenness_fraction' ,& UNITS = '1' ,& DIMS = MAPL_DimsTileOnly ,& VLOCATION = MAPL_VLocationNone ,& diff --git a/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSlandice_GridComp/GEOS_LandIceGridComp.F90 b/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSlandice_GridComp/GEOS_LandIceGridComp.F90 index 96841911a..a7725436a 100644 --- a/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSlandice_GridComp/GEOS_LandIceGridComp.F90 +++ b/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSlandice_GridComp/GEOS_LandIceGridComp.F90 @@ -198,7 +198,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_albedo_for_visible_beam', & + LONG_NAME = 'surface_reflectivity_for_visible_beam', & UNITS = '1', & SHORT_NAME = 'ALBVR', & DIMS = MAPL_DimsTileOnly, & @@ -207,7 +207,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_albedo_for_visible_diffuse',& + LONG_NAME = 
'surface_reflectivity_for_visible_diffuse',& UNITS = '1', & SHORT_NAME = 'ALBVF', & DIMS = MAPL_DimsTileOnly, & @@ -216,7 +216,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_albedo_for_near_infrared_beam', & + LONG_NAME = 'surface_reflectivity_for_near_infrared_beam', & UNITS = '1', & SHORT_NAME = 'ALBNR', & DIMS = MAPL_DimsTileOnly, & @@ -225,7 +225,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_albedo_for_near_infrared_diffuse', & + LONG_NAME = 'surface_reflectivity_for_near_infrared_diffuse', & UNITS = '1', & SHORT_NAME = 'ALBNF', & DIMS = MAPL_DimsTileOnly, & @@ -235,7 +235,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'TST', & - LONG_NAME = 'surface_skin_temperature', & + LONG_NAME = 'surface_temperature', & UNITS = 'K', & DIMS = MAPL_DimsTileOnly, & VLOCATION = MAPL_VLocationNone, & @@ -397,7 +397,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'fractional_area_of_glaciated_surface_snowcover',& + LONG_NAME = 'fractional_snow_covered_area_of_glaciated_surface',& UNITS = '1' ,& SHORT_NAME = 'ASNOW_GL' ,& DIMS = MAPL_DimsTileOnly ,& @@ -485,7 +485,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'top_snow_layer_mass_change_due_to_sub_con', & + LONG_NAME = 'top_snow_layer_mass_change_due_to_sublimation_and_condensation', & UNITS = 'kg m-2 s-1' ,& SHORT_NAME = 'WESNSC' ,& DIMS = MAPL_DimsTileOnly ,& @@ -569,7 +569,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'contribution_to_smb_from_refreezed_rain_over_bare_ice', & + LONG_NAME = 'contribution_to_surface_mass_balance_from_rain_frozen_onto_bare_ice', & UNITS = 'kg m-2 s-1' ,& SHORT_NAME = 'RAINRFZ' ,& DIMS = MAPL_DimsTileOnly ,& @@ -578,7 +578,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call 
MAPL_AddExportSpec(GC, & - LONG_NAME = 'snowmelt_flux' ,& + LONG_NAME = 'snow_melt_flux' ,& UNITS = 'kg m-2 s-1' ,& SHORT_NAME = 'SMELT' ,& DIMS = MAPL_DimsTileOnly ,& @@ -587,7 +587,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'icemelt_flux' ,& + LONG_NAME = 'ice_melt_flux' ,& UNITS = 'kg m-2 s-1' ,& SHORT_NAME = 'IMELT' ,& DIMS = MAPL_DimsTileOnly ,& @@ -596,7 +596,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'snow_broadband_albedo', & + LONG_NAME = 'snow_broadband_reflectivity', & UNITS = '1' ,& SHORT_NAME = 'SNOWALB' ,& DIMS = MAPL_DimsTileOnly ,& @@ -605,7 +605,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'aggregated_snow_ice_broadband_albedo', & + LONG_NAME = 'aggregated_snow_ice_broadband_reflectivity', & UNITS = '1' ,& SHORT_NAME = 'SNICEALB' ,& DIMS = MAPL_DimsTileOnly ,& @@ -623,7 +623,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'melt_water_content', & + LONG_NAME = 'snowpack_meltwater_content', & UNITS = 'kg m-2' ,& SHORT_NAME = 'MELTWTRCONT' ,& DIMS = MAPL_DimsTileOnly ,& @@ -641,7 +641,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'runoff_flux' ,& + LONG_NAME = 'runoff_total_flux' ,& UNITS = 'kg m-2 s-1' ,& SHORT_NAME = 'RUNOFF' ,& DIMS = MAPL_DimsTileOnly ,& @@ -848,7 +848,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'Ground_heating_for_tskin' ,& + LONG_NAME = 'glacier_ice_heating_flux' ,& UNITS = 'W m-2' ,& SHORT_NAME = 'GHTSKIN' ,& DIMS = MAPL_DimsTileOnly ,& diff --git a/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSsaltwater_GridComp/GEOS_CICE4ColumnPhysGridComp.F90 b/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSsaltwater_GridComp/GEOS_CICE4ColumnPhysGridComp.F90 index bc5a7c044..b6fbdea25 
100644 --- a/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSsaltwater_GridComp/GEOS_CICE4ColumnPhysGridComp.F90 +++ b/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSsaltwater_GridComp/GEOS_CICE4ColumnPhysGridComp.F90 @@ -181,7 +181,7 @@ subroutine SetServices ( GC, RC ) _RC ) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_albedo_for_visible_beam', & + LONG_NAME = 'surface_reflectivity_for_visible_beam', & UNITS = '1', & SHORT_NAME = 'ALBVR', & DIMS = MAPL_DimsTileOnly, & @@ -189,7 +189,7 @@ subroutine SetServices ( GC, RC ) _RC ) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_albedo_for_visible_diffuse',& + LONG_NAME = 'surface_reflectivity_for_visible_diffuse',& UNITS = '1', & SHORT_NAME = 'ALBVF', & DIMS = MAPL_DimsTileOnly, & @@ -197,7 +197,7 @@ subroutine SetServices ( GC, RC ) _RC ) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_albedo_for_near_infrared_beam', & + LONG_NAME = 'surface_reflectivity_for_near_infrared_beam', & UNITS = '1', & SHORT_NAME = 'ALBNR', & DIMS = MAPL_DimsTileOnly, & @@ -205,7 +205,7 @@ subroutine SetServices ( GC, RC ) _RC ) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_albedo_for_near_infrared_diffuse', & + LONG_NAME = 'surface_reflectivity_for_near_infrared_diffuse', & UNITS = '1', & SHORT_NAME = 'ALBNF', & DIMS = MAPL_DimsTileOnly, & @@ -1266,7 +1266,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'SIALB' ,& - LONG_NAME = 'broad_band_sea_ice_albedo' ,& + LONG_NAME = 'broad_band_sea_ice_reflectivity' ,& UNITS = '1' ,& DIMS = MAPL_DimsTileOnly ,& VLOCATION = MAPL_VLocationNone ,& @@ -1388,7 +1388,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'ialb_CMIP5' ,& - LONG_NAME = 'bare_sea_ice_albedo' ,& + LONG_NAME = 'bare_sea_ice_reflectivity' ,& UNITS = '1' ,& DIMS = MAPL_DimsTileOnly ,& VLOCATION = MAPL_VLocationNone ,& @@ -1527,7 +1527,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & 
SHORT_NAME = 'ALBIN' ,& - LONG_NAME = 'ice_surface_albedo_over_ice_categories' ,& + LONG_NAME = 'ice_surface_reflectivity_over_ice_categories' ,& UNITS = '1' ,& DIMS = MAPL_DimsTileOnly ,& UNGRIDDED_DIMS = (/NUM_ICE_CATEGORIES/) ,& @@ -1536,7 +1536,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'ALBSN' ,& - LONG_NAME = 'snow_surface_albedo_over_ice_categories' ,& + LONG_NAME = 'snow_surface_reflectivity_over_ice_categories' ,& UNITS = '1' ,& DIMS = MAPL_DimsTileOnly ,& UNGRIDDED_DIMS = (/NUM_ICE_CATEGORIES/) ,& diff --git a/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSsaltwater_GridComp/GEOS_OpenWaterGridComp.F90 b/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSsaltwater_GridComp/GEOS_OpenWaterGridComp.F90 index 436045ac7..ce89b0429 100644 --- a/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSsaltwater_GridComp/GEOS_OpenWaterGridComp.F90 +++ b/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSsaltwater_GridComp/GEOS_OpenWaterGridComp.F90 @@ -192,7 +192,7 @@ subroutine SetServices ( GC, RC ) _RC) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_albedo_for_visible_beam', & + LONG_NAME = 'surface_reflectivity_for_visible_beam', & UNITS = '1', & SHORT_NAME = 'ALBVR', & DIMS = MAPL_DimsTileOnly, & @@ -200,7 +200,7 @@ subroutine SetServices ( GC, RC ) _RC) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_albedo_for_visible_diffuse',& + LONG_NAME = 'surface_reflectivity_for_visible_diffuse',& UNITS = '1', & SHORT_NAME = 'ALBVF', & DIMS = MAPL_DimsTileOnly, & @@ -208,7 +208,7 @@ subroutine SetServices ( GC, RC ) _RC) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_albedo_for_near_infrared_beam', & + LONG_NAME = 'surface_reflectivity_for_near_infrared_beam', & UNITS = '1', & SHORT_NAME = 'ALBNR', & DIMS = MAPL_DimsTileOnly, & @@ -216,7 +216,7 @@ subroutine SetServices ( GC, RC ) _RC) call MAPL_AddExportSpec(GC, & - LONG_NAME = 
'surface_albedo_for_near_infrared_diffuse', & + LONG_NAME = 'surface_reflectivity_for_near_infrared_diffuse', & UNITS = '1', & SHORT_NAME = 'ALBNF', & DIMS = MAPL_DimsTileOnly, & diff --git a/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSsaltwater_GridComp/GEOS_SaltWaterGridComp.F90 b/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSsaltwater_GridComp/GEOS_SaltWaterGridComp.F90 index fa66715ec..de266edd7 100644 --- a/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSsaltwater_GridComp/GEOS_SaltWaterGridComp.F90 +++ b/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSsaltwater_GridComp/GEOS_SaltWaterGridComp.F90 @@ -183,7 +183,7 @@ subroutine SetServices ( GC, RC ) _RC) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_albedo_for_visible_beam', & + LONG_NAME = 'surface_reflectivity_for_visible_beam', & UNITS = '1', & SHORT_NAME = 'ALBVR', & DIMS = MAPL_DimsTileOnly, & @@ -191,7 +191,7 @@ subroutine SetServices ( GC, RC ) _RC) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_albedo_for_visible_diffuse',& + LONG_NAME = 'surface_reflectivity_for_visible_diffuse',& UNITS = '1', & SHORT_NAME = 'ALBVF', & DIMS = MAPL_DimsTileOnly, & @@ -199,7 +199,7 @@ subroutine SetServices ( GC, RC ) _RC) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_albedo_for_near_infrared_beam', & + LONG_NAME = 'surface_reflectivity_for_near_infrared_beam', & UNITS = '1', & SHORT_NAME = 'ALBNR', & DIMS = MAPL_DimsTileOnly, & @@ -207,7 +207,7 @@ subroutine SetServices ( GC, RC ) _RC) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_albedo_for_near_infrared_diffuse', & + LONG_NAME = 'surface_reflectivity_for_near_infrared_diffuse', & UNITS = '1', & SHORT_NAME = 'ALBNF', & DIMS = MAPL_DimsTileOnly, & diff --git a/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSsaltwater_GridComp/GEOS_SeaiceInterfaceGridComp.F90 
b/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSsaltwater_GridComp/GEOS_SeaiceInterfaceGridComp.F90 index 33ca3bc35..9a28956cc 100644 --- a/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSsaltwater_GridComp/GEOS_SeaiceInterfaceGridComp.F90 +++ b/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSsaltwater_GridComp/GEOS_SeaiceInterfaceGridComp.F90 @@ -164,7 +164,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_albedo_for_visible_beam', & + LONG_NAME = 'surface_reflectivity_for_visible_beam', & UNITS = '1', & SHORT_NAME = 'ALBVR', & DIMS = MAPL_DimsTileOnly, & @@ -173,7 +173,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_albedo_for_visible_diffuse',& + LONG_NAME = 'surface_reflectivity_for_visible_diffuse',& UNITS = '1', & SHORT_NAME = 'ALBVF', & DIMS = MAPL_DimsTileOnly, & @@ -182,7 +182,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_albedo_for_near_infrared_beam', & + LONG_NAME = 'surface_reflectivity_for_near_infrared_beam', & UNITS = '1', & SHORT_NAME = 'ALBNR', & DIMS = MAPL_DimsTileOnly, & @@ -191,7 +191,7 @@ subroutine SetServices ( GC, RC ) VERIFY_(STATUS) call MAPL_AddExportSpec(GC, & - LONG_NAME = 'surface_albedo_for_near_infrared_diffuse', & + LONG_NAME = 'surface_reflectivity_for_near_infrared_diffuse', & UNITS = '1', & SHORT_NAME = 'ALBNF', & DIMS = MAPL_DimsTileOnly, & @@ -1283,7 +1283,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'ALBIN' ,& - LONG_NAME = 'ice_surface_albedo_over_ice_categories' ,& + LONG_NAME = 'ice_surface_reflectivity_over_ice_categories' ,& UNITS = '1' ,& DIMS = MAPL_DimsTileOnly ,& UNGRIDDED_DIMS = (/NUM_ICE_CATEGORIES/) ,& @@ -1293,7 +1293,7 @@ subroutine SetServices ( GC, RC ) call MAPL_AddExportSpec(GC, & SHORT_NAME = 'ALBSN' ,& - LONG_NAME = 
'snow_surface_albedo_over_ice_categories' ,& + LONG_NAME = 'snow_surface_reflectivity_over_ice_categories' ,& UNITS = '1' ,& DIMS = MAPL_DimsTileOnly ,& UNGRIDDED_DIMS = (/NUM_ICE_CATEGORIES/) ,& diff --git a/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSsaltwater_GridComp/GEOS_SimpleSeaiceGridComp.F90 b/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSsaltwater_GridComp/GEOS_SimpleSeaiceGridComp.F90 index a86e02fa4..3833e8698 100644 --- a/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSsaltwater_GridComp/GEOS_SimpleSeaiceGridComp.F90 +++ b/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/GEOSsaltwater_GridComp/GEOS_SimpleSeaiceGridComp.F90 @@ -135,33 +135,33 @@ subroutine SetServices ( GC, RC ) VLOCATION = MAPL_VLocationNone, & _RC) - call MAPL_AddExportSpec(GC, & + call MAPL_AddExportSpec(GC, & SHORT_NAME = 'ALBVR', & - LONG_NAME = 'surface_albedo_for_visible_beam', & + LONG_NAME = 'surface_reflectivity_for_visible_beam', & UNITS = '1', & DIMS = MAPL_DimsTileOnly, & VLOCATION = MAPL_VLocationNone, & _RC) - call MAPL_AddExportSpec(GC, & + call MAPL_AddExportSpec(GC, & SHORT_NAME = 'ALBVF', & - LONG_NAME = 'surface_albedo_for_visible_diffuse',& + LONG_NAME = 'surface_reflectivity_for_visible_diffuse', & UNITS = '1', & DIMS = MAPL_DimsTileOnly, & VLOCATION = MAPL_VLocationNone, & _RC) - call MAPL_AddExportSpec(GC, & + call MAPL_AddExportSpec(GC, & SHORT_NAME = 'ALBNR', & - LONG_NAME = 'surface_albedo_for_near_infrared_beam', & + LONG_NAME = 'surface_reflectivity_for_near_infrared_beam', & UNITS = '1', & DIMS = MAPL_DimsTileOnly, & VLOCATION = MAPL_VLocationNone, & _RC) - call MAPL_AddExportSpec(GC, & + call MAPL_AddExportSpec(GC, & SHORT_NAME = 'ALBNF', & - LONG_NAME = 'surface_albedo_for_near_infrared_diffuse', & + LONG_NAME = 'surface_reflectivity_for_near_infrared_diffuse', & UNITS = '1', & DIMS = MAPL_DimsTileOnly, & VLOCATION = MAPL_VLocationNone, & diff --git 
a/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/Utils/mk_restarts/CatchmentRst.F90 b/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/Utils/mk_restarts/CatchmentRst.F90 index 3af5915f7..4d07ddf7e 100644 --- a/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/Utils/mk_restarts/CatchmentRst.F90 +++ b/GEOSagcm_GridComp/GEOSphysics_GridComp/GEOSsurface_GridComp/Utils/mk_restarts/CatchmentRst.F90 @@ -577,7 +577,7 @@ subroutine add_bcs_to_rst(this, surflay, DataDir, rc) call MAPL_VarRead ( CatchFmt ,'SNOWALB', this%snowalb, __RC__) if ( .not. this%meta%has_variable('SNOWALB')) then var = Variable(type=pFIO_REAL32, dimensions='tile') - call var%add_attribute('long_name', 'snow_albedo') + call var%add_attribute('long_name', 'snow_reflectivity') call var%add_attribute('units', '1') call this%meta%add_variable('SNOWALB', var) endif diff --git a/GEOSagcm_GridComp/GEOSsuperdyn_GridComp/CMakeLists.txt b/GEOSagcm_GridComp/GEOSsuperdyn_GridComp/CMakeLists.txt index 968ceaf51..c59826405 100644 --- a/GEOSagcm_GridComp/GEOSsuperdyn_GridComp/CMakeLists.txt +++ b/GEOSagcm_GridComp/GEOSsuperdyn_GridComp/CMakeLists.txt @@ -20,6 +20,8 @@ if (EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/GEOS_SuperdynGridComp.F90) SUBCOMPONENTS ${alldirs} DEPENDENCIES MAPL GEOS_Shared ESMF::ESMF) + target_compile_definitions (${this} PRIVATE $<$:HAS_GIGATRAJ>) + else () esma_add_subdirectories (${alldirs}) diff --git a/GEOSagcm_GridComp/GEOSsuperdyn_GridComp/GEOS_SuperdynGridComp.F90 b/GEOSagcm_GridComp/GEOSsuperdyn_GridComp/GEOS_SuperdynGridComp.F90 index 670b5fa9c..9d10e6eaf 100644 --- a/GEOSagcm_GridComp/GEOSsuperdyn_GridComp/GEOS_SuperdynGridComp.F90 +++ b/GEOSagcm_GridComp/GEOSsuperdyn_GridComp/GEOS_SuperdynGridComp.F90 @@ -274,6 +274,15 @@ subroutine SetServices ( GC, RC ) RC=STATUS ) VERIFY_(STATUS) +#ifdef HAS_GIGATRAJ + call MAPL_AddExportSpec ( GC , & + SHORT_NAME = 'ZL', & + CHILD_ID = DYN, & + RC=STATUS ) + VERIFY_(STATUS) +#endif + + call MAPL_AddExportSpec ( GC , & 
SHORT_NAME = 'PREF', & CHILD_ID = DYN, & diff --git a/GEOSdataatm_GridComp/CMakeLists.txt b/GEOSdataatm_GridComp/CMakeLists.txt index 10298c385..407031c05 100644 --- a/GEOSdataatm_GridComp/CMakeLists.txt +++ b/GEOSdataatm_GridComp/CMakeLists.txt @@ -11,6 +11,6 @@ esma_add_library (${this} target_compile_definitions (${this} PRIVATE USE_CICE USE_R8) install( - FILES CORE_NYF_Data_AtmForcings_ExtData.yaml + FILES JRA55-DO_DataAtm_Forcings_ExtData.yaml CORE_NYF_Data_AtmForcings_ExtData.yaml DESTINATION etc ) diff --git a/GEOSdataatm_GridComp/GEOS_DataAtmGridComp.F90 b/GEOSdataatm_GridComp/GEOS_DataAtmGridComp.F90 index 551aadeb9..95964a7e4 100644 --- a/GEOSdataatm_GridComp/GEOS_DataAtmGridComp.F90 +++ b/GEOSdataatm_GridComp/GEOS_DataAtmGridComp.F90 @@ -202,6 +202,7 @@ subroutine SetServices ( GC, RC ) LONG_NAME = 'surface_temperature', & UNITS = 'K', & DIMS = MAPL_DimsHorzOnly, & + DEFAULT = -1000.0, & VLOCATION = MAPL_VLocationNone, __RC__) call MAPL_AddInternalSpec(GC, & @@ -270,7 +271,12 @@ subroutine SetServices ( GC, RC ) ! This call is needed only when we use ReadForcing. ! If we switch to use ExtData, next line has be commented out - call MAPL_TerminateImport ( GC, ALL=.true., __RC__ ) + if (DO_CICE_THERMO == 2) then + call MAPL_TerminateImport ( GC, SHORT_NAMES=['SURFSTATE'], & + CHILD_IDS=[SURF], __RC__ ) + else + call MAPL_TerminateImport ( GC, ALL=.true., __RC__ ) + endif call MAPL_GenericSetServices ( GC, __RC__) @@ -419,6 +425,7 @@ subroutine RUN ( GC, IMPORT, EXPORT, CLOCK, RC ) real, dimension(:,:), pointer :: ALW, BLW, SPEED, DISCHARGE, rPCU, rPLS, sSNO real, dimension(:,:), pointer :: CT, CQ, CM, SH, EVAP, TAUX, TAUY, Tskin, lwdnsrf real, dimension(:,:), pointer :: DRPARN, DFPARN, DRNIRN, DFNIRN, DRUVRN, DFUVRN + real, dimension(:,:), pointer :: DSH, DEVAP real, dimension(:,:), pointer :: EMISSRF real, allocatable, dimension(:,:) :: ZTH @@ -474,7 +481,7 @@ subroutine RUN ( GC, IMPORT, EXPORT, CLOCK, RC ) ! 
real LATSO, LONSO real, parameter :: HW_hack = 2. - logical :: firsttime = .true. + logical :: firsttime = .false. real :: TAU_TS real :: DT @@ -609,11 +616,13 @@ subroutine RUN ( GC, IMPORT, EXPORT, CLOCK, RC ) call SetVarToZero('DEWL', __RC__) call SetVarToZero('FRSL', __RC__) + call MAPL_GetPointer(SurfImport, DSH, 'DSH', __RC__) + call MAPL_GetPointer(SurfImport, DEVAP, 'DEVAP', __RC__) ! these should be set to 0 (for now) - call SetVarToZero('DSH', __RC__) + !call SetVarToZero('DSH', __RC__) call SetVarToZero('DFU', __RC__) call SetVarToZero('DFV', __RC__) - call SetVarToZero('DEVAP', __RC__) + !call SetVarToZero('DEVAP', __RC__) call SetVarToZero('DDEWL', __RC__) call SetVarToZero('DFRSL', __RC__) @@ -653,7 +662,10 @@ subroutine RUN ( GC, IMPORT, EXPORT, CLOCK, RC ) !------------------------------------------------------ call ESMF_ClockGet(CLOCK, TIMESTEP=DELT, __RC__) - DELT = DELT * NINT((86400./DT)) ! emulate daily Solar + ! the line below only works for daily forcing e..g. CORE I + ! for JRA55-DO or any dataset at higher frequency, this line makes SW much + ! higher than what data prescribed + !DELT = DELT * NINT((86400./DT)) ! emulate daily Solar call MAPL_SunGetInsolation(LONS, LATS, & ORBIT, ZTH, SLR, & @@ -711,6 +723,10 @@ subroutine RUN ( GC, IMPORT, EXPORT, CLOCK, RC ) call MAPL_GetPointer(SurfImport, ALW, 'ALW', __RC__) call MAPL_GetPointer(SurfImport, BLW, 'BLW', __RC__) + if(any(Tskin<0.0)) then !only when DATAATM restart is bootstrapped + firsttime = .true. + end if + if (firsttime) then firsttime = .false. Tskin = TA @@ -768,6 +784,10 @@ subroutine RUN ( GC, IMPORT, EXPORT, CLOCK, RC ) TAUX = CM * (Uskin - Uair) TAUY = CM * (Vskin - Vair) + ! these derivatives are important for sea ice + DSH = CT !* MAPL_CP (MAPL_CP got multiplied in Surf) + DEVAP = CQ + 101 format (A, e20.12, 3I3.2) !!! 
if (mapl_am_i_root()) PRINT*, __FILE__, __LINE__ @@ -1070,7 +1090,7 @@ subroutine Finalize ( gc, import, export, clock, rc ) call MAPL_TimerOn(MAPL,"TOTAL" ) call MAPL_TimerOn(MAPL,"FINALIZE") - if (DO_CICE_THERMO /= 0) call dealloc_column_physics( MAPL_AM_I_Root(), Iam ) + if (DO_CICE_THERMO == 1) call dealloc_column_physics( MAPL_AM_I_Root(), Iam ) call MAPL_TimerOff(MAPL,"FINALIZE") call MAPL_TimerOff(MAPL,"TOTAL" ) diff --git a/GEOSdataatm_GridComp/JRA55-DO_DataAtm_Forcings_ExtData.yaml b/GEOSdataatm_GridComp/JRA55-DO_DataAtm_Forcings_ExtData.yaml new file mode 100755 index 000000000..5da6f8658 --- /dev/null +++ b/GEOSdataatm_GridComp/JRA55-DO_DataAtm_Forcings_ExtData.yaml @@ -0,0 +1,106 @@ +Collections: + psl_1_5_0: + template: ExtData/JRA55-DO/v1-5-0/psl/psl_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-5-0_gr_%y401010000-%y412312100.nc + valid_range: "1958-01-01T00:00:00/2020-01-01T00:00:00" + psl_1_5_0_1: + template: ExtData/JRA55-DO/v1-5-0-1/psl/psl_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-5-0-1_gr_%y401010000-%y412312100.nc + valid_range: "2020-01-01T00:00:00/2024-02-02T00:00:00" + tas_1_5_0: + template: ExtData/JRA55-DO/v1-5-0/tas/tas_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-5-0_gr_%y401010000-%y412312100.nc + valid_range: "1958-01-01T00:00:00/2020-01-01T00:00:00" + tas_1_5_0_1: + template: ExtData/JRA55-DO/v1-5-0-1/tas/tas_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-5-0-1_gr_%y401010000-%y412312100.nc + valid_range: "2020-01-01T00:00:00/2024-02-02T00:00:00" + huss_1_5_0: + template: ExtData/JRA55-DO/v1-5-0/huss/huss_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-5-0_gr_%y401010000-%y412312100.nc + valid_range: "1958-01-01T00:00:00/2020-01-01T00:00:00" + huss_1_5_0_1: + template: ExtData/JRA55-DO/v1-5-0-1/huss/huss_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-5-0-1_gr_%y401010000-%y412312100.nc + valid_range: "2020-01-01T00:00:00/2024-02-02T00:00:00" + uas_1_5_0: + template: 
ExtData/JRA55-DO/v1-5-0/uas/uas_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-5-0_gr_%y401010000-%y412312100.nc + valid_range: "1958-01-01T00:00:00/2020-01-01T00:00:00" + uas_1_5_0_1: + template: ExtData/JRA55-DO/v1-5-0-1/uas/uas_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-5-0-1_gr_%y401010000-%y412312100.nc + valid_range: "2020-01-01T00:00:00/2024-02-02T00:00:00" + vas_1_5_0: + template: ExtData/JRA55-DO/v1-5-0/vas/vas_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-5-0_gr_%y401010000-%y412312100.nc + valid_range: "1958-01-01T00:00:00/2020-01-01T00:00:00" + vas_1_5_0_1: + template: ExtData/JRA55-DO/v1-5-0-1/vas/vas_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-5-0-1_gr_%y401010000-%y412312100.nc + valid_range: "2020-01-01T00:00:00/2024-02-02T00:00:00" + uvas_1_5_0: + template: ExtData/JRA55-DO/v1-5-0/uvas/uvas_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-5-0_gr_%y401010000-%y412312100.nc + valid_range: "1958-01-01T00:00:00/2020-01-01T00:00:00" + uvas_1_5_0_1: + template: ExtData/JRA55-DO/v1-5-0-1/uvas/uvas_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-5-0-1_gr_%y401010000-%y412312100.nc + valid_range: "2020-01-01T00:00:00/2024-02-02T00:00:00" + friver_1_5_0: + template: ExtData/JRA55-DO/v1-5-0/friver/friver_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-5-0_gr_%y40101-%y41231.nc + valid_range: "1958-01-01T00:00:00/2020-01-01T00:00:00" + friver_1_5_0_1: + template: ExtData/JRA55-DO/v1-5-0-1/friver/friver_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-5-0-1_gr_%y40101-%y41231.nc + valid_range: "2020-01-01T00:00:00/2024-02-02T00:00:00" + prra_1_5_0: + template: ExtData/JRA55-DO/v1-5-0/prra/prra_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-5-0_gr_%y401010130-%y412312230.nc + valid_range: "1958-01-01T00:00:00/2020-01-01T00:00:00" + prra_1_5_0_1: + template: ExtData/JRA55-DO/v1-5-0-1/prra/prra_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-5-0-1_gr_%y401010130-%y412312230.nc + valid_range: "2020-01-01T00:00:00/2024-02-02T00:00:00" + prsn_1_5_0: + 
template: ExtData/JRA55-DO/v1-5-0/prsn/prsn_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-5-0_gr_%y401010130-%y412312230.nc + valid_range: "1958-01-01T00:00:00/2020-01-01T00:00:00" + prsn_1_5_0_1: + template: ExtData/JRA55-DO/v1-5-0-1/prsn/prsn_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-5-0-1_gr_%y401010130-%y412312230.nc + valid_range: "2020-01-01T00:00:00/2024-02-02T00:00:00" + rlds_1_5_0: + template: ExtData/JRA55-DO/v1-5-0/rlds/rlds_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-5-0_gr_%y401010130-%y412312230.nc + valid_range: "1958-01-01T00:00:00/2020-01-01T00:00:00" + rlds_1_5_0_1: + template: ExtData/JRA55-DO/v1-5-0-1/rlds/rlds_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-5-0-1_gr_%y401010130-%y412312230.nc + valid_range: "2020-01-01T00:00:00/2024-02-02T00:00:00" + rsds_1_5_0: + template: ExtData/JRA55-DO/v1-5-0/rsds/rsds_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-5-0_gr_%y401010130-%y412312230.nc + valid_range: "1958-01-01T00:00:00/2020-01-01T00:00:00" + rsds_1_5_0_1: + template: ExtData/JRA55-DO/v1-5-0-1/rsds/rsds_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-5-0-1_gr_%y401010130-%y412312230.nc + valid_range: "2020-01-01T00:00:00/2024-02-02T00:00:00" + +Samplings: + interannual_sample: + flux_conserve_sample: + time_interpolation: false + update_offset: "PT1H30M" + river_conserve_sample: + time_interpolation: false + update_offset: "PT12H" + +Exports: + PS: + - {starting: "1958-01-01T00:00:00", collection: psl_1_5_0, sample: interannual_sample, variable: psl} + - {starting: "2020-01-01T00:00:00", collection: psl_1_5_0_1, sample: interannual_sample, variable: psl} + TA: + - {starting: "1958-01-01T00:00:00", collection: tas_1_5_0, sample: interannual_sample, variable: tas} + - {starting: "2020-01-01T00:00:00", collection: tas_1_5_0_1, sample: interannual_sample, variable: tas} + QA: + - {starting: "1958-01-01T00:00:00", collection: huss_1_5_0, sample: interannual_sample, variable: huss} + - {starting: "2020-01-01T00:00:00", collection: 
huss_1_5_0_1, sample: interannual_sample, variable: huss} + UA;VA: + - {starting: "1958-01-01T00:00:00", collection: uvas_1_5_0, sample: interannual_sample, variable: uas;vas} + - {starting: "2020-01-01T00:00:00", collection: uvas_1_5_0_1, sample: interannual_sample, variable: uas;vas} + RUNOFF: + - {starting: "1958-01-01T00:00:00", collection: friver_1_5_0, regrid: CONSERVE, sample: river_conserve_sample, variable: friver} + - {starting: "2020-01-01T00:00:00", collection: friver_1_5_0_1, regrid: CONSERVE, sample: river_conserve_sample, variable: friver} + PCU: + - {starting: "1958-01-01T00:00:00", collection: prra_1_5_0, regrid: CONSERVE, sample: flux_conserve_sample, variable: prra} + - {starting: "2020-01-01T00:00:00", collection: prra_1_5_0_1, regrid: CONSERVE, sample: flux_conserve_sample, variable: prra} + PLS: {collection: /dev/null} + SNO: + - {starting: "1958-01-01T00:00:00", collection: prsn_1_5_0, regrid: CONSERVE, sample: flux_conserve_sample, variable: prsn} + - {starting: "2020-01-01T00:00:00", collection: prsn_1_5_0_1, regrid: CONSERVE, sample: flux_conserve_sample, variable: prsn} + LWDN: + - {starting: "1958-01-01T00:00:00", collection: rlds_1_5_0, regrid: CONSERVE, sample: flux_conserve_sample, variable: rlds} + - {starting: "2020-01-01T00:00:00", collection: rlds_1_5_0_1, regrid: CONSERVE, sample: flux_conserve_sample, variable: rlds} + SWGDWN: + - {starting: "1958-01-01T00:00:00", collection: rsds_1_5_0, regrid: CONSERVE, sample: flux_conserve_sample, variable: rsds} + - {starting: "2020-01-01T00:00:00", collection: rsds_1_5_0_1, regrid: CONSERVE, sample: flux_conserve_sample, variable: rsds} diff --git a/GEOSgigatraj_GridComp/.gitignore b/GEOSgigatraj_GridComp/.gitignore new file mode 100644 index 000000000..7bf7f4af4 --- /dev/null +++ b/GEOSgigatraj_GridComp/.gitignore @@ -0,0 +1,3 @@ +@GigaTraj/ +GigaTraj/ +GigaTraj@/ diff --git a/GEOSgigatraj_GridComp/CMakeLists.txt b/GEOSgigatraj_GridComp/CMakeLists.txt new file mode 100644 index 
000000000..3b4d5db38 --- /dev/null +++ b/GEOSgigatraj_GridComp/CMakeLists.txt @@ -0,0 +1,9 @@ +esma_set_this() + +set (dependencies MAPL ESMF::ESMF geos_giga metsources filters gigatraj) + +esma_add_library (${this} + SRCS GEOS_Giga_InterOp.F90 Gigatraj_Utils.F90 GEOS_GigatrajGridComp.F90 + DEPENDENCIES ${dependencies}) + +esma_add_subdirectories(GigaTraj) diff --git a/GEOSgigatraj_GridComp/GEOS_Giga_InterOp.F90 b/GEOSgigatraj_GridComp/GEOS_Giga_InterOp.F90 new file mode 100644 index 000000000..0c914dc53 --- /dev/null +++ b/GEOSgigatraj_GridComp/GEOS_Giga_InterOp.F90 @@ -0,0 +1,190 @@ +! This module define the interface bewteen GEOS and gigatraj +! The functions are defined in gigatraj + +module GEOS_Giga_InterOpMod + use, intrinsic :: iso_c_binding, only : c_double, c_int, c_ptr, c_null_char, c_associated + use, intrinsic :: iso_c_binding, only : c_loc, c_null_ptr + use mpi + implicit none + private + + public :: initMetGEOSDistributedLatLonData + public :: initMetGEOSDistributedCubedData + public :: updateFields + public :: RK4_advance + public :: setData + public :: getData + public :: getData2d + + public :: test_Field3D + public :: test_dataflow + public :: test_metData + + interface + + function initMetGEOSDistributedCubedData(comm, ijToRank, Ig, lev, i1, i2, j1, j2, nzs, lons_ptr, lats_ptr, eta_ptr, ctime_ptr) result (metdata_ptr) bind(C, name="initGigaGridDistributedCubedData") + import :: c_int, c_ptr + implicit none + integer(c_int), intent(in), value :: comm, Ig, lev, i1,i2,j1,j2, nzs + type(c_ptr), intent(in), value :: ijToRank, lons_ptr, lats_ptr, eta_ptr, ctime_ptr + type(c_ptr) :: metdata_ptr + end function + + function initMetGEOSDistributedLatLonData(comm, ijToRank, Ig, Jg,lev, nlon_local, nlat_local, nzs, lons_ptr, lats_ptr, eta_ptr, ctime_ptr) result (metdata_ptr) bind(C, name="initGigaGridDistributedLatLonData") + import :: c_int, c_ptr + implicit none + integer(c_int), intent(in), value :: comm, Ig, Jg, lev, nlon_local, nlat_local, nzs + 
type(c_ptr), intent(in), value :: ijToRank, lons_ptr, lats_ptr, eta_ptr, ctime_ptr + type(c_ptr) :: metdata_ptr + end function + + subroutine updateFields( metSrc_ptr, ctime_ptr, u_ptr, v_ptr, w_ptr, p_ptr) bind(C, name="updateFields") + import :: c_ptr + implicit none + type(c_ptr), intent(in), value :: metSrc_ptr, ctime_ptr, u_ptr, v_ptr, w_ptr, p_ptr + end subroutine + + subroutine RK4_advance(metsrc_ptr, ctime_ptr, dt, n, lons_ptr, lats_ptr, levs_ptr) bind( C, name='RK4_advance') + import :: c_ptr, c_int, c_double + type(c_ptr), intent(in), value :: metsrc_ptr + real(c_double), intent(in), value :: dt + integer(c_int), intent(in), value :: n + type(c_ptr), intent(in), value :: ctime_ptr, lons_ptr, lats_ptr, levs_ptr + end subroutine + + subroutine test_Field3d(obj_ptr) bind(C, name="test_Field3D") + import :: c_ptr + implicit none + type(c_ptr), intent(in), value :: obj_ptr + end subroutine + + subroutine test_metData(obj_ptr, time, n, lons_ptr, lats_ptr, levs_ptr, u_ptr, v_ptr, w_ptr) bind(C, name="test_metData") + import :: c_ptr,c_int, c_double + type(c_ptr), intent(in), value :: obj_ptr + real(c_double), intent(in), value :: time + integer(c_int), intent(in), value :: n + type(c_ptr), intent(in), value :: lons_ptr, lats_ptr, levs_ptr, u_ptr, v_ptr, w_ptr + end subroutine + + subroutine setData ( metSrc_ptr, ctime, quantity_ptr, data_ptr) bind(C, name="setData") + import :: c_ptr + type(c_ptr), intent(in), value :: metSrc_ptr, ctime, quantity_ptr, data_ptr + end subroutine setData + + subroutine getData ( metSrc_ptr, ctime, quantity_ptr, n, lons_ptr, lats_ptr, levs_ptr, values_ptr) bind(C, name="getData") + import :: c_ptr, c_int + integer(c_int), intent(in), value :: n + type(c_ptr), intent(in), value :: metSrc_ptr, ctime, quantity_ptr, lons_ptr, lats_ptr, levs_ptr, values_ptr + end subroutine getData + + subroutine getData2d ( metSrc_ptr, ctime, quantity_ptr, n, lons_ptr, lats_ptr, values_ptr) bind(C, name="getData2d") + import :: c_ptr, c_int + 
integer(c_int), intent(in), value :: n + type(c_ptr), intent(in), value :: metSrc_ptr, ctime, quantity_ptr, lons_ptr, lats_ptr, values_ptr + end subroutine getData2d + end interface + +contains + + subroutine test_dataflow(num_parcels, lons, lats, zs, CellToRank, DIMS, comm) + integer :: num_parcels, comm, DIMS(3) + real, dimension(:), intent(in) :: lons, lats,zs + integer, dimension(:,:), intent(in) :: CellToRank + + integer :: i, npes, ierror, rank, my_rank + real :: dlon, dlat + real, allocatable :: lons_positive(:) + + real, allocatable :: lons_send(:), lats_send(:), zs_send(:) + real, allocatable :: lons_recv(:), lats_recv(:), zs_recv(:) + real, allocatable :: U_recv(:), U_send(:) + real, allocatable :: U(:), V(:), W(:), pos(:) + + integer, allocatable :: counts_send(:),counts_recv(:), II(:), JJ(:), ranks(:) + integer, allocatable :: disp_send(:), disp_recv(:), tmp_position(:) + + dlon = 360.0 / DIMS(1) + dlat = 180.0 / DIMS(2) + + lons_positive = lons + where (lons_positive < 0) lons_positive=lons_positive + 360.0 + II = min( max(ceiling (lons_positive/dlon),1), DIMS(1)) + JJ = min( max(ceiling ((lats + 90.0)/dlat),1), DIMS(2)) + + call MPI_Comm_size(comm, npes, ierror) + call MPI_Comm_rank(comm, my_rank, ierror) + + allocate(ranks(num_parcels)) + allocate(counts_send(npes)) + allocate(counts_recv(npes)) + allocate(disp_send(npes)) + allocate(disp_recv(npes)) + + do i = 1, num_parcels + ranks(i) = CellToRank(II(i), JJ(i)) + enddo + +!-- ------------------- +!step 4) Pack the location data and send them to where the metData sit +!-- ------------------- + + do rank = 0, npes-1 + counts_send(rank+1) = count(ranks == rank) + enddo + + call MPI_AllToALL(counts_send, 1, MPI_INTEGER, counts_recv, 1, MPI_INTEGER, comm, ierror) + + disp_send = 0 + do rank = 1, npes-1 + disp_send(rank+1) = disp_send(rank)+ counts_send(rank) + enddo + disp_recv = 0 + do rank = 1, npes-1 + disp_recv(rank+1) = disp_recv(rank)+ counts_recv(rank) + enddo + + + ! 
re-arranged lats lons, and ids + tmp_position = disp_send + allocate(lons_send(num_parcels)) + allocate(lons_recv(sum(counts_recv))) + allocate(lats_send(num_parcels)) + allocate(lats_recv(sum(counts_recv))) + allocate(zs_send(num_parcels)) + allocate(zs_recv(sum(counts_recv))) + + allocate(pos(num_parcels)) + do i = 1, num_parcels + rank = ranks(i) + pos(i) = tmp_position(rank+1) +1 + lons_send(pos(i)) = lons(i) + lats_send(pos(i)) = lats(i) + zs_send(pos(i)) = zs(i) + tmp_position(rank+1) = tmp_position(rank+1) + 1 + enddo + + call MPI_AllToALLv(lons_send, counts_send, disp_send, MPI_REAL, lons_recv, counts_recv, disp_recv, MPI_REAL, comm, ierror) + call MPI_AllToALLv(lats_send, counts_send, disp_send, MPI_REAL, lats_recv, counts_recv, disp_recv, MPI_REAL, comm, ierror) + call MPI_AllToALLv(zs_send, counts_send, disp_send, MPI_REAL, zs_recv, counts_recv, disp_recv, MPI_REAL, comm, ierror) +!-- ------------------- +!step 5) Interpolate the data ( horiontally and vertically) and send back where they are from +!-- ------------------- + allocate(U_recv(sum(counts_recv)), source = my_rank*1.0) + allocate(U_send(num_parcels), source = -1.0) + ! + ! Horizontal and vertical interpolator here + ! 
+ call MPI_AllToALLv(U_recv, counts_recv, disp_recv, MPI_REAL, U_send, counts_send, disp_send, MPI_REAL, comm, ierror) + +!--------------------- +!step 6) Rearrange data ( not necessary if ids was rearranged ins step 4) +!--------------------- + + allocate(U(num_parcels)) + allocate(V(num_parcels)) + allocate(W(num_parcels)) + U(:) = U_send(pos(:)) + + end subroutine + +end module GEOS_Giga_InterOpMod diff --git a/GEOSgigatraj_GridComp/GEOS_GigatrajGridComp.F90 b/GEOSgigatraj_GridComp/GEOS_GigatrajGridComp.F90 new file mode 100644 index 000000000..ba2f62b1d --- /dev/null +++ b/GEOSgigatraj_GridComp/GEOS_GigatrajGridComp.F90 @@ -0,0 +1,1477 @@ +#include "MAPL_Generic.h" + +module GEOS_GigatrajGridCompMod + use, intrinsic :: iso_c_binding, only : c_int, c_ptr, c_null_ptr, c_associated, c_null_char + use, intrinsic :: iso_c_binding, only : c_loc + use ESMF + use MAPL + use MAPL_VerticalDataMod + use mpi + use GEOS_Giga_interOpMod + use Gigatraj_UtilsMod + implicit none + + public :: SetServices + +contains + + subroutine SetServices ( GC, RC ) + type(ESMF_GridComp), intent(INOUT) :: GC ! gridded component + integer, optional :: RC ! return code + + character(len=ESMF_MAXSTR) :: IAm + integer :: STATUS + character(len=ESMF_MAXSTR) :: COMP_NAME + + type (GigaTrajInternal), pointer :: GigaTrajInternalPtr + type (GigatrajInternalWrap) :: wrap + + call ESMF_GridCompGet( GC, NAME=COMP_NAME, _RC ) + Iam = trim(COMP_NAME) // 'SetServices' + + ! Register services for this component + ! ------------------------------------ + + call MAPL_GridCompSetEntryPoint ( GC, ESMF_METHOD_INITIALIZE, Initialize, _RC ) + call MAPL_GridCompSetEntryPoint ( GC, ESMF_METHOD_RUN, GetInitVars , _RC ) + call MAPL_GridCompSetEntryPoint ( GC, ESMF_METHOD_RUN, Run , _RC ) + + + ! 
Internal state + + call MAPL_AddInternalSpec(GC, & + SHORT_NAME = 'U', & + LONG_NAME = 'eastward_wind', & + UNITS = 'm s-1', & + DIMS = MAPL_DimsHorzVert, & + VLOCATION = MAPL_VLocationCenter, _RC) + + call MAPL_AddInternalSpec(GC, & + SHORT_NAME = 'V', & + LONG_NAME = 'northward_wind', & + UNITS = 'm s-1', & + DIMS = MAPL_DimsHorzVert, & + VLOCATION = MAPL_VLocationCenter, _RC) + + call MAPL_AddInternalSpec ( gc, & + SHORT_NAME = 'OMEGA', & + LONG_NAME = 'vertical_pressure_velocity', & + UNITS = 'Pa s-1', & + DIMS = MAPL_DimsHorzVert, & + VLOCATION = MAPL_VLocationCenter, _RC) + + call MAPL_AddInternalSpec ( gc, & + SHORT_NAME = 'PL', & + LONG_NAME = 'mid_level_pressure', & + UNITS = 'Pa', & + DIMS = MAPL_DimsHorzVert, & + VLOCATION = MAPL_VLocationCenter, _RC) + + call MAPL_AddInternalSpec ( gc, & + SHORT_NAME = 'ZL', & + LONG_NAME = 'mid_layer_heights', & + UNITS = 'm', & + DIMS = MAPL_DimsHorzVert, & + VLOCATION = MAPL_VLocationCenter, _RC) + + call MAPL_AddInternalSpec ( gc, & + SHORT_NAME = 'W', & + LONG_NAME = 'vertical_velocity', & + UNITS = 'm s-1', & + DIMS = MAPL_DimsHorzVert, & + VLOCATION = MAPL_VLocationCenter, _RC) + + call MAPL_AddInternalSpec ( gc, & + SHORT_NAME = 'TH', & + LONG_NAME = 'potential_temperature', & + UNITS = 'K', & + DIMS = MAPL_DimsHorzVert, & + VLOCATION = MAPL_VLocationCenter, _RC) + + call MAPL_AddInternalSpec ( gc, & + SHORT_NAME = 'DTDTDYN', & + LONG_NAME = 'tendency_of_air_temperature_due_to_dynamics', & + UNITS = 'K s-1', & + DIMS = MAPL_DimsHorzVert, & + VLOCATION = MAPL_VLocationCenter, RC=STATUS ) + VERIFY_(STATUS) + + allocate(GigaTrajInternalPtr) + wrap%ptr => GigaTrajInternalPtr + call ESMF_UserCompSetInternalState(GC, 'GigaTrajInternal', wrap, _RC) + + call MAPL_GenericSetServices(GC, _RC ) + + call MAPL_TimerAdd(GC, name="INITIALIZE" ,_RC) + call MAPL_TimerAdd(GC, name="RUN" ,_RC) + + RETURN_(ESMF_SUCCESS) + end subroutine SetServices + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

  ! Initialize: MAPL initialize method for the Gigatraj gridded component.
  !
  ! Reads the GIGATRAJ_* resources, creates the three component alarms
  ! (integrate / rebalance / output), builds the working grids (cubed-sphere
  ! and, optionally, a lat-lon grid plus a cube->latlon regridder), builds the
  ! global cell->MPI-rank ownership map, and reads the initial parcel
  ! positions from the parcels file.
  !
  ! Arguments follow the standard MAPL component signature (GC, IMPORT,
  ! EXPORT, CLOCK, RC).
  subroutine Initialize ( GC, IMPORT, EXPORT, CLOCK, RC )
   type(ESMF_GridComp), intent(inout) :: GC     ! Gridded component
   type(ESMF_State),    intent(inout) :: IMPORT ! Import state
   type(ESMF_State),    intent(inout) :: EXPORT ! Export state
   type(ESMF_Clock),    intent(inout) :: CLOCK  ! The clock
   integer, optional,   intent(  out) :: RC     ! Error code

   character(len=ESMF_MAXSTR)    :: IAm
   integer                       :: STATUS
   character(len=ESMF_MAXSTR)    :: COMP_NAME

   ! Local derived type aliases
   type (MAPL_MetaComp), pointer :: MPL
   type (ESMF_VM)                :: vm
   integer                       :: I1, I2, J1, J2, comm, npes, my_rank, rank, ierror, NX, NY, NPZ
   type(ESMF_Grid)               :: CubedGrid
   integer, allocatable          :: I1s(:), J1s(:), I2s(:),J2s(:)
   integer                       :: DIMS(3)
   type (GigaTrajInternal), pointer :: GigaTrajInternalPtr
   type (GigatrajInternalWrap)   :: wrap
   type (ESMF_TIME)              :: CurrentTime
   type(ESMF_Alarm)              :: GigaTrajOutAlarm, GigaTrajRebalanceAlarm, GigaTrajIntegrateAlarm
   type(ESMF_TimeInterval)       :: parcelsOut_DT, Rebalance_DT, Integrate_DT
   type(ESMF_TimeInterval)       :: ModelTimeStep
   integer                       :: HH, MM, SS
   integer                       :: integrate_time, r_time, o_time
   character(len=ESMF_MAXSTR)    :: parcels_file
   character(len=ESMF_MAXSTR)    :: grid_name, vCoord
   character(len=ESMF_MAXSTR)    :: regrid_to_latlon
   character(len=ESMF_MAXSTR), allocatable :: cName(:), bName(:), fName(:), aName(:)
   type(ESMF_Grid)               :: grid_

   call ESMF_GridCompGet ( GC, name=COMP_NAME, _RC )
   Iam = trim(COMP_NAME) // "Initialize"

   call MAPL_GetObjectFromGC ( GC, MPL, _RC)

   call MAPL_TimerOn(MPL,"TOTAL")
   call MAPL_TimerOn(MPL,"INITIALIZE")

   call ESMF_ClockGet(clock, currTime=CurrentTime, _RC)
   call ESMF_ClockGet(clock, timeStep=ModelTimeStep, _RC)

   call ESMF_TimeIntervalGet(ModelTimeStep, h = hh, m = mm, s = ss, _RC)

   ! The GIGATRAJ_*_DT resources are encoded as a single integer HHMMSS
   ! (e.g. 10000 = 1 hour); the default is the model heartbeat re-encoded
   ! the same way.
   call MAPL_GetResource(MPL, integrate_time, "GIGATRAJ_INTEGRATE_DT:", default = hh*10000+mm*100+ss, _RC)
   hh = integrate_time/10000
   mm = mod(integrate_time, 10000)/100
   ss = mod(integrate_time, 100)
   call ESMF_TimeIntervalSet(Integrate_DT, h = hh, m = mm, s = ss, _RC)

   ! Rebalance and output intervals default to the integration interval.
   call MAPL_GetResource(MPL, r_time, "GIGATRAJ_REBALANCE_DT:", default = integrate_time, _RC)
   hh = r_time/10000
   mm = mod(r_time, 10000)/100
   ss = mod(r_time, 100)
   call ESMF_TimeIntervalSet(Rebalance_DT, h = hh, m = mm, s = ss, _RC)

   call MAPL_GetResource(MPL, o_time, "GIGATRAJ_OUTPUT_DT:", default = integrate_time, _RC)
   hh = o_time/10000
   mm = mod(o_time, 10000)/100
   ss = mod(o_time, 100)
   call ESMF_TimeIntervalSet(parcelsOut_DT, h = hh, m = mm, s = ss, _RC)

   ! The first ring is set one model step before the nominal interval because
   ! Run executes after AGCM but before the clock tick (see Run).
   GigaTrajOutAlarm = ESMF_AlarmCreate( &
        clock, &
        name='GigatrajOut', &
        ringTime= CurrentTime + parcelsOut_DT-ModelTimeStep, &
        ringInterval=parcelsOut_DT, &
        ringTimeStepCount=1, &
        sticky=.false., _RC)

   GigaTrajRebalanceAlarm = ESMF_AlarmCreate( &
        clock, &
        name='GigatrajRebalance', &
        ringTime= CurrentTime + Rebalance_DT-ModelTimeStep, &
        ringInterval=Rebalance_DT, &
        ringTimeStepCount=1, &
        sticky=.false., _RC)

   GigaTrajIntegrateAlarm = ESMF_AlarmCreate( &
        clock, &
        name='GigatrajIntegrate', &
        ringTime= CurrentTime + integrate_DT-ModelTimeStep, &
        ringInterval=integrate_DT, &
        ringTimeStepCount=1, &
        sticky=.false., _RC)

   call MAPL_GenericInitialize ( GC, IMPORT, EXPORT, CLOCK, _RC)

   call ESMF_VMGetCurrent(vm, _RC)
   call ESMF_VMGet(vm, mpiCommunicator=comm, _RC)
   call MPI_Comm_size(comm, npes, ierror); _VERIFY(ierror)
   call MPI_Comm_rank(comm, my_rank, ierror); _VERIFY(ierror)

   call ESMF_UserCompGetInternalState(GC, 'GigaTrajInternal', wrap, status); _VERIFY(STATUS)
   GigaTrajInternalPtr => wrap%ptr
   GigaTrajInternalPtr%npes = npes

   call ESMF_GridCompGet(GC, grid=CubedGrid, _RC)
   call MAPL_GridGet(CubedGrid, globalCellCountPerDim=DIMS, _RC)

   ! Vertical coordinate spec has the form COMP%%BUNDLE|FIELD|ALIAS;
   ! only the first parsed field/alias pair is used here.
   call MAPL_GetResource(MPL, vCoord, "GIGATRAJ_VERTICAL_COORD:", default='DYN%%PL|P', rc=status)
   call parseCompsAndFieldsName(vCoord, cName, bName, fName, aName)

   GigaTrajInternalPtr%vCoord = trim(fName(1))
   GigaTrajInternalPtr%vAlias = trim(aName(1))
   ! Pair each supported vertical coordinate with its tendency field.
   select case(GigaTrajInternalPtr%vCoord)
   case ('PL')
      GigaTrajInternalPtr%vTendency = 'OMEGA'
   case('TH')
      GigaTrajInternalPtr%vTendency = 'DTDTDYN'
   case('ZL')
      GigaTrajInternalPtr%vTendency = 'W'
   case default
      _ASSERT(.false., "vertical coordinate is needed")
   end select

   npz = Dims(3)
   GigaTrajInternalPtr%npz = npz
   GigaTrajInternalPtr%Integrate_DT = Integrate_DT

   call MAPL_GetResource(MPL, NX, "NX:", _RC)
   call MAPL_GetResource(MPL, grid_name, "AGCM_GRIDNAME:", _RC)

   ! the level count is different from the original grid
   GigaTrajInternalPtr%CubedGrid = grid_manager%make_grid(&
        CubedSphereGridFactory(grid_name=trim(grid_name),im_world = DIMS(1), lm=npz, nx=NX, ny=NX, rc=status)); _VERIFY(status)

   call MAPL_GetResource(MPL, regrid_to_latlon, "GIGATRAJ_REGRID_TO_LATLON:", default='YES', _RC)

   GigaTrajInternalPtr%regrid_to_latlon = .true.
   if (trim(regrid_to_latlon) == "NO") GigaTrajInternalPtr%regrid_to_latlon = .false.

   if ( GigaTrajInternalPtr%regrid_to_latlon ) then

      ! NY is only defined in this branch (set by MAPL_MakeDecomposition);
      ! the cubed-sphere branch below never reads it.
      call MAPL_MakeDecomposition(NX,NY,_RC)

      ! Lat-lon working grid: 4*IM x (2*IM+1), pole-centered, dateline-centered.
      GigaTrajInternalPtr%LatLonGrid = grid_manager%make_grid( &
           LatLonGridFactory(im_world=DIMS(1)*4, jm_world=DIMS(1)*2+1, lm=npz, &
           nx=NX, ny=NY, pole='PC', dateline= 'DC', rc=status) ); _VERIFY(status)

      GigaTrajInternalPtr%cube2latlon => new_regridder_manager%make_regridder(GigaTrajInternalPtr%CubedGrid, GigaTrajInternalPtr%LatLonGrid, REGRID_METHOD_CONSERVE, _RC)

      grid_ = GigaTrajInternalPtr%LatLonGrid
      call MAPL_GridGet(grid_, globalCellCountPerDim=DIMS, _RC)
   else
      grid_ = CubedGrid
   endif

   call MAPL_Grid_interior(grid_, i1,i2,j1,j2)

   allocate(I1s(npes),J1s(npes))
   allocate(I2s(npes),J2s(npes))

   ! Gather every rank's interior index range so each PE can build the full
   ! global-cell -> owning-rank map used for parcel scatter/rebalance.
   call MPI_Allgather(i1, 1, MPI_INTEGER, I1s, 1, MPI_INTEGER, comm, ierror); _VERIFY(ierror)
   call MPI_Allgather(i2, 1, MPI_INTEGER, I2s, 1, MPI_INTEGER, comm, ierror); _VERIFY(ierror)
   call MPI_Allgather(j1, 1, MPI_INTEGER, J1s, 1, MPI_INTEGER, comm, ierror); _VERIFY(ierror)
   call MPI_Allgather(j2, 1, MPI_INTEGER, J2s, 1, MPI_INTEGER, comm, ierror); _VERIFY(ierror)

   allocate(GigaTrajInternalPtr%CellToRank(DIMS(1),DIMS(2)))

   do rank = 0, npes -1
      I1 = I1s(rank+1)
      I2 = I2s(rank+1)
      J1 = J1s(rank+1)
      J2 = J2s(rank+1)
      GigaTrajInternalPtr%CellToRank(I1:I2,J1:J2) = rank
   enddo

   ! Load initial parcel positions (and start time) from the parcels file.
   call read_parcels(GC, GigaTrajInternalPtr, _RC)

   call MAPL_TimerOff(MPL,"INITIALIZE")
   call MAPL_TimerOff(MPL,"TOTAL")
   RETURN_(ESMF_SUCCESS)
  end subroutine Initialize

!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
  ! GetInitVars: one-shot (guarded by the saved `init` flag) initialization of
  ! the met source fields. Seeds the gigatraj met source either from the AGCM
  ! import state or from the internal restart state, and — when
  ! GIGATRAJ_EXTRA_FIELDS is set — registers the extra diagnostic variables in
  ! the parcels NetCDF file (root PE only).
  subroutine GetInitVars ( GC, IMPORT, EXPORT, CLOCK, RC )
   type(ESMF_GridComp), intent(inout) :: GC
   type(ESMF_State),    intent(inout) :: IMPORT
   type(ESMF_State),    intent(inout) :: EXPORT
   type(ESMF_Clock),    intent(inout) :: CLOCK
   integer, optional,   intent(  out) :: RC

   integer :: i, status, k
   character(len=ESMF_MAXSTR)       :: IAm
   type (ESMF_State)                :: INTERNAL, leaf_export
   type (GigaTrajInternal), pointer :: GigaTrajInternalPtr
   type (GigatrajInternalWrap)      :: wrap
   character(len=ESMF_MAXSTR)       :: GigaRstFile
   character(len=ESMF_MAXSTR)       :: other_fields
   type(ESMF_Field)                 :: tmp_field
   type (ESMF_FieldBundle)          :: bdle
   type (ESMF_TIME)                 :: CurrentTime
   character(len=20), target        :: ctime
   type (MAPL_MetaComp), pointer    :: MPL
   logical, save                    :: init = .false. ! ensures this body runs only once
   real, dimension(:,:,:) , pointer :: ptr3d
   type(Netcdf4_fileformatter)      :: formatter
   type(FileMetadata)               :: meta
   character(len=ESMF_MAXSTR)       :: parcels_file
   character(len=:), allocatable    :: fieldname, tmp_name
   character(len=20)                :: diffusions(4)
   character (len=ESMF_MAXSTR), allocatable :: itemNameList(:)
   character (len=ESMF_MAXSTR)      :: LONG_NAME, UNITS
   character (len=ESMF_MAXSTR), allocatable :: fieldnames(:)
   character (len=ESMF_MAXSTR), allocatable :: bundlenames(:)
   character (len=ESMF_MAXSTR), allocatable :: compnames(:)
   character (len=ESMF_MAXSTR), allocatable :: aliasnames(:)
   integer :: nitems
   logical :: file_exists

   Iam = "getInitVars"

   if (init) then
      RETURN_(ESMF_SUCCESS)
   endif

   call MAPL_GetObjectFromGC ( GC, MPL, _RC)

   call ESMF_ClockGet(clock, currTime=CurrentTime, _RC)
   call ESMF_TimeGet(CurrentTime, timeStringISOFrac=ctime)
   ! NUL-terminate: ctime is passed by address to C (gigatraj) routines.
   ctime(20:20) = c_null_char

   call MAPL_Get(MPL, INTERNAL_ESMF_STATE=INTERNAL, _RC)

   call ESMF_UserCompGetInternalState(GC, 'GigaTrajInternal', wrap, _RC)
   GigaTrajInternalPtr => wrap%ptr

   call MAPL_GetResource(MPL, GigaRstFile, 'GIGATRAJ_INTERNAL_RESTART_FILE:', default="NONE", RC=STATUS )

   if (trim(GigaRstFile) == 'NONE') then
      ! without restart file, get value from import
      call init_metsrc_field0(GC, IMPORT, ctime, _RC)
   else
      INQUIRE(FILE= GigaRstFile, EXIST=file_exists)
      _ASSERT( file_exists, " GIGATRAJ_INTERNAL_RESTART_FILE does not exist")
      call init_metsrc_field0(GC, INTERNAL, ctime, _RC)
   endif

   call MAPL_GetResource(MPL, other_fields, "GIGATRAJ_EXTRA_FIELDS:", default='NONE', _RC)

   if (other_fields /= 'NONE') then
      call MAPL_GetResource(MPL, parcels_file, "GIGATRAJ_PARCELS_FILE:", default='parcels.nc4', _RC)
      ! Only root touches the parcels file; other PEs keep `meta` empty.
      if (MAPL_AM_I_ROOT()) then
         call formatter%open(trim(parcels_file), pFIO_WRITE, _RC)
         meta = formatter%read(_RC)
      endif

      call parseCompsAndFieldsName(other_fields, compnames, bundlenames, fieldnames, aliasnames)
      GigaTrajInternalPtr%ExtraCompNames   = compnames
      GigaTrajInternalPtr%ExtraBundleNames = bundlenames
      GigaTrajInternalPtr%ExtraFieldNames  = fieldnames
      GigaTrajInternalPtr%ExtraAliasNames  = aliasnames

      ! For each requested extra field: locate it (directly in a child export
      ! or inside a bundle), force-allocate its coupling, and declare a
      ! matching variable in the parcels file.
      do i = 1, size(FieldNames)
         call MAPL_ExportStateGet([import], trim(compnames(i)), leaf_export, _RC)
         if ( trim(bundlenames(i)) == 'NONE') then
            call ESMF_StateGet(leaf_export, trim(FieldNames(i)), tmp_field, _RC)
         else
            call ESMF_StateGet(leaf_export, trim(bundlenames(i)), bdle, _RC)
            call ESMFL_BundleGetPointerToData(bdle , trim(FieldNames(i)) , ptr3d, _RC)
            if ( .not. associated(ptr3d)) then
               _ASSERT(.false., trim(FieldNames(i)) // " in bundle "//trim(bundlenames(i)) // " is not allocated, gigatraj cannot output this field")
            endif
            call ESMF_FieldBundleGet(bdle, trim(FieldNames(i)), field=tmp_field, _RC)
         endif
         call MAPL_AllocateCoupling(tmp_field, _RC)
         call ESMF_AttributeGet(tmp_field, NAME='LONG_NAME', VALUE=LONG_NAME, _RC)
         call ESMF_AttributeGet(tmp_field, NAME='UNITS', VALUE=UNITS, _RC)

         call create_new_vars( meta, formatter, trim(long_name), trim(aliasnames(i)), trim(units))

      enddo
      if (MAPL_AM_I_Root()) then
         call formatter%close()
      endif
   endif

   init = .true.

   RETURN_(ESMF_SUCCESS)

  end subroutine GetInitVars

  ! Init_metsrc_field0: builds the gigatraj distributed met source from a
  ! given state (`IMPORT` on cold start, `INTERNAL` on restart): constructs
  ! log-spaced vertical levels for the chosen coordinate, creates the
  ! lat-lon or cubed-sphere met source via the C interface, and pushes the
  ! initial haloed U/V/tendency/coordinate fields into it.
  subroutine Init_metsrc_field0 (GC, state, ctime, RC )
   type(ESMF_GridComp), intent(inout) :: GC    ! Gridded component
   type(ESMF_State),    intent(inout) :: state
   character(*), target, intent(in)   :: ctime ! NUL-terminated ISO time string (passed to C)
   integer, optional,   intent(out)   :: RC    ! Error code

   character(len=ESMF_MAXSTR)       :: IAm
   integer                          :: STATUS

   type(GigaTrajInternal), pointer  :: GigaTrajInternalPtr
   type (GigatrajInternalWrap)      :: wrap
   real, dimension(:,:,:), pointer  :: U, V, W, P, PL0, PLE, TH
   real, dimension(:,:,:), allocatable :: U_latlon, V_latlon, W_latlon, P_latlon
   real, dimension(:,:,:), allocatable, target :: haloU, haloV, haloW, haloP
   integer :: counts(3), dims(3), d1,d2,km,lm, i1,i2,j1,j2,i
   real, allocatable, target :: lats_center(:), lons_center(:), levs_center(:)
   real, allocatable, target :: cube_lats_center(:, :), cube_lons_center(:,:)
   integer :: comm
   real    :: delt, High, low
   type(ESMF_VM)   :: vm
   type(ESMF_Grid) :: grid_

   Iam = "init_metsrc_field0"

   call ESMF_VMGetCurrent(vm, _RC)
   call ESMF_VMGet(vm, mpiCommunicator=comm, _RC)
   call ESMF_UserCompGetInternalState(GC, 'GigaTrajInternal', wrap, _RC)
   GigaTrajInternalPtr => wrap%ptr

   call MAPL_GetPointer(state, U, "U", _RC)
   call MAPL_GetPointer(state, V, "V", _RC)
   call MAPL_GetPointer(state, W, trim(GigaTrajInternalPtr%vTendency), _RC)
   call MAPL_GetPointer(state, P, trim(GigaTrajInternalPtr%vCoord), _RC)

   if (GigaTrajInternalPtr%regrid_to_latlon) then
      grid_ = GigaTrajInternalPtr%LatLonGrid
   else
      grid_ = GigaTrajInternalPtr%CubedGrid
   endif

   call MAPL_GridGet( grid_, localCellCountPerDim=counts, globalCellCountPerDim=dims, _RC)

   ! Per-coordinate vertical range used to synthesize log-spaced levels
   ! (units follow the coordinate: Pa for PL, K for TH, m for ZL).
   select case ( trim(GigaTrajInternalPtr%vCoord))
   case ("PL")
      High = 100000.
      Low  = 2.
   case ("TH")
      High = 5000.
      Low  = 200.
   case ("ZL")
      High = 78000.
      Low  = 1.
   end select

   delt = (log(High)-log(low))/dims(3)
   levs_center=[(exp(log(High)-(i-1)*delt), i=1, dims(3))]

   if (GigaTrajInternalPtr%regrid_to_latlon) then
      call get_latlon_centers(gc, lons_center, lats_center, _RC)
      ! +2 on the local counts accounts for the one-cell halo on each side.
      GigaTrajInternalPtr%metSrc = initMetGEOSDistributedLatLonData(comm, c_loc(GigaTrajInternalPtr%CellToRank), DIMS(1), DIMS(2), &
                                    dims(3), counts(1)+2, counts(2)+2, dims(3), &
                                    c_loc(lons_center), c_loc(lats_center), c_loc(levs_center), c_loc(ctime))
      deallocate(lons_center, lats_center)
   else
      call MAPL_Grid_interior(grid_, i1, i2, j1, j2)
      call get_cube_centers(gc, cube_lons_center, cube_lats_center, _RC)
      GigaTrajInternalPtr%metSrc = initMetGEOSDistributedCubedData(comm, c_loc(GigaTrajInternalPtr%CellToRank), DIMS(1), &
                                    dims(3), i1, i2, j1, j2, dims(3), &
                                    c_loc(cube_lons_center), c_loc(cube_lats_center), c_loc(levs_center), c_loc(ctime))

      deallocate(cube_lons_center, cube_lats_center)

   endif
   deallocate(levs_center)

   lm = dims(3)
   d1 = counts(1)
   d2 = counts(2)

   allocate(haloU(d1+2, d2+2,lm), source = 0.0)
   allocate(haloV(d1+2, d2+2,lm), source = 0.0)
   allocate(haloW(d1+2, d2+2,lm), source = 0.0)
   allocate(haloP(d1+2, d2+2,lm), source = 0.0)

   if ( GigaTrajInternalPtr%regrid_to_latlon) then
      allocate(U_latlon(d1,d2,lm))
      allocate(V_latlon(d1,d2,lm))
      allocate(W_latlon(d1,d2,lm))
      allocate(P_latlon(d1,d2,lm))
      ! U/V regridded together as a vector pair; scalars one at a time.
      call GigaTrajInternalPtr%cube2latlon%regrid(U, V, U_latlon, V_latlon, _RC)
      call GigaTrajInternalPtr%cube2latlon%regrid(W, W_latlon, _RC)
      call GigaTrajInternalPtr%cube2latlon%regrid(P, P_latlon, _RC)

      call esmf_halo(GigaTrajInternalPtr%LatLonGrid, U_Latlon, haloU, _RC)
      call esmf_halo(GigaTrajInternalPtr%LatLonGrid, V_Latlon, haloV, _RC)
      call esmf_halo(GigaTrajInternalPtr%LatLonGrid, W_Latlon, haloW, _RC)
      call esmf_halo(GigaTrajInternalPtr%LatLonGrid, P_Latlon, haloP, _RC)
      deallocate(U_latlon, V_latlon, W_latlon, P_latlon)
   else
      call esmf_halo(GigaTrajInternalPtr%CubedGrid, U, haloU, _RC)
      call esmf_halo(GigaTrajInternalPtr%CubedGrid, V, haloV, _RC)
      call esmf_halo(GigaTrajInternalPtr%CubedGrid, W, haloW, _RC)
      call esmf_halo(GigaTrajInternalPtr%CubedGrid, P, haloP, _RC)
   endif

   call updateFields( GigaTrajInternalPtr%metSrc, c_loc(ctime), c_loc(haloU), c_loc(haloV), c_loc(haloW), c_loc(haloP))

   ! NOTE(review): PL0 is declared as a pointer but never associated or
   ! nullified in this routine, so querying associated(PL0) here has
   ! undefined behavior per the Fortran standard — it should be declared
   ! with => null(). TODO confirm and fix.
   if(associated(PL0)) deallocate(PL0)
   deallocate(haloU, haloV, haloW, haloP)
   RETURN_(ESMF_SUCCESS)

  end subroutine init_metsrc_field0

!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

  ! Run: per-step driver. Copies the monitored import fields into the
  ! internal state every step; when the 'GigatrajIntegrate' alarm rings it
  ! regrids/halos the met fields, advances all local parcels with RK4,
  ! rebalances parcel ownership across PEs, and writes parcel output
  ! (the last two are themselves alarm-gated).
  subroutine Run ( GC, IMPORT, EXPORT, CLOCK, RC )
   type(ESMF_GridComp), intent(inout) :: GC     ! Gridded component
   type(ESMF_State),    intent(inout) :: IMPORT ! Import state
   type(ESMF_State),    intent(inout) :: EXPORT ! Export state
   type(ESMF_Clock),    intent(inout) :: CLOCK  ! The clock
   integer, optional,   intent(  out) :: RC     ! Error code

   character(len=ESMF_MAXSTR) :: IAm
   integer                    :: STATUS
   integer                    :: CSTAT, ESTAT, YY, DD
   character(512)             :: CMSG
   character(256)             :: command_line
   character(19)              :: begdate, enddate
   character(64)              :: format_string
   type(ESMF_TimeInterval)    :: ModelTimeStep
   type(ESMF_Time)            :: CurrentTime, preTime
   type(ESMF_Grid)            :: grid_

   type (GigaTrajInternal), pointer :: GigaTrajInternalPtr
   type (GigatrajInternalWrap)      :: wrap

   integer :: lm, d1, d2, k, itemCount
   integer ::counts(3), DIMS(3), comm, ierror
   type (ESMF_VM) :: vm

   real, dimension(:,:,:), pointer :: U_cube, V_cube, W_cube, P_cube, PLE_Cube, with_halo
   real, dimension(:,:,:), pointer :: internal_field, model_field
   real, dimension(:,:,:), pointer :: tmp_ptr

   real, dimension(:,:,:), allocatable :: U_latlon, V_latlon, W_latlon, P_latlon
   real, dimension(:,:,:), allocatable :: U_inter, V_inter, W_inter, P_inter
   real, dimension(:,:,:), allocatable, target :: haloU, haloV, haloW, haloP

   real(ESMF_KIND_R8) :: DT

   character(len=20), target :: ctime, ctime0
   type(ESMF_State)          :: INTERNAL
   type(MAPL_MetaComp),pointer :: MPL
   type(ESMF_Alarm)          :: GigaTrajIntegrateAlarm
   type(MAPL_VarSpec ), pointer:: internal_specs(:)
   character(len=ESMF_MAXSTR) :: SHORT_NAME
   character(len=ESMF_MAXSTR), allocatable :: item_names(:)

!---------------
! Update internal
!---------------
   call ESMF_UserCompGetInternalState(GC, 'GigaTrajInternal', wrap, _RC)
   GigaTrajInternalPtr => wrap%ptr
   call MAPL_GetObjectFromGC ( GC, MPL, _RC)
   call MAPL_Get (MPL, INTERNAL_ESMF_STATE=INTERNAL, _RC)

   ! Mirror every internal-state field from the import state each step so
   ! the internal (restart) state always holds the latest model values.
   call ESMF_StateGet(INTERNAL, itemCount=itemCount, _RC)
   allocate(item_names(itemCount))
   call ESMF_StateGet(INTERNAL, itemNameList=item_names, _RC)

   do k=1, ItemCount
      call MAPL_GetPointer(Import,   model_field,    trim(item_names(k)), _RC)
      call MAPL_GetPointer(INTERNAL, internal_field, trim(item_names(k)), _RC)
      internal_field(:,:,:) = model_field(:,:,:)
   enddo
   deallocate(item_names)

   call ESMF_ClockGetAlarm(clock, 'GigatrajIntegrate', GigaTrajIntegrateAlarm, _RC)

   ! Everything below only runs at the integration interval.
   if ( .not. ESMF_AlarmIsRinging(GigaTrajIntegrateAlarm)) then
      RETURN_(ESMF_SUCCESS)
   endif

   call ESMF_VMGetCurrent(vm, _RC)
   call ESMF_VMGet(vm, mpiCommunicator=comm, _RC)


   call ESMF_ClockGet(clock, currTime=CurrentTime, _RC)
   call ESMF_ClockGet(clock, timeStep=ModelTimeStep, _RC)

   ! W.J note: this run is after agcm's run. The clock is not yet ticked
   ! So the values we are using are at (CurrentTime + ModelTimeStep)

   CurrentTime = CurrentTime + ModelTimeStep
   call ESMF_TimeGet(CurrentTime, timeStringISOFrac=ctime)
   ctime(20:20) = c_null_char

   ! ctime0 is the start of the integration window (ctime - Integrate_DT).
   preTime = CurrentTime - GigaTrajInternalPtr%Integrate_DT
   call ESMF_TimeGet(preTime, timeStringISOFrac=ctime0)
   ctime0(20:20) = c_null_char

   call ESMF_TimeIntervalGet(GigaTrajInternalPtr%Integrate_DT, d_r8=DT, _RC)

!---------------
! Step 1) Regrid the metData field from cubed to lat-lon
!---------------
   call MAPL_GetPointer(Import, U_cube, "U", _RC)
   call MAPL_GetPointer(Import, V_cube, "V", _RC)
   call MAPL_GetPointer(Import, W_cube, GigaTrajInternalPtr%vTendency, _RC)
   call MAPL_GetPointer(Import, P_cube, GigaTrajInternalPtr%vCoord, _RC)

   lm = size(u_cube,3)
   d1 = size(u_cube,1)
   d2 = size(u_cube,2)

   if (GigaTrajInternalPtr%regrid_to_latlon) then
      grid_ = GigaTrajInternalPtr%LatLonGrid
      call MAPL_GridGet(grid_, localCellCountPerDim=counts, &
           globalCellCountPerDim=DIMS, _RC)

      allocate(U_latlon(counts(1), counts(2),lm), source = 0.0)
      allocate(V_latlon(counts(1), counts(2),lm), source = 0.0)
      allocate(W_latlon(counts(1), counts(2),lm), source = 0.0)
      allocate(P_latlon(counts(1), counts(2),lm), source = 0.0)
   else
      grid_ = GigaTrajInternalPtr%CubedGrid
      call MAPL_GridGet(grid_, localCellCountPerDim=counts, &
           globalCellCountPerDim=DIMS, _RC)
   endif

   ! Halo buffers carry one ghost cell on each side (+2 per dimension).
   allocate(haloU(counts(1)+2, counts(2)+2,lm), source = 0.0)
   allocate(haloV(counts(1)+2, counts(2)+2,lm), source = 0.0)
   allocate(haloW(counts(1)+2, counts(2)+2,lm), source = 0.0)
   allocate(haloP(counts(1)+2, counts(2)+2,lm), source = 0.0)

!---------------
! Step 2) Get halo
!---------------
   if (GigaTrajInternalPtr%regrid_to_latlon) then

      call GigaTrajInternalPtr%cube2latlon%regrid(U_cube,V_cube, U_latlon, V_latlon, _RC)
      call GigaTrajInternalPtr%cube2latlon%regrid(W_cube, W_latlon, _RC)
      call GigaTrajInternalPtr%cube2latlon%regrid(P_cube, P_latlon, _RC)

      call esmf_halo(grid_, U_Latlon, haloU, _RC)
      call esmf_halo(grid_, V_Latlon, haloV, _RC)
      call esmf_halo(grid_, W_Latlon, haloW, _RC)
      call esmf_halo(grid_, P_Latlon, haloP, _RC)

      deallocate( U_Latlon, V_latlon, W_latlon, P_latlon)
   else
      call esmf_halo(grid_, U_cube, haloU, _RC)
      call esmf_halo(grid_, V_cube, haloV, _RC)
      call esmf_halo(grid_, W_cube, haloW, _RC)
      call esmf_halo(grid_, P_cube, haloP, _RC)
   endif

!---------------
! Step 3) Update
!---------------

   call updateFields( GigaTrajInternalPtr%metSrc, c_loc(ctime), c_loc(haloU), c_loc(haloV), c_loc(haloW), c_loc(haloP))

!---------------
! Step 4) Time advance
!---------------
   ! Advance parcels in place from ctime0 over DT days (RK4 in the C layer).
   call RK4_advance( GigaTrajInternalPtr%metSrc, c_loc(ctime0), DT, GigaTrajInternalPtr%parcels%num_parcels, &
        c_loc(GigaTrajInternalPtr%parcels%lons), &
        c_loc(GigaTrajInternalPtr%parcels%lats), &
        c_loc(GigaTrajInternalPtr%parcels%zs))

   deallocate(haloU, haloV, haloW, haloP)

!---------------
! Step 5) rebalance parcels among processors ( configurable with alarm)
!---------------
   call rebalance_parcels(clock, GigaTrajInternalPtr%parcels, GigaTrajInternalPtr%CellToRank, comm, grid_, _RC)

!---------------
! Step 6) write out parcel positions and related fields ( configurable with alarm)
!---------------
   ! Note: currentTime here is already advanced by one ModelTimeStep (above).
   call write_parcels(GC, import, clock, currentTime, _RC)
   RETURN_(ESMF_SUCCESS)

  end subroutine Run

  ! esmf_halo: fills `haloField` (local extents +2 in each horizontal
  ! dimension) with `Field` plus a one-cell halo exchanged level by level
  ! through a temporary ESMF field and route handle.
  subroutine esmf_halo(grid, Field,haloField, rc)
   type(ESMF_Grid), intent(in)            :: grid
   real, dimension(:,:,:), intent(in)     :: Field
   real, dimension(:,:,:), intent(inout)  :: haloField
   integer, optional, intent( out)        :: RC

   character(len=ESMF_MAXSTR) :: IAm
   integer :: counts(3), k, count3
   integer :: status
   type(ESMF_Field)       :: halo_field
   type(ESMF_RouteHandle) :: rh
   real, dimension(:,:), pointer :: with_halo

   Iam = "Gigatraj ESMF Halo"
   call MAPL_GridGet(grid, localCellCountPerDim=counts, _RC)

   count3 = size(field,3) ! may be nbins

   halo_field = ESMF_FieldCreate(grid, ESMF_TYPEKIND_R4, name='halo_field', &
                                 totalLWidth=[1,1],totalUWidth=[1,1])
   call ESMF_FieldGet(halo_field, farrayPtr=with_halo, _RC)
   with_halo = 0.0
   call ESMF_FieldHaloStore(halo_field, rh, _RC)
   !
   ! W.Y note, the pointer with_halo's lbound is 0
   !
   do k = 1, count3
      with_halo(1:counts(1), 1:counts(2)) = Field(:,:,k)
      call ESMF_FieldHalo(halo_field, rh, _RC)
      haloField(:,:,k) = with_halo
   enddo

   call ESMF_FieldDestroy(halo_field)
   call ESMF_FieldHaloRelease(rh, _RC)

   RETURN_(ESMF_SUCCESS)
  end subroutine esmf_halo

  ! move the parcels to the PE where they belong to
  !
  ! rebalance_parcels: alarm-gated all-to-all redistribution. Maps each
  ! parcel's (lon,lat) to a global cell, looks up the owning rank in
  ! CellToRank, then exchanges parcel arrays with MPI_Alltoall(v).
  subroutine rebalance_parcels(clock, parcels, CellToRank, comm, grid, rc)
   type(ESMF_Clock), intent(inout)     :: CLOCK ! The clock
   type(horde), intent(inout)          :: parcels
   integer, dimension(:,:), intent(in) :: CellToRank
   integer                             :: comm
   type(ESMF_Grid), intent(inout)      :: grid
   integer, optional, intent(out)      :: rc

   integer :: status
   integer :: DIMS(3)
   character(len=:), allocatable :: Iam
   integer :: num_parcels0, num_parcels
   real,    dimension(:), allocatable :: lons0, lats0, zs0
   integer, dimension(:), allocatable :: IDs0

   integer :: i, npes, ierror, rank, my_rank, pos
   real    :: dlon, dlat

   real,    allocatable :: lons_send(:), lats_send(:), zs_send(:)
   integer, allocatable :: ids_send(:)
   type(ESMF_Alarm)     :: GigaTrajRebalanceAlarm
   integer, allocatable :: counts_send(:),counts_recv(:), II(:), JJ(:), ranks(:)
   integer, allocatable :: disp_send(:), disp_recv(:), tmp_position(:)

   Iam = "rebalance_parcels"

   call ESMF_ClockGetAlarm(clock, 'GigatrajRebalance', GigaTrajRebalanceAlarm, _RC)

   if ( .not. ESMF_AlarmIsRinging(GigaTrajRebalanceAlarm)) then
      RETURN_(ESMF_SUCCESS)
   endif

   ! Steal the current parcel arrays; they are re-allocated below at the
   ! post-exchange size.
   call move_alloc( parcels%lons, lons0)
   call move_alloc( parcels%lats, lats0)
   call move_alloc( parcels%zs,   zs0)
   call move_alloc( parcels%IDs,  IDs0)
   num_parcels0 = parcels%num_parcels

   ! Wrap longitudes into [-180, 180].
   where (lons0 < -180.0) lons0 =lons0 + 360.0
   where (lons0 >  180.0) lons0 =lons0 - 360.0

   allocate(II(num_parcels0), JJ(num_parcels0))
   call MAPL_GridGet(Grid, globalCellCountPerDim=DIMS)
   ! Cubed-sphere grids have jm == 6*im; otherwise assume lat-lon (DC/PC).
   if (DIMS(2) == 6*DIMS(1)) then
      call MAPL_GetGlobalHorzIJIndex(num_parcels0, II, JJ, lons0/180.0*MAPL_PI, lats0/180.0*MAPL_PI, Grid=Grid, rc=status)
   else

      dlon = 360.0 / DIMS(1)
      dlat = 180.0 / (DIMS(2)-1)
      ! DC
      II = min( max(ceiling ((lons0+dlon/2.+180.0)/dlon),1), DIMS(1))
      JJ = min( max(ceiling ((lats0+dlat/2.+ 90.0)/dlat),1), DIMS(2))

   endif

   call MPI_Comm_size(comm, npes, ierror)
   call MPI_Comm_rank(comm, my_rank, ierror)

   allocate(ranks    (num_parcels0))
   allocate(lons_send(num_parcels0))
   allocate(lats_send(num_parcels0))
   allocate(zs_send  (num_parcels0))
   allocate(IDs_send (num_parcels0))

   allocate(counts_send(npes))
   allocate(counts_recv(npes))
   allocate(disp_send(npes))
   allocate(disp_recv(npes))

   do i = 1, num_parcels0
      ranks(i) = CellToRank(II(i), JJ(i))
   enddo

   do rank = 0, npes-1
      counts_send(rank+1) = count(ranks == rank)
   enddo

   ! Exchange per-rank send counts so each PE knows its receive layout.
   call MPI_AllToALL(counts_send, 1, MPI_INTEGER, counts_recv, 1, MPI_INTEGER, comm, ierror)

   disp_send = 0
   do rank = 1, npes-1
      disp_send(rank+1) = disp_send(rank)+ counts_send(rank)
   enddo
   disp_recv = 0
   do rank = 1, npes-1
      disp_recv(rank+1) = disp_recv(rank)+ counts_recv(rank)
   enddo

   ! re-arranged lats lons, and ids: pack send buffers grouped by
   ! destination rank, then exchange.
   tmp_position = disp_send
   parcels%num_parcels = sum(counts_recv)
   num_parcels = parcels%num_parcels
   allocate(parcels%lons(num_parcels ))
   allocate(parcels%lats(num_parcels ))
   allocate(parcels%zs  (num_parcels ))
   allocate(parcels%IDs (num_parcels ))

   do i = 1, num_parcels0
      rank = ranks(i)
      pos  = tmp_position(rank+1) +1
      lons_send(pos) = lons0(i)
      lats_send(pos) = lats0(i)
      zs_send(pos)   = zs0(i)
      IDs_send(pos)  = IDs0(i)
      tmp_position(rank+1) = tmp_position(rank+1) + 1
   enddo

   call MPI_AllToALLv(lons_send, counts_send, disp_send, MPI_REAL,    parcels%lons, counts_recv, disp_recv, MPI_REAL,    comm, ierror)
   call MPI_AllToALLv(lats_send, counts_send, disp_send, MPI_REAL,    parcels%lats, counts_recv, disp_recv, MPI_REAL,    comm, ierror)
   call MPI_AllToALLv(zs_send,   counts_send, disp_send, MPI_REAL,    parcels%zs,   counts_recv, disp_recv, MPI_REAL,    comm, ierror)
   call MPI_AllToALLv(ids_send,  counts_send, disp_send, MPI_INTEGER, parcels%IDs,  counts_recv, disp_recv, MPI_INTEGER, comm, ierror)

   RETURN_(ESMF_SUCCESS)
  end subroutine rebalance_parcels
  ! Scatter parcels from root after reading parcels file
  !
  ! scatter_parcels: rank 0 holds all parcels (lons0/lats0/zs0/IDs0); it maps
  ! each parcel to its owning rank via CellToRank, packs rank-contiguous send
  ! buffers, and MPI_Scatterv's them. Outputs are each rank's local parcel
  ! arrays and count. Only rank 0's input arrays are meaningful; every rank
  ! must still call this (collective).
  subroutine scatter_parcels(num_parcels0, lons0, lats0, zs0, IDs0, CellToRank, Grid, comm, lons, lats, zs, IDs, num_parcels)
   integer                             :: num_parcels0 ! total parcel count (valid on root)
   real, dimension(:), intent(inout)   :: lons0        ! inout: longitudes are wrapped in place
   real, dimension(:), intent(in)      :: lats0, zs0
   integer, dimension(:), intent(in)   :: IDs0
   integer, dimension(:,:), intent(in) :: CellToRank   ! global (i,j) cell -> owning MPI rank
   type(ESMF_GRID), intent(inout)      :: Grid
   integer, intent(in)                 :: comm
   real, dimension(:), allocatable, intent(out)    :: lons, lats, zs
   integer, dimension(:), allocatable, intent(out) :: IDs
   integer, intent(out)                :: num_parcels  ! local parcel count after scatter

   integer :: DIMS(3)
   integer :: i, npes, ierror, rank, my_rank, counts_recv, pos, status
   real    :: dlon, dlat

   real,    allocatable :: lons_send(:), lats_send(:), zs_send(:)
   integer, allocatable :: ids_send(:)

   integer, allocatable :: counts_send(:), II(:), JJ(:), ranks(:)
   integer, allocatable :: disp_send(:), tmp_position(:)

   call MPI_Comm_size(comm, npes, ierror)
   call MPI_Comm_rank(comm, my_rank, ierror)

   call MAPL_GridGet(Grid, globalCellCountPerDim=DIMS)

   allocate(II(num_parcels0), JJ(num_parcels0))

   ! counts_send/disp_send stay zero on non-root ranks; MPI_Scatter(v) only
   ! reads them on root.
   allocate(counts_send(npes), source = 0)
   allocate(disp_send(npes),   source = 0)
   ! Wrap longitudes into [-180, 180].
   where (lons0 < -180.0) lons0 =lons0 + 360.0
   where (lons0 > 180.0 ) lons0 =lons0 - 360.0

   if (my_rank == 0) then
      ! Cubed-sphere grids have jm == 6*im; otherwise assume lat-lon (PC/DC).
      if (DIMS(2) == 6*DIMS(1)) then
         call MAPL_GetGlobalHorzIJIndex(num_parcels0, II, JJ, lons0/180.0*MAPL_PI, lats0/180.0*MAPL_PI, Grid=Grid, rc=status)
      else
         dlon = 360.0 / DIMS(1)
         dlat = 180.0 / (DIMS(2)-1) !PC

         II = min( max(ceiling ((lons0+dlon/2.0 + 180.0)/dlon),1), DIMS(1))
         JJ = min( max(ceiling ((lats0+dlat/2.0 + 90.0 )/dlat),1), DIMS(2))
      endif

      allocate(ranks(num_parcels0))
      do i = 1, num_parcels0
         ranks(i) = CellToRank(II(i), JJ(i))
      enddo

      do rank = 0, npes-1
         counts_send(rank+1) = count(ranks == rank)
      enddo

      do rank = 1, npes-1
         disp_send(rank+1) = disp_send(rank)+ counts_send(rank)
      enddo

   endif

   ! Each rank learns how many parcels it will receive.
   call MPI_Scatter(counts_send, 1, MPI_INTEGER, counts_recv, 1, MPI_INTEGER, 0, comm, ierror)

   ! re-arranged lats lons, and ids
   tmp_position = disp_send
   num_parcels = counts_recv

   allocate(lons_send(num_parcels0))
   allocate(lons     (num_parcels ))
   allocate(lats_send(num_parcels0))
   allocate(lats     (num_parcels ))
   allocate(zs_send  (num_parcels0))
   allocate(zs       (num_parcels ))
   allocate(IDs_send (num_parcels0))
   allocate(IDs      (num_parcels ))

   if (my_rank == 0) then
      ! Pack send buffers so parcels bound for the same rank are contiguous.
      do i = 1, num_parcels0
         rank = ranks(i)
         pos  = tmp_position(rank+1) +1
         lons_send(pos) = lons0(i)
         lats_send(pos) = lats0(i)
         zs_send(pos)   = zs0(i)
         IDs_send(pos)  = IDs0(i)
         tmp_position(rank+1) = tmp_position(rank+1) + 1
      enddo
   endif

   call MPI_ScatterV(lons_send, counts_send, disp_send, MPI_REAL,    lons, counts_recv, MPI_REAL,    0, comm, ierror)
   call MPI_ScatterV(lats_send, counts_send, disp_send, MPI_REAL,    lats, counts_recv, MPI_REAL,    0, comm, ierror)
   call MPI_ScatterV(zs_send,   counts_send, disp_send, MPI_REAL,    zs,   counts_recv, MPI_REAL,    0, comm, ierror)
   call MPI_ScatterV(ids_send,  counts_send, disp_send, MPI_INTEGER, IDs,  counts_recv, MPI_INTEGER, 0, comm, ierror)

  end subroutine scatter_parcels
gather parcels to root for writing + subroutine gather_parcels(num_parcels0, lons0, lats0, zs0, IDs0, comm, lons, lats, zs, IDs, num_parcels) + integer, intent(out) :: num_parcels0 + real, dimension(:), allocatable, intent(out) :: lons0, lats0,zs0 + integer, dimension(:), allocatable, intent(out) :: IDs0 + integer, intent(in) :: comm + real, dimension(:), intent(in) :: lons, lats, zs + integer, dimension(:), intent(in) :: IDs + integer, intent(in) :: num_parcels + + integer :: i, npes, ierror, my_rank + integer, allocatable :: nums_all(:), displ(:) + + call MPI_Comm_size(comm, npes, ierror) + call MPI_Comm_rank(comm, my_rank, ierror) + + allocate(nums_all(npes), source = 0) + call MPI_Gather(num_parcels, 1, MPI_INTEGER, nums_all, 1, MPI_INTEGER, 0, comm, ierror) + + num_parcels0 = sum(nums_all) + + allocate(lons0(num_parcels0)) + allocate(lats0(num_parcels0)) + allocate( zs0(num_parcels0)) + allocate( IDS0(num_parcels0)) + allocate( displ(npes), source =0) + do i =2, npes + displ(i) = displ(i-1)+nums_all(i-1) + enddo + + call MPI_GatherV(lons, num_parcels, MPI_REAL, lons0, nums_all, displ, MPI_REAL, 0, comm,ierror) + call MPI_GatherV(lats, num_parcels, MPI_REAL, lats0, nums_all, displ, MPI_REAL, 0, comm,ierror) + call MPI_GatherV(zs, num_parcels, MPI_REAL, zs0, nums_all, displ, MPI_REAL, 0, comm,ierror) + call MPI_GatherV(IDS, num_parcels, MPI_INTEGER, IDs0,nums_all, displ, MPI_INTEGER, 0, comm,ierror) + + end subroutine gather_parcels + + subroutine gather_onefield(num_parcels0, field0, comm, field, num_parcels) + integer, intent(out) :: num_parcels0 + real, dimension(:), allocatable, intent(out) :: field0 + integer, intent(in) :: comm + real, dimension(:), intent(in) :: field + integer, intent(in) :: num_parcels + + integer :: i, npes, ierror, my_rank + integer, allocatable :: nums_all(:), displ(:) + + call MPI_Comm_size(comm, npes, ierror) + call MPI_Comm_rank(comm, my_rank, ierror) + + allocate(nums_all(npes), source = 0) + call MPI_Gather(num_parcels, 1, 
MPI_INTEGER, nums_all, 1, MPI_INTEGER, 0, comm, ierror) + + num_parcels0 = sum(nums_all) + + allocate(field0(num_parcels0)) + allocate( displ(npes), source =0) + do i =2, npes + displ(i) = displ(i-1)+nums_all(i-1) + enddo + + call MPI_GatherV(field, num_parcels, MPI_REAL, field0, nums_all, displ, MPI_REAL, 0, comm,ierror) + + end subroutine gather_onefield + + subroutine write_parcels(GC, state, CLOCK, currentTime, rc) + type(ESMF_GridComp), intent(inout) :: GC ! Gridded component + type(ESMF_State), intent(inout) :: state ! Import state + type(ESMF_Clock), intent(inout) :: CLOCK ! The clock + type(ESMF_TIME), intent(in) :: currentTime + integer, optional, intent(out) :: rc + + character(len=:), allocatable :: Iam + type (ESMF_VM) :: vm + type(Netcdf4_fileformatter) :: formatter + integer :: comm, my_rank, total_num, i, k,status, last_time, ierror, count3 + real, allocatable :: lats0(:), lons0(:), zs0(:), values(:), values0(:) + real,target, allocatable :: values_2d(:,:) + real,pointer :: field(:,:,:) + integer, allocatable :: ids0(:), ids0_in(:) + type(ESMF_Alarm) :: GigaTrajOutAlarm + type(FileMetadata) :: meta + real(ESMF_KIND_R8) :: tint_d + type(ESMF_TimeInterval) :: tint + type(MAPL_MetaComp),pointer :: MPL + character(len=ESMF_MAXSTR) :: parcels_file, other_fields + character(len=ESMF_MAXSTR), allocatable :: varnames(:) + character(len=:), allocatable:: var_name, var_, comp_name, var_alias, bdlename + + type (GigaTrajInternal), pointer :: GigaTrajInternalPtr + type (GigatrajInternalWrap) :: wrap + character(len=20), target :: ctime + character(len=:), allocatable :: vAlias + + Iam = "write_parcels" + call ESMF_ClockGetAlarm(clock, 'GigatrajOut', GigaTrajOutAlarm, _RC) + + if ( .not. 
ESMF_AlarmIsRinging(GigaTrajOutAlarm)) then + RETURN_(ESMF_SUCCESS) + endif + + call MAPL_GetObjectFromGC ( GC, MPL, _RC) + call ESMF_UserCompGetInternalState(GC, 'GigaTrajInternal', wrap, _RC) + GigaTrajInternalPtr => wrap%ptr + + call MAPL_GetResource(MPL, parcels_file, "GIGATRAJ_PARCELS_FILE:", default='parcels.nc4', _RC) + + call ESMF_VMGetCurrent(vm, _RC) + call ESMF_VMGet(vm, mpiCommunicator=comm, _RC) + call gather_parcels(total_num, lons0, lats0, zs0, IDs0, & + comm, & + GigaTrajInternalPtr%parcels%lons, & + GigaTrajInternalPtr%parcels%lats, & + GigaTrajInternalPtr%parcels%zs, & + GigaTrajInternalPtr%parcels%IDS, & + GigaTrajInternalPtr%parcels%num_parcels ) + + call MPI_Comm_rank(comm, my_rank, ierror); _VERIFY(ierror) + if (my_rank ==0) then + if (GigaTrajInternalPtr%vCoord == 'PL') then + zs0 = zs0 / 100.0 ! hard coded, conert Pa back to hPa + endif + ! reorder + ids0 = ids0 + 1 ! element start 0, make it to 1 for ordering + ids0(ids0) = [(k, k=1,size(ids0))] + + ! test if ordering is right + ! ids0_in = ids0 + ! ids0_in = ids0_in(ids0) + ! do k = 1, size(ids0) + ! if (k /= ids0_in(k)) then + ! RETURN_(-1) + ! endif + ! enddo + + lats0 = lats0(ids0(:)) ! id is zero-bases, plus 1 Fortran + lons0 = lons0(ids0(:)) + where (lons0 > 180.0) lons0 = lons0 - 360. + where (lons0 < -180.0) lons0 = lons0 + 360. + zs0 = zs0(ids0(:)) + call formatter%open(trim(parcels_file), pFIO_WRITE, _RC) + meta = formatter%read(_RC) + last_time = meta%get_dimension('time', _RC) + tint = CurrentTime - GigaTrajInternalPtr%startTime + call ESMF_TimeIntervalGet(tint,d_r8=tint_d,rc=status) + + call formatter%put_var('lat', lats0, start=[1, last_time+1], _RC) + call formatter%put_var('lon', lons0, start=[1, last_time+1], _RC) + call formatter%put_var(GigaTrajInternalPtr%vAlias, zs0, start=[1, last_time+1], _RC) + call formatter%put_var('time', [tint_d], start=[last_time+1], _RC) + endif + + ! 
extra fields + if (allocated(GigaTrajInternalPtr%ExtraFieldNames)) then + call ESMF_TimeGet(CurrentTime, timeStringISOFrac=ctime) + ctime(20:20) = c_null_char + do k = 1, size(GigaTrajInternalPtr%ExtraFieldNames) + comp_name = trim(GigaTrajInternalPtr%ExtraCompNames(k)) + var_name = trim(GigaTrajInternalPtr%ExtraFieldNames(k)) + bdlename = trim(GigaTrajInternalPtr%ExtraBundleNames(k)) + var_alias = trim(GigaTrajInternalPtr%ExtraAliasNames(k)) + + if ( index(var_name, 'bcDP') /= 0 .or. & + index(var_name, 'ocDP') /= 0 .or. & + index(var_name, 'bcWT') /= 0 .or. & + index(var_name, 'ocWT') /= 0 .or. & + index(var_name, 'bcSD') /= 0 .or. & + index(var_name, 'ocSD') /= 0 .or. & + index(var_name, 'bcSV') /= 0 .or. & + index(var_name, 'ocSV') /= 0 ) then + + call MAPL_GetPointer(state, field, var_name, _RC) + count3 = size(field,3) + allocate(values_2d(GigaTrajInternalPtr%parcels%num_parcels, count3)) + call get_metsrc_data2d (GC, state, ctime, var_name, values_2d, RC ) + do i = 1, count3 + call gather_onefield(total_num, values0, comm, values_2d(:,i), GigaTrajInternalPtr%parcels%num_parcels) + + if (my_rank == 0) then + values0 = values0(ids0(:)) + var_ = var_alias //'00'//i_to_string(i) + if ( meta%has_variable(var_)) then + call formatter%put_var( var_, values0, start=[1, last_time+1], _RC) + else + print*, "Please provide "//var_ // " in the file "//trim(parcels_file) + endif + endif + enddo + deallocate(values_2d) + else if ( index(var_name, 'TRI') /= 0) then + allocate(values(GigaTrajInternalPtr%parcels%num_parcels)) + allocate(varnames(4)) + varnames(1) = "CA.bc::CA.bcphilicIT" + varnames(2) = "CA.bc::CA.bcphobicIT" + varnames(3) = "CA.oc::CA.ocphilicIT" + varnames(4) = "CA.oc::CA.ocphobicIT" + do i = 1, 4 + var_ = varnames(i)(8:) + call get_metsrc_data (GC, state, ctime, comp_name, bdlename, varnames(i), values, _RC) + call gather_onefield(total_num, values0, comm, values, GigaTrajInternalPtr%parcels%num_parcels) + + if (my_rank == 0) then + values0 = 
values0(ids0(:)) + if ( meta%has_variable(var_)) then + call formatter%put_var( var_, values0, start=[1, last_time+1], _RC) + else + print*, "Please provide "//var_ // " in the file "//trim(parcels_file) + endif + endif + enddo + deallocate(values, varnames) + else + allocate(values(GigaTrajInternalPtr%parcels%num_parcels)) + call get_metsrc_data (GC, state, ctime, comp_name, bdlename, var_name, values, _RC ) + call gather_onefield(total_num, values0, comm, values, GigaTrajInternalPtr%parcels%num_parcels) + if (my_rank == 0) then + values0 = values0(ids0(:)) + if ( meta%has_variable(var_alias)) then + if(var_alias == 'P') values0 = values0/100.0 ! hard coded to hPa + call formatter%put_var( var_alias, values0, start=[1, last_time+1], _RC) + else + print*, "Please provide "//var_alias // " in the file "//trim(parcels_file) + endif + endif + deallocate(values) + endif + enddo + endif + + if (my_rank ==0) then + call formatter%close(_RC) + endif + RETURN_(ESMF_SUCCESS) + contains + + end subroutine write_parcels + + subroutine read_parcels(GC,internal, rc) + type(ESMF_GridComp), intent(inout) :: GC + type(GigaTrajInternal), intent(inout) :: internal + integer, optional, intent(out) :: rc + + type(Netcdf4_fileformatter) :: formatter + type(FileMetadata) :: meta + integer :: comm, my_rank, total_num, ierror, last_time + real, allocatable :: lats0(:), lons0(:), zs0(:) + !real(kind=ESMF_KIND_R8), allocatable :: ids0_r(:) + integer, allocatable :: ids0(:) + integer :: status, k + character(len=ESMF_MAXSTR) :: parcels_file + character(len=ESMF_MAXSTR) :: regrid_to_latlon + type(MAPL_MetaComp),pointer :: MPL + type (ESMF_VM) :: vm + type (ESMF_GRID) :: grid_ + class(Variable), pointer :: t + type(Attribute), pointer :: attr + class(*), pointer :: units + character(len=ESMF_MAXSTR) :: Iam ="read_parcels" + + call ESMF_VMGetCurrent(vm, _RC) + call ESMF_VMGet(vm, mpiCommunicator=comm, _RC) + + call MPI_Comm_rank(comm, my_rank, ierror); _VERIFY(ierror) + call 
MAPL_GetObjectFromGC ( GC, MPL, _RC) + call MAPL_GetResource(MPL, parcels_file, "GIGATRAJ_PARCELS_FILE:", default='parcels.nc4', _RC) + total_num = 0 + if (my_rank ==0) then + call formatter%open(parcels_file, pFIO_READ, _RC) + meta = formatter%read(_RC) + total_num = meta%get_dimension('id', _RC) + last_time = meta%get_dimension('time', _RC) + t => meta%get_variable('time', _RC) + attr => t%get_attribute('long_name') + units => attr%get_value() + select type(units) + type is (character(*)) + internal%startTime = parse_time_string(units, _RC) + class default + _FAIL('unsupported subclass for units') + end select + endif + + allocate(lats0(total_num), lons0(total_num), zs0(total_num),ids0(total_num)) + + if (my_rank ==0) then + call formatter%get_var('lat', lats0, start = [1,last_time], _RC) + call formatter%get_var('lon', lons0, start = [1,last_time], _RC) + call formatter%get_var(internal%vAlias,zs0, start = [1,last_time], _RC) + if (internal%vCoord == 'PL') zs0 = zs0*100.0 ! hard coded from hPa to Pa + call formatter%close(_RC) + ids0 = [(k, k=0,total_num-1)] + endif + call MAPL_GetResource(MPL, regrid_to_latlon, "GIGATRAJ_REGRID_TO_LATLON:", default= 'YES' , _RC) + if (trim (regrid_to_latlon) == 'YES') then + grid_ = internal%LatLonGrid + else + grid_ = internal%CubedGrid + endif + call scatter_parcels(total_num, lons0, lats0, zs0, IDs0, internal%CellToRank, grid_, comm, & + Internal%parcels%lons, & + Internal%parcels%lats, & + Internal%parcels%zs, & + Internal%parcels%IDS, & + Internal%parcels%num_parcels) + + RETURN_(ESMF_SUCCESS) + contains + ! a copy from MAPL_TimeMod + function parse_time_string(timeUnits,rc) result(time) + character(len=*), intent(inout) :: timeUnits + integer, optional, intent(out) :: rc + + type(ESMF_Time) :: time + integer :: status + + integer year ! 4-digit year + integer month ! month + integer day ! day + integer hour ! hour + integer min ! minute + integer sec ! 
second + + integer ypos(2), mpos(2), dpos(2), hpos(2), spos(2) + integer strlen + integer firstdash, lastdash + integer firstcolon, lastcolon + integer lastspace + strlen = LEN_TRIM (TimeUnits) + + firstdash = index(TimeUnits, '-') + lastdash = index(TimeUnits, '-', BACK=.TRUE.) + if (firstdash .LE. 0 .OR. lastdash .LE. 0) then + _FAIL('time string is not a valid format') + endif + ypos(2) = firstdash - 1 + mpos(1) = firstdash + 1 + ypos(1) = ypos(2) - 3 + + mpos(2) = lastdash - 1 + dpos(1) = lastdash + 1 + dpos(2) = dpos(1) + 1 + + read ( TimeUnits(ypos(1):ypos(2)), * ) year + read ( TimeUnits(mpos(1):mpos(2)), * ) month + read ( TimeUnits(dpos(1):dpos(2)), * ) day + + firstcolon = index(TimeUnits, ':') + if (firstcolon .LE. 0) then + + ! If no colons, check for hour. + + ! Logic below assumes a null character or something else is after the hour + ! if we do not find a null character add one so that it correctly parses time + if (TimeUnits(strlen:strlen) /= char(0)) then + TimeUnits = trim(TimeUnits)//char(0) + strlen=len_trim(TimeUnits) + endif + lastspace = index(TRIM(TimeUnits), ' ', BACK=.TRUE.) + if ((strlen-lastspace).eq.2 .or. (strlen-lastspace).eq.3) then + hpos(1) = lastspace+1 + hpos(2) = strlen-1 + read (TimeUnits(hpos(1):hpos(2)), * ) hour + min = 0 + sec = 0 + else + hour = 0 + min = 0 + sec = 0 + endif + else + hpos(1) = firstcolon - 2 + hpos(2) = firstcolon - 1 + lastcolon = index(TimeUnits, ':', BACK=.TRUE.) + if ( lastcolon .EQ. 
firstcolon ) then + mpos(1) = firstcolon + 1 + mpos(2) = firstcolon + 2 + read (TimeUnits(hpos(1):hpos(2)), * ) hour + read (TimeUnits(mpos(1):mpos(2)), * ) min + sec = 0 + else + mpos(1) = firstcolon + 1 + mpos(2) = lastcolon - 1 + spos(1) = lastcolon + 1 + spos(2) = lastcolon + 2 + read (TimeUnits(hpos(1):hpos(2)), * ) hour + read (TimeUnits(mpos(1):mpos(2)), * ) min + read (TimeUnits(spos(1):spos(2)), * ) sec + endif + endif + + call ESMF_TimeSet(time,yy=year,mm=month,dd=day,h=hour,m=min,s=sec,rc=status) + _VERIFY(status) + RETURN_(ESMF_SUCCESS) + end function parse_time_string + end subroutine read_parcels + + subroutine get_metsrc_data (GC, state, ctime, compname, bundlename, fieldname, values, RC ) + type(ESMF_GridComp), intent(inout) :: GC ! Gridded component + type(ESMF_State), intent(inout) :: state + character(*), target, intent(in) :: ctime + character(*), intent(in) :: compname + character(*), intent(in) :: bundlename + character(*), intent(in) :: fieldname + real, target, intent(inout) :: values(:) + integer, optional, intent(out) :: RC ! Error code + + character(len=ESMF_MAXSTR) :: IAm + integer :: STATUS + + type(GigaTrajInternal), pointer :: GigaTrajInternalPtr + type (GigatrajInternalWrap) :: wrap + type (ESMF_FieldBundle) :: bdle + type (ESMF_GRID) :: grid_ + type(ESMF_State) :: leaf_export + real, dimension(:,:,:), pointer :: ptr3d + real, dimension(:,:,:), allocatable :: field_latlon + + real, dimension(:,:,:), allocatable, target :: haloField + integer :: counts(3), dims(3), d1, d2, lm, count3 + character(len=:), target, allocatable :: field_ + + Iam = "get_metsrc_data" + + !if (index(fieldname,'philicIT') /=0 .or. index(fieldname,'phobicIT') /=0) then + ! call ESMF_StateGet(state, 'PHYSICS_Exports/TURBULENCE_Exports/TRI', TRI, _RC) + ! call ESMF_FieldBundleGet(TRI, fieldname, field=field, _RC) + ! call ESMF_FieldGet(field,farrayPtr=ptr3d, _RC) + !else + ! 
call MAPL_GetPointer(state, ptr3d, fieldname, _RC) + !endif + + call MAPL_ExportStateGet([state], trim(compname), leaf_export, _RC) + if (trim(bundlename) /= 'NONE') then + call ESMF_StateGet(leaf_export, trim(bundlename), bdle, _RC) + call ESMFL_BundleGetPointerToData(bdle, fieldname, ptr3d, _RC) + else + call MAPL_GetPointer(leaf_export, ptr3d, fieldname, _RC) + endif + + call ESMF_UserCompGetInternalState(GC, 'GigaTrajInternal', wrap, _RC) + GigaTrajInternalPtr => wrap%ptr + + lm = size(ptr3d,3) + d1 = size(ptr3d,1) + d2 = size(ptr3d,2) + + if (GigaTrajInternalPtr%regrid_to_latlon) then + grid_ = GigaTrajInternalPtr%LatLonGrid + call MAPL_GridGet(grid_, localCellCountPerDim=counts,globalCellCountPerDim=DIMS, _RC) + allocate(field_latlon(counts(1),counts(2), lm)) + else + grid_ = GigaTrajInternalPtr%CubedGrid + call MAPL_GridGet(grid_, localCellCountPerDim=counts,globalCellCountPerDim=DIMS, _RC) + endif + + allocate(haloField(counts(1)+2, counts(2)+2, lm), source = 0.0) + + if (GigaTrajInternalPtr%regrid_to_latlon) then + call GigaTrajInternalPtr%cube2latlon%regrid(ptr3d, Field_latlon, _RC) + call esmf_halo(grid_, Field_latlon, haloField, _RC) + deallocate(Field_latlon) + else + call esmf_halo(grid_, ptr3d, haloField, _RC) + endif + + field_ = trim(fieldname)//c_null_char + call setData( GigaTrajInternalPtr%metSrc, c_loc(ctime), c_loc(field_), c_loc(haloField)) + call getData(GigaTrajInternalPtr%metSrc, c_loc(ctime), c_loc(field_), & + GigaTrajInternalPtr%parcels%num_parcels, & + c_loc(GigaTrajInternalPtr%parcels%lons), & + c_loc(GigaTrajInternalPtr%parcels%lats), & + c_loc(GigaTrajInternalPtr%parcels%zs), & + c_loc(values)) + + deallocate(haloField) + RETURN_(ESMF_SUCCESS) + + end subroutine get_metsrc_data + + subroutine get_metsrc_data2d (GC, state, ctime, fieldname, values, RC ) + type(ESMF_GridComp), intent(inout) :: GC ! 
Gridded component + type(ESMF_State), intent(inout) :: state + character(*), target, intent(in) :: ctime + character(*), intent(in) :: fieldname + real, target, intent(inout) :: values(:,:) + integer, optional, intent(out) :: RC ! Error code + + character(len=ESMF_MAXSTR) :: IAm + integer :: STATUS + + type(GigaTrajInternal), pointer :: GigaTrajInternalPtr + type (GigatrajInternalWrap) :: wrap + type (ESMF_GRID) :: grid_ + real, dimension(:,:,:), pointer :: field + real, dimension(:,:,:), allocatable :: field_latlon + real, dimension(:,:,:), allocatable, target :: haloField + integer :: counts(3), i, count3 + character(len=:), target, allocatable :: field_ + + Iam = "get_metsrc_data" + + call MAPL_GetPointer(state, field, fieldname, _RC) + count3 = size(field,3) + + call ESMF_UserCompGetInternalState(GC, 'GigaTrajInternal', wrap, _RC) + GigaTrajInternalPtr => wrap%ptr + if (GigaTrajInternalPtr%regrid_to_latlon) then + grid_ = GigaTrajInternalPtr%LatLonGrid + else + grid_ = GigaTrajInternalPtr%CubedGrid + endif + + call MAPL_GridGet(grid_, localCellCountPerDim=counts, _RC) + + allocate(field_latlon(counts(1),counts(2),count3)) + allocate(haloField(counts(1)+2, counts(2)+2,count3), source = 0.0) + + if (GigaTrajInternalPtr%regrid_to_latlon) then + call GigaTrajInternalPtr%cube2latlon%regrid(field, Field_latlon, _RC) + call esmf_halo(grid_, Field_latlon, haloField, _RC) + else + call esmf_halo(grid_, field, haloField, _RC) + endif + + field_ = trim(fieldname)//'_2D'//c_null_char + + do i = 1,count3 + + call setData( GigaTrajInternalPtr%metSrc, c_loc(ctime), c_loc(field_), c_loc(haloField(1,1,i))) + call getData2d(GigaTrajInternalPtr%metSrc, c_loc(ctime), c_loc(field_), & + GigaTrajInternalPtr%parcels%num_parcels, & + c_loc(GigaTrajInternalPtr%parcels%lons), & + c_loc(GigaTrajInternalPtr%parcels%lats), & + c_loc(values(1,i))) + + enddo + deallocate(field_latlon, haloField, field_) + RETURN_(ESMF_SUCCESS) + + end subroutine get_metsrc_data2d + +end module 
GEOS_GigatrajGridCompMod diff --git a/GEOSgigatraj_GridComp/Gigatraj_Utils.F90 b/GEOSgigatraj_GridComp/Gigatraj_Utils.F90 new file mode 100644 index 000000000..bb77ed1a3 --- /dev/null +++ b/GEOSgigatraj_GridComp/Gigatraj_Utils.F90 @@ -0,0 +1,291 @@ +#include "MAPL_Generic.h" + +module Gigatraj_UtilsMod + use, intrinsic :: iso_c_binding, only : c_int, c_ptr, c_null_ptr, c_associated, c_null_char + use, intrinsic :: iso_c_binding, only : c_loc + + use ESMF + use MAPL + use mpi + implicit none + public :: parseCompsAndFieldsName + public :: create_new_vars + public :: get_levels + public :: get_latlon_centers + public :: get_cube_centers + public :: horde + public :: GigaTrajInternal + public :: GigatrajInternalWrap + + type horde + integer :: num_parcels + integer, allocatable :: IDS(:) + real, allocatable :: lats(:), lons(:), zs(:) + end type + + type GigaTrajInternal + integer :: npes + integer :: npz ! number of pressure levels + type (ESMF_Grid) :: LatLonGrid + type (ESMF_Grid) :: CubedGrid + class (AbstractRegridder), pointer :: cube2latlon => null() + integer, allocatable :: CellToRank(:,:) + type(horde) :: parcels + type(c_ptr) :: metSrc + type(ESMF_Time) :: startTime + type(ESMF_TimeInterval) :: Integrate_DT + character(len=ESMF_MAXSTR), allocatable :: ExtraFieldNames(:) + character(len=ESMF_MAXSTR), allocatable :: ExtraCompNames(:) + character(len=ESMF_MAXSTR), allocatable :: ExtrabundleNames(:) + character(len=ESMF_MAXSTR), allocatable :: ExtraAliasNames(:) + + character(len=:), allocatable :: vCoord + character(len=:), allocatable :: vAlias + character(len=:), allocatable :: vTendency + + logical :: regrid_to_latlon + end type + + type GigatrajInternalWrap + type (GigaTrajInternal), pointer :: PTR + end type + +contains + + subroutine parseCompsAndFieldsName(fields_line, CompNames, BundleNames, FieldNames, AliasNames) + character(*), intent(in) :: fields_line + character(len=ESMF_MAXSTR), allocatable, intent(out) :: CompNames(:) + 
character(len=ESMF_MAXSTR), allocatable, intent(out) :: BundleNames(:) + character(len=ESMF_MAXSTR), allocatable, intent(out) :: FieldNames(:) + character(len=ESMF_MAXSTR), allocatable, intent(out) :: AliasNames(:) + integer :: num_field, i, j, k, l, endl, num_ + character(len=:), allocatable :: tmp, tmp_bnf, tmp_f, tmp_alias + num_field = 1 + k = 1 + do + i = index(fields_line(k:),';') + if (i == 0) exit + if (trim(fields_line(i+1:)) =='') exit ! take care of the last unnecessay ";" + k = k+i + num_field = num_field+1 + enddo + + allocate(Fieldnames(num_field)) + allocate(Compnames(num_field)) + allocate(BundleNames(num_field)) + allocate(AliasNames(num_field)) + + k = 1 + num_ = 1 + + do + i = index(fields_line(k:),';') + if (i == 0) then + endl = len(fields_line) + else + endl = (k-1)+i-1 + endif + tmp = fields_line(k:endl) + + j = index(tmp, '%%') + if (j /= 0) then ! there is bundle + Compnames(num_) = trim(adjustl(tmp(1:j-1))) + tmp_bnf = trim(adjustl(tmp(j+2:))) + l = index(tmp_bnf, '%') + if (l /=0) then + BundleNames(num_) = tmp_bnf(1:l-1) + tmp_f = tmp_bnf(l+1:) + else + print*, "%field is a must" + endif + else + BundleNames(num_) = 'NONE' + j = index(tmp, '%') + Compnames(num_) = trim(adjustl(tmp(1:j-1))) + tmp_f = tmp(j+1:) + endif + + ! 
Aliasing....., Hard coded here + l = index(tmp_f, '|') + if (l /=0) then + FieldNames(num_) = tmp_f(1:l-1) + tmp_alias = tmp_f(l+1:) + else + FieldNames(num_) = tmp_f + tmp_alias = tmp_f + endif + + AliasNames(num_) = tmp_alias + + num_ = num_ + 1 + k = endl + 2 + if (num_ > num_field) exit + enddo + end subroutine parseCompsAndFieldsName + + subroutine create_new_vars(meta, formatter, long_name, short_name, units) + type(FileMetadata), intent(inout) :: meta + type(Netcdf4_fileformatter), intent(inout) :: formatter + character(*), intent(in) :: long_name + character(*), intent(in) :: short_name + character(*), intent(in) :: units + type(Variable) :: var + character(len=:), allocatable :: var_name + if (MAPL_AM_I_Root()) then + if( meta%has_variable(short_name)) return + var_name = short_name + var = variable(type=pFIO_REAL32, dimensions='id,time') + call var%add_attribute('long_name', long_name) + call var%add_attribute('units', units) + call var%add_attribute('positive', "up") + call var%add_attribute('_FillValue', -999.99) + call var%add_attribute('missing_value', -999.99) + call meta%add_variable(var_name, var) + call formatter%add_variable(meta, short_name) + endif + end subroutine create_new_vars + + subroutine get_levels(P, func, levels, rc) + real, dimension(:,:,:), intent(in) :: P + character(*), intent(in) :: func + real, dimension(:), intent(out) :: levels + integer, optional, intent(out) :: rc + logical :: positive + type (ESMF_VM) :: vm + integer :: comm, lm, status, i, ll + real :: local_min_val, local_max_val, lev01, levLm, delt + real, allocatable :: temp(:,:) + character(:), allocatable :: Iam + + Iam = "get_levels" + + lm = size(P,3) + + call ESMF_VMgetCurrent(vm) + call ESMF_VMGet(vm, mpiCommunicator = comm, rc = status) + positive = P(1,1,1) < P(1,1,2) + if (positive) then + local_min_val = maxval(P(:,:,1)) + call MPI_Allreduce(lev01, local_min_val,1, MPI_FLOAT, MPI_MIN, comm, status) + temp = P(:,:,lm) + where(temp >= MAPL_UNDEF) temp = 
-MAPL_UNDEF + local_max_val = maxval(temp) + call MPI_Allreduce(levLm, local_max_val,1, MPI_FLOAT, MPI_MAX, comm, status) + else + local_min_val = minval(P(:,:,lm)) + call MPI_Allreduce(levLm, local_min_val,1, MPI_FLOAT, MPI_MIN, comm, status) + temp = P(:,:,1) + where(temp >= MAPL_UNDEF) temp = -MAPL_UNDEF + local_max_val = maxval(temp) + call MPI_Allreduce(lev01, local_max_val,1, MPI_FLOAT, MPI_MAX, comm, status) + endif + + ll = size(levels) + + if (trim(func) == 'log') then + delt = (log(levLm)-log(lev01))/(lm-1) + levels =[ (exp(log(lev01)+i*delt), i=0, ll-1)] + else + delt = (levLm-lev01)/(lm-1) + levels =[ (lev01 + i*delt, i=0, ll-1)] + endif + + end subroutine get_levels + + subroutine get_cube_centers(GC, lon_center, lat_center, rc) + type(ESMF_GridComp), intent(inout) :: GC ! Gridded component + real, allocatable, intent(out) :: lat_center(:,:), lon_center(:,:) + integer, optional, intent( out) :: RC + integer :: i1, i2, j1, j2, imc, jmc, status + real(ESMF_KIND_R8), pointer :: centerX(:,:) + real(ESMF_KIND_R8), pointer :: centerY(:,:) + real(ESMF_KIND_R8), pointer :: ptr(:,:) + type(ESMF_Field) :: field + type(ESMF_RouteHandle) :: rh + type (GigaTrajInternal), pointer :: GigaTrajInternalPtr + type (GigatrajInternalWrap) :: wrap + type(ESMF_Grid) :: grid_ + character(:), allocatable :: Iam + Iam="get_cube_centers,cube with halo" + + call ESMF_UserCompGetInternalState(GC, 'GigaTrajInternal', wrap, status); _VERIFY(STATUS) + GigaTrajInternalPtr => wrap%ptr + grid_ = GigaTrajInternalPtr%CubedGrid + call MAPL_Grid_interior(Grid_, i1,i2,j1,j2) + imc = i2-i1 + 1 + jmc = j2-j1 + 1 + + allocate(lon_center(imc+2, jmc+2)) + allocate(lat_center(imc+2, jmc+2)) + + field = ESMF_FieldCreate(Grid_, ESMF_TYPEKIND_R8, name='halo', staggerLoc=ESMF_STAGGERLOC_CENTER,totalLWidth=[1,1],totalUWidth=[1,1],_RC) + call ESMF_FieldGet(field,farrayPtr=ptr,_RC) + ptr = 0.0d0 + call ESMF_FieldHaloStore(field,rh,_RC) + + call ESMF_GridGetCoord(grid_ , coordDim=1, localDE=0, & + 
staggerloc=ESMF_STAGGERLOC_CENTER, & + farrayPtr=centerX, _RC) + + ptr(1:imc,1:jmc)=centerX + call ESMF_FieldHalo(field,rh, _RC) + lon_center = ptr + + call ESMF_GridGetCoord(grid_ , coordDim=2, localDE=0, & + staggerloc=ESMF_STAGGERLOC_CENTER, & + farrayPtr=centerY, _RC) + ptr = 0.0d0 + ptr(1:imc,1:jmc)=centerY + call ESMF_FieldHalo(field,rh, _RC) + lat_center = ptr + + lon_center = lon_center/MAPL_PI*180.0 + lat_center = lat_center/MAPL_PI*180.0 + where (lon_center < -180.) lon_center = lon_center + 360. + where (lon_center > 180.) lon_center = lon_center - 360. + call ESMF_FieldDestroy(field,_RC) + call ESMF_FieldHaloRelease(rh,_RC) + _RETURN(_SUCCESS) + end subroutine get_cube_centers + + subroutine get_latlon_centers(GC, lon_center, lat_center, rc) + type(ESMF_GridComp), intent(inout) :: GC ! Gridded component + real, allocatable, intent(out) :: lat_center(:), lon_center(:) + integer, optional, intent( out) :: RC + integer :: i1, i2, j1, j2, imc, jmc, i, j, status, DIMS(3) + real :: dlon, dlat + type (GigaTrajInternal), pointer :: GigaTrajInternalPtr + type (GigatrajInternalWrap) :: wrap + type(ESMF_Grid) :: grid_ + character(len=:), allocatable :: Iam + Iam="get_latlon_centers, latlon with halo" + + + call ESMF_UserCompGetInternalState(GC, 'GigaTrajInternal', wrap, status); _VERIFY(STATUS) + GigaTrajInternalPtr => wrap%ptr + grid_ = GigaTrajInternalPtr%LatLonGrid + call MAPL_GridGet(Grid_, globalCellCountPerDim=DIMS, _RC) + call MAPL_Grid_interior(Grid_, i1,i2,j1,j2) + imc = i2-i1 + 1 + jmc = j2-j1 + 1 + + allocate(lon_center(imc+2)) + allocate(lat_center(jmc+2)) + + dlon = 360.0/dims(1) + ! DE + !lons_center = [(dlon*(i-1)+dlon/2., i= i1-1, i2+1)] + ! DC + lon_center = [(dlon*(i-1) - 180.0 , i= i1-1, i2+1)] + !PE + !dlat = 180.0/dims(2) + !lats_center = [(-dlat/2. + dlat*j-90.0, j= j1-1, j2+1)] + !PC + dlat = 180.0/(dims(2)-1) ! PC + lat_center = [(-90.0 + (j-1)*dlat, j= j1-1, j2+1)] + where(lat_center <-90.) lat_center = -90. + where(lat_center >90. 
) lat_center = 90. + _RETURN(_SUCCESS) + end subroutine get_latlon_centers + +end module diff --git a/GEOSmkiau_GridComp/CMakeLists.txt b/GEOSmkiau_GridComp/CMakeLists.txt index 213c2776d..283c5981a 100644 --- a/GEOSmkiau_GridComp/CMakeLists.txt +++ b/GEOSmkiau_GridComp/CMakeLists.txt @@ -1,5 +1,7 @@ esma_set_this() +option(BUILD_PYMKIAU_INTERFACE "Build pyMKIAU interface" OFF) + set (srcs IAU_GridCompMod.F90 GEOS_mkiauGridComp.F90 @@ -8,6 +10,90 @@ set (srcs DynVec_GridComp.F90 ) -set(dependencies MAPL_cfio_r4 NCEP_sp_r4i4 GEOS_Shared GMAO_mpeu MAPL FVdycoreCubed_GridComp ESMF::ESMF NetCDF::NetCDF_Fortran) -esma_add_library (${this} SRCS ${srcs} DEPENDENCIES ${dependencies}) +if (BUILD_PYMKIAU_INTERFACE) + list (APPEND srcs + pyMKIAU/interface/interface.f90 + pyMKIAU/interface/interface.c) + + message(STATUS "Building pyMKIAU interface") + + add_definitions(-DPYMKIAU_INTEGRATION) + + # The Python library creation requires mpiexec/mpirun to run on a + # compute node. Probably a weird SLURM thing? 
+ find_package(Python3 COMPONENTS Interpreter REQUIRED) + + # Set up some variables in case names change + set(PYMKIAU_INTERFACE_LIBRARY ${CMAKE_CURRENT_BINARY_DIR}/libpyMKIAU_interface_py.so) + set(PYMKIAU_INTERFACE_HEADER_FILE ${CMAKE_CURRENT_BINARY_DIR}/pyMKIAU_interface_py.h) + set(PYMKIAU_INTERFACE_FLAG_HEADER_FILE ${CMAKE_CURRENT_SOURCE_DIR}/pyMKIAU/interface/interface.h) + set(PYMKIAU_INTERFACE_SRCS ${CMAKE_CURRENT_SOURCE_DIR}/pyMKIAU/interface/interface.py) + + # This command creates the shared object library from Python + add_custom_command( + OUTPUT ${PYMKIAU_INTERFACE_LIBRARY} + # Note below is essentially: + # mpirun -np 1 python file + # but we use the CMake options as much as we can for flexibility + COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PYMKIAU_INTERFACE_FLAG_HEADER_FILE} ${CMAKE_CURRENT_BINARY_DIR} + COMMAND ${MPIEXEC_EXECUTABLE} ${MPIEXEC_NUMPROC_FLAG} 1 ${Python3_EXECUTABLE} ${PYMKIAU_INTERFACE_SRCS} + BYPRODUCTS ${PYMKIAU_INTERFACE_HEADER_FILE} + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} + MAIN_DEPENDENCY ${PYMKIAU_INTERFACE_SRCS} + COMMENT "Building pyMKIAU interface library with Python" + VERBATIM + ) + + # This creates a target we can use for dependencies and post build + add_custom_target(generate_pyMKIAU_interface_library DEPENDS ${PYMKIAU_INTERFACE_LIBRARY}) + # Because of the weird hacking of INTERFACE libraries below, we cannot + # use the "usual" CMake calls to install() the .so. I think it's because + # INTERFACE libraries don't actually produce any artifacts as far as + # CMake is concerned. So we add a POST_BUILD custom command to "install" + # the library into install/lib + add_custom_command(TARGET generate_pyMKIAU_interface_library + POST_BUILD + # We first need to make a lib dir if it doesn't exist. 
If not, then + # the next command can copy the script into a *file* called lib because + # of a race condition (if install/lib/ isn't mkdir'd first) + COMMAND ${CMAKE_COMMAND} -E make_directory ${CMAKE_INSTALL_PREFIX}/lib + # Now we copy the file (if different...though not sure if this is useful) + COMMAND ${CMAKE_COMMAND} -E copy_if_different "${PYMKIAU_INTERFACE_LIBRARY}" ${CMAKE_INSTALL_PREFIX}/lib + ) + + # We use INTERFACE libraries to create a sort of "fake" target library we can use + # to make libFVdycoreCubed_GridComp.a depend on. It seems to work! + add_library(pyMKIAU_interface_py INTERFACE) + + # The target_include_directories bits were essentially stolen from the esma_add_library + # code... + target_include_directories(pyMKIAU_interface_py INTERFACE + $ + $ # stubs + # modules and copied *.h, *.inc + $ + $ + ) + target_link_libraries(pyMKIAU_interface_py INTERFACE ${PYMKIAU_INTERFACE_LIBRARY}) + + # This makes sure the library is built first + add_dependencies(pyMKIAU_interface_py generate_pyMKIAU_interface_library) + + # This bit is to resolve an issue and Google told me to do this. 
I'm not + # sure that the LIBRARY DESTINATION bit actually does anything since + # this is using INTERFACE + install(TARGETS pyMKIAU_interface_py + EXPORT ${PROJECT_NAME}-targets + LIBRARY DESTINATION ${CMAKE_INSTALL_PREFIX}/lib + ) + +endif () + +if (BUILD_PYMKIAU_INTERFACE) + set(dependencies pyMKIAU_interface_py MAPL_cfio_r4 NCEP_sp_r4i4 GEOS_Shared GMAO_mpeu MAPL FVdycoreCubed_GridComp ESMF::ESMF NetCDF::NetCDF_Fortran) +else () + set(dependencies MAPL_cfio_r4 NCEP_sp_r4i4 GEOS_Shared GMAO_mpeu MAPL FVdycoreCubed_GridComp ESMF::ESMF NetCDF::NetCDF_Fortran) +endif () + +esma_add_library (${this} SRCS ${srcs} DEPENDENCIES ${dependencies}) diff --git a/GEOSmkiau_GridComp/GEOS_mkiauGridComp.F90 b/GEOSmkiau_GridComp/GEOS_mkiauGridComp.F90 index 08c36a967..6ed4dbb9e 100644 --- a/GEOSmkiau_GridComp/GEOS_mkiauGridComp.F90 +++ b/GEOSmkiau_GridComp/GEOS_mkiauGridComp.F90 @@ -19,6 +19,10 @@ module GEOS_mkiauGridCompMod use GEOS_UtilsMod ! use GEOS_RemapMod, only: myremap => remap use m_set_eta, only: set_eta +#ifdef PYMKIAU_INTEGRATION + use pyMKIAU_interface_mod + use ieee_exceptions, only: ieee_get_halting_mode, ieee_set_halting_mode, ieee_all +#endif implicit none private @@ -91,8 +95,15 @@ subroutine SetServices ( GC, RC ) type (ESMF_Config) :: CF logical :: BLEND_AT_PBL - -!============================================================================= +#ifdef PYMKIAU_INTEGRATION + ! IEEE trapping see below + logical :: halting_mode(5) + ! BOGUS DATA TO SHOW USAGE + type(a_pod_struct_type) :: options + real, allocatable, dimension(:,:,:) :: in_buffer + real, allocatable, dimension(:,:,:) :: out_buffer +#endif + !============================================================================= ! Begin... @@ -459,6 +470,25 @@ subroutine SetServices ( GC, RC ) call MAPL_GenericSetServices ( gc, RC=STATUS) VERIFY_(STATUS) +#ifdef PYMKIAU_INTEGRATION + ! Spin the interface - we have to deactivate the ieee error + ! to be able to load numpy, scipy and other numpy packages + ! 
that generate NaN as an init mechanism for numerical solving + call ieee_get_halting_mode(ieee_all, halting_mode) + call ieee_set_halting_mode(ieee_all, .false.) + call pyMKIAU_interface_f_setservice() + call ieee_set_halting_mode(ieee_all, halting_mode) + + ! BOGUS CODE TO SHOW USAGE + options%npx = 10 + options%npy = 11 + options%npz = 12 + allocate (in_buffer(10,11,12), source = 42.42 ) + allocate (out_buffer(10,11,12), source = 0.0 ) + call pyMKIAU_interface_f_run(options, in_buffer, out_buffer) + write(*,*) "[pyMKIAU] From fortran OUT[5,5,5] is ", out_buffer(5,5,5) +#endif + RETURN_(ESMF_SUCCESS) end subroutine SetServices diff --git a/GEOSmkiau_GridComp/pyMKIAU/.gitignore b/GEOSmkiau_GridComp/pyMKIAU/.gitignore new file mode 100644 index 000000000..9ae227288 --- /dev/null +++ b/GEOSmkiau_GridComp/pyMKIAU/.gitignore @@ -0,0 +1,12 @@ +__pycache__/ +*.py[cod] +*$py.class +.pytest_cache +*.egg-info/ +test_data/ +.gt_cache_* +.translate-*/ +.vscode +test_data/ +sandbox/ +*.mod diff --git a/GEOSmkiau_GridComp/pyMKIAU/README.md b/GEOSmkiau_GridComp/pyMKIAU/README.md new file mode 100644 index 000000000..61f039520 --- /dev/null +++ b/GEOSmkiau_GridComp/pyMKIAU/README.md @@ -0,0 +1,40 @@ +# Fortran - Python bridge prototype + +Nomenclatura: we call the brige "fpy" and "c", "f" and "py" denotes functions in their respective language. + +Building: you have to pass `-DBUILD_PYMKIAU_INTERFACE=ON` to your `cmake` command to turn on the interface build and execution. + +## Pipeline + +Here's a quick rundown of how a buffer travels through the interface and back. + +- From Fortran in `GEOS_MKIAUGridComp:488` we call `pyMKIAU_interface_f_run` with the buffer passed as argument +- This pings the interface, located at `pyMKIAU/interface/interface.f90`. This interface uses the `iso_c_binding` to marshall the parameters downward (careful about the user type, look at the code) +- Fortran then call into C at `pyMKIAU/interface/interface.c`. 
Those functions now expect that a few `extern` hooks have been made available on the python side, they are define in `pyMKIAU/interface/interface.h` +- At runtime, the hooks are found and code carries to the python thanks to cffi. The .so that exposes the hooks is in `pyMKIAU/interface/interface.py`. Within this code, we: expose extern functions via `ffi.extern`, build a shared library to link for runtime and pass the code down to the `pyMKIAU` python package which lives at `pyMKIAU/pyMKIAU` +- In the package, the `serservices` or `run` function is called. + +## Fortran <--> C: iso_c_binding + +We leverage Fortan `iso_c_binding` extension to do conform Fortran and C calling structure. Which comes with a bunch of easy type casting and some pretty steep potholes. +The two big ones are: + +- strings need to be send/received as a buffer plus a length, +- pointers/buffers are _not_ able to be pushed into a user type. + +## C <->Python: CFFI based glue + +The interface is based on CFFI which is reponsible for the heavy lifting of + +- spinning a python interpreter +- passing memory between C and Python without a copy + +## Running python + +The last trick is to make sure your package is callable by the `interface.py`. Basically your code has to be accessible by the interpreter, be via virtual env, conda env or PYTHONPATH. 
+The easy way to know is that you need to be able to get into your environment and run in a python terminal: + +```python +from pyMKIAU.core import pyMKIAU_init +pyMKIAU_init() +``` diff --git a/GEOSmkiau_GridComp/pyMKIAU/interface/interface.c b/GEOSmkiau_GridComp/pyMKIAU/interface/interface.c new file mode 100644 index 000000000..28ebad972 --- /dev/null +++ b/GEOSmkiau_GridComp/pyMKIAU/interface/interface.c @@ -0,0 +1,31 @@ +#include +#include +#include "interface.h" + +extern int pyMKIAU_interface_c_setservice() +{ + // Check magic number + int return_code = pyMKIAU_interface_py_setservices(); + + if (return_code < 0) + { + exit(return_code); + } +} + +extern int pyMKIAU_interface_c_run(a_pod_struct_t *options, const float *in_buffer, float *out_buffer) +{ + // Check magic number + if (options->mn_123456789 != 123456789) + { + printf("Magic number failed, pyMKIAU interface is broken on the C side\n"); + exit(-1); + } + + int return_code = pyMKIAU_interface_py_run(options, in_buffer, out_buffer); + + if (return_code < 0) + { + exit(return_code); + } +} diff --git a/GEOSmkiau_GridComp/pyMKIAU/interface/interface.f90 b/GEOSmkiau_GridComp/pyMKIAU/interface/interface.f90 new file mode 100644 index 000000000..c94b4a06b --- /dev/null +++ b/GEOSmkiau_GridComp/pyMKIAU/interface/interface.f90 @@ -0,0 +1,43 @@ +module pyMKIAU_interface_mod + + use iso_c_binding, only: c_int, c_float, c_double, c_bool, c_ptr + + implicit none + + private + public :: pyMKIAU_interface_f_setservice, pyMKIAU_interface_f_run + public :: a_pod_struct_type + + !----------------------------------------------------------------------- + ! See `interface.h` for explanation of the POD-strict struct + !----------------------------------------------------------------------- + type, bind(c) :: a_pod_struct_type + integer(kind=c_int) :: npx + integer(kind=c_int) :: npy + integer(kind=c_int) :: npz + ! 
Magic number + integer(kind=c_int) :: make_flags_C_interop = 123456789 + end type + + + interface + + subroutine pyMKIAU_interface_f_setservice() bind(c, name='pyMKIAU_interface_c_setservice') + end subroutine pyMKIAU_interface_f_setservice + + subroutine pyMKIAU_interface_f_run(options, in_buffer, out_buffer) bind(c, name='pyMKIAU_interface_c_run') + + import c_float, a_pod_struct_type + + implicit none + ! This is an interface to a C function, the intent ARE NOT enforced + ! by the compiler. Consider them developer hints + type(a_pod_struct_type), intent(in) :: options + real(kind=c_float), dimension(*), intent(in) :: in_buffer + real(kind=c_float), dimension(*), intent(out) :: out_buffer + + end subroutine pyMKIAU_interface_f_run + + end interface + +end module pyMKIAU_interface_mod diff --git a/GEOSmkiau_GridComp/pyMKIAU/interface/interface.h b/GEOSmkiau_GridComp/pyMKIAU/interface/interface.h new file mode 100644 index 000000000..ce8dfb179 --- /dev/null +++ b/GEOSmkiau_GridComp/pyMKIAU/interface/interface.h @@ -0,0 +1,40 @@ +#pragma once + +/*** + * C Header for the interface to python. + * Define here any POD-strict structures and external functions + * that will get exported by cffi from python (see interface.py) + ***/ + +#include +#include + +// POD-strict structure to pack options and flags efficiently +// Struct CANNOT hold pointers. The iso_c_binding does not allow for foolproof +// pointer memory packing. +// We use the low-embedded trick of the magic number to attempt to catch +// any type mismatch betweeen Fortran and C. This is not a foolproof method +// but it bring a modicum of check at the cost of a single integer. 
+typedef struct +{ + int npx; + int npy; + int npz; + // Magic number needs to be last item + int mn_123456789; +} a_pod_struct_t; + +// For complex type that can be exported with different +// types (like the MPI communication object), you can rely on C `union` +typedef union +{ + int comm_int; + void *comm_ptr; +} MPI_Comm_t; + +// Python hook functions: defined as external so that the .so can link out to them +// Though we define `in_buffer` as a `const float*` it is _not_ enforced +// by the interface. Treat as a developer hint only. + +extern int pyMKIAU_interface_py_run(a_pod_struct_t *options, const float *in_buffer, float *out_buffer); +extern int pyMKIAU_interface_py_setservices(); diff --git a/GEOSmkiau_GridComp/pyMKIAU/interface/interface.py b/GEOSmkiau_GridComp/pyMKIAU/interface/interface.py new file mode 100644 index 000000000..c0bfc1c03 --- /dev/null +++ b/GEOSmkiau_GridComp/pyMKIAU/interface/interface.py @@ -0,0 +1,46 @@ +import cffi # type: ignore + +TMPFILEBASE = "pyMKIAU_interface_py" + +ffi = cffi.FFI() + +source = """ +from {} import ffi +from datetime import datetime +from pyMKIAU.core import pyMKIAU_init, pyMKIAU_run #< User code starts here +import traceback + +@ffi.def_extern() +def pyMKIAU_interface_py_setservices() -> int: + + try: + # Calling out off the bridge into the python + pyMKIAU_init() + except Exception as err: + print("Error in Python:") + print(traceback.format_exc()) + return -1 + return 0 + +@ffi.def_extern() +def pyMKIAU_interface_py_run(options, in_buffer, out_buffer) -> int: + + try: + # Calling out off the bridge into the python + pyMKIAU_run(options, in_buffer, out_buffer) + except Exception as err: + print("Error in Python:") + print(traceback.format_exc()) + return -1 + return 0 +""".format(TMPFILEBASE) + +with open("interface.h") as f: + data = "".join([line for line in f if not line.startswith("#")]) + data = data.replace("CFFI_DLLEXPORT", "") + ffi.embedding_api(data) + +ffi.set_source(TMPFILEBASE, '#include 
"interface.h"') + +ffi.embedding_init_code(source) +ffi.compile(target="lib" + TMPFILEBASE + ".so", verbose=True) diff --git a/GEOSmkiau_GridComp/pyMKIAU/pyMKIAU/__init__.py b/GEOSmkiau_GridComp/pyMKIAU/pyMKIAU/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/GEOSmkiau_GridComp/pyMKIAU/pyMKIAU/core.py b/GEOSmkiau_GridComp/pyMKIAU/pyMKIAU/core.py new file mode 100644 index 000000000..c3f9684d4 --- /dev/null +++ b/GEOSmkiau_GridComp/pyMKIAU/pyMKIAU/core.py @@ -0,0 +1,74 @@ +from _cffi_backend import _CDataBase as CFFIObj # type: ignore +import dataclasses +from pyMKIAU.f_py_conversion import FortranPythonConversion +from pyMKIAU.cuda_profiler import TimedCUDAProfiler +import numpy as np +from typing import Dict, List + + +@dataclasses.dataclass +class FPYOptions: + npx: int = 0 + npy: int = 0 + npz: int = 0 + mn_123456789: int = 0 + + +def options_fortran_to_python( + f_options: CFFIObj, +) -> FPYOptions: + if f_options.mn_123456789 != 123456789: # type:ignore + raise RuntimeError( + "Magic number failed, pyMoist interface is broken on the python side" + ) + + py_flags = FPYOptions() + keys = list(filter(lambda k: not k.startswith("__"), dir(type(py_flags)))) + for k in keys: + if hasattr(f_options, k): + setattr(py_flags, k, getattr(f_options, k)) + return py_flags + + +F_PY_MEMORY_CONV = None + + +def pyMKIAU_init(): + print("[pyMKIAU] Init called") + + +def pyMKIAU_run( + f_options: CFFIObj, + f_in_buffer: CFFIObj, + f_out_buffer: CFFIObj, +): + print("[pyMKIAU] Run called") + options = options_fortran_to_python(f_options) + print(f"[pyMKIAU] Options: {options}") + + # Dev Note: this should be done better in its own class + # and the `np` should be driven by the user code requirements + # for GPU or CPU memory + global F_PY_MEMORY_CONV + if F_PY_MEMORY_CONV is None: + F_PY_MEMORY_CONV = FortranPythonConversion( + options.npx, + options.npy, + options.npz, + np, + ) + + # Move memory into a manipulable numpy array + in_buffer = 
F_PY_MEMORY_CONV.fortran_to_python(f_in_buffer) + out_buffer = F_PY_MEMORY_CONV.fortran_to_python(f_out_buffer) + + # Here goes math and dragons + timings: Dict[str, List[float]] = {} + with TimedCUDAProfiler("pyMKIAU bogus math", timings): + out_buffer[:, :, :] = in_buffer[:, :, :] * 2 + + print(f"[pyMKIAU] At 5,5,5 in python OUT is: {out_buffer[5,5,5]}") + print(f"[pyMKIAU] Timers: {timings}") + + # Go back to fortran + F_PY_MEMORY_CONV.python_to_fortran(out_buffer, f_out_buffer) diff --git a/GEOSmkiau_GridComp/pyMKIAU/pyMKIAU/cuda_profiler.py b/GEOSmkiau_GridComp/pyMKIAU/pyMKIAU/cuda_profiler.py new file mode 100644 index 000000000..5a6e41a71 --- /dev/null +++ b/GEOSmkiau_GridComp/pyMKIAU/pyMKIAU/cuda_profiler.py @@ -0,0 +1,76 @@ +import time +from typing import Dict, List + + +# Conditional cupy import for non-GPU machines +try: + import cupy as cp +except ModuleNotFoundError: + cp = None + +# Run a deviceSynchronize() to check +# that the GPU is present and ready to run +if cp is not None: + try: + cp.cuda.runtime.deviceSynchronize() + GPU_AVAILABLE = True + except cp.cuda.runtime.CUDARuntimeError: + GPU_AVAILABLE = False +else: + GPU_AVAILABLE = False + + +class CUDAProfiler: + """Leverages NVTX & NSYS to profile CUDA kernels.""" + + def __init__(self, label: str) -> None: + self.label = label + + def __enter__(self): + if GPU_AVAILABLE: + cp.cuda.runtime.deviceSynchronize() + cp.cuda.nvtx.RangePush(self.label) + + def __exit__(self, _type, _val, _traceback): + if GPU_AVAILABLE: + cp.cuda.runtime.deviceSynchronize() + cp.cuda.nvtx.RangePop() + + @classmethod + def sync_device(cls): + if GPU_AVAILABLE: + cp.cuda.runtime.deviceSynchronize() + + @classmethod + def start_cuda_profiler(cls): + if GPU_AVAILABLE: + cp.cuda.profiler.start() + + @classmethod + def stop_cuda_profiler(cls): + if GPU_AVAILABLE: + cp.cuda.profiler.stop() + + @classmethod + def mark_cuda_profiler(cls, message: str): + if GPU_AVAILABLE: + cp.cuda.nvtx.Mark(message) + + +class 
TimedCUDAProfiler(CUDAProfiler): + def __init__(self, label: str, timings: Dict[str, List[float]]) -> None: + super().__init__(label) + self._start_time = 0 + self._timings = timings + + def __enter__(self): + super().__enter__() + self._start_time = time.perf_counter() + + def __exit__(self, _type, _val, _traceback): + super().__exit__(_type, _val, _traceback) + t = time.perf_counter() - self._start_time + if self.label not in self._timings: + self._timings[self.label] = [t] + else: + self._timings[self.label].append(t) diff --git a/GEOSmkiau_GridComp/pyMKIAU/pyMKIAU/f_py_conversion.py b/GEOSmkiau_GridComp/pyMKIAU/pyMKIAU/f_py_conversion.py new file mode 100644 index 000000000..47a17e731 --- /dev/null +++ b/GEOSmkiau_GridComp/pyMKIAU/pyMKIAU/f_py_conversion.py @@ -0,0 +1,219 @@ +from math import prod +from types import ModuleType +from typing import List, Optional, Tuple, TypeAlias + +import cffi +import numpy as np + +# Conditional cupy import for non-GPU machines +try: + import cupy as cp +except ModuleNotFoundError: + cp = None + + +# Dev note: we would like to use cp.ndarray for Device and +# Union of np and cp ndarray for Python but we can't +# because cp might not be importable! 
+DeviceArray: TypeAlias = np.ndarray +PythonArray: TypeAlias = np.ndarray + +# Default floating point cast +BaseFloat = np.float32 + + +class NullStream: + def __init__(self): + pass + + def synchronize(self): + pass + + def __enter__(self): + pass + + def __exit__(self, exc_type, exc_value, traceback): + pass + + +class FortranPythonConversion: + """ + Convert Fortran arrays to NumPy and vice-versa + """ + + def __init__( + self, + npx: int, + npy: int, + npz: int, + numpy_module: ModuleType, + ): + # Python numpy-like module is given by the caller leaving + # optional control of upload/download in the case + # of GPU/CPU system + self._target_np = numpy_module + + # Device parameters + # Pace targets gpu: we want the Pace layout to be on device + self._python_targets_gpu = self._target_np == cp + if self._python_targets_gpu: + self._stream_A = cp.cuda.Stream(non_blocking=True) + self._stream_B = cp.cuda.Stream(non_blocking=True) + else: + self._stream_A = NullStream() + self._stream_B = NullStream() + self._current_stream = self._stream_A + + # Layout & indexing + self._npx, self._npy, self._npz = npx, npy, npz + + # cffi init + self._ffi = cffi.FFI() + self._TYPEMAP = { + "float": np.float32, + "double": np.float64, + "int": np.int32, + } + + def device_sync(self): + """Synchronize the working CUDA streams""" + self._stream_A.synchronize() + self._stream_B.synchronize() + + def _fortran_to_numpy( + self, + fptr: "cffi.FFI.CData", + dim: Optional[List[int]] = None, + ) -> np.ndarray: + """ + Input: Fortran data pointed to by fptr and of shape dim = (i, j, k) + Output: C-ordered double precision NumPy data of shape (i, j, k) + """ + if not dim: + dim = [self._npx, self._npy, self._npz] + ftype = self._ffi.getctype(self._ffi.typeof(fptr).item) + assert ftype in self._TYPEMAP + return np.frombuffer( + self._ffi.buffer(fptr, prod(dim) * self._ffi.sizeof(ftype)), + self._TYPEMAP[ftype], + ) + + def _upload_and_transform( + self, + host_array: np.ndarray, + dim: 
Optional[List[int]] = None, + swap_axes: Optional[Tuple[int, int]] = None, + ) -> DeviceArray: + """Upload to device & transform to Pace compatible layout""" + with self._current_stream: + device_array = cp.asarray(host_array) + final_array = self._transform_from_fortran_layout( + device_array, + dim, + swap_axes, + ) + self._current_stream = ( + self._stream_A + if self._current_stream == self._stream_B + else self._stream_B + ) + return final_array + + def _transform_from_fortran_layout( + self, + array: PythonArray, + dim: Optional[List[int]] = None, + swap_axes: Optional[Tuple[int, int]] = None, + ) -> PythonArray: + """Transform from Fortran layout into a Pace compatible layout""" + if not dim: + dim = [self._npx, self._npy, self._npz] + trf_array = array.reshape(tuple(reversed(dim))).transpose().astype(BaseFloat) + if swap_axes: + trf_array = self._target_np.swapaxes( + trf_array, + swap_axes[0], + swap_axes[1], + ) + return trf_array + + def fortran_to_python( + self, + fptr: "cffi.FFI.CData", + dim: Optional[List[int]] = None, + swap_axes: Optional[Tuple[int, int]] = None, + ) -> PythonArray: + """Move fortran memory into python space""" + np_array = self._fortran_to_numpy(fptr, dim) + if self._python_targets_gpu: + return self._upload_and_transform(np_array, dim, swap_axes) + else: + return self._transform_from_fortran_layout( + np_array, + dim, + swap_axes, + ) + + def _transform_and_download( + self, + device_array: DeviceArray, + dtype: type, + swap_axes: Optional[Tuple[int, int]] = None, + ) -> np.ndarray: + with self._current_stream: + if swap_axes: + device_array = cp.swapaxes( + device_array, + swap_axes[0], + swap_axes[1], + ) + host_array = cp.asnumpy( + device_array.astype(dtype).flatten(order="F"), + ) + self._current_stream = ( + self._stream_A + if self._current_stream == self._stream_B + else self._stream_B + ) + return host_array + + def _transform_from_python_layout( + self, + array: PythonArray, + dtype: type, + swap_axes: 
Optional[Tuple[int, int]] = None, + ) -> np.ndarray: + """Copy back a numpy array in python layout to Fortran""" + + if self._python_targets_gpu: + numpy_array = self._transform_and_download(array, dtype, swap_axes) + else: + numpy_array = array.astype(dtype).flatten(order="F") + if swap_axes: + numpy_array = np.swapaxes( + numpy_array, + swap_axes[0], + swap_axes[1], + ) + return numpy_array + + def python_to_fortran( + self, + array: PythonArray, + fptr: "cffi.FFI.CData", + ptr_offset: int = 0, + swap_axes: Optional[Tuple[int, int]] = None, + ) -> np.ndarray: + """ + Input: Python-layout array of shape (i, j, k) + Output: data copied back into the Fortran memory pointed to by fptr + """ + ftype = self._ffi.getctype(self._ffi.typeof(fptr).item) + assert ftype in self._TYPEMAP + dtype = self._TYPEMAP[ftype] + numpy_array = self._transform_from_python_layout( + array, + dtype, + swap_axes, + ) + self._ffi.memmove(fptr + ptr_offset, numpy_array, 4 * numpy_array.size) diff --git a/GEOSmkiau_GridComp/pyMKIAU/setup.py b/GEOSmkiau_GridComp/pyMKIAU/setup.py new file mode 100644 index 000000000..851e0b1b6 --- /dev/null +++ b/GEOSmkiau_GridComp/pyMKIAU/setup.py @@ -0,0 +1,33 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +"""pyMKIAU - python sub-component of GEOS MKIAU.""" + +from setuptools import find_namespace_packages, setup + + +with open("README.md", encoding="utf-8") as readme_file: + readme = readme_file.read() + +setup( + author="NASA", + python_requires=">=3.11", + classifiers=[ + "Development Status :: 2 - Pre-Alpha", + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache 2 License", + "Natural Language :: English", + "Programming Language :: Python :: 3.11", + ], + description=("pyMKIAU - python sub-component of GEOS MKIAU."), + install_requires=[], + extras_require={}, + long_description=readme, + include_package_data=True, + name="pyMKIAU", + packages=find_namespace_packages(include=["pyMKIAU", "pyMKIAU.*"]), + 
setup_requires=[], + url="https://github.com/GEOS-ESM/GEOSgcm_GridComp", + version="0.0.0", + zip_safe=False, +) diff --git a/GEOSogcm_GridComp/GEOSseaice_GridComp/CICE_GEOSPlug/CICE_GEOSPlug.F90 b/GEOSogcm_GridComp/GEOSseaice_GridComp/CICE_GEOSPlug/CICE_GEOSPlug.F90 index 46e2af25b..18f714055 100644 --- a/GEOSogcm_GridComp/GEOSseaice_GridComp/CICE_GEOSPlug/CICE_GEOSPlug.F90 +++ b/GEOSogcm_GridComp/GEOSseaice_GridComp/CICE_GEOSPlug/CICE_GEOSPlug.F90 @@ -94,7 +94,7 @@ subroutine SetServices ( GC, RC ) call MAPL_GridCompSetEntryPoint ( GC, ESMF_METHOD_RUN, Run, _RC) call MAPL_GridCompSetEntryPoint ( GC, ESMF_METHOD_FINALIZE, Finalize, _RC) call MAPL_GridCompSetEntryPoint ( GC, ESMF_METHOD_WRITERESTART, Record, _RC) - call MAPL_GridCompSetEntryPoint ( GC, ESMF_METHOD_READRESTART, Refresh, _RC) + call MAPL_GridCompSetEntryPoint ( GC, MAPL_METHOD_REFRESH, Refresh, _RC) ! Set the state variable specs. ! ----------------------------- @@ -933,6 +933,8 @@ subroutine Record ( GC, IMPORT, EXPORT, CLOCK, RC ) type(MAPL_MetaComp), pointer :: MAPL ! ErrLog Variables + character(len=ESMF_MAXSTR) :: IAm + integer :: STATUS character(len=ESMF_MAXSTR) :: COMP_NAME @@ -940,7 +942,7 @@ subroutine Record ( GC, IMPORT, EXPORT, CLOCK, RC ) character(len=14) :: timeStamp logical :: doRecord - __Iam__('Record') + Iam = "Record" ! Get the target components name and set-up traceback handle. ! ----------------------------------------------------------- @@ -1003,18 +1005,18 @@ subroutine Refresh ( GC, IMPORT, EXPORT, CLOCK, RC ) integer, optional, intent( OUT) :: RC ! Error code !EOP - type(MAPL_MetaComp), pointer :: MAPL ! ErrLog Variables - + character(len=ESMF_MAXSTR) :: IAm + integer :: STATUS character(len=ESMF_MAXSTR) :: COMP_NAME ! Locals character(len=14) :: timeStamp logical :: doRecord - __Iam__('Restore') + IAm = "Restore" ! Get the target components name and set-up traceback handle. ! -----------------------------------------------------------