diff --git a/.github/workflows/bash_code_analysis.yaml b/.github/workflows/bash_code_analysis.yaml
index 4e0559031cc..7e57dc7a317 100644
--- a/.github/workflows/bash_code_analysis.yaml
+++ b/.github/workflows/bash_code_analysis.yaml
@@ -3,7 +3,7 @@ on:
push:
branches:
- develop
- pull_request:
+ pull_request_target: # safe: shfmt/shellcheck only parse the checked-out code, they never execute it
workflow_dispatch:
@@ -19,19 +19,35 @@ jobs:
checks: write
steps:
+ - name: determine hash
+ uses: haya14busa/action-cond@v1
+ id: hash
+ with:
+ cond: ${{ github.event_name == 'pull_request_target' }}
+ if_true: ${{ github.event.pull_request.head.sha }}
+ if_false: ''
+
- name: checkout code
uses: actions/checkout@v6
with:
+ ref: ${{ steps.hash.outputs.value }}
submodules: false
+ - name: determine reporter
+ uses: haya14busa/action-cond@v1
+ id: reporter
+ with:
+ cond: ${{ github.event_name == 'pull_request_target' }}
+ if_true: 'github-pr-review'
+ if_false: 'github-check'
+
- name: shfmt scan
uses: reviewdog/action-shfmt@v1
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
filter_mode: nofilter
- fail_level: any
level: any
- reviewdog_flags: '-reporter=github-pr-review'
+ reviewdog_flags: '-reporter=${{ steps.reporter.outputs.value }} -fail-level=any'
shfmt_flags: ''
shellcheck:
@@ -44,16 +60,33 @@ jobs:
checks: write
steps:
+ - name: determine hash
+ uses: haya14busa/action-cond@v1
+ id: hash
+ with:
+ cond: ${{ github.event_name == 'pull_request_target' }}
+ if_true: ${{ github.event.pull_request.head.sha }}
+ if_false: ''
+
- name: checkout code
uses: actions/checkout@v6
with:
+ ref: ${{ steps.hash.outputs.value }}
submodules: false
-
+
+ - name: determine reporter
+ uses: haya14busa/action-cond@v1
+ id: reporter
+ with:
+ cond: ${{ github.event_name == 'pull_request_target' }}
+ if_true: 'github-pr-review'
+ if_false: 'github-check'
+
- name: shellcheck scan
uses: reviewdog/action-shellcheck@v1
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
- reporter: github-pr-review
+ reporter: ${{ steps.reporter.outputs.value }}
filter_mode: nofilter
fail_level: any
level: any
diff --git a/README.md b/README.md
index 96f4b99e91f..84f84b8bf4a 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
[](http://global-workflow.readthedocs.io/)
-[](https://github.com/NOAA-EMC/global-workflow/actions/workflows/linters.yaml)
-[](https://github.com/NOAA-EMC/global-workflow/actions/workflows/pynorms.yaml)
+[](https://github.com/NOAA-EMC/global-workflow/actions?query=workflow%3Abash_code_analysis+event%3Apush+branch%3Adevelop)
+[](https://github.com/NOAA-EMC/global-workflow/actions?query=workflow%3Apython_code_analysis+event%3Apush+branch%3Adevelop)


@@ -28,12 +28,12 @@ The Global Workflow currently supports the following machines at the indicated t
| HPC | Tier | Notes |
| --------------------------------------- |:----:|:--------------------------------------------------------------------------:|
-| WCOSS2
NCO | 1 | GEFS testing is not regularly performed. |
-| Ursa
NOAA RDHPCS | 1 | METplus verification and vminmon GSI-monitor jobs and GCAFS system not supported yet. |
-| Hercules
MSU | 1 | Currently does not support the TC Tracker. |
+| WCOSS2
NCO | 1 | |
+| Ursa
NOAA RDHPCS | 1 | |
+| Hercules
MSU | 1 | |
| Gaea C6
RDHPCS | 1 | |
| Hera
NOAA RDHPCS | 2 | |
-| Orion
MSU | 2 | The GSI runs very slowly on Orion and the TC tracker is not supported. |
+| Orion
MSU | 2 | The GSI runs very slowly. |
| AWS, GCP, Azure
NOAA Parallel Works | 3 | Supported by EPIC. |
**Tier Definitions**
diff --git a/dev/ci/cases/pr/C96_gcafs_cycled.yaml b/dev/ci/cases/pr/C96_gcafs_cycled.yaml
index a51d4277648..3a261feb198 100644
--- a/dev/ci/cases/pr/C96_gcafs_cycled.yaml
+++ b/dev/ci/cases/pr/C96_gcafs_cycled.yaml
@@ -17,7 +17,6 @@ experiment:
skip_ci_on_hosts:
- gaeac5
- awsepicglobalworkflow
- - ursa
workflow:
engine: rocoto
diff --git a/dev/ci/cases/pr/C96_gcafs_cycled_noDA.yaml b/dev/ci/cases/pr/C96_gcafs_cycled_noDA.yaml
index 8a816eb850d..dbe69605e7f 100644
--- a/dev/ci/cases/pr/C96_gcafs_cycled_noDA.yaml
+++ b/dev/ci/cases/pr/C96_gcafs_cycled_noDA.yaml
@@ -19,7 +19,6 @@ skip_ci_on_hosts:
- gaeac5
- hercules
- awsepicglobalworkflow
- - ursa
workflow:
engine: rocoto
diff --git a/dev/ci/cases/pr/C96mx025_S2S.yaml b/dev/ci/cases/sfsv1/C96mx025_S2S.yaml
similarity index 100%
rename from dev/ci/cases/pr/C96mx025_S2S.yaml
rename to dev/ci/cases/sfsv1/C96mx025_S2S.yaml
diff --git a/dev/ci/cases/sfs/C96mx100_S2S.yaml b/dev/ci/cases/sfsv1/C96mx100_S2S.yaml
similarity index 100%
rename from dev/ci/cases/sfs/C96mx100_S2S.yaml
rename to dev/ci/cases/sfsv1/C96mx100_S2S.yaml
diff --git a/dev/ci/gitlab-ci-hosts.yml b/dev/ci/gitlab-ci-hosts.yml
index 222776f96ea..34a67cd5948 100644
--- a/dev/ci/gitlab-ci-hosts.yml
+++ b/dev/ci/gitlab-ci-hosts.yml
@@ -22,19 +22,19 @@
# Template matrices for case lists
.hera_cases_matrix: &hera_cases
- - caseName: ["C48_ATM", "C48_S2SW", "C48_S2SWA_gefs", "C48mx500_3DVarAOWCDA", "C48mx500_hybAOWCDA", "C96C48_hybatmDA", "C96C48_hybatmsnowDA", "C96C48_hybatmsoilDA", "C96C48_ufsgsi_hybatmDA", "C96C48_ufs_hybatmDA", "C96C48mx500_S2SW_cyc_gfs", "C96_atm3DVar", "C96_gcafs_cycled", "C96_gcafs_cycled_noDA", "C96mx100_S2S", "C96mx025_S2S"]
+ - caseName: ["C48_ATM", "C48_S2SW", "C48_S2SWA_gefs", "C48mx500_3DVarAOWCDA", "C48mx500_hybAOWCDA", "C96C48_hybatmDA", "C96C48_hybatmsnowDA", "C96C48_hybatmsoilDA", "C96C48_ufsgsi_hybatmDA", "C96C48_ufs_hybatmDA", "C96C48mx500_S2SW_cyc_gfs", "C96_atm3DVar", "C96_gcafs_cycled", "C96_gcafs_cycled_noDA", "C96mx100_S2S"]
.gaeac6_cases_matrix: &gaeac6_cases
- - caseName: ["C48_ATM", "C48_S2SW", "C48_S2SWA_gefs", "C48mx500_3DVarAOWCDA", "C48mx500_hybAOWCDA", "C96C48_hybatmDA", "C96C48_hybatmsnowDA", "C96C48_hybatmsoilDA", "C96C48mx500_S2SW_cyc_gfs", "C96_atm3DVar", "C96_gcafs_cycled", "C96_gcafs_cycled_noDA", "C96mx100_S2S", "C96mx025_S2S"]
+ - caseName: ["C48_ATM", "C48_S2SW", "C48_S2SWA_gefs", "C48mx500_3DVarAOWCDA", "C48mx500_hybAOWCDA", "C96C48_hybatmDA", "C96C48_hybatmsnowDA", "C96C48_hybatmsoilDA", "C96C48mx500_S2SW_cyc_gfs", "C96_atm3DVar", "C96_gcafs_cycled", "C96_gcafs_cycled_noDA", "C96mx100_S2S"]
.orion_cases_matrix: &orion_cases
- - caseName: ["C48_ATM", "C48_S2SW", "C48_S2SWA_gefs", "C96C48_hybatmDA", "C96C48mx500_S2SW_cyc_gfs", "C96_atm3DVar", "C96mx100_S2S", "C96_gcafs_cycled", "C96mx025_S2S"]
+ - caseName: ["C48_ATM", "C48_S2SW", "C48_S2SWA_gefs", "C96C48_hybatmDA", "C96C48mx500_S2SW_cyc_gfs", "C96_atm3DVar", "C96mx100_S2S", "C96_gcafs_cycled"]
.hercules_cases_matrix: &hercules_cases
- - caseName: ["C48_ATM", "C48_S2SW", "C48_S2SWA_gefs", "C48mx500_3DVarAOWCDA", "C48mx500_hybAOWCDA", "C96C48_hybatmDA", "C96C48mx500_S2SW_cyc_gfs", "C96_atm3DVar", "C96mx100_S2S", "C96_gcafs_cycled", "C96mx025_S2S"]
+ - caseName: ["C48_ATM", "C48_S2SW", "C48_S2SWA_gefs", "C48mx500_3DVarAOWCDA", "C48mx500_hybAOWCDA", "C96C48_hybatmDA", "C96C48mx500_S2SW_cyc_gfs", "C96_atm3DVar", "C96mx100_S2S", "C96_gcafs_cycled"]
.ursa_cases_matrix: &ursa_cases
- - caseName: ["C48_ATM", "C48_S2SW", "C48_S2SWA_gefs", "C48mx500_3DVarAOWCDA", "C48mx500_hybAOWCDA", "C96C48_hybatmDA", "C96C48_hybatmsnowDA", "C96C48_hybatmsoilDA", "C96C48_ufsgsi_hybatmDA", "C96C48_ufs_hybatmDA", "C96C48mx500_S2SW_cyc_gfs", "C96_atm3DVar", "C96mx100_S2S", "C96mx025_S2S"]
+ - caseName: ["C48_ATM", "C48_S2SW", "C48_S2SWA_gefs", "C48mx500_3DVarAOWCDA", "C48mx500_hybAOWCDA", "C96C48_hybatmDA", "C96C48_hybatmsnowDA", "C96C48_hybatmsoilDA", "C96C48_ufsgsi_hybatmDA", "C96C48_ufs_hybatmDA", "C96C48mx500_S2SW_cyc_gfs", "C96_atm3DVar", "C96mx100_S2S", "C96_gcafs_cycled", "C96_gcafs_cycled_noDA"]
# Host: Hera - Standard Cases
setup_experiments-hera:
@@ -277,7 +277,7 @@ setup_ctests-hera:
setup_ctests-gaeac6:
extends: .setup_ctests_template
- stage: setup_tests
+ stage: setup_tests
tags:
- gaeac6
variables:
diff --git a/docs/source/hpc.rst b/docs/source/hpc.rst
index f1b25fd7835..eb7d943d50d 100644
--- a/docs/source/hpc.rst
+++ b/docs/source/hpc.rst
@@ -97,7 +97,7 @@ The Global Workflow provides capabilities for deterministic and ensemble forecas
- 1
- X
- X
- -
+ - X
- X
- X
- X
diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py
index f6534b1b7df..1dfd668b5cc 100644
--- a/ush/python/pygfs/task/atmens_analysis.py
+++ b/ush/python/pygfs/task/atmens_analysis.py
@@ -88,7 +88,6 @@ def initialize(self) -> None:
# initialize JEDI applications
logger.info(f"Initializing JEDI LETKF observer application")
self.jedi_dict['atmensanlobs'].initialize(clean_empty_obsspaces=True)
- self.jedi_dict['atmensanlsol'].initialize()
self.jedi_dict['atmensanlfv3inc'].initialize()
@logit(logger)
@@ -123,6 +122,11 @@ def execute(self, jedi_dict_key: str) -> None:
None
"""
+ # Initialize solver immediately before execution so that obs space files are
+ # available for cleaning after running the observer
+ if jedi_dict_key == 'atmensanlsol':
+ self.jedi_dict['atmensanlsol'].initialize(clean_empty_obsspaces=True)
+
self.jedi_dict[jedi_dict_key].execute()
@logit(logger)