Multiplatform builds #36

Open · wants to merge 22 commits into base: main
2 changes: 1 addition & 1 deletion .github/workflows/first_pull_request.yml
@@ -10,7 +10,7 @@ jobs:
name: Welcome
runs-on: ubuntu-latest
steps:
- uses: actions/github-script@v6
- uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
script: |
// Get a list of all issues created by the PR opener
131 changes: 100 additions & 31 deletions .github/workflows/testing.yml
@@ -15,48 +15,117 @@ jobs:
run:
shell: bash
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Harden Runner
uses: step-security/harden-runner@17d0e2bd7d51742c71671bd19fa12bdc9d40a3d6 # v2.8.1
with:
egress-policy: block
allowed-endpoints: >
azure.archive.ubuntu.com:80
esm.ubuntu.com:443
files.pythonhosted.org:443
github.com:443
motd.ubuntu.com:443
packages.microsoft.com:443
pypi.org:443

- name: Checkout Repository
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7

- name: Install linters
run: |
sudo apt-get update
sudo apt-get install uncrustify cppcheck iwyu
- uses: actions/setup-python@v4
- name: Install pre-commit

- name: Set up Python3
uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0
with:
python-version: '3.x'

- name: Run pre-commit hooks
uses: pre-commit/action@2c7b3805fd2a0fd8c1884dcaebf91fc102a13ecd # v3.0.1

build-netcdf:
name: Build (${{ matrix.build_type }}; ${{ matrix.os }}; NetCDF=True)
needs: lint
runs-on: ubuntu-latest
strategy:
matrix:
os: [ ubuntu-latest, macos-latest ]
build_type: [ Debug, Release ]
steps:
- name: Harden Runner
uses: step-security/harden-runner@17d0e2bd7d51742c71671bd19fa12bdc9d40a3d6 # v2.8.1
with:
disable-sudo: true
egress-policy: block
allowed-endpoints: >
azure.archive.ubuntu.com:80
esm.ubuntu.com:443
github.com:443
motd.ubuntu.com:443
packages.microsoft.com:443
raw.githubusercontent.com:443

- name: Checkout Repository
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7

- name: Fetch NetCDF CMake script
run: |
python -m pip install --upgrade pip
python -m pip install pre-commit
- name: Run pre-commit
wget https://raw.githubusercontent.com/Kitware/VTK/master/CMake/FindNetCDF.cmake -P cmake

- name: Build
id: build
continue-on-error: true
run: |
cmake .
cmake --build . --config ${{ matrix.build_type }} -j 3

- name: Run tests
if: steps.build.outcome == 'success'
run: |
pre-commit run --all-files
./Raven
./Raven -v

build:
name: Build RavenHydroFramework binary (${{ matrix.build_type }})
name: Build (${{ matrix.build_type }}; ${{ matrix.os }}; NetCDF=False)
needs: lint
runs-on: ubuntu-latest
strategy:
matrix:
build_type: [Debug, Release]
os: [ ubuntu-latest, macos-latest, windows-latest ]
build_type: [ Debug, Release ]
defaults:
run:
shell: bash
shell: bash -l {0}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Install dependencies
run: |
sudo apt-get update
sudo apt-get install libnetcdf-dev build-essential cmake
- name: Fetch NetCDF CMake script
run: |
wget https://raw.githubusercontent.com/Kitware/VTK/master/CMake/FindNetCDF.cmake -P cmake
- name: Build
id: build
continue-on-error: true
run: |
cmake . -DCMAKE_BUILD_TYPE=${{ matrix.build_type }}
make -j4
- name: Run tests
if: steps.build.outcome == 'success'
run: |
./Raven
./Raven -v
- name: Harden Runner
uses: step-security/harden-runner@17d0e2bd7d51742c71671bd19fa12bdc9d40a3d6 # v2.8.1
with:
disable-sudo: true
egress-policy: block
allowed-endpoints: >
azure.archive.ubuntu.com:80
esm.ubuntu.com:443
github.com:443
motd.ubuntu.com:443
objects.githubusercontent.com:443
packages.microsoft.com:443

- name: Checkout Repository
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7

- name: Install dependencies (Windows)
if: matrix.os == 'windows-latest'
uses: lukka/get-cmake@2bcb1a4c14ab154443cc740dced0f9b6a8fb2b59 # v3.29.6

- name: Build
id: build
continue-on-error: true
run: |
cmake .
cmake --build . --config ${{ matrix.build_type }} -j 3

- name: Run tests
run: |
./Raven
./Raven -v
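
For reviewers who want to try the new matrix job locally, the build and test commands above can be run directly from a checkout. This is a minimal sketch, assuming CMake and a C++ toolchain are already installed; the binary name, flags, and parallelism are taken from the workflow.

```bash
# Minimal local reproduction of the "Build (...; NetCDF=False)" job above.
# Assumes CMake and a C++ compiler are on the PATH (an assumption, not part of the PR).
cmake .                                  # configure in-tree, as the workflow does
cmake --build . --config Release -j 3    # Release or Debug, matching the matrix build_type
./Raven                                  # run the binary, as the workflow's test step does
./Raven -v                               # same invocation with the -v flag used in CI
```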
7 changes: 4 additions & 3 deletions CMakeLists.txt
@@ -18,6 +18,7 @@ list(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake")
# Find NetCDF
find_package(NetCDF) #may also be 'netCDF'
find_package(netCDF)

# find header & source
file(GLOB HEADER "src/*.h")
file(GLOB SOURCE "src/*.cpp")
@@ -54,9 +55,6 @@ if(COMPILE_EXE)
endif()
endif()

source_group("Header Files" FILES ${HEADER})
source_group("Source Files" FILES ${SOURCE})

if(NETCDF_FOUND)
add_definitions(-Dnetcdf)
include_directories(${NetCDF_INCLUDE_DIRS})
@@ -67,6 +65,9 @@ elseif(netCDF_FOUND)
target_link_libraries(Raven netcdf)
endif()

source_group("Header Files" FILES ${HEADER})
source_group("Source Files" FILES ${SOURCE})

# unset cmake variables to avoid polluting the cache
unset(COMPILE_LIB CACHE)
unset(COMPILE_EXE CACHE)
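
The build-netcdf job in the workflow above pairs with these find_package(NetCDF) / find_package(netCDF) calls. A minimal Ubuntu-only sketch of that configuration follows; the libnetcdf-dev package name comes from the previous version of the workflow, and the FindNetCDF.cmake URL from the new job, so treat it as an assumption rather than the exact CI recipe.

```bash
# Hypothetical local NetCDF-enabled configure on Ubuntu, mirroring the CI steps.
sudo apt-get update
sudo apt-get install -y libnetcdf-dev    # system NetCDF headers and library
# Fetch the FindNetCDF module the CI downloads into the cmake/ directory.
wget https://raw.githubusercontent.com/Kitware/VTK/master/CMake/FindNetCDF.cmake -P cmake
cmake .                                  # find_package(NetCDF) should now locate NetCDF
cmake --build . --config Debug -j 3
```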
14 changes: 7 additions & 7 deletions src/DemandExpressionHandling.cpp
@@ -749,7 +749,7 @@ bool CDemandOptimizer::ConvertToExpressionTerm(const string s, expressionTerm* t
//----------------------------------------------------------------------
else if (GetWorkflowVariable(s,index) != RAV_BLANK_DATA) // workflow variable
{

term->type=TERM_WORKFLOW;
term->value=GetWorkflowVariable(s,index);// initially zero
term->DV_ind=index;
@@ -937,13 +937,13 @@ expressionStruct *CDemandOptimizer::ParseExpression(const char **s,
return tmp;
}
//////////////////////////////////////////////////////////////////
/// \brief Parses :Condition within management goal or workflow variable definition
/// \brief Parses :Condition within management goal or workflow variable definition
/// \param s [in] - array of strings of [size: Len]
/// \param Len [in] - length of string array
/// \param lineno [in] - line number of original expression in input file filename, referenced in errors
/// \param filename [in] - name of input file, referenced in errors
/// \returns exp_condition: a pointer to an expression condition variable
///
///
/// \todo[funct]: Would it be better to support @date(), @between, @day_of_year() in general expression??
/// :Condition !Q32[0] < 300 + @ts(myTs,0)
/// :Condition DATE IS_BETWEEN 1975-01-02 and 2010-01-02
@@ -957,13 +957,13 @@ expressionStruct *CDemandOptimizer::ParseExpression(const char **s,
/// :Condition DAY_OF_YEAR IS_BETWEEN Apr-1 Aug-1 //\todo [NOT YET SUPPORTED]
/// :Condition @is_between(DAY_OF_YEAR,300,20) = 1 // \todo [NOT YET SUPPORTED]
//
exp_condition* CDemandOptimizer::ParseCondition(const char** s, const int Len, const int lineno, const string filename) const
exp_condition* CDemandOptimizer::ParseCondition(const char** s, const int Len, const int lineno, const string filename) const
{
bool badcond=false;
exp_condition *pCond = new exp_condition();
pCond->dv_name=s[1];
const optStruct *Options=_pModel->GetOptStruct();

bool is_exp=false;
for (int i = 2; i < Len; i++) {
if ((s[i][0]=='+') || (s[i][0]=='-') || (s[i][0]=='*') || (s[i][0]=='/') || (s[i][0]=='=') || (s[i][0]=='<') || (s[i][0]=='>')){
@@ -1000,7 +1000,7 @@ exp_condition* CDemandOptimizer::ParseCondition(const char** s, const int Len, c
char tmp =pCond->dv_name[1];
string tmp2=pCond->dv_name.substr(2);
char code=pCond->dv_name[1];
if ((code=='Q') || (code=='h') || (code=='I')) //subbasin state decision variable
if ((code=='Q') || (code=='h') || (code=='I')) //subbasin state decision variable
{
long SBID=s_to_l(tmp2.c_str());
if (_pModel->GetSubBasinByID(SBID) == NULL) {
@@ -1122,7 +1122,7 @@ bool CDemandOptimizer::CheckOpRegimeConditions(const op_regime *pOperRegime, con
}
else {//handle user specified DVs and workflow variables
int i=GetUserDVIndex(pCond->dv_name);
if (i != DOESNT_EXIST) //decision variable
if (i != DOESNT_EXIST) //decision variable
{
dv_value =_pDecisionVars[i]->value;
}
6 changes: 3 additions & 3 deletions src/DemandOptimization.cpp
@@ -316,7 +316,7 @@ void CDemandOptimizer::AddDecisionVar(const decision_var* pDV)
//////////////////////////////////////////////////////////////////
/// \brief disables stage discharge curve handling for reservoir in subbasin p
//
void CDemandOptimizer::OverrideSDCurve(const int p)
void CDemandOptimizer::OverrideSDCurve(const int p)
{
_aDisableSDCurve[p]=true;
}
@@ -417,7 +417,7 @@ bool CDemandOptimizer::VariableNameExists(const string &name) const
for (int i = 0; i < _nUserConstants; i++) {
if (_aUserConstNames[i]==name){return true;}
}

if (GetUnitConversion(name)!=RAV_BLANK_DATA){return true;}

return false;
@@ -877,7 +877,7 @@ void CDemandOptimizer::InitializePostRVMRead(CModel* pModel, const optStruct& Op
cout<<" "<<i<<" [WORKFLOWVAR]: "<<_pWorkflowVars[i]->name<<endl;
for (int k=0; k<_pWorkflowVars[i]->nOperRegimes; k++)
{

cout<<" +oper regime: "<<_pWorkflowVars[i]->pOperRegimes[k]->reg_name<<endl;
cout<<" +expression: "<<_pWorkflowVars[i]->pOperRegimes[k]->pExpression->origexp<<endl;
comparison ctype=_pWorkflowVars[i]->pOperRegimes[k]->pExpression->compare;
4 changes: 2 additions & 2 deletions src/OrographicCorrections.cpp
@@ -399,8 +399,8 @@ void CModel::CorrectPET(const optStruct &Options,
if(iSnow!=DOESNT_EXIST) { SWE =pHRU->GetStateVarValue(iSnow); }
if(iSC !=DOESNT_EXIST) { snow_cov=pHRU->GetStateVarValue(iSC); }

if(SWE>0.1) {
F.PET *=(1.0-snow_cov);
if(SWE>0.1) {
F.PET *=(1.0-snow_cov);
F.OW_PET*=(1.0-snow_cov);
}
}
18 changes: 9 additions & 9 deletions src/ParseHRUFile.cpp
@@ -47,8 +47,8 @@ bool ParseHRUPropsFile(CModel *&pModel, const optStruct &Options, bool terrain_r

ifstream INPUT2; //For Secondary input
CParser* pMainParser=NULL; //for storage of main parser while reading secondary files
ifstream INPUT3; //For tertiary input
CParser *pSecondaryParser=NULL; //for storage of secondary parser while reading tertiary files
ifstream INPUT3; //For tertiary input
CParser *pSecondaryParser=NULL; //for storage of secondary parser while reading tertiary files

if (Options.noisy){
cout <<"======================================================"<<endl;
@@ -120,26 +120,26 @@ if (pSecondaryParser != NULL){
if (pSecondaryParser != NULL){
ExitGracefully("ParseEnsembleFile::nested :RedirectToFile commands are not allowed to be nested more than two levels (e.g., rvm file to rvm file to rvm file to rvm file)",BAD_DATA);
}
if (pMainParser == NULL) { //from base .rvh file
INPUT2.open(filename.c_str());
if (pMainParser == NULL) { //from base .rvh file
INPUT2.open(filename.c_str());
if(INPUT2.fail()) {
string warn;
warn=":RedirectToFile (from .rvh): Cannot find file "+filename;
ExitGracefully(warn.c_str(),BAD_DATA);
}
pMainParser=pp;
pMainParser=pp;
pp=new CParser(INPUT2,filename,line);//open new parser
}
else { //from already redirected .rvh file
INPUT3.open(filename.c_str());
}
else { //from already redirected .rvh file
INPUT3.open(filename.c_str());
if(INPUT3.fail()) {
string warn;
warn=":RedirectToFile (from .rvh): Cannot find file "+filename;
ExitGracefully(warn.c_str(),BAD_DATA);
}
pSecondaryParser=pp;
pp=new CParser(INPUT3,filename,line);//open new parser
}
}
break;
}
case(-4): //----------------------------------------------