diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 0000000..65a4f44 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,40 @@ +# Pull Request Template + +## Description + +Please include a summary of the change and which issue is fixed. Please also include relevant motivation and context. List any dependencies that are required for this change. + +Fixes # (issue) + +## Type of change + +Please delete options that are not relevant. + +- [ ] Bug fix (non-breaking change which fixes an issue) +- [ ] New feature (non-breaking change which adds functionality) +- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected) +- [ ] This change requires a documentation update + +## How Has This Been Tested? + +Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. Please also list any relevant details for your test configuration + +## Additional Context (Please include any Screenshots/gifs if relevant) + +... + +## Checklist: + +- [ ] My code follows the style guidelines of this project +- [ ] I have performed a self-review of my own code +- [ ] I have commented my code, particularly in hard-to-understand areas +- [ ] I have made corresponding changes to the documentation +- [ ] My changes generate no new warnings +- [ ] I have added tests that prove my fix is effective or that my feature works +- [ ] New and existing unit tests pass locally with my changes +- [ ] Any dependent changes have been merged and published in downstream modules +- [ ] I have checked my code and corrected any misspellings +- [ ] I have tagged the reviewers in a comment below incase my pull request is ready for a review +- [ ] I have signed the commit message to agree to Developer Certificate of Origin (DCO) (to certify that you wrote or otherwise have the right to submit your contribution to the project.) 
by adding "--signoff" to my git commit command. + + diff --git a/.github/config.yml b/.github/config.yml new file mode 100644 index 0000000..a0b2e87 --- /dev/null +++ b/.github/config.yml @@ -0,0 +1,49 @@ +# Configuration for welcome - https://github.com/behaviorbot/welcome + +# Configuration for new-issue-welcome - https://github.com/behaviorbot/new-issue-welcome + +# Comment to be posted to on first time issues +newIssueWelcomeComment: | + Hello there!πŸ‘‹ Welcome to moja global!πŸ’– + + Thank you and congratulations πŸŽ‰ for opening your very first issue in this project. + Moja global fosters an open and welcoming environment for al our contributors.🌸 Please adhere to our [Code Of Conduct](https://github.com/moja-global/About_moja_global/blob/master/CODE_OF_CONDUCT.md). + + Incase you want to claim this issue, please comment down below! + Please checkout if any of the already existing issue template fits for your isssue or you may open a blank issue as well ✨ + We will try to get back to you as soon as we can.πŸ‘€ + + Feel free to join us on [moja global Private Slack](https://mojaglobal.slack.com/) by dropping an email [here](mailto:info@moja.global).πŸ‘©β€πŸ’» We would love to hear your interesting ideas and engage in discussions.πŸ’– + Moja global is delighted to have you here :) + +# Configuration for new-pr-welcome - https://github.com/behaviorbot/new-pr-welcome + +# Comment to be posted to on PRs from first time contributors in your repository +newPRWelcomeComment: | + Hello there!πŸ‘‹ Welcome to moja global!πŸ’– + Thank you and congrats πŸŽ‰ for opening your first PR on this project.✨ + We will review it soon! Till then you can checkout the `README.md` for more details on it. + + Moja global fosters an open and welcoming environment for al our contributors.🌸 Please adhere to our [Code Of Conduct](https://github.com/moja-global/About_moja_global/blob/master/CODE_OF_CONDUCT.md). 
+ + Feel free to join us on [moja global Private Slack](https://mojaglobal.slack.com/) by dropping an email [here](mailto:info@moja.global).πŸ‘©β€πŸ’» We would love to hear your interesting ideas and engage in discussions.πŸ’– + Moja global is delighted to have you here :) + +# Configuration for first-pr-merge - https://github.com/behaviorbot/first-pr-merge + +# Comment to be posted to on pull requests merged by a first time user +firstPRMergeComment: | + Congratulations on merging your first contribution to moja global!✨ + Your code is officially a part of moja global now!! πŸŽ‰ Please feel free to add yourself as a contributor by following these steps [here](https://github.com/moja-global/About_moja_global/blob/master/CONTRIBUTING.md#how-to-get-credit-for-your-contribution).πŸ™Œ + + Feel free to join us on [moja global Private Slack](https://mojaglobal.slack.com/) by dropping an email [here](mailto:info@moja.global).πŸ‘©β€πŸ’» We would love to hear your interesting ideas and engage in discussions.πŸ’– + +
+ Now that you've completed this, you can help someone else take their first step! +

Help a newcomer with their first pull request by providing feedback! This is their first time too, so be as encouraging as possible!😄

+

Help them set up this project and resolve their queries! 🌸

+

Create a welcoming or beginner-friendly issue for someone else! 😄 🎉

+
+ + +# It is recommended to include as many gifs and emojis as possible! diff --git a/Docker/Dockerfile.base.ubuntu.18.04 b/Docker/Dockerfile.base.ubuntu.18.04 index fb74bc4..58eb49e 100644 --- a/Docker/Dockerfile.base.ubuntu.18.04 +++ b/Docker/Dockerfile.base.ubuntu.18.04 @@ -50,7 +50,6 @@ RUN wget https://github.com/Kitware/CMake/releases/download/v${CMAKE_VERSION}/cm && tar xzf cmake-${CMAKE_VERSION}.tar.gz \ && cd cmake-${CMAKE_VERSION} \ && ./bootstrap --system-curl --parallel=$NUM_CPU \ - && .$NUM_CPU \ && make --quiet install \ && make clean \ && cd .. diff --git a/Docker/Dockerfile.flint.ubuntu.18.04 b/Docker/Dockerfile.flint.ubuntu.18.04 index 9d1c720..7d48014 100644 --- a/Docker/Dockerfile.flint.ubuntu.18.04 +++ b/Docker/Dockerfile.flint.ubuntu.18.04 @@ -1,22 +1,21 @@ # ================================================================================================================== # -# Docker to ubuntu 16.04 image for Moja flint libraries and executables +# Docker to ubuntu 18.04 image for Moja flint libraries and executables # # Building this Docker: -# docker build -f Dockerfile.flint.ubuntu.18.04 --build-arg NUM_CPU=4 --build-arg FLINT_BRANCH=develop -t moja/flint:ubuntu-18.04 . +# docker build -f Dockerfile.flint.ubuntu.18.04 --build_arg BUILD_TYPE=RELEASE --build-arg NUM_CPU=4 --build-arg FLINT_BRANCH=develop -t moja/flint:ubuntu-18.04 . 
# # ================================================================================================================== -#FROM moja/baseimage:ubuntu-18.04 -FROM moja/baseimage:mg +FROM moja/baseimage:ubuntu-18.04 LABEL maintainer="info@moja.global" ARG FLINT_BRANCH ARG NUM_CPU=1 ARG DEBIAN_FRONTEND=noninteractive +ARG BUILD_TYPE=DEBUG -ENV ZIPPER_VERSION 1.0.1 ENV ROOTDIR /usr/local WORKDIR $ROOTDIR/src @@ -43,11 +42,10 @@ RUN git clone --recursive https://github.com/sebastiandev/zipper.git \ && make clean \ && cd $ROOTDIR/src - # GET moja.global RUN git clone --recursive --depth 1 -b ${FLINT_BRANCH} https://github.com/moja-global/FLINT.git flint \ && mkdir -p flint/Source/build && cd flint/Source/build \ - && cmake -DCMAKE_BUILD_TYPE=DEBUG \ + && cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE \ -DCMAKE_INSTALL_PREFIX=$ROOTDIR \ -DENABLE_TESTS:BOOL=OFF \ -DENABLE_MOJA.MODULES.GDAL=ON \ @@ -56,7 +54,6 @@ RUN git clone --recursive --depth 1 -b ${FLINT_BRANCH} https://github.com/moja-g -DBUILD_SHARED_LIBS=ON .. 
\ && make --quiet -j $NUM_CPU \ && make --quiet install \ -# && make clean \ && cd $ROOTDIR/src RUN ln -s $ROOTDIR/lib/libmoja.modules.* $ROOTDIR/bin diff --git a/README.md b/README.md index a9caee3..81df1ce 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # FLINT Open-source Library -[![All Contributors](https://img.shields.io/badge/all_contributors-5-orange.svg?style=flat-square)](#contributors) +[![All Contributors](https://img.shields.io/badge/all_contributors-5-orange.svg?style=flat-square)](#contributors) [![License: MPL 2.0](https://img.shields.io/badge/License-MPL%202.0-brightgreen.svg)](https://opensource.org/licenses/MPL-2.0) [![Code Of Conduct](https://img.shields.io/badge/code--of--conduct-Moja%20Global-blue)](https://github.com/moja-global/About_moja_global/blob/master/CODE_OF_CONDUCT.md) [![Contributions](https://img.shields.io/badge/newcomer-friendly-red)](https://github.com/moja-global/About_moja_global/blob/master/CONTRIBUTING.md) [![Chat on Slack](https://img.shields.io/badge/chat-slack-blueviolet)](https://mojaglobal.slack.com/) [![Twitter Handle](https://img.shields.io/badge/twitter-Moja%20Global-darkblue)](https://twitter.com/mojaglobal?lang=en) ## What is `FLINT`? @@ -15,146 +15,40 @@ The FLINT makes developing and operating advanced systems achievable by all coun The FLINT is using the lessons learned from first generation tools, to build a new framework that meets present and future needs. 
The key improvements compared to the first generation tools include: + a flexible, modular approach, allowing for country specific implementations from [IPCC Tier 1 to 3 ](https://www.reddcompass.org/mgd-content-v1/dita-webhelp/en/Box1.html) + support for [wall-to-wall, sample based, jurisdictional, and supply chain approaches](https://static1.squarespace.com/static/5896200f414fb57d26f3d600/t/59362b028419c2db8f57e747/1496722191543/REDD_nested_projects.pdf) -+ the ability to cover all [land uses and land use changes, and activity-based reporting such as REDD+](https://theredddesk.org/what-redd) ++ the ability to cover all [land uses and land use changes, and activity-based reporting such as REDD+](https://unfccc.int/topics/land-use/workstreams/redd/what-is-redd) + scenario analysis systems to allow the development of projections + the potential to be used for multiple other purposes, including economics, water and biodiversity -+ development managed using a true open source approach under [moja global](http://moja.global), which will allow users (countries, companies and organizations) to direct strategy and control the budget. ++ development managed using a true open source approach under [moja global](https://moja.global), which will allow users (countries, companies and organizations) to direct strategy and control the budget. + software that allows data processing on local desktops or cloud-based systems +## Installation Docs -## How to use FLINT? +Please checkout the [moja global developer docs](https://docs.moja.global) for complete instructions on how to setup the repository. You may also refer this documentation for setting up [FLINT.example](https://docs.moja.global/en/latest/DevelopmentSetup/FLINT.example_installation.html), [GCBM](https://docs.moja.global/en/latest/GCBMDevelopmentSetup/index.html) or just get an idea of the moja global workflow! 
You may also suggest an improvement in the current docs by creating an issue [here](hhttps://github.com/moja-global/GSoD.moja_global_docs). -### Development Environment How-To for Windows +## Installation Videos -These instructions are for building the FLINT on Windows using Visual Studio 2017, or Visual Studio 2019. +We also have a set of installation videos to help you out with the installation. If you prefer video installation procedure as opposed to textual documentation, this will be a perfect starter for you! +1. [FLINT Core on Visual Studio 2019](https://www.youtube.com/watch?v=BmHltWrxCTY&t=9s) +2. [FLINT Example (RothC model) on Visual Studio](https://www.youtube.com/watch?v=Jfi2-vEhfkg) +3. [FLINT Example (Chapman Richards model) on Visual Studio](https://www.youtube.com/watch?v=JFTyeZQbPjI) +4. [FLINT Docker on Ubuntu 20.04](https://www.youtube.com/watch?v=eiCPhv-SRNc) -#### Required Installs - -##### CMake - -- download [cmake-3.15.2-win64-x64.msi](https://github.com/Kitware/CMake/releases/download/v3.15.2/cmake-3.15.2-win64-x64.msi) - -#### Using vcpkg to install required libraries - -A fork of a *Vcpkg* repository has been created for the FLINT required libraries. To build these libraries you can use the following process: - -+ Clone the Vcpkg repository: https://github.com/moja-global/vcpkg - -+ Start a command shell in the Vcpkg repository folder and use the following commands: - - ```powershell - # bootstrap - bootstrap-vcpkg.bat - - # install packages - vcpkg.exe install boost-test:x64-windows boost-program-options:x64-windows boost-log:x64-windows turtle:x64-windows zipper:x64-windows poco:x64-windows libpq:x64-windows gdal:x64-windows sqlite3:x64-windows boost-ublas:x64-windows fmt:x64-windows - ``` - -+ Once this has completed, start a command shell in you FLINT repository folder. 
Now use the following commands to create the Visual Studio solution: - - ```powershell - # Create a build folder under the Source folder - cd Source - mkdir build - cd build - - # now create the Visual Studio Solution (2019) - cmake -G "Visual Studio 16 2019" -DCMAKE_INSTALL_PREFIX=C:/Development/Software/moja -DVCPKG_TARGET_TRIPLET=x64-windows -DENABLE_TESTS=OFF -DENABLE_MOJA.MODULES.ZIPPER=OFF -DCMAKE_TOOLCHAIN_FILE=c:\Development\moja-global\vcpkg\scripts\buildsystems\vcpkg.cmake .. - - # OR Visual Studio Solution (2017) - cmake -G "Visual Studio 15 2017" -DCMAKE_INSTALL_PREFIX=C:/Development/Software/moja -DVCPKG_TARGET_TRIPLET=x64-windows -DENABLE_TESTS=OFF -DENABLE_MOJA.MODULES.ZIPPER=OFF -DCMAKE_TOOLCHAIN_FILE=c:\Development\moja-global\vcpkg\scripts\buildsystems\vcpkg.cmake .. - ``` - -#### Install Moja Libraries - -It is possible to use the Visual Studio moja solution to install built versions of the Moja libraries. To do this you need to set the CMAKE variable '***CMAKE_INSTALL_PREFIX***' to your install path (i.e. "*C:/Development/Software/moja*"). - -#### Make edits to the Visual Studio Solution using CMake - -1. Launch the CMake GUI -2. In the '*Where to build the binaries*' field click β€œBrowse Build…” and select the folder you created above (i.e. `C:\Development\moja-global\FLINT\Source\build`)`. The '*Where is the source code:*' field should update, if not, set it correctly. -4. You should be able to edit any CMake setting now (i.e. ENABLE flags like `ENABLE_TESTS`), then click β€œ***Configure***” – assuming all libraries and required software has been installed you should have no errors. Now click ***"Generate"*** and the Solution with adjustments should be ready to load into Visual Studio. 
- -#### Other Useful Tools - -##### SQLIte Studio - -a simple windows SQLite database manager (http://sqlitestudio.pl/) -[sqlitestudio-3.1.0.zip](http://sqlitestudio.pl/files/sqlitestudio3/complete/win32/sqlitestudio-3.1.0.zip) - -##### TortoiseGit - -[TortoiseGit](https://code.google.com/p/tortoisegit/wiki/Download) - -### Docker for Ubuntu 18:04 - -Containers are a simple way to build FLINT and all required dependencies. Examples of how this can be done are provided for Ubuntu 18.04. See the [Examples docker directory.](https://github.com/moja-global/flint/tree/master/Examples/docker) - -#### Building the containers - -The build has been split into two Dockerfiles, the first to get and build required libraries. The second to get and build the moja FLINT libraries and CLI program. - -```bash -# working from the examples folder "flint/tree/master/Examples/docker" - -# build the base -docker build -f Dockerfile.base.ubuntu.18.04 --build-arg NUM_CPU=4 -t moja/baseimage:ubuntu-18.04 . - -# build the flint container -docker build -f Dockerfile.flint.ubuntu.18.04 --build-arg NUM_CPU=4 --build-arg FLINT_BRANCH=master -t moja/flint:ubuntu-18.04 . - -docker build -f Dockerfile.flint.ubuntu.18.04 --build-arg NUM_CPU=4 --build-arg GITHUB_AT=XXXX --build-arg FLINT_BRANCH=master -t moja/flint:ubuntu-18.04 . -``` - -How to use the final container depends on the task. However, the following command will bash into the flint container and allow you to use the CLI program. 
- -```bash -# run bash on the flint container -docker run --rm -ti moja/flint:ubuntu-18.04 bash -``` - -Once in, you should be able to run the CLI program `moja.cli` - -``` -# run CLI -moja.cli --help -``` - -That should respond: - -``` -Allowed options: - -General options: - -h [ --help ] produce a help message - --help-section arg produce a help message for a named section - -v [ --version ] output the version number - -Commandline only options: - --logging_config arg path to Moja logging config file - --config_file arg path to Moja run config file - --provider_file arg path to Moja data provider config file - -Configuration file options: - --config arg path to Moja project config files - --config_provider arg path to Moja project config files for data providers -``` +To learn more, you can also visit the [moja global youtube channel](https://www.youtube.com/channel/UCfQUrrNP1Xf-Fv4c8uHYXhQ). ## How to Get Involved? -moja global welcomes a wide range of contributions as explained in [Contributing document](https://github.com/moja-global/About-moja-global/blob/master/CONTRIBUTING.md) and in the [About moja-global Wiki](https://github.com/moja-global/.github/wiki). - +moja global welcomes a wide range of contributions as explained in [Contributing document](https://docs.moja.global/en/latest/contributing/index.html). ## FAQ and Other Questions -* You can find FAQs on the [Wiki](https://github.com/moja.global/.github/wiki). +* You can find FAQs on the [FAQs section of our docs](https://docs.moja.global/en/latest/faq.html). 
* If you have a question about the code, submit [user feedback](https://github.com/moja-global/About-moja-global/blob/master/Contributing/How-to-Provide-User-Feedback.md) in the relevant repository -* If you have a general question about a project or repository or moja global, [join moja global](https://github.com/moja-global/About-moja-global/blob/master/Contributing/How-to-Join-moja-global.md) and +* If you have a general question about a project or repository or moja global, [join moja global](https://docs.moja.global/en/latest/contact.html) and * [submit a discussion](https://help.github.com/en/articles/about-team-discussions) to the project, repository or moja global [team](https://github.com/orgs/moja-global/teams) - * [submit a message](https://get.slack.help/hc/en-us/categories/200111606#send-messages) to the relevant channel on [moja global's Slack workspace](mojaglobal.slack.com). + * [submit a message](https://get.slack.help/hc/en-us/categories/200111606#send-messages) to the relevant channel on [moja global's Slack workspace](https://mojaglobal.slack.com). 
* If you have other questions, please write to info@moja.global - + ## Contributors @@ -176,4 +70,3 @@ The following people are Maintainers of this repository **Reviewers** check proposed changes before they go to the Maintainers **Ambassadors** are available to provide training related to this repository **Coaches** are available to provide information to new contributors to this repository - diff --git a/Source/CMakeLists.txt b/Source/CMakeLists.txt index bd0f7f4..69c7ac7 100644 --- a/Source/CMakeLists.txt +++ b/Source/CMakeLists.txt @@ -11,6 +11,10 @@ set(CMAKE_CXX_EXTENSIONS OFF) #turn on parallel builds add_compile_options($<$:/MP>) +set(CMAKE_CXX_STANDARD 17) +set(CMAKE_CXX_STANDARD_REQUIRED ON) +set(CMAKE_CXX_EXTENSIONS OFF) + set(MOJA_VERSION_MAJOR "1") set(MOJA_VERSION_MINOR "0") set(MOJA_VERSION_PATCH "0") diff --git a/Source/moja.cli/src/moja.cpp b/Source/moja.cli/src/moja.cpp index 2c5584f..79f33bb 100644 --- a/Source/moja.cli/src/moja.cpp +++ b/Source/moja.cli/src/moja.cpp @@ -21,6 +21,7 @@ #include #include #include +#include static constexpr const char* CLI_VERSION_STRING = "flint cli version 1.0.0"; @@ -41,7 +42,13 @@ bool checkFilePath(const std::string& filePath) { return true; } +void handleOutOfMemory() { + MOJA_LOG_FATAL << "Failed to allocate memory"; + std::abort(); +} + int main(int argc, char* argv[]) { + std::set_new_handler(handleOutOfMemory); opt::options_description general_opt("General options"); general_opt.add_options() diff --git a/Source/moja.datarepository/src/providerrelationalsqlite.cpp b/Source/moja.datarepository/src/providerrelationalsqlite.cpp index ce31ba8..ba15231 100644 --- a/Source/moja.datarepository/src/providerrelationalsqlite.cpp +++ b/Source/moja.datarepository/src/providerrelationalsqlite.cpp @@ -31,7 +31,7 @@ class SQLiteConnection { } } - if (sqlite3_open(path.c_str(), &_conn) != SQLITE_OK) { + if (sqlite3_open_v2(path.c_str(), &_conn, SQLITE_OPEN_READONLY | SQLITE_OPEN_NOMUTEX, 0) != SQLITE_OK) { 
BOOST_THROW_EXCEPTION(ConnectionFailedException() << ConnectionError(sqlite3_errmsg(_conn))); } @@ -127,11 +127,9 @@ class ProviderRelationalSQLite::impl { std::vector result; const int nCols = stmt.n_cols(); - std::vector columnTypes(nCols); std::vector columnNames(nCols); for (int i = 0; i < nCols; i++) { columnNames[i] = stmt.column_name(i); - columnTypes[i] = stmt.column_type(i); } for (; resultCode == SQLITE_ROW || resultCode == SQLITE_BUSY || resultCode == SQLITE_LOCKED || @@ -140,7 +138,7 @@ class ProviderRelationalSQLite::impl { DynamicObject row; for (int i = 0; i < nCols; i++) { - switch (columnTypes[i]) { + switch (stmt.column_type(i)) { case SQLiteStatement::ColumnType::DOUBLE: { row.insert(columnNames[i], stmt.column_double(i)); } break; diff --git a/Source/moja.datarepository/src/rasterreader.cpp b/Source/moja.datarepository/src/rasterreader.cpp index 14fa2bd..fbfb3f9 100644 --- a/Source/moja.datarepository/src/rasterreader.cpp +++ b/Source/moja.datarepository/src/rasterreader.cpp @@ -26,13 +26,14 @@ namespace datarepository { FlintMetaDataRasterReader::FlintMetaDataRasterReader(const std::string& path, const std::string& prefix, const DynamicObject& settings) : MetaDataRasterReaderInterface(path, prefix, settings) { - _metaPath = (boost::format("%1%%2%%3%.json") % path % Poco::Path::separator() % prefix).str(); + auto filePath = Poco::Path(path); + auto abs = filePath.absolute().toString(); + _metaPath = (boost::format("%1%%2%%3%.json") % abs % Poco::Path::separator() % prefix).str(); } DynamicObject FlintMetaDataRasterReader::readMetaData() const { if (file_exists(_metaPath)) { Poco::JSON::Parser jsonMetadataParser; - // Poco::Dynamic::Var parsedMetadata; std::ifstream metadataFile(_metaPath, std::ifstream::in); jsonMetadataParser.parse(metadataFile); diff --git a/Source/moja.flint/include/moja/flint/aggregatoruncertainty.h b/Source/moja.flint/include/moja/flint/aggregatoruncertainty.h index eacddf9..0eb7b4c 100644 --- 
a/Source/moja.flint/include/moja/flint/aggregatoruncertainty.h +++ b/Source/moja.flint/include/moja/flint/aggregatoruncertainty.h @@ -56,6 +56,7 @@ class FLINT_API AggregatorUncertainty : public ModuleBase { bool module_info_on_; bool aggregate_sink_and_source_; bool aggregate_stock_; + bool clear_fluxes_after_recording_; // -- Land Unit level Collections // -- these collections will be kep for each land unit and put into greater aggregation on LU success. diff --git a/Source/moja.flint/include/moja/flint/aggregatoruncertaintylandunit.h b/Source/moja.flint/include/moja/flint/aggregatoruncertaintylandunit.h index 4971a77..6283af5 100644 --- a/Source/moja.flint/include/moja/flint/aggregatoruncertaintylandunit.h +++ b/Source/moja.flint/include/moja/flint/aggregatoruncertaintylandunit.h @@ -27,6 +27,7 @@ class FLINT_API AggregatorUncertaintyLandUnitSharedData { bool cell_index_on; bool do_stock; bool output_month_12_only; + bool clear_fluxes_after_recording; }; typedef Poco::Tuple runStatDataRecord; diff --git a/Source/moja.flint/include/moja/flint/outputerstreamflux.h b/Source/moja.flint/include/moja/flint/outputerstreamflux.h index b3fe790..f3d891d 100644 --- a/Source/moja.flint/include/moja/flint/outputerstreamflux.h +++ b/Source/moja.flint/include/moja/flint/outputerstreamflux.h @@ -28,6 +28,7 @@ class FLINT_API OutputerStreamFlux : public ModuleBase { void onSystemShutdown() override; void onTimingPostInit() override; void onTimingEndStep() override; + void onPostDisturbanceEvent() override; protected: std::string _fileName; diff --git a/Source/moja.flint/include/moja/flint/recordaccumulator.h b/Source/moja.flint/include/moja/flint/recordaccumulator.h index 8c5ad02..80f8474 100644 --- a/Source/moja.flint/include/moja/flint/recordaccumulator.h +++ b/Source/moja.flint/include/moja/flint/recordaccumulator.h @@ -8,7 +8,11 @@ #include +#include +#include #include +#include +#include namespace moja { namespace flint { @@ -175,28 +179,34 @@ class RecordAccumulator2 { 
std::vector getPersistableCollection() const { std::vector persistables; + persistables.reserve(_records.size()); for (const auto& record : _records) { - persistables.push_back(record.asPersistable()); + persistables.emplace_back(record.asPersistable()); } return persistables; } - // std::vector getPersistableCollection(size_t startIndex, size_t chunkSize) const { - // std::vector persistables; - // if (startIndex > _records.size()) - // return persistables; - // size_t chunkPosition = 0; - // for (auto it = _records.begin() + startIndex; it != _records.end() && chunkPosition++ < chunkSize; ++it) { - // persistables.push_back(record->asPersistable()); - // } - // return persistables; - //} + std::vector getPersistableCollectionRange(typename rec_accu_vec::const_iterator& rangeStart, + size_t chunkSize) const { + std::vector persistables; + persistables.reserve((std::min)(_records.size(), chunkSize)); + size_t chunkPosition = 0; + for (; (rangeStart != _records.end() && chunkPosition++ < chunkSize); ++rangeStart) { + persistables.emplace_back((*rangeStart).asPersistable()); + } + return persistables; + } void clear() { _recordsIdx.clear(); _records.clear(); } + void shrink_to_fit() { + _recordsIdx.clear(); + _records.shrink_to_fit(); + } + rec_accu_size_type size() const { return _records.size(); } const rec_accu_vec& records() const { return _records; } @@ -207,6 +217,173 @@ class RecordAccumulator2 { rec_accu_vec _records; }; +template +class list_of_persistables { + using vec_type = std::vector; + const vec_type& accumulator_vec_; + + public: + template + class persistables_iterator { + using iterator_type = std::conditional_t; + iterator_type iterator_current_; + iterator_type iterator_end_; + TPersistable record_{}; + + public: + explicit persistables_iterator(iterator_type iterator_begin, iterator_type iterator_end) + : iterator_current_{iterator_begin}, iterator_end_(iterator_end) { + if (iterator_current_ != iterator_end_) { + const auto& record = 
*iterator_current_; + record_ = record.asPersistable(); + } + } + using difference_type = std::ptrdiff_t; + using value_type = TPersistable; + using pointer = std::conditional_t; + using reference = std::conditional_t; + using iterator_category = std::forward_iterator_tag; + + reference operator*() const { return record_; } + pointer operator->() const { return &record_; } + + auto& operator++() { + ++iterator_current_; + if (iterator_current_ != iterator_end_) { + const auto& record = *iterator_current_; + record_ = record.asPersistable(); + } + return *this; + } + + auto operator++(int) { + auto result = *this; + ++*this; + return result; + } + // Support comparison between iterator and const_iterator types + template + bool operator==(const persistables_iterator& rhs) const { + return iterator_current_ == rhs.iterator_current_; + } + + template + bool operator!=(const persistables_iterator& rhs) const { + return iterator_current_ != rhs.iterator_current_; + } + + // Support implicit conversion of iterator to const_iterator + // (but not vice versa) + operator persistables_iterator() const { + return persistables_iterator(iterator_current_, iterator_end_); + } + }; + + using const_iterator = persistables_iterator; + using iterator = persistables_iterator; + + list_of_persistables(const vec_type& vec) : accumulator_vec_{vec} {} + + // Begin and end member functions + iterator begin() { return iterator{std::begin(accumulator_vec_), std::end(accumulator_vec_)}; } + iterator end() { return iterator{std::end(accumulator_vec_), std::end(accumulator_vec_)}; } + const_iterator begin() const { return const_iterator{std::cbegin(accumulator_vec_), std::cend(accumulator_vec_)}; } + const_iterator end() const { return const_iterator{std::cend(accumulator_vec_), std::cend(accumulator_vec_)}; } + + // Other member operations + const auto& front() const { return accumulator_vec_.front().asPersistable(); } + [[nodiscard]] bool empty() const noexcept { return 
accumulator_vec_.empty(); } + + typename vec_type::size_type size() const { return accumulator_vec_.size(); } +}; + +template +class RecordAccumulator3 { + struct RecordComparer { + bool operator()(const TRecord* lhs, const TRecord* rhs) const { return lhs->operator==(*rhs); } + }; + struct RecordHasher { + size_t operator()(const TRecord* record) const { return record->hash(); } + }; + size_t compute_size() { + if (records_.capacity() == 0) { + return (std::max)(64 / sizeof(TRecord), size_t(1)); + } + if (records_.capacity() > 4096 * 32 / sizeof(TRecord)) { + return records_.capacity() * 2; + } + return (records_.capacity() * 3 + 1) / 2; + } + + public: + typedef std::unordered_set rec_accu_set; + typedef std::vector rec_accu_vec; + typedef typename rec_accu_vec::size_type rec_accu_size_type; + + const TRecord* insert(Int64 id, TRecord record) { + if (records_.size() == records_.capacity()) { + records_idx_.clear(); + records_.reserve(compute_size()); + for (auto& rec : records_) { + records_idx_.insert(&rec); + } + } + // ID has been assigned by user, assume that we can run with this + next_id_ = id + 1; // can't guarantee that this will be called in 'id increasing' order but a good guess perhaps + record.setId(id); + records_.push_back(record); + auto& new_record = records_.back(); + records_idx_.insert(&new_record); + return &new_record; + } + + const TRecord* accumulate(TRecord record) { return accumulate(record, next_id_); } + + const TRecord* accumulate(TRecord record, Int64 requestedId) { + auto it = records_idx_.find(&record); + if (it != records_idx_.end()) { + // Found an existing ID for the key. + auto existing = *it; + existing->merge(record); + return existing; + } + return insert(requestedId, record); + } + + const TRecord* search(const TRecord& record) { + auto it = records_idx_.find(&record); + if (it != records_idx_.end()) { + // Found an existing ID for the key. 
+ auto existing = *it; + return existing; + } + return nullptr; + } + + const rec_accu_vec& get_records() const { return records_; } + + void clear() { + records_idx_.clear(); + records_.clear(); + } + + void shrink_to_fit() { + records_idx_.clear(); + records_.shrink_to_fit(); + } + + rec_accu_size_type size() const { return records_.size(); } + + const rec_accu_vec& records() const { return records_; } + + auto persistables() const { return list_of_persistables{records_}; } + + private: + Int64 next_id_ = 1; + rec_accu_set records_idx_; + rec_accu_vec records_; +}; + template class RecordAccumulatorMap { public: @@ -267,6 +444,82 @@ class RecordAccumulatorMap { rec_accu_map _records; }; +template +class list_of_tuples { + using map_type = tlx::btree_map; + const map_type& accumulator_map_; + + public: + template + class tuples_iterator { + using iterator_type = std::conditional_t; + iterator_type iterator_current_; + iterator_type iterator_end_; + TTuple record_{}; + + public: + explicit tuples_iterator(iterator_type iterator_begin, iterator_type iterator_end) + : iterator_current_{iterator_begin}, iterator_end_(iterator_end) { + if (iterator_current_ != iterator_end_) { + const auto& record = *iterator_current_; + record_ = TRecordConv::asTuple(record.first, record.second); + } + } + using difference_type = std::ptrdiff_t; + using value_type = TTuple; + using pointer = std::conditional_t; + using reference = std::conditional_t; + using iterator_category = std::forward_iterator_tag; + + reference operator*() const { return record_; } + pointer operator->() const { return &record_; } + + auto& operator++() { + ++iterator_current_; + if (iterator_current_ != iterator_end_) { + const auto& record = *iterator_current_; + record_ = TRecordConv::asTuple(record.first, record.second); + } + return *this; + } + + auto operator++(int) { + auto result = *this; + ++*this; + return result; + } + // Support comparison between iterator and const_iterator types + template + bool 
operator==(const tuples_iterator& rhs) const { + return iterator_current_ == rhs.iterator_current_; + } + + template + bool operator!=(const tuples_iterator& rhs) const { + return iterator_current_ != rhs.iterator_current_; + } + + // Support implicit conversion of iterator to const_iterator + // (but not vice versa) + operator tuples_iterator() const { return tuples_iterator(iterator_current_, iterator_end_); } + }; + + using const_iterator = tuples_iterator; + using iterator = tuples_iterator; + + list_of_tuples(const map_type& map) : accumulator_map_{map} {} + + // Begin and end member functions + iterator begin() { return iterator{std::begin(accumulator_map_), std::end(accumulator_map_)}; } + iterator end() { return iterator{std::end(accumulator_map_), std::end(accumulator_map_)}; } + const_iterator begin() const { return const_iterator{std::cbegin(accumulator_map_), std::cend(accumulator_map_)}; } + const_iterator end() const { return const_iterator{std::cend(accumulator_map_), std::cend(accumulator_map_)}; } + + // Other member operations + const auto& front() const { return accumulator_map_.front().asPersistable(); } + size_t size() const { return accumulator_map_.size(); } +}; + template class RecordAccumulatorMap2 { public: @@ -300,8 +553,11 @@ class RecordAccumulatorMap2 { const rec_accu_map& getRecords() const { return _records; } + rec_accu_map& getRecords() { return _records; } + std::vector getPersistableCollection() const { std::vector persistables; + persistables.reserve(_records.size()); for (const auto& rec : _records) { persistables.emplace_back(TRecordConv::asPersistable(rec.first, rec.second)); } @@ -311,13 +567,19 @@ class RecordAccumulatorMap2 { std::vector getPersistableCollectionRange(typename rec_accu_map::const_iterator& rangeStart, size_t chunkSize) const { std::vector persistables; + persistables.reserve((std::min)(_records.size(), chunkSize)); size_t chunkPosition = 0; for (; (rangeStart != _records.end() && chunkPosition++ < 
chunkSize); ++rangeStart) { - persistables.push_back(TRecordConv::asPersistable((*rangeStart).first, (*rangeStart).second)); + persistables.emplace_back(TRecordConv::asPersistable((*rangeStart).first, (*rangeStart).second)); } return persistables; } + const auto tuples() const { + return list_of_tuples{_records}; + } + + [[deprecated("Replaced with tuples() method")]] std::vector getTupleCollection() { std::vector tuples; tuples.reserve(_records.size()); @@ -327,11 +589,14 @@ class RecordAccumulatorMap2 { return tuples; } - std::vector getTupleCollectionRange(typename rec_accu_map::const_iterator& rangeStart, size_t chunkSize) { + [[deprecated("Replaced with tuples() method")]] + std::vector getTupleCollectionRange( + typename rec_accu_map::const_iterator& rangeStart, size_t chunkSize) { std::vector tuples; + tuples.reserve((std::min)(_records.size(), chunkSize)); size_t chunkPosition = 0; for (; (rangeStart != _records.end() && chunkPosition++ < chunkSize); ++rangeStart) { - tuples.push_back(TRecordConv::asTuple((*rangeStart).first, (*rangeStart).second)); + tuples.emplace_back(TRecordConv::asTuple((*rangeStart).first, (*rangeStart).second)); } return tuples; } @@ -340,8 +605,6 @@ class RecordAccumulatorMap2 { rec_accu_size_type size() const { return _records.size(); } - rec_accu_map& get_records() { return _records; } - private: Int64 _nextId = 1; rec_accu_map _records; diff --git a/Source/moja.flint/src/aggregatoruncertainty.cpp b/Source/moja.flint/src/aggregatoruncertainty.cpp index ba10193..92195b9 100644 --- a/Source/moja.flint/src/aggregatoruncertainty.cpp +++ b/Source/moja.flint/src/aggregatoruncertainty.cpp @@ -1,5 +1,6 @@ #include "moja/flint/aggregatoruncertainty.h" +#include "moja/flint/spatiallocationinfo.h" #include "moja/flint/ilandunitdatawrapper.h" #include "moja/flint/ioperationresult.h" #include "moja/flint/ioperationresultflux.h" @@ -37,6 +38,10 @@ void AggregatorUncertainty::configure(const DynamicObject& config) { if 
(config.contains("aggregate_stock")) { aggregate_stock_ = config["aggregate_stock"]; } + clear_fluxes_after_recording_ = false; + if (config.contains("clear_fluxes_after_recording")) { + clear_fluxes_after_recording_ = config["clear_fluxes_after_recording"]; + } } void AggregatorUncertainty::subscribe(NotificationCenter& notificationCenter) { @@ -110,10 +115,15 @@ void AggregatorUncertainty::onTimingInit() { simulation_unit_data_->lu_count_processing_unit++; simulation_unit_data_->lu_count_local_domain++; - if (_landUnitData->hasVariable("landUnitArea")) - simulation_unit_data_->land_unit_area = _landUnitData->getVariable("landUnitArea")->value(); - else - simulation_unit_data_->land_unit_area = 1.0; + if (_landUnitData->hasVariable("spatialLocationInfo")) { + auto spatialLocationInfo = std::static_pointer_cast( + _landUnitData->getVariable("spatialLocationInfo")->value().extract>()); + simulation_unit_data_->land_unit_area = spatialLocationInfo->getProperty("landUnitArea"); + } else if (_landUnitData->hasVariable("landUnitArea")) { + simulation_unit_data_->land_unit_area = _landUnitData->getVariable("landUnitArea")->value(); + } else { + simulation_unit_data_->land_unit_area = 1.0; + } // Clear the LU data sets in prep for this LU processing fluxes_lu_.clear(); @@ -202,6 +212,10 @@ void AggregatorUncertainty::record_flux_set() { flux_data_lu{ date_record_id, module_info_id, src_ix, dst_ix, flux_value}); } } + + if (clear_fluxes_after_recording_) { + _landUnitData->clearLastAppliedOperationResults(); + } } void AggregatorUncertainty::record_stock_set() { diff --git a/Source/moja.flint/src/aggregatoruncertaintylandunit.cpp b/Source/moja.flint/src/aggregatoruncertaintylandunit.cpp index 78f8ac6..20cd79d 100644 --- a/Source/moja.flint/src/aggregatoruncertaintylandunit.cpp +++ b/Source/moja.flint/src/aggregatoruncertaintylandunit.cpp @@ -60,6 +60,10 @@ void AggregatorUncertaintyLandUnit::configure(const DynamicObject& config) { } else { classifier_set_var_name_ = 
"classifier_set"; } + aggregator_land_unit_shared_data_.clear_fluxes_after_recording = false; + if (config.contains("clear_fluxes_after_recording")) { + aggregator_land_unit_shared_data_.clear_fluxes_after_recording = config["clear_fluxes_after_recording"]; + } } void AggregatorUncertaintyLandUnit::subscribe(NotificationCenter& notificationCenter) { @@ -176,10 +180,15 @@ void AggregatorUncertaintyLandUnit::onTimingInit() { simulation_unit_data_->lu_count_processing_unit++; simulation_unit_data_->lu_count_local_domain++; - if (_landUnitData->hasVariable("landUnitArea")) - simulation_unit_data_->land_unit_area = _landUnitData->getVariable("landUnitArea")->value(); - else - simulation_unit_data_->land_unit_area = 1.0; + if (_landUnitData->hasVariable("spatialLocationInfo")) { + auto spatialLocationInfo = std::static_pointer_cast( + _landUnitData->getVariable("spatialLocationInfo")->value().extract>()); + simulation_unit_data_->land_unit_area = spatialLocationInfo->getProperty("landUnitArea"); + } else if (_landUnitData->hasVariable("landUnitArea")) { + simulation_unit_data_->land_unit_area = _landUnitData->getVariable("landUnitArea")->value(); + } else { + simulation_unit_data_->land_unit_area = 1.0; + } constructed_tile_id_ = 0; // TileIdx [24 bits], blockIdx [12 bits], cellIdx [28 bits] @@ -282,7 +291,9 @@ void AggregatorUncertaintyLandUnit::onTimingShutdown() { stocks_lu_.clear(); } -void AggregatorUncertaintyLandUnit::onOutputStep() { recordStockSet(); } +void AggregatorUncertaintyLandUnit::onOutputStep() { + recordStockSet(); +} void AggregatorUncertaintyLandUnit::onError(std::string msg) { fluxes_lu_.clear(); @@ -358,6 +369,10 @@ void AggregatorUncertaintyLandUnit::recordFluxSet() { fluxes_lu_.emplace_back(fluxDataLU{date_record_id, module_info_id, srcIx, dstIx, fluxValue}); } } + + if (aggregator_land_unit_shared_data_.clear_fluxes_after_recording) { + _landUnitData->clearLastAppliedOperationResults(); + } } } // namespace moja::flint diff --git 
a/Source/moja.flint/src/outputerstreamflux.cpp b/Source/moja.flint/src/outputerstreamflux.cpp index c1ac368..336b8a2 100644 --- a/Source/moja.flint/src/outputerstreamflux.cpp +++ b/Source/moja.flint/src/outputerstreamflux.cpp @@ -40,6 +40,7 @@ void OutputerStreamFlux::subscribe(NotificationCenter& notificationCenter) { notificationCenter.subscribe(signals::SystemShutdown, &OutputerStreamFlux::onSystemShutdown, *this); notificationCenter.subscribe(signals::TimingPostInit, &OutputerStreamFlux::onTimingPostInit, *this); notificationCenter.subscribe(signals::TimingEndStep, &OutputerStreamFlux::onTimingEndStep, *this); + notificationCenter.subscribe(signals::PostDisturbanceEvent, &OutputerStreamFlux::onPostDisturbanceEvent, *this); } // -------------------------------------------------------------------------------------------- @@ -151,5 +152,9 @@ void OutputerStreamFlux::onTimingPostInit() { outputInit(_output); } void OutputerStreamFlux::onTimingEndStep() { outputEndStep(_output); } +// -------------------------------------------------------------------------------------------- + +void OutputerStreamFlux::onPostDisturbanceEvent() { outputEndStep(_output); } + } // namespace flint } // namespace moja diff --git a/Source/moja.flint/src/uncertaintylandunitsqlitewriter.cpp b/Source/moja.flint/src/uncertaintylandunitsqlitewriter.cpp index bd0b6b1..12e69f4 100644 --- a/Source/moja.flint/src/uncertaintylandunitsqlitewriter.cpp +++ b/Source/moja.flint/src/uncertaintylandunitsqlitewriter.cpp @@ -477,7 +477,7 @@ double _st_dev(const std::vector& data) { void UncertaintyLandUnitSQLiteWriter::calculate_stdev() { { - auto& records = simulation_unit_data_->land_unit_stock_results.get_records(); + auto& records = simulation_unit_data_->land_unit_stock_results.getRecords(); for (auto& rec : records) { auto& values = rec.second.values; rec.second.stdev = _st_dev(values); @@ -487,7 +487,7 @@ void UncertaintyLandUnitSQLiteWriter::calculate_stdev() { } } { - auto& records = 
simulation_unit_data_->land_unit_flux_results.get_records(); + auto& records = simulation_unit_data_->land_unit_flux_results.getRecords(); for (auto& rec : records) { auto& values = rec.second.fluxes; rec.second.stdev = _st_dev(values); diff --git a/Source/moja.flint/src/uncertaintyvariable.cpp b/Source/moja.flint/src/uncertaintyvariable.cpp index e08e8ee..cb8d70f 100644 --- a/Source/moja.flint/src/uncertaintyvariable.cpp +++ b/Source/moja.flint/src/uncertaintyvariable.cpp @@ -72,8 +72,7 @@ const DynamicVar& UncertaintyVariable::value() const { } } } -// value_ = val; - variable_->set_value(val); + value_ = val; } } else { auto& val = value_; diff --git a/Source/moja.modules.gdal/src/rasterreadergdal.cpp b/Source/moja.modules.gdal/src/rasterreadergdal.cpp index f64ba63..aeee1dc 100644 --- a/Source/moja.modules.gdal/src/rasterreadergdal.cpp +++ b/Source/moja.modules.gdal/src/rasterreadergdal.cpp @@ -45,7 +45,7 @@ MetaDataRasterReaderGDAL::MetaDataRasterReaderGDAL(const std::string& path, cons auto filePath = Poco::Path(path); auto parent = filePath.parent().toString(); auto abs = filePath.parent().absolute().toString(); - _path = (boost::format("%1%%2%.json") % filePath.parent().absolute().toString() % filePath.getBaseName()).str(); + _path = (boost::format("%1%%2%.json") % abs % filePath.getBaseName()).str(); _metaDataRequired = true; if (settings.contains("metadata_required")) { _metaDataRequired = settings["metadata_required"].extract(); diff --git a/Source/moja.modules.gdal/src/writevariablegeotiff.cpp b/Source/moja.modules.gdal/src/writevariablegeotiff.cpp index e9c7e97..7885df3 100644 --- a/Source/moja.modules.gdal/src/writevariablegeotiff.cpp +++ b/Source/moja.modules.gdal/src/writevariablegeotiff.cpp @@ -589,18 +589,18 @@ T WriteVariableGeotiff::DataSettingsT::applyValueAdjustment( template void WriteVariableGeotiff::DataSettingsT::setLUValue( std::shared_ptr spatialLocationInfo, int timestep) { - if ((timestep - 1) % _outputInterval != 0) { - return; - } - 
- initData(spatialLocationInfo, timestep); - if (_variable != nullptr) { - setLUVariableValue(spatialLocationInfo, timestep); - } else if (!_pool.empty()) { - setLUPoolValue(spatialLocationInfo, timestep); - } else if (!_flux.empty()) { - setLUFluxValue(spatialLocationInfo, timestep); - } + if ((timestep - 1) % _outputInterval != 0) { + return; + } + + initData(spatialLocationInfo, timestep); + if (_variable != nullptr) { + setLUVariableValue(spatialLocationInfo, timestep); + } else if (!_pool.empty()) { + setLUPoolValue(spatialLocationInfo, timestep); + } else if (!_flux.empty()) { + setLUFluxValue(spatialLocationInfo, timestep); + } } // -------------------------------------------------------------------------------------------- @@ -608,27 +608,35 @@ void WriteVariableGeotiff::DataSettingsT::setLUValue( template void WriteVariableGeotiff::DataSettingsT::setLUVariableValue( std::shared_ptr spatialLocationInfo, int timestep) { - if (_propertyName != "") { - auto flintDataVariable = _variable->value().extract>(); - if (!_isArray) { - auto variablePropertyValue = flintDataVariable->getProperty(_propertyName); - _data[timestep][spatialLocationInfo->_cellIdx] = - applyValueAdjustment(spatialLocationInfo, timestep, variablePropertyValue.convert()); - } - } else { - auto variableValue = _variable->value(); - if (_isArray) { - auto val = variableValue.extract>>()[_arrayIndex]; - if (val.is_initialized()) { - _data[timestep][spatialLocationInfo->_cellIdx] = - applyValueAdjustment(spatialLocationInfo, timestep, val.value()); - } - } else { - if (!variableValue.isEmpty()) + if (_propertyName != "") { + auto variableValue = _variable->value(); + if (variableValue.isStruct()) { + const auto& structVal = variableValue.extract(); _data[timestep][spatialLocationInfo->_cellIdx] = - applyValueAdjustment(spatialLocationInfo, timestep, variableValue.convert()); - } - } + applyValueAdjustment(spatialLocationInfo, timestep, structVal[_propertyName].convert()); + } else { + auto 
flintDataVariable = _variable->value().extract>(); + if (!_isArray) { + auto variablePropertyValue = flintDataVariable->getProperty(_propertyName); + _data[timestep][spatialLocationInfo->_cellIdx] = + applyValueAdjustment(spatialLocationInfo, timestep, variablePropertyValue.convert()); + } + } + } else { + auto variableValue = _variable->value(); + if (_isArray) { + auto val = variableValue.extract>>()[_arrayIndex]; + if (val.is_initialized()) { + _data[timestep][spatialLocationInfo->_cellIdx] = + applyValueAdjustment(spatialLocationInfo, timestep, val.value()); + } + } else { + if (!variableValue.isEmpty()) { + _data[timestep][spatialLocationInfo->_cellIdx] = + applyValueAdjustment(spatialLocationInfo, timestep, variableValue.convert()); + } + } + } } // --------------------------------------------------------------------------------------------