From 1f41ae098b81a2b27bc92fa21473411e918ed164 Mon Sep 17 00:00:00 2001 From: Huihuo Zheng Date: Thu, 26 Aug 2021 16:11:23 -0500 Subject: [PATCH 01/55] added support for Cache VOL --- CMakeLists.txt | 10 ++++++++++ h5bench_patterns/h5bench_write.c | 13 +++++++++---- 2 files changed, 19 insertions(+), 4 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 6d4fe46c..f989afaf 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -22,6 +22,13 @@ if(WITH_ASYNC_VOL) add_definitions(-DUSE_ASYNC_VOL) endif() +# Dependency: VOL Cache +set(CACHE_HOME $ENV{CACHE_HOME}) +option(WITH_CACHE_VOL "" OFF) +if (WITH_CACHE_VOL) + add_definitions(-DUSE_CACHE_VOL) +endif() + # =========== Utilility libs ============== set(h5bench_util_src commons/h5bench_util.c @@ -31,6 +38,9 @@ add_library(h5bench_util ${h5bench_util_src}) if(WITH_ASYNC_VOL) target_link_libraries(h5bench_util asynchdf5 h5async) endif() +if(WITH_CACHE_VOL) + target_link_libraries(h5bench_util cache_new_h5api) +endif() # ================================================================= # =========== Build h5bench_patterns: h5bench_write and h5bench_read =========== diff --git a/h5bench_patterns/h5bench_write.c b/h5bench_patterns/h5bench_write.c index 3027aa98..2f5b9170 100644 --- a/h5bench_patterns/h5bench_write.c +++ b/h5bench_patterns/h5bench_write.c @@ -48,7 +48,9 @@ #include #include "../commons/h5bench_util.h" #include "../commons/async_adaptor.h" - +#ifdef USE_CACHE_VOL +#include "cache_new_h5api.h" +#endif #define DIM_MAX 3 herr_t ierr; @@ -463,7 +465,6 @@ data_write_contig_contig_MD_array(time_step *ts, hid_t loc, hid_t *dset_ids, hid dset_ids[7] = H5Dcreate_async(loc, "id_2", H5T_NATIVE_FLOAT, filespace, H5P_DEFAULT, dcpl, H5P_DEFAULT, ts->es_meta_create); unsigned t2 = get_time_usec(); - ierr = H5Dwrite_async(dset_ids[0], H5T_NATIVE_FLOAT, memspace, filespace, plist_id, data_in->x, ts->es_data); ierr = @@ -756,7 +757,9 @@ _run_benchmark_write(bench_params params, hid_t file_id, hid_t fapl, hid_t files if (MY_RANK == 0) printf("Writing %s ... \n", grp_name); - +#ifdef USE_CACHE_VOL + H5Fcache_async_op_pause(file_id); +#endif switch (pattern) { case CONTIG_CONTIG_1D: case CONTIG_CONTIG_2D: @@ -792,7 +795,9 @@ _run_benchmark_write(bench_params params, hid_t file_id, hid_t fapl, hid_t files default: break; } - +#ifdef USE_CACHE_VOL + H5Fcache_async_op_start(file_id); +#endif ts->status = TS_DELAY; if (params.cnt_time_step_delay == 0) { From 1c051a60c21862dc9eaf665a560405d2063db65a Mon Sep 17 00:00:00 2001 From: github-actions Date: Thu, 26 Aug 2021 21:12:14 +0000 Subject: [PATCH 02/55] Committing clang-format changes --- h5bench_patterns/h5bench_write.c | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/h5bench_patterns/h5bench_write.c b/h5bench_patterns/h5bench_write.c index 2f5b9170..4712767c 100644 --- a/h5bench_patterns/h5bench_write.c +++ b/h5bench_patterns/h5bench_write.c @@ -758,7 +758,7 @@ _run_benchmark_write(bench_params params, hid_t file_id, hid_t fapl, hid_t files if (MY_RANK == 0) printf("Writing %s ... 
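[PATCH 01/55] above wires the Cache VOL in at two levels: CMake gains a WITH_CACHE_VOL option (reading CACHE_HOME from the environment, presumably to locate the Cache VOL install) that defines USE_CACHE_VOL and links cache_new_h5api, and h5bench_write.c brackets the write phase of each time step with H5Fcache_async_op_pause()/H5Fcache_async_op_start() so the Cache VOL holds background data migration while writes are being timed. A minimal sketch of that bracketing, assuming a build configured with -DWITH_CACHE_VOL=ON and cache_new_h5api.h on the include path; the helper name and the single H5Dwrite are illustrative, not the benchmark's actual call sequence:

    #include <hdf5.h>
    #ifdef USE_CACHE_VOL
    #include "cache_new_h5api.h"
    #endif

    static herr_t
    write_one_timestep(hid_t file_id, hid_t dset, hid_t memspace, hid_t filespace,
                       hid_t dxpl, const float *buf)
    {
        herr_t ret;
    #ifdef USE_CACHE_VOL
        /* hold the Cache VOL's background migration during the write burst */
        H5Fcache_async_op_pause(file_id);
    #endif
        ret = H5Dwrite(dset, H5T_NATIVE_FLOAT, memspace, filespace, dxpl, buf);
    #ifdef USE_CACHE_VOL
        /* resume asynchronous flushing from the cache tier to the file system */
        H5Fcache_async_op_start(file_id);
    #endif
        return ret;
    }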
\n", grp_name); #ifdef USE_CACHE_VOL - H5Fcache_async_op_pause(file_id); + H5Fcache_async_op_pause(file_id); #endif switch (pattern) { case CONTIG_CONTIG_1D: @@ -796,7 +796,7 @@ _run_benchmark_write(bench_params params, hid_t file_id, hid_t fapl, hid_t files break; } #ifdef USE_CACHE_VOL - H5Fcache_async_op_start(file_id); + H5Fcache_async_op_start(file_id); #endif ts->status = TS_DELAY; From b1e6a7dd02f83ed9f8db6e85b3b2802e811ec400 Mon Sep 17 00:00:00 2001 From: Huihuo Zheng Date: Tue, 31 Aug 2021 13:38:34 -0500 Subject: [PATCH 03/55] fixed async_sleep issue for cache vol --- commons/h5bench_util.c | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/commons/h5bench_util.c b/commons/h5bench_util.c index ee15e5e3..1dcf7bc5 100644 --- a/commons/h5bench_util.c +++ b/commons/h5bench_util.c @@ -23,6 +23,10 @@ #include "async_adaptor.h" #endif +#ifdef USE_CACHE_VOL +#include "cache_new_h5api.h" +#endif + #include "h5bench_util.h" int str_to_ull(char *str_in, unsigned long long *num_out); @@ -70,8 +74,12 @@ async_sleep(hid_t file_id, hid_t fapl, duration sleep_time) #ifdef USE_ASYNC_VOL unsigned cap = 0; H5Pget_vol_cap_flags(fapl, &cap); +#ifdef USE_CACHE_VOL + H5Fcache_async_op_start(file_id); +#else if (H5VL_CAP_FLAG_ASYNC & cap) H5Fstart(file_id, fapl); +#endif #endif h5bench_sleep(sleep_time); } From c6468b421b8aea5d2b0824159ebdbd217f41f344 Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 31 Aug 2021 18:41:08 +0000 Subject: [PATCH 04/55] Committing clang-format changes --- commons/h5bench_util.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/commons/h5bench_util.c b/commons/h5bench_util.c index 1dcf7bc5..fa263023 100644 --- a/commons/h5bench_util.c +++ b/commons/h5bench_util.c @@ -75,7 +75,7 @@ async_sleep(hid_t file_id, hid_t fapl, duration sleep_time) unsigned cap = 0; H5Pget_vol_cap_flags(fapl, &cap); #ifdef USE_CACHE_VOL - H5Fcache_async_op_start(file_id); + H5Fcache_async_op_start(file_id); #else if (H5VL_CAP_FLAG_ASYNC & cap) H5Fstart(file_id, fapl); From fdc9890cf5adaf9c3637b7da5869b55fc6336258 Mon Sep 17 00:00:00 2001 From: Huihuo Zheng Date: Tue, 31 Aug 2021 13:59:37 -0500 Subject: [PATCH 05/55] fixed async_sleep issue for cache vol integration --- commons/h5bench_util.c | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/commons/h5bench_util.c b/commons/h5bench_util.c index fa263023..c91f7171 100644 --- a/commons/h5bench_util.c +++ b/commons/h5bench_util.c @@ -71,12 +71,11 @@ h5bench_sleep(duration sleep_time) void async_sleep(hid_t file_id, hid_t fapl, duration sleep_time) { +#ifndef USE_CACHE_VOL #ifdef USE_ASYNC_VOL unsigned cap = 0; H5Pget_vol_cap_flags(fapl, &cap); -#ifdef USE_CACHE_VOL H5Fcache_async_op_start(file_id); -#else if (H5VL_CAP_FLAG_ASYNC & cap) H5Fstart(file_id, fapl); #endif From e79004d93a80f26275694e979340ccb131cf92a9 Mon Sep 17 00:00:00 2001 From: Huihuo Zheng Date: Tue, 31 Aug 2021 14:15:20 -0500 Subject: [PATCH 06/55] fixed issue in async_sleep --- commons/h5bench_util.c | 1 - 1 file changed, 1 deletion(-) diff --git a/commons/h5bench_util.c b/commons/h5bench_util.c index c91f7171..1ea5b29f 100644 --- a/commons/h5bench_util.c +++ b/commons/h5bench_util.c @@ -75,7 +75,6 @@ async_sleep(hid_t file_id, hid_t fapl, duration sleep_time) #ifdef USE_ASYNC_VOL unsigned cap = 0; H5Pget_vol_cap_flags(fapl, &cap); - H5Fcache_async_op_start(file_id); if (H5VL_CAP_FLAG_ASYNC & cap) H5Fstart(file_id, fapl); #endif From a40ab6265eac25895cb5ae44ffaa9397868c294b Mon Sep 17 00:00:00 2001 From: Huihuo Zheng Date: Wed, 1 Sep 2021 
09:13:34 -0500 Subject: [PATCH 07/55] fixed issue for ts_delayed_close for ASYNC_NON mode --- commons/h5bench_util.c | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/commons/h5bench_util.c b/commons/h5bench_util.c index 1ea5b29f..7a2edc7b 100644 --- a/commons/h5bench_util.c +++ b/commons/h5bench_util.c @@ -185,8 +185,8 @@ ts_delayed_close(mem_monitor *mon, unsigned long *metadata_time_total, int dset_ unsigned long t1, t2; unsigned long meta_time = 0; - if (mon->mode == ASYNC_NON) - return 0; + //if (mon->mode == ASYNC_NON) + //return 0; /* even for ASYNC_MON, we still need to clse the datasets. */ for (int i = 0; i < mon->time_step_cnt; i++) { ts_run = &(mon->time_steps[i]); From 7aa2a6508f99c7a6bd57d6663ffa19f5ec1ea10c Mon Sep 17 00:00:00 2001 From: github-actions Date: Wed, 1 Sep 2021 14:14:15 +0000 Subject: [PATCH 08/55] Committing clang-format changes --- commons/h5bench_util.c | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/commons/h5bench_util.c b/commons/h5bench_util.c index 7a2edc7b..1e1187d1 100644 --- a/commons/h5bench_util.c +++ b/commons/h5bench_util.c @@ -185,8 +185,8 @@ ts_delayed_close(mem_monitor *mon, unsigned long *metadata_time_total, int dset_ unsigned long t1, t2; unsigned long meta_time = 0; - //if (mon->mode == ASYNC_NON) - //return 0; /* even for ASYNC_MON, we still need to clse the datasets. */ + // if (mon->mode == ASYNC_NON) + // return 0; /* even for ASYNC_MON, we still need to clse the datasets. */ for (int i = 0; i < mon->time_step_cnt; i++) { ts_run = &(mon->time_steps[i]); From 680060038a63c895edeb2962f0127f0740c28349 Mon Sep 17 00:00:00 2001 From: Huihuo Zheng Date: Wed, 1 Sep 2021 09:16:32 -0500 Subject: [PATCH 09/55] fixed clang-format checking issue for ts_delayed_close --- commons/h5bench_util.c | 2 -- 1 file changed, 2 deletions(-) diff --git a/commons/h5bench_util.c b/commons/h5bench_util.c index 7a2edc7b..dd5fd76d 100644 --- a/commons/h5bench_util.c +++ b/commons/h5bench_util.c @@ -185,8 +185,6 @@ ts_delayed_close(mem_monitor *mon, unsigned long *metadata_time_total, int dset_ unsigned long t1, t2; unsigned long meta_time = 0; - //if (mon->mode == ASYNC_NON) - //return 0; /* even for ASYNC_MON, we still need to clse the datasets. 
*/ for (int i = 0; i < mon->time_step_cnt; i++) { ts_run = &(mon->time_steps[i]); From 065f39f058624b11dea1b36822736ec8574d5e8f Mon Sep 17 00:00:00 2001 From: github-actions Date: Wed, 1 Sep 2021 14:18:34 +0000 Subject: [PATCH 10/55] Committing clang-format changes --- commons/h5bench_util.c | 1 - 1 file changed, 1 deletion(-) diff --git a/commons/h5bench_util.c b/commons/h5bench_util.c index dd5fd76d..63a6efa9 100644 --- a/commons/h5bench_util.c +++ b/commons/h5bench_util.c @@ -185,7 +185,6 @@ ts_delayed_close(mem_monitor *mon, unsigned long *metadata_time_total, int dset_ unsigned long t1, t2; unsigned long meta_time = 0; - for (int i = 0; i < mon->time_step_cnt; i++) { ts_run = &(mon->time_steps[i]); if (mon->time_steps[i].status == TS_DELAY) { From cf173b44c6a9e358fef91803da38f49019a38d60 Mon Sep 17 00:00:00 2001 From: Huihuo Zheng Date: Tue, 7 Sep 2021 12:09:03 -0500 Subject: [PATCH 11/55] adding dclose time to the meta data timing --- h5bench_patterns/h5bench_write.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/h5bench_patterns/h5bench_write.c b/h5bench_patterns/h5bench_write.c index 4712767c..66856d03 100644 --- a/h5bench_patterns/h5bench_write.c +++ b/h5bench_patterns/h5bench_write.c @@ -818,7 +818,7 @@ _run_benchmark_write(bench_params params, hid_t file_id, hid_t fapl, hid_t files } } - *metadata_time_total += (meta_time1 + meta_time2 + meta_time3 + meta_time4); + *metadata_time_total += (meta_time1 + meta_time2 + meta_time3 + meta_time4 + meta_time5); *data_time_total += (data_time_exp + data_time_imp); } // end for timestep_cnt From f8426fdb7e0a58c4003875065b76df09e6ad35fe Mon Sep 17 00:00:00 2001 From: Huihuo Zheng Date: Tue, 7 Sep 2021 12:22:16 -0500 Subject: [PATCH 12/55] added meta data timing for ASYNC_NON mode --- commons/h5bench_util.c | 3 +++ 1 file changed, 3 insertions(+) diff --git a/commons/h5bench_util.c b/commons/h5bench_util.c index 63a6efa9..3595efba 100644 --- a/commons/h5bench_util.c +++ b/commons/h5bench_util.c @@ -259,6 +259,7 @@ mem_monitor_final_run(mem_monitor *mon, unsigned long *metadata_time_total, unsi int dset_cnt = 8; if (mon->mode == ASYNC_NON) { + t1 = get_time_usec(); for (int i = 0; i < mon->time_step_cnt; i++) { ts_run = &(mon->time_steps[i]); if (mon->time_steps[i].status == TS_DELAY) { @@ -268,6 +269,8 @@ mem_monitor_final_run(mem_monitor *mon, unsigned long *metadata_time_total, unsi H5Gclose_async(ts_run->grp_id, ts_run->es_meta_close); } } + t2 = get_time_usec(); + meta_time += (t2 - t1); return 0; } From 3a2ec2364a3951b7c4a74f78b3c975e5c08d37b4 Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 7 Sep 2021 17:22:49 +0000 Subject: [PATCH 13/55] Committing clang-format changes --- commons/h5bench_util.c | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/commons/h5bench_util.c b/commons/h5bench_util.c index 3595efba..e5ea110f 100644 --- a/commons/h5bench_util.c +++ b/commons/h5bench_util.c @@ -269,8 +269,8 @@ mem_monitor_final_run(mem_monitor *mon, unsigned long *metadata_time_total, unsi H5Gclose_async(ts_run->grp_id, ts_run->es_meta_close); } } - t2 = get_time_usec(); - meta_time += (t2 - t1); + t2 = get_time_usec(); + meta_time += (t2 - t1); return 0; } From 42ca0856ebb12e0e66b635cc943b870d8bfec74b Mon Sep 17 00:00:00 2001 From: Huihuo Zheng <54-hzheng@users.noreply.gitlab-ci.mcp.alcf.anl.gov> Date: Fri, 17 Sep 2021 12:48:10 -0500 Subject: [PATCH 14/55] added sleep for last time step --- h5bench_patterns/h5bench_write.c | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git 
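With [PATCH 11/55] and [PATCH 12/55], two timing gaps close: the dataset-close component (meta_time5) now counts toward *metadata_time_total, and the ASYNC_NON close loop in mem_monitor_final_run() is bracketed by get_time_usec() so the delayed H5Dclose_async()/H5Gclose_async() calls are no longer unaccounted for. The accounting pattern, as a self-contained sketch (this get_time_usec() is a stand-in with the same wall-clock-microseconds contract as the benchmark's helper):

    #include <stdio.h>
    #include <sys/time.h>

    static unsigned long
    get_time_usec(void)
    {
        struct timeval tv;
        gettimeofday(&tv, NULL);
        return 1000000UL * (unsigned long)tv.tv_sec + (unsigned long)tv.tv_usec;
    }

    int
    main(void)
    {
        unsigned long meta_time = 0;
        unsigned long t1 = get_time_usec();
        /* ... the H5Dclose_async()/H5Gclose_async() loop would run here ... */
        unsigned long t2 = get_time_usec();
        meta_time += (t2 - t1); /* folded into *metadata_time_total, like meta_time5 */
        printf("metadata close time: %lu us\n", meta_time);
        return 0;
    }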
a/h5bench_patterns/h5bench_write.c b/h5bench_patterns/h5bench_write.c index 66856d03..59c3591b 100644 --- a/h5bench_patterns/h5bench_write.c +++ b/h5bench_patterns/h5bench_write.c @@ -809,8 +809,11 @@ _run_benchmark_write(bench_params params, hid_t file_id, hid_t fapl, hid_t files t4 = get_time_usec(); meta_time5 += (t4 - t3); } - - if (ts_index != timestep_cnt - 1) { // no sleep after the last ts +#ifdef USE_CACHE_VOL + if (ts_index != timestep_cnt) { // I still want sleep after the last ts for Cache VOL +#else + if (ts_index != timestep_cnt - 1) { // no sleep after the last ts +#endif if (params.compute_time.time_num >= 0) { if (MY_RANK == 0) printf("Computing... \n"); From ce92354b6cd7ade65f64ba643fe09c9645929b71 Mon Sep 17 00:00:00 2001 From: github-actions Date: Fri, 17 Sep 2021 17:50:51 +0000 Subject: [PATCH 15/55] Committing clang-format changes --- h5bench_patterns/h5bench_write.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/h5bench_patterns/h5bench_write.c b/h5bench_patterns/h5bench_write.c index 59c3591b..593076c9 100644 --- a/h5bench_patterns/h5bench_write.c +++ b/h5bench_patterns/h5bench_write.c @@ -812,7 +812,7 @@ _run_benchmark_write(bench_params params, hid_t file_id, hid_t fapl, hid_t files #ifdef USE_CACHE_VOL if (ts_index != timestep_cnt) { // I still want sleep after the last ts for Cache VOL #else - if (ts_index != timestep_cnt - 1) { // no sleep after the last ts + if (ts_index != timestep_cnt - 1) { // no sleep after the last ts #endif if (params.compute_time.time_num >= 0) { if (MY_RANK == 0) From 288806abe3ce0c1f9052546d24e1806959db21d4 Mon Sep 17 00:00:00 2001 From: Huihuo Zheng <54-hzheng@users.noreply.gitlab-ci.mcp.alcf.anl.gov> Date: Fri, 17 Sep 2021 13:56:40 -0500 Subject: [PATCH 16/55] undo sleep time change --- h5bench_patterns/h5bench_write.c | 5 ----- 1 file changed, 5 deletions(-) diff --git a/h5bench_patterns/h5bench_write.c b/h5bench_patterns/h5bench_write.c index 593076c9..fa1450a7 100644 --- a/h5bench_patterns/h5bench_write.c +++ b/h5bench_patterns/h5bench_write.c @@ -809,11 +809,7 @@ _run_benchmark_write(bench_params params, hid_t file_id, hid_t fapl, hid_t files t4 = get_time_usec(); meta_time5 += (t4 - t3); } -#ifdef USE_CACHE_VOL - if (ts_index != timestep_cnt) { // I still want sleep after the last ts for Cache VOL -#else if (ts_index != timestep_cnt - 1) { // no sleep after the last ts -#endif if (params.compute_time.time_num >= 0) { if (MY_RANK == 0) printf("Computing... \n"); @@ -1074,7 +1070,6 @@ main(int argc, char *argv[]) mode_str = "Sync"; #endif printf("\n================== Performance results =================\n"); - unsigned long long total_sleep_time_us = read_time_val(params.compute_time, TIME_US) * (params.cnt_time_step - 1); unsigned long total_size_mb = NUM_RANKS * local_data_size / (1024 * 1024); From 2317a221393f378be2ce48388ce3e6cda319d490 Mon Sep 17 00:00:00 2001 From: Huihuo Zheng Date: Thu, 28 Oct 2021 11:34:21 -0400 Subject: [PATCH 17/55] added barrier in each iteration after the compute --- h5bench_patterns/h5bench_write.c | 1 + 1 file changed, 1 insertion(+) diff --git a/h5bench_patterns/h5bench_write.c b/h5bench_patterns/h5bench_write.c index fa1450a7..76f57fdd 100644 --- a/h5bench_patterns/h5bench_write.c +++ b/h5bench_patterns/h5bench_write.c @@ -758,6 +758,7 @@ _run_benchmark_write(bench_params params, hid_t file_id, hid_t fapl, hid_t files if (MY_RANK == 0) printf("Writing %s ... 
\n", grp_name); #ifdef USE_CACHE_VOL + MPI_Barrier(MPI_COMM_WORLD); H5Fcache_async_op_pause(file_id); #endif switch (pattern) { From 5fa7be591e9a98964a4e757025ac72f99a6dc51b Mon Sep 17 00:00:00 2001 From: github-actions Date: Thu, 28 Oct 2021 15:36:05 +0000 Subject: [PATCH 18/55] Committing clang-format changes --- h5bench_patterns/h5bench_write.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/h5bench_patterns/h5bench_write.c b/h5bench_patterns/h5bench_write.c index 76f57fdd..58096016 100644 --- a/h5bench_patterns/h5bench_write.c +++ b/h5bench_patterns/h5bench_write.c @@ -758,7 +758,7 @@ _run_benchmark_write(bench_params params, hid_t file_id, hid_t fapl, hid_t files if (MY_RANK == 0) printf("Writing %s ... \n", grp_name); #ifdef USE_CACHE_VOL - MPI_Barrier(MPI_COMM_WORLD); + MPI_Barrier(MPI_COMM_WORLD); H5Fcache_async_op_pause(file_id); #endif switch (pattern) { From 54cfded6316b411639a86dcae1cd42159ad3ddc9 Mon Sep 17 00:00:00 2001 From: github-actions Date: Wed, 26 Jan 2022 17:41:57 +0000 Subject: [PATCH 19/55] Committing clang-format changes --- commons/h5bench_util.c | 1636 ++++++++++++++++++++-------------------- 1 file changed, 806 insertions(+), 830 deletions(-) diff --git a/commons/h5bench_util.c b/commons/h5bench_util.c index 66c1c118..94a3f2a9 100644 --- a/commons/h5bench_util.c +++ b/commons/h5bench_util.c @@ -185,7 +185,6 @@ ts_delayed_close(mem_monitor *mon, unsigned long *metadata_time_total, int dset_ unsigned long t1, t2; unsigned long meta_time = 0; - if (!has_vol_async) return 0; @@ -264,283 +263,309 @@ mem_monitor_final_run(mem_monitor *mon, unsigned long *metadata_time_total, unsi if (mon->mode == ASYNC_NON) { t1 = get_time_usec(); - if (!has_vol_async) { + if (!has_vol_async) { + for (int i = 0; i < mon->time_step_cnt; i++) { + ts_run = &(mon->time_steps[i]); + if (mon->time_steps[i].status == TS_DELAY) { + for (int j = 0; j < dset_cnt; j++) + H5Dclose_async(ts_run->dset_ids[j], ts_run->es_meta_close); + H5Gclose_async(ts_run->grp_id, ts_run->es_meta_close); + } + } + t2 = get_time_usec(); + meta_time += (t2 - t1); + return 0; + } + + if (!mon || !metadata_time_total || !data_time_total) + return -1; + t1 = get_time_usec(); for (int i = 0; i < mon->time_step_cnt; i++) { ts_run = &(mon->time_steps[i]); if (mon->time_steps[i].status == TS_DELAY) { + for (int j = 0; j < dset_cnt; j++) H5Dclose_async(ts_run->dset_ids[j], ts_run->es_meta_close); H5Gclose_async(ts_run->grp_id, ts_run->es_meta_close); + + ts_run->status = TS_READY; } } + t2 = get_time_usec(); meta_time += (t2 - t1); - return 0; - } - - if (!mon || !metadata_time_total || !data_time_total) - return -1; - t1 = get_time_usec(); - for (int i = 0; i < mon->time_step_cnt; i++) { - ts_run = &(mon->time_steps[i]); - if (mon->time_steps[i].status == TS_DELAY) { - for (int j = 0; j < dset_cnt; j++) - H5Dclose_async(ts_run->dset_ids[j], ts_run->es_meta_close); - H5Gclose_async(ts_run->grp_id, ts_run->es_meta_close); + if (!has_vol_async) + return 0; - ts_run->status = TS_READY; - } - } - - t2 = get_time_usec(); - meta_time += (t2 - t1); - - if (!has_vol_async) - return 0; - - for (int i = 0; i < mon->time_step_cnt; i++) { - ts_run = &(mon->time_steps[i]); - if (mon->time_steps[i].status == TS_READY) { - t1 = get_time_usec(); - H5ESwait(ts_run->es_meta_create, H5ES_WAIT_FOREVER, &num_in_progress, &op_failed); - t2 = get_time_usec(); + for (int i = 0; i < mon->time_step_cnt; i++) { + ts_run = &(mon->time_steps[i]); + if (mon->time_steps[i].status == TS_READY) { + t1 = get_time_usec(); + 
H5ESwait(ts_run->es_meta_create, H5ES_WAIT_FOREVER, &num_in_progress, &op_failed); + t2 = get_time_usec(); - H5ESwait(ts_run->es_data, H5ES_WAIT_FOREVER, &num_in_progress, &op_failed); - t3 = get_time_usec(); + H5ESwait(ts_run->es_data, H5ES_WAIT_FOREVER, &num_in_progress, &op_failed); + t3 = get_time_usec(); - H5ESwait(ts_run->es_meta_close, H5ES_WAIT_FOREVER, &num_in_progress, &op_failed); - t4 = get_time_usec(); + H5ESwait(ts_run->es_meta_close, H5ES_WAIT_FOREVER, &num_in_progress, &op_failed); + t4 = get_time_usec(); - timestep_es_id_close(ts_run, mon->mode); - t5 = get_time_usec(); + timestep_es_id_close(ts_run, mon->mode); + t5 = get_time_usec(); - t6 = get_time_usec(); + t6 = get_time_usec(); - meta_time += ((t2 - t1) + (t4 - t3)); - data_time += (t3 - t2); - ts_run->status = TS_DONE; + meta_time += ((t2 - t1) + (t4 - t3)); + data_time += (t3 - t2); + ts_run->status = TS_DONE; + } } - } - - *metadata_time_total = meta_time; - *data_time_total = data_time; - return 0; -} -hid_t -es_id_set(async_mode mode) -{ - hid_t es_id = 0; - if (has_vol_async) { - es_id = H5EScreate(); - } - else { - es_id = H5ES_NONE; + *metadata_time_total = meta_time; + *data_time_total = data_time; + return 0; } - return es_id; -} + hid_t es_id_set(async_mode mode) + { + hid_t es_id = 0; + if (has_vol_async) { + es_id = H5EScreate(); + } + else { + es_id = H5ES_NONE; + } -void -es_id_close(hid_t es_id, async_mode mode) -{ - if (has_vol_async) { - H5ESclose(es_id); + return es_id; } -} - -float -uniform_random_number() -{ - return (((float)rand()) / ((float)(RAND_MAX))); -} - -data_contig_md * -prepare_contig_memory(long particle_cnt, long dim_1, long dim_2, long dim_3) -{ - data_contig_md *buf_struct = (data_contig_md *)malloc(sizeof(data_contig_md)); - buf_struct->particle_cnt = particle_cnt; - buf_struct->dim_1 = dim_1; - buf_struct->dim_2 = dim_2; - buf_struct->dim_3 = dim_3; - buf_struct->x = (float *)malloc(particle_cnt * sizeof(float)); - buf_struct->y = (float *)malloc(particle_cnt * sizeof(float)); - buf_struct->z = (float *)malloc(particle_cnt * sizeof(float)); - buf_struct->px = (float *)malloc(particle_cnt * sizeof(float)); - buf_struct->py = (float *)malloc(particle_cnt * sizeof(float)); - buf_struct->pz = (float *)malloc(particle_cnt * sizeof(float)); - buf_struct->id_1 = (int *)malloc(particle_cnt * sizeof(int)); - buf_struct->id_2 = (float *)malloc(particle_cnt * sizeof(float)); - return buf_struct; -} -data_contig_md * -prepare_contig_memory_multi_dim(unsigned long long dim_1, unsigned long long dim_2, unsigned long long dim_3) -{ - data_contig_md *buf_struct = (data_contig_md *)malloc(sizeof(data_contig_md)); - buf_struct->dim_1 = dim_1; - buf_struct->dim_2 = dim_2; - buf_struct->dim_3 = dim_3; - unsigned long long num_particles = dim_1 * dim_2 * dim_3; - - buf_struct->particle_cnt = num_particles; - buf_struct->x = (float *)malloc(num_particles * sizeof(float)); - buf_struct->y = (float *)malloc(num_particles * sizeof(float)); - buf_struct->z = (float *)malloc(num_particles * sizeof(float)); - buf_struct->px = (float *)malloc(num_particles * sizeof(float)); - buf_struct->py = (float *)malloc(num_particles * sizeof(float)); - buf_struct->pz = (float *)malloc(num_particles * sizeof(float)); - buf_struct->id_1 = (int *)malloc(num_particles * sizeof(int)); - buf_struct->id_2 = (float *)malloc(num_particles * sizeof(float)); - return buf_struct; -} - -void -free_contig_memory(data_contig_md *data) -{ - if (data) { - free(data->x); - free(data->y); - free(data->z); - free(data->px); - 
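The final-run path above drains three event sets per time step — es_meta_create, es_data, es_meta_close — and charges the wait times to metadata or data accordingly. For reference, the HDF5 1.13+ event-set pattern those calls follow, as a minimal sketch (the single queued write is illustrative):

    #include <hdf5.h>

    /* queue async operations on one event set, then wait for all of them */
    static int
    drain_example(hid_t dset, hid_t dxpl, const float *buf)
    {
        hid_t   es              = H5EScreate();
        size_t  num_in_progress = 0;
        hbool_t op_failed       = 0;

        H5Dwrite_async(dset, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, dxpl, buf, es);
        /* ... more H5D*_async / H5G*_async calls can be queued here ... */

        H5ESwait(es, H5ES_WAIT_FOREVER, &num_in_progress, &op_failed);
        H5ESclose(es);
        return op_failed ? -1 : 0;
    }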
free(data->py); - free(data->pz); - free(data->id_1); - free(data->id_2); - free(data); + void es_id_close(hid_t es_id, async_mode mode) + { + if (has_vol_async) { + H5ESclose(es_id); + } } -} -int -parse_unit(char *str_in, unsigned long long *num, char **unit_str) -{ - char *str = strdup(str_in); - char *ptr = NULL; - ptr = strtok(str, " "); - char *num_str = strdup(ptr); - if (!num_str) { - printf("Number parsing failed: \"%s\" is not recognized.\n", str_in); - return -1; + float uniform_random_number() + { + return (((float)rand()) / ((float)(RAND_MAX))); + } + + data_contig_md *prepare_contig_memory(long particle_cnt, long dim_1, long dim_2, long dim_3) + { + data_contig_md *buf_struct = (data_contig_md *)malloc(sizeof(data_contig_md)); + buf_struct->particle_cnt = particle_cnt; + buf_struct->dim_1 = dim_1; + buf_struct->dim_2 = dim_2; + buf_struct->dim_3 = dim_3; + buf_struct->x = (float *)malloc(particle_cnt * sizeof(float)); + buf_struct->y = (float *)malloc(particle_cnt * sizeof(float)); + buf_struct->z = (float *)malloc(particle_cnt * sizeof(float)); + buf_struct->px = (float *)malloc(particle_cnt * sizeof(float)); + buf_struct->py = (float *)malloc(particle_cnt * sizeof(float)); + buf_struct->pz = (float *)malloc(particle_cnt * sizeof(float)); + buf_struct->id_1 = (int *)malloc(particle_cnt * sizeof(int)); + buf_struct->id_2 = (float *)malloc(particle_cnt * sizeof(float)); + return buf_struct; + } + + data_contig_md *prepare_contig_memory_multi_dim(unsigned long long dim_1, unsigned long long dim_2, + unsigned long long dim_3) + { + data_contig_md *buf_struct = (data_contig_md *)malloc(sizeof(data_contig_md)); + buf_struct->dim_1 = dim_1; + buf_struct->dim_2 = dim_2; + buf_struct->dim_3 = dim_3; + unsigned long long num_particles = dim_1 * dim_2 * dim_3; + + buf_struct->particle_cnt = num_particles; + buf_struct->x = (float *)malloc(num_particles * sizeof(float)); + buf_struct->y = (float *)malloc(num_particles * sizeof(float)); + buf_struct->z = (float *)malloc(num_particles * sizeof(float)); + buf_struct->px = (float *)malloc(num_particles * sizeof(float)); + buf_struct->py = (float *)malloc(num_particles * sizeof(float)); + buf_struct->pz = (float *)malloc(num_particles * sizeof(float)); + buf_struct->id_1 = (int *)malloc(num_particles * sizeof(int)); + buf_struct->id_2 = (float *)malloc(num_particles * sizeof(float)); + return buf_struct; + } + + void free_contig_memory(data_contig_md * data) + { + if (data) { + free(data->x); + free(data->y); + free(data->z); + free(data->px); + free(data->py); + free(data->pz); + free(data->id_1); + free(data->id_2); + free(data); + } } - char *endptr; - *num = strtoul(num_str, &endptr, 10); - ptr = strtok(NULL, " "); - if (ptr) - *unit_str = strdup(ptr); - else - *unit_str = NULL; - return 0; -} -int -parse_time(char *str_in, duration *time) -{ - if (!time) - time = calloc(1, sizeof(duration)); - unsigned long long num = 0; - char * unit_str; - parse_unit(str_in, &num, &unit_str); - - if (!unit_str) - time->unit = TIME_SEC; - else if (unit_str[0] == 'S' || unit_str[0] == 's') - time->unit = TIME_SEC; - else if (unit_str[0] == 'M' || unit_str[0] == 'm') { - if (strcmp(unit_str, "ms") == 0 || strcmp(unit_str, "MS") == 0) - time->unit = TIME_MS; + int parse_unit(char *str_in, unsigned long long *num, char **unit_str) + { + char *str = strdup(str_in); + char *ptr = NULL; + ptr = strtok(str, " "); + char *num_str = strdup(ptr); + if (!num_str) { + printf("Number parsing failed: \"%s\" is not recognized.\n", str_in); + return -1; + } + char 
*endptr; + *num = strtoul(num_str, &endptr, 10); + ptr = strtok(NULL, " "); + if (ptr) + *unit_str = strdup(ptr); else - time->unit = TIME_MIN; + *unit_str = NULL; + return 0; } - else if (unit_str[0] == 'U' || unit_str[0] == 'u') - time->unit = TIME_US; - else { - printf("time parsing failed\n"); - return -1; + + int parse_time(char *str_in, duration *time) + { + if (!time) + time = calloc(1, sizeof(duration)); + unsigned long long num = 0; + char * unit_str; + parse_unit(str_in, &num, &unit_str); + + if (!unit_str) + time->unit = TIME_SEC; + else if (unit_str[0] == 'S' || unit_str[0] == 's') + time->unit = TIME_SEC; + else if (unit_str[0] == 'M' || unit_str[0] == 'm') { + if (strcmp(unit_str, "ms") == 0 || strcmp(unit_str, "MS") == 0) + time->unit = TIME_MS; + else + time->unit = TIME_MIN; + } + else if (unit_str[0] == 'U' || unit_str[0] == 'u') + time->unit = TIME_US; + else { + printf("time parsing failed\n"); + return -1; + } + time->time_num = num; + return 0; } - time->time_num = num; - return 0; -} -int -str_to_ull(char *str_in, unsigned long long *num_out) -{ - if (!str_in) { - printf("Number parsing failed: \"%s\" is not recognized.\n", str_in); - return -1; + int str_to_ull(char *str_in, unsigned long long *num_out) + { + if (!str_in) { + printf("Number parsing failed: \"%s\" is not recognized.\n", str_in); + return -1; + } + unsigned long long num = 0; + char * unit_str; + int ret = parse_unit(str_in, &num, &unit_str); + if (ret < 0) + return -1; + if (!unit_str) + num = num * 1; + else if (unit_str[0] == 'K' || unit_str[0] == 'k') + num = num * K_VAL; + else if (unit_str[0] == 'M' || unit_str[0] == 'm') + num = num * M_VAL; + else if (unit_str[0] == 'G' || unit_str[0] == 'g') + num = num * G_VAL; + else if (unit_str[0] == 'T' || unit_str[0] == 't') + num = num * T_VAL; + + if (unit_str) + free(unit_str); + *num_out = num; + return 0; } - unsigned long long num = 0; - char * unit_str; - int ret = parse_unit(str_in, &num, &unit_str); - if (ret < 0) - return -1; - if (!unit_str) - num = num * 1; - else if (unit_str[0] == 'K' || unit_str[0] == 'k') - num = num * K_VAL; - else if (unit_str[0] == 'M' || unit_str[0] == 'm') - num = num * M_VAL; - else if (unit_str[0] == 'G' || unit_str[0] == 'g') - num = num * G_VAL; - else if (unit_str[0] == 'T' || unit_str[0] == 't') - num = num * T_VAL; - - if (unit_str) - free(unit_str); - *num_out = num; - return 0; -} -int -_set_io_pattern(bench_params *params_in_out) -{ - if (!params_in_out) - return -1; - int ret = 0; - if (params_in_out->io_op == IO_WRITE) { // mem --> file - if (params_in_out->mem_pattern == PATTERN_CONTIG) { - if (params_in_out->file_pattern == PATTERN_CONTIG) { // CC - switch (params_in_out->num_dims) { - case 1: - (*params_in_out).access_pattern.pattern_write = CONTIG_CONTIG_1D; - ret = 0; - break; - case 2: - (*params_in_out).access_pattern.pattern_write = CONTIG_CONTIG_2D; + int _set_io_pattern(bench_params * params_in_out) + { + if (!params_in_out) + return -1; + int ret = 0; + if (params_in_out->io_op == IO_WRITE) { // mem --> file + if (params_in_out->mem_pattern == PATTERN_CONTIG) { + if (params_in_out->file_pattern == PATTERN_CONTIG) { // CC + switch (params_in_out->num_dims) { + case 1: + (*params_in_out).access_pattern.pattern_write = CONTIG_CONTIG_1D; + ret = 0; + break; + case 2: + (*params_in_out).access_pattern.pattern_write = CONTIG_CONTIG_2D; + ret = 0; + break; + case 3: + (*params_in_out).access_pattern.pattern_write = CONTIG_CONTIG_3D; + ret = 0; + break; + default: + ret = -1; + printf("%s() failed on 
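For concreteness, the unit handling just defined behaves as follows, assuming K_VAL/M_VAL/G_VAL/T_VAL carry the usual binary values (1024, 1024^2, ...):

    str_to_ull("16 M", &n)   /* n == 16 * 1024 * 1024            */
    str_to_ull("8 K",  &n)   /* n == 8 * 1024                    */
    str_to_ull("100",  &n)   /* n == 100; no unit means as-is    */

    parse_time("100 ms", &t) /* t = {100, TIME_MS}               */
    parse_time("5 s",    &t) /* t = {5,   TIME_SEC}              */
    parse_time("10 us",  &t) /* t = {10,  TIME_US}               */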
line %d\n", __func__, __LINE__); + break; + } + } + else if (params_in_out->file_pattern == PATTERN_INTERLEAVED) { // CI + if (params_in_out->num_dims == 1) { + (*params_in_out).access_pattern.pattern_write = CONTIG_COMPOUND_1D; ret = 0; - break; - case 3: - (*params_in_out).access_pattern.pattern_write = CONTIG_CONTIG_3D; + } + else if (params_in_out->num_dims == 2) { + (*params_in_out).access_pattern.pattern_write = CONTIG_COMPOUND_2D; ret = 0; - break; - default: + } + else { ret = -1; printf("%s() failed on line %d\n", __func__, __LINE__); - break; + } } - } - else if (params_in_out->file_pattern == PATTERN_INTERLEAVED) { // CI - if (params_in_out->num_dims == 1) { - (*params_in_out).access_pattern.pattern_write = CONTIG_COMPOUND_1D; - ret = 0; - } - else if (params_in_out->num_dims == 2) { - (*params_in_out).access_pattern.pattern_write = CONTIG_COMPOUND_2D; - ret = 0; + else if (params_in_out->file_pattern == PATTERN_STRIDED) { // Strided write 1d + if (params_in_out->num_dims == 1) { + (*params_in_out).access_pattern.pattern_write = CONTIG_CONTIG_STRIDED_1D; + ret = 0; + } + else { + ret = -1; + printf("%s() failed on line %d\n", __func__, __LINE__); + } } else { ret = -1; printf("%s() failed on line %d\n", __func__, __LINE__); } } - else if (params_in_out->file_pattern == PATTERN_STRIDED) { // Strided write 1d - if (params_in_out->num_dims == 1) { - (*params_in_out).access_pattern.pattern_write = CONTIG_CONTIG_STRIDED_1D; - ret = 0; + else if (params_in_out->mem_pattern == PATTERN_INTERLEAVED) { + if (params_in_out->file_pattern == PATTERN_CONTIG) { // IC + if (params_in_out->num_dims == 1) { + (*params_in_out).access_pattern.pattern_write = COMPOUND_CONTIG_1D; + ret = 0; + } + else if (params_in_out->num_dims == 2) { + (*params_in_out).access_pattern.pattern_write = COMPOUND_CONTIG_2D; + ret = 0; + } + else { + ret = -1; + printf("%s() failed on line %d\n", __func__, __LINE__); + } } - else { - ret = -1; - printf("%s() failed on line %d\n", __func__, __LINE__); + else if (params_in_out->file_pattern == PATTERN_INTERLEAVED) { // II + if (params_in_out->num_dims == 1) { + (*params_in_out).access_pattern.pattern_write = COMPOUND_COMPOUND_1D; + ret = 0; + } + else if (params_in_out->num_dims == 2) { + (*params_in_out).access_pattern.pattern_write = COMPOUND_COMPOUND_2D; + ret = 0; + } + else { + ret = -1; + printf("%s() failed on line %d\n", __func__, __LINE__); + } } } else { @@ -548,715 +573,666 @@ _set_io_pattern(bench_params *params_in_out) printf("%s() failed on line %d\n", __func__, __LINE__); } } - else if (params_in_out->mem_pattern == PATTERN_INTERLEAVED) { - if (params_in_out->file_pattern == PATTERN_CONTIG) { // IC - if (params_in_out->num_dims == 1) { - (*params_in_out).access_pattern.pattern_write = COMPOUND_CONTIG_1D; - ret = 0; - } - else if (params_in_out->num_dims == 2) { - (*params_in_out).access_pattern.pattern_write = COMPOUND_CONTIG_2D; - ret = 0; + else if (params_in_out->io_op == IO_READ) { // file --> mem + if (params_in_out->mem_pattern == PATTERN_CONTIG) { + if (params_in_out->file_pattern == PATTERN_CONTIG) { + switch (params_in_out->num_dims) { + case 1: + (*params_in_out).access_pattern.pattern_read = CONTIG_1D; + ret = 0; + break; + case 2: + (*params_in_out).access_pattern.pattern_read = CONTIG_2D; + ret = 0; + break; + case 3: + (*params_in_out).access_pattern.pattern_read = CONTIG_3D; + ret = 0; + break; + default: + ret = -1; + printf("%s() failed on line %d\n", __func__, __LINE__); + break; + } } - else { - ret = -1; - printf("%s() failed on line 
%d\n", __func__, __LINE__); + else if (params_in_out->file_pattern == PATTERN_STRIDED) { + (*params_in_out).access_pattern.pattern_read = STRIDED_1D; + ret = 0; } } - else if (params_in_out->file_pattern == PATTERN_INTERLEAVED) { // II - if (params_in_out->num_dims == 1) { - (*params_in_out).access_pattern.pattern_write = COMPOUND_COMPOUND_1D; - ret = 0; - } - else if (params_in_out->num_dims == 2) { - (*params_in_out).access_pattern.pattern_write = COMPOUND_COMPOUND_2D; - ret = 0; - } - else { - ret = -1; - printf("%s() failed on line %d\n", __func__, __LINE__); - } + else { + ret = -1; + printf("%s() failed on line %d\n", __func__, __LINE__); } } else { ret = -1; printf("%s() failed on line %d\n", __func__, __LINE__); } + if (ret < 0) + printf("%s() failed, unsupported value/patterns.\n", __func__); + return ret; } - else if (params_in_out->io_op == IO_READ) { // file --> mem - if (params_in_out->mem_pattern == PATTERN_CONTIG) { - if (params_in_out->file_pattern == PATTERN_CONTIG) { - switch (params_in_out->num_dims) { - case 1: - (*params_in_out).access_pattern.pattern_read = CONTIG_1D; - ret = 0; - break; - case 2: - (*params_in_out).access_pattern.pattern_read = CONTIG_2D; - ret = 0; - break; - case 3: - (*params_in_out).access_pattern.pattern_read = CONTIG_3D; - ret = 0; - break; - default: - ret = -1; - printf("%s() failed on line %d\n", __func__, __LINE__); - break; - } + + char *_parse_val(char *val_in) + { + char *val_str = strdup(val_in); + char *tokens[2]; + char *tok = strtok(val_str, "#"); + char *val = NULL; + val = strdup(tok); + // printf("_parse_val: val_in = [%s], val = [%s]\n", val_in, val); + if (val_str) + free(val_str); + return val; + } + + int _set_params(char *key, char *val_in, bench_params *params_in_out, int do_write) + { + if (!params_in_out) + return 0; + char *val = _parse_val(val_in); + + if (strcmp(key, "IO_OPERATION") == 0) { + if (strcmp(val, "READ") == 0) { + params_in_out->io_op = IO_READ; } - else if (params_in_out->file_pattern == PATTERN_STRIDED) { - (*params_in_out).access_pattern.pattern_read = STRIDED_1D; - ret = 0; + else if (strcmp(val, "WRITE") == 0) { + params_in_out->io_op = IO_WRITE; + } + else { + printf("Unknown value for \"IO_OPERATION\": %s\n", val); + return -1; } } - else { - ret = -1; - printf("%s() failed on line %d\n", __func__, __LINE__); + else if (strcmp(key, "MEM_PATTERN") == 0) { + if (strcmp(val_in, "CONTIG") == 0) { + params_in_out->mem_pattern = PATTERN_CONTIG; + } + else if (strcmp(val_in, "INTERLEAVED") == 0) { + params_in_out->mem_pattern = PATTERN_INTERLEAVED; + } + else if (strcmp(val_in, "STRIDED") == 0) { + params_in_out->mem_pattern = PATTERN_STRIDED; + } + else { + params_in_out->mem_pattern = PATTERN_INVALID; + } } - } - else { - ret = -1; - printf("%s() failed on line %d\n", __func__, __LINE__); - } - if (ret < 0) - printf("%s() failed, unsupported value/patterns.\n", __func__); - return ret; -} - -char * -_parse_val(char *val_in) -{ - char *val_str = strdup(val_in); - char *tokens[2]; - char *tok = strtok(val_str, "#"); - char *val = NULL; - val = strdup(tok); - // printf("_parse_val: val_in = [%s], val = [%s]\n", val_in, val); - if (val_str) - free(val_str); - return val; -} - -int -_set_params(char *key, char *val_in, bench_params *params_in_out, int do_write) -{ - if (!params_in_out) - return 0; - char *val = _parse_val(val_in); - - if (strcmp(key, "IO_OPERATION") == 0) { - if (strcmp(val, "READ") == 0) { - params_in_out->io_op = IO_READ; + else if (strcmp(key, "FILE_PATTERN") == 0) { + if (strcmp(val_in, 
"CONTIG") == 0) { + params_in_out->file_pattern = PATTERN_CONTIG; + } + else if (strcmp(val_in, "INTERLEAVED") == 0) { + params_in_out->file_pattern = PATTERN_INTERLEAVED; + } + else if (strcmp(val_in, "STRIDED") == 0) { + params_in_out->file_pattern = PATTERN_STRIDED; + } + else { + params_in_out->file_pattern = PATTERN_INVALID; + } } - else if (strcmp(val, "WRITE") == 0) { - params_in_out->io_op = IO_WRITE; + + else if (strcmp(key, "TO_READ_NUM_PARTICLES") == 0) { + if ((*params_in_out).io_op != IO_READ) { + printf( + "TO_READ_CNT_M parameter is only used with READ_PATTERNs, please check config file.\n"); + return -1; + } + unsigned long long num = 0; + if (str_to_ull(val, &num) < 0) + return -1; + (*params_in_out).try_num_particles = num; } - else { - printf("Unknown value for \"IO_OPERATION\": %s\n", val); - return -1; + else if (strcmp(key, "COLLECTIVE_METADATA") == 0) { + if (val[0] == 'Y' || val[0] == 'y') + (*params_in_out).meta_coll = 1; + else + (*params_in_out).meta_coll = 0; } - } - else if (strcmp(key, "MEM_PATTERN") == 0) { - if (strcmp(val_in, "CONTIG") == 0) { - params_in_out->mem_pattern = PATTERN_CONTIG; + else if (strcmp(key, "COLLECTIVE_DATA") == 0) { + if (val[0] == 'Y' || val[0] == 'y') + (*params_in_out).data_coll = 1; + else + (*params_in_out).data_coll = 0; } - else if (strcmp(val_in, "INTERLEAVED") == 0) { - params_in_out->mem_pattern = PATTERN_INTERLEAVED; + else if (strcmp(key, "COMPRESS") == 0) { + if (val[0] == 'Y' || val[0] == 'y') + (*params_in_out).useCompress = 1; + else + (*params_in_out).useCompress = 0; } - else if (strcmp(val_in, "STRIDED") == 0) { - params_in_out->mem_pattern = PATTERN_STRIDED; + else if (strcmp(key, "TIMESTEPS") == 0) { + int ts_cnt = atoi(val); + if (ts_cnt >= 1) + (*params_in_out).cnt_time_step = ts_cnt; + else { + printf("TIMESTEPS must be at least 1.\n"); + return -1; + } } - else { - params_in_out->mem_pattern = PATTERN_INVALID; + else if (strcmp(key, "DELAYED_CLOSE_TIMESTEPS") == 0) { + int delay_ts_cnt = atoi(val); + if (delay_ts_cnt < 0) + delay_ts_cnt = 0; + (*params_in_out).cnt_time_step_delay = delay_ts_cnt; } - } - else if (strcmp(key, "FILE_PATTERN") == 0) { - if (strcmp(val_in, "CONTIG") == 0) { - params_in_out->file_pattern = PATTERN_CONTIG; + else if (strcmp(key, "NUM_PARTICLES") == 0) { // 16M, 8K + unsigned long long num = 0; + if (str_to_ull(val, &num) < 0) + return -1; + + if (num >= 1) + (*params_in_out).num_particles = num; + else { + printf("NUM_PARTICLES must be at least 1.\n"); + return -1; + } } - else if (strcmp(val_in, "INTERLEAVED") == 0) { - params_in_out->file_pattern = PATTERN_INTERLEAVED; + else if (strcmp(key, "IO_MEM_LIMIT") == 0) { + unsigned long long num = 0; + if (str_to_ull(val, &num) < 0) + return -1; + if (num >= 0) { + (*params_in_out).io_mem_limit = num; + } + else { + printf("IO_MEM_LIMIT must be at least 0.\n"); + return -1; + } } - else if (strcmp(val_in, "STRIDED") == 0) { - params_in_out->file_pattern = PATTERN_STRIDED; + else if (strcmp(key, "EMULATED_COMPUTE_TIME_PER_TIMESTEP") == 0) { + duration time; + if (parse_time(val, &time) < 0) + return -1; + if (time.time_num >= 0) + (*params_in_out).compute_time = time; + else { + printf("EMULATED_COMPUTE_TIME_PER_TIMESTEP must be at least 0.\n"); + return -1; + } } - else { - params_in_out->file_pattern = PATTERN_INVALID; + else if (strcmp(key, "READ_OPTION") == 0) { + if (val_in[0] == 'F') { // FULL + (*params_in_out).read_option = READ_FULL; + } + else if (val_in[0] == 'P') { // PARTIAL + (*params_in_out).read_option = READ_PARTIAL; + } 
+ else if (val_in[0] == 'S') { // STRIDED + (*params_in_out).read_option = READ_STRIDED; + } + else + (*params_in_out).read_option = READ_OPTION_INVALID; } - } - - else if (strcmp(key, "TO_READ_NUM_PARTICLES") == 0) { - if ((*params_in_out).io_op != IO_READ) { - printf("TO_READ_CNT_M parameter is only used with READ_PATTERNs, please check config file.\n"); - return -1; + else if (strcmp(key, "NUM_DIMS") == 0) { + int num = atoi(val); + if (num > 0) + (*params_in_out).num_dims = num; + else { + printf("NUM_DIMS must be at least 1\n"); + return -1; + } } - unsigned long long num = 0; - if (str_to_ull(val, &num) < 0) - return -1; - (*params_in_out).try_num_particles = num; - } - else if (strcmp(key, "COLLECTIVE_METADATA") == 0) { - if (val[0] == 'Y' || val[0] == 'y') - (*params_in_out).meta_coll = 1; - else - (*params_in_out).meta_coll = 0; - } - else if (strcmp(key, "COLLECTIVE_DATA") == 0) { - if (val[0] == 'Y' || val[0] == 'y') - (*params_in_out).data_coll = 1; - else - (*params_in_out).data_coll = 0; - } - else if (strcmp(key, "COMPRESS") == 0) { - if (val[0] == 'Y' || val[0] == 'y') - (*params_in_out).useCompress = 1; - else - (*params_in_out).useCompress = 0; - } - else if (strcmp(key, "TIMESTEPS") == 0) { - int ts_cnt = atoi(val); - if (ts_cnt >= 1) - (*params_in_out).cnt_time_step = ts_cnt; - else { - printf("TIMESTEPS must be at least 1.\n"); - return -1; + else if (strcmp(key, "DIM_1") == 0) { + unsigned long long num = 0; + if (str_to_ull(val, &num) < 0) + return -1; + if (num > 0) + (*params_in_out).dim_1 = num; + else { + printf("DIM_1 must be at least 1\n"); + return -1; + } } - } - else if (strcmp(key, "DELAYED_CLOSE_TIMESTEPS") == 0) { - int delay_ts_cnt = atoi(val); - if (delay_ts_cnt < 0) - delay_ts_cnt = 0; - (*params_in_out).cnt_time_step_delay = delay_ts_cnt; - } - else if (strcmp(key, "NUM_PARTICLES") == 0) { // 16M, 8K - unsigned long long num = 0; - if (str_to_ull(val, &num) < 0) - return -1; - - if (num >= 1) - (*params_in_out).num_particles = num; - else { - printf("NUM_PARTICLES must be at least 1.\n"); - return -1; + else if (strcmp(key, "DIM_2") == 0) { + if ((*params_in_out).num_dims == 1) + return 1; + unsigned long long num = 0; + if (str_to_ull(val, &num) < 0) + return -1; + if (num >= 1) + (*params_in_out).dim_2 = num; + else { + printf("DIM_2 must be at least 1\n"); + return -1; + } } - } - else if (strcmp(key, "IO_MEM_LIMIT") == 0) { - unsigned long long num = 0; - if (str_to_ull(val, &num) < 0) - return -1; - if (num >= 0) { - (*params_in_out).io_mem_limit = num; + else if (strcmp(key, "DIM_3") == 0) { + if ((*params_in_out).num_dims == 1 || (*params_in_out).num_dims == 2) + return 1; + unsigned long long num = 0; + if (str_to_ull(val, &num) < 0) + return -1; + if (num >= 1) + (*params_in_out).dim_3 = num; + else { + printf("DIM_3 must be at least 1\n"); + return -1; + } } - else { - printf("IO_MEM_LIMIT must be at least 0.\n"); - return -1; + else if (strcmp(key, "CHUNK_DIM_1") == 0) { + unsigned long long dim = 0; + if (str_to_ull(val, &dim) < 0) + return -1; + if (dim > 0) + (*params_in_out).chunk_dim_1 = dim; + else { + printf("CHUNK_DIM_1 must be at least 1\n"); + return -1; + } } - } - else if (strcmp(key, "EMULATED_COMPUTE_TIME_PER_TIMESTEP") == 0) { - duration time; - if (parse_time(val, &time) < 0) - return -1; - if (time.time_num >= 0) - (*params_in_out).compute_time = time; - else { - printf("EMULATED_COMPUTE_TIME_PER_TIMESTEP must be at least 0.\n"); - return -1; + else if (strcmp(key, "CHUNK_DIM_2") == 0) { + if ((*params_in_out).num_dims == 
1) + return 1; + unsigned long long dim = 0; + if (str_to_ull(val, &dim) < 0) + return -1; + if (dim >= 1) + (*params_in_out).chunk_dim_2 = dim; + else { + printf("CHUNK_DIM_2 must be at least 1.\n"); + return -1; + } } - } - else if (strcmp(key, "READ_OPTION") == 0) { - if (val_in[0] == 'F') { // FULL - (*params_in_out).read_option = READ_FULL; + else if (strcmp(key, "CHUNK_DIM_3") == 0) { + if ((*params_in_out).num_dims == 1 || (*params_in_out).num_dims == 2) + return 1; + unsigned long long dim = 0; + if (str_to_ull(val, &dim) < 0) + return -1; + if (dim >= 1) + (*params_in_out).chunk_dim_3 = dim; + else { + printf("CHUNK_DIM_3 must be at least 1.\n"); + return -1; + } } - else if (val_in[0] == 'P') { // PARTIAL - (*params_in_out).read_option = READ_PARTIAL; + else if (strcmp(key, "STRIDE_SIZE") == 0) { + unsigned long long num = 0; + if (str_to_ull(val, &num) < 0) + return -1; + (*params_in_out).stride = num; } - else if (val_in[0] == 'S') { // STRIDED - (*params_in_out).read_option = READ_STRIDED; + else if (strcmp(key, "BLOCK_SIZE") == 0) { + unsigned long long num = 0; + if (str_to_ull(val, &num) < 0) + return -1; + (*params_in_out).block_size = num; } - else - (*params_in_out).read_option = READ_OPTION_INVALID; - } - else if (strcmp(key, "NUM_DIMS") == 0) { - int num = atoi(val); - if (num > 0) - (*params_in_out).num_dims = num; - else { - printf("NUM_DIMS must be at least 1\n"); - return -1; + else if (strcmp(key, "BLOCK_CNT") == 0) { + unsigned long long num = 0; + if (str_to_ull(val, &num) < 0) + return -1; + (*params_in_out).block_cnt = num; } - } - else if (strcmp(key, "DIM_1") == 0) { - unsigned long long num = 0; - if (str_to_ull(val, &num) < 0) - return -1; - if (num > 0) - (*params_in_out).dim_1 = num; - else { - printf("DIM_1 must be at least 1\n"); - return -1; + else if (strcmp(key, "CSV_FILE") == 0) { + (*params_in_out).useCSV = 1; + (*params_in_out).csv_path = strdup(val); } - } - else if (strcmp(key, "DIM_2") == 0) { - if ((*params_in_out).num_dims == 1) - return 1; - unsigned long long num = 0; - if (str_to_ull(val, &num) < 0) - return -1; - if (num >= 1) - (*params_in_out).dim_2 = num; - else { - printf("DIM_2 must be at least 1\n"); - return -1; + else if (strcmp(key, "ENV_METADATA_FILE") == 0) { + (*params_in_out).env_meta_path = strdup(val); } - } - else if (strcmp(key, "DIM_3") == 0) { - if ((*params_in_out).num_dims == 1 || (*params_in_out).num_dims == 2) - return 1; - unsigned long long num = 0; - if (str_to_ull(val, &num) < 0) - return -1; - if (num >= 1) - (*params_in_out).dim_3 = num; - else { - printf("DIM_3 must be at least 1\n"); - return -1; + else if (strcmp(key, "FILE_PER_PROC") == 0) { + if (val[0] == 'Y' || val[0] == 'y') + (*params_in_out).file_per_proc = 1; + else + (*params_in_out).file_per_proc = 0; } - } - else if (strcmp(key, "CHUNK_DIM_1") == 0) { - unsigned long long dim = 0; - if (str_to_ull(val, &dim) < 0) - return -1; - if (dim > 0) - (*params_in_out).chunk_dim_1 = dim; else { - printf("CHUNK_DIM_1 must be at least 1\n"); + printf("Unknown Parameter: %s\n", key); return -1; } - } - else if (strcmp(key, "CHUNK_DIM_2") == 0) { - if ((*params_in_out).num_dims == 1) - return 1; - unsigned long long dim = 0; - if (str_to_ull(val, &dim) < 0) - return -1; - if (dim >= 1) - (*params_in_out).chunk_dim_2 = dim; - else { - printf("CHUNK_DIM_2 must be at least 1.\n"); - return -1; + + has_vol_async = has_vol_connector(); + + if (has_vol_async) { + (*params_in_out).asyncMode = MODE_ASYNC; } - } - else if (strcmp(key, "CHUNK_DIM_3") == 0) { - if 
((*params_in_out).num_dims == 1 || (*params_in_out).num_dims == 2) - return 1; - unsigned long long dim = 0; - if (str_to_ull(val, &dim) < 0) - return -1; - if (dim >= 1) - (*params_in_out).chunk_dim_3 = dim; else { - printf("CHUNK_DIM_3 must be at least 1.\n"); - return -1; + (*params_in_out).asyncMode = MODE_SYNC; } - } - else if (strcmp(key, "STRIDE_SIZE") == 0) { - unsigned long long num = 0; - if (str_to_ull(val, &num) < 0) - return -1; - (*params_in_out).stride = num; - } - else if (strcmp(key, "BLOCK_SIZE") == 0) { - unsigned long long num = 0; - if (str_to_ull(val, &num) < 0) - return -1; - (*params_in_out).block_size = num; - } - else if (strcmp(key, "BLOCK_CNT") == 0) { - unsigned long long num = 0; - if (str_to_ull(val, &num) < 0) - return -1; - (*params_in_out).block_cnt = num; - } - else if (strcmp(key, "CSV_FILE") == 0) { - (*params_in_out).useCSV = 1; - (*params_in_out).csv_path = strdup(val); - } - else if (strcmp(key, "ENV_METADATA_FILE") == 0) { - (*params_in_out).env_meta_path = strdup(val); - } - else if (strcmp(key, "FILE_PER_PROC") == 0) { - if (val[0] == 'Y' || val[0] == 'y') - (*params_in_out).file_per_proc = 1; - else - (*params_in_out).file_per_proc = 0; - } - else { - printf("Unknown Parameter: %s\n", key); - return -1; - } - has_vol_async = has_vol_connector(); + if ((*params_in_out).useCSV) + (*params_in_out).csv_fs = csv_init(params_in_out->csv_path, params_in_out->env_meta_path); - if (has_vol_async) { - (*params_in_out).asyncMode = MODE_ASYNC; - } - else { - (*params_in_out).asyncMode = MODE_SYNC; + if (val) + free(val); + return 1; } - - if ((*params_in_out).useCSV) - (*params_in_out).csv_fs = csv_init(params_in_out->csv_path, params_in_out->env_meta_path); - - if (val) - free(val); - return 1; -} -void -bench_params_init(bench_params *params_out) -{ - if (!params_out) - params_out = (bench_params *)calloc(1, sizeof(bench_params)); - (*params_out).pattern_name = NULL; - (*params_out).meta_coll = 0; - (*params_out).data_coll = 0; - (*params_out).asyncMode = MODE_SYNC; - - (*params_out).cnt_time_step = 0; - (*params_out).cnt_time_step_delay = 0; - (*params_out).num_particles = 0; // total number per rank - (*params_out).io_mem_limit = 0; - (*params_out).try_num_particles = 0; // to read - (*params_out).compute_time.time_num = 0; - (*params_out).num_dims = 1; - - (*params_out).stride = 0; - (*params_out).block_size = 0; - (*params_out).block_cnt = 0; - (*params_out).dim_1 = 1; - (*params_out).dim_2 = 1; - (*params_out).dim_3 = 1; - (*params_out).chunk_dim_1 = 1; - (*params_out).chunk_dim_2 = 1; - (*params_out).chunk_dim_3 = 1; - (*params_out).csv_path = NULL; - (*params_out).env_meta_path = NULL; - - (*params_out).csv_path = NULL; - (*params_out).csv_fs = NULL; - (*params_out).env_meta_path = NULL; - (*params_out).file_per_proc = 0; -} - -int -has_vol_connector() -{ + void bench_params_init(bench_params * params_out) + { + if (!params_out) + params_out = (bench_params *)calloc(1, sizeof(bench_params)); + (*params_out).pattern_name = NULL; + (*params_out).meta_coll = 0; + (*params_out).data_coll = 0; + (*params_out).asyncMode = MODE_SYNC; + + (*params_out).cnt_time_step = 0; + (*params_out).cnt_time_step_delay = 0; + (*params_out).num_particles = 0; // total number per rank + (*params_out).io_mem_limit = 0; + (*params_out).try_num_particles = 0; // to read + (*params_out).compute_time.time_num = 0; + (*params_out).num_dims = 1; + + (*params_out).stride = 0; + (*params_out).block_size = 0; + (*params_out).block_cnt = 0; + (*params_out).dim_1 = 1; + 
(*params_out).dim_2 = 1; + (*params_out).dim_3 = 1; + (*params_out).chunk_dim_1 = 1; + (*params_out).chunk_dim_2 = 1; + (*params_out).chunk_dim_3 = 1; + (*params_out).csv_path = NULL; + (*params_out).env_meta_path = NULL; + + (*params_out).csv_path = NULL; + (*params_out).csv_fs = NULL; + (*params_out).env_meta_path = NULL; + (*params_out).file_per_proc = 0; + } + + int has_vol_connector() + { #if H5_VERSION_GE(1, 13, 0) - char *connector = getenv("HDF5_VOL_CONNECTOR"); + char *connector = getenv("HDF5_VOL_CONNECTOR"); - if (connector != NULL) { - return 1; - } + if (connector != NULL) { + return 1; + } #endif - return 0; -} + return 0; + } -int -read_config(const char *file_path, bench_params *params_out, int do_write) -{ - char cfg_line[CFG_LINE_LEN_MAX] = ""; + int read_config(const char *file_path, bench_params *params_out, int do_write) + { + char cfg_line[CFG_LINE_LEN_MAX] = ""; - if (!params_out) - params_out = (bench_params *)calloc(1, sizeof(bench_params)); - else - memset(params_out, 0, sizeof(bench_params)); - // Default settings - bench_params_init(params_out); - (*params_out).data_file_path = strdup(file_path); + if (!params_out) + params_out = (bench_params *)calloc(1, sizeof(bench_params)); + else + memset(params_out, 0, sizeof(bench_params)); + // Default settings + bench_params_init(params_out); + (*params_out).data_file_path = strdup(file_path); - FILE *file = fopen(file_path, "r"); + FILE *file = fopen(file_path, "r"); - int parsed = 1; + int parsed = 1; - // default values - (*params_out).useCSV = 0; - if (do_write) - (*params_out).io_op = IO_WRITE; - else - (*params_out).io_op = IO_READ; + // default values + (*params_out).useCSV = 0; + if (do_write) + (*params_out).io_op = IO_WRITE; + else + (*params_out).io_op = IO_READ; - while (fgets(cfg_line, CFG_LINE_LEN_MAX, file) && (parsed == 1)) { - if (cfg_line[0] == '#') { // skip comment lines - continue; - } - char *tokens[2]; - char *tok = strtok(cfg_line, CFG_DELIMS); - if (tok) { - tokens[0] = tok; - tok = strtok(NULL, CFG_DELIMS); + while (fgets(cfg_line, CFG_LINE_LEN_MAX, file) && (parsed == 1)) { + if (cfg_line[0] == '#') { // skip comment lines + continue; + } + char *tokens[2]; + char *tok = strtok(cfg_line, CFG_DELIMS); if (tok) { - tokens[1] = tok; + tokens[0] = tok; + tok = strtok(NULL, CFG_DELIMS); + if (tok) { + tokens[1] = tok; + } + else + return -1; } else return -1; + // printf("key = [%s], val = [%s]\n", tokens[0], tokens[1]); + parsed = _set_params(tokens[0], tokens[1], params_out, do_write); } - else + if (parsed < 0) return -1; - // printf("key = [%s], val = [%s]\n", tokens[0], tokens[1]); - parsed = _set_params(tokens[0], tokens[1], params_out, do_write); - } - if (parsed < 0) - return -1; - int ret = _set_io_pattern(params_out); - if (ret < 0) - return ret; + int ret = _set_io_pattern(params_out); + if (ret < 0) + return ret; - if (params_out->io_op == IO_WRITE || - (params_out->io_op == IO_READ && params_out->try_num_particles == 0)) { - params_out->num_particles = params_out->dim_1 * params_out->dim_2 * params_out->dim_3; - } - - if (params_out->io_mem_limit > 0) { - if (params_out->num_particles * PARTICLE_SIZE >= params_out->io_mem_limit) { - printf("Requested memory (%llu particles, %llu, PARTICLE_SIZE = %ld) is larger than specified " - "memory bound (%llu), " - "please check IO_MEM_LIMIT in your config file.\n", - params_out->num_particles, params_out->num_particles * PARTICLE_SIZE, PARTICLE_SIZE, - params_out->io_mem_limit); - return -1; + if (params_out->io_op == IO_WRITE || + 
(params_out->io_op == IO_READ && params_out->try_num_particles == 0)) { + params_out->num_particles = params_out->dim_1 * params_out->dim_2 * params_out->dim_3; } - } - if (params_out->io_op == IO_WRITE) { - if (params_out->access_pattern.pattern_write == CONTIG_CONTIG_STRIDED_1D) { - if (params_out->stride < 1 || params_out->block_size < 1 || params_out->block_cnt < 1) { - printf("Strided read requires STRIDE_SIZE/BLOCK_SIZE/BLOCK_CNT no less than 1.\n"); + + if (params_out->io_mem_limit > 0) { + if (params_out->num_particles * PARTICLE_SIZE >= params_out->io_mem_limit) { + printf( + "Requested memory (%llu particles, %llu, PARTICLE_SIZE = %ld) is larger than specified " + "memory bound (%llu), " + "please check IO_MEM_LIMIT in your config file.\n", + params_out->num_particles, params_out->num_particles * PARTICLE_SIZE, PARTICLE_SIZE, + params_out->io_mem_limit); return -1; } } - } - else if (params_out->io_op == IO_READ) { // read - if (params_out->access_pattern.pattern_read == CONTIG_1D) { // read whole file - if (params_out->num_particles > 1) - params_out->try_num_particles = params_out->num_particles; - else - params_out->num_particles = params_out->try_num_particles; + if (params_out->io_op == IO_WRITE) { + if (params_out->access_pattern.pattern_write == CONTIG_CONTIG_STRIDED_1D) { + if (params_out->stride < 1 || params_out->block_size < 1 || params_out->block_cnt < 1) { + printf("Strided read requires STRIDE_SIZE/BLOCK_SIZE/BLOCK_CNT no less than 1.\n"); + return -1; + } + } } - if (params_out->access_pattern.pattern_read == STRIDED_1D) { - if (params_out->stride < 1 || params_out->block_size < 1 || params_out->block_cnt < 1) { - printf("Strided read requires STRIDE_SIZE/BLOCK_SIZE/BLOCK_CNT no less than 1.\n"); - return -1; + else if (params_out->io_op == IO_READ) { // read + if (params_out->access_pattern.pattern_read == CONTIG_1D) { // read whole file + if (params_out->num_particles > 1) + params_out->try_num_particles = params_out->num_particles; + else + params_out->num_particles = params_out->try_num_particles; + } + if (params_out->access_pattern.pattern_read == STRIDED_1D) { + if (params_out->stride < 1 || params_out->block_size < 1 || params_out->block_cnt < 1) { + printf("Strided read requires STRIDE_SIZE/BLOCK_SIZE/BLOCK_CNT no less than 1.\n"); + return -1; + } } } + return 0; } - return 0; -} -// print all fields of params -void -print_params(const bench_params *p) -{ - printf("=======================================\n"); - printf("Benchmark configuration: \nFile: %s\n", p->data_file_path); - printf("Number of particles per rank: %llu\n", p->num_particles); - printf("Number of time steps: %d\n", p->cnt_time_step); - printf("Emulated compute time per timestep: %lu\n", p->compute_time.time_num); - - printf("Mode: %s\n", p->asyncMode == MODE_SYNC ? "SYNC" : "ASYNC"); - printf("Collective metadata operations: %s\n", p->meta_coll == 1 ? "YES" : "NO"); - printf("Collective buffering for data operations: %s\n", p->data_coll == 1 ? 
"YES" : "NO"); - - printf("Number of dimensions: %d\n", p->num_dims); - printf(" Dim_1: %lu\n", p->dim_1); - if (p->num_dims >= 2) { - printf(" Dim_2: %lu\n", p->dim_2); - } - if (p->num_dims >= 3) { - printf(" Dim_3: %lu\n", p->dim_3); - } + // print all fields of params + void print_params(const bench_params *p) + { + printf("=======================================\n"); + printf("Benchmark configuration: \nFile: %s\n", p->data_file_path); + printf("Number of particles per rank: %llu\n", p->num_particles); + printf("Number of time steps: %d\n", p->cnt_time_step); + printf("Emulated compute time per timestep: %lu\n", p->compute_time.time_num); - if (p->access_pattern.pattern_read == STRIDED_1D || - p->access_pattern.pattern_write == CONTIG_CONTIG_STRIDED_1D) { - printf("Strided access settings:\n"); - printf(" Stride size = %ld\n", p->stride); - printf(" Block size = %ld\n", p->block_size); - } + printf("Mode: %s\n", p->asyncMode == MODE_SYNC ? "SYNC" : "ASYNC"); + printf("Collective metadata operations: %s\n", p->meta_coll == 1 ? "YES" : "NO"); + printf("Collective buffering for data operations: %s\n", p->data_coll == 1 ? "YES" : "NO"); - if (p->useCompress) { - printf("Use compression: %d\n", p->useCompress); - printf(" chunk_dim1: %lu\n", p->chunk_dim_1); + printf("Number of dimensions: %d\n", p->num_dims); + printf(" Dim_1: %lu\n", p->dim_1); if (p->num_dims >= 2) { - printf(" chunk_dim2: %lu\n", p->chunk_dim_2); + printf(" Dim_2: %lu\n", p->dim_2); } - else if (p->num_dims >= 3) { - printf(" chunk_dim3: %lu\n", p->chunk_dim_3); + if (p->num_dims >= 3) { + printf(" Dim_3: %lu\n", p->dim_3); } - } - printf("=======================================\n"); -} + if (p->access_pattern.pattern_read == STRIDED_1D || + p->access_pattern.pattern_write == CONTIG_CONTIG_STRIDED_1D) { + printf("Strided access settings:\n"); + printf(" Stride size = %ld\n", p->stride); + printf(" Block size = %ld\n", p->block_size); + } -void -bench_params_free(bench_params *p) -{ - if (!p) - return; - if (p->data_file_path) - free(p->data_file_path); - if (p->pattern_name) - free(p->pattern_name); -} + if (p->useCompress) { + printf("Use compression: %d\n", p->useCompress); + printf(" chunk_dim1: %lu\n", p->chunk_dim_1); + if (p->num_dims >= 2) { + printf(" chunk_dim2: %lu\n", p->chunk_dim_2); + } + else if (p->num_dims >= 3) { + printf(" chunk_dim3: %lu\n", p->chunk_dim_3); + } + } -int -file_create_try(const char *path) -{ - FILE *fs = fopen(path, "w+"); - if (!fs) { - printf("Failed to create file: %s, Please check permission.\n", path); - return -1; + printf("=======================================\n"); } - fclose(fs); - return 0; -} -int -file_exist(const char *path) -{ - FILE *f = fopen(path, "r"); - if (!f) { - printf("Failed to open file: %s, Please check if the file exists.\n", path); - return -1; + void bench_params_free(bench_params * p) + { + if (!p) + return; + if (p->data_file_path) + free(p->data_file_path); + if (p->pattern_name) + free(p->pattern_name); } - fclose(f); - return 0; -} -/* TODO: - * - read lines from metadata_list_file, each presents an environment variable name. - * - get val from getrnv(), write to fs. 
- * */ - -int -record_env_metadata(FILE *fs, const char *metadata_list_file) -{ - // read list file line, use each line as a key to search env - if (!fs) - return -1; - FILE *lfs = fopen(metadata_list_file, "r"); - if (!lfs) { - printf("Can not open metadata list file: %s\n", metadata_list_file); - return -1; + int file_create_try(const char *path) + { + FILE *fs = fopen(path, "w+"); + if (!fs) { + printf("Failed to create file: %s, Please check permission.\n", path); + return -1; + } + fclose(fs); + return 0; } - fprintf(fs, "======================= Metadata =====================\n"); + int file_exist(const char *path) + { + FILE *f = fopen(path, "r"); + if (!f) { + printf("Failed to open file: %s, Please check if the file exists.\n", path); + return -1; + } + fclose(f); + return 0; + } - char line[10 * CFG_LINE_LEN_MAX]; // some env val could be very large, such as PATH - while (fgets(line, CFG_LINE_LEN_MAX, lfs)) { - if (line[0] == '#') // skip comment lines - continue; - if (line[0] == '\n') - continue; + /* TODO: + * - read lines from metadata_list_file, each presents an environment variable name. + * - get val from getrnv(), write to fs. + * */ - if (line[strlen(line) - 1] == '\n') { - line[strlen(line) - 1] = 0; + int record_env_metadata(FILE * fs, const char *metadata_list_file) + { + // read list file line, use each line as a key to search env + if (!fs) + return -1; + FILE *lfs = fopen(metadata_list_file, "r"); + if (!lfs) { + printf("Can not open metadata list file: %s\n", metadata_list_file); + return -1; } - char *val = getenv(line); - // printf("%s = %s\n", line, val); - fprintf(fs, "%s = %s\n", line, val); + fprintf(fs, "======================= Metadata =====================\n"); - if (!val) { // null - printf(" %s not set.\n", line); - continue; - } - } + char line[10 * CFG_LINE_LEN_MAX]; // some env val could be very large, such as PATH + while (fgets(line, CFG_LINE_LEN_MAX, lfs)) { + if (line[0] == '#') // skip comment lines + continue; + if (line[0] == '\n') + continue; - fprintf(fs, "======================= Metadata end ====================\n"); - fclose(lfs); - return 0; -} + if (line[strlen(line) - 1] == '\n') { + line[strlen(line) - 1] = 0; + } -FILE * -csv_init(const char *csv_path, const char *metadata_list_file) -{ //, const char* metadata_list_file: should be optional. - FILE *fs = fopen(csv_path, "w+"); + char *val = getenv(line); + // printf("%s = %s\n", line, val); + fprintf(fs, "%s = %s\n", line, val); - if (!fs) { - printf("Failed to create file: %s, Please check permission.\n", csv_path); - return NULL; - } + if (!val) { // null + printf(" %s not set.\n", line); + continue; + } + } - if (metadata_list_file) { - if (record_env_metadata(fs, metadata_list_file) < 0) - return NULL; + fprintf(fs, "======================= Metadata end ====================\n"); + fclose(lfs); + return 0; } - return fs; -} + FILE *csv_init(const char *csv_path, const char *metadata_list_file) + { //, const char* metadata_list_file: should be optional. 
+ FILE *fs = fopen(csv_path, "w+"); -int -csv_output_line(FILE *fs, char *name, char *val_str) -{ - fprintf(fs, "%s,", name); - fprintf(fs, " %s\n", val_str); - return 0; -} + if (!fs) { + printf("Failed to create file: %s, Please check permission.\n", csv_path); + return NULL; + } -int -argv_print(int argc, char *argv[]) -{ - if (argc < 1) - return -1; - printf("%d arguments provided.\n", argc); - for (int i = 0; i < argc; i++) { - printf("idx = %d, argv = %s\n", i, argv[i]); + if (metadata_list_file) { + if (record_env_metadata(fs, metadata_list_file) < 0) + return NULL; + } + + return fs; } - return 0; -} -char * -get_file_name_from_path(char *path) -{ - if (path == NULL) - return NULL; + int csv_output_line(FILE * fs, char *name, char *val_str) + { + fprintf(fs, "%s,", name); + fprintf(fs, " %s\n", val_str); + return 0; + } - char *pFileName = path; - for (char *pCur = path; *pCur != '\0'; pCur++) { - if (*pCur == '/' || *pCur == '\\') - pFileName = pCur + 1; + int argv_print(int argc, char *argv[]) + { + if (argc < 1) + return -1; + printf("%d arguments provided.\n", argc); + for (int i = 0; i < argc; i++) { + printf("idx = %d, argv = %s\n", i, argv[i]); + } + return 0; } - return pFileName; -} + char *get_file_name_from_path(char *path) + { + if (path == NULL) + return NULL; -char * -substr(char *src, size_t start, size_t len) -{ - if (start + len > strlen(src)) { - fprintf(stderr, "%s() error: invalid substring index (start+len > length).\n", __func__); - return NULL; - } + char *pFileName = path; + for (char *pCur = path; *pCur != '\0'; pCur++) { + if (*pCur == '/' || *pCur == '\\') + pFileName = pCur + 1; + } - char *sub = calloc(1, len + 1); - if (!sub) { - fprintf(stderr, "%s() error: memory allocation failed.\n", __func__); - return NULL; + return pFileName; } - memcpy(sub, src + start, len); - // sub[len] = '\0'; // by using calloc, sub is filled with 0 (null) + char *substr(char *src, size_t start, size_t len) + { + if (start + len > strlen(src)) { + fprintf(stderr, "%s() error: invalid substring index (start+len > length).\n", __func__); + return NULL; + } - return sub; -} + char *sub = calloc(1, len + 1); + if (!sub) { + fprintf(stderr, "%s() error: memory allocation failed.\n", __func__); + return NULL; + } -char * -get_dir_from_path(char *path) -{ - if (path == NULL) - return NULL; + memcpy(sub, src + start, len); + // sub[len] = '\0'; // by using calloc, sub is filled with 0 (null) - char *pDir = substr(path, 0, strlen(path) - strlen(get_file_name_from_path(path))); + return sub; + } - return pDir; -} + char *get_dir_from_path(char *path) + { + if (path == NULL) + return NULL; + + char *pDir = substr(path, 0, strlen(path) - strlen(get_file_name_from_path(path))); + + return pDir; + } From 8e504b1d17796d3350417234ee169a7db5220c95 Mon Sep 17 00:00:00 2001 From: Huihuo Zheng Date: Fri, 11 Feb 2022 11:41:56 -0600 Subject: [PATCH 20/55] Cache VOL support; added new API functions --- h5bench_patterns/h5bench_write.c | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/h5bench_patterns/h5bench_write.c b/h5bench_patterns/h5bench_write.c index 322f4b2d..3ee6ae9a 100644 --- a/h5bench_patterns/h5bench_write.c +++ b/h5bench_patterns/h5bench_write.c @@ -746,6 +746,9 @@ _run_benchmark_write(bench_params params, hid_t file_id, hid_t fapl, hid_t files unsigned long metadata_time_imp = 0, data_time_imp = 0; unsigned long meta_time1 = 0, meta_time2 = 0, meta_time3 = 0, meta_time4 = 0, meta_time5 = 0; for (int ts_index = 0; ts_index < timestep_cnt; ts_index++) { +#ifdef 
USE_CACHE_VOL
+        H5Fcache_async_close_wait(file_id);
+#endif
         meta_time1 = 0, meta_time2 = 0, meta_time3 = 0, meta_time4 = 0, meta_time5 = 0;
         time_step *ts = &(MEM_MONITOR->time_steps[ts_index]);
         MEM_MONITOR->mem_used += ts->mem_size;
@@ -1047,6 +1050,9 @@ main(int argc, char *argv[])
     else {
         file_id = H5Fcreate_async(output_file, H5F_ACC_TRUNC, H5P_DEFAULT, fapl, 0);
     }
+#ifdef USE_CACHE_VOL
+    H5Fcache_async_close_set(file_id);
+#endif
     unsigned long tfopen_end = get_time_usec();
 
     if (MY_RANK == 0)
@@ -1150,4 +1156,4 @@ main(int argc, char *argv[])
 
     MPI_Finalize();
     return 0;
-}
\ No newline at end of file
+}

From c4e4d39c3bba69ead4576d41030789ab5c6c2adf Mon Sep 17 00:00:00 2001
From: Huihuo Zheng
Date: Sun, 10 Apr 2022 21:31:18 -0500
Subject: [PATCH 21/55] update cache vol doc

---
 docs/source/buildinstructions.rst | 27 +++++++++++++++++++++++++++
 docs/source/index.rst             | 22 +++++++++++-----------
 docs/source/running.rst           | 11 ++++++++++-
 h5bench                           |  5 ++++-
 4 files changed, 52 insertions(+), 13 deletions(-)

diff --git a/docs/source/buildinstructions.rst b/docs/source/buildinstructions.rst
index b8cd8ba6..cd2a71e4 100644
--- a/docs/source/buildinstructions.rst
+++ b/docs/source/buildinstructions.rst
@@ -76,6 +76,33 @@ Necessary environment variable setting:
 
 And all the binaries will be built to the build/directory.
 
+
+
+Build to run with Cache VOL
+-----------------------------------
+
+To run h5bench with Cache VOL, you need the develop branches of HDF5, Async VOL, and Cache VOL. Build Async VOL and Cache VOL first, then create a folder (HDF5_VOL_DIR) with the following structure and copy their header and library files into it::
+
+   HDF5_VOL_DIR:
+     ./include - contains header files for Cache VOL and Async VOL.
+     ./lib - contains Cache VOL and Async VOL libraries such as libh5cache_ext.so, libcache_new_h5api.a, libasynchdf5.a, libh5async.so
+
+.. code-block:: bash
+
+   mkdir build
+   cd build
+   cmake .. -DWITH_CACHE_VOL:BOOL=ON -DCMAKE_C_FLAGS="-I$HDF5_VOL_DIR/include -L$HDF5_VOL_DIR/lib -g"
+   make
+
+Please also set the following environment variables::
+
+   export HDF5_HOME="$YOUR_HDF5_DEVELOP_BRANCH_BUILD/hdf5"
+   export HDF5_VOL_CONNECTOR="cache_ext config=config.cfg;under_vol=0;under_info={}"
+   export HDF5_PLUGIN_PATH="$HDF5_VOL_DIR/lib"
+   export DYLD_LIBRARY_PATH="$HDF5_HOME/lib:$HDF5_PLUGIN_PATH"
+
+On Linux platforms, replace DYLD_LIBRARY_PATH with LD_LIBRARY_PATH. Please follow the instructions at https://vol-cache.readthedocs.io/en/latest/gettingstarted.html#set-environment-variables to set up the configuration for Cache VOL.
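
For reference, a minimal ``config.cfg`` for the Cache VOL, mirroring the settings the CI jobs later in this series write to ``cache_1.cfg``; the storage path and sizes are placeholders to adapt to your system:

.. code-block::

   HDF5_CACHE_STORAGE_TYPE: SSD
   HDF5_CACHE_STORAGE_PATH: SSD
   HDF5_CACHE_STORAGE_SCOPE: LOCAL
   HDF5_CACHE_STORAGE_SIZE: 128755813888
   HDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184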
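
And a condensed C sketch of the call pattern PATCH 20 above introduces around the benchmark's write loop. The names match the diff; error handling, the fapl setup, and the real benchmark plumbing are elided, so treat this as an outline rather than the benchmark code itself:

.. code-block:: c

   #include <hdf5.h>
   #ifdef USE_CACHE_VOL
   #include "cache_new_h5api.h"
   #endif

   /* Outline of PATCH 20: request asynchronous file close once after
    * H5Fcreate_async(), then drain any pending close operations at the
    * top of every timestep before touching the file again. */
   static void
   write_all_timesteps(const char *path, hid_t fapl, int timestep_cnt)
   {
       hid_t file_id = H5Fcreate_async(path, H5F_ACC_TRUNC, H5P_DEFAULT, fapl, 0);
   #ifdef USE_CACHE_VOL
       H5Fcache_async_close_set(file_id);      /* enable async close on this file */
   #endif

       for (int ts_index = 0; ts_index < timestep_cnt; ts_index++) {
   #ifdef USE_CACHE_VOL
           H5Fcache_async_close_wait(file_id); /* wait on pending close ops */
   #endif
           /* ... create the per-timestep group/datasets and issue
            *     H5Dwrite_async() calls, as in h5bench_write.c ... */
       }
       H5Fclose(file_id);
   }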
+
+
 -----------------------------------
 Build with Spack
 -----------------------------------
diff --git a/docs/source/index.rst b/docs/source/index.rst
index 5a20075e..b1e9b6b0 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -10,17 +10,17 @@ h5bench is a suite of parallel I/O benchmarks or kernels representing I/O patter
 These are the benchmarks and kernels currently available in h5bench:
 
-==================== =========================== ==================== ========================
-**Benchmark**        **Name**                    **SYNC**             **ASYNC VOL**
-==================== =========================== ==================== ========================
-h5bench write        ``h5bench_write``           |:white_check_mark:| |:white_check_mark:|
-h5bench read         ``h5bench_read``            |:white_check_mark:| |:white_check_mark:|
-Metadata Stress      ``h5bench_hdf5_iotest``     |:white_check_mark:| |:white_large_square:|
-AMReX                ``h5bench_amrex``           |:white_check_mark:| |:white_check_mark:|
-Exerciser            ``h5bench_exerciser``       |:white_check_mark:| |:white_large_square:|
-OpenPMD (write)      ``h5bench_openpmd_write``   |:white_check_mark:| |:white_large_square:|
-OpenPMD (read)       ``h5bench_openpmd_read``    |:white_check_mark:| |:white_large_square:|
-==================== =========================== ==================== ========================
+==================== =========================== ==================== ======================== ========================
+**Benchmark**        **Name**                    **SYNC**             **ASYNC VOL**            **CACHE VOL**
+==================== =========================== ==================== ======================== ========================
+h5bench write        ``h5bench_write``           |:white_check_mark:| |:white_check_mark:|     |:white_check_mark:|
+h5bench read         ``h5bench_read``            |:white_check_mark:| |:white_check_mark:|     |:white_large_square:|
+Metadata Stress      ``h5bench_hdf5_iotest``     |:white_check_mark:| |:white_large_square:|   |:white_large_square:|
+AMReX                ``h5bench_amrex``           |:white_check_mark:| |:white_check_mark:|     |:white_large_square:|
+Exerciser            ``h5bench_exerciser``       |:white_check_mark:| |:white_large_square:|   |:white_large_square:|
+OpenPMD (write)      ``h5bench_openpmd_write``   |:white_check_mark:| |:white_large_square:|   |:white_large_square:|
+OpenPMD (read)       ``h5bench_openpmd_read``    |:white_check_mark:| |:white_large_square:|   |:white_large_square:|
+==================== =========================== ==================== ======================== ========================
 
 .. toctree::
    :maxdepth: 2
diff --git a/docs/source/running.rst b/docs/source/running.rst
index b33856ce..0bfdc303 100644
--- a/docs/source/running.rst
+++ b/docs/source/running.rst
@@ -75,7 +75,16 @@ Because some benchmarks inside h5bench do not have support for VOL connectors ye
         "connector": "async under_vol=0;under_info={}"
     }
 
-You should provide the absolute path for all the libraries required by the VOL connector using the ``library`` property, the ``path`` of the VOL connector, and the configuration in ``connector``. The provided example depicts how to configure the HDF5 VOL async connector.
+You should provide the absolute path for all the libraries required by the VOL connector using the ``library`` property, the ``path`` of the VOL connector, and the configuration in ``connector``. The provided example depicts how to configure the HDF5 VOL async connector. For the Cache VOL connector, the ``vol`` section looks like:
+
+.. code-block::
+
+    "vol": {
+        "library": "/hdf5-vol-dir/lib:/argobots/install/lib:/hdf5-install/install:",
+        "path": "/hdf5-vol-dir/lib",
+        "connector": "cache_ext config=cache.cfg;under_vol=512;under_info={under_vol=0;under_info={}}"
+    }
+
+You also have to provide the configuration file for the Cache VOL storage, ``cache.cfg``.
 
 Directory
 ^^^^^^^^^
diff --git a/h5bench b/h5bench
index 2bdd4278..04f63af4 100755
--- a/h5bench
+++ b/h5bench
@@ -238,7 +238,10 @@ class H5bench:
             self.vol_environment['DYLD_LIBRARY_PATH'] = vol['library']
         if 'path' in vol:
             self.vol_environment['HDF5_PLUGIN_PATH'] = vol['path']
-
+        if 'cache_write' in vol:
+            self.vol_environment['HDF5_CACHE_WR'] = vol['cache_write']
+        if 'cache_read' in vol:
+            self.vol_environment['HDF5_CACHE_RD'] = vol['cache_read']
         self.vol_environment['ABT_THREAD_STACKSIZE'] = '100000'
 
     def enable_vol(self, vol):
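
Tying the two pieces of PATCH 21 together: the new ``cache_write``/``cache_read`` keys handled in the ``h5bench`` launcher change above are exported verbatim as the ``HDF5_CACHE_WR`` and ``HDF5_CACHE_RD`` environment variables, so they can ride along in the same ``vol`` section. The value ``yes`` below is an assumption; use whatever values the Cache VOL documents for those variables:

.. code-block::

    "vol": {
        "library": "/hdf5-vol-dir/lib:/argobots/install/lib:",
        "path": "/hdf5-vol-dir/lib",
        "connector": "cache_ext config=cache.cfg;under_vol=512;under_info={under_vol=0;under_info={}}",
        "cache_write": "yes",
        "cache_read": "yes"
    }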
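
PATCH 22 below, besides wiring Cache VOL into CI, reflows the configuration parser in ``commons/h5bench_util.c`` (``read_config()``/``_set_params()``). For orientation, a minimal configuration of the kind that parser accepts; the keys come from ``_set_params()`` in the diff, while the ``=`` delimiter and the values are illustrative assumptions:

.. code-block::

   # h5bench write benchmark, 1D contiguous (memory) to contiguous (file).
   # Lines starting with '#' are skipped by the parser.
   IO_OPERATION=WRITE
   MEM_PATTERN=CONTIG
   FILE_PATTERN=CONTIG
   TIMESTEPS=5
   DELAYED_CLOSE_TIMESTEPS=2
   COLLECTIVE_METADATA=NO
   COLLECTIVE_DATA=NO
   EMULATED_COMPUTE_TIME_PER_TIMESTEP=1 s
   NUM_DIMS=1
   DIM_1=16 M
   DIM_2=1
   DIM_3=1
   CSV_FILE=output.csv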
From f4c8eb13e0cb677c3f1e46a153841cea2388ef0b Mon Sep 17 00:00:00 2001
From: Huihuo Zheng
Date: Mon, 11 Apr 2022 14:52:14 -0500
Subject: [PATCH 22/55] pull changes from hpc-io and added documentation for
 cache-vol support

---
 .github/workflows/h5bench-hdf5-develop.yml |   81 +-
 commons/h5bench_util.c                     | 1519 ++++++++++----------
 docs/source/vpic.rst                       |    6 +-
 h5bench                                    |    3 +-
 samples/cache-write-1d-contig-contig.json  |   37 +
 samples/cache-write-2d-contig-contig.json  |   37 +
 samples/cache-write-3d-contig-contig.json  |   37 +
 samples/update.py                          |   32 +-
 8 files changed, 978 insertions(+), 774 deletions(-)
 create mode 100644 samples/cache-write-1d-contig-contig.json
 create mode 100644 samples/cache-write-2d-contig-contig.json
 create mode 100644 samples/cache-write-3d-contig-contig.json

diff --git a/.github/workflows/h5bench-hdf5-develop.yml b/.github/workflows/h5bench-hdf5-develop.yml
index e6203eca..3b421c1a 100644
--- a/.github/workflows/h5bench-hdf5-develop.yml
+++ b/.github/workflows/h5bench-hdf5-develop.yml
@@ -26,6 +26,9 @@ jobs:
           # VOL-ASYNC
          git clone --recursive https://github.com/hpc-io/vol-async.git
 
+          # VOL-CACHE
+          git clone --recursive https://github.com/hpc-io/vol-cache.git
+
           # PnetCDF
           wget https://parallel-netcdf.github.io/Release/pnetcdf-1.12.2.tar.gz
           tar -zxf pnetcdf-1.12.2.tar.gz
@@ -118,7 +121,23 @@ jobs:
           export HDF5_VOL_CONNECTOR="async under_vol=0;under_info={}"
 
           make check
-
+
+      - name: Build VOL-CACHE
+        run: |
+          current="$PWD"
+          export HDF5_ROOT=/opt/hdf5
+          export ABT_DIR=$current/vol-async/argobots/install
+          [ -e $current/vol ] || mkdir $current/vol
+          [ -e $current/vol/bin ] || mkdir $current/vol/bin
+          [ -e $current/vol/lib ] || mkdir $current/vol/lib
+          [ -e $current/vol/include ] || mkdir $current/vol/include
+          export HDF5_VOL_DIR=$current/vol/
+          cp -r $current/vol-async/src/*.so $current/vol-async/src/*.a $current/vol/lib/
+          cp -r $current/vol-async/src/*.h $current/vol/include/
+          export LD_LIBRARY_PATH=$HDF5_VOL_DIR/lib:$HDF5_ROOT/lib:$ABT_DIR/lib:$LD_LIBRARY_PATH
+          cd $current/vol-cache/src
+          make all install
+
       - name: Build h5bench SYNC
         run: |
           current="$PWD"
@@ -152,6 +171,21 @@ jobs:
             -DH5BENCH_ALL=ON
           make
 
+      - name: Build h5bench CACHE
+        run: |
+          current="$PWD"
+          export HDF5_ROOT=/opt/hdf5
+          export HDF5_HOME=/opt/hdf5
+          export PNETCDF_HOME=/opt/pnetcdf
+          export HDF5_VOL_DIR=$current/vol
+          mkdir build-cache
+          cd build-cache
+          cmake .. \
+            -DWITH_CACHE_VOL:BOOL=ON \
+            -DWITH_ASYNC_VOL:BOOL=ON \
+            -DCMAKE_C_FLAGS="-I$HDF5_VOL_DIR/include -L$HDF5_VOL_DIR/lib" \
+            -DCMAKE_C_COMPILER=$HDF5_HOME/bin/h5pcc
+          make
+
       - name: Test h5bench SYNC write/read
         run: |
           cd build-sync
@@ -563,6 +597,51 @@ jobs:
           ./h5bench --debug --abort-on-failure ../samples/async-amrex.json
 
+      - name: Test h5bench CACHE write 3D contiguous (memory) contiguous (file)
+        run: |
+          current="$PWD"
+          export HDF5_DIR=/opt/hdf5
+          export HDF5_ROOT=/opt/hdf5
+          export ABT_DIR=$current/vol-async/argobots/install
+          export ASYNC_DIR=$current/vol-async/src
+          export VOL_DIR=$current/vol
+          export LD_PRELOAD=$ABT_DIR/lib/libabt.so
+          cd build-cache
+          printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg
+          [ -e SSD ] || mkdir SSD
+          python3 ../samples/update.py ../samples/cache-write-3d-contig-contig.json
+          ./h5bench --debug --abort-on-failure --validate-mode ../samples/cache-write-3d-contig-contig.json
+
+      - name: Test h5bench CACHE write 2D contiguous (memory) contiguous (file)
+        run: |
+          current="$PWD"
+          export HDF5_DIR=/opt/hdf5
+          export HDF5_ROOT=/opt/hdf5
+          export ABT_DIR=$current/vol-async/argobots/install
+          export ASYNC_DIR=$current/vol-async/src
+          export VOL_DIR=$current/vol
+          export LD_PRELOAD=$ABT_DIR/lib/libabt.so
+          cd build-cache
+          printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg
+          python3 ../samples/update.py ../samples/cache-write-2d-contig-contig.json
+          [ -e SSD ] || mkdir SSD
+          ./h5bench --debug --abort-on-failure --validate-mode ../samples/cache-write-2d-contig-contig.json
+
+      - name: Test h5bench CACHE write 1D contiguous (memory) contiguous (file)
+        run: |
+          current="$PWD"
+          export HDF5_DIR=/opt/hdf5
+          export HDF5_ROOT=/opt/hdf5
+          export ABT_DIR=$current/vol-async/argobots/install
+          export ASYNC_DIR=$current/vol-async/src
+          export VOL_DIR=$current/vol
+          export LD_PRELOAD=$ABT_DIR/lib/libabt.so
+          cd build-cache
+          printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg
+          python3 ../samples/update.py ../samples/cache-write-1d-contig-contig.json
+          [ -e SSD ] || mkdir SSD
+          ./h5bench --debug --abort-on-failure --validate-mode ../samples/cache-write-1d-contig-contig.json
+
       - name: Upload artifact
         if: always()
         uses: actions/upload-artifact@v2
diff --git a/commons/h5bench_util.c b/commons/h5bench_util.c
index 58bce77d..5f005688 100644
--- a/commons/h5bench_util.c
+++ b/commons/h5bench_util.c
@@ -23,10 +23,6 @@
 #include "async_adaptor.h"
 #endif
 
-#ifdef USE_CACHE_VOL
-#include "cache_new_h5api.h"
-#endif
-
 #include "h5bench_util.h"
 
 int str_to_ull(char *str_in, unsigned long long *num_out);
@@ -73,7 +69,7 @@ h5bench_sleep(duration sleep_time)
 void
 async_sleep(hid_t file_id, hid_t fapl, duration sleep_time)
 {
-#ifndef USE_CACHE_VOL
+#ifndef USE_CACHE_VOL 
 #ifdef USE_ASYNC_VOL
     unsigned cap = 0;
     H5Pget_vol_cap_flags(fapl, &cap);
@@ -263,311 +259,281 @@ mem_monitor_final_run(mem_monitor *mon, unsigned long *metadata_time_total, unsi
     unsigned long meta_time = 0, data_time = 0;
     int dset_cnt = 8;
 
-    if (mon->mode == ASYNC_NON) {
-        t1 = get_time_usec();
-        if (!has_vol_async) {
-            for (int i = 0; i < mon->time_step_cnt; i++) {
-                ts_run = 
&(mon->time_steps[i]); - if (mon->time_steps[i].status == TS_DELAY) { - for (int j = 0; j < dset_cnt; j++) - H5Dclose_async(ts_run->dset_ids[j], ts_run->es_meta_close); - H5Gclose_async(ts_run->grp_id, ts_run->es_meta_close); - } - } - t2 = get_time_usec(); - meta_time += (t2 - t1); - return 0; - } - - if (!mon || !metadata_time_total || !data_time_total) - return -1; - t1 = get_time_usec(); + if (!has_vol_async) { for (int i = 0; i < mon->time_step_cnt; i++) { ts_run = &(mon->time_steps[i]); if (mon->time_steps[i].status == TS_DELAY) { - for (int j = 0; j < dset_cnt; j++) H5Dclose_async(ts_run->dset_ids[j], ts_run->es_meta_close); H5Gclose_async(ts_run->grp_id, ts_run->es_meta_close); - - ts_run->status = TS_READY; } } + return 0; + } - t2 = get_time_usec(); - meta_time += (t2 - t1); + if (!mon || !metadata_time_total || !data_time_total) + return -1; + t1 = get_time_usec(); + for (int i = 0; i < mon->time_step_cnt; i++) { + ts_run = &(mon->time_steps[i]); + if (mon->time_steps[i].status == TS_DELAY) { - if (!has_vol_async) - return 0; + for (int j = 0; j < dset_cnt; j++) + H5Dclose_async(ts_run->dset_ids[j], ts_run->es_meta_close); + H5Gclose_async(ts_run->grp_id, ts_run->es_meta_close); - for (int i = 0; i < mon->time_step_cnt; i++) { - ts_run = &(mon->time_steps[i]); - if (mon->time_steps[i].status == TS_READY) { - t1 = get_time_usec(); - H5ESwait(ts_run->es_meta_create, H5ES_WAIT_FOREVER, &num_in_progress, &op_failed); - t2 = get_time_usec(); + ts_run->status = TS_READY; + } + } - H5ESwait(ts_run->es_data, H5ES_WAIT_FOREVER, &num_in_progress, &op_failed); - t3 = get_time_usec(); + t2 = get_time_usec(); + meta_time += (t2 - t1); - H5ESwait(ts_run->es_meta_close, H5ES_WAIT_FOREVER, &num_in_progress, &op_failed); - t4 = get_time_usec(); + if (!has_vol_async) + return 0; - timestep_es_id_close(ts_run, mon->mode); - t5 = get_time_usec(); + for (int i = 0; i < mon->time_step_cnt; i++) { + ts_run = &(mon->time_steps[i]); + if (mon->time_steps[i].status == TS_READY) { + t1 = get_time_usec(); + H5ESwait(ts_run->es_meta_create, H5ES_WAIT_FOREVER, &num_in_progress, &op_failed); + t2 = get_time_usec(); - t6 = get_time_usec(); + H5ESwait(ts_run->es_data, H5ES_WAIT_FOREVER, &num_in_progress, &op_failed); + t3 = get_time_usec(); - meta_time += ((t2 - t1) + (t4 - t3)); - data_time += (t3 - t2); - ts_run->status = TS_DONE; - } + H5ESwait(ts_run->es_meta_close, H5ES_WAIT_FOREVER, &num_in_progress, &op_failed); + t4 = get_time_usec(); + + timestep_es_id_close(ts_run, mon->mode); + t5 = get_time_usec(); + + t6 = get_time_usec(); + + meta_time += ((t2 - t1) + (t4 - t3)); + data_time += (t3 - t2); + ts_run->status = TS_DONE; } + } - *metadata_time_total = meta_time; - *data_time_total = data_time; - return 0; + *metadata_time_total = meta_time; + *data_time_total = data_time; + return 0; +} + +hid_t +es_id_set(async_mode mode) +{ + hid_t es_id = 0; + if (has_vol_async) { + es_id = H5EScreate(); + } + else { + es_id = H5ES_NONE; } - hid_t es_id_set(async_mode mode) - { - hid_t es_id = 0; - if (has_vol_async) { - es_id = H5EScreate(); - } - else { - es_id = H5ES_NONE; - } + return es_id; +} - return es_id; +void +es_id_close(hid_t es_id, async_mode mode) +{ + if (has_vol_async) { + H5ESclose(es_id); } +} - void es_id_close(hid_t es_id, async_mode mode) - { - if (has_vol_async) { - H5ESclose(es_id); - } +float +uniform_random_number() +{ + return (((float)rand()) / ((float)(RAND_MAX))); +} + +data_contig_md * +prepare_contig_memory(long particle_cnt, long dim_1, long dim_2, long dim_3) +{ + data_contig_md 
*buf_struct = (data_contig_md *)malloc(sizeof(data_contig_md)); + buf_struct->particle_cnt = particle_cnt; + buf_struct->dim_1 = dim_1; + buf_struct->dim_2 = dim_2; + buf_struct->dim_3 = dim_3; + buf_struct->x = (float *)malloc(particle_cnt * sizeof(float)); + buf_struct->y = (float *)malloc(particle_cnt * sizeof(float)); + buf_struct->z = (float *)malloc(particle_cnt * sizeof(float)); + buf_struct->px = (float *)malloc(particle_cnt * sizeof(float)); + buf_struct->py = (float *)malloc(particle_cnt * sizeof(float)); + buf_struct->pz = (float *)malloc(particle_cnt * sizeof(float)); + buf_struct->id_1 = (int *)malloc(particle_cnt * sizeof(int)); + buf_struct->id_2 = (float *)malloc(particle_cnt * sizeof(float)); + return buf_struct; +} + +data_contig_md * +prepare_contig_memory_multi_dim(unsigned long long dim_1, unsigned long long dim_2, unsigned long long dim_3) +{ + data_contig_md *buf_struct = (data_contig_md *)malloc(sizeof(data_contig_md)); + buf_struct->dim_1 = dim_1; + buf_struct->dim_2 = dim_2; + buf_struct->dim_3 = dim_3; + unsigned long long num_particles = dim_1 * dim_2 * dim_3; + + buf_struct->particle_cnt = num_particles; + buf_struct->x = (float *)malloc(num_particles * sizeof(float)); + buf_struct->y = (float *)malloc(num_particles * sizeof(float)); + buf_struct->z = (float *)malloc(num_particles * sizeof(float)); + buf_struct->px = (float *)malloc(num_particles * sizeof(float)); + buf_struct->py = (float *)malloc(num_particles * sizeof(float)); + buf_struct->pz = (float *)malloc(num_particles * sizeof(float)); + buf_struct->id_1 = (int *)malloc(num_particles * sizeof(int)); + buf_struct->id_2 = (float *)malloc(num_particles * sizeof(float)); + return buf_struct; +} + +void +free_contig_memory(data_contig_md *data) +{ + if (data) { + free(data->x); + free(data->y); + free(data->z); + free(data->px); + free(data->py); + free(data->pz); + free(data->id_1); + free(data->id_2); + free(data); } +} - float uniform_random_number() - { - return (((float)rand()) / ((float)(RAND_MAX))); - } - - data_contig_md *prepare_contig_memory(long particle_cnt, long dim_1, long dim_2, long dim_3) - { - data_contig_md *buf_struct = (data_contig_md *)malloc(sizeof(data_contig_md)); - buf_struct->particle_cnt = particle_cnt; - buf_struct->dim_1 = dim_1; - buf_struct->dim_2 = dim_2; - buf_struct->dim_3 = dim_3; - buf_struct->x = (float *)malloc(particle_cnt * sizeof(float)); - buf_struct->y = (float *)malloc(particle_cnt * sizeof(float)); - buf_struct->z = (float *)malloc(particle_cnt * sizeof(float)); - buf_struct->px = (float *)malloc(particle_cnt * sizeof(float)); - buf_struct->py = (float *)malloc(particle_cnt * sizeof(float)); - buf_struct->pz = (float *)malloc(particle_cnt * sizeof(float)); - buf_struct->id_1 = (int *)malloc(particle_cnt * sizeof(int)); - buf_struct->id_2 = (float *)malloc(particle_cnt * sizeof(float)); - return buf_struct; - } - - data_contig_md *prepare_contig_memory_multi_dim(unsigned long long dim_1, unsigned long long dim_2, - unsigned long long dim_3) - { - data_contig_md *buf_struct = (data_contig_md *)malloc(sizeof(data_contig_md)); - buf_struct->dim_1 = dim_1; - buf_struct->dim_2 = dim_2; - buf_struct->dim_3 = dim_3; - unsigned long long num_particles = dim_1 * dim_2 * dim_3; - - buf_struct->particle_cnt = num_particles; - buf_struct->x = (float *)malloc(num_particles * sizeof(float)); - buf_struct->y = (float *)malloc(num_particles * sizeof(float)); - buf_struct->z = (float *)malloc(num_particles * sizeof(float)); - buf_struct->px = (float *)malloc(num_particles * 
sizeof(float)); - buf_struct->py = (float *)malloc(num_particles * sizeof(float)); - buf_struct->pz = (float *)malloc(num_particles * sizeof(float)); - buf_struct->id_1 = (int *)malloc(num_particles * sizeof(int)); - buf_struct->id_2 = (float *)malloc(num_particles * sizeof(float)); - return buf_struct; - } - - void free_contig_memory(data_contig_md * data) - { - if (data) { - free(data->x); - free(data->y); - free(data->z); - free(data->px); - free(data->py); - free(data->pz); - free(data->id_1); - free(data->id_2); - free(data); - } +int +parse_unit(char *str_in, unsigned long long *num, char **unit_str) +{ + char *str = strdup(str_in); + char *ptr = NULL; + ptr = strtok(str, " "); + char *num_str = strdup(ptr); + if (!num_str) { + printf("Number parsing failed: \"%s\" is not recognized.\n", str_in); + return -1; } + char *endptr; + *num = strtoul(num_str, &endptr, 10); + ptr = strtok(NULL, " "); + if (ptr) + *unit_str = strdup(ptr); + else + *unit_str = NULL; + return 0; +} - int parse_unit(char *str_in, unsigned long long *num, char **unit_str) - { - char *str = strdup(str_in); - char *ptr = NULL; - ptr = strtok(str, " "); - char *num_str = strdup(ptr); - if (!num_str) { - printf("Number parsing failed: \"%s\" is not recognized.\n", str_in); - return -1; - } - char *endptr; - *num = strtoul(num_str, &endptr, 10); - ptr = strtok(NULL, " "); - if (ptr) - *unit_str = strdup(ptr); +int +parse_time(char *str_in, duration *time) +{ + if (!time) + time = calloc(1, sizeof(duration)); + unsigned long long num = 0; + char * unit_str; + parse_unit(str_in, &num, &unit_str); + + if (!unit_str) + time->unit = TIME_SEC; + else if (unit_str[0] == 'S' || unit_str[0] == 's') + time->unit = TIME_SEC; + else if (unit_str[0] == 'M' || unit_str[0] == 'm') { + if (strcmp(unit_str, "ms") == 0 || strcmp(unit_str, "MS") == 0) + time->unit = TIME_MS; else - *unit_str = NULL; - return 0; + time->unit = TIME_MIN; } - - int parse_time(char *str_in, duration *time) - { - if (!time) - time = calloc(1, sizeof(duration)); - unsigned long long num = 0; - char * unit_str; - parse_unit(str_in, &num, &unit_str); - - if (!unit_str) - time->unit = TIME_SEC; - else if (unit_str[0] == 'S' || unit_str[0] == 's') - time->unit = TIME_SEC; - else if (unit_str[0] == 'M' || unit_str[0] == 'm') { - if (strcmp(unit_str, "ms") == 0 || strcmp(unit_str, "MS") == 0) - time->unit = TIME_MS; - else - time->unit = TIME_MIN; - } - else if (unit_str[0] == 'U' || unit_str[0] == 'u') - time->unit = TIME_US; - else { - printf("time parsing failed\n"); - return -1; - } - time->time_num = num; - return 0; + else if (unit_str[0] == 'U' || unit_str[0] == 'u') + time->unit = TIME_US; + else { + printf("time parsing failed\n"); + return -1; } + time->time_num = num; + return 0; +} - int str_to_ull(char *str_in, unsigned long long *num_out) - { - if (!str_in) { - printf("Number parsing failed: \"%s\" is not recognized.\n", str_in); - return -1; - } - unsigned long long num = 0; - char * unit_str; - int ret = parse_unit(str_in, &num, &unit_str); - if (ret < 0) - return -1; - if (!unit_str) - num = num * 1; - else if (unit_str[0] == 'K' || unit_str[0] == 'k') - num = num * K_VAL; - else if (unit_str[0] == 'M' || unit_str[0] == 'm') - num = num * M_VAL; - else if (unit_str[0] == 'G' || unit_str[0] == 'g') - num = num * G_VAL; - else if (unit_str[0] == 'T' || unit_str[0] == 't') - num = num * T_VAL; - - if (unit_str) - free(unit_str); - *num_out = num; - return 0; +int +str_to_ull(char *str_in, unsigned long long *num_out) +{ + if (!str_in) { + 
printf("Number parsing failed: \"%s\" is not recognized.\n", str_in); + return -1; } + unsigned long long num = 0; + char * unit_str; + int ret = parse_unit(str_in, &num, &unit_str); + if (ret < 0) + return -1; + if (!unit_str) + num = num * 1; + else if (unit_str[0] == 'K' || unit_str[0] == 'k') + num = num * K_VAL; + else if (unit_str[0] == 'M' || unit_str[0] == 'm') + num = num * M_VAL; + else if (unit_str[0] == 'G' || unit_str[0] == 'g') + num = num * G_VAL; + else if (unit_str[0] == 'T' || unit_str[0] == 't') + num = num * T_VAL; + + if (unit_str) + free(unit_str); + *num_out = num; + return 0; +} - int _set_io_pattern(bench_params * params_in_out) - { - if (!params_in_out) - return -1; - int ret = 0; - if (params_in_out->io_op == IO_WRITE) { // mem --> file - if (params_in_out->mem_pattern == PATTERN_CONTIG) { - if (params_in_out->file_pattern == PATTERN_CONTIG) { // CC - switch (params_in_out->num_dims) { - case 1: - (*params_in_out).access_pattern.pattern_write = CONTIG_CONTIG_1D; - ret = 0; - break; - case 2: - (*params_in_out).access_pattern.pattern_write = CONTIG_CONTIG_2D; - ret = 0; - break; - case 3: - (*params_in_out).access_pattern.pattern_write = CONTIG_CONTIG_3D; - ret = 0; - break; - default: - ret = -1; - printf("%s() failed on line %d\n", __func__, __LINE__); - break; - } - } - else if (params_in_out->file_pattern == PATTERN_INTERLEAVED) { // CI - if (params_in_out->num_dims == 1) { - (*params_in_out).access_pattern.pattern_write = CONTIG_COMPOUND_1D; +int +_set_io_pattern(bench_params *params_in_out) +{ + if (!params_in_out) + return -1; + int ret = 0; + if (params_in_out->io_op == IO_WRITE) { // mem --> file + if (params_in_out->mem_pattern == PATTERN_CONTIG) { + if (params_in_out->file_pattern == PATTERN_CONTIG) { // CC + switch (params_in_out->num_dims) { + case 1: + (*params_in_out).access_pattern.pattern_write = CONTIG_CONTIG_1D; ret = 0; - } - else if (params_in_out->num_dims == 2) { - (*params_in_out).access_pattern.pattern_write = CONTIG_COMPOUND_2D; + break; + case 2: + (*params_in_out).access_pattern.pattern_write = CONTIG_CONTIG_2D; ret = 0; - } - else { - ret = -1; - printf("%s() failed on line %d\n", __func__, __LINE__); - } - } - else if (params_in_out->file_pattern == PATTERN_STRIDED) { // Strided write 1d - if (params_in_out->num_dims == 1) { - (*params_in_out).access_pattern.pattern_write = CONTIG_CONTIG_STRIDED_1D; + break; + case 3: + (*params_in_out).access_pattern.pattern_write = CONTIG_CONTIG_3D; ret = 0; - } - else { + break; + default: ret = -1; printf("%s() failed on line %d\n", __func__, __LINE__); - } + break; + } + } + else if (params_in_out->file_pattern == PATTERN_INTERLEAVED) { // CI + if (params_in_out->num_dims == 1) { + (*params_in_out).access_pattern.pattern_write = CONTIG_COMPOUND_1D; + ret = 0; + } + else if (params_in_out->num_dims == 2) { + (*params_in_out).access_pattern.pattern_write = CONTIG_COMPOUND_2D; + ret = 0; } else { ret = -1; printf("%s() failed on line %d\n", __func__, __LINE__); } } - else if (params_in_out->mem_pattern == PATTERN_INTERLEAVED) { - if (params_in_out->file_pattern == PATTERN_CONTIG) { // IC - if (params_in_out->num_dims == 1) { - (*params_in_out).access_pattern.pattern_write = COMPOUND_CONTIG_1D; - ret = 0; - } - else if (params_in_out->num_dims == 2) { - (*params_in_out).access_pattern.pattern_write = COMPOUND_CONTIG_2D; - ret = 0; - } - else { - ret = -1; - printf("%s() failed on line %d\n", __func__, __LINE__); - } + else if (params_in_out->file_pattern == PATTERN_STRIDED) { // Strided write 1d + if 
(params_in_out->num_dims == 1) { + (*params_in_out).access_pattern.pattern_write = CONTIG_CONTIG_STRIDED_1D; + ret = 0; } - else if (params_in_out->file_pattern == PATTERN_INTERLEAVED) { // II - if (params_in_out->num_dims == 1) { - (*params_in_out).access_pattern.pattern_write = COMPOUND_COMPOUND_1D; - ret = 0; - } - else if (params_in_out->num_dims == 2) { - (*params_in_out).access_pattern.pattern_write = COMPOUND_COMPOUND_2D; - ret = 0; - } - else { - ret = -1; - printf("%s() failed on line %d\n", __func__, __LINE__); - } + else { + ret = -1; + printf("%s() failed on line %d\n", __func__, __LINE__); } } else { @@ -575,45 +541,40 @@ mem_monitor_final_run(mem_monitor *mon, unsigned long *metadata_time_total, unsi printf("%s() failed on line %d\n", __func__, __LINE__); } } - else if (params_in_out->io_op == IO_READ) { // file --> mem - if (params_in_out->mem_pattern == PATTERN_CONTIG) { - if (params_in_out->file_pattern == PATTERN_CONTIG) { - switch (params_in_out->num_dims) { - case 1: - (*params_in_out).access_pattern.pattern_read = CONTIG_1D; - ret = 0; - break; - case 2: - (*params_in_out).access_pattern.pattern_read = CONTIG_2D; - ret = 0; - break; - case 3: - (*params_in_out).access_pattern.pattern_read = CONTIG_3D; - ret = 0; - break; - default: - ret = -1; - printf("%s() failed on line %d\n", __func__, __LINE__); - break; - } + else if (params_in_out->mem_pattern == PATTERN_INTERLEAVED) { + if (params_in_out->file_pattern == PATTERN_CONTIG) { // IC + if (params_in_out->num_dims == 1) { + (*params_in_out).access_pattern.pattern_write = COMPOUND_CONTIG_1D; + ret = 0; + } + else if (params_in_out->num_dims == 2) { + (*params_in_out).access_pattern.pattern_write = COMPOUND_CONTIG_2D; + ret = 0; } - else if (params_in_out->file_pattern == PATTERN_STRIDED) { - (*params_in_out).access_pattern.pattern_read = STRIDED_1D; - ret = 0; + else { + ret = -1; + printf("%s() failed on line %d\n", __func__, __LINE__); } } - else { - ret = -1; - printf("%s() failed on line %d\n", __func__, __LINE__); + else if (params_in_out->file_pattern == PATTERN_INTERLEAVED) { // II + if (params_in_out->num_dims == 1) { + (*params_in_out).access_pattern.pattern_write = COMPOUND_COMPOUND_1D; + ret = 0; + } + else if (params_in_out->num_dims == 2) { + (*params_in_out).access_pattern.pattern_write = COMPOUND_COMPOUND_2D; + ret = 0; + } + else { + ret = -1; + printf("%s() failed on line %d\n", __func__, __LINE__); + } } } else { ret = -1; printf("%s() failed on line %d\n", __func__, __LINE__); } - if (ret < 0) - printf("%s() failed, unsupported value/patterns.\n", __func__); - return ret; } else if ((params_in_out->io_op == IO_READ) || (params_in_out->io_op == IO_OVERWRITE) || (params_in_out->io_op == IO_APPEND)) { // file --> mem @@ -638,53 +599,52 @@ mem_monitor_final_run(mem_monitor *mon, unsigned long *metadata_time_total, unsi break; } } - else if (strcmp(val, "WRITE") == 0) { - params_in_out->io_op = IO_WRITE; - } - else { - printf("Unknown value for \"IO_OPERATION\": %s\n", val); - return -1; - } - } - else if (strcmp(key, "MEM_PATTERN") == 0) { - if (strcmp(val_in, "CONTIG") == 0) { - params_in_out->mem_pattern = PATTERN_CONTIG; - } - else if (strcmp(val_in, "INTERLEAVED") == 0) { - params_in_out->mem_pattern = PATTERN_INTERLEAVED; - } - else if (strcmp(val_in, "STRIDED") == 0) { - params_in_out->mem_pattern = PATTERN_STRIDED; - } - else { - params_in_out->mem_pattern = PATTERN_INVALID; + else if (params_in_out->file_pattern == PATTERN_STRIDED) { + (*params_in_out).access_pattern.pattern_read = STRIDED_1D; + 
ret = 0; } } - else if (strcmp(key, "FILE_PATTERN") == 0) { - if (strcmp(val_in, "CONTIG") == 0) { - params_in_out->file_pattern = PATTERN_CONTIG; - } - else if (strcmp(val_in, "INTERLEAVED") == 0) { - params_in_out->file_pattern = PATTERN_INTERLEAVED; - } - else if (strcmp(val_in, "STRIDED") == 0) { - params_in_out->file_pattern = PATTERN_STRIDED; - } - else { - params_in_out->file_pattern = PATTERN_INVALID; - } + else { + ret = -1; + printf("%s() failed on line %d\n", __func__, __LINE__); } + } + else { + ret = -1; + printf("%s() failed on line %d\n", __func__, __LINE__); + } + if (ret < 0) + printf("%s() failed, unsupported value/patterns.\n", __func__); + return ret; +} - else if (strcmp(key, "TO_READ_NUM_PARTICLES") == 0) { - if ((*params_in_out).io_op != IO_READ) { - printf( - "TO_READ_CNT_M parameter is only used with READ_PATTERNs, please check config file.\n"); - return -1; - } - unsigned long long num = 0; - if (str_to_ull(val, &num) < 0) - return -1; - (*params_in_out).try_num_particles = num; +char * +_parse_val(char *val_in) +{ + char *val_str = strdup(val_in); + char *tokens[2]; + char *tok = strtok(val_str, "#"); + char *val = NULL; + val = strdup(tok); + // printf("_parse_val: val_in = [%s], val = [%s]\n", val_in, val); + if (val_str) + free(val_str); + return val; +} + +int +_set_params(char *key, char *val_in, bench_params *params_in_out, int do_write) +{ + if (!params_in_out) + return 0; + char *val = _parse_val(val_in); + + if (strcmp(key, "IO_OPERATION") == 0) { + if (strcmp(val, "READ") == 0) { + params_in_out->io_op = IO_READ; + } + else if (strcmp(val, "WRITE") == 0) { + params_in_out->io_op = IO_WRITE; } else if (strcmp(val, "OVERWRITE") == 0) { params_in_out->io_op = IO_OVERWRITE; @@ -696,317 +656,362 @@ mem_monitor_final_run(mem_monitor *mon, unsigned long *metadata_time_total, unsi printf("Unknown value for \"IO_OPERATION\": %s\n", val); return -1; } - else if (strcmp(key, "COLLECTIVE_DATA") == 0) { - if (val[0] == 'Y' || val[0] == 'y') - (*params_in_out).data_coll = 1; - else - (*params_in_out).data_coll = 0; + } + else if (strcmp(key, "MEM_PATTERN") == 0) { + if (strcmp(val_in, "CONTIG") == 0) { + params_in_out->mem_pattern = PATTERN_CONTIG; } - else if (strcmp(key, "COMPRESS") == 0) { - if (val[0] == 'Y' || val[0] == 'y') - (*params_in_out).useCompress = 1; - else - (*params_in_out).useCompress = 0; + else if (strcmp(val_in, "INTERLEAVED") == 0) { + params_in_out->mem_pattern = PATTERN_INTERLEAVED; } - else if (strcmp(key, "TIMESTEPS") == 0) { - int ts_cnt = atoi(val); - if (ts_cnt >= 1) - (*params_in_out).cnt_time_step = ts_cnt; - else { - printf("TIMESTEPS must be at least 1.\n"); - return -1; - } + else if (strcmp(val_in, "STRIDED") == 0) { + params_in_out->mem_pattern = PATTERN_STRIDED; } - else if (strcmp(key, "DELAYED_CLOSE_TIMESTEPS") == 0) { - int delay_ts_cnt = atoi(val); - if (delay_ts_cnt < 0) - delay_ts_cnt = 0; - (*params_in_out).cnt_time_step_delay = delay_ts_cnt; + else { + params_in_out->mem_pattern = PATTERN_INVALID; } - else if (strcmp(key, "NUM_PARTICLES") == 0) { // 16M, 8K - unsigned long long num = 0; - if (str_to_ull(val, &num) < 0) - return -1; - - if (num >= 1) - (*params_in_out).num_particles = num; - else { - printf("NUM_PARTICLES must be at least 1.\n"); - return -1; - } + } + else if (strcmp(key, "FILE_PATTERN") == 0) { + if (strcmp(val_in, "CONTIG") == 0) { + params_in_out->file_pattern = PATTERN_CONTIG; } - else if (strcmp(key, "IO_MEM_LIMIT") == 0) { - unsigned long long num = 0; - if (str_to_ull(val, &num) < 0) - return -1; - 
if (num >= 0) { - (*params_in_out).io_mem_limit = num; - } - else { - printf("IO_MEM_LIMIT must be at least 0.\n"); - return -1; - } + else if (strcmp(val_in, "INTERLEAVED") == 0) { + params_in_out->file_pattern = PATTERN_INTERLEAVED; } - else if (strcmp(key, "EMULATED_COMPUTE_TIME_PER_TIMESTEP") == 0) { - duration time; - if (parse_time(val, &time) < 0) - return -1; - if (time.time_num >= 0) - (*params_in_out).compute_time = time; - else { - printf("EMULATED_COMPUTE_TIME_PER_TIMESTEP must be at least 0.\n"); - return -1; - } + else if (strcmp(val_in, "STRIDED") == 0) { + params_in_out->file_pattern = PATTERN_STRIDED; } - else if (strcmp(key, "READ_OPTION") == 0) { - if (val_in[0] == 'F') { // FULL - (*params_in_out).read_option = READ_FULL; - } - else if (val_in[0] == 'P') { // PARTIAL - (*params_in_out).read_option = READ_PARTIAL; - } - else if (val_in[0] == 'S') { // STRIDED - (*params_in_out).read_option = READ_STRIDED; - } - else - (*params_in_out).read_option = READ_OPTION_INVALID; + else { + params_in_out->file_pattern = PATTERN_INVALID; } - else if (strcmp(key, "NUM_DIMS") == 0) { - int num = atoi(val); - if (num > 0) - (*params_in_out).num_dims = num; - else { - printf("NUM_DIMS must be at least 1\n"); - return -1; - } + } + + else if (strcmp(key, "TO_READ_NUM_PARTICLES") == 0) { + if ((*params_in_out).io_op != IO_READ) { + printf("TO_READ_CNT_M parameter is only used with READ_PATTERNs, please check config file.\n"); + return -1; } - else if (strcmp(key, "DIM_1") == 0) { - unsigned long long num = 0; - if (str_to_ull(val, &num) < 0) - return -1; - if (num > 0) - (*params_in_out).dim_1 = num; - else { - printf("DIM_1 must be at least 1\n"); - return -1; - } + unsigned long long num = 0; + if (str_to_ull(val, &num) < 0) + return -1; + (*params_in_out).try_num_particles = num; + } + else if (strcmp(key, "COLLECTIVE_METADATA") == 0) { + if (val[0] == 'Y' || val[0] == 'y') + (*params_in_out).meta_coll = 1; + else + (*params_in_out).meta_coll = 0; + } + else if (strcmp(key, "COLLECTIVE_DATA") == 0) { + if (val[0] == 'Y' || val[0] == 'y') + (*params_in_out).data_coll = 1; + else + (*params_in_out).data_coll = 0; + } + else if (strcmp(key, "COMPRESS") == 0) { + if (val[0] == 'Y' || val[0] == 'y') + (*params_in_out).useCompress = 1; + else + (*params_in_out).useCompress = 0; + } + else if (strcmp(key, "TIMESTEPS") == 0) { + int ts_cnt = atoi(val); + if (ts_cnt >= 1) + (*params_in_out).cnt_time_step = ts_cnt; + else { + printf("TIMESTEPS must be at least 1.\n"); + return -1; } - else if (strcmp(key, "DIM_2") == 0) { - if ((*params_in_out).num_dims == 1) - return 1; - unsigned long long num = 0; - if (str_to_ull(val, &num) < 0) - return -1; - if (num >= 1) - (*params_in_out).dim_2 = num; - else { - printf("DIM_2 must be at least 1\n"); - return -1; - } + } + else if (strcmp(key, "DELAYED_CLOSE_TIMESTEPS") == 0) { + int delay_ts_cnt = atoi(val); + if (delay_ts_cnt < 0) + delay_ts_cnt = 0; + (*params_in_out).cnt_time_step_delay = delay_ts_cnt; + } + else if (strcmp(key, "NUM_PARTICLES") == 0) { // 16M, 8K + unsigned long long num = 0; + if (str_to_ull(val, &num) < 0) + return -1; + + if (num >= 1) + (*params_in_out).num_particles = num; + else { + printf("NUM_PARTICLES must be at least 1.\n"); + return -1; } - else if (strcmp(key, "DIM_3") == 0) { - if ((*params_in_out).num_dims == 1 || (*params_in_out).num_dims == 2) - return 1; - unsigned long long num = 0; - if (str_to_ull(val, &num) < 0) - return -1; - if (num >= 1) - (*params_in_out).dim_3 = num; - else { - printf("DIM_3 must be at least 
1\n"); - return -1; - } + } + else if (strcmp(key, "IO_MEM_LIMIT") == 0) { + unsigned long long num = 0; + if (str_to_ull(val, &num) < 0) + return -1; + if (num >= 0) { + (*params_in_out).io_mem_limit = num; } - else if (strcmp(key, "CHUNK_DIM_1") == 0) { - unsigned long long dim = 0; - if (str_to_ull(val, &dim) < 0) - return -1; - if (dim > 0) - (*params_in_out).chunk_dim_1 = dim; - else { - printf("CHUNK_DIM_1 must be at least 1\n"); - return -1; - } + else { + printf("IO_MEM_LIMIT must be at least 0.\n"); + return -1; } - else if (strcmp(key, "CHUNK_DIM_2") == 0) { - if ((*params_in_out).num_dims == 1) - return 1; - unsigned long long dim = 0; - if (str_to_ull(val, &dim) < 0) - return -1; - if (dim >= 1) - (*params_in_out).chunk_dim_2 = dim; - else { - printf("CHUNK_DIM_2 must be at least 1.\n"); - return -1; - } + } + else if (strcmp(key, "EMULATED_COMPUTE_TIME_PER_TIMESTEP") == 0) { + duration time; + if (parse_time(val, &time) < 0) + return -1; + if (time.time_num >= 0) + (*params_in_out).compute_time = time; + else { + printf("EMULATED_COMPUTE_TIME_PER_TIMESTEP must be at least 0.\n"); + return -1; } - else if (strcmp(key, "CHUNK_DIM_3") == 0) { - if ((*params_in_out).num_dims == 1 || (*params_in_out).num_dims == 2) - return 1; - unsigned long long dim = 0; - if (str_to_ull(val, &dim) < 0) - return -1; - if (dim >= 1) - (*params_in_out).chunk_dim_3 = dim; - else { - printf("CHUNK_DIM_3 must be at least 1.\n"); - return -1; - } + } + else if (strcmp(key, "READ_OPTION") == 0) { + if (val_in[0] == 'F') { // FULL + (*params_in_out).read_option = READ_FULL; } - else if (strcmp(key, "STRIDE_SIZE") == 0) { - unsigned long long num = 0; - if (str_to_ull(val, &num) < 0) - return -1; - (*params_in_out).stride = num; + else if (val_in[0] == 'P') { // PARTIAL + (*params_in_out).read_option = READ_PARTIAL; } - else if (strcmp(key, "BLOCK_SIZE") == 0) { - unsigned long long num = 0; - if (str_to_ull(val, &num) < 0) - return -1; - (*params_in_out).block_size = num; + else if (val_in[0] == 'S') { // STRIDED + (*params_in_out).read_option = READ_STRIDED; } - else if (strcmp(key, "BLOCK_CNT") == 0) { - unsigned long long num = 0; - if (str_to_ull(val, &num) < 0) - return -1; - (*params_in_out).block_cnt = num; + else + (*params_in_out).read_option = READ_OPTION_INVALID; + } + else if (strcmp(key, "NUM_DIMS") == 0) { + int num = atoi(val); + if (num > 0) + (*params_in_out).num_dims = num; + else { + printf("NUM_DIMS must be at least 1\n"); + return -1; } - else if (strcmp(key, "CSV_FILE") == 0) { - (*params_in_out).useCSV = 1; - (*params_in_out).csv_path = strdup(val); + } + else if (strcmp(key, "DIM_1") == 0) { + unsigned long long num = 0; + if (str_to_ull(val, &num) < 0) + return -1; + if (num > 0) + (*params_in_out).dim_1 = num; + else { + printf("DIM_1 must be at least 1\n"); + return -1; } - else if (strcmp(key, "ENV_METADATA_FILE") == 0) { - (*params_in_out).env_meta_path = strdup(val); + } + else if (strcmp(key, "DIM_2") == 0) { + if ((*params_in_out).num_dims == 1) + return 1; + unsigned long long num = 0; + if (str_to_ull(val, &num) < 0) + return -1; + if (num >= 1) + (*params_in_out).dim_2 = num; + else { + printf("DIM_2 must be at least 1\n"); + return -1; } - else if (strcmp(key, "FILE_PER_PROC") == 0) { - if (val[0] == 'Y' || val[0] == 'y') - (*params_in_out).file_per_proc = 1; - else - (*params_in_out).file_per_proc = 0; + } + else if (strcmp(key, "DIM_3") == 0) { + if ((*params_in_out).num_dims == 1 || (*params_in_out).num_dims == 2) + return 1; + unsigned long long num = 0; + if 
(str_to_ull(val, &num) < 0) + return -1; + if (num >= 1) + (*params_in_out).dim_3 = num; + else { + printf("DIM_3 must be at least 1\n"); + return -1; } + } + else if (strcmp(key, "CHUNK_DIM_1") == 0) { + unsigned long long dim = 0; + if (str_to_ull(val, &dim) < 0) + return -1; + if (dim > 0) + (*params_in_out).chunk_dim_1 = dim; else { - printf("Unknown Parameter: %s\n", key); + printf("CHUNK_DIM_1 must be at least 1\n"); return -1; } - - has_vol_async = has_vol_connector(); - - if (has_vol_async) { - (*params_in_out).asyncMode = MODE_ASYNC; + } + else if (strcmp(key, "CHUNK_DIM_2") == 0) { + if ((*params_in_out).num_dims == 1) + return 1; + unsigned long long dim = 0; + if (str_to_ull(val, &dim) < 0) + return -1; + if (dim >= 1) + (*params_in_out).chunk_dim_2 = dim; + else { + printf("CHUNK_DIM_2 must be at least 1.\n"); + return -1; } + } + else if (strcmp(key, "CHUNK_DIM_3") == 0) { + if ((*params_in_out).num_dims == 1 || (*params_in_out).num_dims == 2) + return 1; + unsigned long long dim = 0; + if (str_to_ull(val, &dim) < 0) + return -1; + if (dim >= 1) + (*params_in_out).chunk_dim_3 = dim; else { - (*params_in_out).asyncMode = MODE_SYNC; + printf("CHUNK_DIM_3 must be at least 1.\n"); + return -1; } + } + else if (strcmp(key, "STRIDE_SIZE") == 0) { + unsigned long long num = 0; + if (str_to_ull(val, &num) < 0) + return -1; + (*params_in_out).stride = num; + } + else if (strcmp(key, "BLOCK_SIZE") == 0) { + unsigned long long num = 0; + if (str_to_ull(val, &num) < 0) + return -1; + (*params_in_out).block_size = num; + } + else if (strcmp(key, "BLOCK_CNT") == 0) { + unsigned long long num = 0; + if (str_to_ull(val, &num) < 0) + return -1; + (*params_in_out).block_cnt = num; + } + else if (strcmp(key, "CSV_FILE") == 0) { + (*params_in_out).useCSV = 1; + (*params_in_out).csv_path = strdup(val); + } + else if (strcmp(key, "ENV_METADATA_FILE") == 0) { + (*params_in_out).env_meta_path = strdup(val); + } + else if (strcmp(key, "FILE_PER_PROC") == 0) { + if (val[0] == 'Y' || val[0] == 'y') + (*params_in_out).file_per_proc = 1; + else + (*params_in_out).file_per_proc = 0; + } + else { + printf("Unknown Parameter: %s\n", key); + return -1; + } - if ((*params_in_out).useCSV) - (*params_in_out).csv_fs = csv_init(params_in_out->csv_path, params_in_out->env_meta_path); + has_vol_async = has_vol_connector(); - if (val) - free(val); - return 1; + if (has_vol_async) { + (*params_in_out).asyncMode = MODE_ASYNC; + } + else { + (*params_in_out).asyncMode = MODE_SYNC; } - void bench_params_init(bench_params * params_out) - { - if (!params_out) - params_out = (bench_params *)calloc(1, sizeof(bench_params)); - (*params_out).pattern_name = NULL; - (*params_out).meta_coll = 0; - (*params_out).data_coll = 0; - (*params_out).asyncMode = MODE_SYNC; - - (*params_out).cnt_time_step = 0; - (*params_out).cnt_time_step_delay = 0; - (*params_out).num_particles = 0; // total number per rank - (*params_out).io_mem_limit = 0; - (*params_out).try_num_particles = 0; // to read - (*params_out).compute_time.time_num = 0; - (*params_out).num_dims = 1; - - (*params_out).stride = 0; - (*params_out).block_size = 0; - (*params_out).block_cnt = 0; - (*params_out).dim_1 = 1; - (*params_out).dim_2 = 1; - (*params_out).dim_3 = 1; - (*params_out).chunk_dim_1 = 1; - (*params_out).chunk_dim_2 = 1; - (*params_out).chunk_dim_3 = 1; - (*params_out).csv_path = NULL; - (*params_out).env_meta_path = NULL; - - (*params_out).csv_path = NULL; - (*params_out).csv_fs = NULL; - (*params_out).env_meta_path = NULL; - (*params_out).file_per_proc = 0; - 
} - - int has_vol_connector() - { + + if ((*params_in_out).useCSV) + (*params_in_out).csv_fs = csv_init(params_in_out->csv_path, params_in_out->env_meta_path); + + if (val) + free(val); + return 1; +} +void +bench_params_init(bench_params *params_out) +{ + if (!params_out) + params_out = (bench_params *)calloc(1, sizeof(bench_params)); + (*params_out).pattern_name = NULL; + (*params_out).meta_coll = 0; + (*params_out).data_coll = 0; + (*params_out).asyncMode = MODE_SYNC; + + (*params_out).cnt_time_step = 0; + (*params_out).cnt_time_step_delay = 0; + (*params_out).num_particles = 0; // total number per rank + (*params_out).io_mem_limit = 0; + (*params_out).try_num_particles = 0; // to read + (*params_out).compute_time.time_num = 0; + (*params_out).num_dims = 1; + + (*params_out).stride = 0; + (*params_out).block_size = 0; + (*params_out).block_cnt = 0; + (*params_out).dim_1 = 1; + (*params_out).dim_2 = 1; + (*params_out).dim_3 = 1; + (*params_out).chunk_dim_1 = 1; + (*params_out).chunk_dim_2 = 1; + (*params_out).chunk_dim_3 = 1; + (*params_out).csv_path = NULL; + (*params_out).env_meta_path = NULL; + + (*params_out).csv_path = NULL; + (*params_out).csv_fs = NULL; + (*params_out).env_meta_path = NULL; + (*params_out).file_per_proc = 0; +} + +int +has_vol_connector() +{ #if H5_VERSION_GE(1, 13, 0) - char *connector = getenv("HDF5_VOL_CONNECTOR"); + char *connector = getenv("HDF5_VOL_CONNECTOR"); if (connector != NULL && strstr(connector, "async")) { return 1; } #endif -#if H5_VERSION_GE(1, 13, 1) - if (connector != NULL && strstr(connector, "cache_ext")) { - return 1; - } -#endif - return 0; - } - - int read_config(const char *file_path, bench_params *params_out, int do_write) - { - char cfg_line[CFG_LINE_LEN_MAX] = ""; - - if (!params_out) - params_out = (bench_params *)calloc(1, sizeof(bench_params)); - else - memset(params_out, 0, sizeof(bench_params)); - // Default settings - bench_params_init(params_out); - (*params_out).data_file_path = strdup(file_path); - FILE *file = fopen(file_path, "r"); - - int parsed = 1; - - // default values - (*params_out).useCSV = 0; - if (do_write) - (*params_out).io_op = IO_WRITE; - else - (*params_out).io_op = IO_READ; + return 0; +} - while (fgets(cfg_line, CFG_LINE_LEN_MAX, file) && (parsed == 1)) { - if (cfg_line[0] == '#') { // skip comment lines - continue; - } - char *tokens[2]; - char *tok = strtok(cfg_line, CFG_DELIMS); +int +read_config(const char *file_path, bench_params *params_out, int do_write) +{ + char cfg_line[CFG_LINE_LEN_MAX] = ""; + + if (!params_out) + params_out = (bench_params *)calloc(1, sizeof(bench_params)); + else + memset(params_out, 0, sizeof(bench_params)); + // Default settings + bench_params_init(params_out); + (*params_out).data_file_path = strdup(file_path); + + FILE *file = fopen(file_path, "r"); + + int parsed = 1; + + // default values + (*params_out).useCSV = 0; + if (do_write) + (*params_out).io_op = IO_WRITE; + else + (*params_out).io_op = IO_READ; + + while (fgets(cfg_line, CFG_LINE_LEN_MAX, file) && (parsed == 1)) { + if (cfg_line[0] == '#') { // skip comment lines + continue; + } + char *tokens[2]; + char *tok = strtok(cfg_line, CFG_DELIMS); + if (tok) { + tokens[0] = tok; + tok = strtok(NULL, CFG_DELIMS); if (tok) { - tokens[0] = tok; - tok = strtok(NULL, CFG_DELIMS); - if (tok) { - tokens[1] = tok; - } - else - return -1; + tokens[1] = tok; } else return -1; - // printf("key = [%s], val = [%s]\n", tokens[0], tokens[1]); - parsed = _set_params(tokens[0], tokens[1], params_out, do_write); } - if (parsed < 0) + else 
return -1; + // printf("key = [%s], val = [%s]\n", tokens[0], tokens[1]); + parsed = _set_params(tokens[0], tokens[1], params_out, do_write); + } + if (parsed < 0) + return -1; - int ret = _set_io_pattern(params_out); - if (ret < 0) - return ret; + int ret = _set_io_pattern(params_out); + if (ret < 0) + return ret; if (params_out->io_op == IO_WRITE || params_out->io_op == IO_OVERWRITE || params_out->io_op == IO_APPEND || @@ -1023,15 +1028,11 @@ mem_monitor_final_run(mem_monitor *mon, unsigned long *metadata_time_total, unsi params_out->io_mem_limit); return -1; } - - if (params_out->io_mem_limit > 0) { - if (params_out->num_particles * PARTICLE_SIZE >= params_out->io_mem_limit) { - printf( - "Requested memory (%llu particles, %llu, PARTICLE_SIZE = %ld) is larger than specified " - "memory bound (%llu), " - "please check IO_MEM_LIMIT in your config file.\n", - params_out->num_particles, params_out->num_particles * PARTICLE_SIZE, PARTICLE_SIZE, - params_out->io_mem_limit); + } + if (params_out->io_op == IO_WRITE) { + if (params_out->access_pattern.pattern_write == CONTIG_CONTIG_STRIDED_1D) { + if (params_out->stride < 1 || params_out->block_size < 1 || params_out->block_cnt < 1) { + printf("Strided read requires STRIDE_SIZE/BLOCK_SIZE/BLOCK_CNT no less than 1.\n"); return -1; } } @@ -1044,22 +1045,15 @@ mem_monitor_final_run(mem_monitor *mon, unsigned long *metadata_time_total, unsi else params_out->num_particles = params_out->try_num_particles; } - else if (params_out->io_op == IO_READ) { // read - if (params_out->access_pattern.pattern_read == CONTIG_1D) { // read whole file - if (params_out->num_particles > 1) - params_out->try_num_particles = params_out->num_particles; - else - params_out->num_particles = params_out->try_num_particles; - } - if (params_out->access_pattern.pattern_read == STRIDED_1D) { - if (params_out->stride < 1 || params_out->block_size < 1 || params_out->block_cnt < 1) { - printf("Strided read requires STRIDE_SIZE/BLOCK_SIZE/BLOCK_CNT no less than 1.\n"); - return -1; - } + if (params_out->access_pattern.pattern_read == STRIDED_1D) { + if (params_out->stride < 1 || params_out->block_size < 1 || params_out->block_cnt < 1) { + printf("Strided read requires STRIDE_SIZE/BLOCK_SIZE/BLOCK_CNT no less than 1.\n"); + return -1; } } - return 0; } + return 0; +} // print all fields of params void @@ -1085,180 +1079,187 @@ print_params(const bench_params *p) printf(" Dim_3: %lu\n", p->dim_3); } - printf("Mode: %s\n", p->asyncMode == MODE_SYNC ? "SYNC" : "ASYNC"); - printf("Collective metadata operations: %s\n", p->meta_coll == 1 ? "YES" : "NO"); - printf("Collective buffering for data operations: %s\n", p->data_coll == 1 ? 
"YES" : "NO"); + if (p->access_pattern.pattern_read == STRIDED_1D || + p->access_pattern.pattern_write == CONTIG_CONTIG_STRIDED_1D) { + printf("Strided access settings:\n"); + printf(" Stride size = %ld\n", p->stride); + printf(" Block size = %ld\n", p->block_size); + } - printf("Number of dimensions: %d\n", p->num_dims); - printf(" Dim_1: %lu\n", p->dim_1); + if (p->useCompress) { + printf("Use compression: %d\n", p->useCompress); + printf(" chunk_dim1: %lu\n", p->chunk_dim_1); if (p->num_dims >= 2) { - printf(" Dim_2: %lu\n", p->dim_2); + printf(" chunk_dim2: %lu\n", p->chunk_dim_2); } - if (p->num_dims >= 3) { - printf(" Dim_3: %lu\n", p->dim_3); + else if (p->num_dims >= 3) { + printf(" chunk_dim3: %lu\n", p->chunk_dim_3); } } printf("===========================================================\n"); printf("\n"); } - if (p->useCompress) { - printf("Use compression: %d\n", p->useCompress); - printf(" chunk_dim1: %lu\n", p->chunk_dim_1); - if (p->num_dims >= 2) { - printf(" chunk_dim2: %lu\n", p->chunk_dim_2); - } - else if (p->num_dims >= 3) { - printf(" chunk_dim3: %lu\n", p->chunk_dim_3); - } - } +void +bench_params_free(bench_params *p) +{ + if (!p) + return; + if (p->data_file_path) + free(p->data_file_path); + if (p->pattern_name) + free(p->pattern_name); +} - printf("=======================================\n"); +int +file_create_try(const char *path) +{ + FILE *fs = fopen(path, "w+"); + if (!fs) { + printf("Failed to create file: %s, Please check permission.\n", path); + return -1; } + fclose(fs); + return 0; +} - void bench_params_free(bench_params * p) - { - if (!p) - return; - if (p->data_file_path) - free(p->data_file_path); - if (p->pattern_name) - free(p->pattern_name); +int +file_exist(const char *path) +{ + FILE *f = fopen(path, "r"); + if (!f) { + printf("Failed to open file: %s, Please check if the file exists.\n", path); + return -1; } + fclose(f); + return 0; +} - int file_create_try(const char *path) - { - FILE *fs = fopen(path, "w+"); - if (!fs) { - printf("Failed to create file: %s, Please check permission.\n", path); - return -1; - } - fclose(fs); - return 0; - } +/* TODO: + * - read lines from metadata_list_file, each presents an environment variable name. + * - get val from getrnv(), write to fs. + * */ - int file_exist(const char *path) - { - FILE *f = fopen(path, "r"); - if (!f) { - printf("Failed to open file: %s, Please check if the file exists.\n", path); - return -1; - } - fclose(f); - return 0; +int +record_env_metadata(FILE *fs, const char *metadata_list_file) +{ + // read list file line, use each line as a key to search env + if (!fs) + return -1; + FILE *lfs = fopen(metadata_list_file, "r"); + if (!lfs) { + printf("Can not open metadata list file: %s\n", metadata_list_file); + return -1; } - /* TODO: - * - read lines from metadata_list_file, each presents an environment variable name. - * - get val from getrnv(), write to fs. 
- * */ + fprintf(fs, "======================= Metadata =====================\n"); - int record_env_metadata(FILE * fs, const char *metadata_list_file) - { - // read list file line, use each line as a key to search env - if (!fs) - return -1; - FILE *lfs = fopen(metadata_list_file, "r"); - if (!lfs) { - printf("Can not open metadata list file: %s\n", metadata_list_file); - return -1; - } + char line[10 * CFG_LINE_LEN_MAX]; // some env val could be very large, such as PATH + while (fgets(line, CFG_LINE_LEN_MAX, lfs)) { + if (line[0] == '#') // skip comment lines + continue; + if (line[0] == '\n') + continue; - fprintf(fs, "======================= Metadata =====================\n"); + if (line[strlen(line) - 1] == '\n') { + line[strlen(line) - 1] = 0; + } - char line[10 * CFG_LINE_LEN_MAX]; // some env val could be very large, such as PATH - while (fgets(line, CFG_LINE_LEN_MAX, lfs)) { - if (line[0] == '#') // skip comment lines - continue; - if (line[0] == '\n') - continue; + char *val = getenv(line); + // printf("%s = %s\n", line, val); + fprintf(fs, "%s = %s\n", line, val); - if (line[strlen(line) - 1] == '\n') { - line[strlen(line) - 1] = 0; - } + if (!val) { // null + printf(" %s not set.\n", line); + continue; + } + } - char *val = getenv(line); - // printf("%s = %s\n", line, val); - fprintf(fs, "%s = %s\n", line, val); + fprintf(fs, "======================= Metadata end ====================\n"); + fclose(lfs); + return 0; +} - if (!val) { // null - printf(" %s not set.\n", line); - continue; - } - } +FILE * +csv_init(const char *csv_path, const char *metadata_list_file) +{ //, const char* metadata_list_file: should be optional. + FILE *fs = fopen(csv_path, "w+"); - fprintf(fs, "======================= Metadata end ====================\n"); - fclose(lfs); - return 0; + if (!fs) { + printf("Failed to create file: %s, Please check permission.\n", csv_path); + return NULL; } - FILE *csv_init(const char *csv_path, const char *metadata_list_file) - { //, const char* metadata_list_file: should be optional. 
- FILE *fs = fopen(csv_path, "w+"); - - if (!fs) { - printf("Failed to create file: %s, Please check permission.\n", csv_path); + if (metadata_list_file) { + if (record_env_metadata(fs, metadata_list_file) < 0) return NULL; - } + } - if (metadata_list_file) { - if (record_env_metadata(fs, metadata_list_file) < 0) - return NULL; - } + return fs; +} - return fs; - } +int +csv_output_line(FILE *fs, char *name, char *val_str) +{ + fprintf(fs, "%s,", name); + fprintf(fs, " %s\n", val_str); + return 0; +} - int csv_output_line(FILE * fs, char *name, char *val_str) - { - fprintf(fs, "%s,", name); - fprintf(fs, " %s\n", val_str); - return 0; +int +argv_print(int argc, char *argv[]) +{ + if (argc < 1) + return -1; + printf("%d arguments provided.\n", argc); + for (int i = 0; i < argc; i++) { + printf("idx = %d, argv = %s\n", i, argv[i]); } + return 0; +} - int argv_print(int argc, char *argv[]) - { - if (argc < 1) - return -1; - printf("%d arguments provided.\n", argc); - for (int i = 0; i < argc; i++) { - printf("idx = %d, argv = %s\n", i, argv[i]); - } - return 0; +char * +get_file_name_from_path(char *path) +{ + if (path == NULL) + return NULL; + + char *pFileName = path; + for (char *pCur = path; *pCur != '\0'; pCur++) { + if (*pCur == '/' || *pCur == '\\') + pFileName = pCur + 1; } - char *get_file_name_from_path(char *path) - { - if (path == NULL) - return NULL; + return pFileName; +} - char *pFileName = path; - for (char *pCur = path; *pCur != '\0'; pCur++) { - if (*pCur == '/' || *pCur == '\\') - pFileName = pCur + 1; - } +char * +substr(char *src, size_t start, size_t len) +{ + if (start + len > strlen(src)) { + fprintf(stderr, "%s() error: invalid substring index (start+len > length).\n", __func__); + return NULL; + } - return pFileName; + char *sub = calloc(1, len + 1); + if (!sub) { + fprintf(stderr, "%s() error: memory allocation failed.\n", __func__); + return NULL; } - char *substr(char *src, size_t start, size_t len) - { - if (start + len > strlen(src)) { - fprintf(stderr, "%s() error: invalid substring index (start+len > length).\n", __func__); - return NULL; - } + memcpy(sub, src + start, len); + // sub[len] = '\0'; // by using calloc, sub is filled with 0 (null) - char *sub = calloc(1, len + 1); - if (!sub) { - fprintf(stderr, "%s() error: memory allocation failed.\n", __func__); - return NULL; - } + return sub; +} - memcpy(sub, src + start, len); - // sub[len] = '\0'; // by using calloc, sub is filled with 0 (null) +char * +get_dir_from_path(char *path) +{ + if (path == NULL) + return NULL; - return sub; - } + char *pDir = substr(path, 0, strlen(path) - strlen(get_file_name_from_path(path))); return pDir; } diff --git a/docs/source/vpic.rst b/docs/source/vpic.rst index b9a1b2ae..d975a5dd 100644 --- a/docs/source/vpic.rst +++ b/docs/source/vpic.rst @@ -220,7 +220,7 @@ Known Issues .. warning:: - In Cori/NERSC or similar platforms that use Cray-MPICH library, if you encouter a failed assertion regarding support for ``MPI_THREAD_MULTIPLE`` you should define the following environment variable: + In Cori/NERSC and Theta/ALCF, or similar platforms that use Cray-MPICH library, if you encouter a failed assertion regarding support for ``MPI_THREAD_MULTIPLE`` you should define the following environment variable: .. code-block:: bash @@ -228,8 +228,10 @@ Known Issues .. 
warning::

-    If you're trying to run the benchmark with the HDF5 VOL ASYNC connector in MacOS and are getting segmentation fault (from ``ABT_thread_create``), please try to set the following environment variable:
+    If you're trying to run the benchmark with the HDF5 VOL ASYNC connector on MacOS (or any other platform) and are getting a segmentation fault (from ``ABT_thread_create``), please try to set the following environment variable:

    .. code-block:: bash

        export ABT_THREAD_STACKSIZE=100000
+
+    If you run the benchmark with the HDF5 VOL CACHE connector, this value is set automatically inside the VOL connector.
diff --git a/h5bench b/h5bench
index c6786ae1..cee8d44f 100755
--- a/h5bench
+++ b/h5bench
@@ -297,7 +297,7 @@ class H5bench:
             file = '{}/{}'.format(self.directory, setup['file'])
             configuration = setup['configuration']

-            if configuration['MODE'] == 'ASYNC':
+            if vol["connector"] !="":
                 self.enable_vol(vol)

             configuration_file = '{}/{}/h5bench.cfg'.format(self.directory, id)
@@ -341,7 +341,6 @@ class H5bench:
                 configuration_file,
                 file
             )
-            self.logger.info(command)

             # Make sure the command line is in the correct format
diff --git a/samples/cache-write-1d-contig-contig.json b/samples/cache-write-1d-contig-contig.json
new file mode 100644
index 00000000..f6f8f7c5
--- /dev/null
+++ b/samples/cache-write-1d-contig-contig.json
@@ -0,0 +1,37 @@
+{
+    "mpi": {
+        "command": "mpirun",
+        "ranks": "4",
+        "configuration": "--allow-run-as-root --np 2 --oversubscribe"
+    },
+    "vol": {
+        "library": "/vol/lib:/argobots/lib:/hdf5/lib",
+        "path": [
+            "vol/lib"
+        ],
+        "connector": "cache_ext config=cache_1.cfg;under_vol=512;under_info={under_vol=0;under_info={}}"
+    },
+    "file-system": {},
+    "directory": "storage",
+    "benchmarks": [
+        {
+            "benchmark": "write",
+            "file": "test.h5",
+            "configuration": {
+                "MEM_PATTERN": "CONTIG",
+                "FILE_PATTERN": "CONTIG",
+                "TIMESTEPS": "5",
+                "DELAYED_CLOSE_TIMESTEPS": "0",
+                "COLLECTIVE_DATA": "YES",
+                "COLLECTIVE_METADATA": "YES",
+                "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "1 s",
+                "NUM_DIMS": "1",
+                "DIM_1": "4194304",
+                "DIM_2": "1",
+                "DIM_3": "1",
+                "CSV_FILE": "output.csv",
+                "MODE": "SYNC"
+            }
+        }
+    ]
+}
diff --git a/samples/cache-write-2d-contig-contig.json b/samples/cache-write-2d-contig-contig.json
new file mode 100644
index 00000000..7d80a656
--- /dev/null
+++ b/samples/cache-write-2d-contig-contig.json
@@ -0,0 +1,37 @@
+{
+    "mpi": {
+        "command": "mpirun",
+        "ranks": "4",
+        "configuration": "--allow-run-as-root --np 2 --oversubscribe"
+    },
+    "vol": {
+        "library": "/vol/lib:/argobots/lib:/hdf5/lib",
+        "path": [
+            "vol/lib"
+        ],
+        "connector": "cache_ext config=cache_1.cfg;under_vol=512;under_info={under_vol=0;under_info={}}"
+    },
+    "file-system": {},
+    "directory": "storage",
+    "benchmarks": [
+        {
+            "benchmark": "write",
+            "file": "test.h5",
+            "configuration": {
+                "MEM_PATTERN": "CONTIG",
+                "FILE_PATTERN": "CONTIG",
+                "TIMESTEPS": "5",
+                "DELAYED_CLOSE_TIMESTEPS": "0",
+                "COLLECTIVE_DATA": "YES",
+                "COLLECTIVE_METADATA": "YES",
+                "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "1 s",
+                "NUM_DIMS": "2",
+                "DIM_1": "1024",
+                "DIM_2": "1024",
+                "DIM_3": "1",
+                "CSV_FILE": "output.csv",
+                "MODE": "SYNC"
+            }
+        }
+    ]
+}
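The "connector" string in the samples above stacks three VOL connectors: cache_ext (the cache VOL, configured through cache_1.cfg) on top of under_vol=512 (the async VOL's registered connector ID) on top of under_vol=0 (the native VOL). The h5bench script exports this stack through its enable_vol() routine before launching the benchmark; a minimal sketch of the equivalent manual setup, assuming the placeholder install paths used in these samples:

    # Plugin search path and runtime libraries (placeholder paths from the samples)
    export HDF5_PLUGIN_PATH=/vol/lib
    export LD_LIBRARY_PATH=/vol/lib:/argobots/lib:/hdf5/lib:$LD_LIBRARY_PATH
    # VOL stack: cache_ext -> async (connector ID 512) -> native (ID 0)
    export HDF5_VOL_CONNECTOR="cache_ext config=cache_1.cfg;under_vol=512;under_info={under_vol=0;under_info={}}"

The three samples differ only in NUM_DIMS and the DIM_* sizes; the 3D variant follows.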
"/vol/lib:/argobots/lib:/hdf5/lib", + "path": [ + "vol/lib" + ], + "connector": "cache_ext config=cache_1.cfg;under_vol=512;under_info={under_vol=0;under_info={}}" + }, + "file-system": {}, + "directory": "storage", + "benchmarks": [ + { + "benchmark": "write", + "file": "test.h5", + "configuration": { + "MEM_PATTERN": "CONTIG", + "FILE_PATTERN": "CONTIG", + "TIMESTEPS": "5", + "DELAYED_CLOSE_TIMESTEPS": "0", + "COLLECTIVE_DATA": "YES", + "COLLECTIVE_METADATA": "YES", + "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "1 s", + "NUM_DIMS": "3", + "DIM_1": "64", + "DIM_2": "64", + "DIM_3": "64", + "CSV_FILE": "output.csv", + "MODE": "SYNC" + } + } + ] +} diff --git a/samples/update.py b/samples/update.py index 40c983f9..85c02185 100644 --- a/samples/update.py +++ b/samples/update.py @@ -18,7 +18,7 @@ HDF5_DIR = os.getenv('HDF5_DIR') ABT_DIR = os.getenv('ABT_DIR') ASYNC_DIR = os.getenv('ASYNC_DIR') - +VOL_DIR = os.getenv('VOL_DIR') if HDF5_DIR is None: print('HDF5_DIR enviroment variable is not set!') @@ -32,16 +32,28 @@ print('ASYNC_DIR enviroment variable is not set!') exit(-1) + with open(ARGS.setup, 'r') as f: data = json.load(f, object_pairs_hook=collections.OrderedDict) - -data['vol']['library'] = '{}:{}:{}'.format( - ASYNC_DIR, - '/'.join([ABT_DIR, 'lib']), - '/'.join([HDF5_DIR, 'lib']) -) - -data['vol']['path'] = ASYNC_DIR + +if VOL_DIR is not None: + ''' multiple vol connectors case ''' + data['vol']['library'] = '{}:{}:{}'.format( + '/'.join([VOL_DIR, 'lib']), + '/'.join([ABT_DIR, 'lib']), + '/'.join([HDF5_DIR, 'lib']) + ) + + data['vol']['path'] = '/'.join([VOL_DIR, 'lib']), +else: + ''' single vol connector case ''' + data['vol']['library'] = '{}:{}:{}'.format( + ASYNC_DIR, + '/'.join([ABT_DIR, 'lib']), + '/'.join([HDF5_DIR, 'lib']) + ) + + data['vol']['path'] = ASYNC_DIR with open(ARGS.setup, 'w') as f: - json.dump(data, f, indent=4, sort_keys=False) \ No newline at end of file + json.dump(data, f, indent=4, sort_keys=False) From 889d57fae16ee018fc7d698c8c1617a9420426b7 Mon Sep 17 00:00:00 2001 From: github-actions Date: Mon, 11 Apr 2022 19:52:48 +0000 Subject: [PATCH 23/55] Committing clang-format changes --- commons/h5bench_util.c | 2 +- h5bench_patterns/h5bench_write.c | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/commons/h5bench_util.c b/commons/h5bench_util.c index 5f005688..99fa96c7 100644 --- a/commons/h5bench_util.c +++ b/commons/h5bench_util.c @@ -69,7 +69,7 @@ h5bench_sleep(duration sleep_time) void async_sleep(hid_t file_id, hid_t fapl, duration sleep_time) { -#ifndef USE_CACHE_VOL +#ifndef USE_CACHE_VOL #ifdef USE_ASYNC_VOL unsigned cap = 0; H5Pget_vol_cap_flags(fapl, &cap); diff --git a/h5bench_patterns/h5bench_write.c b/h5bench_patterns/h5bench_write.c index 76efae54..d8175685 100644 --- a/h5bench_patterns/h5bench_write.c +++ b/h5bench_patterns/h5bench_write.c @@ -1066,7 +1066,7 @@ main(int argc, char *argv[]) } #ifdef USE_CACHE_VOL H5Fcache_async_close_set(file_id); -#endif +#endif unsigned long tfopen_end = get_time_usec(); if (MY_RANK == 0) From 0293c3d76355e47e1aed91e37d24e416a7ce622c Mon Sep 17 00:00:00 2001 From: Huihuo Zheng Date: Tue, 12 Apr 2022 16:22:59 -0500 Subject: [PATCH 24/55] fixed formatting issue --- .github/workflows/h5bench-hdf5-develop.yml | 6 +++--- h5bench | 2 +- h5bench_patterns/h5bench_write.c | 8 ++++++++ 3 files changed, 12 insertions(+), 4 deletions(-) diff --git a/.github/workflows/h5bench-hdf5-develop.yml b/.github/workflows/h5bench-hdf5-develop.yml index 3b421c1a..6864e2b4 100644 --- 
a/.github/workflows/h5bench-hdf5-develop.yml +++ b/.github/workflows/h5bench-hdf5-develop.yml @@ -121,7 +121,7 @@ jobs: export HDF5_VOL_CONNECTOR="async under_vol=0;under_info={}" make check - + - name: Build VOL-CACHE run: | current="$PWD" @@ -137,7 +137,7 @@ jobs: export LD_LIBRARY_PATH=$HDF5_VOL_DIR/lib:$HDF5_ROOT/lib:$ABT_DIR/lib:$LD_LIBRARY_PATH cd $current/vol-cache/src make all install - + - name: Build h5bench SYNC run: | current="$PWD" @@ -185,7 +185,7 @@ jobs: -DCMAKE_C_FLAGS="-I/$HDF5_VOL_DIR/include -L/$HDF5_VOL_DIR/lib" -DCMAKE_C_COMPILER=$HDF5_HOME/bin/h5pcc \ make - + - name: Test h5bench SYNC write/read run: | cd build-sync diff --git a/h5bench b/h5bench index cee8d44f..25c1b56b 100755 --- a/h5bench +++ b/h5bench @@ -297,7 +297,7 @@ class H5bench: file = '{}/{}'.format(self.directory, setup['file']) configuration = setup['configuration'] - if vol["connector"] !="": + if vol['connector'] != '': self.enable_vol(vol) configuration_file = '{}/{}/h5bench.cfg'.format(self.directory, id) diff --git a/h5bench_patterns/h5bench_write.c b/h5bench_patterns/h5bench_write.c index d8175685..63b0737e 100644 --- a/h5bench_patterns/h5bench_write.c +++ b/h5bench_patterns/h5bench_write.c @@ -1001,6 +1001,14 @@ main(int argc, char *argv[]) if (params.useCompress) params.data_coll = 1; +#if H5_VERSION_GE(1, 13, 1) + if (H5VLis_connector_registered_by_name("cache_ext")) { + if (MY_RANK == 0) { + printf("Using 'cache_ext' VOL connector\n"); + } + } +#endif + #if H5_VERSION_GE(1, 13, 0) if (H5VLis_connector_registered_by_name("async")) { if (MY_RANK == 0) { From 6bd26160d2fb3ffcdf37f6a22fa41f2294e3ddb4 Mon Sep 17 00:00:00 2001 From: Huihuo Zheng Date: Tue, 12 Apr 2022 17:32:42 -0500 Subject: [PATCH 25/55] fixing h5bench issue --- h5bench | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/h5bench b/h5bench index 25c1b56b..4927b3ed 100755 --- a/h5bench +++ b/h5bench @@ -297,7 +297,7 @@ class H5bench: file = '{}/{}'.format(self.directory, setup['file']) configuration = setup['configuration'] - if vol['connector'] != '': + if 'connector' in vol.keys(): self.enable_vol(vol) configuration_file = '{}/{}/h5bench.cfg'.format(self.directory, id) From 0773b2b7a4c6eaf5854877277a2f0f649c48238e Mon Sep 17 00:00:00 2001 From: Huihuo Zheng Date: Tue, 12 Apr 2022 17:50:09 -0500 Subject: [PATCH 26/55] fixing yaml file for develop --- .github/workflows/h5bench-hdf5-develop.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/h5bench-hdf5-develop.yml b/.github/workflows/h5bench-hdf5-develop.yml index 6864e2b4..cc73a7f4 100644 --- a/.github/workflows/h5bench-hdf5-develop.yml +++ b/.github/workflows/h5bench-hdf5-develop.yml @@ -136,7 +136,7 @@ jobs: cp -r $current/vol-async/src/*.h $current/vol/include/ export LD_LIBRARY_PATH=$HDF5_VOL_DIR/lib:$HDF5_ROOT/lib:$ABT_DIR/lib:$LD_LIBRARY_PATH cd $current/vol-cache/src - make all install + make all - name: Build h5bench SYNC run: | From 1ed91b15a95eefaf72ea6b288a4b0a40c1f50d88 Mon Sep 17 00:00:00 2001 From: Huihuo Zheng Date: Tue, 12 Apr 2022 19:06:34 -0500 Subject: [PATCH 27/55] fixed issue for VOL_DIR environment variable --- .github/workflows/h5bench-hdf5-develop.yml | 6 +++--- samples/async-write-read-contig-1d-small.json | 2 +- samples/update.py | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/h5bench-hdf5-develop.yml b/.github/workflows/h5bench-hdf5-develop.yml index cc73a7f4..472efdc8 100644 --- a/.github/workflows/h5bench-hdf5-develop.yml +++ 
b/.github/workflows/h5bench-hdf5-develop.yml @@ -604,7 +604,7 @@ jobs: export HDF5_ROOT=/opt/hdf5 export ABT_DIR=$current/vol-async/argobots/install export ASYNC_DIR=$current/vol-async/src - export VOL_DIR=$current/vol + export HDF5_VOL_DIR=$current/vol export LD_PRELOAD=$ABT_DIR/lib/libabt.so cd build-cache printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg @@ -619,7 +619,7 @@ jobs: export HDF5_ROOT=/opt/hdf5 export ABT_DIR=$current/vol-async/argobots/install export ASYNC_DIR=$current/vol-async/src - export VOL_DIR=$current/vol + export HDF5_VOL_DIR=$current/vol export LD_PRELOAD=$ABT_DIR/lib/libabt.so cd build-cache printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg @@ -634,7 +634,7 @@ jobs: export HDF5_ROOT=/opt/hdf5 export ABT_DIR=$current/vol-async/argobots/install export ASYNC_DIR=$current/vol-async/src - export VOL_DIR=$current/vol + export HDF5_VOL_DIR=$current/vol export LD_PRELOAD=$ABT_DIR/lib/libabt.so cd build-cache printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg diff --git a/samples/async-write-read-contig-1d-small.json b/samples/async-write-read-contig-1d-small.json index b3f2aef5..96450733 100644 --- a/samples/async-write-read-contig-1d-small.json +++ b/samples/async-write-read-contig-1d-small.json @@ -2,7 +2,7 @@ "mpi": { "command": "mpirun", "ranks": "4", - "configuration": "--allow-run-as-root --oversubscribe" + "configuration": "--allow-run-as-root --np 2 --oversubscribe" }, "vol": { "library": "/vol-async/src:/hdf5-async-vol-register-install/lib:/argobots/install/lib:/hdf5-install/install:", diff --git a/samples/update.py b/samples/update.py index 85c02185..ae7188f4 100644 --- a/samples/update.py +++ b/samples/update.py @@ -18,7 +18,7 @@ HDF5_DIR = os.getenv('HDF5_DIR') ABT_DIR = os.getenv('ABT_DIR') ASYNC_DIR = os.getenv('ASYNC_DIR') -VOL_DIR = os.getenv('VOL_DIR') +VOL_DIR = os.getenv('HDF5_VOL_DIR') if HDF5_DIR is None: print('HDF5_DIR enviroment variable is not set!') From 58337308eb647b5b447ebcd19619d288de7bbba9 Mon Sep 17 00:00:00 2001 From: Huihuo Zheng Date: Tue, 12 Apr 2022 20:56:39 -0500 Subject: [PATCH 28/55] fixed issue for Build h5bench CACHE --- .github/workflows/h5bench-hdf5-develop.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/h5bench-hdf5-develop.yml b/.github/workflows/h5bench-hdf5-develop.yml index 472efdc8..d02b671f 100644 --- a/.github/workflows/h5bench-hdf5-develop.yml +++ b/.github/workflows/h5bench-hdf5-develop.yml @@ -179,10 +179,11 @@ jobs: export PNETCDF_HOME=/opt/pnetcdf export HDF5_VOL_DIR=$current/vol mkdir build-cache + cd build-cache cmake .. 
\ -DWITH_CACHE_VOL:BOOL=ON \ -DWITH_ASYNC_VOL:BOOL=ON \ - -DCMAKE_C_FLAGS="-I/$HDF5_VOL_DIR/include -L/$HDF5_VOL_DIR/lib" + -DCMAKE_C_FLAGS="-I/$HDF5_VOL_DIR/include -L/$HDF5_VOL_DIR/lib" \ -DCMAKE_C_COMPILER=$HDF5_HOME/bin/h5pcc \ make From c21996c5d53aeeb3e8e1e6aad4117341f49e622d Mon Sep 17 00:00:00 2001 From: Huihuo Zheng Date: Tue, 12 Apr 2022 22:36:06 -0500 Subject: [PATCH 29/55] fixed update.py issue --- samples/update.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/update.py b/samples/update.py index ae7188f4..ea82b433 100644 --- a/samples/update.py +++ b/samples/update.py @@ -44,7 +44,7 @@ '/'.join([HDF5_DIR, 'lib']) ) - data['vol']['path'] = '/'.join([VOL_DIR, 'lib']), + data['vol']['path'] = VOL_DIR+'/lib' else: ''' single vol connector case ''' data['vol']['library'] = '{}:{}:{}'.format( From 2589227931cd34497fe9d6aca47b4534df9d03ed Mon Sep 17 00:00:00 2001 From: Huihuo Zheng Date: Thu, 14 Apr 2022 10:16:57 -0500 Subject: [PATCH 30/55] fixed Build h5bench CACHE issue --- .github/workflows/h5bench-hdf5-develop.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/h5bench-hdf5-develop.yml b/.github/workflows/h5bench-hdf5-develop.yml index d02b671f..de21f8da 100644 --- a/.github/workflows/h5bench-hdf5-develop.yml +++ b/.github/workflows/h5bench-hdf5-develop.yml @@ -184,7 +184,7 @@ jobs: -DWITH_CACHE_VOL:BOOL=ON \ -DWITH_ASYNC_VOL:BOOL=ON \ -DCMAKE_C_FLAGS="-I/$HDF5_VOL_DIR/include -L/$HDF5_VOL_DIR/lib" \ - -DCMAKE_C_COMPILER=$HDF5_HOME/bin/h5pcc \ + -DCMAKE_C_COMPILER=$HDF5_HOME/bin/h5pcc make - name: Test h5bench SYNC write/read From 016f30c0d9c56102c16927a548fa9aa4b53899b6 Mon Sep 17 00:00:00 2001 From: Huihuo Zheng Date: Thu, 14 Apr 2022 18:15:33 -0500 Subject: [PATCH 31/55] fixing H5ES_VOL_g error --- .../h5bench-hdf5-1.13.0-mpich-3.4.3.yml | 36 ++++++++-------- .github/workflows/h5bench-hdf5-1.13.0.yml | 36 ++++++++-------- .github/workflows/h5bench-hdf5-develop.yml | 42 +++++++++---------- 3 files changed, 57 insertions(+), 57 deletions(-) diff --git a/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml b/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml index 6ddfbeca..aef804e7 100644 --- a/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml +++ b/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml @@ -80,7 +80,7 @@ jobs: cd $VOL_DIR/test tail -n 47 Makefile.summit > Makefile - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" make -j 8 - name: Test VOL-ASYNC @@ -293,7 +293,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async export PATH=$(pwd):$PATH @@ -311,7 +311,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async export PATH=$(pwd):$PATH @@ -329,7 +329,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async export PATH=$(pwd):$PATH @@ -347,7 +347,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async export PATH=$(pwd):$PATH @@ -365,7 +365,7 @@ jobs: 
export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async export PATH=$(pwd):$PATH @@ -383,7 +383,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async export PATH=$(pwd):$PATH @@ -401,7 +401,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async export PATH=$(pwd):$PATH @@ -419,7 +419,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async export PATH=$(pwd):$PATH @@ -437,7 +437,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async export PATH=$(pwd):$PATH @@ -455,7 +455,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async export PATH=$(pwd):$PATH @@ -473,7 +473,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async export PATH=$(pwd):$PATH @@ -491,7 +491,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async export PATH=$(pwd):$PATH @@ -509,7 +509,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async export PATH=$(pwd):$PATH @@ -527,7 +527,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async export PATH=$(pwd):$PATH @@ -545,7 +545,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async export PATH=$(pwd):$PATH @@ -563,7 +563,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async export PATH=$(pwd):$PATH @@ -581,7 +581,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async export PATH=$(pwd):$PATH diff --git a/.github/workflows/h5bench-hdf5-1.13.0.yml b/.github/workflows/h5bench-hdf5-1.13.0.yml index 036a5e9d..f19fafdb 100644 --- a/.github/workflows/h5bench-hdf5-1.13.0.yml +++ b/.github/workflows/h5bench-hdf5-1.13.0.yml @@ -81,7 +81,7 @@ jobs: cd $VOL_DIR/test tail -n 47 Makefile.summit > Makefile - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" 
make -j 8 - name: Test VOL-ASYNC @@ -264,7 +264,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async @@ -281,7 +281,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async @@ -298,7 +298,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async @@ -315,7 +315,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async @@ -332,7 +332,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async @@ -349,7 +349,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async @@ -366,7 +366,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async @@ -383,7 +383,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async @@ -400,7 +400,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async @@ -417,7 +417,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async @@ -434,7 +434,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async @@ -451,7 +451,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async @@ -468,7 +468,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async @@ -485,7 +485,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async @@ -502,7 +502,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async @@ -519,7 +519,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async @@ -536,7 +536,7 @@ jobs: export 
VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async diff --git a/.github/workflows/h5bench-hdf5-develop.yml b/.github/workflows/h5bench-hdf5-develop.yml index de21f8da..e65e572d 100644 --- a/.github/workflows/h5bench-hdf5-develop.yml +++ b/.github/workflows/h5bench-hdf5-develop.yml @@ -103,7 +103,7 @@ jobs: cd $VOL_DIR/test tail -n 47 Makefile.summit > Makefile - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" make -j 8 - name: Test VOL-ASYNC @@ -318,7 +318,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async @@ -335,7 +335,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async @@ -352,7 +352,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async @@ -369,7 +369,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async @@ -386,7 +386,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async @@ -403,7 +403,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async @@ -420,7 +420,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async @@ -437,7 +437,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async @@ -454,7 +454,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async @@ -471,7 +471,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async @@ -488,7 +488,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async @@ -505,7 +505,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async @@ -522,7 +522,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async @@ -539,7 +539,7 @@ jobs: export VOL_DIR=$current/vol-async export 
ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async @@ -556,7 +556,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async @@ -573,7 +573,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async @@ -590,7 +590,7 @@ jobs: export VOL_DIR=$current/vol-async export ASYNC_DIR=$VOL_DIR/src - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-async @@ -606,7 +606,7 @@ jobs: export ABT_DIR=$current/vol-async/argobots/install export ASYNC_DIR=$current/vol-async/src export HDF5_VOL_DIR=$current/vol - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-cache printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg [ -e SSD ] || mkdir SSD @@ -621,7 +621,7 @@ jobs: export ABT_DIR=$current/vol-async/argobots/install export ASYNC_DIR=$current/vol-async/src export HDF5_VOL_DIR=$current/vol - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-cache printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg python3 ../samples/update.py ../samples/cache-write-2d-contig-contig.json @@ -636,7 +636,7 @@ jobs: export ABT_DIR=$current/vol-async/argobots/install export ASYNC_DIR=$current/vol-async/src export HDF5_VOL_DIR=$current/vol - export LD_PRELOAD=$ABT_DIR/lib/libabt.so + export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" cd build-cache printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg python3 ../samples/update.py ../samples/cache-write-1d-contig-contig.json From ce324b55573f13165f1369f15ac440d8221f8589 Mon Sep 17 00:00:00 2001 From: Huihuo Zheng Date: Fri, 15 Apr 2022 16:45:29 -0500 Subject: [PATCH 32/55] fixing LD_PRELOAD issue --- .github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml | 2 +- .github/workflows/h5bench-hdf5-1.13.0.yml | 2 +- .github/workflows/h5bench-hdf5-develop.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml b/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml index aef804e7..46d4aff1 100644 --- a/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml +++ b/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml @@ -80,7 +80,7 @@ jobs: cd $VOL_DIR/test tail -n 47 Makefile.summit > Makefile - export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" + export LD_PRELOAD="$ABT_DIR/lib/libabt.so" make -j 8 - name: Test VOL-ASYNC diff --git a/.github/workflows/h5bench-hdf5-1.13.0.yml b/.github/workflows/h5bench-hdf5-1.13.0.yml index f19fafdb..1db16f8c 100644 --- a/.github/workflows/h5bench-hdf5-1.13.0.yml +++ b/.github/workflows/h5bench-hdf5-1.13.0.yml @@ -81,7 +81,7 @@ jobs: cd 
$VOL_DIR/test tail -n 47 Makefile.summit > Makefile - export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" + export LD_PRELOAD="$ABT_DIR/lib/libabt.so" make -j 8 - name: Test VOL-ASYNC diff --git a/.github/workflows/h5bench-hdf5-develop.yml b/.github/workflows/h5bench-hdf5-develop.yml index e65e572d..72bb9c04 100644 --- a/.github/workflows/h5bench-hdf5-develop.yml +++ b/.github/workflows/h5bench-hdf5-develop.yml @@ -103,7 +103,7 @@ jobs: cd $VOL_DIR/test tail -n 47 Makefile.summit > Makefile - export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" + export LD_PRELOAD="$ABT_DIR/lib/libabt.so" make -j 8 - name: Test VOL-ASYNC From dbc486a85222808fd835558e4c4c80ebe94c69d7 Mon Sep 17 00:00:00 2001 From: Jean Luca Bez Date: Fri, 29 Apr 2022 14:41:18 -0700 Subject: [PATCH 33/55] Update h5bench develop CI --- .github/workflows/h5bench-hdf5-develop.yml | 82 ++++++++++++---------- 1 file changed, 45 insertions(+), 37 deletions(-) diff --git a/.github/workflows/h5bench-hdf5-develop.yml b/.github/workflows/h5bench-hdf5-develop.yml index 0818ba10..bd262a00 100644 --- a/.github/workflows/h5bench-hdf5-develop.yml +++ b/.github/workflows/h5bench-hdf5-develop.yml @@ -109,18 +109,23 @@ jobs: - name: Build VOL-CACHE run: | - current="$PWD" export HDF5_ROOT=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - [ -e $current/vol ] || mkdir $current/vol - [ -e $current/vol/bin ] || mkdir $current/vol/bin - [ -e $current/vol/lib ] || mkdir $current/vol/lib - [ -e $current/vol/include ] || mkdir $current/vol/include - export HDF5_VOL_DIR=$current/vol/ - cp -r $current/vol-async/src/*.so $current/vol-async/src/*.a $current/vol/lib/ - cp -r $current/vol-async/src/*.h $current/vol/include/ + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + export HDF5_VOL_DIR=/opt/vol-connectors + + [ -e $HDF5_VOL_DIR ] || mkdir $HDF5_VOL_DIR + [ -e $HDF5_VOL_DIR/bin ] || mkdir $HDF5_VOL_DIR/bin + [ -e $HDF5_VOL_DIR/lib ] || mkdir $HDF5_VOL_DIR/lib + [ -e $HDF5_VOL_DIR/include ] || mkdir $HDF5_VOL_DIR/include + + cp -r $ASYNC_DIR/lib/*.so $ASYNC_DIR/lib/*.a $HDF5_VOL_DIR/lib/ + cp -r $ASYNC_DIR/include/*.h $HDF5_VOL_DIR/include/ + export LD_LIBRARY_PATH=$HDF5_VOL_DIR/lib:$HDF5_ROOT/lib:$ABT_DIR/lib:$LD_LIBRARY_PATH - cd $current/vol-cache/src + + cd $HDF5_VOL_DIR/src + make all - name: Build h5bench SYNC @@ -137,8 +142,6 @@ jobs: - name: Build h5bench ASYNC run: | - current="$PWD" - export HDF5_HOME=/opt/hdf5 export PNETCDF_HOME=/opt/pnetcdf export ABT_HOME=/opt/argobots @@ -157,19 +160,20 @@ jobs: - name: Build h5bench CACHE run: | - current="$PWD" export HDF5_ROOT=/opt/hdf5 export HDF5_HOME=/opt/hdf5 export PNETCDF_HOME=/opt/pnetcdf - export HDF5_VOL_DIR=$current/vol + export HDF5_VOL_HOME=/opt/vol-connectors + mkdir build-cache cd build-cache + cmake .. 
\ -DWITH_CACHE_VOL:BOOL=ON \ -DWITH_ASYNC_VOL:BOOL=ON \ - -DCMAKE_C_FLAGS="-I/$HDF5_VOL_DIR/include -L/$HDF5_VOL_DIR/lib" \ - -DCMAKE_C_COMPILER=$HDF5_HOME/bin/h5pcc - make + -DCMAKE_C_FLAGS="-I/$HDF5_VOL_HOME/include -L/$HDF5_VOL_HOME/lib" \ + -DH5BENCH_ALL=ON + make -j 2 - name: Test h5bench SYNC write/read run: | @@ -379,8 +383,6 @@ jobs: - name: Test h5bench ASYNC write 2D contiguous (memory) interleaved (file) run: | - current="$PWD" - export HDF5_DIR=/opt/hdf5 export ABT_DIR=/opt/argobots export ASYNC_DIR=/opt/vol-async @@ -501,47 +503,53 @@ jobs: - name: Test h5bench CACHE write 3D contiguous (memory) contiguous (file) run: | - current="$PWD" export HDF5_DIR=/opt/hdf5 - export HDF5_ROOT=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export ASYNC_DIR=$current/vol-async/src - export HDF5_VOL_DIR=$current/vol - export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + export HDF5_VOL_HOME=/opt/vol-connectors + cd build-cache + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + [ -e SSD ] || mkdir SSD + python3 ../samples/update.py ../samples/cache-write-3d-contig-contig.json + ./h5bench --debug --abort-on-failure --validate-mode ../samples/cache-write-3d-contig-contig.json - name: Test h5bench CACHE write 2D contiguous (memory) contiguous (file) run: | - current="$PWD" export HDF5_DIR=/opt/hdf5 - export HDF5_ROOT=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export ASYNC_DIR=$current/vol-async/src - export HDF5_VOL_DIR=$current/vol - export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + export HDF5_VOL_HOME=/opt/vol-connectors + cd build-cache + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + python3 ../samples/update.py ../samples/cache-write-2d-contig-contig.json + [ -e SSD ] || mkdir SSD + ./h5bench --debug --abort-on-failure --validate-mode ../samples/cache-write-2d-contig-contig.json - name: Test h5bench CACHE write 1D contiguous (memory) contiguous (file) run: | - current="$PWD" export HDF5_DIR=/opt/hdf5 - export HDF5_ROOT=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export ASYNC_DIR=$current/vol-async/src - export HDF5_VOL_DIR=$current/vol - export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + export HDF5_VOL_HOME=/opt/vol-connectors + cd build-cache + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + python3 ../samples/update.py ../samples/cache-write-1d-contig-contig.json + [ -e SSD ] || mkdir SSD + ./h5bench --debug --abort-on-failure --validate-mode ../samples/cache-write-1d-contig-contig.json - name: Upload artifact From 6ae1a8842efbed2ac1bf910f3e5eae8b2eb970fd Mon Sep 17 00:00:00 2001 From: Jean Luca Bez Date: Fri, 29 Apr 2022 16:11:06 -0700 Subject: [PATCH 34/55] Update h5bench develop CI --- .github/workflows/h5bench-hdf5-develop.yml | 28 +++++++++++----------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git 
a/.github/workflows/h5bench-hdf5-develop.yml b/.github/workflows/h5bench-hdf5-develop.yml index bd262a00..ca41b73e 100644 --- a/.github/workflows/h5bench-hdf5-develop.yml +++ b/.github/workflows/h5bench-hdf5-develop.yml @@ -112,19 +112,19 @@ jobs: export HDF5_ROOT=/opt/hdf5 export ABT_DIR=/opt/argobots export ASYNC_DIR=/opt/vol-async - export HDF5_VOL_DIR=/opt/vol-connectors + export CACHE_DIR=/opt/vol-cache - [ -e $HDF5_VOL_DIR ] || mkdir $HDF5_VOL_DIR - [ -e $HDF5_VOL_DIR/bin ] || mkdir $HDF5_VOL_DIR/bin - [ -e $HDF5_VOL_DIR/lib ] || mkdir $HDF5_VOL_DIR/lib - [ -e $HDF5_VOL_DIR/include ] || mkdir $HDF5_VOL_DIR/include + [ -e $CACHE_DIR ] || mkdir $CACHE_DIR + [ -e $CACHE_DIR/bin ] || mkdir $CACHE_DIR/bin + [ -e $CACHE_DIR/lib ] || mkdir $CACHE_DIR/lib + [ -e $CACHE_DIR/include ] || mkdir $CACHE_DIR/include - cp -r $ASYNC_DIR/lib/*.so $ASYNC_DIR/lib/*.a $HDF5_VOL_DIR/lib/ - cp -r $ASYNC_DIR/include/*.h $HDF5_VOL_DIR/include/ + cp -r $ASYNC_DIR/lib/*.so $ASYNC_DIR/lib/*.a $CACHE_DIR/lib/ + cp -r $ASYNC_DIR/include/*.h $CACHE_DIR/include/ - export LD_LIBRARY_PATH=$HDF5_VOL_DIR/lib:$HDF5_ROOT/lib:$ABT_DIR/lib:$LD_LIBRARY_PATH + export LD_LIBRARY_PATH=$CACHE_DIR/lib:$HDF5_ROOT/lib:$ABT_DIR/lib:$LD_LIBRARY_PATH - cd $HDF5_VOL_DIR/src + cd $CACHE_DIR/src make all @@ -163,7 +163,7 @@ jobs: export HDF5_ROOT=/opt/hdf5 export HDF5_HOME=/opt/hdf5 export PNETCDF_HOME=/opt/pnetcdf - export HDF5_VOL_HOME=/opt/vol-connectors + export CACHE_DIR=/opt/vol-cache mkdir build-cache cd build-cache @@ -171,7 +171,7 @@ jobs: cmake .. \ -DWITH_CACHE_VOL:BOOL=ON \ -DWITH_ASYNC_VOL:BOOL=ON \ - -DCMAKE_C_FLAGS="-I/$HDF5_VOL_HOME/include -L/$HDF5_VOL_HOME/lib" \ + -DCMAKE_C_FLAGS="-I/$CACHE_DIR/include -L/$CACHE_DIR/lib" \ -DH5BENCH_ALL=ON make -j 2 @@ -506,7 +506,7 @@ jobs: export HDF5_DIR=/opt/hdf5 export ABT_DIR=/opt/argobots export ASYNC_DIR=/opt/vol-async - export HDF5_VOL_HOME=/opt/vol-connectors + export CACHE_DIR=/opt/vol-cache cd build-cache @@ -523,7 +523,7 @@ jobs: export HDF5_DIR=/opt/hdf5 export ABT_DIR=/opt/argobots export ASYNC_DIR=/opt/vol-async - export HDF5_VOL_HOME=/opt/vol-connectors + export CACHE_DIR=/opt/vol-cache cd build-cache @@ -540,7 +540,7 @@ jobs: export HDF5_DIR=/opt/hdf5 export ABT_DIR=/opt/argobots export ASYNC_DIR=/opt/vol-async - export HDF5_VOL_HOME=/opt/vol-connectors + export CACHE_DIR=/opt/vol-cache cd build-cache From ce9dcc7f3998cd69d9a51994e87de68af2d0dbc7 Mon Sep 17 00:00:00 2001 From: Jean Luca Bez Date: Fri, 29 Apr 2022 16:38:08 -0700 Subject: [PATCH 35/55] Update h5bench develop CI --- .github/workflows/h5bench-hdf5-develop.yml | 28 +++++++++++----------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/.github/workflows/h5bench-hdf5-develop.yml b/.github/workflows/h5bench-hdf5-develop.yml index ca41b73e..992e3113 100644 --- a/.github/workflows/h5bench-hdf5-develop.yml +++ b/.github/workflows/h5bench-hdf5-develop.yml @@ -112,19 +112,19 @@ jobs: export HDF5_ROOT=/opt/hdf5 export ABT_DIR=/opt/argobots export ASYNC_DIR=/opt/vol-async - export CACHE_DIR=/opt/vol-cache + export HDF5_VOL_DIR=/opt/vol-cache - [ -e $CACHE_DIR ] || mkdir $CACHE_DIR - [ -e $CACHE_DIR/bin ] || mkdir $CACHE_DIR/bin - [ -e $CACHE_DIR/lib ] || mkdir $CACHE_DIR/lib - [ -e $CACHE_DIR/include ] || mkdir $CACHE_DIR/include + [ -e $HDF5_VOL_DIR ] || mkdir $HDF5_VOL_DIR + [ -e $HDF5_VOL_DIR/bin ] || mkdir $HDF5_VOL_DIR/bin + [ -e $HDF5_VOL_DIR/lib ] || mkdir $HDF5_VOL_DIR/lib + [ -e $HDF5_VOL_DIR/include ] || mkdir $HDF5_VOL_DIR/include - cp -r $ASYNC_DIR/lib/*.so $ASYNC_DIR/lib/*.a 
$CACHE_DIR/lib/ - cp -r $ASYNC_DIR/include/*.h $CACHE_DIR/include/ + cp -r $ASYNC_DIR/lib/*.so $ASYNC_DIR/lib/*.a $HDF5_VOL_DIR/lib/ + cp -r $ASYNC_DIR/include/*.h $HDF5_VOL_DIR/include/ - export LD_LIBRARY_PATH=$CACHE_DIR/lib:$HDF5_ROOT/lib:$ABT_DIR/lib:$LD_LIBRARY_PATH + export LD_LIBRARY_PATH=$HDF5_VOL_DIR/lib:$HDF5_ROOT/lib:$ABT_DIR/lib:$LD_LIBRARY_PATH - cd $CACHE_DIR/src + cd $HDF5_VOL_DIR/src make all @@ -163,7 +163,7 @@ jobs: export HDF5_ROOT=/opt/hdf5 export HDF5_HOME=/opt/hdf5 export PNETCDF_HOME=/opt/pnetcdf - export CACHE_DIR=/opt/vol-cache + export HDF5_VOL_DIR=/opt/vol-cache mkdir build-cache cd build-cache @@ -171,7 +171,7 @@ jobs: cmake .. \ -DWITH_CACHE_VOL:BOOL=ON \ -DWITH_ASYNC_VOL:BOOL=ON \ - -DCMAKE_C_FLAGS="-I/$CACHE_DIR/include -L/$CACHE_DIR/lib" \ + -DCMAKE_C_FLAGS="-I/$HDF5_VOL_DIR/include -L/$HDF5_VOL_DIR/lib" \ -DH5BENCH_ALL=ON make -j 2 @@ -506,7 +506,7 @@ jobs: export HDF5_DIR=/opt/hdf5 export ABT_DIR=/opt/argobots export ASYNC_DIR=/opt/vol-async - export CACHE_DIR=/opt/vol-cache + export HDF5_VOL_DIR=/opt/vol-cache cd build-cache @@ -523,7 +523,7 @@ jobs: export HDF5_DIR=/opt/hdf5 export ABT_DIR=/opt/argobots export ASYNC_DIR=/opt/vol-async - export CACHE_DIR=/opt/vol-cache + export HDF5_VOL_DIR=/opt/vol-cache cd build-cache @@ -540,7 +540,7 @@ jobs: export HDF5_DIR=/opt/hdf5 export ABT_DIR=/opt/argobots export ASYNC_DIR=/opt/vol-async - export CACHE_DIR=/opt/vol-cache + export HDF5_VOL_DIR=/opt/vol-cache cd build-cache From b7390f6b6698faa9d645fd46f73bfbeacee52440 Mon Sep 17 00:00:00 2001 From: Jean Luca Bez Date: Fri, 29 Apr 2022 16:53:32 -0700 Subject: [PATCH 36/55] Update h5bench develop CI --- .github/workflows/h5bench-hdf5-develop.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/h5bench-hdf5-develop.yml b/.github/workflows/h5bench-hdf5-develop.yml index 992e3113..380c1ac9 100644 --- a/.github/workflows/h5bench-hdf5-develop.yml +++ b/.github/workflows/h5bench-hdf5-develop.yml @@ -86,7 +86,7 @@ jobs: mkdir build cd build - cmake .. -DCMAKE_INSTALL_PREFIX=$ASYNC_DIR -DCMAKE_PREFIX_PATH=$HDF5_DIR + cmake .. -DCMAKE_INSTALL_PREFIX=$ASYNC_DIR -DCMAKE_PREFIX_PATH=$HDF5_DIR -DCMAKE_C_FLAGS="-fPIC" make make install From 95514ada6434d38d3d43269018910e5804df3430 Mon Sep 17 00:00:00 2001 From: Jean Luca Bez Date: Fri, 29 Apr 2022 19:30:34 -0700 Subject: [PATCH 37/55] Update h5bench develop CI --- .github/workflows/h5bench-hdf5-develop.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/h5bench-hdf5-develop.yml b/.github/workflows/h5bench-hdf5-develop.yml index 380c1ac9..362a2834 100644 --- a/.github/workflows/h5bench-hdf5-develop.yml +++ b/.github/workflows/h5bench-hdf5-develop.yml @@ -171,8 +171,7 @@ jobs: cmake .. 
\ -DWITH_CACHE_VOL:BOOL=ON \ -DWITH_ASYNC_VOL:BOOL=ON \ - -DCMAKE_C_FLAGS="-I/$HDF5_VOL_DIR/include -L/$HDF5_VOL_DIR/lib" \ - -DH5BENCH_ALL=ON + -DCMAKE_C_FLAGS="-I/$HDF5_VOL_DIR/include -L/$HDF5_VOL_DIR/lib" make -j 2 - name: Test h5bench SYNC write/read From f62b16b55de351ed69c89d0888a0d2eba39e8ff0 Mon Sep 17 00:00:00 2001 From: Jean Luca Bez Date: Sat, 30 Apr 2022 23:55:49 -0700 Subject: [PATCH 38/55] Update CI with all VOL-CACHE tests --- .../h5bench-hdf5-1.13.0-mpich-3.4.3.yml | 532 ++++++++++++------ .github/workflows/h5bench-hdf5-1.13.0.yml | 286 +++++++++- .github/workflows/h5bench-hdf5-develop.yml | 211 ++++++- 3 files changed, 848 insertions(+), 181 deletions(-) diff --git a/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml b/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml index 46d4aff1..3fff4543 100644 --- a/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml +++ b/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml @@ -11,6 +11,9 @@ jobs: container: image: jlbez/hdf5-1.13.0-mpich-3.4.3 timeout-minutes: 60 + env: + OMPI_ALLOW_RUN_AS_ROOT: 1 + OMPI_ALLOW_RUN_AS_ROOT_CONFIRM: 1 steps: - uses: actions/checkout@v2 @@ -19,7 +22,11 @@ jobs: - name: Dependencies run: | - git clone --recursive https://github.com/hpc-io/vol-async.git + # VOL-ASYNC + git clone --recursive https://github.com/hpc-io/vol-async.git /opt/vol-async + + # VOL-CACHE + git clone --recursive https://github.com/hpc-io/vol-cache.git /opt/vol-cache # PnetCDF wget https://parallel-netcdf.github.io/Release/pnetcdf-1.12.2.tar.gz @@ -35,73 +42,75 @@ jobs: ./configure --prefix=${PNETCDF_DIR} CC=mpicc - make -j 8 + make -j 2 make install - name: Build Argobots run: | - current="$PWD" - - export ABT_DIR=$current/vol-async/argobots + export ABT_DIR=/opt/argobots - cd $ABT_DIR + cd /opt/vol-async/argobots ./autogen.sh - ./configure --prefix=$ABT_DIR/install + ./configure --prefix=$ABT_DIR - make -j 8 + make -j 2 make install - name: Build VOL-ASYNC run: | - current="$PWD" - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async - cd $VOL_DIR - cd src - tail -n 48 Makefile.summit > Makefile + cd $ASYNC_DIR + mkdir build + cd build + + cmake .. 
-DCMAKE_INSTALL_PREFIX=$ASYNC_DIR -DCMAKE_PREFIX_PATH=$HDF5_DIR -DCMAKE_C_FLAGS="-fPIC" make + make install - - name: Build VOL-ASYNC Tests + - name: Test VOL-ASYNC run: | - current="$PWD" - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export ASYNC_DIR=$current/vol-async/src + export HDF5_HOME=$HDF5_DIR + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async - export LD_LIBRARY_PATH=$VOL_DIR/src:$HDF5_DIR/lib:$ABT_DIR/lib:$LD_LIBRARY_PATH - export HDF5_PLUGIN_PATH="$VOL_DIR/src" + export LD_LIBRARY_PATH=$ASYNC_DIR/lib:$HDF5_DIR/lib:$ABT_DIR/lib:$LD_LIBRARY_PATH + export HDF5_PLUGIN_PATH="$ASYNC_DIR/include" export HDF5_VOL_CONNECTOR="async under_vol=0;under_info={}" - cd $VOL_DIR/test - tail -n 47 Makefile.summit > Makefile - export LD_PRELOAD="$ABT_DIR/lib/libabt.so" - make -j 8 + cd $ASYNC_DIR/build + + export LD_PRELOAD=$ASYNC_DIR/lib/libh5async.so:$ABT_DIR/lib/libabt.so:$HDF5_DIR/lib/libhdf5.so - - name: Test VOL-ASYNC + ctest + + - name: Build VOL-CACHE run: | - current="$PWD" + export HDF5_ROOT=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + export HDF5_VOL_DIR=/opt/vol-cache - cd $current/vol-async/test - - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async + [ -e $HDF5_VOL_DIR ] || mkdir $HDF5_VOL_DIR + [ -e $HDF5_VOL_DIR/bin ] || mkdir $HDF5_VOL_DIR/bin + [ -e $HDF5_VOL_DIR/lib ] || mkdir $HDF5_VOL_DIR/lib + [ -e $HDF5_VOL_DIR/include ] || mkdir $HDF5_VOL_DIR/include - export LD_LIBRARY_PATH=$VOL_DIR/src:$HDF5_DIR/lib:$ABT_DIR/lib:$LD_LIBRARY_PATH - export HDF5_PLUGIN_PATH="$VOL_DIR/src" - export HDF5_VOL_CONNECTOR="async under_vol=0;under_info={}" - - make check + cp -r $ASYNC_DIR/lib/*.so $ASYNC_DIR/lib/*.a $HDF5_VOL_DIR/lib/ + cp -r $ASYNC_DIR/include/*.h $HDF5_VOL_DIR/include/ + + export LD_LIBRARY_PATH=$HDF5_VOL_DIR/lib:$HDF5_ROOT/lib:$ABT_DIR/lib:$LD_LIBRARY_PATH + + cd $HDF5_VOL_DIR/src + + make all - name: Build h5bench SYNC run: | - current="$PWD" export HDF5_HOME=/opt/hdf5 export PNETCDF_HOME=/opt/pnetcdf @@ -109,27 +118,42 @@ jobs: cd build-sync cmake .. \ - -DCMAKE_C_COMPILER=$HDF5_HOME/bin/h5pcc \ -DH5BENCH_ALL=ON - make + make -j 2 - name: Build h5bench ASYNC run: | - current="$PWD" - export HDF5_HOME=/opt/hdf5 export PNETCDF_HOME=/opt/pnetcdf - export ASYNC_HOME=$current/vol-async/src - + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + mkdir build-async cd build-async cmake .. \ -DWITH_ASYNC_VOL:BOOL=ON \ - -DCMAKE_C_FLAGS="-I/$current/vol-async/src -L/$current/vol-async/src" \ - -DCMAKE_C_COMPILER=$HDF5_HOME/bin/h5pcc \ + -DCMAKE_C_FLAGS="-I$ASYNC_HOME/include -L$ASYNC_HOME/lib" \ -DH5BENCH_ALL=ON - make + make -j 2 + + - name: Build h5bench CACHE + run: | + export HDF5_ROOT=/opt/hdf5 + export HDF5_HOME=/opt/hdf5 + export PNETCDF_HOME=/opt/pnetcdf + export HDF5_VOL_DIR=/opt/vol-cache + + mkdir build-cache + cd build-cache + + cmake .. 
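A small aside on the guarded mkdirs in the Build VOL-CACHE step above: since the run: blocks execute under bash, the four [ -e ... ] || mkdir ... lines collapse to a single idempotent call:

    mkdir -p "$HDF5_VOL_DIR"/{bin,lib,include}

-p creates any missing parents and is a no-op for directories that already exist, which is exactly what the [ -e ... ] guards are checking for.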
\ + -DWITH_CACHE_VOL:BOOL=ON \ + -DWITH_ASYNC_VOL:BOOL=ON \ + -DCMAKE_C_FLAGS="-I/$HDF5_VOL_DIR/include -L/$HDF5_VOL_DIR/lib" + make -j 2 - name: Configure for MPICH run: | @@ -286,14 +310,9 @@ jobs: - name: Test h5bench ASYNC write/read run: | - current="$PWD" - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export ASYNC_DIR=$VOL_DIR/src - - export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async cd build-async export PATH=$(pwd):$PATH @@ -304,14 +323,9 @@ jobs: - name: Test h5bench ASYNC write 1D contiguous (memory) strided (file) run: | - current="$PWD" - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export ASYNC_DIR=$VOL_DIR/src - - export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async cd build-async export PATH=$(pwd):$PATH @@ -322,14 +336,9 @@ jobs: - name: Test h5bench ASYNC write 1D contiguous (memory) contiguous (file) run: | - current="$PWD" - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export ASYNC_DIR=$VOL_DIR/src - - export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async cd build-async export PATH=$(pwd):$PATH @@ -340,14 +349,9 @@ jobs: - name: Test h5bench ASYNC write 1D contiguous (memory) interleaved (file) run: | - current="$PWD" - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export ASYNC_DIR=$VOL_DIR/src - - export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async cd build-async export PATH=$(pwd):$PATH @@ -358,14 +362,9 @@ jobs: - name: Test h5bench ASYNC write 1D interleaved (memory) contiguous (file) run: | - current="$PWD" - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export ASYNC_DIR=$VOL_DIR/src - - export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async cd build-async export PATH=$(pwd):$PATH @@ -376,14 +375,9 @@ jobs: - name: Test h5bench ASYNC write 1D interleaved (memory) interleaved (file) run: | - current="$PWD" - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export ASYNC_DIR=$VOL_DIR/src - - export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async cd build-async export PATH=$(pwd):$PATH @@ -394,14 +388,9 @@ jobs: - name: Test h5bench ASYNC write 2D contiguous (memory) contiguous (file) run: | - current="$PWD" - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export ASYNC_DIR=$VOL_DIR/src - - export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async cd build-async export PATH=$(pwd):$PATH @@ -412,14 +401,9 @@ jobs: - name: Test h5bench ASYNC write 2D contiguous (memory) interleaved (file) run: | - current="$PWD" - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export 
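For readers skimming the long run of rewritten ASYNC steps: the per-step LD_PRELOAD exports are dropped, and the exports that remain only record install locations. When reproducing these benchmarks outside CI, the async connector still has to be made resolvable the way the Test VOL-ASYNC step earlier in this patch does it; reproduced here for reference (the export lines are from the patch, the comments are not):

    export LD_LIBRARY_PATH=$ASYNC_DIR/lib:$HDF5_DIR/lib:$ABT_DIR/lib:$LD_LIBRARY_PATH
    export HDF5_PLUGIN_PATH="$ASYNC_DIR/include"   # plugins are conventionally searched in lib/;
                                                   # this works here because LD_PRELOAD below maps
                                                   # the connector into the process anyway
    export HDF5_VOL_CONNECTOR="async under_vol=0;under_info={}"
    export LD_PRELOAD=$ASYNC_DIR/lib/libh5async.so:$ABT_DIR/lib/libabt.so:$HDF5_DIR/lib/libhdf5.so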
ASYNC_DIR=$VOL_DIR/src - - export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async cd build-async export PATH=$(pwd):$PATH @@ -430,14 +414,9 @@ jobs: - name: Test h5bench ASYNC write 2D interleaved (memory) contiguous (file) run: | - current="$PWD" - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export ASYNC_DIR=$VOL_DIR/src - - export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async cd build-async export PATH=$(pwd):$PATH @@ -448,14 +427,9 @@ jobs: - name: Test h5bench ASYNC write 2D interleaved (memory) interleaved (file) run: | - current="$PWD" - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export ASYNC_DIR=$VOL_DIR/src - - export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async cd build-async export PATH=$(pwd):$PATH @@ -466,14 +440,9 @@ jobs: - name: Test h5bench ASYNC write 3D contiguous (memory) contiguous (file) run: | - current="$PWD" - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export ASYNC_DIR=$VOL_DIR/src - - export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async cd build-async export PATH=$(pwd):$PATH @@ -484,14 +453,9 @@ jobs: - name: Test h5bench ASYNC read 1D contiguous (memory) contiguous (file) full run: | - current="$PWD" - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export ASYNC_DIR=$VOL_DIR/src - - export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async cd build-async export PATH=$(pwd):$PATH @@ -502,14 +466,9 @@ jobs: - name: Test h5bench ASYNC read 1D contiguous (memory) contiguous (file) partial run: | - current="$PWD" - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export ASYNC_DIR=$VOL_DIR/src - - export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async cd build-async export PATH=$(pwd):$PATH @@ -520,14 +479,9 @@ jobs: - name: Test h5bench ASYNC read 1D contiguous (memory) contiguous (file) strided run: | - current="$PWD" - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export ASYNC_DIR=$VOL_DIR/src - - export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async cd build-async export PATH=$(pwd):$PATH @@ -538,14 +492,9 @@ jobs: - name: Test h5bench ASYNC read 2D contiguous (memory) contiguous (file) full run: | - current="$PWD" - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export ASYNC_DIR=$VOL_DIR/src - - export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async cd build-async export PATH=$(pwd):$PATH @@ -556,14 +505,9 @@ jobs: - name: Test h5bench ASYNC read 3D contiguous (memory) contiguous (file) full run: | - current="$PWD" - export HDF5_DIR=/opt/hdf5 - export 
ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export ASYNC_DIR=$VOL_DIR/src - - export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async cd build-async export PATH=$(pwd):$PATH @@ -574,14 +518,9 @@ jobs: - name: Test h5bench ASYNC amrex run: | - current="$PWD" - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export ASYNC_DIR=$VOL_DIR/src - - export LD_PRELOAD="$ABT_DIR/lib/libabt.so $ASYNC_DIR/libh5async.so" + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async cd build-async export PATH=$(pwd):$PATH @@ -590,6 +529,263 @@ jobs: ./h5bench --debug --abort-on-failure ../samples/async-amrex.json + + - name: Test h5bench CACHE write 1D contiguous (memory) strided (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + export HDF5_VOL_DIR=/opt/vol-cache + + cd build-cache + export PATH=$(pwd):$PATH + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-contig-strided.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-strided.json + + - name: Test h5bench CACHE write 1D contiguous (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + export PATH=$(pwd):$PATH + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-contig-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig.json + + - name: Test h5bench CACHE write 1D contiguous (memory) interleaved (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + export PATH=$(pwd):$PATH + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-contig-interleaved.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-interleaved.json + + - name: Test h5bench CACHE write 1D interleaved (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + export PATH=$(pwd):$PATH + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-interleaved-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-interleaved-contig.json + + - name: Test h5bench CACHE write 1D interleaved (memory) interleaved (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export 
ASYNC_DIR=/opt/vol-async + + cd build-cache + export PATH=$(pwd):$PATH + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-interleaved-interleaved.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-interleaved-interleaved.json + + - name: Test h5bench CACHE write 2D contiguous (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + export PATH=$(pwd):$PATH + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-2d-contig-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-contig.json + + - name: Test h5bench CACHE write 2D contiguous (memory) interleaved (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + export PATH=$(pwd):$PATH + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-2d-contig-interleaved.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-interleaved.json + + - name: Test h5bench CACHE write 2D interleaved (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + export PATH=$(pwd):$PATH + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-2d-interleaved-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-interleaved-contig.json + + - name: Test h5bench CACHE write 2D interleaved (memory) interleaved (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + export PATH=$(pwd):$PATH + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-2d-interleaved-interleaved.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-interleaved-interleaved.json + + - name: Test h5bench CACHE write 3D contiguous (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + export PATH=$(pwd):$PATH + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + 
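The printf one-liner that every CACHE step writes to cache_1.cfg is hard to read inside a diff. Unpacked, the generated file is just five key/value lines (contents verbatim from the printf above):

    HDF5_CACHE_STORAGE_TYPE: SSD
    HDF5_CACHE_STORAGE_PATH: SSD
    HDF5_CACHE_STORAGE_SCOPE: LOCAL
    HDF5_CACHE_STORAGE_SIZE: 128755813888
    HDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184

The >& redirection is a bashism that captures stdout and stderr together; a plain > would behave identically here, since printf writes nothing to stderr.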
python3 ../samples/update.py ../samples/async-write-3d-contig-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-3d-contig-contig.json + + - name: Test h5bench CACHE read 1D contiguous (memory) contiguous (file) full + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + export PATH=$(pwd):$PATH + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-contig-contig-read-full.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig-read-full.json + + - name: Test h5bench CACHE read 1D contiguous (memory) contiguous (file) partial + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + export PATH=$(pwd):$PATH + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-contig-contig-read-partial.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig-read-partial.json + + - name: Test h5bench CACHE read 1D contiguous (memory) contiguous (file) strided + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + export PATH=$(pwd):$PATH + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-contig-contig-read-strided.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig-read-strided.json + + - name: Test h5bench CACHE read 2D contiguous (memory) contiguous (file) full + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + export PATH=$(pwd):$PATH + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-2d-contig-contig-read-full.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-contig-read-full.json + + - name: Test h5bench CACHE read 3D contiguous (memory) contiguous (file) full + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + export PATH=$(pwd):$PATH + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-2d-contig-contig-read-full.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-contig-read-full.json + - name: Upload artifact if: always() uses: 
actions/upload-artifact@v2 diff --git a/.github/workflows/h5bench-hdf5-1.13.0.yml b/.github/workflows/h5bench-hdf5-1.13.0.yml index 2becb30c..2c2fb38a 100644 --- a/.github/workflows/h5bench-hdf5-1.13.0.yml +++ b/.github/workflows/h5bench-hdf5-1.13.0.yml @@ -25,6 +25,9 @@ jobs: # VOL-ASYNC git clone --recursive https://github.com/hpc-io/vol-async.git /opt/vol-async + # VOL-CACHE + git clone --recursive https://github.com/hpc-io/vol-cache.git /opt/vol-cache + # PnetCDF wget https://parallel-netcdf.github.io/Release/pnetcdf-1.12.2.tar.gz tar -zxf pnetcdf-1.12.2.tar.gz @@ -64,7 +67,7 @@ jobs: mkdir build cd build - cmake .. -DCMAKE_INSTALL_PREFIX=$ASYNC_DIR -DCMAKE_PREFIX_PATH=$HDF5_DIR + cmake .. -DCMAKE_INSTALL_PREFIX=$ASYNC_DIR -DCMAKE_PREFIX_PATH=$HDF5_DIR -DCMAKE_C_FLAGS="-fPIC" make make install @@ -85,6 +88,27 @@ jobs: ctest + - name: Build VOL-CACHE + run: | + export HDF5_ROOT=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + export HDF5_VOL_DIR=/opt/vol-cache + + [ -e $HDF5_VOL_DIR ] || mkdir $HDF5_VOL_DIR + [ -e $HDF5_VOL_DIR/bin ] || mkdir $HDF5_VOL_DIR/bin + [ -e $HDF5_VOL_DIR/lib ] || mkdir $HDF5_VOL_DIR/lib + [ -e $HDF5_VOL_DIR/include ] || mkdir $HDF5_VOL_DIR/include + + cp -r $ASYNC_DIR/lib/*.so $ASYNC_DIR/lib/*.a $HDF5_VOL_DIR/lib/ + cp -r $ASYNC_DIR/include/*.h $HDF5_VOL_DIR/include/ + + export LD_LIBRARY_PATH=$HDF5_VOL_DIR/lib:$HDF5_ROOT/lib:$ABT_DIR/lib:$LD_LIBRARY_PATH + + cd $HDF5_VOL_DIR/src + + make all + - name: Build h5bench SYNC run: | export HDF5_HOME=/opt/hdf5 @@ -115,6 +139,22 @@ jobs: -DH5BENCH_ALL=ON make -j 2 + - name: Build h5bench CACHE + run: | + export HDF5_ROOT=/opt/hdf5 + export HDF5_HOME=/opt/hdf5 + export PNETCDF_HOME=/opt/pnetcdf + export HDF5_VOL_DIR=/opt/vol-cache + + mkdir build-cache + cd build-cache + + cmake .. 
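The -fPIC appended to the vol-async CMake line above is presumably needed because the Cache VOL links those objects into its own shared connector library, and non-position-independent objects cannot be folded into a shared object on x86-64. The same request can be expressed through CMake's portable switch instead of a raw C flag; a sketch of the equivalent invocation (standard CMake behavior, not what the workflow literally runs):

    cmake .. \
      -DCMAKE_INSTALL_PREFIX=$ASYNC_DIR \
      -DCMAKE_PREFIX_PATH=$HDF5_DIR \
      -DCMAKE_POSITION_INDEPENDENT_CODE=ON   # same intent as -DCMAKE_C_FLAGS="-fPIC"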
\ + -DWITH_CACHE_VOL:BOOL=ON \ + -DWITH_ASYNC_VOL:BOOL=ON \ + -DCMAKE_C_FLAGS="-I/$HDF5_VOL_DIR/include -L/$HDF5_VOL_DIR/lib" + make -j 2 + - name: Test h5bench SYNC write/read run: | cd build-sync @@ -323,8 +363,6 @@ jobs: - name: Test h5bench ASYNC write 2D contiguous (memory) interleaved (file) run: | - current="$PWD" - export HDF5_DIR=/opt/hdf5 export ABT_DIR=/opt/argobots export ASYNC_DIR=/opt/vol-async @@ -443,6 +481,248 @@ jobs: ./h5bench --debug --abort-on-failure ../samples/async-amrex.json + + - name: Test h5bench CACHE write 1D contiguous (memory) strided (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + export HDF5_VOL_DIR=/opt/vol-cache + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-contig-strided.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-strided.json + + - name: Test h5bench CACHE write 1D contiguous (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-contig-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig.json + + - name: Test h5bench CACHE write 1D contiguous (memory) interleaved (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-contig-interleaved.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-interleaved.json + + - name: Test h5bench CACHE write 1D interleaved (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-interleaved-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-interleaved-contig.json + + - name: Test h5bench CACHE write 1D interleaved (memory) interleaved (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-interleaved-interleaved.json + + ./h5bench --debug 
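Each of these CACHE test steps repeats the same four-line body with only the sample JSON changing. If maintaining that by hand becomes painful, the body can be factored into a small bash function; a sketch, where run_cache_case is a name invented for illustration and not defined anywhere in the repository:

    run_cache_case () {
        # $1 = sample JSON relative to the build directory
        printf 'HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n' > cache_1.cfg
        [ -e SSD ] || mkdir SSD
        python3 ../samples/update.py "$1"
        ./h5bench --debug --abort-on-failure --validate-mode "$1"
    }
    run_cache_case ../samples/async-write-1d-contig-strided.json

A reusable composite action would achieve the same deduplication at the workflow level rather than inside each run: block.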
--abort-on-failure --validate-mode ../samples/async-write-1d-interleaved-interleaved.json + + - name: Test h5bench CACHE write 2D contiguous (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-2d-contig-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-contig.json + + - name: Test h5bench CACHE write 2D contiguous (memory) interleaved (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-2d-contig-interleaved.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-interleaved.json + + - name: Test h5bench CACHE write 2D interleaved (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-2d-interleaved-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-interleaved-contig.json + + - name: Test h5bench CACHE write 2D interleaved (memory) interleaved (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-2d-interleaved-interleaved.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-interleaved-interleaved.json + + - name: Test h5bench CACHE write 3D contiguous (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-3d-contig-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-3d-contig-contig.json + + - name: Test h5bench CACHE read 1D contiguous (memory) contiguous (file) full + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 
128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-contig-contig-read-full.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig-read-full.json + + - name: Test h5bench CACHE read 1D contiguous (memory) contiguous (file) partial + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-contig-contig-read-partial.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig-read-partial.json + + - name: Test h5bench CACHE read 1D contiguous (memory) contiguous (file) strided + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-contig-contig-read-strided.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig-read-strided.json + + - name: Test h5bench CACHE read 2D contiguous (memory) contiguous (file) full + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-2d-contig-contig-read-full.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-contig-read-full.json + + - name: Test h5bench CACHE read 3D contiguous (memory) contiguous (file) full + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-2d-contig-contig-read-full.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-contig-read-full.json + - name: Upload artifact if: always() uses: actions/upload-artifact@v2 diff --git a/.github/workflows/h5bench-hdf5-develop.yml b/.github/workflows/h5bench-hdf5-develop.yml index 362a2834..a9c0bbd1 100644 --- a/.github/workflows/h5bench-hdf5-develop.yml +++ b/.github/workflows/h5bench-hdf5-develop.yml @@ -500,7 +500,8 @@ jobs: ./h5bench --debug --abort-on-failure ../samples/async-amrex.json - - name: Test h5bench CACHE write 3D contiguous (memory) contiguous (file) + + - name: Test h5bench CACHE write 1D contiguous (memory) strided (file) run: | export HDF5_DIR=/opt/hdf5 export ABT_DIR=/opt/argobots @@ -513,43 +514,233 @@ jobs: [ -e SSD ] || mkdir SSD - python3 ../samples/update.py 
../samples/cache-write-3d-contig-contig.json + python3 ../samples/update.py ../samples/async-write-1d-contig-strided.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-strided.json + + - name: Test h5bench CACHE write 1D contiguous (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-contig-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig.json + + - name: Test h5bench CACHE write 1D contiguous (memory) interleaved (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-contig-interleaved.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-interleaved.json + + - name: Test h5bench CACHE write 1D interleaved (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-interleaved-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-interleaved-contig.json + + - name: Test h5bench CACHE write 1D interleaved (memory) interleaved (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg - ./h5bench --debug --abort-on-failure --validate-mode ../samples/cache-write-3d-contig-contig.json + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-interleaved-interleaved.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-interleaved-interleaved.json - name: Test h5bench CACHE write 2D contiguous (memory) contiguous (file) run: | export HDF5_DIR=/opt/hdf5 export ABT_DIR=/opt/argobots export ASYNC_DIR=/opt/vol-async - export HDF5_VOL_DIR=/opt/vol-cache + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-2d-contig-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-contig.json + + - name: Test h5bench CACHE write 2D contiguous (memory) interleaved (file) + run: | + export HDF5_DIR=/opt/hdf5 + 
export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-2d-contig-interleaved.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-interleaved.json + + - name: Test h5bench CACHE write 2D interleaved (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-2d-interleaved-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-interleaved-contig.json + + - name: Test h5bench CACHE write 2D interleaved (memory) interleaved (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + cd build-cache printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg - python3 ../samples/update.py ../samples/cache-write-2d-contig-contig.json + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-2d-interleaved-interleaved.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-interleaved-interleaved.json + + - name: Test h5bench CACHE write 3D contiguous (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-3d-contig-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-3d-contig-contig.json + + - name: Test h5bench CACHE read 1D contiguous (memory) contiguous (file) full + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg - ./h5bench --debug --abort-on-failure --validate-mode ../samples/cache-write-2d-contig-contig.json + [ -e SSD ] || mkdir SSD - - name: Test h5bench CACHE write 1D contiguous (memory) contiguous (file) + python3 ../samples/update.py ../samples/async-write-1d-contig-contig-read-full.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig-read-full.json + + - name: Test h5bench CACHE read 1D contiguous (memory) contiguous (file) partial run: | export HDF5_DIR=/opt/hdf5 export ABT_DIR=/opt/argobots export ASYNC_DIR=/opt/vol-async - export HDF5_VOL_DIR=/opt/vol-cache + + cd build-cache + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: 
SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-contig-contig-read-partial.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig-read-partial.json + + - name: Test h5bench CACHE read 1D contiguous (memory) contiguous (file) strided + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + cd build-cache printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg - python3 ../samples/update.py ../samples/cache-write-1d-contig-contig.json + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-contig-contig-read-strided.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig-read-strided.json + + - name: Test h5bench CACHE read 2D contiguous (memory) contiguous (file) full + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-2d-contig-contig-read-full.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-contig-read-full.json + + - name: Test h5bench CACHE read 3D contiguous (memory) contiguous (file) full + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg - ./h5bench --debug --abort-on-failure --validate-mode ../samples/cache-write-1d-contig-contig.json + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-2d-contig-contig-read-full.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-contig-read-full.json - name: Upload artifact if: always() From 03a3b7ed884edef08747eb9a9cf988a33dbc1fe6 Mon Sep 17 00:00:00 2001 From: Jean Luca Bez Date: Sun, 1 May 2022 00:29:45 -0700 Subject: [PATCH 39/55] Update CI with all VOL-CACHE tests --- .../h5bench-hdf5-1.13.0-mpich-3.4.3.yml | 299 +----------------- .github/workflows/h5bench-hdf5-1.13.0.yml | 286 +---------------- 2 files changed, 4 insertions(+), 581 deletions(-) diff --git a/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml b/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml index 3fff4543..7bcc62d3 100644 --- a/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml +++ b/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml @@ -25,9 +25,6 @@ jobs: # VOL-ASYNC git clone --recursive https://github.com/hpc-io/vol-async.git /opt/vol-async - # VOL-CACHE - git clone --recursive https://github.com/hpc-io/vol-cache.git /opt/vol-cache - # PnetCDF wget https://parallel-netcdf.github.io/Release/pnetcdf-1.12.2.tar.gz tar -zxf pnetcdf-1.12.2.tar.gz @@ -88,27 +85,6 @@ jobs: ctest - - name: Build VOL-CACHE - run: | - export HDF5_ROOT=/opt/hdf5 - export 
ABT_DIR=/opt/argobots - export ASYNC_DIR=/opt/vol-async - export HDF5_VOL_DIR=/opt/vol-cache - - [ -e $HDF5_VOL_DIR ] || mkdir $HDF5_VOL_DIR - [ -e $HDF5_VOL_DIR/bin ] || mkdir $HDF5_VOL_DIR/bin - [ -e $HDF5_VOL_DIR/lib ] || mkdir $HDF5_VOL_DIR/lib - [ -e $HDF5_VOL_DIR/include ] || mkdir $HDF5_VOL_DIR/include - - cp -r $ASYNC_DIR/lib/*.so $ASYNC_DIR/lib/*.a $HDF5_VOL_DIR/lib/ - cp -r $ASYNC_DIR/include/*.h $HDF5_VOL_DIR/include/ - - export LD_LIBRARY_PATH=$HDF5_VOL_DIR/lib:$HDF5_ROOT/lib:$ABT_DIR/lib:$LD_LIBRARY_PATH - - cd $HDF5_VOL_DIR/src - - make all - - name: Build h5bench SYNC run: | export HDF5_HOME=/opt/hdf5 @@ -137,23 +113,7 @@ jobs: -DWITH_ASYNC_VOL:BOOL=ON \ -DCMAKE_C_FLAGS="-I$ASYNC_HOME/include -L$ASYNC_HOME/lib" \ -DH5BENCH_ALL=ON - make -j 2 - - - name: Build h5bench CACHE - run: | - export HDF5_ROOT=/opt/hdf5 - export HDF5_HOME=/opt/hdf5 - export PNETCDF_HOME=/opt/pnetcdf - export HDF5_VOL_DIR=/opt/vol-cache - - mkdir build-cache - cd build-cache - - cmake .. \ - -DWITH_CACHE_VOL:BOOL=ON \ - -DWITH_ASYNC_VOL:BOOL=ON \ - -DCMAKE_C_FLAGS="-I/$HDF5_VOL_DIR/include -L/$HDF5_VOL_DIR/lib" - make -j 2 + make - name: Configure for MPICH run: | @@ -529,263 +489,6 @@ jobs: ./h5bench --debug --abort-on-failure ../samples/async-amrex.json - - - name: Test h5bench CACHE write 1D contiguous (memory) strided (file) - run: | - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=/opt/argobots - export ASYNC_DIR=/opt/vol-async - export HDF5_VOL_DIR=/opt/vol-cache - - cd build-cache - export PATH=$(pwd):$PATH - - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg - - [ -e SSD ] || mkdir SSD - - python3 ../samples/update.py ../samples/async-write-1d-contig-strided.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-strided.json - - - name: Test h5bench CACHE write 1D contiguous (memory) contiguous (file) - run: | - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=/opt/argobots - export ASYNC_DIR=/opt/vol-async - - cd build-cache - export PATH=$(pwd):$PATH - - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg - - [ -e SSD ] || mkdir SSD - - python3 ../samples/update.py ../samples/async-write-1d-contig-contig.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig.json - - - name: Test h5bench CACHE write 1D contiguous (memory) interleaved (file) - run: | - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=/opt/argobots - export ASYNC_DIR=/opt/vol-async - - cd build-cache - export PATH=$(pwd):$PATH - - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg - - [ -e SSD ] || mkdir SSD - - python3 ../samples/update.py ../samples/async-write-1d-contig-interleaved.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-interleaved.json - - - name: Test h5bench CACHE write 1D interleaved (memory) contiguous (file) - run: | - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=/opt/argobots - export ASYNC_DIR=/opt/vol-async - - cd build-cache - export PATH=$(pwd):$PATH - - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: 
LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg - - [ -e SSD ] || mkdir SSD - - python3 ../samples/update.py ../samples/async-write-1d-interleaved-contig.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-interleaved-contig.json - - - name: Test h5bench CACHE write 1D interleaved (memory) interleaved (file) - run: | - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=/opt/argobots - export ASYNC_DIR=/opt/vol-async - - cd build-cache - export PATH=$(pwd):$PATH - - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg - - [ -e SSD ] || mkdir SSD - - python3 ../samples/update.py ../samples/async-write-1d-interleaved-interleaved.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-interleaved-interleaved.json - - - name: Test h5bench CACHE write 2D contiguous (memory) contiguous (file) - run: | - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=/opt/argobots - export ASYNC_DIR=/opt/vol-async - - cd build-cache - export PATH=$(pwd):$PATH - - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg - - [ -e SSD ] || mkdir SSD - - python3 ../samples/update.py ../samples/async-write-2d-contig-contig.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-contig.json - - - name: Test h5bench CACHE write 2D contiguous (memory) interleaved (file) - run: | - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=/opt/argobots - export ASYNC_DIR=/opt/vol-async - - cd build-cache - export PATH=$(pwd):$PATH - - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg - - [ -e SSD ] || mkdir SSD - - python3 ../samples/update.py ../samples/async-write-2d-contig-interleaved.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-interleaved.json - - - name: Test h5bench CACHE write 2D interleaved (memory) contiguous (file) - run: | - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=/opt/argobots - export ASYNC_DIR=/opt/vol-async - - cd build-cache - export PATH=$(pwd):$PATH - - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg - - [ -e SSD ] || mkdir SSD - - python3 ../samples/update.py ../samples/async-write-2d-interleaved-contig.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-interleaved-contig.json - - - name: Test h5bench CACHE write 2D interleaved (memory) interleaved (file) - run: | - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=/opt/argobots - export ASYNC_DIR=/opt/vol-async - - cd build-cache - export PATH=$(pwd):$PATH - - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg - - [ -e SSD ] || mkdir SSD - - python3 ../samples/update.py ../samples/async-write-2d-interleaved-interleaved.json - - ./h5bench --debug --abort-on-failure --validate-mode 
../samples/async-write-2d-interleaved-interleaved.json - - - name: Test h5bench CACHE write 3D contiguous (memory) contiguous (file) - run: | - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=/opt/argobots - export ASYNC_DIR=/opt/vol-async - - cd build-cache - export PATH=$(pwd):$PATH - - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg - - [ -e SSD ] || mkdir SSD - - python3 ../samples/update.py ../samples/async-write-3d-contig-contig.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-3d-contig-contig.json - - - name: Test h5bench CACHE read 1D contiguous (memory) contiguous (file) full - run: | - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=/opt/argobots - export ASYNC_DIR=/opt/vol-async - - cd build-cache - export PATH=$(pwd):$PATH - - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg - - [ -e SSD ] || mkdir SSD - - python3 ../samples/update.py ../samples/async-write-1d-contig-contig-read-full.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig-read-full.json - - - name: Test h5bench CACHE read 1D contiguous (memory) contiguous (file) partial - run: | - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=/opt/argobots - export ASYNC_DIR=/opt/vol-async - - cd build-cache - export PATH=$(pwd):$PATH - - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg - - [ -e SSD ] || mkdir SSD - - python3 ../samples/update.py ../samples/async-write-1d-contig-contig-read-partial.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig-read-partial.json - - - name: Test h5bench CACHE read 1D contiguous (memory) contiguous (file) strided - run: | - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=/opt/argobots - export ASYNC_DIR=/opt/vol-async - - cd build-cache - export PATH=$(pwd):$PATH - - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg - - [ -e SSD ] || mkdir SSD - - python3 ../samples/update.py ../samples/async-write-1d-contig-contig-read-strided.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig-read-strided.json - - - name: Test h5bench CACHE read 2D contiguous (memory) contiguous (file) full - run: | - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=/opt/argobots - export ASYNC_DIR=/opt/vol-async - - cd build-cache - export PATH=$(pwd):$PATH - - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg - - [ -e SSD ] || mkdir SSD - - python3 ../samples/update.py ../samples/async-write-2d-contig-contig-read-full.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-contig-read-full.json - - - name: Test h5bench CACHE read 3D contiguous (memory) contiguous (file) full - run: | - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=/opt/argobots - export ASYNC_DIR=/opt/vol-async - - cd build-cache - 
export PATH=$(pwd):$PATH - - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg - - [ -e SSD ] || mkdir SSD - - python3 ../samples/update.py ../samples/async-write-2d-contig-contig-read-full.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-contig-read-full.json - - name: Upload artifact if: always() uses: actions/upload-artifact@v2 diff --git a/.github/workflows/h5bench-hdf5-1.13.0.yml b/.github/workflows/h5bench-hdf5-1.13.0.yml index 2c2fb38a..2becb30c 100644 --- a/.github/workflows/h5bench-hdf5-1.13.0.yml +++ b/.github/workflows/h5bench-hdf5-1.13.0.yml @@ -25,9 +25,6 @@ jobs: # VOL-ASYNC git clone --recursive https://github.com/hpc-io/vol-async.git /opt/vol-async - # VOL-CACHE - git clone --recursive https://github.com/hpc-io/vol-cache.git /opt/vol-cache - # PnetCDF wget https://parallel-netcdf.github.io/Release/pnetcdf-1.12.2.tar.gz tar -zxf pnetcdf-1.12.2.tar.gz @@ -67,7 +64,7 @@ jobs: mkdir build cd build - cmake .. -DCMAKE_INSTALL_PREFIX=$ASYNC_DIR -DCMAKE_PREFIX_PATH=$HDF5_DIR -DCMAKE_C_FLAGS="-fPIC" + cmake .. -DCMAKE_INSTALL_PREFIX=$ASYNC_DIR -DCMAKE_PREFIX_PATH=$HDF5_DIR make make install @@ -88,27 +85,6 @@ jobs: ctest - - name: Build VOL-CACHE - run: | - export HDF5_ROOT=/opt/hdf5 - export ABT_DIR=/opt/argobots - export ASYNC_DIR=/opt/vol-async - export HDF5_VOL_DIR=/opt/vol-cache - - [ -e $HDF5_VOL_DIR ] || mkdir $HDF5_VOL_DIR - [ -e $HDF5_VOL_DIR/bin ] || mkdir $HDF5_VOL_DIR/bin - [ -e $HDF5_VOL_DIR/lib ] || mkdir $HDF5_VOL_DIR/lib - [ -e $HDF5_VOL_DIR/include ] || mkdir $HDF5_VOL_DIR/include - - cp -r $ASYNC_DIR/lib/*.so $ASYNC_DIR/lib/*.a $HDF5_VOL_DIR/lib/ - cp -r $ASYNC_DIR/include/*.h $HDF5_VOL_DIR/include/ - - export LD_LIBRARY_PATH=$HDF5_VOL_DIR/lib:$HDF5_ROOT/lib:$ABT_DIR/lib:$LD_LIBRARY_PATH - - cd $HDF5_VOL_DIR/src - - make all - - name: Build h5bench SYNC run: | export HDF5_HOME=/opt/hdf5 @@ -139,22 +115,6 @@ jobs: -DH5BENCH_ALL=ON make -j 2 - - name: Build h5bench CACHE - run: | - export HDF5_ROOT=/opt/hdf5 - export HDF5_HOME=/opt/hdf5 - export PNETCDF_HOME=/opt/pnetcdf - export HDF5_VOL_DIR=/opt/vol-cache - - mkdir build-cache - cd build-cache - - cmake .. 
\ - -DWITH_CACHE_VOL:BOOL=ON \ - -DWITH_ASYNC_VOL:BOOL=ON \ - -DCMAKE_C_FLAGS="-I/$HDF5_VOL_DIR/include -L/$HDF5_VOL_DIR/lib" - make -j 2 - - name: Test h5bench SYNC write/read run: | cd build-sync @@ -363,6 +323,8 @@ jobs: - name: Test h5bench ASYNC write 2D contiguous (memory) interleaved (file) run: | + current="$PWD" + export HDF5_DIR=/opt/hdf5 export ABT_DIR=/opt/argobots export ASYNC_DIR=/opt/vol-async @@ -481,248 +443,6 @@ jobs: ./h5bench --debug --abort-on-failure ../samples/async-amrex.json - - - name: Test h5bench CACHE write 1D contiguous (memory) strided (file) - run: | - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=/opt/argobots - export ASYNC_DIR=/opt/vol-async - export HDF5_VOL_DIR=/opt/vol-cache - - cd build-cache - - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg - - [ -e SSD ] || mkdir SSD - - python3 ../samples/update.py ../samples/async-write-1d-contig-strided.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-strided.json - - - name: Test h5bench CACHE write 1D contiguous (memory) contiguous (file) - run: | - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=/opt/argobots - export ASYNC_DIR=/opt/vol-async - - cd build-cache - - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg - - [ -e SSD ] || mkdir SSD - - python3 ../samples/update.py ../samples/async-write-1d-contig-contig.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig.json - - - name: Test h5bench CACHE write 1D contiguous (memory) interleaved (file) - run: | - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=/opt/argobots - export ASYNC_DIR=/opt/vol-async - - cd build-cache - - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg - - [ -e SSD ] || mkdir SSD - - python3 ../samples/update.py ../samples/async-write-1d-contig-interleaved.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-interleaved.json - - - name: Test h5bench CACHE write 1D interleaved (memory) contiguous (file) - run: | - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=/opt/argobots - export ASYNC_DIR=/opt/vol-async - - cd build-cache - - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg - - [ -e SSD ] || mkdir SSD - - python3 ../samples/update.py ../samples/async-write-1d-interleaved-contig.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-interleaved-contig.json - - - name: Test h5bench CACHE write 1D interleaved (memory) interleaved (file) - run: | - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=/opt/argobots - export ASYNC_DIR=/opt/vol-async - - cd build-cache - - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg - - [ -e SSD ] || mkdir SSD - - python3 ../samples/update.py ../samples/async-write-1d-interleaved-interleaved.json - - ./h5bench --debug 
--abort-on-failure --validate-mode ../samples/async-write-1d-interleaved-interleaved.json - - - name: Test h5bench CACHE write 2D contiguous (memory) contiguous (file) - run: | - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=/opt/argobots - export ASYNC_DIR=/opt/vol-async - - cd build-cache - - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg - - [ -e SSD ] || mkdir SSD - - python3 ../samples/update.py ../samples/async-write-2d-contig-contig.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-contig.json - - - name: Test h5bench CACHE write 2D contiguous (memory) interleaved (file) - run: | - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=/opt/argobots - export ASYNC_DIR=/opt/vol-async - - cd build-cache - - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg - - [ -e SSD ] || mkdir SSD - - python3 ../samples/update.py ../samples/async-write-2d-contig-interleaved.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-interleaved.json - - - name: Test h5bench CACHE write 2D interleaved (memory) contiguous (file) - run: | - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=/opt/argobots - export ASYNC_DIR=/opt/vol-async - - cd build-cache - - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg - - [ -e SSD ] || mkdir SSD - - python3 ../samples/update.py ../samples/async-write-2d-interleaved-contig.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-interleaved-contig.json - - - name: Test h5bench CACHE write 2D interleaved (memory) interleaved (file) - run: | - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=/opt/argobots - export ASYNC_DIR=/opt/vol-async - - cd build-cache - - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg - - [ -e SSD ] || mkdir SSD - - python3 ../samples/update.py ../samples/async-write-2d-interleaved-interleaved.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-interleaved-interleaved.json - - - name: Test h5bench CACHE write 3D contiguous (memory) contiguous (file) - run: | - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=/opt/argobots - export ASYNC_DIR=/opt/vol-async - - cd build-cache - - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg - - [ -e SSD ] || mkdir SSD - - python3 ../samples/update.py ../samples/async-write-3d-contig-contig.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-3d-contig-contig.json - - - name: Test h5bench CACHE read 1D contiguous (memory) contiguous (file) full - run: | - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=/opt/argobots - export ASYNC_DIR=/opt/vol-async - - cd build-cache - - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 
128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg - - [ -e SSD ] || mkdir SSD - - python3 ../samples/update.py ../samples/async-write-1d-contig-contig-read-full.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig-read-full.json - - - name: Test h5bench CACHE read 1D contiguous (memory) contiguous (file) partial - run: | - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=/opt/argobots - export ASYNC_DIR=/opt/vol-async - - cd build-cache - - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg - - [ -e SSD ] || mkdir SSD - - python3 ../samples/update.py ../samples/async-write-1d-contig-contig-read-partial.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig-read-partial.json - - - name: Test h5bench CACHE read 1D contiguous (memory) contiguous (file) strided - run: | - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=/opt/argobots - export ASYNC_DIR=/opt/vol-async - - cd build-cache - - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg - - [ -e SSD ] || mkdir SSD - - python3 ../samples/update.py ../samples/async-write-1d-contig-contig-read-strided.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig-read-strided.json - - - name: Test h5bench CACHE read 2D contiguous (memory) contiguous (file) full - run: | - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=/opt/argobots - export ASYNC_DIR=/opt/vol-async - - cd build-cache - - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg - - [ -e SSD ] || mkdir SSD - - python3 ../samples/update.py ../samples/async-write-2d-contig-contig-read-full.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-contig-read-full.json - - - name: Test h5bench CACHE read 3D contiguous (memory) contiguous (file) full - run: | - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=/opt/argobots - export ASYNC_DIR=/opt/vol-async - - cd build-cache - - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg - - [ -e SSD ] || mkdir SSD - - python3 ../samples/update.py ../samples/async-write-2d-contig-contig-read-full.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-contig-read-full.json - - name: Upload artifact if: always() uses: actions/upload-artifact@v2 From e670fd9f98912f06c87cff8de228324c66388fa7 Mon Sep 17 00:00:00 2001 From: Jean Luca Bez Date: Sun, 1 May 2022 13:04:05 -0700 Subject: [PATCH 40/55] Update CI with all VOL-CACHE tests --- .github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml b/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml index 7bcc62d3..4d8bc047 100644 --- a/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml +++ b/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml @@ -75,13 +75,13 @@ jobs: export ABT_DIR=/opt/argobots 
export ASYNC_DIR=/opt/vol-async - export LD_LIBRARY_PATH=$ASYNC_DIR/lib:$HDF5_DIR/lib:$ABT_DIR/lib:$LD_LIBRARY_PATH + export LD_LIBRARY_PATH=$ASYNC_DIR/lib:$HDF5_DIR/lib:$ABT_DIR/lib:/usr/local/lib:$LD_LIBRARY_PATH export HDF5_PLUGIN_PATH="$ASYNC_DIR/include" export HDF5_VOL_CONNECTOR="async under_vol=0;under_info={}" cd $ASYNC_DIR/build - export LD_PRELOAD=$ASYNC_DIR/lib/libh5async.so:$ABT_DIR/lib/libabt.so:$HDF5_DIR/lib/libhdf5.so + export LD_PRELOAD=$ASYNC_DIR/lib/libh5async.so:$ABT_DIR/lib/libabt.so:$HDF5_DIR/lib/libhdf5.so:$LD_PRELOAD ctest From f91a1e8cf5a741bc670dd501f5dab48fc00deeb0 Mon Sep 17 00:00:00 2001 From: Huihuo Zheng Date: Fri, 9 Sep 2022 14:53:17 -0500 Subject: [PATCH 41/55] adding support for aprun --- h5bench | 2 ++ 1 file changed, 2 insertions(+) diff --git a/h5bench b/h5bench index fa1dec42..5563c32c 100755 --- a/h5bench +++ b/h5bench @@ -223,6 +223,8 @@ class H5bench: self.mpi = '{} -np {}'.format(mpi['command'], mpi['ranks']) elif mpi['command'] == 'srun': self.mpi = '{} --cpu_bind=cores -n {}'.format(mpi['command'], mpi['ranks']) + elif mpi['command'] == 'aprun': + self.mpi = '{} -n {} -N {} -cc depth'.format(mpi['command'], mpi['ranks'], mpi['ppn']) else: self.logger.warning('Unknown MPI launcher selected!') From b4fcf34cc2852b1d2a3353c3f35d6c1633c55413 Mon Sep 17 00:00:00 2001 From: Huihuo Zheng Date: Fri, 23 Sep 2022 16:40:09 -0500 Subject: [PATCH 42/55] further edits for aprun --- src/h5bench.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/h5bench.py b/src/h5bench.py index 4a685d54..8fbec6a3 100755 --- a/src/h5bench.py +++ b/src/h5bench.py @@ -50,7 +50,7 @@ def check_parallel(self): """Check for parallel overwrite command.""" mpi = [ 'mpirun', 'mpiexec', - 'srun' + 'srun', 'aprun' ] # Get user defined shell @@ -226,7 +226,7 @@ def prepare_parallel(self, mpi): elif mpi['command'] == 'srun': self.mpi = '{} --cpu_bind=cores -n {}'.format(mpi['command'], mpi['ranks']) elif mpi['command'] == 'aprun': - self.mpi = '{} -n {} -N {} -cc depth'.format(mpi['command'], mpi['ranks'], mpi['ppn']) + self.mpi = '{} -n {} -N {} '.format(mpi['command'], mpi['ranks'], mpi['ppn']) else: self.logger.warning('Unknown MPI launcher selected!') From 649be7541c3948295b63232eb9ecfefb646ac376 Mon Sep 17 00:00:00 2001 From: github-actions Date: Fri, 7 Oct 2022 22:11:38 +0000 Subject: [PATCH 43/55] Committing clang-format changes --- h5bench_patterns/h5bench_write.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/h5bench_patterns/h5bench_write.c b/h5bench_patterns/h5bench_write.c index 285efab3..5bdb5424 100644 --- a/h5bench_patterns/h5bench_write.c +++ b/h5bench_patterns/h5bench_write.c @@ -1020,7 +1020,7 @@ main(int argc, char *argv[]) if (params.useCompress) params.data_coll = 1; -if (params.subfiling) + if (params.subfiling) subfiling = 1; #if H5_VERSION_GE(1, 13, 1) From bfdfc13165c1342ebc5813597aae6b3dec8f2368 Mon Sep 17 00:00:00 2001 From: Jean Luca Bez Date: Fri, 7 Oct 2022 15:14:24 -0700 Subject: [PATCH 44/55] Update h5bench.py --- src/h5bench.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/h5bench.py b/src/h5bench.py index 8fbec6a3..e6ffa52e 100755 --- a/src/h5bench.py +++ b/src/h5bench.py @@ -50,7 +50,8 @@ def check_parallel(self): """Check for parallel overwrite command.""" mpi = [ 'mpirun', 'mpiexec', - 'srun', 'aprun' + 'srun', + 'aprun' ] # Get user defined shell From 77c960f05fab07d251bf3753f083bf3d643c2d9a Mon Sep 17 00:00:00 2001 From: Jean Luca Bez Date: Sun, 9 Oct 2022 00:02:48 -0700 Subject: 
[PATCH 45/55] Update h5bench-hdf5-develop.yml --- .github/workflows/h5bench-hdf5-develop.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/h5bench-hdf5-develop.yml b/.github/workflows/h5bench-hdf5-develop.yml index 811300ea..b0449d6a 100644 --- a/.github/workflows/h5bench-hdf5-develop.yml +++ b/.github/workflows/h5bench-hdf5-develop.yml @@ -8,7 +8,7 @@ on: jobs: h5bench: runs-on: ubuntu-latest - timeout-minutes: 60 + timeout-minutes: 90 env: OMPI_ALLOW_RUN_AS_ROOT: 1 OMPI_ALLOW_RUN_AS_ROOT_CONFIRM: 1 From 2bbde20eaec243ad2775c829ca1562d62a51eaab Mon Sep 17 00:00:00 2001 From: Jean Luca Bez Date: Mon, 10 Oct 2022 15:19:19 -0700 Subject: [PATCH 46/55] Update h5bench-hdf5-1.13.0-mpich-3.4.3.yml --- .github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml b/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml index 4d8bc047..4e113864 100644 --- a/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml +++ b/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml @@ -96,6 +96,7 @@ jobs: cmake .. \ -DH5BENCH_ALL=ON make -j 2 + make install - name: Build h5bench ASYNC run: | @@ -113,7 +114,8 @@ jobs: -DWITH_ASYNC_VOL:BOOL=ON \ -DCMAKE_C_FLAGS="-I$ASYNC_HOME/include -L$ASYNC_HOME/lib" \ -DH5BENCH_ALL=ON - make + make -j 2 + make install - name: Configure for MPICH run: | @@ -262,7 +264,7 @@ jobs: - name: Test h5bench SYNC e3sm run: | - export LD_LIBRARY_PATH=/opt/hdf5/lib:$LD_LIBRARY_PATH + export LD_LIBRARY_PATH=/opt/hdf5/lib:/opt/pnetcdf/lib:$LD_LIBRARY_PATH cd build-sync export PATH=$(pwd):$PATH From 4864359c416779a6bb52f433598dc613f44e7cf1 Mon Sep 17 00:00:00 2001 From: Jean Luca Bez Date: Mon, 10 Oct 2022 19:44:40 -0700 Subject: [PATCH 47/55] Update h5bench-hdf5-1.13.0-mpich-3.4.3.yml --- .github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml b/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml index 4e113864..18c7050a 100644 --- a/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml +++ b/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml @@ -94,6 +94,7 @@ jobs: cd build-sync cmake .. \ + -DCMAKE_C_COMPILER=$HDF5_HOME/bin/h5pcc \ -DH5BENCH_ALL=ON make -j 2 make install @@ -111,6 +112,7 @@ jobs: cd build-async cmake .. 
\ + -DCMAKE_C_COMPILER=$HDF5_HOME/bin/h5pcc \ -DWITH_ASYNC_VOL:BOOL=ON \ -DCMAKE_C_FLAGS="-I$ASYNC_HOME/include -L$ASYNC_HOME/lib" \ -DH5BENCH_ALL=ON From 1d905d918e8ca1575e66a60ad1b2cef53f2b8967 Mon Sep 17 00:00:00 2001 From: Jean Luca Bez Date: Tue, 11 Oct 2022 10:58:48 -0700 Subject: [PATCH 48/55] Update h5bench-hdf5-1.13.0-mpich-3.4.3.yml --- .github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml b/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml index 18c7050a..977f75d9 100644 --- a/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml +++ b/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml @@ -254,6 +254,8 @@ jobs: - name: Test h5bench SYNC amrex run: | + export LD_LIBRARY_PATH=/opt/hdf5/lib:/opt/pnetcdf/lib:$LD_LIBRARY_PATH + cd build-sync export PATH=$(pwd):$PATH ./h5bench --debug --abort-on-failure ../samples/sync-amrex.json From 99d446711d13227e0904cf816e0bcde2924937d6 Mon Sep 17 00:00:00 2001 From: Jean Luca Bez Date: Tue, 11 Oct 2022 11:44:34 -0700 Subject: [PATCH 49/55] Update h5bench-hdf5-1.13.0-mpich-3.4.3.yml --- .github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml b/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml index 977f75d9..195b9163 100644 --- a/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml +++ b/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml @@ -262,6 +262,8 @@ jobs: - name: Test h5bench SYNC openpmd run: | + export LD_LIBRARY_PATH=/opt/hdf5/lib:/opt/pnetcdf/lib:$LD_LIBRARY_PATH + cd build-sync export PATH=$(pwd):$PATH ./h5bench --debug --abort-on-failure ../samples/sync-openpmd.json From 2986f90ee1487e8c4f41802d36fcf2ddb784b023 Mon Sep 17 00:00:00 2001 From: Jean Luca Bez Date: Tue, 11 Oct 2022 15:09:57 -0700 Subject: [PATCH 50/55] Update and rename h5bench-hdf5-1.13.0-mpich-3.4.3.yml to h5bench-hdf5-1.13.1-mpich-3.4.3.yml --- ....0-mpich-3.4.3.yml => h5bench-hdf5-1.13.1-mpich-3.4.3.yml} | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) rename .github/workflows/{h5bench-hdf5-1.13.0-mpich-3.4.3.yml => h5bench-hdf5-1.13.1-mpich-3.4.3.yml} (99%) diff --git a/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml b/.github/workflows/h5bench-hdf5-1.13.1-mpich-3.4.3.yml similarity index 99% rename from .github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml rename to .github/workflows/h5bench-hdf5-1.13.1-mpich-3.4.3.yml index 195b9163..5c53363c 100644 --- a/.github/workflows/h5bench-hdf5-1.13.0-mpich-3.4.3.yml +++ b/.github/workflows/h5bench-hdf5-1.13.1-mpich-3.4.3.yml @@ -1,4 +1,4 @@ -name: h5bench (HDF5 1.13.0, MPICH 3.4.3) +name: h5bench (HDF5 1.13.1, MPICH 3.4.3) on: pull_request: @@ -9,7 +9,7 @@ jobs: h5bench: runs-on: ubuntu-20.04 container: - image: jlbez/hdf5-1.13.0-mpich-3.4.3 + image: hpcio/hdf5-1.13.1-mpich-3.4.3 timeout-minutes: 60 env: OMPI_ALLOW_RUN_AS_ROOT: 1 From 3e584282d991ddc7c23df61499a81facd6e7e625 Mon Sep 17 00:00:00 2001 From: Jean Luca Bez Date: Tue, 11 Oct 2022 20:06:18 -0700 Subject: [PATCH 51/55] Update h5bench-hdf5-1.13.1-mpich-3.4.3.yml --- .github/workflows/h5bench-hdf5-1.13.1-mpich-3.4.3.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/h5bench-hdf5-1.13.1-mpich-3.4.3.yml b/.github/workflows/h5bench-hdf5-1.13.1-mpich-3.4.3.yml index 5c53363c..12b5f3c3 100644 --- a/.github/workflows/h5bench-hdf5-1.13.1-mpich-3.4.3.yml +++ b/.github/workflows/h5bench-hdf5-1.13.1-mpich-3.4.3.yml @@ 
-14,6 +14,7 @@ jobs: env: OMPI_ALLOW_RUN_AS_ROOT: 1 OMPI_ALLOW_RUN_AS_ROOT_CONFIRM: 1 + OMPI_MCA_rmaps_base_oversubscribe: "yes" steps: - uses: actions/checkout@v2 From 3a1115bdf7099865bffeeafe4689a10f9406b1f5 Mon Sep 17 00:00:00 2001 From: Jean Luca Bez Date: Mon, 28 Nov 2022 10:45:16 -0800 Subject: [PATCH 52/55] Update h5bench-hdf5-1.13.1-mpich-3.4.3.yml --- .../h5bench-hdf5-1.13.1-mpich-3.4.3.yml | 455 ++++++++++++++++-- 1 file changed, 427 insertions(+), 28 deletions(-) diff --git a/.github/workflows/h5bench-hdf5-1.13.1-mpich-3.4.3.yml b/.github/workflows/h5bench-hdf5-1.13.1-mpich-3.4.3.yml index 12b5f3c3..c5c457be 100644 --- a/.github/workflows/h5bench-hdf5-1.13.1-mpich-3.4.3.yml +++ b/.github/workflows/h5bench-hdf5-1.13.1-mpich-3.4.3.yml @@ -26,22 +26,8 @@ jobs: # VOL-ASYNC git clone --recursive https://github.com/hpc-io/vol-async.git /opt/vol-async - # PnetCDF - wget https://parallel-netcdf.github.io/Release/pnetcdf-1.12.2.tar.gz - tar -zxf pnetcdf-1.12.2.tar.gz - mv pnetcdf-1.12.2 pnetcdf - - - name: Build PnetCDF - run: | - export HDF5_DIR=/opt/hdf5 - export PNETCDF_DIR=/opt/pnetcdf - - cd pnetcdf - - ./configure --prefix=${PNETCDF_DIR} CC=mpicc - - make -j 2 - make install + # VOL-CACHE + git clone --recursive https://github.com/hpc-io/vol-cache.git /opt/vol-cache - name: Build Argobots run: | @@ -76,34 +62,54 @@ jobs: export ABT_DIR=/opt/argobots export ASYNC_DIR=/opt/vol-async - export LD_LIBRARY_PATH=$ASYNC_DIR/lib:$HDF5_DIR/lib:$ABT_DIR/lib:/usr/local/lib:$LD_LIBRARY_PATH + export LD_LIBRARY_PATH=$ASYNC_DIR/lib:$HDF5_DIR/lib:$ABT_DIR/lib:$LD_LIBRARY_PATH export HDF5_PLUGIN_PATH="$ASYNC_DIR/include" export HDF5_VOL_CONNECTOR="async under_vol=0;under_info={}" cd $ASYNC_DIR/build - export LD_PRELOAD=$ASYNC_DIR/lib/libh5async.so:$ABT_DIR/lib/libabt.so:$HDF5_DIR/lib/libhdf5.so:$LD_PRELOAD + export LD_PRELOAD=$ASYNC_DIR/lib/libh5async.so:$ABT_DIR/lib/libabt.so:$HDF5_DIR/lib/libhdf5.so ctest + - name: Build VOL-CACHE + run: | + export HDF5_ROOT=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + export HDF5_VOL_DIR=/opt/vol-cache + + [ -e $HDF5_VOL_DIR ] || mkdir $HDF5_VOL_DIR + [ -e $HDF5_VOL_DIR/bin ] || mkdir $HDF5_VOL_DIR/bin + [ -e $HDF5_VOL_DIR/lib ] || mkdir $HDF5_VOL_DIR/lib + [ -e $HDF5_VOL_DIR/include ] || mkdir $HDF5_VOL_DIR/include + + cp -r $ASYNC_DIR/lib/*.so $ASYNC_DIR/lib/*.a $HDF5_VOL_DIR/lib/ + cp -r $ASYNC_DIR/include/*.h $HDF5_VOL_DIR/include/ + + export LD_LIBRARY_PATH=$HDF5_VOL_DIR/lib:$HDF5_ROOT/lib:$ABT_DIR/lib:$LD_LIBRARY_PATH + + cd $HDF5_VOL_DIR/src + + make all - name: Build h5bench SYNC run: | - export HDF5_HOME=/opt/hdf5 - export PNETCDF_HOME=/opt/pnetcdf + export HDF5_DIR=/opt/hdf5 + export HDF5_HOME=$HDF5_DIR + + export LD_LIBRARY_PATH=$HDF5_HOME/lib:$LD_LIBRARY_PATH mkdir build-sync cd build-sync cmake .. \ - -DCMAKE_C_COMPILER=$HDF5_HOME/bin/h5pcc \ -DH5BENCH_ALL=ON make -j 2 - make install + sudo make install - name: Build h5bench ASYNC run: | export HDF5_HOME=/opt/hdf5 - export PNETCDF_HOME=/opt/pnetcdf export ABT_HOME=/opt/argobots export ASYNC_HOME=/opt/vol-async @@ -113,12 +119,26 @@ jobs: cd build-async cmake .. \ - -DCMAKE_C_COMPILER=$HDF5_HOME/bin/h5pcc \ -DWITH_ASYNC_VOL:BOOL=ON \ -DCMAKE_C_FLAGS="-I$ASYNC_HOME/include -L$ASYNC_HOME/lib" \ -DH5BENCH_ALL=ON make -j 2 - make install + sudo make install + + - name: Build h5bench CACHE + run: | + export HDF5_ROOT=/opt/hdf5 + export HDF5_HOME=/opt/hdf5 + export HDF5_VOL_DIR=/opt/vol-cache + + mkdir build-cache + cd build-cache + + cmake .. 
\ + -DWITH_CACHE_VOL:BOOL=ON \ + -DWITH_ASYNC_VOL:BOOL=ON \ + -DCMAKE_C_FLAGS="-I/$HDF5_VOL_DIR/include -L/$HDF5_VOL_DIR/lib" + make -j 2 - name: Configure for MPICH run: | @@ -129,133 +149,263 @@ jobs: - name: Test h5bench SYNC write/read run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + cd build-sync export PATH=$(pwd):$PATH ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-read-contig-1d-small.json - name: Test h5bench SYNC write 1D contiguous (memory) strided (file) run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + cd build-sync export PATH=$(pwd):$PATH ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-1d-contig-strided.json - name: Test h5bench SYNC write 1D contiguous (memory) contiguous (file) run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + cd build-sync export PATH=$(pwd):$PATH ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-1d-contig-contig.json - name: Test h5bench SYNC write 1D contiguous (memory) interleaved (file) run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + cd build-sync export PATH=$(pwd):$PATH ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-1d-contig-interleaved.json - name: Test h5bench SYNC write 1D interleaved (memory) contiguous (file) run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + cd build-sync export PATH=$(pwd):$PATH ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-1d-interleaved-contig.json - name: Test h5bench SYNC write 1D interleaved (memory) interleaved (file) run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + cd build-sync export PATH=$(pwd):$PATH ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-1d-interleaved-interleaved.json - name: Test h5bench SYNC write 2D contiguous (memory) contiguous (file) run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + cd build-sync export PATH=$(pwd):$PATH ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-2d-contig-contig.json - name: Test h5bench SYNC write 2D contiguous (memory) interleaved (file) run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + cd build-sync export PATH=$(pwd):$PATH ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-2d-contig-interleaved.json - name: Test h5bench SYNC write 2D interleaved (memory) contiguous (file) run: | + export HDF5_HOME=/opt/hdf5 + export 
ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + cd build-sync export PATH=$(pwd):$PATH ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-2d-interleaved-contig.json - name: Test h5bench SYNC write 2D interleaved (memory) interleaved (file) run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + cd build-sync export PATH=$(pwd):$PATH ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-2d-interleaved-interleaved.json - name: Test h5bench SYNC write 3D contiguous (memory) contiguous (file) run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + cd build-sync export PATH=$(pwd):$PATH ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-3d-contig-contig.json - name: Test h5bench SYNC read 1D contiguous (memory) contiguous (file) full run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + cd build-sync export PATH=$(pwd):$PATH ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-1d-contig-contig-read-full.json - name: Test h5bench SYNC read 1D contiguous (memory) contiguous (file) partial run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + cd build-sync export PATH=$(pwd):$PATH ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-1d-contig-contig-read-partial.json - name: Test h5bench SYNC read 1D contiguous (memory) contiguous (file) strided run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + cd build-sync export PATH=$(pwd):$PATH ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-1d-contig-contig-read-strided.json - name: Test h5bench SYNC read 2D contiguous (memory) contiguous (file) full run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + cd build-sync export PATH=$(pwd):$PATH ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-2d-contig-contig-read-full.json - name: Test h5bench SYNC read 3D contiguous (memory) contiguous (file) full run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + cd build-sync export PATH=$(pwd):$PATH ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-2d-contig-contig-read-full.json - name: Test h5bench SYNC write unlimited run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + cd build-sync export PATH=$(pwd):$PATH ./h5bench --debug --abort-on-failure ../samples/sync-write-unlimited.json 
- name: Test h5bench SYNC overwrite run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + cd build-sync export PATH=$(pwd):$PATH ./h5bench --debug --abort-on-failure ../samples/sync-overwrite.json - name: Test h5bench SYNC append run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + cd build-sync export PATH=$(pwd):$PATH ./h5bench --debug --abort-on-failure ../samples/sync-append.json - name: Test h5bench SYNC exerciser run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + cd build-sync export PATH=$(pwd):$PATH ./h5bench --debug --abort-on-failure ../samples/sync-exerciser.json - name: Test h5bench SYNC metadata run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + cd build-sync export PATH=$(pwd):$PATH ./h5bench --debug --abort-on-failure ../samples/sync-metadata.json - name: Test h5bench SYNC amrex run: | - export LD_LIBRARY_PATH=/opt/hdf5/lib:/opt/pnetcdf/lib:$LD_LIBRARY_PATH + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH cd build-sync export PATH=$(pwd):$PATH @@ -263,7 +413,11 @@ jobs: - name: Test h5bench SYNC openpmd run: | - export LD_LIBRARY_PATH=/opt/hdf5/lib:/opt/pnetcdf/lib:$LD_LIBRARY_PATH + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH cd build-sync export PATH=$(pwd):$PATH @@ -271,7 +425,11 @@ jobs: - name: Test h5bench SYNC e3sm run: | - export LD_LIBRARY_PATH=/opt/hdf5/lib:/opt/pnetcdf/lib:$LD_LIBRARY_PATH + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH cd build-sync export PATH=$(pwd):$PATH @@ -498,6 +656,247 @@ jobs: ./h5bench --debug --abort-on-failure ../samples/async-amrex.json + - name: Test h5bench CACHE write 1D contiguous (memory) strided (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + export HDF5_VOL_DIR=/opt/vol-cache + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-contig-strided.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-strided.json + + - name: Test h5bench CACHE write 1D contiguous (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] 
|| mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-contig-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig.json + + - name: Test h5bench CACHE write 1D contiguous (memory) interleaved (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-contig-interleaved.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-interleaved.json + + - name: Test h5bench CACHE write 1D interleaved (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-interleaved-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-interleaved-contig.json + + - name: Test h5bench CACHE write 1D interleaved (memory) interleaved (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-interleaved-interleaved.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-interleaved-interleaved.json + + - name: Test h5bench CACHE write 2D contiguous (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-2d-contig-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-contig.json + + - name: Test h5bench CACHE write 2D contiguous (memory) interleaved (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-2d-contig-interleaved.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-interleaved.json + + - name: Test h5bench CACHE write 2D interleaved (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: 
SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-2d-interleaved-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-interleaved-contig.json + + - name: Test h5bench CACHE write 2D interleaved (memory) interleaved (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-2d-interleaved-interleaved.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-interleaved-interleaved.json + + - name: Test h5bench CACHE write 3D contiguous (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-3d-contig-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-3d-contig-contig.json + + - name: Test h5bench CACHE read 1D contiguous (memory) contiguous (file) full + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-contig-contig-read-full.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig-read-full.json + + - name: Test h5bench CACHE read 1D contiguous (memory) contiguous (file) partial + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-contig-contig-read-partial.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig-read-partial.json + + - name: Test h5bench CACHE read 1D contiguous (memory) contiguous (file) strided + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-contig-contig-read-strided.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig-read-strided.json + + - name: 
Test h5bench CACHE read 2D contiguous (memory) contiguous (file) full + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-2d-contig-contig-read-full.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-contig-read-full.json + + - name: Test h5bench CACHE read 3D contiguous (memory) contiguous (file) full + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-2d-contig-contig-read-full.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-contig-read-full.json + - name: Upload artifact if: always() uses: actions/upload-artifact@v2 From d6a0a0211b6539697bc9a588acd6a479d4440416 Mon Sep 17 00:00:00 2001 From: Jean Luca Bez Date: Mon, 28 Nov 2022 11:23:40 -0800 Subject: [PATCH 53/55] Update h5bench-hdf5-1.13.1-mpich-3.4.3.yml --- .../h5bench-hdf5-1.13.1-mpich-3.4.3.yml | 30 +++++++++---------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/.github/workflows/h5bench-hdf5-1.13.1-mpich-3.4.3.yml b/.github/workflows/h5bench-hdf5-1.13.1-mpich-3.4.3.yml index c5c457be..f8e313a9 100644 --- a/.github/workflows/h5bench-hdf5-1.13.1-mpich-3.4.3.yml +++ b/.github/workflows/h5bench-hdf5-1.13.1-mpich-3.4.3.yml @@ -665,7 +665,7 @@ jobs: cd build-cache - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg [ -e SSD ] || mkdir SSD @@ -681,7 +681,7 @@ jobs: cd build-cache - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg [ -e SSD ] || mkdir SSD @@ -697,7 +697,7 @@ jobs: cd build-cache - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg [ -e SSD ] || mkdir SSD @@ -713,7 +713,7 @@ jobs: cd build-cache - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg 
+ printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg [ -e SSD ] || mkdir SSD @@ -729,7 +729,7 @@ jobs: cd build-cache - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg [ -e SSD ] || mkdir SSD @@ -745,7 +745,7 @@ jobs: cd build-cache - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg [ -e SSD ] || mkdir SSD @@ -761,7 +761,7 @@ jobs: cd build-cache - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg [ -e SSD ] || mkdir SSD @@ -777,7 +777,7 @@ jobs: cd build-cache - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg [ -e SSD ] || mkdir SSD @@ -793,7 +793,7 @@ jobs: cd build-cache - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg [ -e SSD ] || mkdir SSD @@ -809,7 +809,7 @@ jobs: cd build-cache - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg [ -e SSD ] || mkdir SSD @@ -825,7 +825,7 @@ jobs: cd build-cache - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg [ -e SSD ] || mkdir SSD @@ -841,7 +841,7 @@ jobs: cd build-cache - printf 
"HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg [ -e SSD ] || mkdir SSD @@ -857,7 +857,7 @@ jobs: cd build-cache - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg [ -e SSD ] || mkdir SSD @@ -873,7 +873,7 @@ jobs: cd build-cache - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg [ -e SSD ] || mkdir SSD @@ -889,7 +889,7 @@ jobs: cd build-cache - printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg [ -e SSD ] || mkdir SSD From 0babbee8d4e0138e29123ad211fff30dd7a87da5 Mon Sep 17 00:00:00 2001 From: Huihuo Zheng Date: Mon, 5 Dec 2022 21:57:20 -0600 Subject: [PATCH 54/55] Minor changes to the PR 1) Added back "MODE" option in cfg file. By default if the user does not specify MODE in cfg file, it will check whether it has async vol connector. If so, it will set as ASYNC; otherwise it will set as SYNC. For Cache VOL, one has to explicitly set MODE: SYNC in cfg file. 
2) Removed unnecessary MPI_Barrier --- commons/h5bench_util.c | 29 +++++++++++++++++++++-------- h5bench_patterns/h5bench_write.c | 1 - 2 files changed, 21 insertions(+), 9 deletions(-) diff --git a/commons/h5bench_util.c b/commons/h5bench_util.c index 8286dc69..f4b930a1 100644 --- a/commons/h5bench_util.c +++ b/commons/h5bench_util.c @@ -648,6 +648,15 @@ _set_params(char *key, char *val_in, bench_params *params_in_out, int do_write) return 0; char *val = _parse_val(val_in); + has_vol_async = has_vol_connector(); + + if (has_vol_async) { + (*params_in_out).asyncMode = MODE_ASYNC; + } + else { + (*params_in_out).asyncMode = MODE_SYNC; + } + if (strcmp(key, "IO_OPERATION") == 0) { if (strcmp(val, "READ") == 0) { params_in_out->io_op = IO_READ; @@ -911,19 +920,23 @@ _set_params(char *key, char *val_in, bench_params *params_in_out, int do_write) else (*params_in_out).subfiling = 0; } + else if (strcmp(key, "MODE") == 0) { + if (strcmp(val_in, "SYNC") == 0) { + params_in_out->asyncMode = MODE_ASYNC; + } + else if (strcmp(val_in, "ASYNC") == 0) { + params_in_out->asyncMode = MODE_SYNC; + } + else { + printf("Unknown MODE: %s\n", key); + return -1; + } + } else { printf("Unknown Parameter: %s\n", key); return -1; } - has_vol_async = has_vol_connector(); - - if (has_vol_async) { - (*params_in_out).asyncMode = MODE_ASYNC; - } - else { - (*params_in_out).asyncMode = MODE_SYNC; - } if ((*params_in_out).useCSV) (*params_in_out).csv_fs = csv_init(params_in_out->csv_path, params_in_out->env_meta_path); diff --git a/h5bench_patterns/h5bench_write.c b/h5bench_patterns/h5bench_write.c index 5bdb5424..ad8030c9 100644 --- a/h5bench_patterns/h5bench_write.c +++ b/h5bench_patterns/h5bench_write.c @@ -789,7 +789,6 @@ _run_benchmark_write(bench_params params, hid_t file_id, hid_t fapl, hid_t files if (MY_RANK == 0) printf("Writing %s ... \n", grp_name); #ifdef USE_CACHE_VOL - MPI_Barrier(MPI_COMM_WORLD); H5Fcache_async_op_pause(file_id); #endif switch (pattern) { From 7cedfc6eda7c843b664c5b6ece6cc7b19a56291c Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 6 Dec 2022 04:00:09 +0000 Subject: [PATCH 55/55] Committing clang-format changes --- commons/h5bench_util.c | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/commons/h5bench_util.c b/commons/h5bench_util.c index f4b930a1..d0694b02 100644 --- a/commons/h5bench_util.c +++ b/commons/h5bench_util.c @@ -656,7 +656,7 @@ _set_params(char *key, char *val_in, bench_params *params_in_out, int do_write) else { (*params_in_out).asyncMode = MODE_SYNC; } - + if (strcmp(key, "IO_OPERATION") == 0) { if (strcmp(val, "READ") == 0) { params_in_out->io_op = IO_READ; @@ -928,8 +928,8 @@ _set_params(char *key, char *val_in, bench_params *params_in_out, int do_write) params_in_out->asyncMode = MODE_SYNC; } else { - printf("Unknown MODE: %s\n", key); - return -1; + printf("Unknown MODE: %s\n", key); + return -1; } } else { @@ -937,7 +937,6 @@ _set_params(char *key, char *val_in, bench_params *params_in_out, int do_write) return -1; } - if ((*params_in_out).useCSV) (*params_in_out).csv_fs = csv_init(params_in_out->csv_path, params_in_out->env_meta_path);