From ea3f98064c0e0e66eec2ff5e17cbb2962e6185a9 Mon Sep 17 00:00:00 2001
From: Matthew Larson
Date: Fri, 13 Dec 2024 13:47:41 -0600
Subject: [PATCH 1/6] Remove vol-tests submodule

---
 .gitmodules            |  3 ---
 build_vol_autotools.sh |  6 ------
 build_vol_cmake.sh     | 24 +-----------------------
 test/vol-tests         |  1 -
 4 files changed, 1 insertion(+), 33 deletions(-)
 delete mode 160000 test/vol-tests

diff --git a/.gitmodules b/.gitmodules
index 7c1f0753..e69de29b 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,3 +0,0 @@
-[submodule "test/vol-tests"]
-	path = test/vol-tests
-	url = https://github.com/HDFGroup/vol-tests
diff --git a/build_vol_autotools.sh b/build_vol_autotools.sh
index 8ce30bfb..c5e0fc45 100755
--- a/build_vol_autotools.sh
+++ b/build_vol_autotools.sh
@@ -163,12 +163,6 @@ if [ "$NPROCS" -eq "0" ]; then
     fi
 fi
 
-# Ensure that the HDF5 and VOL tests submodules get checked out
-if [ -z "$(ls -A ${SCRIPT_DIR}/${HDF5_DIR})" ]; then
-    git submodule init
-    git submodule update
-fi
-
 # If the user hasn't already, first build HDF5
 if [ "$build_hdf5" = true ]; then
     echo "*****************"
diff --git a/build_vol_cmake.sh b/build_vol_cmake.sh
index 1655029b..58e3cae1 100755
--- a/build_vol_cmake.sh
+++ b/build_vol_cmake.sh
@@ -175,12 +175,6 @@ if [ "$NPROCS" -eq "0" ]; then
     fi
 fi
 
-# Ensure that the vol-tests submodule gets checked out
-if [ -z "$(ls -A ${SCRIPT_DIR}/test/vol-tests)" ]; then
-    git submodule init
-    git submodule update
-fi
-
 # Build the REST VOL connector against HDF5.
 echo "*******************************************"
 echo "* Building REST VOL connector and test suite *"
@@ -195,7 +189,7 @@ rm -f "${BUILD_DIR}/CMakeCache.txt"
 
 cd "${BUILD_DIR}"
 
-CFLAGS="-D_POSIX_C_SOURCE=200809L" cmake -G "${CMAKE_GENERATOR}" "-DHDF5_ROOT=${HDF5_INSTALL_DIR}" -DCMAKE_INSTALL_PREFIX="${INSTALL_DIR}" "${CURL_OPT}" "${YAJL_OPT}" "${YAJL_LIB_OPT}" "${CONNECTOR_DEBUG_OPT}" "${CURL_DEBUG_OPT}" "${MEM_TRACK_OPT}" "${THREAD_SAFE_OPT}" "${SCRIPT_DIR}"
+CFLAGS="-D_POSIX_C_SOURCE=200809L -g -O0" cmake -G "-DCMAKE_C_COMPILER=gcc-12" "${CMAKE_GENERATOR}" "-DHDF5_ROOT=${HDF5_INSTALL_DIR}" -DCMAKE_INSTALL_PREFIX="${INSTALL_DIR}" "${CURL_OPT}" "${YAJL_OPT}" "${YAJL_LIB_OPT}" "${CONNECTOR_DEBUG_OPT}" "${CURL_DEBUG_OPT}" "${MEM_TRACK_OPT}" "${THREAD_SAFE_OPT}" "${SCRIPT_DIR}"
 
 echo "Build files have been generated for CMake generator '${CMAKE_GENERATOR}'"
 
@@ -206,20 +200,4 @@ fi
 
 echo "REST VOL built"
 
-# Clean out the old CMake cache
-rm -f "${BUILD_DIR}/CMakeCache.txt"
-
-# Configure vol-tests
-
-mkdir -p "${BUILD_DIR}/tests/vol-tests"
-cd "${BUILD_DIR}/tests/vol-tests"
-
-CFLAGS="-D_POSIX_C_SOURCE=200809L" cmake -G "${CMAKE_GENERATOR}" "-DHDF5_DIR=${HDF5_INSTALL_DIR}" -DCMAKE_INSTALL_PREFIX="${INSTALL_DIR}" "${CONNECTOR_DEBUG_OPT}" "${CURL_DEBUG_OPT}" "${MEM_TRACK_OPT}" "${THREAD_SAFE_OPT}" "${SCRIPT_DIR}/test/vol-tests"
-
-echo "Build files generated for vol-tests"
-
-make || exit 1
-
-echo "VOL tests built"
-
 exit 0
diff --git a/test/vol-tests b/test/vol-tests
deleted file mode 160000
index 996dd872..00000000
--- a/test/vol-tests
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit 996dd87212b2547f1ce638d29b64c8ca436d859c

From 8c85c7f04a5ed867135e0625cb39fece326442d4 Mon Sep 17 00:00:00 2001
From: Matthew Larson
Date: Fri, 13 Dec 2024 13:52:07 -0600
Subject: [PATCH 2/6] Show HSDS log on CI failure

---
 .github/workflows/main.yml | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index fb6f053a..d98b5cd7 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -150,6 +150,12 @@ jobs:
         run: |
           HDF5_PLUGIN_PATH=${{github.workspace}}/vol-rest/install/lib HDF5_VOL_CONNECTOR=REST ./test/test_rest_vol
 
+      - name: Show HSDS Logs on Fail
+        if: ${{failure()}}
+        working-directory: ${{github.workspace}}/hsds
+        run: |
+          cat hs.log
+
   build_and_test_with_cmake:
     strategy:
       fail-fast: false
@@ -281,6 +287,12 @@ jobs:
         run: |
           valgrind --leak-check=full -s ctest -R "test_rest_vol" -VV
 
+      - name: Show HSDS Logs on Fail
+        if: ${{failure()}}
+        working-directory: ${{github.workspace}}/hsds
+        run: |
+          cat hs.log
+
 # TODO: Attribute, dataset, link, and testhdf5 tests currently fail
 #    - name: Test REST VOL with API
 #      run: |

From 43e4a0f2548106dbc9ea47c4fe825b37ec9b0f34 Mon Sep 17 00:00:00 2001
From: Matthew Larson
Date: Fri, 13 Dec 2024 15:14:10 -0600
Subject: [PATCH 3/6] Fix warnings

---
 .github/workflows/main.yml |  4 ++--
 src/rest_vol.c             | 16 ++++++----------
 src/rest_vol_attr.c        |  8 ++++----
 src/rest_vol_config.h.in   | 16 +++++++++++-----
 src/rest_vol_dataset.c     | 25 ++++++++++---------------
 src/rest_vol_datatype.c    |  4 ++--
 src/rest_vol_group.c       |  2 +-
 src/rest_vol_link.c        |  2 +-
 src/rest_vol_object.c      |  2 +-
 9 files changed, 38 insertions(+), 41 deletions(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index d98b5cd7..f3d8a77f 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -80,7 +80,7 @@ jobs:
 
       - name: Build + Install REST VOL
         run: |
-          make
+          make -j
           make install
         shell: bash
         working-directory: ${{github.workspace}}/vol-rest/
@@ -212,7 +212,7 @@ jobs:
 
       - name: Build + Install REST VOL
         run: |
-          make
+          make -j
           make install
         shell: bash
         working-directory: ${{github.workspace}}/vol-rest/build
diff --git a/src/rest_vol.c b/src/rest_vol.c
index dba6f201..545d2136 100644
--- a/src/rest_vol.c
+++ b/src/rest_vol.c
@@ -2965,7 +2965,7 @@ RV_parse_dataspace(char *space)
         for (i = 0; i < dims_obj->u.array.len; i++) {
             if (i > 0)
                 printf(", ");
-            printf("%llu", space_dims[i]);
+            printf("%" PRIuHSIZE, space_dims[i]);
         }
         printf(" ]\n\n");
         if (maxdims_specified) {
@@ -2973,7 +2973,7 @@ RV_parse_dataspace(char *space)
             for (i = 0; i < maxdims_obj->u.array.len; i++) {
                 if (i > 0)
                     printf(", ");
-                printf("%llu", space_maxdims[i]);
+                printf("%" PRIuHSIZE, space_maxdims[i]);
             }
             printf(" ]\n\n");
         }
@@ -4125,8 +4125,7 @@ RV_curl_delete(CURL *curl_handle, server_info_t *server_info, const char *reques
 
     strcpy(host_header, host_string);
 
-    curl_headers_local = curl_slist_append(
-        curl_headers_local, strncat(host_header, filename, host_header_len - strlen(host_string) - 1));
+    curl_headers_local = curl_slist_append(curl_headers_local, strcat(host_header, filename));
 
     /* Disable use of Expect: 100 Continue HTTP response */
    curl_headers_local = curl_slist_append(curl_headers_local, "Expect:");
@@ -4201,8 +4200,7 @@ RV_curl_put(CURL *curl_handle, server_info_t *server_info, const char *request_e
 
     strcpy(host_header, host_string);
 
-    curl_headers_local = curl_slist_append(
-        curl_headers_local, strncat(host_header, filename, host_header_len - strlen(host_string) - 1));
+    curl_headers_local = curl_slist_append(curl_headers_local, strcat(host_header, filename));
 
     /* Disable use of Expect: 100 Continue HTTP response */
     curl_headers_local = curl_slist_append(curl_headers_local, "Expect:");
@@ -4301,8 +4299,7 @@ RV_curl_get(CURL *curl_handle, server_info_t *server_info, const char *request_e
 
     strcpy(host_header, host_string);
 
-    curl_headers_local = curl_slist_append(
-        curl_headers_local, strncat(host_header, filename, host_header_len - strlen(host_string) - 1));
+    curl_headers_local = curl_slist_append(curl_headers_local, strcat(host_header, filename));
 
     /* Specify type of content being sent through cURL */
     switch (content_type) {
@@ -4387,8 +4384,7 @@ RV_curl_post(CURL *curl_handle, server_info_t *server_info, const char *request_
 
     strcpy(host_header, host_string);
 
-    curl_headers_local = curl_slist_append(
-        curl_headers_local, strncat(host_header, filename, host_header_len - strlen(host_string) - 1));
+    curl_headers_local = curl_slist_append(curl_headers_local, strcat(host_header, filename));
 
     /* Specify type of content being sent through cURL */
     switch (content_type) {
diff --git a/src/rest_vol_attr.c b/src/rest_vol_attr.c
index d612ac24..65e66ba8 100644
--- a/src/rest_vol_attr.c
+++ b/src/rest_vol_attr.c
@@ -710,7 +710,7 @@ RV_attr_read(void *attr, hid_t dtype_id, void *buf, hid_t dxpl_id, void **req)
         FUNC_GOTO_ERROR(H5E_DATATYPE, H5E_BADVALUE, FAIL, "memory datatype is invalid");
 
 #ifdef RV_CONNECTOR_DEBUG
-    printf("-> %lld points selected for attribute read\n", file_select_npoints);
+    printf("-> %" PRIuHSIZE "points selected for attribute read\n", file_select_npoints);
     printf("-> Attribute's datatype size: %zu\n\n", dtype_size);
 #endif
 
@@ -827,7 +827,7 @@ RV_attr_write(void *attr, hid_t dtype_id, const void *buf, hid_t dxpl_id, void *
         FUNC_GOTO_ERROR(H5E_DATASPACE, H5E_BADVALUE, FAIL, "memory datatype is invalid");
 
 #ifdef RV_CONNECTOR_DEBUG
-    printf("-> %lld points selected for attribute write\n", file_select_npoints);
+    printf("-> %" PRIuHSIZE "points selected for attribute write\n", file_select_npoints);
     printf("-> Attribute's datatype size: %zu\n\n", dtype_size);
 #endif
 
@@ -2525,7 +2525,7 @@ RV_traverse_attr_table(attr_table_entry *attr_table, size_t num_entries, const i
 #ifdef RV_CONNECTOR_DEBUG
             printf("-> Attribute %zu name: %s\n", last_idx, attr_table[last_idx].attr_name);
             printf("-> Attribute %zu creation time: %f\n", last_idx, attr_table[last_idx].crt_time);
-            printf("-> Attribute %zu data size: %llu\n\n", last_idx,
+            printf("-> Attribute %zu data size: %" PRIuHSIZE "\n\n", last_idx,
                    attr_table[last_idx].attr_info.data_size);
 
             printf("-> Calling supplied callback function\n\n");
@@ -2556,7 +2556,7 @@ RV_traverse_attr_table(attr_table_entry *attr_table, size_t num_entries, const i
 #ifdef RV_CONNECTOR_DEBUG
             printf("-> Attribute %zu name: %s\n", last_idx, attr_table[last_idx].attr_name);
             printf("-> Attribute %zu creation time: %f\n", last_idx, attr_table[last_idx].crt_time);
-            printf("-> Attribute %zu data size: %llu\n\n", last_idx,
+            printf("-> Attribute %zu data size: %" PRIuHSIZE "\n\n", last_idx,
                    attr_table[last_idx].attr_info.data_size);
 
             printf("-> Calling supplied callback function\n\n");
diff --git a/src/rest_vol_config.h.in b/src/rest_vol_config.h.in
index 5e0997d4..c49f1de1 100644
--- a/src/rest_vol_config.h.in
+++ b/src/rest_vol_config.h.in
@@ -30,9 +30,6 @@
 /* Define to 1 if you have the <mach/mach_time.h> header file. */
 #undef HAVE_MACH_MACH_TIME_H
 
-/* Define to 1 if you have the <memory.h> header file. */
-#undef HAVE_MEMORY_H
-
 /* Define to 1 if you have the <setjmp.h> header file. */
 #undef HAVE_SETJMP_H
 
@@ -48,6 +45,9 @@
 /* Define to 1 if you have the <stdint.h> header file. */
 #undef HAVE_STDINT_H
 
+/* Define to 1 if you have the <stdio.h> header file. */
+#undef HAVE_STDIO_H
+
 /* Define to 1 if you have the <stdlib.h> header file. */
 #undef HAVE_STDLIB_H
 
@@ -81,6 +81,9 @@
 /* Define to 1 if you have the <unistd.h> header file. */
 #undef HAVE_UNISTD_H
 
+/* Whether HDF5 version is at least 2.0.0 */
+#undef HDF5_2
+
 /* Define to the sub-directory where libtool stores uninstalled libraries. */
 #undef LT_OBJDIR
 
@@ -111,10 +114,13 @@
 /* The size of `off_t', as computed by sizeof. */
 #undef SIZEOF_OFF_T
 
-/* Define to 1 if you have the ANSI C header files. */
+/* Define to 1 if all of the C90 standard headers exist (not just the ones
+   required in a freestanding environment). This macro is provided for
+   backward compatibility; new code need not use it. */
 #undef STDC_HEADERS
 
-/* Define to 1 if you can safely include both <sys/time.h> and <time.h>. */
+/* Define to 1 if you can safely include both <sys/time.h> and <time.h>. This
+   macro is obsolete. */
 #undef TIME_WITH_SYS_TIME
 
 /* Define to have the REST VOL track memory usage. */
diff --git a/src/rest_vol_dataset.c b/src/rest_vol_dataset.c
index b5fc7cbc..3790a507 100644
--- a/src/rest_vol_dataset.c
+++ b/src/rest_vol_dataset.c
@@ -607,8 +607,8 @@ RV_dataset_read(size_t count, void *dset[], hid_t mem_type_id[], hid_t _mem_spac
                             "memory selection num points != file selection num points");
 
 #ifdef RV_CONNECTOR_DEBUG
-        printf("-> %lld points selected in file dataspace\n", file_select_npoints);
-        printf("-> %lld points selected in memory dataspace\n\n", mem_select_npoints);
+        printf("-> %" PRIuHSIZE "points selected in file dataspace\n", file_select_npoints);
+        printf("-> %" PRIuHSIZE "points selected in memory dataspace\n\n", mem_select_npoints);
 #endif
 
         /* Setup the host header */
@@ -986,8 +986,8 @@ RV_dataset_write(size_t count, void *dset[], hid_t mem_type_id[], hid_t _mem_spa
                             "memory selection num points != file selection num points");
 
 #ifdef RV_CONNECTOR_DEBUG
-        printf("-> %lld points selected in file dataspace\n", file_select_npoints);
-        printf("-> %lld points selected in memory dataspace\n\n", mem_select_npoints);
+        printf("-> %" PRIuHSIZE "points selected in file dataspace\n", file_select_npoints);
+        printf("-> %" PRIuHSIZE "points selected in memory dataspace\n\n", mem_select_npoints);
 #endif
 
         if ((file_type_size = H5Tget_size(transfer_info[i].file_type_id)) == 0)
@@ -1266,24 +1266,20 @@ RV_dataset_write(size_t count, void *dset[], hid_t mem_type_id[], hid_t _mem_spa
 
 #ifdef RV_CONNECTOR_DEBUG
                 printf("-> Base64-encoded data buffer for dataset %zu: %s\n\n", i,
-                       transfer_info[i].u.write_info.base64_encoded_values);
+                       (char *)transfer_info[i].u.write_info.base64_encoded_values);
 #endif
 
                 /* Copy encoded values into format string */
                 write_body_len = (strlen(fmt_string) - 4) + selection_body_len + value_body_len;
                 if (NULL == (transfer_info[i].u.write_info.point_sel_buf = RV_malloc(write_body_len + 1)))
                     FUNC_GOTO_ERROR(H5E_DATASET, H5E_CANTALLOC, FAIL, "can't allocate space for write buffer");
-                if ((bytes_printed =
-                         snprintf(transfer_info[i].u.write_info.point_sel_buf, write_body_len + 1, fmt_string,
-                                  selection_body, transfer_info[i].u.write_info.base64_encoded_values)) < 0)
+                if ((bytes_printed = snprintf(transfer_info[i].u.write_info.point_sel_buf, write_body_len + 1,
+                                              fmt_string, selection_body,
+                                              (char *)transfer_info[i].u.write_info.base64_encoded_values)) < 0)
                     FUNC_GOTO_ERROR(H5E_DATASET, H5E_SYSERRSTR, FAIL, "snprintf error");
 
                 transfer_info[i].u.write_info.uinfo.buffer = transfer_info[i].u.write_info.point_sel_buf;
 
-#ifdef RV_CONNECTOR_DEBUG
-                printf("-> Write body: %s\n\n", transfer_info[i].u.write_info.selection_buf);
-#endif
-
                 if (bytes_printed >= write_body_len + 1)
                     FUNC_GOTO_ERROR(H5E_DATASET, H5E_SYSERRSTR, FAIL,
                                     "point selection write buffer exceeded allocated buffer size");
@@ -2509,7 +2505,7 @@ RV_parse_dataset_creation_properties_callback(char *HTTP_response, const void *c
                 for (i = 0; i < YAJL_GET_ARRAY(chunk_dims_obj)->len; i++) {
                     if (i > 0)
                         printf(", ");
-                    printf("%llu", chunk_dims[i]);
+                    printf("%" PRIuHSIZE, chunk_dims[i]);
                 }
                 printf(" ]\n");
 #endif
@@ -3498,8 +3494,7 @@ RV_convert_dataset_creation_properties_to_JSON(hid_t dcpl, char **creation_prope
                 const char *const external_file_str = "%s{"
                                                       "\"name\": %s,"
                                                       "\"offset\": " OFF_T_SPECIFIER ","
-                                                      "\"size\": %llu"
-                                                      "}";
+                                                      "\"size\": %" PRIuHSIZE "}";
 
                 /* Check whether the buffer needs to be grown */
                 bytes_to_print += strlen(external_storage_str);
diff --git a/src/rest_vol_datatype.c b/src/rest_vol_datatype.c
index b6bbceb6..6ae7cf92 100644
--- a/src/rest_vol_datatype.c
+++ b/src/rest_vol_datatype.c
@@ -1948,7 +1948,7 @@ RV_convert_JSON_to_datatype(const char *type)
             for (i = 0; i < YAJL_GET_ARRAY(key_obj)->len; i++) {
                 if (i > 0)
                     printf(", ");
-                printf("%llu", array_dims[i]);
+                printf("%" PRIuHSIZE, array_dims[i]);
             }
             printf("]\n");
 #endif
@@ -2170,7 +2170,7 @@ RV_convert_JSON_to_datatype(const char *type)
             memcpy(tmp_vlen_type_buffer + 2, type_class_keys[0], strlen(type_class_keys[0]));
 
 #ifdef RV_CONNECTOR_DEBUG
-            printf("-> Converting variable length datatype's parent type from JSON to hid_t\n", i);
+            printf("-> Converting variable length datatype's parent type from JSON to hid_t\n");
 #endif
 
             /* Recursively parse parent datatype from JSON */
diff --git a/src/rest_vol_group.c b/src/rest_vol_group.c
index 9c9825a1..6696cca0 100644
--- a/src/rest_vol_group.c
+++ b/src/rest_vol_group.c
@@ -729,7 +729,7 @@ RV_get_group_info_callback(char *HTTP_response, const void *callback_data_in, vo
     group_info->nlinks = (hsize_t)YAJL_GET_INTEGER(key_obj);
 
 #ifdef RV_CONNECTOR_DEBUG
-    printf("-> Group had %llu links in it\n\n", group_info->nlinks);
+    printf("-> Group had %" PRIuHSIZE "links in it\n\n", group_info->nlinks);
 #endif
 
 done:
diff --git a/src/rest_vol_link.c b/src/rest_vol_link.c
index 88e03f38..b98c555e 100644
--- a/src/rest_vol_link.c
+++ b/src/rest_vol_link.c
@@ -1574,7 +1574,7 @@ RV_get_link_name_by_idx_callback(char *HTTP_response, const void *callback_data_
         FUNC_GOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "link index number larger than number of links");
 
 #ifdef RV_CONNECTOR_DEBUG
-    printf("-> Retrieving link name of link at index %PRIuHSIZE\n\n", *by_idx_data->idx_p);
+    printf("-> Retrieving link name of link at index %" PRIuHSIZE "\n\n", *by_idx_data->idx_p);
 #endif
 
     /* Retrieve the nth link name */
diff --git a/src/rest_vol_object.c b/src/rest_vol_object.c
index 5c4ea994..0ab5a2b5 100644
--- a/src/rest_vol_object.c
+++ b/src/rest_vol_object.c
@@ -1127,7 +1127,7 @@ RV_get_object_info_callback(char *HTTP_response, const void *callback_data_in, v
         obj_info->num_attrs = (hsize_t)YAJL_GET_INTEGER(key_obj);
 
 #ifdef RV_CONNECTOR_DEBUG
-        printf("-> Object had %llu attributes attached to it\n\n", obj_info->num_attrs);
+        printf("-> Object had %" PRIuHSIZE "attributes attached to it\n\n", obj_info->num_attrs);
 #endif
 
         /* Retrieve the object's class */

From db5f887c18039aa002ceceb240ec041a4dd9d9a4 Mon Sep 17 00:00:00 2001
From: Matthew Larson
Date: Mon, 16 Dec 2024 15:25:48 -0600
Subject: [PATCH 4/6] Add Ubuntu 22/24 to CI

---
 .github/workflows/main.yml | 215 ++++++++++++-------------------------
 1 file changed, 71 insertions(+), 144 deletions(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index f3d8a77f..7dc58f0b 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -27,13 +27,25 @@ env:
   BUCKET_NAME: hsdstest
 
 jobs:
-  build_and_test_with_autotools:
+  build_and_test:
     strategy:
       fail-fast: false
       matrix:
-        os: [ubuntu-latest]
-        python-version: ["3.10"]
-        hdf5-branch: ["hdf5_1_14"]
+        os: [ubuntu-22.04, ubuntu-latest]
+        build_system: ["autotools", "cmake"]
+        python-version: ["3.10", "3.11", "3.12"]
+        hdf5-branch: ["hdf5_1_14", "develop"]
+        compiler: ["gcc-11", "gcc-12", "gcc-13"]
+        exclude:
+          # Ubuntu 22.04 runner does not have gcc-13
+          - os: ubuntu-22.04
+            compiler: "gcc-13"
+          # Ubuntu 24.04 runner does not have gcc-11
+          - os: ubuntu-latest
+            compiler: "gcc-11"
+          # hdf5 2.0.0+ does not support autotools
+          - hdf5-branch: "develop"
+            build_system: "autotools"
 
     runs-on: ${{matrix.os}}
     steps:
       - uses: actions/checkout@v4
         with:
           repository: HDFGroup/hdf5
           ref: ${{matrix.hdf5-branch}}
           path: ${{github.workspace}}/hdf5
 
       - name: Get REST VOL dependencies
         run: |
           sudo apt-get update
           sudo apt-get install libcurl4-openssl-dev
           sudo apt-get install libyajl-dev
 
       - name: Get Autotools Dependencies
+        if: matrix.build_system == 'autotools'
         run: |
           sudo apt update
           sudo apt install automake autoconf libtool libtool-bin
 
       - name: Get REST VOL
         uses: actions/checkout@v4
         with:
           path: ${{github.workspace}}/vol-rest
 
-      - name: Autotools Configure + Build HDF5
+      - name: CMake Configure + Build HDF5
+        if: matrix.build_system == 'cmake'
+        run: |
+          mkdir ${{github.workspace}}/hdf5/build
+          cd ./build
+          cmake \
+            -DHDF5_BUILD_HL_LIB=ON \
+            -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_SZIP_SUPPORT=OFF \
+            -DHDF5_TEST_API=ON \
+            -DHDF5_ENABLE_Z_LIB_SUPPORT=OFF \
+            -DCMAKE_BUILD_TYPE=RelWithDebInfo -DHDF5_ENABLE_THREADSAFE=OFF \
+            -DCMAKE_INSTALL_PREFIX=${{github.workspace}}/hdf5install \
+            -DCMAKE_C_COMPILER=${{matrix.compiler}} \
+            ..
+          make
+          make install
+        shell: bash
+        working-directory: ${{github.workspace}}/hdf5
+
+      - name: Autotools Configure + Build HDF5
+        if: matrix.build_system == 'autotools'
         run: |
           ./autogen.sh
           ./configure --prefix=${{github.workspace}}/hdf5install \
             --enable-hl --disable-threadsafe \
-            --enable-build-mode=production --enable-shared
+            --enable-build-mode=production --enable-shared \
+            CC=${{matrix.compiler}}
           make
           make install
         shell: bash
         working-directory: ${{github.workspace}}/hdf5
 
+      - name: CMake Configure REST VOL
+        if: matrix.build_system == 'cmake'
+        run: |
+          mkdir ./build
+          cd ./build
+          CFLAGS="-D_POSIX_C_SOURCE=200809L" cmake -G "Unix Makefiles" -DHDF5_ROOT=${{github.workspace}}/hdf5install \
+            -DCMAKE_INSTALL_PREFIX=${{github.workspace}}/vol-rest/install \
+            -DCMAKE_C_COMPILER=${{matrix.compiler}} \
+            ..
+        shell: bash
+        working-directory: ${{github.workspace}}/vol-rest
+
       - name: Autotools Configure REST VOL
+        if: matrix.build_system == 'autotools'
         run: |
           ./autogen.sh
           mkdir ${{github.workspace}}/vol-rest/install
-          CFLAGS="-D_POSIX_C_SOURCE=200809L" ./configure --prefix=${{github.workspace}}/vol-rest/install --with-hdf5=${{github.workspace}}/hdf5install
+          CFLAGS="-D_POSIX_C_SOURCE=200809L" ./configure \
+            --prefix=${{github.workspace}}/vol-rest/install \
+            --with-hdf5=${{github.workspace}}/hdf5install \
+            CC=${{matrix.compiler}}
         shell: bash
         working-directory: ${{github.workspace}}/vol-rest
 
-      - name: Build + Install REST VOL
+      - name: Build + Install REST VOL (Autotools)
+        if: matrix.build_system == 'autotools'
         run: |
           make -j
           make install
         shell: bash
         working-directory: ${{github.workspace}}/vol-rest/
 
+      - name: Build + Install REST VOL (CMake)
+        if: matrix.build_system == 'cmake'
+        run: |
+          make -j
+          make install
+        shell: bash
+        working-directory: ${{github.workspace}}/vol-rest/build/
+
       - uses: actions/checkout@v4
         with:
           repository: HDFGroup/hsds
           path: ${{github.workspace}}/hsds
 
       - name: Set up Python ${{ matrix.python-version }}
         uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
 
       - name: Install HSDS dependencies
         shell: bash
         run: |
           python -m pip install --upgrade pip
           python -m pip install pytest
           if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
 
       - name: Install HSDS package
         shell: bash
         run: |
           cd ${{github.workspace}}/hsds
           pip install -e .
 
       - name: Run HSDS unit tests
         shell: bash
         run: |
           cd ${{github.workspace}}/hsds
           pytest
 
       - name: Install valgrind
         run: |
           sudo apt update
           sudo apt install valgrind
         working-directory: ${{ github.workspace }}
 
       # Requests 2.32.0 breaks requests-unixsocket, used by HSDS for socket connections
       - name: Fix requests version
         run: |
           pip install requests==2.31.0
 
       - name: Start HSDS
-        if: ${{ matrix.endpoint != 'http://127.0.0.1:5101'}}
         run: |
           cd ${{github.workspace}}/hsds
           mkdir ${{github.workspace}}/hsdsdata &&
           mkdir ${{github.workspace}}/hsdsdata/hsdstest &&
           cp admin/config/groups.default admin/config/groups.txt &&
           cp admin/config/passwd.default admin/config/passwd.txt &&
           cp admin/config/groups.default admin/config/groups.txt &&
           cp admin/config/passwd.default admin/config/passwd.txt
           ROOT_DIR=${{github.workspace}}/hsdadata ./runall.sh --no-docker 1 &
           sleep 10
         working-directory: ${{github.workspace}}/hsds
 
       - name: Test HSDS
-        if: ${{matrix.endpoint != 'http://127.0.0.1:5101'}}
         run: |
           python tests/integ/setup_test.py
         working-directory: ${{github.workspace}}/hsds
 
-      - name: Test REST VOL
+      - name: Test REST VOL (Autotools)
+        if: matrix.build_system == 'autotools'
         working-directory: ${{github.workspace}}/vol-rest/
         run: |
           HDF5_PLUGIN_PATH=${{github.workspace}}/vol-rest/install/lib HDF5_VOL_CONNECTOR=REST ./test/test_rest_vol
 
-      - name: Show HSDS Logs on Fail
-        if: ${{failure()}}
-        working-directory: ${{github.workspace}}/hsds
-        run: |
-          cat hs.log
-
-  build_and_test_with_cmake:
-    strategy:
-      fail-fast: false
-      matrix:
-        os: [ubuntu-latest]
-        python-version: ["3.10"]
-        hdf5-branch: ["hdf5_1_14", "develop"]
-
-    runs-on: ${{matrix.os}}
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          repository: HDFGroup/hdf5
-          ref: ${{matrix.hdf5-branch}}
-          path: ${{github.workspace}}/hdf5
-
-      - name: Get REST VOL dependencies
-        run: |
-          sudo apt-get update
-          sudo apt-get install libcurl4-openssl-dev
-          sudo apt-get install libyajl-dev
-
-      - name: Get REST VOL
-        uses: actions/checkout@v4
-        with:
-          path: ${{github.workspace}}/vol-rest
-
-      - name: CMake Configure + Build HDF5
-        run: |
-          mkdir ${{github.workspace}}/hdf5/build
-          cd ./build
-          cmake \
-            -DHDF5_BUILD_HL_LIB=ON \
-            -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_SZIP_SUPPORT=OFF \
-            -DHDF5_TEST_API=ON \
-            -DHDF5_ENABLE_Z_LIB_SUPPORT=OFF \
-            -DCMAKE_BUILD_TYPE=RelWithDebInfo -DHDF5_ENABLE_THREADSAFE=OFF \
-            -DCMAKE_INSTALL_PREFIX=${{github.workspace}}/hdf5install \
-            ..
-          make
-          make install
-        shell: bash
-        working-directory: ${{github.workspace}}/hdf5
-
-      - name: CMake Configure REST VOL
-        run: |
-          mkdir ./build
-          cd ./build
-          CFLAGS="-D_POSIX_C_SOURCE=200809L" cmake -G "Unix Makefiles" -DHDF5_ROOT=${{github.workspace}}/hdf5install \
-            -DCMAKE_INSTALL_PREFIX=${{github.workspace}}/vol-rest/install \
-            ..
-        shell: bash
-        working-directory: ${{github.workspace}}/vol-rest
-
-      - name: Build + Install REST VOL
-        run: |
-          make -j
-          make install
-        shell: bash
-        working-directory: ${{github.workspace}}/vol-rest/build
-
-      - uses: actions/checkout@v4
-        with:
-          repository: HDFGroup/hsds
-          path: ${{github.workspace}}/hsds
-
-      - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v5
-        with:
-          python-version: ${{ matrix.python-version }}
-
-      - name: Install HSDS dependencies
-        shell: bash
-        run: |
-          python -m pip install --upgrade pip
-          python -m pip install pytest
-          if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
-
-      - name: Install HSDS package
-        shell: bash
-        run: |
-          cd ${{github.workspace}}/hsds
-          pip install -e .
-
-      - name: Run HSDS unit tests
-        shell: bash
-        run: |
-          cd ${{github.workspace}}/hsds
-          pytest
-
-      - name: Install valgrind
-        run: |
-          sudo apt update
-          sudo apt install valgrind
-        working-directory: ${{ github.workspace }}
-
-      # Requests 2.32.0 breaks requests-unixsocket, used by HSDS for socket connections
-      - name: Fix requests version
-        run: |
-          pip install requests==2.31.0
-
-      - name: Start HSDS
-        if: ${{ matrix.endpoint != 'http://127.0.0.1:5101'}}
-        run: |
-          cd ${{github.workspace}}/hsds
-          mkdir ${{github.workspace}}/hsdsdata &&
-          mkdir ${{github.workspace}}/hsdsdata/hsdstest &&
-          cp admin/config/groups.default admin/config/groups.txt &&
-          cp admin/config/passwd.default admin/config/passwd.txt &&
-          cp admin/config/groups.default admin/config/groups.txt &&
-          cp admin/config/passwd.default admin/config/passwd.txt
-          ROOT_DIR=${{github.workspace}}/hsdadata ./runall.sh --no-docker 1 &
-          sleep 10
-        working-directory: ${{github.workspace}}/hsds
-
-      - name: Test HSDS
-        if: ${{matrix.endpoint != 'http://127.0.0.1:5101'}}
-        run: |
-          python tests/integ/setup_test.py
-        working-directory: ${{github.workspace}}/hsds
-
-      - name: Set HDF5 Plugin path
-        run: |
-          echo "HDF5_PLUGIN_PATH=${{github.workspace}}/vol-rest/build/bin/" >> $GITHUB_ENV
-          echo "HDF5_VOL_CONNECTOR=REST" >> $GITHUB_ENV
-
-      - name: Test REST VOL
+      - name: Test REST VOL (CMake)
+        if: matrix.build_system == 'cmmake'
         working-directory: ${{github.workspace}}/vol-rest/build/
         run: |
           valgrind --leak-check=full -s ctest -R "test_rest_vol" -VV

From 2a5f5c8fac7ba54a5109752d60184181facc31fb Mon Sep 17 00:00:00 2001
From: Matthew Larson
Date: Mon, 16 Dec 2024 15:18:17 -0600
Subject: [PATCH 5/6] Fix bad length in snprintf

---
 .github/workflows/main.yml |  2 +-
 build_vol_cmake.sh         |  2 +-
 src/rest_vol.c             |  7 +++----
 src/rest_vol_config.h.in   | 16 +++++-----------
 4 files changed, 10 insertions(+), 17 deletions(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 7dc58f0b..fdd5647b 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -33,7 +33,7 @@ jobs:
       matrix:
         os: [ubuntu-22.04, ubuntu-latest]
         build_system: ["autotools", "cmake"]
-        python-version: ["3.10", "3.11", "3.12"]
+        python-version: ["3.10"]
         hdf5-branch: ["hdf5_1_14", "develop"]
         compiler: ["gcc-11", "gcc-12", "gcc-13"]
         exclude:
diff --git a/build_vol_cmake.sh b/build_vol_cmake.sh
index 58e3cae1..13a519f1 100755
--- a/build_vol_cmake.sh
+++ b/build_vol_cmake.sh
@@ -189,7 +189,7 @@ rm -f "${BUILD_DIR}/CMakeCache.txt"
 
 cd "${BUILD_DIR}"
 
-CFLAGS="-D_POSIX_C_SOURCE=200809L -g -O0" cmake -G "-DCMAKE_C_COMPILER=gcc-12" "${CMAKE_GENERATOR}" "-DHDF5_ROOT=${HDF5_INSTALL_DIR}" -DCMAKE_INSTALL_PREFIX="${INSTALL_DIR}" "${CURL_OPT}" "${YAJL_OPT}" "${YAJL_LIB_OPT}" "${CONNECTOR_DEBUG_OPT}" "${CURL_DEBUG_OPT}" "${MEM_TRACK_OPT}" "${THREAD_SAFE_OPT}" "${SCRIPT_DIR}"
+CFLAGS="-D_POSIX_C_SOURCE=200809L" cmake -G "${CMAKE_GENERATOR}" "-DHDF5_ROOT=${HDF5_INSTALL_DIR}" -DCMAKE_INSTALL_PREFIX="${INSTALL_DIR}" "${CURL_OPT}" "${YAJL_OPT}" "${YAJL_LIB_OPT}" "${CONNECTOR_DEBUG_OPT}" "${CURL_DEBUG_OPT}" "${MEM_TRACK_OPT}" "${THREAD_SAFE_OPT}" "${SCRIPT_DIR}"
 
 echo "Build files have been generated for CMake generator '${CMAKE_GENERATOR}'"
 
diff --git a/src/rest_vol.c b/src/rest_vol.c
index 545d2136..a6377254 100644
--- a/src/rest_vol.c
+++ b/src/rest_vol.c
@@ -3164,10 +3164,9 @@ RV_convert_dataspace_shape_to_JSON(hid_t space_id, char **shape_body, char **max
                 strcat(maxdims_out_string_curr_pos++, "0");
             } /* end if */
             else {
-                if ((bytes_printed =
-                         snprintf(maxdims_out_string_curr_pos,
-                                  maxdims_out_string_new_len - (size_t)maxdims_out_string_curr_pos,
-                                  "%s%" PRIuHSIZE, i > 0 ? "," : "", maxdims[i])) < 0)
+                if ((bytes_printed = snprintf(maxdims_out_string_curr_pos,
+                                              maxdims_out_string_new_len - (size_t)buf_ptrdiff,
+                                              "%s%" PRIuHSIZE, i > 0 ? "," : "", maxdims[i])) < 0)
                     FUNC_GOTO_ERROR(H5E_DATASPACE, H5E_SYSERRSTR, FAIL, "snprintf error");
                 maxdims_out_string_curr_pos += bytes_printed;
             } /* end else */
diff --git a/src/rest_vol_config.h.in b/src/rest_vol_config.h.in
index c49f1de1..5e0997d4 100644
--- a/src/rest_vol_config.h.in
+++ b/src/rest_vol_config.h.in
@@ -30,6 +30,9 @@
 /* Define to 1 if you have the <mach/mach_time.h> header file. */
 #undef HAVE_MACH_MACH_TIME_H
 
+/* Define to 1 if you have the <memory.h> header file. */
+#undef HAVE_MEMORY_H
+
 /* Define to 1 if you have the <setjmp.h> header file. */
 #undef HAVE_SETJMP_H
 
@@ -45,9 +48,6 @@
 /* Define to 1 if you have the <stdint.h> header file. */
 #undef HAVE_STDINT_H
 
-/* Define to 1 if you have the <stdio.h> header file. */
-#undef HAVE_STDIO_H
-
 /* Define to 1 if you have the <stdlib.h> header file. */
 #undef HAVE_STDLIB_H
 
@@ -81,9 +81,6 @@
 /* Define to 1 if you have the <unistd.h> header file. */
 #undef HAVE_UNISTD_H
 
-/* Whether HDF5 version is at least 2.0.0 */
-#undef HDF5_2
-
 /* Define to the sub-directory where libtool stores uninstalled libraries. */
 #undef LT_OBJDIR
 
@@ -114,13 +111,10 @@
 /* The size of `off_t', as computed by sizeof. */
 #undef SIZEOF_OFF_T
 
-/* Define to 1 if all of the C90 standard headers exist (not just the ones
-   required in a freestanding environment). This macro is provided for
-   backward compatibility; new code need not use it. */
+/* Define to 1 if you have the ANSI C header files. */
 #undef STDC_HEADERS
 
-/* Define to 1 if you can safely include both <sys/time.h> and <time.h>. This
-   macro is obsolete. */
+/* Define to 1 if you can safely include both <sys/time.h> and <time.h>. */
 #undef TIME_WITH_SYS_TIME
 
 /* Define to have the REST VOL track memory usage. */

From cb41021a0e481a9f5b141bd444cb7a6c0b4c0c91 Mon Sep 17 00:00:00 2001
From: Matthew Larson
Date: Tue, 17 Dec 2024 16:11:57 -0600
Subject: [PATCH 6/6] Fix workflow step being skipped with CMake

---
 .github/workflows/main.yml | 38 ++++++++++++--------------------------
 1 file changed, 12 insertions(+), 26 deletions(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index fdd5647b..e2b5ff4c 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -35,14 +35,7 @@ jobs:
         build_system: ["autotools", "cmake"]
         python-version: ["3.10"]
         hdf5-branch: ["hdf5_1_14", "develop"]
-        compiler: ["gcc-11", "gcc-12", "gcc-13"]
         exclude:
-          # Ubuntu 22.04 runner does not have gcc-13
-          - os: ubuntu-22.04
-            compiler: "gcc-13"
-          # Ubuntu 24.04 runner does not have gcc-11
-          - os: ubuntu-latest
-            compiler: "gcc-11"
           # hdf5 2.0.0+ does not support autotools
           - hdf5-branch: "develop"
             build_system: "autotools"
@@ -79,12 +72,10 @@ jobs:
           cd ./build
           cmake \
             -DHDF5_BUILD_HL_LIB=ON \
-            -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_SZIP_SUPPORT=OFF \
+            -DBUILD_SHARED_LIBS=ON \
             -DHDF5_TEST_API=ON \
-            -DHDF5_ENABLE_Z_LIB_SUPPORT=OFF \
-            -DCMAKE_BUILD_TYPE=RelWithDebInfo -DHDF5_ENABLE_THREADSAFE=OFF \
+            -DCMAKE_BUILD_TYPE=Release \
             -DCMAKE_INSTALL_PREFIX=${{github.workspace}}/hdf5install \
-            -DCMAKE_C_COMPILER=${{matrix.compiler}} \
             ..
           make
           make install
@@ -97,8 +88,7 @@ jobs:
           ./autogen.sh
           ./configure --prefix=${{github.workspace}}/hdf5install \
             --enable-hl --disable-threadsafe \
-            --enable-build-mode=production --enable-shared \
-            CC=${{matrix.compiler}}
+            --enable-build-mode=production --enable-shared
           make
           make install
         shell: bash
@@ -111,7 +101,6 @@ jobs:
           cd ./build
           CFLAGS="-D_POSIX_C_SOURCE=200809L" cmake -G "Unix Makefiles" -DHDF5_ROOT=${{github.workspace}}/hdf5install \
             -DCMAKE_INSTALL_PREFIX=${{github.workspace}}/vol-rest/install \
-            -DCMAKE_C_COMPILER=${{matrix.compiler}} \
             ..
         shell: bash
         working-directory: ${{github.workspace}}/vol-rest
@@ -123,8 +112,7 @@ jobs:
           mkdir ${{github.workspace}}/vol-rest/install
           CFLAGS="-D_POSIX_C_SOURCE=200809L" ./configure \
             --prefix=${{github.workspace}}/vol-rest/install \
-            --with-hdf5=${{github.workspace}}/hdf5install \
-            CC=${{matrix.compiler}}
+            --with-hdf5=${{github.workspace}}/hdf5install
         shell: bash
         working-directory: ${{github.workspace}}/vol-rest
@@ -173,12 +161,6 @@ jobs:
           cd ${{github.workspace}}/hsds
           pytest
 
-      - name: Install valgrind
-        run: |
-          sudo apt update
-          sudo apt install valgrind
-        working-directory: ${{ github.workspace }}
-
       # Requests 2.32.0 breaks requests-unixsocket, used by HSDS for socket connections
       - name: Fix requests version
         run: |
           pip install requests==2.31.0
@@ -205,14 +187,18 @@ jobs:
       - name: Test REST VOL (Autotools)
         if: matrix.build_system == 'autotools'
         working-directory: ${{github.workspace}}/vol-rest/
+        env:
+          HDF5_PLUGIN_PATH: ${{github.workspace}}/vol-rest/install/lib
         run: |
-          HDF5_PLUGIN_PATH=${{github.workspace}}/vol-rest/install/lib HDF5_VOL_CONNECTOR=REST ./test/test_rest_vol
+          ./test/test_rest_vol
 
       - name: Test REST VOL (CMake)
-        if: matrix.build_system == 'cmmake'
+        if: matrix.build_system == 'cmake'
         working-directory: ${{github.workspace}}/vol-rest/build/
+        env:
+          HDF5_PLUGIN_PATH: ${{github.workspace}}/vol-rest/install/lib
         run: |
-          valgrind --leak-check=full -s ctest -R "test_rest_vol" -VV
+          ctest -R "test_rest_vol" -VV
 
       - name: Show HSDS Logs on Fail
         if: ${{failure()}}
         working-directory: ${{github.workspace}}/hsds
         run: |
           cat hs.log
 
 # TODO: Attribute, dataset, link, and testhdf5 tests currently fail
 #    - name: Test REST VOL with API
 #      run: |
-#        valgrind --leak-check=full -s ctest -R "vol-rest" -VV
+#        ctest -R "vol-rest" -VV
 #      working-directory: ${{github.workspace}}/hdf5/build/